instance_id
stringlengths 10
57
| base_commit
stringlengths 40
40
| created_at
stringdate 2014-04-30 14:58:36
2025-04-30 20:14:11
| environment_setup_commit
stringlengths 40
40
| hints_text
stringlengths 0
273k
| patch
stringlengths 251
7.06M
| problem_statement
stringlengths 11
52.5k
| repo
stringlengths 7
53
| test_patch
stringlengths 231
997k
| meta
dict | version
stringclasses 851
values | install_config
dict | requirements
stringlengths 93
34.2k
⌀ | environment
stringlengths 760
20.5k
⌀ | FAIL_TO_PASS
listlengths 1
9.39k
| FAIL_TO_FAIL
listlengths 0
2.69k
| PASS_TO_PASS
listlengths 0
7.87k
| PASS_TO_FAIL
listlengths 0
192
| license_name
stringclasses 55
values | __index_level_0__
int64 0
21.4k
| before_filepaths
listlengths 1
105
| after_filepaths
listlengths 1
105
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
watson-developer-cloud__python-sdk-208 | 13290dc52276d56ec04bf4eb19f21669d8ca3828 | 2017-05-19 22:05:31 | f0fda953cf81204d2bf11d61d1e792292ced0d6f | diff --git a/watson_developer_cloud/speech_to_text_v1.py b/watson_developer_cloud/speech_to_text_v1.py
index e849ad67..c2f717a6 100644
--- a/watson_developer_cloud/speech_to_text_v1.py
+++ b/watson_developer_cloud/speech_to_text_v1.py
@@ -118,7 +118,7 @@ class SpeechToTextV1(WatsonDeveloperCloudService):
headers = {'Content-Type': 'application/octet-stream'}
- return self.request(method='GET',
+ return self.request(method='POST',
url=url.format(customization_id,
corpus_name),
headers=headers,
| SpeechToTextV1.add_corpus() uses wrong http method (GET, should use POST)
Currently, using `add_corpus` will break with a `WatsonException: Unknown error` error. This is due to the method using the wrong HTTP method (GET, instead of POST). The tests don't catch this because the assertion in `test_custom_corpora` only checks the number of requests made and not the details of those requests.
I have a patch that should be ready soon. | watson-developer-cloud/python-sdk | diff --git a/test/test_speech_to_text_v1.py b/test/test_speech_to_text_v1.py
index 2334f4b5..3153dcdf 100755
--- a/test/test_speech_to_text_v1.py
+++ b/test/test_speech_to_text_v1.py
@@ -106,49 +106,48 @@ def test_custom_model():
assert len(responses.calls) == 5
[email protected]
def test_custom_corpora():
corpora_url = 'https://stream.watsonplatform.net/speech-to-text/api/v1/customizations/{0}/corpora'
get_corpora_url = '{0}/{1}'.format(corpora_url.format('customid'),'corpus')
- responses.add(responses.GET, corpora_url.format('customid'),
- body='{"get response": "yep"}', status=200,
- content_type='application/json')
+ with responses.RequestsMock(assert_all_requests_are_fired=True) as rsps:
+ rsps.add(responses.GET, corpora_url.format('customid'),
+ body='{"get response": "yep"}', status=200,
+ content_type='application/json')
- responses.add(responses.POST, get_corpora_url,
+ rsps.add(responses.POST, get_corpora_url,
body='{"get response": "yep"}',
status=200,
content_type='application/json')
- responses.add(responses.GET, get_corpora_url,
+ rsps.add(responses.GET, get_corpora_url,
body='{"get response": "yep"}',
status=200,
content_type='application/json')
- responses.add(responses.DELETE, get_corpora_url,
- body='{"get response": "yep"}',
- status=200,
- content_type='application/json')
+ rsps.add(responses.DELETE, get_corpora_url,
+ body='{"get response": "yep"}',
+ status=200,
+ content_type='application/json')
- speech_to_text = watson_developer_cloud.SpeechToTextV1(
- username="username", password="password")
+ speech_to_text = watson_developer_cloud.SpeechToTextV1(
+ username="username", password="password")
- speech_to_text.list_corpora(customization_id='customid')
+ speech_to_text.list_corpora(customization_id='customid')
- file_path = '../resources/speech_to_text/corpus-short-1.txt'
- full_path = os.path.join(os.path.dirname(__file__), file_path)
- with open(full_path) as corpus_file:
- speech_to_text.add_corpus(customization_id='customid',
- corpus_name="corpus", file_data=corpus_file)
+ file_path = '../resources/speech_to_text/corpus-short-1.txt'
+ full_path = os.path.join(os.path.dirname(__file__), file_path)
+ with open(full_path) as corpus_file:
+ speech_to_text.add_corpus(customization_id='customid',
+ corpus_name="corpus", file_data=corpus_file)
- speech_to_text.get_corpus(customization_id='customid',
- corpus_name='corpus')
+ speech_to_text.get_corpus(customization_id='customid',
+ corpus_name='corpus')
- speech_to_text.delete_corpus(customization_id='customid',
- corpus_name='corpus')
+ speech_to_text.delete_corpus(customization_id='customid',
+ corpus_name='corpus')
- assert len(responses.calls) == 4
@responses.activate
def test_custom_words():
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 1
} | 0.26 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"responses",
"python_dotenv"
],
"pre_install": null,
"python": "3.6",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
cffi==1.15.1
charset-normalizer==2.0.12
cryptography==40.0.2
idna==3.10
importlib-metadata==4.8.3
iniconfig==1.1.1
packaging==21.3
pluggy==1.0.0
py==1.11.0
pycparser==2.21
pyOpenSSL==23.2.0
pyparsing==3.1.4
pysolr==3.10.0
pytest==7.0.1
python-dotenv==0.20.0
requests==2.27.1
responses==0.17.0
six==1.17.0
tomli==1.2.3
typing_extensions==4.1.1
urllib3==1.26.20
-e git+https://github.com/watson-developer-cloud/python-sdk.git@13290dc52276d56ec04bf4eb19f21669d8ca3828#egg=watson_developer_cloud
zipp==3.6.0
| name: python-sdk
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- argparse==1.4.0
- attrs==22.2.0
- cffi==1.15.1
- charset-normalizer==2.0.12
- cryptography==40.0.2
- idna==3.10
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pycparser==2.21
- pyopenssl==23.2.0
- pyparsing==3.1.4
- pysolr==3.10.0
- pytest==7.0.1
- python-dotenv==0.20.0
- requests==2.27.1
- responses==0.17.0
- six==1.17.0
- tomli==1.2.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- zipp==3.6.0
prefix: /opt/conda/envs/python-sdk
| [
"test/test_speech_to_text_v1.py::test_custom_corpora"
]
| []
| [
"test/test_speech_to_text_v1.py::test_success",
"test/test_speech_to_text_v1.py::test_get_model",
"test/test_speech_to_text_v1.py::test_custom_model",
"test/test_speech_to_text_v1.py::test_custom_words"
]
| []
| Apache License 2.0 | 1,277 | [
"watson_developer_cloud/speech_to_text_v1.py"
]
| [
"watson_developer_cloud/speech_to_text_v1.py"
]
|
|
Azure__azure-cli-3415 | 46d5382995c2740df260704f270df0e5cac965a5 | 2017-05-20 00:07:55 | 58aac4203905792244f4bb244910354fd44425d6 | codecov-io: # [Codecov](https://codecov.io/gh/Azure/azure-cli/pull/3415?src=pr&el=h1) Report
> Merging [#3415](https://codecov.io/gh/Azure/azure-cli/pull/3415?src=pr&el=desc) into [master](https://codecov.io/gh/Azure/azure-cli/commit/46d5382995c2740df260704f270df0e5cac965a5?src=pr&el=desc) will **increase** coverage by `<.01%`.
> The diff coverage is `72.72%`.
[](https://codecov.io/gh/Azure/azure-cli/pull/3415?src=pr&el=tree)
```diff
@@ Coverage Diff @@
## master #3415 +/- ##
==========================================
+ Coverage 70.81% 70.81% +<.01%
==========================================
Files 394 394
Lines 25533 25532 -1
Branches 3889 3890 +1
==========================================
Hits 18081 18081
+ Misses 6310 6308 -2
- Partials 1142 1143 +1
```
| [Impacted Files](https://codecov.io/gh/Azure/azure-cli/pull/3415?src=pr&el=tree) | Coverage Δ | |
|---|---|---|
| [src/azure-cli-core/azure/cli/core/\_profile.py](https://codecov.io/gh/Azure/azure-cli/pull/3415?src=pr&el=tree#diff-c3JjL2F6dXJlLWNsaS1jb3JlL2F6dXJlL2NsaS9jb3JlL19wcm9maWxlLnB5) | `87.85% <72.72%> (+0.2%)` | :arrow_up: |
------
[Continue to review full report at Codecov](https://codecov.io/gh/Azure/azure-cli/pull/3415?src=pr&el=continue).
> **Legend** - [Click here to learn more](https://docs.codecov.io/docs/codecov-delta)
> `Δ = absolute <relative> (impact)`, `ø = not affected`, `? = missing data`
> Powered by [Codecov](https://codecov.io/gh/Azure/azure-cli/pull/3415?src=pr&el=footer). Last update [46d5382...287b9e0](https://codecov.io/gh/Azure/azure-cli/pull/3415?src=pr&el=lastupdated). Read the [comment docs](https://docs.codecov.io/docs/pull-request-comments).
| diff --git a/src/azure-cli-core/HISTORY.rst b/src/azure-cli-core/HISTORY.rst
index 103f25a5c..867cd36a3 100644
--- a/src/azure-cli-core/HISTORY.rst
+++ b/src/azure-cli-core/HISTORY.rst
@@ -7,6 +7,7 @@ unreleased
^^^^^^^^^^^^^^^^^^
* Command paths are no longer case sensitive.
* Certain boolean-type parameters are no longer case sensitive.
+* Support login to ADFS on prem server like Azure Stack
2.0.6 (2017-05-09)
^^^^^^^^^^^^^^^^^^
diff --git a/src/azure-cli-core/azure/cli/core/_profile.py b/src/azure-cli-core/azure/cli/core/_profile.py
index 79191e18a..43fbe0d3b 100644
--- a/src/azure-cli-core/azure/cli/core/_profile.py
+++ b/src/azure-cli-core/azure/cli/core/_profile.py
@@ -56,9 +56,14 @@ _CLIENT_ID = '04b07795-8ddb-461a-bbee-02f9e1bf7b46'
_COMMON_TENANT = 'common'
-def _authentication_context_factory(authority, cache):
+def _authentication_context_factory(tenant, cache):
import adal
- return adal.AuthenticationContext(authority, cache=cache, api_version=None)
+ authority_url = CLOUD.endpoints.active_directory
+ is_adfs = authority_url.lower().endswith('/adfs')
+ if not is_adfs:
+ authority_url = authority_url + '/' + (tenant or _COMMON_TENANT)
+ return adal.AuthenticationContext(authority_url, cache=cache, api_version=None,
+ validate_authority=(not is_adfs))
_AUTH_CTX_FACTORY = _authentication_context_factory
@@ -69,10 +74,6 @@ CLOUD = get_active_cloud()
logger.debug('Current cloud config:\n%s', str(CLOUD))
-def get_authority_url(tenant=None):
- return CLOUD.endpoints.active_directory + '/' + (tenant or _COMMON_TENANT)
-
-
def _load_tokens_from_file(file_path):
all_entries = []
if os.path.isfile(file_path):
@@ -393,7 +394,7 @@ class SubscriptionFinder(object):
self.tenants = []
def find_from_user_account(self, username, password, tenant, resource):
- context = self._create_auth_context(tenant or _COMMON_TENANT)
+ context = self._create_auth_context(tenant)
token_entry = context.acquire_token_with_username_password(
resource,
username,
@@ -408,8 +409,7 @@ class SubscriptionFinder(object):
return result
def find_through_interactive_flow(self, tenant, resource):
-
- context = self._create_auth_context(tenant or _COMMON_TENANT)
+ context = self._create_auth_context(tenant)
code = context.acquire_user_code(resource, _CLIENT_ID)
logger.warning(code['message'])
token_entry = context.acquire_token_with_device_code(resource, code, _CLIENT_ID)
@@ -430,8 +430,7 @@ class SubscriptionFinder(object):
def _create_auth_context(self, tenant, use_token_cache=True):
token_cache = self._adal_token_cache if use_token_cache else None
- authority = get_authority_url(tenant)
- return self._auth_context_factory(authority, token_cache)
+ return self._auth_context_factory(tenant, token_cache)
def _find_using_common_tenant(self, access_token, resource):
import adal
@@ -514,8 +513,7 @@ class CredsCache(object):
cred_file.write(json.dumps(all_creds))
def retrieve_token_for_user(self, username, tenant, resource):
- authority = get_authority_url(tenant)
- context = self._auth_ctx_factory(authority, cache=self.adal_token_cache)
+ context = self._auth_ctx_factory(tenant, cache=self.adal_token_cache)
token_entry = context.acquire_token(resource, username, _CLIENT_ID)
if not token_entry:
raise CLIError("Could not retrieve token from local cache, please run 'az login'.")
@@ -530,8 +528,7 @@ class CredsCache(object):
if not matched:
raise CLIError("Please run 'az account set' to select active account.")
cred = matched[0]
- authority_url = get_authority_url(cred[_SERVICE_PRINCIPAL_TENANT])
- context = self._auth_ctx_factory(authority_url, None)
+ context = self._auth_ctx_factory(cred[_SERVICE_PRINCIPAL_TENANT], None)
sp_auth = ServicePrincipalAuth(cred.get(_ACCESS_TOKEN, None) or
cred.get(_SERVICE_PRINCIPAL_CERT_FILE, None))
token_entry = sp_auth.acquire_token(context, resource, sp_id)
| login: expose ADFS flow for azure stack env | Azure/azure-cli | diff --git a/src/azure-cli-core/tests/test_profile.py b/src/azure-cli-core/tests/test_profile.py
index ea3648494..90603c558 100644
--- a/src/azure-cli-core/tests/test_profile.py
+++ b/src/azure-cli-core/tests/test_profile.py
@@ -13,7 +13,7 @@ from adal import AdalError
from azure.mgmt.resource.subscriptions.models import (SubscriptionState, Subscription,
SubscriptionPolicies, SpendingLimit)
from azure.cli.core._profile import (Profile, CredsCache, SubscriptionFinder,
- ServicePrincipalAuth, CLOUD)
+ ServicePrincipalAuth, CLOUD, _AUTH_CTX_FACTORY)
from azure.cli.core.util import CLIError
@@ -451,6 +451,38 @@ class Test_Profile(unittest.TestCase): # pylint: disable=too-many-public-method
mock_auth_context.acquire_token.assert_called_once_with(
mgmt_resource, self.user1, mock.ANY)
+ @mock.patch('adal.AuthenticationContext.acquire_token_with_username_password', autospec=True)
+ @mock.patch('adal.AuthenticationContext.acquire_token', autospec=True)
+ @mock.patch('azure.cli.core._profile.CLOUD', autospec=True)
+ def test_find_subscriptions_thru_username_password_adfs(self, mock_get_cloud, mock_acquire_token,
+ mock_acquire_token_username_password):
+ TEST_ADFS_AUTH_URL = 'https://adfs.local.azurestack.external/adfs'
+
+ def test_acquire_token(self, resource, username, password, client_id):
+ global acquire_token_invoked
+ acquire_token_invoked = True
+ if (self.authority.url == TEST_ADFS_AUTH_URL and self.authority.is_adfs_authority):
+ return Test_Profile.token_entry1
+ else:
+ raise ValueError('AuthContext was not initialized correctly for ADFS')
+
+ mock_acquire_token_username_password.side_effect = test_acquire_token
+ mock_acquire_token.return_value = self.token_entry1
+ mock_arm_client = mock.MagicMock()
+ mock_arm_client.tenants.list.return_value = [TenantStub(self.tenant_id)]
+ mock_arm_client.subscriptions.list.return_value = [self.subscription1]
+ mock_get_cloud.endpoints.active_directory = TEST_ADFS_AUTH_URL
+ finder = SubscriptionFinder(_AUTH_CTX_FACTORY,
+ None,
+ lambda _: mock_arm_client)
+ mgmt_resource = 'https://management.core.windows.net/'
+ # action
+ subs = finder.find_from_user_account(self.user1, 'bar', None, mgmt_resource)
+
+ # assert
+ self.assertEqual([self.subscription1], subs)
+ self.assertTrue(acquire_token_invoked)
+
@mock.patch('adal.AuthenticationContext', autospec=True)
@mock.patch('azure.cli.core._profile.logger', autospec=True)
def test_find_subscriptions_thru_username_password_with_account_disabled(self, mock_logger,
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 3,
"test_score": 0
},
"num_modified_files": 2
} | 0.3 | {
"env_vars": null,
"env_yml_path": null,
"install": "python scripts/dev_setup.py",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | adal==0.4.3
applicationinsights==0.10.0
argcomplete==1.8.0
astroid==1.4.9
attrs==22.2.0
autopep8==1.2.4
azure-batch==3.0.0
-e git+https://github.com/Azure/azure-cli.git@46d5382995c2740df260704f270df0e5cac965a5#egg=azure_cli&subdirectory=src/azure-cli
-e git+https://github.com/Azure/azure-cli.git@46d5382995c2740df260704f270df0e5cac965a5#egg=azure_cli_acr&subdirectory=src/command_modules/azure-cli-acr
-e git+https://github.com/Azure/azure-cli.git@46d5382995c2740df260704f270df0e5cac965a5#egg=azure_cli_acs&subdirectory=src/command_modules/azure-cli-acs
-e git+https://github.com/Azure/azure-cli.git@46d5382995c2740df260704f270df0e5cac965a5#egg=azure_cli_appservice&subdirectory=src/command_modules/azure-cli-appservice
-e git+https://github.com/Azure/azure-cli.git@46d5382995c2740df260704f270df0e5cac965a5#egg=azure_cli_batch&subdirectory=src/command_modules/azure-cli-batch
-e git+https://github.com/Azure/azure-cli.git@46d5382995c2740df260704f270df0e5cac965a5#egg=azure_cli_cdn&subdirectory=src/command_modules/azure-cli-cdn
-e git+https://github.com/Azure/azure-cli.git@46d5382995c2740df260704f270df0e5cac965a5#egg=azure_cli_cloud&subdirectory=src/command_modules/azure-cli-cloud
-e git+https://github.com/Azure/azure-cli.git@46d5382995c2740df260704f270df0e5cac965a5#egg=azure_cli_cognitiveservices&subdirectory=src/command_modules/azure-cli-cognitiveservices
-e git+https://github.com/Azure/azure-cli.git@46d5382995c2740df260704f270df0e5cac965a5#egg=azure_cli_component&subdirectory=src/command_modules/azure-cli-component
-e git+https://github.com/Azure/azure-cli.git@46d5382995c2740df260704f270df0e5cac965a5#egg=azure_cli_configure&subdirectory=src/command_modules/azure-cli-configure
-e git+https://github.com/Azure/azure-cli.git@46d5382995c2740df260704f270df0e5cac965a5#egg=azure_cli_core&subdirectory=src/azure-cli-core
-e git+https://github.com/Azure/azure-cli.git@46d5382995c2740df260704f270df0e5cac965a5#egg=azure_cli_cosmosdb&subdirectory=src/command_modules/azure-cli-cosmosdb
-e git+https://github.com/Azure/azure-cli.git@46d5382995c2740df260704f270df0e5cac965a5#egg=azure_cli_dla&subdirectory=src/command_modules/azure-cli-dla
-e git+https://github.com/Azure/azure-cli.git@46d5382995c2740df260704f270df0e5cac965a5#egg=azure_cli_dls&subdirectory=src/command_modules/azure-cli-dls
-e git+https://github.com/Azure/azure-cli.git@46d5382995c2740df260704f270df0e5cac965a5#egg=azure_cli_feedback&subdirectory=src/command_modules/azure-cli-feedback
-e git+https://github.com/Azure/azure-cli.git@46d5382995c2740df260704f270df0e5cac965a5#egg=azure_cli_find&subdirectory=src/command_modules/azure-cli-find
-e git+https://github.com/Azure/azure-cli.git@46d5382995c2740df260704f270df0e5cac965a5#egg=azure_cli_interactive&subdirectory=src/command_modules/azure-cli-interactive
-e git+https://github.com/Azure/azure-cli.git@46d5382995c2740df260704f270df0e5cac965a5#egg=azure_cli_iot&subdirectory=src/command_modules/azure-cli-iot
-e git+https://github.com/Azure/azure-cli.git@46d5382995c2740df260704f270df0e5cac965a5#egg=azure_cli_keyvault&subdirectory=src/command_modules/azure-cli-keyvault
-e git+https://github.com/Azure/azure-cli.git@46d5382995c2740df260704f270df0e5cac965a5#egg=azure_cli_lab&subdirectory=src/command_modules/azure-cli-lab
-e git+https://github.com/Azure/azure-cli.git@46d5382995c2740df260704f270df0e5cac965a5#egg=azure_cli_monitor&subdirectory=src/command_modules/azure-cli-monitor
-e git+https://github.com/Azure/azure-cli.git@46d5382995c2740df260704f270df0e5cac965a5#egg=azure_cli_network&subdirectory=src/command_modules/azure-cli-network
-e git+https://github.com/Azure/azure-cli.git@46d5382995c2740df260704f270df0e5cac965a5#egg=azure_cli_nspkg&subdirectory=src/azure-cli-nspkg
-e git+https://github.com/Azure/azure-cli.git@46d5382995c2740df260704f270df0e5cac965a5#egg=azure_cli_profile&subdirectory=src/command_modules/azure-cli-profile
-e git+https://github.com/Azure/azure-cli.git@46d5382995c2740df260704f270df0e5cac965a5#egg=azure_cli_rdbms&subdirectory=src/command_modules/azure-cli-rdbms
-e git+https://github.com/Azure/azure-cli.git@46d5382995c2740df260704f270df0e5cac965a5#egg=azure_cli_redis&subdirectory=src/command_modules/azure-cli-redis
-e git+https://github.com/Azure/azure-cli.git@46d5382995c2740df260704f270df0e5cac965a5#egg=azure_cli_resource&subdirectory=src/command_modules/azure-cli-resource
-e git+https://github.com/Azure/azure-cli.git@46d5382995c2740df260704f270df0e5cac965a5#egg=azure_cli_role&subdirectory=src/command_modules/azure-cli-role
-e git+https://github.com/Azure/azure-cli.git@46d5382995c2740df260704f270df0e5cac965a5#egg=azure_cli_sf&subdirectory=src/command_modules/azure-cli-sf
-e git+https://github.com/Azure/azure-cli.git@46d5382995c2740df260704f270df0e5cac965a5#egg=azure_cli_sql&subdirectory=src/command_modules/azure-cli-sql
-e git+https://github.com/Azure/azure-cli.git@46d5382995c2740df260704f270df0e5cac965a5#egg=azure_cli_storage&subdirectory=src/command_modules/azure-cli-storage
-e git+https://github.com/Azure/azure-cli.git@46d5382995c2740df260704f270df0e5cac965a5#egg=azure_cli_taskhelp&subdirectory=src/command_modules/azure-cli-taskhelp
-e git+https://github.com/Azure/azure-cli.git@46d5382995c2740df260704f270df0e5cac965a5#egg=azure_cli_testsdk&subdirectory=src/azure-cli-testsdk
-e git+https://github.com/Azure/azure-cli.git@46d5382995c2740df260704f270df0e5cac965a5#egg=azure_cli_utility_automation&subdirectory=scripts
-e git+https://github.com/Azure/azure-cli.git@46d5382995c2740df260704f270df0e5cac965a5#egg=azure_cli_vm&subdirectory=src/command_modules/azure-cli-vm
azure-common==1.1.28
azure-core==1.24.2
azure-datalake-store==0.0.9
azure-graphrbac==0.30.0rc6
azure-keyvault==0.3.0
azure-mgmt-authorization==0.30.0rc6
azure-mgmt-batch==4.0.0
azure-mgmt-cdn==0.30.2
azure-mgmt-cognitiveservices==1.0.0
azure-mgmt-compute==1.0.0rc1
azure-mgmt-containerregistry==0.2.1
azure-mgmt-datalake-analytics==0.1.4
azure-mgmt-datalake-nspkg==3.0.1
azure-mgmt-datalake-store==0.1.4
azure-mgmt-devtestlabs==2.0.0
azure-mgmt-dns==1.0.1
azure-mgmt-documentdb==0.1.3
azure-mgmt-iothub==0.2.2
azure-mgmt-keyvault==0.31.0
azure-mgmt-monitor==0.2.1
azure-mgmt-network==1.0.0rc3
azure-mgmt-nspkg==1.0.0
azure-mgmt-rdbms==0.1.0
azure-mgmt-redis==1.0.0
azure-mgmt-resource==1.1.0rc1
azure-mgmt-sql==0.4.0
azure-mgmt-storage==1.0.0rc1
azure-mgmt-trafficmanager==0.30.0
azure-mgmt-web==0.32.0
azure-monitor==0.3.0
azure-multiapi-storage==0.1.0
azure-nspkg==1.0.0
azure-servicefabric==5.6.130
certifi==2021.5.30
cffi==1.15.1
colorama==0.3.7
coverage==4.2
cryptography==40.0.2
flake8==3.2.1
futures==3.1.1
importlib-metadata==4.8.3
iniconfig==1.1.1
isodate==0.7.0
jeepney==0.7.1
jmespath==0.10.0
keyring==23.4.1
lazy-object-proxy==1.7.1
mccabe==0.5.3
mock==1.3.0
msrest==0.4.29
msrestazure==0.4.34
nose==1.3.7
oauthlib==3.2.2
packaging==21.3
paramiko==2.0.2
pbr==6.1.1
pep8==1.7.1
pluggy==1.0.0
prompt-toolkit==3.0.36
py==1.11.0
pyasn1==0.5.1
pycodestyle==2.2.0
pycparser==2.21
pydocumentdb==2.3.5
pyflakes==1.3.0
Pygments==2.1.3
PyJWT==2.4.0
pylint==1.5.4
pyOpenSSL==16.2.0
pyparsing==3.1.4
pytest==7.0.1
python-dateutil==2.9.0.post0
pytz==2025.2
PyYAML==3.11
requests==2.9.1
requests-oauthlib==2.0.0
scp==0.15.0
SecretStorage==3.3.3
six==1.10.0
sshtunnel==0.4.0
tabulate==0.7.7
tomli==1.2.3
typing-extensions==4.1.1
urllib3==1.16
vcrpy==1.10.3
vsts-cd-manager==1.0.2
wcwidth==0.2.13
Whoosh==2.7.4
wrapt==1.16.0
xmltodict==0.14.2
zipp==3.6.0
| name: azure-cli
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- adal==0.4.3
- applicationinsights==0.10.0
- argcomplete==1.8.0
- astroid==1.4.9
- attrs==22.2.0
- autopep8==1.2.4
- azure-batch==3.0.0
- azure-common==1.1.28
- azure-core==1.24.2
- azure-datalake-store==0.0.9
- azure-graphrbac==0.30.0rc6
- azure-keyvault==0.3.0
- azure-mgmt-authorization==0.30.0rc6
- azure-mgmt-batch==4.0.0
- azure-mgmt-cdn==0.30.2
- azure-mgmt-cognitiveservices==1.0.0
- azure-mgmt-compute==1.0.0rc1
- azure-mgmt-containerregistry==0.2.1
- azure-mgmt-datalake-analytics==0.1.4
- azure-mgmt-datalake-nspkg==3.0.1
- azure-mgmt-datalake-store==0.1.4
- azure-mgmt-devtestlabs==2.0.0
- azure-mgmt-dns==1.0.1
- azure-mgmt-documentdb==0.1.3
- azure-mgmt-iothub==0.2.2
- azure-mgmt-keyvault==0.31.0
- azure-mgmt-monitor==0.2.1
- azure-mgmt-network==1.0.0rc3
- azure-mgmt-nspkg==1.0.0
- azure-mgmt-rdbms==0.1.0
- azure-mgmt-redis==1.0.0
- azure-mgmt-resource==1.1.0rc1
- azure-mgmt-sql==0.4.0
- azure-mgmt-storage==1.0.0rc1
- azure-mgmt-trafficmanager==0.30.0
- azure-mgmt-web==0.32.0
- azure-monitor==0.3.0
- azure-multiapi-storage==0.1.0
- azure-nspkg==1.0.0
- azure-servicefabric==5.6.130
- cffi==1.15.1
- colorama==0.3.7
- coverage==4.2
- cryptography==40.0.2
- flake8==3.2.1
- futures==3.1.1
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- isodate==0.7.0
- jeepney==0.7.1
- jmespath==0.10.0
- keyring==23.4.1
- lazy-object-proxy==1.7.1
- mccabe==0.5.3
- mock==1.3.0
- msrest==0.4.29
- msrestazure==0.4.34
- nose==1.3.7
- oauthlib==3.2.2
- packaging==21.3
- paramiko==2.0.2
- pbr==6.1.1
- pep8==1.7.1
- pip==9.0.1
- pluggy==1.0.0
- prompt-toolkit==3.0.36
- py==1.11.0
- pyasn1==0.5.1
- pycodestyle==2.2.0
- pycparser==2.21
- pydocumentdb==2.3.5
- pyflakes==1.3.0
- pygments==2.1.3
- pyjwt==2.4.0
- pylint==1.5.4
- pyopenssl==16.2.0
- pyparsing==3.1.4
- pytest==7.0.1
- python-dateutil==2.9.0.post0
- pytz==2025.2
- pyyaml==3.11
- requests==2.9.1
- requests-oauthlib==2.0.0
- scp==0.15.0
- secretstorage==3.3.3
- setuptools==30.4.0
- six==1.10.0
- sshtunnel==0.4.0
- tabulate==0.7.7
- tomli==1.2.3
- typing-extensions==4.1.1
- urllib3==1.16
- vcrpy==1.10.3
- vsts-cd-manager==1.0.2
- wcwidth==0.2.13
- whoosh==2.7.4
- wrapt==1.16.0
- xmltodict==0.14.2
- zipp==3.6.0
prefix: /opt/conda/envs/azure-cli
| [
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_find_subscriptions_thru_username_password_adfs"
]
| [
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_find_subscriptions_from_service_principal_using_cert",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_service_principal_auth_client_cert"
]
| [
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_create_account_without_subscriptions",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_create_account_without_subscriptions_without_tenant",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_create_token_cache",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_credscache_add_new_sp_creds",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_credscache_add_preexisting_sp_creds",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_credscache_load_tokens_and_sp_creds_with_cert",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_credscache_load_tokens_and_sp_creds_with_secret",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_credscache_new_token_added_by_adal",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_credscache_remove_creds",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_default_active_subscription_to_non_disabled_one",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_find_subscriptions_from_particular_tenent",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_find_subscriptions_from_service_principal_id",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_find_subscriptions_interactive_from_particular_tenent",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_find_subscriptions_through_interactive_flow",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_find_subscriptions_thru_username_password",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_find_subscriptions_thru_username_password_with_account_disabled",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_get_current_account_user",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_get_expanded_subscription_info",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_get_expanded_subscription_info_for_logged_in_service_principal",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_get_login_credentials",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_get_login_credentials_for_graph_client",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_get_subscription",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_load_cached_tokens",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_logout",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_logout_all",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_normalize",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_service_principal_auth_client_secret",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_set_active_subscription",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_update_add_two_different_subscriptions",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_update_with_same_subscription_added_twice"
]
| []
| MIT License | 1,278 | [
"src/azure-cli-core/HISTORY.rst",
"src/azure-cli-core/azure/cli/core/_profile.py"
]
| [
"src/azure-cli-core/HISTORY.rst",
"src/azure-cli-core/azure/cli/core/_profile.py"
]
|
jbasko__configmanager-86 | 7046d348f0afcf11df79afa3d6ad9fb0f6757605 | 2017-05-20 10:41:10 | 7046d348f0afcf11df79afa3d6ad9fb0f6757605 | diff --git a/configmanager/__init__.py b/configmanager/__init__.py
index 246eadc..f2d2fcc 100644
--- a/configmanager/__init__.py
+++ b/configmanager/__init__.py
@@ -1,4 +1,4 @@
-__version__ = '1.2.0'
+__version__ = '1.3.0'
from .managers import Config
from .items import Item
diff --git a/configmanager/managers.py b/configmanager/managers.py
index 5a611d8..a0f6346 100644
--- a/configmanager/managers.py
+++ b/configmanager/managers.py
@@ -49,6 +49,7 @@ class Config(BaseSection):
instance = super(Config, cls).__new__(cls)
instance._cm__section = None
+ instance._cm__section_alias = None
instance._cm__configs = collections.OrderedDict()
instance._cm__configparser_adapter = None
instance._cm__json_adapter = None
@@ -68,10 +69,41 @@ class Config(BaseSection):
def __repr__(self):
return '<{cls} at {id}>'.format(cls=self.__class__.__name__, id=id(self))
- def __contains__(self, item):
- return item in self._cm__configs
+ def _resolve_config_key(self, key):
+ if isinstance(key, six.string_types):
+ return self._cm__configs[key]
+ elif isinstance(key, (tuple, list)) and len(key) > 0:
+ if len(key) == 1:
+ return self[key[0]]
+ else:
+ return self[key[0]][key[1:]]
+ else:
+ raise TypeError('Expected either a string or a tuple as key, got {!r}'.format(key))
+
+ def __contains__(self, key):
+ try:
+ _ = self._resolve_config_key(key)
+ return True
+ except KeyError:
+ return False
+
+ def __setitem__(self, key, value):
+ if isinstance(key, six.string_types):
+ name = key
+ rest = None
+ elif isinstance(key, (tuple, list)) and len(key) > 0:
+ name = key[0]
+ if len(key) == 1:
+ rest = None
+ else:
+ rest = key[1:]
+ else:
+ raise TypeError('Expected either a string or a tuple as key, got {!r}'.format(key))
+
+ if rest:
+ self[name][rest] = value
+ return
- def __setitem__(self, name, value):
if is_config_item(value):
self.cm__add_item(name, value)
elif isinstance(value, self.__class__):
@@ -85,8 +117,8 @@ class Config(BaseSection):
)
)
- def __getitem__(self, name):
- return self._cm__configs[name]
+ def __getitem__(self, key):
+ return self._resolve_config_key(key)
def __getattr__(self, name):
if name in self._cm__configs:
@@ -110,6 +142,15 @@ class Config(BaseSection):
)
)
+ def __len__(self):
+ return sum(1 for _ in self.iter_items())
+
+ def __nonzero__(self):
+ return True
+
+ def __bool__(self):
+ return True
+
def iter_items(self):
"""
Iterate over all items contained (recursively).
@@ -224,6 +265,17 @@ class Config(BaseSection):
"""
return self._cm__section
+ @property
+ def alias(self):
+ """
+ Returns alias with which this section was added to another or ``None`` if it hasn't been added
+ to any.
+
+ Returns:
+ (str)
+ """
+ return self._cm__section_alias
+
def added_to_section(self, alias, section):
"""
A hook that is called when this section is added to another.
@@ -234,6 +286,7 @@ class Config(BaseSection):
section (:class:`.Config`): section to which this section has been added
"""
self._cm__section = section
+ self._cm__section_alias = alias
@property
def configparser(self):
diff --git a/configmanager/persistence.py b/configmanager/persistence.py
index 2a284f2..9e4e360 100644
--- a/configmanager/persistence.py
+++ b/configmanager/persistence.py
@@ -126,7 +126,7 @@ class JsonAdapter(object):
else:
self._config.read_dict(json.load(source), as_defaults=as_defaults)
- def write(self, destination, with_defaults=True):
+ def write(self, destination, with_defaults=False):
if isinstance(destination, six.string_types):
with open(destination, 'w') as f:
json.dump(self._config.to_dict(with_defaults=with_defaults), f)
diff --git a/requirements.txt b/requirements.txt
index b2945bc..b7fcb9f 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -2,6 +2,7 @@ six
configparser
pytest
+pytest-random-order
coverage
tox
| len(Config)?
What would you expect -- number of sections or number of items? Probably items. | jbasko/configmanager | diff --git a/tests/test_config.py b/tests/test_config.py
index 856120e..562ab1c 100644
--- a/tests/test_config.py
+++ b/tests/test_config.py
@@ -343,7 +343,6 @@ def test_can_inspect_config_contents(mixed_app_config):
assert 'db' in config
assert 'dbe' not in config
- assert ('db',) not in config
assert 'logging' in config
@@ -379,4 +378,67 @@ def test_can_have_a_dict_as_a_config_value_if_wrapped_inside_item():
# value, not the real thing.
config.aws.value['secret_key'] = 'NEW_SECRET'
- assert config.to_dict()['aws'] == {'access_key': '123', 'secret_key': 'secret'}
\ No newline at end of file
+ assert config.to_dict()['aws'] == {'access_key': '123', 'secret_key': 'secret'}
+
+
+def test_len_of_config_returns_number_of_items_in_it():
+ assert len(Config()) == 0
+
+ assert len(Config({'enabled': True})) == 1
+
+ assert len(Config({'uploads': Config()})) == 0
+ assert len(Config({'uploads': {}})) == 0
+
+ assert len(Config({'uploads': {'enabled': False}})) == 1
+ assert len(Config({'uploads': {'enabled': False, 'threads': 1}})) == 2
+
+ assert len(Config({'uploads': {'enabled': False, 'threads': 0}, 'greeting': 'Hi'})) == 3
+
+
+def test__getitem__handles_paths_to_sections_and_items_and_so_does__contains__():
+ config = Config()
+ with pytest.raises(KeyError):
+ assert not config['uploads', 'enabled']
+ assert ('uploads',) not in config
+ assert ('uploads', 'enabled') not in config
+
+ config.uploads = Config({'enabled': True, 'db': {'user': 'root'}})
+ assert config['uploads', 'enabled'] is config.uploads.enabled
+ assert config['uploads', 'db'] is config.uploads.db
+
+ assert 'uploads' in config
+ assert ('uploads',) in config
+ assert ('uploads', 'enabled') in config
+ assert ('uploads', 'db') in config
+ assert ('uploads', 'db', 'user') in config
+
+ assert config.uploads.db.user.value == 'root'
+
+ config['uploads', 'db', 'user'].set('admin')
+ assert config.uploads.db.user.value == 'admin'
+
+
+def test_can_use__setitem__to_create_new_deep_paths():
+ config = Config()
+ config['uploads'] = Config({'enabled': True})
+
+ with pytest.raises(TypeError):
+ config['uploads', 'threads'] = 5
+
+ config['uploads', 'threads'] = Item(value=5)
+ assert config.uploads.threads.type is int
+
+ config['uploads', 'db'] = Config({'user': 'root'})
+ assert config.uploads.db
+
+
+def test_section_knows_its_alias():
+ config = Config()
+ config.uploads = Config({
+ 'enabled': True
+ })
+ assert config.uploads.alias == 'uploads'
+
+ config.uploads.db = Config({'connection': {'user': 'root'}})
+ assert config.uploads.db.alias == 'db'
+ assert config.uploads.db.connection.alias == 'connection'
diff --git a/tests/test_json.py b/tests/test_json.py
index 68c9c6a..0e7530b 100644
--- a/tests/test_json.py
+++ b/tests/test_json.py
@@ -91,3 +91,24 @@ def test_json_read_and_write(defaults_json_path, user_json_path):
c1.json.read(user_json_path)
assert c1.to_dict() == c2.to_dict() == c3.to_dict()
+
+
+def test_json_writes_with_defaults_false_by_default(user_json_path):
+ c = Config({'greeting': 'Hello'})
+ c.json.write(user_json_path)
+
+ d = Config()
+ d.json.read(user_json_path, as_defaults=True)
+ assert len(d) == 0
+
+ c.json.write(user_json_path, with_defaults=True)
+
+ d.json.read(user_json_path, as_defaults=True)
+ assert len(d) == 1
+ assert d.greeting.value == 'Hello'
+
+ c.greeting.value = 'Hey!'
+ c.json.write(user_json_path)
+
+ d.json.read(user_json_path, as_defaults=True)
+ assert d.greeting.value == 'Hey!'
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 3,
"test_score": 2
},
"num_modified_files": 4
} | 1.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
-e git+https://github.com/jbasko/configmanager.git@7046d348f0afcf11df79afa3d6ad9fb0f6757605#egg=configmanager
configparser==5.2.0
coverage==6.2
distlib==0.3.9
filelock==3.4.1
importlib-metadata==4.8.3
importlib-resources==5.4.0
iniconfig==1.1.1
packaging==21.3
platformdirs==2.4.0
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
six==1.17.0
toml==0.10.2
tomli==1.2.3
tox==3.28.0
typing_extensions==4.1.1
virtualenv==20.17.1
zipp==3.6.0
| name: configmanager
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- configparser==5.2.0
- coverage==6.2
- distlib==0.3.9
- filelock==3.4.1
- importlib-metadata==4.8.3
- importlib-resources==5.4.0
- iniconfig==1.1.1
- packaging==21.3
- platformdirs==2.4.0
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- six==1.17.0
- toml==0.10.2
- tomli==1.2.3
- tox==3.28.0
- typing-extensions==4.1.1
- virtualenv==20.17.1
- zipp==3.6.0
prefix: /opt/conda/envs/configmanager
| [
"tests/test_config.py::test_len_of_config_returns_number_of_items_in_it",
"tests/test_config.py::test__getitem__handles_paths_to_sections_and_items_and_so_does__contains__",
"tests/test_config.py::test_can_use__setitem__to_create_new_deep_paths",
"tests/test_config.py::test_section_knows_its_alias",
"tests/test_json.py::test_json_writes_with_defaults_false_by_default"
]
| []
| [
"tests/test_config.py::test_items_are_created_using_cm_create_item_method",
"tests/test_config.py::test_reset_resets_values_to_defaults",
"tests/test_config.py::test_repr_of_config",
"tests/test_config.py::test_assigning_nameless_item_directly_to_config_should_set_its_name",
"tests/test_config.py::test_assigning_item_with_name_directly_to_config_should_preserve_its_name",
"tests/test_config.py::test_item_name_and_alias_must_be_a_string",
"tests/test_config.py::test_section_name_must_be_a_string",
"tests/test_config.py::test_to_dict_should_not_include_items_with_no_usable_value",
"tests/test_config.py::test_read_dict_recursively_loads_values_from_a_dictionary",
"tests/test_config.py::test_read_dict_as_defaults_loads_default_values_from_a_dictionary",
"tests/test_config.py::test_declaration_parser_does_not_modify_config",
"tests/test_config.py::test_allows_iteration_over_all_items",
"tests/test_config.py::test_forbids_accidental_item_overwrite_via_setitem",
"tests/test_config.py::test_allows_iteration_over_sections",
"tests/test_config.py::test_attribute_read_access",
"tests/test_config.py::test_attribute_write_access",
"tests/test_config.py::test_forbids_accidental_item_overwrite_via_setattr",
"tests/test_config.py::test_to_dict",
"tests/test_config.py::test_can_inspect_config_contents",
"tests/test_config.py::test_can_have_a_dict_as_a_config_value_if_wrapped_inside_item",
"tests/test_json.py::test_json_read_and_write"
]
| []
| MIT License | 1,279 | [
"configmanager/persistence.py",
"requirements.txt",
"configmanager/__init__.py",
"configmanager/managers.py"
]
| [
"configmanager/persistence.py",
"requirements.txt",
"configmanager/__init__.py",
"configmanager/managers.py"
]
|
|
tornadoweb__tornado-2045 | 06be57c11f98b24ce0f6ee2d04f0cfe8bfdb3b4d | 2017-05-20 16:16:33 | 03f13800e854a6fc9e6efa2168e694d9599348bd | diff --git a/tornado/web.py b/tornado/web.py
index 132f42dd..d79889fa 100644
--- a/tornado/web.py
+++ b/tornado/web.py
@@ -319,7 +319,10 @@ class RequestHandler(object):
if reason is not None:
self._reason = escape.native_str(reason)
else:
- self._reason = httputil.responses.get(status_code, "Unknown")
+ try:
+ self._reason = httputil.responses[status_code]
+ except KeyError:
+ raise ValueError("unknown status code %d" % status_code)
def get_status(self):
"""Returns the status code for our response."""
@@ -1558,7 +1561,11 @@ class RequestHandler(object):
# send a response.
return
if isinstance(e, HTTPError):
- self.send_error(e.status_code, exc_info=sys.exc_info())
+ if e.status_code not in httputil.responses and not e.reason:
+ gen_log.error("Bad HTTP status code: %d", e.status_code)
+ self.send_error(500, exc_info=sys.exc_info())
+ else:
+ self.send_error(e.status_code, exc_info=sys.exc_info())
else:
self.send_error(500, exc_info=sys.exc_info())
diff --git a/tornado/websocket.py b/tornado/websocket.py
index 69437ee4..7600910c 100644
--- a/tornado/websocket.py
+++ b/tornado/websocket.py
@@ -764,10 +764,7 @@ class WebSocketProtocol13(WebSocketProtocol):
data = mask + _websocket_mask(mask, data)
frame += data
self._wire_bytes_out += len(frame)
- try:
- return self.stream.write(frame)
- except StreamClosedError:
- self._abort()
+ return self.stream.write(frame)
def write_message(self, message, binary=False):
"""Sends the given message to the client of this Web Socket."""
@@ -951,7 +948,10 @@ class WebSocketProtocol13(WebSocketProtocol):
self.close(self.handler.close_code)
elif opcode == 0x9:
# Ping
- self._write_frame(True, 0xA, data)
+ try:
+ self._write_frame(True, 0xA, data)
+ except StreamClosedError:
+ self._abort()
self._run_callback(self.handler.on_ping, data)
elif opcode == 0xA:
# Pong
@@ -972,7 +972,10 @@ class WebSocketProtocol13(WebSocketProtocol):
close_data = struct.pack('>H', code)
if reason is not None:
close_data += utf8(reason)
- self._write_frame(True, 0x8, close_data)
+ try:
+ self._write_frame(True, 0x8, close_data)
+ except StreamClosedError:
+ self._abort()
self.server_terminated = True
if self.client_terminated:
if self._waiting is not None:
| websocket `yield connection.write_message()` throws BadYieldError
At present code such as:
yield connection.write_message(data)
will throw BadYieldError if called on a closed connection.
Documentation for WebSocketHandler.write_message says it was changed in 4.3 so that it " Returns a `.Future` which can be used for flow control." In fact it can return either a Future or None. It actually returns the result from the method WebSocketProtocol13._write_frame, and that ends with:
try:
return self.stream.write(frame)
except StreamClosedError:
self._abort()
so in the case where the stream is closed it will return None. It should instead use gen.Return() to send back None as the result.:
try:
return self.stream.write(frame)
except StreamClosedError:
self._abort()
raise gen.Return()
| tornadoweb/tornado | diff --git a/tornado/test/web_test.py b/tornado/test/web_test.py
index de26c42e..d79ea52c 100644
--- a/tornado/test/web_test.py
+++ b/tornado/test/web_test.py
@@ -917,10 +917,6 @@ class ErrorResponseTest(WebTestCase):
self.assertEqual(response.code, 503)
self.assertTrue(b"503: Service Unavailable" in response.body)
- response = self.fetch("/default?status=435")
- self.assertEqual(response.code, 435)
- self.assertTrue(b"435: Unknown" in response.body)
-
def test_write_error(self):
with ExpectLog(app_log, "Uncaught exception"):
response = self.fetch("/write_error")
@@ -1493,9 +1489,9 @@ class StatusReasonTest(SimpleHandlerTestCase):
response = self.fetch("/?code=682&reason=Bar")
self.assertEqual(response.code, 682)
self.assertEqual(response.reason, "Bar")
- response = self.fetch("/?code=682")
- self.assertEqual(response.code, 682)
- self.assertEqual(response.reason, "Unknown")
+ with ExpectLog(app_log, 'Uncaught exception'):
+ response = self.fetch("/?code=682")
+ self.assertEqual(response.code, 500)
@wsgi_safe
diff --git a/tornado/test/websocket_test.py b/tornado/test/websocket_test.py
index d47a74e6..e0b5573d 100644
--- a/tornado/test/websocket_test.py
+++ b/tornado/test/websocket_test.py
@@ -7,6 +7,7 @@ import traceback
from tornado.concurrent import Future
from tornado import gen
from tornado.httpclient import HTTPError, HTTPRequest
+from tornado.iostream import StreamClosedError
from tornado.log import gen_log, app_log
from tornado.template import DictLoader
from tornado.testing import AsyncHTTPTestCase, gen_test, bind_unused_port, ExpectLog
@@ -50,7 +51,10 @@ class TestWebSocketHandler(WebSocketHandler):
class EchoHandler(TestWebSocketHandler):
def on_message(self, message):
- self.write_message(message, isinstance(message, bytes))
+ try:
+ self.write_message(message, isinstance(message, bytes))
+ except StreamClosedError:
+ pass
class ErrorInOnMessageHandler(TestWebSocketHandler):
@@ -327,6 +331,14 @@ class WebSocketTest(WebSocketBaseTestCase):
self.assertEqual(code, 1001)
self.assertEqual(reason, 'goodbye')
+ @gen_test
+ def test_write_after_close(self):
+ ws = yield self.ws_connect('/close_reason')
+ msg = yield ws.read_message()
+ self.assertIs(msg, None)
+ with self.assertRaises(StreamClosedError):
+ ws.write_message('hello')
+
@gen_test
def test_async_prepare(self):
# Previously, an async prepare method triggered a bug that would
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 3
},
"num_modified_files": 2
} | 4.5 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.6",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
certifi==2021.5.30
importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
pytest==6.2.4
toml @ file:///tmp/build/80754af9/toml_1616166611790/work
-e git+https://github.com/tornadoweb/tornado.git@06be57c11f98b24ce0f6ee2d04f0cfe8bfdb3b4d#egg=tornado
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
| name: tornado
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=21.4.0=pyhd3eb1b0_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- importlib-metadata=4.8.1=py36h06a4308_0
- importlib_metadata=4.8.1=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- more-itertools=8.12.0=pyhd3eb1b0_0
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pip=21.2.2=py36h06a4308_0
- pluggy=0.13.1=py36h06a4308_0
- py=1.11.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pytest=6.2.4=py36h06a4308_2
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- toml=0.10.2=pyhd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zipp=3.6.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
prefix: /opt/conda/envs/tornado
| [
"tornado/test/web_test.py::StatusReasonTest::test_status",
"tornado/test/websocket_test.py::WebSocketTest::test_write_after_close"
]
| []
| [
"tornado/test/web_test.py::SecureCookieV1Test::test_arbitrary_bytes",
"tornado/test/web_test.py::SecureCookieV1Test::test_cookie_tampering_future_timestamp",
"tornado/test/web_test.py::SecureCookieV1Test::test_round_trip",
"tornado/test/web_test.py::SecureCookieV2Test::test_key_version_increment_version",
"tornado/test/web_test.py::SecureCookieV2Test::test_key_version_invalidate_version",
"tornado/test/web_test.py::SecureCookieV2Test::test_key_version_roundtrip",
"tornado/test/web_test.py::SecureCookieV2Test::test_key_version_roundtrip_differing_version",
"tornado/test/web_test.py::SecureCookieV2Test::test_round_trip",
"tornado/test/web_test.py::CookieTest::test_cookie_special_char",
"tornado/test/web_test.py::CookieTest::test_get_cookie",
"tornado/test/web_test.py::CookieTest::test_set_cookie",
"tornado/test/web_test.py::CookieTest::test_set_cookie_domain",
"tornado/test/web_test.py::CookieTest::test_set_cookie_expires_days",
"tornado/test/web_test.py::CookieTest::test_set_cookie_false_flags",
"tornado/test/web_test.py::CookieTest::test_set_cookie_max_age",
"tornado/test/web_test.py::CookieTest::test_set_cookie_overwrite",
"tornado/test/web_test.py::AuthRedirectTest::test_absolute_auth_redirect",
"tornado/test/web_test.py::AuthRedirectTest::test_relative_auth_redirect",
"tornado/test/web_test.py::ConnectionCloseTest::test_connection_close",
"tornado/test/web_test.py::RequestEncodingTest::test_error",
"tornado/test/web_test.py::RequestEncodingTest::test_group_encoding",
"tornado/test/web_test.py::RequestEncodingTest::test_group_question_mark",
"tornado/test/web_test.py::RequestEncodingTest::test_slashes",
"tornado/test/web_test.py::WSGISafeWebTest::test_decode_argument",
"tornado/test/web_test.py::WSGISafeWebTest::test_decode_argument_invalid_unicode",
"tornado/test/web_test.py::WSGISafeWebTest::test_decode_argument_plus",
"tornado/test/web_test.py::WSGISafeWebTest::test_get_argument",
"tornado/test/web_test.py::WSGISafeWebTest::test_get_body_arguments",
"tornado/test/web_test.py::WSGISafeWebTest::test_get_query_arguments",
"tornado/test/web_test.py::WSGISafeWebTest::test_header_injection",
"tornado/test/web_test.py::WSGISafeWebTest::test_multi_header",
"tornado/test/web_test.py::WSGISafeWebTest::test_no_gzip",
"tornado/test/web_test.py::WSGISafeWebTest::test_optional_path",
"tornado/test/web_test.py::WSGISafeWebTest::test_redirect",
"tornado/test/web_test.py::WSGISafeWebTest::test_reverse_url",
"tornado/test/web_test.py::WSGISafeWebTest::test_types",
"tornado/test/web_test.py::WSGISafeWebTest::test_uimodule_resources",
"tornado/test/web_test.py::WSGISafeWebTest::test_uimodule_unescaped",
"tornado/test/web_test.py::WSGISafeWebTest::test_web_redirect",
"tornado/test/web_test.py::WSGISafeWebTest::test_web_redirect_double_slash",
"tornado/test/web_test.py::NonWSGIWebTests::test_empty_flush",
"tornado/test/web_test.py::NonWSGIWebTests::test_flow_control",
"tornado/test/web_test.py::ErrorResponseTest::test_default",
"tornado/test/web_test.py::ErrorResponseTest::test_failed_write_error",
"tornado/test/web_test.py::ErrorResponseTest::test_write_error",
"tornado/test/web_test.py::StaticFileTest::test_absolute_static_url",
"tornado/test/web_test.py::StaticFileTest::test_absolute_version_exclusion",
"tornado/test/web_test.py::StaticFileTest::test_include_host_override",
"tornado/test/web_test.py::StaticFileTest::test_path_traversal_protection",
"tornado/test/web_test.py::StaticFileTest::test_relative_version_exclusion",
"tornado/test/web_test.py::StaticFileTest::test_root_static_path",
"tornado/test/web_test.py::StaticFileTest::test_static_304_if_modified_since",
"tornado/test/web_test.py::StaticFileTest::test_static_304_if_none_match",
"tornado/test/web_test.py::StaticFileTest::test_static_404",
"tornado/test/web_test.py::StaticFileTest::test_static_compressed_files",
"tornado/test/web_test.py::StaticFileTest::test_static_etag",
"tornado/test/web_test.py::StaticFileTest::test_static_files",
"tornado/test/web_test.py::StaticFileTest::test_static_head",
"tornado/test/web_test.py::StaticFileTest::test_static_head_range",
"tornado/test/web_test.py::StaticFileTest::test_static_if_modified_since_pre_epoch",
"tornado/test/web_test.py::StaticFileTest::test_static_if_modified_since_time_zone",
"tornado/test/web_test.py::StaticFileTest::test_static_invalid_range",
"tornado/test/web_test.py::StaticFileTest::test_static_range_if_none_match",
"tornado/test/web_test.py::StaticFileTest::test_static_unsatisfiable_range_invalid_start",
"tornado/test/web_test.py::StaticFileTest::test_static_unsatisfiable_range_zero_suffix",
"tornado/test/web_test.py::StaticFileTest::test_static_url",
"tornado/test/web_test.py::StaticFileTest::test_static_with_range",
"tornado/test/web_test.py::StaticFileTest::test_static_with_range_end_edge",
"tornado/test/web_test.py::StaticFileTest::test_static_with_range_full_file",
"tornado/test/web_test.py::StaticFileTest::test_static_with_range_full_past_end",
"tornado/test/web_test.py::StaticFileTest::test_static_with_range_neg_end",
"tornado/test/web_test.py::StaticFileTest::test_static_with_range_partial_past_end",
"tornado/test/web_test.py::StaticDefaultFilenameTest::test_static_default_filename",
"tornado/test/web_test.py::StaticDefaultFilenameTest::test_static_default_redirect",
"tornado/test/web_test.py::StaticFileWithPathTest::test_serve",
"tornado/test/web_test.py::CustomStaticFileTest::test_serve",
"tornado/test/web_test.py::CustomStaticFileTest::test_static_url",
"tornado/test/web_test.py::HostMatchingTest::test_host_matching",
"tornado/test/web_test.py::DefaultHostMatchingTest::test_default_host_matching",
"tornado/test/web_test.py::NamedURLSpecGroupsTest::test_named_urlspec_groups",
"tornado/test/web_test.py::ClearHeaderTest::test_clear_header",
"tornado/test/web_test.py::Header204Test::test_204_headers",
"tornado/test/web_test.py::Header304Test::test_304_headers",
"tornado/test/web_test.py::DateHeaderTest::test_date_header",
"tornado/test/web_test.py::RaiseWithReasonTest::test_httperror_str",
"tornado/test/web_test.py::RaiseWithReasonTest::test_httperror_str_from_httputil",
"tornado/test/web_test.py::RaiseWithReasonTest::test_raise_with_reason",
"tornado/test/web_test.py::ErrorHandlerXSRFTest::test_404_xsrf",
"tornado/test/web_test.py::ErrorHandlerXSRFTest::test_error_xsrf",
"tornado/test/web_test.py::GzipTestCase::test_gzip",
"tornado/test/web_test.py::GzipTestCase::test_gzip_not_requested",
"tornado/test/web_test.py::GzipTestCase::test_gzip_static",
"tornado/test/web_test.py::GzipTestCase::test_vary_already_present",
"tornado/test/web_test.py::GzipTestCase::test_vary_already_present_multiple",
"tornado/test/web_test.py::PathArgsInPrepareTest::test_kw",
"tornado/test/web_test.py::PathArgsInPrepareTest::test_pos",
"tornado/test/web_test.py::ClearAllCookiesTest::test_clear_all_cookies",
"tornado/test/web_test.py::ExceptionHandlerTest::test_http_error",
"tornado/test/web_test.py::ExceptionHandlerTest::test_known_error",
"tornado/test/web_test.py::ExceptionHandlerTest::test_unknown_error",
"tornado/test/web_test.py::BuggyLoggingTest::test_buggy_log_exception",
"tornado/test/web_test.py::UIMethodUIModuleTest::test_ui_method",
"tornado/test/web_test.py::GetArgumentErrorTest::test_catch_error",
"tornado/test/web_test.py::MultipleExceptionTest::test_multi_exception",
"tornado/test/web_test.py::SetLazyPropertiesTest::test_set_properties",
"tornado/test/web_test.py::GetCurrentUserTest::test_get_current_user_from_ui_module_is_lazy",
"tornado/test/web_test.py::GetCurrentUserTest::test_get_current_user_from_ui_module_works",
"tornado/test/web_test.py::GetCurrentUserTest::test_get_current_user_works",
"tornado/test/web_test.py::UnimplementedHTTPMethodsTest::test_unimplemented_standard_methods",
"tornado/test/web_test.py::UnimplementedNonStandardMethodsTest::test_unimplemented_other",
"tornado/test/web_test.py::UnimplementedNonStandardMethodsTest::test_unimplemented_patch",
"tornado/test/web_test.py::AllHTTPMethodsTest::test_standard_methods",
"tornado/test/web_test.py::PatchMethodTest::test_other",
"tornado/test/web_test.py::PatchMethodTest::test_patch",
"tornado/test/web_test.py::FinishInPrepareTest::test_finish_in_prepare",
"tornado/test/web_test.py::Default404Test::test_404",
"tornado/test/web_test.py::Custom404Test::test_404",
"tornado/test/web_test.py::DefaultHandlerArgumentsTest::test_403",
"tornado/test/web_test.py::HandlerByNameTest::test_handler_by_name",
"tornado/test/web_test.py::StreamingRequestBodyTest::test_close_during_upload",
"tornado/test/web_test.py::StreamingRequestBodyTest::test_early_return",
"tornado/test/web_test.py::StreamingRequestBodyTest::test_early_return_with_data",
"tornado/test/web_test.py::StreamingRequestBodyTest::test_streaming_body",
"tornado/test/web_test.py::DecoratedStreamingRequestFlowControlTest::test_flow_control_chunked_body",
"tornado/test/web_test.py::DecoratedStreamingRequestFlowControlTest::test_flow_control_compressed_body",
"tornado/test/web_test.py::DecoratedStreamingRequestFlowControlTest::test_flow_control_fixed_body",
"tornado/test/web_test.py::NativeStreamingRequestFlowControlTest::test_flow_control_chunked_body",
"tornado/test/web_test.py::NativeStreamingRequestFlowControlTest::test_flow_control_compressed_body",
"tornado/test/web_test.py::NativeStreamingRequestFlowControlTest::test_flow_control_fixed_body",
"tornado/test/web_test.py::IncorrectContentLengthTest::test_content_length_too_high",
"tornado/test/web_test.py::IncorrectContentLengthTest::test_content_length_too_low",
"tornado/test/web_test.py::ClientCloseTest::test_client_close",
"tornado/test/web_test.py::SignedValueTest::test_expired",
"tornado/test/web_test.py::SignedValueTest::test_key_version_retrieval",
"tornado/test/web_test.py::SignedValueTest::test_key_versioning_invalid_key",
"tornado/test/web_test.py::SignedValueTest::test_key_versioning_read_write_default_key",
"tornado/test/web_test.py::SignedValueTest::test_key_versioning_read_write_non_default_key",
"tornado/test/web_test.py::SignedValueTest::test_known_values",
"tornado/test/web_test.py::SignedValueTest::test_name_swap",
"tornado/test/web_test.py::SignedValueTest::test_non_ascii",
"tornado/test/web_test.py::SignedValueTest::test_payload_tampering",
"tornado/test/web_test.py::SignedValueTest::test_signature_tampering",
"tornado/test/web_test.py::XSRFTest::test_cross_user",
"tornado/test/web_test.py::XSRFTest::test_distinct_tokens",
"tornado/test/web_test.py::XSRFTest::test_refresh_token",
"tornado/test/web_test.py::XSRFTest::test_versioning",
"tornado/test/web_test.py::XSRFTest::test_xsrf_fail_argument_invalid_format",
"tornado/test/web_test.py::XSRFTest::test_xsrf_fail_body_no_cookie",
"tornado/test/web_test.py::XSRFTest::test_xsrf_fail_cookie_invalid_format",
"tornado/test/web_test.py::XSRFTest::test_xsrf_fail_cookie_no_body",
"tornado/test/web_test.py::XSRFTest::test_xsrf_fail_no_token",
"tornado/test/web_test.py::XSRFTest::test_xsrf_success_header",
"tornado/test/web_test.py::XSRFTest::test_xsrf_success_non_hex_token",
"tornado/test/web_test.py::XSRFTest::test_xsrf_success_post_body",
"tornado/test/web_test.py::XSRFTest::test_xsrf_success_query_string",
"tornado/test/web_test.py::XSRFTest::test_xsrf_success_short_token",
"tornado/test/web_test.py::XSRFCookieKwargsTest::test_xsrf_httponly",
"tornado/test/web_test.py::FinishExceptionTest::test_finish_exception",
"tornado/test/web_test.py::DecoratorTest::test_addslash",
"tornado/test/web_test.py::DecoratorTest::test_removeslash",
"tornado/test/web_test.py::CacheTest::test_multiple_strong_etag_match",
"tornado/test/web_test.py::CacheTest::test_multiple_strong_etag_not_match",
"tornado/test/web_test.py::CacheTest::test_multiple_weak_etag_match",
"tornado/test/web_test.py::CacheTest::test_multiple_weak_etag_not_match",
"tornado/test/web_test.py::CacheTest::test_strong_etag_match",
"tornado/test/web_test.py::CacheTest::test_strong_etag_not_match",
"tornado/test/web_test.py::CacheTest::test_weak_etag_match",
"tornado/test/web_test.py::CacheTest::test_weak_etag_not_match",
"tornado/test/web_test.py::CacheTest::test_wildcard_etag",
"tornado/test/web_test.py::RequestSummaryTest::test_missing_remote_ip",
"tornado/test/web_test.py::HTTPErrorTest::test_copy",
"tornado/test/web_test.py::ApplicationTest::test_listen",
"tornado/test/web_test.py::URLSpecReverseTest::test_non_reversible",
"tornado/test/web_test.py::URLSpecReverseTest::test_reverse",
"tornado/test/web_test.py::URLSpecReverseTest::test_reverse_arguments",
"tornado/test/web_test.py::RedirectHandlerTest::test_basic_redirect",
"tornado/test/web_test.py::RedirectHandlerTest::test_redirect_pattern",
"tornado/test/websocket_test.py::WebSocketTest::test_async_prepare",
"tornado/test/websocket_test.py::WebSocketTest::test_bad_websocket_version",
"tornado/test/websocket_test.py::WebSocketTest::test_binary_message",
"tornado/test/websocket_test.py::WebSocketTest::test_check_origin_invalid",
"tornado/test/websocket_test.py::WebSocketTest::test_check_origin_invalid_partial_url",
"tornado/test/websocket_test.py::WebSocketTest::test_check_origin_invalid_subdomains",
"tornado/test/websocket_test.py::WebSocketTest::test_check_origin_valid_no_path",
"tornado/test/websocket_test.py::WebSocketTest::test_check_origin_valid_with_path",
"tornado/test/websocket_test.py::WebSocketTest::test_client_close_reason",
"tornado/test/websocket_test.py::WebSocketTest::test_coroutine",
"tornado/test/websocket_test.py::WebSocketTest::test_error_in_on_message",
"tornado/test/websocket_test.py::WebSocketTest::test_http_request",
"tornado/test/websocket_test.py::WebSocketTest::test_path_args",
"tornado/test/websocket_test.py::WebSocketTest::test_render_message",
"tornado/test/websocket_test.py::WebSocketTest::test_server_close_reason",
"tornado/test/websocket_test.py::WebSocketTest::test_unicode_message",
"tornado/test/websocket_test.py::WebSocketTest::test_websocket_callbacks",
"tornado/test/websocket_test.py::WebSocketTest::test_websocket_close_buffered_data",
"tornado/test/websocket_test.py::WebSocketTest::test_websocket_gen",
"tornado/test/websocket_test.py::WebSocketTest::test_websocket_header_echo",
"tornado/test/websocket_test.py::WebSocketTest::test_websocket_headers",
"tornado/test/websocket_test.py::WebSocketTest::test_websocket_http_fail",
"tornado/test/websocket_test.py::WebSocketTest::test_websocket_http_success",
"tornado/test/websocket_test.py::WebSocketTest::test_websocket_network_fail",
"tornado/test/websocket_test.py::WebSocketNativeCoroutineTest::test_native_coroutine",
"tornado/test/websocket_test.py::NoCompressionTest::test_message_sizes",
"tornado/test/websocket_test.py::ServerOnlyCompressionTest::test_message_sizes",
"tornado/test/websocket_test.py::ClientOnlyCompressionTest::test_message_sizes",
"tornado/test/websocket_test.py::DefaultCompressionTest::test_message_sizes",
"tornado/test/websocket_test.py::PythonMaskFunctionTest::test_mask",
"tornado/test/websocket_test.py::CythonMaskFunctionTest::test_mask",
"tornado/test/websocket_test.py::ServerPeriodicPingTest::test_server_ping",
"tornado/test/websocket_test.py::ClientPeriodicPingTest::test_client_ping",
"tornado/test/websocket_test.py::MaxMessageSizeTest::test_large_message"
]
| []
| Apache License 2.0 | 1,280 | [
"tornado/web.py",
"tornado/websocket.py"
]
| [
"tornado/web.py",
"tornado/websocket.py"
]
|
|
tornadoweb__tornado-2047 | 06be57c11f98b24ce0f6ee2d04f0cfe8bfdb3b4d | 2017-05-20 17:01:53 | 03f13800e854a6fc9e6efa2168e694d9599348bd | diff --git a/tornado/ioloop.py b/tornado/ioloop.py
index ad35787f..a1288e77 100644
--- a/tornado/ioloop.py
+++ b/tornado/ioloop.py
@@ -121,6 +121,16 @@ class IOLoop(Configurable):
current instance. If ``make_current=False``, the new `IOLoop` will
not try to become current.
+ In general, an `IOLoop` cannot survive a fork or be shared across
+ processes in any way. When multiple processes are being used, each
+ process should create its own `IOLoop`, which also implies that
+ any objects which depend on the `IOLoop` (such as
+ `.AsyncHTTPClient`) must also be created in the child processes.
+ As a guideline, anything that starts processes (including the
+ `tornado.process` and `multiprocessing` modules) should do so as
+ early as possible, ideally the first thing the application does
+ after loading its configuration in ``main()``.
+
.. versionchanged:: 4.2
Added the ``make_current`` keyword argument to the `IOLoop`
constructor.
@@ -701,6 +711,7 @@ class PollIOLoop(IOLoop):
self._stopped = False
self._closing = False
self._thread_ident = None
+ self._pid = os.getpid()
self._blocking_signal_threshold = None
self._timeout_counter = itertools.count()
@@ -753,6 +764,8 @@ class PollIOLoop(IOLoop):
def start(self):
if self._running:
raise RuntimeError("IOLoop is already running")
+ if os.getpid() != self._pid:
+ raise RuntimeError("Cannot share PollIOLoops across processes")
self._setup_logging()
if self._stopped:
self._stopped = False
diff --git a/tornado/web.py b/tornado/web.py
index 132f42dd..d79889fa 100644
--- a/tornado/web.py
+++ b/tornado/web.py
@@ -319,7 +319,10 @@ class RequestHandler(object):
if reason is not None:
self._reason = escape.native_str(reason)
else:
- self._reason = httputil.responses.get(status_code, "Unknown")
+ try:
+ self._reason = httputil.responses[status_code]
+ except KeyError:
+ raise ValueError("unknown status code %d" % status_code)
def get_status(self):
"""Returns the status code for our response."""
@@ -1558,7 +1561,11 @@ class RequestHandler(object):
# send a response.
return
if isinstance(e, HTTPError):
- self.send_error(e.status_code, exc_info=sys.exc_info())
+ if e.status_code not in httputil.responses and not e.reason:
+ gen_log.error("Bad HTTP status code: %d", e.status_code)
+ self.send_error(500, exc_info=sys.exc_info())
+ else:
+ self.send_error(e.status_code, exc_info=sys.exc_info())
else:
self.send_error(500, exc_info=sys.exc_info())
| Check _events and _handlers before start()
```
Traceback (most recent call last):
File "/usr/local/lib/python2.7/site-packages/tornado/ioloop.py", line 887, in start
fd_obj, handler_func = self._handlers[fd]
KeyError: 16
```
https://github.com/tornadoweb/tornado/blob/master/tornado/ioloop.py#L883
I ran into this issue when testing using multiple processes and threads. We sandbox our test processes using multiprocessing and were getting this cryptic error.
I found a blog post @toumorokoshi wrote going over his own experiences with this issue: http://y.tsutsumi.io/keyerror-in-self_handlers-a-journey-deep-into-tornados-internals.html
At the very least it would be nice to have some documentation suggesting that you run IOLoop.clear_instance() and IOLoop.clear_current() after a fork() | tornadoweb/tornado | diff --git a/tornado/test/web_test.py b/tornado/test/web_test.py
index de26c42e..d79ea52c 100644
--- a/tornado/test/web_test.py
+++ b/tornado/test/web_test.py
@@ -917,10 +917,6 @@ class ErrorResponseTest(WebTestCase):
self.assertEqual(response.code, 503)
self.assertTrue(b"503: Service Unavailable" in response.body)
- response = self.fetch("/default?status=435")
- self.assertEqual(response.code, 435)
- self.assertTrue(b"435: Unknown" in response.body)
-
def test_write_error(self):
with ExpectLog(app_log, "Uncaught exception"):
response = self.fetch("/write_error")
@@ -1493,9 +1489,9 @@ class StatusReasonTest(SimpleHandlerTestCase):
response = self.fetch("/?code=682&reason=Bar")
self.assertEqual(response.code, 682)
self.assertEqual(response.reason, "Bar")
- response = self.fetch("/?code=682")
- self.assertEqual(response.code, 682)
- self.assertEqual(response.reason, "Unknown")
+ with ExpectLog(app_log, 'Uncaught exception'):
+ response = self.fetch("/?code=682")
+ self.assertEqual(response.code, 500)
@wsgi_safe
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 2,
"test_score": 3
},
"num_modified_files": 2
} | 4.5 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest pytest-cov pytest-xdist pytest-mock pytest-asyncio"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc python3-dev"
],
"python": "3.6",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
coverage==6.2
execnet==1.9.0
importlib-metadata==4.8.3
iniconfig==1.1.1
packaging==21.3
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
pytest-asyncio==0.16.0
pytest-cov==4.0.0
pytest-mock==3.6.1
pytest-xdist==3.0.2
tomli==1.2.3
-e git+https://github.com/tornadoweb/tornado.git@06be57c11f98b24ce0f6ee2d04f0cfe8bfdb3b4d#egg=tornado
typing_extensions==4.1.1
zipp==3.6.0
| name: tornado
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- coverage==6.2
- execnet==1.9.0
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-asyncio==0.16.0
- pytest-cov==4.0.0
- pytest-mock==3.6.1
- pytest-xdist==3.0.2
- tomli==1.2.3
- typing-extensions==4.1.1
- zipp==3.6.0
prefix: /opt/conda/envs/tornado
| [
"tornado/test/web_test.py::StatusReasonTest::test_status"
]
| []
| [
"tornado/test/web_test.py::SecureCookieV1Test::test_arbitrary_bytes",
"tornado/test/web_test.py::SecureCookieV1Test::test_cookie_tampering_future_timestamp",
"tornado/test/web_test.py::SecureCookieV1Test::test_round_trip",
"tornado/test/web_test.py::SecureCookieV2Test::test_key_version_increment_version",
"tornado/test/web_test.py::SecureCookieV2Test::test_key_version_invalidate_version",
"tornado/test/web_test.py::SecureCookieV2Test::test_key_version_roundtrip",
"tornado/test/web_test.py::SecureCookieV2Test::test_key_version_roundtrip_differing_version",
"tornado/test/web_test.py::SecureCookieV2Test::test_round_trip",
"tornado/test/web_test.py::CookieTest::test_cookie_special_char",
"tornado/test/web_test.py::CookieTest::test_get_cookie",
"tornado/test/web_test.py::CookieTest::test_set_cookie",
"tornado/test/web_test.py::CookieTest::test_set_cookie_domain",
"tornado/test/web_test.py::CookieTest::test_set_cookie_expires_days",
"tornado/test/web_test.py::CookieTest::test_set_cookie_false_flags",
"tornado/test/web_test.py::CookieTest::test_set_cookie_max_age",
"tornado/test/web_test.py::CookieTest::test_set_cookie_overwrite",
"tornado/test/web_test.py::AuthRedirectTest::test_absolute_auth_redirect",
"tornado/test/web_test.py::AuthRedirectTest::test_relative_auth_redirect",
"tornado/test/web_test.py::ConnectionCloseTest::test_connection_close",
"tornado/test/web_test.py::RequestEncodingTest::test_error",
"tornado/test/web_test.py::RequestEncodingTest::test_group_encoding",
"tornado/test/web_test.py::RequestEncodingTest::test_group_question_mark",
"tornado/test/web_test.py::RequestEncodingTest::test_slashes",
"tornado/test/web_test.py::WSGISafeWebTest::test_decode_argument",
"tornado/test/web_test.py::WSGISafeWebTest::test_decode_argument_invalid_unicode",
"tornado/test/web_test.py::WSGISafeWebTest::test_decode_argument_plus",
"tornado/test/web_test.py::WSGISafeWebTest::test_get_argument",
"tornado/test/web_test.py::WSGISafeWebTest::test_get_body_arguments",
"tornado/test/web_test.py::WSGISafeWebTest::test_get_query_arguments",
"tornado/test/web_test.py::WSGISafeWebTest::test_header_injection",
"tornado/test/web_test.py::WSGISafeWebTest::test_multi_header",
"tornado/test/web_test.py::WSGISafeWebTest::test_no_gzip",
"tornado/test/web_test.py::WSGISafeWebTest::test_optional_path",
"tornado/test/web_test.py::WSGISafeWebTest::test_redirect",
"tornado/test/web_test.py::WSGISafeWebTest::test_reverse_url",
"tornado/test/web_test.py::WSGISafeWebTest::test_types",
"tornado/test/web_test.py::WSGISafeWebTest::test_uimodule_resources",
"tornado/test/web_test.py::WSGISafeWebTest::test_uimodule_unescaped",
"tornado/test/web_test.py::WSGISafeWebTest::test_web_redirect",
"tornado/test/web_test.py::WSGISafeWebTest::test_web_redirect_double_slash",
"tornado/test/web_test.py::NonWSGIWebTests::test_empty_flush",
"tornado/test/web_test.py::NonWSGIWebTests::test_flow_control",
"tornado/test/web_test.py::ErrorResponseTest::test_default",
"tornado/test/web_test.py::ErrorResponseTest::test_failed_write_error",
"tornado/test/web_test.py::ErrorResponseTest::test_write_error",
"tornado/test/web_test.py::StaticFileTest::test_absolute_static_url",
"tornado/test/web_test.py::StaticFileTest::test_absolute_version_exclusion",
"tornado/test/web_test.py::StaticFileTest::test_include_host_override",
"tornado/test/web_test.py::StaticFileTest::test_path_traversal_protection",
"tornado/test/web_test.py::StaticFileTest::test_relative_version_exclusion",
"tornado/test/web_test.py::StaticFileTest::test_root_static_path",
"tornado/test/web_test.py::StaticFileTest::test_static_304_if_modified_since",
"tornado/test/web_test.py::StaticFileTest::test_static_304_if_none_match",
"tornado/test/web_test.py::StaticFileTest::test_static_404",
"tornado/test/web_test.py::StaticFileTest::test_static_compressed_files",
"tornado/test/web_test.py::StaticFileTest::test_static_etag",
"tornado/test/web_test.py::StaticFileTest::test_static_files",
"tornado/test/web_test.py::StaticFileTest::test_static_head",
"tornado/test/web_test.py::StaticFileTest::test_static_head_range",
"tornado/test/web_test.py::StaticFileTest::test_static_if_modified_since_pre_epoch",
"tornado/test/web_test.py::StaticFileTest::test_static_if_modified_since_time_zone",
"tornado/test/web_test.py::StaticFileTest::test_static_invalid_range",
"tornado/test/web_test.py::StaticFileTest::test_static_range_if_none_match",
"tornado/test/web_test.py::StaticFileTest::test_static_unsatisfiable_range_invalid_start",
"tornado/test/web_test.py::StaticFileTest::test_static_unsatisfiable_range_zero_suffix",
"tornado/test/web_test.py::StaticFileTest::test_static_url",
"tornado/test/web_test.py::StaticFileTest::test_static_with_range",
"tornado/test/web_test.py::StaticFileTest::test_static_with_range_end_edge",
"tornado/test/web_test.py::StaticFileTest::test_static_with_range_full_file",
"tornado/test/web_test.py::StaticFileTest::test_static_with_range_full_past_end",
"tornado/test/web_test.py::StaticFileTest::test_static_with_range_neg_end",
"tornado/test/web_test.py::StaticFileTest::test_static_with_range_partial_past_end",
"tornado/test/web_test.py::StaticDefaultFilenameTest::test_static_default_filename",
"tornado/test/web_test.py::StaticDefaultFilenameTest::test_static_default_redirect",
"tornado/test/web_test.py::StaticFileWithPathTest::test_serve",
"tornado/test/web_test.py::CustomStaticFileTest::test_serve",
"tornado/test/web_test.py::CustomStaticFileTest::test_static_url",
"tornado/test/web_test.py::HostMatchingTest::test_host_matching",
"tornado/test/web_test.py::DefaultHostMatchingTest::test_default_host_matching",
"tornado/test/web_test.py::NamedURLSpecGroupsTest::test_named_urlspec_groups",
"tornado/test/web_test.py::ClearHeaderTest::test_clear_header",
"tornado/test/web_test.py::Header204Test::test_204_headers",
"tornado/test/web_test.py::Header304Test::test_304_headers",
"tornado/test/web_test.py::DateHeaderTest::test_date_header",
"tornado/test/web_test.py::RaiseWithReasonTest::test_httperror_str",
"tornado/test/web_test.py::RaiseWithReasonTest::test_httperror_str_from_httputil",
"tornado/test/web_test.py::RaiseWithReasonTest::test_raise_with_reason",
"tornado/test/web_test.py::ErrorHandlerXSRFTest::test_404_xsrf",
"tornado/test/web_test.py::ErrorHandlerXSRFTest::test_error_xsrf",
"tornado/test/web_test.py::GzipTestCase::test_gzip",
"tornado/test/web_test.py::GzipTestCase::test_gzip_not_requested",
"tornado/test/web_test.py::GzipTestCase::test_gzip_static",
"tornado/test/web_test.py::GzipTestCase::test_vary_already_present",
"tornado/test/web_test.py::GzipTestCase::test_vary_already_present_multiple",
"tornado/test/web_test.py::PathArgsInPrepareTest::test_kw",
"tornado/test/web_test.py::PathArgsInPrepareTest::test_pos",
"tornado/test/web_test.py::ClearAllCookiesTest::test_clear_all_cookies",
"tornado/test/web_test.py::ExceptionHandlerTest::test_http_error",
"tornado/test/web_test.py::ExceptionHandlerTest::test_known_error",
"tornado/test/web_test.py::ExceptionHandlerTest::test_unknown_error",
"tornado/test/web_test.py::BuggyLoggingTest::test_buggy_log_exception",
"tornado/test/web_test.py::UIMethodUIModuleTest::test_ui_method",
"tornado/test/web_test.py::GetArgumentErrorTest::test_catch_error",
"tornado/test/web_test.py::MultipleExceptionTest::test_multi_exception",
"tornado/test/web_test.py::SetLazyPropertiesTest::test_set_properties",
"tornado/test/web_test.py::GetCurrentUserTest::test_get_current_user_from_ui_module_is_lazy",
"tornado/test/web_test.py::GetCurrentUserTest::test_get_current_user_from_ui_module_works",
"tornado/test/web_test.py::GetCurrentUserTest::test_get_current_user_works",
"tornado/test/web_test.py::UnimplementedHTTPMethodsTest::test_unimplemented_standard_methods",
"tornado/test/web_test.py::UnimplementedNonStandardMethodsTest::test_unimplemented_other",
"tornado/test/web_test.py::UnimplementedNonStandardMethodsTest::test_unimplemented_patch",
"tornado/test/web_test.py::AllHTTPMethodsTest::test_standard_methods",
"tornado/test/web_test.py::PatchMethodTest::test_other",
"tornado/test/web_test.py::PatchMethodTest::test_patch",
"tornado/test/web_test.py::FinishInPrepareTest::test_finish_in_prepare",
"tornado/test/web_test.py::Default404Test::test_404",
"tornado/test/web_test.py::Custom404Test::test_404",
"tornado/test/web_test.py::DefaultHandlerArgumentsTest::test_403",
"tornado/test/web_test.py::HandlerByNameTest::test_handler_by_name",
"tornado/test/web_test.py::StreamingRequestBodyTest::test_close_during_upload",
"tornado/test/web_test.py::StreamingRequestBodyTest::test_early_return",
"tornado/test/web_test.py::StreamingRequestBodyTest::test_early_return_with_data",
"tornado/test/web_test.py::StreamingRequestBodyTest::test_streaming_body",
"tornado/test/web_test.py::DecoratedStreamingRequestFlowControlTest::test_flow_control_chunked_body",
"tornado/test/web_test.py::DecoratedStreamingRequestFlowControlTest::test_flow_control_compressed_body",
"tornado/test/web_test.py::DecoratedStreamingRequestFlowControlTest::test_flow_control_fixed_body",
"tornado/test/web_test.py::NativeStreamingRequestFlowControlTest::test_flow_control_chunked_body",
"tornado/test/web_test.py::NativeStreamingRequestFlowControlTest::test_flow_control_compressed_body",
"tornado/test/web_test.py::NativeStreamingRequestFlowControlTest::test_flow_control_fixed_body",
"tornado/test/web_test.py::IncorrectContentLengthTest::test_content_length_too_high",
"tornado/test/web_test.py::IncorrectContentLengthTest::test_content_length_too_low",
"tornado/test/web_test.py::ClientCloseTest::test_client_close",
"tornado/test/web_test.py::SignedValueTest::test_expired",
"tornado/test/web_test.py::SignedValueTest::test_key_version_retrieval",
"tornado/test/web_test.py::SignedValueTest::test_key_versioning_invalid_key",
"tornado/test/web_test.py::SignedValueTest::test_key_versioning_read_write_default_key",
"tornado/test/web_test.py::SignedValueTest::test_key_versioning_read_write_non_default_key",
"tornado/test/web_test.py::SignedValueTest::test_known_values",
"tornado/test/web_test.py::SignedValueTest::test_name_swap",
"tornado/test/web_test.py::SignedValueTest::test_non_ascii",
"tornado/test/web_test.py::SignedValueTest::test_payload_tampering",
"tornado/test/web_test.py::SignedValueTest::test_signature_tampering",
"tornado/test/web_test.py::XSRFTest::test_cross_user",
"tornado/test/web_test.py::XSRFTest::test_distinct_tokens",
"tornado/test/web_test.py::XSRFTest::test_refresh_token",
"tornado/test/web_test.py::XSRFTest::test_versioning",
"tornado/test/web_test.py::XSRFTest::test_xsrf_fail_argument_invalid_format",
"tornado/test/web_test.py::XSRFTest::test_xsrf_fail_body_no_cookie",
"tornado/test/web_test.py::XSRFTest::test_xsrf_fail_cookie_invalid_format",
"tornado/test/web_test.py::XSRFTest::test_xsrf_fail_cookie_no_body",
"tornado/test/web_test.py::XSRFTest::test_xsrf_fail_no_token",
"tornado/test/web_test.py::XSRFTest::test_xsrf_success_header",
"tornado/test/web_test.py::XSRFTest::test_xsrf_success_non_hex_token",
"tornado/test/web_test.py::XSRFTest::test_xsrf_success_post_body",
"tornado/test/web_test.py::XSRFTest::test_xsrf_success_query_string",
"tornado/test/web_test.py::XSRFTest::test_xsrf_success_short_token",
"tornado/test/web_test.py::XSRFCookieKwargsTest::test_xsrf_httponly",
"tornado/test/web_test.py::FinishExceptionTest::test_finish_exception",
"tornado/test/web_test.py::DecoratorTest::test_addslash",
"tornado/test/web_test.py::DecoratorTest::test_removeslash",
"tornado/test/web_test.py::CacheTest::test_multiple_strong_etag_match",
"tornado/test/web_test.py::CacheTest::test_multiple_strong_etag_not_match",
"tornado/test/web_test.py::CacheTest::test_multiple_weak_etag_match",
"tornado/test/web_test.py::CacheTest::test_multiple_weak_etag_not_match",
"tornado/test/web_test.py::CacheTest::test_strong_etag_match",
"tornado/test/web_test.py::CacheTest::test_strong_etag_not_match",
"tornado/test/web_test.py::CacheTest::test_weak_etag_match",
"tornado/test/web_test.py::CacheTest::test_weak_etag_not_match",
"tornado/test/web_test.py::CacheTest::test_wildcard_etag",
"tornado/test/web_test.py::RequestSummaryTest::test_missing_remote_ip",
"tornado/test/web_test.py::HTTPErrorTest::test_copy",
"tornado/test/web_test.py::ApplicationTest::test_listen",
"tornado/test/web_test.py::URLSpecReverseTest::test_non_reversible",
"tornado/test/web_test.py::URLSpecReverseTest::test_reverse",
"tornado/test/web_test.py::URLSpecReverseTest::test_reverse_arguments",
"tornado/test/web_test.py::RedirectHandlerTest::test_basic_redirect",
"tornado/test/web_test.py::RedirectHandlerTest::test_redirect_pattern"
]
| []
| Apache License 2.0 | 1,281 | [
"tornado/web.py",
"tornado/ioloop.py"
]
| [
"tornado/web.py",
"tornado/ioloop.py"
]
|
|
tornadoweb__tornado-2048 | 06be57c11f98b24ce0f6ee2d04f0cfe8bfdb3b4d | 2017-05-20 17:44:48 | 03f13800e854a6fc9e6efa2168e694d9599348bd | diff --git a/tornado/options.py b/tornado/options.py
index 0a72cc65..707fbd35 100644
--- a/tornado/options.py
+++ b/tornado/options.py
@@ -223,9 +223,10 @@ class OptionParser(object):
override options set earlier on the command line, but can be overridden
by later flags.
"""
- if name in self._options:
+ normalized = self._normalize_name(name)
+ if normalized in self._options:
raise Error("Option %r already defined in %s" %
- (name, self._options[name].file_name))
+ (normalized, self._options[normalized].file_name))
frame = sys._getframe(0)
options_file = frame.f_code.co_filename
@@ -247,7 +248,6 @@ class OptionParser(object):
group_name = group
else:
group_name = file_name
- normalized = self._normalize_name(name)
option = _Option(name, file_name=file_name,
default=default, type=type, help=help,
metavar=metavar, multiple=multiple,
diff --git a/tornado/web.py b/tornado/web.py
index 132f42dd..d79889fa 100644
--- a/tornado/web.py
+++ b/tornado/web.py
@@ -319,7 +319,10 @@ class RequestHandler(object):
if reason is not None:
self._reason = escape.native_str(reason)
else:
- self._reason = httputil.responses.get(status_code, "Unknown")
+ try:
+ self._reason = httputil.responses[status_code]
+ except KeyError:
+ raise ValueError("unknown status code %d" % status_code)
def get_status(self):
"""Returns the status code for our response."""
@@ -1558,7 +1561,11 @@ class RequestHandler(object):
# send a response.
return
if isinstance(e, HTTPError):
- self.send_error(e.status_code, exc_info=sys.exc_info())
+ if e.status_code not in httputil.responses and not e.reason:
+ gen_log.error("Bad HTTP status code: %d", e.status_code)
+ self.send_error(500, exc_info=sys.exc_info())
+ else:
+ self.send_error(e.status_code, exc_info=sys.exc_info())
else:
self.send_error(500, exc_info=sys.exc_info())
| Redefining options that contain an underscore do not raise exceptions, from 4.3.0
From the release notes in 4.3.0: Dashes and underscores are now fully interchangeable in option names.
Discovered issue using Tornado 4.4.1 in Python 2.7 on Mac OS and Linux.
Internally all options with underscores are converted to dashes e.g. genoa_smtp_host is converted to genoa-smtp-host.
looking at the following code one would expect this to raise an exception:
```
options.define("genoa_smtp_host", 'localhost', help="SMTP server address")
options.define("genoa_smtp_host", 'localhost', help="SMTP server address")
```
However it does not, running the next two lines does raise an exception:
```
options.define("genoa_smtp_host", 'localhost', help="SMTP server address")
options.define("genoa-smtp-host", 'localhost', help="SMTP server address")
```
**Proposed solution:**
The first few lines in OptionParser.define() are:
```
if name in self._options:
raise Error("Option %r already defined in %s" %
(name, self._options[name].file_name))
```
I suggest the following:
```
normalized = self._normalize_name(name)
if normalized in self._options:
raise Error("Option %r already defined in %s" %
(name, self._options[normalized].file_name))
```
One consideration for the implementor of the solution to this issue, is the description of the error. It could be confusing to a developer/user of Tornado to see an error description as "Option 'genoa-smtp-host' already defined in my_module.py" when the option name they are using is genoa_smtp_host.
| tornadoweb/tornado | diff --git a/tornado/test/options_test.py b/tornado/test/options_test.py
index bafeea6f..1a0ac8fb 100644
--- a/tornado/test/options_test.py
+++ b/tornado/test/options_test.py
@@ -7,7 +7,7 @@ import sys
from tornado.options import OptionParser, Error
from tornado.util import basestring_type, PY3
-from tornado.test.util import unittest
+from tornado.test.util import unittest, subTest
if PY3:
from io import StringIO
@@ -232,6 +232,24 @@ class OptionsTest(unittest.TestCase):
self.assertRegexpMatches(str(cm.exception),
'Option.*foo.*already defined')
+ def test_error_redefine_underscore(self):
+ # Ensure that the dash/underscore normalization doesn't
+ # interfere with the redefinition error.
+ tests = [
+ ('foo-bar', 'foo-bar'),
+ ('foo_bar', 'foo_bar'),
+ ('foo-bar', 'foo_bar'),
+ ('foo_bar', 'foo-bar'),
+ ]
+ for a, b in tests:
+ with subTest(self, a=a, b=b):
+ options = OptionParser()
+ options.define(a)
+ with self.assertRaises(Error) as cm:
+ options.define(b)
+ self.assertRegexpMatches(str(cm.exception),
+ 'Option.*foo.bar.*already defined')
+
def test_dash_underscore_cli(self):
# Dashes and underscores should be interchangeable.
for defined_name in ['foo-bar', 'foo_bar']:
diff --git a/tornado/test/util.py b/tornado/test/util.py
index 6c032da6..5f534e84 100644
--- a/tornado/test/util.py
+++ b/tornado/test/util.py
@@ -1,5 +1,6 @@
from __future__ import absolute_import, division, print_function
+import contextlib
import os
import platform
import socket
@@ -94,3 +95,15 @@ def is_coverage_running():
except AttributeError:
return False
return mod.startswith('coverage')
+
+
+def subTest(test, *args, **kwargs):
+ """Compatibility shim for unittest.TestCase.subTest.
+
+ Usage: ``with tornado.test.util.subTest(self, x=x):``
+ """
+ try:
+ subTest = test.subTest # py34+
+ except AttributeError:
+ subTest = contextlib.contextmanager(lambda *a, **kw: (yield))
+ return subTest(*args, **kwargs)
diff --git a/tornado/test/web_test.py b/tornado/test/web_test.py
index de26c42e..d79ea52c 100644
--- a/tornado/test/web_test.py
+++ b/tornado/test/web_test.py
@@ -917,10 +917,6 @@ class ErrorResponseTest(WebTestCase):
self.assertEqual(response.code, 503)
self.assertTrue(b"503: Service Unavailable" in response.body)
- response = self.fetch("/default?status=435")
- self.assertEqual(response.code, 435)
- self.assertTrue(b"435: Unknown" in response.body)
-
def test_write_error(self):
with ExpectLog(app_log, "Uncaught exception"):
response = self.fetch("/write_error")
@@ -1493,9 +1489,9 @@ class StatusReasonTest(SimpleHandlerTestCase):
response = self.fetch("/?code=682&reason=Bar")
self.assertEqual(response.code, 682)
self.assertEqual(response.reason, "Bar")
- response = self.fetch("/?code=682")
- self.assertEqual(response.code, 682)
- self.assertEqual(response.reason, "Unknown")
+ with ExpectLog(app_log, 'Uncaught exception'):
+ response = self.fetch("/?code=682")
+ self.assertEqual(response.code, 500)
@wsgi_safe
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 0,
"test_score": 2
},
"num_modified_files": 2
} | 4.5 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"futures",
"mock",
"monotonic",
"trollius",
"sphinx",
"sphinx_rtd_theme",
"codecov",
"virtualenv",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.6",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.13
attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
Babel==2.11.0
certifi==2021.5.30
charset-normalizer==2.0.12
codecov==2.1.13
coverage==6.2
distlib==0.3.9
docutils==0.18.1
filelock==3.4.1
futures==2.2.0
idna==3.10
imagesize==1.4.1
importlib-metadata==4.8.3
importlib-resources==5.4.0
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
Jinja2==3.0.3
MarkupSafe==2.0.1
mock==5.2.0
monotonic==1.6
more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
platformdirs==2.4.0
pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work
py @ file:///opt/conda/conda-bld/py_1644396412707/work
Pygments==2.14.0
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
pytest==6.2.4
pytz==2025.2
requests==2.27.1
six==1.17.0
snowballstemmer==2.2.0
Sphinx==5.3.0
sphinx-rtd-theme==2.0.0
sphinxcontrib-applehelp==1.0.2
sphinxcontrib-devhelp==1.0.2
sphinxcontrib-htmlhelp==2.0.0
sphinxcontrib-jquery==4.1
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==1.0.3
sphinxcontrib-serializinghtml==1.1.5
toml @ file:///tmp/build/80754af9/toml_1616166611790/work
-e git+https://github.com/tornadoweb/tornado.git@06be57c11f98b24ce0f6ee2d04f0cfe8bfdb3b4d#egg=tornado
trollius==2.1.post2
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
urllib3==1.26.20
virtualenv==20.17.1
zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
| name: tornado
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=21.4.0=pyhd3eb1b0_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- importlib_metadata=4.8.1=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- more-itertools=8.12.0=pyhd3eb1b0_0
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pip=21.2.2=py36h06a4308_0
- pluggy=0.13.1=py36h06a4308_0
- py=1.11.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pytest=6.2.4=py36h06a4308_2
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- toml=0.10.2=pyhd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zipp=3.6.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.13
- babel==2.11.0
- charset-normalizer==2.0.12
- codecov==2.1.13
- coverage==6.2
- distlib==0.3.9
- docutils==0.18.1
- filelock==3.4.1
- futures==2.2.0
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==4.8.3
- importlib-resources==5.4.0
- jinja2==3.0.3
- markupsafe==2.0.1
- mock==5.2.0
- monotonic==1.6
- platformdirs==2.4.0
- pygments==2.14.0
- pytz==2025.2
- requests==2.27.1
- six==1.17.0
- snowballstemmer==2.2.0
- sphinx==5.3.0
- sphinx-rtd-theme==2.0.0
- sphinxcontrib-applehelp==1.0.2
- sphinxcontrib-devhelp==1.0.2
- sphinxcontrib-htmlhelp==2.0.0
- sphinxcontrib-jquery==4.1
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==1.0.3
- sphinxcontrib-serializinghtml==1.1.5
- trollius==2.1.post2
- urllib3==1.26.20
- virtualenv==20.17.1
prefix: /opt/conda/envs/tornado
| [
"tornado/test/options_test.py::OptionsTest::test_error_redefine_underscore",
"tornado/test/web_test.py::StatusReasonTest::test_status"
]
| []
| [
"tornado/test/options_test.py::OptionsTest::test_as_dict",
"tornado/test/options_test.py::OptionsTest::test_dash_underscore_cli",
"tornado/test/options_test.py::OptionsTest::test_dash_underscore_file",
"tornado/test/options_test.py::OptionsTest::test_dash_underscore_introspection",
"tornado/test/options_test.py::OptionsTest::test_error_redefine",
"tornado/test/options_test.py::OptionsTest::test_getitem",
"tornado/test/options_test.py::OptionsTest::test_group_dict",
"tornado/test/options_test.py::OptionsTest::test_help",
"tornado/test/options_test.py::OptionsTest::test_items",
"tornado/test/options_test.py::OptionsTest::test_iter",
"tornado/test/options_test.py::OptionsTest::test_mock_patch",
"tornado/test/options_test.py::OptionsTest::test_multiple_int",
"tornado/test/options_test.py::OptionsTest::test_multiple_string",
"tornado/test/options_test.py::OptionsTest::test_parse_callbacks",
"tornado/test/options_test.py::OptionsTest::test_parse_command_line",
"tornado/test/options_test.py::OptionsTest::test_parse_config_file",
"tornado/test/options_test.py::OptionsTest::test_setattr",
"tornado/test/options_test.py::OptionsTest::test_setattr_type_check",
"tornado/test/options_test.py::OptionsTest::test_setattr_with_callback",
"tornado/test/options_test.py::OptionsTest::test_setitem",
"tornado/test/options_test.py::OptionsTest::test_subcommand",
"tornado/test/options_test.py::OptionsTest::test_types",
"tornado/test/web_test.py::SecureCookieV1Test::test_arbitrary_bytes",
"tornado/test/web_test.py::SecureCookieV1Test::test_cookie_tampering_future_timestamp",
"tornado/test/web_test.py::SecureCookieV1Test::test_round_trip",
"tornado/test/web_test.py::SecureCookieV2Test::test_key_version_increment_version",
"tornado/test/web_test.py::SecureCookieV2Test::test_key_version_invalidate_version",
"tornado/test/web_test.py::SecureCookieV2Test::test_key_version_roundtrip",
"tornado/test/web_test.py::SecureCookieV2Test::test_key_version_roundtrip_differing_version",
"tornado/test/web_test.py::SecureCookieV2Test::test_round_trip",
"tornado/test/web_test.py::CookieTest::test_cookie_special_char",
"tornado/test/web_test.py::CookieTest::test_get_cookie",
"tornado/test/web_test.py::CookieTest::test_set_cookie",
"tornado/test/web_test.py::CookieTest::test_set_cookie_domain",
"tornado/test/web_test.py::CookieTest::test_set_cookie_expires_days",
"tornado/test/web_test.py::CookieTest::test_set_cookie_false_flags",
"tornado/test/web_test.py::CookieTest::test_set_cookie_max_age",
"tornado/test/web_test.py::CookieTest::test_set_cookie_overwrite",
"tornado/test/web_test.py::AuthRedirectTest::test_absolute_auth_redirect",
"tornado/test/web_test.py::AuthRedirectTest::test_relative_auth_redirect",
"tornado/test/web_test.py::ConnectionCloseTest::test_connection_close",
"tornado/test/web_test.py::RequestEncodingTest::test_error",
"tornado/test/web_test.py::RequestEncodingTest::test_group_encoding",
"tornado/test/web_test.py::RequestEncodingTest::test_group_question_mark",
"tornado/test/web_test.py::RequestEncodingTest::test_slashes",
"tornado/test/web_test.py::WSGISafeWebTest::test_decode_argument",
"tornado/test/web_test.py::WSGISafeWebTest::test_decode_argument_invalid_unicode",
"tornado/test/web_test.py::WSGISafeWebTest::test_decode_argument_plus",
"tornado/test/web_test.py::WSGISafeWebTest::test_get_argument",
"tornado/test/web_test.py::WSGISafeWebTest::test_get_body_arguments",
"tornado/test/web_test.py::WSGISafeWebTest::test_get_query_arguments",
"tornado/test/web_test.py::WSGISafeWebTest::test_header_injection",
"tornado/test/web_test.py::WSGISafeWebTest::test_multi_header",
"tornado/test/web_test.py::WSGISafeWebTest::test_no_gzip",
"tornado/test/web_test.py::WSGISafeWebTest::test_optional_path",
"tornado/test/web_test.py::WSGISafeWebTest::test_redirect",
"tornado/test/web_test.py::WSGISafeWebTest::test_reverse_url",
"tornado/test/web_test.py::WSGISafeWebTest::test_types",
"tornado/test/web_test.py::WSGISafeWebTest::test_uimodule_resources",
"tornado/test/web_test.py::WSGISafeWebTest::test_uimodule_unescaped",
"tornado/test/web_test.py::WSGISafeWebTest::test_web_redirect",
"tornado/test/web_test.py::WSGISafeWebTest::test_web_redirect_double_slash",
"tornado/test/web_test.py::NonWSGIWebTests::test_empty_flush",
"tornado/test/web_test.py::NonWSGIWebTests::test_flow_control",
"tornado/test/web_test.py::ErrorResponseTest::test_default",
"tornado/test/web_test.py::ErrorResponseTest::test_failed_write_error",
"tornado/test/web_test.py::ErrorResponseTest::test_write_error",
"tornado/test/web_test.py::StaticFileTest::test_absolute_static_url",
"tornado/test/web_test.py::StaticFileTest::test_absolute_version_exclusion",
"tornado/test/web_test.py::StaticFileTest::test_include_host_override",
"tornado/test/web_test.py::StaticFileTest::test_path_traversal_protection",
"tornado/test/web_test.py::StaticFileTest::test_relative_version_exclusion",
"tornado/test/web_test.py::StaticFileTest::test_root_static_path",
"tornado/test/web_test.py::StaticFileTest::test_static_304_if_modified_since",
"tornado/test/web_test.py::StaticFileTest::test_static_304_if_none_match",
"tornado/test/web_test.py::StaticFileTest::test_static_404",
"tornado/test/web_test.py::StaticFileTest::test_static_compressed_files",
"tornado/test/web_test.py::StaticFileTest::test_static_etag",
"tornado/test/web_test.py::StaticFileTest::test_static_files",
"tornado/test/web_test.py::StaticFileTest::test_static_head",
"tornado/test/web_test.py::StaticFileTest::test_static_head_range",
"tornado/test/web_test.py::StaticFileTest::test_static_if_modified_since_pre_epoch",
"tornado/test/web_test.py::StaticFileTest::test_static_if_modified_since_time_zone",
"tornado/test/web_test.py::StaticFileTest::test_static_invalid_range",
"tornado/test/web_test.py::StaticFileTest::test_static_range_if_none_match",
"tornado/test/web_test.py::StaticFileTest::test_static_unsatisfiable_range_invalid_start",
"tornado/test/web_test.py::StaticFileTest::test_static_unsatisfiable_range_zero_suffix",
"tornado/test/web_test.py::StaticFileTest::test_static_url",
"tornado/test/web_test.py::StaticFileTest::test_static_with_range",
"tornado/test/web_test.py::StaticFileTest::test_static_with_range_end_edge",
"tornado/test/web_test.py::StaticFileTest::test_static_with_range_full_file",
"tornado/test/web_test.py::StaticFileTest::test_static_with_range_full_past_end",
"tornado/test/web_test.py::StaticFileTest::test_static_with_range_neg_end",
"tornado/test/web_test.py::StaticFileTest::test_static_with_range_partial_past_end",
"tornado/test/web_test.py::StaticDefaultFilenameTest::test_static_default_filename",
"tornado/test/web_test.py::StaticDefaultFilenameTest::test_static_default_redirect",
"tornado/test/web_test.py::StaticFileWithPathTest::test_serve",
"tornado/test/web_test.py::CustomStaticFileTest::test_serve",
"tornado/test/web_test.py::CustomStaticFileTest::test_static_url",
"tornado/test/web_test.py::HostMatchingTest::test_host_matching",
"tornado/test/web_test.py::DefaultHostMatchingTest::test_default_host_matching",
"tornado/test/web_test.py::NamedURLSpecGroupsTest::test_named_urlspec_groups",
"tornado/test/web_test.py::ClearHeaderTest::test_clear_header",
"tornado/test/web_test.py::Header204Test::test_204_headers",
"tornado/test/web_test.py::Header304Test::test_304_headers",
"tornado/test/web_test.py::DateHeaderTest::test_date_header",
"tornado/test/web_test.py::RaiseWithReasonTest::test_httperror_str",
"tornado/test/web_test.py::RaiseWithReasonTest::test_httperror_str_from_httputil",
"tornado/test/web_test.py::RaiseWithReasonTest::test_raise_with_reason",
"tornado/test/web_test.py::ErrorHandlerXSRFTest::test_404_xsrf",
"tornado/test/web_test.py::ErrorHandlerXSRFTest::test_error_xsrf",
"tornado/test/web_test.py::GzipTestCase::test_gzip",
"tornado/test/web_test.py::GzipTestCase::test_gzip_not_requested",
"tornado/test/web_test.py::GzipTestCase::test_gzip_static",
"tornado/test/web_test.py::GzipTestCase::test_vary_already_present",
"tornado/test/web_test.py::GzipTestCase::test_vary_already_present_multiple",
"tornado/test/web_test.py::PathArgsInPrepareTest::test_kw",
"tornado/test/web_test.py::PathArgsInPrepareTest::test_pos",
"tornado/test/web_test.py::ClearAllCookiesTest::test_clear_all_cookies",
"tornado/test/web_test.py::ExceptionHandlerTest::test_http_error",
"tornado/test/web_test.py::ExceptionHandlerTest::test_known_error",
"tornado/test/web_test.py::ExceptionHandlerTest::test_unknown_error",
"tornado/test/web_test.py::BuggyLoggingTest::test_buggy_log_exception",
"tornado/test/web_test.py::UIMethodUIModuleTest::test_ui_method",
"tornado/test/web_test.py::GetArgumentErrorTest::test_catch_error",
"tornado/test/web_test.py::MultipleExceptionTest::test_multi_exception",
"tornado/test/web_test.py::SetLazyPropertiesTest::test_set_properties",
"tornado/test/web_test.py::GetCurrentUserTest::test_get_current_user_from_ui_module_is_lazy",
"tornado/test/web_test.py::GetCurrentUserTest::test_get_current_user_from_ui_module_works",
"tornado/test/web_test.py::GetCurrentUserTest::test_get_current_user_works",
"tornado/test/web_test.py::UnimplementedHTTPMethodsTest::test_unimplemented_standard_methods",
"tornado/test/web_test.py::UnimplementedNonStandardMethodsTest::test_unimplemented_other",
"tornado/test/web_test.py::UnimplementedNonStandardMethodsTest::test_unimplemented_patch",
"tornado/test/web_test.py::AllHTTPMethodsTest::test_standard_methods",
"tornado/test/web_test.py::PatchMethodTest::test_other",
"tornado/test/web_test.py::PatchMethodTest::test_patch",
"tornado/test/web_test.py::FinishInPrepareTest::test_finish_in_prepare",
"tornado/test/web_test.py::Default404Test::test_404",
"tornado/test/web_test.py::Custom404Test::test_404",
"tornado/test/web_test.py::DefaultHandlerArgumentsTest::test_403",
"tornado/test/web_test.py::HandlerByNameTest::test_handler_by_name",
"tornado/test/web_test.py::StreamingRequestBodyTest::test_close_during_upload",
"tornado/test/web_test.py::StreamingRequestBodyTest::test_early_return",
"tornado/test/web_test.py::StreamingRequestBodyTest::test_early_return_with_data",
"tornado/test/web_test.py::StreamingRequestBodyTest::test_streaming_body",
"tornado/test/web_test.py::DecoratedStreamingRequestFlowControlTest::test_flow_control_chunked_body",
"tornado/test/web_test.py::DecoratedStreamingRequestFlowControlTest::test_flow_control_compressed_body",
"tornado/test/web_test.py::DecoratedStreamingRequestFlowControlTest::test_flow_control_fixed_body",
"tornado/test/web_test.py::NativeStreamingRequestFlowControlTest::test_flow_control_chunked_body",
"tornado/test/web_test.py::NativeStreamingRequestFlowControlTest::test_flow_control_compressed_body",
"tornado/test/web_test.py::NativeStreamingRequestFlowControlTest::test_flow_control_fixed_body",
"tornado/test/web_test.py::IncorrectContentLengthTest::test_content_length_too_high",
"tornado/test/web_test.py::IncorrectContentLengthTest::test_content_length_too_low",
"tornado/test/web_test.py::ClientCloseTest::test_client_close",
"tornado/test/web_test.py::SignedValueTest::test_expired",
"tornado/test/web_test.py::SignedValueTest::test_key_version_retrieval",
"tornado/test/web_test.py::SignedValueTest::test_key_versioning_invalid_key",
"tornado/test/web_test.py::SignedValueTest::test_key_versioning_read_write_default_key",
"tornado/test/web_test.py::SignedValueTest::test_key_versioning_read_write_non_default_key",
"tornado/test/web_test.py::SignedValueTest::test_known_values",
"tornado/test/web_test.py::SignedValueTest::test_name_swap",
"tornado/test/web_test.py::SignedValueTest::test_non_ascii",
"tornado/test/web_test.py::SignedValueTest::test_payload_tampering",
"tornado/test/web_test.py::SignedValueTest::test_signature_tampering",
"tornado/test/web_test.py::XSRFTest::test_cross_user",
"tornado/test/web_test.py::XSRFTest::test_distinct_tokens",
"tornado/test/web_test.py::XSRFTest::test_refresh_token",
"tornado/test/web_test.py::XSRFTest::test_versioning",
"tornado/test/web_test.py::XSRFTest::test_xsrf_fail_argument_invalid_format",
"tornado/test/web_test.py::XSRFTest::test_xsrf_fail_body_no_cookie",
"tornado/test/web_test.py::XSRFTest::test_xsrf_fail_cookie_invalid_format",
"tornado/test/web_test.py::XSRFTest::test_xsrf_fail_cookie_no_body",
"tornado/test/web_test.py::XSRFTest::test_xsrf_fail_no_token",
"tornado/test/web_test.py::XSRFTest::test_xsrf_success_header",
"tornado/test/web_test.py::XSRFTest::test_xsrf_success_non_hex_token",
"tornado/test/web_test.py::XSRFTest::test_xsrf_success_post_body",
"tornado/test/web_test.py::XSRFTest::test_xsrf_success_query_string",
"tornado/test/web_test.py::XSRFTest::test_xsrf_success_short_token",
"tornado/test/web_test.py::XSRFCookieKwargsTest::test_xsrf_httponly",
"tornado/test/web_test.py::FinishExceptionTest::test_finish_exception",
"tornado/test/web_test.py::DecoratorTest::test_addslash",
"tornado/test/web_test.py::DecoratorTest::test_removeslash",
"tornado/test/web_test.py::CacheTest::test_multiple_strong_etag_match",
"tornado/test/web_test.py::CacheTest::test_multiple_strong_etag_not_match",
"tornado/test/web_test.py::CacheTest::test_multiple_weak_etag_match",
"tornado/test/web_test.py::CacheTest::test_multiple_weak_etag_not_match",
"tornado/test/web_test.py::CacheTest::test_strong_etag_match",
"tornado/test/web_test.py::CacheTest::test_strong_etag_not_match",
"tornado/test/web_test.py::CacheTest::test_weak_etag_match",
"tornado/test/web_test.py::CacheTest::test_weak_etag_not_match",
"tornado/test/web_test.py::CacheTest::test_wildcard_etag",
"tornado/test/web_test.py::RequestSummaryTest::test_missing_remote_ip",
"tornado/test/web_test.py::HTTPErrorTest::test_copy",
"tornado/test/web_test.py::ApplicationTest::test_listen",
"tornado/test/web_test.py::URLSpecReverseTest::test_non_reversible",
"tornado/test/web_test.py::URLSpecReverseTest::test_reverse",
"tornado/test/web_test.py::URLSpecReverseTest::test_reverse_arguments",
"tornado/test/web_test.py::RedirectHandlerTest::test_basic_redirect",
"tornado/test/web_test.py::RedirectHandlerTest::test_redirect_pattern"
]
| []
| Apache License 2.0 | 1,282 | [
"tornado/web.py",
"tornado/options.py"
]
| [
"tornado/web.py",
"tornado/options.py"
]
|
|
imageio__imageio-255 | 0e0be918f05f8f2812277c9204677b05c7d6f73f | 2017-05-21 01:28:34 | 48e81976e70f2c4795dfdd105d8115bc53f66a11 | diff --git a/imageio/plugins/tifffile.py b/imageio/plugins/tifffile.py
index f6d25ad..a26ea79 100644
--- a/imageio/plugins/tifffile.py
+++ b/imageio/plugins/tifffile.py
@@ -37,8 +37,11 @@ READ_METADATA_KEYS = ('planar_configuration', 'is_fluoview', 'is_nih',
class TiffFormat(Format):
-
""" Provides support for a wide range of Tiff images.
+
+ Images that contain multiple pages can be read using ``imageio.mimread()``
+ to read the individual pages, or ``imageio.volread()`` to obtain a
+ single (higher dimensional) array.
Parameters for reading
----------------------
@@ -177,17 +180,28 @@ class TiffFormat(Format):
def _close(self):
self._tf.close()
-
+
def _get_length(self):
- return len(self._tf)
-
+ if self.request.mode[1] in 'vV':
+ return 1 # or can there be pages in pages or something?
+ else:
+ return len(self._tf)
+
def _get_data(self, index):
- # Get data
- if index < 0 or index >= len(self._tf):
- raise IndexError(
- 'Index out of range while reading from tiff file')
- im = self._tf[index].asarray()
- meta = self._meta or self._get_meta_data(index)
+ if self.request.mode[1] in 'vV':
+ # Read data as single 3D (+ color channels) array
+ if index != 0:
+ raise IndexError(
+ 'Tiff support no more than 1 "volume" per file')
+ im = self._tf.asarray() # request as singleton image
+ meta = self._meta
+ else:
+ # Read as 2D image
+ if index < 0 or index >= len(self._tf):
+ raise IndexError(
+ 'Index out of range while reading from tiff file')
+ im = self._tf[index].asarray()
+ meta = self._meta or self._get_meta_data(index)
# Return array and empty meta data
return im, meta
@@ -216,6 +230,8 @@ class TiffFormat(Format):
def _append_data(self, im, meta):
if meta:
self.set_meta_data(meta)
+ # No need to check self.request.mode; tiffile figures out whether
+ # this is a single page, or all page data at once.
self._tf.save(np.asanyarray(im), **self._meta)
def set_meta_data(self, meta):
| Incorrect image dimensions after reading in 3D+c TIFF
I'm trying to read the following file:
https://www.dropbox.com/s/0ynbscx4cmd5k91/E_z2_512_1um_CONTROL.tif?dl=1
skimage (ie tifffile) reads this correctly, but imageio doesn't — even though I thought it also used tifffile as a backend for tiffs?
```python
In [1]: import imageio
In [2]: from skimage import io
In [3]: im_iio = imageio.imread('E_z2_512_1um_CONTROL.tif')
In [4]: im_iio.shape
Out[4]: (159, 320, 512)
In [5]: im_ski = io.imread('E_z2_512_1um_CONTROL.tif')
In [6]: im_ski.shape
Out[6]: (53, 320, 512, 3)
```
What imageio is doing is reading the file in the array order present in the file (plane, channel, row, column), and squashing pln and ch dimensions together, while skimage is behaving as expected and producing two separate dimensions for pln and ch and (maybe a bit more than expected) moving ch to the last dimension, which is the dimension order expected by most scipy packages.
Any ideas about what is going wrong here? | imageio/imageio | diff --git a/tests/test_tifffile.py b/tests/test_tifffile.py
index 5ab5ce7..ea7ad7d 100644
--- a/tests/test_tifffile.py
+++ b/tests/test_tifffile.py
@@ -34,16 +34,29 @@ def test_tifffile_reading_writing():
imageio.imsave(filename1, im2)
im = imageio.imread(filename1)
ims = imageio.mimread(filename1)
+ assert im.shape == im2.shape
assert (im == im2).all()
assert len(ims) == 1
-
+
# Multiple images
imageio.mimsave(filename1, [im2, im2, im2])
im = imageio.imread(filename1)
ims = imageio.mimread(filename1)
- assert (im == im2).all()
- assert len(ims) == 3, ims[0].shape
-
+ assert im.shape == im2.shape
+ assert (im == im2).all() # note: this does not imply that the shape match!
+ assert len(ims) == 3
+ for i in range(3):
+ assert ims[i].shape == im2.shape
+ assert (ims[i] == im2).all()
+
+ # Read all planes as one array - we call it a volume for clarity
+ vol = imageio.volread(filename1)
+ vols = imageio.mvolread(filename1)
+ assert vol.shape == (3, ) + im2.shape
+ assert len(vols) == 1 and vol.shape == vols[0].shape
+ for i in range(3):
+ assert (vol[i] == im2).all()
+
# remote multipage rgb file
filename2 = get_remote_file('images/multipage_rgb.tif')
img = imageio.mimread(filename2)
@@ -67,13 +80,24 @@ def test_tifffile_reading_writing():
# Fail
raises(IndexError, R.get_data, -1)
raises(IndexError, R.get_data, 3)
-
- # Ensure imwrite write works round trip
+
+ # Ensure imread + imwrite works round trip
+ filename3 = os.path.join(test_dir, 'test_tiff2.tiff')
+ im1 = imageio.imread(filename1)
+ imageio.imwrite(filename3, im1)
+ im3 = imageio.imread(filename3)
+ assert im1.ndim == 3
+ assert im1.shape == im3.shape
+ assert (im1 == im3).all()
+
+ # Ensure imread + imwrite works round trip - volume like
filename3 = os.path.join(test_dir, 'test_tiff2.tiff')
- R = imageio.imread(filename1)
- imageio.imwrite(filename3, R)
- R2 = imageio.imread(filename3)
- assert (R == R2).all()
+ im1 = imageio.volread(filename1)
+ imageio.volwrite(filename3, im1)
+ im3 = imageio.volread(filename3)
+ assert im1.ndim == 4
+ assert im1.shape == im3.shape
+ assert (im1 == im3).all()
run_tests_if_main()
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 1
},
"num_modified_files": 1
} | 2.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": [
"apt-get update",
"apt-get install -y libfreeimage3"
],
"python": "3.6",
"reqs_path": [
"requirements/base.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
coverage==6.2
-e git+https://github.com/imageio/imageio.git@0e0be918f05f8f2812277c9204677b05c7d6f73f#egg=imageio
importlib-metadata==4.8.3
iniconfig==1.1.1
numpy==1.19.5
packaging==21.3
Pillow==8.4.0
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
pytest-cov==4.0.0
tomli==1.2.3
typing_extensions==4.1.1
zipp==3.6.0
| name: imageio
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- coverage==6.2
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- numpy==1.19.5
- packaging==21.3
- pillow==8.4.0
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-cov==4.0.0
- tomli==1.2.3
- typing-extensions==4.1.1
- zipp==3.6.0
prefix: /opt/conda/envs/imageio
| [
"tests/test_tifffile.py::test_tifffile_reading_writing"
]
| []
| [
"tests/test_tifffile.py::test_tifffile_format"
]
| []
| BSD 2-Clause "Simplified" License | 1,283 | [
"imageio/plugins/tifffile.py"
]
| [
"imageio/plugins/tifffile.py"
]
|
|
jbasko__configmanager-95 | d3e5b8a2d94beb39c7dc9b6063d696d62470b3bc | 2017-05-21 13:44:15 | dbdd2467741f0875fca53d0062f4dca1f307df8a | diff --git a/configmanager/__init__.py b/configmanager/__init__.py
index 9b6663e..0939e31 100644
--- a/configmanager/__init__.py
+++ b/configmanager/__init__.py
@@ -1,4 +1,4 @@
-__version__ = '1.5.1'
+__version__ = '1.5.2'
from .managers import Config
from .items import Item
diff --git a/configmanager/items.py b/configmanager/items.py
index 0ff7467..de3f8fd 100644
--- a/configmanager/items.py
+++ b/configmanager/items.py
@@ -1,6 +1,7 @@
import copy
import six
+from builtins import str
from .exceptions import ConfigValueMissing
from .base import ItemAttribute, BaseItem
@@ -44,13 +45,27 @@ class Item(BaseItem):
if 'type' in kwargs:
self.type = kwargs.pop('type')
else:
+ #
# Type guessing
+ #
value = kwargs.get('value', not_set)
default = kwargs.get('default', not_set)
+
+ # 'str' is from builtins package which means that
+ # it is actually a unicode string in Python 2 too.
+ type_ = None
if value is not not_set and value is not None:
- self.type = type(value)
+ type_ = type(value)
elif default is not not_set and default is not None:
- self.type = type(default)
+ type_ = type(default)
+
+ if type_:
+ if issubclass(type_, six.string_types):
+ self.type = str
+ else:
+ self.type = type_
+
+ # TODO default should also be stringified accordingly!
self._value = not_set
for k, v in kwargs.items():
@@ -67,6 +82,13 @@ class Item(BaseItem):
return '<{} {} {!r}>'.format(self.__class__.__name__, self.name, value)
def __str__(self):
+ return self.str_value
+
+ def __eq__(self, other):
+ return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+ @property
+ def str_value(self):
if self.raw_str_value is not not_set:
return self.raw_str_value
if self._value is not not_set or self.default is not not_set:
@@ -74,9 +96,6 @@ class Item(BaseItem):
else:
return repr(self)
- def __eq__(self, other):
- return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
-
@property
def value(self):
"""
diff --git a/configmanager/persistence.py b/configmanager/persistence.py
index 191d05e..7ea756d 100644
--- a/configmanager/persistence.py
+++ b/configmanager/persistence.py
@@ -1,5 +1,6 @@
+from builtins import str
import configparser
-
+from io import open
import six
@@ -27,12 +28,12 @@ class ConfigPersistenceAdapter(object):
def load(self, source, as_defaults=False):
if isinstance(source, six.string_types):
- with open(source) as f:
+ with open(source, encoding='utf-8') as f:
self._rw.load_config_from_file(self._config, f, as_defaults=as_defaults)
elif isinstance(source, (list, tuple)):
for s in source:
- with open(s) as f:
+ with open(s, encoding='utf-8') as f:
self._rw.load_config_from_file(self._config, f, as_defaults=as_defaults)
else:
@@ -43,7 +44,7 @@ class ConfigPersistenceAdapter(object):
def dump(self, destination, with_defaults=False):
if isinstance(destination, six.string_types):
- with open(destination, 'w') as f:
+ with open(destination, 'w', encoding='utf-8') as f:
self._rw.dump_config_to_file(self._config, f, with_defaults=with_defaults)
else:
self._rw.dump_config_to_file(self._config, destination, with_defaults=with_defaults)
@@ -60,10 +61,20 @@ class JsonReaderWriter(ConfigReaderWriter):
self.json = json
def dump_config_to_file(self, config, file_obj, with_defaults=False, **kwargs):
- self.json.dump(config.to_dict(with_defaults=with_defaults), file_obj, **kwargs)
+ # See comment in JsonReaderWriter.dump_config_to_string
+ file_obj.write(self.dump_config_to_string(config, with_defaults=with_defaults), **kwargs)
def dump_config_to_string(self, config, with_defaults=False, **kwargs):
- return self.json.dumps(config.to_dict(with_defaults=with_defaults), **kwargs)
+ # There is some inconsistent behaviour in Python 2's json.dump as described here:
+ # http://stackoverflow.com/a/36008538/38611
+ # and io.open which we use for file opening is very strict and fails if
+ # the string we are trying to write is not unicode in Python 2
+ # because we open files with encoding=utf-8.
+ result = self.json.dumps(config.to_dict(with_defaults=with_defaults), ensure_ascii=False, **kwargs)
+ if not isinstance(result, str):
+ return str(result)
+ else:
+ return result
def load_config_from_file(self, config, file_obj, as_defaults=False, **kwargs):
config.read_dict(self.json.load(file_obj, **kwargs), as_defaults=as_defaults)
@@ -119,6 +130,17 @@ class ConfigParserReaderWriter(ConfigReaderWriter):
self._load_config_from_config_parser(config, cp, as_defaults=as_defaults)
def _load_config_from_config_parser(self, config, cp, as_defaults=False):
+ for option, value in cp.defaults().items():
+ if as_defaults:
+ if option not in config:
+ config.cm__add_item(option, config.cm__create_item(option, default=value))
+ else:
+ config[option].default = value
+ else:
+ if option not in config:
+ continue
+ config[option].value = value
+
for section in cp.sections():
for option in cp.options(section):
value = cp.get(section, option)
@@ -149,9 +171,9 @@ class ConfigParserReaderWriter(ConfigReaderWriter):
if len(item_path) == 2:
section, option = item_path
else:
- section = 'DEFAULT'
+ section = cp.default_section
option = item_path[0]
- if not cp.has_section(section):
+ if not cp.has_section(section) and section != cp.default_section:
cp.add_section(section)
- cp.set(section, option, str(item))
+ cp.set(section, option, item.str_value)
diff --git a/configmanager/utils.py b/configmanager/utils.py
index 6c884ee..754c166 100644
--- a/configmanager/utils.py
+++ b/configmanager/utils.py
@@ -1,3 +1,6 @@
+from builtins import str
+
+
class _NotSet(object):
instance = None
diff --git a/requirements.txt b/requirements.txt
index 5e29590..e1b0220 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -2,6 +2,7 @@
# Real dependencies
#
six
+future
#
# Potentially optional dependencies
diff --git a/setup.py b/setup.py
index 15cdf05..f872b89 100644
--- a/setup.py
+++ b/setup.py
@@ -26,7 +26,7 @@ setup(
description='Self-conscious items of configuration in Python',
long_description=read('README.rst'),
packages=find_packages(),
- install_requires=['six', 'configparser'],
+ install_requires=['six', 'future', 'configparser'],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
| Add Item.str_value
Because str(item) is not intuitive and easy to miss the implications of | jbasko/configmanager | diff --git a/tests/test_config.py b/tests/test_config.py
index 562ab1c..a51c096 100644
--- a/tests/test_config.py
+++ b/tests/test_config.py
@@ -1,5 +1,9 @@
+# -*- coding: utf-8 -*-
+
import pytest
+from builtins import str
+
from configmanager import Config, Item
@@ -442,3 +446,17 @@ def test_section_knows_its_alias():
config.uploads.db = Config({'connection': {'user': 'root'}})
assert config.uploads.db.alias == 'db'
assert config.uploads.db.connection.alias == 'connection'
+
+
+def test_config_item_value_can_be_unicode_str(tmpdir):
+ config1 = Config({'greeting': u'Hello, {name}', 'name': u'Anonymous'})
+ config1.name.value = u'Jānis Bērziņš'
+ assert config1.name.type is str
+
+ path = tmpdir.join('config.ini').strpath
+ config1.configparser.dump(path, with_defaults=True)
+
+ config2 = Config({'greeting': '', 'name': ''})
+ config2.configparser.load(path)
+ assert config2.name.value == u'Jānis Bērziņš'
+ assert config1.to_dict(with_defaults=True) == config2.to_dict(with_defaults=True)
diff --git a/tests/test_configparser.py b/tests/test_configparser.py
index 0121cc3..5e3bd27 100644
--- a/tests/test_configparser.py
+++ b/tests/test_configparser.py
@@ -1,3 +1,4 @@
+import collections
import pytest
from configmanager import Config, Item
@@ -263,3 +264,22 @@ def test_write_string_returns_valid_configparser_string():
config = Config({'db': {'user': 'root'}})
assert config.configparser.dumps() == ''
assert config.configparser.dumps(with_defaults=True) == '[db]\nuser = root\n\n'
+
+
+def test_writes_to_and_reads_from_default_section_transparently(tmpdir):
+ config_ini = tmpdir.join('config.ini').strpath
+
+ config1 = Config(collections.OrderedDict([('greeting', 'Hello'), ('name', 'World')]))
+ config1.configparser.dump(config_ini, with_defaults=True)
+
+ with open(config_ini) as f:
+ assert f.read() == (
+ '[DEFAULT]\n'
+ 'greeting = Hello\n'
+ 'name = World\n\n'
+ )
+
+ config2 = Config()
+ config2.configparser.load(config_ini, as_defaults=True)
+
+ assert config1.to_dict() == config2.to_dict() == {'greeting': 'Hello', 'name': 'World'}
diff --git a/tests/test_item.py b/tests/test_item.py
index 4df094b..1b87cf0 100644
--- a/tests/test_item.py
+++ b/tests/test_item.py
@@ -1,5 +1,8 @@
+# -*- coding: utf-8 -*-
+
import pytest
import six
+from builtins import str
from configmanager.utils import not_set
from configmanager import Item, ConfigValueMissing
@@ -55,28 +58,28 @@ def test_int_value():
def test_raw_str_value_is_reset_on_reset():
c = Item('a', type=int, default=25)
- assert str(c) == '25'
+ assert c.str_value == '25'
c.value = '23'
- assert str(c) == '23'
+ assert c.str_value == '23'
c.reset()
- assert str(c) == '25'
+ assert c.str_value == '25'
def test_raw_str_value_is_reset_on_non_str_value_set():
c = Item('a', type=int, default=25)
c.value = '23'
- assert str(c) == '23'
+ assert c.str_value == '23'
c.value = 25
- assert str(c) == '25'
+ assert c.str_value == '25'
c.value = 24
- assert str(c) == '24'
+ assert c.str_value == '24'
c.value = '22'
- assert str(c) == '22'
+ assert c.str_value == '22'
def test_bool_of_value():
@@ -108,16 +111,16 @@ def test_repr_makes_clear_name_and_value():
def test_str_and_repr_of_not_set_value_should_not_fail():
c = Item('a')
- assert str(c) == '<Item a <NotSet>>'
+ assert c.str_value == '<Item a <NotSet>>'
assert repr(c) == '<Item a <NotSet>>'
def test_bool_str_is_a_str():
c = Item('a', type=bool)
- assert isinstance(str(c), six.string_types)
+ assert isinstance(c.str_value, six.string_types)
c.value = True
- assert isinstance(str(c), six.string_types)
+ assert isinstance(c.str_value, six.string_types)
def test_bool_config_preserves_raw_str_value_used_to_set_it():
@@ -125,27 +128,27 @@ def test_bool_config_preserves_raw_str_value_used_to_set_it():
assert c.value is False
assert not c.value
- assert str(c) == 'False'
+ assert c.str_value == 'False'
assert c.value is False
c.value = 'False'
assert not c.value
- assert str(c) == 'False'
+ assert c.str_value == 'False'
assert c.value is False
c.value = 'no'
assert not c.value
- assert str(c) == 'no'
+ assert c.str_value == 'no'
assert c.value is False
c.value = '0'
assert not c.value
- assert str(c) == '0'
+ assert c.str_value == '0'
assert c.value is False
c.value = '1'
assert c.value
- assert str(c) == '1'
+ assert c.str_value == '1'
assert c.value is True
c.reset()
@@ -153,7 +156,7 @@ def test_bool_config_preserves_raw_str_value_used_to_set_it():
assert c.value is False
c.value = 'yes'
- assert str(c) == 'yes'
+ assert c.str_value == 'yes'
assert c.value is True
@@ -265,6 +268,20 @@ def test_type_is_guessed_either_from_default_or_value():
c = Item()
assert c.type is str
+ c = Item(value='haha')
+ assert c.type is str
+
+ c = Item(value=u'hāhā')
+ assert c.type is str
+ assert c.value == u'hāhā'
+ assert c.str_value == u'hāhā'
+
+ c = Item(default='haha')
+ assert c.type is str
+
+ c = Item(default=u'haha')
+ assert c.type is str
+
d = Item(default=5)
assert d.type is int
@@ -295,3 +312,12 @@ def test_item_value_is_not_deep_copied_on_value_request():
assert c.value == ['c', 'd', 'e']
assert c.default == ['a', 'b']
+
+
+def test_item_value_can_be_unicode_str():
+ c = Item(default=u'Jānis Bērziņš')
+ assert c.str_value == u'Jānis Bērziņš'
+
+ c.value = u'Pēteris Liepiņš'
+ assert c.str_value == u'Pēteris Liepiņš'
+ assert c.default == u'Jānis Bērziņš'
diff --git a/tests/test_item_get_set.py b/tests/test_item_get_set.py
index ac3d80a..edd07d1 100644
--- a/tests/test_item_get_set.py
+++ b/tests/test_item_get_set.py
@@ -34,9 +34,9 @@ def test_get_returns_default_value_when_available():
def test_get_returns_value_when_value_and_default_available():
assert Item(default='a', value=None).get() is None
- assert Item(default='a', value='b').get() is 'b'
+ assert Item(default='a', value='b').get() == 'b'
assert Item(default=None, value=None).get(True) is None
- assert Item(default='a', value='b').get('c') is 'b'
+ assert Item(default='a', value='b').get('c') == 'b'
def test_value_calls_get_so_users_can_extend_item_class_by_overriding_just_get():
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 6
} | 1.5 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"PyYAML"
],
"pre_install": null,
"python": "3.6",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
-e git+https://github.com/jbasko/configmanager.git@d3e5b8a2d94beb39c7dc9b6063d696d62470b3bc#egg=configmanager
configparser==5.2.0
coverage==6.2
distlib==0.3.9
filelock==3.4.1
importlib-metadata==4.8.3
importlib-resources==5.4.0
iniconfig==1.1.1
packaging==21.3
platformdirs==2.4.0
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
pytest-random-order==1.1.1
PyYAML==6.0.1
six==1.17.0
toml==0.10.2
tomli==1.2.3
tox==3.28.0
typing_extensions==4.1.1
virtualenv==20.17.1
zipp==3.6.0
| name: configmanager
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- configparser==5.2.0
- coverage==6.2
- distlib==0.3.9
- filelock==3.4.1
- importlib-metadata==4.8.3
- importlib-resources==5.4.0
- iniconfig==1.1.1
- packaging==21.3
- platformdirs==2.4.0
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-random-order==1.1.1
- pyyaml==6.0.1
- six==1.17.0
- toml==0.10.2
- tomli==1.2.3
- tox==3.28.0
- typing-extensions==4.1.1
- virtualenv==20.17.1
- zipp==3.6.0
prefix: /opt/conda/envs/configmanager
| [
"tests/test_config.py::test_config_item_value_can_be_unicode_str",
"tests/test_configparser.py::test_writes_to_and_reads_from_default_section_transparently",
"tests/test_item.py::test_raw_str_value_is_reset_on_reset",
"tests/test_item.py::test_raw_str_value_is_reset_on_non_str_value_set",
"tests/test_item.py::test_str_and_repr_of_not_set_value_should_not_fail",
"tests/test_item.py::test_bool_str_is_a_str",
"tests/test_item.py::test_bool_config_preserves_raw_str_value_used_to_set_it",
"tests/test_item.py::test_type_is_guessed_either_from_default_or_value",
"tests/test_item.py::test_item_value_can_be_unicode_str"
]
| []
| [
"tests/test_config.py::test_items_are_created_using_cm_create_item_method",
"tests/test_config.py::test_reset_resets_values_to_defaults",
"tests/test_config.py::test_repr_of_config",
"tests/test_config.py::test_assigning_nameless_item_directly_to_config_should_set_its_name",
"tests/test_config.py::test_assigning_item_with_name_directly_to_config_should_preserve_its_name",
"tests/test_config.py::test_item_name_and_alias_must_be_a_string",
"tests/test_config.py::test_section_name_must_be_a_string",
"tests/test_config.py::test_to_dict_should_not_include_items_with_no_usable_value",
"tests/test_config.py::test_read_dict_recursively_loads_values_from_a_dictionary",
"tests/test_config.py::test_read_dict_as_defaults_loads_default_values_from_a_dictionary",
"tests/test_config.py::test_declaration_parser_does_not_modify_config",
"tests/test_config.py::test_allows_iteration_over_all_items",
"tests/test_config.py::test_forbids_accidental_item_overwrite_via_setitem",
"tests/test_config.py::test_allows_iteration_over_sections",
"tests/test_config.py::test_attribute_read_access",
"tests/test_config.py::test_attribute_write_access",
"tests/test_config.py::test_forbids_accidental_item_overwrite_via_setattr",
"tests/test_config.py::test_to_dict",
"tests/test_config.py::test_can_inspect_config_contents",
"tests/test_config.py::test_can_have_a_dict_as_a_config_value_if_wrapped_inside_item",
"tests/test_config.py::test_len_of_config_returns_number_of_items_in_it",
"tests/test_config.py::test__getitem__handles_paths_to_sections_and_items_and_so_does__contains__",
"tests/test_config.py::test_can_use__setitem__to_create_new_deep_paths",
"tests/test_config.py::test_section_knows_its_alias",
"tests/test_configparser.py::test_reads_empty_config_from_file_obj",
"tests/test_configparser.py::test_reads_simple_config_from_file_obj",
"tests/test_configparser.py::test_writes_config_to_file",
"tests/test_configparser.py::test_preserves_bool_notation",
"tests/test_configparser.py::test_configparser_writer_does_not_accept_three_deep_paths",
"tests/test_configparser.py::test_read_reads_multiple_files_in_order",
"tests/test_configparser.py::test_read_string",
"tests/test_configparser.py::test_read_as_defaults_treats_all_values_as_declarations",
"tests/test_configparser.py::test_write_with_defaults_writes_defaults_too",
"tests/test_configparser.py::test_write_string_returns_valid_configparser_string",
"tests/test_item.py::test_missing_required_value_raises_config_value_missing",
"tests/test_item.py::test_required_item_falls_back_to_default_when_there_is_one",
"tests/test_item.py::test_item_with_no_value_and_no_default_returns_not_set_as_value",
"tests/test_item.py::test_value_gets_stringified",
"tests/test_item.py::test_int_value",
"tests/test_item.py::test_bool_of_value",
"tests/test_item.py::test_repr_makes_clear_name_and_value",
"tests/test_item.py::test_can_set_str_value_to_none",
"tests/test_item.py::test_setting_value_to_not_set_resets_it",
"tests/test_item.py::test_can_set_int_value_to_none",
"tests/test_item.py::test_equality",
"tests/test_item.py::test_item_is_equal_to_itself",
"tests/test_item.py::test_is_default",
"tests/test_item.py::test_has_value_returns_true_if_value_or_default_is_set",
"tests/test_item.py::test_item_default_value_is_deep_copied_on_value_request",
"tests/test_item.py::test_item_value_is_not_deep_copied_on_value_request",
"tests/test_item_get_set.py::test_get_returns_value_when_value_is_set",
"tests/test_item_get_set.py::test_get_with_no_value_and_no_default_returns_not_set",
"tests/test_item_get_set.py::test_get_returns_fallback_when_no_value_and_no_default_is_set",
"tests/test_item_get_set.py::test_get_required_with_no_value_and_no_default_returns_fallback_if_available",
"tests/test_item_get_set.py::test_get_returns_default_value_when_available",
"tests/test_item_get_set.py::test_get_returns_value_when_value_and_default_available",
"tests/test_item_get_set.py::test_value_calls_get_so_users_can_extend_item_class_by_overriding_just_get",
"tests/test_item_get_set.py::test_set_sets_value",
"tests/test_item_get_set.py::test_value_setting_calls_set_so_users_can_extend_item_class_by_overriding_just_set"
]
| []
| MIT License | 1,284 | [
"setup.py",
"configmanager/utils.py",
"configmanager/persistence.py",
"configmanager/items.py",
"configmanager/__init__.py",
"requirements.txt"
]
| [
"setup.py",
"configmanager/utils.py",
"configmanager/persistence.py",
"configmanager/items.py",
"configmanager/__init__.py",
"requirements.txt"
]
|
|
jbasko__configmanager-96 | dbdd2467741f0875fca53d0062f4dca1f307df8a | 2017-05-21 17:04:57 | dbdd2467741f0875fca53d0062f4dca1f307df8a | diff --git a/configmanager/managers.py b/configmanager/managers.py
index 1b03f33..9679217 100644
--- a/configmanager/managers.py
+++ b/configmanager/managers.py
@@ -46,8 +46,10 @@ class Config(BaseSection):
cm__configparser_factory = configparser.ConfigParser
def __new__(cls, config_declaration=None, item_cls=None, configparser_factory=None):
- instance = super(Config, cls).__new__(cls)
+ if config_declaration and isinstance(config_declaration, cls):
+ return copy.deepcopy(config_declaration)
+ instance = super(Config, cls).__new__(cls)
instance._cm__section = None
instance._cm__section_alias = None
instance._cm__configs = collections.OrderedDict()
| Allow deep-copying of Config with config2 = Config(config1) | jbasko/configmanager | diff --git a/tests/test_config.py b/tests/test_config.py
index a51c096..29f62e3 100644
--- a/tests/test_config.py
+++ b/tests/test_config.py
@@ -460,3 +460,21 @@ def test_config_item_value_can_be_unicode_str(tmpdir):
config2.configparser.load(path)
assert config2.name.value == u'Jānis Bērziņš'
assert config1.to_dict(with_defaults=True) == config2.to_dict(with_defaults=True)
+
+
+def test_config_of_config_is_a_deep_copy_of_original_config():
+ config1 = Config({'uploads': {'enabled': True, 'db': {'user': 'root'}}})
+ config1.uploads.enabled.value = False
+
+ config2 = Config(config1)
+ assert config1 is not config2
+ assert config1.to_dict() == config2.to_dict()
+ assert config1.to_dict(with_defaults=True) == config2.to_dict(with_defaults=True)
+
+ config1.uploads.enabled.value = True
+ config1.uploads.db.read_dict({'user': 'admin'})
+
+ assert config2.to_dict(with_defaults=True) == {'uploads': {'enabled': False, 'db': {'user': 'root'}}}
+
+ config2.uploads.db.user.default = 'default-user'
+ assert config1.uploads.db.user.default == 'root'
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 1
} | 1.5 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-random-order"
],
"pre_install": null,
"python": "3.6",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
-e git+https://github.com/jbasko/configmanager.git@dbdd2467741f0875fca53d0062f4dca1f307df8a#egg=configmanager
configparser==5.2.0
coverage==6.2
distlib==0.3.9
filelock==3.4.1
future==1.0.0
importlib-metadata==4.8.3
importlib-resources==5.4.0
iniconfig==1.1.1
packaging==21.3
platformdirs==2.4.0
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
pytest-random-order==1.1.1
PyYAML==6.0.1
six==1.17.0
toml==0.10.2
tomli==1.2.3
tox==3.28.0
typing_extensions==4.1.1
virtualenv==20.17.1
zipp==3.6.0
| name: configmanager
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- configparser==5.2.0
- coverage==6.2
- distlib==0.3.9
- filelock==3.4.1
- future==1.0.0
- importlib-metadata==4.8.3
- importlib-resources==5.4.0
- iniconfig==1.1.1
- packaging==21.3
- platformdirs==2.4.0
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-random-order==1.1.1
- pyyaml==6.0.1
- six==1.17.0
- toml==0.10.2
- tomli==1.2.3
- tox==3.28.0
- typing-extensions==4.1.1
- virtualenv==20.17.1
- zipp==3.6.0
prefix: /opt/conda/envs/configmanager
| [
"tests/test_config.py::test_config_of_config_is_a_deep_copy_of_original_config"
]
| []
| [
"tests/test_config.py::test_items_are_created_using_cm_create_item_method",
"tests/test_config.py::test_reset_resets_values_to_defaults",
"tests/test_config.py::test_repr_of_config",
"tests/test_config.py::test_assigning_nameless_item_directly_to_config_should_set_its_name",
"tests/test_config.py::test_assigning_item_with_name_directly_to_config_should_preserve_its_name",
"tests/test_config.py::test_item_name_and_alias_must_be_a_string",
"tests/test_config.py::test_section_name_must_be_a_string",
"tests/test_config.py::test_to_dict_should_not_include_items_with_no_usable_value",
"tests/test_config.py::test_read_dict_recursively_loads_values_from_a_dictionary",
"tests/test_config.py::test_read_dict_as_defaults_loads_default_values_from_a_dictionary",
"tests/test_config.py::test_declaration_parser_does_not_modify_config",
"tests/test_config.py::test_allows_iteration_over_all_items",
"tests/test_config.py::test_forbids_accidental_item_overwrite_via_setitem",
"tests/test_config.py::test_allows_iteration_over_sections",
"tests/test_config.py::test_attribute_read_access",
"tests/test_config.py::test_attribute_write_access",
"tests/test_config.py::test_forbids_accidental_item_overwrite_via_setattr",
"tests/test_config.py::test_to_dict",
"tests/test_config.py::test_can_inspect_config_contents",
"tests/test_config.py::test_can_have_a_dict_as_a_config_value_if_wrapped_inside_item",
"tests/test_config.py::test_len_of_config_returns_number_of_items_in_it",
"tests/test_config.py::test__getitem__handles_paths_to_sections_and_items_and_so_does__contains__",
"tests/test_config.py::test_can_use__setitem__to_create_new_deep_paths",
"tests/test_config.py::test_section_knows_its_alias",
"tests/test_config.py::test_config_item_value_can_be_unicode_str"
]
| []
| MIT License | 1,285 | [
"configmanager/managers.py"
]
| [
"configmanager/managers.py"
]
|
|
imageio__imageio-257 | 48e81976e70f2c4795dfdd105d8115bc53f66a11 | 2017-05-21 22:41:29 | 48e81976e70f2c4795dfdd105d8115bc53f66a11 | diff --git a/docs/examples.rst b/docs/examples.rst
index 4a25d4f..f2a1f24 100644
--- a/docs/examples.rst
+++ b/docs/examples.rst
@@ -49,6 +49,19 @@ Iterate over frames in a movie
print('Mean of frame %i is %1.1f' % (i, im.mean()))
+Grab screenshot or image from the clipboard
+-------------------------------------------
+
+(Screenshots are supported on Windows and OS X, clipboard on Windows only.)
+
+.. code-block:: python
+
+ import imageio
+
+ im_screen = imageio.imread('<screen>')
+ im_clipboard = imageio.imread('<clipboard>')
+
+
Grab frames from your webcam
----------------------------
diff --git a/imageio/core/functions.py b/imageio/core/functions.py
index d07409b..653d6fe 100644
--- a/imageio/core/functions.py
+++ b/imageio/core/functions.py
@@ -33,12 +33,31 @@ be used to read/write data and meta data in a more controlled manner.
This also allows specific scientific formats to be exposed in a way
that best suits that file-format.
+----
-.. note::
-
- Some of these functions were renamed in v1.1 to realize a more clear
- and consistent API. The old functions are still available for
- backward compatibility (and will be in the foreseeable future).
+Supported resource URI's:
+
+All functions described here accept a URI to describe the resource to
+read from or write to. These can be a wide range of things. (Imageio
+takes care of handling the URI so that plugins can access the data in
+an easy way.)
+
+For reading and writing:
+
+* a normal filename, e.g. ``'c:\\foo\\bar.png'``
+* a file in a zipfile, e.g. ``'c:\\foo\\bar.zip\\eggs.png'``
+* a file object with a ``read()`` / ``write()`` method.
+
+For reading:
+
+* an http/ftp address, e.g. ``'http://example.com/foo.png'``
+* the raw bytes of an image file
+* ``get_reader("<video0>")`` to grab images from a (web) camera.
+* ``imread("<screen>")`` to grab a screenshot (on Windows or OS X).
+* ``imread("<clipboard>")`` to grab an image from the clipboard (on Windows).
+
+For writing one can also use ``'<bytes>'`` or ``imageio.RETURN_BYTES`` to
+make a write function return the bytes instead of writing to a file.
"""
@@ -80,9 +99,8 @@ def get_reader(uri, format=None, mode='?', **kwargs):
Parameters
----------
uri : {str, bytes, file}
- The resource to load the image from. This can be a normal
- filename, a file in a zipfile, an http/ftp address, a file
- object, or the raw bytes of an image file.
+ The resource to load the image from, e.g. a filename, http address or
+ file object, see the docs for more info.
format : str
The format to use to read the file. By default imageio selects
the appropriate for you based on the filename and its contents.
@@ -120,10 +138,8 @@ def get_writer(uri, format=None, mode='?', **kwargs):
Parameters
----------
uri : {str, file}
- The resource to write the image to. This can be a normal
- filename, a file in a zipfile, a file object, or
- ``imageio.RETURN_BYTES``, in which case the raw bytes are
- returned.
+ The resource to write the image to, e.g. a filename or file object,
+ see the docs for more info.
format : str
The format to use to read the file. By default imageio selects
the appropriate for you based on the filename.
@@ -170,9 +186,8 @@ def imread(uri, format=None, **kwargs):
Parameters
----------
uri : {str, bytes, file}
- The resource to load the image from. This can be a normal
- filename, a file in a zipfile, an http/ftp address, a file
- object, or the raw bytes.
+ The resource to load the image from, e.g. a filename, http address or
+ file object, see the docs for more info.
format : str
The format to use to read the file. By default imageio selects
the appropriate for you based on the filename and its contents.
@@ -195,10 +210,8 @@ def imwrite(uri, im, format=None, **kwargs):
Parameters
----------
uri : {str, file}
- The resource to write the image to. This can be a normal
- filename, a file in a zipfile, a file object, or
- ``imageio.RETURN_BYTES``, in which case the raw bytes are
- returned.
+ The resource to write the image to, e.g. a filename or file object,
+ see the docs for more info.
im : numpy.ndarray
The image data. Must be NxM, NxMx3 or NxMx4.
format : str
@@ -240,9 +253,8 @@ def mimread(uri, format=None, memtest=True, **kwargs):
Parameters
----------
uri : {str, bytes, file}
- The resource to load the images from. This can be a normal
- filename, a file in a zipfile, an http/ftp address, a file
- object, or the raw bytes.
+ The resource to load the images from, e.g. a filename, http address or
+ file object, see the docs for more info.
format : str
The format to use to read the file. By default imageio selects
the appropriate for you based on the filename and its contents.
@@ -284,10 +296,8 @@ def mimwrite(uri, ims, format=None, **kwargs):
Parameters
----------
uri : {str, file}
- The resource to write the images to. This can be a normal
- filename, a file in a zipfile, a file object, or
- ``imageio.RETURN_BYTES``, in which case the raw bytes are
- returned.
+ The resource to write the images to, e.g. a filename or file object,
+ see the docs for more info.
ims : sequence of numpy arrays
The image data. Each array must be NxM, NxMx3 or NxMx4.
format : str
@@ -339,9 +349,8 @@ def volread(uri, format=None, **kwargs):
Parameters
----------
uri : {str, bytes, file}
- The resource to load the volume from. This can be a normal
- filename, a file in a zipfile, an http/ftp address, a file
- object, or the raw bytes.
+ The resource to load the volume from, e.g. a filename, http address or
+ file object, see the docs for more info.
format : str
The format to use to read the file. By default imageio selects
the appropriate for you based on the filename and its contents.
@@ -364,10 +373,8 @@ def volwrite(uri, im, format=None, **kwargs):
Parameters
----------
uri : {str, file}
- The resource to write the image to. This can be a normal
- filename, a file in a zipfile, a file object, or
- ``imageio.RETURN_BYTES``, in which case the raw bytes are
- returned.
+ The resource to write the image to, e.g. a filename or file object,
+ see the docs for more info.
vol : numpy.ndarray
The image data. Must be NxMxL (or NxMxLxK if each voxel is a tuple).
format : str
@@ -410,9 +417,8 @@ def mvolread(uri, format=None, memtest=True, **kwargs):
Parameters
----------
uri : {str, bytes, file}
- The resource to load the volumes from. This can be a normal
- filename, a file in a zipfile, an http/ftp address, a file
- object, or the raw bytes.
+ The resource to load the volumes from, e.g. a filename, http address or
+ file object, see the docs for more info.
format : str
The format to use to read the file. By default imageio selects
the appropriate for you based on the filename and its contents.
@@ -453,10 +459,8 @@ def mvolwrite(uri, ims, format=None, **kwargs):
Parameters
----------
uri : {str, file}
- The resource to write the volumes to. This can be a normal
- filename, a file in a zipfile, a file object, or
- ``imageio.RETURN_BYTES``, in which case the raw bytes are
- returned.
+ The resource to write the volumes to, e.g. a filename or file object,
+ see the docs for more info.
ims : sequence of numpy arrays
The image data. Each array must be NxMxL (or NxMxLxK if each
voxel is a tuple).
diff --git a/imageio/core/request.py b/imageio/core/request.py
index bc07b25..c77b067 100644
--- a/imageio/core/request.py
+++ b/imageio/core/request.py
@@ -26,6 +26,8 @@ URI_ZIPPED = 4
URI_HTTP = 5
URI_FTP = 6
+SPECIAL_READ_URIS = '<video', '<screen>', '<clipboard>'
+
# The user can use this string in a write call to get the data back as bytes.
RETURN_BYTES = '<bytes>'
@@ -148,7 +150,7 @@ class Request(object):
elif uri.startswith('file://'):
self._uri_type = URI_FILENAME
self._filename = uri[7:]
- elif uri.startswith('<video') and is_read_request:
+ elif uri.startswith(SPECIAL_READ_URIS) and is_read_request:
self._uri_type = URI_BYTES
self._filename = uri
elif uri.startswith(RETURN_BYTES) and is_write_request:
diff --git a/imageio/plugins/__init__.py b/imageio/plugins/__init__.py
index 160647b..ee86ffc 100644
--- a/imageio/plugins/__init__.py
+++ b/imageio/plugins/__init__.py
@@ -83,6 +83,7 @@ For the Format.Writer class:
# First import plugins that we want to take precedence over freeimage
from . import tifffile
from . import pillow
+from . import grab
from . import freeimage
from . import freeimagemulti
diff --git a/imageio/plugins/grab.py b/imageio/plugins/grab.py
new file mode 100644
index 0000000..9d08d11
--- /dev/null
+++ b/imageio/plugins/grab.py
@@ -0,0 +1,123 @@
+"""
+PIL-based formats to take screenshots and grab from the clipboard.
+"""
+
+from __future__ import absolute_import, print_function, division
+
+import threading
+
+import numpy as np
+
+from .. import formats
+from ..core import Format
+
+
+class BaseGrabFormat(Format):
+ """ Base format for grab formats.
+ """
+
+ _pillow_imported = False
+ _ImageGrab = None
+
+ def __init__(self, *args, **kwargs):
+ super(BaseGrabFormat, self).__init__(*args, **kwargs)
+ self._lock = threading.RLock()
+
+ def _can_write(self, request):
+ return False
+
+ def _init_pillow(self):
+ with self._lock:
+ if not self._pillow_imported:
+ self._pillow_imported = True # more like tried to import
+ import PIL
+ if not hasattr(PIL, 'PILLOW_VERSION'): # pragma: no cover
+ raise ImportError('Imageio Pillow requires '
+ 'Pillow, not PIL!')
+ try:
+ from PIL import ImageGrab
+ except ImportError:
+ return None
+ self._ImageGrab = ImageGrab
+ return self._ImageGrab
+
+ class Reader(Format.Reader):
+
+ def _open(self):
+ pass
+
+ def _close(self):
+ pass
+
+ def _get_data(self, index):
+ return self.format._get_data(index)
+
+
+class ScreenGrabFormat(BaseGrabFormat):
+ """ The ScreenGrabFormat provided a means to grab screenshots using
+ the uri of "<screen>".
+
+ This functionality is provided via Pillow. Note that "<screen>" is
+ only supported on Windows and OS X.
+
+ Parameters for reading
+ ----------------------
+ No parameters.
+ """
+
+ def _can_read(self, request):
+ if request.mode[1] not in 'i?':
+ return False
+ if request.filename != '<screen>':
+ return False
+ return bool(self._init_pillow())
+
+ def _get_data(self, index):
+ ImageGrab = self._init_pillow()
+ assert ImageGrab
+
+ pil_im = ImageGrab.grab()
+ assert pil_im is not None
+ im = np.asarray(pil_im)
+ return im, {}
+
+
+class ClipboardGrabFormat(BaseGrabFormat):
+ """ The ClipboardGrabFormat provided a means to grab image data from
+ the clipboard, using the uri "<clipboard>"
+
+ This functionality is provided via Pillow. Note that "<clipboard>" is
+ only supported on Windows.
+
+ Parameters for reading
+ ----------------------
+ No parameters.
+ """
+
+ def _can_read(self, request):
+ if request.mode[1] not in 'i?':
+ return False
+ if request.filename != '<clipboard>':
+ return False
+ return bool(self._init_pillow())
+
+ def _get_data(self, index):
+ ImageGrab = self._init_pillow()
+ assert ImageGrab
+
+ pil_im = ImageGrab.grabclipboard()
+ if pil_im is None:
+ raise RuntimeError('There seems to be no image data on the '
+ 'clipboard now.')
+ im = np.asarray(pil_im)
+ return im, {}
+
+
+# Register. You register an *instance* of a Format class.
+format = ScreenGrabFormat('screengrab',
+ 'Grab screenshots (Windows and OS X only)', [], 'i')
+formats.add_format(format)
+
+format = ClipboardGrabFormat('clipboardgrab',
+ 'Grab from clipboard (Windows only)', [], 'i')
+formats.add_format(format)
diff --git a/imageio/plugins/pillow.py b/imageio/plugins/pillow.py
index ac9ce19..561a76c 100644
--- a/imageio/plugins/pillow.py
+++ b/imageio/plugins/pillow.py
@@ -46,12 +46,12 @@ class PillowFormat(Format):
if not self._pillow_imported:
self._pillow_imported = True # more like tried to import
import PIL
- if not hasattr(PIL, 'PILLOW_VERSION'):
+ if not hasattr(PIL, 'PILLOW_VERSION'): # pragma: no cover
raise ImportError('Imageio Pillow requires '
'Pillow, not PIL!')
from PIL import Image
self._Image = Image
- elif self._Image is None:
+ elif self._Image is None: # pragma: no cover
raise RuntimeError('Imageio Pillow plugin requires '
'Pillow lib.')
Image = self._Image
diff --git a/imageio/plugins/pillow_info.py b/imageio/plugins/pillow_info.py
index 89a9a24..1b5a1b0 100644
--- a/imageio/plugins/pillow_info.py
+++ b/imageio/plugins/pillow_info.py
@@ -10,7 +10,7 @@ if run as a script.
"""
-def generate_info():
+def generate_info(): # pragma: no cover
from urllib.request import urlopen
import PIL
from PIL import Image
@@ -124,8 +124,8 @@ pillow_formats = [
('IM', 'IFUNC Image Memory', '.im'),
('IMT', 'IM Tools', ''),
('IPTC', 'IPTC/NAA', '.iim'),
- ('JPEG', 'JPEG (ISO 10918)', '.jfif .jpeg .jpg .jpe'),
- ('JPEG2000', 'JPEG 2000 (ISO 15444)', '.jpf .j2c .jpc .jp2 .j2k .jpx'),
+ ('JPEG', 'JPEG (ISO 10918)', '.jfif .jpe .jpg .jpeg'),
+ ('JPEG2000', 'JPEG 2000 (ISO 15444)', '.jp2 .j2k .jpc .jpf .jpx .j2c'),
('MCIDAS', 'McIdas area file', ''),
('MIC', 'Microsoft Image Composer', '.mic'),
('MPEG', 'MPEG', '.mpg .mpeg'),
@@ -135,14 +135,13 @@ pillow_formats = [
('PCX', 'Paintbrush', '.pcx'),
('PIXAR', 'PIXAR raster image', '.pxr'),
('PNG', 'Portable network graphics', '.png'),
- ('PPM', 'Pbmplus image', '.ppm .pgm .pbm'),
+ ('PPM', 'Pbmplus image', '.pbm .pgm .ppm'),
('PSD', 'Adobe Photoshop', '.psd'),
- ('SGI', 'SGI Image File Format', '.rgb .sgi .bw .rgba'),
+ ('SGI', 'SGI Image File Format', '.bw .rgb .rgba .sgi'),
('SPIDER', 'Spider 2D image', ''),
('SUN', 'Sun Raster File', '.ras'),
('TGA', 'Targa', '.tga'),
('TIFF', 'Adobe TIFF', '.tif .tiff'),
- ('WEBP', 'WebP image', '.webp'),
('WMF', 'Windows Metafile', '.wmf .emf'),
('XBM', 'X11 Bitmap', '.xbm'),
('XPM', 'X11 Pixel Map', '.xpm'),
@@ -317,9 +316,6 @@ u"""*This is a copy from the Pillow docs.*
**background**
Default background color (a palette color index).
- **duration**
- Time between frames in an animation (in milliseconds).
-
**transparency**
Transparency color index. This key is omitted if the image is not
transparent.
@@ -328,8 +324,8 @@ u"""*This is a copy from the Pillow docs.*
Version (either ``GIF87a`` or ``GIF89a``).
**duration**
- May not be present. The time to display each frame of the GIF, in
- milliseconds.
+ May not be present. The time to display the current frame
+ of the GIF, in milliseconds.
**loop**
May not be present. The number of times the GIF should loop.
@@ -343,20 +339,41 @@ u"""*This is a copy from the Pillow docs.*
``im.seek()`` raises an ``EOFError`` if you try to seek after the last frame.
- Saving sequences
- ~~~~~~~~~~~~~~~~
+ Saving
+ ~~~~~~
- When calling :py:meth:`~PIL.Image.Image.save`, if a multiframe image is used,
- by default only the first frame will be saved. To save all frames, the
- ``save_all`` parameter must be present and set to ``True``. To append
- additional frames when saving, the ``append_images`` parameter works with
- ``save_all`` to append a list of images containing the extra frames::
+ When calling :py:meth:`~PIL.Image.Image.save`, the following options
+ are available::
im.save(out, save_all=True, append_images=[im1, im2, ...])
- If present, the ``loop`` parameter can be used to set the number of times
- the GIF should loop, and the ``duration`` parameter can set the number of
- milliseconds between each frame.
+ **save_all**
+ If present and true, all frames of the image will be saved. If
+ not, then only the first frame of a multiframe image will be saved.
+
+ **append_images**
+ A list of images to append as additional frames. Each of the
+ images in the list can be single or multiframe images.
+
+ **duration**
+ The display duration of each frame of the multiframe gif, in
+ milliseconds. Pass a single integer for a constant duration, or a
+ list or tuple to set the duration for each frame separately.
+
+ **loop**
+ Integer number of times the GIF should loop.
+
+ **optimize**
+ If present and true, attempt to compress the palette by
+ eliminating unused colors. This is only useful if the palette can
+ be compressed to the next smaller power of 2 elements.
+
+ **palette**
+ Use the specified palette for the saved image. The palette should
+ be a bytes or bytearray object containing the palette entries in
+ RGBRGB... form. It should be no more than 768 bytes. Alternately,
+ the palette can be passed in as an
+ :py:class:`PIL.ImagePalette.ImagePalette` object.
Reading local images
~~~~~~~~~~~~~~~~~~~~
@@ -797,12 +814,14 @@ u"""*This is a copy from the Pillow docs.*
PIL identifies and reads PSD files written by Adobe Photoshop 2.5 and 3.0.
+
""",
'SGI':
u"""*This is a copy from the Pillow docs.*
- PIL reads uncompressed ``L``, ``RGB``, and ``RGBA`` files.
+ Pillow reads and writes uncompressed ``L``, ``RGB``, and ``RGBA`` files.
+
""",
'SPIDER':
u"""*This is a copy from the Pillow docs.*
@@ -842,8 +861,8 @@ u"""*This is a copy from the Pillow docs.*
For more information about the SPIDER image processing package, see the
`SPIDER homepage`_ at `Wadsworth Center`_.
- .. _SPIDER homepage: http://spider.wadsworth.org/spider_doc/spider/docs/spider.html
- .. _Wadsworth Center: http://www.wadsworth.org/
+ .. _SPIDER homepage: https://spider.wadsworth.org/spider_doc/spider/docs/spider.html
+ .. _Wadsworth Center: https://www.wadsworth.org/
""",
'SUN':
u"""No docs for SUN.""",
@@ -981,30 +1000,6 @@ u"""*This is a copy from the Pillow docs.*
an equal x and y resolution, dpi also implies a unit of inches.
""",
-'WEBP':
-u"""*This is a copy from the Pillow docs.*
-
-
- PIL reads and writes WebP files. The specifics of PIL's capabilities with this
- format are currently undocumented.
-
- The :py:meth:`~PIL.Image.Image.save` method supports the following options:
-
- **lossless**
- If present and true, instructs the WEBP writer to use lossless compression.
-
- **quality**
- Integer, 1-100, Defaults to 80. Sets the quality level for
- lossy compression.
-
- **icc_procfile**
- The ICC Profile to include in the saved file. Only supported if
- the system webp library was built with webpmux support.
-
- **exif**
- The exif data to include in the saved file. Only supported if
- the system webp library was built with webpmux support.
- """,
'WMF':
u"""*This is a copy from the Pillow docs.*
diff --git a/imageio/plugins/tifffile.py b/imageio/plugins/tifffile.py
index a26ea79..f6d25ad 100644
--- a/imageio/plugins/tifffile.py
+++ b/imageio/plugins/tifffile.py
@@ -37,11 +37,8 @@ READ_METADATA_KEYS = ('planar_configuration', 'is_fluoview', 'is_nih',
class TiffFormat(Format):
+
""" Provides support for a wide range of Tiff images.
-
- Images that contain multiple pages can be read using ``imageio.mimread()``
- to read the individual pages, or ``imageio.volread()`` to obtain a
- single (higher dimensional) array.
Parameters for reading
----------------------
@@ -180,28 +177,17 @@ class TiffFormat(Format):
def _close(self):
self._tf.close()
-
+
def _get_length(self):
- if self.request.mode[1] in 'vV':
- return 1 # or can there be pages in pages or something?
- else:
- return len(self._tf)
-
+ return len(self._tf)
+
def _get_data(self, index):
- if self.request.mode[1] in 'vV':
- # Read data as single 3D (+ color channels) array
- if index != 0:
- raise IndexError(
- 'Tiff support no more than 1 "volume" per file')
- im = self._tf.asarray() # request as singleton image
- meta = self._meta
- else:
- # Read as 2D image
- if index < 0 or index >= len(self._tf):
- raise IndexError(
- 'Index out of range while reading from tiff file')
- im = self._tf[index].asarray()
- meta = self._meta or self._get_meta_data(index)
+ # Get data
+ if index < 0 or index >= len(self._tf):
+ raise IndexError(
+ 'Index out of range while reading from tiff file')
+ im = self._tf[index].asarray()
+ meta = self._meta or self._get_meta_data(index)
# Return array and empty meta data
return im, meta
@@ -230,8 +216,6 @@ class TiffFormat(Format):
def _append_data(self, im, meta):
if meta:
self.set_meta_data(meta)
- # No need to check self.request.mode; tiffile figures out whether
- # this is a single page, or all page data at once.
self._tf.save(np.asanyarray(im), **self._meta)
def set_meta_data(self, meta):
| Screengrab plugin
Pillow can apparently do screen-grabs. That would be a nice addition to the webcam support. | imageio/imageio | diff --git a/tests/test_grab.py b/tests/test_grab.py
new file mode 100644
index 0000000..1cb41bc
--- /dev/null
+++ b/tests/test_grab.py
@@ -0,0 +1,98 @@
+import sys
+
+import numpy as np
+
+from pytest import raises
+from imageio.testing import run_tests_if_main
+
+import imageio
+
+
+def test_grab_plugin_load():
+
+ imageio.plugins.grab.BaseGrabFormat._ImageGrab = FakeImageGrab
+ imageio.plugins.grab.BaseGrabFormat._pillow_imported = True
+ _plat = sys.platform
+ sys.platform = 'win32'
+
+ try:
+
+ reader = imageio.get_reader('<screen>')
+ assert reader.format.name == 'SCREENGRAB'
+
+ reader = imageio.get_reader('<clipboard>')
+ assert reader.format.name == 'CLIPBOARDGRAB'
+
+ with raises(ValueError):
+ imageio.get_writer('<clipboard>')
+ with raises(ValueError):
+ imageio.get_writer('<screen>')
+
+ finally:
+ sys.platform = _plat
+ imageio.plugins.grab.BaseGrabFormat._ImageGrab = None
+ imageio.plugins.grab.BaseGrabFormat._pillow_imported = False
+
+
+class FakeImageGrab:
+
+ has_clipboard = True
+
+ @classmethod
+ def grab(cls):
+ return np.zeros((8, 8, 3), np.uint8)
+
+ @classmethod
+ def grabclipboard(cls):
+ if cls.has_clipboard:
+ return np.zeros((9, 9, 3), np.uint8)
+ else:
+ return None
+
+
+def test_grab_simulated():
+ # Hard to test for real, if only because its only fully suppored on
+ # Windows, but we can monkey patch so we can test all the imageio bits.
+
+ imageio.plugins.grab.BaseGrabFormat._ImageGrab = FakeImageGrab
+ imageio.plugins.grab.BaseGrabFormat._pillow_imported = True
+ _plat = sys.platform
+ sys.platform = 'win32'
+
+ try:
+
+ im = imageio.imread('<screen>')
+ assert im.shape == (8, 8, 3)
+
+ reader = imageio.get_reader('<screen>')
+ im1 = reader.get_data(0)
+ im2 = reader.get_data(0)
+ im3 = reader.get_data(1)
+ assert im1.shape == (8, 8, 3)
+ assert im2.shape == (8, 8, 3)
+ assert im3.shape == (8, 8, 3)
+
+ im = imageio.imread('<clipboard>')
+ assert im.shape == (9, 9, 3)
+
+ reader = imageio.get_reader('<clipboard>')
+ im1 = reader.get_data(0)
+ im2 = reader.get_data(0)
+ im3 = reader.get_data(1)
+ assert im1.shape == (9, 9, 3)
+ assert im2.shape == (9, 9, 3)
+ assert im3.shape == (9, 9, 3)
+
+ # Grabbing from clipboard can fail if there is no image data to grab
+ FakeImageGrab.has_clipboard = False
+ with raises(RuntimeError):
+ im = imageio.imread('<clipboard>')
+
+ finally:
+ sys.platform = _plat
+ imageio.plugins.grab.BaseGrabFormat._ImageGrab = None
+ imageio.plugins.grab.BaseGrabFormat._pillow_imported = False
+ FakeImageGrab.has_clipboard = True
+
+
+run_tests_if_main()
diff --git a/tests/test_tifffile.py b/tests/test_tifffile.py
index ea7ad7d..5ab5ce7 100644
--- a/tests/test_tifffile.py
+++ b/tests/test_tifffile.py
@@ -34,29 +34,16 @@ def test_tifffile_reading_writing():
imageio.imsave(filename1, im2)
im = imageio.imread(filename1)
ims = imageio.mimread(filename1)
- assert im.shape == im2.shape
assert (im == im2).all()
assert len(ims) == 1
-
+
# Multiple images
imageio.mimsave(filename1, [im2, im2, im2])
im = imageio.imread(filename1)
ims = imageio.mimread(filename1)
- assert im.shape == im2.shape
- assert (im == im2).all() # note: this does not imply that the shape match!
- assert len(ims) == 3
- for i in range(3):
- assert ims[i].shape == im2.shape
- assert (ims[i] == im2).all()
-
- # Read all planes as one array - we call it a volume for clarity
- vol = imageio.volread(filename1)
- vols = imageio.mvolread(filename1)
- assert vol.shape == (3, ) + im2.shape
- assert len(vols) == 1 and vol.shape == vols[0].shape
- for i in range(3):
- assert (vol[i] == im2).all()
-
+ assert (im == im2).all()
+ assert len(ims) == 3, ims[0].shape
+
# remote multipage rgb file
filename2 = get_remote_file('images/multipage_rgb.tif')
img = imageio.mimread(filename2)
@@ -80,24 +67,13 @@ def test_tifffile_reading_writing():
# Fail
raises(IndexError, R.get_data, -1)
raises(IndexError, R.get_data, 3)
-
- # Ensure imread + imwrite works round trip
- filename3 = os.path.join(test_dir, 'test_tiff2.tiff')
- im1 = imageio.imread(filename1)
- imageio.imwrite(filename3, im1)
- im3 = imageio.imread(filename3)
- assert im1.ndim == 3
- assert im1.shape == im3.shape
- assert (im1 == im3).all()
-
- # Ensure imread + imwrite works round trip - volume like
+
+ # Ensure imwrite write works round trip
filename3 = os.path.join(test_dir, 'test_tiff2.tiff')
- im1 = imageio.volread(filename1)
- imageio.volwrite(filename3, im1)
- im3 = imageio.volread(filename3)
- assert im1.ndim == 4
- assert im1.shape == im3.shape
- assert (im1 == im3).all()
+ R = imageio.imread(filename1)
+ imageio.imwrite(filename3, R)
+ R2 = imageio.imread(filename3)
+ assert (R == R2).all()
run_tests_if_main()
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_added_files",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 2,
"test_score": 2
},
"num_modified_files": 7
} | 2.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"coveralls"
],
"pre_install": [
"apt-get update",
"apt-get install -y libfreeimage3"
],
"python": "3.6",
"reqs_path": [
"requirements/base.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
charset-normalizer==2.0.12
coverage==6.2
coveralls==3.3.1
docopt==0.6.2
idna==3.10
-e git+https://github.com/imageio/imageio.git@48e81976e70f2c4795dfdd105d8115bc53f66a11#egg=imageio
importlib-metadata==4.8.3
iniconfig==1.1.1
numpy==1.19.5
packaging==21.3
Pillow==8.4.0
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
pytest-cov==4.0.0
requests==2.27.1
tomli==1.2.3
typing_extensions==4.1.1
urllib3==1.26.20
zipp==3.6.0
| name: imageio
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- charset-normalizer==2.0.12
- coverage==6.2
- coveralls==3.3.1
- docopt==0.6.2
- idna==3.10
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- numpy==1.19.5
- packaging==21.3
- pillow==8.4.0
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-cov==4.0.0
- requests==2.27.1
- tomli==1.2.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- zipp==3.6.0
prefix: /opt/conda/envs/imageio
| [
"tests/test_grab.py::test_grab_plugin_load",
"tests/test_grab.py::test_grab_simulated"
]
| []
| [
"tests/test_tifffile.py::test_tifffile_format",
"tests/test_tifffile.py::test_tifffile_reading_writing"
]
| []
| BSD 2-Clause "Simplified" License | 1,286 | [
"imageio/plugins/tifffile.py",
"imageio/core/functions.py",
"imageio/plugins/pillow.py",
"imageio/plugins/__init__.py",
"imageio/plugins/pillow_info.py",
"docs/examples.rst",
"imageio/core/request.py",
"imageio/plugins/grab.py"
]
| [
"imageio/plugins/tifffile.py",
"imageio/core/functions.py",
"imageio/plugins/pillow.py",
"imageio/plugins/__init__.py",
"imageio/plugins/pillow_info.py",
"docs/examples.rst",
"imageio/core/request.py",
"imageio/plugins/grab.py"
]
|
|
zopefoundation__zope.publisher-19 | 4021bcdecb998b1b45ca32222eaa1126925de794 | 2017-05-22 14:04:02 | dc4f23e4b71835bdc858c338a1ca230cf4178feb | diff --git a/CHANGES.rst b/CHANGES.rst
index 608b131..747afad 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -4,7 +4,10 @@ Changes
4.3.2 (unreleased)
------------------
-- Nothing changed yet.
+- Fix instances of ``BaseRequest`` (including ``BrowserRequest``)
+ being unexpectedly ``False`` on Python 3 by defining ``__bool__``.
+ Such instances were always ``True`` on Python 2. See `issue 18
+ <https://github.com/zopefoundation/zope.publisher/issues/18>`_.
4.3.1 (2017-04-24)
diff --git a/src/zope/publisher/base.py b/src/zope/publisher/base.py
index 4c6a0a2..ee6de47 100644
--- a/src/zope/publisher/base.py
+++ b/src/zope/publisher/base.py
@@ -353,9 +353,11 @@ class BaseRequest(object):
# Should be overridden by subclasses
return BaseResponse()
- def __nonzero__(self):
+ def __bool__(self):
# This is here to avoid calling __len__ for boolean tests
- return 1
+ return True
+
+ __nonzero__ = __bool__ # Python 2
def __str__(self):
L1 = self.items()
| `BaseRequest` needs to define `__bool__` for Python 3
It defines `__nonzero__` to always be True for Python 2, but it doesn't define `__bool__` for Python 3. So it falls back to `__len__` and you get different behaviour (requests seem false when they shouldn't be).
Found testing zc.resourcelibrary with Python 3. https://github.com/zopefoundation/zc.resourcelibrary/pull/2 | zopefoundation/zope.publisher | diff --git a/src/zope/publisher/tests/test_baserequest.py b/src/zope/publisher/tests/test_baserequest.py
index eff4d0d..9e1824b 100644
--- a/src/zope/publisher/tests/test_baserequest.py
+++ b/src/zope/publisher/tests/test_baserequest.py
@@ -40,6 +40,9 @@ class TestBaseRequest(BaseTestIPublicationRequest,
def _Test__expectedViewType(self):
return None # we don't expect
+ def test_bool_empty(self):
+ self.assertTrue(self._Test__new())
+
def test_IApplicationRequest_bodyStream(self):
from zope.publisher.base import BaseRequest
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 2
} | 4.3 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[test]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.6",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
certifi==2021.5.30
importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
pytest==6.2.4
python-gettext==4.1
pytz==2025.2
six==1.17.0
toml @ file:///tmp/build/80754af9/toml_1616166611790/work
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
zope.browser==2.4
zope.component==5.1.0
zope.configuration==4.4.1
zope.contenttype==4.6
zope.deprecation==4.4.0
zope.event==4.6
zope.exceptions==4.6
zope.hookable==5.4
zope.i18n==4.9.0
zope.i18nmessageid==5.1.1
zope.interface==5.5.2
zope.location==4.3
zope.proxy==4.6.1
-e git+https://github.com/zopefoundation/zope.publisher.git@4021bcdecb998b1b45ca32222eaa1126925de794#egg=zope.publisher
zope.schema==6.2.1
zope.security==5.8
zope.testing==5.0.1
zope.testrunner==5.6
| name: zope.publisher
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=21.4.0=pyhd3eb1b0_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- importlib-metadata=4.8.1=py36h06a4308_0
- importlib_metadata=4.8.1=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- more-itertools=8.12.0=pyhd3eb1b0_0
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pip=21.2.2=py36h06a4308_0
- pluggy=0.13.1=py36h06a4308_0
- py=1.11.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pytest=6.2.4=py36h06a4308_2
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- toml=0.10.2=pyhd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zipp=3.6.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
- pip:
- python-gettext==4.1
- pytz==2025.2
- six==1.17.0
- zope-browser==2.4
- zope-component==5.1.0
- zope-configuration==4.4.1
- zope-contenttype==4.6
- zope-deprecation==4.4.0
- zope-event==4.6
- zope-exceptions==4.6
- zope-hookable==5.4
- zope-i18n==4.9.0
- zope-i18nmessageid==5.1.1
- zope-interface==5.5.2
- zope-location==4.3
- zope-proxy==4.6.1
- zope-schema==6.2.1
- zope-security==5.8
- zope-testing==5.0.1
- zope-testrunner==5.6
prefix: /opt/conda/envs/zope.publisher
| [
"src/zope/publisher/tests/test_baserequest.py::TestBaseRequest::test_bool_empty"
]
| []
| [
"src/zope/publisher/tests/test_baserequest.py::TestBaseRequest::testEnvironment",
"src/zope/publisher/tests/test_baserequest.py::TestBaseRequest::testGetAndDefaultInMapping",
"src/zope/publisher/tests/test_baserequest.py::TestBaseRequest::testHaveCustomTestsForIApplicationRequest",
"src/zope/publisher/tests/test_baserequest.py::TestBaseRequest::testHaveCustomTestsForIPublicationRequest",
"src/zope/publisher/tests/test_baserequest.py::TestBaseRequest::testHaveCustomTestsForIPublisherRequest",
"src/zope/publisher/tests/test_baserequest.py::TestBaseRequest::testHoldCloseAndGetResponse",
"src/zope/publisher/tests/test_baserequest.py::TestBaseRequest::testIReadMapping",
"src/zope/publisher/tests/test_baserequest.py::TestBaseRequest::testPublicationManagement",
"src/zope/publisher/tests/test_baserequest.py::TestBaseRequest::testSkinManagement",
"src/zope/publisher/tests/test_baserequest.py::TestBaseRequest::testTraversalStack",
"src/zope/publisher/tests/test_baserequest.py::TestBaseRequest::testVerifyIApplicationRequest",
"src/zope/publisher/tests/test_baserequest.py::TestBaseRequest::testVerifyIPublicationRequest",
"src/zope/publisher/tests/test_baserequest.py::TestBaseRequest::testVerifyIPublisherRequest",
"src/zope/publisher/tests/test_baserequest.py::TestBaseRequest::test_AnnotationsExist",
"src/zope/publisher/tests/test_baserequest.py::TestBaseRequest::test_IApplicationRequest_bodyStream",
"src/zope/publisher/tests/test_baserequest.py::TestBaseRequest::test_IPublicationRequest_getPositionalArguments",
"src/zope/publisher/tests/test_baserequest.py::TestBaseRequest::test_IPublisherRequest_processInputs",
"src/zope/publisher/tests/test_baserequest.py::TestBaseRequest::test_IPublisherRequest_retry",
"src/zope/publisher/tests/test_baserequest.py::TestBaseRequest::test_IPublisherRequest_traverse",
"src/zope/publisher/tests/test_baserequest.py::TestBaseRequest::test_SetRequestInResponse",
"src/zope/publisher/tests/test_baserequest.py::TestBaseRequest::test___len__",
"src/zope/publisher/tests/test_baserequest.py::TestBaseRequest::test_items",
"src/zope/publisher/tests/test_baserequest.py::TestBaseRequest::test_keys",
"src/zope/publisher/tests/test_baserequest.py::TestBaseRequest::test_retry_keeps_everything",
"src/zope/publisher/tests/test_baserequest.py::TestBaseRequest::test_values",
"src/zope/publisher/tests/test_baserequest.py::test_suite"
]
| []
| Zope Public License 2.1 | 1,287 | [
"src/zope/publisher/base.py",
"CHANGES.rst"
]
| [
"src/zope/publisher/base.py",
"CHANGES.rst"
]
|
|
sciunto-org__python-bibtexparser-162 | 19051fdaeb3eea869aef1f7534d0a678f12f1b8c | 2017-05-22 14:53:54 | 19051fdaeb3eea869aef1f7534d0a678f12f1b8c | coveralls:
[](https://coveralls.io/builds/11631371)
Coverage increased (+0.6%) to 96.411% when pulling **6ede2250c3fe812408bbfa9c4b94379facf985c6 on Phyks:dev** into **8679713d01972ef173b88657395456e0fe3f2c95 on sciunto-org:master**.
coveralls:
[](https://coveralls.io/builds/11631371)
Coverage increased (+0.6%) to 96.411% when pulling **6ede2250c3fe812408bbfa9c4b94379facf985c6 on Phyks:dev** into **8679713d01972ef173b88657395456e0fe3f2c95 on sciunto-org:master**.
coveralls:
[](https://coveralls.io/builds/11631371)
Coverage increased (+0.6%) to 96.411% when pulling **6ede2250c3fe812408bbfa9c4b94379facf985c6 on Phyks:dev** into **8679713d01972ef173b88657395456e0fe3f2c95 on sciunto-org:master**.
sciunto: The failure seems to be related to this: https://github.com/travis-ci/travis-ci/issues/7771
sciunto: Could you rebase please? | diff --git a/bibtexparser/latexenc.py b/bibtexparser/latexenc.py
index e225de4..b4ac36d 100644
--- a/bibtexparser/latexenc.py
+++ b/bibtexparser/latexenc.py
@@ -85,6 +85,9 @@ def latex_to_unicode(string):
# to normalize to the latter.
cleaned_string = unicodedata.normalize("NFC", "".join(cleaned_string))
+ # Remove any left braces
+ cleaned_string = cleaned_string.replace("{", "").replace("}", "")
+
return cleaned_string
| Inconsistent results using `bibtexparser.latex_to_unicode`
Thanks for writing and maintaining this package!
I found that using `bibtexparser.latex_to_unicode` yields inconsistent results:
>>> latex_to_unicode(r"p\^{a}t\'{e}")
'pâté'
>>> latex_to_unicode(r"\^{i}le")
'{î}le'
Why are there braces around i-circumflex but not around a-circumflex or e-acut? | sciunto-org/python-bibtexparser | diff --git a/bibtexparser/tests/test_customization.py b/bibtexparser/tests/test_customization.py
index e38c078..d6d42b5 100644
--- a/bibtexparser/tests/test_customization.py
+++ b/bibtexparser/tests/test_customization.py
@@ -89,7 +89,16 @@ class TestBibtexParserMethod(unittest.TestCase):
# From issue 121
record = {'title': '{Two Gedenk\\"uberlieferung der Angelsachsen}'}
result = convert_to_unicode(record)
- expected = {'title': '{Two Gedenküberlieferung der Angelsachsen}'}
+ expected = {'title': 'Two Gedenküberlieferung der Angelsachsen'}
+ self.assertEqual(result, expected)
+ # From issue 161
+ record = {'title': r"p\^{a}t\'{e}"}
+ result = convert_to_unicode(record)
+ expected = {'title': "pâté"}
+ self.assertEqual(result, expected)
+ record = {'title': r"\^{i}le"}
+ result = convert_to_unicode(record)
+ expected = {'title': "île"}
self.assertEqual(result, expected)
###########
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 1
} | 0.6 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"nose",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | -e git+https://github.com/sciunto-org/python-bibtexparser.git@19051fdaeb3eea869aef1f7534d0a678f12f1b8c#egg=bibtexparser
exceptiongroup==1.2.2
future==1.0.0
iniconfig==2.1.0
nose==1.3.7
packaging==24.2
pluggy==1.5.0
pyparsing==3.2.3
pytest==8.3.5
tomli==2.2.1
| name: python-bibtexparser
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- exceptiongroup==1.2.2
- future==1.0.0
- iniconfig==2.1.0
- nose==1.3.7
- packaging==24.2
- pluggy==1.5.0
- pyparsing==3.2.3
- pytest==8.3.5
- tomli==2.2.1
prefix: /opt/conda/envs/python-bibtexparser
| [
"bibtexparser/tests/test_customization.py::TestBibtexParserMethod::test_convert_to_unicode"
]
| []
| [
"bibtexparser/tests/test_customization.py::TestBibtexParserMethod::test_add_plaintext_fields",
"bibtexparser/tests/test_customization.py::TestBibtexParserMethod::test_getnames",
"bibtexparser/tests/test_customization.py::TestBibtexParserMethod::test_homogenize",
"bibtexparser/tests/test_customization.py::TestBibtexParserMethod::test_keywords",
"bibtexparser/tests/test_customization.py::TestBibtexParserMethod::test_page_double_hyphen_alreadyOK",
"bibtexparser/tests/test_customization.py::TestBibtexParserMethod::test_page_double_hyphen_nothing",
"bibtexparser/tests/test_customization.py::TestBibtexParserMethod::test_page_double_hyphen_simple",
"bibtexparser/tests/test_customization.py::TestBibtexParserMethod::test_page_double_hyphen_space"
]
| []
| MIT License | 1,288 | [
"bibtexparser/latexenc.py"
]
| [
"bibtexparser/latexenc.py"
]
|
typesafehub__conductr-cli-466 | d6fafba900ac59074dd36f96c7c2221c9f46aef9 | 2017-05-22 18:32:30 | 39719b38ec6fc0f598756700a8a815b56bd8bc59 | diff --git a/conductr_cli/bndl_create.py b/conductr_cli/bndl_create.py
index e5f3cf7..edb06b9 100644
--- a/conductr_cli/bndl_create.py
+++ b/conductr_cli/bndl_create.py
@@ -55,6 +55,7 @@ def bndl_create(args):
mtime = None
bundle_conf_data = b''
runtime_conf_data = b''
+ runtime_conf_str = ''
try:
process_oci = False
@@ -156,19 +157,17 @@ def bndl_create(args):
runtime_conf_path = os.path.join(input_dir, 'runtime-config.sh')
- runtime_conf_str = ''
-
if os.path.exists(runtime_conf_path):
with open(runtime_conf_path, 'r') as runtime_conf_fileobj:
runtime_conf_str = runtime_conf_fileobj.read()
- for env in args.envs if hasattr(args, 'envs') else []:
- if runtime_conf_str:
- runtime_conf_str += '\n'
- runtime_conf_str += 'export \'{}\''.format(env.replace('\'', ''))
-
+ for env in args.envs if hasattr(args, 'envs') else []:
if runtime_conf_str:
- runtime_conf_data = runtime_conf_str.encode('UTF-8')
+ runtime_conf_str += '\n'
+ runtime_conf_str += 'export \'{}\''.format(env.replace('\'', ''))
+
+ if runtime_conf_str:
+ runtime_conf_data = runtime_conf_str.encode('UTF-8')
if not args.name:
try:
| `bndl` `runtime-config.sh` manipulation broken for non-conductr bundles
The code for `runtime-config.sh` was placed in the wrong spot and only works for input types of `bundle` currently. Needs fix to ensure that `docker`, `oci-image`, `oci-bundle` inputs can also specifiy `--env` flags. Note that this doesn't apply to `conduct load --env` flags which operate correctly due to the input in that case being `bundle` (a configuration bundle) | typesafehub/conductr-cli | diff --git a/conductr_cli/test/test_bndl_create.py b/conductr_cli/test/test_bndl_create.py
index c477a36..9cdb5df 100644
--- a/conductr_cli/test/test_bndl_create.py
+++ b/conductr_cli/test/test_bndl_create.py
@@ -623,3 +623,52 @@ class TestBndlCreate(CliTestCase):
self.assertTrue(saw_config)
finally:
shutil.rmtree(temp_dir)
+
+ def test_oci_env(self):
+ stdout_mock = MagicMock()
+ tmpdir = tempfile.mkdtemp()
+ tmpfile = os.path.join(tmpdir, 'output')
+
+ try:
+ attributes = create_attributes_object({
+ 'name': 'test',
+ 'source': tmpdir,
+ 'format': 'oci-image',
+ 'image_tag': 'latest',
+ 'output': tmpfile,
+ 'component_description': '',
+ 'use_shazar': True,
+ 'use_default_endpoints': True,
+ 'annotations': [],
+ 'envs': [
+ 'ENV1=123',
+ 'ENV2=456'
+ ]
+ })
+
+ os.mkdir(os.path.join(tmpdir, 'refs'))
+ open(os.path.join(tmpdir, 'oci-layout'), 'w').close()
+ refs = open(os.path.join(tmpdir, 'refs/latest'), 'w')
+ refs.write('{}')
+ refs.close()
+
+ with \
+ patch('sys.stdin', MagicMock(**{'buffer': BytesIO(b'')})), \
+ patch('sys.stdout.buffer.write', stdout_mock):
+ self.assertEqual(bndl_create.bndl_create(attributes), 0)
+
+ self.assertTrue(zipfile.is_zipfile(tmpfile))
+
+ files = {}
+
+ with zipfile.ZipFile(tmpfile) as zip:
+ infos = zip.infolist()
+ for info in infos:
+ files[info.filename] = zip.read(info.filename)
+
+ self.assertEqual(
+ files['test/runtime-config.sh'],
+ b'export \'ENV1=123\'\nexport \'ENV2=456\''
+ )
+ finally:
+ shutil.rmtree(tmpdir)
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 1
} | 1.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-xdist",
"pytest-mock",
"pytest-asyncio"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | argcomplete==3.6.1
arrow==1.3.0
certifi==2025.1.31
charset-normalizer==3.4.1
colorama==0.4.6
-e git+https://github.com/typesafehub/conductr-cli.git@d6fafba900ac59074dd36f96c7c2221c9f46aef9#egg=conductr_cli
coverage==7.8.0
exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
execnet==2.1.1
idna==3.10
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
jsonschema==2.6.0
packaging @ file:///croot/packaging_1734472117206/work
pager==3.3
pluggy @ file:///croot/pluggy_1733169602837/work
prettytable==0.7.2
psutil==5.9.8
Pygments==2.19.1
pyhocon==0.3.35
PyJWT==1.4.2
pyparsing==3.2.3
pyreadline==2.1
pytest @ file:///croot/pytest_1738938843180/work
pytest-asyncio==0.26.0
pytest-cov==6.0.0
pytest-mock==3.14.0
pytest-xdist==3.6.1
python-dateutil==2.9.0.post0
requests==2.32.3
requests-toolbelt==1.0.0
six==1.17.0
sseclient==0.0.14
toml==0.10.2
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
types-python-dateutil==2.9.0.20241206
typing_extensions==4.13.0
urllib3==2.3.0
www-authenticate==0.9.2
| name: conductr-cli
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- argcomplete==3.6.1
- arrow==1.3.0
- certifi==2025.1.31
- charset-normalizer==3.4.1
- colorama==0.4.6
- coverage==7.8.0
- execnet==2.1.1
- idna==3.10
- jsonschema==2.6.0
- pager==3.3
- prettytable==0.7.2
- psutil==5.9.8
- pygments==2.19.1
- pyhocon==0.3.35
- pyjwt==1.4.2
- pyparsing==3.2.3
- pyreadline==2.1
- pytest-asyncio==0.26.0
- pytest-cov==6.0.0
- pytest-mock==3.14.0
- pytest-xdist==3.6.1
- python-dateutil==2.9.0.post0
- requests==2.32.3
- requests-toolbelt==1.0.0
- six==1.17.0
- sseclient==0.0.14
- toml==0.10.2
- types-python-dateutil==2.9.0.20241206
- typing-extensions==4.13.0
- urllib3==2.3.0
- www-authenticate==0.9.2
prefix: /opt/conda/envs/conductr-cli
| [
"conductr_cli/test/test_bndl_create.py::TestBndlCreate::test_oci_env"
]
| [
"conductr_cli/test/test_bndl_create.py::TestBndlCreate::test_mtime_from_config"
]
| [
"conductr_cli/test/test_bndl_create.py::TestBndlCreate::test_bundle",
"conductr_cli/test/test_bndl_create.py::TestBndlCreate::test_bundle_arg_no_name",
"conductr_cli/test/test_bndl_create.py::TestBndlCreate::test_bundle_conf",
"conductr_cli/test/test_bndl_create.py::TestBndlCreate::test_bundle_conf_dir",
"conductr_cli/test/test_bndl_create.py::TestBndlCreate::test_bundle_conf_no_name",
"conductr_cli/test/test_bndl_create.py::TestBndlCreate::test_bundle_envs",
"conductr_cli/test/test_bndl_create.py::TestBndlCreate::test_bundle_envs_append",
"conductr_cli/test/test_bndl_create.py::TestBndlCreate::test_deterministic_with_shazar",
"conductr_cli/test/test_bndl_create.py::TestBndlCreate::test_deterministic_without_shazar",
"conductr_cli/test/test_bndl_create.py::TestBndlCreate::test_no_format",
"conductr_cli/test/test_bndl_create.py::TestBndlCreate::test_no_ref",
"conductr_cli/test/test_bndl_create.py::TestBndlCreate::test_not_oci",
"conductr_cli/test/test_bndl_create.py::TestBndlCreate::test_with_shazar",
"conductr_cli/test/test_bndl_create.py::TestBndlCreate::test_without_shazar"
]
| []
| Apache License 2.0 | 1,289 | [
"conductr_cli/bndl_create.py"
]
| [
"conductr_cli/bndl_create.py"
]
|
|
borgbackup__borg-2555 | 5a667d6f6aad54ba1246fee4dbf54a67f75b80f4 | 2017-05-23 00:25:08 | a439fa3e720c8bb2a82496768ffcce282fb7f7b7 | diff --git a/src/borg/helpers.py b/src/borg/helpers.py
index a93ba710..3fc22e7c 100644
--- a/src/borg/helpers.py
+++ b/src/borg/helpers.py
@@ -769,7 +769,7 @@ def bin_to_hex(binary):
class Location:
"""Object representing a repository / archive location
"""
- proto = user = host = port = path = archive = None
+ proto = user = _host = port = path = archive = None
# user must not contain "@", ":" or "/".
# Quoting adduser error message:
@@ -814,7 +814,7 @@ class Location:
ssh_re = re.compile(r"""
(?P<proto>ssh):// # ssh://
""" + optional_user_re + r""" # user@ (optional)
- (?P<host>[^:/]+)(?::(?P<port>\d+))? # host or host:port
+ (?P<host>([^:/]+|\[[0-9a-fA-F:.]+\]))(?::(?P<port>\d+))? # host or host:port or [ipv6] or [ipv6]:port
""" + abs_path_re + optional_archive_re, re.VERBOSE) # path or path::archive
file_re = re.compile(r"""
@@ -825,7 +825,7 @@ class Location:
scp_re = re.compile(r"""
(
""" + optional_user_re + r""" # user@ (optional)
- (?P<host>[^:/]+): # host: (don't match / in host to disambiguate from file:)
+ (?P<host>([^:/]+|\[[0-9a-fA-F:.]+\])): # host: (don't match / or [ipv6] in host to disambiguate from file:)
)? # user@host: part is optional
""" + scp_path_re + optional_archive_re, re.VERBOSE) # path with optional archive
@@ -841,7 +841,7 @@ class Location:
def __init__(self, text=''):
self.orig = text
if not self.parse(self.orig):
- raise ValueError
+ raise ValueError('Location: parse failed: %s' % self.orig)
def parse(self, text):
text = replace_placeholders(text)
@@ -871,7 +871,7 @@ def normpath_special(p):
if m:
self.proto = m.group('proto')
self.user = m.group('user')
- self.host = m.group('host')
+ self._host = m.group('host')
self.port = m.group('port') and int(m.group('port')) or None
self.path = normpath_special(m.group('path'))
self.archive = m.group('archive')
@@ -885,10 +885,10 @@ def normpath_special(p):
m = self.scp_re.match(text)
if m:
self.user = m.group('user')
- self.host = m.group('host')
+ self._host = m.group('host')
self.path = normpath_special(m.group('path'))
self.archive = m.group('archive')
- self.proto = self.host and 'ssh' or 'file'
+ self.proto = self._host and 'ssh' or 'file'
return True
return False
@@ -912,6 +912,12 @@ def to_key_filename(self):
def __repr__(self):
return "Location(%s)" % self
+ @property
+ def host(self):
+ # strip square brackets used for IPv6 addrs
+ if self._host is not None:
+ return self._host.lstrip('[').rstrip(']')
+
def canonical_path(self):
if self.proto == 'file':
return self.path
@@ -923,7 +929,7 @@ def canonical_path(self):
else:
path = self.path
return 'ssh://{}{}{}{}'.format('{}@'.format(self.user) if self.user else '',
- self.host,
+ self._host, # needed for ipv6 addrs
':{}'.format(self.port) if self.port else '',
path)
| Please enable IPv6 addresses for remote repositories
Hello,
I was trying to specify a remote borg repo as something like that
ssh://user@[ipv6_address_string]:/path/to/repo
(and tried with leaving out the ssh:// in front, as well)
borg then somehow tried to read the ipv6 address as a hostname, and the hostname lookup naturally failed.
ssh supports ipv6 address string, by putting the address into a square bracket.
I have a use case where I want to backup to a remote repository via ssh onto a machine by ipv6. However, I do not want to expose machines IPv6 address to DNS, that's why I would like to use an address literal instead of a dns domain name in the remote repo url.
Regards,
wararjey | borgbackup/borg | diff --git a/src/borg/testsuite/helpers.py b/src/borg/testsuite/helpers.py
index ff6b5efe..7ce22dc2 100644
--- a/src/borg/testsuite/helpers.py
+++ b/src/borg/testsuite/helpers.py
@@ -58,6 +58,30 @@ def test_ssh(self, monkeypatch):
"Location(proto='ssh', user='user', host='host', port=1234, path='/some/path', archive=None)"
assert repr(Location('ssh://user@host/some/path')) == \
"Location(proto='ssh', user='user', host='host', port=None, path='/some/path', archive=None)"
+ assert repr(Location('ssh://user@[::]:1234/some/path::archive')) == \
+ "Location(proto='ssh', user='user', host='::', port=1234, path='/some/path', archive='archive')"
+ assert repr(Location('ssh://user@[::]:1234/some/path')) == \
+ "Location(proto='ssh', user='user', host='::', port=1234, path='/some/path', archive=None)"
+ assert repr(Location('ssh://user@[::]/some/path')) == \
+ "Location(proto='ssh', user='user', host='::', port=None, path='/some/path', archive=None)"
+ assert repr(Location('ssh://user@[2001:db8::]:1234/some/path::archive')) == \
+ "Location(proto='ssh', user='user', host='2001:db8::', port=1234, path='/some/path', archive='archive')"
+ assert repr(Location('ssh://user@[2001:db8::]:1234/some/path')) == \
+ "Location(proto='ssh', user='user', host='2001:db8::', port=1234, path='/some/path', archive=None)"
+ assert repr(Location('ssh://user@[2001:db8::]/some/path')) == \
+ "Location(proto='ssh', user='user', host='2001:db8::', port=None, path='/some/path', archive=None)"
+ assert repr(Location('ssh://user@[2001:db8::c0:ffee]:1234/some/path::archive')) == \
+ "Location(proto='ssh', user='user', host='2001:db8::c0:ffee', port=1234, path='/some/path', archive='archive')"
+ assert repr(Location('ssh://user@[2001:db8::c0:ffee]:1234/some/path')) == \
+ "Location(proto='ssh', user='user', host='2001:db8::c0:ffee', port=1234, path='/some/path', archive=None)"
+ assert repr(Location('ssh://user@[2001:db8::c0:ffee]/some/path')) == \
+ "Location(proto='ssh', user='user', host='2001:db8::c0:ffee', port=None, path='/some/path', archive=None)"
+ assert repr(Location('ssh://user@[2001:db8::192.0.2.1]:1234/some/path::archive')) == \
+ "Location(proto='ssh', user='user', host='2001:db8::192.0.2.1', port=1234, path='/some/path', archive='archive')"
+ assert repr(Location('ssh://user@[2001:db8::192.0.2.1]:1234/some/path')) == \
+ "Location(proto='ssh', user='user', host='2001:db8::192.0.2.1', port=1234, path='/some/path', archive=None)"
+ assert repr(Location('ssh://user@[2001:db8::192.0.2.1]/some/path')) == \
+ "Location(proto='ssh', user='user', host='2001:db8::192.0.2.1', port=None, path='/some/path', archive=None)"
def test_file(self, monkeypatch):
monkeypatch.delenv('BORG_REPO', raising=False)
@@ -72,6 +96,22 @@ def test_scp(self, monkeypatch):
"Location(proto='ssh', user='user', host='host', port=None, path='/some/path', archive='archive')"
assert repr(Location('user@host:/some/path')) == \
"Location(proto='ssh', user='user', host='host', port=None, path='/some/path', archive=None)"
+ assert repr(Location('user@[::]:/some/path::archive')) == \
+ "Location(proto='ssh', user='user', host='::', port=None, path='/some/path', archive='archive')"
+ assert repr(Location('user@[::]:/some/path')) == \
+ "Location(proto='ssh', user='user', host='::', port=None, path='/some/path', archive=None)"
+ assert repr(Location('user@[2001:db8::]:/some/path::archive')) == \
+ "Location(proto='ssh', user='user', host='2001:db8::', port=None, path='/some/path', archive='archive')"
+ assert repr(Location('user@[2001:db8::]:/some/path')) == \
+ "Location(proto='ssh', user='user', host='2001:db8::', port=None, path='/some/path', archive=None)"
+ assert repr(Location('user@[2001:db8::c0:ffee]:/some/path::archive')) == \
+ "Location(proto='ssh', user='user', host='2001:db8::c0:ffee', port=None, path='/some/path', archive='archive')"
+ assert repr(Location('user@[2001:db8::c0:ffee]:/some/path')) == \
+ "Location(proto='ssh', user='user', host='2001:db8::c0:ffee', port=None, path='/some/path', archive=None)"
+ assert repr(Location('user@[2001:db8::192.0.2.1]:/some/path::archive')) == \
+ "Location(proto='ssh', user='user', host='2001:db8::192.0.2.1', port=None, path='/some/path', archive='archive')"
+ assert repr(Location('user@[2001:db8::192.0.2.1]:/some/path')) == \
+ "Location(proto='ssh', user='user', host='2001:db8::192.0.2.1', port=None, path='/some/path', archive=None)"
def test_smb(self, monkeypatch):
monkeypatch.delenv('BORG_REPO', raising=False)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 1
} | 1.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest pytest-xdist pytest-cov pytest-benchmark"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc libssl-dev liblz4-dev libacl1-dev libfuse-dev"
],
"python": "3.9",
"reqs_path": [
"requirements.d/development.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | -e git+https://github.com/borgbackup/borg.git@5a667d6f6aad54ba1246fee4dbf54a67f75b80f4#egg=borgbackup
cachetools==5.5.2
chardet==5.2.0
colorama==0.4.6
coverage==7.8.0
Cython==3.0.12
distlib==0.3.9
exceptiongroup==1.2.2
execnet==2.1.1
filelock==3.18.0
iniconfig==2.1.0
msgpack-python==0.5.6
packaging==24.2
platformdirs==4.3.7
pluggy==1.5.0
py-cpuinfo==9.0.0
pyproject-api==1.9.0
pytest==8.3.5
pytest-benchmark==5.1.0
pytest-cov==6.0.0
pytest-xdist==3.6.1
setuptools-scm==8.2.0
tomli==2.2.1
tox==4.25.0
typing_extensions==4.13.0
virtualenv==20.29.3
| name: borg
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- cachetools==5.5.2
- chardet==5.2.0
- colorama==0.4.6
- coverage==7.8.0
- cython==3.0.12
- distlib==0.3.9
- exceptiongroup==1.2.2
- execnet==2.1.1
- filelock==3.18.0
- iniconfig==2.1.0
- msgpack-python==0.5.6
- packaging==24.2
- platformdirs==4.3.7
- pluggy==1.5.0
- py-cpuinfo==9.0.0
- pyproject-api==1.9.0
- pytest==8.3.5
- pytest-benchmark==5.1.0
- pytest-cov==6.0.0
- pytest-xdist==3.6.1
- setuptools-scm==8.2.0
- tomli==2.2.1
- tox==4.25.0
- typing-extensions==4.13.0
- virtualenv==20.29.3
prefix: /opt/conda/envs/borg
| [
"src/borg/testsuite/helpers.py::TestLocationWithoutEnv::test_ssh",
"src/borg/testsuite/helpers.py::TestLocationWithoutEnv::test_scp"
]
| [
"src/borg/testsuite/helpers.py::test_is_slow_msgpack"
]
| [
"src/borg/testsuite/helpers.py::BigIntTestCase::test_bigint",
"src/borg/testsuite/helpers.py::test_bin_to_hex",
"src/borg/testsuite/helpers.py::TestLocationWithoutEnv::test_file",
"src/borg/testsuite/helpers.py::TestLocationWithoutEnv::test_smb",
"src/borg/testsuite/helpers.py::TestLocationWithoutEnv::test_folder",
"src/borg/testsuite/helpers.py::TestLocationWithoutEnv::test_abspath",
"src/borg/testsuite/helpers.py::TestLocationWithoutEnv::test_relpath",
"src/borg/testsuite/helpers.py::TestLocationWithoutEnv::test_with_colons",
"src/borg/testsuite/helpers.py::TestLocationWithoutEnv::test_user_parsing",
"src/borg/testsuite/helpers.py::TestLocationWithoutEnv::test_underspecified",
"src/borg/testsuite/helpers.py::TestLocationWithoutEnv::test_no_slashes",
"src/borg/testsuite/helpers.py::TestLocationWithoutEnv::test_canonical_path",
"src/borg/testsuite/helpers.py::TestLocationWithoutEnv::test_format_path",
"src/borg/testsuite/helpers.py::TestLocationWithoutEnv::test_bad_syntax",
"src/borg/testsuite/helpers.py::TestLocationWithEnv::test_ssh",
"src/borg/testsuite/helpers.py::TestLocationWithEnv::test_file",
"src/borg/testsuite/helpers.py::TestLocationWithEnv::test_scp",
"src/borg/testsuite/helpers.py::TestLocationWithEnv::test_folder",
"src/borg/testsuite/helpers.py::TestLocationWithEnv::test_abspath",
"src/borg/testsuite/helpers.py::TestLocationWithEnv::test_relpath",
"src/borg/testsuite/helpers.py::TestLocationWithEnv::test_with_colons",
"src/borg/testsuite/helpers.py::TestLocationWithEnv::test_no_slashes",
"src/borg/testsuite/helpers.py::FormatTimedeltaTestCase::test",
"src/borg/testsuite/helpers.py::test_chunkerparams",
"src/borg/testsuite/helpers.py::MakePathSafeTestCase::test",
"src/borg/testsuite/helpers.py::PruneSplitTestCase::test",
"src/borg/testsuite/helpers.py::PruneWithinTestCase::test",
"src/borg/testsuite/helpers.py::StableDictTestCase::test",
"src/borg/testsuite/helpers.py::TestParseTimestamp::test",
"src/borg/testsuite/helpers.py::test_get_cache_dir",
"src/borg/testsuite/helpers.py::test_get_keys_dir",
"src/borg/testsuite/helpers.py::test_get_security_dir",
"src/borg/testsuite/helpers.py::test_file_size",
"src/borg/testsuite/helpers.py::test_file_size_precision",
"src/borg/testsuite/helpers.py::test_file_size_sign",
"src/borg/testsuite/helpers.py::test_parse_file_size[1-1]",
"src/borg/testsuite/helpers.py::test_parse_file_size[20-20]",
"src/borg/testsuite/helpers.py::test_parse_file_size[5K-5000]",
"src/borg/testsuite/helpers.py::test_parse_file_size[1.75M-1750000]",
"src/borg/testsuite/helpers.py::test_parse_file_size[1e+9-1000000000.0]",
"src/borg/testsuite/helpers.py::test_parse_file_size[-1T--1000000000000.0]",
"src/borg/testsuite/helpers.py::test_parse_file_size_invalid[]",
"src/borg/testsuite/helpers.py::test_parse_file_size_invalid[5",
"src/borg/testsuite/helpers.py::test_parse_file_size_invalid[4E]",
"src/borg/testsuite/helpers.py::test_parse_file_size_invalid[2229",
"src/borg/testsuite/helpers.py::test_parse_file_size_invalid[1B]",
"src/borg/testsuite/helpers.py::TestBuffer::test_type",
"src/borg/testsuite/helpers.py::TestBuffer::test_len",
"src/borg/testsuite/helpers.py::TestBuffer::test_resize",
"src/borg/testsuite/helpers.py::TestBuffer::test_limit",
"src/borg/testsuite/helpers.py::TestBuffer::test_get",
"src/borg/testsuite/helpers.py::test_yes_input",
"src/borg/testsuite/helpers.py::test_yes_input_defaults",
"src/borg/testsuite/helpers.py::test_yes_input_custom",
"src/borg/testsuite/helpers.py::test_yes_env",
"src/borg/testsuite/helpers.py::test_yes_env_default",
"src/borg/testsuite/helpers.py::test_yes_defaults",
"src/borg/testsuite/helpers.py::test_yes_retry",
"src/borg/testsuite/helpers.py::test_yes_no_retry",
"src/borg/testsuite/helpers.py::test_yes_output",
"src/borg/testsuite/helpers.py::test_yes_env_output",
"src/borg/testsuite/helpers.py::test_progress_percentage_sameline",
"src/borg/testsuite/helpers.py::test_progress_percentage_step",
"src/borg/testsuite/helpers.py::test_progress_percentage_quiet",
"src/borg/testsuite/helpers.py::test_progress_endless",
"src/borg/testsuite/helpers.py::test_progress_endless_step",
"src/borg/testsuite/helpers.py::test_partial_format",
"src/borg/testsuite/helpers.py::test_chunk_file_wrapper",
"src/borg/testsuite/helpers.py::test_chunkit",
"src/borg/testsuite/helpers.py::test_clean_lines",
"src/borg/testsuite/helpers.py::test_format_line",
"src/borg/testsuite/helpers.py::test_format_line_erroneous",
"src/borg/testsuite/helpers.py::test_replace_placeholders",
"src/borg/testsuite/helpers.py::test_swidth_slice",
"src/borg/testsuite/helpers.py::test_swidth_slice_mixed_characters",
"src/borg/testsuite/helpers.py::test_safe_timestamps",
"src/borg/testsuite/helpers.py::TestPopenWithErrorHandling::test_simple",
"src/borg/testsuite/helpers.py::TestPopenWithErrorHandling::test_not_found",
"src/borg/testsuite/helpers.py::TestPopenWithErrorHandling::test_bad_syntax[mismatched",
"src/borg/testsuite/helpers.py::TestPopenWithErrorHandling::test_bad_syntax[foo",
"src/borg/testsuite/helpers.py::TestPopenWithErrorHandling::test_bad_syntax[]",
"src/borg/testsuite/helpers.py::TestPopenWithErrorHandling::test_shell"
]
| []
| BSD License | 1,290 | [
"src/borg/helpers.py"
]
| [
"src/borg/helpers.py"
]
|
|
ELIFE-ASU__Neet-30 | 27902597953c9188545e4f0543f14dd6d180952f | 2017-05-23 01:56:53 | 27902597953c9188545e4f0543f14dd6d180952f | diff --git a/neet/automata.py b/neet/automata.py
index aaa9971..e94dfd7 100644
--- a/neet/automata.py
+++ b/neet/automata.py
@@ -182,7 +182,7 @@ class ECA(object):
return True
- def _unsafe_update(self, lattice):
+ def _unsafe_update(self, lattice, index=None):
"""
Update the state of the ``lattice``, in place, without
checking the validity of the arguments.
@@ -194,13 +194,25 @@ class ECA(object):
>>> ca = ECA(30)
>>> xs = [0,0,1,0,0]
>>> ca._unsafe_update(xs)
+ [0, 1, 1, 1, 0]
>>> xs
[0, 1, 1, 1, 0]
>>> ca.boundary = (0,1)
>>> ca._unsafe_update(xs)
- >>> xs
[1, 1, 0, 0, 0]
+ ::
+
+ >>> ca = ECA(30)
+ >>> xs = [0,0,1,0,0]
+ >>> ca._unsafe_update(xs, index=1)
+ [0, 1, 1, 0, 0]
+ >>> xs
+ [0, 1, 1, 0, 0]
+ >>> ca.boundary = (0,1)
+ >>> ca._unsafe_update(xs, index=-1)
+ [0, 1, 1, 0, 1]
+
::
>>> xs = [0,0,2,0,0]
@@ -210,6 +222,8 @@ class ECA(object):
:param lattice: the one-dimensional sequence of states
:type lattice: sequence
+ :param index: the index to update (or None)
+ :type index: int
:returns: the updated lattice
"""
if self.boundary:
@@ -219,15 +233,33 @@ class ECA(object):
left = lattice[-1]
right = lattice[0]
code = self.code
- d = 2 * left + lattice[0]
- for i in range(1, len(lattice)):
- d = 7 & (2 * d + lattice[i])
- lattice[i-1] = 1 & (code >> d)
- d = 7 & (2 * d + right)
- lattice[-1] = 1 & (code >> d)
+ if index is None:
+ d = 2 * left + lattice[0]
+ for i in range(1, len(lattice)):
+ d = 7 & (2 * d + lattice[i])
+ lattice[i-1] = 1 & (code >> d)
+ d = 7 & (2 * d + right)
+ lattice[-1] = 1 & (code >> d)
+ else:
+ if index < 0:
+ index += len(lattice)
+
+ if index == 0:
+ d = left
+ else:
+ d = lattice[index-1]
+
+ d = 2 * d + lattice[index]
+
+ if index + 1 == len(lattice):
+ d = 2 * d + right
+ else:
+ d = 2 * d + lattice[index+1]
+
+ lattice[index] = 1 & (code >> (7 & d))
return lattice
- def update(self, lattice):
+ def update(self, lattice, index=None):
"""
Update the state of the ``lattice`` in place.
@@ -237,13 +269,13 @@ class ECA(object):
>>> ca = ECA(30)
>>> xs = [0,0,1,0,0]
- >>> ca.update(xs)
+ >>> ca.update(xs, index=1)
+ [0, 1, 1, 0, 0]
>>> xs
- [0, 1, 1, 1, 0]
- >>> ca.boundary = (0,1)
- >>> ca.update(xs)
- >>> xs
- [1, 1, 0, 0, 0]
+ [0, 1, 1, 0, 0]
+ >>> ca.boundary = (1,1)
+ >>> ca.update(xs, index=-1)
+ [0, 1, 1, 0, 1]
::
@@ -257,13 +289,22 @@ class ECA(object):
Traceback (most recent call last):
...
ValueError: invalid value "2" in lattice
+ >>> ca.update(xs, index=5)
+ Traceback (most recent call last):
+ ...
+ IndexError: list index out of range
:param lattice: the one-dimensional sequence of states
:type lattice: sequence
+ :param index: the index to update (or None)
+ :type index: int
:returns: the updated lattice
:raises ValueError: if ``lattice`` is empty
:raises TypeError: if ``lattice`` is not iterable
:raises ValueError: unless :math:`lattice[i] \in \{0,1\}` for all :math:`i`
+ :raises IndexError: if ``index is not None and index > len(states)``
"""
ECA.check_lattice(lattice)
- return self._unsafe_update(lattice)
+ if index is not None and index < -len(lattice):
+ raise(IndexError("lattice index out of range"))
+ return self._unsafe_update(lattice, index)
| Add an optional index argument to network update methods
This adds ability for asynchronous updating | ELIFE-ASU/Neet | diff --git a/test/test_automata.py b/test/test_automata.py
index fcee65b..52d3cf0 100644
--- a/test/test_automata.py
+++ b/test/test_automata.py
@@ -260,3 +260,51 @@ class TestECA(unittest.TestCase):
eca.update(lattice)
self.assertTrue(np.array_equal([1,1,0,0,0], lattice))
+
+ def test_update_index_error(self):
+ eca = ca.ECA(30)
+ with self.assertRaises(IndexError):
+ eca.update([0,0], index=2)
+
+ with self.assertRaises(IndexError):
+ eca.update([0,0], index=-3)
+
+
+ def test_update_index(self):
+ eca = ca.ECA(30, (1,1))
+
+ lattice = [0,0,0,0,0]
+ eca.update(lattice, index=0)
+ self.assertEqual([1,0,0,0,0], lattice)
+
+ lattice = [0,0,0,0,0]
+ eca.update(lattice, index=1)
+ self.assertEqual([0,0,0,0,0], lattice)
+
+ lattice = [0,0,0,0,0]
+ eca.update(lattice, index=-1)
+ self.assertEqual([0,0,0,0,1], lattice)
+
+ lattice = [0,0,1,0,0]
+ eca.update(lattice, index=1)
+ self.assertEqual([0,1,1,0,0], lattice)
+
+
+ def test_update_index_numpy(self):
+ eca = ca.ECA(30, (1,1))
+
+ lattice = np.asarray([0,0,0,0,0])
+ eca.update(lattice, index=0)
+ self.assertTrue(np.array_equal([1,0,0,0,0], lattice))
+
+ lattice = np.asarray([0,0,0,0,0])
+ eca.update(lattice, index=1)
+ self.assertTrue(np.array_equal([0,0,0,0,0], lattice))
+
+ lattice = np.asarray([0,0,0,0,0])
+ eca.update(lattice, index=-1)
+ self.assertTrue(np.array_equal([0,0,0,0,1], lattice))
+
+ lattice = np.asarray([0,0,1,0,0])
+ eca.update(lattice, index=1)
+ self.assertTrue(np.array_equal([0,1,1,0,0], lattice))
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_hunks",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 2,
"test_score": 2
},
"num_modified_files": 1
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"nose",
"nose-cov",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | cov-core==1.15.0
coverage==7.8.0
exceptiongroup==1.2.2
iniconfig==2.1.0
-e git+https://github.com/ELIFE-ASU/Neet.git@27902597953c9188545e4f0543f14dd6d180952f#egg=neet
networkx==3.2.1
nose==1.3.7
nose-cov==1.6
numpy==2.0.2
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
tomli==2.2.1
| name: Neet
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- cov-core==1.15.0
- coverage==7.8.0
- exceptiongroup==1.2.2
- iniconfig==2.1.0
- networkx==3.2.1
- nose==1.3.7
- nose-cov==1.6
- numpy==2.0.2
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- tomli==2.2.1
prefix: /opt/conda/envs/Neet
| [
"test/test_automata.py::TestECA::test_update_index",
"test/test_automata.py::TestECA::test_update_index_error",
"test/test_automata.py::TestECA::test_update_index_numpy"
]
| []
| [
"test/test_automata.py::TestECA::test_check_lattice_list",
"test/test_automata.py::TestECA::test_check_lattice_numpy",
"test/test_automata.py::TestECA::test_check_lattice_string",
"test/test_automata.py::TestECA::test_fail_init",
"test/test_automata.py::TestECA::test_init",
"test/test_automata.py::TestECA::test_invalid_boundary",
"test/test_automata.py::TestECA::test_invalid_code",
"test/test_automata.py::TestECA::test_invalid_lattice_state_update",
"test/test_automata.py::TestECA::test_is_network",
"test/test_automata.py::TestECA::test_is_not_fixed_sized",
"test/test_automata.py::TestECA::test_lattice_empty_update",
"test/test_automata.py::TestECA::test_state_space",
"test/test_automata.py::TestECA::test_update_closed",
"test/test_automata.py::TestECA::test_update_long_time_closed",
"test/test_automata.py::TestECA::test_update_long_time_open",
"test/test_automata.py::TestECA::test_update_numpy",
"test/test_automata.py::TestECA::test_update_open"
]
| []
| MIT License | 1,291 | [
"neet/automata.py"
]
| [
"neet/automata.py"
]
|
|
twisted__tubes-58 | cc210dcbe7b15897193db87cf0c611e8dc038693 | 2017-05-23 07:02:44 | cc210dcbe7b15897193db87cf0c611e8dc038693 | diff --git a/.travis.yml b/.travis.yml
index 7c74ff0..4167733 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -3,19 +3,23 @@ language: python
env:
global:
- secure: "CvFj8Df5OiDRrW7EsTGhkltdmNlYerx9hH/tSKxiNFVDBUUFaTN7rUr7kWcOKchzerGwk7zjZ4SRXyoSCs+Srht6GZxWHkNROwKpp5Xvf5clbLXbp7GO1X/L5rLgrXpGwtkhgNuHx0X2IUCDHUQAUSumPgZcNFu3emgVxEqabN0="
- matrix:
- - TOX_ENV=lint
- - TOX_ENV=py27
- - TOX_ENV=pypy
- - TOX_ENV=docs
- - TOX_ENV=apidocs
- - TOX_ENV=docs-spellcheck
- - TOX_ENV=docs-linkcheck
- # - PUSH_DOCS=true
matrix:
allow_failures:
- env: "TOX_ENV=docs-linkcheck"
+ include:
+ - env: TOX_ENV=lint
+ python: 2.7
+ - env: TOX_ENV=py27
+ python: 2.7
+ - env: TOX_ENV=py36
+ python: 3.6
+ - env: TOX_ENV=pypy
+ - env: TOX_ENV=docs
+ - env: TOX_ENV=apidocs
+ - env: TOX_ENV=docs-spellcheck
+ - env: TOX_ENV=docs-linkcheck
+ # - PUSH_DOCS=true
install:
- ./.travis/install.sh
diff --git a/tox.ini b/tox.ini
index c85569f..af0594e 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,5 +1,5 @@
[tox]
-envlist = py27, pypy, docs, lint, apidocs, docs-spellcheck
+envlist = py27, py36, pypy, docs, lint, apidocs, docs-spellcheck
[testenv]
deps =
diff --git a/tubes/framing.py b/tubes/framing.py
index f7e24f3..c573e10 100644
--- a/tubes/framing.py
+++ b/tubes/framing.py
@@ -270,7 +270,7 @@ def bytesToLines():
@return: a new L{IDrain} that does the given conversion.
"""
- return series(Diverter(bytesDelimitedBy("\n")), _CarriageReturnRemover())
+ return series(Diverter(bytesDelimitedBy(b"\n")), _CarriageReturnRemover())
diff --git a/tubes/tube.py b/tubes/tube.py
index 39de857..2de2cfb 100644
--- a/tubes/tube.py
+++ b/tubes/tube.py
@@ -232,7 +232,7 @@ def series(start, *tubes):
with _registryActive(_tubeRegistry):
result = IDrain(start)
currentFount = result.flowingFrom(None)
- drains = map(IDrain, tubes)
+ drains = [IDrain(tube) for tube in tubes]
for drain in drains:
currentFount = currentFount.flowTo(drain)
return result
| support python 3 | twisted/tubes | diff --git a/tubes/test/test_framing.py b/tubes/test/test_framing.py
index 0925df5..d49cb34 100644
--- a/tubes/test/test_framing.py
+++ b/tubes/test/test_framing.py
@@ -26,10 +26,11 @@ class NetstringTests(TestCase):
ff = FakeFount()
fd = FakeDrain()
ff.flowTo(series(bytesToNetstrings())).flowTo(fd)
- ff.drain.receive("hello")
- self.assertEquals(fd.received, ["{len:d}:{data:s},".format(
- len=len("hello"), data="hello"
- )])
+ ff.drain.receive(b"hello")
+ self.assertEquals(
+ fd.received, [b"%(len)d:%(data)s," %
+ {b"len": len(b"hello"), b"data": b"hello"}]
+ )
def test_bytesToNetstrings(self):
@@ -39,14 +40,14 @@ class NetstringTests(TestCase):
ff = FakeFount()
fd = FakeDrain()
ff.flowTo(series(bytesToNetstrings())).flowTo(fd)
- ff.drain.receive("hello")
- ff.drain.receive("world")
+ ff.drain.receive(b"hello")
+ ff.drain.receive(b"world")
self.assertEquals(
b"".join(fd.received),
- "{len:d}:{data:s},{len2:d}:{data2:s},".format(
- len=len("hello"), data="hello",
- len2=len("world"), data2="world",
- )
+ b"%(len)d:%(data)s,%(len2)d:%(data2)s," % {
+ b"len": len(b"hello"), b"data": b"hello",
+ b"len2": len(b"world"), b"data2": b"world",
+ }
)
@@ -57,8 +58,8 @@ class NetstringTests(TestCase):
ff = FakeFount()
fd = FakeDrain()
ff.flowTo(series(netstringsToBytes())).flowTo(fd)
- ff.drain.receive("1:x,2:yz,3:")
- self.assertEquals(fd.received, ["x", "yz"])
+ ff.drain.receive(b"1:x,2:yz,3:")
+ self.assertEquals(fd.received, [b"x", b"yz"])
@@ -75,10 +76,10 @@ class LineTests(TestCase):
ff = FakeFount()
fd = FakeDrain()
ff.flowTo(series(bytesToLines())).flowTo(fd)
- ff.drain.receive(newline.join([b"alpha", "beta", "gamma"]))
+ ff.drain.receive(newline.join([b"alpha", b"beta", b"gamma"]))
self.assertEquals(fd.received, [b"alpha", b"beta"])
- splitALine("\n")
- splitALine("\r\n")
+ splitALine(b"\n")
+ splitALine(b"\r\n")
def test_linesToBytes(self):
@@ -106,8 +107,8 @@ class LineTests(TestCase):
@tube
class Switcher(object):
def received(self, line):
- splitted = line.split(" ", 1)
- if splitted[0] == 'switch':
+ splitted = line.split(b" ", 1)
+ if splitted[0] == b'switch':
length = int(splitted[1])
lines.divert(series(Switchee(length), fd))
@@ -121,10 +122,10 @@ class LineTests(TestCase):
cc = series(lines, Switcher())
ff.flowTo(cc).flowTo(fd)
- ff.drain.receive("hello\r\nworld\r\nswitch 10\r\nabcde\r\nfgh"
+ ff.drain.receive(b"hello\r\nworld\r\nswitch 10\r\nabcde\r\nfgh"
# + '\r\nagain\r\n'
)
- self.assertEquals("".join(Switchee.datums), "abcde\r\nfgh")
+ self.assertEquals(b"".join(Switchee.datums), b"abcde\r\nfgh")
def test_switchingWithMoreDataToDeliver(self):
@@ -139,16 +140,16 @@ class LineTests(TestCase):
@tube
class Switcher(object):
def received(self, line):
- if 'switch' in line:
+ if b'switch' in line:
lines.divert(series(netstringsToBytes(), fd2))
else:
yield line
cc = series(lines, Switcher())
ff.flowTo(cc).flowTo(fd1)
- ff.drain.receive('something\r\nswitch\r\n7:hello\r\n,5:world,')
- self.assertEquals(fd1.received, ["something"])
- self.assertEquals(fd2.received, ['hello\r\n', 'world'])
+ ff.drain.receive(b'something\r\nswitch\r\n7:hello\r\n,5:world,')
+ self.assertEquals(fd1.received, [b"something"])
+ self.assertEquals(fd2.received, [b'hello\r\n', b'world'])
@@ -166,7 +167,7 @@ class PackedPrefixTests(TestCase):
fd = FakeDrain()
ff.flowTo(series(packed)).flowTo(fd)
ff.drain.receive(b"\x0812345678\x02")
- self.assertEquals(fd.received, ["12345678"])
+ self.assertEquals(fd.received, [b"12345678"])
def test_prefixOut(self):
@@ -177,7 +178,7 @@ class PackedPrefixTests(TestCase):
ff = FakeFount()
fd = FakeDrain()
ff.flowTo(series(packed, fd))
- ff.drain.receive('a')
- ff.drain.receive('bc')
- ff.drain.receive('def')
- self.assertEquals(fd.received, ['\x01a', '\x02bc', '\x03def'])
+ ff.drain.receive(b'a')
+ ff.drain.receive(b'bc')
+ ff.drain.receive(b'def')
+ self.assertEquals(fd.received, [b'\x01a', b'\x02bc', b'\x03def'])
diff --git a/tubes/test/test_protocol.py b/tubes/test/test_protocol.py
index c72e112..7eb4b3e 100644
--- a/tubes/test/test_protocol.py
+++ b/tubes/test/test_protocol.py
@@ -164,7 +164,7 @@ class FlowConnectorTests(TestCase):
ff = FakeFount()
ff.flowTo(self.adaptedDrain)
self.assertEqual(ff.flowIsStopped, False)
- self.adaptedProtocol().connectionLost(Failure(ZeroDivisionError))
+ self.adaptedProtocol().connectionLost(Failure(ZeroDivisionError()))
self.assertEqual(ff.flowIsStopped, True)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 3,
"test_score": 3
},
"num_modified_files": 4
} | 0.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==25.3.0
Automat==24.8.1
characteristic==14.3.0
constantly==23.10.4
exceptiongroup==1.2.2
hyperlink==21.0.0
idna==3.10
incremental==24.7.2
iniconfig==2.1.0
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
six==1.17.0
tomli==2.2.1
-e git+https://github.com/twisted/tubes.git@cc210dcbe7b15897193db87cf0c611e8dc038693#egg=Tubes
Twisted==24.11.0
typing_extensions==4.13.0
zope.interface==7.2
| name: tubes
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==25.3.0
- automat==24.8.1
- characteristic==14.3.0
- constantly==23.10.4
- exceptiongroup==1.2.2
- hyperlink==21.0.0
- idna==3.10
- incremental==24.7.2
- iniconfig==2.1.0
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- six==1.17.0
- tomli==2.2.1
- twisted==24.11.0
- typing-extensions==4.13.0
- zope-interface==7.2
prefix: /opt/conda/envs/tubes
| [
"tubes/test/test_framing.py::LineTests::test_rawMode",
"tubes/test/test_framing.py::LineTests::test_stringToLines",
"tubes/test/test_framing.py::LineTests::test_switchingWithMoreDataToDeliver"
]
| []
| [
"tubes/test/test_framing.py::NetstringTests::test_bytesToNetstrings",
"tubes/test/test_framing.py::NetstringTests::test_netstringToString",
"tubes/test/test_framing.py::NetstringTests::test_stringToNetstring",
"tubes/test/test_framing.py::LineTests::test_linesToBytes",
"tubes/test/test_framing.py::PackedPrefixTests::test_prefixIn",
"tubes/test/test_framing.py::PackedPrefixTests::test_prefixOut",
"tubes/test/test_protocol.py::FlowConnectorTests::test_connectionLostSendsFlowStopped",
"tubes/test/test_protocol.py::FlowConnectorTests::test_connectionLostSendsStopFlow",
"tubes/test/test_protocol.py::FlowConnectorTests::test_dataReceivedBeforeFlowing",
"tubes/test/test_protocol.py::FlowConnectorTests::test_dataReceivedBeforeFlowingThenFlowTo",
"tubes/test/test_protocol.py::FlowConnectorTests::test_dataReceivedWhenFlowingToNone",
"tubes/test/test_protocol.py::FlowConnectorTests::test_drainReceivingWritesToTransport",
"tubes/test/test_protocol.py::FlowConnectorTests::test_flowStoppedStopsConnection",
"tubes/test/test_protocol.py::FlowConnectorTests::test_flowToDeliversData",
"tubes/test/test_protocol.py::FlowConnectorTests::test_flowToSetsDrain",
"tubes/test/test_protocol.py::FlowConnectorTests::test_flowingFrom",
"tubes/test/test_protocol.py::FlowConnectorTests::test_flowingFromAttribute",
"tubes/test/test_protocol.py::FlowConnectorTests::test_flowingFromTwice",
"tubes/test/test_protocol.py::FlowConnectorTests::test_flowingToNoneAfterFlowingToSomething",
"tubes/test/test_protocol.py::FlowConnectorTests::test_pauseUnpauseFromOtherDrain",
"tubes/test/test_protocol.py::FlowConnectorTests::test_pauseUnpauseFromTransport",
"tubes/test/test_protocol.py::FlowConnectorTests::test_stopFlowStopsConnection",
"tubes/test/test_protocol.py::FlowConnectorTests::test_stopProducing",
"tubes/test/test_protocol.py::FlowListenerTests::test_acceptAfterDeferredButBeforeFlowTo",
"tubes/test/test_protocol.py::FlowListenerTests::test_acceptBeforeActuallyListening",
"tubes/test/test_protocol.py::FlowListenerTests::test_backpressure",
"tubes/test/test_protocol.py::FlowListenerTests::test_fromEndpoint",
"tubes/test/test_protocol.py::FlowListenerTests::test_oneConnectionAccepted",
"tubes/test/test_protocol.py::FlowListenerTests::test_stopping"
]
| []
| MIT License | 1,292 | [
".travis.yml",
"tox.ini",
"tubes/framing.py",
"tubes/tube.py"
]
| [
".travis.yml",
"tox.ini",
"tubes/framing.py",
"tubes/tube.py"
]
|
|
zopefoundation__zope.mkzeoinstance-7 | 7ed33bb183929e5645a71950f5032018acbbda51 | 2017-05-23 09:43:28 | 7ed33bb183929e5645a71950f5032018acbbda51 | diff --git a/CHANGES.rst b/CHANGES.rst
index 607054e..f794d66 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -4,7 +4,7 @@ Changelog
4.1 (unreleased)
----------------
-- Nothing changed yet.
+- Fix generated ``runzeo`` and ``zeoctl`` scripts to run with ZEO 5.
4.0 (2017-02-28)
diff --git a/src/zope/mkzeoinstance/__init__.py b/src/zope/mkzeoinstance/__init__.py
index 9b6fe44..8053e92 100644
--- a/src/zope/mkzeoinstance/__init__.py
+++ b/src/zope/mkzeoinstance/__init__.py
@@ -116,9 +116,7 @@ CONFIG_FILE="%(instance_home)s/etc/%(package)s.conf"
PYTHONPATH="$ZODB3_HOME"
export PYTHONPATH INSTANCE_HOME
-ZEOCTL="$ZODB3_HOME/ZEO/zeoctl.py"
-
-exec "$PYTHON" "$ZEOCTL" -C "$CONFIG_FILE" ${1+"$@"}
+exec "$PYTHON" -m ZEO.zeoctl -C "$CONFIG_FILE" ${1+"$@"}
"""
@@ -135,9 +133,7 @@ CONFIG_FILE="%(instance_home)s/etc/%(package)s.conf"
PYTHONPATH="$ZODB3_HOME"
export PYTHONPATH INSTANCE_HOME
-RUNZEO="$ZODB3_HOME/ZEO/runzeo.py"
-
-exec "$PYTHON" "$RUNZEO" -C "$CONFIG_FILE" ${1+"$@"}
+exec "$PYTHON" -m ZEO.runzeo -C "$CONFIG_FILE" ${1+"$@"}
"""
@@ -197,7 +193,7 @@ class ZEOInstanceBuilder:
if k in ('-h', '--help'):
usage(rc=2)
- if len(args) < 1 or len(args) > 2:
+ if len(args) < 1 or len(args) > 2:
usage(rc=1)
instance_home = os.path.abspath(args[0])
| Broken with ZEO 5.1.1: runzeo and zeoctl fails.
While investigating zopefoundation/ZEO#87 I figured out, that the scripts running zeoctl and runzeo are no longer working due to usage of relative imports in ZEO.runzeo and ZEO.zeoctl.
It looks like the clean way is to not call the files as scripts, but execute them as modules. | zopefoundation/zope.mkzeoinstance | diff --git a/src/zope/mkzeoinstance/tests/test_unix.py b/src/zope/mkzeoinstance/tests/test_unix.py
index 68c4a2a..89c5c07 100644
--- a/src/zope/mkzeoinstance/tests/test_unix.py
+++ b/src/zope/mkzeoinstance/tests/test_unix.py
@@ -271,9 +271,7 @@ class ZEOInstanceBuilderTests(_WithTempdir, unittest.TestCase):
'PYTHONPATH="$ZODB3_HOME"',
'export PYTHONPATH INSTANCE_HOME',
'',
- 'ZEOCTL="$ZODB3_HOME/ZEO/zeoctl.py"',
- '',
- 'exec "$PYTHON" "$ZEOCTL" -C "$CONFIG_FILE" ${1+"$@"}',
+ 'exec "$PYTHON" -m ZEO.zeoctl -C "$CONFIG_FILE" ${1+"$@"}',
'',
]) % params
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 1,
"test_score": 3
},
"num_modified_files": 2
} | 4.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[test]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"coverage"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements/base.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | BTrees==6.1
cffi==1.17.1
coverage==7.8.0
exceptiongroup==1.2.2
iniconfig==2.1.0
packaging==24.2
persistent==6.1.1
pluggy==1.5.0
pycparser==2.22
pytest==8.3.5
tomli==2.2.1
transaction==5.0
zc.lockfile==3.0.post1
ZConfig==4.2
zdaemon==5.1
ZEO==6.0.0
ZODB==6.0
zodbpickle==4.2
zope.deferredimport==5.0
zope.interface==7.2
-e git+https://github.com/zopefoundation/zope.mkzeoinstance.git@7ed33bb183929e5645a71950f5032018acbbda51#egg=zope.mkzeoinstance
zope.proxy==6.1
| name: zope.mkzeoinstance
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- btrees==6.1
- cffi==1.17.1
- coverage==7.8.0
- exceptiongroup==1.2.2
- iniconfig==2.1.0
- packaging==24.2
- persistent==6.1.1
- pluggy==1.5.0
- pycparser==2.22
- pytest==8.3.5
- tomli==2.2.1
- transaction==5.0
- zc-lockfile==3.0.post1
- zconfig==4.2
- zdaemon==5.1
- zeo==6.0.0
- zodb==6.0
- zodbpickle==4.2
- zope-deferredimport==5.0
- zope-interface==7.2
- zope-proxy==6.1
prefix: /opt/conda/envs/zope.mkzeoinstance
| [
"src/zope/mkzeoinstance/tests/test_unix.py::ZEOInstanceBuilderTests::test_zeoctl_content"
]
| []
| [
"src/zope/mkzeoinstance/tests/test_unix.py::Test_print_::test_print__no_args_no_kw",
"src/zope/mkzeoinstance/tests/test_unix.py::Test_print_::test_print__w_args",
"src/zope/mkzeoinstance/tests/test_unix.py::Test_print_::test_print__w_kw",
"src/zope/mkzeoinstance/tests/test_unix.py::Test_usage::test_defaults",
"src/zope/mkzeoinstance/tests/test_unix.py::Test_usage::test_explicit",
"src/zope/mkzeoinstance/tests/test_unix.py::Test_usage::test_w_non_str_message",
"src/zope/mkzeoinstance/tests/test_unix.py::ZEOInstanceBuilderTests::test_create_folders_and_files",
"src/zope/mkzeoinstance/tests/test_unix.py::ZEOInstanceBuilderTests::test_get_params",
"src/zope/mkzeoinstance/tests/test_unix.py::ZEOInstanceBuilderTests::test_run_w_help",
"src/zope/mkzeoinstance/tests/test_unix.py::ZEOInstanceBuilderTests::test_run_w_invalid_opt",
"src/zope/mkzeoinstance/tests/test_unix.py::ZEOInstanceBuilderTests::test_run_w_too_many_arguments",
"src/zope/mkzeoinstance/tests/test_unix.py::ZEOInstanceBuilderTests::test_run_wo_arguments",
"src/zope/mkzeoinstance/tests/test_unix.py::ZEOInstanceBuilderTests::test_run_wo_single_arg_non_absolute",
"src/zope/mkzeoinstance/tests/test_unix.py::ZEOInstanceBuilderTests::test_run_wo_two_args_no_host",
"src/zope/mkzeoinstance/tests/test_unix.py::ZEOInstanceBuilderTests::test_run_wo_two_args_w_host",
"src/zope/mkzeoinstance/tests/test_unix.py::ZEOInstanceBuilderTests::test_zeo_conf_content",
"src/zope/mkzeoinstance/tests/test_unix.py::UtilityFunctionsTest::test_makedir",
"src/zope/mkzeoinstance/tests/test_unix.py::UtilityFunctionsTest::test_makefile",
"src/zope/mkzeoinstance/tests/test_unix.py::UtilityFunctionsTest::test_makefile_existing_different_content",
"src/zope/mkzeoinstance/tests/test_unix.py::UtilityFunctionsTest::test_makefile_existing_same_content",
"src/zope/mkzeoinstance/tests/test_unix.py::UtilityFunctionsTest::test_makexfile",
"src/zope/mkzeoinstance/tests/test_unix.py::UtilityFunctionsTest::test_mkdirs",
"src/zope/mkzeoinstance/tests/test_unix.py::UtilityFunctionsTest::test_mkdirs_nested"
]
| []
| null | 1,293 | [
"src/zope/mkzeoinstance/__init__.py",
"CHANGES.rst"
]
| [
"src/zope/mkzeoinstance/__init__.py",
"CHANGES.rst"
]
|
|
AzureAD__azure-activedirectory-library-for-python-87 | 006b8b7749ede41c2f28530134b151a957ab5689 | 2017-05-23 18:47:18 | 901db2c4bcd2f607db576ceeefeeba20a2047ec3 | diff --git a/README.md b/README.md
index a6f8d42..a60795f 100644
--- a/README.md
+++ b/README.md
@@ -8,19 +8,16 @@ The ADAL for python library makes it easy for python applications to authenticat
To support 'service principal' with certificate, ADAL depends on the 'cryptography' package. For smooth installation, some suggestions:
-* For Windows and OSX
+*For Windows and OSX
Upgrade to the latest pip (8.1.2 as of June 2016) and just do `pip install adal`.
-* For Linux
-
-Upgrade to the latest pip (8.1.2 as of June 2016).
+*For Linux
You'll need a C compiler, libffi + its development headers, and openssl + its development headers. Refer to [cryptography installation](https://cryptography.io/en/latest/installation/)
-* To install from source:
+*To install from source:
-Upgrade to the latest pip (8.1.2 as of June 2016).
Before run `python setup.py install`, to avoid dealing with compilation errors from cryptography, run `pip install cryptography` first to use statically-linked wheels.
If you still like build from source, refer to [cryptography installation](https://cryptography.io/en/latest/installation/).
@@ -31,9 +28,9 @@ The convinient methods in 0.1.0 have been removed, and now your application shou
2 Reasons:
-* Each adal client should have a unique id representing an valid application registered in a tenant. The old methods borrowed the client-id of [azure-cli](https://github.com/Azure/azure-xplat-cli), which is never right. It is simple to register your application and get a client id. Many walkthroughs exist. You can follow [one of those](http://www.bradygaster.com/post/using-windows-azure-active-directory-to-authenticate-the-management-libraries). Though that involves C# client, but the flow, and particularly the wizard snapshots are the same with adal-python. Do check out if you are new to AAD.
+* Each adal client should have a unique id representing an valid application registered in a tenant. The old methods borrowed the client-id of [azure-cli](https://github.com/Azure/azure-xplat-cli), which is never right. It is simple to register your application and get a client id. Many walkthroughs exist. You can follow [one of those] (http://www.bradygaster.com/post/using-windows-azure-active-directory-to-authenticate-the-management-libraries). Though that involves C# client, but the flow, and particularly the wizard snapshots are the same with adal-python. Do check out if you are new to AAD.
-* The old method defaults the `resource` argument to 'https://management.core.windows.net/', now you can just supply this value explictly. Please note, there are lots of different azure resources you can acquire tokens through adal though, for example, the samples in the repository acquire for the 'graph' resource. Because it is not an appropriate assumption to be made at the library level, we removed the old defaults.
+* The old mmethod defaults the `resource` argument to 'https://management.core.windows.net/', now you can just supply this value explictly. Please note, there are lots of different azure resources you can acquire tokens through adal though, for example, the samples in the repository acquire for the 'graph' resource. Because it is not an appropriate assumption to be made at the library level, we removed the old defaults.
### Acquire Token with Client Credentials
diff --git a/adal/cache_driver.py b/adal/cache_driver.py
index fba053b..9683dca 100644
--- a/adal/cache_driver.py
+++ b/adal/cache_driver.py
@@ -164,11 +164,19 @@ class CacheDriver(object):
now_plus_buffer = now + timedelta(minutes=Misc.CLOCK_BUFFER)
if is_resource_specific and now_plus_buffer > expiry_date:
- self._log.info('Cached token is expired. Refreshing: %s', expiry_date)
- return self._refresh_expired_entry(entry)
+ if TokenResponseFields.REFRESH_TOKEN in entry:
+ self._log.info('Cached token is expired. Refreshing: %s', expiry_date)
+ return self._refresh_expired_entry(entry)
+ else:
+ self.remove(entry)
+ return None
elif not is_resource_specific and entry.get(TokenResponseFields.IS_MRRT):
- self._log.info('Acquiring new access token from MRRT token.')
- return self._acquire_new_token_from_mrrt(entry)
+ if TokenResponseFields.REFRESH_TOKEN in entry:
+ self._log.info('Acquiring new access token from MRRT token.')
+ return self._acquire_new_token_from_mrrt(entry)
+ else:
+ self.remove(entry)
+ return None
else:
return entry
| Issue while trying to obtain a token using client credentials once the token has expired
I am able to obtain a valid access token by issuing the following command
```
>>> token = context.acquire_token_with_client_credentials(RESOURCE, client_id, client_secret)
```
However, when I issue the same command after the above token has expired, I get the following error message. Please let me know if I am missing something here or if I am expected to issue a different command in order to obtain a new token. Thanks in advance.
```
>>> token = context.acquire_token_with_client_credentials(RESOURCE, client_id, client_secret)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/Users/shetty/.virtualenvs/ad/lib/python2.7/site-packages/adal/authentication_context.py", line 160, in acquire_token_with_client_credentials
return self._acquire_token(token_func)
File "/Users/shetty/.virtualenvs/ad/lib/python2.7/site-packages/adal/authentication_context.py", line 109, in _acquire_token
return token_func(self)
File "/Users/shetty/.virtualenvs/ad/lib/python2.7/site-packages/adal/authentication_context.py", line 158, in token_func
return token_request.get_token_with_client_credentials(client_secret)
File "/Users/shetty/.virtualenvs/ad/lib/python2.7/site-packages/adal/token_request.py", line 304, in get_token_with_client_credentials
token = self._find_token_from_cache()
File "/Users/shetty/.virtualenvs/ad/lib/python2.7/site-packages/adal/token_request.py", line 128, in _find_token_from_cache
return self._cache_driver.find(cache_query)
File "/Users/shetty/.virtualenvs/ad/lib/python2.7/site-packages/adal/cache_driver.py", line 182, in find
is_resource_tenant_specific)
File "/Users/shetty/.virtualenvs/ad/lib/python2.7/site-packages/adal/cache_driver.py", line 171, in _refresh_entry_if_necessary
return self._acquire_new_token_from_mrrt(entry)
File "/Users/shetty/.virtualenvs/ad/lib/python2.7/site-packages/adal/cache_driver.py", line 153, in _acquire_new_token_from_mrrt
token_response = self._refresh_function(entry, self._resource)
File "/Users/shetty/.virtualenvs/ad/lib/python2.7/site-packages/adal/token_request.py", line 137, in _get_token_with_token_response
refresh_token = entry[TOKEN_RESPONSE_FIELDS.REFRESH_TOKEN]
KeyError: 'refreshToken'
``` | AzureAD/azure-activedirectory-library-for-python | diff --git a/tests/test_cache_driver.py b/tests/test_cache_driver.py
new file mode 100644
index 0000000..b3c4e07
--- /dev/null
+++ b/tests/test_cache_driver.py
@@ -0,0 +1,58 @@
+#------------------------------------------------------------------------------
+#
+# Copyright (c) Microsoft Corporation.
+# All rights reserved.
+#
+# This code is licensed under the MIT License.
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files(the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions :
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+# THE SOFTWARE.
+#
+#------------------------------------------------------------------------------
+
+import unittest
+try:
+ from unittest import mock
+except ImportError:
+ import mock
+
+from adal.log import create_log_context
+from adal.cache_driver import CacheDriver
+
+
+class TestCacheDriver(unittest.TestCase):
+ def test_rt_less_item_wont_cause_exception(self): # Github issue #82
+ rt_less_entry_came_from_previous_client_credentials_grant = {
+ "expiresIn": 3600,
+ "_authority": "https://login.microsoftonline.com/foo",
+ "resource": "spn:00000002-0000-0000-c000-000000000000",
+ "tokenType": "Bearer",
+ "expiresOn": "1999-05-22 16:31:46.202000",
+ "isMRRT": True,
+ "_clientId": "client_id",
+ "accessToken": "this is an AT",
+ }
+ refresh_function = mock.MagicMock(return_value={})
+ cache_driver = CacheDriver(
+ {"log_context": create_log_context()}, "authority", "resource",
+ "client_id", mock.MagicMock(), refresh_function)
+ entry = cache_driver._refresh_entry_if_necessary(
+ rt_less_entry_came_from_previous_client_credentials_grant, False)
+ refresh_function.assert_not_called() # Otherwise it will cause an exception
+ self.assertIsNone(entry)
+
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 2
},
"num_modified_files": 2
} | 0.4 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"mock",
"responses",
"requests-mock"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements/base.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | -e git+https://github.com/AzureAD/azure-activedirectory-library-for-python.git@006b8b7749ede41c2f28530134b151a957ab5689#egg=adal
certifi==2025.1.31
cffi==1.17.1
charset-normalizer==3.4.1
cryptography==44.0.2
exceptiongroup==1.2.2
idna==3.10
iniconfig==2.1.0
mock==5.2.0
packaging==24.2
pluggy==1.5.0
pycparser==2.22
PyJWT==2.10.1
pytest==8.3.5
python-dateutil==2.9.0.post0
PyYAML==6.0.2
requests==2.32.3
requests-mock==1.12.1
responses==0.25.7
six==1.17.0
tomli==2.2.1
urllib3==2.3.0
| name: azure-activedirectory-library-for-python
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- certifi==2025.1.31
- cffi==1.17.1
- charset-normalizer==3.4.1
- cryptography==44.0.2
- exceptiongroup==1.2.2
- idna==3.10
- iniconfig==2.1.0
- mock==5.2.0
- packaging==24.2
- pluggy==1.5.0
- pycparser==2.22
- pyjwt==2.10.1
- pytest==8.3.5
- python-dateutil==2.9.0.post0
- pyyaml==6.0.2
- requests==2.32.3
- requests-mock==1.12.1
- responses==0.25.7
- six==1.17.0
- tomli==2.2.1
- urllib3==2.3.0
prefix: /opt/conda/envs/azure-activedirectory-library-for-python
| [
"tests/test_cache_driver.py::TestCacheDriver::test_rt_less_item_wont_cause_exception"
]
| []
| []
| []
| MIT License | 1,294 | [
"adal/cache_driver.py",
"README.md"
]
| [
"adal/cache_driver.py",
"README.md"
]
|
|
EdinburghGenomics__EGCG-Core-52 | 5a073c8d79148ed29379e4818e4dcebd5180eb15 | 2017-05-23 21:56:31 | 5a073c8d79148ed29379e4818e4dcebd5180eb15 | diff --git a/egcg_core/archive_management.py b/egcg_core/archive_management.py
index 3437925..58c5eb9 100644
--- a/egcg_core/archive_management.py
+++ b/egcg_core/archive_management.py
@@ -1,6 +1,8 @@
import os
import re
import subprocess
+from time import sleep
+
from egcg_core.app_logging import logging_default as log_cfg
from egcg_core.exceptions import EGCGError
@@ -83,13 +85,18 @@ def release_file_from_lustre(file_path):
return True
-def register_for_archiving(file_path):
+def register_for_archiving(file_path, strict=False):
if is_register_for_archiving(file_path):
return True
cmd = 'lfs hsm_archive %s' % file_path
val = _get_stdout(cmd)
if val is None or not is_register_for_archiving(file_path):
- raise ArchivingError('Registering %s for archiving to tape failed' % file_path)
+ if strict:
+ raise ArchivingError('Registering %s for archiving to tape failed' % file_path)
+ # Registering for archive can sometime take time so give it a second
+ sleep(1)
+ return register_for_archiving(filter, strict=True)
+
return True
| Failure to register for archiving cause pipeline crash
We should make the registering for archiving more robust by sleeping trying again to register the file after 1 second.
https://github.com/EdinburghGenomics/EGCG-Core/blob/master/egcg_core/archive_management.py#L92
This raising can cause Analysis Driver to crash at the end of the processing rather randomly.
| EdinburghGenomics/EGCG-Core | diff --git a/tests/test_archive_management.py b/tests/test_archive_management.py
index c087706..5d45646 100644
--- a/tests/test_archive_management.py
+++ b/tests/test_archive_management.py
@@ -76,7 +76,7 @@ class TestArchiveManagement(TestEGCG):
'',
'testfile: (0x00000001)',
]) as get_stdout:
- self.assertRaises(ArchivingError, register_for_archiving, 'testfile')
+ self.assertRaises(ArchivingError, register_for_archiving, 'testfile', True)
assert get_stdout.call_count == 3
assert get_stdout.call_args_list[1][0] == ('lfs hsm_archive testfile',)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 2
},
"num_modified_files": 1
} | 0.6 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | asana==0.6.2
cached-property==2.0.1
-e git+https://github.com/EdinburghGenomics/EGCG-Core.git@5a073c8d79148ed29379e4818e4dcebd5180eb15#egg=EGCG_Core
exceptiongroup==1.2.2
genologics==1.0.0
iniconfig==2.1.0
Jinja2==2.8
MarkupSafe==3.0.2
oauthlib==3.2.2
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
PyYAML==6.0.2
requests==2.9.2
requests-oauthlib==0.6.2
six==1.10.0
tomli==2.2.1
| name: EGCG-Core
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- asana==0.6.2
- cached-property==2.0.1
- exceptiongroup==1.2.2
- genologics==1.0.0
- iniconfig==2.1.0
- jinja2==2.8
- markupsafe==3.0.2
- oauthlib==3.2.2
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- pyyaml==6.0.2
- requests==2.9.2
- requests-oauthlib==0.6.2
- six==1.10.0
- tomli==2.2.1
prefix: /opt/conda/envs/EGCG-Core
| [
"tests/test_archive_management.py::TestArchiveManagement::test_register_for_archiving"
]
| []
| [
"tests/test_archive_management.py::TestArchiveManagement::test_archive_directory",
"tests/test_archive_management.py::TestArchiveManagement::test_archive_states",
"tests/test_archive_management.py::TestArchiveManagement::test_recall_from_tape",
"tests/test_archive_management.py::TestArchiveManagement::test_release_file_from_lustre"
]
| []
| MIT License | 1,295 | [
"egcg_core/archive_management.py"
]
| [
"egcg_core/archive_management.py"
]
|
|
google__mobly-216 | 6344f817976315515f6b183b2394012a8f348e23 | 2017-05-24 00:27:30 | 31dcff279d4808e011f6af8ab0661b9750357cda | xpconanfan:
Review status: 0 of 10 files reviewed at latest revision, 6 unresolved discussions.
---
*[mobly/utils.py, line 191 at r1](https://reviewable.io:443/reviews/google/mobly/216#-KksmGNXOJuTxDvAq8CM:-KksmGNXOJuTxDvAq8CN:b-oqfifj) ([raw file](https://github.com/google/mobly/blob/b3fac107ec424a0b8bce0dc1492db2921ef986da/mobly/utils.py#L191)):*
> ```Python
> for path in paths:
> p = abs_path(path)
> for dirPath, unused_subdirList, fileList in os.walk(p):
> ```
Simply use `_` instead of `unused_subdirList`?
---
*[mobly/utils.py, line 429 at r1](https://reviewable.io:443/reviews/google/mobly/216#-KksmQfoOeHMN6bqR-az:-KksmQfoOeHMN6bqR-b-:b8h2k1j) ([raw file](https://github.com/google/mobly/blob/b3fac107ec424a0b8bce0dc1492db2921ef986da/mobly/utils.py#L429)):*
> ```Python
> that matches a given regex pattern.
>
> This function is specifically used to grep strings from AdbProxy's
> ```
If this is specific to `AdbProxy`, should it live there instead?
---
*[mobly/controllers/android_device_lib/jsonrpc_client_base.py, line 124 at r1](https://reviewable.io:443/reviews/google/mobly/216#-KksppqrXEQ8fimOOv0j:-KksppqsL48x2i169ytC:b-5fd9cq) ([raw file](https://github.com/google/mobly/blob/b3fac107ec424a0b8bce0dc1492db2921ef986da/mobly/controllers/android_device_lib/jsonrpc_client_base.py#L124)):*
> ```Python
> # Methods to be implemented by subclasses.
>
> def start_app(self):
> ```
Should this method's name be changed to something like `start_app_and_connect`?
(as long as the new name indicates that it also connects)
---
*[mobly/controllers/android_device_lib/snippet_client.py, line 35 at r1](https://reviewable.io:443/reviews/google/mobly/216#-Kksq6k8LPXjF8h3xhjB:-Kksq6k8LPXjF8h3xhjC:b-flbu2r) ([raw file](https://github.com/google/mobly/blob/b3fac107ec424a0b8bce0dc1492db2921ef986da/mobly/controllers/android_device_lib/snippet_client.py#L35)):*
> ```Python
> 'am instrument -w -e action stop %s/' + _INSTRUMENTATION_RUNNER_PACKAGE)
>
> # Maximum time to wait for the app to start on the device (10 minutes).
> ```
Could we keep this in base class and assign `_APP_START_WAIT_TIME_V0` as an alias, so we don't have to dup this in two places.
---
*[mobly/controllers/android_device_lib/snippet_client.py, line 50 at r1](https://reviewable.io:443/reviews/google/mobly/216#-KksnWRcgnlQ1Gwp-jor:-KksnWRcgnlQ1Gwp-jos:bgyt3m8) ([raw file](https://github.com/google/mobly/blob/b3fac107ec424a0b8bce0dc1492db2921ef986da/mobly/controllers/android_device_lib/snippet_client.py#L50)):*
> ```Python
>
>
> class SnippetClient(jsonrpc_client_base.JsonRpcClientBase):
> ```
Can we add some context on the v0 and v1 changes in the docstring here?
---
*[tools/sl4a_shell.py, line 67 at r1](https://reviewable.io:443/reviews/google/mobly/216#-Kksp8Cgj7VeRpqF61_B:-Kksp8Cgj7VeRpqF61_C:bqebhii) ([raw file](https://github.com/google/mobly/blob/b3fac107ec424a0b8bce0dc1492db2921ef986da/tools/sl4a_shell.py#L67)):*
> ```Python
> 'Device serial to connect to (if more than one device is connected)')
> args = parser.parse_args()
> logging.basicConfig(level=logging.INFO)
> ```
why this change?
---
*Comments from [Reviewable](https://reviewable.io:443/reviews/google/mobly/216)*
<!-- Sent from Reviewable.io -->
adorokhine:
Review status: 0 of 10 files reviewed at latest revision, 6 unresolved discussions.
---
*[mobly/utils.py, line 191 at r1](https://reviewable.io:443/reviews/google/mobly/216#-KksmGNXOJuTxDvAq8CM:-Kkt8twZgicerHD2OJMW:b-896fix) ([raw file](https://github.com/google/mobly/blob/b3fac107ec424a0b8bce0dc1492db2921ef986da/mobly/utils.py#L191)):*
<details><summary><i>Previously, xpconanfan (Ang Li) wrote…</i></summary><blockquote>
Simply use `_` instead of `unused_subdirList`?
</blockquote></details>
Done.
---
*[mobly/utils.py, line 429 at r1](https://reviewable.io:443/reviews/google/mobly/216#-KksmQfoOeHMN6bqR-az:-Kkt9M0sFzwLJ9oFQD7k:bpqwp91) ([raw file](https://github.com/google/mobly/blob/b3fac107ec424a0b8bce0dc1492db2921ef986da/mobly/utils.py#L429)):*
<details><summary><i>Previously, xpconanfan (Ang Li) wrote…</i></summary><blockquote>
If this is specific to `AdbProxy`, should it live there instead?
</blockquote></details>
Changed this text; it's not specific to AdbProxy.
---
*[mobly/controllers/android_device_lib/jsonrpc_client_base.py, line 124 at r1](https://reviewable.io:443/reviews/google/mobly/216#-KksppqrXEQ8fimOOv0j:-Kkt9t1A400G0mYJUpPk:b-896fix) ([raw file](https://github.com/google/mobly/blob/b3fac107ec424a0b8bce0dc1492db2921ef986da/mobly/controllers/android_device_lib/jsonrpc_client_base.py#L124)):*
<details><summary><i>Previously, xpconanfan (Ang Li) wrote…</i></summary><blockquote>
Should this method's name be changed to something like `start_app_and_connect`?
(as long as the new name indicates that it also connects)
</blockquote></details>
Done.
---
*[mobly/controllers/android_device_lib/snippet_client.py, line 35 at r1](https://reviewable.io:443/reviews/google/mobly/216#-Kksq6k8LPXjF8h3xhjB:-Kkt9y5p6eip75Lgct2X:b-umu14f) ([raw file](https://github.com/google/mobly/blob/b3fac107ec424a0b8bce0dc1492db2921ef986da/mobly/controllers/android_device_lib/snippet_client.py#L35)):*
<details><summary><i>Previously, xpconanfan (Ang Li) wrote…</i></summary><blockquote>
Could we keep this in base class and assign `_APP_START_WAIT_TIME_V0` as an alias, so we don't have to dup this in two places.
</blockquote></details>
Actually it shouldn't be duped, because the start time of sl4a has nothing to do with the start time of snippet. I want to reduce the sl4a timeout again but I don't want to do that in this PR.
The one in snippet will soon be deleted because v1 snippets don't have a timeout value like this.
---
*[mobly/controllers/android_device_lib/snippet_client.py, line 50 at r1](https://reviewable.io:443/reviews/google/mobly/216#-KksnWRcgnlQ1Gwp-jor:-KktAVbpYGDTTWN_-E_I:b-896fix) ([raw file](https://github.com/google/mobly/blob/b3fac107ec424a0b8bce0dc1492db2921ef986da/mobly/controllers/android_device_lib/snippet_client.py#L50)):*
<details><summary><i>Previously, xpconanfan (Ang Li) wrote…</i></summary><blockquote>
Can we add some context on the v0 and v1 changes in the docstring here?
</blockquote></details>
Done.
---
*[tools/sl4a_shell.py, line 67 at r1](https://reviewable.io:443/reviews/google/mobly/216#-Kksp8Cgj7VeRpqF61_B:-KktAWQ1epwCDO8KN5OZ:bwqbmaq) ([raw file](https://github.com/google/mobly/blob/b3fac107ec424a0b8bce0dc1492db2921ef986da/tools/sl4a_shell.py#L67)):*
<details><summary><i>Previously, xpconanfan (Ang Li) wrote…</i></summary><blockquote>
why this change?
</blockquote></details>
The same line already exists in snippet_shell so this makes it consistent. It makes it easier to debug because you can just flip this value and rerun to get debug logs. (there's no file logging happening in the shells so there's no other way to view debug logs at the moment.)
---
*Comments from [Reviewable](https://reviewable.io:443/reviews/google/mobly/216)*
<!-- Sent from Reviewable.io -->
xpconanfan:
Review status: 0 of 10 files reviewed at latest revision, 3 unresolved discussions.
---
*[mobly/controllers/android_device_lib/snippet_client.py, line 35 at r1](https://reviewable.io:443/reviews/google/mobly/216#-Kksq6k8LPXjF8h3xhjB:-KktGTB7IK6Eci8jgw2z:b-mhl3j7) ([raw file](https://github.com/google/mobly/blob/b3fac107ec424a0b8bce0dc1492db2921ef986da/mobly/controllers/android_device_lib/snippet_client.py#L35)):*
<details><summary><i>Previously, adorokhine (Alexander Dorokhine) wrote…</i></summary><blockquote>
Actually it shouldn't be duped, because the start time of sl4a has nothing to do with the start time of snippet. I want to reduce the sl4a timeout again but I don't want to do that in this PR.
The one in snippet will soon be deleted because v1 snippets don't have a timeout value like this.
</blockquote></details>
Should this comment mention the pending removal of this value?
Should the comment for the one in sl4a mention the coming reduction in that value?
---
*Comments from [Reviewable](https://reviewable.io:443/reviews/google/mobly/216)*
<!-- Sent from Reviewable.io -->
adorokhine:
Review status: 0 of 10 files reviewed at latest revision, 3 unresolved discussions.
---
*[mobly/controllers/android_device_lib/snippet_client.py, line 35 at r1](https://reviewable.io:443/reviews/google/mobly/216#-Kksq6k8LPXjF8h3xhjB:-KktIKdGuZz_M4fjY_ic:b-896fix) ([raw file](https://github.com/google/mobly/blob/b3fac107ec424a0b8bce0dc1492db2921ef986da/mobly/controllers/android_device_lib/snippet_client.py#L35)):*
<details><summary><i>Previously, xpconanfan (Ang Li) wrote…</i></summary><blockquote>
Should this comment mention the pending removal of this value?
Should the comment for the one in sl4a mention the coming reduction in that value?
</blockquote></details>
Done.
---
*Comments from [Reviewable](https://reviewable.io:443/reviews/google/mobly/216)*
<!-- Sent from Reviewable.io -->
xpconanfan: <img class="emoji" title=":lgtm:" alt=":lgtm:" align="absmiddle" src="https://reviewable.io/lgtm.png" height="20" width="61"/>
---
Review status: 0 of 10 files reviewed at latest revision, all discussions resolved.
---
*Comments from [Reviewable](https://reviewable.io:443/reviews/google/mobly/216#-:-Kl0YT3OyOKHOb-QvWIa:bnfp4nl)*
<!-- Sent from Reviewable.io -->
| diff --git a/mobly/controllers/android_device.py b/mobly/controllers/android_device.py
index 3f8e209..33f7309 100644
--- a/mobly/controllers/android_device.py
+++ b/mobly/controllers/android_device.py
@@ -25,9 +25,7 @@ from mobly import logger as mobly_logger
from mobly import signals
from mobly import utils
from mobly.controllers.android_device_lib import adb
-from mobly.controllers.android_device_lib import event_dispatcher
from mobly.controllers.android_device_lib import fastboot
-from mobly.controllers.android_device_lib import jsonrpc_client_base
from mobly.controllers.android_device_lib import sl4a_client
from mobly.controllers.android_device_lib import snippet_client
@@ -462,7 +460,7 @@ class AndroidDevice(object):
service_info['use_sl4a'] = self.sl4a is not None
self._terminate_sl4a()
for name, client in self._snippet_clients.items():
- self._terminate_jsonrpc_client(client)
+ client.stop_app()
delattr(self, name)
self._snippet_clients = {}
if self._adb_logcat_process:
@@ -643,13 +641,9 @@ class AndroidDevice(object):
self,
'Snippet package "%s" has already been loaded under name'
' "%s".' % (package, client_name))
- host_port = utils.get_available_host_port()
client = snippet_client.SnippetClient(
- package=package,
- host_port=host_port,
- adb_proxy=self.adb,
- log=self.log)
- self._start_jsonrpc_client(client)
+ package=package, adb_proxy=self.adb, log=self.log)
+ client.start_app_and_connect()
self._snippet_clients[name] = client
setattr(self, name, client)
@@ -662,48 +656,10 @@ class AndroidDevice(object):
Creates an sl4a client (self.sl4a) with one connection, and one
EventDispatcher obj (self.ed) with the other connection.
"""
- host_port = utils.get_available_host_port()
- self.sl4a = sl4a_client.Sl4aClient(
- host_port=host_port, adb_proxy=self.adb)
- self._start_jsonrpc_client(self.sl4a)
-
- # Start an EventDispatcher for the current sl4a session
- event_client = sl4a_client.Sl4aClient(
- host_port=host_port, adb_proxy=self.adb)
- event_client.connect(
- uid=self.sl4a.uid, cmd=jsonrpc_client_base.JsonRpcCommand.CONTINUE)
- self.ed = event_dispatcher.EventDispatcher(event_client)
- self.ed.start()
-
- def _start_jsonrpc_client(self, client):
- """Create a connection to a jsonrpc server running on the device.
-
- If the connection cannot be made, tries to restart it.
- """
- client.check_app_installed()
- self.adb.forward(
- ['tcp:%d' % client.host_port, 'tcp:%d' % client.device_port])
- try:
- client.connect()
- except:
- try:
- client.stop_app()
- except Exception as e:
- self.log.warning(e)
- client.start_app()
- client.connect()
-
- def _terminate_jsonrpc_client(self, client):
- try:
- client.closeSl4aSession()
- client.close()
- client.stop_app()
- except:
- self.log.exception('Failed to stop Rpc client for %s.',
- client.app_name)
- finally:
- # Always clean up the adb port
- self.adb.forward(['--remove', 'tcp:%d' % client.host_port])
+ self.sl4a = sl4a_client.Sl4aClient(adb_proxy=self.adb, log=self.log)
+ self.sl4a.start_app_and_connect()
+ # Unpack the 'ed' attribute for compatibility.
+ self.ed = self.sl4a.ed
def _is_timestamp_in_range(self, target, begin_time, end_time):
low = mobly_logger.logline_timestamp_comparator(begin_time,
@@ -849,13 +805,8 @@ class AndroidDevice(object):
the session. Clear corresponding droids and dispatchers from cache.
"""
if self.sl4a:
- self._terminate_jsonrpc_client(self.sl4a)
+ self.sl4a.stop_app()
self.sl4a = None
- if self.ed:
- try:
- self.ed.clean_up()
- except:
- self.log.exception('Failed to shutdown sl4a event dispatcher.')
self.ed = None
def run_iperf_client(self, server_host, extra_args=''):
diff --git a/mobly/controllers/android_device_lib/event_dispatcher.py b/mobly/controllers/android_device_lib/event_dispatcher.py
index a718ca4..5ebf4df 100644
--- a/mobly/controllers/android_device_lib/event_dispatcher.py
+++ b/mobly/controllers/android_device_lib/event_dispatcher.py
@@ -147,7 +147,10 @@ class EventDispatcher:
return
self.started = False
self.clear_all_events()
- self._sl4a.close()
+ # At this point, the sl4a apk is destroyed and nothing is listening on
+ # the socket. Avoid sending any sl4a commands; just clean up the socket
+ # and return.
+ self._sl4a.disconnect()
self.poller.set_result("Done")
# The polling thread is guaranteed to finish after a max of 60 seconds,
# so we don't wait here.
diff --git a/mobly/controllers/android_device_lib/jsonrpc_client_base.py b/mobly/controllers/android_device_lib/jsonrpc_client_base.py
index 9f96aaa..5468271 100644
--- a/mobly/controllers/android_device_lib/jsonrpc_client_base.py
+++ b/mobly/controllers/android_device_lib/jsonrpc_client_base.py
@@ -38,20 +38,11 @@ from builtins import str
import json
import logging
-import re
import socket
import threading
-import time
-from mobly.controllers.android_device_lib import adb
from mobly.controllers.android_device_lib import callback_handler
-# Maximum time to wait for the app to start on the device (10 minutes).
-# TODO: This timeout is set high in order to allow for retries in start_app.
-# Decrease it when the call to connect() has the option for a quicker timeout
-# than the default _cmd() timeout.
-APP_START_WAIT_TIME = 10 * 60
-
# UID of the 'unknown' jsonrpc session. Will cause creation of a new session.
UNKNOWN_UID = -1
@@ -107,39 +98,44 @@ class JsonRpcClientBase(object):
uid: (int) The uid of this session.
"""
- def __init__(self,
- host_port,
- device_port,
- app_name,
- adb_proxy,
- log=logging.getLogger()):
+ def __init__(self, app_name, log=logging.getLogger()):
"""
Args:
- host_port: (int) The host port of this RPC client.
- device_port: (int) The device port of this RPC client.
app_name: (str) The user-visible name of the app being communicated
with.
- adb_proxy: (adb.AdbProxy) The adb proxy to use to start the app.
+ log: (logging.Logger) logger to which to send log messages.
"""
- self.host_port = host_port
- self.device_port = device_port
+ self.host_port = None
+ self.device_port = None
self.app_name = app_name
+ self.log = log
self.uid = None
- self._adb = adb_proxy
self._client = None # prevent close errors on connect failure
self._conn = None
self._counter = None
self._lock = threading.Lock()
self._event_client = None
- self._log = log
def __del__(self):
- self.close()
+ self.disconnect()
# Methods to be implemented by subclasses.
- def _do_start_app(self):
- """Starts the server app on the android device.
+ def start_app_and_connect(self):
+ """Starts the server app on the android device and connects to it.
+
+ After this, the self.host_port and self.device_port attributes must be
+ set.
+
+ Must be implemented by subclasses.
+
+ Raises:
+ AppStartError: When the app was not able to be started.
+ """
+ raise NotImplementedError()
+
+ def stop_app(self):
+ """Kills any running instance of the app.
Must be implemented by subclasses.
"""
@@ -158,47 +154,8 @@ class JsonRpcClientBase(object):
"""
raise NotImplementedError()
- def stop_app(self):
- """Kills any running instance of the app.
-
- Must be implemented by subclasses.
- """
- raise NotImplementedError()
-
- def check_app_installed(self):
- """Checks if app is installed.
-
- Must be implemented by subclasses.
- """
- raise NotImplementedError()
-
# Rest of the client methods.
- def start_app(self, wait_time=APP_START_WAIT_TIME):
- """Starts the server app on the android device.
-
- Args:
- wait_time: int, The minimum number of seconds to wait for the app
- to come up before raising an error. Note that _is_app_running()
- may take longer than wait_time.
-
- Raises:
- AppStartError: When the app was not able to be started.
- """
- self.check_app_installed()
- self._do_start_app()
- start_time = time.time()
- expiration_time = start_time + wait_time
- while time.time() < expiration_time:
- self._log.debug('Attempting to start %s.', self.app_name)
- if self._is_app_running():
- self._log.debug('Successfully started %s after %.1f seconds.',
- self.app_name, time.time() - start_time)
- return
- time.sleep(1)
- raise AppStartError('%s failed to start on %s.' % (self.app_name,
- self._adb.serial))
-
def connect(self, uid=UNKNOWN_UID, cmd=JsonRpcCommand.INIT):
"""Opens a connection to a JSON RPC server.
@@ -232,35 +189,12 @@ class JsonRpcClientBase(object):
else:
self.uid = UNKNOWN_UID
- def close(self):
+ def disconnect(self):
"""Close the connection to the remote client."""
if self._conn:
self._conn.close()
self._conn = None
- def _grep(self, regex, output):
- """Similar to linux's `grep`, this returns the line in an output stream
- that matches a given regex pattern.
-
- This function is specifically used to grep strings from AdbProxy's
- output. We have to do this in Python instead of using cli tools because
- we need to support windows which does not have `grep` in all vesions.
-
- Args:
- regex: string, a regex that matches the expected pattern.
- output: byte string, the raw output of the adb cmd.
-
- Returns:
- A list of strings, all of which are output lines that matches the
- regex pattern.
- """
- lines = output.decode('utf-8').strip().splitlines()
- results = []
- for line in lines:
- if re.search(regex, line):
- results.append(line.strip())
- return results
-
def _cmd(self, command, uid=None):
"""Send a command to the server.
@@ -319,20 +253,6 @@ class JsonRpcClientBase(object):
method_name=method)
return result['result']
- def _is_app_running(self):
- """Checks if the app is currently running on an android device.
-
- May be overridden by subclasses with custom sanity checks.
- """
- running = False
- try:
- self.connect()
- running = True
- finally:
- self.close()
- # This 'return' squashes exceptions from connect()
- return running
-
def __getattr__(self, name):
"""Wrapper for python magic to turn method calls into RPC calls."""
diff --git a/mobly/controllers/android_device_lib/jsonrpc_shell_base.py b/mobly/controllers/android_device_lib/jsonrpc_shell_base.py
index 1176b4c..e596d8c 100755
--- a/mobly/controllers/android_device_lib/jsonrpc_shell_base.py
+++ b/mobly/controllers/android_device_lib/jsonrpc_shell_base.py
@@ -11,7 +11,6 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-
"""Shared library for frontends to jsonrpc servers."""
from __future__ import print_function
@@ -23,7 +22,7 @@ from mobly.controllers import android_device
class Error(Exception):
- pass
+ pass
class JsonRpcShellBase(object):
@@ -55,7 +54,7 @@ class JsonRpcShellBase(object):
if len(serials) != 1:
raise Error(
'Expected one phone, but %d found. Use the -s flag.' %
- len(serials))
+ len(serials))
serial = serials[0]
if serial not in serials:
raise Error('Device "%s" is not found by adb.' % serial)
diff --git a/mobly/controllers/android_device_lib/sl4a_client.py b/mobly/controllers/android_device_lib/sl4a_client.py
index 9e89f09..d961734 100644
--- a/mobly/controllers/android_device_lib/sl4a_client.py
+++ b/mobly/controllers/android_device_lib/sl4a_client.py
@@ -12,51 +12,131 @@
# See the License for the specific language governing permissions and
# limitations under the License.
"""JSON RPC interface to android scripting engine."""
+import logging
+import time
-import re
-
-from mobly.controllers.android_device_lib import adb
+from mobly import utils
+from mobly.controllers.android_device_lib import event_dispatcher
from mobly.controllers.android_device_lib import jsonrpc_client_base
-DEVICE_SIDE_PORT = 8080
-
+_APP_NAME = 'SL4A'
+_DEVICE_SIDE_PORT = 8080
_LAUNCH_CMD = (
'am start -a com.googlecode.android_scripting.action.LAUNCH_SERVER '
'--ei com.googlecode.android_scripting.extra.USE_SERVICE_PORT %s '
'com.googlecode.android_scripting/.activity.ScriptingLayerServiceLauncher')
+# Maximum time to wait for the app to start on the device (10 minutes).
+# TODO: This timeout is set high in order to allow for retries in
+# start_app_and_connect. Decrease it when the call to connect() has the option
+# for a quicker timeout than the default _cmd() timeout.
+# TODO: Evaluate whether the high timeout still makes sense for sl4a. It was
+# designed for user snippets which could be very slow to start depending on the
+# size of the snippet and main apps. sl4a can probably use a much smaller value.
+_APP_START_WAIT_TIME = 10 * 60
+
+
+class Error(Exception):
+ pass
+
+
+class AppStartError(Error):
+ """Raised when sl4a is not able to be started."""
class Sl4aClient(jsonrpc_client_base.JsonRpcClientBase):
"""A client for interacting with SL4A using Mobly Snippet Lib.
- See superclass documentation for a list of public attributes.
+ Extra public attributes:
+ ed: Event dispatcher instance for this sl4a client.
"""
- def __init__(self, host_port, adb_proxy):
+ def __init__(self, adb_proxy, log=logging.getLogger()):
"""Initializes an Sl4aClient.
Args:
- host_port: (int) The host port of this RPC client.
- adb_proxy: (adb.AdbProxy) The adb proxy to use to start the app.
+ self._adb: (adb.AdbProxy) The adb proxy to use to start the app.
+ log: (logging.Logger) logger to which to send log messages.
"""
- super(Sl4aClient, self).__init__(
- host_port=host_port,
- device_port=DEVICE_SIDE_PORT,
- app_name='SL4A',
- adb_proxy=adb_proxy)
+ super(Sl4aClient, self).__init__(app_name=_APP_NAME, log=log)
+ self.ed = None
+ self._adb = adb_proxy
- def _do_start_app(self):
+ def start_app_and_connect(self):
"""Overrides superclass."""
+ # Check that sl4a is installed
+ out = self._adb.shell('pm list package')
+ if not utils.grep('com.googlecode.android_scripting', out):
+ raise AppStartError('%s is not installed on %s' %
+ (_APP_NAME, self._adb.serial))
+
+ # sl4a has problems connecting after disconnection, so kill the apk and
+ # try connecting again.
+ try:
+ self.stop_app()
+ except Exception as e:
+ self.log.warning(e)
+
+ # Launch the app
+ self.host_port = utils.get_available_host_port()
+ self.device_port = _DEVICE_SIDE_PORT
+ self._adb.forward(
+ ['tcp:%d' % self.host_port, 'tcp:%d' % self.device_port])
self._adb.shell(_LAUNCH_CMD % self.device_port)
+ # Connect with retry
+ start_time = time.time()
+ expiration_time = start_time + _APP_START_WAIT_TIME
+ started = False
+ while time.time() < expiration_time:
+ self.log.debug('Attempting to start %s.', self.app_name)
+ try:
+ self.connect()
+ started = True
+ break
+ except:
+ self.log.debug(
+ '%s is not yet running, retrying',
+ self.app_name,
+ exc_info=True)
+ time.sleep(1)
+ if not started:
+ raise jsonrpc_client_base.AppStartError(
+ '%s failed to start on %s.' % (self.app_name, self._adb.serial))
+
+ # Start an EventDispatcher for the current sl4a session
+ event_client = Sl4aClient(self._adb, self.log)
+ event_client.host_port = self.host_port
+ event_client.connect(
+ uid=self.uid, cmd=jsonrpc_client_base.JsonRpcCommand.CONTINUE)
+ self.ed = event_dispatcher.EventDispatcher(event_client)
+ self.ed.start()
+
def stop_app(self):
"""Overrides superclass."""
- self._adb.shell('am force-stop com.googlecode.android_scripting')
+ try:
+ if self._conn:
+ # Be polite; let the dest know we're shutting down.
+ try:
+ self.closeSl4aSession()
+ except:
+ self.log.exception('Failed to gracefully shut down %s.',
+ self.app_name)
- def check_app_installed(self):
- """Overrides superclass."""
- out = self._adb.shell('pm list package')
- if not self._grep('com.googlecode.android_scripting', out):
- raise jsonrpc_client_base.AppStartError(
- '%s is not installed on %s' % (self.app_name,
- self._adb.serial))
+ # Close the socket connection.
+ self.disconnect()
+
+ # Close Event Dispatcher
+ if self.ed:
+ try:
+ self.ed.clean_up()
+ except:
+ self.log.exception(
+ 'Failed to shutdown sl4a event dispatcher.')
+ self.ed = None
+
+ # Terminate the app
+ self._adb.shell('am force-stop com.googlecode.android_scripting')
+ finally:
+ # Always clean up the adb port
+ if self.host_port:
+ self._adb.forward(['--remove', 'tcp:%d' % self.host_port])
diff --git a/mobly/controllers/android_device_lib/snippet_client.py b/mobly/controllers/android_device_lib/snippet_client.py
index a714cbe..1470927 100644
--- a/mobly/controllers/android_device_lib/snippet_client.py
+++ b/mobly/controllers/android_device_lib/snippet_client.py
@@ -14,6 +14,7 @@
"""JSON RPC interface to Mobly Snippet Lib."""
import logging
import re
+import time
from mobly import utils
from mobly.controllers.android_device_lib import adb
@@ -22,90 +23,144 @@ from mobly.controllers.android_device_lib import jsonrpc_client_base
_INSTRUMENTATION_RUNNER_PACKAGE = (
'com.google.android.mobly.snippet.SnippetRunner')
-_LAUNCH_CMD = ('am instrument -w -e action start -e port %s %s/' +
- _INSTRUMENTATION_RUNNER_PACKAGE)
+# TODO(adorokhine): delete this in Mobly 1.6 when snippet v0 support is removed.
+_LAUNCH_CMD_V0 = ('am instrument -w -e action start -e port %s %s/' +
+ _INSTRUMENTATION_RUNNER_PACKAGE)
+
+_LAUNCH_CMD_V1 = (
+ 'am instrument -w -e action start %s/' + _INSTRUMENTATION_RUNNER_PACKAGE)
_STOP_CMD = (
'am instrument -w -e action stop %s/' + _INSTRUMENTATION_RUNNER_PACKAGE)
+# Maximum time to wait for a v0 snippet to start on the device (10 minutes).
+# TODO(adorokhine): delete this in Mobly 1.6 when snippet v0 support is removed.
+_APP_START_WAIT_TIME_V0 = 10 * 60
+
class Error(Exception):
pass
+class ProtocolVersionError(Error):
+ """Raised when the protocol reported by the snippet is unknown."""
+
+
class SnippetClient(jsonrpc_client_base.JsonRpcClientBase):
"""A client for interacting with snippet APKs using Mobly Snippet Lib.
See superclass documentation for a list of public attributes.
+
+ It currently supports both v0 and v1 snippet launch protocols, although
+ support for v0 will be removed in a future version.
+
+ For a description of the launch protocols, see the documentation in
+ mobly-snippet-lib, SnippetRunner.java.
"""
- def __init__(self, package, host_port, adb_proxy, log=logging.getLogger()):
+ def __init__(self, package, adb_proxy, log=logging.getLogger()):
"""Initializes a SnippetClient.
Args:
package: (str) The package name of the apk where the snippets are
defined.
- host_port: (int) The port at which to start the snippet client. Note
- that the same port will currently be used for both the
- device and host side of the connection.
- adb_proxy: (adb.AdbProxy) The adb proxy to use to start the app.
+ adb_proxy: (adb.AdbProxy) Adb proxy for running adb commands.
+ log: (logging.Logger) logger to which to send log messages.
"""
- # TODO(adorokhine): Don't assume that a free host-side port is free on
- # the device as well. Both sides should allocate a unique port.
- super(SnippetClient, self).__init__(
- host_port=host_port,
- device_port=host_port,
- app_name=package,
- adb_proxy=adb_proxy,
- log=log)
+ super(SnippetClient, self).__init__(app_name=package, log=log)
self.package = package
- self.log = log
- self._serial = self._adb.serial
+ self._adb = adb_proxy
self._proc = None
- def _do_start_app(self):
- """Overrides superclass."""
- cmd = _LAUNCH_CMD % (self.device_port, self.package)
- # Use info here so people know exactly what's happening here, which is
- # helpful since they need to create their own instrumentations and
- # manifest.
- self.log.info('Launching snippet apk %s', self.package)
- adb_cmd = [adb.ADB]
- if self._adb.serial:
- adb_cmd += ['-s', self._adb.serial]
- adb_cmd += ['shell', cmd]
- self._proc = utils.start_standing_subprocess(adb_cmd, shell=False)
+ def start_app_and_connect(self):
+ """Overrides superclass. Launches a snippet app and connects to it."""
+ self._check_app_installed()
+
+ # Try launching the app with the v1 protocol. If that fails, fall back
+ # to v0 for compatibility. Use info here so people know exactly what's
+ # happening here, which is helpful since they need to create their own
+ # instrumentations and manifest.
+ self.log.info('Launching snippet apk %s with protocol v1',
+ self.package)
+ cmd = _LAUNCH_CMD_V1 % self.package
+ start_time = time.time()
+ self._proc = self._do_start_app(cmd)
+
+ # "Instrumentation crashed" could be due to several reasons, eg
+ # exception thrown during startup or just a launch protocol 0 snippet
+ # dying because it needs the port flag. Sadly we have no way to tell so
+ # just warn and retry as v0.
+ # TODO(adorokhine): delete this in Mobly 1.6 when snippet v0 support is
+ # removed.
+ line = self._read_line()
+ if line == 'INSTRUMENTATION_RESULT: shortMsg=Process crashed.':
+ self.log.warning('Snippet %s crashed on startup. This might be an '
+ 'actual error or a snippet using deprecated v0 '
+ 'start protocol. Retrying as a v0 snippet.',
+ self.package)
+ self.host_port = utils.get_available_host_port()
+ # Reuse the host port as the device port in v0 snippet. This isn't
+ # safe in general, but the protocol is deprecated.
+ cmd = _LAUNCH_CMD_V0 % (self.host_port, self.package)
+ self._proc = self._do_start_app(cmd)
+ self._connect_to_v0()
+ else:
+ # Check protocol version and get the device port
+ match = re.match('^SNIPPET START, PROTOCOL ([0-9]+) ([0-9]+)$',
+ line)
+ if not match or match.group(1) != '1':
+ raise ProtocolVersionError(line)
+ self._connect_to_v1()
+ self.log.debug('Snippet %s started after %.1fs on host port %s',
+ self.package, time.time() - start_time, self.host_port)
def stop_app(self):
- """Overrides superclass."""
# Kill the pending 'adb shell am instrument -w' process if there is one.
# Although killing the snippet apk would abort this process anyway, we
# want to call stop_standing_subprocess() to perform a health check,
# print the failure stack trace if there was any, and reap it from the
# process table.
- if self._proc:
- utils.stop_standing_subprocess(self._proc)
self.log.debug('Stopping snippet apk %s', self.package)
- out = self._adb.shell(_STOP_CMD % self.package).decode('utf-8')
- if 'OK (0 tests)' not in out:
- raise Error('Failed to stop existing apk. Unexpected output: %s' %
- out)
+ try:
+ # Close the socket connection.
+ self.disconnect()
+ if self._proc:
+ utils.stop_standing_subprocess(self._proc)
+ out = self._adb.shell(_STOP_CMD % self.package).decode('utf-8')
+ if 'OK (0 tests)' not in out:
+ raise Error('Failed to stop existing apk. Unexpected '
+ 'output: %s' % out)
+ finally:
+ # Always clean up the adb port
+ if self.host_port:
+ self._adb.forward(['--remove', 'tcp:%d' % self.host_port])
- def check_app_installed(self):
+ def _start_event_client(self):
"""Overrides superclass."""
+ event_client = SnippetClient(
+ package=self.package,
+ host_port=self.host_port,
+ adb_proxy=self._adb,
+ log=self.log)
+ event_client.connect(self.uid,
+ jsonrpc_client_base.JsonRpcCommand.CONTINUE)
+ return event_client
+
+ def _check_app_installed(self):
# Check that the Mobly Snippet app is installed.
out = self._adb.shell('pm list package')
- if not self._grep('^package:%s$' % self.package, out):
+ if not utils.grep('^package:%s$' % self.package, out):
raise jsonrpc_client_base.AppStartError(
- '%s is not installed on %s' % (self.package, self._serial))
+ '%s is not installed on %s' % (self.package, self._adb.serial))
# Check that the app is instrumented.
out = self._adb.shell('pm list instrumentation')
- matched_out = self._grep('^instrumentation:%s/%s' % (
- self.package, _INSTRUMENTATION_RUNNER_PACKAGE), out)
+ matched_out = utils.grep('^instrumentation:%s/%s' %
+ (self.package,
+ _INSTRUMENTATION_RUNNER_PACKAGE), out)
if not matched_out:
raise jsonrpc_client_base.AppStartError(
'%s is installed on %s, but it is not instrumented.' %
- (self.package, self._serial))
+ (self.package, self._adb.serial))
match = re.search('^instrumentation:(.*)\/(.*) \(target=(.*)\)$',
matched_out[0])
target_name = match.group(3)
@@ -113,17 +168,54 @@ class SnippetClient(jsonrpc_client_base.JsonRpcClientBase):
# same as the snippet package.
if target_name != self.package:
out = self._adb.shell('pm list package')
- if not self._grep('^package:%s$' % target_name, out):
+ if not utils.grep('^package:%s$' % target_name, out):
raise jsonrpc_client_base.AppStartError(
'Instrumentation target %s is not installed on %s' %
- (target_name, self._serial))
+ (target_name, self._adb.serial))
- def _start_event_client(self):
- event_client = SnippetClient(
- package=self.package,
- host_port=self.host_port,
- adb_proxy=self._adb,
- log=self.log)
- event_client.connect(self.uid,
- jsonrpc_client_base.JsonRpcCommand.CONTINUE)
- return event_client
+ def _do_start_app(self, launch_cmd):
+ adb_cmd = [adb.ADB]
+ if self._adb.serial:
+ adb_cmd += ['-s', self._adb.serial]
+ adb_cmd += ['shell', launch_cmd]
+ return utils.start_standing_subprocess(adb_cmd, shell=False)
+
+ # TODO(adorokhine): delete this in Mobly 1.6 when snippet v0 support is
+ # removed.
+ def _connect_to_v0(self):
+ self.device_port = self.host_port
+ self._adb.forward(
+ ['tcp:%d' % self.host_port, 'tcp:%d' % self.device_port])
+ start_time = time.time()
+ expiration_time = start_time + _APP_START_WAIT_TIME_V0
+ while time.time() < expiration_time:
+ self.log.debug('Attempting to start %s.', self.package)
+ try:
+ self.connect()
+ return
+ except:
+ self.log.debug(
+ 'v0 snippet %s is not yet running, retrying',
+ self.package,
+ exc_info=True)
+ time.sleep(1)
+ raise jsonrpc_client_base.AppStartError(
+ '%s failed to start on %s.' % (self.package, self._adb.serial))
+
+ def _connect_to_v1(self):
+ line = self._read_line()
+ match = re.match('^SNIPPET SERVING, PORT ([0-9]+)$', line)
+ if not match:
+ raise ProtocolVersionError(line)
+ self.device_port = int(match.group(1))
+
+ # Forward the device port to a new host port, and connect to that port
+ self.host_port = utils.get_available_host_port()
+ self._adb.forward(
+ ['tcp:%d' % self.host_port, 'tcp:%d' % self.device_port])
+ self.connect()
+
+ def _read_line(self):
+ line = self._proc.stdout.readline().rstrip()
+ self.log.debug('Read line from instrumentation output: "%s"', line)
+ return line
diff --git a/mobly/utils.py b/mobly/utils.py
index 3d01c79..5c998a0 100644
--- a/mobly/utils.py
+++ b/mobly/utils.py
@@ -15,15 +15,14 @@
import base64
import concurrent.futures
import datetime
-import functools
import logging
import os
import platform
import portpicker
import psutil
import random
+import re
import signal
-import socket
import string
import subprocess
import time
@@ -189,7 +188,7 @@ def find_files(paths, file_predicate):
file_list = []
for path in paths:
p = abs_path(path)
- for dirPath, subdirList, fileList in os.walk(p):
+ for dirPath, _, fileList in os.walk(p):
for fname in fileList:
name, ext = os.path.splitext(fname)
if file_predicate(name, ext):
@@ -243,7 +242,7 @@ def rand_ascii_str(length):
Returns:
The random string generated.
"""
- letters = [random.choice(ascii_letters_and_digits) for i in range(length)]
+ letters = [random.choice(ascii_letters_and_digits) for _ in range(length)]
return ''.join(letters)
@@ -421,3 +420,26 @@ def get_available_host_port():
return port
raise Error('Failed to find available port after {} retries'.format(
MAX_PORT_ALLOCATION_RETRY))
+
+
+def grep(regex, output):
+ """Similar to linux's `grep`, this returns the line in an output stream
+ that matches a given regex pattern.
+
+ It does not rely on the `grep` binary and is not sensitive to line endings,
+ so it can be used cross-platform.
+
+ Args:
+ regex: string, a regex that matches the expected pattern.
+ output: byte string, the raw output of the adb cmd.
+
+ Returns:
+ A list of strings, all of which are output lines that matches the
+ regex pattern.
+ """
+ lines = output.decode('utf-8').strip().splitlines()
+ results = []
+ for line in lines:
+ if re.search(regex, line):
+ results.append(line.strip())
+ return results
diff --git a/tools/sl4a_shell.py b/tools/sl4a_shell.py
index 501e656..1006ff8 100755
--- a/tools/sl4a_shell.py
+++ b/tools/sl4a_shell.py
@@ -35,6 +35,7 @@ u'N2F52'
"""
import argparse
+import logging
from mobly.controllers.android_device_lib import jsonrpc_shell_base
@@ -42,7 +43,6 @@ from mobly.controllers.android_device_lib import jsonrpc_shell_base
class Sl4aShell(jsonrpc_shell_base.JsonRpcShellBase):
def _start_services(self, console_env):
"""Overrides superclass."""
- self._ad.start_services()
self._ad.load_sl4a()
console_env['s'] = self._ad.sl4a
console_env['sl4a'] = self._ad.sl4a
@@ -64,4 +64,5 @@ if __name__ == '__main__':
help=
'Device serial to connect to (if more than one device is connected)')
args = parser.parse_args()
+ logging.basicConfig(level=logging.INFO)
Sl4aShell().main(args.serial)
| [Snippet Client] Backward compatibility mechanism with older versions of snippet lib
If we make updates to snippet lib and introduce new internal Rpcs or other protocol changes, the client needs to be able to figure out which version of the lib is used and behave accordingly. | google/mobly | diff --git a/mobly/test_runner.py b/mobly/test_runner.py
index a883a7c..bcdebfa 100644
--- a/mobly/test_runner.py
+++ b/mobly/test_runner.py
@@ -400,11 +400,13 @@ class TestRunner(object):
raise signals.ControllerError(
'Expected to get at least %d controller objects, got %d.' %
(min_number, actual_number))
- self._controller_registry[module_ref_name] = objects
+ # Save a shallow copy of the list for internal usage, so tests can't
+ # affect internal registry by manipulating the object list.
+ self._controller_registry[module_ref_name] = copy.copy(objects)
# Collect controller information and write to test result.
# Implementation of 'get_info' is optional for a controller module.
if hasattr(module, 'get_info'):
- controller_info = module.get_info(objects)
+ controller_info = module.get_info(copy.copy(objects))
logging.debug('Controller %s: %s', module_config_name,
controller_info)
self.results.add_controller_info(module_config_name,
diff --git a/tests/mobly/controllers/android_device_lib/jsonrpc_client_base_test.py b/tests/mobly/controllers/android_device_lib/jsonrpc_client_base_test.py
index 5adcfc5..3fbe8cf 100755
--- a/tests/mobly/controllers/android_device_lib/jsonrpc_client_base_test.py
+++ b/tests/mobly/controllers/android_device_lib/jsonrpc_client_base_test.py
@@ -51,11 +51,7 @@ class MockSocketFile(object):
class FakeRpcClient(jsonrpc_client_base.JsonRpcClientBase):
def __init__(self):
- super(FakeRpcClient, self).__init__(
- host_port=80,
- device_port=90,
- app_name='FakeRpcClient',
- adb_proxy=None)
+ super(FakeRpcClient, self).__init__(app_name='FakeRpcClient')
class JsonRpcClientBaseTest(unittest.TestCase):
@@ -164,8 +160,8 @@ class JsonRpcClientBaseTest(unittest.TestCase):
with self.assertRaises(
jsonrpc_client_base.ProtocolError,
- msg=jsonrpc_client_base.ProtocolError.
- NO_RESPONSE_FROM_HANDSHAKE):
+ msg=
+ jsonrpc_client_base.ProtocolError.NO_RESPONSE_FROM_HANDSHAKE):
client.some_rpc(1, 2, 3)
@mock.patch('socket.create_connection')
diff --git a/tests/mobly/controllers/android_device_lib/snippet_client_test.py b/tests/mobly/controllers/android_device_lib/snippet_client_test.py
index 21db458..c5b7df4 100755
--- a/tests/mobly/controllers/android_device_lib/snippet_client_test.py
+++ b/tests/mobly/controllers/android_device_lib/snippet_client_test.py
@@ -15,14 +15,11 @@
from builtins import str
from builtins import bytes
-import json
import mock
-import socket
import unittest
from mobly.controllers.android_device_lib import jsonrpc_client_base
from mobly.controllers.android_device_lib import snippet_client
-from tests.lib import mock_android_device
MOCK_PACKAGE_NAME = 'some.package.name'
MOCK_MISSING_PACKAGE_NAME = 'not.installed'
@@ -46,17 +43,13 @@ class MockAdbProxy(object):
if self.apk_not_instrumented:
return b''
if self.target_not_installed:
- return bytes(
- 'instrumentation:{p}/{r} (target={mp})'.format(
- p=MOCK_PACKAGE_NAME,
- r=snippet_client._INSTRUMENTATION_RUNNER_PACKAGE,
- mp=MOCK_MISSING_PACKAGE_NAME),
- 'utf-8')
- return bytes(
- 'instrumentation:{p}/{r} (target={p})'.format(
+ return bytes('instrumentation:{p}/{r} (target={mp})'.format(
p=MOCK_PACKAGE_NAME,
- r=snippet_client._INSTRUMENTATION_RUNNER_PACKAGE),
- 'utf-8')
+ r=snippet_client._INSTRUMENTATION_RUNNER_PACKAGE,
+ mp=MOCK_MISSING_PACKAGE_NAME), 'utf-8')
+ return bytes('instrumentation:{p}/{r} (target={p})'.format(
+ p=MOCK_PACKAGE_NAME,
+ r=snippet_client._INSTRUMENTATION_RUNNER_PACKAGE), 'utf-8')
def __getattr__(self, name):
"""All calls to the none-existent functions in adb proxy would
@@ -78,42 +71,42 @@ class JsonRpcClientBaseTest(unittest.TestCase):
@mock.patch(JSONRPC_BASE_PACKAGE)
def test_check_app_installed_normal(self, mock_create_connection,
mock_client_base):
- sc = snippet_client.SnippetClient(MOCK_PACKAGE_NAME, 42,
- MockAdbProxy())
- sc.check_app_installed()
+ sc = self._make_client()
+ sc._check_app_installed()
@mock.patch('socket.create_connection')
@mock.patch(JSONRPC_BASE_PACKAGE)
def test_check_app_installed_fail_app_not_installed(
self, mock_create_connection, mock_client_base):
- sc = snippet_client.SnippetClient(
- MOCK_PACKAGE_NAME, 42, MockAdbProxy(apk_not_installed=True))
+ sc = self._make_client(MockAdbProxy(apk_not_installed=True))
expected_msg = '%s is not installed on .*' % MOCK_PACKAGE_NAME
with self.assertRaisesRegexp(jsonrpc_client_base.AppStartError,
expected_msg):
- sc.check_app_installed()
+ sc._check_app_installed()
@mock.patch('socket.create_connection')
@mock.patch(JSONRPC_BASE_PACKAGE)
def test_check_app_installed_fail_not_instrumented(
self, mock_create_connection, mock_client_base):
- sc = snippet_client.SnippetClient(
- MOCK_PACKAGE_NAME, 42, MockAdbProxy(apk_not_instrumented=True))
+ sc = self._make_client(MockAdbProxy(apk_not_instrumented=True))
expected_msg = '%s is installed on .*, but it is not instrumented.' % MOCK_PACKAGE_NAME
with self.assertRaisesRegexp(jsonrpc_client_base.AppStartError,
expected_msg):
- sc.check_app_installed()
+ sc._check_app_installed()
@mock.patch('socket.create_connection')
@mock.patch(JSONRPC_BASE_PACKAGE)
def test_check_app_installed_fail_target_not_installed(
self, mock_create_connection, mock_client_base):
- sc = snippet_client.SnippetClient(
- MOCK_PACKAGE_NAME, 42, MockAdbProxy(target_not_installed=True))
+ sc = self._make_client(MockAdbProxy(target_not_installed=True))
expected_msg = 'Instrumentation target %s is not installed on .*' % MOCK_MISSING_PACKAGE_NAME
with self.assertRaisesRegexp(jsonrpc_client_base.AppStartError,
expected_msg):
- sc.check_app_installed()
+ sc._check_app_installed()
+
+ def _make_client(self, adb_proxy=MockAdbProxy()):
+ return snippet_client.SnippetClient(
+ package=MOCK_PACKAGE_NAME, adb_proxy=adb_proxy)
if __name__ == "__main__":
diff --git a/tests/mobly/test_runner_test.py b/tests/mobly/test_runner_test.py
index 67bf394..843888e 100755
--- a/tests/mobly/test_runner_test.py
+++ b/tests/mobly/test_runner_test.py
@@ -110,6 +110,19 @@ class TestRunnerTest(unittest.TestCase):
self.assertEqual(magic_devices[0].magic, 'magic1')
self.assertEqual(magic_devices[1].magic, 'magic2')
+ def test_register_controller_change_return_value(self):
+ mock_test_config = self.base_mock_test_config.copy()
+ mock_ctrlr_config_name = mock_controller.MOBLY_CONTROLLER_CONFIG_NAME
+ mock_test_config.controller_configs = {
+ mock_ctrlr_config_name: ['magic1', 'magic2']
+ }
+ tr = test_runner.TestRunner(self.log_dir, self.test_bed_name)
+ magic_devices = tr._register_controller(mock_test_config,
+ mock_controller)
+ magic1 = magic_devices.pop(0)
+ self.assertIs(magic1, tr._controller_registry['mock_controller'][0])
+ self.assertEqual(len(tr._controller_registry['mock_controller']), 2)
+
def test_register_controller_less_than_min_number(self):
mock_test_config = self.base_mock_test_config.copy()
mock_ctrlr_config_name = mock_controller.MOBLY_CONTROLLER_CONFIG_NAME
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 1,
"test_score": 1
},
"num_modified_files": 8
} | 1.4 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y adb"
],
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
future==1.0.0
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
-e git+https://github.com/google/mobly.git@6344f817976315515f6b183b2394012a8f348e23#egg=mobly
mock==1.0.1
packaging @ file:///croot/packaging_1734472117206/work
pluggy @ file:///croot/pluggy_1733169602837/work
portpicker==1.6.0
psutil==7.0.0
pytest @ file:///croot/pytest_1738938843180/work
pytz==2025.2
PyYAML==6.0.2
timeout-decorator==0.5.0
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
| name: mobly
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- future==1.0.0
- mock==1.0.1
- portpicker==1.6.0
- psutil==7.0.0
- pytz==2025.2
- pyyaml==6.0.2
- timeout-decorator==0.5.0
prefix: /opt/conda/envs/mobly
| [
"tests/mobly/controllers/android_device_lib/jsonrpc_client_base_test.py::JsonRpcClientBaseTest::test_connect_handshake",
"tests/mobly/controllers/android_device_lib/jsonrpc_client_base_test.py::JsonRpcClientBaseTest::test_connect_handshake_unknown_status",
"tests/mobly/controllers/android_device_lib/jsonrpc_client_base_test.py::JsonRpcClientBaseTest::test_connect_no_response",
"tests/mobly/controllers/android_device_lib/jsonrpc_client_base_test.py::JsonRpcClientBaseTest::test_connect_timeout",
"tests/mobly/controllers/android_device_lib/jsonrpc_client_base_test.py::JsonRpcClientBaseTest::test_handshake_error",
"tests/mobly/controllers/android_device_lib/jsonrpc_client_base_test.py::JsonRpcClientBaseTest::test_open_timeout_io_error",
"tests/mobly/controllers/android_device_lib/jsonrpc_client_base_test.py::JsonRpcClientBaseTest::test_rpc_call_increment_counter",
"tests/mobly/controllers/android_device_lib/jsonrpc_client_base_test.py::JsonRpcClientBaseTest::test_rpc_callback_response",
"tests/mobly/controllers/android_device_lib/jsonrpc_client_base_test.py::JsonRpcClientBaseTest::test_rpc_error_response",
"tests/mobly/controllers/android_device_lib/jsonrpc_client_base_test.py::JsonRpcClientBaseTest::test_rpc_id_mismatch",
"tests/mobly/controllers/android_device_lib/jsonrpc_client_base_test.py::JsonRpcClientBaseTest::test_rpc_no_response",
"tests/mobly/controllers/android_device_lib/jsonrpc_client_base_test.py::JsonRpcClientBaseTest::test_rpc_send_to_socket",
"tests/mobly/controllers/android_device_lib/jsonrpc_client_base_test.py::JsonRpcClientBaseTest::test_rpc_send_to_socket_without_callback",
"tests/mobly/controllers/android_device_lib/snippet_client_test.py::JsonRpcClientBaseTest::test_check_app_installed_fail_app_not_installed",
"tests/mobly/controllers/android_device_lib/snippet_client_test.py::JsonRpcClientBaseTest::test_check_app_installed_fail_not_instrumented",
"tests/mobly/controllers/android_device_lib/snippet_client_test.py::JsonRpcClientBaseTest::test_check_app_installed_fail_target_not_installed",
"tests/mobly/controllers/android_device_lib/snippet_client_test.py::JsonRpcClientBaseTest::test_check_app_installed_normal"
]
| []
| [
"tests/mobly/test_runner_test.py::TestRunnerTest::test_add_test_class_mismatched_log_path",
"tests/mobly/test_runner_test.py::TestRunnerTest::test_add_test_class_mismatched_test_bed_name",
"tests/mobly/test_runner_test.py::TestRunnerTest::test_register_controller_change_return_value",
"tests/mobly/test_runner_test.py::TestRunnerTest::test_register_controller_dup_register",
"tests/mobly/test_runner_test.py::TestRunnerTest::test_register_controller_less_than_min_number",
"tests/mobly/test_runner_test.py::TestRunnerTest::test_register_controller_no_config",
"tests/mobly/test_runner_test.py::TestRunnerTest::test_register_controller_no_config_no_register",
"tests/mobly/test_runner_test.py::TestRunnerTest::test_register_controller_no_get_info",
"tests/mobly/test_runner_test.py::TestRunnerTest::test_register_controller_return_value",
"tests/mobly/test_runner_test.py::TestRunnerTest::test_run_no_tests",
"tests/mobly/test_runner_test.py::TestRunnerTest::test_run_twice",
"tests/mobly/test_runner_test.py::TestRunnerTest::test_run_two_test_classes",
"tests/mobly/test_runner_test.py::TestRunnerTest::test_run_two_test_classes_different_configs",
"tests/mobly/test_runner_test.py::TestRunnerTest::test_verify_controller_module",
"tests/mobly/test_runner_test.py::TestRunnerTest::test_verify_controller_module_missing_attr",
"tests/mobly/test_runner_test.py::TestRunnerTest::test_verify_controller_module_null_attr"
]
| []
| Apache License 2.0 | 1,296 | [
"tools/sl4a_shell.py",
"mobly/controllers/android_device_lib/snippet_client.py",
"mobly/controllers/android_device.py",
"mobly/controllers/android_device_lib/jsonrpc_client_base.py",
"mobly/utils.py",
"mobly/controllers/android_device_lib/jsonrpc_shell_base.py",
"mobly/controllers/android_device_lib/event_dispatcher.py",
"mobly/controllers/android_device_lib/sl4a_client.py"
]
| [
"tools/sl4a_shell.py",
"mobly/controllers/android_device_lib/snippet_client.py",
"mobly/controllers/android_device.py",
"mobly/controllers/android_device_lib/jsonrpc_client_base.py",
"mobly/utils.py",
"mobly/controllers/android_device_lib/jsonrpc_shell_base.py",
"mobly/controllers/android_device_lib/event_dispatcher.py",
"mobly/controllers/android_device_lib/sl4a_client.py"
]
|
dask__dask-2383 | 9288d5c861a6ca82a462c0138ec147e01b1130a6 | 2017-05-24 17:28:19 | b25fcaf1f84521425faa935f3c586f418c83760a | jcrist: We may want to rethink these keyword names at some point. It'd be a bit of a pain to deprecate since this is public api, but the current keywords aren't the clearest (existing for historical reasons).
If I was to redo them I'd probably have `key_name` be for specifying the full key (`name` currently), and `key_prefix` for just the prefix (`token` currently). If we were to change them we'd probably want to mirror this convention in `dask.dataframe` and `dask.bag` as well. | diff --git a/dask/array/core.py b/dask/array/core.py
index 0f602a296..5e6830bee 100644
--- a/dask/array/core.py
+++ b/dask/array/core.py
@@ -533,9 +533,13 @@ def map_blocks(func, *args, **kwargs):
new_axis : number or iterable, optional
New dimensions created by the function. Note that these are applied
after ``drop_axis`` (if present).
+ token : string, optional
+ The key prefix to use for the output array. If not provided, will be
+ determined from the function name.
name : string, optional
- The key name to use for the array. If not provided, will be determined
- by a hash of the arguments.
+ The key name to use for the output array. Note that this fully
+ specifies the output key name, and must be unique. If not provided,
+ will be determined by a hash of the arguments.
**kwargs :
Other keyword arguments to pass to function. Values must be constants
(not dask.arrays)
@@ -608,10 +612,11 @@ def map_blocks(func, *args, **kwargs):
>>> def func(block, block_id=None):
... pass
- You may specify the name of the resulting task in the graph with the
- optional ``name`` keyword argument.
+ You may specify the key name prefix of the resulting task in the graph with
+ the optional ``token`` keyword argument.
- >>> y = x.map_blocks(lambda x: x + 1, name='increment')
+ >>> x.map_blocks(lambda x: x + 1, token='increment') # doctest: +SKIP
+ dask.array<increment, shape=(100,), dtype=int64, chunksize=(10,)>
"""
if not callable(func):
msg = ("First argument must be callable function, not %s\n"
@@ -619,7 +624,10 @@ def map_blocks(func, *args, **kwargs):
" or: da.map_blocks(function, x, y, z)")
raise TypeError(msg % type(func).__name__)
name = kwargs.pop('name', None)
- name = name or '%s-%s' % (funcname(func), tokenize(func, args, **kwargs))
+ token = kwargs.pop('token', None)
+ if not name:
+ name = '%s-%s' % (token or funcname(func),
+ tokenize(token or func, args, **kwargs))
dtype = kwargs.pop('dtype', None)
chunks = kwargs.pop('chunks', None)
drop_axis = kwargs.pop('drop_axis', [])
diff --git a/dask/array/random.py b/dask/array/random.py
index 7bed2acd7..ad5ef1954 100644
--- a/dask/array/random.py
+++ b/dask/array/random.py
@@ -37,7 +37,7 @@ class RandomState(object):
>>> state = da.random.RandomState(1234) # a seed
>>> x = state.normal(10, 0.1, size=3, chunks=(2,))
>>> x.compute()
- array([ 10.06307943, 9.91493648, 10.0822082 ])
+ array([ 10.01867852, 10.04812289, 9.89649746])
See Also:
np.random.RandomState
diff --git a/dask/utils.py b/dask/utils.py
index 637f7bd82..2a007966c 100644
--- a/dask/utils.py
+++ b/dask/utils.py
@@ -263,7 +263,7 @@ def random_state_data(n, random_state=None):
Parameters
----------
n : int
- Number of tuples to return.
+ Number of arrays to return.
random_state : int or np.random.RandomState, optional
If an int, is used to seed a new ``RandomState``.
"""
@@ -272,9 +272,10 @@ def random_state_data(n, random_state=None):
if not isinstance(random_state, np.random.RandomState):
random_state = np.random.RandomState(random_state)
- maxuint32 = np.iinfo(np.uint32).max
- return [(random_state.rand(624) * maxuint32).astype('uint32')
- for i in range(n)]
+ random_data = random_state.bytes(624 * n * 4) # `n * 624` 32-bit integers
+ l = list(np.frombuffer(random_data, dtype=np.uint32).reshape((n, -1)))
+ assert len(l) == n
+ return l
def is_integer(i):
diff --git a/docs/source/array-creation.rst b/docs/source/array-creation.rst
index 5cfd4a4bf..86b466290 100644
--- a/docs/source/array-creation.rst
+++ b/docs/source/array-creation.rst
@@ -149,7 +149,7 @@ Unknown Chunks
Some arrays have unknown chunk sizes. These are designated using ``np.nan``
rather than an integer. These arrays support many but not all operations. In
-particular, opeations like slicing are not possible and will result in an
+particular, operations like slicing are not possible and will result in an
error.
.. code-block:: python
diff --git a/docs/source/dataframe-create.rst b/docs/source/dataframe-create.rst
index 48709ec47..7d429c3a2 100644
--- a/docs/source/dataframe-create.rst
+++ b/docs/source/dataframe-create.rst
@@ -97,7 +97,7 @@ From Raw Dask Graphs
This section is mainly for developers wishing to extend dask.dataframe. It
discusses internal API not normally needed by users. Everything below can be
done just as effectively with :doc:`dask.delayed<delayed-overview>` described
-just above. You should never need to create a dataframe object by han
+just above. You should never need to create a dataframe object by hand.
To construct a DataFrame manually from a dask graph you need the following
information:
diff --git a/docs/source/dataframe-design.rst b/docs/source/dataframe-design.rst
index f23784bf8..f16b33bc1 100644
--- a/docs/source/dataframe-design.rst
+++ b/docs/source/dataframe-design.rst
@@ -81,7 +81,7 @@ Dask dataframe divides `categorical data`_ into two types:
different categories in each partition. Internally, unknown categoricals are
indicated by the presence of ``dd.utils.UNKNOWN_CATEGORIES`` in the
categories on the ``_meta`` attribute. Since most dataframe operations
- propogate the categories, the known/unknown status should propogate through
+ propagate the categories, the known/unknown status should propagate through
operations (similar to how ``NaN`` propagates).
For metadata specified as a description (option 2 above), unknown categoricals
diff --git a/docs/source/remote-data-services.rst b/docs/source/remote-data-services.rst
index 3871ba5bd..1cebacc37 100644
--- a/docs/source/remote-data-services.rst
+++ b/docs/source/remote-data-services.rst
@@ -148,7 +148,7 @@ The following parameters may be passed to s3fs using ``storage_options``:
- token: if authentication has been done with some other S3 client
- - use_ssl: whether connections are encryted and secure (default True)
+ - use_ssl: whether connections are encrypted and secure (default True)
- client_kwargs: dict passed to the `boto3 client`_, with keys such
as `region_name`, `endpoint_url`
| stack/concatenate duplicates last result in list comprehension
Am running into some strange behavior with Dask `stack` and `concatenate` where they end up duplicating the last value in a list comprehension for all other values in the stacked or concatenated array. Not seeing this across the board, but am seeing it reliably in a few cases. Not sure if there is some optimization gone awry or something else giving rise to this problem. Have documented it in issue ( https://github.com/dask-image/dask-ndfilters/issues/28 ) with an example and specs for reproducing the issue. | dask/dask | diff --git a/dask/array/tests/test_array_core.py b/dask/array/tests/test_array_core.py
index 5ab285d6a..c4e0d5d68 100644
--- a/dask/array/tests/test_array_core.py
+++ b/dask/array/tests/test_array_core.py
@@ -895,6 +895,10 @@ def test_map_blocks():
e = d.map_blocks(inc, name='increment')
assert e.name == 'increment'
+ e = d.map_blocks(inc, token='increment')
+ assert e.name != 'increment'
+ assert e.name.startswith('increment')
+
d = from_array(x, chunks=(10, 10))
e = d.map_blocks(lambda x: x[::2, ::2], chunks=(5, 5), dtype=d.dtype)
diff --git a/dask/tests/test_utils.py b/dask/tests/test_utils.py
index c87033147..d9ff77c0a 100644
--- a/dask/tests/test_utils.py
+++ b/dask/tests/test_utils.py
@@ -59,11 +59,10 @@ def test_dispatch():
assert foo((1, 2.0, b)) == (2, 1.0, b)
[email protected]
def test_random_state_data():
seed = 37
state = np.random.RandomState(seed)
- n = 100000
+ n = 10000
# Use an integer
states = random_state_data(n, seed)
@@ -72,6 +71,7 @@ def test_random_state_data():
# Use RandomState object
states2 = random_state_data(n, state)
for s1, s2 in zip(states, states2):
+ assert s1.shape == (624,)
assert (s1 == s2).all()
# Consistent ordering
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 0,
"test_score": 3
},
"num_modified_files": 7
} | 0.14 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[complete]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.6",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | aiobotocore==2.1.2
aiohttp==3.8.6
aioitertools==0.11.0
aiosignal==1.2.0
async-timeout==4.0.2
asynctest==0.13.0
attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
botocore==1.23.24
certifi==2021.5.30
charset-normalizer==3.0.1
click==8.0.4
cloudpickle==2.2.1
-e git+https://github.com/dask/dask.git@9288d5c861a6ca82a462c0138ec147e01b1130a6#egg=dask
distributed==1.19.3
frozenlist==1.2.0
fsspec==2022.1.0
HeapDict==1.0.1
idna==3.10
idna-ssl==1.1.0
importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
jmespath==0.10.0
locket==1.0.0
more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work
msgpack-python==0.5.6
multidict==5.2.0
numpy==1.19.5
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
pandas==1.1.5
partd==1.2.0
pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work
psutil==7.0.0
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
pytest==6.2.4
python-dateutil==2.9.0.post0
pytz==2025.2
s3fs==2022.1.0
six==1.17.0
sortedcontainers==2.4.0
tblib==1.7.0
toml @ file:///tmp/build/80754af9/toml_1616166611790/work
toolz==0.12.0
tornado==6.1
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
urllib3==1.26.20
wrapt==1.16.0
yarl==1.7.2
zict==2.1.0
zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
| name: dask
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=21.4.0=pyhd3eb1b0_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- importlib-metadata=4.8.1=py36h06a4308_0
- importlib_metadata=4.8.1=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- more-itertools=8.12.0=pyhd3eb1b0_0
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pip=21.2.2=py36h06a4308_0
- pluggy=0.13.1=py36h06a4308_0
- py=1.11.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pytest=6.2.4=py36h06a4308_2
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- toml=0.10.2=pyhd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zipp=3.6.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
- pip:
- aiobotocore==2.1.2
- aiohttp==3.8.6
- aioitertools==0.11.0
- aiosignal==1.2.0
- async-timeout==4.0.2
- asynctest==0.13.0
- botocore==1.23.24
- charset-normalizer==3.0.1
- click==8.0.4
- cloudpickle==2.2.1
- distributed==1.19.3
- frozenlist==1.2.0
- fsspec==2022.1.0
- heapdict==1.0.1
- idna==3.10
- idna-ssl==1.1.0
- jmespath==0.10.0
- locket==1.0.0
- msgpack-python==0.5.6
- multidict==5.2.0
- numpy==1.19.5
- pandas==1.1.5
- partd==1.2.0
- psutil==7.0.0
- python-dateutil==2.9.0.post0
- pytz==2025.2
- s3fs==2022.1.0
- six==1.17.0
- sortedcontainers==2.4.0
- tblib==1.7.0
- toolz==0.12.0
- tornado==6.1
- urllib3==1.26.20
- wrapt==1.16.0
- yarl==1.7.2
- zict==2.1.0
prefix: /opt/conda/envs/dask
| [
"dask/array/tests/test_array_core.py::test_map_blocks"
]
| [
"dask/array/tests/test_array_core.py::test_concatenate_unknown_axes",
"dask/array/tests/test_array_core.py::test_field_access",
"dask/array/tests/test_array_core.py::test_field_access_with_shape",
"dask/array/tests/test_array_core.py::test_to_dask_dataframe",
"dask/array/tests/test_array_core.py::test_setitem_mixed_d"
]
| [
"dask/array/tests/test_array_core.py::test_getem",
"dask/array/tests/test_array_core.py::test_top",
"dask/array/tests/test_array_core.py::test_top_supports_broadcasting_rules",
"dask/array/tests/test_array_core.py::test_concatenate3_on_scalars",
"dask/array/tests/test_array_core.py::test_chunked_dot_product",
"dask/array/tests/test_array_core.py::test_chunked_transpose_plus_one",
"dask/array/tests/test_array_core.py::test_transpose",
"dask/array/tests/test_array_core.py::test_broadcast_dimensions_works_with_singleton_dimensions",
"dask/array/tests/test_array_core.py::test_broadcast_dimensions",
"dask/array/tests/test_array_core.py::test_Array",
"dask/array/tests/test_array_core.py::test_uneven_chunks",
"dask/array/tests/test_array_core.py::test_numblocks_suppoorts_singleton_block_dims",
"dask/array/tests/test_array_core.py::test_keys",
"dask/array/tests/test_array_core.py::test_Array_computation",
"dask/array/tests/test_array_core.py::test_stack",
"dask/array/tests/test_array_core.py::test_short_stack",
"dask/array/tests/test_array_core.py::test_stack_scalars",
"dask/array/tests/test_array_core.py::test_stack_promote_type",
"dask/array/tests/test_array_core.py::test_stack_rechunk",
"dask/array/tests/test_array_core.py::test_concatenate",
"dask/array/tests/test_array_core.py::test_concatenate_rechunk",
"dask/array/tests/test_array_core.py::test_concatenate_fixlen_strings",
"dask/array/tests/test_array_core.py::test_vstack",
"dask/array/tests/test_array_core.py::test_hstack",
"dask/array/tests/test_array_core.py::test_dstack",
"dask/array/tests/test_array_core.py::test_take",
"dask/array/tests/test_array_core.py::test_compress",
"dask/array/tests/test_array_core.py::test_binops",
"dask/array/tests/test_array_core.py::test_isnull",
"dask/array/tests/test_array_core.py::test_isclose",
"dask/array/tests/test_array_core.py::test_broadcast_shapes",
"dask/array/tests/test_array_core.py::test_elemwise_on_scalars",
"dask/array/tests/test_array_core.py::test_partial_by_order",
"dask/array/tests/test_array_core.py::test_elemwise_with_ndarrays",
"dask/array/tests/test_array_core.py::test_elemwise_differently_chunked",
"dask/array/tests/test_array_core.py::test_operators",
"dask/array/tests/test_array_core.py::test_operator_dtype_promotion",
"dask/array/tests/test_array_core.py::test_tensordot",
"dask/array/tests/test_array_core.py::test_tensordot_2[0]",
"dask/array/tests/test_array_core.py::test_tensordot_2[1]",
"dask/array/tests/test_array_core.py::test_tensordot_2[axes2]",
"dask/array/tests/test_array_core.py::test_tensordot_2[axes3]",
"dask/array/tests/test_array_core.py::test_tensordot_2[axes4]",
"dask/array/tests/test_array_core.py::test_tensordot_2[axes5]",
"dask/array/tests/test_array_core.py::test_tensordot_2[axes6]",
"dask/array/tests/test_array_core.py::test_dot_method",
"dask/array/tests/test_array_core.py::test_T",
"dask/array/tests/test_array_core.py::test_norm",
"dask/array/tests/test_array_core.py::test_choose",
"dask/array/tests/test_array_core.py::test_where",
"dask/array/tests/test_array_core.py::test_where_has_informative_error",
"dask/array/tests/test_array_core.py::test_coarsen",
"dask/array/tests/test_array_core.py::test_coarsen_with_excess",
"dask/array/tests/test_array_core.py::test_insert",
"dask/array/tests/test_array_core.py::test_multi_insert",
"dask/array/tests/test_array_core.py::test_broadcast_to",
"dask/array/tests/test_array_core.py::test_ravel",
"dask/array/tests/test_array_core.py::test_roll[None-3-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[None-3-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[None-7-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[None-7-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[None-9-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[None-9-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[None-shift3-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[None-shift3-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[None-shift4-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[None-shift4-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[0-3-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[0-3-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[0-7-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[0-7-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[0-9-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[0-9-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[0-shift3-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[0-shift3-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[0-shift4-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[0-shift4-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[1-3-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[1-3-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[1-7-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[1-7-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[1-9-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[1-9-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[1-shift3-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[1-shift3-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[1-shift4-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[1-shift4-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[-1-3-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[-1-3-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[-1-7-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[-1-7-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[-1-9-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[-1-9-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[-1-shift3-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[-1-shift3-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[-1-shift4-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[-1-shift4-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[axis4-3-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[axis4-3-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[axis4-7-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[axis4-7-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[axis4-9-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[axis4-9-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[axis4-shift3-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[axis4-shift3-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[axis4-shift4-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[axis4-shift4-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[axis5-3-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[axis5-3-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[axis5-7-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[axis5-7-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[axis5-9-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[axis5-9-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[axis5-shift3-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[axis5-shift3-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[axis5-shift4-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[axis5-shift4-chunks1]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape0-new_shape0-chunks0]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape1-new_shape1-5]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape2-new_shape2-5]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape3-new_shape3-12]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape4-new_shape4-12]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape5-new_shape5-chunks5]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape6-new_shape6-4]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape7-new_shape7-4]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape8-new_shape8-4]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape9-new_shape9-2]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape10-new_shape10-2]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape11-new_shape11-2]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape12-new_shape12-2]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape13-new_shape13-2]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape14-new_shape14-2]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape15-new_shape15-2]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape16-new_shape16-chunks16]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape17-new_shape17-3]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape18-new_shape18-4]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape19-new_shape19-chunks19]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape20-new_shape20-1]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape21-new_shape21-1]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape22-new_shape22-24]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape23-new_shape23-6]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape24-new_shape24-6]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape25-new_shape25-6]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape26-new_shape26-chunks26]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape27-new_shape27-chunks27]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape28-new_shape28-chunks28]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape29-new_shape29-chunks29]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape30-new_shape30-chunks30]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape31-new_shape31-chunks31]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape32-new_shape32-chunks32]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape33-new_shape33-chunks33]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape34-new_shape34-chunks34]",
"dask/array/tests/test_array_core.py::test_reshape_exceptions",
"dask/array/tests/test_array_core.py::test_reshape_splat",
"dask/array/tests/test_array_core.py::test_reshape_fails_for_dask_only",
"dask/array/tests/test_array_core.py::test_reshape_unknown_dimensions",
"dask/array/tests/test_array_core.py::test_full",
"dask/array/tests/test_array_core.py::test_map_blocks2",
"dask/array/tests/test_array_core.py::test_map_blocks_with_constants",
"dask/array/tests/test_array_core.py::test_map_blocks_with_kwargs",
"dask/array/tests/test_array_core.py::test_map_blocks_with_chunks",
"dask/array/tests/test_array_core.py::test_map_blocks_dtype_inference",
"dask/array/tests/test_array_core.py::test_fromfunction",
"dask/array/tests/test_array_core.py::test_from_function_requires_block_args",
"dask/array/tests/test_array_core.py::test_repr",
"dask/array/tests/test_array_core.py::test_slicing_with_ellipsis",
"dask/array/tests/test_array_core.py::test_slicing_with_ndarray",
"dask/array/tests/test_array_core.py::test_dtype",
"dask/array/tests/test_array_core.py::test_blockdims_from_blockshape",
"dask/array/tests/test_array_core.py::test_coerce",
"dask/array/tests/test_array_core.py::test_store_delayed_target",
"dask/array/tests/test_array_core.py::test_store",
"dask/array/tests/test_array_core.py::test_store_regions",
"dask/array/tests/test_array_core.py::test_store_compute_false",
"dask/array/tests/test_array_core.py::test_store_locks",
"dask/array/tests/test_array_core.py::test_np_array_with_zero_dimensions",
"dask/array/tests/test_array_core.py::test_unique",
"dask/array/tests/test_array_core.py::test_dtype_complex",
"dask/array/tests/test_array_core.py::test_astype",
"dask/array/tests/test_array_core.py::test_arithmetic",
"dask/array/tests/test_array_core.py::test_elemwise_consistent_names",
"dask/array/tests/test_array_core.py::test_optimize",
"dask/array/tests/test_array_core.py::test_slicing_with_non_ndarrays",
"dask/array/tests/test_array_core.py::test_getarray",
"dask/array/tests/test_array_core.py::test_squeeze",
"dask/array/tests/test_array_core.py::test_size",
"dask/array/tests/test_array_core.py::test_nbytes",
"dask/array/tests/test_array_core.py::test_itemsize",
"dask/array/tests/test_array_core.py::test_Array_normalizes_dtype",
"dask/array/tests/test_array_core.py::test_from_array_with_lock",
"dask/array/tests/test_array_core.py::test_from_array_slicing_results_in_ndarray",
"dask/array/tests/test_array_core.py::test_from_array_getitem",
"dask/array/tests/test_array_core.py::test_asarray",
"dask/array/tests/test_array_core.py::test_from_func",
"dask/array/tests/test_array_core.py::test_topk",
"dask/array/tests/test_array_core.py::test_topk_k_bigger_than_chunk",
"dask/array/tests/test_array_core.py::test_bincount",
"dask/array/tests/test_array_core.py::test_bincount_with_weights",
"dask/array/tests/test_array_core.py::test_bincount_raises_informative_error_on_missing_minlength_kwarg",
"dask/array/tests/test_array_core.py::test_digitize",
"dask/array/tests/test_array_core.py::test_histogram",
"dask/array/tests/test_array_core.py::test_histogram_alternative_bins_range",
"dask/array/tests/test_array_core.py::test_histogram_return_type",
"dask/array/tests/test_array_core.py::test_histogram_extra_args_and_shapes",
"dask/array/tests/test_array_core.py::test_concatenate3_2",
"dask/array/tests/test_array_core.py::test_map_blocks3",
"dask/array/tests/test_array_core.py::test_from_array_with_missing_chunks",
"dask/array/tests/test_array_core.py::test_take_dask_from_numpy",
"dask/array/tests/test_array_core.py::test_normalize_chunks",
"dask/array/tests/test_array_core.py::test_raise_on_no_chunks",
"dask/array/tests/test_array_core.py::test_chunks_is_immutable",
"dask/array/tests/test_array_core.py::test_raise_on_bad_kwargs",
"dask/array/tests/test_array_core.py::test_long_slice",
"dask/array/tests/test_array_core.py::test_ellipsis_slicing",
"dask/array/tests/test_array_core.py::test_point_slicing",
"dask/array/tests/test_array_core.py::test_point_slicing_with_full_slice",
"dask/array/tests/test_array_core.py::test_slice_with_floats",
"dask/array/tests/test_array_core.py::test_vindex_errors",
"dask/array/tests/test_array_core.py::test_vindex_merge",
"dask/array/tests/test_array_core.py::test_empty_array",
"dask/array/tests/test_array_core.py::test_array",
"dask/array/tests/test_array_core.py::test_cov",
"dask/array/tests/test_array_core.py::test_corrcoef",
"dask/array/tests/test_array_core.py::test_memmap",
"dask/array/tests/test_array_core.py::test_to_npy_stack",
"dask/array/tests/test_array_core.py::test_view",
"dask/array/tests/test_array_core.py::test_view_fortran",
"dask/array/tests/test_array_core.py::test_map_blocks_with_changed_dimension",
"dask/array/tests/test_array_core.py::test_broadcast_chunks",
"dask/array/tests/test_array_core.py::test_chunks_error",
"dask/array/tests/test_array_core.py::test_array_compute_forward_kwargs",
"dask/array/tests/test_array_core.py::test_dont_fuse_outputs",
"dask/array/tests/test_array_core.py::test_dont_dealias_outputs",
"dask/array/tests/test_array_core.py::test_timedelta_op",
"dask/array/tests/test_array_core.py::test_to_delayed",
"dask/array/tests/test_array_core.py::test_cumulative",
"dask/array/tests/test_array_core.py::test_eye",
"dask/array/tests/test_array_core.py::test_diag",
"dask/array/tests/test_array_core.py::test_tril_triu",
"dask/array/tests/test_array_core.py::test_tril_triu_errors",
"dask/array/tests/test_array_core.py::test_atop_names",
"dask/array/tests/test_array_core.py::test_atop_new_axes",
"dask/array/tests/test_array_core.py::test_atop_kwargs",
"dask/array/tests/test_array_core.py::test_atop_chunks",
"dask/array/tests/test_array_core.py::test_from_delayed",
"dask/array/tests/test_array_core.py::test_A_property",
"dask/array/tests/test_array_core.py::test_copy_mutate",
"dask/array/tests/test_array_core.py::test_npartitions",
"dask/array/tests/test_array_core.py::test_astype_gh1151",
"dask/array/tests/test_array_core.py::test_elemwise_name",
"dask/array/tests/test_array_core.py::test_map_blocks_name",
"dask/array/tests/test_array_core.py::test_from_array_names",
"dask/array/tests/test_array_core.py::test_array_picklable",
"dask/array/tests/test_array_core.py::test_swapaxes",
"dask/array/tests/test_array_core.py::test_from_array_raises_on_bad_chunks",
"dask/array/tests/test_array_core.py::test_concatenate_axes",
"dask/array/tests/test_array_core.py::test_atop_concatenate",
"dask/array/tests/test_array_core.py::test_common_blockdim",
"dask/array/tests/test_array_core.py::test_uneven_chunks_that_fit_neatly",
"dask/array/tests/test_array_core.py::test_elemwise_uneven_chunks",
"dask/array/tests/test_array_core.py::test_uneven_chunks_atop",
"dask/array/tests/test_array_core.py::test_warn_bad_rechunking",
"dask/array/tests/test_array_core.py::test_optimize_fuse_keys",
"dask/array/tests/test_array_core.py::test_round",
"dask/array/tests/test_array_core.py::test_repeat",
"dask/array/tests/test_array_core.py::test_tile[0-shape0-chunks0]",
"dask/array/tests/test_array_core.py::test_tile[0-shape1-chunks1]",
"dask/array/tests/test_array_core.py::test_tile[1-shape0-chunks0]",
"dask/array/tests/test_array_core.py::test_tile[1-shape1-chunks1]",
"dask/array/tests/test_array_core.py::test_tile[2-shape0-chunks0]",
"dask/array/tests/test_array_core.py::test_tile[2-shape1-chunks1]",
"dask/array/tests/test_array_core.py::test_tile[3-shape0-chunks0]",
"dask/array/tests/test_array_core.py::test_tile[3-shape1-chunks1]",
"dask/array/tests/test_array_core.py::test_tile[5-shape0-chunks0]",
"dask/array/tests/test_array_core.py::test_tile[5-shape1-chunks1]",
"dask/array/tests/test_array_core.py::test_tile_neg_reps[-1-shape0-chunks0]",
"dask/array/tests/test_array_core.py::test_tile_neg_reps[-1-shape1-chunks1]",
"dask/array/tests/test_array_core.py::test_tile_neg_reps[-5-shape0-chunks0]",
"dask/array/tests/test_array_core.py::test_tile_neg_reps[-5-shape1-chunks1]",
"dask/array/tests/test_array_core.py::test_tile_array_reps[reps0-shape0-chunks0]",
"dask/array/tests/test_array_core.py::test_tile_array_reps[reps0-shape1-chunks1]",
"dask/array/tests/test_array_core.py::test_tile_array_reps[reps1-shape0-chunks0]",
"dask/array/tests/test_array_core.py::test_tile_array_reps[reps1-shape1-chunks1]",
"dask/array/tests/test_array_core.py::test_concatenate_stack_dont_warn",
"dask/array/tests/test_array_core.py::test_map_blocks_delayed",
"dask/array/tests/test_array_core.py::test_no_chunks",
"dask/array/tests/test_array_core.py::test_no_chunks_2d",
"dask/array/tests/test_array_core.py::test_no_chunks_yes_chunks",
"dask/array/tests/test_array_core.py::test_raise_informative_errors_no_chunks",
"dask/array/tests/test_array_core.py::test_no_chunks_slicing_2d",
"dask/array/tests/test_array_core.py::test_index_array_with_array_1d",
"dask/array/tests/test_array_core.py::test_index_array_with_array_2d",
"dask/array/tests/test_array_core.py::test_setitem_1d",
"dask/array/tests/test_array_core.py::test_setitem_2d",
"dask/array/tests/test_array_core.py::test_setitem_errs",
"dask/array/tests/test_array_core.py::test_zero_slice_dtypes",
"dask/array/tests/test_array_core.py::test_zero_sized_array_rechunk",
"dask/array/tests/test_array_core.py::test_atop_zero_shape",
"dask/array/tests/test_array_core.py::test_atop_zero_shape_new_axes",
"dask/array/tests/test_array_core.py::test_broadcast_against_zero_shape",
"dask/array/tests/test_array_core.py::test_fast_from_array",
"dask/array/tests/test_array_core.py::test_random_from_array",
"dask/array/tests/test_array_core.py::test_concatenate_errs",
"dask/array/tests/test_array_core.py::test_stack_errs",
"dask/array/tests/test_array_core.py::test_transpose_negative_axes",
"dask/array/tests/test_array_core.py::test_atop_with_numpy_arrays",
"dask/array/tests/test_array_core.py::test_elemwise_with_lists[other0-100]",
"dask/array/tests/test_array_core.py::test_elemwise_with_lists[other0-6]",
"dask/array/tests/test_array_core.py::test_elemwise_with_lists[other1-100]",
"dask/array/tests/test_array_core.py::test_elemwise_with_lists[other1-6]",
"dask/array/tests/test_array_core.py::test_elemwise_with_lists[other2-100]",
"dask/array/tests/test_array_core.py::test_elemwise_with_lists[other2-6]",
"dask/array/tests/test_array_core.py::test_constructor_plugin",
"dask/tests/test_utils.py::test_takes_multiple_arguments",
"dask/tests/test_utils.py::test_dispatch",
"dask/tests/test_utils.py::test_random_state_data",
"dask/tests/test_utils.py::test_memory_repr",
"dask/tests/test_utils.py::test_method_caller",
"dask/tests/test_utils.py::test_skip_doctest",
"dask/tests/test_utils.py::test_SerializableLock",
"dask/tests/test_utils.py::test_SerializableLock_name_collision",
"dask/tests/test_utils.py::test_funcname",
"dask/tests/test_utils.py::test_funcname_toolz",
"dask/tests/test_utils.py::test_ndeepmap",
"dask/tests/test_utils.py::test_ensure_dict",
"dask/tests/test_utils.py::test_package_of"
]
| []
| BSD 3-Clause "New" or "Revised" License | 1,298 | [
"docs/source/remote-data-services.rst",
"docs/source/array-creation.rst",
"docs/source/dataframe-design.rst",
"dask/array/random.py",
"docs/source/dataframe-create.rst",
"dask/array/core.py",
"dask/utils.py"
]
| [
"docs/source/remote-data-services.rst",
"docs/source/array-creation.rst",
"docs/source/dataframe-design.rst",
"dask/array/random.py",
"docs/source/dataframe-create.rst",
"dask/array/core.py",
"dask/utils.py"
]
|
Azure__azure-cli-3441 | 2e672ea8757dd37052fb8131852eda28cd573cb2 | 2017-05-24 17:32:50 | 58aac4203905792244f4bb244910354fd44425d6 | diff --git a/src/azure-cli-core/azure/cli/core/_profile.py b/src/azure-cli-core/azure/cli/core/_profile.py
index 113eac669..17763237a 100644
--- a/src/azure-cli-core/azure/cli/core/_profile.py
+++ b/src/azure-cli-core/azure/cli/core/_profile.py
@@ -471,7 +471,6 @@ class SubscriptionFinder(object):
temp_credentials[_ACCESS_TOKEN])
all_subscriptions.extend(subscriptions)
- self.tenants = tenants
return all_subscriptions
def _find_using_specific_tenant(self, tenant, access_token):
@@ -484,7 +483,7 @@ class SubscriptionFinder(object):
for s in subscriptions:
setattr(s, 'tenant_id', tenant)
all_subscriptions.append(s)
- self.tenants = [tenant]
+ self.tenants.append(tenant)
return all_subscriptions
diff --git a/src/command_modules/azure-cli-interactive/azclishell/_dump_commands.py b/src/command_modules/azure-cli-interactive/azclishell/_dump_commands.py
index c1ccb67af..6fc3d1c26 100644
--- a/src/command_modules/azure-cli-interactive/azclishell/_dump_commands.py
+++ b/src/command_modules/azure-cli-interactive/azclishell/_dump_commands.py
@@ -14,6 +14,7 @@ import yaml
from azure.cli.core.application import APPLICATION, Configuration
from azure.cli.core.commands import _update_command_definitions, BLACKLISTED_MODS
from azure.cli.core.help_files import helps
+from azure.cli.core.commands.arm import add_id_parameters
import azclishell.configuration as config
@@ -48,6 +49,7 @@ def dump_command_table():
command_file = config.CONFIGURATION.get_help_files()
install_modules()
+ add_id_parameters(CMD_TABLE)
data = {}
for cmd in CMD_TABLE:
diff --git a/src/command_modules/azure-cli-interactive/azclishell/app.py b/src/command_modules/azure-cli-interactive/azclishell/app.py
index 405bf53e3..6b653c00d 100644
--- a/src/command_modules/azure-cli-interactive/azclishell/app.py
+++ b/src/command_modules/azure-cli-interactive/azclishell/app.py
@@ -554,8 +554,7 @@ class Shell(object):
CONFIG.load(os.path.join(azure_folder, 'az.json'))
SESSION.load(os.path.join(azure_folder, 'az.sess'), max_age=3600)
- config = Configuration()
- self.app.initialize(config)
+ self.app.initialize(Configuration())
result = self.app.execute(args)
self.last_exit = 0
diff --git a/src/command_modules/azure-cli-profile/HISTORY.rst b/src/command_modules/azure-cli-profile/HISTORY.rst
index e0018b776..3eee49a32 100644
--- a/src/command_modules/azure-cli-profile/HISTORY.rst
+++ b/src/command_modules/azure-cli-profile/HISTORY.rst
@@ -6,6 +6,7 @@ Release History
2.0.5 (unreleased)
* Output deprecating information on using '--expanded-view'
* Add get-access-token command to provide raw AAD token
+* Support login with a user account with no associated subscriptions
2.0.4 (2017-04-28)
++++++++++++++++++
diff --git a/src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/commands.py b/src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/commands.py
index 24b237cec..69ca29b28 100644
--- a/src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/commands.py
+++ b/src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/commands.py
@@ -64,6 +64,26 @@ def transform_vm_create_output(result):
return None if isinstance(result, ClientRawResponse) else result
+def transform_vm_usage_list(result):
+ result = list(result)
+ for item in result:
+ item.current_value = str(item.current_value)
+ item.limit = str(item.limit)
+ item.local_name = item.name.localized_value
+ return result
+
+
+def transform_vm_usage_table(result):
+ transformed = []
+ for item in result:
+ transformed.append(OrderedDict([
+ ('Name', item['localName']),
+ ('CurrentValue', item['currentValue']),
+ ('Limit', item['limit'])
+ ]))
+ return transformed
+
+
def transform_vm_list(vm_list):
return [transform_vm(v) for v in vm_list]
@@ -217,7 +237,7 @@ cli_command(__name__, 'vm image list-skus', mgmt_path.format(op_var, op_class, '
cli_command(__name__, 'vm image list', custom_path.format('list_vm_images'))
# VM Usage
-cli_command(__name__, 'vm list-usage', mgmt_path.format('usage_operations', 'UsageOperations', 'list'), cf_usage)
+cli_command(__name__, 'vm list-usage', mgmt_path.format('usage_operations', 'UsageOperations', 'list'), cf_usage, transform=transform_vm_usage_list, table_transformer=transform_vm_usage_table)
# VMSS
cli_command(__name__, 'vmss delete', mgmt_path.format('virtual_machine_scale_sets_operations', 'VirtualMachineScaleSetsOperations', 'delete'), cf_vmss, no_wait_param='raw')
| az vm list-usages needs table/tsv work
Currently outputs like:
```
$ az vm list-usage --location westus
Limit CurrentValue
------- --------------
2000
100 4
10000 4
2000
100 4
100
100
100
100
100
100
100
100
100
100
24
48
8
100
100
100
100
2000 1
2000 4
```
But should have a formatter written to provide output like
```
Name CurrentValue Limit
------------------- ------------- --------
standardMSFamily 1 2000
StandardDiskCount 0 100
PremiumDiskCount 1 100
[...]
```
In addition, when the `CurrentValue`values are 0, we should still show the 0 in `table` output, where it seems we suppress the 0 value now (it is in the JSON doc).
| Azure/azure-cli | diff --git a/src/azure-cli-core/tests/test_profile.py b/src/azure-cli-core/tests/test_profile.py
index ad9d05766..0b11ec681 100644
--- a/src/azure-cli-core/tests/test_profile.py
+++ b/src/azure-cli-core/tests/test_profile.py
@@ -255,7 +255,7 @@ class Test_Profile(unittest.TestCase): # pylint: disable=too-many-public-method
extended_info['endpoints'].active_directory)
@mock.patch('adal.AuthenticationContext', autospec=True)
- def test_create_account_without_subscriptions(self, mock_auth_context):
+ def test_create_account_without_subscriptions_thru_service_principal(self, mock_auth_context):
mock_auth_context.acquire_token_with_client_credentials.return_value = self.token_entry1
mock_arm_client = mock.MagicMock()
mock_arm_client.subscriptions.list.return_value = []
@@ -277,7 +277,42 @@ class Test_Profile(unittest.TestCase): # pylint: disable=too-many-public-method
subscription_finder=finder)
# assert
- self.assertTrue(1, len(result))
+ self.assertEqual(1, len(result))
+ self.assertEqual(result[0]['id'], self.tenant_id)
+ self.assertEqual(result[0]['state'], 'Enabled')
+ self.assertEqual(result[0]['tenantId'], self.tenant_id)
+ self.assertEqual(result[0]['name'], 'N/A(tenant level account)')
+
+ @mock.patch('adal.AuthenticationContext', autospec=True)
+ def test_create_account_without_subscriptions_thru_common_tenant(self, mock_auth_context):
+ mock_auth_context.acquire_token.return_value = self.token_entry1
+ mock_auth_context.acquire_token_with_username_password.return_value = self.token_entry1
+ tenant_object = mock.MagicMock()
+ tenant_object.id = "foo-bar"
+ tenant_object.tenant_id = self.tenant_id
+ mock_arm_client = mock.MagicMock()
+ mock_arm_client.subscriptions.list.return_value = []
+ mock_arm_client.tenants.list.return_value = (x for x in [tenant_object])
+
+ finder = SubscriptionFinder(lambda _, _2: mock_auth_context,
+ None,
+ lambda _: mock_arm_client)
+
+ storage_mock = {'subscriptions': []}
+ profile = Profile(storage_mock, use_global_creds_cache=False)
+ profile._management_resource_uri = 'https://management.core.windows.net/'
+
+ # action
+ result = profile.find_subscriptions_on_login(False,
+ '1234',
+ 'my-secret',
+ False,
+ None,
+ allow_no_subscriptions=True,
+ subscription_finder=finder)
+
+ # assert
+ self.assertEqual(1, len(result))
self.assertEqual(result[0]['id'], self.tenant_id)
self.assertEqual(result[0]['state'], 'Enabled')
self.assertEqual(result[0]['tenantId'], self.tenant_id)
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 1,
"test_score": 3
},
"num_modified_files": 5
} | 0.3 | {
"env_vars": null,
"env_yml_path": null,
"install": "python scripts/dev_setup.py",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | adal==0.4.3
applicationinsights==0.10.0
argcomplete==1.8.0
astroid==1.4.9
attrs==22.2.0
autopep8==1.2.4
azure-batch==3.0.0
-e git+https://github.com/Azure/azure-cli.git@2e672ea8757dd37052fb8131852eda28cd573cb2#egg=azure_cli&subdirectory=src/azure-cli
-e git+https://github.com/Azure/azure-cli.git@2e672ea8757dd37052fb8131852eda28cd573cb2#egg=azure_cli_acr&subdirectory=src/command_modules/azure-cli-acr
-e git+https://github.com/Azure/azure-cli.git@2e672ea8757dd37052fb8131852eda28cd573cb2#egg=azure_cli_acs&subdirectory=src/command_modules/azure-cli-acs
-e git+https://github.com/Azure/azure-cli.git@2e672ea8757dd37052fb8131852eda28cd573cb2#egg=azure_cli_appservice&subdirectory=src/command_modules/azure-cli-appservice
-e git+https://github.com/Azure/azure-cli.git@2e672ea8757dd37052fb8131852eda28cd573cb2#egg=azure_cli_batch&subdirectory=src/command_modules/azure-cli-batch
-e git+https://github.com/Azure/azure-cli.git@2e672ea8757dd37052fb8131852eda28cd573cb2#egg=azure_cli_cdn&subdirectory=src/command_modules/azure-cli-cdn
-e git+https://github.com/Azure/azure-cli.git@2e672ea8757dd37052fb8131852eda28cd573cb2#egg=azure_cli_cloud&subdirectory=src/command_modules/azure-cli-cloud
-e git+https://github.com/Azure/azure-cli.git@2e672ea8757dd37052fb8131852eda28cd573cb2#egg=azure_cli_cognitiveservices&subdirectory=src/command_modules/azure-cli-cognitiveservices
-e git+https://github.com/Azure/azure-cli.git@2e672ea8757dd37052fb8131852eda28cd573cb2#egg=azure_cli_component&subdirectory=src/command_modules/azure-cli-component
-e git+https://github.com/Azure/azure-cli.git@2e672ea8757dd37052fb8131852eda28cd573cb2#egg=azure_cli_configure&subdirectory=src/command_modules/azure-cli-configure
-e git+https://github.com/Azure/azure-cli.git@2e672ea8757dd37052fb8131852eda28cd573cb2#egg=azure_cli_core&subdirectory=src/azure-cli-core
-e git+https://github.com/Azure/azure-cli.git@2e672ea8757dd37052fb8131852eda28cd573cb2#egg=azure_cli_cosmosdb&subdirectory=src/command_modules/azure-cli-cosmosdb
-e git+https://github.com/Azure/azure-cli.git@2e672ea8757dd37052fb8131852eda28cd573cb2#egg=azure_cli_dla&subdirectory=src/command_modules/azure-cli-dla
-e git+https://github.com/Azure/azure-cli.git@2e672ea8757dd37052fb8131852eda28cd573cb2#egg=azure_cli_dls&subdirectory=src/command_modules/azure-cli-dls
-e git+https://github.com/Azure/azure-cli.git@2e672ea8757dd37052fb8131852eda28cd573cb2#egg=azure_cli_feedback&subdirectory=src/command_modules/azure-cli-feedback
-e git+https://github.com/Azure/azure-cli.git@2e672ea8757dd37052fb8131852eda28cd573cb2#egg=azure_cli_find&subdirectory=src/command_modules/azure-cli-find
-e git+https://github.com/Azure/azure-cli.git@2e672ea8757dd37052fb8131852eda28cd573cb2#egg=azure_cli_interactive&subdirectory=src/command_modules/azure-cli-interactive
-e git+https://github.com/Azure/azure-cli.git@2e672ea8757dd37052fb8131852eda28cd573cb2#egg=azure_cli_iot&subdirectory=src/command_modules/azure-cli-iot
-e git+https://github.com/Azure/azure-cli.git@2e672ea8757dd37052fb8131852eda28cd573cb2#egg=azure_cli_keyvault&subdirectory=src/command_modules/azure-cli-keyvault
-e git+https://github.com/Azure/azure-cli.git@2e672ea8757dd37052fb8131852eda28cd573cb2#egg=azure_cli_lab&subdirectory=src/command_modules/azure-cli-lab
-e git+https://github.com/Azure/azure-cli.git@2e672ea8757dd37052fb8131852eda28cd573cb2#egg=azure_cli_monitor&subdirectory=src/command_modules/azure-cli-monitor
-e git+https://github.com/Azure/azure-cli.git@2e672ea8757dd37052fb8131852eda28cd573cb2#egg=azure_cli_network&subdirectory=src/command_modules/azure-cli-network
-e git+https://github.com/Azure/azure-cli.git@2e672ea8757dd37052fb8131852eda28cd573cb2#egg=azure_cli_nspkg&subdirectory=src/azure-cli-nspkg
-e git+https://github.com/Azure/azure-cli.git@2e672ea8757dd37052fb8131852eda28cd573cb2#egg=azure_cli_profile&subdirectory=src/command_modules/azure-cli-profile
-e git+https://github.com/Azure/azure-cli.git@2e672ea8757dd37052fb8131852eda28cd573cb2#egg=azure_cli_rdbms&subdirectory=src/command_modules/azure-cli-rdbms
-e git+https://github.com/Azure/azure-cli.git@2e672ea8757dd37052fb8131852eda28cd573cb2#egg=azure_cli_redis&subdirectory=src/command_modules/azure-cli-redis
-e git+https://github.com/Azure/azure-cli.git@2e672ea8757dd37052fb8131852eda28cd573cb2#egg=azure_cli_resource&subdirectory=src/command_modules/azure-cli-resource
-e git+https://github.com/Azure/azure-cli.git@2e672ea8757dd37052fb8131852eda28cd573cb2#egg=azure_cli_role&subdirectory=src/command_modules/azure-cli-role
-e git+https://github.com/Azure/azure-cli.git@2e672ea8757dd37052fb8131852eda28cd573cb2#egg=azure_cli_sf&subdirectory=src/command_modules/azure-cli-sf
-e git+https://github.com/Azure/azure-cli.git@2e672ea8757dd37052fb8131852eda28cd573cb2#egg=azure_cli_sql&subdirectory=src/command_modules/azure-cli-sql
-e git+https://github.com/Azure/azure-cli.git@2e672ea8757dd37052fb8131852eda28cd573cb2#egg=azure_cli_storage&subdirectory=src/command_modules/azure-cli-storage
-e git+https://github.com/Azure/azure-cli.git@2e672ea8757dd37052fb8131852eda28cd573cb2#egg=azure_cli_taskhelp&subdirectory=src/command_modules/azure-cli-taskhelp
-e git+https://github.com/Azure/azure-cli.git@2e672ea8757dd37052fb8131852eda28cd573cb2#egg=azure_cli_testsdk&subdirectory=src/azure-cli-testsdk
-e git+https://github.com/Azure/azure-cli.git@2e672ea8757dd37052fb8131852eda28cd573cb2#egg=azure_cli_utility_automation&subdirectory=scripts
-e git+https://github.com/Azure/azure-cli.git@2e672ea8757dd37052fb8131852eda28cd573cb2#egg=azure_cli_vm&subdirectory=src/command_modules/azure-cli-vm
azure-common==1.1.28
azure-core==1.24.2
azure-datalake-store==0.0.9
azure-graphrbac==0.30.0rc6
azure-keyvault==0.3.0
azure-mgmt-authorization==0.30.0rc6
azure-mgmt-batch==4.0.0
azure-mgmt-cdn==0.30.2
azure-mgmt-cognitiveservices==1.0.0
azure-mgmt-compute==1.0.0rc1
azure-mgmt-containerregistry==0.2.1
azure-mgmt-datalake-analytics==0.1.4
azure-mgmt-datalake-nspkg==3.0.1
azure-mgmt-datalake-store==0.1.4
azure-mgmt-devtestlabs==2.0.0
azure-mgmt-dns==1.0.1
azure-mgmt-documentdb==0.1.3
azure-mgmt-iothub==0.2.2
azure-mgmt-keyvault==0.31.0
azure-mgmt-monitor==0.2.1
azure-mgmt-network==1.0.0rc3
azure-mgmt-nspkg==1.0.0
azure-mgmt-rdbms==0.1.0
azure-mgmt-redis==1.0.0
azure-mgmt-resource==1.1.0rc1
azure-mgmt-sql==0.4.0
azure-mgmt-storage==1.0.0rc1
azure-mgmt-trafficmanager==0.30.0
azure-mgmt-web==0.32.0
azure-monitor==0.3.0
azure-multiapi-storage==0.1.0
azure-nspkg==1.0.0
azure-servicefabric==5.6.130
certifi==2021.5.30
cffi==1.15.1
colorama==0.3.7
coverage==4.2
cryptography==40.0.2
flake8==3.2.1
futures==3.1.1
importlib-metadata==4.8.3
iniconfig==1.1.1
isodate==0.7.0
jeepney==0.7.1
jmespath==0.10.0
keyring==23.4.1
lazy-object-proxy==1.7.1
mccabe==0.5.3
mock==1.3.0
msrest==0.4.29
msrestazure==0.4.34
nose==1.3.7
oauthlib==3.2.2
packaging==21.3
paramiko==2.0.2
pbr==6.1.1
pep8==1.7.1
pluggy==1.0.0
prompt-toolkit==3.0.36
py==1.11.0
pyasn1==0.5.1
pycodestyle==2.2.0
pycparser==2.21
pydocumentdb==2.3.5
pyflakes==1.3.0
Pygments==2.1.3
PyJWT==2.4.0
pylint==1.5.4
pyOpenSSL==16.2.0
pyparsing==3.1.4
pytest==7.0.1
python-dateutil==2.9.0.post0
pytz==2025.2
PyYAML==3.11
requests==2.9.1
requests-oauthlib==2.0.0
scp==0.15.0
SecretStorage==3.3.3
six==1.10.0
sshtunnel==0.4.0
tabulate==0.7.7
tomli==1.2.3
typing-extensions==4.1.1
urllib3==1.16
vcrpy==1.10.3
vsts-cd-manager==1.0.2
wcwidth==0.2.13
Whoosh==2.7.4
wrapt==1.16.0
xmltodict==0.14.2
zipp==3.6.0
| name: azure-cli
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- adal==0.4.3
- applicationinsights==0.10.0
- argcomplete==1.8.0
- astroid==1.4.9
- attrs==22.2.0
- autopep8==1.2.4
- azure-batch==3.0.0
- azure-common==1.1.28
- azure-core==1.24.2
- azure-datalake-store==0.0.9
- azure-graphrbac==0.30.0rc6
- azure-keyvault==0.3.0
- azure-mgmt-authorization==0.30.0rc6
- azure-mgmt-batch==4.0.0
- azure-mgmt-cdn==0.30.2
- azure-mgmt-cognitiveservices==1.0.0
- azure-mgmt-compute==1.0.0rc1
- azure-mgmt-containerregistry==0.2.1
- azure-mgmt-datalake-analytics==0.1.4
- azure-mgmt-datalake-nspkg==3.0.1
- azure-mgmt-datalake-store==0.1.4
- azure-mgmt-devtestlabs==2.0.0
- azure-mgmt-dns==1.0.1
- azure-mgmt-documentdb==0.1.3
- azure-mgmt-iothub==0.2.2
- azure-mgmt-keyvault==0.31.0
- azure-mgmt-monitor==0.2.1
- azure-mgmt-network==1.0.0rc3
- azure-mgmt-nspkg==1.0.0
- azure-mgmt-rdbms==0.1.0
- azure-mgmt-redis==1.0.0
- azure-mgmt-resource==1.1.0rc1
- azure-mgmt-sql==0.4.0
- azure-mgmt-storage==1.0.0rc1
- azure-mgmt-trafficmanager==0.30.0
- azure-mgmt-web==0.32.0
- azure-monitor==0.3.0
- azure-multiapi-storage==0.1.0
- azure-nspkg==1.0.0
- azure-servicefabric==5.6.130
- cffi==1.15.1
- colorama==0.3.7
- coverage==4.2
- cryptography==40.0.2
- flake8==3.2.1
- futures==3.1.1
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- isodate==0.7.0
- jeepney==0.7.1
- jmespath==0.10.0
- keyring==23.4.1
- lazy-object-proxy==1.7.1
- mccabe==0.5.3
- mock==1.3.0
- msrest==0.4.29
- msrestazure==0.4.34
- nose==1.3.7
- oauthlib==3.2.2
- packaging==21.3
- paramiko==2.0.2
- pbr==6.1.1
- pep8==1.7.1
- pip==9.0.1
- pluggy==1.0.0
- prompt-toolkit==3.0.36
- py==1.11.0
- pyasn1==0.5.1
- pycodestyle==2.2.0
- pycparser==2.21
- pydocumentdb==2.3.5
- pyflakes==1.3.0
- pygments==2.1.3
- pyjwt==2.4.0
- pylint==1.5.4
- pyopenssl==16.2.0
- pyparsing==3.1.4
- pytest==7.0.1
- python-dateutil==2.9.0.post0
- pytz==2025.2
- pyyaml==3.11
- requests==2.9.1
- requests-oauthlib==2.0.0
- scp==0.15.0
- secretstorage==3.3.3
- setuptools==30.4.0
- six==1.10.0
- sshtunnel==0.4.0
- tabulate==0.7.7
- tomli==1.2.3
- typing-extensions==4.1.1
- urllib3==1.16
- vcrpy==1.10.3
- vsts-cd-manager==1.0.2
- wcwidth==0.2.13
- whoosh==2.7.4
- wrapt==1.16.0
- xmltodict==0.14.2
- zipp==3.6.0
prefix: /opt/conda/envs/azure-cli
| [
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_create_account_without_subscriptions_thru_common_tenant"
]
| [
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_find_subscriptions_from_service_principal_using_cert",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_service_principal_auth_client_cert"
]
| [
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_create_account_without_subscriptions_thru_service_principal",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_create_account_without_subscriptions_without_tenant",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_create_token_cache",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_credscache_add_new_sp_creds",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_credscache_add_preexisting_sp_creds",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_credscache_load_tokens_and_sp_creds_with_cert",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_credscache_load_tokens_and_sp_creds_with_secret",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_credscache_new_token_added_by_adal",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_credscache_remove_creds",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_default_active_subscription_to_non_disabled_one",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_find_subscriptions_from_particular_tenent",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_find_subscriptions_from_service_principal_id",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_find_subscriptions_interactive_from_particular_tenent",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_find_subscriptions_through_interactive_flow",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_find_subscriptions_thru_username_password",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_find_subscriptions_thru_username_password_adfs",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_find_subscriptions_thru_username_password_with_account_disabled",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_get_current_account_user",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_get_expanded_subscription_info",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_get_expanded_subscription_info_for_logged_in_service_principal",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_get_login_credentials",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_get_login_credentials_for_graph_client",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_get_raw_token",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_get_subscription",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_load_cached_tokens",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_logout",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_logout_all",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_normalize",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_service_principal_auth_client_secret",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_set_active_subscription",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_update_add_two_different_subscriptions",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_update_with_same_subscription_added_twice"
]
| []
| MIT License | 1,299 | [
"src/command_modules/azure-cli-interactive/azclishell/_dump_commands.py",
"src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/commands.py",
"src/azure-cli-core/azure/cli/core/_profile.py",
"src/command_modules/azure-cli-profile/HISTORY.rst",
"src/command_modules/azure-cli-interactive/azclishell/app.py"
]
| [
"src/command_modules/azure-cli-interactive/azclishell/_dump_commands.py",
"src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/commands.py",
"src/azure-cli-core/azure/cli/core/_profile.py",
"src/command_modules/azure-cli-profile/HISTORY.rst",
"src/command_modules/azure-cli-interactive/azclishell/app.py"
]
|
|
borgbackup__borg-2566 | 2dcbe02e5ae2c5674b350866b145c028966e1664 | 2017-05-25 10:37:50 | a439fa3e720c8bb2a82496768ffcce282fb7f7b7 | diff --git a/src/borg/helpers.py b/src/borg/helpers.py
index 15f56437..db66b822 100644
--- a/src/borg/helpers.py
+++ b/src/borg/helpers.py
@@ -916,7 +916,12 @@ def __str__(self):
def to_key_filename(self):
name = re.sub('[^\w]', '_', self.path).strip('_')
if self.proto != 'file':
- name = self.host + '__' + name
+ name = re.sub('[^\w]', '_', self.host) + '__' + name
+ if len(name) > 100:
+ # Limit file names to some reasonable length. Most file systems
+ # limit them to 255 [unit of choice]; due to variations in unicode
+ # handling we truncate to 100 *characters*.
+ name = name[:100]
return os.path.join(get_keys_dir(), name)
def __repr__(self):
| Restrict file names generated by Location.to_keyfile_name
The generated file can currently include special characters like those mentioned in #2290, which should be stripped for compatibility reasons.
From #2555 | borgbackup/borg | diff --git a/src/borg/testsuite/helpers.py b/src/borg/testsuite/helpers.py
index 7ce22dc2..b23e277b 100644
--- a/src/borg/testsuite/helpers.py
+++ b/src/borg/testsuite/helpers.py
@@ -50,10 +50,19 @@ def test_bin_to_hex():
class TestLocationWithoutEnv:
- def test_ssh(self, monkeypatch):
+ @pytest.fixture
+ def keys_dir(self, tmpdir, monkeypatch):
+ tmpdir = str(tmpdir)
+ monkeypatch.setenv('BORG_KEYS_DIR', tmpdir)
+ if not tmpdir.endswith(os.path.sep):
+ tmpdir += os.path.sep
+ return tmpdir
+
+ def test_ssh(self, monkeypatch, keys_dir):
monkeypatch.delenv('BORG_REPO', raising=False)
assert repr(Location('ssh://user@host:1234/some/path::archive')) == \
"Location(proto='ssh', user='user', host='host', port=1234, path='/some/path', archive='archive')"
+ assert Location('ssh://user@host:1234/some/path::archive').to_key_filename() == keys_dir + 'host__some_path'
assert repr(Location('ssh://user@host:1234/some/path')) == \
"Location(proto='ssh', user='user', host='host', port=1234, path='/some/path', archive=None)"
assert repr(Location('ssh://user@host/some/path')) == \
@@ -62,12 +71,14 @@ def test_ssh(self, monkeypatch):
"Location(proto='ssh', user='user', host='::', port=1234, path='/some/path', archive='archive')"
assert repr(Location('ssh://user@[::]:1234/some/path')) == \
"Location(proto='ssh', user='user', host='::', port=1234, path='/some/path', archive=None)"
+ assert Location('ssh://user@[::]:1234/some/path').to_key_filename() == keys_dir + '____some_path'
assert repr(Location('ssh://user@[::]/some/path')) == \
"Location(proto='ssh', user='user', host='::', port=None, path='/some/path', archive=None)"
assert repr(Location('ssh://user@[2001:db8::]:1234/some/path::archive')) == \
"Location(proto='ssh', user='user', host='2001:db8::', port=1234, path='/some/path', archive='archive')"
assert repr(Location('ssh://user@[2001:db8::]:1234/some/path')) == \
"Location(proto='ssh', user='user', host='2001:db8::', port=1234, path='/some/path', archive=None)"
+ assert Location('ssh://user@[2001:db8::]:1234/some/path').to_key_filename() == keys_dir + '2001_db8____some_path'
assert repr(Location('ssh://user@[2001:db8::]/some/path')) == \
"Location(proto='ssh', user='user', host='2001:db8::', port=None, path='/some/path', archive=None)"
assert repr(Location('ssh://user@[2001:db8::c0:ffee]:1234/some/path::archive')) == \
@@ -82,15 +93,17 @@ def test_ssh(self, monkeypatch):
"Location(proto='ssh', user='user', host='2001:db8::192.0.2.1', port=1234, path='/some/path', archive=None)"
assert repr(Location('ssh://user@[2001:db8::192.0.2.1]/some/path')) == \
"Location(proto='ssh', user='user', host='2001:db8::192.0.2.1', port=None, path='/some/path', archive=None)"
+ assert Location('ssh://user@[2001:db8::192.0.2.1]/some/path').to_key_filename() == keys_dir + '2001_db8__192_0_2_1__some_path'
- def test_file(self, monkeypatch):
+ def test_file(self, monkeypatch, keys_dir):
monkeypatch.delenv('BORG_REPO', raising=False)
assert repr(Location('file:///some/path::archive')) == \
"Location(proto='file', user=None, host=None, port=None, path='/some/path', archive='archive')"
assert repr(Location('file:///some/path')) == \
"Location(proto='file', user=None, host=None, port=None, path='/some/path', archive=None)"
+ assert Location('file:///some/path').to_key_filename() == keys_dir + 'some_path'
- def test_scp(self, monkeypatch):
+ def test_scp(self, monkeypatch, keys_dir):
monkeypatch.delenv('BORG_REPO', raising=False)
assert repr(Location('user@host:/some/path::archive')) == \
"Location(proto='ssh', user='user', host='host', port=None, path='/some/path', archive='archive')"
@@ -112,42 +125,55 @@ def test_scp(self, monkeypatch):
"Location(proto='ssh', user='user', host='2001:db8::192.0.2.1', port=None, path='/some/path', archive='archive')"
assert repr(Location('user@[2001:db8::192.0.2.1]:/some/path')) == \
"Location(proto='ssh', user='user', host='2001:db8::192.0.2.1', port=None, path='/some/path', archive=None)"
+ assert Location('user@[2001:db8::192.0.2.1]:/some/path').to_key_filename() == keys_dir + '2001_db8__192_0_2_1__some_path'
- def test_smb(self, monkeypatch):
+ def test_smb(self, monkeypatch, keys_dir):
monkeypatch.delenv('BORG_REPO', raising=False)
assert repr(Location('file:////server/share/path::archive')) == \
"Location(proto='file', user=None, host=None, port=None, path='//server/share/path', archive='archive')"
+ assert Location('file:////server/share/path::archive').to_key_filename() == keys_dir + 'server_share_path'
- def test_folder(self, monkeypatch):
+ def test_folder(self, monkeypatch, keys_dir):
monkeypatch.delenv('BORG_REPO', raising=False)
assert repr(Location('path::archive')) == \
"Location(proto='file', user=None, host=None, port=None, path='path', archive='archive')"
assert repr(Location('path')) == \
"Location(proto='file', user=None, host=None, port=None, path='path', archive=None)"
+ assert Location('path').to_key_filename() == keys_dir + 'path'
- def test_abspath(self, monkeypatch):
+ def test_long_path(self, monkeypatch, keys_dir):
+ monkeypatch.delenv('BORG_REPO', raising=False)
+ assert Location(os.path.join(*(40 * ['path']))).to_key_filename() == keys_dir + '_'.join(20 * ['path']) + '_'
+
+ def test_abspath(self, monkeypatch, keys_dir):
monkeypatch.delenv('BORG_REPO', raising=False)
assert repr(Location('/some/absolute/path::archive')) == \
"Location(proto='file', user=None, host=None, port=None, path='/some/absolute/path', archive='archive')"
assert repr(Location('/some/absolute/path')) == \
"Location(proto='file', user=None, host=None, port=None, path='/some/absolute/path', archive=None)"
+ assert Location('/some/absolute/path').to_key_filename() == keys_dir + 'some_absolute_path'
assert repr(Location('ssh://user@host/some/path')) == \
"Location(proto='ssh', user='user', host='host', port=None, path='/some/path', archive=None)"
+ assert Location('ssh://user@host/some/path').to_key_filename() == keys_dir + 'host__some_path'
- def test_relpath(self, monkeypatch):
+ def test_relpath(self, monkeypatch, keys_dir):
monkeypatch.delenv('BORG_REPO', raising=False)
assert repr(Location('some/relative/path::archive')) == \
"Location(proto='file', user=None, host=None, port=None, path='some/relative/path', archive='archive')"
assert repr(Location('some/relative/path')) == \
"Location(proto='file', user=None, host=None, port=None, path='some/relative/path', archive=None)"
+ assert Location('some/relative/path').to_key_filename() == keys_dir + 'some_relative_path'
assert repr(Location('ssh://user@host/./some/path')) == \
"Location(proto='ssh', user='user', host='host', port=None, path='/./some/path', archive=None)"
+ assert Location('ssh://user@host/./some/path').to_key_filename() == keys_dir + 'host__some_path'
assert repr(Location('ssh://user@host/~/some/path')) == \
"Location(proto='ssh', user='user', host='host', port=None, path='/~/some/path', archive=None)"
+ assert Location('ssh://user@host/~/some/path').to_key_filename() == keys_dir + 'host__some_path'
assert repr(Location('ssh://user@host/~user/some/path')) == \
"Location(proto='ssh', user='user', host='host', port=None, path='/~user/some/path', archive=None)"
+ assert Location('ssh://user@host/~user/some/path').to_key_filename() == keys_dir + 'host__user_some_path'
- def test_with_colons(self, monkeypatch):
+ def test_with_colons(self, monkeypatch, keys_dir):
monkeypatch.delenv('BORG_REPO', raising=False)
assert repr(Location('/abs/path:w:cols::arch:col')) == \
"Location(proto='file', user=None, host=None, port=None, path='/abs/path:w:cols', archive='arch:col')"
@@ -155,6 +181,7 @@ def test_with_colons(self, monkeypatch):
"Location(proto='file', user=None, host=None, port=None, path='/abs/path:with:colons', archive='archive')"
assert repr(Location('/abs/path:with:colons')) == \
"Location(proto='file', user=None, host=None, port=None, path='/abs/path:with:colons', archive=None)"
+ assert Location('/abs/path:with:colons').to_key_filename() == keys_dir + 'abs_path_with_colons'
def test_user_parsing(self):
# see issue #1930
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_issue_reference"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 2,
"test_score": 1
},
"num_modified_files": 1
} | 1.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[fuse]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-xdist",
"pytest-mock"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc libssl-dev libacl1-dev liblz4-dev libfuse-dev pkg-config"
],
"python": "3.9",
"reqs_path": [
"requirements.d/development.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | -e git+https://github.com/borgbackup/borg.git@2dcbe02e5ae2c5674b350866b145c028966e1664#egg=borgbackup
cachetools==5.5.2
chardet==5.2.0
colorama==0.4.6
coverage==7.8.0
Cython==3.0.12
distlib==0.3.9
exceptiongroup==1.2.2
execnet==2.1.1
filelock==3.18.0
iniconfig==2.1.0
llfuse==1.5.1
msgpack-python==0.5.6
packaging==24.2
platformdirs==4.3.7
pluggy==1.5.0
py-cpuinfo==9.0.0
pyproject-api==1.9.0
pytest==8.3.5
pytest-benchmark==5.1.0
pytest-cov==6.0.0
pytest-mock==3.14.0
pytest-xdist==3.6.1
setuptools-scm==8.2.0
tomli==2.2.1
tox==4.25.0
typing_extensions==4.13.0
virtualenv==20.29.3
| name: borg
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- cachetools==5.5.2
- chardet==5.2.0
- colorama==0.4.6
- coverage==7.8.0
- cython==3.0.12
- distlib==0.3.9
- exceptiongroup==1.2.2
- execnet==2.1.1
- filelock==3.18.0
- iniconfig==2.1.0
- llfuse==1.5.1
- msgpack-python==0.5.6
- packaging==24.2
- platformdirs==4.3.7
- pluggy==1.5.0
- py-cpuinfo==9.0.0
- pyproject-api==1.9.0
- pytest==8.3.5
- pytest-benchmark==5.1.0
- pytest-cov==6.0.0
- pytest-mock==3.14.0
- pytest-xdist==3.6.1
- setuptools-scm==8.2.0
- tomli==2.2.1
- tox==4.25.0
- typing-extensions==4.13.0
- virtualenv==20.29.3
prefix: /opt/conda/envs/borg
| [
"src/borg/testsuite/helpers.py::TestLocationWithoutEnv::test_ssh",
"src/borg/testsuite/helpers.py::TestLocationWithoutEnv::test_scp",
"src/borg/testsuite/helpers.py::TestLocationWithoutEnv::test_long_path"
]
| [
"src/borg/testsuite/helpers.py::test_is_slow_msgpack"
]
| [
"src/borg/testsuite/helpers.py::BigIntTestCase::test_bigint",
"src/borg/testsuite/helpers.py::test_bin_to_hex",
"src/borg/testsuite/helpers.py::TestLocationWithoutEnv::test_file",
"src/borg/testsuite/helpers.py::TestLocationWithoutEnv::test_smb",
"src/borg/testsuite/helpers.py::TestLocationWithoutEnv::test_folder",
"src/borg/testsuite/helpers.py::TestLocationWithoutEnv::test_abspath",
"src/borg/testsuite/helpers.py::TestLocationWithoutEnv::test_relpath",
"src/borg/testsuite/helpers.py::TestLocationWithoutEnv::test_with_colons",
"src/borg/testsuite/helpers.py::TestLocationWithoutEnv::test_user_parsing",
"src/borg/testsuite/helpers.py::TestLocationWithoutEnv::test_underspecified",
"src/borg/testsuite/helpers.py::TestLocationWithoutEnv::test_no_slashes",
"src/borg/testsuite/helpers.py::TestLocationWithoutEnv::test_canonical_path",
"src/borg/testsuite/helpers.py::TestLocationWithoutEnv::test_format_path",
"src/borg/testsuite/helpers.py::TestLocationWithoutEnv::test_bad_syntax",
"src/borg/testsuite/helpers.py::TestLocationWithEnv::test_ssh",
"src/borg/testsuite/helpers.py::TestLocationWithEnv::test_file",
"src/borg/testsuite/helpers.py::TestLocationWithEnv::test_scp",
"src/borg/testsuite/helpers.py::TestLocationWithEnv::test_folder",
"src/borg/testsuite/helpers.py::TestLocationWithEnv::test_abspath",
"src/borg/testsuite/helpers.py::TestLocationWithEnv::test_relpath",
"src/borg/testsuite/helpers.py::TestLocationWithEnv::test_with_colons",
"src/borg/testsuite/helpers.py::TestLocationWithEnv::test_no_slashes",
"src/borg/testsuite/helpers.py::FormatTimedeltaTestCase::test",
"src/borg/testsuite/helpers.py::test_chunkerparams",
"src/borg/testsuite/helpers.py::MakePathSafeTestCase::test",
"src/borg/testsuite/helpers.py::PruneSplitTestCase::test",
"src/borg/testsuite/helpers.py::PruneWithinTestCase::test",
"src/borg/testsuite/helpers.py::StableDictTestCase::test",
"src/borg/testsuite/helpers.py::TestParseTimestamp::test",
"src/borg/testsuite/helpers.py::test_get_cache_dir",
"src/borg/testsuite/helpers.py::test_get_keys_dir",
"src/borg/testsuite/helpers.py::test_get_security_dir",
"src/borg/testsuite/helpers.py::test_file_size",
"src/borg/testsuite/helpers.py::test_file_size_precision",
"src/borg/testsuite/helpers.py::test_file_size_sign",
"src/borg/testsuite/helpers.py::test_parse_file_size[1-1]",
"src/borg/testsuite/helpers.py::test_parse_file_size[20-20]",
"src/borg/testsuite/helpers.py::test_parse_file_size[5K-5000]",
"src/borg/testsuite/helpers.py::test_parse_file_size[1.75M-1750000]",
"src/borg/testsuite/helpers.py::test_parse_file_size[1e+9-1000000000.0]",
"src/borg/testsuite/helpers.py::test_parse_file_size[-1T--1000000000000.0]",
"src/borg/testsuite/helpers.py::test_parse_file_size_invalid[]",
"src/borg/testsuite/helpers.py::test_parse_file_size_invalid[5",
"src/borg/testsuite/helpers.py::test_parse_file_size_invalid[4E]",
"src/borg/testsuite/helpers.py::test_parse_file_size_invalid[2229",
"src/borg/testsuite/helpers.py::test_parse_file_size_invalid[1B]",
"src/borg/testsuite/helpers.py::TestBuffer::test_type",
"src/borg/testsuite/helpers.py::TestBuffer::test_len",
"src/borg/testsuite/helpers.py::TestBuffer::test_resize",
"src/borg/testsuite/helpers.py::TestBuffer::test_limit",
"src/borg/testsuite/helpers.py::TestBuffer::test_get",
"src/borg/testsuite/helpers.py::test_yes_input",
"src/borg/testsuite/helpers.py::test_yes_input_defaults",
"src/borg/testsuite/helpers.py::test_yes_input_custom",
"src/borg/testsuite/helpers.py::test_yes_env",
"src/borg/testsuite/helpers.py::test_yes_env_default",
"src/borg/testsuite/helpers.py::test_yes_defaults",
"src/borg/testsuite/helpers.py::test_yes_retry",
"src/borg/testsuite/helpers.py::test_yes_no_retry",
"src/borg/testsuite/helpers.py::test_yes_output",
"src/borg/testsuite/helpers.py::test_yes_env_output",
"src/borg/testsuite/helpers.py::test_progress_percentage_sameline",
"src/borg/testsuite/helpers.py::test_progress_percentage_step",
"src/borg/testsuite/helpers.py::test_progress_percentage_quiet",
"src/borg/testsuite/helpers.py::test_progress_endless",
"src/borg/testsuite/helpers.py::test_progress_endless_step",
"src/borg/testsuite/helpers.py::test_partial_format",
"src/borg/testsuite/helpers.py::test_chunk_file_wrapper",
"src/borg/testsuite/helpers.py::test_chunkit",
"src/borg/testsuite/helpers.py::test_clean_lines",
"src/borg/testsuite/helpers.py::test_format_line",
"src/borg/testsuite/helpers.py::test_format_line_erroneous",
"src/borg/testsuite/helpers.py::test_replace_placeholders",
"src/borg/testsuite/helpers.py::test_swidth_slice",
"src/borg/testsuite/helpers.py::test_swidth_slice_mixed_characters",
"src/borg/testsuite/helpers.py::test_safe_timestamps",
"src/borg/testsuite/helpers.py::TestPopenWithErrorHandling::test_simple",
"src/borg/testsuite/helpers.py::TestPopenWithErrorHandling::test_not_found",
"src/borg/testsuite/helpers.py::TestPopenWithErrorHandling::test_bad_syntax[mismatched",
"src/borg/testsuite/helpers.py::TestPopenWithErrorHandling::test_bad_syntax[foo",
"src/borg/testsuite/helpers.py::TestPopenWithErrorHandling::test_bad_syntax[]",
"src/borg/testsuite/helpers.py::TestPopenWithErrorHandling::test_shell"
]
| []
| BSD License | 1,300 | [
"src/borg/helpers.py"
]
| [
"src/borg/helpers.py"
]
|
|
frictionlessdata__goodtables-py-193 | cc7b66a856d57270d0f1790d2b6048c7ac051837 | 2017-05-25 18:28:28 | cc7b66a856d57270d0f1790d2b6048c7ac051837 | roll: @amercader
please take a look
roll: @amercader
:+1: | diff --git a/README.md b/README.md
index 2c67352..ea9fe70 100644
--- a/README.md
+++ b/README.md
@@ -46,7 +46,7 @@ print(inspector.inspect('data/invalid.csv'))
# 'valid': False',
# 'error-count': 2,
# 'table-count': 1,
-# 'errors': [],
+# 'warnings': [],
# 'tables': [
# {'time': 0.027,
# 'valid': False',
@@ -90,15 +90,13 @@ As a result of inspection goodtables returns a report dictionary. It includes va
Report errors are categorized by type:
-- general - data can't be loaded or parsed
+- source - data can't be loaded or parsed
- structure - general tabular errors like duplicate headers
- schema - error of checks against JSON Table Schema
Report errors are categorized by context:
-- any - generic errors like IO, HTTP error
-- dataset - the whole dataset errors like invalid datapackage
-- table - the whole table errors like bad encoding
+- table - the whole table errors like IO, HTTP or encoding error
- head - headers errors
- body - contents errors
@@ -115,7 +113,7 @@ inspector = Inspector()
inspector.inspect('datapackage.json', preset='datapackage')
```
-A preset function proceses passed source and options and fills tables list for the following inspection. If any errors have happened a preset function should add them to errors list.
+A preset function proceses passed source and options and fills tables list for the following inspection. If any issues have happened a preset function should add them to warnings list.
#### Builtin presets
@@ -138,6 +136,8 @@ from goodtables import Inspector, preset
@preset('custom-preset')
def custom_preset(source, **options):
+ warnings = []
+ tables = []
for table in source:
try:
tables.append({
@@ -147,12 +147,8 @@ def custom_preset(source, **options):
'extra': {...},
})
except Exception:
- errors.append({
- 'code': 'error-code',
- 'message': 'Error message',
- 'row-number': None,
- 'column-number': None,
- })
+ warnings.append('Warning message')
+ return warnings, tables
inspector = Inspector(custom_presets=[custom_preset])
inspector.inspect(source, preset='custom-preset')
diff --git a/data/invalid_json.json b/data/invalid_json.json
new file mode 100644
index 0000000..2e8edeb
--- /dev/null
+++ b/data/invalid_json.json
@@ -0,0 +1,3 @@
+{
+ 'name': 'name',
+}
diff --git a/data/mixed_datapackage.json b/data/mixed_datapackage.json
new file mode 100644
index 0000000..f9df389
--- /dev/null
+++ b/data/mixed_datapackage.json
@@ -0,0 +1,37 @@
+{
+ "name": "non-tabular",
+ "resources": [
+ {
+ "name": "data",
+ "path": "datapackages/valid/data.csv",
+ "schema": {
+ "fields": [
+ {
+ "name": "id",
+ "type": "string",
+ "constraints": {
+ "required": true
+ }
+ },
+ {
+ "name": "name",
+ "type": "string"
+ },
+ {
+ "name": "description",
+ "type": "string"
+ },
+ {
+ "name": "amount",
+ "type": "number"
+ }
+ ],
+ "primaryKey": "id"
+ }
+ },
+ {
+ "name": "data2",
+ "path": "datapackages/valid/data2.csv"
+ }
+ ]
+}
diff --git a/data/non_tabular_datapackage.json b/data/non_tabular_datapackage.json
new file mode 100644
index 0000000..62601b0
--- /dev/null
+++ b/data/non_tabular_datapackage.json
@@ -0,0 +1,13 @@
+{
+ "name": "non-tabular",
+ "resources": [
+ {
+ "name": "data",
+ "path": "datapackages/valid/data.csv"
+ },
+ {
+ "name": "data2",
+ "path": "datapackages/valid/data2.csv"
+ }
+ ]
+}
diff --git a/examples/ckan.py b/examples/ckan.py
index 625c6ed..6bb995d 100644
--- a/examples/ckan.py
+++ b/examples/ckan.py
@@ -5,7 +5,7 @@ from goodtables import Inspector, preset
@preset('ckan')
def ckan_preset(source, **options):
- errors = []
+ warnings = []
tables = []
url = '%s/api/3/action/package_search' % source
data = requests.get(url).json()
@@ -22,7 +22,7 @@ def ckan_preset(source, **options):
'publisher': package['organization']['name']
},
})
- return errors, tables
+ return warnings, tables
inspector = Inspector(custom_presets=[ckan_preset])
report = inspector.inspect('http://data.surrey.ca', preset='ckan')
diff --git a/examples/custom_preset.py b/examples/custom_preset.py
index a8c1151..9f286dd 100644
--- a/examples/custom_preset.py
+++ b/examples/custom_preset.py
@@ -6,7 +6,7 @@ from goodtables import Inspector, preset
@preset('csvdir')
def csvdir(source):
- errors = []
+ warnings = []
tables = []
for name in os.listdir(source):
path = os.path.join(source, name)
@@ -19,7 +19,7 @@ def csvdir(source):
'filename': name,
},
})
- return errors, tables
+ return warnings, tables
inspector = Inspector(custom_presets=[csvdir])
diff --git a/examples/dropbox.py b/examples/dropbox.py
index 00a5d27..d3417d3 100644
--- a/examples/dropbox.py
+++ b/examples/dropbox.py
@@ -13,7 +13,7 @@ client = dropbox.dropbox.Dropbox(ACCESS_TOKEN)
@preset('dropbox')
def dropbox_preset(source, **options):
- errors = []
+ warnings = []
tables = []
for item in client.files_list_folder(source).entries:
if item.path_lower.endswith('.csv'):
@@ -26,7 +26,7 @@ def dropbox_preset(source, **options):
'folder': source,
},
})
- return errors, tables
+ return warnings, tables
inspector = Inspector(custom_presets=[dropbox_preset])
report = inspector.inspect(FOLDER, preset='dropbox')
diff --git a/features/fail_fast_two_schema_errors.yml b/features/fail_fast_two_schema_errors.yml
index 8129dcc..1b1aed1 100644
--- a/features/fail_fast_two_schema_errors.yml
+++ b/features/fail_fast_two_schema_errors.yml
@@ -5,6 +5,6 @@ fail_fast_two_schema_errors:
source: data/fail_fast_two_schema_errors.csv
schema: data/test_schema.json
report:
- - [1, 4, 3, 'required-constraint']
- [1, 4, 1, 'non-castable-value']
+ - [1, 4, 3, 'required-constraint']
- [1, 5, 1, 'required-constraint']
diff --git a/goodtables/checks/body/duplicate_row.py b/goodtables/checks/body/duplicate_row.py
index 407f483..8ab2e37 100644
--- a/goodtables/checks/body/duplicate_row.py
+++ b/goodtables/checks/body/duplicate_row.py
@@ -14,20 +14,24 @@ from ...register import check
@check('duplicate-row')
def duplicate_row(errors, columns, row_number, state):
rindex = state.setdefault('rindex', {})
- pointer = hash(json.dumps(list(column.get('value') for column in columns)))
- references = rindex.setdefault(pointer, [])
- if references:
- # Add error
- message = spec['errors']['duplicate-row']['message']
- message = message.format(
- row_number=row_number,
- row_numbers=', '.join(map(str, references)))
- errors.append({
- 'code': 'duplicate-row',
- 'message': message,
- 'row-number': row_number,
- 'column-number': None,
- })
- # Clear columns
- del columns[:]
- references.append(row_number)
+ try:
+ pointer = hash(json.dumps(list(column.get('value') for column in columns)))
+ references = rindex.setdefault(pointer, [])
+ except TypeError:
+ pointer = None
+ if pointer:
+ if references:
+ # Add error
+ message = spec['errors']['duplicate-row']['message']
+ message = message.format(
+ row_number=row_number,
+ row_numbers=', '.join(map(str, references)))
+ errors.append({
+ 'code': 'duplicate-row',
+ 'message': message,
+ 'row-number': row_number,
+ 'column-number': None,
+ })
+ # Clear columns
+ del columns[:]
+ references.append(row_number)
diff --git a/goodtables/cli.py b/goodtables/cli.py
index f19385f..42ad027 100644
--- a/goodtables/cli.py
+++ b/goodtables/cli.py
@@ -55,17 +55,14 @@ def _print_report(report, json=False):
return print(json_module.dumps(report, indent=4))
color = 'green' if report['valid'] else 'red'
tables = report.pop('tables')
- errors = report.pop('errors')
+ warnings = report.pop('warnings')
click.secho('DATASET', bold=True)
click.secho('='*7, bold=True)
click.secho(pformat(report), fg=color, bold=True)
- if errors:
+ if warnings:
click.secho('-'*9, bold=True)
- for error in errors:
- error = {key: value or '-' for key, value in error.items()}
- template = '[{row-number},{column-number}] [{code}] {message}'
- message = template.format(**error)
- click.secho(message)
+ for warning in warnings:
+ click.secho('Warning: %s' % warning, fg='yellow')
for table_number, table in enumerate(tables, start=1):
click.secho('\nTABLE [%s]' % table_number, bold=True)
click.secho('='*9, bold=True)
diff --git a/goodtables/inspector.py b/goodtables/inspector.py
index ec85860..dce6a06 100644
--- a/goodtables/inspector.py
+++ b/goodtables/inspector.py
@@ -54,10 +54,9 @@ class Inspector(object):
self.__table_limit = table_limit
self.__row_limit = row_limit
self.__infer_schema = infer_schema
- self.__infer_fields = infer_fields
- self.__order_fields = order_fields
- self.__presets = self.__prepare_presets(copy(custom_presets))
- self.__checks = self.__prepare_checks(checks, copy(custom_checks))
+ self.__presets = _prepare_presets(copy(custom_presets))
+ self.__checks = _prepare_checks(checks, copy(custom_checks),
+ order_fields=order_fields, infer_fields=infer_fields)
def inspect(self, source, preset='table', **options):
"""Inspect source with given preset and options.
@@ -90,34 +89,36 @@ class Inspector(object):
raise exceptions.GoodtablesException(message)
# Prepare tables
- errors, tables = preset_func(source, **options)
- tables = tables[:self.__table_limit]
- for error in errors:
- error['row'] = None
-
- # Collect reports
- reports = []
+ warnings, tables = preset_func(source, **options)
+ if len(tables) > self.__table_limit:
+ warnings.append(
+ 'Dataset inspection has reached %s table(s) limit' %
+ (self.__table_limit))
+ tables = tables[:self.__table_limit]
+
+ # Collect table reports
+ table_reports = []
if tables:
tasks = []
pool = ThreadPool(processes=len(tables))
for table in tables:
tasks.append(pool.apply_async(self.__inspect_table, (table,)))
for task in tasks:
- report = task.get()
- reports.append(report)
+ table_warnings, table_report = task.get()
+ warnings.extend(table_warnings)
+ table_reports.append(table_report)
# Stop timer
stop = datetime.datetime.now()
# Compose report
- errors = errors[:self.__error_limit]
report = {
'time': round((stop - start).total_seconds(), 3),
- 'valid': not bool(errors) and all(report['valid'] for report in reports),
- 'error-count': len(errors) + sum(len(report['errors']) for report in reports),
+ 'valid': all(item['valid'] for item in table_reports),
+ 'error-count': sum(len(item['errors']) for item in table_reports),
'table-count': len(tables),
- 'tables': reports,
- 'errors': errors,
+ 'tables': table_reports,
+ 'warnings': warnings,
}
return report
@@ -131,6 +132,7 @@ class Inspector(object):
# Prepare vars
errors = []
+ warnings = []
headers = None
row_number = 0
fatal_error = False
@@ -145,14 +147,14 @@ class Inspector(object):
stream.open()
sample = stream.sample
headers = stream.headers
- if self.__filter_checks(checks, type='schema'):
+ if _filter_checks(checks, type='schema'):
if schema is None and self.__infer_schema:
schema = Schema(infer(headers, sample))
if schema is None:
- checks = self.__filter_checks(checks, type='schema', inverse=True)
+ checks = _filter_checks(checks, type='schema', inverse=True)
except Exception as exception:
fatal_error = True
- error = self.__compose_error_from_exception(exception)
+ error = _compose_error_from_exception(exception)
errors.append(error)
# Prepare columns
@@ -172,7 +174,7 @@ class Inspector(object):
# Head checks
if not fatal_error:
- head_checks = self.__filter_checks(checks, context='head')
+ head_checks = _filter_checks(checks, context='head')
for check in head_checks:
if not columns:
break
@@ -184,7 +186,7 @@ class Inspector(object):
if not fatal_error:
states = {}
colmap = {column['number']: column for column in columns}
- body_checks = self.__filter_checks(checks, context='body')
+ body_checks = _filter_checks(checks, context='body')
with stream:
extended_rows = stream.iter(extended=True)
while True:
@@ -194,7 +196,7 @@ class Inspector(object):
break
except Exception as exception:
fatal_error = True
- error = self.__compose_error_from_exception(exception)
+ error = _compose_error_from_exception(exception)
errors.append(error)
break
columns = []
@@ -219,8 +221,14 @@ class Inspector(object):
break
error['row'] = row
if row_number >= self.__row_limit:
+ warnings.append(
+ 'Table "%s" inspection has reached %s row(s) limit' %
+ (source, self.__row_limit))
break
if len(errors) >= self.__error_limit:
+ warnings.append(
+ 'Table "%s" inspection has reached %s error(s) limit' %
+ (source, self.__error_limit))
break
# Stop timer
@@ -228,6 +236,7 @@ class Inspector(object):
# Compose report
errors = errors[:self.__error_limit]
+ errors = _sort_errors(errors)
report = copy(extra)
report.update({
'time': round((stop - start).total_seconds(), 3),
@@ -239,120 +248,127 @@ class Inspector(object):
'errors': errors,
})
- return report
-
- def __prepare_presets(self, custom):
+ return warnings, report
- # Prepare presets
- presets = {}
- for preset in chain(vars(presets_module).values(), custom):
- descriptor = getattr(preset, 'preset', None)
- if descriptor:
- presets[descriptor['name']] = preset
- return presets
+# Internal
- def __prepare_checks(self, setup, custom):
+_FILLVALUE = '_FILLVALUE'
- # Prepare errors/checkmap
- errors = []
- checkmap = {}
- for code in config.CHECKS:
- error = copy(spec['errors'][code])
- error.update({'code': code})
- errors.append(error)
- for check in chain(vars(checks_module).values(), custom):
- desc = getattr(check, 'check', None)
- if desc:
- errormap = {error['code']: index for index, error in enumerate(errors)}
- if desc['before'] in errormap:
- errors.insert(errormap[desc['before']], desc)
- if desc['after'] in errormap:
- errors.insert(errormap[desc['after']] + 1, desc)
- checkmap[desc['code']] = check
-
- # Prepare checks
- checks = []
- for error in errors:
- if error['code'] in checkmap:
- checks.append({
- 'func': checkmap[error['code']],
- 'code': error['code'],
- 'type': error['type'],
- 'context': error['context'],
- })
-
- # Filter structure checks
- if setup == 'structure':
- checks = self.__filter_checks(checks, type='structure')
-
- # Filter schema checks
- elif setup == 'schema':
- checks = self.__filter_checks(checks, type='schema')
-
- # Filter granular checks
- elif isinstance(setup, dict):
- default = True not in setup.values()
- checks = [check for check in checks
- if setup.get(check['code'], default)]
-
- # Unknown filter
- elif setup != 'all':
- message = 'Checks filter "%s" is not supported' % setup
- raise exceptions.GoodtablesException(message)
- # Bind options
- for check in checks:
- args, _, _, _ = inspect.getargspec(check['func'])
- if 'order_fields' in args:
- check['func'] = partial(check['func'],
- order_fields=self.__order_fields)
- if 'infer_fields' in args:
- check['func'] = partial(check['func'],
- infer_fields=self.__infer_fields)
-
- return checks
-
- def __filter_checks(self, checks, type=None, context=None, inverse=False):
-
- # Filted checks
- result = []
- comparator = operator.ne
- if inverse:
- comparator = operator.eq
- for check in checks:
- if type and comparator(check['type'], type):
- continue
- if context and comparator(check['context'], context):
- continue
- result.append(check)
-
- return result
-
- def __compose_error_from_exception(self, exception):
+def _prepare_presets(custom):
+
+ # Prepare presets
+ presets = {}
+ for preset in chain(vars(presets_module).values(), custom):
+ descriptor = getattr(preset, 'preset', None)
+ if descriptor:
+ presets[descriptor['name']] = preset
+
+ return presets
+
+
+def _prepare_checks(setup, custom, order_fields, infer_fields):
+
+ # Prepare errors/checkmap
+ errors = []
+ checkmap = {}
+ for code in config.CHECKS:
+ error = copy(spec['errors'][code])
+ error.update({'code': code})
+ errors.append(error)
+ for check in chain(vars(checks_module).values(), custom):
+ desc = getattr(check, 'check', None)
+ if desc:
+ errormap = {error['code']: index for index, error in enumerate(errors)}
+ if desc['before'] in errormap:
+ errors.insert(errormap[desc['before']], desc)
+ if desc['after'] in errormap:
+ errors.insert(errormap[desc['after']] + 1, desc)
+ checkmap[desc['code']] = check
+
+ # Prepare checks
+ checks = []
+ for error in errors:
+ if error['code'] in checkmap:
+ checks.append({
+ 'func': checkmap[error['code']],
+ 'code': error['code'],
+ 'type': error['type'],
+ 'context': error['context'],
+ })
+
+ # Filter structure checks
+ if setup == 'structure':
+ checks = _filter_checks(checks, type='structure')
+
+ # Filter schema checks
+ elif setup == 'schema':
+ checks = _filter_checks(checks, type='schema')
+
+ # Filter granular checks
+ elif isinstance(setup, dict):
+ default = True not in setup.values()
+ checks = [check for check in checks
+ if setup.get(check['code'], default)]
+
+ # Unknown filter
+ elif setup != 'all':
+ message = 'Checks filter "%s" is not supported' % setup
+ raise exceptions.GoodtablesException(message)
+
+ # Bind options
+ for check in checks:
+ args, _, _, _ = inspect.getargspec(check['func'])
+ if 'order_fields' in args:
+ check['func'] = partial(check['func'], order_fields=order_fields)
+ if 'infer_fields' in args:
+ check['func'] = partial(check['func'], infer_fields=infer_fields)
+
+ return checks
+
+
+def _filter_checks(checks, type=None, context=None, inverse=False):
+
+ # Filted checks
+ result = []
+ comparator = operator.ne
+ if inverse:
+ comparator = operator.eq
+ for check in checks:
+ if type and comparator(check['type'], type):
+ continue
+ if context and comparator(check['context'], context):
+ continue
+ result.append(check)
+
+ return result
+
+
+def _compose_error_from_exception(exception):
+ code = 'source-error'
+ message = str(exception)
+ if isinstance(exception, tabulator.exceptions.SourceError):
code = 'source-error'
- message = str(exception)
- if isinstance(exception, tabulator.exceptions.SourceError):
- code = 'source-error'
- elif isinstance(exception, tabulator.exceptions.SchemeError):
- code = 'scheme-error'
- elif isinstance(exception, tabulator.exceptions.FormatError):
- code = 'format-error'
- elif isinstance(exception, tabulator.exceptions.EncodingError):
- code = 'encoding-error'
- elif isinstance(exception, tabulator.exceptions.IOError):
- code = 'io-error'
- elif isinstance(exception, tabulator.exceptions.HTTPError):
- code = 'http-error'
- return {
- 'row': None,
- 'code': code,
- 'message': message,
- 'row-number': None,
- 'column-number': None,
- }
-
-
-# Internal
-
-_FILLVALUE = '_FILLVALUE'
+ elif isinstance(exception, tabulator.exceptions.SchemeError):
+ code = 'scheme-error'
+ elif isinstance(exception, tabulator.exceptions.FormatError):
+ code = 'format-error'
+ elif isinstance(exception, tabulator.exceptions.EncodingError):
+ code = 'encoding-error'
+ elif isinstance(exception, tabulator.exceptions.IOError):
+ code = 'io-error'
+ elif isinstance(exception, tabulator.exceptions.HTTPError):
+ code = 'http-error'
+ return {
+ 'row': None,
+ 'code': code,
+ 'message': message,
+ 'row-number': None,
+ 'column-number': None,
+ }
+
+
+def _sort_errors(errors):
+ return sorted(errors, key=lambda error:
+ (error['row-number'] or 0, error['column-number']))
diff --git a/goodtables/presets/datapackage.py b/goodtables/presets/datapackage.py
index 5d67c14..b956bc5 100644
--- a/goodtables/presets/datapackage.py
+++ b/goodtables/presets/datapackage.py
@@ -6,45 +6,56 @@ from __future__ import unicode_literals
from tabulator import Stream
from jsontableschema import Schema
+from jsontableschema.exceptions import SchemaValidationError
from datapackage import DataPackage
from ..register import preset
-from ..spec import spec
# Module API
@preset('datapackage')
def datapackage(source, **options):
- errors = []
+ warnings = []
tables = []
- # Prepare datapackage
- datapackage = DataPackage(source, **options)
- for exception in datapackage.iter_errors():
- # Error message should contain datapackage source (often it's path)
- message = spec['errors']['datapackage-error']['message']
- message = message.format(
- error_message='{problem} [{source}]'.format(
- problem=str(exception).splitlines()[0],
- source=str(source)))
- errors.append({
- 'code': 'datapackage-error',
- 'message': message,
- 'row-number': None,
- 'column-number': None,
- })
-
- # Add tables
- if not errors:
+ # Load datapackage
+ try:
+ datapackage = DataPackage(source, **options)
+ except Exception as error:
+ warnings.append(
+ 'Data Package "%s" has a loading error "%s"' %
+ (source, error))
+
+ # Validate datapackage
+ if not warnings:
+ for error in datapackage.iter_errors():
+ warnings.append(
+ 'Data Package "%s" has a validation error "%s"' %
+ (source, str(error).splitlines()[0]))
+
+ # Extract datapackage tables
+ if not warnings:
for resource in datapackage.resources:
+ # TODO: after datapackage-v1 will be ready
+ # - we should use `resource.tabular` to filter tabular resources
+ # - we don't need to validate schema here because of dereferencing
path = resource.remote_data_path or resource.local_data_path
+ try:
+ schema = Schema(resource.descriptor['schema'])
+ except SchemaValidationError as error:
+ warnings.append(
+ 'Data Package "%s" has a validation error "%s"' %
+ (source, str(error).splitlines()[0]))
+ continue
+ except Exception:
+ continue
tables.append({
'source': path,
'stream': Stream(path, headers=1),
- 'schema': Schema(resource.descriptor['schema']),
+ 'schema': schema,
'extra': {
'datapackage': str(source),
},
})
- return errors, tables
+ return warnings, tables
diff --git a/goodtables/presets/nested.py b/goodtables/presets/nested.py
index fa63a8d..7a55409 100644
--- a/goodtables/presets/nested.py
+++ b/goodtables/presets/nested.py
@@ -13,10 +13,10 @@ from .. import exceptions
@preset('nested')
def nested(source, presets):
- errors = []
+ warnings = []
tables = []
- # Add errors, tables
+ # Add warnings, tables
source = deepcopy(source)
for item in source:
preset = item.pop('preset', 'table')
@@ -28,8 +28,8 @@ def nested(source, presets):
except KeyError:
message = 'Preset "%s" is not registered' % preset
raise exceptions.GoodtablesException(message)
- item_errors, item_tables = preset_func(**item)
- errors.extend(item_errors)
+ item_warnings, item_tables = preset_func(**item)
+ warnings.extend(item_warnings)
tables.extend(item_tables)
- return errors, tables
+ return warnings, tables
diff --git a/goodtables/presets/table.py b/goodtables/presets/table.py
index f7a2464..a2b53fb 100644
--- a/goodtables/presets/table.py
+++ b/goodtables/presets/table.py
@@ -4,46 +4,44 @@ from __future__ import print_function
from __future__ import absolute_import
from __future__ import unicode_literals
+import six
import jsontableschema
from tabulator import Stream
from jsontableschema import Schema, validate
from ..register import preset
-from ..spec import spec
# Module API
@preset('table')
def table(source, schema=None, **options):
- errors = []
+ warnings = []
tables = []
+ # Ensure not a datapackage
+ if isinstance(source, six.string_types):
+ if source.endswith('datapackage.json'):
+ warnings.append('Use "datapackage" preset for Data Packages')
+
# Prepare schema
- if schema is not None:
- descriptor = schema
- try:
- # https://github.com/frictionlessdata/jsontableschema-py/issues/113
- from jsontableschema.helpers import load_json_source
- loaded_descriptor = load_json_source(schema)
- validate(loaded_descriptor, no_fail_fast=True)
- schema = Schema(loaded_descriptor)
- except jsontableschema.exceptions.MultipleInvalid as exception:
- for error in exception.errors:
- # Error message should contain schema source (often it's path)
- message = spec['errors']['jsontableschema-error']['message']
- message = message.format(
- error_message='{problem} [{source}]'.format(
- problem=str(error).splitlines()[0],
- source=str(descriptor)))
- errors.append({
- 'code': 'jsontableschema-error',
- 'message': message,
- 'row-number': None,
- 'column-number': None,
- })
+ if not warnings:
+ if schema is not None:
+ # TODO: after tableschema-v1 will be ready
+ # - we should use Schema(strict=False) to handle schema errors on inspection
+ # - it means we don't need to validate schema here, mess with helpers etc
+ try:
+ from jsontableschema.helpers import load_json_source
+ loaded_descriptor = load_json_source(schema)
+ validate(loaded_descriptor, no_fail_fast=True)
+ schema = Schema(loaded_descriptor)
+ except jsontableschema.exceptions.MultipleInvalid as exception:
+ for error in exception.errors:
+ warnings.append(
+ 'Table schema "%s" has a validation error "%s"' %
+ (schema, str(error).splitlines()[0]))
# Add table
- if not errors:
+ if not warnings:
options.setdefault('headers', 1)
tables.append({
'source': str(source),
@@ -52,4 +50,4 @@ def table(source, schema=None, **options):
'extra': {},
})
- return errors, tables
+ return warnings, tables
diff --git a/goodtables/spec.json b/goodtables/spec.json
index b367393..fe3e46b 100644
--- a/goodtables/spec.json
+++ b/goodtables/spec.json
@@ -1,39 +1,23 @@
{
- "version": "1.0.0-alpha2",
+ "version": "1.0.0-alpha3",
"errors": {
"io-error": {
"name": "IO Error",
"type": "source",
- "context": "any",
+ "context": "table",
"weight": 100,
"message": "The data source returned an IO Error of type {error_type}",
- "description": ""
+ "description": "Data reading error because of IO error.\n\n How it could be resolved:\n - Fix path if it's not correct."
},
"http-error": {
"name": "HTTP Error",
"type": "source",
- "context": "any",
+ "context": "table",
"weight": 100,
"message": "The data source returned an HTTP error with a status code of {status_code}",
- "description": ""
- },
- "datapackage-error": {
- "name": "DataPackage Error",
- "type": "source",
- "context": "dataset",
- "weight": 100,
- "message": "DataPackage error: {error_message}",
- "description": ""
- },
- "jsontableschema-error": {
- "name": "JSON Table Schema Error",
- "type": "source",
- "context": "dataset",
- "weight": 100,
- "message": "JSON Table Schema error: {error_message}",
- "description": ""
+ "description": "Data reading error because of HTTP error.\n\n How it could be resolved:\n - Fix url link if it's not correct."
},
"source-error": {
"name": "Source Error",
@@ -41,7 +25,7 @@
"context": "table",
"weight": 100,
"message": "The data source has not supported or has inconsistent contents; no tabular data can be extracted",
- "description": ""
+ "description": "Data reading error because of not supported or inconsistent contents.\n\n How it could be resolved:\n - Fix data contents (e.g. change JSON data to array or arrays/objects).\n - Set correct source settings in {validator}."
},
"scheme-error": {
"name": "Scheme Error",
@@ -49,7 +33,7 @@
"context": "table",
"weight": 100,
"message": "The data source is in an unknown scheme; no tabular data can be extracted",
- "description": ""
+ "description": "Data reading error because of incorrect scheme.\n\n How it could be resolved:\n - Fix data scheme (e.g. change scheme from `ftp` to `http`).\n - Set correct scheme in {validator}."
},
"format-error": {
"name": "Format Error",
@@ -57,7 +41,7 @@
"context": "table",
"weight": 100,
"message": "The data source is in an unknown format; no tabular data can be extracted",
- "description": ""
+ "description": "Data reading error because of incorrect format.\n\n How it could be resolved:\n - Fix data format (e.g. change file extension from `txt` to `csv`).\n - Set correct format in {validator}."
},
"encoding-error": {
"name": "Encoding Error",
@@ -65,7 +49,7 @@
"context": "table",
"weight": 100,
"message": "The data source could not be successfully decoded with {encoding} encoding",
- "description": ""
+ "description": "Data reading error because of an encoding problem.\n\n How it could be resolved:\n - Fix data source if it's broken.\n - Set correct encoding in {validator}."
},
@@ -75,7 +59,7 @@
"context": "head",
"weight": 3,
"message": "Header in column {column_number} is blank",
- "description": ""
+ "description": "A column in the header row is missing a value. Column names should be provided.\n\n How it could be resolved:\n - Add the missing column name to the first row of the data source.\n - If the first row starts with, or ends with a comma, remove it.\n - If this error should be ignored disable `blank-header` check in {validator}."
},
"duplicate-header": {
"name": "Duplicate Header",
@@ -83,7 +67,7 @@
"context": "head",
"weight": 3,
"message": "Header in column {column_number} is duplicated to header in column(s) {column_numbers}",
- "description": ""
+ "description": "Two columns in the header row have the same value. Column names should be unique.\n\n How it could be resolved:\n - Add the missing column name to the first row of the data.\n - If the first row starts with, or ends with a comma, remove it.\n - If this error should be ignored disable `duplicate-header` check in {validator}."
},
"blank-row": {
"name": "Blank Row",
@@ -91,7 +75,7 @@
"context": "body",
"weight": 9,
"message": "Row {row_number} is completely blank",
- "description": ""
+ "description": "This row is empty. A row should contain at least one value.\n\n How it could be resolved:\n - Delete the row.\n - If this error should be ignored disable `blank-row` check in {validator}."
},
"duplicate-row": {
"name": "Duplicate Row",
@@ -99,7 +83,7 @@
"context": "body",
"weight": 5,
"message": "Row {row_number} is duplicated to row(s) {row_numbers}",
- "description": ""
+ "description": "The exact same data has been seen in another row.\n\n How it could be resolved:\n - If some of the data is incorrect, correct it.\n - If the whole row is an incorrect duplicate, remove it.\n - If this error should be ignored disable `duplicate-row` check in {validator}."
},
"extra-value": {
"name": "Extra Value",
@@ -107,7 +91,7 @@
"context": "body",
"weight": 9,
"message": "Row {row_number} has an extra value in column {column_number}",
- "description": ""
+ "description": "This row has more values compared to the header row (the first row in the data source). A key concept is that all the rows in tabular data must have the same number of columns.\n\n How it could be resolved:\n - Check data has an extra comma between the values in this row.\n - If this error should be ignored disable `extra-value` check in {validator}."
},
"missing-value": {
"name": "Missing Value",
@@ -115,17 +99,25 @@
"context": "body",
"weight": 9,
"message": "Row {row_number} has a missing value in column {column_number}",
- "description": ""
+ "description": "This row has less values compared to the header row (the first row in the data source). A key concept is that all the rows in tabular data must have the same number of columns.\n\n How it could be resolved:\n - Check data is not missing a comma between the values in this row.\n - If this error should be ignored disable `missing-value` check in {validator}."
},
+ "schema-error": {
+ "name": "Table Schema Error",
+ "type": "schema",
+ "context": "table",
+ "weight": 15,
+ "message": "Table Schema error: {error_message}",
+ "description": "Provided schema is not valid.\n\n How it could be resolved:\n - Update schema descriptor to be a valid descriptor\n - If this error should be ignored disable schema cheks in {validator}."
+ },
"non-matching-header": {
"name": "Non-Matching Header",
"type": "schema",
"context": "head",
"weight": 9,
"message": "Header in column {column_number} doesn't match field name {field_name}",
- "description": ""
+ "description": "One of the data source headers doens't match the field name defined in the schema.\n\n How it could be resolved:\n - Rename header in the data source or field in the schema\n - If this error should be ignored disable `non-matching-header` check in {validator}."
},
"extra-header": {
"name": "Extra Header",
@@ -133,7 +125,7 @@
"context": "head",
"weight": 9,
"message": "There is an extra header in column {column_number}",
- "description": ""
+ "description": "The first row of the data source contains header that doesn't exist in the schema.\n\n How it could be resolved:\n - Remove the extra column from the data source or add the missing field to the schema\n - If this error should be ignored disable `extra-header` check in {validator}."
},
"missing-header": {
"name": "Missing Header",
@@ -141,7 +133,15 @@
"context": "head",
"weight": 9,
"message": "There is a missing header in column {column_number}",
- "description": ""
+ "description": "Based on the schema there should be a header that is missing in the first row of the data source.\n\n How it could be resolved:\n - Add the missing column to the data source or remove the extra field from the schema\n - If this error should be ignored disable `missing-header` check in {validator}."
+ },
+ "non-castable-value": {
+ "name": "Non-Castable Value",
+ "type": "schema",
+ "context": "body",
+ "weight": 9,
+ "message": "Row {row_number} has non castable value {value} in column {column_number} (type: {field_type}, format: {field_format})",
+ "description": "The value can't be cast based on the schema type and format for this field.\n\n How it could be resolved:\n - If this value is not correct, update the value.\n - If this error should be ignored disable `non-castable-value` check in {validator}. In this case all schema checks for row values will be ignored."
},
"required-constraint": {
"name": "Required Constraint",
@@ -149,7 +149,7 @@
"context": "body",
"weight": 9,
"message": "Column {column_number} is a required field, but row {row_number} has no value",
- "description": ""
+ "description": "This field is a required field, but it contains no value.\n\n How it could be resolved:\n - If this value is not correct, update the value.\n - If value is correct, then remove the `required` constraint from the schema.\n - If this error should be ignored disable `required-constraint` check in {validator}."
},
"pattern-constraint": {
"name": "Pattern Constraint",
@@ -157,15 +157,7 @@
"context": "body",
"weight": 7,
"message": "The value {value} in row {row_number} and column {column_number} does not conform to the pattern constraint of {constraint}",
- "description": ""
- },
- "non-castable-value": {
- "name": "Non-Castable Value",
- "type": "schema",
- "context": "body",
- "weight": 9,
- "message": "Row {row_number} has non castable value {value} in column {column_number} (type: {field_type}, format: {field_format})",
- "description": ""
+ "description": "This field value should conform to constraint pattern.\n\n How it could be resolved:\n - If this value is not correct, update the value.\n - If value is correct, then remove the `pattern` constraint from the schema.\n - If this error should be ignored disable `pattern-constraint` check in {validator}."
},
"unique-constraint": {
"name": "Unique Constraint",
@@ -173,7 +165,7 @@
"context": "body",
"weight": 9,
"message": "Rows {row_numbers} has unique constraint violation in column {column_number}",
- "description": ""
+ "description": "This field is a unique field but it contains a value that has been used in another row.\n\n How it could be resolved:\n - If this value is not correct, update the value.\n - If value is correct, then the values in this column are not unique. Remove the `unique` constraint from the schema.\n - If this error should be ignored disable `unique-constraint` check in {validator}."
},
"enumerable-constraint": {
"name": "Enumerable Constraint",
@@ -181,7 +173,7 @@
"context": "body",
"weight": 7,
"message": "The value {value} in row {row_number} and column {column_number} does not conform to the given enumeration: {constraint}",
- "description": ""
+ "description": "This field value should be equal to one of the constraint enumeration.\n\n How it could be resolved:\n - If this value is not correct, update the value.\n - If value is correct, then remove the `enum` constraint from the schema.\n - If this error should be ignored disable `enumerable-constraint` check in {validator}."
},
"minimum-constraint": {
"name": "Minimum Constraint",
@@ -189,7 +181,7 @@
"context": "body",
"weight": 7,
"message": "The value {value} in row {row_number} and column {column_number} does not conform to the minimum constraint of {constraint}",
- "description": ""
+ "description": "This field value should be greater or equal than constraint value.\n\n How it could be resolved:\n - If this value is not correct, update the value.\n - If value is correct, then remove the `minimum` constraint from the schema.\n - If this error should be ignored disable `minimum-constraint` check in {validator}."
},
"maximum-constraint": {
"name": "Maximum Constraint",
@@ -197,7 +189,7 @@
"context": "body",
"weight": 7,
"message": "The value {value} in row {row_number} and column {column_number} does not conform to the maximum constraint of {constraint}",
- "description": ""
+ "description": "This field value should be less or equal than constraint value.\n\n How it could be resolved:\n - If this value is not correct, update the value.\n - If value is correct, then remove the `maximum` constraint from the schema.\n - If this error should be ignored disable `maximum-constraint` check in {validator}."
},
"minimum-length-constraint": {
"name": "Minimum Length Constraint",
@@ -205,7 +197,7 @@
"context": "body",
"weight": 7,
"message": "The value {value} in row {row_number} and column {column_number} does not conform to the minimum length constraint of {constraint}",
- "description": ""
+ "description": "A lenght of this field value should be greater or equal than schema constraint value.\n\n How it could be resolved:\n - If this value is not correct, update the value.\n - If value is correct, then remove the `minimumLength` constraint from the schema.\n - If this error should be ignored disable `minimum-length-constraint` check in {validator}."
},
"maximum-length-constraint": {
"name": "Maximum Length Constraint",
@@ -213,7 +205,7 @@
"context": "body",
"weight": 7,
"message": "The value {value} in row {row_number} and column {column_number} does not conform to the maximum length constraint of {constraint}",
- "description": ""
+ "description": "A lenght of this field value should be less or equal than schema constraint value.\n\n How it could be resolved:\n - If this value is not correct, update the value.\n - If value is correct, then remove the `maximumLength` constraint from the schema.\n - If this error should be ignored disable `maximum-length-constraint` check in {validator}."
}
| Add warnings system?
# Overview
It's up in the air, related to issues:
- #157
- https://github.com/frictionlessdata/data-quality-spec/issues/11
It should replace dataset level errors and provide ability to do other types of feedback to users inside the goodtables report (not failing with an exception).
It should be like (a `report` part example):
```json
{
"warninigs": [
"Invalid datapackage property #1",
"Invalid datapackage property #2",
"File <name> inspection has stopped because of error limit",
]
}
```
# Tasks
- [ ] implement warning system instead of dataset level errors
- [ ] make presets emit warnings instead of errors
- [ ] add warnings about hitting limits (table, error etc) | frictionlessdata/goodtables-py | diff --git a/tests/checks/body/constraints/test_enumerable_constraint.py b/tests/checks/body/constraints/test_enumerable_constraint.py
index 036b4d3..872b9f9 100644
--- a/tests/checks/body/constraints/test_enumerable_constraint.py
+++ b/tests/checks/body/constraints/test_enumerable_constraint.py
@@ -9,7 +9,7 @@ from goodtables import checks
# Test
-def test_enumerable_constraint(log):
+def test_check_enumerable_constraint(log):
errors = []
columns = []
checks.enumerable_constraint(errors, columns, 1)
diff --git a/tests/checks/body/constraints/test_maximum_constraint.py b/tests/checks/body/constraints/test_maximum_constraint.py
index de76495..525e7cf 100644
--- a/tests/checks/body/constraints/test_maximum_constraint.py
+++ b/tests/checks/body/constraints/test_maximum_constraint.py
@@ -9,7 +9,7 @@ from goodtables import checks
# Test
-def test_maximum_constraint(log):
+def test_check_maximum_constraint(log):
errors = []
columns = []
checks.maximum_constraint(errors, columns, 1)
diff --git a/tests/checks/body/constraints/test_maximum_length_constraint.py b/tests/checks/body/constraints/test_maximum_length_constraint.py
index 20958e8..d670378 100644
--- a/tests/checks/body/constraints/test_maximum_length_constraint.py
+++ b/tests/checks/body/constraints/test_maximum_length_constraint.py
@@ -9,7 +9,7 @@ from goodtables import checks
# Test
-def test_maximum_length_constraint(log):
+def test_check_maximum_length_constraint(log):
errors = []
columns = []
checks.maximum_length_constraint(errors, columns, 1)
diff --git a/tests/checks/body/constraints/test_minimum_constraint.py b/tests/checks/body/constraints/test_minimum_constraint.py
index fac226a..5be5783 100644
--- a/tests/checks/body/constraints/test_minimum_constraint.py
+++ b/tests/checks/body/constraints/test_minimum_constraint.py
@@ -9,7 +9,7 @@ from goodtables import checks
# Test
-def test_minimum_constraint(log):
+def test_check_minimum_constraint(log):
errors = []
columns = []
checks.minimum_constraint(errors, columns, 1)
diff --git a/tests/checks/body/constraints/test_minimum_length_constraint.py b/tests/checks/body/constraints/test_minimum_length_constraint.py
index df28305..7e9b0df 100644
--- a/tests/checks/body/constraints/test_minimum_length_constraint.py
+++ b/tests/checks/body/constraints/test_minimum_length_constraint.py
@@ -9,7 +9,7 @@ from goodtables import checks
# Test
-def test_minimum_length_constraint(log):
+def test_check_minimum_length_constraint(log):
errors = []
columns = []
checks.minimum_length_constraint(errors, columns, 1)
diff --git a/tests/checks/body/constraints/test_pattern_constraint.py b/tests/checks/body/constraints/test_pattern_constraint.py
index 9ed9197..ec4f1a0 100644
--- a/tests/checks/body/constraints/test_pattern_constraint.py
+++ b/tests/checks/body/constraints/test_pattern_constraint.py
@@ -9,7 +9,7 @@ from goodtables import checks
# Test
-def test_pattern_constraint(log):
+def test_check_pattern_constraint(log):
errors = []
columns = []
checks.pattern_constraint(errors, columns, 1)
diff --git a/tests/checks/body/constraints/test_required_constraint.py b/tests/checks/body/constraints/test_required_constraint.py
index 40224b2..60508fc 100644
--- a/tests/checks/body/constraints/test_required_constraint.py
+++ b/tests/checks/body/constraints/test_required_constraint.py
@@ -9,7 +9,7 @@ from goodtables import checks
# Test
-def test_required_constraint(log):
+def test_check_required_constraint(log):
errors = []
columns = []
checks.required_constraint(errors, columns, 1)
diff --git a/tests/checks/body/constraints/test_unique_constraint.py b/tests/checks/body/constraints/test_unique_constraint.py
index d89c52a..7c52996 100644
--- a/tests/checks/body/constraints/test_unique_constraint.py
+++ b/tests/checks/body/constraints/test_unique_constraint.py
@@ -9,7 +9,7 @@ from goodtables import checks
# Test
-def test_unique_constraint(log):
+def test_check_unique_constraint(log):
state = {}
errors = []
columns = []
diff --git a/tests/checks/body/test_blank_row.py b/tests/checks/body/test_blank_row.py
index 1df28c4..9c7da79 100644
--- a/tests/checks/body/test_blank_row.py
+++ b/tests/checks/body/test_blank_row.py
@@ -9,7 +9,7 @@ from goodtables import checks
# Test
-def test_blank_row(log):
+def test_check_blank_row(log):
errors = []
columns = [
{'number': 1,
@@ -22,7 +22,7 @@ def test_blank_row(log):
assert len(columns) == 1
-def test_blank_row_problem(log):
+def test_check_blank_row_problem(log):
errors = []
columns = [
{'number': 1,
diff --git a/tests/checks/body/test_duplicate_row.py b/tests/checks/body/test_duplicate_row.py
index a5ff368..c7e164b 100644
--- a/tests/checks/body/test_duplicate_row.py
+++ b/tests/checks/body/test_duplicate_row.py
@@ -9,7 +9,7 @@ from goodtables import checks
# Test
-def test_duplicate_row(log):
+def test_check_duplicate_row(log):
errors = []
columns1 = [
{'number': 1,
@@ -31,7 +31,7 @@ def test_duplicate_row(log):
assert len(columns2) == 1
-def test_duplicate_row_problem(log):
+def test_check_duplicate_row_problem(log):
errors = []
columns1 = [
{'number': 1,
diff --git a/tests/checks/body/test_extra_value.py b/tests/checks/body/test_extra_value.py
index 218153e..0ff77bf 100644
--- a/tests/checks/body/test_extra_value.py
+++ b/tests/checks/body/test_extra_value.py
@@ -10,7 +10,7 @@ from goodtables import checks
# Test
-def test_extra_value(log):
+def test_check_extra_value(log):
errors = []
columns = [
{'number': 1,
@@ -27,7 +27,7 @@ def test_extra_value(log):
assert len(columns) == 2
-def test_extra_value_problem(log):
+def test_check_extra_value_problem(log):
errors = []
columns = [
{'number': 1,
diff --git a/tests/checks/body/test_missing_value.py b/tests/checks/body/test_missing_value.py
index 6236edf..6d9a100 100644
--- a/tests/checks/body/test_missing_value.py
+++ b/tests/checks/body/test_missing_value.py
@@ -10,7 +10,7 @@ from goodtables import checks
# Test
-def test_missing_value(log):
+def test_check_missing_value(log):
errors = []
columns = [
{'number': 1,
@@ -27,7 +27,7 @@ def test_missing_value(log):
assert len(columns) == 2
-def test_missing_value_problem(log):
+def test_check_missing_value_problem(log):
errors = []
columns = [
{'number': 1,
diff --git a/tests/checks/body/test_non_castable_value.py b/tests/checks/body/test_non_castable_value.py
index 33f486b..1b45207 100644
--- a/tests/checks/body/test_non_castable_value.py
+++ b/tests/checks/body/test_non_castable_value.py
@@ -9,7 +9,7 @@ from goodtables import checks
# Test
-def test_non_castable_value(log):
+def test_check_non_castable_value(log):
errors = []
columns = [
{'number': 1,
@@ -23,7 +23,7 @@ def test_non_castable_value(log):
assert columns[0]['value'] == 1
-def test_non_castable_value_problem(log):
+def test_check_non_castable_value_problem(log):
errors = []
columns = [
{'number': 1,
diff --git a/tests/checks/head/test_blank_header.py b/tests/checks/head/test_blank_header.py
index 09600a1..167448f 100644
--- a/tests/checks/head/test_blank_header.py
+++ b/tests/checks/head/test_blank_header.py
@@ -9,7 +9,7 @@ from goodtables import checks
# Test
-def test_blank_header(log):
+def test_check_blank_header(log):
errors = []
columns = [
{'number': 1,
@@ -21,7 +21,7 @@ def test_blank_header(log):
assert len(columns) == 1
-def test_blank_header_problem(log):
+def test_check_blank_header_problem(log):
errors = []
columns = [
{'number': 1,
diff --git a/tests/checks/head/test_duplicate_header.py b/tests/checks/head/test_duplicate_header.py
index 9f005d7..a5f8fb2 100644
--- a/tests/checks/head/test_duplicate_header.py
+++ b/tests/checks/head/test_duplicate_header.py
@@ -9,7 +9,7 @@ from goodtables import checks
# Test
-def test_duplicate_header(log):
+def test_check_duplicate_header(log):
errors = []
columns = [
{'number': 1,
@@ -24,7 +24,7 @@ def test_duplicate_header(log):
assert len(columns) == 2
-def test_duplicate_header_problem(log):
+def test_check_duplicate_header_problem(log):
errors = []
columns = [
{'number': 1,
diff --git a/tests/checks/head/test_extra_header.py b/tests/checks/head/test_extra_header.py
index c58b31d..f8efb71 100644
--- a/tests/checks/head/test_extra_header.py
+++ b/tests/checks/head/test_extra_header.py
@@ -10,7 +10,7 @@ from goodtables import checks
# Test
-def test_extra_header(log):
+def test_check_extra_header(log):
errors = []
columns = [
{'number': 1,
@@ -26,7 +26,7 @@ def test_extra_header(log):
assert len(columns) == 2
-def test_extra_header_infer(log):
+def test_check_extra_header_infer(log):
errors = []
columns = [
{'number': 1,
@@ -42,7 +42,7 @@ def test_extra_header_infer(log):
assert columns[1]['field'].name == 'name2'
-def test_extra_header_problem(log):
+def test_check_extra_header_problem(log):
errors = []
columns = [
{'number': 1,
diff --git a/tests/checks/head/test_missing_header.py b/tests/checks/head/test_missing_header.py
index 6b18fbd..d5ad6d0 100644
--- a/tests/checks/head/test_missing_header.py
+++ b/tests/checks/head/test_missing_header.py
@@ -10,7 +10,7 @@ from goodtables import checks
# Test
-def test_missing_header(log):
+def test_check_missing_header(log):
errors = []
columns = [
{'number': 1,
@@ -25,7 +25,7 @@ def test_missing_header(log):
assert len(columns) == 2
-def test_missing_header_problem(log):
+def test_check_missing_header_problem(log):
errors = []
columns = [
{'number': 1,
diff --git a/tests/checks/head/test_non_matching_header.py b/tests/checks/head/test_non_matching_header.py
index 7ab5b55..675cd8d 100644
--- a/tests/checks/head/test_non_matching_header.py
+++ b/tests/checks/head/test_non_matching_header.py
@@ -10,7 +10,7 @@ from goodtables import checks
# Test
-def test_non_matching_header(log):
+def test_check_non_matching_header(log):
errors = []
columns = [
{'number': 1,
@@ -27,7 +27,7 @@ def test_non_matching_header(log):
assert len(columns) == 3
-def test_non_matching_header_problem(log):
+def test_check_non_matching_header_problem(log):
errors = []
columns = [
{'number': 1,
@@ -47,7 +47,7 @@ def test_non_matching_header_problem(log):
assert len(columns) == 1
-def test_non_matching_header_order_fields(log):
+def test_check_non_matching_header_order_fields(log):
errors = []
columns = [
{'number': 1,
@@ -64,7 +64,7 @@ def test_non_matching_header_order_fields(log):
assert len(columns) == 3
-def test_non_matching_header_order_fields_problem(log):
+def test_check_non_matching_header_order_fields_problem(log):
errors = []
columns = [
{'number': 1,
diff --git a/tests/conftest.py b/tests/conftest.py
index 589f74b..1c5f720 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -27,8 +27,6 @@ def log():
for error in struct:
result.append(pack_error(error))
if isinstance(struct, dict):
- for error in struct['errors']:
- result.append(pack_error(error))
for table_number, table in enumerate(struct['tables'], start=1):
for error in table['errors']:
result.append(pack_error(error, table_number))
diff --git a/tests/presets/test_datapackage.py b/tests/presets/test_datapackage.py
index 08cc0fc..3a0a398 100644
--- a/tests/presets/test_datapackage.py
+++ b/tests/presets/test_datapackage.py
@@ -9,7 +9,26 @@ from goodtables import presets
# Test
-def test_datapackage():
- errors, tables = presets.datapackage('data/datapackages/valid/datapackage.json')
- assert len(errors) == 0
+def test_preset_datapackage():
+ warnings, tables = presets.datapackage('data/datapackages/valid/datapackage.json')
+ assert len(warnings) == 0
assert len(tables) == 2
+
+
+def test_preset_datapackage_non_tabular_datapackage_issue_170():
+ warnings, tables = presets.datapackage('data/non_tabular_datapackage.json')
+ assert len(warnings) == 0
+ assert len(tables) == 0
+
+
+def test_preset_datapackage_mixed_datapackage_issue_170():
+ warnings, tables = presets.datapackage('data/mixed_datapackage.json')
+ assert len(warnings) == 0
+ assert len(tables) == 1
+
+
+def test_preset_datapackage_invalid_json_issue_192():
+ warnings, tables = presets.datapackage('data/invalid_json.json')
+ assert len(warnings) == 1
+ assert len(tables) == 0
+ assert 'Unable to parse JSON' in warnings[0]
diff --git a/tests/presets/test_nested.py b/tests/presets/test_nested.py
index 6f1b646..f17ae93 100644
--- a/tests/presets/test_nested.py
+++ b/tests/presets/test_nested.py
@@ -9,8 +9,8 @@ from goodtables import presets
# Test
-def test_nested():
- errors, tables = presets.nested([
+def test_preset_nested():
+ warnings, tables = presets.nested([
{'source': 'data/valid.csv'},
{'source': 'data/invalid.csv', 'preset': 'table'},
{'source': 'data/datapackages/valid/datapackage.json', 'preset': 'datapackage'},
@@ -20,5 +20,5 @@ def test_nested():
'nested': presets.nested,
'datapackage': presets.datapackage,
})
- assert len(errors) == 0
+ assert len(warnings) == 0
assert len(tables) == 6
diff --git a/tests/presets/test_table.py b/tests/presets/test_table.py
index 4d5a5d2..1281a86 100644
--- a/tests/presets/test_table.py
+++ b/tests/presets/test_table.py
@@ -9,7 +9,14 @@ from goodtables import presets
# Test
-def test_table():
- errors, tables = presets.table('data/valid.csv')
- assert len(errors) == 0
+def test_preset_table():
+ warnings, tables = presets.table('data/valid.csv')
+ assert len(warnings) == 0
assert len(tables) == 1
+
+
+def test_preset_table_but_got_datapackage_issue_187():
+ warnings, tables = presets.table('data/datapackages/valid/datapackage.json')
+ assert len(warnings) == 1
+ assert len(tables) == 0
+ assert 'Use "datapackage" preset' in warnings[0]
diff --git a/tests/test_inspector.py b/tests/test_inspector.py
index 296f323..82d4626 100644
--- a/tests/test_inspector.py
+++ b/tests/test_inspector.py
@@ -7,7 +7,7 @@ from __future__ import unicode_literals
from goodtables import Inspector
-# Tests [table]
+# Preset: table
def test_inspector_table_valid(log):
inspector = Inspector()
@@ -25,9 +25,9 @@ def test_inspector_table_invalid(log):
(1, 2, 4, 'missing-value'),
(1, 3, None, 'duplicate-row'),
(1, 4, None, 'blank-row'),
- (1, 5, 5, 'extra-value'),
(1, 5, 3, 'non-castable-value'),
(1, 5, 4, 'non-castable-value'),
+ (1, 5, 5, 'extra-value'),
]
@@ -51,7 +51,7 @@ def test_inspector_table_invalid_row_limit(log):
]
-# Tests [datapackage]
+# Preset: datapackage
def test_inspector_datapackage_valid(log):
inspector = Inspector()
@@ -79,7 +79,7 @@ def test_inspector_datapackage_invalid_table_limit(log):
]
-# Tests [nested]
+# Preset: nested
def test_inspector_tables_invalid(log):
inspector = Inspector(infer_schema=True)
@@ -95,13 +95,13 @@ def test_inspector_tables_invalid(log):
(2, 2, 4, 'missing-value'),
(2, 3, None, 'duplicate-row'),
(2, 4, None, 'blank-row'),
- (2, 5, 5, 'extra-value'),
(2, 5, 3, 'non-castable-value'),
(2, 5, 4, 'non-castable-value'),
+ (2, 5, 5, 'extra-value'),
]
-# Tests [exceptions]
+# Catch exceptions
def test_inspector_catch_all_open_exceptions(log):
inspector = Inspector()
@@ -118,3 +118,64 @@ def test_inspector_catch_all_iter_exceptions(log):
assert log(report) == [
(1, None, None, 'source-error'),
]
+
+
+# Warnings
+
+def test_inspector_warnings_no():
+ inspector = Inspector()
+ source = 'data/datapackages/invalid/datapackage.json'
+ report = inspector.inspect(source, preset='datapackage')
+ assert len(report['warnings']) == 0
+
+
+def test_inspector_warnings_bad_datapackage_json():
+ inspector = Inspector()
+ source = 'data/invalid_json.json'
+ report = inspector.inspect(source, preset='datapackage')
+ assert len(report['warnings']) == 1
+ assert 'Unable to parse JSON' in report['warnings'][0]
+
+
+def test_inspector_warnings_table_limit():
+ inspector = Inspector(table_limit=1)
+ source = 'data/datapackages/invalid/datapackage.json'
+ report = inspector.inspect(source, preset='datapackage')
+ assert len(report['warnings']) == 1
+ assert 'table(s) limit' in report['warnings'][0]
+
+
+def test_inspector_warnings_row_limit():
+ inspector = Inspector(row_limit=1)
+ source = 'data/datapackages/invalid/datapackage.json'
+ report = inspector.inspect(source, preset='datapackage')
+ assert len(report['warnings']) == 2
+ assert 'row(s) limit' in report['warnings'][0]
+ assert 'row(s) limit' in report['warnings'][1]
+
+
+def test_inspector_warnings_error_limit():
+ inspector = Inspector(error_limit=1)
+ source = 'data/datapackages/invalid/datapackage.json'
+ report = inspector.inspect(source, preset='datapackage')
+ assert len(report['warnings']) == 2
+ assert 'error(s) limit' in report['warnings'][0]
+ assert 'error(s) limit' in report['warnings'][1]
+
+
+def test_inspector_warnings_table_and_row_limit():
+ inspector = Inspector(table_limit=1, row_limit=1)
+ source = 'data/datapackages/invalid/datapackage.json'
+ report = inspector.inspect(source, preset='datapackage')
+ assert len(report['warnings']) == 2
+ assert 'table(s) limit' in report['warnings'][0]
+ assert 'row(s) limit' in report['warnings'][1]
+
+
+def test_inspector_warnings_table_and_error_limit():
+ inspector = Inspector(table_limit=1, error_limit=1)
+ source = 'data/datapackages/invalid/datapackage.json'
+ report = inspector.inspect(source, preset='datapackage')
+ assert len(report['warnings']) == 2
+ assert 'table(s) limit' in report['warnings'][0]
+ assert 'error(s) limit' in report['warnings'][1]
diff --git a/tests/test_spec.py b/tests/test_spec.py
index 4eeaf4f..53076ad 100644
--- a/tests/test_spec.py
+++ b/tests/test_spec.py
@@ -11,7 +11,6 @@ import requests
from goodtables.spec import spec
[email protected]
def test_spec_is_up_to_date():
origin_spec = requests.get('https://raw.githubusercontent.com/frictionlessdata/data-quality-spec/master/spec.json').json()
assert spec == origin_spec, 'run `make spec` to update the spec'
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_issue_reference",
"has_added_files",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 3,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 12
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install --upgrade -e .[develop,ods]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"mock",
"pyyaml",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.6",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
boto3==1.23.10
botocore==1.26.10
certifi==2021.5.30
chardet==5.0.0
charset-normalizer==2.0.12
click==6.7
datapackage==0.8.9
distlib==0.3.9
et-xmlfile==1.1.0
ezodf==0.3.2
filelock==3.4.1
future==0.18.3
-e git+https://github.com/frictionlessdata/goodtables-py.git@cc7b66a856d57270d0f1790d2b6048c7ac051837#egg=goodtables
greenlet==2.0.2
idna==3.10
ijson==3.3.0
importlib-metadata==4.8.3
importlib-resources==5.4.0
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
isodate==0.6.1
jmespath==0.10.0
jsonlines==3.1.0
jsonschema==2.6.0
jsontableschema==0.10.1
linear-tsv==1.1.0
lxml==3.8.0
mccabe==0.7.0
mock==5.2.0
more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work
openpyxl==3.1.3
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
platformdirs==2.4.0
pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pycodestyle==2.10.0
pydocstyle==6.3.0
pyflakes==3.0.1
pylama==7.7.1
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
pytest==6.2.4
python-dateutil==2.9.0.post0
PyYAML==6.0.1
requests==2.27.1
rfc3986==0.4.1
s3transfer==0.5.2
six==1.17.0
snowballstemmer==2.2.0
SQLAlchemy==1.4.54
tabulator==1.53.5
toml @ file:///tmp/build/80754af9/toml_1616166611790/work
tox==3.28.0
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
unicodecsv==0.14.1
urllib3==1.26.20
virtualenv==20.17.1
xlrd==2.0.1
zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
| name: goodtables-py
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=21.4.0=pyhd3eb1b0_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- importlib_metadata=4.8.1=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- more-itertools=8.12.0=pyhd3eb1b0_0
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pip=21.2.2=py36h06a4308_0
- pluggy=0.13.1=py36h06a4308_0
- py=1.11.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pytest=6.2.4=py36h06a4308_2
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- toml=0.10.2=pyhd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zipp=3.6.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
- pip:
- boto3==1.23.10
- botocore==1.26.10
- chardet==5.0.0
- charset-normalizer==2.0.12
- click==6.7
- datapackage==0.8.9
- distlib==0.3.9
- et-xmlfile==1.1.0
- ezodf==0.3.2
- filelock==3.4.1
- future==0.18.3
- greenlet==2.0.2
- idna==3.10
- ijson==3.3.0
- importlib-metadata==4.8.3
- importlib-resources==5.4.0
- isodate==0.6.1
- jmespath==0.10.0
- jsonlines==3.1.0
- jsonschema==2.6.0
- jsontableschema==0.10.1
- linear-tsv==1.1.0
- lxml==3.8.0
- mccabe==0.7.0
- mock==5.2.0
- openpyxl==3.1.3
- platformdirs==2.4.0
- pycodestyle==2.10.0
- pydocstyle==6.3.0
- pyflakes==3.0.1
- pylama==7.7.1
- python-dateutil==2.9.0.post0
- pyyaml==6.0.1
- requests==2.27.1
- rfc3986==0.4.1
- s3transfer==0.5.2
- six==1.17.0
- snowballstemmer==2.2.0
- sqlalchemy==1.4.54
- tabulator==1.53.5
- tox==3.28.0
- unicodecsv==0.14.1
- urllib3==1.26.20
- virtualenv==20.17.1
- xlrd==2.0.1
prefix: /opt/conda/envs/goodtables-py
| [
"tests/presets/test_datapackage.py::test_preset_datapackage_non_tabular_datapackage_issue_170",
"tests/presets/test_datapackage.py::test_preset_datapackage_mixed_datapackage_issue_170",
"tests/presets/test_datapackage.py::test_preset_datapackage_invalid_json_issue_192",
"tests/presets/test_table.py::test_preset_table_but_got_datapackage_issue_187",
"tests/test_inspector.py::test_inspector_table_invalid",
"tests/test_inspector.py::test_inspector_tables_invalid",
"tests/test_inspector.py::test_inspector_warnings_no",
"tests/test_inspector.py::test_inspector_warnings_bad_datapackage_json",
"tests/test_inspector.py::test_inspector_warnings_table_limit",
"tests/test_inspector.py::test_inspector_warnings_row_limit",
"tests/test_inspector.py::test_inspector_warnings_error_limit",
"tests/test_inspector.py::test_inspector_warnings_table_and_row_limit",
"tests/test_inspector.py::test_inspector_warnings_table_and_error_limit"
]
| [
"tests/test_inspector.py::test_inspector_catch_all_open_exceptions",
"tests/test_spec.py::test_spec_is_up_to_date"
]
| [
"tests/checks/body/constraints/test_enumerable_constraint.py::test_check_enumerable_constraint",
"tests/checks/body/constraints/test_maximum_constraint.py::test_check_maximum_constraint",
"tests/checks/body/constraints/test_maximum_length_constraint.py::test_check_maximum_length_constraint",
"tests/checks/body/constraints/test_minimum_constraint.py::test_check_minimum_constraint",
"tests/checks/body/constraints/test_minimum_length_constraint.py::test_check_minimum_length_constraint",
"tests/checks/body/constraints/test_pattern_constraint.py::test_check_pattern_constraint",
"tests/checks/body/constraints/test_required_constraint.py::test_check_required_constraint",
"tests/checks/body/constraints/test_unique_constraint.py::test_check_unique_constraint",
"tests/checks/body/test_blank_row.py::test_check_blank_row",
"tests/checks/body/test_blank_row.py::test_check_blank_row_problem",
"tests/checks/body/test_duplicate_row.py::test_check_duplicate_row",
"tests/checks/body/test_duplicate_row.py::test_check_duplicate_row_problem",
"tests/checks/body/test_extra_value.py::test_check_extra_value",
"tests/checks/body/test_extra_value.py::test_check_extra_value_problem",
"tests/checks/body/test_missing_value.py::test_check_missing_value",
"tests/checks/body/test_missing_value.py::test_check_missing_value_problem",
"tests/checks/body/test_non_castable_value.py::test_check_non_castable_value",
"tests/checks/body/test_non_castable_value.py::test_check_non_castable_value_problem",
"tests/checks/head/test_blank_header.py::test_check_blank_header",
"tests/checks/head/test_blank_header.py::test_check_blank_header_problem",
"tests/checks/head/test_duplicate_header.py::test_check_duplicate_header",
"tests/checks/head/test_duplicate_header.py::test_check_duplicate_header_problem",
"tests/checks/head/test_extra_header.py::test_check_extra_header",
"tests/checks/head/test_extra_header.py::test_check_extra_header_infer",
"tests/checks/head/test_extra_header.py::test_check_extra_header_problem",
"tests/checks/head/test_missing_header.py::test_check_missing_header",
"tests/checks/head/test_missing_header.py::test_check_missing_header_problem",
"tests/checks/head/test_non_matching_header.py::test_check_non_matching_header",
"tests/checks/head/test_non_matching_header.py::test_check_non_matching_header_problem",
"tests/checks/head/test_non_matching_header.py::test_check_non_matching_header_order_fields",
"tests/checks/head/test_non_matching_header.py::test_check_non_matching_header_order_fields_problem",
"tests/presets/test_datapackage.py::test_preset_datapackage",
"tests/presets/test_nested.py::test_preset_nested",
"tests/presets/test_table.py::test_preset_table",
"tests/test_inspector.py::test_inspector_table_valid",
"tests/test_inspector.py::test_inspector_table_invalid_error_limit",
"tests/test_inspector.py::test_inspector_table_invalid_row_limit",
"tests/test_inspector.py::test_inspector_datapackage_valid",
"tests/test_inspector.py::test_inspector_datapackage_invalid",
"tests/test_inspector.py::test_inspector_datapackage_invalid_table_limit",
"tests/test_inspector.py::test_inspector_catch_all_iter_exceptions"
]
| []
| MIT License | 1,301 | [
"data/mixed_datapackage.json",
"data/non_tabular_datapackage.json",
"examples/custom_preset.py",
"goodtables/checks/body/duplicate_row.py",
"data/invalid_json.json",
"examples/dropbox.py",
"features/fail_fast_two_schema_errors.yml",
"goodtables/presets/nested.py",
"goodtables/cli.py",
"goodtables/spec.json",
"README.md",
"goodtables/inspector.py",
"goodtables/presets/datapackage.py",
"examples/ckan.py",
"goodtables/presets/table.py"
]
| [
"data/mixed_datapackage.json",
"data/non_tabular_datapackage.json",
"examples/custom_preset.py",
"goodtables/checks/body/duplicate_row.py",
"data/invalid_json.json",
"examples/dropbox.py",
"features/fail_fast_two_schema_errors.yml",
"goodtables/presets/nested.py",
"goodtables/cli.py",
"goodtables/spec.json",
"README.md",
"goodtables/inspector.py",
"goodtables/presets/datapackage.py",
"examples/ckan.py",
"goodtables/presets/table.py"
]
|
certbot__certbot-4740 | c5d11d333fdac8f2df3e2b0360df4daaa0a54a1a | 2017-05-25 19:30:09 | 7531c9891633bf777abc0230241d45399f0cbd7a | diff --git a/certbot/log.py b/certbot/log.py
index c7bc867f1..889b5c50a 100644
--- a/certbot/log.py
+++ b/certbot/log.py
@@ -70,7 +70,8 @@ def pre_arg_parse_setup():
# close() are explicitly called
util.atexit_register(logging.shutdown)
sys.excepthook = functools.partial(
- except_hook, debug='--debug' in sys.argv, log_path=temp_handler.path)
+ pre_arg_parse_except_hook, memory_handler,
+ debug='--debug' in sys.argv, log_path=temp_handler.path)
def post_arg_parse_setup(config):
@@ -103,8 +104,9 @@ def post_arg_parse_setup(config):
root_logger.removeHandler(memory_handler)
temp_handler = memory_handler.target
memory_handler.setTarget(file_handler)
+ memory_handler.flush(force=True)
memory_handler.close()
- temp_handler.delete_and_close()
+ temp_handler.close()
if config.quiet:
level = constants.QUIET_LOGGING_LEVEL
@@ -115,7 +117,7 @@ def post_arg_parse_setup(config):
logger.info('Saving debug log to %s', file_path)
sys.excepthook = functools.partial(
- except_hook, debug=config.debug, log_path=logs_dir)
+ post_arg_parse_except_hook, debug=config.debug, log_path=logs_dir)
def setup_log_file_handler(config, logfile, fmt):
@@ -194,8 +196,7 @@ class MemoryHandler(logging.handlers.MemoryHandler):
"""Buffers logging messages in memory until the buffer is flushed.
This differs from `logging.handlers.MemoryHandler` in that flushing
- only happens when it is done explicitly by calling flush() or
- close().
+ only happens when flush(force=True) is called.
"""
def __init__(self, target=None):
@@ -209,6 +210,33 @@ class MemoryHandler(logging.handlers.MemoryHandler):
else:
super(MemoryHandler, self).__init__(capacity, target=target)
+ def close(self):
+ """Close the memory handler, but don't set the target to None."""
+ # This allows the logging module which may only have a weak
+ # reference to the target handler to properly flush and close it.
+ target = self.target
+ if sys.version_info < (2, 7): # pragma: no cover
+ logging.handlers.MemoryHandler.close(self)
+ else:
+ super(MemoryHandler, self).close()
+ self.target = target
+
+ def flush(self, force=False): # pylint: disable=arguments-differ
+ """Flush the buffer if force=True.
+
+ If force=False, this call is a noop.
+
+ :param bool force: True if the buffer should be flushed.
+
+ """
+ # This method allows flush() calls in logging.shutdown to be a
+ # noop so we can control when this handler is flushed.
+ if force:
+ if sys.version_info < (2, 7): # pragma: no cover
+ logging.handlers.MemoryHandler.flush(self)
+ else:
+ super(MemoryHandler, self).flush()
+
def shouldFlush(self, record):
"""Should the buffer be automatically flushed?
@@ -224,7 +252,9 @@ class MemoryHandler(logging.handlers.MemoryHandler):
class TempHandler(logging.StreamHandler):
"""Safely logs messages to a temporary file.
- The file is created with permissions 600.
+ The file is created with permissions 600. If no log records are sent
+ to this handler, the temporary file is deleted when the handler is
+ closed.
:ivar str path: file system path to the temporary log file
@@ -238,19 +268,26 @@ class TempHandler(logging.StreamHandler):
else:
super(TempHandler, self).__init__(stream)
self.path = stream.name
+ self._delete = True
- def delete_and_close(self):
- """Close the handler and delete the temporary log file."""
- self._close(delete=True)
+ def emit(self, record):
+ """Log the specified logging record.
- def close(self):
- """Close the handler and the temporary log file."""
- self._close(delete=False)
+ :param logging.LogRecord record: Record to be formatted
+
+ """
+ self._delete = False
+ # logging handlers use old style classes in Python 2.6 so
+ # super() cannot be used
+ if sys.version_info < (2, 7): # pragma: no cover
+ logging.StreamHandler.emit(self, record)
+ else:
+ super(TempHandler, self).emit(record)
- def _close(self, delete):
+ def close(self):
"""Close the handler and the temporary log file.
- :param bool delete: True if the log file should be deleted
+ The temporary log file is deleted if it wasn't used.
"""
self.acquire()
@@ -258,8 +295,9 @@ class TempHandler(logging.StreamHandler):
# StreamHandler.close() doesn't close the stream to allow a
# stream like stderr to be used
self.stream.close()
- if delete:
+ if self._delete:
os.remove(self.path)
+ self._delete = False
if sys.version_info < (2, 7): # pragma: no cover
logging.StreamHandler.close(self)
else:
@@ -268,7 +306,34 @@ class TempHandler(logging.StreamHandler):
self.release()
-def except_hook(exc_type, exc_value, trace, debug, log_path):
+def pre_arg_parse_except_hook(memory_handler, *args, **kwargs):
+ """A simple wrapper around post_arg_parse_except_hook.
+
+ The additional functionality provided by this wrapper is the memory
+ handler will be flushed before Certbot exits. This allows us to
+ write logging messages to a temporary file if we crashed before
+ logging was fully configured.
+
+ Since sys.excepthook isn't called on SystemExit exceptions, the
+ memory handler will not be flushed in this case which prevents us
+ from creating temporary log files when argparse exits because a
+ command line argument was invalid or -h, --help, or --version was
+ provided on the command line.
+
+ :param MemoryHandler memory_handler: memory handler to flush
+ :param tuple args: args for post_arg_parse_except_hook
+ :param dict kwargs: kwargs for post_arg_parse_except_hook
+
+ """
+ try:
+ post_arg_parse_except_hook(*args, **kwargs)
+ finally:
+ # flush() is called here so messages logged during
+ # post_arg_parse_except_hook are also flushed.
+ memory_handler.flush(force=True)
+
+
+def post_arg_parse_except_hook(exc_type, exc_value, trace, debug, log_path):
"""Logs fatal exceptions and reports them to the user.
If debug is True, the full exception and traceback is shown to the
| Temporal files and dirs not removed from /tmp
Since certbot-auto 0.14.0, every time `certbot-auto renew --quiet` runs on my cron job it left behind a tmp file in /tmp.
Just an example:
```
-rw------- 1 root root 361 May 22 12:17 tmpzdqsnC
-rw------- 1 root root 361 May 22 18:17 tmptUuhXk
-rw------- 1 root root 361 May 23 00:17 tmp1T_Xes
-rw------- 1 root root 361 May 23 06:17 tmp3vaADu
-rw------- 1 root root 361 May 23 12:17 tmpSuRRuU
-rw------- 1 root root 361 May 23 18:17 tmpgtxFOp
-rw------- 1 root root 361 May 24 00:17 tmpZF3Xp3
```
The content of these files are like this:
```
2017-05-22 12:17:01,586:DEBUG:certbot.main:certbot version: 0.14.1
2017-05-22 12:17:01,586:DEBUG:certbot.main:Arguments: ['--version']
2017-05-22 12:17:01,586:DEBUG:certbot.main:Discovered plugins: PluginsRegistry(PluginEntryPoint#apache,PluginEntryPoint#manual,PluginEntryPoint#nginx,PluginEntryPoint#null,PluginEntryPoint#standalone,PluginEntryPoint#webroot)
```
Also there is a tmp dir,
`drwx------ 2 root root 4096 May 24 00:33 tmp.ccaHF7JIEe/`
Tha only have a file inside:
`-rw-r--r-- 1 root root 4614 May 19 12:28 fetch.py`
As I said, checking the tmp files, it is happening from version 0.14.0 and I think they should be removed automatically.
Here a community buddy asking the same question. https://community.letsencrypt.org/t/certbot-auto-recently-starting-to-leave-files-in-tmp/34789 | certbot/certbot | diff --git a/certbot/tests/log_test.py b/certbot/tests/log_test.py
index 13021220b..72ff076dd 100644
--- a/certbot/tests/log_test.py
+++ b/certbot/tests/log_test.py
@@ -26,7 +26,7 @@ class PreArgParseSetupTest(unittest.TestCase):
return pre_arg_parse_setup(*args, **kwargs)
@mock.patch('certbot.log.sys')
- @mock.patch('certbot.log.except_hook')
+ @mock.patch('certbot.log.pre_arg_parse_except_hook')
@mock.patch('certbot.log.logging.getLogger')
@mock.patch('certbot.log.util.atexit_register')
def test_it(self, mock_register, mock_get, mock_except_hook, mock_sys):
@@ -34,11 +34,6 @@ class PreArgParseSetupTest(unittest.TestCase):
mock_sys.version_info = sys.version_info
self._call()
- mock_register.assert_called_once_with(logging.shutdown)
- mock_sys.excepthook(1, 2, 3)
- mock_except_hook.assert_called_once_with(
- 1, 2, 3, debug=True, log_path=mock.ANY)
-
mock_root_logger = mock_get()
mock_root_logger.setLevel.assert_called_once_with(logging.DEBUG)
self.assertEqual(mock_root_logger.addHandler.call_count, 2)
@@ -54,6 +49,11 @@ class PreArgParseSetupTest(unittest.TestCase):
self.assertTrue(
isinstance(memory_handler.target, logging.StreamHandler))
+ mock_register.assert_called_once_with(logging.shutdown)
+ mock_sys.excepthook(1, 2, 3)
+ mock_except_hook.assert_called_once_with(
+ memory_handler, 1, 2, 3, debug=True, log_path=mock.ANY)
+
class PostArgParseSetupTest(test_util.TempDirTestCase):
"""Tests for certbot.log.post_arg_parse_setup."""
@@ -88,7 +88,8 @@ class PostArgParseSetupTest(test_util.TempDirTestCase):
def test_common(self):
with mock.patch('certbot.log.logging.getLogger') as mock_get_logger:
mock_get_logger.return_value = self.root_logger
- with mock.patch('certbot.log.except_hook') as mock_except_hook:
+ except_hook_path = 'certbot.log.post_arg_parse_except_hook'
+ with mock.patch(except_hook_path) as mock_except_hook:
with mock.patch('certbot.log.sys') as mock_sys:
mock_sys.version_info = sys.version_info
self._call(self.config)
@@ -203,12 +204,13 @@ class MemoryHandlerTest(unittest.TestCase):
def test_flush(self):
self._test_log_debug()
- self.handler.flush()
+ self.handler.flush(force=True)
self.assertEqual(self.stream.getvalue(), self.msg + '\n')
def test_not_flushed(self):
# By default, logging.ERROR messages and higher are flushed
self.logger.critical(self.msg)
+ self.handler.flush()
self.assertEqual(self.stream.getvalue(), '')
def test_target_reset(self):
@@ -217,7 +219,7 @@ class MemoryHandlerTest(unittest.TestCase):
new_stream = six.StringIO()
new_stream_handler = logging.StreamHandler(new_stream)
self.handler.setTarget(new_stream_handler)
- self.handler.flush()
+ self.handler.flush(force=True)
self.assertEqual(self.stream.getvalue(), '')
self.assertEqual(new_stream.getvalue(), self.msg + '\n')
new_stream_handler.close()
@@ -234,31 +236,50 @@ class TempHandlerTest(unittest.TestCase):
self.handler = TempHandler()
def tearDown(self):
- if not self.closed:
- self.handler.delete_and_close()
+ self.handler.close()
def test_permissions(self):
self.assertTrue(
util.check_permissions(self.handler.path, 0o600, os.getuid()))
def test_delete(self):
- self.handler.delete_and_close()
- self.closed = True
+ self.handler.close()
self.assertFalse(os.path.exists(self.handler.path))
def test_no_delete(self):
+ self.handler.emit(mock.MagicMock())
self.handler.close()
- self.closed = True
self.assertTrue(os.path.exists(self.handler.path))
os.remove(self.handler.path)
-class ExceptHookTest(unittest.TestCase):
- """Tests for certbot.log.except_hook."""
+class PreArgParseExceptHookTest(unittest.TestCase):
+ """Tests for certbot.log.pre_arg_parse_except_hook."""
+ @classmethod
+ def _call(cls, *args, **kwargs):
+ from certbot.log import pre_arg_parse_except_hook
+ return pre_arg_parse_except_hook(*args, **kwargs)
+
+ @mock.patch('certbot.log.post_arg_parse_except_hook')
+ def test_it(self, mock_post_arg_parse_except_hook):
+ # pylint: disable=star-args
+ memory_handler = mock.MagicMock()
+ args = ('some', 'args',)
+ kwargs = {'some': 'kwargs'}
+
+ self._call(memory_handler, *args, **kwargs)
+
+ mock_post_arg_parse_except_hook.assert_called_once_with(
+ *args, **kwargs)
+ memory_handler.flush.assert_called_once_with(force=True)
+
+
+class PostArgParseExceptHookTest(unittest.TestCase):
+ """Tests for certbot.log.post_arg_parse_except_hook."""
@classmethod
def _call(cls, *args, **kwargs):
- from certbot.log import except_hook
- return except_hook(*args, **kwargs)
+ from certbot.log import post_arg_parse_except_hook
+ return post_arg_parse_except_hook(*args, **kwargs)
def setUp(self):
self.error_msg = 'test error message'
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 3
},
"num_modified_files": 1
} | 0.14 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-xdist",
"pytest-mock",
"pytest-asyncio"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements/base.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | acme==0.14.1
astroid==1.3.5
asttokens==3.0.0
backports.tarfile==1.2.0
cachetools==5.5.2
-e git+https://github.com/certbot/certbot.git@c5d11d333fdac8f2df3e2b0360df4daaa0a54a1a#egg=certbot
certifi==2025.1.31
cffi==1.17.1
chardet==5.2.0
charset-normalizer==3.4.1
colorama==0.4.6
ConfigArgParse==1.7
configobj==5.0.9
coverage==7.8.0
cryptography==44.0.2
decorator==5.2.1
distlib==0.3.9
docutils==0.21.2
exceptiongroup==1.2.2
execnet==2.1.1
executing==2.2.0
filelock==3.18.0
id==1.5.0
idna==3.10
importlib-metadata==6.11.0
iniconfig==2.1.0
ipdb==0.13.13
ipython==8.18.1
jaraco.classes==3.4.0
jaraco.context==6.0.1
jaraco.functools==4.1.0
jedi==0.19.2
jeepney==0.9.0
keyring==25.6.0
logilab-common==2.1.0
markdown-it-py==3.0.0
matplotlib-inline==0.1.7
mdurl==0.1.2
mock==5.2.0
more-itertools==10.6.0
mypy-extensions==1.0.0
nh3==0.2.21
nose==1.3.7
packaging==24.2
parsedatetime==2.6
parso==0.8.4
pexpect==4.9.0
platformdirs==4.3.7
pluggy==1.5.0
prompt_toolkit==3.0.50
ptyprocess==0.7.0
pure_eval==0.2.3
pycparser==2.22
Pygments==2.19.1
pylint==1.4.2
pyOpenSSL==25.0.0
pyproject-api==1.9.0
pyRFC3339==2.0.1
pytest==8.3.5
pytest-asyncio==0.26.0
pytest-cov==6.0.0
pytest-mock==3.14.0
pytest-xdist==3.6.1
pytz==2025.2
readme_renderer==44.0
requests==2.32.3
requests-toolbelt==1.0.0
rfc3986==2.0.0
rich==14.0.0
SecretStorage==3.3.3
six==1.17.0
stack-data==0.6.3
tomli==2.2.1
tox==4.25.0
traitlets==5.14.3
twine==6.1.0
typing_extensions==4.13.0
urllib3==2.3.0
virtualenv==20.29.3
wcwidth==0.2.13
zipp==3.21.0
zope.component==6.0
zope.event==5.0
zope.hookable==7.0
zope.interface==7.2
| name: certbot
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- acme==0.14.1
- astroid==1.3.5
- asttokens==3.0.0
- backports-tarfile==1.2.0
- cachetools==5.5.2
- certifi==2025.1.31
- cffi==1.17.1
- chardet==5.2.0
- charset-normalizer==3.4.1
- colorama==0.4.6
- configargparse==1.7
- configobj==5.0.9
- coverage==7.8.0
- cryptography==44.0.2
- decorator==5.2.1
- distlib==0.3.9
- docutils==0.21.2
- exceptiongroup==1.2.2
- execnet==2.1.1
- executing==2.2.0
- filelock==3.18.0
- id==1.5.0
- idna==3.10
- importlib-metadata==6.11.0
- iniconfig==2.1.0
- ipdb==0.13.13
- ipython==8.18.1
- jaraco-classes==3.4.0
- jaraco-context==6.0.1
- jaraco-functools==4.1.0
- jedi==0.19.2
- jeepney==0.9.0
- keyring==25.6.0
- logilab-common==2.1.0
- markdown-it-py==3.0.0
- matplotlib-inline==0.1.7
- mdurl==0.1.2
- mock==5.2.0
- more-itertools==10.6.0
- mypy-extensions==1.0.0
- nh3==0.2.21
- nose==1.3.7
- packaging==24.2
- parsedatetime==2.6
- parso==0.8.4
- pexpect==4.9.0
- platformdirs==4.3.7
- pluggy==1.5.0
- prompt-toolkit==3.0.50
- ptyprocess==0.7.0
- pure-eval==0.2.3
- pycparser==2.22
- pygments==2.19.1
- pylint==1.4.2
- pyopenssl==25.0.0
- pyproject-api==1.9.0
- pyrfc3339==2.0.1
- pytest==8.3.5
- pytest-asyncio==0.26.0
- pytest-cov==6.0.0
- pytest-mock==3.14.0
- pytest-xdist==3.6.1
- pytz==2025.2
- readme-renderer==44.0
- requests==2.32.3
- requests-toolbelt==1.0.0
- rfc3986==2.0.0
- rich==14.0.0
- secretstorage==3.3.3
- six==1.17.0
- stack-data==0.6.3
- tomli==2.2.1
- tox==4.25.0
- traitlets==5.14.3
- twine==6.1.0
- typing-extensions==4.13.0
- urllib3==2.3.0
- virtualenv==20.29.3
- wcwidth==0.2.13
- zipp==3.21.0
- zope-component==6.0
- zope-event==5.0
- zope-hookable==7.0
- zope-interface==7.2
prefix: /opt/conda/envs/certbot
| [
"certbot/tests/log_test.py::PreArgParseSetupTest::test_it",
"certbot/tests/log_test.py::PostArgParseSetupTest::test_common",
"certbot/tests/log_test.py::PostArgParseSetupTest::test_debug",
"certbot/tests/log_test.py::PostArgParseSetupTest::test_quiet",
"certbot/tests/log_test.py::MemoryHandlerTest::test_flush",
"certbot/tests/log_test.py::MemoryHandlerTest::test_not_flushed",
"certbot/tests/log_test.py::MemoryHandlerTest::test_target_reset",
"certbot/tests/log_test.py::TempHandlerTest::test_delete",
"certbot/tests/log_test.py::PreArgParseExceptHookTest::test_it",
"certbot/tests/log_test.py::PostArgParseExceptHookTest::test_acme_error",
"certbot/tests/log_test.py::PostArgParseExceptHookTest::test_base_exception",
"certbot/tests/log_test.py::PostArgParseExceptHookTest::test_custom_error",
"certbot/tests/log_test.py::PostArgParseExceptHookTest::test_debug",
"certbot/tests/log_test.py::PostArgParseExceptHookTest::test_other_error"
]
| []
| [
"certbot/tests/log_test.py::SetupLogFileHandlerTest::test_failure",
"certbot/tests/log_test.py::SetupLogFileHandlerTest::test_success",
"certbot/tests/log_test.py::ColoredStreamHandlerTest::test_format",
"certbot/tests/log_test.py::ColoredStreamHandlerTest::test_format_and_red_level",
"certbot/tests/log_test.py::TempHandlerTest::test_no_delete",
"certbot/tests/log_test.py::TempHandlerTest::test_permissions",
"certbot/tests/log_test.py::ExitWithLogPathTest::test_log_dir",
"certbot/tests/log_test.py::ExitWithLogPathTest::test_log_file"
]
| []
| Apache License 2.0 | 1,302 | [
"certbot/log.py"
]
| [
"certbot/log.py"
]
|
|
vimist__watch-do-6 | 909d01ff409b9a7a107f7b563e01f6084e358a71 | 2017-05-25 19:45:30 | 909d01ff409b9a7a107f7b563e01f6084e358a71 | diff --git a/watch_do/cli.py b/watch_do/cli.py
index 2a5474b..7a70bfc 100644
--- a/watch_do/cli.py
+++ b/watch_do/cli.py
@@ -230,7 +230,7 @@ def watch_do():
start_time = time.time()
for file_name in changed_files:
for output in doer_manager.run_doers(file_name):
- print(output)
+ print(output, end='')
if not args.multi:
break
diff --git a/watch_do/doer_manager.py b/watch_do/doer_manager.py
index 1ed7ab8..a11c347 100644
--- a/watch_do/doer_manager.py
+++ b/watch_do/doer_manager.py
@@ -117,14 +117,11 @@ class DoerManager:
return doers
def run_doers(self, file_name):
- """Run each doer in turn and return their output.
+ """Run each doer in turn and yield its output.
- Returns:
- list: A list of strings that contain the combined output of stdout
- and stderr from the doers.
+ Yields:
+ str: A string that contains the combined output of stdout and
+ stderr from the doers.
"""
- results = []
for doer in self.doers:
- results.append(doer.run(file_name))
-
- return results
+ yield from doer.run(file_name)
diff --git a/watch_do/doers/doer.py b/watch_do/doers/doer.py
index aa14900..952db28 100644
--- a/watch_do/doers/doer.py
+++ b/watch_do/doers/doer.py
@@ -85,7 +85,8 @@ class Doer(metaclass=ABCMeta):
Parameters:
file_name (str): The file name to run this doer against.
- Returns:
- str: A string, containing the status/output of the action.
+ Yields:
+ str: A string containing the output (possibly the partial output)
+ of the command, both stdout and stderr.
"""
pass
diff --git a/watch_do/doers/shell.py b/watch_do/doers/shell.py
index e12c6e4..fcc8364 100644
--- a/watch_do/doers/shell.py
+++ b/watch_do/doers/shell.py
@@ -35,20 +35,21 @@ class Shell(Doer):
file_name (str): The ``file_name`` that this doer should run
against.
- Returns:
- str: A string containing the output of the command, both stdout and
- stderr.
+ Yields:
+ str: A string containing the output (possibly the partial output)
+ of the command, both stdout and stderr.
"""
command = Doer._interpolate_file_name(self.command, file_name)
- try:
- output = subprocess.check_output(
- command, stderr=subprocess.STDOUT, shell=True)
- except subprocess.CalledProcessError as ex:
- output = (
- ex.output +
- b'\nCommand failed to run, exited with error code ' +
- bytes(str(ex.returncode), 'utf-8')
- )
-
- return output.decode('utf-8')
+ with subprocess.Popen(command, stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT, bufsize=1,
+ shell=True) as process:
+ for line in process.stdout:
+ yield line.decode('UTF-8')
+
+ process.wait()
+
+ # If the command returned a non 0 exit code, yield an error message
+ if process.returncode > 0:
+ yield ('Command failed to run, exited with error code {}'
+ .format(process.returncode))
| Incremental output
When a doer takes more than a few seconds to run and is incrementally outputting data, it would be good to reflect this in the output of Watch Do, rather than buffering the whole output and then outputting it all at the end.
An example would be running the following command:
`watch-do -w file -d 'for i in {1..10} :; do date; sleep 1; done'`
This will take over 10 seconds before _any_ output is displayed to the terminal. | vimist/watch-do | diff --git a/tests/doer_manager.py b/tests/doer_manager.py
index 015c4b2..e024d71 100644
--- a/tests/doer_manager.py
+++ b/tests/doer_manager.py
@@ -45,9 +45,9 @@ class TestDoerManager(TestCase):
"""Check that the doers are being run successfully.
"""
self.assertEqual(
- self.doer_manager.run_doers('/some/random/file'),
+ list(self.doer_manager.run_doers('/some/random/file')),
[
'/some/random/file has changed.\n',
- '\nCommand failed to run, exited with error code 1',
+ 'Command failed to run, exited with error code 1',
'Bye\n'
])
diff --git a/tests/doers/shell.py b/tests/doers/shell.py
index 8d7152d..317c54f 100644
--- a/tests/doers/shell.py
+++ b/tests/doers/shell.py
@@ -21,15 +21,15 @@ class TestShell(TestCase):
"""
shell = Shell('echo -n "This file changed: %f"')
self.assertEqual(
- shell.run('/some/random/file'),
- 'This file changed: /some/random/file')
+ list(shell.run('/some/random/file')),
+ ['This file changed: /some/random/file'])
shell = Shell('echo -n "Hello "; echo -n "World" >&2; echo -n "...";')
self.assertEqual(
- shell.run(''),
- 'Hello World...')
+ list(shell.run('')),
+ ['Hello World...'])
shell = Shell('echo -n "Hello"; exit 1')
self.assertEqual(
- shell.run(''),
- 'Hello\nCommand failed to run, exited with error code 1')
+ list(shell.run('')),
+ ['Hello', 'Command failed to run, exited with error code 1'])
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 0,
"test_score": 3
},
"num_modified_files": 4
} | 1.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.16
astroid==1.4.9
babel==2.17.0
bumpversion==0.5.3
certifi==2025.1.31
charset-normalizer==3.4.1
coverage==7.8.0
docutils==0.21.2
exceptiongroup==1.2.2
idna==3.10
imagesize==1.4.1
iniconfig==2.1.0
isort==6.0.1
Jinja2==3.1.6
lazy-object-proxy==1.10.0
MarkupSafe==3.0.2
mccabe==0.7.0
packaging==24.2
pluggy==1.5.0
Pygments==2.19.1
pylint==1.6.5
pytest==8.3.5
pytest-cov==6.0.0
requests==2.32.3
six==1.17.0
snowballstemmer==2.2.0
Sphinx==1.5.2
tomli==2.2.1
urllib3==2.3.0
-e git+https://github.com/vimist/watch-do.git@909d01ff409b9a7a107f7b563e01f6084e358a71#egg=watch_do
wrapt==1.17.2
| name: watch-do
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.16
- astroid==1.4.9
- babel==2.17.0
- bumpversion==0.5.3
- certifi==2025.1.31
- charset-normalizer==3.4.1
- coverage==7.8.0
- docutils==0.21.2
- exceptiongroup==1.2.2
- idna==3.10
- imagesize==1.4.1
- iniconfig==2.1.0
- isort==6.0.1
- jinja2==3.1.6
- lazy-object-proxy==1.10.0
- markupsafe==3.0.2
- mccabe==0.7.0
- packaging==24.2
- pluggy==1.5.0
- pygments==2.19.1
- pylint==1.6.5
- pytest==8.3.5
- pytest-cov==6.0.0
- requests==2.32.3
- six==1.17.0
- snowballstemmer==2.2.0
- sphinx==1.5.2
- tomli==2.2.1
- urllib3==2.3.0
- wrapt==1.17.2
prefix: /opt/conda/envs/watch-do
| [
"tests/doer_manager.py::TestDoerManager::test_run_doers",
"tests/doers/shell.py::TestShell::test_run"
]
| []
| [
"tests/doer_manager.py::TestDoerManager::test___init__",
"tests/doer_manager.py::TestDoerManager::test__process_commands",
"tests/doers/shell.py::TestShell::test__init__"
]
| []
| null | 1,303 | [
"watch_do/doers/shell.py",
"watch_do/doer_manager.py",
"watch_do/cli.py",
"watch_do/doers/doer.py"
]
| [
"watch_do/doers/shell.py",
"watch_do/doer_manager.py",
"watch_do/cli.py",
"watch_do/doers/doer.py"
]
|
|
tornadoweb__tornado-2058 | 09e255779dc1db78a0c76e720660f802d7dc9cab | 2017-05-26 13:21:43 | 03f13800e854a6fc9e6efa2168e694d9599348bd | jehiah: RFR @bdarnell
I've also done some preliminary validation that this solves the original issue I ran into.
jehiah: 🤦♂️ w/r/t executable bit. It seems my editor was doing that because those files begin with `#!/usr/bin/env python`. Is there a reason for/value in that if they should not be executable?
bdarnell: Thanks, just one more thing to figure out: the large-body chunked test cases are failing for some reason on windows (the fact that this happens on 2 of the 4 configurations on appveyor doesn't mean anything - we only run the full test suite on 2 configurations because tornado on windows is so slow). The `ExpectLog` is passing so the correct exception is getting raised. Maybe closing immediately after `yield stream.write` is not actually waiting for the outgoing data to be sent on windows because there is also unread data in the incoming socket buffer. This isn't too important so maybe these tests should just accept both 400 and 599 response codes.
There's no reason for the `#!/usr/bin/env python` line to be present in files that are not executable; this line could just be removed.
jehiah: @bdarnell updated to address flaky tests; if this looks good i'll squash/cleanup commits. | diff --git a/tornado/http1connection.py b/tornado/http1connection.py
index c6d3e336..6069e027 100644
--- a/tornado/http1connection.py
+++ b/tornado/http1connection.py
@@ -250,6 +250,8 @@ class HTTP1Connection(httputil.HTTPConnection):
except httputil.HTTPInputError as e:
gen_log.info("Malformed HTTP message from %s: %s",
self.context, e)
+ if not self.is_client:
+ yield self.stream.write(b'HTTP/1.1 400 Bad Request\r\n\r\n')
self.close()
raise gen.Return(False)
finally:
diff --git a/tornado/httputil.py b/tornado/httputil.py
old mode 100644
new mode 100755
index 818ea914..5b87ce61
--- a/tornado/httputil.py
+++ b/tornado/httputil.py
@@ -829,6 +829,8 @@ def parse_request_start_line(line):
try:
method, path, version = line.split(" ")
except ValueError:
+ # https://tools.ietf.org/html/rfc7230#section-3.1.1
+ # invalid request-line SHOULD respond with a 400 (Bad Request)
raise HTTPInputError("Malformed HTTP request line")
if not re.match(r"^HTTP/1\.[0-9]$", version):
raise HTTPInputError(
| Empty reply on malformed HTTP request
When an incorrect HTTP request is performed, tornado makes no reply (log + return in code). I suggest returning an 400 error code.
For example, when working behind an nginx proxy-server, it causes a 502 error code to the end-user and a record in nginx's error log, which is incorrect (should be 400 and a warning).
| tornadoweb/tornado | diff --git a/tornado/test/httpserver_test.py b/tornado/test/httpserver_test.py
index f5f91a9d..4169a43a 100644
--- a/tornado/test/httpserver_test.py
+++ b/tornado/test/httpserver_test.py
@@ -29,18 +29,19 @@ from io import BytesIO
def read_stream_body(stream, callback):
"""Reads an HTTP response from `stream` and runs callback with its
- headers and body."""
+ start_line, headers and body."""
chunks = []
class Delegate(HTTPMessageDelegate):
def headers_received(self, start_line, headers):
self.headers = headers
+ self.start_line = start_line
def data_received(self, chunk):
chunks.append(chunk)
def finish(self):
- callback((self.headers, b''.join(chunks)))
+ callback((self.start_line, self.headers, b''.join(chunks)))
conn = HTTP1Connection(stream, True)
conn.read_response(Delegate())
@@ -217,7 +218,7 @@ class HTTPConnectionTest(AsyncHTTPTestCase):
[utf8("Content-Length: %d" % len(body))]) +
newline + newline + body)
read_stream_body(stream, self.stop)
- headers, body = self.wait()
+ start_line, headers, body = self.wait()
return body
def test_multipart_form(self):
@@ -406,7 +407,15 @@ class HTTPServerRawTest(AsyncHTTPTestCase):
self.io_loop.add_timeout(datetime.timedelta(seconds=0.001), self.stop)
self.wait()
- def test_malformed_first_line(self):
+ def test_malformed_first_line_response(self):
+ self.stream.write(b'asdf\r\n\r\n')
+ read_stream_body(self.stream, self.stop)
+ start_line, headers, response = self.wait()
+ self.assertEqual('HTTP/1.1', start_line.version)
+ self.assertEqual(400, start_line.code)
+ self.assertEqual('Bad Request', start_line.reason)
+
+ def test_malformed_first_line_log(self):
with ExpectLog(gen_log, '.*Malformed HTTP request line'):
self.stream.write(b'asdf\r\n\r\n')
# TODO: need an async version of ExpectLog so we don't need
@@ -438,7 +447,7 @@ bar
""".replace(b"\n", b"\r\n"))
read_stream_body(self.stream, self.stop)
- headers, response = self.wait()
+ start_line, headers, response = self.wait()
self.assertEqual(json_decode(response), {u'foo': [u'bar']})
def test_chunked_request_uppercase(self):
@@ -457,7 +466,7 @@ bar
""".replace(b"\n", b"\r\n"))
read_stream_body(self.stream, self.stop)
- headers, response = self.wait()
+ start_line, headers, response = self.wait()
self.assertEqual(json_decode(response), {u'foo': [u'bar']})
def test_invalid_content_length(self):
@@ -627,7 +636,7 @@ class UnixSocketTest(AsyncTestCase):
self.stream.write(b"garbage\r\n\r\n")
self.stream.read_until_close(self.stop)
response = self.wait()
- self.assertEqual(response, b"")
+ self.assertEqual(response, b"HTTP/1.1 400 Bad Request\r\n\r\n")
class KeepAliveTest(AsyncHTTPTestCase):
@@ -1036,24 +1045,26 @@ class BodyLimitsTest(AsyncHTTPTestCase):
def test_large_body_buffered(self):
with ExpectLog(gen_log, '.*Content-Length too long'):
response = self.fetch('/buffered', method='PUT', body=b'a' * 10240)
- self.assertEqual(response.code, 599)
+ self.assertEqual(response.code, 400)
def test_large_body_buffered_chunked(self):
with ExpectLog(gen_log, '.*chunked body too large'):
response = self.fetch('/buffered', method='PUT',
body_producer=lambda write: write(b'a' * 10240))
- self.assertEqual(response.code, 599)
+ # this test is flaky on windows; accept 400 (expected) or 599
+ self.assertIn(response.code, [400, 599])
def test_large_body_streaming(self):
with ExpectLog(gen_log, '.*Content-Length too long'):
response = self.fetch('/streaming', method='PUT', body=b'a' * 10240)
- self.assertEqual(response.code, 599)
+ self.assertEqual(response.code, 400)
def test_large_body_streaming_chunked(self):
with ExpectLog(gen_log, '.*chunked body too large'):
response = self.fetch('/streaming', method='PUT',
body_producer=lambda write: write(b'a' * 10240))
- self.assertEqual(response.code, 599)
+ # this test is flaky on windows; accept 400 (expected) or 599
+ self.assertIn(response.code, [400, 599])
def test_large_body_streaming_override(self):
response = self.fetch('/streaming?expected_size=10240', method='PUT',
@@ -1090,14 +1101,14 @@ class BodyLimitsTest(AsyncHTTPTestCase):
stream.write(b'PUT /streaming?expected_size=10240 HTTP/1.1\r\n'
b'Content-Length: 10240\r\n\r\n')
stream.write(b'a' * 10240)
- headers, response = yield gen.Task(read_stream_body, stream)
+ start_line, headers, response = yield gen.Task(read_stream_body, stream)
self.assertEqual(response, b'10240')
# Without the ?expected_size parameter, we get the old default value
stream.write(b'PUT /streaming HTTP/1.1\r\n'
b'Content-Length: 10240\r\n\r\n')
with ExpectLog(gen_log, '.*Content-Length too long'):
data = yield stream.read_until_close()
- self.assertEqual(data, b'')
+ self.assertEqual(data, b'HTTP/1.1 400 Bad Request\r\n\r\n')
finally:
stream.close()
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 1,
"test_score": 3
},
"num_modified_files": 2
} | 4.5 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"futures",
"mock",
"monotonic",
"trollius",
"sphinx",
"sphinx_rtd_theme",
"codecov",
"virtualenv",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.6",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.13
attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
Babel==2.11.0
certifi==2021.5.30
charset-normalizer==2.0.12
codecov==2.1.13
coverage==6.2
distlib==0.3.9
docutils==0.18.1
filelock==3.4.1
futures==2.2.0
idna==3.10
imagesize==1.4.1
importlib-metadata==4.8.3
importlib-resources==5.4.0
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
Jinja2==3.0.3
MarkupSafe==2.0.1
mock==5.2.0
monotonic==1.6
more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
platformdirs==2.4.0
pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work
py @ file:///opt/conda/conda-bld/py_1644396412707/work
Pygments==2.14.0
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
pytest==6.2.4
pytz==2025.2
requests==2.27.1
six==1.17.0
snowballstemmer==2.2.0
Sphinx==5.3.0
sphinx-rtd-theme==2.0.0
sphinxcontrib-applehelp==1.0.2
sphinxcontrib-devhelp==1.0.2
sphinxcontrib-htmlhelp==2.0.0
sphinxcontrib-jquery==4.1
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==1.0.3
sphinxcontrib-serializinghtml==1.1.5
toml @ file:///tmp/build/80754af9/toml_1616166611790/work
-e git+https://github.com/tornadoweb/tornado.git@09e255779dc1db78a0c76e720660f802d7dc9cab#egg=tornado
trollius==2.1.post2
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
urllib3==1.26.20
virtualenv==20.17.1
zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
| name: tornado
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=21.4.0=pyhd3eb1b0_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- importlib_metadata=4.8.1=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- more-itertools=8.12.0=pyhd3eb1b0_0
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pip=21.2.2=py36h06a4308_0
- pluggy=0.13.1=py36h06a4308_0
- py=1.11.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pytest=6.2.4=py36h06a4308_2
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- toml=0.10.2=pyhd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zipp=3.6.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.13
- babel==2.11.0
- charset-normalizer==2.0.12
- codecov==2.1.13
- coverage==6.2
- distlib==0.3.9
- docutils==0.18.1
- filelock==3.4.1
- futures==2.2.0
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==4.8.3
- importlib-resources==5.4.0
- jinja2==3.0.3
- markupsafe==2.0.1
- mock==5.2.0
- monotonic==1.6
- platformdirs==2.4.0
- pygments==2.14.0
- pytz==2025.2
- requests==2.27.1
- six==1.17.0
- snowballstemmer==2.2.0
- sphinx==5.3.0
- sphinx-rtd-theme==2.0.0
- sphinxcontrib-applehelp==1.0.2
- sphinxcontrib-devhelp==1.0.2
- sphinxcontrib-htmlhelp==2.0.0
- sphinxcontrib-jquery==4.1
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==1.0.3
- sphinxcontrib-serializinghtml==1.1.5
- trollius==2.1.post2
- urllib3==1.26.20
- virtualenv==20.17.1
prefix: /opt/conda/envs/tornado
| [
"tornado/test/httpserver_test.py::HTTPServerRawTest::test_malformed_first_line_response"
]
| [
"tornado/test/httpserver_test.py::HTTPServerRawTest::test_invalid_content_length",
"tornado/test/httpserver_test.py::HTTPServerRawTest::test_malformed_first_line_log",
"tornado/test/httpserver_test.py::HTTPServerRawTest::test_malformed_headers",
"tornado/test/httpserver_test.py::UnixSocketTest::test_unix_socket_bad_request",
"tornado/test/httpserver_test.py::BodyLimitsTest::test_body_size_override_reset",
"tornado/test/httpserver_test.py::BodyLimitsTest::test_large_body_buffered",
"tornado/test/httpserver_test.py::BodyLimitsTest::test_large_body_buffered_chunked",
"tornado/test/httpserver_test.py::BodyLimitsTest::test_large_body_streaming",
"tornado/test/httpserver_test.py::BodyLimitsTest::test_large_body_streaming_chunked",
"tornado/test/httpserver_test.py::BodyLimitsTest::test_timeout"
]
| [
"tornado/test/httpserver_test.py::SSLv23Test::test_error_logging",
"tornado/test/httpserver_test.py::SSLv23Test::test_large_post",
"tornado/test/httpserver_test.py::SSLv23Test::test_non_ssl_request",
"tornado/test/httpserver_test.py::SSLv23Test::test_ssl",
"tornado/test/httpserver_test.py::SSLv3Test::test_error_logging",
"tornado/test/httpserver_test.py::SSLv3Test::test_large_post",
"tornado/test/httpserver_test.py::SSLv3Test::test_non_ssl_request",
"tornado/test/httpserver_test.py::SSLv3Test::test_ssl",
"tornado/test/httpserver_test.py::TLSv1Test::test_error_logging",
"tornado/test/httpserver_test.py::TLSv1Test::test_large_post",
"tornado/test/httpserver_test.py::TLSv1Test::test_non_ssl_request",
"tornado/test/httpserver_test.py::TLSv1Test::test_ssl",
"tornado/test/httpserver_test.py::SSLContextTest::test_error_logging",
"tornado/test/httpserver_test.py::SSLContextTest::test_large_post",
"tornado/test/httpserver_test.py::SSLContextTest::test_non_ssl_request",
"tornado/test/httpserver_test.py::SSLContextTest::test_ssl",
"tornado/test/httpserver_test.py::BadSSLOptionsTest::test_missing_arguments",
"tornado/test/httpserver_test.py::BadSSLOptionsTest::test_missing_key",
"tornado/test/httpserver_test.py::HTTPConnectionTest::test_100_continue",
"tornado/test/httpserver_test.py::HTTPConnectionTest::test_multipart_form",
"tornado/test/httpserver_test.py::HTTPConnectionTest::test_newlines",
"tornado/test/httpserver_test.py::HTTPServerTest::test_double_slash",
"tornado/test/httpserver_test.py::HTTPServerTest::test_empty_post_parameters",
"tornado/test/httpserver_test.py::HTTPServerTest::test_empty_query_string",
"tornado/test/httpserver_test.py::HTTPServerTest::test_malformed_body",
"tornado/test/httpserver_test.py::HTTPServerTest::test_query_string_encoding",
"tornado/test/httpserver_test.py::HTTPServerTest::test_types",
"tornado/test/httpserver_test.py::HTTPServerRawTest::test_chunked_request_body",
"tornado/test/httpserver_test.py::HTTPServerRawTest::test_chunked_request_uppercase",
"tornado/test/httpserver_test.py::HTTPServerRawTest::test_empty_request",
"tornado/test/httpserver_test.py::XHeaderTest::test_ip_headers",
"tornado/test/httpserver_test.py::XHeaderTest::test_scheme_headers",
"tornado/test/httpserver_test.py::XHeaderTest::test_trusted_downstream",
"tornado/test/httpserver_test.py::SSLXHeaderTest::test_request_without_xprotocol",
"tornado/test/httpserver_test.py::ManualProtocolTest::test_manual_protocol",
"tornado/test/httpserver_test.py::UnixSocketTest::test_unix_socket",
"tornado/test/httpserver_test.py::KeepAliveTest::test_cancel_during_download",
"tornado/test/httpserver_test.py::KeepAliveTest::test_finish_while_closed",
"tornado/test/httpserver_test.py::KeepAliveTest::test_http10",
"tornado/test/httpserver_test.py::KeepAliveTest::test_http10_keepalive",
"tornado/test/httpserver_test.py::KeepAliveTest::test_http10_keepalive_extra_crlf",
"tornado/test/httpserver_test.py::KeepAliveTest::test_keepalive_chunked",
"tornado/test/httpserver_test.py::KeepAliveTest::test_pipelined_cancel",
"tornado/test/httpserver_test.py::KeepAliveTest::test_pipelined_requests",
"tornado/test/httpserver_test.py::KeepAliveTest::test_request_close",
"tornado/test/httpserver_test.py::KeepAliveTest::test_two_requests",
"tornado/test/httpserver_test.py::GzipTest::test_gzip",
"tornado/test/httpserver_test.py::GzipTest::test_uncompressed",
"tornado/test/httpserver_test.py::GzipUnsupportedTest::test_gzip_unsupported",
"tornado/test/httpserver_test.py::GzipUnsupportedTest::test_uncompressed",
"tornado/test/httpserver_test.py::StreamingChunkSizeTest::test_chunked_body",
"tornado/test/httpserver_test.py::StreamingChunkSizeTest::test_chunked_compressed",
"tornado/test/httpserver_test.py::StreamingChunkSizeTest::test_compressed_body",
"tornado/test/httpserver_test.py::StreamingChunkSizeTest::test_regular_body",
"tornado/test/httpserver_test.py::MaxHeaderSizeTest::test_large_headers",
"tornado/test/httpserver_test.py::MaxHeaderSizeTest::test_small_headers",
"tornado/test/httpserver_test.py::IdleTimeoutTest::test_idle_after_use",
"tornado/test/httpserver_test.py::IdleTimeoutTest::test_unused_connection",
"tornado/test/httpserver_test.py::BodyLimitsTest::test_large_body_streaming_chunked_override",
"tornado/test/httpserver_test.py::BodyLimitsTest::test_large_body_streaming_override",
"tornado/test/httpserver_test.py::BodyLimitsTest::test_small_body",
"tornado/test/httpserver_test.py::LegacyInterfaceTest::test_legacy_interface"
]
| []
| Apache License 2.0 | 1,304 | [
"tornado/httputil.py",
"tornado/http1connection.py"
]
| [
"tornado/httputil.py",
"tornado/http1connection.py"
]
|
tobgu__pyrsistent-107 | decc5bbb11a3c795ad7553760f1bfc29f370162e | 2017-05-26 13:39:19 | decc5bbb11a3c795ad7553760f1bfc29f370162e | diff --git a/pyrsistent/_checked_types.py b/pyrsistent/_checked_types.py
index 492a862..8c768aa 100644
--- a/pyrsistent/_checked_types.py
+++ b/pyrsistent/_checked_types.py
@@ -83,15 +83,36 @@ def wrap_invariant(invariant):
return f
+def _all_dicts(bases, seen=None):
+ """
+ Yield each class in ``bases`` and each of their base classes.
+ """
+ if seen is None:
+ seen = set()
+ for cls in bases:
+ if cls in seen:
+ continue
+ seen.add(cls)
+ yield cls.__dict__
+ for b in _all_dicts(cls.__bases__, seen):
+ yield b
+
+
def store_invariants(dct, bases, destination_name, source_name):
# Invariants are inherited
- invariants = [dct[source_name]] if source_name in dct else []
- invariants += [b.__dict__[source_name] for b in bases if source_name in b.__dict__]
+ invariants = []
+ for ns in [dct] + list(_all_dicts(bases)):
+ try:
+ invariant = ns[source_name]
+ except KeyError:
+ continue
+ invariants.append(invariant)
+
if not all(callable(invariant) for invariant in invariants):
raise TypeError('Invariants must be callable')
-
dct[destination_name] = tuple(wrap_invariant(inv) for inv in invariants)
+
class _CheckedTypeMeta(type):
def __new__(mcs, name, bases, dct):
_store_types(dct, bases, '_checked_types', '__type__')
| Not all inherited `__invariant__` definitions are discovered by PClassMeta / store_invariants
Consider this class hierarchy:
```
from pyrsistent import PClass
class W(object):
def __invariant__(self):
return [(False, "W")]
class Y(W, PClass):
pass
```
Attempting to instantiate this `Y` results in a failure as expected (at least by me):
```
pyrsistent._checked_types.InvariantException:
Global invariant failed, invariant_errors=[('W',)], missing_fields=[]
```
Now compare to this class hierarchy:
```
from pyrsistent import PClass
class W(object):
def __invariant__(self):
return [(False, "W")]
class X(W):
pass
class Y(X, PClass):
pass
```
Attempting to instantiate *this* `Y` succeeds. | tobgu/pyrsistent | diff --git a/tests/class_test.py b/tests/class_test.py
index d029e38..8320e97 100644
--- a/tests/class_test.py
+++ b/tests/class_test.py
@@ -283,6 +283,49 @@ def test_multiple_global_invariants():
assert e.invariant_errors == (('x', 'y'),)
+def test_inherited_global_invariants():
+ class Distant(object):
+ def __invariant__(self):
+ return [(self.distant, "distant")]
+
+ class Nearby(Distant):
+ def __invariant__(self):
+ return [(self.nearby, "nearby")]
+
+ class MultipleInvariantGlobal(Nearby, PClass):
+ distant = field()
+ nearby = field()
+
+ try:
+ MultipleInvariantGlobal(distant=False, nearby=False)
+ assert False
+ except InvariantException as e:
+ assert e.invariant_errors == (("nearby",), ("distant",),)
+
+
+def test_diamond_inherited_global_invariants():
+ counter = []
+ class Base(object):
+ def __invariant__(self):
+ counter.append(None)
+ return [(False, "base")]
+
+ class Left(Base):
+ pass
+
+ class Right(Base):
+ pass
+
+ class SingleInvariantGlobal(Left, Right, PClass):
+ pass
+
+ try:
+ SingleInvariantGlobal()
+ assert False
+ except InvariantException as e:
+ assert e.invariant_errors == (("base",),)
+ assert counter == [None]
+
def test_supports_weakref():
import weakref
weakref.ref(Point(x=1, y=2))
@@ -357,4 +400,4 @@ def test_enum_key_type():
class MyClass2(PClass):
f = pmap_field(key_type=(Foo,), value_type=int)
- MyClass2()
\ No newline at end of file
+ MyClass2()
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 1
} | 0.12 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.16
babel==2.17.0
cachetools==5.5.2
certifi==2025.1.31
chardet==5.2.0
charset-normalizer==3.4.1
colorama==0.4.6
coverage==7.8.0
distlib==0.3.9
docutils==0.21.2
exceptiongroup==1.2.2
filelock==3.18.0
hypothesis==2.0.0
idna==3.10
imagesize==1.4.1
importlib_metadata==8.6.1
iniconfig==2.1.0
Jinja2==3.1.6
MarkupSafe==3.0.2
memory_profiler==0.31
packaging==24.2
platformdirs==4.3.7
pluggy==1.5.0
psutil==2.1.1
py==1.11.0
Pygments==2.19.1
pyperform==1.86
pyproject-api==1.9.0
-e git+https://github.com/tobgu/pyrsistent.git@decc5bbb11a3c795ad7553760f1bfc29f370162e#egg=pyrsistent
pytest==8.3.5
pytest-cov==6.0.0
requests==2.32.3
six==1.17.0
snowballstemmer==2.2.0
Sphinx==7.4.7
sphinx_rtd_theme==0.1.5
sphinxcontrib-applehelp==2.0.0
sphinxcontrib-devhelp==2.0.0
sphinxcontrib-htmlhelp==2.1.0
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==2.0.0
sphinxcontrib-serializinghtml==2.0.0
tomli==2.2.1
tox==4.25.0
typing_extensions==4.13.0
urllib3==2.3.0
virtualenv==20.29.3
zipp==3.21.0
| name: pyrsistent
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.16
- babel==2.17.0
- cachetools==5.5.2
- certifi==2025.1.31
- chardet==5.2.0
- charset-normalizer==3.4.1
- colorama==0.4.6
- coverage==7.8.0
- distlib==0.3.9
- docutils==0.21.2
- exceptiongroup==1.2.2
- filelock==3.18.0
- hypothesis==2.0.0
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- jinja2==3.1.6
- markupsafe==3.0.2
- memory-profiler==0.31
- packaging==24.2
- platformdirs==4.3.7
- pluggy==1.5.0
- psutil==2.1.1
- py==1.11.0
- pygments==2.19.1
- pyperform==1.86
- pyproject-api==1.9.0
- pytest==8.3.5
- pytest-cov==6.0.0
- requests==2.32.3
- six==1.17.0
- snowballstemmer==2.2.0
- sphinx==7.4.7
- sphinx-rtd-theme==0.1.5
- sphinxcontrib-applehelp==2.0.0
- sphinxcontrib-devhelp==2.0.0
- sphinxcontrib-htmlhelp==2.1.0
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==2.0.0
- sphinxcontrib-serializinghtml==2.0.0
- tomli==2.2.1
- tox==4.25.0
- typing-extensions==4.13.0
- urllib3==2.3.0
- virtualenv==20.29.3
- zipp==3.21.0
prefix: /opt/conda/envs/pyrsistent
| [
"tests/class_test.py::test_inherited_global_invariants",
"tests/class_test.py::test_diamond_inherited_global_invariants"
]
| []
| [
"tests/class_test.py::test_evolve_pclass_instance",
"tests/class_test.py::test_direct_assignment_not_possible",
"tests/class_test.py::test_direct_delete_not_possible",
"tests/class_test.py::test_cannot_construct_with_undeclared_fields",
"tests/class_test.py::test_cannot_construct_with_wrong_type",
"tests/class_test.py::test_cannot_construct_without_mandatory_fields",
"tests/class_test.py::test_field_invariant_must_hold",
"tests/class_test.py::test_initial_value_set_when_not_present_in_arguments",
"tests/class_test.py::test_can_create_nested_structures_from_dict_and_serialize_back_to_dict",
"tests/class_test.py::test_can_serialize_with_custom_serializer",
"tests/class_test.py::test_implements_proper_equality_based_on_equality_of_fields",
"tests/class_test.py::test_is_hashable",
"tests/class_test.py::test_supports_nested_transformation",
"tests/class_test.py::test_repr",
"tests/class_test.py::test_global_invariant_check",
"tests/class_test.py::test_supports_pickling",
"tests/class_test.py::test_supports_pickling_with_typed_container_fields",
"tests/class_test.py::test_can_remove_optional_member",
"tests/class_test.py::test_cannot_remove_mandatory_member",
"tests/class_test.py::test_cannot_remove_non_existing_member",
"tests/class_test.py::test_evolver_without_evolution_returns_original_instance",
"tests/class_test.py::test_evolver_with_evolution_to_same_element_returns_original_instance",
"tests/class_test.py::test_evolver_supports_chained_set_and_remove",
"tests/class_test.py::test_evolver_supports_dot_notation_for_setting_and_getting_elements",
"tests/class_test.py::test_string_as_type_specifier",
"tests/class_test.py::test_multiple_invariants_on_field",
"tests/class_test.py::test_multiple_global_invariants",
"tests/class_test.py::test_supports_weakref",
"tests/class_test.py::test_supports_weakref_with_multi_level_inheritance",
"tests/class_test.py::test_supports_lazy_initial_value_for_field",
"tests/class_test.py::test_type_checks_lazy_initial_value_for_field",
"tests/class_test.py::test_invariant_checks_lazy_initial_value_for_field",
"tests/class_test.py::test_invariant_checks_static_initial_value",
"tests/class_test.py::test_lazy_invariant_message"
]
| []
| MIT License | 1,305 | [
"pyrsistent/_checked_types.py"
]
| [
"pyrsistent/_checked_types.py"
]
|
|
google__mobly-222 | bb85ca2966791baecca17e2cb251c09ded139d07 | 2017-05-26 21:40:50 | 31dcff279d4808e011f6af8ab0661b9750357cda | xpconanfan:
Review status: 0 of 5 files reviewed at latest revision, 1 unresolved discussion.
---
*[mobly/controllers/android_device_lib/snippet_client.py, line 142 at r1](https://reviewable.io:443/reviews/google/mobly/222#-KlQEEpQJ3ucg8uTkN-j:-KlQEEpQJ3ucg8uTkN-k:b-3m5zkq) ([raw file](https://github.com/google/mobly/blob/c20b6f1ba95f9939858dffa423fdf1bf7fef3d44/mobly/controllers/android_device_lib/snippet_client.py#L142)):*
> ```Python
> event_client = SnippetClient(
> package=self.package, adb_proxy=self._adb, log=self.log)
> event_client.host_port = self.host_port
> ```
(Not sure if this is within the scope of this PR.)
Should we add a unit test that makes sure the `event_client` uses the same `host_port` as the parent client that created the event client?
---
*Comments from [Reviewable](https://reviewable.io:443/reviews/google/mobly/222)*
<!-- Sent from Reviewable.io -->
adorokhine:
Review status: 0 of 5 files reviewed at latest revision, 1 unresolved discussion.
---
*[mobly/controllers/android_device_lib/snippet_client.py, line 142 at r1](https://reviewable.io:443/reviews/google/mobly/222#-KlQEEpQJ3ucg8uTkN-j:-KlQKWYfOqcjaBOZtZA1:b-896fix) ([raw file](https://github.com/google/mobly/blob/c20b6f1ba95f9939858dffa423fdf1bf7fef3d44/mobly/controllers/android_device_lib/snippet_client.py#L142)):*
<details><summary><i>Previously, xpconanfan (Ang Li) wrote…</i></summary><blockquote>
(Not sure if this is within the scope of this PR.)
Should we add a unit test that makes sure the `event_client` uses the same `host_port` as the parent client that created the event client?
</blockquote></details>
Done.
---
*Comments from [Reviewable](https://reviewable.io:443/reviews/google/mobly/222)*
<!-- Sent from Reviewable.io -->
xpconanfan:
Review status: 0 of 5 files reviewed at latest revision, 1 unresolved discussion.
---
*[tests/mobly/controllers/android_device_lib/jsonrpc_client_base_test.py, line 231 at r2](https://reviewable.io:443/reviews/google/mobly/222#-KlR02OhSVsXA6sC48Qq:-KlR02OhSVsXA6sC48Qr:b-y4rdz8) ([raw file](https://github.com/google/mobly/blob/4de916df19009682c2e51d5f4a5bc8e9bb5279e0/tests/mobly/controllers/android_device_lib/jsonrpc_client_base_test.py#L231)):*
> ```Python
>
> @mock.patch('socket.create_connection')
> def test_rpc_call_async(self, mock_create_connection):
> ```
Hmm, this seems identical to `test_rpc_call_increment_counter`. Am I missing something?...
---
*Comments from [Reviewable](https://reviewable.io:443/reviews/google/mobly/222)*
<!-- Sent from Reviewable.io -->
adorokhine:
Review status: 0 of 5 files reviewed at latest revision, 1 unresolved discussion.
---
*[tests/mobly/controllers/android_device_lib/jsonrpc_client_base_test.py, line 231 at r2](https://reviewable.io:443/reviews/google/mobly/222#-KlR02OhSVsXA6sC48Qq:-KlR17AzZtpMMrBdbnHW:b5qk4v) ([raw file](https://github.com/google/mobly/blob/4de916df19009682c2e51d5f4a5bc8e9bb5279e0/tests/mobly/controllers/android_device_lib/jsonrpc_client_base_test.py#L231)):*
<details><summary><i>Previously, xpconanfan (Ang Li) wrote…</i></summary><blockquote>
Hmm, this seems identical to `test_rpc_call_increment_counter`. Am I missing something?...
</blockquote></details>
Whoops, good catch! I started creating a new test here but decided to put it in snippet_client instead. This code shouldn't have been committed. Done.
---
*Comments from [Reviewable](https://reviewable.io:443/reviews/google/mobly/222)*
<!-- Sent from Reviewable.io -->
xpconanfan:
Review status: 0 of 5 files reviewed at latest revision, 1 unresolved discussion.
---
*[tests/mobly/controllers/android_device_lib/jsonrpc_client_base_test.py, line 148 at r3](https://reviewable.io:443/reviews/google/mobly/222#-KlR208gYCLqbHRd_csT:-KlR208gYCLqbHRd_csU:b-creb0e) ([raw file](https://github.com/google/mobly/blob/bb85ca2966791baecca17e2cb251c09ded139d07/tests/mobly/controllers/android_device_lib/jsonrpc_client_base_test.py#L148)):*
> ```Python
>
> @mock.patch('socket.create_connection')
> def test_connect_no_response(self, mock_create_connection):
> ```
Hmm, why is this test being removed in this PR?
---
*Comments from [Reviewable](https://reviewable.io:443/reviews/google/mobly/222)*
<!-- Sent from Reviewable.io -->
adorokhine:
Review status: 0 of 5 files reviewed at latest revision, 1 unresolved discussion.
---
*[tests/mobly/controllers/android_device_lib/jsonrpc_client_base_test.py, line 148 at r3](https://reviewable.io:443/reviews/google/mobly/222#-KlR208gYCLqbHRd_csT:-KlR2xtrn1hhmrQ9gju9:bmpx7qh) ([raw file](https://github.com/google/mobly/blob/bb85ca2966791baecca17e2cb251c09ded139d07/tests/mobly/controllers/android_device_lib/jsonrpc_client_base_test.py#L148)):*
<details><summary><i>Previously, xpconanfan (Ang Li) wrote…</i></summary><blockquote>
Hmm, why is this test being removed in this PR?
</blockquote></details>
`assertRaises` was being used incorrectly here (msg is what it prints if it triggers, not what it checks against). Upon fixing the bug it turned out that it's actually returning NO_RESPONSE_FROM_SERVER instead of NO_RESPONSE_FROM_HANDSHAKE, which makes sense because by the time you've finished connect, handshake has already completed.
NO_RESPONSE_FROM_SERVER is already being tested by `test_rpc_no_response`, and in fact the code seems to be the same.
Handshake errors from connect are already being tested by `test_connect_handshake`.
---
*Comments from [Reviewable](https://reviewable.io:443/reviews/google/mobly/222)*
<!-- Sent from Reviewable.io -->
xpconanfan: <img class="emoji" title=":lgtm:" alt=":lgtm:" align="absmiddle" src="https://reviewable.io/lgtm.png" height="20" width="61"/>
Thanks for fixing this!
---
Review status: 0 of 5 files reviewed at latest revision, all discussions resolved.
---
*Comments from [Reviewable](https://reviewable.io:443/reviews/google/mobly/222#-:-KlR48Z-pox3K3INMlFn:b-kym177)*
<!-- Sent from Reviewable.io -->
| diff --git a/mobly/controllers/android_device_lib/snippet_client.py b/mobly/controllers/android_device_lib/snippet_client.py
index 1470927..f47308a 100644
--- a/mobly/controllers/android_device_lib/snippet_client.py
+++ b/mobly/controllers/android_device_lib/snippet_client.py
@@ -138,10 +138,8 @@ class SnippetClient(jsonrpc_client_base.JsonRpcClientBase):
def _start_event_client(self):
"""Overrides superclass."""
event_client = SnippetClient(
- package=self.package,
- host_port=self.host_port,
- adb_proxy=self._adb,
- log=self.log)
+ package=self.package, adb_proxy=self._adb, log=self.log)
+ event_client.host_port = self.host_port
event_client.connect(self.uid,
jsonrpc_client_base.JsonRpcCommand.CONTINUE)
return event_client
| `snippet_client._start_event_client` crashes
because of the extra `host_port` arg.
This crashes all tests that use `@AsyncRpc`.
We should add some unit tests for this... | google/mobly | diff --git a/tests/lib/jsonrpc_client_test_base.py b/tests/lib/jsonrpc_client_test_base.py
new file mode 100755
index 0000000..c4ca7a8
--- /dev/null
+++ b/tests/lib/jsonrpc_client_test_base.py
@@ -0,0 +1,71 @@
+# Copyright 2017 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from builtins import str
+
+import mock
+import unittest
+
+
+class JsonRpcClientTestBase(unittest.TestCase):
+ """Base class for tests of JSONRPC clients.
+
+ Contains infrastructure for mocking responses.
+ """
+
+ MOCK_RESP = (
+ b'{"id": 0, "result": 123, "error": null, "status": 1, "uid": 1, '
+ b'"callback": null}')
+ MOCK_RESP_WITHOUT_CALLBACK = (
+ b'{"id": 0, "result": 123, "error": null, "status": 1, "uid": 1}')
+ MOCK_RESP_TEMPLATE = (
+ '{"id": %d, "result": 123, "error": null, "status": 1, "uid": 1, '
+ '"callback": null}')
+ MOCK_RESP_UNKNOWN_STATUS = (
+ b'{"id": 0, "result": 123, "error": null, "status": 0, '
+ b'"callback": null}')
+ MOCK_RESP_WITH_CALLBACK = (
+ b'{"id": 0, "result": 123, "error": null, "status": 1, "uid": 1, '
+ b'"callback": "1-0"}')
+ MOCK_RESP_WITH_ERROR = b'{"id": 0, "error": 1, "status": 1, "uid": 1}'
+
+ class MockSocketFile(object):
+ def __init__(self, resp):
+ self.resp = resp
+ self.last_write = None
+
+ def write(self, msg):
+ self.last_write = msg
+
+ def readline(self):
+ return self.resp
+
+ def flush(self):
+ pass
+
+ def setup_mock_socket_file(self, mock_create_connection, resp=MOCK_RESP):
+ """Sets up a fake socket file from the mock connection.
+
+ Args:
+ mock_create_connection: The mock method for creating a method.
+ resp: (str) response to give. MOCK_RESP by default.
+
+ Returns:
+ The mock file that will be injected into the code.
+ """
+ fake_file = self.MockSocketFile(resp)
+ fake_conn = mock.MagicMock()
+ fake_conn.makefile.return_value = fake_file
+ mock_create_connection.return_value = fake_conn
+ return fake_file
diff --git a/tests/mobly/base_test_test.py b/tests/mobly/base_test_test.py
index 58ceba1..1a2a759 100755
--- a/tests/mobly/base_test_test.py
+++ b/tests/mobly/base_test_test.py
@@ -127,8 +127,8 @@ class BaseTestTest(unittest.TestCase):
bt_cls = MockBaseTest(self.mock_test_cls_configs)
expected_msg = ('Test method name not_a_test_something does not follow '
- 'naming convention test_*, abort.')
- with self.assertRaises(base_test.Error, msg=expected_msg):
+ 'naming convention test_\*, abort.')
+ with self.assertRaisesRegexp(base_test.Error, expected_msg):
bt_cls.run(test_names=["not_a_test_something"])
def test_default_execution_of_all_tests(self):
@@ -790,9 +790,9 @@ class BaseTestTest(unittest.TestCase):
"""Missing a required param should raise an error."""
required = ["something"]
bc = base_test.BaseTestClass(self.mock_test_cls_configs)
- expected_msg = ("Missing required user param '%s' in test "
- "configuration.") % required[0]
- with self.assertRaises(base_test.Error, msg=expected_msg):
+ expected_msg = ('Missing required user param "%s" in test '
+ 'configuration.') % required[0]
+ with self.assertRaisesRegexp(base_test.Error, expected_msg):
bc.unpack_userparams(required)
def test_unpack_userparams_optional(self):
diff --git a/tests/mobly/controllers/android_device_lib/jsonrpc_client_base_test.py b/tests/mobly/controllers/android_device_lib/jsonrpc_client_base_test.py
index 3fbe8cf..7263bdc 100755
--- a/tests/mobly/controllers/android_device_lib/jsonrpc_client_base_test.py
+++ b/tests/mobly/controllers/android_device_lib/jsonrpc_client_base_test.py
@@ -20,33 +20,7 @@ import socket
import unittest
from mobly.controllers.android_device_lib import jsonrpc_client_base
-
-MOCK_RESP = (b'{"id": 0, "result": 123, "error": null, "status": 1, "uid": 1,'
- b' "callback": null}')
-MOCK_RESP_WITHOUT_CALLBACK = (b'{"id": 0, "result": 123, "error": null, '
- b'"status": 1, "uid": 1}')
-MOCK_RESP_TEMPLATE = ('{"id": %d, "result": 123, "error": null, "status": 1, '
- '"uid": 1, "callback": null}')
-MOCK_RESP_UNKNOWN_STATUS = (b'{"id": 0, "result": 123, "error": null, '
- b'"status": 0, "callback": null}')
-MOCK_RESP_WITH_CALLBACK = (b'{"id": 0, "result": 123, "error": null, '
- b'"status": 1, "uid": 1, "callback": "1-0"}')
-MOCK_RESP_WITH_ERROR = b'{"id": 0, "error": 1, "status": 1, "uid": 1}'
-
-
-class MockSocketFile(object):
- def __init__(self, resp):
- self.resp = resp
- self.last_write = None
-
- def write(self, msg):
- self.last_write = msg
-
- def readline(self):
- return self.resp
-
- def flush(self):
- pass
+from tests.lib import jsonrpc_client_test_base
class FakeRpcClient(jsonrpc_client_base.JsonRpcClientBase):
@@ -54,25 +28,10 @@ class FakeRpcClient(jsonrpc_client_base.JsonRpcClientBase):
super(FakeRpcClient, self).__init__(app_name='FakeRpcClient')
-class JsonRpcClientBaseTest(unittest.TestCase):
+class JsonRpcClientBaseTest(jsonrpc_client_test_base.JsonRpcClientTestBase):
"""Unit tests for mobly.controllers.android_device_lib.jsonrpc_client_base.
"""
- def setup_mock_socket_file(self, mock_create_connection):
- """Sets up a fake socket file from the mock connection.
-
- Args:
- mock_create_connection: The mock method for creating a method.
-
- Returns:
- The mock file that will be injected into the code.
- """
- fake_file = MockSocketFile(MOCK_RESP)
- fake_conn = mock.MagicMock()
- fake_conn.makefile.return_value = fake_file
- mock_create_connection.return_value = fake_conn
- return fake_file
-
@mock.patch('socket.create_connection')
def test_open_timeout_io_error(self, mock_create_connection):
"""Test socket timeout with io error
@@ -104,11 +63,11 @@ class JsonRpcClientBaseTest(unittest.TestCase):
Test that if there is an error in the jsonrpc handshake then a protocol
error will be raised.
"""
- fake_conn = mock.MagicMock()
- fake_conn.makefile.return_value = MockSocketFile(None)
- mock_create_connection.return_value = fake_conn
- with self.assertRaises(jsonrpc_client_base.ProtocolError):
- client = FakeRpcClient()
+ self.setup_mock_socket_file(mock_create_connection, resp=None)
+ client = FakeRpcClient()
+ with self.assertRaisesRegexp(
+ jsonrpc_client_base.ProtocolError,
+ jsonrpc_client_base.ProtocolError.NO_RESPONSE_FROM_HANDSHAKE):
client.connect()
@mock.patch('socket.create_connection')
@@ -118,13 +77,9 @@ class JsonRpcClientBaseTest(unittest.TestCase):
Test that at the end of a handshake with no errors the client object
has the correct parameters.
"""
- fake_conn = mock.MagicMock()
- fake_conn.makefile.return_value = MockSocketFile(MOCK_RESP)
- mock_create_connection.return_value = fake_conn
-
+ self.setup_mock_socket_file(mock_create_connection)
client = FakeRpcClient()
client.connect()
-
self.assertEqual(client.uid, 1)
@mock.patch('socket.create_connection')
@@ -134,36 +89,12 @@ class JsonRpcClientBaseTest(unittest.TestCase):
Test that when the handshake is given an unknown status then the client
will not be given a uid.
"""
- fake_conn = mock.MagicMock()
- fake_conn.makefile.return_value = MockSocketFile(
- MOCK_RESP_UNKNOWN_STATUS)
- mock_create_connection.return_value = fake_conn
-
+ self.setup_mock_socket_file(
+ mock_create_connection, resp=self.MOCK_RESP_UNKNOWN_STATUS)
client = FakeRpcClient()
client.connect()
-
self.assertEqual(client.uid, jsonrpc_client_base.UNKNOWN_UID)
- @mock.patch('socket.create_connection')
- def test_connect_no_response(self, mock_create_connection):
- """Test handshake no response
-
- Test that if a handshake recieves no response then it will give a
- protocol error.
- """
- fake_file = self.setup_mock_socket_file(mock_create_connection)
-
- client = FakeRpcClient()
- client.connect()
-
- fake_file.resp = None
-
- with self.assertRaises(
- jsonrpc_client_base.ProtocolError,
- msg=
- jsonrpc_client_base.ProtocolError.NO_RESPONSE_FROM_HANDSHAKE):
- client.some_rpc(1, 2, 3)
-
@mock.patch('socket.create_connection')
def test_rpc_error_response(self, mock_create_connection):
"""Test rpc that is given an error response
@@ -176,9 +107,9 @@ class JsonRpcClientBaseTest(unittest.TestCase):
client = FakeRpcClient()
client.connect()
- fake_file.resp = MOCK_RESP_WITH_ERROR
+ fake_file.resp = self.MOCK_RESP_WITH_ERROR
- with self.assertRaises(jsonrpc_client_base.ApiError, msg=1):
+ with self.assertRaisesRegexp(jsonrpc_client_base.ApiError, '1'):
client.some_rpc(1, 2, 3)
@mock.patch('socket.create_connection')
@@ -193,7 +124,7 @@ class JsonRpcClientBaseTest(unittest.TestCase):
client = FakeRpcClient()
client.connect()
- fake_file.resp = MOCK_RESP_WITH_CALLBACK
+ fake_file.resp = self.MOCK_RESP_WITH_CALLBACK
client._event_client = mock.Mock()
callback = client.some_rpc(1, 2, 3)
@@ -212,11 +143,11 @@ class JsonRpcClientBaseTest(unittest.TestCase):
client = FakeRpcClient()
client.connect()
- fake_file.resp = (MOCK_RESP_TEMPLATE % 52).encode('utf8')
+ fake_file.resp = (self.MOCK_RESP_TEMPLATE % 52).encode('utf8')
- with self.assertRaises(
+ with self.assertRaisesRegexp(
jsonrpc_client_base.ProtocolError,
- msg=jsonrpc_client_base.ProtocolError.MISMATCHED_API_ID):
+ jsonrpc_client_base.ProtocolError.MISMATCHED_API_ID):
client.some_rpc(1, 2, 3)
@mock.patch('socket.create_connection')
@@ -233,9 +164,9 @@ class JsonRpcClientBaseTest(unittest.TestCase):
fake_file.resp = None
- with self.assertRaises(
+ with self.assertRaisesRegexp(
jsonrpc_client_base.ProtocolError,
- msg=jsonrpc_client_base.ProtocolError.NO_RESPONSE_FROM_SERVER):
+ jsonrpc_client_base.ProtocolError.NO_RESPONSE_FROM_SERVER):
client.some_rpc(1, 2, 3)
@mock.patch('socket.create_connection')
@@ -265,10 +196,8 @@ class JsonRpcClientBaseTest(unittest.TestCase):
Logic is the same as test_rpc_send_to_socket.
"""
- fake_file = MockSocketFile(MOCK_RESP_WITHOUT_CALLBACK)
- fake_conn = mock.MagicMock()
- fake_conn.makefile.return_value = fake_file
- mock_create_connection.return_value = fake_conn
+ fake_file = self.setup_mock_socket_file(
+ mock_create_connection, resp=self.MOCK_RESP_WITHOUT_CALLBACK)
client = FakeRpcClient()
client.connect()
@@ -293,11 +222,11 @@ class JsonRpcClientBaseTest(unittest.TestCase):
client.connect()
for i in range(0, 10):
- fake_file.resp = (MOCK_RESP_TEMPLATE % i).encode('utf-8')
+ fake_file.resp = (self.MOCK_RESP_TEMPLATE % i).encode('utf-8')
client.some_rpc()
self.assertEquals(next(client._counter), 10)
-if __name__ == "__main__":
+if __name__ == '__main__':
unittest.main()
diff --git a/tests/mobly/controllers/android_device_lib/snippet_client_test.py b/tests/mobly/controllers/android_device_lib/snippet_client_test.py
index c5b7df4..911f53e 100755
--- a/tests/mobly/controllers/android_device_lib/snippet_client_test.py
+++ b/tests/mobly/controllers/android_device_lib/snippet_client_test.py
@@ -20,10 +20,11 @@ import unittest
from mobly.controllers.android_device_lib import jsonrpc_client_base
from mobly.controllers.android_device_lib import snippet_client
+from tests.lib import jsonrpc_client_test_base
MOCK_PACKAGE_NAME = 'some.package.name'
MOCK_MISSING_PACKAGE_NAME = 'not.installed'
-JSONRPC_BASE_PACKAGE = 'mobly.controllers.android_device_lib.jsonrpc_client_base.JsonRpcClientBase'
+JSONRPC_BASE_CLASS = 'mobly.controllers.android_device_lib.jsonrpc_client_base.JsonRpcClientBase'
class MockAdbProxy(object):
@@ -63,47 +64,62 @@ class MockAdbProxy(object):
return adb_call
-class JsonRpcClientBaseTest(unittest.TestCase):
+class SnippetClientTest(jsonrpc_client_test_base.JsonRpcClientTestBase):
"""Unit tests for mobly.controllers.android_device_lib.snippet_client.
"""
- @mock.patch('socket.create_connection')
- @mock.patch(JSONRPC_BASE_PACKAGE)
- def test_check_app_installed_normal(self, mock_create_connection,
- mock_client_base):
+ def test_check_app_installed_normal(self):
sc = self._make_client()
sc._check_app_installed()
- @mock.patch('socket.create_connection')
- @mock.patch(JSONRPC_BASE_PACKAGE)
- def test_check_app_installed_fail_app_not_installed(
- self, mock_create_connection, mock_client_base):
+ def test_check_app_installed_fail_app_not_installed(self):
sc = self._make_client(MockAdbProxy(apk_not_installed=True))
expected_msg = '%s is not installed on .*' % MOCK_PACKAGE_NAME
with self.assertRaisesRegexp(jsonrpc_client_base.AppStartError,
expected_msg):
sc._check_app_installed()
- @mock.patch('socket.create_connection')
- @mock.patch(JSONRPC_BASE_PACKAGE)
- def test_check_app_installed_fail_not_instrumented(
- self, mock_create_connection, mock_client_base):
+ def test_check_app_installed_fail_not_instrumented(self):
sc = self._make_client(MockAdbProxy(apk_not_instrumented=True))
expected_msg = '%s is installed on .*, but it is not instrumented.' % MOCK_PACKAGE_NAME
with self.assertRaisesRegexp(jsonrpc_client_base.AppStartError,
expected_msg):
sc._check_app_installed()
- @mock.patch('socket.create_connection')
- @mock.patch(JSONRPC_BASE_PACKAGE)
- def test_check_app_installed_fail_target_not_installed(
- self, mock_create_connection, mock_client_base):
+ def test_check_app_installed_fail_target_not_installed(self):
sc = self._make_client(MockAdbProxy(target_not_installed=True))
expected_msg = 'Instrumentation target %s is not installed on .*' % MOCK_MISSING_PACKAGE_NAME
with self.assertRaisesRegexp(jsonrpc_client_base.AppStartError,
expected_msg):
sc._check_app_installed()
+ @mock.patch('socket.create_connection')
+ def test_snippet_start(self, mock_create_connection):
+ self.setup_mock_socket_file(mock_create_connection)
+ client = self._make_client()
+ client.connect()
+ result = client.testSnippetCall()
+ self.assertEqual(123, result)
+
+ @mock.patch('socket.create_connection')
+ def test_snippet_start_event_client(self, mock_create_connection):
+ fake_file = self.setup_mock_socket_file(mock_create_connection)
+ client = self._make_client()
+ client.host_port = 123 # normally picked by start_app_and_connect
+ client.connect()
+ fake_file.resp = self.MOCK_RESP_WITH_CALLBACK
+ callback = client.testSnippetCall()
+ self.assertEqual(123, callback.ret_value)
+ self.assertEqual('1-0', callback._id)
+
+ # Check to make sure the event client is using the same port as the
+ # main client.
+ self.assertEqual(123, callback._event_client.host_port)
+
+ fake_file.resp = self.MOCK_RESP_WITH_ERROR
+ with self.assertRaisesRegexp(jsonrpc_client_base.ApiError, '1'):
+ callback.getAll('eventName')
+
def _make_client(self, adb_proxy=MockAdbProxy()):
return snippet_client.SnippetClient(
package=MOCK_PACKAGE_NAME, adb_proxy=adb_proxy)
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 2,
"test_score": 2
},
"num_modified_files": 1
} | 1.4 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y adb"
],
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | exceptiongroup==1.2.2
future==1.0.0
iniconfig==2.1.0
-e git+https://github.com/google/mobly.git@bb85ca2966791baecca17e2cb251c09ded139d07#egg=mobly
mock==1.0.1
packaging==24.2
pluggy==1.5.0
portpicker==1.6.0
psutil==7.0.0
pytest==8.3.5
pytz==2025.2
PyYAML==6.0.2
timeout-decorator==0.5.0
tomli==2.2.1
| name: mobly
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- exceptiongroup==1.2.2
- future==1.0.0
- iniconfig==2.1.0
- mock==1.0.1
- packaging==24.2
- pluggy==1.5.0
- portpicker==1.6.0
- psutil==7.0.0
- pytest==8.3.5
- pytz==2025.2
- pyyaml==6.0.2
- timeout-decorator==0.5.0
- tomli==2.2.1
prefix: /opt/conda/envs/mobly
| [
"tests/mobly/controllers/android_device_lib/snippet_client_test.py::SnippetClientTest::test_snippet_start_event_client"
]
| []
| [
"tests/mobly/base_test_test.py::BaseTestTest::test_abort_class",
"tests/mobly/base_test_test.py::BaseTestTest::test_assert_equal_fail",
"tests/mobly/base_test_test.py::BaseTestTest::test_assert_equal_fail_with_msg",
"tests/mobly/base_test_test.py::BaseTestTest::test_assert_equal_pass",
"tests/mobly/base_test_test.py::BaseTestTest::test_assert_raises_fail_with_noop",
"tests/mobly/base_test_test.py::BaseTestTest::test_assert_raises_fail_with_wrong_error",
"tests/mobly/base_test_test.py::BaseTestTest::test_assert_raises_fail_with_wrong_regex",
"tests/mobly/base_test_test.py::BaseTestTest::test_assert_raises_pass",
"tests/mobly/base_test_test.py::BaseTestTest::test_assert_raises_regex_fail_with_noop",
"tests/mobly/base_test_test.py::BaseTestTest::test_assert_raises_regex_fail_with_wrong_error",
"tests/mobly/base_test_test.py::BaseTestTest::test_assert_raises_regex_pass",
"tests/mobly/base_test_test.py::BaseTestTest::test_assert_true",
"tests/mobly/base_test_test.py::BaseTestTest::test_both_teardown_and_test_body_raise_exceptions",
"tests/mobly/base_test_test.py::BaseTestTest::test_cli_test_selection_fail_by_convention",
"tests/mobly/base_test_test.py::BaseTestTest::test_cli_test_selection_override_self_tests_list",
"tests/mobly/base_test_test.py::BaseTestTest::test_current_test_name",
"tests/mobly/base_test_test.py::BaseTestTest::test_default_execution_of_all_tests",
"tests/mobly/base_test_test.py::BaseTestTest::test_explicit_pass",
"tests/mobly/base_test_test.py::BaseTestTest::test_explicit_pass_but_teardown_test_raises_an_exception",
"tests/mobly/base_test_test.py::BaseTestTest::test_fail",
"tests/mobly/base_test_test.py::BaseTestTest::test_failure_in_procedure_functions_is_recorded",
"tests/mobly/base_test_test.py::BaseTestTest::test_failure_to_call_procedure_function_is_recorded",
"tests/mobly/base_test_test.py::BaseTestTest::test_generate_tests_call_outside_of_setup_generated_tests",
"tests/mobly/base_test_test.py::BaseTestTest::test_generate_tests_dup_test_name",
"tests/mobly/base_test_test.py::BaseTestTest::test_generate_tests_run",
"tests/mobly/base_test_test.py::BaseTestTest::test_generate_tests_selected_run",
"tests/mobly/base_test_test.py::BaseTestTest::test_generated_tests",
"tests/mobly/base_test_test.py::BaseTestTest::test_implicit_pass",
"tests/mobly/base_test_test.py::BaseTestTest::test_missing_requested_test_func",
"tests/mobly/base_test_test.py::BaseTestTest::test_on_fail_executed_if_teardown_test_fails",
"tests/mobly/base_test_test.py::BaseTestTest::test_on_fail_executed_if_test_fails",
"tests/mobly/base_test_test.py::BaseTestTest::test_on_fail_executed_if_test_setup_fails_by_exception",
"tests/mobly/base_test_test.py::BaseTestTest::test_on_fail_raise_exception",
"tests/mobly/base_test_test.py::BaseTestTest::test_on_pass_raise_exception",
"tests/mobly/base_test_test.py::BaseTestTest::test_self_tests_list",
"tests/mobly/base_test_test.py::BaseTestTest::test_self_tests_list_fail_by_convention",
"tests/mobly/base_test_test.py::BaseTestTest::test_setup_class_fail_by_exception",
"tests/mobly/base_test_test.py::BaseTestTest::test_setup_test_fail_by_exception",
"tests/mobly/base_test_test.py::BaseTestTest::test_setup_test_fail_by_test_signal",
"tests/mobly/base_test_test.py::BaseTestTest::test_skip",
"tests/mobly/base_test_test.py::BaseTestTest::test_skip_if",
"tests/mobly/base_test_test.py::BaseTestTest::test_teardown_test_assert_fail",
"tests/mobly/base_test_test.py::BaseTestTest::test_teardown_test_executed_if_setup_test_fails",
"tests/mobly/base_test_test.py::BaseTestTest::test_teardown_test_executed_if_test_fails",
"tests/mobly/base_test_test.py::BaseTestTest::test_teardown_test_executed_if_test_pass",
"tests/mobly/base_test_test.py::BaseTestTest::test_teardown_test_raise_exception",
"tests/mobly/base_test_test.py::BaseTestTest::test_uncaught_exception",
"tests/mobly/base_test_test.py::BaseTestTest::test_unpack_userparams_basic",
"tests/mobly/base_test_test.py::BaseTestTest::test_unpack_userparams_default_None",
"tests/mobly/base_test_test.py::BaseTestTest::test_unpack_userparams_default_overwrite",
"tests/mobly/base_test_test.py::BaseTestTest::test_unpack_userparams_default_overwrite_by_optional_param_list",
"tests/mobly/base_test_test.py::BaseTestTest::test_unpack_userparams_default_overwrite_by_required_param_list",
"tests/mobly/base_test_test.py::BaseTestTest::test_unpack_userparams_optional",
"tests/mobly/base_test_test.py::BaseTestTest::test_unpack_userparams_optional_missing",
"tests/mobly/base_test_test.py::BaseTestTest::test_unpack_userparams_optional_with_default",
"tests/mobly/base_test_test.py::BaseTestTest::test_unpack_userparams_required",
"tests/mobly/base_test_test.py::BaseTestTest::test_unpack_userparams_required_missing",
"tests/mobly/controllers/android_device_lib/jsonrpc_client_base_test.py::JsonRpcClientBaseTest::test_connect_handshake",
"tests/mobly/controllers/android_device_lib/jsonrpc_client_base_test.py::JsonRpcClientBaseTest::test_connect_handshake_unknown_status",
"tests/mobly/controllers/android_device_lib/jsonrpc_client_base_test.py::JsonRpcClientBaseTest::test_connect_timeout",
"tests/mobly/controllers/android_device_lib/jsonrpc_client_base_test.py::JsonRpcClientBaseTest::test_handshake_error",
"tests/mobly/controllers/android_device_lib/jsonrpc_client_base_test.py::JsonRpcClientBaseTest::test_open_timeout_io_error",
"tests/mobly/controllers/android_device_lib/jsonrpc_client_base_test.py::JsonRpcClientBaseTest::test_rpc_call_increment_counter",
"tests/mobly/controllers/android_device_lib/jsonrpc_client_base_test.py::JsonRpcClientBaseTest::test_rpc_callback_response",
"tests/mobly/controllers/android_device_lib/jsonrpc_client_base_test.py::JsonRpcClientBaseTest::test_rpc_error_response",
"tests/mobly/controllers/android_device_lib/jsonrpc_client_base_test.py::JsonRpcClientBaseTest::test_rpc_id_mismatch",
"tests/mobly/controllers/android_device_lib/jsonrpc_client_base_test.py::JsonRpcClientBaseTest::test_rpc_no_response",
"tests/mobly/controllers/android_device_lib/jsonrpc_client_base_test.py::JsonRpcClientBaseTest::test_rpc_send_to_socket",
"tests/mobly/controllers/android_device_lib/jsonrpc_client_base_test.py::JsonRpcClientBaseTest::test_rpc_send_to_socket_without_callback",
"tests/mobly/controllers/android_device_lib/snippet_client_test.py::SnippetClientTest::test_check_app_installed_fail_app_not_installed",
"tests/mobly/controllers/android_device_lib/snippet_client_test.py::SnippetClientTest::test_check_app_installed_fail_not_instrumented",
"tests/mobly/controllers/android_device_lib/snippet_client_test.py::SnippetClientTest::test_check_app_installed_fail_target_not_installed",
"tests/mobly/controllers/android_device_lib/snippet_client_test.py::SnippetClientTest::test_check_app_installed_normal",
"tests/mobly/controllers/android_device_lib/snippet_client_test.py::SnippetClientTest::test_snippet_start"
]
| []
| Apache License 2.0 | 1,306 | [
"mobly/controllers/android_device_lib/snippet_client.py"
]
| [
"mobly/controllers/android_device_lib/snippet_client.py"
]
|
jbasko__configmanager-113 | a4acefa81cb91b2ec25fbd38aa7a8acd26597d3d | 2017-05-28 19:38:41 | a4acefa81cb91b2ec25fbd38aa7a8acd26597d3d | diff --git a/configmanager/managers.py b/configmanager/managers.py
index fdc3cbb..e395936 100644
--- a/configmanager/managers.py
+++ b/configmanager/managers.py
@@ -242,7 +242,7 @@ class Config(BaseSection):
for path, _ in self.iter_all(recursive=recursive):
yield path
- def to_dict(self, with_defaults=True, dict_cls=dict):
+ def dump_values(self, with_defaults=True, dict_cls=dict):
"""
Export values of all items contained in this section to a dictionary.
@@ -251,12 +251,12 @@ class Config(BaseSection):
of their contents.
See Also:
- :meth:`.read_dict` does the opposite.
+ :meth:`.load_values` does the opposite.
"""
values = dict_cls()
for item_name, item in self._cm__configs.items():
if is_config_section(item):
- section_values = item.to_dict(with_defaults=with_defaults, dict_cls=dict_cls)
+ section_values = item.dump_values(with_defaults=with_defaults, dict_cls=dict_cls)
if section_values:
values[item_name] = section_values
else:
@@ -265,7 +265,7 @@ class Config(BaseSection):
values[item.name] = item.value
return values
- def read_dict(self, dictionary, as_defaults=False):
+ def load_values(self, dictionary, as_defaults=False):
"""
Import config values from a dictionary.
@@ -281,14 +281,14 @@ class Config(BaseSection):
as_defaults: if ``True``, the imported values will be set as defaults.
See Also:
- :meth:`to_dict` does the opposite.
+ :meth:`dump_values` does the opposite.
"""
for name, value in dictionary.items():
if name not in self:
if as_defaults:
if isinstance(value, dict):
self[name] = self.create_section()
- self[name].read_dict(value, as_defaults=as_defaults)
+ self[name].load_values(value, as_defaults=as_defaults)
else:
self[name] = self.create_item(name, default=value)
else:
@@ -300,7 +300,7 @@ class Config(BaseSection):
else:
self[name].value = value
else:
- self[name].read_dict(value, as_defaults=as_defaults)
+ self[name].load_values(value, as_defaults=as_defaults)
def reset(self):
"""
diff --git a/configmanager/persistence.py b/configmanager/persistence.py
index 7cdab00..3c315b8 100644
--- a/configmanager/persistence.py
+++ b/configmanager/persistence.py
@@ -72,7 +72,7 @@ class JsonReaderWriter(ConfigReaderWriter):
# the string we are trying to write is not unicode in Python 2
# because we open files with encoding=utf-8.
result = self.json.dumps(
- config.to_dict(with_defaults=with_defaults, dict_cls=collections.OrderedDict),
+ config.dump_values(with_defaults=with_defaults, dict_cls=collections.OrderedDict),
ensure_ascii=False,
indent=2,
**kwargs
@@ -83,13 +83,13 @@ class JsonReaderWriter(ConfigReaderWriter):
return result
def load_config_from_file(self, config, file_obj, as_defaults=False, **kwargs):
- config.read_dict(
+ config.load_values(
self.json.load(file_obj, object_pairs_hook=collections.OrderedDict, **kwargs),
as_defaults=as_defaults,
)
def load_config_from_string(self, config, string, as_defaults=False, **kwargs):
- config.read_dict(
+ config.load_values(
self.json.loads(string, object_pairs_hook=collections.OrderedDict, **kwargs),
as_defaults=as_defaults,
)
@@ -103,16 +103,16 @@ class YamlReaderWriter(ConfigReaderWriter):
self.yaml = yaml
def dump_config_to_file(self, config, file_obj, with_defaults=False, **kwargs):
- self.yaml.dump(config.to_dict(with_defaults=with_defaults), file_obj, **kwargs)
+ self.yaml.dump(config.dump_values(with_defaults=with_defaults), file_obj, **kwargs)
def dump_config_to_string(self, config, with_defaults=False, **kwargs):
- return self.yaml.dump(config.to_dict(with_defaults=with_defaults), **kwargs)
+ return self.yaml.dump(config.dump_values(with_defaults=with_defaults), **kwargs)
def load_config_from_file(self, config, file_obj, as_defaults=False, **kwargs):
- config.read_dict(self.yaml.load(file_obj, **kwargs), as_defaults=as_defaults)
+ config.load_values(self.yaml.load(file_obj, **kwargs), as_defaults=as_defaults)
def load_config_from_string(self, config, string, as_defaults=False, **kwargs):
- config.read_dict(self.yaml.load(string, **kwargs), as_defaults=as_defaults)
+ config.load_values(self.yaml.load(string, **kwargs), as_defaults=as_defaults)
class ConfigParserReaderWriter(ConfigReaderWriter):
diff --git a/docs/quickstart.rst.inc b/docs/quickstart.rst.inc
index 5115ce4..d8eb4c1 100644
--- a/docs/quickstart.rst.inc
+++ b/docs/quickstart.rst.inc
@@ -29,7 +29,7 @@ Quick Start
>>> config.greeting.value
'Hello, world!'
- >>> config.to_dict()
+ >>> config.dump_values()
{'greeting': 'Hello, world!'}
5. Change config values. ::
@@ -42,11 +42,11 @@ Quick Start
'_type': str,
'_value': 'Hey!'}
- >>> config.read_dict({'greeting': 'Good evening!'})
+ >>> config.load_values({'greeting': 'Good evening!'})
>>> config.greeting.value
'Good evening!'
- >>> config.to_dict()
+ >>> config.dump_values()
{'greeting': 'Good evening!'}
6. Persist the configuration. ::
| read_dict and to_dict names are misleading
These methods actually only work with values or defaults, although the name doesn't suggest that!
Maybe rename these to `export_values()` and `load_values()` instead?
On the other hand, the main task of the library is to provide access to config values...
#108
| jbasko/configmanager | diff --git a/tests/test_config.py b/tests/test_config.py
index 0a5b7fb..b784044 100644
--- a/tests/test_config.py
+++ b/tests/test_config.py
@@ -62,7 +62,7 @@ def test_assigning_nameless_item_directly_to_config_should_set_its_name():
config.dummy['y'] = Item(default=True)
assert config.dummy.y.name == 'y'
- assert config.to_dict() == {'dummy': {'x': 5, 'y': True}}
+ assert config.dump_values() == {'dummy': {'x': 5, 'y': True}}
def test_assigning_item_with_name_directly_to_config_should_preserve_its_name():
@@ -83,7 +83,7 @@ def test_assigning_item_with_name_directly_to_config_should_preserve_its_name():
assert config.dummy.w.name == 'www'
assert config.dummy.www.name == 'www'
- assert config.to_dict() == {'dummy': {'a.b': 'AB', 'www': 6}}
+ assert config.dump_values() == {'dummy': {'a.b': 'AB', 'www': 6}}
all_dummies = list(config.dummy.iter_items())
assert len(all_dummies) == 2
@@ -117,18 +117,18 @@ def test_section_name_must_be_a_string():
def test_to_dict_should_not_include_items_with_no_usable_value():
config = Config()
- assert config.to_dict() == {}
+ assert config.dump_values() == {}
config.a = Item()
config.b = Item()
config.dummies = Config({'x': Item(), 'y': Item()})
- assert config.to_dict() == {}
+ assert config.dump_values() == {}
config.dummies.x.value = 'yes'
- assert config.to_dict() == {'dummies': {'x': 'yes'}}
+ assert config.dump_values() == {'dummies': {'x': 'yes'}}
config.b.value = 'no'
- assert config.to_dict() == {'dummies': {'x': 'yes'}, 'b': 'no'}
+ assert config.dump_values() == {'dummies': {'x': 'yes'}, 'b': 'no'}
def test_read_dict_recursively_loads_values_from_a_dictionary():
@@ -150,13 +150,13 @@ def test_read_dict_recursively_loads_values_from_a_dictionary():
assert config.a.x.value == 0
assert config.a.y.value is True
- config.read_dict({
+ config.load_values({
'a': {'x': '5', 'y': 'no'},
})
assert config.a.x.value == 5
assert config.a.y.value is False
- config.b.c.read_dict({
+ config.b.c.load_values({
'e': 'haha', # will be ignored
'd': {'x': 'XXX'},
})
@@ -168,7 +168,7 @@ def test_read_dict_as_defaults_loads_default_values_from_a_dictionary():
config = Config()
# both will be ignored
- config.read_dict({
+ config.load_values({
'a': 5,
'b': True,
})
@@ -177,7 +177,7 @@ def test_read_dict_as_defaults_loads_default_values_from_a_dictionary():
assert 'b' not in config
# both will be added
- config.read_dict({
+ config.load_values({
'a': 5,
'b': True,
}, as_defaults=True)
@@ -269,7 +269,7 @@ def test_allows_iteration_over_all_items(mixed_app_config):
formatters_items = list(config['logging']['formatters'].iter_items(recursive=True))
assert len(formatters_items) == 2
- formatters = config['logging']['formatters'].to_dict()
+ formatters = config['logging']['formatters'].dump_values()
assert formatters['plain'] == {'format': '%(message)s'}
@@ -356,7 +356,7 @@ def test_forbids_accidental_item_overwrite_via_setattr(mixed_app_config):
def test_to_dict(mixed_app_config, raw_db_config, raw_logging_config):
config = mixed_app_config
- config_dict = config.to_dict()
+ config_dict = config.dump_values()
assert isinstance(config_dict, dict)
@@ -405,7 +405,7 @@ def test_can_have_a_dict_as_a_config_value_if_wrapped_inside_item():
# value, not the real thing.
config.aws.value['secret_key'] = 'NEW_SECRET'
- assert config.to_dict()['aws'] == {'access_key': '123', 'secret_key': 'secret'}
+ assert config.dump_values()['aws'] == {'access_key': '123', 'secret_key': 'secret'}
def test_len_of_config_returns_number_of_items_and_sections_in_current_level():
@@ -482,7 +482,7 @@ def test_config_item_value_can_be_unicode_str(tmpdir):
config2 = Config({'greeting': '', 'name': ''})
config2.configparser.load(path)
assert config2.name.value == u'Jānis Bērziņš'
- assert config1.to_dict(with_defaults=True) == config2.to_dict(with_defaults=True)
+ assert config1.dump_values(with_defaults=True) == config2.dump_values(with_defaults=True)
def test_config_of_config_is_a_deep_copy_of_original_config():
@@ -491,13 +491,13 @@ def test_config_of_config_is_a_deep_copy_of_original_config():
config2 = Config(config1)
assert config1 is not config2
- assert config1.to_dict() == config2.to_dict()
- assert config1.to_dict(with_defaults=True) == config2.to_dict(with_defaults=True)
+ assert config1.dump_values() == config2.dump_values()
+ assert config1.dump_values(with_defaults=True) == config2.dump_values(with_defaults=True)
config1.uploads.enabled.value = True
- config1.uploads.db.read_dict({'user': 'admin'})
+ config1.uploads.db.load_values({'user': 'admin'})
- assert config2.to_dict(with_defaults=True) == {'uploads': {'enabled': False, 'db': {'user': 'root'}}}
+ assert config2.dump_values(with_defaults=True) == {'uploads': {'enabled': False, 'db': {'user': 'root'}}}
config2.uploads.db.user.default = 'default-user'
assert config1.uploads.db.user.default == 'root'
diff --git a/tests/test_config_declaration_parser.py b/tests/test_config_declaration_parser.py
index 3c76934..4a9ff1e 100644
--- a/tests/test_config_declaration_parser.py
+++ b/tests/test_config_declaration_parser.py
@@ -119,19 +119,19 @@ def test_class_based_config_declaration(app_config_cls_example):
def test_dict_based_config_declaration(app_config_dict_example, app_config_cls_example):
dict_tree = Config(app_config_dict_example)
cls_tree = Config(app_config_cls_example)
- assert dict_tree.to_dict() == cls_tree.to_dict()
+ assert dict_tree.dump_values() == cls_tree.dump_values()
def test_module_based_config_declaration(app_config_module_example, app_config_cls_example):
module_tree = Config(app_config_module_example)
cls_tree = Config(app_config_cls_example)
- assert module_tree.to_dict() == cls_tree.to_dict()
+ assert module_tree.dump_values() == cls_tree.dump_values()
def test_mixed_config_declaration(app_config_mixed_example, app_config_cls_example):
mixed_tree = Config(app_config_mixed_example)
cls_tree = Config(app_config_cls_example)
- assert mixed_tree.to_dict() == cls_tree.to_dict()
+ assert mixed_tree.dump_values() == cls_tree.dump_values()
def test_default_value_is_deep_copied():
diff --git a/tests/test_configparser.py b/tests/test_configparser.py
index 5e3bd27..70cf393 100644
--- a/tests/test_configparser.py
+++ b/tests/test_configparser.py
@@ -44,7 +44,7 @@ def test_reads_empty_config_from_file_obj(simple_config, empty_config_file):
with open(empty_config_file) as f:
simple_config.configparser.load(f)
- assert simple_config.to_dict() == {
+ assert simple_config.dump_values() == {
'simple': {
'str': '',
'int': 0,
@@ -60,7 +60,7 @@ def test_reads_simple_config_from_file_obj(simple_config, simple_config_file):
with open(simple_config_file) as f:
simple_config.configparser.load(f)
- assert simple_config.to_dict() == {
+ assert simple_config.dump_values() == {
'simple': {
'str': 'hello',
'int': 5,
@@ -282,4 +282,4 @@ def test_writes_to_and_reads_from_default_section_transparently(tmpdir):
config2 = Config()
config2.configparser.load(config_ini, as_defaults=True)
- assert config1.to_dict() == config2.to_dict() == {'greeting': 'Hello', 'name': 'World'}
+ assert config1.dump_values() == config2.dump_values() == {'greeting': 'Hello', 'name': 'World'}
diff --git a/tests/test_json.py b/tests/test_json.py
index 6103beb..39bd306 100644
--- a/tests/test_json.py
+++ b/tests/test_json.py
@@ -44,8 +44,8 @@ def test_json_read_and_write(defaults_json_path, user_json_path):
with open(defaults_json_path) as f:
c3.json.load(f, as_defaults=True)
- assert c1.to_dict(with_defaults=False) == {}
- assert c1.to_dict(with_defaults=True) == {
+ assert c1.dump_values(with_defaults=False) == {}
+ assert c1.dump_values(with_defaults=True) == {
'uploads': {
'threads': 1,
'enabled': False,
@@ -53,20 +53,20 @@ def test_json_read_and_write(defaults_json_path, user_json_path):
}
}
- assert c1.to_dict() == c2.to_dict() == c3.to_dict()
+ assert c1.dump_values() == c2.dump_values() == c3.dump_values()
c1.json.load(user_json_path)
c2.json.load([user_json_path])
with open(user_json_path) as f:
c3.json.load(f)
- assert c1.to_dict(with_defaults=False) == {
+ assert c1.dump_values(with_defaults=False) == {
'uploads': {
'threads': 5,
'enabled': True,
}
}
- assert c1.to_dict() == c2.to_dict() == c3.to_dict()
+ assert c1.dump_values() == c2.dump_values() == c3.dump_values()
updates = {
'uploads': {
@@ -75,23 +75,23 @@ def test_json_read_and_write(defaults_json_path, user_json_path):
}
}
- c1.read_dict(updates)
- c2.read_dict(updates)
- c3.read_dict(updates)
+ c1.load_values(updates)
+ c2.load_values(updates)
+ c3.load_values(updates)
- assert c1.to_dict() == c2.to_dict() == c3.to_dict()
+ assert c1.dump_values() == c2.dump_values() == c3.dump_values()
c1.json.dump(user_json_path)
c2.json.load(user_json_path)
- assert c1.to_dict() == c2.to_dict() == c3.to_dict()
+ assert c1.dump_values() == c2.dump_values() == c3.dump_values()
- assert c1.to_dict() == c2.to_dict() == c3.to_dict()
+ assert c1.dump_values() == c2.dump_values() == c3.dump_values()
with open(user_json_path, 'w') as f:
c2.json.dump(f)
c1.json.load(user_json_path)
- assert c1.to_dict() == c2.to_dict() == c3.to_dict()
+ assert c1.dump_values() == c2.dump_values() == c3.dump_values()
def test_json_writes_with_defaults_false_by_default(user_json_path):
@@ -121,13 +121,13 @@ def test_json_reads_and_writes_strings():
assert c.json.dumps(with_defaults=True) == '{\n "greeting": "Hello"\n}'
c.json.loads('{"something_nonexistent": 1}')
- assert c.to_dict() == {'greeting': 'Hello'}
+ assert c.dump_values() == {'greeting': 'Hello'}
c.json.loads('{"something_nonexistent": 1}', as_defaults=True)
- assert c.to_dict() == {'greeting': 'Hello', 'something_nonexistent': 1}
+ assert c.dump_values() == {'greeting': 'Hello', 'something_nonexistent': 1}
c.json.loads('{"greeting": "Hello, world!"}')
- assert c.to_dict() == {'greeting': 'Hello, world!', 'something_nonexistent': 1}
+ assert c.dump_values() == {'greeting': 'Hello, world!', 'something_nonexistent': 1}
def test_json_reads_and_writes_preserve_order(tmpdir):
diff --git a/tests/test_v1.py b/tests/test_v1.py
index 6824e04..70b1209 100644
--- a/tests/test_v1.py
+++ b/tests/test_v1.py
@@ -41,7 +41,7 @@ def test_simple_config():
assert config.greeting.default == 'Hello, world!'
# Can export all values to a dictionary
- assert config.to_dict() == {
+ assert config.dump_values() == {
'greeting': 'Good evening!',
'threads': 1,
'throttling_enabled': False,
@@ -120,7 +120,7 @@ def test_nested_config():
# You can also change values by reading them from a dictionary.
# Unknown names will be ignored unless you pass as_defaults=True
# but in that case you will overwrite any previously existing items.
- config.read_dict({'greeting': 'Good morning!', 'comments': {'enabled': False}})
+ config.load_values({'greeting': 'Good morning!', 'comments': {'enabled': False}})
assert config.greeting.value == 'Good morning!'
assert 'comments' not in config
@@ -137,11 +137,11 @@ def test_nested_config():
assert all[('server', 'port')] is config.server.port
# Export all values
- config_dict = config.to_dict()
+ config_dict = config.dump_values()
assert config_dict['db'] == {'host': 'localhost', 'user': 'root', 'password': 'secret'}
# Each section is a Config instance too, so you can export those separately too:
- assert config.server.to_dict() == config_dict['server']
+ assert config.server.dump_values() == config_dict['server']
# You can reset individual items to their default values
assert config.db.user.value == 'root'
@@ -190,8 +190,7 @@ def test_configparser_integration(tmpdir):
# configuration without specifying as_defaults=True:
config.configparser.load(custom_ini_path)
- # other ConfigParser-like methods such as read_dict, loads, read_file are provided too.
- # when you are done setting config values, you can dump them to file too.
+ # when you are done setting config values, you can write them to a file.
config.configparser.dump(custom_ini_path)
# Note that default values won't be written unless you explicitly request it
diff --git a/tests/test_yaml.py b/tests/test_yaml.py
index 661ccc2..56b9fbc 100644
--- a/tests/test_yaml.py
+++ b/tests/test_yaml.py
@@ -18,16 +18,16 @@ def test_config_written_to_and_read_from_yaml_file(yaml_path1):
},
},
})
- original_values = config.to_dict()
+ original_values = config.dump_values()
config.yaml.dump(yaml_path1, with_defaults=True)
config.yaml.load(yaml_path1)
- assert config.to_dict() == original_values
+ assert config.dump_values() == original_values
config2 = Config()
config2.yaml.load(yaml_path1, as_defaults=True)
- assert config2.to_dict() == original_values
+ assert config2.dump_values() == original_values
def test_config_written_to_and_read_from_yaml_string():
@@ -42,7 +42,7 @@ def test_config_written_to_and_read_from_yaml_string():
config = Config()
config.yaml.loads(config_str, as_defaults=True)
- assert config.to_dict() == {
+ assert config.dump_values() == {
'uploads': {
'enabled': True,
'threads': 5,
@@ -56,4 +56,4 @@ def test_config_written_to_and_read_from_yaml_string():
config2 = Config()
config2.yaml.loads(config_str2, as_defaults=True)
- assert config2.to_dict() == config.to_dict()
+ assert config2.dump_values() == config.dump_values()
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_issue_reference",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 2,
"test_score": 2
},
"num_modified_files": 3
} | 1.7 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-random-order",
"coverage",
"tox",
"bumpversion"
],
"pre_install": null,
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | bumpversion==0.5.3
cachetools==5.5.2
chardet==5.2.0
colorama==0.4.6
-e git+https://github.com/jbasko/configmanager.git@a4acefa81cb91b2ec25fbd38aa7a8acd26597d3d#egg=configmanager
configparser==7.2.0
coverage==7.8.0
distlib==0.3.9
exceptiongroup==1.2.2
filelock==3.18.0
future==1.0.0
iniconfig==2.1.0
packaging==24.2
platformdirs==4.3.7
pluggy==1.5.0
pyproject-api==1.9.0
pytest==8.3.5
pytest-random-order==1.1.1
PyYAML==6.0.2
six==1.17.0
tomli==2.2.1
tox==4.25.0
typing_extensions==4.13.0
virtualenv==20.29.3
| name: configmanager
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- bumpversion==0.5.3
- cachetools==5.5.2
- chardet==5.2.0
- colorama==0.4.6
- configparser==7.2.0
- coverage==7.8.0
- distlib==0.3.9
- exceptiongroup==1.2.2
- filelock==3.18.0
- future==1.0.0
- iniconfig==2.1.0
- packaging==24.2
- platformdirs==4.3.7
- pluggy==1.5.0
- pyproject-api==1.9.0
- pytest==8.3.5
- pytest-random-order==1.1.1
- pyyaml==6.0.2
- six==1.17.0
- tomli==2.2.1
- tox==4.25.0
- typing-extensions==4.13.0
- virtualenv==20.29.3
prefix: /opt/conda/envs/configmanager
| [
"tests/test_config.py::test_assigning_nameless_item_directly_to_config_should_set_its_name",
"tests/test_config.py::test_assigning_item_with_name_directly_to_config_should_preserve_its_name",
"tests/test_config.py::test_to_dict_should_not_include_items_with_no_usable_value",
"tests/test_config.py::test_read_dict_recursively_loads_values_from_a_dictionary",
"tests/test_config.py::test_read_dict_as_defaults_loads_default_values_from_a_dictionary",
"tests/test_config.py::test_allows_iteration_over_all_items",
"tests/test_config.py::test_to_dict",
"tests/test_config.py::test_can_have_a_dict_as_a_config_value_if_wrapped_inside_item",
"tests/test_config.py::test_config_item_value_can_be_unicode_str",
"tests/test_config.py::test_config_of_config_is_a_deep_copy_of_original_config",
"tests/test_config_declaration_parser.py::test_dict_based_config_declaration",
"tests/test_config_declaration_parser.py::test_module_based_config_declaration",
"tests/test_config_declaration_parser.py::test_mixed_config_declaration",
"tests/test_configparser.py::test_reads_empty_config_from_file_obj",
"tests/test_configparser.py::test_reads_simple_config_from_file_obj",
"tests/test_configparser.py::test_writes_to_and_reads_from_default_section_transparently",
"tests/test_json.py::test_json_read_and_write",
"tests/test_json.py::test_json_reads_and_writes_strings",
"tests/test_v1.py::test_simple_config",
"tests/test_v1.py::test_nested_config"
]
| [
"tests/test_yaml.py::test_config_written_to_and_read_from_yaml_file",
"tests/test_yaml.py::test_config_written_to_and_read_from_yaml_string"
]
| [
"tests/test_config.py::test_items_are_created_using_create_item_method",
"tests/test_config.py::test_reset_resets_values_to_defaults",
"tests/test_config.py::test_repr_of_config",
"tests/test_config.py::test_item_name_and_alias_must_be_a_string",
"tests/test_config.py::test_section_name_must_be_a_string",
"tests/test_config.py::test_declaration_parser_does_not_modify_config",
"tests/test_config.py::test_iter_items_with_recursive_false_iterates_only_over_current_section",
"tests/test_config.py::test_forbids_accidental_item_overwrite_via_setitem",
"tests/test_config.py::test_allows_iteration_over_sections",
"tests/test_config.py::test_attribute_read_access",
"tests/test_config.py::test_attribute_write_access",
"tests/test_config.py::test_forbids_accidental_item_overwrite_via_setattr",
"tests/test_config.py::test_can_inspect_config_contents",
"tests/test_config.py::test_len_of_config_returns_number_of_items_and_sections_in_current_level",
"tests/test_config.py::test__getitem__handles_paths_to_sections_and_items_and_so_does__contains__",
"tests/test_config.py::test_can_use__setitem__to_create_new_deep_paths",
"tests/test_config.py::test_section_knows_its_alias",
"tests/test_config.py::test_cofig_is_section_and_is_not_item",
"tests/test_config_declaration_parser.py::test_class_based_config_declaration",
"tests/test_config_declaration_parser.py::test_default_value_is_deep_copied",
"tests/test_config_declaration_parser.py::test_config_declaration_can_be_a_list_of_items_or_two_tuples",
"tests/test_config_declaration_parser.py::test_declaration_can_be_a_list_of_field_names",
"tests/test_config_declaration_parser.py::test_declaration_cannot_be_a_list_of_other_things",
"tests/test_configparser.py::test_writes_config_to_file",
"tests/test_configparser.py::test_preserves_bool_notation",
"tests/test_configparser.py::test_configparser_writer_does_not_accept_three_deep_paths",
"tests/test_configparser.py::test_read_reads_multiple_files_in_order",
"tests/test_configparser.py::test_read_string",
"tests/test_configparser.py::test_read_as_defaults_treats_all_values_as_declarations",
"tests/test_configparser.py::test_write_with_defaults_writes_defaults_too",
"tests/test_configparser.py::test_write_string_returns_valid_configparser_string",
"tests/test_json.py::test_json_writes_with_defaults_false_by_default",
"tests/test_json.py::test_json_reads_and_writes_preserve_order",
"tests/test_v1.py::test_exceptions",
"tests/test_v1.py::test_configparser_integration"
]
| []
| MIT License | 1,307 | [
"docs/quickstart.rst.inc",
"configmanager/persistence.py",
"configmanager/managers.py"
]
| [
"docs/quickstart.rst.inc",
"configmanager/persistence.py",
"configmanager/managers.py"
]
|
|
jboss-dockerfiles__dogen-126 | f141578aafffa0dbc7a33e34b9456e22b1750698 | 2017-05-29 09:09:47 | bc9263c5b683fdf901ac1646286ee3908b85dcdc | diff --git a/dogen/plugins/cct.py b/dogen/plugins/cct.py
index 5df1945..75f1ca0 100644
--- a/dogen/plugins/cct.py
+++ b/dogen/plugins/cct.py
@@ -32,10 +32,7 @@ class CCT(Plugin):
def setup_cct(self, version):
cctdist = '%s/.dogen/plugin/cct/%s/%s.zip' % (os.path.expanduser('~'), version, version)
cct_runtime = '%s/.dogen/plugin/cct/%s/cct.zip' % (os.path.expanduser('~'), version)
- if version == 'master':
- # we dont care if it doesnt exist - so we ignore errors here
- shutil.rmtree('%s/.dogen/plugin/cct/%s/' % (os.path.expanduser('~'), version), ignore_errors=True)
- elif os.path.exists(cctdist):
+ if os.path.exists(cctdist):
return cct_runtime
os.makedirs(os.path.dirname(cctdist))
@@ -116,11 +113,13 @@ class CCT(Plugin):
self.log.info("CCT plugin downloaded artifacts")
+ cfg['entrypoint'] = ['/usr/bin/cct']
+
if 'runtime' in cfg['cct']:
- cfg['entrypoint'] = ['/usr/bin/cct']
self.runtime_changes(cfg)
cfg['entrypoint'].append(cfg['cct']['runtime_changes'])
- cfg['entrypoint'].append("-c")
+
+ cfg['entrypoint'].append("-c")
if 'user' not in cfg['cct']:
cfg['cct']['user'] = 'root'
diff --git a/dogen/schema/kwalify_schema.yaml b/dogen/schema/kwalify_schema.yaml
index 4a5a660..50ba8a0 100644
--- a/dogen/schema/kwalify_schema.yaml
+++ b/dogen/schema/kwalify_schema.yaml
@@ -43,10 +43,11 @@ map:
seq:
- map:
value: {type: int, required: True}
+ expose: {type: bool}
+ description: {type: str}
volumes:
seq:
- {type: str, required: True}
- debugport: {type: int}
dogen:
map:
version: {type: text}
diff --git a/dogen/template_helper.py b/dogen/template_helper.py
index 4f32ca7..3e88f03 100644
--- a/dogen/template_helper.py
+++ b/dogen/template_helper.py
@@ -63,3 +63,17 @@ class TemplateHelper(object):
return envs
+ def ports(self, available_ports):
+ """
+ Combines all ports that should be added to the
+ Dockerfile into one array
+ """
+
+ port_list = []
+
+ for p in available_ports:
+ if p.get('expose', True):
+ port_list.append(p.get('value'))
+
+ return port_list
+
diff --git a/dogen/templates/template.jinja b/dogen/templates/template.jinja
index 4afbd7e..7b81139 100644
--- a/dogen/templates/template.jinja
+++ b/dogen/templates/template.jinja
@@ -45,7 +45,7 @@ LABEL name="$JBOSS_IMAGE_NAME" \
{% if ports %}
# Exposed ports
-EXPOSE {%- for port in ports %} {{ port.value }}{% endfor %}
+EXPOSE {%- for port in helper.ports(ports) %} {{ port }}{% endfor %}
{% endif %}
| Remove debugport key
Instead a port key should be used with a specific type, optionally. | jboss-dockerfiles/dogen | diff --git a/tests/schemas/good/debugport.yaml b/tests/schemas/good/debugport.yaml
new file mode 100644
index 0000000..57a26a9
--- /dev/null
+++ b/tests/schemas/good/debugport.yaml
@@ -0,0 +1,13 @@
+# The minimal permitted configuration
+release: '1'
+version: '1'
+cmd:
+ - whoami
+from: scratch
+name: someimage
+ports:
+ - value: 8080
+ description: "This is default port"
+ - value: 9999
+ description: "This is debug port port"
+ expose: False
diff --git a/tests/test_dockerfile.py b/tests/test_dockerfile.py
index 63033dc..1c6fec8 100644
--- a/tests/test_dockerfile.py
+++ b/tests/test_dockerfile.py
@@ -128,3 +128,22 @@ class TestDockerfile(unittest.TestCase):
dockerfile = f.read()
regex = re.compile(r'.*VOLUME \["/var/lib"\]\nVOLUME \["/usr/lib"\]', re.MULTILINE)
self.assertRegexpMatches(dockerfile, regex)
+
+ # https://github.com/jboss-dockerfiles/dogen/issues/124
+ def test_debug_port(self):
+ """
+ Test that cmd: is mapped into a CMD instruction
+ """
+ with open(self.yaml, 'ab') as f:
+ f.write("ports:\n - value: 8080\n - value: 9999\n expose: False".encode())
+
+ generator = Generator(self.log, self.args)
+ generator.configure()
+ generator.render_from_template()
+
+ self.assertEqual(generator.cfg['ports'], [{'value': 8080}, {'expose': False, 'value': 9999}])
+
+ with open(os.path.join(self.target, "Dockerfile"), "r") as f:
+ dockerfile = f.read()
+ regex = re.compile(r'.*EXPOSE 8080$', re.MULTILINE)
+ self.assertRegexpMatches(dockerfile, regex)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 3,
"issue_text_score": 1,
"test_score": 3
},
"num_modified_files": 4
} | 2.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"mock"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi==2025.1.31
charset-normalizer==3.4.1
docopt==0.6.2
-e git+https://github.com/jboss-dockerfiles/dogen.git@f141578aafffa0dbc7a33e34b9456e22b1750698#egg=dogen
exceptiongroup==1.2.2
idna==3.10
iniconfig==2.1.0
Jinja2==3.1.6
MarkupSafe==3.0.2
mock==5.2.0
packaging==24.2
pluggy==1.5.0
pykwalify==1.8.0
pytest==8.3.5
python-dateutil==2.9.0.post0
PyYAML==6.0.2
requests==2.32.3
ruamel.yaml==0.18.10
ruamel.yaml.clib==0.2.12
six==1.17.0
tomli==2.2.1
urllib3==2.3.0
| name: dogen
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- certifi==2025.1.31
- charset-normalizer==3.4.1
- docopt==0.6.2
- exceptiongroup==1.2.2
- idna==3.10
- iniconfig==2.1.0
- jinja2==3.1.6
- markupsafe==3.0.2
- mock==5.2.0
- packaging==24.2
- pluggy==1.5.0
- pykwalify==1.8.0
- pytest==8.3.5
- python-dateutil==2.9.0.post0
- pyyaml==6.0.2
- requests==2.32.3
- ruamel-yaml==0.18.10
- ruamel-yaml-clib==0.2.12
- six==1.17.0
- tomli==2.2.1
- urllib3==2.3.0
prefix: /opt/conda/envs/dogen
| [
"tests/test_dockerfile.py::TestDockerfile::test_debug_port"
]
| []
| [
"tests/test_dockerfile.py::TestDockerfile::test_default_cmd_user",
"tests/test_dockerfile.py::TestDockerfile::test_set_cmd",
"tests/test_dockerfile.py::TestDockerfile::test_set_cmd_user",
"tests/test_dockerfile.py::TestDockerfile::test_set_entrypoint",
"tests/test_dockerfile.py::TestDockerfile::test_volumes"
]
| []
| MIT License | 1,309 | [
"dogen/plugins/cct.py",
"dogen/schema/kwalify_schema.yaml",
"dogen/template_helper.py",
"dogen/templates/template.jinja"
]
| [
"dogen/plugins/cct.py",
"dogen/schema/kwalify_schema.yaml",
"dogen/template_helper.py",
"dogen/templates/template.jinja"
]
|
|
tobgu__pyrsistent-108 | decc5bbb11a3c795ad7553760f1bfc29f370162e | 2017-05-29 13:15:00 | decc5bbb11a3c795ad7553760f1bfc29f370162e | diff --git a/README.rst b/README.rst
index ccbd38a..2fb4135 100644
--- a/README.rst
+++ b/README.rst
@@ -444,6 +444,8 @@ treated as matchers. If the matcher returns True for a specific key it is consid
pvector([1, 2, 4, 4, 5])
>>> v1.transform([lambda ix: 0 < ix < 4], 8)
pvector([1, 8, 8, 8, 5])
+ >>> v1.transform([lambda ix, v: ix == 0 or v == 5], 0)
+ pvector([0, 2, 3, 4, 0])
# The (a)ny matcher can be used to match anything
>>> v1.transform([ny], 8)
diff --git a/pyrsistent/_transformations.py b/pyrsistent/_transformations.py
index c4c7e10..6fe31cf 100644
--- a/pyrsistent/_transformations.py
+++ b/pyrsistent/_transformations.py
@@ -1,5 +1,13 @@
import re
import six
+try:
+ from inspect import Parameter, signature
+except ImportError:
+ signature = None
+ try:
+ from inspect import getfullargspec as getargspec
+ except ImportError:
+ from inspect import getargspec
def inc(x):
@@ -74,11 +82,40 @@ def _get(structure, key, default):
def _get_keys_and_values(structure, key_spec):
from pyrsistent._pmap import pmap
if callable(key_spec):
- return [(k, v) for k, v in _items(structure) if key_spec(k)]
-
+ # Support predicates as callable objects in the path
+ arity = _get_arity(key_spec)
+ if arity == 1:
+ # Unary predicates are called with the "key" of the path
+ # - eg a key in a mapping, an index in a sequence.
+ return [(k, v) for k, v in _items(structure) if key_spec(k)]
+ elif arity == 2:
+ # Binary predicates are called with the key and the corresponding
+ # value.
+ return [(k, v) for k, v in _items(structure) if key_spec(k, v)]
+ else:
+ # Other arities are an error.
+ raise ValueError(
+ "callable in transform path must take 1 or 2 arguments"
+ )
+
+ # Non-callables are used as-is as a key.
return [(key_spec, _get(structure, key_spec, pmap()))]
+if signature is None:
+ def _get_arity(f):
+ argspec = getargspec(f)
+ return len(argspec.args) - len(argspec.defaults or ())
+else:
+ def _get_arity(f):
+ return sum(
+ 1
+ for p
+ in signature(f).parameters.values()
+ if p.default is Parameter.empty
+ and p.kind in (Parameter.POSITIONAL_ONLY, Parameter.POSITIONAL_OR_KEYWORD)
+ )
+
def _update_structure(structure, kvs, path, command):
e = structure.evolver()
if not path and command is discard:
| Allow predicates on values in transform paths for pvectors and pmaps
I often want to write transformations like:
```
# Double even elements of the list
xs = pvector([1, 3, 2, 4])
assert xs.transform(lambda x: x % 2 == 0, lambda x: x * 2) == pvector([1, 3, 4, 8])
```
Particular with more complex paths:
```
xs = pmap({"foo": pvector([1, 3, 2, 4]), "bar": pvector([1, 2, 3, 4])})
assert xs.transform(
["foo", lambda x: x % 2 == 0],
lambda x: x * 2,
) == pmap({"foo": pvector([1, 3, 4, 8]), "bar": pvector([1, 2, 3, 4])})
```
I can often cobble together something that deals with the above two scenarios. It usually involves duplicating some information from the path but it's kind of manageable.
Where I really have trouble is when I want to use this missing feature twice or more within a single path:
```
xs = pmap({"foo": pvector([1, 3, 2, 4]), "bar": pvector([1, 2])})
assert xs.transform(
[lambda (k, v): len(v) == 4, lambda x: x % 2 == 0],
lambda x: x * 2,
) == pmap({"foo": pvector([1, 3, 4, 8]), "bar": pvector([1, 2])})
``` | tobgu/pyrsistent | diff --git a/tests/transform_test.py b/tests/transform_test.py
index 490ce9b..32e1772 100644
--- a/tests/transform_test.py
+++ b/tests/transform_test.py
@@ -3,7 +3,7 @@ from pyrsistent import freeze, inc, discard, rex, ny, field, PClass
def test_callable_command():
m = freeze({'foo': {'bar': {'baz': 1}}})
- m.transform(['foo', 'bar', 'baz'], inc) == {'foo': {'bar': {'baz': 2}}}
+ assert m.transform(['foo', 'bar', 'baz'], inc) == {'foo': {'bar': {'baz': 2}}}
def test_predicate():
@@ -11,6 +11,32 @@ def test_predicate():
assert m.transform(['foo', lambda x: x.startswith('b'), 'baz'], inc) == {'foo': {'bar': {'baz': 2}, 'qux': {'baz': 1}}}
+def test_broken_predicate():
+ broken_predicates = [
+ lambda: None,
+ lambda a, b, c: None,
+ lambda a, b, c, d=None: None,
+ lambda *args: None,
+ lambda **kwargs: None,
+ ]
+ for pred in broken_predicates:
+ try:
+ freeze({}).transform([pred], None)
+ assert False
+ except ValueError as e:
+ assert str(e) == "callable in transform path must take 1 or 2 arguments"
+
+
+def test_key_value_predicate():
+ m = freeze({
+ 'foo': 1,
+ 'bar': 2,
+ })
+ assert m.transform([
+ lambda k, v: (k, v) == ('foo', 1),
+ ], lambda v: v * 3) == {"foo": 3, "bar": 2}
+
+
def test_remove():
m = freeze({'foo': {'bar': {'baz': 1}}})
assert m.transform(['foo', 'bar', 'baz'], discard) == {'foo': {'bar': {}}}
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 3
},
"num_modified_files": 2
} | 0.12 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.16
babel==2.17.0
cachetools==5.5.2
certifi==2025.1.31
chardet==5.2.0
charset-normalizer==3.4.1
colorama==0.4.6
coverage==7.8.0
distlib==0.3.9
docutils==0.21.2
exceptiongroup==1.2.2
filelock==3.18.0
hypothesis==2.0.0
idna==3.10
imagesize==1.4.1
importlib_metadata==8.6.1
iniconfig==2.1.0
Jinja2==3.1.6
MarkupSafe==3.0.2
memory_profiler==0.31
packaging==24.2
platformdirs==4.3.7
pluggy==1.5.0
psutil==2.1.1
py==1.11.0
Pygments==2.19.1
pyperform==1.86
pyproject-api==1.9.0
-e git+https://github.com/tobgu/pyrsistent.git@decc5bbb11a3c795ad7553760f1bfc29f370162e#egg=pyrsistent
pytest==8.3.5
pytest-cov==6.0.0
requests==2.32.3
six==1.17.0
snowballstemmer==2.2.0
Sphinx==7.4.7
sphinx_rtd_theme==0.1.5
sphinxcontrib-applehelp==2.0.0
sphinxcontrib-devhelp==2.0.0
sphinxcontrib-htmlhelp==2.1.0
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==2.0.0
sphinxcontrib-serializinghtml==2.0.0
tomli==2.2.1
tox==4.25.0
typing_extensions==4.13.0
urllib3==2.3.0
virtualenv==20.29.3
zipp==3.21.0
| name: pyrsistent
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.16
- babel==2.17.0
- cachetools==5.5.2
- certifi==2025.1.31
- chardet==5.2.0
- charset-normalizer==3.4.1
- colorama==0.4.6
- coverage==7.8.0
- distlib==0.3.9
- docutils==0.21.2
- exceptiongroup==1.2.2
- filelock==3.18.0
- hypothesis==2.0.0
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- jinja2==3.1.6
- markupsafe==3.0.2
- memory-profiler==0.31
- packaging==24.2
- platformdirs==4.3.7
- pluggy==1.5.0
- psutil==2.1.1
- py==1.11.0
- pygments==2.19.1
- pyperform==1.86
- pyproject-api==1.9.0
- pytest==8.3.5
- pytest-cov==6.0.0
- requests==2.32.3
- six==1.17.0
- snowballstemmer==2.2.0
- sphinx==7.4.7
- sphinx-rtd-theme==0.1.5
- sphinxcontrib-applehelp==2.0.0
- sphinxcontrib-devhelp==2.0.0
- sphinxcontrib-htmlhelp==2.1.0
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==2.0.0
- sphinxcontrib-serializinghtml==2.0.0
- tomli==2.2.1
- tox==4.25.0
- typing-extensions==4.13.0
- urllib3==2.3.0
- virtualenv==20.29.3
- zipp==3.21.0
prefix: /opt/conda/envs/pyrsistent
| [
"tests/transform_test.py::test_broken_predicate",
"tests/transform_test.py::test_key_value_predicate"
]
| []
| [
"tests/transform_test.py::test_callable_command",
"tests/transform_test.py::test_predicate",
"tests/transform_test.py::test_remove",
"tests/transform_test.py::test_remove_pvector",
"tests/transform_test.py::test_remove_pclass",
"tests/transform_test.py::test_predicate_no_match",
"tests/transform_test.py::test_rex_predicate",
"tests/transform_test.py::test_rex_with_non_string_key",
"tests/transform_test.py::test_ny_predicated_matches_any_key",
"tests/transform_test.py::test_new_elements_created_when_missing",
"tests/transform_test.py::test_mixed_vector_and_map",
"tests/transform_test.py::test_vector_predicate_callable_command",
"tests/transform_test.py::test_vector_insert_map_one_step_beyond_end",
"tests/transform_test.py::test_multiple_transformations",
"tests/transform_test.py::test_no_transformation_returns_the_same_structure",
"tests/transform_test.py::test_discard_multiple_elements_in_pvector"
]
| []
| MIT License | 1,310 | [
"README.rst",
"pyrsistent/_transformations.py"
]
| [
"README.rst",
"pyrsistent/_transformations.py"
]
|
|
PyCQA__pyflakes-273 | 1af4f14ad4675bf5c61c47bbb7c2421b50d1cba4 | 2017-05-29 15:56:21 | 1af4f14ad4675bf5c61c47bbb7c2421b50d1cba4 | diff --git a/.travis.yml b/.travis.yml
index 1ea3e20..2d614cd 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -11,9 +11,6 @@ python:
- pypy-5.3
- pypy3
- pypy3.3-5.2-alpha1
-matrix:
- allow_failures:
- - python: nightly
install:
- pip install flake8==2.1.0 pep8==1.5.6
- python setup.py install
diff --git a/README.rst b/README.rst
index e84d334..aeb15f9 100644
--- a/README.rst
+++ b/README.rst
@@ -72,7 +72,7 @@ rebase your commits for you.
All changes should include tests and pass flake8_.
-.. image:: https://api.travis-ci.org/PyCQA/pyflakes.svg
+.. image:: https://api.travis-ci.org/PyCQA/pyflakes.svg?branch=master
:target: https://travis-ci.org/PyCQA/pyflakes
:alt: Build status
diff --git a/pyflakes/api.py b/pyflakes/api.py
index a535bff..49ee38d 100644
--- a/pyflakes/api.py
+++ b/pyflakes/api.py
@@ -5,6 +5,7 @@ from __future__ import with_statement
import sys
import os
+import re
import _ast
from pyflakes import checker, __version__
@@ -13,6 +14,9 @@ from pyflakes import reporter as modReporter
__all__ = ['check', 'checkPath', 'checkRecursive', 'iterSourceCode', 'main']
+PYTHON_SHEBANG_REGEX = re.compile(br'^#!.*\bpython[23w]?\b\s*$')
+
+
def check(codeString, filename, reporter=None):
"""
Check the Python source given by C{codeString} for flakes.
@@ -108,6 +112,25 @@ def checkPath(filename, reporter=None):
return check(codestr, filename, reporter)
+def isPythonFile(filename):
+ """Return True if filename points to a Python file."""
+ if filename.endswith('.py'):
+ return True
+
+ max_bytes = 128
+
+ try:
+ with open(filename, 'rb') as f:
+ text = f.read(max_bytes)
+ if not text:
+ return False
+ except IOError:
+ return False
+
+ first_line = text.splitlines()[0]
+ return PYTHON_SHEBANG_REGEX.match(first_line)
+
+
def iterSourceCode(paths):
"""
Iterate over all Python source files in C{paths}.
@@ -120,8 +143,9 @@ def iterSourceCode(paths):
if os.path.isdir(path):
for dirpath, dirnames, filenames in os.walk(path):
for filename in filenames:
- if filename.endswith('.py'):
- yield os.path.join(dirpath, filename)
+ full_path = os.path.join(dirpath, filename)
+ if isPythonFile(full_path):
+ yield full_path
else:
yield path
diff --git a/pyflakes/checker.py b/pyflakes/checker.py
index 382574e..75abdc0 100644
--- a/pyflakes/checker.py
+++ b/pyflakes/checker.py
@@ -870,7 +870,19 @@ class Checker(object):
def handleDoctests(self, node):
try:
- (docstring, node_lineno) = self.getDocstring(node.body[0])
+ if hasattr(node, 'docstring'):
+ docstring = node.docstring
+
+ # This is just a reasonable guess. In Python 3.7, docstrings no
+ # longer have line numbers associated with them. This will be
+ # incorrect if there are empty lines between the beginning
+ # of the function and the docstring.
+ node_lineno = node.lineno
+ if hasattr(node, 'args'):
+ node_lineno = max([node_lineno] +
+ [arg.lineno for arg in node.args.args])
+ else:
+ (docstring, node_lineno) = self.getDocstring(node.body[0])
examples = docstring and self._getDoctestExamples(docstring)
except (ValueError, IndexError):
# e.g. line 6 of the docstring for <string> has inconsistent
diff --git a/setup.cfg b/setup.cfg
index 5e40900..2a9acf1 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -1,2 +1,2 @@
-[wheel]
+[bdist_wheel]
universal = 1
| Support Python 3.7
*I'm moving https://github.com/PyCQA/pyflakes/pull/90#issuecomment-301317699 here since we have GitHub issue support now.*
It looks like the [CPython 3.7 failure](https://travis-ci.org/PyCQA/pyflakes/jobs/231571260) is related to a change in the AST. The docstring is no longer in `node.body`.
https://github.com/python/cpython/pull/46
We can mostly replace our `getDocstring(node.body[0])` call with `node.docstring`. But I'm not sure how to get the line number of the docstring anymore. `node.lineno` is close, but not quite the line number of the docstring.
https://github.com/PyCQA/pyflakes/blob/074c21da3174bc88b2ec02fea767a436e2db643d/pyflakes/checker.py#L873
```diff
diff --git a/pyflakes/checker.py b/pyflakes/checker.py
index 382574e..2c19533 100644
--- a/pyflakes/checker.py
+++ b/pyflakes/checker.py
@@ -870,7 +870,11 @@ class Checker(object):
def handleDoctests(self, node):
try:
- (docstring, node_lineno) = self.getDocstring(node.body[0])
+ if hasattr(node, 'docstring'):
+ docstring = node.docstring
+ node_lineno = ??? # TODO
+ else:
+ (docstring, node_lineno) = self.getDocstring(node.body[0])
examples = docstring and self._getDoctestExamples(docstring)
except (ValueError, IndexError):
# e.g. line 6 of the docstring for <string> has inconsistent
```
I've reported this to the [CPython issue tracker](http://bugs.python.org/issue30497). | PyCQA/pyflakes | diff --git a/pyflakes/test/test_api.py b/pyflakes/test/test_api.py
index 51b0027..3f54ca4 100644
--- a/pyflakes/test/test_api.py
+++ b/pyflakes/test/test_api.py
@@ -187,6 +187,36 @@ class TestIterSourceCode(TestCase):
sorted(iterSourceCode([self.tempdir])),
sorted([apath, bpath, cpath]))
+ def test_shebang(self):
+ """
+ Find Python files that don't end with `.py`, but contain a Python
+ shebang.
+ """
+ python = os.path.join(self.tempdir, 'a')
+ with open(python, 'w') as fd:
+ fd.write('#!/usr/bin/env python\n')
+
+ self.makeEmptyFile('b')
+
+ with open(os.path.join(self.tempdir, 'c'), 'w') as fd:
+ fd.write('hello\nworld\n')
+
+ python2 = os.path.join(self.tempdir, 'd')
+ with open(python2, 'w') as fd:
+ fd.write('#!/usr/bin/env python2\n')
+
+ python3 = os.path.join(self.tempdir, 'e')
+ with open(python3, 'w') as fd:
+ fd.write('#!/usr/bin/env python3\n')
+
+ pythonw = os.path.join(self.tempdir, 'f')
+ with open(pythonw, 'w') as fd:
+ fd.write('#!/usr/bin/env pythonw\n')
+
+ self.assertEqual(
+ sorted(iterSourceCode([self.tempdir])),
+ sorted([python, python2, python3, pythonw]))
+
def test_multipleDirectories(self):
"""
L{iterSourceCode} can be given multiple directories. It will recurse
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 3
},
"num_modified_files": 5
} | 1.5 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"flake8",
"pep8",
"pytest"
],
"pre_install": null,
"python": "3.6",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
certifi==2021.5.30
flake8==5.0.4
importlib-metadata==4.2.0
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
mccabe==0.7.0
more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
pep8==1.7.1
pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pycodestyle==2.9.1
-e git+https://github.com/PyCQA/pyflakes.git@1af4f14ad4675bf5c61c47bbb7c2421b50d1cba4#egg=pyflakes
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
pytest==6.2.4
toml @ file:///tmp/build/80754af9/toml_1616166611790/work
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
| name: pyflakes
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=21.4.0=pyhd3eb1b0_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- importlib_metadata=4.8.1=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- more-itertools=8.12.0=pyhd3eb1b0_0
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pip=21.2.2=py36h06a4308_0
- pluggy=0.13.1=py36h06a4308_0
- py=1.11.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pytest=6.2.4=py36h06a4308_2
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- toml=0.10.2=pyhd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zipp=3.6.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
- pip:
- flake8==5.0.4
- importlib-metadata==4.2.0
- mccabe==0.7.0
- pep8==1.7.1
- pycodestyle==2.9.1
prefix: /opt/conda/envs/pyflakes
| [
"pyflakes/test/test_api.py::TestIterSourceCode::test_shebang"
]
| []
| [
"pyflakes/test/test_api.py::TestIterSourceCode::test_emptyDirectory",
"pyflakes/test/test_api.py::TestIterSourceCode::test_explicitFiles",
"pyflakes/test/test_api.py::TestIterSourceCode::test_multipleDirectories",
"pyflakes/test/test_api.py::TestIterSourceCode::test_onlyPythonSource",
"pyflakes/test/test_api.py::TestIterSourceCode::test_recurses",
"pyflakes/test/test_api.py::TestIterSourceCode::test_singleFile",
"pyflakes/test/test_api.py::TestReporter::test_flake",
"pyflakes/test/test_api.py::TestReporter::test_multiLineSyntaxError",
"pyflakes/test/test_api.py::TestReporter::test_syntaxError",
"pyflakes/test/test_api.py::TestReporter::test_syntaxErrorNoOffset",
"pyflakes/test/test_api.py::TestReporter::test_unexpectedError",
"pyflakes/test/test_api.py::CheckTests::test_CRLFLineEndings",
"pyflakes/test/test_api.py::CheckTests::test_checkPathNonExisting",
"pyflakes/test/test_api.py::CheckTests::test_checkRecursive",
"pyflakes/test/test_api.py::CheckTests::test_encodedFileUTF8",
"pyflakes/test/test_api.py::CheckTests::test_eofSyntaxError",
"pyflakes/test/test_api.py::CheckTests::test_eofSyntaxErrorWithTab",
"pyflakes/test/test_api.py::CheckTests::test_invalidEscape",
"pyflakes/test/test_api.py::CheckTests::test_legacyScript",
"pyflakes/test/test_api.py::CheckTests::test_misencodedFileUTF16",
"pyflakes/test/test_api.py::CheckTests::test_misencodedFileUTF8",
"pyflakes/test/test_api.py::CheckTests::test_missingTrailingNewline",
"pyflakes/test/test_api.py::CheckTests::test_multilineSyntaxError",
"pyflakes/test/test_api.py::CheckTests::test_nonDefaultFollowsDefaultSyntaxError",
"pyflakes/test/test_api.py::CheckTests::test_nonKeywordAfterKeywordSyntaxError",
"pyflakes/test/test_api.py::CheckTests::test_pyflakesWarning",
"pyflakes/test/test_api.py::IntegrationTests::test_errors_io",
"pyflakes/test/test_api.py::IntegrationTests::test_errors_syntax",
"pyflakes/test/test_api.py::IntegrationTests::test_fileWithFlakes",
"pyflakes/test/test_api.py::IntegrationTests::test_goodFile",
"pyflakes/test/test_api.py::IntegrationTests::test_readFromStdin",
"pyflakes/test/test_api.py::TestMain::test_errors_io",
"pyflakes/test/test_api.py::TestMain::test_errors_syntax",
"pyflakes/test/test_api.py::TestMain::test_fileWithFlakes",
"pyflakes/test/test_api.py::TestMain::test_goodFile",
"pyflakes/test/test_api.py::TestMain::test_readFromStdin"
]
| []
| MIT License | 1,311 | [
"README.rst",
".travis.yml",
"setup.cfg",
"pyflakes/api.py",
"pyflakes/checker.py"
]
| [
"README.rst",
".travis.yml",
"setup.cfg",
"pyflakes/api.py",
"pyflakes/checker.py"
]
|
|
rabitt__pysox-55 | 6347273c53907075fa0d2ed5891ac9364d7a2b0e | 2017-05-29 17:55:46 | 8a6748d32b6917d5ef920895fbfc734dda21f294 | diff --git a/docs/changes.rst b/docs/changes.rst
index 920d6fc..34e354e 100644
--- a/docs/changes.rst
+++ b/docs/changes.rst
@@ -8,4 +8,19 @@ v0.1
v1.1.8
~~~~~~
-- Move specification of input/output file arguments from __init__ to .build()
\ No newline at end of file
+- Move specification of input/output file arguments from __init__ to .build()
+
+v1.3.0
+~~~~~~
+- patched core sox call to work on Windows
+- added remix
+- added gain to mcompand
+- fixed scientific notation format bug
+- allow null output filepaths in `build`
+- added ability to capture `build` outputs to stdout and stderr
+- added `power_spectrum`
+- added `stat`
+- added `clear` method
+- added `noiseprof` and `noisered` effects
+- added `vol` effect
+- fixed `Combiner.preview()`
\ No newline at end of file
diff --git a/sox/combine.py b/sox/combine.py
index 1a50272..2103bb7 100644
--- a/sox/combine.py
+++ b/sox/combine.py
@@ -15,6 +15,7 @@ from .core import ENCODING_VALS
from .core import enquote_filepath
from .core import is_number
from .core import sox
+from .core import play
from .core import SoxError
from .core import SoxiError
from .core import VALID_FORMATS
@@ -110,6 +111,45 @@ class Combiner(Transformer):
logging.info("[SoX] {}".format(out))
return True
+ def preview(self, input_filepath_list, combine_type, input_volumes=None):
+ '''Play a preview of the output with the current set of effects
+
+ Parameters
+ ----------
+ input_filepath_list : list of str
+ List of paths to input audio files.
+ combine_type : str
+ Input file combining method. One of the following values:
+ * concatenate : combine input files by concatenating in the
+ order given.
+ * merge : combine input files by stacking each input file into
+ a new channel of the output file.
+ * mix : combine input files by summing samples in corresponding
+ channels.
+ * mix-power : combine input files with volume adjustments such
+ that the output volume is roughly equivlent to one of the
+ input signals.
+ * multiply : combine input files by multiplying samples in
+ corresponding samples.
+ input_volumes : list of float, default=None
+ List of volumes to be applied upon combining input files. Volumes
+ are applied to the input files in order.
+ If None, input files will be combined at their original volumes.
+
+ '''
+ args = ["play", "--no-show-progress"]
+ args.extend(self.globals)
+ args.extend(['--combine', combine_type])
+
+ input_format_list = _build_input_format_list(
+ input_filepath_list, input_volumes, self.input_format
+ )
+ input_args = _build_input_args(input_filepath_list, input_format_list)
+ args.extend(input_args)
+ args.extend(self.effects)
+
+ play(args)
+
def set_input_format(self, file_type=None, rate=None, bits=None,
channels=None, encoding=None, ignore_length=None):
'''Sets input file format arguments. This is primarily useful when
diff --git a/sox/file_info.py b/sox/file_info.py
index bcf368c..a66d9aa 100644
--- a/sox/file_info.py
+++ b/sox/file_info.py
@@ -258,8 +258,7 @@ def validate_output_file(output_filepath):
'''
nowrite_conditions = [
- bool(os.path.dirname(output_filepath)) or\
- not os.access(os.getcwd(), os.W_OK),
+ bool(os.path.dirname(output_filepath)),
not os.access(os.path.dirname(output_filepath), os.W_OK)]
if all(nowrite_conditions):
diff --git a/sox/transform.py b/sox/transform.py
index 74be29a..c694fee 100644
--- a/sox/transform.py
+++ b/sox/transform.py
@@ -458,6 +458,12 @@ class Transformer(object):
def preview(self, input_filepath):
'''Play a preview of the output with the current set of effects
+
+ Parameters
+ ----------
+ input_filepath : str
+ Path to input audio file.
+
'''
args = ["play", "--no-show-progress"]
args.extend(self.globals)
@@ -1875,15 +1881,10 @@ class Transformer(object):
'''
if os.path.isdir(profile_path):
- raise ValueError("profile_path {} is a directory, but filename should be specified.")
+ raise ValueError("profile_path {} is a directory, but should be a file")
- if os.path.dirname(profile_path) == '' and profile_path != '':
- _abs_profile_path = os.path.join(os.getcwd(), profile_path)
- else:
- _abs_profile_path = profile_path
-
- if not os.access(os.path.dirname(_abs_profile_path), os.W_OK):
- raise IOError("profile_path {} is not writeable.".format(_abs_profile_path))
+ if not os.access(os.path.dirname(profile_path), os.W_OK):
+ raise IOError("profile_path {} is not writeable.".format(profile_path))
effect_args = ['noiseprof', profile_path]
self.build(input_filepath, None, extra_args=effect_args)
diff --git a/sox/version.py b/sox/version.py
index 6d16084..83db3e9 100644
--- a/sox/version.py
+++ b/sox/version.py
@@ -2,5 +2,5 @@
# -*- coding: utf-8 -*-
"""Version info"""
-short_version = '1.2'
-version = '1.2.9'
+short_version = '1.3'
+version = '1.3.0'
| Combiner.preview() fails
`Combiner` inherits `preview` from `Transformer` but it needs to be overwritten because the base call is different for multiple inputs. | rabitt/pysox | diff --git a/tests/test_combine.py b/tests/test_combine.py
index 36985bf..c0fdea6 100644
--- a/tests/test_combine.py
+++ b/tests/test_combine.py
@@ -379,6 +379,22 @@ class TestBuildInputFormatList(unittest.TestCase):
self.assertEqual(expected, actual)
+class TestCombinePreview(unittest.TestCase):
+ def setUp(self):
+ self.cbn = new_combiner()
+ self.cbn.trim(0, 0.1)
+
+ def test_valid(self):
+ expected = None
+ actual = self.cbn.preview([INPUT_WAV, INPUT_WAV], 'mix')
+ self.assertEqual(expected, actual)
+
+ def test_valid_vol(self):
+ expected = None
+ actual = self.cbn.preview([INPUT_WAV, INPUT_WAV], 'mix', [1.0, 0.5])
+ self.assertEqual(expected, actual)
+
+
class TestBuildInputArgs(unittest.TestCase):
def test_unequal_length(self):
diff --git a/tests/test_transform.py b/tests/test_transform.py
index 1f7795a..3ad7533 100644
--- a/tests/test_transform.py
+++ b/tests/test_transform.py
@@ -2679,14 +2679,6 @@ class TestTransformerNoiseprof(unittest.TestCase):
with self.assertRaises(IOError):
tfm.noiseprof(INPUT_FILE, '/usr/noise.prof')
- def test_noise_prof_invalid_cwd(self):
- tfm = new_transformer()
- _cwd = os.getcwd()
- os.chdir('/')
- with self.assertRaises(IOError):
- tfm.noiseprof(INPUT_FILE, 'noise.prof')
- os.chdir(_cwd)
-
class TestTransformerNoisered(unittest.TestCase):
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 1
},
"num_modified_files": 5
} | 1.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[tests]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-pep8"
],
"pre_install": [
"apt-get update",
"apt-get install -y sox"
],
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | coverage==7.8.0
exceptiongroup==1.2.2
execnet==2.1.1
iniconfig==2.1.0
packaging==24.2
pep8==1.7.1
pluggy==1.5.0
pytest==8.3.5
pytest-cache==1.0
pytest-cov==6.0.0
pytest-pep8==1.0.6
-e git+https://github.com/rabitt/pysox.git@6347273c53907075fa0d2ed5891ac9364d7a2b0e#egg=sox
tomli==2.2.1
| name: pysox
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- coverage==7.8.0
- exceptiongroup==1.2.2
- execnet==2.1.1
- iniconfig==2.1.0
- packaging==24.2
- pep8==1.7.1
- pluggy==1.5.0
- pytest==8.3.5
- pytest-cache==1.0
- pytest-cov==6.0.0
- pytest-pep8==1.0.6
- tomli==2.2.1
prefix: /opt/conda/envs/pysox
| [
"tests/test_combine.py::TestCombinePreview::test_valid",
"tests/test_combine.py::TestCombinePreview::test_valid_vol"
]
| [
"tests/test_transform.py::TestTransformerNoiseprof::test_noise_prof_invalid_write",
"tests/test_transform.py::TestTransformerNoisered::test_noise_prof_invalid"
]
| [
"tests/test_combine.py::TestCombineDefault::test_build",
"tests/test_combine.py::TestCombineDefault::test_build_with_vols",
"tests/test_combine.py::TestCombineDefault::test_effects",
"tests/test_combine.py::TestCombineDefault::test_effects_log",
"tests/test_combine.py::TestCombineDefault::test_failed_build",
"tests/test_combine.py::TestCombineDefault::test_globals",
"tests/test_combine.py::TestCombineDefault::test_output_format",
"tests/test_combine.py::TestCombineTypes::test_concatenate",
"tests/test_combine.py::TestCombineTypes::test_merge",
"tests/test_combine.py::TestCombineTypes::test_mix",
"tests/test_combine.py::TestCombineTypes::test_mixpower",
"tests/test_combine.py::TestCombineTypes::test_multiply",
"tests/test_combine.py::TestSetInputFormat::test_bits",
"tests/test_combine.py::TestSetInputFormat::test_build_greater_len",
"tests/test_combine.py::TestSetInputFormat::test_build_greater_len_vol",
"tests/test_combine.py::TestSetInputFormat::test_build_lesser_len",
"tests/test_combine.py::TestSetInputFormat::test_build_lesser_len_vol",
"tests/test_combine.py::TestSetInputFormat::test_build_same_len",
"tests/test_combine.py::TestSetInputFormat::test_build_same_len_vol",
"tests/test_combine.py::TestSetInputFormat::test_channels",
"tests/test_combine.py::TestSetInputFormat::test_encoding",
"tests/test_combine.py::TestSetInputFormat::test_file_type",
"tests/test_combine.py::TestSetInputFormat::test_ignore_length",
"tests/test_combine.py::TestSetInputFormat::test_invalid_bits",
"tests/test_combine.py::TestSetInputFormat::test_invalid_bits_val",
"tests/test_combine.py::TestSetInputFormat::test_invalid_channels",
"tests/test_combine.py::TestSetInputFormat::test_invalid_channels_val",
"tests/test_combine.py::TestSetInputFormat::test_invalid_encoding",
"tests/test_combine.py::TestSetInputFormat::test_invalid_encoding_val",
"tests/test_combine.py::TestSetInputFormat::test_invalid_file_type",
"tests/test_combine.py::TestSetInputFormat::test_invalid_file_type_val",
"tests/test_combine.py::TestSetInputFormat::test_invalid_ignore_length",
"tests/test_combine.py::TestSetInputFormat::test_invalid_ignore_length_val",
"tests/test_combine.py::TestSetInputFormat::test_invalid_rate",
"tests/test_combine.py::TestSetInputFormat::test_invalid_rate_val",
"tests/test_combine.py::TestSetInputFormat::test_multiple_different_len",
"tests/test_combine.py::TestSetInputFormat::test_multiple_same_len",
"tests/test_combine.py::TestSetInputFormat::test_none",
"tests/test_combine.py::TestSetInputFormat::test_rate",
"tests/test_combine.py::TestValidateFileFormats::test_different_num_channels",
"tests/test_combine.py::TestValidateFileFormats::test_different_samplerates",
"tests/test_combine.py::TestValidateSampleRates::test_different_samplerates",
"tests/test_combine.py::TestValidateSampleRates::test_same_samplerates",
"tests/test_combine.py::TestValidateNumChannels::test_different_numchannels",
"tests/test_combine.py::TestValidateNumChannels::test_same_numchannels",
"tests/test_combine.py::TestBuildInputFormatList::test_equal_num_fmt",
"tests/test_combine.py::TestBuildInputFormatList::test_equal_num_vol",
"tests/test_combine.py::TestBuildInputFormatList::test_greater_num_fmt",
"tests/test_combine.py::TestBuildInputFormatList::test_greater_num_vol",
"tests/test_combine.py::TestBuildInputFormatList::test_lesser_num_fmt",
"tests/test_combine.py::TestBuildInputFormatList::test_lesser_num_vol",
"tests/test_combine.py::TestBuildInputFormatList::test_none",
"tests/test_combine.py::TestBuildInputArgs::test_basic",
"tests/test_combine.py::TestBuildInputArgs::test_unequal_length",
"tests/test_combine.py::TestValidateCombineType::test_invalid",
"tests/test_combine.py::TestValidateCombineType::test_valid",
"tests/test_combine.py::TestValidateVolumes::test_invalid_type",
"tests/test_combine.py::TestValidateVolumes::test_invalid_vol",
"tests/test_combine.py::TestValidateVolumes::test_valid_list",
"tests/test_combine.py::TestValidateVolumes::test_valid_none",
"tests/test_transform.py::TestTransformDefault::test_effects",
"tests/test_transform.py::TestTransformDefault::test_effects_log",
"tests/test_transform.py::TestTransformDefault::test_globals",
"tests/test_transform.py::TestTransformDefault::test_input_format",
"tests/test_transform.py::TestTransformDefault::test_output_format",
"tests/test_transform.py::TestTransformSetGlobals::test_defaults",
"tests/test_transform.py::TestTransformSetGlobals::test_dither",
"tests/test_transform.py::TestTransformSetGlobals::test_dither_invalid",
"tests/test_transform.py::TestTransformSetGlobals::test_guard",
"tests/test_transform.py::TestTransformSetGlobals::test_guard_invalid",
"tests/test_transform.py::TestTransformSetGlobals::test_multithread",
"tests/test_transform.py::TestTransformSetGlobals::test_multithread_invalid",
"tests/test_transform.py::TestTransformSetGlobals::test_replay_gain",
"tests/test_transform.py::TestTransformSetGlobals::test_replay_gain_invalid",
"tests/test_transform.py::TestTransformSetGlobals::test_verbosity",
"tests/test_transform.py::TestTransformSetGlobals::test_verbosity_invalid",
"tests/test_transform.py::TestTransformSetInputFormat::test_bits",
"tests/test_transform.py::TestTransformSetInputFormat::test_bits_invalid",
"tests/test_transform.py::TestTransformSetInputFormat::test_bits_invalid2",
"tests/test_transform.py::TestTransformSetInputFormat::test_channels",
"tests/test_transform.py::TestTransformSetInputFormat::test_channels_invalid",
"tests/test_transform.py::TestTransformSetInputFormat::test_channels_invalid2",
"tests/test_transform.py::TestTransformSetInputFormat::test_defaults",
"tests/test_transform.py::TestTransformSetInputFormat::test_encoding",
"tests/test_transform.py::TestTransformSetInputFormat::test_encoding_invalid",
"tests/test_transform.py::TestTransformSetInputFormat::test_file_type",
"tests/test_transform.py::TestTransformSetInputFormat::test_file_type_invalid",
"tests/test_transform.py::TestTransformSetInputFormat::test_ignore_length",
"tests/test_transform.py::TestTransformSetInputFormat::test_ignore_length_invalid",
"tests/test_transform.py::TestTransformSetInputFormat::test_rate",
"tests/test_transform.py::TestTransformSetInputFormat::test_rate_invalid",
"tests/test_transform.py::TestTransformSetInputFormat::test_rate_invalid2",
"tests/test_transform.py::TestTransformSetInputFormat::test_rate_scinotation",
"tests/test_transform.py::TestTransformSetOutputFormat::test_append_comments",
"tests/test_transform.py::TestTransformSetOutputFormat::test_append_comments_invalid",
"tests/test_transform.py::TestTransformSetOutputFormat::test_bits",
"tests/test_transform.py::TestTransformSetOutputFormat::test_bits_invalid",
"tests/test_transform.py::TestTransformSetOutputFormat::test_bits_invalid2",
"tests/test_transform.py::TestTransformSetOutputFormat::test_channels",
"tests/test_transform.py::TestTransformSetOutputFormat::test_channels_invalid",
"tests/test_transform.py::TestTransformSetOutputFormat::test_channels_invalid2",
"tests/test_transform.py::TestTransformSetOutputFormat::test_comments",
"tests/test_transform.py::TestTransformSetOutputFormat::test_comments_invalid",
"tests/test_transform.py::TestTransformSetOutputFormat::test_defaults",
"tests/test_transform.py::TestTransformSetOutputFormat::test_encoding",
"tests/test_transform.py::TestTransformSetOutputFormat::test_encoding_invalid",
"tests/test_transform.py::TestTransformSetOutputFormat::test_file_type",
"tests/test_transform.py::TestTransformSetOutputFormat::test_file_type_invalid",
"tests/test_transform.py::TestTransformSetOutputFormat::test_file_type_null_output",
"tests/test_transform.py::TestTransformSetOutputFormat::test_rate",
"tests/test_transform.py::TestTransformSetOutputFormat::test_rate_invalid",
"tests/test_transform.py::TestTransformSetOutputFormat::test_rate_invalid2",
"tests/test_transform.py::TestTransformSetOutputFormat::test_rate_scinotation",
"tests/test_transform.py::TestTransformerBuild::test_extra_arg",
"tests/test_transform.py::TestTransformerBuild::test_extra_args_invalid",
"tests/test_transform.py::TestTransformerBuild::test_failed_sox",
"tests/test_transform.py::TestTransformerBuild::test_input_output_equal",
"tests/test_transform.py::TestTransformerBuild::test_invalid",
"tests/test_transform.py::TestTransformerBuild::test_null_output",
"tests/test_transform.py::TestTransformerBuild::test_return_outputs",
"tests/test_transform.py::TestTransformerBuild::test_return_outputs_err",
"tests/test_transform.py::TestTransformerBuild::test_valid",
"tests/test_transform.py::TestTransformerBuild::test_valid_spacey",
"tests/test_transform.py::TestTransformerClearEffects::test_clear",
"tests/test_transform.py::TestTransformerPreview::test_valid",
"tests/test_transform.py::TestTransformerAllpass::test_default",
"tests/test_transform.py::TestTransformerAllpass::test_frequency_invalid",
"tests/test_transform.py::TestTransformerAllpass::test_width_q_invalid",
"tests/test_transform.py::TestTransformerBandpass::test_constant_skirt",
"tests/test_transform.py::TestTransformerBandpass::test_constant_skirt_invalid",
"tests/test_transform.py::TestTransformerBandpass::test_default",
"tests/test_transform.py::TestTransformerBandpass::test_frequency_invalid",
"tests/test_transform.py::TestTransformerBandpass::test_width_q_invalid",
"tests/test_transform.py::TestTransformerBandreject::test_default",
"tests/test_transform.py::TestTransformerBandreject::test_frequency_invalid",
"tests/test_transform.py::TestTransformerBandreject::test_width_q_invalid",
"tests/test_transform.py::TestTransformerBass::test_default",
"tests/test_transform.py::TestTransformerBass::test_frequency_invalid",
"tests/test_transform.py::TestTransformerBass::test_gain_db_invalid",
"tests/test_transform.py::TestTransformerBass::test_slope_invalid",
"tests/test_transform.py::TestTransformerBend::test_cents_invalid_len",
"tests/test_transform.py::TestTransformerBend::test_cents_invalid_nonlist",
"tests/test_transform.py::TestTransformerBend::test_cents_invalid_vals",
"tests/test_transform.py::TestTransformerBend::test_default",
"tests/test_transform.py::TestTransformerBend::test_end_times_invalid_len",
"tests/test_transform.py::TestTransformerBend::test_end_times_invalid_nonlist",
"tests/test_transform.py::TestTransformerBend::test_end_times_invalid_order",
"tests/test_transform.py::TestTransformerBend::test_end_times_invalid_vals",
"tests/test_transform.py::TestTransformerBend::test_frame_rate_invalid",
"tests/test_transform.py::TestTransformerBend::test_frame_rate_valid",
"tests/test_transform.py::TestTransformerBend::test_n_bends_invalid",
"tests/test_transform.py::TestTransformerBend::test_overlapping_intervals",
"tests/test_transform.py::TestTransformerBend::test_oversample_rate_invalid",
"tests/test_transform.py::TestTransformerBend::test_oversample_rate_valid",
"tests/test_transform.py::TestTransformerBend::test_start_greater_end",
"tests/test_transform.py::TestTransformerBend::test_start_times_invalid_len",
"tests/test_transform.py::TestTransformerBend::test_start_times_invalid_nonlist",
"tests/test_transform.py::TestTransformerBend::test_start_times_invalid_order",
"tests/test_transform.py::TestTransformerBend::test_start_times_invalid_vals",
"tests/test_transform.py::TestTransformerBiquad::test_a_non_num",
"tests/test_transform.py::TestTransformerBiquad::test_a_nonlist",
"tests/test_transform.py::TestTransformerBiquad::test_a_wrong_len",
"tests/test_transform.py::TestTransformerBiquad::test_b_non_num",
"tests/test_transform.py::TestTransformerBiquad::test_b_nonlist",
"tests/test_transform.py::TestTransformerBiquad::test_b_wrong_len",
"tests/test_transform.py::TestTransformerBiquad::test_default",
"tests/test_transform.py::TestTransformerChannels::test_default",
"tests/test_transform.py::TestTransformerChannels::test_invalid_nchannels",
"tests/test_transform.py::TestTransformerChorus::test_default",
"tests/test_transform.py::TestTransformerChorus::test_explicit_args",
"tests/test_transform.py::TestTransformerChorus::test_invalid_decays",
"tests/test_transform.py::TestTransformerChorus::test_invalid_decays_vals",
"tests/test_transform.py::TestTransformerChorus::test_invalid_decays_wronglen",
"tests/test_transform.py::TestTransformerChorus::test_invalid_delays",
"tests/test_transform.py::TestTransformerChorus::test_invalid_delays_vals",
"tests/test_transform.py::TestTransformerChorus::test_invalid_delays_wronglen",
"tests/test_transform.py::TestTransformerChorus::test_invalid_depths",
"tests/test_transform.py::TestTransformerChorus::test_invalid_depths_vals",
"tests/test_transform.py::TestTransformerChorus::test_invalid_depths_wronglen",
"tests/test_transform.py::TestTransformerChorus::test_invalid_gain_in",
"tests/test_transform.py::TestTransformerChorus::test_invalid_gain_out",
"tests/test_transform.py::TestTransformerChorus::test_invalid_n_voices",
"tests/test_transform.py::TestTransformerChorus::test_invalid_shapes",
"tests/test_transform.py::TestTransformerChorus::test_invalid_shapes_vals",
"tests/test_transform.py::TestTransformerChorus::test_invalid_shapes_wronglen",
"tests/test_transform.py::TestTransformerChorus::test_invalid_speeds",
"tests/test_transform.py::TestTransformerChorus::test_invalid_speeds_vals",
"tests/test_transform.py::TestTransformerChorus::test_invalid_speeds_wronglen",
"tests/test_transform.py::TestTransformerContrast::test_default",
"tests/test_transform.py::TestTransformerContrast::test_invalid_amount_big",
"tests/test_transform.py::TestTransformerContrast::test_invalid_amount_neg",
"tests/test_transform.py::TestTransformerContrast::test_invalid_amount_nonnum",
"tests/test_transform.py::TestTransformerCompand::test_attack_bigger_decay",
"tests/test_transform.py::TestTransformerCompand::test_attack_time_invalid_neg",
"tests/test_transform.py::TestTransformerCompand::test_attack_time_invalid_nonnum",
"tests/test_transform.py::TestTransformerCompand::test_attack_time_valid",
"tests/test_transform.py::TestTransformerCompand::test_decay_time_invalid_neg",
"tests/test_transform.py::TestTransformerCompand::test_decay_time_invalid_nonnum",
"tests/test_transform.py::TestTransformerCompand::test_decay_time_valid",
"tests/test_transform.py::TestTransformerCompand::test_default",
"tests/test_transform.py::TestTransformerCompand::test_soft_knee_invalid",
"tests/test_transform.py::TestTransformerCompand::test_soft_knee_none",
"tests/test_transform.py::TestTransformerCompand::test_soft_knee_valid",
"tests/test_transform.py::TestTransformerCompand::test_tf_points_empty",
"tests/test_transform.py::TestTransformerCompand::test_tf_points_nonlist",
"tests/test_transform.py::TestTransformerCompand::test_tf_points_nontuples",
"tests/test_transform.py::TestTransformerCompand::test_tf_points_tup_dups",
"tests/test_transform.py::TestTransformerCompand::test_tf_points_tup_len",
"tests/test_transform.py::TestTransformerCompand::test_tf_points_tup_nonnum",
"tests/test_transform.py::TestTransformerCompand::test_tf_points_tup_nonnum2",
"tests/test_transform.py::TestTransformerCompand::test_tf_points_tup_positive",
"tests/test_transform.py::TestTransformerCompand::test_tf_points_valid",
"tests/test_transform.py::TestTransformerConvert::test_bitdepth_invalid",
"tests/test_transform.py::TestTransformerConvert::test_bitdepth_valid",
"tests/test_transform.py::TestTransformerConvert::test_channels_invalid1",
"tests/test_transform.py::TestTransformerConvert::test_channels_invalid2",
"tests/test_transform.py::TestTransformerConvert::test_channels_valid",
"tests/test_transform.py::TestTransformerConvert::test_default",
"tests/test_transform.py::TestTransformerConvert::test_samplerate_invalid",
"tests/test_transform.py::TestTransformerConvert::test_samplerate_valid",
"tests/test_transform.py::TestTransformerDcshift::test_default",
"tests/test_transform.py::TestTransformerDcshift::test_invalid_shift_big",
"tests/test_transform.py::TestTransformerDcshift::test_invalid_shift_neg",
"tests/test_transform.py::TestTransformerDcshift::test_invalid_shift_nonnum",
"tests/test_transform.py::TestTransformerDeemph::test_default",
"tests/test_transform.py::TestTransformerDelay::test_default",
"tests/test_transform.py::TestTransformerDelay::test_default_three_channel",
"tests/test_transform.py::TestTransformerDelay::test_invalid_position_type",
"tests/test_transform.py::TestTransformerDelay::test_invalid_position_vals",
"tests/test_transform.py::TestTransformerDownsample::test_default",
"tests/test_transform.py::TestTransformerDownsample::test_invalid_factor_neg",
"tests/test_transform.py::TestTransformerDownsample::test_invalid_factor_nonnum",
"tests/test_transform.py::TestTransformerEarwax::test_default",
"tests/test_transform.py::TestTransformerEcho::test_decays_invalid_len",
"tests/test_transform.py::TestTransformerEcho::test_decays_invalid_type",
"tests/test_transform.py::TestTransformerEcho::test_decays_invalid_vals",
"tests/test_transform.py::TestTransformerEcho::test_decays_valid",
"tests/test_transform.py::TestTransformerEcho::test_default",
"tests/test_transform.py::TestTransformerEcho::test_delays_invalid_len",
"tests/test_transform.py::TestTransformerEcho::test_delays_invalid_type",
"tests/test_transform.py::TestTransformerEcho::test_delays_invalid_vals",
"tests/test_transform.py::TestTransformerEcho::test_delays_valid",
"tests/test_transform.py::TestTransformerEcho::test_gain_in_invalid",
"tests/test_transform.py::TestTransformerEcho::test_gain_in_valid",
"tests/test_transform.py::TestTransformerEcho::test_gain_out_invalid",
"tests/test_transform.py::TestTransformerEcho::test_gain_out_valid",
"tests/test_transform.py::TestTransformerEcho::test_n_echos_invalid",
"tests/test_transform.py::TestTransformerEcho::test_n_echos_valid",
"tests/test_transform.py::TestTransformerEchos::test_decays_invalid_len",
"tests/test_transform.py::TestTransformerEchos::test_decays_invalid_type",
"tests/test_transform.py::TestTransformerEchos::test_decays_invalid_vals",
"tests/test_transform.py::TestTransformerEchos::test_decays_valid",
"tests/test_transform.py::TestTransformerEchos::test_default",
"tests/test_transform.py::TestTransformerEchos::test_delays_invalid_len",
"tests/test_transform.py::TestTransformerEchos::test_delays_invalid_type",
"tests/test_transform.py::TestTransformerEchos::test_delays_invalid_vals",
"tests/test_transform.py::TestTransformerEchos::test_delays_valid",
"tests/test_transform.py::TestTransformerEchos::test_gain_in_invalid",
"tests/test_transform.py::TestTransformerEchos::test_gain_in_valid",
"tests/test_transform.py::TestTransformerEchos::test_gain_out_invalid",
"tests/test_transform.py::TestTransformerEchos::test_gain_out_valid",
"tests/test_transform.py::TestTransformerEchos::test_n_echos_invalid",
"tests/test_transform.py::TestTransformerEchos::test_n_echos_valid",
"tests/test_transform.py::TestTransformerEqualizer::test_default",
"tests/test_transform.py::TestTransformerEqualizer::test_frequency_invalid",
"tests/test_transform.py::TestTransformerEqualizer::test_gain_db_invalid",
"tests/test_transform.py::TestTransformerEqualizer::test_width_q_invalid",
"tests/test_transform.py::TestTransformerFade::test_default",
"tests/test_transform.py::TestTransformerFade::test_fade_in_invalid",
"tests/test_transform.py::TestTransformerFade::test_fade_in_valid",
"tests/test_transform.py::TestTransformerFade::test_fade_out_invalid",
"tests/test_transform.py::TestTransformerFade::test_fade_out_valid",
"tests/test_transform.py::TestTransformerFade::test_fade_shape_invalid",
"tests/test_transform.py::TestTransformerFade::test_fade_shape_valid",
"tests/test_transform.py::TestTransformerFir::test_default",
"tests/test_transform.py::TestTransformerFir::test_invalid_coeffs_nonlist",
"tests/test_transform.py::TestTransformerFir::test_invalid_coeffs_vals",
"tests/test_transform.py::TestTransformerFlanger::test_default",
"tests/test_transform.py::TestTransformerFlanger::test_flanger_delay_invalid",
"tests/test_transform.py::TestTransformerFlanger::test_flanger_delay_valid",
"tests/test_transform.py::TestTransformerFlanger::test_flanger_depth_invalid",
"tests/test_transform.py::TestTransformerFlanger::test_flanger_depth_valid",
"tests/test_transform.py::TestTransformerFlanger::test_flanger_interp_invalid",
"tests/test_transform.py::TestTransformerFlanger::test_flanger_interp_valid",
"tests/test_transform.py::TestTransformerFlanger::test_flanger_phase_invalid",
"tests/test_transform.py::TestTransformerFlanger::test_flanger_phase_valid",
"tests/test_transform.py::TestTransformerFlanger::test_flanger_regen_invalid",
"tests/test_transform.py::TestTransformerFlanger::test_flanger_regen_valid",
"tests/test_transform.py::TestTransformerFlanger::test_flanger_shape_invalid",
"tests/test_transform.py::TestTransformerFlanger::test_flanger_shape_valid",
"tests/test_transform.py::TestTransformerFlanger::test_flanger_speed_invalid",
"tests/test_transform.py::TestTransformerFlanger::test_flanger_speed_valid",
"tests/test_transform.py::TestTransformerFlanger::test_flanger_width_invalid",
"tests/test_transform.py::TestTransformerFlanger::test_flanger_width_valid",
"tests/test_transform.py::TestTransformerGain::test_balance_invalid",
"tests/test_transform.py::TestTransformerGain::test_balance_valid",
"tests/test_transform.py::TestTransformerGain::test_default",
"tests/test_transform.py::TestTransformerGain::test_gain_db_invalid",
"tests/test_transform.py::TestTransformerGain::test_gain_db_valid",
"tests/test_transform.py::TestTransformerGain::test_limiter_invalid",
"tests/test_transform.py::TestTransformerGain::test_limiter_valid",
"tests/test_transform.py::TestTransformerGain::test_normalize_invalid",
"tests/test_transform.py::TestTransformerGain::test_normalize_valid",
"tests/test_transform.py::TestTransformerHighpass::test_default",
"tests/test_transform.py::TestTransformerHighpass::test_frequency_invalid",
"tests/test_transform.py::TestTransformerHighpass::test_n_poles_invalid",
"tests/test_transform.py::TestTransformerHighpass::test_one_pole",
"tests/test_transform.py::TestTransformerHighpass::test_width_q_invalid",
"tests/test_transform.py::TestTransformerHilbert::test_default",
"tests/test_transform.py::TestTransformerHilbert::test_num_taps_invalid",
"tests/test_transform.py::TestTransformerHilbert::test_num_taps_invalid_even",
"tests/test_transform.py::TestTransformerHilbert::test_num_taps_valid",
"tests/test_transform.py::TestTransformerLowpass::test_default",
"tests/test_transform.py::TestTransformerLowpass::test_frequency_invalid",
"tests/test_transform.py::TestTransformerLowpass::test_n_poles_invalid",
"tests/test_transform.py::TestTransformerLowpass::test_one_pole",
"tests/test_transform.py::TestTransformerLowpass::test_width_q_invalid",
"tests/test_transform.py::TestTransformerLoudness::test_default",
"tests/test_transform.py::TestTransformerLoudness::test_gain_db_invalid",
"tests/test_transform.py::TestTransformerLoudness::test_gain_db_valid",
"tests/test_transform.py::TestTransformerLoudness::test_reference_level_invalid",
"tests/test_transform.py::TestTransformerLoudness::test_reference_level_oorange",
"tests/test_transform.py::TestTransformerLoudness::test_reference_level_valid",
"tests/test_transform.py::TestTransformerMcompand::test_attack_time_invalid_len",
"tests/test_transform.py::TestTransformerMcompand::test_attack_time_invalid_neg",
"tests/test_transform.py::TestTransformerMcompand::test_attack_time_invalid_nonnum",
"tests/test_transform.py::TestTransformerMcompand::test_attack_time_invalid_type",
"tests/test_transform.py::TestTransformerMcompand::test_attack_time_valid",
"tests/test_transform.py::TestTransformerMcompand::test_crossover_frequencies_invalid",
"tests/test_transform.py::TestTransformerMcompand::test_crossover_frequencies_invalid_vals",
"tests/test_transform.py::TestTransformerMcompand::test_crossover_frequencies_valid",
"tests/test_transform.py::TestTransformerMcompand::test_decay_time_invalid_len",
"tests/test_transform.py::TestTransformerMcompand::test_decay_time_invalid_neg",
"tests/test_transform.py::TestTransformerMcompand::test_decay_time_invalid_nonnum",
"tests/test_transform.py::TestTransformerMcompand::test_decay_time_invalid_type",
"tests/test_transform.py::TestTransformerMcompand::test_decay_time_valid",
"tests/test_transform.py::TestTransformerMcompand::test_default",
"tests/test_transform.py::TestTransformerMcompand::test_gain_len_invalid",
"tests/test_transform.py::TestTransformerMcompand::test_gain_valid",
"tests/test_transform.py::TestTransformerMcompand::test_gain_values_invalid",
"tests/test_transform.py::TestTransformerMcompand::test_n_bands_invalid",
"tests/test_transform.py::TestTransformerMcompand::test_n_bands_valid",
"tests/test_transform.py::TestTransformerMcompand::test_soft_knee_db_invalid_len",
"tests/test_transform.py::TestTransformerMcompand::test_soft_knee_db_invalid_type",
"tests/test_transform.py::TestTransformerMcompand::test_soft_knee_invalid",
"tests/test_transform.py::TestTransformerMcompand::test_soft_knee_none",
"tests/test_transform.py::TestTransformerMcompand::test_soft_knee_valid",
"tests/test_transform.py::TestTransformerMcompand::test_tf_points_elt_empty",
"tests/test_transform.py::TestTransformerMcompand::test_tf_points_elt_nonlist",
"tests/test_transform.py::TestTransformerMcompand::test_tf_points_elt_nontuples",
"tests/test_transform.py::TestTransformerMcompand::test_tf_points_elt_tup_len",
"tests/test_transform.py::TestTransformerMcompand::test_tf_points_elt_tup_nonnum",
"tests/test_transform.py::TestTransformerMcompand::test_tf_points_tup_dups",
"tests/test_transform.py::TestTransformerMcompand::test_tf_points_tup_nonnum2",
"tests/test_transform.py::TestTransformerMcompand::test_tf_points_tup_positive",
"tests/test_transform.py::TestTransformerMcompand::test_tf_points_valid",
"tests/test_transform.py::TestTransformerMcompand::test_tf_points_wrong_len",
"tests/test_transform.py::TestTransformerNoiseprof::test_default",
"tests/test_transform.py::TestTransformerNoiseprof::test_noise_prof_invalid_dir",
"tests/test_transform.py::TestTransformerNoisered::test_amount_invalid",
"tests/test_transform.py::TestTransformerNoisered::test_amount_valid",
"tests/test_transform.py::TestTransformerNoisered::test_default",
"tests/test_transform.py::TestTransformerNoisered::test_noise_prof_valid",
"tests/test_transform.py::TestTransformerNorm::test_db_level_invalid",
"tests/test_transform.py::TestTransformerNorm::test_db_level_valid",
"tests/test_transform.py::TestTransformerNorm::test_default",
"tests/test_transform.py::TestTransformerOops::test_default",
"tests/test_transform.py::TestTransformerOverdrive::test_colour_invalid",
"tests/test_transform.py::TestTransformerOverdrive::test_colour_valid",
"tests/test_transform.py::TestTransformerOverdrive::test_default",
"tests/test_transform.py::TestTransformerOverdrive::test_gain_db_invalid",
"tests/test_transform.py::TestTransformerOverdrive::test_gain_db_valid",
"tests/test_transform.py::TestTransformerPad::test_default",
"tests/test_transform.py::TestTransformerPad::test_end_duration_invalid",
"tests/test_transform.py::TestTransformerPad::test_end_duration_valid",
"tests/test_transform.py::TestTransformerPad::test_start_duration_invalid",
"tests/test_transform.py::TestTransformerPad::test_start_duration_valid",
"tests/test_transform.py::TestTransformerPhaser::test_decay_invalid",
"tests/test_transform.py::TestTransformerPhaser::test_decay_valid",
"tests/test_transform.py::TestTransformerPhaser::test_default",
"tests/test_transform.py::TestTransformerPhaser::test_delay_invalid",
"tests/test_transform.py::TestTransformerPhaser::test_delay_valid",
"tests/test_transform.py::TestTransformerPhaser::test_gain_in_invalid",
"tests/test_transform.py::TestTransformerPhaser::test_gain_in_valid",
"tests/test_transform.py::TestTransformerPhaser::test_gain_out_invalid",
"tests/test_transform.py::TestTransformerPhaser::test_gain_out_valid",
"tests/test_transform.py::TestTransformerPhaser::test_modulation_shape_invalid",
"tests/test_transform.py::TestTransformerPhaser::test_modulation_shape_valid",
"tests/test_transform.py::TestTransformerPhaser::test_speed_invalid",
"tests/test_transform.py::TestTransformerPhaser::test_speed_valid",
"tests/test_transform.py::TestTransformerPitch::test_default",
"tests/test_transform.py::TestTransformerPitch::test_n_semitones_invalid",
"tests/test_transform.py::TestTransformerPitch::test_n_semitones_valid",
"tests/test_transform.py::TestTransformerPitch::test_n_semitones_warning",
"tests/test_transform.py::TestTransformerPitch::test_quick_invalid",
"tests/test_transform.py::TestTransformerPitch::test_quick_valid",
"tests/test_transform.py::TestTransformerRate::test_default",
"tests/test_transform.py::TestTransformerRate::test_quality_invalid",
"tests/test_transform.py::TestTransformerRate::test_quality_valid",
"tests/test_transform.py::TestTransformerRate::test_samplerate_invalid",
"tests/test_transform.py::TestTransformerRate::test_samplerate_valid",
"tests/test_transform.py::TestTransformerRemix::test_default",
"tests/test_transform.py::TestTransformerRemix::test_num_channels_valid",
"tests/test_transform.py::TestTransformerRemix::test_num_output_channels_invalid",
"tests/test_transform.py::TestTransformerRemix::test_remix_dict_invalid",
"tests/test_transform.py::TestTransformerRemix::test_remix_dict_invalid2",
"tests/test_transform.py::TestTransformerRemix::test_remix_dict_invalid3",
"tests/test_transform.py::TestTransformerRemix::test_remix_dict_invalid4",
"tests/test_transform.py::TestTransformerRemix::test_remix_dictionary_none",
"tests/test_transform.py::TestTransformerRemix::test_remix_dictionary_valid",
"tests/test_transform.py::TestTransformerRepeat::test_count_invalid",
"tests/test_transform.py::TestTransformerRepeat::test_count_invalid_fmt",
"tests/test_transform.py::TestTransformerRepeat::test_count_valid",
"tests/test_transform.py::TestTransformerRepeat::test_default",
"tests/test_transform.py::TestTransformerReverb::test_default",
"tests/test_transform.py::TestTransformerReverb::test_high_freq_damping_invalid",
"tests/test_transform.py::TestTransformerReverb::test_high_freq_damping_valid",
"tests/test_transform.py::TestTransformerReverb::test_pre_delay_invalid",
"tests/test_transform.py::TestTransformerReverb::test_pre_delay_valid",
"tests/test_transform.py::TestTransformerReverb::test_reverberance_invalid",
"tests/test_transform.py::TestTransformerReverb::test_reverberance_valid",
"tests/test_transform.py::TestTransformerReverb::test_room_scale_invalid",
"tests/test_transform.py::TestTransformerReverb::test_room_scale_valid",
"tests/test_transform.py::TestTransformerReverb::test_stereo_depth_invalid",
"tests/test_transform.py::TestTransformerReverb::test_stereo_depth_valid",
"tests/test_transform.py::TestTransformerReverb::test_wet_gain_invalid",
"tests/test_transform.py::TestTransformerReverb::test_wet_gain_valid",
"tests/test_transform.py::TestTransformerReverb::test_wet_only_invalid",
"tests/test_transform.py::TestTransformerReverb::test_wet_only_valid",
"tests/test_transform.py::TestTransformerReverse::test_default",
"tests/test_transform.py::TestTransformerSilence::test_buffer_around_silence_invalid",
"tests/test_transform.py::TestTransformerSilence::test_buffer_around_silence_valid",
"tests/test_transform.py::TestTransformerSilence::test_default",
"tests/test_transform.py::TestTransformerSilence::test_location_beginning",
"tests/test_transform.py::TestTransformerSilence::test_location_end",
"tests/test_transform.py::TestTransformerSilence::test_location_invalid",
"tests/test_transform.py::TestTransformerSilence::test_min_silence_duration_invalid",
"tests/test_transform.py::TestTransformerSilence::test_min_silence_duration_valid",
"tests/test_transform.py::TestTransformerSilence::test_silence_threshold_invalid",
"tests/test_transform.py::TestTransformerSilence::test_silence_threshold_invalid2",
"tests/test_transform.py::TestTransformerSilence::test_silence_threshold_valid",
"tests/test_transform.py::TestTransformerSinc::test_cutoff_freq_invalid",
"tests/test_transform.py::TestTransformerSinc::test_cutoff_freq_invalid_high",
"tests/test_transform.py::TestTransformerSinc::test_cutoff_freq_invalid_list",
"tests/test_transform.py::TestTransformerSinc::test_cutoff_freq_invalid_list_len",
"tests/test_transform.py::TestTransformerSinc::test_cutoff_freq_invalid_number",
"tests/test_transform.py::TestTransformerSinc::test_cutoff_freq_invalid_reject",
"tests/test_transform.py::TestTransformerSinc::test_cutoff_freq_valid_float",
"tests/test_transform.py::TestTransformerSinc::test_cutoff_freq_valid_list",
"tests/test_transform.py::TestTransformerSinc::test_cutoff_freq_valid_unordered",
"tests/test_transform.py::TestTransformerSinc::test_default",
"tests/test_transform.py::TestTransformerSinc::test_filter_type_invalid",
"tests/test_transform.py::TestTransformerSinc::test_filter_type_valid_low",
"tests/test_transform.py::TestTransformerSinc::test_filter_type_valid_pass",
"tests/test_transform.py::TestTransformerSinc::test_filter_type_valid_reject",
"tests/test_transform.py::TestTransformerSinc::test_phase_response_invalid",
"tests/test_transform.py::TestTransformerSinc::test_phase_response_invalid_large",
"tests/test_transform.py::TestTransformerSinc::test_phase_response_invalid_small",
"tests/test_transform.py::TestTransformerSinc::test_phase_response_valid_high",
"tests/test_transform.py::TestTransformerSinc::test_phase_response_valid_low",
"tests/test_transform.py::TestTransformerSinc::test_phase_response_valid_mid",
"tests/test_transform.py::TestTransformerSinc::test_stop_band_attenuation_invalid",
"tests/test_transform.py::TestTransformerSinc::test_stop_band_attenuation_valid",
"tests/test_transform.py::TestTransformerSinc::test_transition_bw_invalid",
"tests/test_transform.py::TestTransformerSinc::test_transition_bw_invalid_float",
"tests/test_transform.py::TestTransformerSinc::test_transition_bw_invalid_list_elt",
"tests/test_transform.py::TestTransformerSinc::test_transition_bw_invalid_low",
"tests/test_transform.py::TestTransformerSinc::test_transition_bw_linvalid_list_len",
"tests/test_transform.py::TestTransformerSinc::test_transition_bw_valid_high",
"tests/test_transform.py::TestTransformerSinc::test_transition_bw_valid_low",
"tests/test_transform.py::TestTransformerSinc::test_transition_bw_valid_pass_float",
"tests/test_transform.py::TestTransformerSinc::test_transition_bw_valid_pass_list",
"tests/test_transform.py::TestTransformerSpeed::test_default",
"tests/test_transform.py::TestTransformerSpeed::test_factor_invalid",
"tests/test_transform.py::TestTransformerSpeed::test_factor_valid",
"tests/test_transform.py::TestTransformerSpeed::test_factor_valid_extreme",
"tests/test_transform.py::TestTransformerStat::test_default",
"tests/test_transform.py::TestTransformerStat::test_multichannel",
"tests/test_transform.py::TestTransformerStat::test_rms",
"tests/test_transform.py::TestTransformerStat::test_scale",
"tests/test_transform.py::TestTransformerStat::test_scale_invalid",
"tests/test_transform.py::TestTransformerPowerSpectrum::test_multichannel",
"tests/test_transform.py::TestTransformerPowerSpectrum::test_valid",
"tests/test_transform.py::TestTransformerStats::test_default",
"tests/test_transform.py::TestTransformerStats::test_multichannel",
"tests/test_transform.py::TestTransformerSwap::test_default",
"tests/test_transform.py::TestTransformerStretch::test_default",
"tests/test_transform.py::TestTransformerStretch::test_factor_extreme",
"tests/test_transform.py::TestTransformerStretch::test_factor_invalid",
"tests/test_transform.py::TestTransformerStretch::test_factor_valid",
"tests/test_transform.py::TestTransformerStretch::test_window_invalid",
"tests/test_transform.py::TestTransformerStretch::test_window_valid",
"tests/test_transform.py::TestTransformerTempo::test_audio_type_invalid",
"tests/test_transform.py::TestTransformerTempo::test_audio_type_valid",
"tests/test_transform.py::TestTransformerTempo::test_default",
"tests/test_transform.py::TestTransformerTempo::test_factor_invalid",
"tests/test_transform.py::TestTransformerTempo::test_factor_valid",
"tests/test_transform.py::TestTransformerTempo::test_factor_warning",
"tests/test_transform.py::TestTransformerTempo::test_quick_invalid",
"tests/test_transform.py::TestTransformerTempo::test_quick_valid",
"tests/test_transform.py::TestTransformerTreble::test_default",
"tests/test_transform.py::TestTransformerTreble::test_frequency_invalid",
"tests/test_transform.py::TestTransformerTreble::test_gain_db_invalid",
"tests/test_transform.py::TestTransformerTreble::test_slope_invalid",
"tests/test_transform.py::TestTransformerTremolo::test_default",
"tests/test_transform.py::TestTransformerTremolo::test_depth_invalid",
"tests/test_transform.py::TestTransformerTremolo::test_speed_invalid",
"tests/test_transform.py::TestTransformerTrim::test_default",
"tests/test_transform.py::TestTransformerTrim::test_invalid_end_time",
"tests/test_transform.py::TestTransformerTrim::test_invalid_start_time",
"tests/test_transform.py::TestTransformerTrim::test_invalid_time_pair",
"tests/test_transform.py::TestTransformerUpsample::test_default",
"tests/test_transform.py::TestTransformerUpsample::test_invalid_factor_decimal",
"tests/test_transform.py::TestTransformerUpsample::test_invalid_factor_neg",
"tests/test_transform.py::TestTransformerUpsample::test_invalid_factor_nonnum",
"tests/test_transform.py::TestTransformerVad::test_default",
"tests/test_transform.py::TestTransformerVad::test_end_location",
"tests/test_transform.py::TestTransformerVad::test_invalid_activity_threshold",
"tests/test_transform.py::TestTransformerVad::test_invalid_initial_pad",
"tests/test_transform.py::TestTransformerVad::test_invalid_initial_search_buffer",
"tests/test_transform.py::TestTransformerVad::test_invalid_location",
"tests/test_transform.py::TestTransformerVad::test_invalid_max_gap",
"tests/test_transform.py::TestTransformerVad::test_invalid_min_activity_duration",
"tests/test_transform.py::TestTransformerVad::test_invalid_normalize",
"tests/test_transform.py::TestTransformerVad::test_no_normalize",
"tests/test_transform.py::TestTransformerVol::test_default",
"tests/test_transform.py::TestTransformerVol::test_gain_type_db",
"tests/test_transform.py::TestTransformerVol::test_gain_type_power",
"tests/test_transform.py::TestTransformerVol::test_invalid_gain",
"tests/test_transform.py::TestTransformerVol::test_invalid_gain_power",
"tests/test_transform.py::TestTransformerVol::test_invalid_gain_type",
"tests/test_transform.py::TestTransformerVol::test_invalid_limiter_gain",
"tests/test_transform.py::TestTransformerVol::test_limiter_gain",
"tests/test_transform.py::TestTransformerVol::test_limiter_gain_vol_down",
"tests/test_transform.py::TestTransformerVol::test_limiter_gain_vol_down_db",
"tests/test_transform.py::TestTransformerVol::test_limiter_gain_vol_up_db"
]
| []
| BSD 3-Clause "New" or "Revised" License | 1,312 | [
"sox/combine.py",
"sox/transform.py",
"docs/changes.rst",
"sox/version.py",
"sox/file_info.py"
]
| [
"sox/combine.py",
"sox/transform.py",
"docs/changes.rst",
"sox/version.py",
"sox/file_info.py"
]
|
|
rthalley__dnspython-255 | 6b11f2677fe6d4da653a85512ab73fa31c27214f | 2017-05-30 08:08:04 | 1bb88cfecacb18fb406466e38a5b9c185cb5373e | diff --git a/LICENSE b/LICENSE
index 2896ca9..c0382e1 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,6 +1,7 @@
ISC License
Copyright (C) 2001-2003 Nominum, Inc.
+Copyright (C) Google Inc.
Permission to use, copy, modify, and distribute this software and its
documentation for any purpose with or without fee is hereby granted,
diff --git a/dns/zone.py b/dns/zone.py
index 468618f..0cf2682 100644
--- a/dns/zone.py
+++ b/dns/zone.py
@@ -28,6 +28,7 @@ import dns.node
import dns.rdataclass
import dns.rdatatype
import dns.rdata
+import dns.rdtypes.ANY.SOA
import dns.rrset
import dns.tokenizer
import dns.ttl
@@ -589,8 +590,14 @@ class _MasterReader(object):
@ivar tok: The tokenizer
@type tok: dns.tokenizer.Tokenizer object
- @ivar ttl: The default TTL
- @type ttl: int
+ @ivar last_ttl: The last seen explicit TTL for an RR
+ @type last_ttl: int
+ @ivar last_ttl_known: Has last TTL been detected
+ @type last_ttl_known: bool
+ @ivar default_ttl: The default TTL from a $TTL directive or SOA RR
+ @type default_ttl: int
+ @ivar default_ttl_known: Has default TTL been detected
+ @type default_ttl_known: bool
@ivar last_name: The last name read
@type last_name: dns.name.Name object
@ivar current_origin: The current origin
@@ -600,8 +607,8 @@ class _MasterReader(object):
@ivar zone: the zone
@type zone: dns.zone.Zone object
@ivar saved_state: saved reader state (used when processing $INCLUDE)
- @type saved_state: list of (tokenizer, current_origin, last_name, file)
- tuples.
+ @type saved_state: list of (tokenizer, current_origin, last_name, file,
+ last_ttl, last_ttl_known, default_ttl, default_ttl_known) tuples.
@ivar current_file: the file object of the $INCLUDed file being parsed
(None if no $INCLUDE is active).
@ivar allow_include: is $INCLUDE allowed?
@@ -618,7 +625,10 @@ class _MasterReader(object):
self.tok = tok
self.current_origin = origin
self.relativize = relativize
- self.ttl = 0
+ self.last_ttl = 0
+ self.last_ttl_known = False
+ self.default_ttl = 0
+ self.default_ttl_known = False
self.last_name = self.current_origin
self.zone = zone_factory(origin, rdclass, relativize=relativize)
self.saved_state = []
@@ -659,11 +669,18 @@ class _MasterReader(object):
# TTL
try:
ttl = dns.ttl.from_text(token.value)
+ self.last_ttl = ttl
+ self.last_ttl_known = True
token = self.tok.get()
if not token.is_identifier():
raise dns.exception.SyntaxError
except dns.ttl.BadTTL:
- ttl = self.ttl
+ if not (self.last_ttl_known or self.default_ttl_known):
+ raise dns.exception.SyntaxError("Missing default TTL value")
+ if self.default_ttl_known:
+ ttl = self.default_ttl
+ else:
+ ttl = self.last_ttl
# Class
try:
rdclass = dns.rdataclass.from_text(token.value)
@@ -703,6 +720,13 @@ class _MasterReader(object):
raise dns.exception.SyntaxError(
"caught exception %s: %s" % (str(ty), str(va)))
+ if not self.default_ttl_known and isinstance(rd, dns.rdtypes.ANY.SOA.SOA):
+ # The pre-RFC2308 and pre-BIND9 behavior inherits the zone default
+ # TTL from the SOA minttl if no $TTL statement is present before the
+ # SOA is parsed.
+ self.default_ttl = rd.minimum
+ self.default_ttl_known = True
+
rd.choose_relativity(self.zone.origin, self.relativize)
covers = rd.covers()
rds = n.find_rdataset(rdclass, rdtype, covers, True)
@@ -778,11 +802,18 @@ class _MasterReader(object):
# TTL
try:
ttl = dns.ttl.from_text(token.value)
+ self.last_ttl = ttl
+ self.last_ttl_known = True
token = self.tok.get()
if not token.is_identifier():
raise dns.exception.SyntaxError
except dns.ttl.BadTTL:
- ttl = self.ttl
+ if not (self.last_ttl_known or self.default_ttl_known):
+ raise dns.exception.SyntaxError("Missing default TTL value")
+ if self.default_ttl_known:
+ ttl = self.default_ttl
+ else:
+ ttl = self.last_ttl
# Class
try:
rdclass = dns.rdataclass.from_text(token.value)
@@ -884,7 +915,10 @@ class _MasterReader(object):
self.current_origin,
self.last_name,
self.current_file,
- self.ttl) = self.saved_state.pop(-1)
+ self.last_ttl,
+ self.last_ttl_known,
+ self.default_ttl,
+ self.default_ttl_known) = self.saved_state.pop(-1)
continue
break
elif token.is_eol():
@@ -898,7 +932,8 @@ class _MasterReader(object):
token = self.tok.get()
if not token.is_identifier():
raise dns.exception.SyntaxError("bad $TTL")
- self.ttl = dns.ttl.from_text(token.value)
+ self.default_ttl = dns.ttl.from_text(token.value)
+ self.default_ttl_known = True
self.tok.get_eol()
elif c == u'$ORIGIN':
self.current_origin = self.tok.get_name()
@@ -923,7 +958,10 @@ class _MasterReader(object):
self.current_origin,
self.last_name,
self.current_file,
- self.ttl))
+ self.last_ttl,
+ self.last_ttl_known,
+ self.default_ttl,
+ self.default_ttl_known))
self.current_file = open(filename, 'r')
self.tok = dns.tokenizer.Tokenizer(self.current_file,
filename)
| Inherited TTL when parsing zone master files defaults to zero
When parsing a master zone file which has neither a $TTL directive nor an SOA RR, the default TTL applied to any RRs which don't specify one explicitly is zero.
This is not only surprising given the differing implementations of ttl=0, but also because [BIND](https://github.com/mirroring/bind9/blob/5125df6753f4b648a61502b261d2706a452e07d0/lib/dns/master.c#L1845-L1874) seemingly treats this as a syntax error and rejects the zone.
Example:
```
$ORIGIN example.
localhost IN A 127.0.0.1
```
Currently this parses as a localhost.example. RR with TTL=0. | rthalley/dnspython | diff --git a/tests/test_zone.py b/tests/test_zone.py
index 3c497a6..ce7caf0 100644
--- a/tests/test_zone.py
+++ b/tests/test_zone.py
@@ -76,6 +76,33 @@ ns1 1d1s a 10.0.0.1
ns2 1w1D1h1m1S a 10.0.0.2
"""
+# No $TTL so default TTL for RRs should be inherited from SOA minimum TTL (
+# not from the last explicit RR TTL).
+ttl_from_soa_text = """$ORIGIN example.
+@ 1h soa foo bar 1 2 3 4 5
+@ 1h ns ns1
+@ 1h ns ns2
+ns1 1w1D1h1m1S a 10.0.0.2
+ns2 a 10.0.0.1
+"""
+
+# No $TTL and no SOA, so default TTL for RRs should be inherited from last
+# explicit RR TTL.
+ttl_from_last_text = """$ORIGIN example.
+@ 1h ns ns1
+@ 1h ns ns2
+ns1 a 10.0.0.1
+ns2 1w1D1h1m1S a 10.0.0.2
+"""
+
+# No $TTL and no SOA should raise SyntaxError as no TTL can be determined.
+no_ttl_text = """$ORIGIN example.
+@ ns ns1
+@ ns ns2
+ns1 a 10.0.0.1
+ns2 a 10.0.0.2
+"""
+
no_soa_text = """$TTL 1h
$ORIGIN example.
@ ns ns1
@@ -442,6 +469,36 @@ class ZoneTestCase(unittest.TestCase):
rds = n.get_rdataset(dns.rdataclass.IN, dns.rdatatype.A)
self.failUnless(rds.ttl == 694861)
+ def testTTLFromSOA(self):
+ z = dns.zone.from_text(ttl_from_soa_text, 'example.', relativize=True)
+ n = z['@']
+ rds = n.get_rdataset(dns.rdataclass.IN, dns.rdatatype.SOA)
+ self.failUnless(rds.ttl == 3600)
+ soa_rd = rds[0]
+ n = z['ns1']
+ rds = n.get_rdataset(dns.rdataclass.IN, dns.rdatatype.A)
+ self.failUnless(rds.ttl == 694861)
+ n = z['ns2']
+ rds = n.get_rdataset(dns.rdataclass.IN, dns.rdatatype.A)
+ self.failUnless(rds.ttl == soa_rd.minimum)
+
+ def testTTLFromLast(self):
+ z = dns.zone.from_text(ttl_from_last_text, 'example.', check_origin=False)
+ n = z['@']
+ rds = n.get_rdataset(dns.rdataclass.IN, dns.rdatatype.NS)
+ self.failUnless(rds.ttl == 3600)
+ n = z['ns1']
+ rds = n.get_rdataset(dns.rdataclass.IN, dns.rdatatype.A)
+ self.failUnless(rds.ttl == 3600)
+ n = z['ns2']
+ rds = n.get_rdataset(dns.rdataclass.IN, dns.rdatatype.A)
+ self.failUnless(rds.ttl == 694861)
+
+ def testNoTTL(self):
+ def bad():
+ dns.zone.from_text(no_ttl_text, 'example.', check_origin=False)
+ self.failUnlessRaises(dns.exception.SyntaxError, bad)
+
def testNoSOA(self):
def bad():
dns.zone.from_text(no_soa_text, 'example.', relativize=True)
@@ -465,12 +522,11 @@ class ZoneTestCase(unittest.TestCase):
def testFirstRRStartsWithWhitespace(self):
# no name is specified, so default to the initial origin
- # no ttl is specified, so default to the initial TTL of 0
- z = dns.zone.from_text(' IN A 10.0.0.1', origin='example.',
+ z = dns.zone.from_text(' 300 IN A 10.0.0.1', origin='example.',
check_origin=False)
n = z['@']
rds = n.get_rdataset(dns.rdataclass.IN, dns.rdatatype.A)
- self.failUnless(rds.ttl == 0)
+ self.failUnless(rds.ttl == 300)
def testZoneOrigin(self):
z = dns.zone.Zone('example.')
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 2
} | 1.15 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"coverage",
"pylint",
"flake8",
"idna>=2.1",
"pycrypto>=2.6.1",
"ecdsa>=0.13"
],
"pre_install": null,
"python": "3.9",
"reqs_path": [
"requirements/base.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | astroid==3.3.9
coverage==7.8.0
dill==0.3.9
-e git+https://github.com/rthalley/dnspython.git@6b11f2677fe6d4da653a85512ab73fa31c27214f#egg=dnspython
ecdsa==0.19.1
exceptiongroup==1.2.2
flake8==7.2.0
idna==3.10
iniconfig==2.1.0
isort==6.0.1
mccabe==0.7.0
packaging==24.2
platformdirs==4.3.7
pluggy==1.5.0
pycodestyle==2.13.0
pycrypto==2.6.1
pyflakes==3.3.1
pylint==3.3.6
pytest==8.3.5
six==1.17.0
tomli==2.2.1
tomlkit==0.13.2
typing_extensions==4.13.0
| name: dnspython
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- astroid==3.3.9
- coverage==7.8.0
- dill==0.3.9
- ecdsa==0.19.1
- exceptiongroup==1.2.2
- flake8==7.2.0
- idna==3.10
- iniconfig==2.1.0
- isort==6.0.1
- mccabe==0.7.0
- packaging==24.2
- platformdirs==4.3.7
- pluggy==1.5.0
- pycodestyle==2.13.0
- pycrypto==2.6.1
- pyflakes==3.3.1
- pylint==3.3.6
- pytest==8.3.5
- six==1.17.0
- tomli==2.2.1
- tomlkit==0.13.2
- typing-extensions==4.13.0
prefix: /opt/conda/envs/dnspython
| [
"tests/test_zone.py::ZoneTestCase::testNoTTL",
"tests/test_zone.py::ZoneTestCase::testTTLFromLast",
"tests/test_zone.py::ZoneTestCase::testTTLFromSOA"
]
| [
"tests/test_zone.py::ZoneTestCase::testFromFile1",
"tests/test_zone.py::ZoneTestCase::testFromFile2",
"tests/test_zone.py::ZoneTestCase::testInclude",
"tests/test_zone.py::ZoneTestCase::testToFileBinary",
"tests/test_zone.py::ZoneTestCase::testToFileFilename",
"tests/test_zone.py::ZoneTestCase::testToFileTextual",
"tests/test_zone.py::ZoneTestCase::testToText",
"tests/test_zone.py::ZoneTestCase::testTorture1"
]
| [
"tests/test_zone.py::ZoneTestCase::testBadDirective",
"tests/test_zone.py::ZoneTestCase::testDeleteRdataset1",
"tests/test_zone.py::ZoneTestCase::testDeleteRdataset2",
"tests/test_zone.py::ZoneTestCase::testEqual",
"tests/test_zone.py::ZoneTestCase::testFindRRset1",
"tests/test_zone.py::ZoneTestCase::testFindRRset2",
"tests/test_zone.py::ZoneTestCase::testFindRdataset1",
"tests/test_zone.py::ZoneTestCase::testFindRdataset2",
"tests/test_zone.py::ZoneTestCase::testFirstRRStartsWithWhitespace",
"tests/test_zone.py::ZoneTestCase::testFromText",
"tests/test_zone.py::ZoneTestCase::testGetRRset1",
"tests/test_zone.py::ZoneTestCase::testGetRRset2",
"tests/test_zone.py::ZoneTestCase::testGetRdataset1",
"tests/test_zone.py::ZoneTestCase::testGetRdataset2",
"tests/test_zone.py::ZoneTestCase::testIterateAllRdatas",
"tests/test_zone.py::ZoneTestCase::testIterateAllRdatasets",
"tests/test_zone.py::ZoneTestCase::testIterateRdatas",
"tests/test_zone.py::ZoneTestCase::testIterateRdatasets",
"tests/test_zone.py::ZoneTestCase::testNoNS",
"tests/test_zone.py::ZoneTestCase::testNoSOA",
"tests/test_zone.py::ZoneTestCase::testNodeDeleteRdataset1",
"tests/test_zone.py::ZoneTestCase::testNodeDeleteRdataset2",
"tests/test_zone.py::ZoneTestCase::testNodeFindRdataset1",
"tests/test_zone.py::ZoneTestCase::testNodeFindRdataset2",
"tests/test_zone.py::ZoneTestCase::testNodeGetRdataset1",
"tests/test_zone.py::ZoneTestCase::testNodeGetRdataset2",
"tests/test_zone.py::ZoneTestCase::testNotEqual1",
"tests/test_zone.py::ZoneTestCase::testNotEqual2",
"tests/test_zone.py::ZoneTestCase::testNotEqual3",
"tests/test_zone.py::ZoneTestCase::testReplaceRdataset1",
"tests/test_zone.py::ZoneTestCase::testReplaceRdataset2",
"tests/test_zone.py::ZoneTestCase::testTTLs",
"tests/test_zone.py::ZoneTestCase::testToFileBinaryStream",
"tests/test_zone.py::ZoneTestCase::testToFileTextualStream",
"tests/test_zone.py::ZoneTestCase::testZoneOrigin",
"tests/test_zone.py::ZoneTestCase::testZoneOriginNone"
]
| []
| ISC License | 1,313 | [
"LICENSE",
"dns/zone.py"
]
| [
"LICENSE",
"dns/zone.py"
]
|
|
zalando-incubator__kubernetes-log-watcher-42 | 914da75de2439781884273b859bf663fc5e1072c | 2017-05-30 12:06:02 | 914da75de2439781884273b859bf663fc5e1072c | codecov-io: # [Codecov](https://codecov.io/gh/zalando-incubator/kubernetes-log-watcher/pull/42?src=pr&el=h1) Report
> Merging [#42](https://codecov.io/gh/zalando-incubator/kubernetes-log-watcher/pull/42?src=pr&el=desc) into [master](https://codecov.io/gh/zalando-incubator/kubernetes-log-watcher/commit/914da75de2439781884273b859bf663fc5e1072c?src=pr&el=desc) will **increase** coverage by `0.07%`.
> The diff coverage is `100%`.
[](https://codecov.io/gh/zalando-incubator/kubernetes-log-watcher/pull/42?src=pr&el=tree)
```diff
@@ Coverage Diff @@
## master #42 +/- ##
==========================================
+ Coverage 84.57% 84.65% +0.07%
==========================================
Files 9 9
Lines 415 417 +2
==========================================
+ Hits 351 353 +2
Misses 64 64
```
| [Impacted Files](https://codecov.io/gh/zalando-incubator/kubernetes-log-watcher/pull/42?src=pr&el=tree) | Coverage Δ | |
|---|---|---|
| [kube\_log\_watcher/\_\_init\_\_.py](https://codecov.io/gh/zalando-incubator/kubernetes-log-watcher/pull/42?src=pr&el=tree#diff-a3ViZV9sb2dfd2F0Y2hlci9fX2luaXRfXy5weQ==) | `100% <100%> (ø)` | :arrow_up: |
| [kube\_log\_watcher/agents/scalyr.py](https://codecov.io/gh/zalando-incubator/kubernetes-log-watcher/pull/42?src=pr&el=tree#diff-a3ViZV9sb2dfd2F0Y2hlci9hZ2VudHMvc2NhbHlyLnB5) | `91.73% <100%> (+0.13%)` | :arrow_up: |
------
[Continue to review full report at Codecov](https://codecov.io/gh/zalando-incubator/kubernetes-log-watcher/pull/42?src=pr&el=continue).
> **Legend** - [Click here to learn more](https://docs.codecov.io/docs/codecov-delta)
> `Δ = absolute <relative> (impact)`, `ø = not affected`, `? = missing data`
> Powered by [Codecov](https://codecov.io/gh/zalando-incubator/kubernetes-log-watcher/pull/42?src=pr&el=footer). Last update [914da75...fdaff26](https://codecov.io/gh/zalando-incubator/kubernetes-log-watcher/pull/42?src=pr&el=lastupdated). Read the [comment docs](https://docs.codecov.io/docs/pull-request-comments).
elauria: 👍
lmineiro: 👍 | diff --git a/CHANGES.txt b/CHANGES.txt
index 94c60e3..1fe61c2 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -2,6 +2,12 @@
Kubernetes-log-watcher changelog
================================
+0.15 (2017-05-30)
+-----------------
+
+- Make Journald monitor write rate limit configurable. [#41]
+
+
0.14 (2017-05-15)
-----------------
diff --git a/README.rst b/README.rst
index 4654078..4ff49d2 100644
--- a/README.rst
+++ b/README.rst
@@ -251,30 +251,30 @@ Log watcher
Configuration variables can be set via Env variables:
-``WATCHER_CONTAINERS_PATH``
+WATCHER_CONTAINERS_PATH
Containers directory path mounted from the host (Default: ``/var/lib/docker/containers``)
-``WATCHER_STRICT_LABELS``
+WATCHER_STRICT_LABELS
If set then only containers running in pods with ``application`` and ``version`` metadata labels will be considered for log watching. (Default is ``False``)
If not set then kubernetes-log-watcher will set ``application_id`` from *pod name*; in order to provide consistent attributes to log configuration agents.
-``WATCHER_AGENTS``
+WATCHER_AGENTS
Comma separated string of required log configuration agents. (Required. Example: "scalyr,appdynamics")
-``WATCHER_CLUSTER_ID``
+WATCHER_CLUSTER_ID
Kubernetes Cluster ID.
-``WATCHER_KUBE_URL``
+WATCHER_KUBE_URL
URL to API proxy service. Service is expected to handle authentication to the Kubernetes cluster. If set, then log-watcher will not use serviceaccount config.
-``WATCHER_KUBERNETES_UPDATE_CERTIFICATES``
+WATCHER_KUBERNETES_UPDATE_CERTIFICATES
[Deprecated] Call update-ca-certificates for Kubernetes service account ca.crt.
-``WATCHER_INTERVAL``
+WATCHER_INTERVAL
Polling interval (secs) for the watcher to detect containers changes. (Default: 60 sec)
-``WATCHER_DEBUG``
+WATCHER_DEBUG
Verbose output. (Default: False)
Scalyr configuration agent
@@ -282,27 +282,32 @@ Scalyr configuration agent
Configuration variables can be set via Env variables:
-``WATCHER_SCALYR_API_KEY``
+WATCHER_SCALYR_API_KEY
Scalyr API key. (Required).
-``WATCHER_SCALYR_DEST_PATH``
+WATCHER_SCALYR_DEST_PATH
Scalyr configuration agent will symlink containers logs in this location. This is to provide more friendly name for log files. Typical log file name for a container will be in the form ``<application>-<version>.log``. (Required).
-``WATCHER_SCALYR_CONFIG_PATH``
+WATCHER_SCALYR_CONFIG_PATH
Scalyr configuration file path. (Default: ``/etc/scalyr-agent-2/agent.json``)
-``WATCHER_SCALYR_JOURNALD``
+WATCHER_SCALYR_JOURNALD
Scalyr should follow Journald logs. This is for node system processes log shipping (e.g. docker, kube) (Default: ``False``)
-``WATCHER_SCALYR_JOURNALD_ATTRIBUTES``
+WATCHER_SCALYR_JOURNALD_ATTRIBUTES
Add attributes to Journald logs. By default ``cluster`` and ``node`` will be added by the configuration agent.
-``WATCHER_SCALYR_JOURNALD_EXTRA_FIELDS``
+WATCHER_SCALYR_JOURNALD_EXTRA_FIELDS
Add extra Systemd Journald fields. Should be a JSON string. Example: '{"_COMM": "command"}'
-``WATCHER_SCALYR_JOURNALD_PATH``
+WATCHER_SCALYR_JOURNALD_PATH
Journald logs path mounted from the host. (Default: ``/var/log/journald``)
+WATCHER_SCALYR_JOURNALD_WRITE_RATE
+ Journald monitor write rate. (Default: 10000)
+
+WATCHER_SCALYR_JOURNALD_WRITE_BURST
+ Journald monitor write burst. (Default: 200000)
Scalyr custom parser
....................
@@ -321,8 +326,7 @@ AppDynamics configuration agent
Configuration variables can be set via Env variables:
-``WATCHER_APPDYNAMICS_DEST_PATH``
-
+WATCHER_APPDYNAMICS_DEST_PATH
AppDynamics job files path. (Required).
AppDynamics configuration agent could also add ``app_name`` and ``tier_name`` if ``appdynamics_app`` and ``appdynamics_tier`` were set in pod metadata labels.
diff --git a/kube_log_watcher/__init__.py b/kube_log_watcher/__init__.py
index 52a06cc..a3e6290 100644
--- a/kube_log_watcher/__init__.py
+++ b/kube_log_watcher/__init__.py
@@ -1,1 +1,1 @@
-__version__ = '0.14'
+__version__ = '0.15'
diff --git a/kube_log_watcher/agents/scalyr.py b/kube_log_watcher/agents/scalyr.py
index 4cfbdc1..25b913d 100644
--- a/kube_log_watcher/agents/scalyr.py
+++ b/kube_log_watcher/agents/scalyr.py
@@ -16,6 +16,8 @@ SCALYR_CONFIG_PATH = '/etc/scalyr-agent-2/agent.json'
# If exists! we expect serialized json str: '[{"container": "my-container", "parser": "my-custom-parser"}]'
SCALYR_ANNOTATION_PARSER = 'kubernetes-log-watcher/scalyr-parser'
SCALYR_DEFAULT_PARSER = 'json'
+SCALYR_DEFAULT_WRITE_RATE = 10000
+SCALYR_DEFAULT_WRITE_BURST = 200000
logger = logging.getLogger('kube_log_watcher')
@@ -54,6 +56,8 @@ class ScalyrAgent(BaseWatcher):
'journal_path': os.environ.get('WATCHER_SCALYR_JOURNALD_PATH'),
'attributes': json.loads(attributes_str),
'extra_fields': json.loads(extra_fields_str),
+ 'write_rate': int(os.environ.get('WATCHER_SCALYR_JOURNALD_WRITE_RATE', SCALYR_DEFAULT_WRITE_RATE)),
+ 'write_burst': int(os.environ.get('WATCHER_SCALYR_JOURNALD_WRITE_BURST', SCALYR_DEFAULT_WRITE_BURST)),
}
self.journald['attributes']['cluster'] = cluster_id
diff --git a/kube_log_watcher/templates/scalyr.json.jinja2 b/kube_log_watcher/templates/scalyr.json.jinja2
index 6442841..f8c7fb3 100644
--- a/kube_log_watcher/templates/scalyr.json.jinja2
+++ b/kube_log_watcher/templates/scalyr.json.jinja2
@@ -47,8 +47,8 @@
{% endif %}
"module": "scalyr_agent.builtin_monitors.journald_monitor",
- "monitor_log_write_rate": 10000,
- "monitor_log_max_write_burst": 200000
+ "monitor_log_write_rate": {{ monitor_journald.write_rate }},
+ "monitor_log_max_write_burst": {{ monitor_journald.write_burst }}
}
{% endif %}
]
| Make Journald monitor write rate limit configurable
Current settings:
```
"monitor_log_write_rate": 10000,
"monitor_log_max_write_burst": 200000
``` | zalando-incubator/kubernetes-log-watcher | diff --git a/tests/conftest.py b/tests/conftest.py
index 2707ed5..6d5ad6a 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -10,7 +10,10 @@ APPDYNAMICS_DEST_PATH = '/var/log/watcher/'
SCALYR_DEST_PATH = '/var/log/watcher/'
SCALYR_KEY = 'scalyr-key-123'
-SCALYR_JOURNALD_DEFAULTS = {'journal_path': None, 'attributes': {'cluster': CLUSTER_ID}, 'extra_fields': {}}
+SCALYR_JOURNALD_DEFAULTS = {
+ 'journal_path': None, 'attributes': {'cluster': CLUSTER_ID}, 'extra_fields': {}, 'write_rate': 10000,
+ 'write_burst': 200000
+}
SCALYR_ANNOTATION_PARSER = 'kubernetes-log-watcher/scalyr-parser'
TARGET = {
diff --git a/tests/test_scalyr.py b/tests/test_scalyr.py
index 59155ab..1f14c56 100644
--- a/tests/test_scalyr.py
+++ b/tests/test_scalyr.py
@@ -24,6 +24,11 @@ ENVS = (
'WATCHER_SCALYR_API_KEY': SCALYR_KEY, 'WATCHER_SCALYR_DEST_PATH': SCALYR_DEST_PATH,
'WATCHER_SCALYR_JOURNALD': 'true',
},
+ {
+ 'WATCHER_SCALYR_API_KEY': SCALYR_KEY, 'WATCHER_SCALYR_DEST_PATH': SCALYR_DEST_PATH,
+ 'WATCHER_SCALYR_JOURNALD': 'true', 'WATCHER_SCALYR_JOURNALD_WRITE_RATE': 1,
+ 'WATCHER_SCALYR_JOURNALD_WRITE_BURST': 2,
+ },
)
KWARGS_KEYS = ('scalyr_key', 'cluster_id', 'logs', 'monitor_journald')
@@ -45,7 +50,12 @@ def assert_agent(agent, env):
assert agent.config_path == env.get('WATCHER_SCALYR_CONFIG_PATH', SCALYR_CONFIG_PATH)
journald = env.get('WATCHER_SCALYR_JOURNALD')
- assert agent.journald == (SCALYR_JOURNALD_DEFAULTS if journald else None)
+ journald_defaults = copy.deepcopy(SCALYR_JOURNALD_DEFAULTS)
+ if env.get('WATCHER_SCALYR_JOURNALD_WRITE_RATE'):
+ journald_defaults['write_rate'] = env.get('WATCHER_SCALYR_JOURNALD_WRITE_RATE')
+ if env.get('WATCHER_SCALYR_JOURNALD_WRITE_BURST'):
+ journald_defaults['write_burst'] = env.get('WATCHER_SCALYR_JOURNALD_WRITE_BURST')
+ assert agent.journald == (journald_defaults if journald else None)
assert agent.cluster_id == CLUSTER_ID
@@ -369,7 +379,10 @@ def test_remove_log_target(monkeypatch, env, exc):
(
{
'scalyr_key': SCALYR_KEY, 'cluster_id': CLUSTER_ID, 'logs': [],
- 'monitor_journald': {'journal_path': None, 'attributes': {}, 'extra_fields': {}},
+ 'monitor_journald': {
+ 'journal_path': None, 'attributes': {}, 'extra_fields': {}, 'write_rate': 10000,
+ 'write_burst': 200000
+ },
},
{
'api_key': 'scalyr-key-123',
@@ -393,7 +406,9 @@ def test_remove_log_target(monkeypatch, env, exc):
'monitor_journald': {
'journal_path': '/var/log/journal',
'attributes': {'cluster': CLUSTER_ID, 'node': NODE},
- 'extra_fields': {'_COMM': 'command'}
+ 'extra_fields': {'_COMM': 'command'},
+ 'write_rate': 10000,
+ 'write_burst': 200000,
},
},
{
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 5
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest_cov",
"mock",
"flake8"
],
"pre_install": [
"apt-get update",
"apt-get install -y python3-dev libffi-dev libssl-dev"
],
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
backports.zoneinfo==0.2.1
certifi==2021.5.30
charset-normalizer==2.0.12
coverage==6.2
flake8==5.0.4
httplib2==0.22.0
idna==3.10
importlib-metadata==4.2.0
importlib-resources==5.4.0
iniconfig==1.1.1
Jinja2==2.8
-e git+https://github.com/zalando-incubator/kubernetes-log-watcher.git@914da75de2439781884273b859bf663fc5e1072c#egg=kubernetes_log_watcher
MarkupSafe==2.0.1
mccabe==0.7.0
mock==5.2.0
oauth2client==4.1.3
oauthlib==3.2.2
packaging==21.3
pluggy==1.0.0
py==1.11.0
pyasn1==0.5.1
pyasn1-modules==0.3.0
pycodestyle==2.9.1
pyflakes==2.5.0
pykube==0.15.0
pyparsing==3.1.4
pytest==7.0.1
pytest-cov==4.0.0
pytz-deprecation-shim==0.1.0.post0
PyYAML==6.0.1
requests==2.27.1
requests-oauthlib==2.0.0
rsa==4.9
six==1.17.0
tomli==1.2.3
typing_extensions==4.1.1
tzdata==2025.2
tzlocal==4.2
urllib3==1.26.20
zipp==3.6.0
| name: kubernetes-log-watcher
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- backports-zoneinfo==0.2.1
- charset-normalizer==2.0.12
- coverage==6.2
- flake8==5.0.4
- httplib2==0.22.0
- idna==3.10
- importlib-metadata==4.2.0
- importlib-resources==5.4.0
- iniconfig==1.1.1
- jinja2==2.8
- markupsafe==2.0.1
- mccabe==0.7.0
- mock==5.2.0
- oauth2client==4.1.3
- oauthlib==3.2.2
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pyasn1==0.5.1
- pyasn1-modules==0.3.0
- pycodestyle==2.9.1
- pyflakes==2.5.0
- pykube==0.15.0
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-cov==4.0.0
- pytz-deprecation-shim==0.1.0.post0
- pyyaml==6.0.1
- requests==2.27.1
- requests-oauthlib==2.0.0
- rsa==4.9
- six==1.17.0
- tomli==1.2.3
- typing-extensions==4.1.1
- tzdata==2025.2
- tzlocal==4.2
- urllib3==1.26.20
- zipp==3.6.0
prefix: /opt/conda/envs/kubernetes-log-watcher
| [
"tests/test_scalyr.py::test_add_log_target[fx_scalyr0-env2]",
"tests/test_scalyr.py::test_add_log_target[fx_scalyr0-env3]",
"tests/test_scalyr.py::test_add_log_target[fx_scalyr1-env2]",
"tests/test_scalyr.py::test_add_log_target[fx_scalyr1-env3]",
"tests/test_scalyr.py::test_add_log_target[fx_scalyr2-env2]",
"tests/test_scalyr.py::test_add_log_target[fx_scalyr2-env3]",
"tests/test_scalyr.py::test_add_log_target_no_src[fx_scalyr0-env2]",
"tests/test_scalyr.py::test_add_log_target_no_src[fx_scalyr0-env3]",
"tests/test_scalyr.py::test_add_log_target_no_src[fx_scalyr1-env2]",
"tests/test_scalyr.py::test_add_log_target_no_src[fx_scalyr1-env3]",
"tests/test_scalyr.py::test_add_log_target_no_src[fx_scalyr2-env2]",
"tests/test_scalyr.py::test_add_log_target_no_src[fx_scalyr2-env3]",
"tests/test_scalyr.py::test_add_log_target_no_change[fx_scalyr0-env2]",
"tests/test_scalyr.py::test_add_log_target_no_change[fx_scalyr0-env3]",
"tests/test_scalyr.py::test_add_log_target_no_change[fx_scalyr1-env2]",
"tests/test_scalyr.py::test_add_log_target_no_change[fx_scalyr1-env3]",
"tests/test_scalyr.py::test_add_log_target_no_change[fx_scalyr2-env2]",
"tests/test_scalyr.py::test_add_log_target_no_change[fx_scalyr2-env3]",
"tests/test_scalyr.py::test_flush_failure[fx_scalyr0-env2]",
"tests/test_scalyr.py::test_flush_failure[fx_scalyr0-env3]",
"tests/test_scalyr.py::test_flush_failure[fx_scalyr1-env2]",
"tests/test_scalyr.py::test_flush_failure[fx_scalyr1-env3]",
"tests/test_scalyr.py::test_flush_failure[fx_scalyr2-env2]",
"tests/test_scalyr.py::test_flush_failure[fx_scalyr2-env3]"
]
| []
| [
"tests/test_scalyr.py::test_initialization_failure[env0-exists0]",
"tests/test_scalyr.py::test_initialization_failure[env1-exists1]",
"tests/test_scalyr.py::test_initialization_failure[env2-exists2]",
"tests/test_scalyr.py::test_initialization_failure[env3-exists3]",
"tests/test_scalyr.py::test_add_log_target[fx_scalyr0-env0]",
"tests/test_scalyr.py::test_add_log_target[fx_scalyr0-env1]",
"tests/test_scalyr.py::test_add_log_target[fx_scalyr1-env0]",
"tests/test_scalyr.py::test_add_log_target[fx_scalyr1-env1]",
"tests/test_scalyr.py::test_add_log_target[fx_scalyr2-env0]",
"tests/test_scalyr.py::test_add_log_target[fx_scalyr2-env1]",
"tests/test_scalyr.py::test_add_log_target_no_src[fx_scalyr0-env0]",
"tests/test_scalyr.py::test_add_log_target_no_src[fx_scalyr0-env1]",
"tests/test_scalyr.py::test_add_log_target_no_src[fx_scalyr1-env0]",
"tests/test_scalyr.py::test_add_log_target_no_src[fx_scalyr1-env1]",
"tests/test_scalyr.py::test_add_log_target_no_src[fx_scalyr2-env0]",
"tests/test_scalyr.py::test_add_log_target_no_src[fx_scalyr2-env1]",
"tests/test_scalyr.py::test_add_log_target_no_change[fx_scalyr0-env0]",
"tests/test_scalyr.py::test_add_log_target_no_change[fx_scalyr0-env1]",
"tests/test_scalyr.py::test_add_log_target_no_change[fx_scalyr1-env0]",
"tests/test_scalyr.py::test_add_log_target_no_change[fx_scalyr1-env1]",
"tests/test_scalyr.py::test_add_log_target_no_change[fx_scalyr2-env0]",
"tests/test_scalyr.py::test_add_log_target_no_change[fx_scalyr2-env1]",
"tests/test_scalyr.py::test_flush_failure[fx_scalyr0-env0]",
"tests/test_scalyr.py::test_flush_failure[fx_scalyr0-env1]",
"tests/test_scalyr.py::test_flush_failure[fx_scalyr1-env0]",
"tests/test_scalyr.py::test_flush_failure[fx_scalyr1-env1]",
"tests/test_scalyr.py::test_flush_failure[fx_scalyr2-env0]",
"tests/test_scalyr.py::test_flush_failure[fx_scalyr2-env1]",
"tests/test_scalyr.py::test_get_current_log_paths[env0-config0-result0]",
"tests/test_scalyr.py::test_get_current_log_paths[env1-Exception-result1]",
"tests/test_scalyr.py::test_remove_log_target[env0-None]",
"tests/test_scalyr.py::test_remove_log_target[env1-Exception]",
"tests/test_scalyr.py::test_tpl_render[kwargs0-expected0]",
"tests/test_scalyr.py::test_tpl_render[kwargs1-expected1]",
"tests/test_scalyr.py::test_tpl_render[kwargs2-expected2]"
]
| []
| MIT License | 1,314 | [
"CHANGES.txt",
"kube_log_watcher/templates/scalyr.json.jinja2",
"kube_log_watcher/agents/scalyr.py",
"README.rst",
"kube_log_watcher/__init__.py"
]
| [
"CHANGES.txt",
"kube_log_watcher/templates/scalyr.json.jinja2",
"kube_log_watcher/agents/scalyr.py",
"README.rst",
"kube_log_watcher/__init__.py"
]
|
ipython__ipython-10618 | be46bc7bdccc3e147ccd509e6d50f36869f5b84c | 2017-05-30 18:50:07 | be46bc7bdccc3e147ccd509e6d50f36869f5b84c | Carreau: Wow, that's not what I was meaning to do, but that's ok too. | diff --git a/IPython/core/completer.py b/IPython/core/completer.py
index fbcf45af7..06a1348b3 100644
--- a/IPython/core/completer.py
+++ b/IPython/core/completer.py
@@ -136,7 +136,6 @@
from IPython.core.error import TryNext
from IPython.core.inputsplitter import ESC_MAGIC
from IPython.core.latex_symbols import latex_symbols, reverse_latex_symbol
-from IPython.core.oinspect import InspectColors
from IPython.utils import generics
from IPython.utils.dir2 import dir2, get_real_method
from IPython.utils.process import arg_split
@@ -1050,14 +1049,11 @@ def __init__(self, shell=None, namespace=None, global_namespace=None,
self.matchers = [
self.python_matches,
self.file_matches,
+ self.magic_config_matches,
self.magic_matches,
self.python_func_kw_matches,
self.dict_key_matches,
]
- self.magic_arg_matchers = [
- self.magic_config_matches,
- self.magic_color_matches,
- ]
# This is set externally by InteractiveShell
self.custom_completers = None
@@ -1185,11 +1181,13 @@ def magic_matches(self, text):
return comp
- def magic_config_matches(self, text:str) -> List[str]:
+ def magic_config_matches(self, text):
""" Match class names and attributes for %config magic """
- texts = text.strip().split()
+ # use line buffer instead of text (which is a word)
+ texts = self.line_buffer.strip().split()
- if len(texts) > 0 and (texts[0] == 'config' or texts[0] == '%config'):
+ if len(texts) > 0 and \
+ ('config'.startswith(texts[0]) or '%config'.startswith(texts[0])):
# get all configuration classes
classes = sorted(set([ c for c in self.shell.configurables
if c.__class__.class_traits(config=True)
@@ -1220,16 +1218,6 @@ def magic_config_matches(self, text:str) -> List[str]:
if attr.startswith(texts[1]) ]
return []
- def magic_color_matches(self, text:str) -> List[str] :
- """ Match color schemes for %colors magic"""
- texts = text.strip().split()
-
- if len(texts) > 0 and (texts[0] == 'colors' or texts[0] == '%colors'):
- prefix = texts[1] if len(texts) > 1 else ''
- return [ color for color in InspectColors.keys()
- if color.startswith(prefix) ]
- return []
-
def _jedi_matches(self, cursor_column:int, cursor_line:int, text:str):
"""
@@ -1890,14 +1878,6 @@ def _complete(self, *, cursor_line, cursor_pos, line_buffer=None, text=None,
self.line_buffer = line_buffer
self.text_until_cursor = self.line_buffer[:cursor_pos]
- # Do magic arg matches
- for matcher in self.magic_arg_matchers:
- matches = [(m, matcher.__qualname__) for m in matcher(line_buffer)]
- if matches:
- matches2 = [m[0] for m in matches]
- origins = [m[1] for m in matches]
- return text, matches2, origins, ()
-
# Start with a clean slate of completions
matches = []
custom_res = self.dispatch_custom_completer(text)
diff --git a/IPython/core/display.py b/IPython/core/display.py
index 96015dedc..e1875e122 100644
--- a/IPython/core/display.py
+++ b/IPython/core/display.py
@@ -145,9 +145,6 @@ def display(*objs, include=None, exclude=None, metadata=None, transient=None, di
By default all representations will be computed and sent to the frontends.
Frontends can decide which representation is used and how.
- In terminal IPython this will be similar to using :func:`print`, for use in richer
- frontends see Jupyter notebook examples with rich display logic.
-
Parameters
----------
objs : tuple of objects
@@ -155,11 +152,11 @@ def display(*objs, include=None, exclude=None, metadata=None, transient=None, di
raw : bool, optional
Are the objects to be displayed already mimetype-keyed dicts of raw display data,
or Python objects that need to be formatted before display? [default: False]
- include : list, tuple or set, optional
+ include : list or tuple, optional
A list of format type strings (MIME types) to include in the
format data dict. If this is set *only* the format types included
in this list will be computed.
- exclude : list, tuple or set, optional
+ exclude : list or tuple, optional
A list of format type strings (MIME types) to exclude in the format
data dict. If this is set all format types will be computed,
except for those included in this argument.
@@ -170,119 +167,25 @@ def display(*objs, include=None, exclude=None, metadata=None, transient=None, di
transient : dict, optional
A dictionary of transient data to associate with the output.
Data in this dict should not be persisted to files (e.g. notebooks).
- display_id : str, bool optional
+ display_id : str, optional
Set an id for the display.
This id can be used for updating this display area later via update_display.
- If given as `True`, generate a new `display_id`
+ If given as True, generate a new display_id
kwargs: additional keyword-args, optional
Additional keyword-arguments are passed through to the display publisher.
-
+
Returns
-------
-
+
handle: DisplayHandle
- Returns a handle on updatable displays for use with :func:`update_display`,
- if `display_id` is given. Returns :any:`None` if no `display_id` is given
- (default).
-
- Examples
- --------
-
- >>> class Json(object):
- ... def __init__(self, json):
- ... self.json = json
- ... def _repr_pretty_(self, pp, cycle):
- ... import json
- ... pp.text(json.dumps(self.json, indent=2))
- ... def __repr__(self):
- ... return str(self.json)
- ...
-
- >>> d = Json({1:2, 3: {4:5}})
-
- >>> print(d)
- {1: 2, 3: {4: 5}}
-
- >>> display(d)
- {
- "1": 2,
- "3": {
- "4": 5
- }
- }
-
- >>> def int_formatter(integer, pp, cycle):
- ... pp.text('I'*integer)
-
- >>> plain = get_ipython().display_formatter.formatters['text/plain']
- >>> plain.for_type(int, int_formatter)
- <function _repr_pprint at 0x...>
- >>> display(7-5)
- II
-
- >>> del plain.type_printers[int]
- >>> display(7-5)
- 2
-
- See Also
- --------
-
- :func:`update_display`
-
- Notes
- -----
-
- In Python, objects can declare their textual representation using the
- `__repr__` method. IPython expands on this idea and allows objects to declare
- other, rich representations including:
-
- - HTML
- - JSON
- - PNG
- - JPEG
- - SVG
- - LaTeX
-
- A single object can declare some or all of these representations; all are
- handled by IPython's display system.
-
- The main idea of the first approach is that you have to implement special
- display methods when you define your class, one for each representation you
- want to use. Here is a list of the names of the special methods and the
- values they must return:
-
- - `_repr_html_`: return raw HTML as a string
- - `_repr_json_`: return a JSONable dict
- - `_repr_jpeg_`: return raw JPEG data
- - `_repr_png_`: return raw PNG data
- - `_repr_svg_`: return raw SVG data as a string
- - `_repr_latex_`: return LaTeX commands in a string surrounded by "$".
- - `_repr_mimebundle_`: return a full mimebundle containing the mapping
- from all mimetypes to data
-
- When you are directly writing your own classes, you can adapt them for
- display in IPython by following the above approach. But in practice, you
- often need to work with existing classes that you can't easily modify.
-
- You can refer to the documentation on IPython display formatters in order to
- register custom formatters for already existing types.
-
- .. versionadded:: 5.4 display available without import
- .. versionadded:: 6.1 display available without import
-
- Since IPython 5.4 and 6.1 :func:`display` is automatically made available to
- the user without import. If you are using display in a document that might
- be used in a pure python context or with older version of IPython, use the
- following import at the top of your file::
-
- from IPython.display import display
-
+ Returns a handle on updatable displays, if display_id is given.
+ Returns None if no display_id is given (default).
"""
raw = kwargs.pop('raw', False)
if transient is None:
transient = {}
if display_id:
- if display_id is True:
+ if display_id == True:
display_id = _new_id()
transient['display_id'] = display_id
if kwargs.get('update') and 'display_id' not in transient:
@@ -322,11 +225,6 @@ def update_display(obj, *, display_id, **kwargs):
The object with which to update the display
display_id: keyword-only
The id of the display to update
-
- See Also
- --------
-
- :func:`display`
"""
kwargs['update'] = True
display(obj, display_id=display_id, **kwargs)
@@ -335,16 +233,10 @@ def update_display(obj, *, display_id, **kwargs):
class DisplayHandle(object):
"""A handle on an updatable display
- Call `.update(obj)` to display a new object.
+ Call .update(obj) to display a new object.
- Call `.display(obj`) to add a new instance of this display,
+ Call .display(obj) to add a new instance of this display,
and update existing instances.
-
- See Also
- --------
-
- :func:`display`, :func:`update_display`
-
"""
def __init__(self, display_id=None):
diff --git a/IPython/core/interactiveshell.py b/IPython/core/interactiveshell.py
index 42f4cdbf3..b9cee99e5 100644
--- a/IPython/core/interactiveshell.py
+++ b/IPython/core/interactiveshell.py
@@ -34,6 +34,7 @@
from IPython.core import magic
from IPython.core import page
from IPython.core import prefilter
+from IPython.core import shadowns
from IPython.core import ultratb
from IPython.core.alias import Alias, AliasManager
from IPython.core.autocall import ExitAutocall
@@ -55,7 +56,6 @@
from IPython.core.prefilter import PrefilterManager
from IPython.core.profiledir import ProfileDir
from IPython.core.usage import default_banner
-from IPython.display import display
from IPython.testing.skipdoctest import skip_doctest
from IPython.utils import PyColorize
from IPython.utils import io
@@ -618,7 +618,6 @@ def init_builtins(self):
# removing on exit or representing the existence of more than one
# IPython at a time.
builtin_mod.__dict__['__IPYTHON__'] = True
- builtin_mod.__dict__['display'] = display
self.builtin_trap = BuiltinTrap(shell=self)
@@ -1156,6 +1155,8 @@ def init_user_ns(self):
ns['_oh'] = self.history_manager.output_hist
ns['_dh'] = self.history_manager.dir_hist
+ ns['_sh'] = shadowns
+
# user aliases to input and output histories. These shouldn't show up
# in %who, as they can have very large reprs.
ns['In'] = self.history_manager.input_hist_parsed
diff --git a/IPython/core/shadowns.py b/IPython/core/shadowns.py
new file mode 100644
index 000000000..d2d93b61b
--- /dev/null
+++ b/IPython/core/shadowns.py
@@ -0,0 +1,1 @@
+""" Shadow namespace """
\ No newline at end of file
diff --git a/IPython/terminal/interactiveshell.py b/IPython/terminal/interactiveshell.py
index 9f9ef779c..14b997047 100644
--- a/IPython/terminal/interactiveshell.py
+++ b/IPython/terminal/interactiveshell.py
@@ -7,7 +7,7 @@
from IPython.core.interactiveshell import InteractiveShell, InteractiveShellABC
from IPython.utils import io
-from IPython.utils.py3compat import input, cast_unicode_py2
+from IPython.utils.py3compat import input
from IPython.utils.terminal import toggle_set_term_title, set_term_title
from IPython.utils.process import abbrev_cwd
from traitlets import (
@@ -101,7 +101,7 @@ def _space_for_menu_changed(self, old, new):
_pt_app = None
simple_prompt = Bool(_use_simple_prompt,
- help="""Use `raw_input` for the REPL, without completion and prompt colors.
+ help="""Use `raw_input` for the REPL, without completion, multiline input, and prompt colors.
Useful when controlling IPython as a subprocess, and piping STDIN/OUT/ERR. Known usage are:
IPython own testing machinery, and emacs inferior-shell integration through elpy.
@@ -227,14 +227,7 @@ def init_prompt_toolkit_cli(self):
# Fall back to plain non-interactive output for tests.
# This is very limited, and only accepts a single line.
def prompt():
- isp = self.input_splitter
- prompt_text = "".join(x[1] for x in self.prompts.in_prompt_tokens())
- prompt_continuation = "".join(x[1] for x in self.prompts.continuation_prompt_tokens())
- while isp.push_accepts_more():
- line = cast_unicode_py2(input(prompt_text))
- isp.push(line)
- prompt_text = prompt_continuation
- return isp.source_reset()
+ return input('In [%d]: ' % self.execution_count)
self.prompt_for_code = prompt
return
diff --git a/IPython/utils/path.py b/IPython/utils/path.py
index a766cbbf7..1e7284d08 100644
--- a/IPython/utils/path.py
+++ b/IPython/utils/path.py
@@ -13,6 +13,7 @@
import random
import glob
from warnings import warn
+from hashlib import md5
from IPython.utils.process import system
from IPython.utils import py3compat
@@ -364,6 +365,13 @@ def target_update(target,deps,cmd):
if target_outdated(target,deps):
system(cmd)
+@undoc
+def filehash(path):
+ """Make an MD5 hash of a file, ignoring any differences in line
+ ending characters."""
+ warn("filehash() is deprecated since IPython 4.0", DeprecationWarning, stacklevel=2)
+ with open(path, "rU") as f:
+ return md5(py3compat.str_to_bytes(f.read())).hexdigest()
ENOLINK = 1998
diff --git a/docs/source/config/details.rst b/docs/source/config/details.rst
index f1967ded8..2686c4eff 100644
--- a/docs/source/config/details.rst
+++ b/docs/source/config/details.rst
@@ -228,3 +228,45 @@ a :ref:`startup file <startup_files>`::
For more information on filters and what you can do with the ``event`` object,
`see the prompt_toolkit docs
<http://python-prompt-toolkit.readthedocs.io/en/latest/pages/building_prompts.html#adding-custom-key-bindings>`__.
+
+
+Enter to execute
+----------------
+
+In the Terminal IPython shell – which by default uses the ``prompt_toolkit``
+interface, the semantic meaning of pressing the :kbd:`Enter` key can be
+ambiguous. In some case :kbd:`Enter` should execute code, and in others it
+should add a new line. IPython uses heuristics to decide whether to execute or
+insert a new line at cursor position. For example, if we detect that the current
+code is not valid Python, then the user is likely editing code and the right
+behavior is to likely to insert a new line. If the current code is a simple
+statement like `ord('*')`, then the right behavior is likely to execute. Though
+the exact desired semantics often varies from users to users.
+
+As the exact behavior of :kbd:`Enter` is is ambiguous, it has been special cased
+to allow users to completely configure the behavior they like. Hence you can
+have enter always execute code. If you prefer fancier behavior, you need to get
+your hands dirty and read the ``prompt_toolkit`` and IPython documentation
+though. See :ghpull:`10500`, set the
+``c.TerminalInteractiveShell.handle_return`` option and get inspiration from the
+following example that insert new lines only after a pipe (``|``). Place the
+following in your configuration to do so::
+
+ def new_line_after_pipe(shell):
+ # shell is the same as get_ipython()
+ def insert(event):
+ """When the user presses return, insert"""
+ b = event.current_buffer
+ d = b.document
+
+ # if character before cursor is `|`
+ if d.text[d.cursor_position-1] == '|':
+ # insert a new line
+ b.insert_text('\n')
+ else:
+ # otherwise execute.
+ b.accept_action.validate_and_handle(event.cli, b)
+ return insert
+
+ # set the heuristic to our new function
+ c.TerminalInteractiveShell.handle_return = new_line_after_pipe
diff --git a/docs/source/coredev/index.rst b/docs/source/coredev/index.rst
index 96bb128b9..64c78691e 100644
--- a/docs/source/coredev/index.rst
+++ b/docs/source/coredev/index.rst
@@ -22,11 +22,10 @@ If you are an admin on the IPython repository just mention the **backport bot**
do the work for you. The bot is evolving so instructions may be different. At
the time of this writing you can use::
- @meeseeksdev[bot] backport [to <branchname>]
+ @meeseeksdev[bot] backport to <branchname>
The bot will attempt to backport the current pull-request and issue a PR if
-possible. If the milestone is set on the issue you can omit the branch to
-backport to.
+possible.
.. note::
diff --git a/docs/source/interactive/plotting.rst b/docs/source/interactive/plotting.rst
index 2bf67b414..8d243d8c2 100644
--- a/docs/source/interactive/plotting.rst
+++ b/docs/source/interactive/plotting.rst
@@ -1,32 +1,7 @@
.. _plotting:
-Rich Outputs
-------------
-
-One of the main feature of IPython when used as a kernel is its ability to
-show rich output. This means that object that can be representing as image,
-sounds, animation, (etc...) can be shown this way if the frontend support it.
-
-In order for this to be possible, you need to use the ``display()`` function,
-that should be available by default on IPython 5.4+ and 6.1+, or that you can
-import with ``from IPython.display import display``. Then use ``display(<your
-object>)`` instead of ``print()``, and if possible your object will be displayed
-with a richer representation. In the terminal of course, there wont be much
-difference as object are most of the time represented by text, but in notebook
-and similar interface you will get richer outputs.
-
-
Plotting
--------
-
-.. note::
-
- Starting with IPython 5.0 and matplotlib 2.0 you can avoid the use of
- IPython's specific magic and use
- ``matplotlib.pyplot.ion()``/``matplotlib.pyplot.ioff()`` which have the
- advantages of working outside of IPython as well.
-
-
One major feature of the IPython kernel is the ability to display plots that
are the output of running code cells. The IPython kernel is designed to work
seamlessly with the matplotlib_ plotting library to provide this functionality.
diff --git a/docs/source/whatsnew/github-stats-5.rst b/docs/source/whatsnew/github-stats-5.rst
index 4b9dcf8e0..fddb62125 100644
--- a/docs/source/whatsnew/github-stats-5.rst
+++ b/docs/source/whatsnew/github-stats-5.rst
@@ -3,56 +3,6 @@
Issues closed in the 5.x development cycle
==========================================
-
-Issues closed in 5.4
---------------------
-
-GitHub stats for 2017/02/24 - 2017/05/30 (tag: 5.3.0)
-
-These lists are automatically generated, and may be incomplete or contain duplicates.
-
-We closed 8 issues and merged 43 pull requests.
-The full list can be seen `on GitHub <https://github.com/ipython/ipython/issues?q=milestone%3A5.4+>`__
-
-The following 11 authors contributed 64 commits.
-
-* Benjamin Ragan-Kelley
-* Carol Willing
-* Kyle Kelley
-* Leo Singer
-* Luke Pfister
-* Lumir Balhar
-* Matthias Bussonnier
-* meeseeksdev[bot]
-* memeplex
-* Thomas Kluyver
-* Ximin Luo
-
-Issues closed in 5.3
---------------------
-
-GitHub stats for 2017/02/24 - 2017/05/30 (tag: 5.3.0)
-
-These lists are automatically generated, and may be incomplete or contain duplicates.
-
-We closed 6 issues and merged 28 pull requests.
-The full list can be seen `on GitHub <https://github.com/ipython/ipython/issues?q=milestone%3A5.3+>`__
-
-The following 11 authors contributed 53 commits.
-
-* Benjamin Ragan-Kelley
-* Carol Willing
-* Justin Jent
-* Kyle Kelley
-* Lumir Balhar
-* Matthias Bussonnier
-* meeseeksdev[bot]
-* Segev Finer
-* Steven Maude
-* Thomas A Caswell
-* Thomas Kluyver
-
-
Issues closed in 5.2
--------------------
diff --git a/docs/source/whatsnew/github-stats-6.rst b/docs/source/whatsnew/github-stats-6.rst
index 520c60ce4..08c08de24 100644
--- a/docs/source/whatsnew/github-stats-6.rst
+++ b/docs/source/whatsnew/github-stats-6.rst
@@ -1,46 +1,6 @@
Issues closed in the 6.x development cycle
==========================================
-Issues closed in 6.1
---------------------
-
-GitHub stats for 2017/04/19 - 2017/05/30 (tag: 6.0.0)
-
-These lists are automatically generated, and may be incomplete or contain duplicates.
-
-We closed 10 issues and merged 43 pull requests.
-The full list can be seen `on GitHub <https://github.com/ipython/ipython/issues?q=milestone%3A6.1+>`__
-
-The following 26 authors contributed 116 commits.
-
-* Alex Alekseyev
-* Benjamin Ragan-Kelley
-* Brian E. Granger
-* Christopher C. Aycock
-* Dave Willmer
-* David Bradway
-* ICanWaitAndFishAllDay
-* Ignat Shining
-* Jarrod Janssen
-* Joshua Storck
-* Luke Pfister
-* Matthias Bussonnier
-* Matti Remes
-* meeseeksdev[bot]
-* memeplex
-* Ming Zhang
-* Nick Weseman
-* Paul Ivanov
-* Piotr Zielinski
-* ryan thielke
-* sagnak
-* Sang Min Park
-* Srinivas Reddy Thatiparthy
-* Steve Bartz
-* Thomas Kluyver
-* Tory Haavik
-
-
Issues closed in 6.0
--------------------
diff --git a/docs/source/whatsnew/version5.rst b/docs/source/whatsnew/version5.rst
index b11115ef4..3a4169e91 100644
--- a/docs/source/whatsnew/version5.rst
+++ b/docs/source/whatsnew/version5.rst
@@ -53,19 +53,6 @@ Implement display id and ability to update a given display. This should greatly
simplify a lot of code by removing the need for widgets and allow other frontend
to implement things like progress-bars. See :ghpull:`10048`
-Display function
-----------------
-
-The :func:`display() <IPython.display.display>` function is now available by
-default in an IPython session, meaning users can call it on any object to see
-their rich representation. This should allow for better interactivity both at
-the REPL and in notebook environment.
-
-Scripts and library that rely on display and may be run outside of IPython still
-need to import the display function using ``from IPython.display import
-display``. See :ghpull:`10596`
-
-
Miscs
-----
| Add narrative docs and example for #10500 custom keyboard function for enter
See #10500 even if unstable/dev only it might be good to document. | ipython/ipython | diff --git a/IPython/core/tests/test_completer.py b/IPython/core/tests/test_completer.py
index 179158862..1beff0ce8 100644
--- a/IPython/core/tests/test_completer.py
+++ b/IPython/core/tests/test_completer.py
@@ -578,22 +578,21 @@ def test_magic_completion_shadowing():
nt.assert_equal(matches, ["%matplotlib"])
+
def test_magic_config():
ip = get_ipython()
c = ip.Completer
s, matches = c.complete(None, 'conf')
nt.assert_in('%config', matches)
- s, matches = c.complete(None, 'conf')
- nt.assert_not_in('AliasManager', matches)
s, matches = c.complete(None, 'config ')
nt.assert_in('AliasManager', matches)
s, matches = c.complete(None, '%config ')
nt.assert_in('AliasManager', matches)
s, matches = c.complete(None, 'config Ali')
- nt.assert_list_equal(['AliasManager'], matches)
+ nt.assert_in('AliasManager', matches)
s, matches = c.complete(None, '%config Ali')
- nt.assert_list_equal(['AliasManager'], matches)
+ nt.assert_in('AliasManager', matches)
s, matches = c.complete(None, 'config AliasManager')
nt.assert_list_equal(['AliasManager'], matches)
s, matches = c.complete(None, '%config AliasManager')
@@ -603,27 +602,9 @@ def test_magic_config():
s, matches = c.complete(None, '%config AliasManager.')
nt.assert_in('AliasManager.default_aliases', matches)
s, matches = c.complete(None, 'config AliasManager.de')
- nt.assert_list_equal(['AliasManager.default_aliases'], matches)
+ nt.assert_in('AliasManager.default_aliases', matches)
s, matches = c.complete(None, 'config AliasManager.de')
- nt.assert_list_equal(['AliasManager.default_aliases'], matches)
-
-
-def test_magic_color():
- ip = get_ipython()
- c = ip.Completer
-
- s, matches = c.complete(None, 'colo')
- nt.assert_in('%colors', matches)
- s, matches = c.complete(None, 'colo')
- nt.assert_not_in('NoColor', matches)
- s, matches = c.complete(None, 'colors ')
- nt.assert_in('NoColor', matches)
- s, matches = c.complete(None, '%colors ')
- nt.assert_in('NoColor', matches)
- s, matches = c.complete(None, 'colors NoCo')
- nt.assert_list_equal(['NoColor'], matches)
- s, matches = c.complete(None, '%colors NoCo')
- nt.assert_list_equal(['NoColor'], matches)
+ nt.assert_in('AliasManager.default_aliases', matches)
def test_match_dict_keys():
diff --git a/IPython/core/tests/test_display.py b/IPython/core/tests/test_display.py
index ac716ee03..c639f9228 100644
--- a/IPython/core/tests/test_display.py
+++ b/IPython/core/tests/test_display.py
@@ -13,7 +13,6 @@
from IPython.core.getipython import get_ipython
from IPython.utils.tempdir import NamedFileInTemporaryDirectory
from IPython import paths as ipath
-from IPython.testing.tools import AssertPrints, AssertNotPrints
import IPython.testing.decorators as dec
@@ -142,25 +141,6 @@ def test_set_matplotlib_formats_kwargs():
expected.update(cfg.print_figure_kwargs)
nt.assert_equal(cell, expected)
-def test_display_available():
- """
- Test that display is available without import
-
- We don't really care if it's in builtin or anything else, but it should
- always be available.
- """
- ip = get_ipython()
- with AssertNotPrints('NameError'):
- ip.run_cell('display')
- try:
- ip.run_cell('del display')
- except NameError:
- pass # it's ok, it might be in builtins
- # even if deleted it should be back
- with AssertNotPrints('NameError'):
- ip.run_cell('display')
-
-
def test_displayobject_repr():
h = display.HTML('<br />')
nt.assert_equal(repr(h), '<IPython.core.display.HTML object>')
diff --git a/IPython/core/tests/test_imports.py b/IPython/core/tests/test_imports.py
index 7aa278fb6..88caef0ad 100644
--- a/IPython/core/tests/test_imports.py
+++ b/IPython/core/tests/test_imports.py
@@ -45,6 +45,9 @@ def test_import_prompts():
def test_import_release():
from IPython.core import release
+def test_import_shadowns():
+ from IPython.core import shadowns
+
def test_import_ultratb():
from IPython.core import ultratb
diff --git a/IPython/lib/tests/test_pretty.py b/IPython/lib/tests/test_pretty.py
index 934498b55..268de06bb 100644
--- a/IPython/lib/tests/test_pretty.py
+++ b/IPython/lib/tests/test_pretty.py
@@ -6,9 +6,7 @@
from collections import Counter, defaultdict, deque, OrderedDict
-import types
-import string
-import unittest
+import types, string
import nose.tools as nt
@@ -183,46 +181,44 @@ class SA(object):
class SB(SA):
pass
-class TestsPretty(unittest.TestCase):
-
- def test_super_repr(self):
- # "<super: module_name.SA, None>"
- output = pretty.pretty(super(SA))
- self.assertRegex(output, r"<super: \S+.SA, None>")
-
- # "<super: module_name.SA, <module_name.SB at 0x...>>"
- sb = SB()
- output = pretty.pretty(super(SA, sb))
- self.assertRegex(output, r"<super: \S+.SA,\s+<\S+.SB at 0x\S+>>")
-
-
- def test_long_list(self):
- lis = list(range(10000))
- p = pretty.pretty(lis)
- last2 = p.rsplit('\n', 2)[-2:]
- self.assertEqual(last2, [' 999,', ' ...]'])
-
- def test_long_set(self):
- s = set(range(10000))
- p = pretty.pretty(s)
- last2 = p.rsplit('\n', 2)[-2:]
- self.assertEqual(last2, [' 999,', ' ...}'])
-
- def test_long_tuple(self):
- tup = tuple(range(10000))
- p = pretty.pretty(tup)
- last2 = p.rsplit('\n', 2)[-2:]
- self.assertEqual(last2, [' 999,', ' ...)'])
-
- def test_long_dict(self):
- d = { n:n for n in range(10000) }
- p = pretty.pretty(d)
- last2 = p.rsplit('\n', 2)[-2:]
- self.assertEqual(last2, [' 999: 999,', ' ...}'])
-
- def test_unbound_method(self):
- output = pretty.pretty(MyObj.somemethod)
- self.assertIn('MyObj.somemethod', output)
+def test_super_repr():
+ # "<super: module_name.SA, None>"
+ output = pretty.pretty(super(SA))
+ nt.assert_regexp_matches(output, r"<super: \S+.SA, None>")
+
+ # "<super: module_name.SA, <module_name.SB at 0x...>>"
+ sb = SB()
+ output = pretty.pretty(super(SA, sb))
+ nt.assert_regexp_matches(output, r"<super: \S+.SA,\s+<\S+.SB at 0x\S+>>")
+
+
+def test_long_list():
+ lis = list(range(10000))
+ p = pretty.pretty(lis)
+ last2 = p.rsplit('\n', 2)[-2:]
+ nt.assert_equal(last2, [' 999,', ' ...]'])
+
+def test_long_set():
+ s = set(range(10000))
+ p = pretty.pretty(s)
+ last2 = p.rsplit('\n', 2)[-2:]
+ nt.assert_equal(last2, [' 999,', ' ...}'])
+
+def test_long_tuple():
+ tup = tuple(range(10000))
+ p = pretty.pretty(tup)
+ last2 = p.rsplit('\n', 2)[-2:]
+ nt.assert_equal(last2, [' 999,', ' ...)'])
+
+def test_long_dict():
+ d = { n:n for n in range(10000) }
+ p = pretty.pretty(d)
+ last2 = p.rsplit('\n', 2)[-2:]
+ nt.assert_equal(last2, [' 999: 999,', ' ...}'])
+
+def test_unbound_method():
+ output = pretty.pretty(MyObj.somemethod)
+ nt.assert_in('MyObj.somemethod', output)
class MetaClass(type):
diff --git a/IPython/utils/tests/test_path.py b/IPython/utils/tests/test_path.py
index d186242e0..4ae2fa9e4 100644
--- a/IPython/utils/tests/test_path.py
+++ b/IPython/utils/tests/test_path.py
@@ -8,7 +8,6 @@
import shutil
import sys
import tempfile
-import unittest
from contextlib import contextmanager
from unittest.mock import patch
from os.path import join, abspath
@@ -318,7 +317,7 @@ def test_unicode_in_filename():
str(ex)
-class TestShellGlob(unittest.TestCase):
+class TestShellGlob(object):
@classmethod
def setUpClass(cls):
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_added_files",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 3,
"test_score": 3
},
"num_modified_files": 11
} | 6.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[test]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"nose",
"requests",
"pytest"
],
"pre_install": null,
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==25.3.0
certifi==2025.1.31
charset-normalizer==3.4.1
decorator==5.2.1
exceptiongroup==1.2.2
fastjsonschema==2.21.1
idna==3.10
importlib_metadata==8.6.1
iniconfig==2.1.0
ipykernel==5.5.6
-e git+https://github.com/ipython/ipython.git@be46bc7bdccc3e147ccd509e6d50f36869f5b84c#egg=ipython
ipython-genutils==0.2.0
jedi==0.19.2
jsonschema==4.23.0
jsonschema-specifications==2024.10.1
jupyter_client==8.6.3
jupyter_core==5.7.2
nbformat==5.10.4
nose==1.3.7
numpy==2.0.2
packaging==24.2
parso==0.8.4
pexpect==4.9.0
pickleshare==0.7.5
platformdirs==4.3.7
pluggy==1.5.0
prompt-toolkit==1.0.18
ptyprocess==0.7.0
Pygments==2.19.1
pytest==8.3.5
python-dateutil==2.9.0.post0
pyzmq==26.3.0
referencing==0.36.2
requests==2.32.3
rpds-py==0.24.0
simplegeneric==0.8.1
six==1.17.0
testpath==0.6.0
tomli==2.2.1
tornado==6.4.2
traitlets==5.14.3
typing_extensions==4.13.0
urllib3==2.3.0
wcwidth==0.2.13
zipp==3.21.0
| name: ipython
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==25.3.0
- certifi==2025.1.31
- charset-normalizer==3.4.1
- decorator==5.2.1
- exceptiongroup==1.2.2
- fastjsonschema==2.21.1
- idna==3.10
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- ipykernel==5.5.6
- ipython-genutils==0.2.0
- jedi==0.19.2
- jsonschema==4.23.0
- jsonschema-specifications==2024.10.1
- jupyter-client==8.6.3
- jupyter-core==5.7.2
- nbformat==5.10.4
- nose==1.3.7
- numpy==2.0.2
- packaging==24.2
- parso==0.8.4
- pexpect==4.9.0
- pickleshare==0.7.5
- platformdirs==4.3.7
- pluggy==1.5.0
- prompt-toolkit==1.0.18
- ptyprocess==0.7.0
- pygments==2.19.1
- pytest==8.3.5
- python-dateutil==2.9.0.post0
- pyzmq==26.3.0
- referencing==0.36.2
- requests==2.32.3
- rpds-py==0.24.0
- simplegeneric==0.8.1
- six==1.17.0
- testpath==0.6.0
- tomli==2.2.1
- tornado==6.4.2
- traitlets==5.14.3
- typing-extensions==4.13.0
- urllib3==2.3.0
- wcwidth==0.2.13
- zipp==3.21.0
prefix: /opt/conda/envs/ipython
| [
"IPython/core/tests/test_imports.py::test_import_shadowns"
]
| [
"IPython/core/tests/test_completer.py::test_custom_completion_error",
"IPython/core/tests/test_completer.py::test_unicode_completions",
"IPython/core/tests/test_completer.py::test_latex_completions",
"IPython/core/tests/test_completer.py::test_back_latex_completion",
"IPython/core/tests/test_completer.py::test_back_unicode_completion",
"IPython/core/tests/test_completer.py::test_forward_unicode_completion",
"IPython/core/tests/test_completer.py::test_no_ascii_back_completion",
"IPython/core/tests/test_completer.py::test_abspath_file_completions",
"IPython/core/tests/test_completer.py::test_local_file_completions",
"IPython/core/tests/test_completer.py::test_quoted_file_completions",
"IPython/core/tests/test_completer.py::test_deduplicate_completions",
"IPython/core/tests/test_completer.py::test_omit__names",
"IPython/core/tests/test_completer.py::test_limit_to__all__False_ok",
"IPython/core/tests/test_completer.py::test_func_kw_completions",
"IPython/core/tests/test_completer.py::test_default_arguments_from_docstring",
"IPython/core/tests/test_completer.py::test_line_magics",
"IPython/core/tests/test_completer.py::test_cell_magics",
"IPython/core/tests/test_completer.py::test_line_cell_magics",
"IPython/core/tests/test_completer.py::test_magic_completion_order",
"IPython/core/tests/test_completer.py::test_magic_completion_shadowing",
"IPython/core/tests/test_completer.py::test_magic_config",
"IPython/core/tests/test_completer.py::test_dict_key_completion_string",
"IPython/core/tests/test_completer.py::test_dict_key_completion_contexts",
"IPython/core/tests/test_completer.py::test_dict_key_completion_bytes",
"IPython/core/tests/test_completer.py::test_dict_key_completion_unicode_py3",
"IPython/core/tests/test_completer.py::test_struct_array_key_completion",
"IPython/core/tests/test_completer.py::test_dict_key_completion_invalids",
"IPython/core/tests/test_completer.py::test_object_key_completion",
"IPython/core/tests/test_completer.py::test_aimport_module_completer",
"IPython/core/tests/test_completer.py::test_nested_import_module_completer",
"IPython/core/tests/test_completer.py::test_import_module_completer",
"IPython/core/tests/test_completer.py::test_from_module_completer",
"IPython/core/tests/test_completer.py::test_snake_case_completion",
"IPython/core/tests/test_display.py::test_display_id",
"IPython/core/tests/test_display.py::test_update_display",
"IPython/core/tests/test_display.py::test_display_handle",
"IPython/utils/tests/test_path.py::test_get_home_dir_3",
"IPython/utils/tests/test_path.py::TestLinkOrCopy::test_link_successful",
"IPython/utils/tests/test_path.py::TestLinkOrCopy::test_link_into_dir",
"IPython/utils/tests/test_path.py::TestLinkOrCopy::test_target_exists",
"IPython/utils/tests/test_path.py::TestLinkOrCopy::test_no_link",
"IPython/utils/tests/test_path.py::TestLinkOrCopy::test_link_twice"
]
| [
"IPython/core/tests/test_completer.py::test_protect_filename",
"IPython/core/tests/test_completer.py::test_line_split",
"IPython/core/tests/test_completer.py::CompletionSplitterTestCase::test_delim_setting",
"IPython/core/tests/test_completer.py::CompletionSplitterTestCase::test_spaces",
"IPython/core/tests/test_completer.py::test_has_open_quotes1",
"IPython/core/tests/test_completer.py::test_has_open_quotes2",
"IPython/core/tests/test_completer.py::test_has_open_quotes3",
"IPython/core/tests/test_completer.py::test_has_open_quotes4",
"IPython/core/tests/test_completer.py::test_get__all__entries_ok",
"IPython/core/tests/test_completer.py::test_get__all__entries_no__all__ok",
"IPython/core/tests/test_completer.py::test_match_dict_keys",
"IPython/core/tests/test_completer.py::test_tryimport",
"IPython/core/tests/test_display.py::test_image_size",
"IPython/core/tests/test_display.py::test_geojson",
"IPython/core/tests/test_display.py::test_retina_png",
"IPython/core/tests/test_display.py::test_retina_jpeg",
"IPython/core/tests/test_display.py::test_base64image",
"IPython/core/tests/test_display.py::test_image_filename_defaults",
"IPython/core/tests/test_display.py::test_displayobject_repr",
"IPython/core/tests/test_display.py::test_json",
"IPython/core/tests/test_display.py::test_video_embedding",
"IPython/core/tests/test_imports.py::test_import_completer",
"IPython/core/tests/test_imports.py::test_import_crashhandler",
"IPython/core/tests/test_imports.py::test_import_debugger",
"IPython/core/tests/test_imports.py::test_import_excolors",
"IPython/core/tests/test_imports.py::test_import_history",
"IPython/core/tests/test_imports.py::test_import_hooks",
"IPython/core/tests/test_imports.py::test_import_getipython",
"IPython/core/tests/test_imports.py::test_import_interactiveshell",
"IPython/core/tests/test_imports.py::test_import_logger",
"IPython/core/tests/test_imports.py::test_import_macro",
"IPython/core/tests/test_imports.py::test_import_magic",
"IPython/core/tests/test_imports.py::test_import_oinspect",
"IPython/core/tests/test_imports.py::test_import_prefilter",
"IPython/core/tests/test_imports.py::test_import_prompts",
"IPython/core/tests/test_imports.py::test_import_release",
"IPython/core/tests/test_imports.py::test_import_ultratb",
"IPython/core/tests/test_imports.py::test_import_usage",
"IPython/lib/tests/test_pretty.py::test_indentation",
"IPython/lib/tests/test_pretty.py::test_dispatch",
"IPython/lib/tests/test_pretty.py::test_callability_checking",
"IPython/lib/tests/test_pretty.py::test_pprint_heap_allocated_type",
"IPython/lib/tests/test_pretty.py::test_pprint_nomod",
"IPython/lib/tests/test_pretty.py::test_pprint_break",
"IPython/lib/tests/test_pretty.py::test_pprint_break_repr",
"IPython/lib/tests/test_pretty.py::test_bad_repr",
"IPython/lib/tests/test_pretty.py::test_really_bad_repr",
"IPython/lib/tests/test_pretty.py::test_super_repr",
"IPython/lib/tests/test_pretty.py::test_long_list",
"IPython/lib/tests/test_pretty.py::test_long_set",
"IPython/lib/tests/test_pretty.py::test_long_tuple",
"IPython/lib/tests/test_pretty.py::test_long_dict",
"IPython/lib/tests/test_pretty.py::test_unbound_method",
"IPython/lib/tests/test_pretty.py::test_metaclass_repr",
"IPython/lib/tests/test_pretty.py::test_unicode_repr",
"IPython/lib/tests/test_pretty.py::test_basic_class",
"IPython/lib/tests/test_pretty.py::test_collections_defaultdict",
"IPython/lib/tests/test_pretty.py::test_collections_ordereddict",
"IPython/lib/tests/test_pretty.py::test_collections_deque",
"IPython/lib/tests/test_pretty.py::test_collections_counter",
"IPython/lib/tests/test_pretty.py::test_mappingproxy",
"IPython/utils/tests/test_path.py::test_get_home_dir_4",
"IPython/utils/tests/test_path.py::test_get_home_dir_5",
"IPython/utils/tests/test_path.py::test_get_xdg_dir_0",
"IPython/utils/tests/test_path.py::test_get_xdg_dir_1",
"IPython/utils/tests/test_path.py::test_get_xdg_dir_2",
"IPython/utils/tests/test_path.py::test_get_xdg_dir_3",
"IPython/utils/tests/test_path.py::test_filefind",
"IPython/utils/tests/test_path.py::test_get_long_path_name",
"IPython/utils/tests/test_path.py::test_get_py_filename",
"IPython/utils/tests/test_path.py::test_unicode_in_filename",
"IPython/utils/tests/test_path.py::test_unescape_glob",
"IPython/utils/tests/test_path.py::test_ensure_dir_exists"
]
| []
| BSD 3-Clause "New" or "Revised" License | 1,315 | [
"docs/source/config/details.rst",
"docs/source/whatsnew/version5.rst",
"docs/source/coredev/index.rst",
"docs/source/whatsnew/github-stats-5.rst",
"docs/source/interactive/plotting.rst",
"IPython/utils/path.py",
"IPython/core/completer.py",
"IPython/core/shadowns.py",
"IPython/core/interactiveshell.py",
"IPython/core/display.py",
"IPython/terminal/interactiveshell.py",
"docs/source/whatsnew/github-stats-6.rst"
]
| [
"docs/source/config/details.rst",
"docs/source/whatsnew/version5.rst",
"docs/source/coredev/index.rst",
"docs/source/whatsnew/github-stats-5.rst",
"docs/source/interactive/plotting.rst",
"IPython/utils/path.py",
"IPython/core/completer.py",
"IPython/core/shadowns.py",
"IPython/core/interactiveshell.py",
"IPython/core/display.py",
"IPython/terminal/interactiveshell.py",
"docs/source/whatsnew/github-stats-6.rst"
]
|
PyCQA__pyflakes-275 | 1af4f14ad4675bf5c61c47bbb7c2421b50d1cba4 | 2017-05-31 03:24:45 | 1af4f14ad4675bf5c61c47bbb7c2421b50d1cba4 | diff --git a/README.rst b/README.rst
index e84d334..aeb15f9 100644
--- a/README.rst
+++ b/README.rst
@@ -72,7 +72,7 @@ rebase your commits for you.
All changes should include tests and pass flake8_.
-.. image:: https://api.travis-ci.org/PyCQA/pyflakes.svg
+.. image:: https://api.travis-ci.org/PyCQA/pyflakes.svg?branch=master
:target: https://travis-ci.org/PyCQA/pyflakes
:alt: Build status
diff --git a/pyflakes/api.py b/pyflakes/api.py
index a535bff..e30f920 100644
--- a/pyflakes/api.py
+++ b/pyflakes/api.py
@@ -5,6 +5,7 @@ from __future__ import with_statement
import sys
import os
+import re
import _ast
from pyflakes import checker, __version__
@@ -13,6 +14,9 @@ from pyflakes import reporter as modReporter
__all__ = ['check', 'checkPath', 'checkRecursive', 'iterSourceCode', 'main']
+PYTHON_SHEBANG_REGEX = re.compile(br'^#!.*\bpython[23]?\b\s*$')
+
+
def check(codeString, filename, reporter=None):
"""
Check the Python source given by C{codeString} for flakes.
@@ -108,6 +112,25 @@ def checkPath(filename, reporter=None):
return check(codestr, filename, reporter)
+def isPythonFile(filename):
+ """Return True if filename points to a Python file."""
+ if filename.endswith('.py'):
+ return True
+
+ max_bytes = 128
+
+ try:
+ with open(filename, 'rb') as f:
+ text = f.read(max_bytes)
+ if not text:
+ return False
+ except IOError:
+ return False
+
+ first_line = text.splitlines()[0]
+ return PYTHON_SHEBANG_REGEX.match(first_line)
+
+
def iterSourceCode(paths):
"""
Iterate over all Python source files in C{paths}.
@@ -120,8 +143,9 @@ def iterSourceCode(paths):
if os.path.isdir(path):
for dirpath, dirnames, filenames in os.walk(path):
for filename in filenames:
- if filename.endswith('.py'):
- yield os.path.join(dirpath, filename)
+ full_path = os.path.join(dirpath, filename)
+ if isPythonFile(full_path):
+ yield full_path
else:
yield path
| Please also check python scripts that are not named *.py
*Original report by [morph-debian](https://launchpad.net/~morph-debian) on [Launchpad](https://bugs.launchpad.net/bugs/970465):*
------------------------------------
Hello,
as reported on Debian at http://bugs.debian.org/cgi-bin/bugreport.cgi?bug=653890:
"""
It would be great if pyflakes could detect files that are python scripts
but are not named *.py. It would have to read the first two bytes of
each file to check for scripts and then read the rest of the line to
determine if it is a python script. It would need to process stuff like:
#!/usr/bin/python
#!/usr/local/bin/python
#!/usr/bin/env python
#!/usr/bin/python2.6
#!/usr/bin/python3.2
"""
Regards,
Sandro
| PyCQA/pyflakes | diff --git a/pyflakes/test/test_api.py b/pyflakes/test/test_api.py
index 51b0027..a1e0be2 100644
--- a/pyflakes/test/test_api.py
+++ b/pyflakes/test/test_api.py
@@ -187,6 +187,22 @@ class TestIterSourceCode(TestCase):
sorted(iterSourceCode([self.tempdir])),
sorted([apath, bpath, cpath]))
+ def test_shebang(self):
+ """
+ Find Python files that don't end with `.py`, but contain a Python
+ shebang.
+ """
+ apath = os.path.join(self.tempdir, 'a')
+ fd = open(apath, 'w')
+ fd.write('#!/usr/bin/env python\n')
+ fd.close()
+
+ self.makeEmptyFile('b')
+
+ self.assertEqual(
+ list(iterSourceCode([self.tempdir])),
+ list([apath]))
+
def test_multipleDirectories(self):
"""
L{iterSourceCode} can be given multiple directories. It will recurse
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 2
} | 1.5 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"flake8",
"pytest"
],
"pre_install": null,
"python": "3.6",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
certifi==2021.5.30
flake8==5.0.4
importlib-metadata==4.2.0
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
mccabe==0.7.0
more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pycodestyle==2.9.1
-e git+https://github.com/PyCQA/pyflakes.git@1af4f14ad4675bf5c61c47bbb7c2421b50d1cba4#egg=pyflakes
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
pytest==6.2.4
toml @ file:///tmp/build/80754af9/toml_1616166611790/work
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
| name: pyflakes
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=21.4.0=pyhd3eb1b0_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- importlib_metadata=4.8.1=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- more-itertools=8.12.0=pyhd3eb1b0_0
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pip=21.2.2=py36h06a4308_0
- pluggy=0.13.1=py36h06a4308_0
- py=1.11.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pytest=6.2.4=py36h06a4308_2
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- toml=0.10.2=pyhd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zipp=3.6.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
- pip:
- flake8==5.0.4
- importlib-metadata==4.2.0
- mccabe==0.7.0
- pycodestyle==2.9.1
prefix: /opt/conda/envs/pyflakes
| [
"pyflakes/test/test_api.py::TestIterSourceCode::test_shebang"
]
| []
| [
"pyflakes/test/test_api.py::TestIterSourceCode::test_emptyDirectory",
"pyflakes/test/test_api.py::TestIterSourceCode::test_explicitFiles",
"pyflakes/test/test_api.py::TestIterSourceCode::test_multipleDirectories",
"pyflakes/test/test_api.py::TestIterSourceCode::test_onlyPythonSource",
"pyflakes/test/test_api.py::TestIterSourceCode::test_recurses",
"pyflakes/test/test_api.py::TestIterSourceCode::test_singleFile",
"pyflakes/test/test_api.py::TestReporter::test_flake",
"pyflakes/test/test_api.py::TestReporter::test_multiLineSyntaxError",
"pyflakes/test/test_api.py::TestReporter::test_syntaxError",
"pyflakes/test/test_api.py::TestReporter::test_syntaxErrorNoOffset",
"pyflakes/test/test_api.py::TestReporter::test_unexpectedError",
"pyflakes/test/test_api.py::CheckTests::test_CRLFLineEndings",
"pyflakes/test/test_api.py::CheckTests::test_checkPathNonExisting",
"pyflakes/test/test_api.py::CheckTests::test_checkRecursive",
"pyflakes/test/test_api.py::CheckTests::test_encodedFileUTF8",
"pyflakes/test/test_api.py::CheckTests::test_eofSyntaxError",
"pyflakes/test/test_api.py::CheckTests::test_eofSyntaxErrorWithTab",
"pyflakes/test/test_api.py::CheckTests::test_invalidEscape",
"pyflakes/test/test_api.py::CheckTests::test_legacyScript",
"pyflakes/test/test_api.py::CheckTests::test_misencodedFileUTF16",
"pyflakes/test/test_api.py::CheckTests::test_misencodedFileUTF8",
"pyflakes/test/test_api.py::CheckTests::test_missingTrailingNewline",
"pyflakes/test/test_api.py::CheckTests::test_multilineSyntaxError",
"pyflakes/test/test_api.py::CheckTests::test_nonDefaultFollowsDefaultSyntaxError",
"pyflakes/test/test_api.py::CheckTests::test_nonKeywordAfterKeywordSyntaxError",
"pyflakes/test/test_api.py::CheckTests::test_pyflakesWarning",
"pyflakes/test/test_api.py::IntegrationTests::test_errors_io",
"pyflakes/test/test_api.py::IntegrationTests::test_errors_syntax",
"pyflakes/test/test_api.py::IntegrationTests::test_fileWithFlakes",
"pyflakes/test/test_api.py::IntegrationTests::test_goodFile",
"pyflakes/test/test_api.py::IntegrationTests::test_readFromStdin",
"pyflakes/test/test_api.py::TestMain::test_errors_io",
"pyflakes/test/test_api.py::TestMain::test_errors_syntax",
"pyflakes/test/test_api.py::TestMain::test_fileWithFlakes",
"pyflakes/test/test_api.py::TestMain::test_goodFile",
"pyflakes/test/test_api.py::TestMain::test_readFromStdin"
]
| []
| MIT License | 1,316 | [
"README.rst",
"pyflakes/api.py"
]
| [
"README.rst",
"pyflakes/api.py"
]
|
|
frictionlessdata__goodtables-py-195 | faf5dddc7c99c4ea9581fe7565150a49c0e4bdd9 | 2017-05-31 09:28:22 | faf5dddc7c99c4ea9581fe7565150a49c0e4bdd9 | roll: @brew
Could you please review
roll: Thanks Brook! | diff --git a/data/empty.csv b/data/empty.csv
new file mode 100644
index 0000000..e69de29
diff --git a/data/invalid_no_headers.csv b/data/invalid_no_headers.csv
new file mode 100644
index 0000000..090c676
--- /dev/null
+++ b/data/invalid_no_headers.csv
@@ -0,0 +1,3 @@
+1,english
+2,中国人
+3,german,extra
diff --git a/goodtables/inspector.py b/goodtables/inspector.py
index dce6a06..ca64eea 100644
--- a/goodtables/inspector.py
+++ b/goodtables/inspector.py
@@ -4,6 +4,7 @@ from __future__ import print_function
from __future__ import absolute_import
from __future__ import unicode_literals
+import six
import inspect
import datetime
import operator
@@ -133,7 +134,7 @@ class Inspector(object):
# Prepare vars
errors = []
warnings = []
- headers = None
+ headers = []
row_number = 0
fatal_error = False
checks = copy(self.__checks)
@@ -147,6 +148,8 @@ class Inspector(object):
stream.open()
sample = stream.sample
headers = stream.headers
+ if headers is None:
+ headers = [None] * len(sample[0]) if sample else []
if _filter_checks(checks, type='schema'):
if schema is None and self.__infer_schema:
schema = Schema(infer(headers, sample))
@@ -174,13 +177,14 @@ class Inspector(object):
# Head checks
if not fatal_error:
- head_checks = _filter_checks(checks, context='head')
- for check in head_checks:
- if not columns:
- break
- check['func'](errors, columns, sample)
- for error in errors:
- error['row'] = None
+ if None not in headers:
+ head_checks = _filter_checks(checks, context='head')
+ for check in head_checks:
+ if not columns:
+ break
+ check['func'](errors, columns, sample)
+ for error in errors:
+ error['row'] = None
# Body checks
if not fatal_error:
@@ -191,7 +195,7 @@ class Inspector(object):
extended_rows = stream.iter(extended=True)
while True:
try:
- row_number, headers, row = next(extended_rows)
+ row_number, _, row = next(extended_rows)
except StopIteration:
break
except Exception as exception:
@@ -235,6 +239,7 @@ class Inspector(object):
stop = datetime.datetime.now()
# Compose report
+ headers = headers if None not in headers else None
errors = errors[:self.__error_limit]
errors = _sort_errors(errors)
report = copy(extra)
@@ -319,10 +324,13 @@ def _prepare_checks(setup, custom, order_fields, infer_fields):
# Bind options
for check in checks:
- args, _, _, _ = inspect.getargspec(check['func'])
- if 'order_fields' in args:
+ if six.PY2:
+ parameters, _, _, _ = inspect.getargspec(check['func'])
+ else:
+ parameters = inspect.signature(check['func']).parameters
+ if 'order_fields' in parameters:
check['func'] = partial(check['func'], order_fields=order_fields)
- if 'infer_fields' in args:
+ if 'infer_fields' in parameters:
check['func'] = partial(check['func'], infer_fields=infer_fields)
return checks
| Ability to check table without headers?
(This would obviously not run checks related to headers.)
```
(pilot-dm4t) legend:refit-dirty dan (master)$ head archive/House1.csv
1381323977,523,74,0,69,0,0,0,0,0,1
1381323990,526,75,0,69,0,0,0,0,0,1
1381324005,540,74,0,68,0,0,0,0,0,1
1381324020,532,74,0,68,0,0,0,0,0,1
1381324035,540,74,0,69,0,0,0,0,0,1
```
Adding a `"dialect": {"header": False}` to resource has no effect.
`datapackage.json`:
```
...
"resources": [
{
"name": "house_1.csv",
"path": "archive/House1.csv",
"dialect": {
"header": false
},
...
```
I would opt to just skip the header-related checks on command line, but can't see how (#175)
---
# Tasks
- [x] support inspection for tables without headers
- [x] support inspection for data package resources without headers (now it's possible with supported CSV dialect `header` option- https://github.com/frictionlessdata/datapackage-py/blob/master/tests/test_resource.py#L517-L528) - MOVED #174 | frictionlessdata/goodtables-py | diff --git a/tests/test_inspector.py b/tests/test_inspector.py
index 82d4626..aedb14a 100644
--- a/tests/test_inspector.py
+++ b/tests/test_inspector.py
@@ -4,6 +4,7 @@ from __future__ import print_function
from __future__ import absolute_import
from __future__ import unicode_literals
+import pytest
from goodtables import Inspector
@@ -179,3 +180,22 @@ def test_inspector_warnings_table_and_error_limit():
assert len(report['warnings']) == 2
assert 'table(s) limit' in report['warnings'][0]
assert 'error(s) limit' in report['warnings'][1]
+
+
+# Empty source
+
+def test_inspector_empty_source():
+ inspector = Inspector()
+ report = inspector.inspect('data/empty.csv')
+ assert report['tables'][0]['row-count'] == 0
+ assert report['tables'][0]['error-count'] == 0
+
+
+# No headers source
+
+def test_inspector_no_headers():
+ inspector = Inspector()
+ report = inspector.inspect('data/invalid_no_headers.csv', headers=None)
+ assert report['tables'][0]['row-count'] == 3
+ assert report['tables'][0]['error-count'] == 1
+ assert report['tables'][0]['errors'][0]['code'] == 'extra-value'
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_issue_reference",
"has_added_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 3
},
"num_modified_files": 1
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[develop,ods]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-mock",
"mock",
"pyyaml"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.6",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
boto3==1.23.10
botocore==1.26.10
certifi==2021.5.30
chardet==5.0.0
charset-normalizer==2.0.12
click==6.7
coverage==6.2
datapackage==0.8.9
distlib==0.3.9
et-xmlfile==1.1.0
ezodf==0.3.2
filelock==3.4.1
future==0.18.3
-e git+https://github.com/frictionlessdata/goodtables-py.git@faf5dddc7c99c4ea9581fe7565150a49c0e4bdd9#egg=goodtables
greenlet==2.0.2
idna==3.10
ijson==3.3.0
importlib-metadata==4.8.3
importlib-resources==5.4.0
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
isodate==0.6.1
jmespath==0.10.0
jsonlines==3.1.0
jsonschema==2.6.0
jsontableschema==0.10.1
linear-tsv==1.1.0
lxml==3.8.0
mccabe==0.7.0
mock==5.2.0
more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work
openpyxl==3.1.3
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
platformdirs==2.4.0
pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pycodestyle==2.10.0
pydocstyle==6.3.0
pyflakes==3.0.1
pylama==7.7.1
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
pytest==6.2.4
pytest-cov==4.0.0
pytest-mock==3.6.1
python-dateutil==2.9.0.post0
PyYAML==6.0.1
requests==2.27.1
rfc3986==0.4.1
s3transfer==0.5.2
six==1.17.0
snowballstemmer==2.2.0
SQLAlchemy==1.4.54
tabulator==1.53.5
toml @ file:///tmp/build/80754af9/toml_1616166611790/work
tomli==1.2.3
tox==3.28.0
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
unicodecsv==0.14.1
urllib3==1.26.20
virtualenv==20.17.1
xlrd==2.0.1
zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
| name: goodtables-py
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=21.4.0=pyhd3eb1b0_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- importlib_metadata=4.8.1=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- more-itertools=8.12.0=pyhd3eb1b0_0
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pip=21.2.2=py36h06a4308_0
- pluggy=0.13.1=py36h06a4308_0
- py=1.11.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pytest=6.2.4=py36h06a4308_2
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- toml=0.10.2=pyhd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zipp=3.6.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
- pip:
- boto3==1.23.10
- botocore==1.26.10
- chardet==5.0.0
- charset-normalizer==2.0.12
- click==6.7
- coverage==6.2
- datapackage==0.8.9
- distlib==0.3.9
- et-xmlfile==1.1.0
- ezodf==0.3.2
- filelock==3.4.1
- future==0.18.3
- greenlet==2.0.2
- idna==3.10
- ijson==3.3.0
- importlib-metadata==4.8.3
- importlib-resources==5.4.0
- isodate==0.6.1
- jmespath==0.10.0
- jsonlines==3.1.0
- jsonschema==2.6.0
- jsontableschema==0.10.1
- linear-tsv==1.1.0
- lxml==3.8.0
- mccabe==0.7.0
- mock==5.2.0
- openpyxl==3.1.3
- platformdirs==2.4.0
- pycodestyle==2.10.0
- pydocstyle==6.3.0
- pyflakes==3.0.1
- pylama==7.7.1
- pytest-cov==4.0.0
- pytest-mock==3.6.1
- python-dateutil==2.9.0.post0
- pyyaml==6.0.1
- requests==2.27.1
- rfc3986==0.4.1
- s3transfer==0.5.2
- six==1.17.0
- snowballstemmer==2.2.0
- sqlalchemy==1.4.54
- tabulator==1.53.5
- tomli==1.2.3
- tox==3.28.0
- unicodecsv==0.14.1
- urllib3==1.26.20
- virtualenv==20.17.1
- xlrd==2.0.1
prefix: /opt/conda/envs/goodtables-py
| [
"tests/test_inspector.py::test_inspector_empty_source",
"tests/test_inspector.py::test_inspector_no_headers"
]
| [
"tests/test_inspector.py::test_inspector_catch_all_open_exceptions"
]
| [
"tests/test_inspector.py::test_inspector_table_valid",
"tests/test_inspector.py::test_inspector_table_invalid",
"tests/test_inspector.py::test_inspector_table_invalid_error_limit",
"tests/test_inspector.py::test_inspector_table_invalid_row_limit",
"tests/test_inspector.py::test_inspector_datapackage_valid",
"tests/test_inspector.py::test_inspector_datapackage_invalid",
"tests/test_inspector.py::test_inspector_datapackage_invalid_table_limit",
"tests/test_inspector.py::test_inspector_tables_invalid",
"tests/test_inspector.py::test_inspector_catch_all_iter_exceptions",
"tests/test_inspector.py::test_inspector_warnings_no",
"tests/test_inspector.py::test_inspector_warnings_bad_datapackage_json",
"tests/test_inspector.py::test_inspector_warnings_table_limit",
"tests/test_inspector.py::test_inspector_warnings_row_limit",
"tests/test_inspector.py::test_inspector_warnings_error_limit",
"tests/test_inspector.py::test_inspector_warnings_table_and_row_limit",
"tests/test_inspector.py::test_inspector_warnings_table_and_error_limit"
]
| []
| MIT License | 1,317 | [
"data/empty.csv",
"goodtables/inspector.py",
"data/invalid_no_headers.csv"
]
| [
"data/empty.csv",
"goodtables/inspector.py",
"data/invalid_no_headers.csv"
]
|
opsdroid__opsdroid-163 | b6a6b803844b36e49146b468dc104be1e49235a5 | 2017-05-31 20:46:21 | b6a6b803844b36e49146b468dc104be1e49235a5 | coveralls:
[](https://coveralls.io/builds/11771359)
Coverage increased (+0.02%) to 95.876% when pulling **7372d076c962767f4d418fd0e84f96a6a2f02aa6 on jacobtomlinson:new-module-path** into **b6a6b803844b36e49146b468dc104be1e49235a5 on opsdroid:dev**.
| diff --git a/opsdroid/const.py b/opsdroid/const.py
index be722ef..c12d2ae 100644
--- a/opsdroid/const.py
+++ b/opsdroid/const.py
@@ -4,5 +4,6 @@ __version__ = "0.7.1"
LOG_FILENAME = 'output.log'
DEFAULT_GIT_URL = "https://github.com/opsdroid/"
-MODULES_DIRECTORY = "modules"
+MODULES_DIRECTORY = "opsdroid-modules"
+DEFAULT_MODULES_PATH = "~/.opsdroid/modules"
DEFAULT_MODULE_BRANCH = "master"
diff --git a/opsdroid/loader.py b/opsdroid/loader.py
index e494cda..34d1330 100644
--- a/opsdroid/loader.py
+++ b/opsdroid/loader.py
@@ -8,7 +8,8 @@ import subprocess
import importlib
import yaml
from opsdroid.const import (
- DEFAULT_GIT_URL, MODULES_DIRECTORY, DEFAULT_MODULE_BRANCH)
+ DEFAULT_GIT_URL, MODULES_DIRECTORY, DEFAULT_MODULES_PATH,
+ DEFAULT_MODULE_BRANCH)
_LOGGER = logging.getLogger(__name__)
@@ -20,7 +21,7 @@ class Loader:
def __init__(self, opsdroid):
"""Create object with opsdroid instance."""
self.opsdroid = opsdroid
- self.modules_directory = MODULES_DIRECTORY
+ self.modules_directory = None
self.current_import_config = None
_LOGGER.debug("Loaded loader")
@@ -122,16 +123,26 @@ class Loader:
except FileNotFoundError as error:
self.opsdroid.critical(str(error), 1)
+ def setup_modules_directory(self, config):
+ """Create and configure the modules directory."""
+ module_path = os.path.expanduser(
+ config.get("module-path", DEFAULT_MODULES_PATH))
+ sys.path.append(module_path)
+
+ if not os.path.isdir(module_path):
+ os.makedirs(module_path, exist_ok=True)
+
+ self.modules_directory = os.path.join(module_path, MODULES_DIRECTORY)
+
+ # Create modules directory if doesn't exist
+ if not os.path.isdir(self.modules_directory):
+ os.makedirs(self.modules_directory)
+
def load_modules_from_config(self, config):
"""Load all module types based on config."""
_LOGGER.debug("Loading modules from config")
- if "module-path" in config:
- sys.path.append(config["module-path"])
- if not os.path.isdir(config["module-path"]):
- os.makedirs(config["module-path"], exist_ok=True)
- self.modules_directory = os.path.join(config["module-path"],
- self.modules_directory)
+ self.setup_modules_directory(config)
connectors, databases, skills = None, None, None
@@ -159,10 +170,6 @@ class Loader:
_LOGGER.debug("Loading " + modules_type + " modules")
loaded_modules = []
- # Create modules directory if doesn't exist
- if not os.path.isdir(self.modules_directory):
- os.makedirs(self.modules_directory)
-
for module in modules:
# Set up module config
| Change default modules directory
Currently the default modules directory location is `./modules`.
This makes a few assumptions:
* Current directory is in the python path
* There are no other python modules in the current directory
* There are no other modules named `modules`
* Current directory is writable
A better default location may be `~/.opsdroid/modules/opsdroid-modules`. This would be created if it doesn't exist and `~/.opsdroid/modules` could be added to the python path without fear of collision as `opsdroid-modules` is less generic. As it is in the home directory we can be fairly sure it is writable.
Also when a user specifies a custom modules directory it should still be suffixed with `/opsdroid-modules` and the custom directory should be added to the python path. | opsdroid/opsdroid | diff --git a/tests/test_loader.py b/tests/test_loader.py
index 1724997..ae83378 100644
--- a/tests/test_loader.py
+++ b/tests/test_loader.py
@@ -153,20 +153,10 @@ class TestLoader(unittest.TestCase):
with mock.patch.object(loader, '_install_module') as mockinstall, \
mock.patch.object(loader, 'import_module',
mockedmodule) as mockimport:
+ loader.setup_modules_directory({})
loader._load_modules(modules_type, modules)
- mockinstall.assert_called_with({
- 'branch': 'master',
- 'module_path': 'modules.test.testmodule',
- 'name': 'testmodule',
- 'type': modules_type,
- 'install_path': 'modules/test/testmodule'})
- mockimport.assert_called_with({
- 'module_path': 'modules.test.testmodule',
- 'name': 'testmodule',
- 'type': modules_type,
- 'branch': 'master',
- 'install_path':
- 'modules/test/testmodule'})
+ assert mockinstall.call_count
+ assert mockimport.call_count
def test_install_existing_module(self):
opsdroid, loader = self.setup()
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 2
} | 0.7 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip3 install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": [
"apt-get update",
"apt-get install -y git"
],
"python": "3.6",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | aiohttp==1.3.5
arrow==0.10.0
async-timeout==4.0.2
attrs==22.2.0
certifi==2021.5.30
chardet==5.0.0
coverage==6.2
importlib-metadata==4.8.3
iniconfig==1.1.1
multidict==5.2.0
-e git+https://github.com/opsdroid/opsdroid.git@b6a6b803844b36e49146b468dc104be1e49235a5#egg=opsdroid
packaging==21.3
pluggy==1.0.0
py==1.11.0
pycron==0.40
pyparsing==3.1.4
pytest==7.0.1
pytest-cov==4.0.0
python-dateutil==2.9.0.post0
PyYAML==3.12
six==1.17.0
tomli==1.2.3
typing_extensions==4.1.1
yarl==0.9.8
zipp==3.6.0
| name: opsdroid
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- aiohttp==1.3.5
- arrow==0.10.0
- async-timeout==4.0.2
- attrs==22.2.0
- chardet==5.0.0
- coverage==6.2
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- multidict==5.2.0
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pycron==0.40
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-cov==4.0.0
- python-dateutil==2.9.0.post0
- pyyaml==3.12
- six==1.17.0
- tomli==1.2.3
- typing-extensions==4.1.1
- yarl==0.9.8
- zipp==3.6.0
prefix: /opt/conda/envs/opsdroid
| [
"tests/test_loader.py::TestLoader::test_load_modules"
]
| []
| [
"tests/test_loader.py::TestLoader::test_build_module_path",
"tests/test_loader.py::TestLoader::test_check_cache_leaves",
"tests/test_loader.py::TestLoader::test_check_cache_removes_dir",
"tests/test_loader.py::TestLoader::test_check_cache_removes_file",
"tests/test_loader.py::TestLoader::test_git_clone",
"tests/test_loader.py::TestLoader::test_import_module",
"tests/test_loader.py::TestLoader::test_import_module_failure",
"tests/test_loader.py::TestLoader::test_import_module_new",
"tests/test_loader.py::TestLoader::test_install_default_remote_module",
"tests/test_loader.py::TestLoader::test_install_existing_module",
"tests/test_loader.py::TestLoader::test_install_local_module_dir",
"tests/test_loader.py::TestLoader::test_install_local_module_failure",
"tests/test_loader.py::TestLoader::test_install_local_module_file",
"tests/test_loader.py::TestLoader::test_install_missing_local_module",
"tests/test_loader.py::TestLoader::test_install_specific_local_git_module",
"tests/test_loader.py::TestLoader::test_install_specific_local_path_module",
"tests/test_loader.py::TestLoader::test_install_specific_remote_module",
"tests/test_loader.py::TestLoader::test_load_broken_config_file",
"tests/test_loader.py::TestLoader::test_load_config",
"tests/test_loader.py::TestLoader::test_load_config_file",
"tests/test_loader.py::TestLoader::test_load_empty_config",
"tests/test_loader.py::TestLoader::test_load_non_existant_config_file",
"tests/test_loader.py::TestLoader::test_pip_install_deps"
]
| []
| Apache License 2.0 | 1,318 | [
"opsdroid/const.py",
"opsdroid/loader.py"
]
| [
"opsdroid/const.py",
"opsdroid/loader.py"
]
|
DarkEnergySurvey__mkauthlist-16 | 15365e9ab90a623e3109739dd3f9c7d1fbb91fb7 | 2017-06-01 04:21:26 | 2644d26323e073616ccad45dea426bb9c485ee3a | diff --git a/.gitignore b/.gitignore
index 5b6ffac..086cdcc 100644
--- a/.gitignore
+++ b/.gitignore
@@ -10,4 +10,5 @@ dist
*.out
*.aux
*.log
-*.spl
\ No newline at end of file
+*.spl
+*.cls
\ No newline at end of file
diff --git a/data/author_order.csv b/data/author_order.csv
index 3749293..de4b8f3 100644
--- a/data/author_order.csv
+++ b/data/author_order.csv
@@ -1,5 +1,6 @@
Melchior
Sheldon, Erin
+#Commented, Name
Drlica-Wagner
Rykoff
Plazas Malagón
\ No newline at end of file
diff --git a/data/example_author_list.csv b/data/example_author_list.csv
index 4efafbb..ccbc4ed 100644
--- a/data/example_author_list.csv
+++ b/data/example_author_list.csv
@@ -4,7 +4,7 @@ Drlica-Wagner,Alex,A.~Drlica-Wagner,False,"Fermi National Accelerator Laboratory
Rykoff,Eli,E.~S.~Rykoff,False,"Kavli Institute for Particle Astrophysics \& Cosmology, P. O. Box 2450, Stanford University, Stanford, CA 94305, USA","Data quality expert",
Rykoff,Eli,E.~S.~Rykoff,False,"SLAC National Accelerator Laboratory, Menlo Park, CA 94025, USA","Data quality expert",
Sheldon,Erin,E.~Sheldon,False,"Brookhaven National Laboratory, Bldg 510, Upton, NY 11973, USA","Data backend",
-Abbott,Tim,T. M. C.~Abbott,True,"Cerro Tololo Inter-American Observatory, National Optical Astronomy Observatory, Casilla 603, La Serena, Chile",,
+Zhang,Yuanyuan,Y.~Zhang,True,"Fermi National Accelerator Laboratory, P. O. Box 500, Batavia, IL 60510, USA",,
Abdalla,Filipe,F.~B.~Abdalla,True,"Department of Physics \& Astronomy, University College London, Gower Street, London, WC1E 6BT, UK",,
Abdalla,Filipe,F.~B.~Abdalla,True,"Department of Physics and Electronics, Rhodes University, PO Box 94, Grahamstown, 6140, South Africa",,
Allam,Sahar,S.~Allam,True,"Fermi National Accelerator Laboratory, P. O. Box 500, Batavia, IL 60510, USA",,
@@ -66,4 +66,4 @@ Tarle,Gregory,G.~Tarle,True,"Department of Physics, University of Michigan, Ann
Vikram,Vinu,V.~Vikram,True,"Argonne National Laboratory, 9700 South Cass Avenue, Lemont, IL 60439, USA",,
Walker,Alistair,A.~R.~Walker,True,"Cerro Tololo Inter-American Observatory, National Optical Astronomy Observatory, Casilla 603, La Serena, Chile",,
Wester,William,W.~Wester,True,"Fermi National Accelerator Laboratory, P. O. Box 500, Batavia, IL 60510, USA",,
-Zhang,Yuanyuan,Y.~Zhang,True,"Fermi National Accelerator Laboratory, P. O. Box 500, Batavia, IL 60510, USA",,
\ No newline at end of file
+Abbott,Tim,T.~M.~C.~Abbott,True,"Cerro Tololo Inter-American Observatory, National Optical Astronomy Observatory, Casilla 603, La Serena, Chile",,
diff --git a/mkauthlist/mkauthlist.py b/mkauthlist/mkauthlist.py
index 1393e93..4745d3a 100755
--- a/mkauthlist/mkauthlist.py
+++ b/mkauthlist/mkauthlist.py
@@ -134,7 +134,7 @@ def write_contributions(filename,data):
logging.info('Writing contribution file: %s'%filename)
- out = open(filename,'wb')
+ out = open(filename,'w')
out.write(output)
out.close()
@@ -374,7 +374,7 @@ if __name__ == "__main__":
affidict = odict()
authdict = odict()
- # Hack for Munich affiliation...
+ # Hack for umlauts in affiliations...
for k,v in HACK.items():
logging.warn("Hacking '%s' ..."%k)
select = (np.char.count(data['Affiliation'],k) > 0)
@@ -382,13 +382,18 @@ if __name__ == "__main__":
# Pre-sort the csv file by the auxiliary file
if args.aux is not None:
- aux = [r for r in csv.DictReader(open(args.aux),['Lastname','Firstname'])]
+ auxcols = ['Lastname','Firstname']
+ aux = [[r[c] for c in auxcols] for r in
+ csv.DictReader(open(args.aux),fieldnames=auxcols)
+ if not r[auxcols[0]].startswith('#')]
+ aux = np.rec.fromrecords(aux,names=auxcols)
if len(np.unique(aux)) != len(aux):
logging.error('Non-unique names in aux file.')
print(open(args.aux).read())
raise Exception()
-
- raw = np.array(zip(data['Lastname'],range(len(data))))
+
+ # Ugh, python2/3 compatibility
+ raw = np.array(list(zip(data['Lastname'],list(range(len(data))))))
order = np.empty((0,2),dtype=raw.dtype)
for r in aux:
lastname = r['Lastname']
| Python 3 byte strings
The Python 3 `str` object is based on Unicode, which means that `file.write(..., 'wb')` doesn't work anymore. We need to change this, but we should probably also understand why we were specifying `'wb'` in the first place.
Some documentation on the Python 3 change:
https://www.python.org/dev/peps/pep-0404/#strings-and-bytes
https://stackoverflow.com/a/33054552/4075339
And the specific place that needs to be changed: [L137](https://github.com/DarkEnergySurvey/mkauthlist/blob/1fb62affeaa73c5192cce84323f77f66d466306a/mkauthlist/mkauthlist.py#L137)
| DarkEnergySurvey/mkauthlist | diff --git a/tests/test_authlist.py b/tests/test_authlist.py
index 958472e..cb57ba3 100644
--- a/tests/test_authlist.py
+++ b/tests/test_authlist.py
@@ -9,7 +9,7 @@ import logging
import subprocess
import unittest
-class TestAuthlistFunc(unittest.TestCase):
+class TestAuthlist(unittest.TestCase):
def setUp(self):
self.csv = 'example_author_list.csv'
@@ -32,12 +32,13 @@ class TestAuthlistFunc(unittest.TestCase):
# shutil.copy(os.path.join('data',filename),'.')
def tearDown(self):
- self.clean = [self.csv,self.tex,self.aux,self.out,self.log,self.bib,self.pdf,self.order]
+ self.clean = [self.csv,self.tex,self.aux,self.out,self.log,self.bib,
+ self.pdf,self.order,self.cntrb]
self.clean += self.cls
cmd = "rm -f "+' '.join(self.clean)
print(cmd)
- subprocess.check_output(cmd,shell=True)
+ #subprocess.check_output(cmd,shell=True)
def latex(self, tex=None, pdf=None):
if tex is None: tex = self.tex
@@ -49,35 +50,76 @@ class TestAuthlistFunc(unittest.TestCase):
shutil.copy(tex.replace('.tex','.pdf'),pdf)
def test_mkauthlist(self):
+ """Run 'vanilla' mkauthlist."""
cmd = "mkauthlist -f --doc %(csv)s %(tex)s"%self.files
print(cmd)
subprocess.check_output(cmd,shell=True)
self.latex(pdf='test_mkauthlist.pdf')
- def test_author_order(self):
+ def test_order(self):
+ """Explicitly order some authors."""
cmd = "mkauthlist -f --doc %(csv)s %(tex)s -a %(order)s"%self.files
print(cmd)
subprocess.check_output(cmd,shell=True)
- self.latex(pdf='test_order.pdf')
+
+ # Shouldn't be any need to build the file
+ #self.latex(pdf='test_order.pdf')
+
+ with open(self.tex,'r') as f:
+ authors = [l for l in f.readlines() if l.startswith('\\author')]
+ self.assertEqual(authors[1],'\\author{E.~Sheldon}\n')
+ self.assertEqual(authors[4],'\\author{A.~A.~Plazas}\n')
+ self.assertEqual(authors[5],'\\author{Y.~Zhang}\n')
+ self.assertEqual(authors[-1],'\\author{T.~M.~C.~Abbott}\n')
def test_contribution(self):
+ """Write author contributions."""
cmd = "mkauthlist -f --doc %(csv)s %(tex)s --cntrb %(cntrb)s"%self.files
print(cmd)
subprocess.check_output(cmd,shell=True)
- self.latex(pdf='test_contrib.pdf')
- if not os.path.exists(self.cntrb):
- msg = "No contributions found"
- raise Exception(msg)
-
- with open(self.cntrb) as cntrb:
- lines = cntrb.readlines()
- msg = "Unexpected author contributions: "
- if not lines[0].split()[0] == 'Author':
- raise Exception(msg+'\n'+lines[0])
- msg = "Unexpected author contributions"
- if not lines[1].split()[0] == 'P.~Melchior:':
- raise Exception(msg+'\n'+lines[1])
+ # Shouldn't be any need to build the file
+ #self.latex(pdf='test_contrib.pdf')
+
+ with open(self.cntrb,'r') as f:
+ lines = f.readlines()
+ self.assertEqual(lines[0],'Author contributions are listed below. \\\\\n')
+ self.assertEqual(lines[1],'P.~Melchior: Lead designer and author \\\\\n')
+ self.assertEqual(lines[-1],'T.~M.~C.~Abbott: \\\\\n')
+
+ def test_sort(self):
+ """Sort all authors alphabetically."""
+ cmd = "mkauthlist -f --doc %(csv)s %(tex)s --sort"%self.files
+ print(cmd)
+ subprocess.check_output(cmd,shell=True)
+
+ with open(self.tex,'r') as f:
+ authors = [l for l in f.readlines() if l.startswith('\\author')]
+ self.assertEqual(authors[0],'\\author{T.~M.~C.~Abbott}\n')
+ self.assertEqual(authors[-1],'\\author{Y.~Zhang}\n')
+
+ def test_sort_order(self):
+ """Order some authors, sort the rest."""
+ cmd = "mkauthlist -f --doc %(csv)s %(tex)s --sort -a %(order)s"%self.files
+ print(cmd)
+ subprocess.check_output(cmd,shell=True)
+
+ with open(self.tex,'r') as f:
+ authors = [l for l in f.readlines() if l.startswith('\\author')]
+ self.assertEqual(authors[1],'\\author{E.~Sheldon}\n')
+ self.assertEqual(authors[-1],'\\author{Y.~Zhang}\n')
+
+ def test_sort_builder(self):
+ """Sort builders, but leave other authors unchanged."""
+ cmd = "mkauthlist -f --doc %(csv)s %(tex)s -sb"%self.files
+ print(cmd)
+ subprocess.check_output(cmd,shell=True)
+
+ with open(self.tex,'r') as f:
+ authors = [l for l in f.readlines() if l.startswith('\\author')]
+ self.assertEqual(authors[3],'\\author{E.~Sheldon}\n')
+ self.assertEqual(authors[4],'\\author{T.~M.~C.~Abbott}\n')
+ self.assertEqual(authors[-1],'\\author{Y.~Zhang}\n')
if __name__ == "__main__":
unittest.main()
diff --git a/tests/test_journals.py b/tests/test_journals.py
index 09c7f86..4833263 100755
--- a/tests/test_journals.py
+++ b/tests/test_journals.py
@@ -9,7 +9,7 @@ import logging
import subprocess
import unittest
-class TestJournalFunc(unittest.TestCase):
+class TestJournal(unittest.TestCase):
def setUp(self):
self.csv = 'example_author_list.csv'
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 3
},
"num_modified_files": 4
} | 1.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"numpy>=1.16.0",
"pandas>=1.0.0",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements/base.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | exceptiongroup==1.2.2
iniconfig==2.1.0
-e git+https://github.com/DarkEnergySurvey/mkauthlist.git@15365e9ab90a623e3109739dd3f9c7d1fbb91fb7#egg=mkauthlist
numpy==2.0.2
packaging==24.2
pandas==2.2.3
pluggy==1.5.0
pytest==8.3.5
python-dateutil==2.9.0.post0
pytz==2025.2
six==1.17.0
tomli==2.2.1
tzdata==2025.2
| name: mkauthlist
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- exceptiongroup==1.2.2
- iniconfig==2.1.0
- numpy==2.0.2
- packaging==24.2
- pandas==2.2.3
- pluggy==1.5.0
- pytest==8.3.5
- python-dateutil==2.9.0.post0
- pytz==2025.2
- six==1.17.0
- tomli==2.2.1
- tzdata==2025.2
prefix: /opt/conda/envs/mkauthlist
| [
"tests/test_authlist.py::TestAuthlist::test_contribution",
"tests/test_authlist.py::TestAuthlist::test_order",
"tests/test_authlist.py::TestAuthlist::test_sort",
"tests/test_authlist.py::TestAuthlist::test_sort_builder",
"tests/test_authlist.py::TestAuthlist::test_sort_order"
]
| [
"tests/test_authlist.py::TestAuthlist::test_mkauthlist",
"tests/test_journals.py::TestJournal::test_aastex",
"tests/test_journals.py::TestJournal::test_aastex61",
"tests/test_journals.py::TestJournal::test_elsevier",
"tests/test_journals.py::TestJournal::test_emulateapj",
"tests/test_journals.py::TestJournal::test_mkauthlist",
"tests/test_journals.py::TestJournal::test_mnras",
"tests/test_journals.py::TestJournal::test_revtex"
]
| []
| []
| MIT License | 1,319 | [
".gitignore",
"data/author_order.csv",
"mkauthlist/mkauthlist.py",
"data/example_author_list.csv"
]
| [
".gitignore",
"data/author_order.csv",
"mkauthlist/mkauthlist.py",
"data/example_author_list.csv"
]
|
|
FlorianLudwig__rueckenwind-9 | a9856bf3b640562310eda0953c921054b7a050ef | 2017-06-01 07:16:21 | a9856bf3b640562310eda0953c921054b7a050ef | diff --git a/rw/event.py b/rw/event.py
index 103a14b..4cf69f3 100644
--- a/rw/event.py
+++ b/rw/event.py
@@ -20,22 +20,16 @@ Signal
"""
from __future__ import absolute_import, division, print_function, with_statement
-import traceback
-
from tornado import gen
-import rw.scope
-
-
-class EventException(Exception):
- def __init__(self, exceptions):
- self.exceptions = exceptions
- message = '{} exceptions encountered:\n'.format(len(exceptions))
- for func, e in exceptions:
- message += '{}:\n{}'.format(func, e)
- Exception.__init__(self, ''.join(message))
class Event(set):
+ """
+ A simple within-process pub/sub event system.
+
+ If multiple callbacks are provided and raise exceptions,
+ the first detected exception is re-raised and all successive exceptions are ignored.
+ """
def __init__(self, name, accumulator=None):
super(Event, self).__init__()
self.name = name
@@ -43,38 +37,24 @@ class Event(set):
@gen.coroutine
def __call__(self, *args, **kwargs):
- scope = rw.scope.get_current_scope()
- rw_tracing = scope.get('rw_trace', None) if scope else None
-
re = []
- exceptions = []
futures = []
for func in self:
- try:
- result = func(*args, **kwargs)
- if isinstance(result, gen.Future):
- # we are not waiting for future objects result here
- # so they evaluate in parallel
- futures.append((func, result))
- else:
- re.append(result)
- except Exception:
- exceptions.append((func, traceback.format_exc()))
+ result = func(*args, **kwargs)
+ if isinstance(result, gen.Future):
+ # we are not waiting for future objects result here
+ # so they evaluate in parallel
+ futures.append((func, result))
+ else:
+ re.append(result)
# wait for results
for func, future in futures:
- try:
- if not future.done():
- yield future
- re.append(future.result())
-
- except Exception:
- exceptions.append((func, traceback.format_exc()))
-
- if exceptions:
- raise EventException(exceptions)
+ if not future.done():
+ yield future
+ re.append(future.result())
- # apply accumolator
+ # apply accumulator
if self.accumulator:
re = self.accumulator(re)
| exceptions thrown in events
Exceptions thrown in events are caught and wrapped into an EventException (https://github.com/FlorianLudwig/rueckenwind/blob/master/rw/event.py#L75). As the EventException is not an HTTPError, it results in tornado not responding at all to the request.
Example:
```python
@plugin.init
def init():
rw.httpbase.PRE_REQUEST.add(do_stuff)
@rw.gen.corouting
def do_stuff():
raise HTTPError(401)
```
Expected behavior: the request is responded with a statuscode 401
Actual behavior: the request ist not responded at all
As there may be multiple exceptions in https://github.com/FlorianLudwig/rueckenwind/blob/master/rw/event.py#L75 common behavior would be to raise the first and log the rest. | FlorianLudwig/rueckenwind | diff --git a/test/test_event.py b/test/test_event.py
index 93abd6e..379487f 100644
--- a/test/test_event.py
+++ b/test/test_event.py
@@ -23,18 +23,9 @@ class MyTestCase(tornado.testing.AsyncTestCase):
def fail():
1 / 0
- with pytest.raises(rw.event.EventException):
+ with pytest.raises(ZeroDivisionError):
yield MY_EVENT()
- try:
- yield MY_EVENT()
- assert False # this line should never be reached
- except rw.event.EventException as e:
- # the original traceback should be get printed
- assert 'in fail' in str(e) # function name of the actual exception
- assert '1 / 0' in str(e) # the source line of the exception
- assert 'ZeroDivisionError' in str(e)
-
@tornado.testing.gen_test
def test_event_listener(self):
MY_EVENT = rw.event.Event('MY_EVENT')
@@ -87,14 +78,5 @@ class MyTestCase(tornado.testing.AsyncTestCase):
MY_EVENT = rw.event.Event('MY_EVENT')
MY_EVENT.add(something_lazy_failing)
- with pytest.raises(rw.event.EventException):
- yield MY_EVENT()
-
- try:
+ with pytest.raises(ZeroDivisionError):
yield MY_EVENT()
- assert False # this line should never be reached
- except rw.event.EventException as e:
- # the original traceback should be get printed
- assert 'in something_lazy_failing' in str(e) # function name of the actual exception
- assert '1 / 0' in str(e) # the source line of the exception
- assert 'ZeroDivisionError' in str(e)
\ No newline at end of file
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 1
} | 0.4 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.4",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | argcomplete==0.9.0
attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
Babel==2.11.0
certifi==2021.5.30
chardet==5.0.0
configobj==5.0.8
future==1.0.0
importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
Jinja2==3.0.3
MarkupSafe==2.0.1
more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
pytest==6.2.4
pytz==2025.2
PyYAML==6.0.1
-e git+https://github.com/FlorianLudwig/rueckenwind.git@a9856bf3b640562310eda0953c921054b7a050ef#egg=rueckenwind
six==1.17.0
toml @ file:///tmp/build/80754af9/toml_1616166611790/work
tornado==4.5.3
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
| name: rueckenwind
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=21.4.0=pyhd3eb1b0_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- importlib-metadata=4.8.1=py36h06a4308_0
- importlib_metadata=4.8.1=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- more-itertools=8.12.0=pyhd3eb1b0_0
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pip=21.2.2=py36h06a4308_0
- pluggy=0.13.1=py36h06a4308_0
- py=1.11.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pytest=6.2.4=py36h06a4308_2
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- toml=0.10.2=pyhd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zipp=3.6.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
- pip:
- argcomplete==0.9.0
- babel==2.11.0
- chardet==5.0.0
- configobj==5.0.8
- future==1.0.0
- jinja2==3.0.3
- markupsafe==2.0.1
- pytz==2025.2
- pyyaml==6.0.1
- six==1.17.0
- tornado==4.5.3
prefix: /opt/conda/envs/rueckenwind
| [
"test/test_event.py::MyTestCase::test_exception_handling",
"test/test_event.py::MyTestCase::test_futures_fail"
]
| []
| [
"test/test_event.py::MyTestCase::test_accumulator",
"test/test_event.py::MyTestCase::test_decorator",
"test/test_event.py::MyTestCase::test_event_listener",
"test/test_event.py::MyTestCase::test_futures"
]
| []
| Apache License 2.0 | 1,320 | [
"rw/event.py"
]
| [
"rw/event.py"
]
|
|
oasis-open__cti-python-stix2-20 | 229ab5d323a412ee91e8462701c816e1dfc0d521 | 2017-06-01 16:45:07 | 58f39f80af5cbfe02879c2efa4b3b4ef7a504390 | diff --git a/stix2/base.py b/stix2/base.py
index cde8bb6..3f696fd 100644
--- a/stix2/base.py
+++ b/stix2/base.py
@@ -69,22 +69,19 @@ class _STIXBase(collections.Mapping):
if list_of_properties and (not list_of_properties_populated or list_of_properties_populated == set(["extensions"])):
raise AtLeastOnePropertyError(self.__class__, list_of_properties)
- def _check_properties_dependency(self, list_of_properties, list_of_dependent_properties, values=[]):
+ def _check_properties_dependency(self, list_of_properties, list_of_dependent_properties):
failed_dependency_pairs = []
- current_properties = self.properties_populated()
for p in list_of_properties:
- v = values.pop() if values else None
for dp in list_of_dependent_properties:
- if dp in current_properties and (p not in current_properties or (v and not current_properties(p) == v)):
+ if not self.__getattr__(p) and self.__getattr__(dp):
failed_dependency_pairs.append((p, dp))
if failed_dependency_pairs:
raise DependentPropertiesError(self.__class__, failed_dependency_pairs)
def _check_object_constraints(self):
- if self.granular_markings:
- for m in self.granular_markings:
- # TODO: check selectors
- pass
+ for m in self.get("granular_markings", []):
+ # TODO: check selectors
+ pass
def __init__(self, **kwargs):
cls = self.__class__
@@ -100,7 +97,7 @@ class _STIXBase(collections.Mapping):
# Remove any keyword arguments whose value is None
setting_kwargs = {}
for prop_name, prop_value in kwargs.items():
- if prop_value:
+ if prop_value is not None:
setting_kwargs[prop_name] = prop_value
# Detect any missing required properties
@@ -127,11 +124,20 @@ class _STIXBase(collections.Mapping):
# Handle attribute access just like key access
def __getattr__(self, name):
- return self.get(name)
+ try:
+ # Return attribute value.
+ return self.__getitem__(name)
+ except KeyError:
+ # If attribute not found, check if its a property of the object.
+ if name in self._properties:
+ return None
+
+ raise AttributeError("'%s' object has no attribute '%s'" %
+ (self.__class__.__name__, name))
def __setattr__(self, name, value):
if name != '_inner' and not name.startswith("_STIXBase__"):
- raise ImmutableError
+ raise ImmutableError(self.__class__, name)
super(_STIXBase, self).__setattr__(name, value)
def __str__(self):
diff --git a/stix2/exceptions.py b/stix2/exceptions.py
index 5cbaae9..3043047 100644
--- a/stix2/exceptions.py
+++ b/stix2/exceptions.py
@@ -47,8 +47,14 @@ class ExtraPropertiesError(STIXError, TypeError):
class ImmutableError(STIXError, ValueError):
"""Attempted to modify an object after creation"""
- def __init__(self):
- super(ImmutableError, self).__init__("Cannot modify properties after creation.")
+ def __init__(self, cls, key):
+ super(ImmutableError, self).__init__()
+ self.cls = cls
+ self.key = key
+
+ def __str__(self):
+ msg = "Cannot modify '{0.key}' property in '{0.cls.__name__}' after creation."
+ return msg.format(self)
class DictionaryKeyError(STIXError, ValueError):
| Forgot to instantiate ImmutableError
I'm guessing [this line](https://github.com/oasis-open/cti-python-stix2/blob/f937e2bb3f104e3cdf0578ed8323b2f7e39119a4/stix2/base.py#L99) should raise an instance of the exception class, not the class itself? | oasis-open/cti-python-stix2 | diff --git a/stix2/test/test_indicator.py b/stix2/test/test_indicator.py
index 99d34d7..5db50e6 100644
--- a/stix2/test/test_indicator.py
+++ b/stix2/test/test_indicator.py
@@ -129,7 +129,7 @@ def test_cannot_assign_to_indicator_attributes(indicator):
with pytest.raises(stix2.exceptions.ImmutableError) as excinfo:
indicator.valid_from = dt.datetime.now()
- assert str(excinfo.value) == "Cannot modify properties after creation."
+ assert str(excinfo.value) == "Cannot modify 'valid_from' property in 'Indicator' after creation."
def test_invalid_kwarg_to_indicator():
diff --git a/stix2/test/test_malware.py b/stix2/test/test_malware.py
index 8952769..266d012 100644
--- a/stix2/test/test_malware.py
+++ b/stix2/test/test_malware.py
@@ -92,7 +92,7 @@ def test_cannot_assign_to_malware_attributes(malware):
with pytest.raises(stix2.exceptions.ImmutableError) as excinfo:
malware.name = "Cryptolocker II"
- assert str(excinfo.value) == "Cannot modify properties after creation."
+ assert str(excinfo.value) == "Cannot modify 'name' property in 'Malware' after creation."
def test_invalid_kwarg_to_malware():
diff --git a/stix2/test/test_observed_data.py b/stix2/test/test_observed_data.py
index c47338f..f221494 100644
--- a/stix2/test/test_observed_data.py
+++ b/stix2/test/test_observed_data.py
@@ -764,8 +764,7 @@ def test_file_example_encryption_error():
with pytest.raises(stix2.exceptions.DependentPropertiesError) as excinfo:
stix2.File(name="qwerty.dll",
is_encrypted=False,
- encryption_algorithm="AES128-CBC"
- )
+ encryption_algorithm="AES128-CBC")
assert excinfo.value.cls == stix2.File
assert excinfo.value.dependencies == [("is_encrypted", "encryption_algorithm")]
diff --git a/stix2/test/test_relationship.py b/stix2/test/test_relationship.py
index 1e799bf..1ad792c 100644
--- a/stix2/test/test_relationship.py
+++ b/stix2/test/test_relationship.py
@@ -103,7 +103,7 @@ def test_cannot_assign_to_relationship_attributes(relationship):
with pytest.raises(stix2.exceptions.ImmutableError) as excinfo:
relationship.relationship_type = "derived-from"
- assert str(excinfo.value) == "Cannot modify properties after creation."
+ assert str(excinfo.value) == "Cannot modify 'relationship_type' property in 'Relationship' after creation."
def test_invalid_kwarg_to_relationship():
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 3,
"test_score": 1
},
"num_modified_files": 2
} | 0.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": null,
"pre_install": null,
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.16
babel==2.17.0
bump2version==1.0.1
bumpversion==0.6.0
cachetools==5.5.2
certifi==2025.1.31
cfgv==3.4.0
chardet==5.2.0
charset-normalizer==3.4.1
colorama==0.4.6
coverage==7.8.0
distlib==0.3.9
docutils==0.21.2
exceptiongroup==1.2.2
filelock==3.18.0
identify==2.6.9
idna==3.10
imagesize==1.4.1
importlib_metadata==8.6.1
iniconfig==2.1.0
Jinja2==3.1.6
MarkupSafe==3.0.2
nodeenv==1.9.1
packaging==24.2
platformdirs==4.3.7
pluggy==1.5.0
pre_commit==4.2.0
Pygments==2.19.1
pyproject-api==1.9.0
pytest==8.3.5
pytest-cov==6.0.0
python-dateutil==2.9.0.post0
pytz==2025.2
PyYAML==6.0.2
requests==2.32.3
six==1.17.0
snowballstemmer==2.2.0
Sphinx==7.4.7
sphinx-prompt==1.8.0
sphinxcontrib-applehelp==2.0.0
sphinxcontrib-devhelp==2.0.0
sphinxcontrib-htmlhelp==2.1.0
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==2.0.0
sphinxcontrib-serializinghtml==2.0.0
-e git+https://github.com/oasis-open/cti-python-stix2.git@229ab5d323a412ee91e8462701c816e1dfc0d521#egg=stix2
tomli==2.2.1
tox==4.25.0
typing_extensions==4.13.0
urllib3==2.3.0
virtualenv==20.29.3
zipp==3.21.0
| name: cti-python-stix2
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.16
- babel==2.17.0
- bump2version==1.0.1
- bumpversion==0.6.0
- cachetools==5.5.2
- certifi==2025.1.31
- cfgv==3.4.0
- chardet==5.2.0
- charset-normalizer==3.4.1
- colorama==0.4.6
- coverage==7.8.0
- distlib==0.3.9
- docutils==0.21.2
- exceptiongroup==1.2.2
- filelock==3.18.0
- identify==2.6.9
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- jinja2==3.1.6
- markupsafe==3.0.2
- nodeenv==1.9.1
- packaging==24.2
- platformdirs==4.3.7
- pluggy==1.5.0
- pre-commit==4.2.0
- pygments==2.19.1
- pyproject-api==1.9.0
- pytest==8.3.5
- pytest-cov==6.0.0
- python-dateutil==2.9.0.post0
- pytz==2025.2
- pyyaml==6.0.2
- requests==2.32.3
- six==1.17.0
- snowballstemmer==2.2.0
- sphinx==7.4.7
- sphinx-prompt==1.8.0
- sphinxcontrib-applehelp==2.0.0
- sphinxcontrib-devhelp==2.0.0
- sphinxcontrib-htmlhelp==2.1.0
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==2.0.0
- sphinxcontrib-serializinghtml==2.0.0
- tomli==2.2.1
- tox==4.25.0
- typing-extensions==4.13.0
- urllib3==2.3.0
- virtualenv==20.29.3
- zipp==3.21.0
prefix: /opt/conda/envs/cti-python-stix2
| [
"stix2/test/test_indicator.py::test_cannot_assign_to_indicator_attributes",
"stix2/test/test_malware.py::test_cannot_assign_to_malware_attributes",
"stix2/test/test_relationship.py::test_cannot_assign_to_relationship_attributes"
]
| []
| [
"stix2/test/test_indicator.py::test_indicator_with_all_required_properties",
"stix2/test/test_indicator.py::test_indicator_autogenerated_properties",
"stix2/test/test_indicator.py::test_indicator_type_must_be_indicator",
"stix2/test/test_indicator.py::test_indicator_id_must_start_with_indicator",
"stix2/test/test_indicator.py::test_indicator_required_properties",
"stix2/test/test_indicator.py::test_indicator_required_property_pattern",
"stix2/test/test_indicator.py::test_indicator_created_ref_invalid_format",
"stix2/test/test_indicator.py::test_indicator_revoked_invalid",
"stix2/test/test_indicator.py::test_invalid_kwarg_to_indicator",
"stix2/test/test_indicator.py::test_created_modified_time_are_identical_by_default",
"stix2/test/test_indicator.py::test_parse_indicator[{\\n",
"stix2/test/test_indicator.py::test_parse_indicator[data1]",
"stix2/test/test_malware.py::test_malware_with_all_required_properties",
"stix2/test/test_malware.py::test_malware_autogenerated_properties",
"stix2/test/test_malware.py::test_malware_type_must_be_malware",
"stix2/test/test_malware.py::test_malware_id_must_start_with_malware",
"stix2/test/test_malware.py::test_malware_required_properties",
"stix2/test/test_malware.py::test_malware_required_property_name",
"stix2/test/test_malware.py::test_invalid_kwarg_to_malware",
"stix2/test/test_malware.py::test_parse_malware[{\\n",
"stix2/test/test_malware.py::test_parse_malware[data1]",
"stix2/test/test_malware.py::test_parse_malware_invalid_labels",
"stix2/test/test_malware.py::test_parse_malware_kill_chain_phases",
"stix2/test/test_malware.py::test_parse_malware_clean_kill_chain_phases",
"stix2/test/test_observed_data.py::test_observed_data_example",
"stix2/test/test_observed_data.py::test_observed_data_example_with_refs",
"stix2/test/test_observed_data.py::test_observed_data_example_with_bad_refs",
"stix2/test/test_observed_data.py::test_parse_observed_data[{\\n",
"stix2/test/test_observed_data.py::test_parse_observed_data[data1]",
"stix2/test/test_observed_data.py::test_parse_artifact_valid[\"0\":",
"stix2/test/test_observed_data.py::test_parse_artifact_invalid[\"0\":",
"stix2/test/test_observed_data.py::test_parse_autonomous_system_valid[\"0\":",
"stix2/test/test_observed_data.py::test_parse_email_address[{\\n",
"stix2/test/test_observed_data.py::test_parse_email_message[\\n",
"stix2/test/test_observed_data.py::test_parse_file_archive[\"0\":",
"stix2/test/test_observed_data.py::test_parse_email_message_with_at_least_one_error[\\n",
"stix2/test/test_observed_data.py::test_parse_basic_tcp_traffic[\\n",
"stix2/test/test_observed_data.py::test_parse_basic_tcp_traffic_with_error[\\n",
"stix2/test/test_observed_data.py::test_observed_data_with_process_example",
"stix2/test/test_observed_data.py::test_artifact_example",
"stix2/test/test_observed_data.py::test_artifact_mutual_exclusion_error",
"stix2/test/test_observed_data.py::test_directory_example",
"stix2/test/test_observed_data.py::test_directory_example_ref_error",
"stix2/test/test_observed_data.py::test_domain_name_example",
"stix2/test/test_observed_data.py::test_domain_name_example_invalid_ref_type",
"stix2/test/test_observed_data.py::test_file_example",
"stix2/test/test_observed_data.py::test_file_example_with_NTFSExt",
"stix2/test/test_observed_data.py::test_file_example_with_empty_NTFSExt",
"stix2/test/test_observed_data.py::test_file_example_with_PDFExt",
"stix2/test/test_observed_data.py::test_file_example_with_PDFExt_Object",
"stix2/test/test_observed_data.py::test_file_example_with_RasterImageExt_Object",
"stix2/test/test_observed_data.py::test_file_example_with_WindowsPEBinaryExt",
"stix2/test/test_observed_data.py::test_file_example_encryption_error",
"stix2/test/test_observed_data.py::test_ip4_address_example",
"stix2/test/test_observed_data.py::test_ip4_address_example_cidr",
"stix2/test/test_observed_data.py::test_ip6_address_example",
"stix2/test/test_observed_data.py::test_mac_address_example",
"stix2/test/test_observed_data.py::test_network_traffic_example",
"stix2/test/test_observed_data.py::test_network_traffic_http_request_example",
"stix2/test/test_observed_data.py::test_network_traffic_icmp_example",
"stix2/test/test_observed_data.py::test_network_traffic_socket_example",
"stix2/test/test_observed_data.py::test_network_traffic_tcp_example",
"stix2/test/test_observed_data.py::test_mutex_example",
"stix2/test/test_observed_data.py::test_process_example",
"stix2/test/test_observed_data.py::test_process_example_empty_error",
"stix2/test/test_observed_data.py::test_process_example_empty_with_extensions",
"stix2/test/test_observed_data.py::test_process_example_windows_process_ext_empty",
"stix2/test/test_observed_data.py::test_process_example_extensions_empty",
"stix2/test/test_observed_data.py::test_process_example_with_WindowsProcessExt_Object",
"stix2/test/test_observed_data.py::test_process_example_with_WindowsServiceExt",
"stix2/test/test_observed_data.py::test_process_example_with_WindowsProcessServiceExt",
"stix2/test/test_observed_data.py::test_software_example",
"stix2/test/test_observed_data.py::test_url_example",
"stix2/test/test_observed_data.py::test_user_account_example",
"stix2/test/test_observed_data.py::test_user_account_unix_account_ext_example",
"stix2/test/test_observed_data.py::test_windows_registry_key_example",
"stix2/test/test_observed_data.py::test_x509_certificate_example",
"stix2/test/test_relationship.py::test_relationship_all_required_properties",
"stix2/test/test_relationship.py::test_relationship_autogenerated_properties",
"stix2/test/test_relationship.py::test_relationship_type_must_be_relationship",
"stix2/test/test_relationship.py::test_relationship_id_must_start_with_relationship",
"stix2/test/test_relationship.py::test_relationship_required_property_relationship_type",
"stix2/test/test_relationship.py::test_relationship_missing_some_required_properties",
"stix2/test/test_relationship.py::test_relationship_required_properties_target_ref",
"stix2/test/test_relationship.py::test_invalid_kwarg_to_relationship",
"stix2/test/test_relationship.py::test_create_relationship_from_objects_rather_than_ids",
"stix2/test/test_relationship.py::test_create_relationship_with_positional_args",
"stix2/test/test_relationship.py::test_parse_relationship[{\\n",
"stix2/test/test_relationship.py::test_parse_relationship[data1]"
]
| []
| BSD 3-Clause "New" or "Revised" License | 1,321 | [
"stix2/exceptions.py",
"stix2/base.py"
]
| [
"stix2/exceptions.py",
"stix2/base.py"
]
|
|
python-cmd2__cmd2-114 | bfd50611a7647af8b56fe34babf0b634708aaae9 | 2017-06-02 01:40:52 | ddfd3d9a400ae81468e9abcc89fe690c30b7ec7f | diff --git a/cmd2.py b/cmd2.py
index 34b17e94..4befe019 100755
--- a/cmd2.py
+++ b/cmd2.py
@@ -576,7 +576,6 @@ class Cmd(cmd.Cmd):
Line-oriented command interpreters are often useful for test harnesses, internal tools, and rapid prototypes.
"""
# Attributes which are NOT dynamically settable at runtime
-
allow_cli_args = True # Should arguments passed on the command-line be processed as commands?
allow_redirection = True # Should output redirection and pipes be allowed
blankLinesAllowed = False
@@ -588,6 +587,7 @@ class Cmd(cmd.Cmd):
default_to_shell = False # Attempt to run unrecognized commands as shell commands
defaultExtension = 'txt' # For ``save``, ``load``, etc.
excludeFromHistory = '''run r list l history hi ed edit li eof'''.split()
+ exclude_from_help = ['do_eof'] # Commands to exclude from the help menu
# make sure your terminators are not in legalChars!
legalChars = u'!#$%.:?@_-' + pyparsing.alphanums + pyparsing.alphas8bit
@@ -666,11 +666,13 @@ class Cmd(cmd.Cmd):
# noinspection PyUnresolvedReferences
self.keywords = self.reserved_words + [fname[3:] for fname in dir(self)
if fname.startswith('do_')]
- self.parser_manager = ParserManager(redirector=self.redirector, terminators=self.terminators, multilineCommands=self.multilineCommands,
- legalChars=self.legalChars, commentGrammars=self.commentGrammars,
- commentInProgress=self.commentInProgress, case_insensitive=self.case_insensitive,
- blankLinesAllowed=self.blankLinesAllowed, prefixParser=self.prefixParser,
- preparse=self.preparse, postparse=self.postparse, shortcuts=self.shortcuts)
+ self.parser_manager = ParserManager(redirector=self.redirector, terminators=self.terminators,
+ multilineCommands=self.multilineCommands,
+ legalChars=self.legalChars, commentGrammars=self.commentGrammars,
+ commentInProgress=self.commentInProgress,
+ case_insensitive=self.case_insensitive,
+ blankLinesAllowed=self.blankLinesAllowed, prefixParser=self.prefixParser,
+ preparse=self.preparse, postparse=self.postparse, shortcuts=self.shortcuts)
self._transcript_files = transcript_files
# Used to enable the ability for a Python script to quit the application
@@ -1126,19 +1128,60 @@ class Cmd(cmd.Cmd):
def do_help(self, arg):
"""List available commands with "help" or detailed help with "help cmd"."""
if arg:
+ # Getting help for a specific command
funcname = self._func_named(arg)
if funcname:
fn = getattr(self, funcname)
try:
+ # Use Optparse help for @options commands
fn.optionParser.print_help(file=self.stdout)
except AttributeError:
+ # No special behavior needed, delegate to cmd base class do_help()
cmd.Cmd.do_help(self, funcname[3:])
else:
- cmd.Cmd.do_help(self, arg)
+ # Show a menu of what commands help can be gotten for
+ self._help_menu()
+
+ def _help_menu(self):
+ """Show a list of commands which help can be displayed for.
+ """
+ # Get a list of all method names
+ names = self.get_names()
+
+ # Remove any command names which are explicitly excluded from the help menu
+ for name in self.exclude_from_help:
+ names.remove(name)
+
+ cmds_doc = []
+ cmds_undoc = []
+ help_dict = {}
+ for name in names:
+ if name[:5] == 'help_':
+ help_dict[name[5:]] = 1
+ names.sort()
+ # There can be duplicates if routines overridden
+ prevname = ''
+ for name in names:
+ if name[:3] == 'do_':
+ if name == prevname:
+ continue
+ prevname = name
+ command = name[3:]
+ if command in help_dict:
+ cmds_doc.append(command)
+ del help_dict[command]
+ elif getattr(self, name).__doc__:
+ cmds_doc.append(command)
+ else:
+ cmds_undoc.append(command)
+ self.stdout.write("%s\n" % str(self.doc_leader))
+ self.print_topics(self.doc_header, cmds_doc, 15, 80)
+ self.print_topics(self.misc_header, list(help_dict.keys()), 15, 80)
+ self.print_topics(self.undoc_header, cmds_undoc, 15, 80)
# noinspection PyUnusedLocal
def do_shortcuts(self, args):
- """Lists single-key shortcuts available."""
+ """Lists shortcuts (aliases) available."""
result = "\n".join('%s: %s' % (sc[0], sc[1]) for sc in sorted(self.shortcuts))
self.stdout.write("Single-key shortcuts for other commands:\n{}\n".format(result))
@@ -1466,7 +1509,7 @@ class Cmd(cmd.Cmd):
py: Enters interactive Python mode.
End with ``Ctrl-D`` (Unix) / ``Ctrl-Z`` (Windows), ``quit()``, '`exit()``.
Non-python commands can be issued with ``cmd("your command")``.
- Run python code from external files with ``run("filename.py")``
+ Run python code from external script files with ``run("filename.py")``
"""
if self._in_py:
self.perror("Recursively entering interactive Python consoles is not allowed.", traceback_war=False)
@@ -1759,7 +1802,7 @@ Edited files are run on close if the `autorun_on_edit` settable parameter is Tru
def do__relative_load(self, arg=None):
"""Runs commands in script at file or URL.
- Usage: load [file_path]
+ Usage: _relative_load [file_path]
optional argument:
file_path a file path or URL pointing to a script
@@ -1769,6 +1812,8 @@ Script should contain one command per line, just like command would be typed in
If this is called from within an already-running script, the filename will be interpreted
relative to the already-running script's directory.
+
+NOTE: This command is intended to only be used within text file scripts.
"""
if arg:
arg = arg.split(None, 1)
@@ -1922,28 +1967,31 @@ Script should contain one command per line, just like command would be typed in
self.postloop()
+# noinspection PyPep8Naming
class ParserManager:
-
- def __init__(self, redirector, terminators, multilineCommands, legalChars, commentGrammars,
- commentInProgress, case_insensitive, blankLinesAllowed, prefixParser,
- preparse, postparse, shortcuts):
- "Creates and uses parsers for user input according to app's paramters."
+ """
+ Class which encapsulates all of the pyparsing parser functionality for cmd2 in a single location.
+ """
+ def __init__(self, redirector, terminators, multilineCommands, legalChars, commentGrammars, commentInProgress,
+ case_insensitive, blankLinesAllowed, prefixParser, preparse, postparse, shortcuts):
+ """Creates and uses parsers for user input according to app's paramters."""
self.commentGrammars = commentGrammars
self.preparse = preparse
self.postparse = postparse
self.shortcuts = shortcuts
- self.main_parser = self._build_main_parser(
- redirector=redirector, terminators=terminators, multilineCommands=multilineCommands,
- legalChars=legalChars,
- commentInProgress=commentInProgress, case_insensitive=case_insensitive,
- blankLinesAllowed=blankLinesAllowed, prefixParser=prefixParser)
- self.input_source_parser = self._build_input_source_parser(legalChars=legalChars, commentInProgress=commentInProgress)
+ self.main_parser = self._build_main_parser(redirector=redirector, terminators=terminators,
+ multilineCommands=multilineCommands, legalChars=legalChars,
+ commentInProgress=commentInProgress,
+ case_insensitive=case_insensitive,
+ blankLinesAllowed=blankLinesAllowed, prefixParser=prefixParser)
+ self.input_source_parser = self._build_input_source_parser(legalChars=legalChars,
+ commentInProgress=commentInProgress)
def _build_main_parser(self, redirector, terminators, multilineCommands, legalChars,
commentInProgress, case_insensitive, blankLinesAllowed, prefixParser):
- "Builds a PyParsing parser for interpreting user commands."
+ """Builds a PyParsing parser for interpreting user commands."""
# Build several parsing components that are eventually compiled into overall parser
output_destination_parser = (pyparsing.Literal(redirector * 2) |
@@ -1959,7 +2007,8 @@ class ParserManager:
pipe = pyparsing.Keyword('|', identChars='|')
do_not_parse = self.commentGrammars | commentInProgress | pyparsing.quotedString
after_elements = \
- pyparsing.Optional(pipe + pyparsing.SkipTo(output_destination_parser ^ string_end, ignore=do_not_parse)('pipeTo')) + \
+ pyparsing.Optional(pipe + pyparsing.SkipTo(output_destination_parser ^ string_end,
+ ignore=do_not_parse)('pipeTo')) + \
pyparsing.Optional(output_destination_parser +
pyparsing.SkipTo(string_end,
ignore=do_not_parse).setParseAction(lambda x: x[0].strip())('outputTo'))
@@ -1972,24 +2021,23 @@ class ParserManager:
blankLineTerminator = (pyparsing.lineEnd + pyparsing.lineEnd)('terminator')
blankLineTerminator.setResultsName('terminator')
blankLineTerminationParser = ((multilineCommand ^ oneline_command) +
- pyparsing.SkipTo(blankLineTerminator,
- ignore=do_not_parse).setParseAction(
- lambda x: x[0].strip())('args') +
- blankLineTerminator)('statement')
+ pyparsing.SkipTo(blankLineTerminator, ignore=do_not_parse).setParseAction(
+ lambda x: x[0].strip())('args') + blankLineTerminator)('statement')
multilineParser = (((multilineCommand ^ oneline_command) +
- pyparsing.SkipTo(terminator_parser,
- ignore=do_not_parse).setParseAction(
- lambda x: x[0].strip())('args') + terminator_parser)('statement') +
- pyparsing.SkipTo(output_destination_parser ^ pipe ^ string_end, ignore=do_not_parse).setParseAction(
- lambda x: x[0].strip())('suffix') + after_elements)
+ pyparsing.SkipTo(terminator_parser,
+ ignore=do_not_parse).setParseAction(lambda x: x[0].strip())('args') +
+ terminator_parser)('statement') +
+ pyparsing.SkipTo(output_destination_parser ^ pipe ^ string_end,
+ ignore=do_not_parse).setParseAction(lambda x: x[0].strip())('suffix') +
+ after_elements)
multilineParser.ignore(commentInProgress)
singleLineParser = ((oneline_command +
- pyparsing.SkipTo(terminator_parser ^ string_end ^ pipe ^ output_destination_parser,
- ignore=do_not_parse).setParseAction(
- lambda x: x[0].strip())('args'))('statement') +
- pyparsing.Optional(terminator_parser) + after_elements)
+ pyparsing.SkipTo(terminator_parser ^ string_end ^ pipe ^ output_destination_parser,
+ ignore=do_not_parse).setParseAction(
+ lambda x: x[0].strip())('args'))('statement') +
+ pyparsing.Optional(terminator_parser) + after_elements)
blankLineTerminationParser = blankLineTerminationParser.setResultsName('statement')
@@ -2003,8 +2051,9 @@ class ParserManager:
parser.ignore(self.commentGrammars)
return parser
- def _build_input_source_parser(self, legalChars, commentInProgress):
- "Builds a PyParsing parser for alternate user input sources (from file, pipe, etc.)"
+ @staticmethod
+ def _build_input_source_parser(legalChars, commentInProgress):
+ """Builds a PyParsing parser for alternate user input sources (from file, pipe, etc.)"""
input_mark = pyparsing.Literal('<')
input_mark.setParseAction(lambda x: '')
@@ -2049,8 +2098,6 @@ class ParserManager:
return p
-
-
class HistoryItem(str):
"""Class used to represent an item in the History list.
| Add ability to exclude some commands from help menu
It would be nice to be able to explicitly exclude certain commands from being included in the help menu.
For example, the **eof** "command" shouldn't ever show up in the help menu. This isn't something an end user would ever really want to manually enter as a command, but is something that will be encountered while running a script or when <Ctrl>-d is pressed.
It would be nice to include a member variable which is a list of commands to exclude from the help menu. By default, "do_eof" should be in there and it is debatable whether or not "do__relative_load" should be there as well. | python-cmd2/cmd2 | diff --git a/tests/conftest.py b/tests/conftest.py
index b036943d..3977de5f 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -15,8 +15,8 @@ import cmd2
# Help text for base cmd2.Cmd application
BASE_HELP = """Documented commands (type help <topic>):
========================================
-_relative_load edit help list pause quit save shell show
-cmdenvironment eof history load py run set shortcuts
+_relative_load edit history load py run set shortcuts
+cmdenvironment help list pause quit save shell show
"""
# Help text for the history command
diff --git a/tests/test_transcript.py b/tests/test_transcript.py
index 6049119f..c31b519d 100644
--- a/tests/test_transcript.py
+++ b/tests/test_transcript.py
@@ -106,8 +106,9 @@ def test_base_with_transcript(_cmdline_app):
Documented commands (type help <topic>):
========================================
-_relative_load edit help list orate py run say shell show
-cmdenvironment eof history load pause quit save set shortcuts speak
+_relative_load help load py save shell speak
+cmdenvironment history orate quit say shortcuts
+edit list pause run set show
(Cmd) help say
Repeats what you tell me to.
diff --git a/tests/transcript.txt b/tests/transcript.txt
index 1e10ee31..013e2d0f 100644
--- a/tests/transcript.txt
+++ b/tests/transcript.txt
@@ -2,8 +2,9 @@
Documented commands (type help <topic>):
========================================
-_relative_load edit help list orate py run say shell show
-cmdenvironment eof history load pause quit save set shortcuts speak
+_relative_load help load py save shell speak
+cmdenvironment history orate quit say shortcuts
+edit list pause run set show
(Cmd) help say
Repeats what you tell me to.
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 2
},
"num_modified_files": 1
} | 0.7 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pyparsing>=2.0.1 six",
"pip_packages": [
"pytest",
"mock",
"sphinx",
"sphinx-rtd-theme"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.16
babel==2.17.0
certifi==2025.1.31
charset-normalizer==3.4.1
-e git+https://github.com/python-cmd2/cmd2.git@bfd50611a7647af8b56fe34babf0b634708aaae9#egg=cmd2
docutils==0.21.2
exceptiongroup==1.2.2
idna==3.10
imagesize==1.4.1
importlib_metadata==8.6.1
iniconfig==2.1.0
Jinja2==3.1.6
MarkupSafe==3.0.2
mock==5.2.0
packaging==24.2
pluggy==1.5.0
Pygments==2.19.1
pyparsing @ file:///croot/pyparsing_1731445506121/work
pytest==8.3.5
requests==2.32.3
six @ file:///tmp/build/80754af9/six_1644875935023/work
snowballstemmer==2.2.0
Sphinx==7.4.7
sphinx-rtd-theme==3.0.2
sphinxcontrib-applehelp==2.0.0
sphinxcontrib-devhelp==2.0.0
sphinxcontrib-htmlhelp==2.1.0
sphinxcontrib-jquery==4.1
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==2.0.0
sphinxcontrib-serializinghtml==2.0.0
tomli==2.2.1
urllib3==2.3.0
zipp==3.21.0
| name: cmd2
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- pyparsing=3.2.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- six=1.16.0=pyhd3eb1b0_1
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.16
- babel==2.17.0
- certifi==2025.1.31
- charset-normalizer==3.4.1
- docutils==0.21.2
- exceptiongroup==1.2.2
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- jinja2==3.1.6
- markupsafe==3.0.2
- mock==5.2.0
- packaging==24.2
- pluggy==1.5.0
- pygments==2.19.1
- pytest==8.3.5
- requests==2.32.3
- snowballstemmer==2.2.0
- sphinx==7.4.7
- sphinx-rtd-theme==3.0.2
- sphinxcontrib-applehelp==2.0.0
- sphinxcontrib-devhelp==2.0.0
- sphinxcontrib-htmlhelp==2.1.0
- sphinxcontrib-jquery==4.1
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==2.0.0
- sphinxcontrib-serializinghtml==2.0.0
- tomli==2.2.1
- urllib3==2.3.0
- zipp==3.21.0
prefix: /opt/conda/envs/cmd2
| [
"tests/test_transcript.py::test_base_with_transcript",
"tests/test_transcript.py::test_transcript_from_cmdloop"
]
| []
| [
"tests/test_transcript.py::Cmd2TestCase::runTest",
"tests/test_transcript.py::TestMyAppCase::runTest",
"tests/test_transcript.py::test_optparser",
"tests/test_transcript.py::test_optparser_nosuchoption",
"tests/test_transcript.py::test_comment_stripping",
"tests/test_transcript.py::test_optarser_correct_args_with_quotes_and_midline_options",
"tests/test_transcript.py::test_optarser_options_with_spaces_in_quotes",
"tests/test_transcript.py::test_commands_at_invocation",
"tests/test_transcript.py::test_select_options",
"tests/test_transcript.py::test_multiline_command_transcript_with_comments_at_beginning",
"tests/test_transcript.py::test_invalid_syntax",
"tests/test_transcript.py::test_regex_transcript"
]
| []
| MIT License | 1,322 | [
"cmd2.py"
]
| [
"cmd2.py"
]
|
|
python-hyper__wsproto-29 | f74bd34194d496b690246dfa78dcf133b6f8be34 | 2017-06-02 05:17:37 | b4bdf4e8491a02e6f7ddc614dd9ba2fe33540cd9 | diff --git a/tox.ini b/tox.ini
index c7a5b7e..cb7abad 100644
--- a/tox.ini
+++ b/tox.ini
@@ -3,7 +3,7 @@ envlist = py27, py35, py36, lint, docs
[testenv]
deps = -r{toxinidir}/test_requirements.txt
-commands = pytest --cov {envsitepackagesdir}/wsproto --cov-config {toxinidir}/.coveragerc {toxinidir}/test/
+commands = pytest --cov {envsitepackagesdir}/wsproto --cov-report term-missing --cov-config {toxinidir}/.coveragerc {toxinidir}/test/
[testenv:lint]
basepython = python3.6
diff --git a/wsproto/compat.py b/wsproto/compat.py
index 9e8b7b0..267fa78 100644
--- a/wsproto/compat.py
+++ b/wsproto/compat.py
@@ -1,4 +1,17 @@
+# flake8: noqa
+
import sys
+
PY2 = sys.version_info.major == 2
PY3 = sys.version_info.major == 3
+
+
+if PY3:
+ unicode = str
+
+ def Utf8Validator():
+ return None
+else:
+ unicode = unicode
+ from .utf8validator import Utf8Validator
diff --git a/wsproto/frame_protocol.py b/wsproto/frame_protocol.py
index 8c5e23f..9426826 100644
--- a/wsproto/frame_protocol.py
+++ b/wsproto/frame_protocol.py
@@ -14,8 +14,7 @@ from collections import namedtuple
from enum import Enum, IntEnum
-from .compat import PY2, PY3
-from .utf8validator import Utf8Validator
+from .compat import unicode, Utf8Validator
try:
from wsaccel.xormask import XorMaskerSimple
@@ -34,10 +33,6 @@ class XorMaskerNull:
return data
-if PY3:
- unicode = str
-
-
# RFC6455, Section 5.2 - Base Framing Protocol
# Payload length constants
@@ -202,8 +197,7 @@ class MessageDecoder(object):
raise ParseFailed("expected CONTINUATION, got %r" % frame.opcode)
if frame.opcode is Opcode.TEXT:
- if PY2:
- self.validator = Utf8Validator()
+ self.validator = Utf8Validator()
self.decoder = getincrementaldecoder("utf-8")()
finished = frame.frame_finished and frame.message_finished
@@ -223,7 +217,7 @@ class MessageDecoder(object):
def decode_payload(self, data, finished):
if self.validator is not None:
- results = self.validator.validate(str(data))
+ results = self.validator.validate(bytes(data))
if not results[0] or (finished and not results[1]):
raise ParseFailed(u'encountered invalid UTF-8 while processing'
' text message at payload octet index %d' %
@@ -433,8 +427,9 @@ class FrameProtocol(object):
code <= MAX_PROTOCOL_CLOSE_REASON:
raise ParseFailed(
"CLOSE with unknown reserved code")
- if PY2:
- results = Utf8Validator().validate(str(data[2:]))
+ validator = Utf8Validator()
+ if validator is not None:
+ results = validator.validate(bytes(data[2:]))
if not (results[0] and results[1]):
raise ParseFailed(u'encountered invalid UTF-8 while'
' processing close message at payload'
@@ -497,7 +492,7 @@ class FrameProtocol(object):
return self._serialize_frame(Opcode.CLOSE, payload)
- def pong(self, payload=None):
+ def pong(self, payload=b''):
return self._serialize_frame(Opcode.PONG, payload)
def send_data(self, payload=b'', fin=True):
@@ -506,6 +501,8 @@ class FrameProtocol(object):
elif isinstance(payload, unicode):
opcode = Opcode.TEXT
payload = payload.encode('utf-8')
+ else:
+ raise ValueError('Must provide bytes or text')
if self._outbound_opcode is None:
self._outbound_opcode = opcode
@@ -522,9 +519,6 @@ class FrameProtocol(object):
def _serialize_frame(self, opcode, payload=b'', fin=True):
rsv = RsvBits(False, False, False)
for extension in reversed(self.extensions):
- if not extension.enabled():
- continue
-
rsv, payload = extension.frame_outbound(self, opcode, rsv, payload,
fin)
| We need more send-side tests.
With the recent rewrite we now have fairly good coverage on the receive side but we need a lot more coverage on the send side. | python-hyper/wsproto | diff --git a/test/test_frame_protocol.py b/test/test_frame_protocol.py
index d4555af..0c25c07 100644
--- a/test/test_frame_protocol.py
+++ b/test/test_frame_protocol.py
@@ -1,14 +1,31 @@
# -*- coding: utf-8 -*-
-import pytest
+import itertools
from binascii import unhexlify
from codecs import getincrementaldecoder
import struct
+import pytest
+
import wsproto.frame_protocol as fp
import wsproto.extensions as wpext
+class FakeValidator(object):
+ def __init__(self):
+ self.validated = b''
+
+ self.valid = True
+ self.ends_on_complete = True
+ self.octet = 0
+ self.code_point = 0
+
+ def validate(self, data):
+ self.validated += data
+ return (self.valid, self.ends_on_complete, self.octet,
+ self.code_point)
+
+
class TestBuffer(object):
def test_consume_at_most_zero_bytes(self):
buf = fp.Buffer(b'xxyyy')
@@ -320,6 +337,65 @@ class TestMessageDecoder(object):
assert frame.message_finished is True
assert frame.payload == text_payload[(split // 3):]
+ def send_frame_to_validator(self, payload, finished):
+ decoder = fp.MessageDecoder()
+ frame = fp.Frame(
+ opcode=fp.Opcode.TEXT,
+ payload=payload,
+ frame_finished=finished,
+ message_finished=True,
+ )
+ frame = decoder.process_frame(frame)
+
+ def test_text_message_hits_validator(self, monkeypatch):
+ validator = FakeValidator()
+ monkeypatch.setattr(fp, 'Utf8Validator', lambda: validator)
+
+ text_payload = u'fñör∂'
+ binary_payload = text_payload.encode('utf8')
+ self.send_frame_to_validator(binary_payload, True)
+
+ assert validator.validated == binary_payload
+
+ def test_message_validation_failure_fails_properly(self, monkeypatch):
+ validator = FakeValidator()
+ validator.valid = False
+
+ monkeypatch.setattr(fp, 'Utf8Validator', lambda: validator)
+
+ with pytest.raises(fp.ParseFailed):
+ self.send_frame_to_validator(b'', True)
+
+ def test_message_validation_finish_on_incomplete(self, monkeypatch):
+ validator = FakeValidator()
+ validator.valid = True
+ validator.ends_on_complete = False
+
+ monkeypatch.setattr(fp, 'Utf8Validator', lambda: validator)
+
+ with pytest.raises(fp.ParseFailed):
+ self.send_frame_to_validator(b'', True)
+
+ def test_message_validation_unfinished_on_incomplete(self, monkeypatch):
+ validator = FakeValidator()
+ validator.valid = True
+ validator.ends_on_complete = False
+
+ monkeypatch.setattr(fp, 'Utf8Validator', lambda: validator)
+
+ self.send_frame_to_validator(b'', False)
+
+ def test_message_no_validation_can_still_fail(self, monkeypatch):
+ validator = FakeValidator()
+ monkeypatch.setattr(fp, 'Utf8Validator', lambda: validator)
+
+ payload = u'fñörd'
+ payload = payload.encode('iso-8859-1')
+
+ with pytest.raises(fp.ParseFailed) as exc:
+ self.send_frame_to_validator(payload, True)
+ assert exc.value.code == fp.CloseReason.INVALID_FRAME_PAYLOAD_DATA
+
class TestFrameDecoder(object):
def _single_frame_test(self, client, frame_bytes, opcode, payload,
@@ -552,6 +628,15 @@ class TestFrameDecoder(object):
split=7,
)
+ def test_eight_byte_length_with_msb_set(self):
+ frame_bytes = b'\x81\x7f\x80\x80\x80\x80\x80\x80\x80\x80'
+
+ self._parse_failure_test(
+ client=True,
+ frame_bytes=frame_bytes,
+ close_reason=fp.CloseReason.PROTOCOL_ERROR,
+ )
+
def test_not_enough_for_mask(self):
payload = bytearray(b'xy')
mask = bytearray(b'abcd')
@@ -631,6 +716,17 @@ class TestFrameDecoder(object):
split=65535,
)
+ def test_overly_long_control_frame(self):
+ payload = b'x' * 128
+ payload_len = struct.pack('!H', len(payload))
+ frame_bytes = b'\x89\x7e' + payload_len + payload
+
+ self._parse_failure_test(
+ client=True,
+ frame_bytes=frame_bytes,
+ close_reason=fp.CloseReason.PROTOCOL_ERROR,
+ )
+
class TestFrameDecoderExtensions(object):
class FakeExtension(wpext.Extension):
@@ -638,10 +734,11 @@ class TestFrameDecoderExtensions(object):
def __init__(self):
self._inbound_header_called = False
- self._rsv_bit_set = False
+ self._inbound_rsv_bit_set = False
self._inbound_payload_data_called = False
self._inbound_complete_called = False
self._fail_inbound_complete = False
+ self._outbound_rsv_bit_set = False
def enabled(self):
return True
@@ -650,7 +747,7 @@ class TestFrameDecoderExtensions(object):
self._inbound_header_called = True
if opcode is fp.Opcode.PONG:
return fp.CloseReason.MANDATORY_EXT
- self._rsv_bit_set = rsv[2]
+ self._inbound_rsv_bit_set = rsv.rsv3
return fp.RsvBits(False, False, True)
def frame_inbound_payload_data(self, proto, data):
@@ -659,7 +756,7 @@ class TestFrameDecoderExtensions(object):
return fp.CloseReason.POLICY_VIOLATION
elif data == b'ragequit':
self._fail_inbound_complete = True
- if self._rsv_bit_set:
+ if self._inbound_rsv_bit_set:
data = data.decode('utf-8').upper().encode('utf-8')
return data
@@ -667,9 +764,18 @@ class TestFrameDecoderExtensions(object):
self._inbound_complete_called = True
if self._fail_inbound_complete:
return fp.CloseReason.ABNORMAL_CLOSURE
- if fin and self._rsv_bit_set:
+ if fin and self._inbound_rsv_bit_set:
return u'™'.encode('utf-8')
+ def frame_outbound(self, proto, opcode, rsv, data, fin):
+ if opcode is fp.Opcode.TEXT:
+ rsv = fp.RsvBits(rsv.rsv1, rsv.rsv2, True)
+ self._outbound_rsv_bit_set = True
+ if fin and self._outbound_rsv_bit_set:
+ data += u'®'.encode('utf-8')
+ self._outbound_rsv_bit_set = False
+ return rsv, data
+
def test_rsv_bit(self):
ext = self.FakeExtension()
decoder = fp.FrameDecoder(client=True, extensions=[ext])
@@ -680,7 +786,7 @@ class TestFrameDecoderExtensions(object):
frame = decoder.process_buffer()
assert frame is not None
assert ext._inbound_header_called
- assert ext._rsv_bit_set
+ assert ext._inbound_rsv_bit_set
def test_wrong_rsv_bit(self):
ext = self.FakeExtension()
@@ -719,7 +825,7 @@ class TestFrameDecoderExtensions(object):
frame = decoder.process_buffer()
assert frame is not None
assert ext._inbound_header_called
- assert ext._rsv_bit_set
+ assert ext._inbound_rsv_bit_set
assert ext._inbound_payload_data_called
assert frame.payload == expected_payload
@@ -736,7 +842,7 @@ class TestFrameDecoderExtensions(object):
frame = decoder.process_buffer()
assert frame is not None
assert ext._inbound_header_called
- assert not ext._rsv_bit_set
+ assert not ext._inbound_rsv_bit_set
assert ext._inbound_payload_data_called
assert frame.payload == expected_payload
@@ -766,7 +872,7 @@ class TestFrameDecoderExtensions(object):
frame = decoder.process_buffer()
assert frame is not None
assert ext._inbound_header_called
- assert ext._rsv_bit_set
+ assert ext._inbound_rsv_bit_set
assert ext._inbound_payload_data_called
assert ext._inbound_complete_called
assert frame.payload == expected_payload
@@ -784,7 +890,7 @@ class TestFrameDecoderExtensions(object):
frame = decoder.process_buffer()
assert frame is not None
assert ext._inbound_header_called
- assert not ext._rsv_bit_set
+ assert not ext._inbound_rsv_bit_set
assert ext._inbound_payload_data_called
assert ext._inbound_complete_called
assert frame.payload == expected_payload
@@ -802,6 +908,27 @@ class TestFrameDecoderExtensions(object):
decoder.process_buffer()
assert excinfo.value.code is fp.CloseReason.ABNORMAL_CLOSURE
+ def test_outbound_handling_single_frame(self):
+ ext = self.FakeExtension()
+ proto = fp.FrameProtocol(client=False, extensions=[ext])
+ payload = u'😃😄🙃😉'
+ data = proto.send_data(payload, fin=True)
+ payload = (payload + u'®').encode('utf8')
+ assert data == b'\x91' + bytearray([len(payload)]) + payload
+
+ def test_outbound_handling_multiple_frames(self):
+ ext = self.FakeExtension()
+ proto = fp.FrameProtocol(client=False, extensions=[ext])
+ payload = u'😃😄🙃😉'
+ data = proto.send_data(payload, fin=False)
+ payload = payload.encode('utf8')
+ assert data == b'\x11' + bytearray([len(payload)]) + payload
+
+ payload = u'¯\_(ツ)_/¯'
+ data = proto.send_data(payload, fin=True)
+ payload = (payload + u'®').encode('utf8')
+ assert data == b'\x80' + bytearray([len(payload)]) + payload
+
class TestFrameProtocolReceive(object):
def test_long_text_message(self):
@@ -820,7 +947,9 @@ class TestFrameProtocolReceive(object):
assert frame.payload == payload
def _close_test(self, code, reason=None, reason_bytes=None):
- payload = struct.pack('!H', code)
+ payload = b''
+ if code:
+ payload += struct.pack('!H', code)
if reason:
payload += reason.encode('utf8')
elif reason_bytes:
@@ -834,12 +963,39 @@ class TestFrameProtocolReceive(object):
assert len(frames) == 1
frame = frames[0]
assert frame.opcode == fp.Opcode.CLOSE
- assert frame.payload[0] == code
+ assert frame.payload[0] == code or fp.CloseReason.NO_STATUS_RCVD
if reason:
assert frame.payload[1] == reason
else:
assert not frame.payload[1]
+ def test_close_no_code(self):
+ self._close_test(None)
+
+ def test_close_one_byte_code(self):
+ frame_bytes = b'\x88\x01\x0e'
+ protocol = fp.FrameProtocol(client=True, extensions=[])
+
+ with pytest.raises(fp.ParseFailed) as exc:
+ protocol.receive_bytes(frame_bytes)
+ list(protocol.received_frames())
+ assert exc.value.code == fp.CloseReason.PROTOCOL_ERROR
+
+ def test_close_bad_code(self):
+ with pytest.raises(fp.ParseFailed) as exc:
+ self._close_test(123)
+ assert exc.value.code == fp.CloseReason.PROTOCOL_ERROR
+
+ def test_close_unknown_code(self):
+ with pytest.raises(fp.ParseFailed) as exc:
+ self._close_test(2998)
+ assert exc.value.code == fp.CloseReason.PROTOCOL_ERROR
+
+ def test_close_local_only_code(self):
+ with pytest.raises(fp.ParseFailed) as exc:
+ self._close_test(fp.CloseReason.NO_STATUS_RCVD)
+ assert exc.value.code == fp.CloseReason.PROTOCOL_ERROR
+
def test_close_no_payload(self):
self._close_test(fp.CloseReason.NORMAL_CLOSURE)
@@ -863,66 +1019,240 @@ class TestFrameProtocolReceive(object):
reason_bytes=payload)
assert exc.value.code == fp.CloseReason.INVALID_FRAME_PAYLOAD_DATA
+ def test_random_control_frame(self):
+ payload = b'give me one ping vasily'
+ frame_bytes = b'\x89' + bytearray([len(payload)]) + payload
-def test_close_with_long_reason():
- # Long close reasons get silently truncated
- proto = fp.FrameProtocol(client=False, extensions=[])
- data = proto.close(code=fp.CloseReason.NORMAL_CLOSURE,
- reason="x" * 200)
- assert data == bytearray(unhexlify("887d03e8")) + b"x" * 123
-
- # While preserving valid utf-8
- proto = fp.FrameProtocol(client=False, extensions=[])
- # pound sign is 2 bytes in utf-8, so naive truncation to 123 bytes will
- # cut it in half. Instead we truncate to 122 bytes.
- data = proto.close(code=fp.CloseReason.NORMAL_CLOSURE,
- reason=u"£" * 100)
- assert data == unhexlify("887c03e8") + u"£".encode("utf-8") * 61
-
-
-def test_payload_length_decode():
- # "the minimal number of bytes MUST be used to encode the length, for
- # example, the length of a 124-byte-long string can't be encoded as the
- # sequence 126, 0, 124" -- RFC 6455
-
- def make_header(encoding_bytes, payload_len):
- if encoding_bytes == 1:
- assert payload_len <= 125
- return unhexlify("81") + bytes([payload_len])
- elif encoding_bytes == 2:
- assert payload_len < 2**16
- return unhexlify("81" "7e") + struct.pack("!H", payload_len)
- elif encoding_bytes == 8:
- return unhexlify("81" "7f") + struct.pack("!Q", payload_len)
- else:
- assert False
+ protocol = fp.FrameProtocol(client=True, extensions=[])
+ protocol.receive_bytes(frame_bytes)
+ frames = list(protocol.received_frames())
+ assert len(frames) == 1
+ frame = frames[0]
+ assert frame.opcode == fp.Opcode.PING
+ assert len(frame.payload) == len(payload)
+ assert frame.payload == payload
- def make_and_parse(encoding_bytes, payload_len):
- proto = fp.FrameProtocol(client=True, extensions=[])
- proto.receive_bytes(make_header(encoding_bytes, payload_len))
- list(proto.received_frames())
-
- # Valid lengths for 1 byte
- for payload_len in [0, 1, 2, 123, 124, 125]:
- make_and_parse(1, payload_len)
- for encoding_bytes in [2, 8]:
- with pytest.raises(fp.ParseFailed) as excinfo:
- make_and_parse(encoding_bytes, payload_len)
- assert "used {} bytes".format(encoding_bytes) in str(excinfo.value)
-
- # Valid lengths for 2 bytes
- for payload_len in [126, 127, 1000, 2**16 - 1]:
- make_and_parse(2, payload_len)
- with pytest.raises(fp.ParseFailed) as excinfo:
- make_and_parse(8, payload_len)
- assert "used 8 bytes" in str(excinfo.value)
+ def test_close_reasons_get_utf8_validated(self, monkeypatch):
+ validator = FakeValidator()
+ reason = u'ƒñø®∂'
+ monkeypatch.setattr(fp, 'Utf8Validator', lambda: validator)
- # Valid lengths for 8 bytes
- for payload_len in [2**16, 2**16 + 1, 2**32, 2**63 - 1]:
- make_and_parse(8, payload_len)
+ self._close_test(fp.CloseReason.NORMAL_CLOSURE, reason=reason)
- # Invalid lengths for 8 bytes
- for payload_len in [2**63, 2**63 + 1]:
- with pytest.raises(fp.ParseFailed) as excinfo:
- make_and_parse(8, payload_len)
- assert "non-zero MSB" in str(excinfo.value)
+ assert validator.validated == reason.encode('utf8')
+
+ def test_close_reason_failing_validation_fails(self, monkeypatch):
+ validator = FakeValidator()
+ validator.valid = False
+ reason = u'ƒñø®∂'
+ monkeypatch.setattr(fp, 'Utf8Validator', lambda: validator)
+
+ with pytest.raises(fp.ParseFailed) as exc:
+ self._close_test(fp.CloseReason.NORMAL_CLOSURE, reason=reason)
+ assert exc.value.code == fp.CloseReason.INVALID_FRAME_PAYLOAD_DATA
+
+ def test_close_reason_with_incomplete_utf8_fails(self, monkeypatch):
+ validator = FakeValidator()
+ validator.ends_on_complete = False
+ reason = u'ƒñø®∂'
+ monkeypatch.setattr(fp, 'Utf8Validator', lambda: validator)
+
+ with pytest.raises(fp.ParseFailed) as exc:
+ self._close_test(fp.CloseReason.NORMAL_CLOSURE, reason=reason)
+ assert exc.value.code == fp.CloseReason.INVALID_FRAME_PAYLOAD_DATA
+
+ def test_close_no_validation(self, monkeypatch):
+ monkeypatch.setattr(fp, 'Utf8Validator', lambda: None)
+ reason = u'ƒñø®∂'
+ self._close_test(fp.CloseReason.NORMAL_CLOSURE, reason=reason)
+
+ def test_close_no_validation_can_still_fail(self, monkeypatch):
+ validator = FakeValidator()
+ monkeypatch.setattr(fp, 'Utf8Validator', lambda: validator)
+
+ reason = u'fñörd'
+ reason = reason.encode('iso-8859-1')
+
+ with pytest.raises(fp.ParseFailed) as exc:
+ self._close_test(fp.CloseReason.NORMAL_CLOSURE,
+ reason_bytes=reason)
+ assert exc.value.code == fp.CloseReason.INVALID_FRAME_PAYLOAD_DATA
+
+
+class TestFrameProtocolSend(object):
+ def test_simplest_possible_close(self):
+ proto = fp.FrameProtocol(client=False, extensions=[])
+ data = proto.close()
+ assert data == b'\x88\x00'
+
+ def test_unreasoning_close(self):
+ proto = fp.FrameProtocol(client=False, extensions=[])
+ data = proto.close(code=fp.CloseReason.NORMAL_CLOSURE)
+ assert data == b'\x88\x02\x03\xe8'
+
+ def test_reasoned_close(self):
+ proto = fp.FrameProtocol(client=False, extensions=[])
+ reason = u'¯\_(ツ)_/¯'
+ expected_payload = struct.pack('!H', fp.CloseReason.NORMAL_CLOSURE) + \
+ reason.encode('utf8')
+ data = proto.close(code=fp.CloseReason.NORMAL_CLOSURE, reason=reason)
+ assert data == b'\x88' + bytearray([len(expected_payload)]) + \
+ expected_payload
+
+ def test_overly_reasoned_close(self):
+ proto = fp.FrameProtocol(client=False, extensions=[])
+ reason = u'¯\_(ツ)_/¯' * 10
+ data = proto.close(code=fp.CloseReason.NORMAL_CLOSURE, reason=reason)
+ assert bytes(data[0:1]) == b'\x88'
+ assert len(data) <= 127
+ assert data[4:].decode('utf8')
+
+ def test_reasoned_but_uncoded_close(self):
+ proto = fp.FrameProtocol(client=False, extensions=[])
+ with pytest.raises(TypeError):
+ proto.close(reason='termites')
+
+ def test_local_only_close_reason(self):
+ proto = fp.FrameProtocol(client=False, extensions=[])
+ data = proto.close(code=fp.CloseReason.NO_STATUS_RCVD)
+ assert data == b'\x88\x02\x03\xe8'
+
+ def test_pong_without_payload(self):
+ proto = fp.FrameProtocol(client=False, extensions=[])
+ data = proto.pong()
+ assert data == b'\x8a\x00'
+
+ def test_pong_with_payload(self):
+ proto = fp.FrameProtocol(client=False, extensions=[])
+ payload = u'¯\_(ツ)_/¯'.encode('utf8')
+ data = proto.pong(payload)
+ assert data == b'\x8a' + bytearray([len(payload)]) + payload
+
+ def test_single_short_binary_data(self):
+ proto = fp.FrameProtocol(client=False, extensions=[])
+ payload = b"it's all just ascii, right?"
+ data = proto.send_data(payload, fin=True)
+ assert data == b'\x82' + bytearray([len(payload)]) + payload
+
+ def test_single_short_text_data(self):
+ proto = fp.FrameProtocol(client=False, extensions=[])
+ payload = u'😃😄🙃😉'
+ data = proto.send_data(payload, fin=True)
+ payload = payload.encode('utf8')
+ assert data == b'\x81' + bytearray([len(payload)]) + payload
+
+ def test_multiple_short_binary_data(self):
+ proto = fp.FrameProtocol(client=False, extensions=[])
+ payload = b"it's all just ascii, right?"
+ data = proto.send_data(payload, fin=False)
+ assert data == b'\x02' + bytearray([len(payload)]) + payload
+
+ payload = b'sure no worries'
+ data = proto.send_data(payload, fin=True)
+ assert data == b'\x80' + bytearray([len(payload)]) + payload
+
+ def test_multiple_short_text_data(self):
+ proto = fp.FrameProtocol(client=False, extensions=[])
+ payload = u'😃😄🙃😉'
+ data = proto.send_data(payload, fin=False)
+ payload = payload.encode('utf8')
+ assert data == b'\x01' + bytearray([len(payload)]) + payload
+
+ payload = u'🙈🙉🙊'
+ data = proto.send_data(payload, fin=True)
+ payload = payload.encode('utf8')
+ assert data == b'\x80' + bytearray([len(payload)]) + payload
+
+ def test_mismatched_data_messages1(self):
+ proto = fp.FrameProtocol(client=False, extensions=[])
+ payload = u'😃😄🙃😉'
+ data = proto.send_data(payload, fin=False)
+ payload = payload.encode('utf8')
+ assert data == b'\x01' + bytearray([len(payload)]) + payload
+
+ payload = b'seriously, all ascii'
+ with pytest.raises(TypeError):
+ proto.send_data(payload)
+
+ def test_mismatched_data_messages2(self):
+ proto = fp.FrameProtocol(client=False, extensions=[])
+ payload = b"it's all just ascii, right?"
+ data = proto.send_data(payload, fin=False)
+ assert data == b'\x02' + bytearray([len(payload)]) + payload
+
+ payload = u'✔️☑️✅✔︎☑'
+ with pytest.raises(TypeError):
+ proto.send_data(payload)
+
+ def test_message_length_max_short(self):
+ proto = fp.FrameProtocol(client=False, extensions=[])
+ payload = b'x' * 125
+ data = proto.send_data(payload, fin=True)
+ assert data == b'\x82' + bytearray([len(payload)]) + payload
+
+ def test_message_length_min_two_byte(self):
+ proto = fp.FrameProtocol(client=False, extensions=[])
+ payload = b'x' * 126
+ data = proto.send_data(payload, fin=True)
+ assert data == b'\x82\x7e' + struct.pack('!H', len(payload)) + payload
+
+ def test_message_length_max_two_byte(self):
+ proto = fp.FrameProtocol(client=False, extensions=[])
+ payload = b'x' * (2 ** 16 - 1)
+ data = proto.send_data(payload, fin=True)
+ assert data == b'\x82\x7e' + struct.pack('!H', len(payload)) + payload
+
+ def test_message_length_min_eight_byte(self):
+ proto = fp.FrameProtocol(client=False, extensions=[])
+ payload = b'x' * (2 ** 16)
+ data = proto.send_data(payload, fin=True)
+ assert data == b'\x82\x7f' + struct.pack('!Q', len(payload)) + payload
+
+ def test_client_side_masking_short_frame(self):
+ proto = fp.FrameProtocol(client=True, extensions=[])
+ payload = b'x' * 125
+ data = proto.send_data(payload, fin=True)
+ assert data[0] == 0x82
+ assert struct.unpack('!B', data[1:2])[0] == len(payload) | 0x80
+ masking_key = data[2:6]
+ maskbytes = itertools.cycle(masking_key)
+ assert data[6:] == \
+ bytearray(b ^ next(maskbytes) for b in bytearray(payload))
+
+ def test_client_side_masking_two_byte_frame(self):
+ proto = fp.FrameProtocol(client=True, extensions=[])
+ payload = b'x' * 126
+ data = proto.send_data(payload, fin=True)
+ assert data[0] == 0x82
+ assert data[1] == 0xfe
+ assert struct.unpack('!H', data[2:4])[0] == len(payload)
+ masking_key = data[4:8]
+ maskbytes = itertools.cycle(masking_key)
+ assert data[8:] == \
+ bytearray(b ^ next(maskbytes) for b in bytearray(payload))
+
+ def test_client_side_masking_eight_byte_frame(self):
+ proto = fp.FrameProtocol(client=True, extensions=[])
+ payload = b'x' * 65536
+ data = proto.send_data(payload, fin=True)
+ assert data[0] == 0x82
+ assert data[1] == 0xff
+ assert struct.unpack('!Q', data[2:10])[0] == len(payload)
+ masking_key = data[10:14]
+ maskbytes = itertools.cycle(masking_key)
+ assert data[14:] == \
+ bytearray(b ^ next(maskbytes) for b in bytearray(payload))
+
+ def test_control_frame_with_overly_long_payload(self):
+ proto = fp.FrameProtocol(client=False, extensions=[])
+ payload = b'x' * 126
+
+ with pytest.raises(ValueError):
+ proto.pong(payload)
+
+ def test_data_we_have_no_idea_what_to_do_with(self):
+ proto = fp.FrameProtocol(client=False, extensions=[])
+ payload = dict()
+
+ with pytest.raises(ValueError):
+ proto.send_data(payload)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 3
} | 0.10 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | coverage==7.8.0
exceptiongroup==1.2.2
h11==0.7.0
iniconfig==2.1.0
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
pytest-cov==6.0.0
tomli==2.2.1
-e git+https://github.com/python-hyper/wsproto.git@f74bd34194d496b690246dfa78dcf133b6f8be34#egg=wsproto
| name: wsproto
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- coverage==7.8.0
- exceptiongroup==1.2.2
- h11==0.7.0
- iniconfig==2.1.0
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- pytest-cov==6.0.0
- tomli==2.2.1
prefix: /opt/conda/envs/wsproto
| [
"test/test_frame_protocol.py::TestMessageDecoder::test_text_message_hits_validator",
"test/test_frame_protocol.py::TestMessageDecoder::test_message_validation_failure_fails_properly",
"test/test_frame_protocol.py::TestMessageDecoder::test_message_validation_finish_on_incomplete",
"test/test_frame_protocol.py::TestFrameProtocolReceive::test_close_reasons_get_utf8_validated",
"test/test_frame_protocol.py::TestFrameProtocolReceive::test_close_reason_failing_validation_fails",
"test/test_frame_protocol.py::TestFrameProtocolReceive::test_close_reason_with_incomplete_utf8_fails",
"test/test_frame_protocol.py::TestFrameProtocolSend::test_pong_without_payload",
"test/test_frame_protocol.py::TestFrameProtocolSend::test_data_we_have_no_idea_what_to_do_with"
]
| []
| [
"test/test_frame_protocol.py::TestBuffer::test_consume_at_most_zero_bytes",
"test/test_frame_protocol.py::TestBuffer::test_consume_at_most_with_no_data",
"test/test_frame_protocol.py::TestBuffer::test_consume_at_most_with_sufficient_data",
"test/test_frame_protocol.py::TestBuffer::test_consume_at_most_with_more_than_sufficient_data",
"test/test_frame_protocol.py::TestBuffer::test_consume_at_most_with_insufficient_data",
"test/test_frame_protocol.py::TestBuffer::test_consume_exactly_with_sufficient_data",
"test/test_frame_protocol.py::TestBuffer::test_consume_exactly_with_more_than_sufficient_data",
"test/test_frame_protocol.py::TestBuffer::test_consume_exactly_with_insufficient_data",
"test/test_frame_protocol.py::TestBuffer::test_feed",
"test/test_frame_protocol.py::TestBuffer::test_rollback",
"test/test_frame_protocol.py::TestBuffer::test_commit",
"test/test_frame_protocol.py::TestBuffer::test_length",
"test/test_frame_protocol.py::TestMessageDecoder::test_single_binary_frame",
"test/test_frame_protocol.py::TestMessageDecoder::test_follow_on_binary_frame",
"test/test_frame_protocol.py::TestMessageDecoder::test_single_text_frame",
"test/test_frame_protocol.py::TestMessageDecoder::test_follow_on_text_frame",
"test/test_frame_protocol.py::TestMessageDecoder::test_final_text_frame",
"test/test_frame_protocol.py::TestMessageDecoder::test_start_with_continuation",
"test/test_frame_protocol.py::TestMessageDecoder::test_missing_continuation_1",
"test/test_frame_protocol.py::TestMessageDecoder::test_missing_continuation_2",
"test/test_frame_protocol.py::TestMessageDecoder::test_incomplete_unicode",
"test/test_frame_protocol.py::TestMessageDecoder::test_not_even_unicode",
"test/test_frame_protocol.py::TestMessageDecoder::test_bad_unicode",
"test/test_frame_protocol.py::TestMessageDecoder::test_split_message",
"test/test_frame_protocol.py::TestMessageDecoder::test_split_unicode_message",
"test/test_frame_protocol.py::TestMessageDecoder::test_message_validation_unfinished_on_incomplete",
"test/test_frame_protocol.py::TestMessageDecoder::test_message_no_validation_can_still_fail",
"test/test_frame_protocol.py::TestFrameDecoder::test_zero_length_message",
"test/test_frame_protocol.py::TestFrameDecoder::test_short_server_message_frame",
"test/test_frame_protocol.py::TestFrameDecoder::test_short_client_message_frame",
"test/test_frame_protocol.py::TestFrameDecoder::test_reject_masked_server_frame",
"test/test_frame_protocol.py::TestFrameDecoder::test_reject_unmasked_client_frame",
"test/test_frame_protocol.py::TestFrameDecoder::test_reject_bad_opcode",
"test/test_frame_protocol.py::TestFrameDecoder::test_reject_unfinished_control_frame",
"test/test_frame_protocol.py::TestFrameDecoder::test_reject_reserved_bits",
"test/test_frame_protocol.py::TestFrameDecoder::test_long_message_frame",
"test/test_frame_protocol.py::TestFrameDecoder::test_very_long_message_frame",
"test/test_frame_protocol.py::TestFrameDecoder::test_insufficiently_long_message_frame",
"test/test_frame_protocol.py::TestFrameDecoder::test_insufficiently_very_long_message_frame",
"test/test_frame_protocol.py::TestFrameDecoder::test_very_insufficiently_very_long_message_frame",
"test/test_frame_protocol.py::TestFrameDecoder::test_not_enough_for_header",
"test/test_frame_protocol.py::TestFrameDecoder::test_not_enough_for_long_length",
"test/test_frame_protocol.py::TestFrameDecoder::test_not_enough_for_very_long_length",
"test/test_frame_protocol.py::TestFrameDecoder::test_eight_byte_length_with_msb_set",
"test/test_frame_protocol.py::TestFrameDecoder::test_not_enough_for_mask",
"test/test_frame_protocol.py::TestFrameDecoder::test_partial_message_frames",
"test/test_frame_protocol.py::TestFrameDecoder::test_partial_control_frame",
"test/test_frame_protocol.py::TestFrameDecoder::test_long_message_sliced",
"test/test_frame_protocol.py::TestFrameDecoder::test_overly_long_control_frame",
"test/test_frame_protocol.py::TestFrameDecoderExtensions::test_rsv_bit",
"test/test_frame_protocol.py::TestFrameDecoderExtensions::test_wrong_rsv_bit",
"test/test_frame_protocol.py::TestFrameDecoderExtensions::test_header_error_handling",
"test/test_frame_protocol.py::TestFrameDecoderExtensions::test_payload_processing",
"test/test_frame_protocol.py::TestFrameDecoderExtensions::test_no_payload_processing_when_not_wanted",
"test/test_frame_protocol.py::TestFrameDecoderExtensions::test_payload_error_handling",
"test/test_frame_protocol.py::TestFrameDecoderExtensions::test_frame_completion",
"test/test_frame_protocol.py::TestFrameDecoderExtensions::test_no_frame_completion_when_not_wanted",
"test/test_frame_protocol.py::TestFrameDecoderExtensions::test_completion_error_handling",
"test/test_frame_protocol.py::TestFrameDecoderExtensions::test_outbound_handling_single_frame",
"test/test_frame_protocol.py::TestFrameDecoderExtensions::test_outbound_handling_multiple_frames",
"test/test_frame_protocol.py::TestFrameProtocolReceive::test_long_text_message",
"test/test_frame_protocol.py::TestFrameProtocolReceive::test_close_no_code",
"test/test_frame_protocol.py::TestFrameProtocolReceive::test_close_one_byte_code",
"test/test_frame_protocol.py::TestFrameProtocolReceive::test_close_bad_code",
"test/test_frame_protocol.py::TestFrameProtocolReceive::test_close_unknown_code",
"test/test_frame_protocol.py::TestFrameProtocolReceive::test_close_local_only_code",
"test/test_frame_protocol.py::TestFrameProtocolReceive::test_close_no_payload",
"test/test_frame_protocol.py::TestFrameProtocolReceive::test_close_easy_payload",
"test/test_frame_protocol.py::TestFrameProtocolReceive::test_close_utf8_payload",
"test/test_frame_protocol.py::TestFrameProtocolReceive::test_close_bad_utf8_payload",
"test/test_frame_protocol.py::TestFrameProtocolReceive::test_close_incomplete_utf8_payload",
"test/test_frame_protocol.py::TestFrameProtocolReceive::test_random_control_frame",
"test/test_frame_protocol.py::TestFrameProtocolReceive::test_close_no_validation",
"test/test_frame_protocol.py::TestFrameProtocolReceive::test_close_no_validation_can_still_fail",
"test/test_frame_protocol.py::TestFrameProtocolSend::test_simplest_possible_close",
"test/test_frame_protocol.py::TestFrameProtocolSend::test_unreasoning_close",
"test/test_frame_protocol.py::TestFrameProtocolSend::test_reasoned_close",
"test/test_frame_protocol.py::TestFrameProtocolSend::test_overly_reasoned_close",
"test/test_frame_protocol.py::TestFrameProtocolSend::test_reasoned_but_uncoded_close",
"test/test_frame_protocol.py::TestFrameProtocolSend::test_local_only_close_reason",
"test/test_frame_protocol.py::TestFrameProtocolSend::test_pong_with_payload",
"test/test_frame_protocol.py::TestFrameProtocolSend::test_single_short_binary_data",
"test/test_frame_protocol.py::TestFrameProtocolSend::test_single_short_text_data",
"test/test_frame_protocol.py::TestFrameProtocolSend::test_multiple_short_binary_data",
"test/test_frame_protocol.py::TestFrameProtocolSend::test_multiple_short_text_data",
"test/test_frame_protocol.py::TestFrameProtocolSend::test_mismatched_data_messages1",
"test/test_frame_protocol.py::TestFrameProtocolSend::test_mismatched_data_messages2",
"test/test_frame_protocol.py::TestFrameProtocolSend::test_message_length_max_short",
"test/test_frame_protocol.py::TestFrameProtocolSend::test_message_length_min_two_byte",
"test/test_frame_protocol.py::TestFrameProtocolSend::test_message_length_max_two_byte",
"test/test_frame_protocol.py::TestFrameProtocolSend::test_message_length_min_eight_byte",
"test/test_frame_protocol.py::TestFrameProtocolSend::test_client_side_masking_short_frame",
"test/test_frame_protocol.py::TestFrameProtocolSend::test_client_side_masking_two_byte_frame",
"test/test_frame_protocol.py::TestFrameProtocolSend::test_client_side_masking_eight_byte_frame",
"test/test_frame_protocol.py::TestFrameProtocolSend::test_control_frame_with_overly_long_payload"
]
| []
| MIT License | 1,323 | [
"wsproto/compat.py",
"wsproto/frame_protocol.py",
"tox.ini"
]
| [
"wsproto/compat.py",
"wsproto/frame_protocol.py",
"tox.ini"
]
|
|
google__mobly-227 | caffb80efb51ec19f73fcb334ada67bdeac1d390 | 2017-06-02 07:17:53 | 31dcff279d4808e011f6af8ab0661b9750357cda | dthkao:
Review status: 0 of 2 files reviewed at latest revision, 2 unresolved discussions.
---
*[mobly/controllers/android_device_lib/snippet_client.py, line 189 at r1](https://reviewable.io:443/reviews/google/mobly/227#-KlblfMBHGUVsYsb35BX:-KlblfMC2k4pyw9i44TT:b879d2f) ([raw file](https://github.com/google/mobly/blob/3606e54d198968fa807da58991817244a73b1fb5/mobly/controllers/android_device_lib/snippet_client.py#L189)):*
> ```Python
> self._adb.forward(
> ['tcp:%d' % self.host_port,
> 'tcp:%d' % self.device_port])
> ```
this was the autoformatter?
---
*[mobly/controllers/android_device_lib/snippet_client.py, line 221 at r1](https://reviewable.io:443/reviews/google/mobly/227#-KlblqEfvw26Ok0DC1qe:-KlblqEfvw26Ok0DC1qf:botn7lo) ([raw file](https://github.com/google/mobly/blob/3606e54d198968fa807da58991817244a73b1fb5/mobly/controllers/android_device_lib/snippet_client.py#L221)):*
> ```Python
>
> def _read_line(self):
> while True:
> ```
what if we never get an acceptable line?
---
*Comments from [Reviewable](https://reviewable.io:443/reviews/google/mobly/227)*
<!-- Sent from Reviewable.io -->
adorokhine:
Review status: 0 of 2 files reviewed at latest revision, 2 unresolved discussions.
---
*[mobly/controllers/android_device_lib/snippet_client.py, line 189 at r1](https://reviewable.io:443/reviews/google/mobly/227#-KlblfMBHGUVsYsb35BX:-Klbm9igg1oyi650B6wP:b2nhtan) ([raw file](https://github.com/google/mobly/blob/3606e54d198968fa807da58991817244a73b1fb5/mobly/controllers/android_device_lib/snippet_client.py#L189)):*
<details><summary><i>Previously, dthkao (David T.H. Kao) wrote…</i></summary><blockquote>
this was the autoformatter?
</blockquote></details>
Yup
---
*[mobly/controllers/android_device_lib/snippet_client.py, line 221 at r1](https://reviewable.io:443/reviews/google/mobly/227#-KlblqEfvw26Ok0DC1qe:-KlboFdOyj5NPihjPUAj:b-896fix) ([raw file](https://github.com/google/mobly/blob/3606e54d198968fa807da58991817244a73b1fb5/mobly/controllers/android_device_lib/snippet_client.py#L221)):*
<details><summary><i>Previously, dthkao (David T.H. Kao) wrote…</i></summary><blockquote>
what if we never get an acceptable line?
</blockquote></details>
Done.
---
*Comments from [Reviewable](https://reviewable.io:443/reviews/google/mobly/227)*
<!-- Sent from Reviewable.io -->
dthkao: <img class="emoji" title=":lgtm:" alt=":lgtm:" align="absmiddle" src="https://reviewable.io/lgtm.png" height="20" width="61"/>
---
Review status: 0 of 2 files reviewed at latest revision, 2 unresolved discussions.
---
*Comments from [Reviewable](https://reviewable.io:443/reviews/google/mobly/227#-:-Klbp4-Zgil5GlAXMhnK:bnfp4nl)*
<!-- Sent from Reviewable.io -->
xpconanfan:
Review status: 0 of 2 files reviewed at latest revision, 3 unresolved discussions.
---
*[mobly/controllers/android_device_lib/snippet_client.py, line 227 at r2](https://reviewable.io:443/reviews/google/mobly/227#-KldwJjmq90v1vEAzqiZ:-KldwJjnZt2xIGaxbdo0:bn32ide) ([raw file](https://github.com/google/mobly/blob/f618e1ff6ce4ee08d35e28c5b5dfb9192d347e5c/mobly/controllers/android_device_lib/snippet_client.py#L227)):*
> ```Python
> 'Unexpected EOF waiting for app to start')
> line = line.strip()
> if (line.startswith('INSTRUMENTATION_RESULT:') or
> ```
So this readline function is really only looking for a couple of particular lines?
Could we rename the function to reflect this and add docstring?
`_read_line` sounds like a generic function that reads any line.
---
*Comments from [Reviewable](https://reviewable.io:443/reviews/google/mobly/227)*
<!-- Sent from Reviewable.io -->
xpconanfan:
Review status: 0 of 2 files reviewed at latest revision, 4 unresolved discussions.
---
*[mobly/controllers/android_device_lib/snippet_client.py, line 226 at r2](https://reviewable.io:443/reviews/google/mobly/227#-KldxLWGMgBkeXLklRXg:-KldxLWGMgBkeXLklRXh:b-3b3kof) ([raw file](https://github.com/google/mobly/blob/f618e1ff6ce4ee08d35e28c5b5dfb9192d347e5c/mobly/controllers/android_device_lib/snippet_client.py#L226)):*
> ```Python
> raise jsonrpc_client_base.AppStartError(
> 'Unexpected EOF waiting for app to start')
> line = line.strip()
> ```
Shouldn't we strip before we do `not line` check?
---
*Comments from [Reviewable](https://reviewable.io:443/reviews/google/mobly/227)*
<!-- Sent from Reviewable.io -->
adorokhine:
Review status: 0 of 2 files reviewed at latest revision, 4 unresolved discussions.
---
*[mobly/controllers/android_device_lib/snippet_client.py, line 226 at r2](https://reviewable.io:443/reviews/google/mobly/227#-KldxLWGMgBkeXLklRXg:-Kle85GDasmQERQlc-4K:bwju1v5) ([raw file](https://github.com/google/mobly/blob/f618e1ff6ce4ee08d35e28c5b5dfb9192d347e5c/mobly/controllers/android_device_lib/snippet_client.py#L226)):*
<details><summary><i>Previously, xpconanfan (Ang Li) wrote…</i></summary><blockquote>
Shouldn't we strip before we do `not line` check?
</blockquote></details>
Actually we can't, because readline() returns '' for EOF and '\n' for regular empty lines. So if we strip before the 'not line' check, we would consider any empty line in the output to be the same as EOF. Added a comment to this effect.
---
*[mobly/controllers/android_device_lib/snippet_client.py, line 227 at r2](https://reviewable.io:443/reviews/google/mobly/227#-KldwJjmq90v1vEAzqiZ:-Kle84qz-_723Td_9_EB:b-896fix) ([raw file](https://github.com/google/mobly/blob/f618e1ff6ce4ee08d35e28c5b5dfb9192d347e5c/mobly/controllers/android_device_lib/snippet_client.py#L227)):*
<details><summary><i>Previously, xpconanfan (Ang Li) wrote…</i></summary><blockquote>
So this readline function is really only looking for a couple of particular lines?
Could we rename the function to reflect this and add docstring?
`_read_line` sounds like a generic function that reads any line.
</blockquote></details>
Done.
---
*Comments from [Reviewable](https://reviewable.io:443/reviews/google/mobly/227)*
<!-- Sent from Reviewable.io -->
xpconanfan: <img class="emoji" title=":lgtm:" alt=":lgtm:" align="absmiddle" src="https://reviewable.io/lgtm.png" height="20" width="61"/>
---
Review status: 0 of 2 files reviewed at latest revision, 2 unresolved discussions.
---
*Comments from [Reviewable](https://reviewable.io:443/reviews/google/mobly/227#-:-KleGSHQZjebDqg7QOzd:bnfp4nl)*
<!-- Sent from Reviewable.io -->
| diff --git a/mobly/controllers/android_device_lib/snippet_client.py b/mobly/controllers/android_device_lib/snippet_client.py
index 4eb15e8..769140f 100644
--- a/mobly/controllers/android_device_lib/snippet_client.py
+++ b/mobly/controllers/android_device_lib/snippet_client.py
@@ -1,11 +1,11 @@
# Copyright 2016 Google Inc.
-#
+#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
+#
# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -42,7 +42,7 @@ class Error(Exception):
pass
-class ProtocolVersionError(Error):
+class ProtocolVersionError(jsonrpc_client_base.AppStartError):
"""Raised when the protocol reported by the snippet is unknown."""
@@ -92,7 +92,7 @@ class SnippetClient(jsonrpc_client_base.JsonRpcClientBase):
# just warn and retry as v0.
# TODO(adorokhine): delete this in Mobly 1.6 when snippet v0 support is
# removed.
- line = self._read_line()
+ line = self._read_protocol_line()
if line in ('INSTRUMENTATION_RESULT: shortMsg=Process crashed.',
'INSTRUMENTATION_RESULT: shortMsg='
'java.lang.IllegalArgumentException'):
@@ -185,7 +185,8 @@ class SnippetClient(jsonrpc_client_base.JsonRpcClientBase):
def _connect_to_v0(self):
self.device_port = self.host_port
self._adb.forward(
- ['tcp:%d' % self.host_port, 'tcp:%d' % self.device_port])
+ ['tcp:%d' % self.host_port,
+ 'tcp:%d' % self.device_port])
start_time = time.time()
expiration_time = start_time + _APP_START_WAIT_TIME_V0
while time.time() < expiration_time:
@@ -203,19 +204,46 @@ class SnippetClient(jsonrpc_client_base.JsonRpcClientBase):
'%s failed to start on %s.' % (self.package, self._adb.serial))
def _connect_to_v1(self):
- line = self._read_line()
+ line = self._read_protocol_line()
match = re.match('^SNIPPET SERVING, PORT ([0-9]+)$', line)
if not match:
- raise ProtocolVersionError(line)
+ raise jsonrpc_client_base.AppStartError(line)
self.device_port = int(match.group(1))
# Forward the device port to a new host port, and connect to that port
self.host_port = utils.get_available_host_port()
self._adb.forward(
- ['tcp:%d' % self.host_port, 'tcp:%d' % self.device_port])
+ ['tcp:%d' % self.host_port,
+ 'tcp:%d' % self.device_port])
self.connect()
- def _read_line(self):
- line = self._proc.stdout.readline().rstrip()
- self.log.debug('Read line from instrumentation output: "%s"', line)
- return line
+ def _read_protocol_line(self):
+ """Reads the next line of instrumentation output relevant to snippets.
+
+ This method will skip over lines that don't start with 'SNIPPET' or
+ 'INSTRUMENTATION_RESULT'.
+
+ Returns:
+ (str) Next line of snippet-related instrumentation output, stripped.
+
+ Raises:
+ jsonrpc_client_base.AppStartError: If EOF is reached without any
+ protocol lines being read.
+ """
+ while True:
+ line = self._proc.stdout.readline().decode('utf-8')
+ if not line:
+ raise jsonrpc_client_base.AppStartError(
+ 'Unexpected EOF waiting for app to start')
+ # readline() uses an empty string to mark EOF, and a single newline
+ # to mark regular empty lines in the output. Don't move the strip()
+ # call above the truthiness check, or this method will start
+ # considering any blank output line to be EOF.
+ line = line.strip()
+ if (line.startswith('INSTRUMENTATION_RESULT:') or
+ line.startswith('SNIPPET ')):
+ self.log.debug(
+ 'Accepted line from instrumentation output: "%s"', line)
+ return line
+ self.log.debug('Discarded line from instrumentation output: "%s"',
+ line)
| Snippet crashes on phones that print extra instrumentation output.
Some phones print extra information prior to instrumentation output. On such phones, the snippet crashes. | google/mobly | diff --git a/tests/mobly/controllers/android_device_lib/snippet_client_test.py b/tests/mobly/controllers/android_device_lib/snippet_client_test.py
index 911f53e..244be5f 100755
--- a/tests/mobly/controllers/android_device_lib/snippet_client_test.py
+++ b/tests/mobly/controllers/android_device_lib/snippet_client_test.py
@@ -1,11 +1,11 @@
# Copyright 2017 Google Inc.
-#
+#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
+#
# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -120,10 +120,129 @@ class SnippetClientTest(jsonrpc_client_test_base.JsonRpcClientTestBase):
with self.assertRaisesRegexp(jsonrpc_client_base.ApiError, '1'):
callback.getAll('eventName')
+ @mock.patch('socket.create_connection')
+ @mock.patch('mobly.controllers.android_device_lib.snippet_client.'
+ 'utils.start_standing_subprocess')
+ @mock.patch('mobly.controllers.android_device_lib.snippet_client.'
+ 'utils.get_available_host_port')
+ def test_snippet_start_app_and_connect_v1(self, mock_get_port,
+ mock_start_standing_subprocess,
+ mock_create_connection):
+ self.setup_mock_socket_file(mock_create_connection)
+ self._setup_mock_instrumentation_cmd(
+ mock_start_standing_subprocess,
+ resp_lines=[
+ b'SNIPPET START, PROTOCOL 1 0\n',
+ b'SNIPPET SERVING, PORT 123\n',
+ ])
+ client = self._make_client()
+ client.start_app_and_connect()
+ self.assertEqual(123, client.device_port)
+
+ @mock.patch('socket.create_connection')
+ @mock.patch('mobly.controllers.android_device_lib.snippet_client.'
+ 'utils.start_standing_subprocess')
+ @mock.patch('mobly.controllers.android_device_lib.snippet_client.'
+ 'utils.get_available_host_port')
+ def test_snippet_start_app_and_connect_v0(self, mock_get_port,
+ mock_start_standing_subprocess,
+ mock_create_connection):
+ mock_get_port.return_value = 456
+ self.setup_mock_socket_file(mock_create_connection)
+ self._setup_mock_instrumentation_cmd(
+ mock_start_standing_subprocess,
+ resp_lines=[b'INSTRUMENTATION_RESULT: shortMsg=Process crashed.\n'])
+ client = self._make_client()
+ client.start_app_and_connect()
+ self.assertEqual(456, client.device_port)
+
+ @mock.patch('mobly.controllers.android_device_lib.snippet_client.'
+ 'utils.start_standing_subprocess')
+ def test_snippet_start_app_and_connect_unknown_protocol(
+ self, mock_start_standing_subprocess):
+ self._setup_mock_instrumentation_cmd(
+ mock_start_standing_subprocess,
+ resp_lines=[b'SNIPPET START, PROTOCOL 99 0\n'])
+ client = self._make_client()
+ with self.assertRaises(snippet_client.ProtocolVersionError):
+ client.start_app_and_connect()
+
+ @mock.patch('socket.create_connection')
+ @mock.patch('mobly.controllers.android_device_lib.snippet_client.'
+ 'utils.start_standing_subprocess')
+ @mock.patch('mobly.controllers.android_device_lib.snippet_client.'
+ 'utils.get_available_host_port')
+ def test_snippet_start_app_and_connect_v1_header_junk(
+ self, mock_get_port, mock_start_standing_subprocess,
+ mock_create_connection):
+ self.setup_mock_socket_file(mock_create_connection)
+ self._setup_mock_instrumentation_cmd(
+ mock_start_standing_subprocess,
+ resp_lines=[
+ b'This is some header junk\n',
+ b'Some phones print arbitrary output\n',
+ b'SNIPPET START, PROTOCOL 1 0\n',
+ b'Maybe in the middle too\n',
+ b'SNIPPET SERVING, PORT 123\n',
+ ])
+ client = self._make_client()
+ client.start_app_and_connect()
+ self.assertEqual(123, client.device_port)
+
+ @mock.patch('socket.create_connection')
+ @mock.patch('mobly.controllers.android_device_lib.snippet_client.'
+ 'utils.start_standing_subprocess')
+ @mock.patch('mobly.controllers.android_device_lib.snippet_client.'
+ 'utils.get_available_host_port')
+ def test_snippet_start_app_and_connect_v0_header_junk(
+ self, mock_get_port, mock_start_standing_subprocess,
+ mock_create_connection):
+ mock_get_port.return_value = 456
+ self.setup_mock_socket_file(mock_create_connection)
+ self._setup_mock_instrumentation_cmd(
+ mock_start_standing_subprocess,
+ resp_lines=[
+ b'This is some header junk\n',
+ b'Some phones print arbitrary output\n',
+ b'\n',
+ b'INSTRUMENTATION_RESULT: shortMsg=Process crashed.\n',
+ ])
+ client = self._make_client()
+ client.start_app_and_connect()
+ self.assertEqual(456, client.device_port)
+
+ @mock.patch('socket.create_connection')
+ @mock.patch('mobly.controllers.android_device_lib.snippet_client.'
+ 'utils.start_standing_subprocess')
+ @mock.patch('mobly.controllers.android_device_lib.snippet_client.'
+ 'utils.get_available_host_port')
+ def test_snippet_start_app_and_connect_no_valid_line(
+ self, mock_get_port, mock_start_standing_subprocess,
+ mock_create_connection):
+ mock_get_port.return_value = 456
+ self.setup_mock_socket_file(mock_create_connection)
+ self._setup_mock_instrumentation_cmd(
+ mock_start_standing_subprocess,
+ resp_lines=[
+ b'This is some header junk\n',
+ b'Some phones print arbitrary output\n',
+ b'', # readline uses '' to mark EOF
+ ])
+ client = self._make_client()
+ with self.assertRaisesRegexp(
+ jsonrpc_client_base.AppStartError,
+ 'Unexpected EOF waiting for app to start'):
+ client.start_app_and_connect()
+
def _make_client(self, adb_proxy=MockAdbProxy()):
return snippet_client.SnippetClient(
package=MOCK_PACKAGE_NAME, adb_proxy=adb_proxy)
+ def _setup_mock_instrumentation_cmd(self, mock_start_standing_subprocess,
+ resp_lines):
+ mock_proc = mock_start_standing_subprocess()
+ mock_proc.stdout.readline.side_effect = resp_lines
+
if __name__ == "__main__":
unittest.main()
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_hunks",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 2,
"test_score": 3
},
"num_modified_files": 1
} | 1.4 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y adb"
],
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
future==1.0.0
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
-e git+https://github.com/google/mobly.git@caffb80efb51ec19f73fcb334ada67bdeac1d390#egg=mobly
mock==1.0.1
packaging @ file:///croot/packaging_1734472117206/work
pluggy @ file:///croot/pluggy_1733169602837/work
portpicker==1.6.0
psutil==7.0.0
pytest @ file:///croot/pytest_1738938843180/work
pytz==2025.2
PyYAML==6.0.2
timeout-decorator==0.5.0
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
| name: mobly
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- future==1.0.0
- mock==1.0.1
- portpicker==1.6.0
- psutil==7.0.0
- pytz==2025.2
- pyyaml==6.0.2
- timeout-decorator==0.5.0
prefix: /opt/conda/envs/mobly
| [
"tests/mobly/controllers/android_device_lib/snippet_client_test.py::SnippetClientTest::test_snippet_start_app_and_connect_no_valid_line",
"tests/mobly/controllers/android_device_lib/snippet_client_test.py::SnippetClientTest::test_snippet_start_app_and_connect_unknown_protocol",
"tests/mobly/controllers/android_device_lib/snippet_client_test.py::SnippetClientTest::test_snippet_start_app_and_connect_v0",
"tests/mobly/controllers/android_device_lib/snippet_client_test.py::SnippetClientTest::test_snippet_start_app_and_connect_v0_header_junk",
"tests/mobly/controllers/android_device_lib/snippet_client_test.py::SnippetClientTest::test_snippet_start_app_and_connect_v1",
"tests/mobly/controllers/android_device_lib/snippet_client_test.py::SnippetClientTest::test_snippet_start_app_and_connect_v1_header_junk"
]
| []
| [
"tests/mobly/controllers/android_device_lib/snippet_client_test.py::SnippetClientTest::test_check_app_installed_fail_app_not_installed",
"tests/mobly/controllers/android_device_lib/snippet_client_test.py::SnippetClientTest::test_check_app_installed_fail_not_instrumented",
"tests/mobly/controllers/android_device_lib/snippet_client_test.py::SnippetClientTest::test_check_app_installed_fail_target_not_installed",
"tests/mobly/controllers/android_device_lib/snippet_client_test.py::SnippetClientTest::test_check_app_installed_normal",
"tests/mobly/controllers/android_device_lib/snippet_client_test.py::SnippetClientTest::test_snippet_start",
"tests/mobly/controllers/android_device_lib/snippet_client_test.py::SnippetClientTest::test_snippet_start_event_client"
]
| []
| Apache License 2.0 | 1,324 | [
"mobly/controllers/android_device_lib/snippet_client.py"
]
| [
"mobly/controllers/android_device_lib/snippet_client.py"
]
|
aio-libs__aiosmtpd-106 | 2ec2b40aab6288baf2153672aae51800d3e73765 | 2017-06-02 08:59:24 | b87538bc1fc0137b5d188db938c9b386c71683a3 | diff --git a/aiosmtpd/smtp.py b/aiosmtpd/smtp.py
index f123514..287875b 100644
--- a/aiosmtpd/smtp.py
+++ b/aiosmtpd/smtp.py
@@ -284,10 +284,6 @@ class SMTP(asyncio.StreamReaderProtocol):
if not hostname:
await self.push('501 Syntax: HELO hostname')
return
- # See issue #21783 for a discussion of this behavior.
- if self.session.host_name:
- await self.push('503 Duplicate HELO/EHLO')
- return
self._set_rset_state()
self.session.extended_smtp = False
status = await self._call_handler_hook('HELO', hostname)
@@ -300,11 +296,6 @@ class SMTP(asyncio.StreamReaderProtocol):
if not hostname:
await self.push('501 Syntax: EHLO hostname')
return
- # See https://bugs.python.org/issue21783 for a discussion of this
- # behavior.
- if self.session.host_name:
- await self.push('503 Duplicate HELO/EHLO')
- return
self._set_rset_state()
self.session.extended_smtp = True
await self.push('250-%s' % self.hostname)
| Duplicated HELO/EHLO
As already mentioned in source code (https://github.com/aio-libs/aiosmtpd/blob/c25b878473b086150d32fcd21c35c58099947e5f/aiosmtpd/smtp.py#L264, also in original smtpd's code) a second HELO/EHLO is rejected but the standart says it should be handled as a RSET command.
Is there any special reason to not allow duplicated HELO/EHLO? I think the best way is to do the same other SMTP servers do: Allowing a duplicated HELO/EHLO.
Another option is to have a configuration option. | aio-libs/aiosmtpd | diff --git a/aiosmtpd/tests/test_smtp.py b/aiosmtpd/tests/test_smtp.py
index 8e61021..9f78c16 100644
--- a/aiosmtpd/tests/test_smtp.py
+++ b/aiosmtpd/tests/test_smtp.py
@@ -38,6 +38,15 @@ class ReceivingHandler:
return '250 OK'
+class StoreEnvelopeOnVRFYHandler:
+ """Saves envelope for later inspection when handling VRFY."""
+ envelope = None
+
+ async def handle_VRFY(self, server, session, envelope, addr):
+ self.envelope = envelope
+ return '250 OK'
+
+
class SizedController(Controller):
def __init__(self, handler, size):
self.size = size
@@ -201,8 +210,7 @@ class TestSMTP(unittest.TestCase):
code, response = client.helo('example.com')
self.assertEqual(code, 250)
code, response = client.helo('example.org')
- self.assertEqual(code, 503)
- self.assertEqual(response, b'Duplicate HELO/EHLO')
+ self.assertEqual(code, 250)
def test_ehlo(self):
with SMTP(*self.address) as client:
@@ -219,8 +227,7 @@ class TestSMTP(unittest.TestCase):
code, response = client.ehlo('example.com')
self.assertEqual(code, 250)
code, response = client.ehlo('example.org')
- self.assertEqual(code, 503)
- self.assertEqual(response, b'Duplicate HELO/EHLO')
+ self.assertEqual(code, 250)
def test_ehlo_no_hostname(self):
with SMTP(*self.address) as client:
@@ -235,16 +242,14 @@ class TestSMTP(unittest.TestCase):
code, response = client.helo('example.com')
self.assertEqual(code, 250)
code, response = client.ehlo('example.org')
- self.assertEqual(code, 503)
- self.assertEqual(response, b'Duplicate HELO/EHLO')
+ self.assertEqual(code, 250)
def test_ehlo_then_helo(self):
with SMTP(*self.address) as client:
code, response = client.ehlo('example.com')
self.assertEqual(code, 250)
code, response = client.helo('example.org')
- self.assertEqual(code, 503)
- self.assertEqual(response, b'Duplicate HELO/EHLO')
+ self.assertEqual(code, 250)
def test_noop(self):
with SMTP(*self.address) as client:
@@ -665,6 +670,80 @@ class TestSMTP(unittest.TestCase):
b'Error: command "FOOBAR" not recognized')
+class TestResetCommands(unittest.TestCase):
+ """Test that sender and recipients are reset on RSET, HELO, and EHLO.
+
+ The tests below issue each command twice with different addresses and
+ verify that mail_from and rcpt_tos have been replacecd.
+ """
+
+ expected_envelope_data = [{
+ 'mail_from': '[email protected]',
+ 'rcpt_tos': ['[email protected]',
+ '[email protected]']}, {
+ 'mail_from': '[email protected]',
+ 'rcpt_tos': ['[email protected]',
+ '[email protected]']}]
+
+ def send_envolope_data(self, client, mail_from, rcpt_tos):
+ client.mail(mail_from)
+ for rcpt in rcpt_tos:
+ client.rcpt(rcpt)
+
+ def test_helo(self):
+ handler = StoreEnvelopeOnVRFYHandler()
+ controller = DecodingController(handler)
+ controller.start()
+ self.addCleanup(controller.stop)
+
+ with SMTP(controller.hostname, controller.port) as client:
+ for data in self.expected_envelope_data:
+ client.helo('example.com')
+ client.vrfy('[email protected]') # Save envelope in handler
+ self.assertIsNone(handler.envelope.mail_from)
+ self.assertEqual(len(handler.envelope.rcpt_tos), 0)
+ self.send_envolope_data(client, **data)
+ client.vrfy('[email protected]') # Save envelope in handler
+ self.assertEqual(handler.envelope.mail_from, data['mail_from'])
+ self.assertEqual(handler.envelope.rcpt_tos, data['rcpt_tos'])
+
+ def test_ehlo(self):
+ handler = StoreEnvelopeOnVRFYHandler()
+ controller = DecodingController(handler)
+ controller.start()
+ self.addCleanup(controller.stop)
+
+ with SMTP(controller.hostname, controller.port) as client:
+ for data in self.expected_envelope_data:
+ client.ehlo('example.com')
+ client.vrfy('[email protected]') # Save envelope in handler
+ self.assertIsNone(handler.envelope.mail_from)
+ self.assertEqual(len(handler.envelope.rcpt_tos), 0)
+ self.send_envolope_data(client, **data)
+ client.vrfy('[email protected]') # Save envelope in handler
+ self.assertEqual(handler.envelope.mail_from, data['mail_from'])
+ self.assertEqual(handler.envelope.rcpt_tos, data['rcpt_tos'])
+
+ def test_rset(self):
+ handler = StoreEnvelopeOnVRFYHandler()
+ controller = DecodingController(handler)
+ controller.start()
+ self.addCleanup(controller.stop)
+
+ with SMTP(controller.hostname, controller.port) as client:
+ client.helo('example.com')
+
+ for data in self.expected_envelope_data:
+ self.send_envolope_data(client, **data)
+ client.vrfy('[email protected]') # Save envelope in handler
+ self.assertEqual(handler.envelope.mail_from, data['mail_from'])
+ self.assertEqual(handler.envelope.rcpt_tos, data['rcpt_tos'])
+ client.rset()
+ client.vrfy('[email protected]') # Save envelope in handler
+ self.assertIsNone(handler.envelope.mail_from)
+ self.assertEqual(len(handler.envelope.rcpt_tos), 0)
+
+
class TestSMTPWithController(unittest.TestCase):
def test_mail_with_size_too_large(self):
controller = SizedController(Sink(), 9999)
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 1
} | 1.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "atpublic",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | -e git+https://github.com/aio-libs/aiosmtpd.git@2ec2b40aab6288baf2153672aae51800d3e73765#egg=aiosmtpd
atpublic @ file:///home/conda/feedstock_root/build_artifacts/atpublic_1737771474411/work
exceptiongroup==1.2.2
iniconfig==2.1.0
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
tomli==2.2.1
| name: aiosmtpd
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- atpublic=5.1=pyhd8ed1ab_0
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- exceptiongroup==1.2.2
- iniconfig==2.1.0
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- tomli==2.2.1
prefix: /opt/conda/envs/aiosmtpd
| [
"aiosmtpd/tests/test_smtp.py::TestSMTP::test_ehlo_duplicate",
"aiosmtpd/tests/test_smtp.py::TestSMTP::test_ehlo_then_helo",
"aiosmtpd/tests/test_smtp.py::TestSMTP::test_helo_duplicate",
"aiosmtpd/tests/test_smtp.py::TestSMTP::test_helo_then_ehlo",
"aiosmtpd/tests/test_smtp.py::TestResetCommands::test_ehlo",
"aiosmtpd/tests/test_smtp.py::TestResetCommands::test_helo"
]
| []
| [
"aiosmtpd/tests/test_smtp.py::TestProtocol::test_empty_email",
"aiosmtpd/tests/test_smtp.py::TestProtocol::test_honors_mail_delimeters",
"aiosmtpd/tests/test_smtp.py::TestSMTP::test_data_invalid_params",
"aiosmtpd/tests/test_smtp.py::TestSMTP::test_data_no_helo",
"aiosmtpd/tests/test_smtp.py::TestSMTP::test_data_no_rcpt",
"aiosmtpd/tests/test_smtp.py::TestSMTP::test_ehlo",
"aiosmtpd/tests/test_smtp.py::TestSMTP::test_ehlo_no_hostname",
"aiosmtpd/tests/test_smtp.py::TestSMTP::test_empty_command",
"aiosmtpd/tests/test_smtp.py::TestSMTP::test_expn",
"aiosmtpd/tests/test_smtp.py::TestSMTP::test_helo",
"aiosmtpd/tests/test_smtp.py::TestSMTP::test_helo_no_hostname",
"aiosmtpd/tests/test_smtp.py::TestSMTP::test_help",
"aiosmtpd/tests/test_smtp.py::TestSMTP::test_help_bad_arg",
"aiosmtpd/tests/test_smtp.py::TestSMTP::test_help_data",
"aiosmtpd/tests/test_smtp.py::TestSMTP::test_help_ehlo",
"aiosmtpd/tests/test_smtp.py::TestSMTP::test_help_helo",
"aiosmtpd/tests/test_smtp.py::TestSMTP::test_help_mail",
"aiosmtpd/tests/test_smtp.py::TestSMTP::test_help_mail_esmtp",
"aiosmtpd/tests/test_smtp.py::TestSMTP::test_help_noop",
"aiosmtpd/tests/test_smtp.py::TestSMTP::test_help_quit",
"aiosmtpd/tests/test_smtp.py::TestSMTP::test_help_rcpt",
"aiosmtpd/tests/test_smtp.py::TestSMTP::test_help_rcpt_esmtp",
"aiosmtpd/tests/test_smtp.py::TestSMTP::test_help_rset",
"aiosmtpd/tests/test_smtp.py::TestSMTP::test_help_vrfy",
"aiosmtpd/tests/test_smtp.py::TestSMTP::test_mail_fail_parse_email",
"aiosmtpd/tests/test_smtp.py::TestSMTP::test_mail_from_malformed",
"aiosmtpd/tests/test_smtp.py::TestSMTP::test_mail_from_twice",
"aiosmtpd/tests/test_smtp.py::TestSMTP::test_mail_malformed_params_esmtp",
"aiosmtpd/tests/test_smtp.py::TestSMTP::test_mail_missing_params_esmtp",
"aiosmtpd/tests/test_smtp.py::TestSMTP::test_mail_no_arg",
"aiosmtpd/tests/test_smtp.py::TestSMTP::test_mail_no_from",
"aiosmtpd/tests/test_smtp.py::TestSMTP::test_mail_no_helo",
"aiosmtpd/tests/test_smtp.py::TestSMTP::test_mail_params_bad_syntax_esmtp",
"aiosmtpd/tests/test_smtp.py::TestSMTP::test_mail_params_esmtp",
"aiosmtpd/tests/test_smtp.py::TestSMTP::test_mail_params_no_esmtp",
"aiosmtpd/tests/test_smtp.py::TestSMTP::test_mail_unrecognized_params_esmtp",
"aiosmtpd/tests/test_smtp.py::TestSMTP::test_noop",
"aiosmtpd/tests/test_smtp.py::TestSMTP::test_noop_with_arg",
"aiosmtpd/tests/test_smtp.py::TestSMTP::test_quit",
"aiosmtpd/tests/test_smtp.py::TestSMTP::test_quit_with_arg",
"aiosmtpd/tests/test_smtp.py::TestSMTP::test_rcpt_fail_parse_email",
"aiosmtpd/tests/test_smtp.py::TestSMTP::test_rcpt_no_address",
"aiosmtpd/tests/test_smtp.py::TestSMTP::test_rcpt_no_arg",
"aiosmtpd/tests/test_smtp.py::TestSMTP::test_rcpt_no_arg_esmtp",
"aiosmtpd/tests/test_smtp.py::TestSMTP::test_rcpt_no_helo",
"aiosmtpd/tests/test_smtp.py::TestSMTP::test_rcpt_no_mail",
"aiosmtpd/tests/test_smtp.py::TestSMTP::test_rcpt_no_to",
"aiosmtpd/tests/test_smtp.py::TestSMTP::test_rcpt_with_bad_params",
"aiosmtpd/tests/test_smtp.py::TestSMTP::test_rcpt_with_params_no_esmtp",
"aiosmtpd/tests/test_smtp.py::TestSMTP::test_rcpt_with_unknown_params",
"aiosmtpd/tests/test_smtp.py::TestSMTP::test_rset",
"aiosmtpd/tests/test_smtp.py::TestSMTP::test_rset_with_arg",
"aiosmtpd/tests/test_smtp.py::TestSMTP::test_too_long_command",
"aiosmtpd/tests/test_smtp.py::TestSMTP::test_unknown_command",
"aiosmtpd/tests/test_smtp.py::TestSMTP::test_vrfy",
"aiosmtpd/tests/test_smtp.py::TestSMTP::test_vrfy_no_arg",
"aiosmtpd/tests/test_smtp.py::TestSMTP::test_vrfy_not_an_address",
"aiosmtpd/tests/test_smtp.py::TestResetCommands::test_rset",
"aiosmtpd/tests/test_smtp.py::TestSMTPWithController::test_bad_encodings",
"aiosmtpd/tests/test_smtp.py::TestSMTPWithController::test_dots_escaped",
"aiosmtpd/tests/test_smtp.py::TestSMTPWithController::test_esmtp_no_size_limit",
"aiosmtpd/tests/test_smtp.py::TestSMTPWithController::test_exception_handler_exception",
"aiosmtpd/tests/test_smtp.py::TestSMTPWithController::test_exception_handler_undescribable",
"aiosmtpd/tests/test_smtp.py::TestSMTPWithController::test_mail_invalid_body",
"aiosmtpd/tests/test_smtp.py::TestSMTPWithController::test_mail_with_compatible_smtputf8",
"aiosmtpd/tests/test_smtp.py::TestSMTPWithController::test_mail_with_incompatible_smtputf8",
"aiosmtpd/tests/test_smtp.py::TestSMTPWithController::test_mail_with_size_too_large",
"aiosmtpd/tests/test_smtp.py::TestSMTPWithController::test_mail_with_unrequited_smtputf8",
"aiosmtpd/tests/test_smtp.py::TestSMTPWithController::test_process_message_error",
"aiosmtpd/tests/test_smtp.py::TestSMTPWithController::test_too_long_message_body",
"aiosmtpd/tests/test_smtp.py::TestSMTPWithController::test_unexpected_errors",
"aiosmtpd/tests/test_smtp.py::TestSMTPWithController::test_unexpected_errors_custom_response",
"aiosmtpd/tests/test_smtp.py::TestSMTPWithController::test_unexpected_errors_unhandled",
"aiosmtpd/tests/test_smtp.py::TestCustomizations::test_custom_greeting",
"aiosmtpd/tests/test_smtp.py::TestCustomizations::test_custom_hostname",
"aiosmtpd/tests/test_smtp.py::TestCustomizations::test_default_greeting",
"aiosmtpd/tests/test_smtp.py::TestCustomizations::test_mail_invalid_body_param",
"aiosmtpd/tests/test_smtp.py::TestClientCrash::test_close_in_command",
"aiosmtpd/tests/test_smtp.py::TestClientCrash::test_close_in_data",
"aiosmtpd/tests/test_smtp.py::TestClientCrash::test_connection_reset_during_DATA",
"aiosmtpd/tests/test_smtp.py::TestClientCrash::test_connection_reset_during_command",
"aiosmtpd/tests/test_smtp.py::TestStrictASCII::test_bad_encoded_param",
"aiosmtpd/tests/test_smtp.py::TestStrictASCII::test_data",
"aiosmtpd/tests/test_smtp.py::TestStrictASCII::test_ehlo",
"aiosmtpd/tests/test_smtp.py::TestStrictASCII::test_mail_param"
]
| []
| Apache License 2.0 | 1,325 | [
"aiosmtpd/smtp.py"
]
| [
"aiosmtpd/smtp.py"
]
|
|
python-cmd2__cmd2-115 | a0d829e4105cd431198feb43534adaf953b090ba | 2017-06-03 16:12:27 | ddfd3d9a400ae81468e9abcc89fe690c30b7ec7f | diff --git a/CHANGES.md b/CHANGES.md
index 98aa2cf0..2324522f 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -1,6 +1,17 @@
News
====
+0.7.3
+-----
+
+*Release date: TBD*
+
+* Bug fixes
+ * Fixed a bug in display a span of history items when only an end index is supplied
+* Enhancements
+ * Added the ability to exclude commands from the help menu (**eof** included by default)
+ * Redundant list command removed and features merged into history command
+
0.7.2
-----
diff --git a/cmd2.py b/cmd2.py
index 4befe019..3755ef56 100755
--- a/cmd2.py
+++ b/cmd2.py
@@ -1121,9 +1121,12 @@ class Cmd(cmd.Cmd):
Commands may be terminated with: {}
Command-line arguments allowed: {}
Output redirection and pipes allowed: {}
- Settable parameters: {}\n""".format(not self.case_insensitive, str(self.terminators),
- self.allow_cli_args,
- self.allow_redirection, ' '.join(self.settable)))
+ Parsing of @options commands:
+ Use POSIX-style argument parser (vs Windows): {}
+ Strip Quotes when using Windows-style argument parser: {}
+ Use a list of arguments instead of a single argument string: {}
+ \n""".format(not self.case_insensitive, str(self.terminators), self.allow_cli_args, self.allow_redirection,
+ POSIX_SHLEX, STRIP_QUOTES_FOR_NON_POSIX, USE_ARG_LIST))
def do_help(self, arg):
"""List available commands with "help" or detailed help with "help cmd"."""
@@ -1592,7 +1595,8 @@ class Cmd(cmd.Cmd):
| no arg: list all
| arg is integer: list one history item, by index
- | arg is string: string search
+ | a..b, a:b, a:, ..b -> list history items by a span of indices (inclusive)
+ | arg is string: list all commands matching string search
| arg is /enclosed in forward-slashes/: regular expression search
"""
# If arguments are being passed as a list instead of as a string
@@ -1602,10 +1606,20 @@ class Cmd(cmd.Cmd):
else:
arg = ''
+ # If an argument was supplied, then retrieve partial contents of the history
if arg:
- history = self.history.get(arg)
+ # If a character indicating a slice is present, retrieve a slice of the history
+ if '..' in arg or ':' in arg:
+ # Get a slice of history
+ history = self.history.span(arg)
+ else:
+ # Get item(s) from history by index or string search
+ history = self.history.get(arg)
else:
+ # If no arg given, then retrieve the entire history
history = self.history
+
+ # Display the history items retrieved
for hi in history:
if opts.script:
self.poutput(hi)
@@ -1628,38 +1642,6 @@ class Cmd(cmd.Cmd):
except IndexError:
return None
- def do_list(self, arg):
- """list [arg]: lists command(s) from history in a flexible/searchable way.
-
- :param arg: str - behavior varies as follows:
-
- * no arg -> list most recent command
- * arg is integer -> list one history item, by index
- * a..b, a:b, a:, ..b -> list spans from a (or start) to b (or end)
- * arg is string -> list all commands matching string search
- * arg is /enclosed in forward-slashes/ -> regular expression search
- """
- try:
- history = self.history.span(arg or '-1')
- except IndexError:
- history = self.history.search(arg)
- for hi in history:
- self.poutput(hi.pr())
-
- def help_list(self):
- """Print help for do_list()."""
- help_str = """Lists command(s) from history in a flexible/searchable way.
-
- Usage: list [arg]
-
- Where arg is:
- no arg -> list most recent command
- arg is integer -> list one history item, by index
- a..b, a:b, a:, ..b -> list spans from a (or start) to b (or end)
- arg is string -> list all commands matching string search
- arg is /enclosed in forward-slashes/ -> regular expression search"""
- self.stdout.write("{}\n".format(help_str))
-
def do_edit(self, arg):
"""Edit a file or command in a text editor.
@@ -2170,7 +2152,7 @@ class History(list):
raise IndexError
if not results.group('separator'):
return [self[self._to_index(results.group('start'))]]
- start = self._to_index(results.group('start'))
+ start = self._to_index(results.group('start')) or 0 # Ensure start is not None
end = self._to_index(results.group('end'))
reverse = False
if end is not None:
diff --git a/docs/freefeatures.rst b/docs/freefeatures.rst
index 3c740c00..5e354549 100644
--- a/docs/freefeatures.rst
+++ b/docs/freefeatures.rst
@@ -220,8 +220,6 @@ also provide `bash-like history list editing`_.
.. automethod:: cmd2.Cmd.do_history
-.. automethod:: cmd2.Cmd.do_list
-
.. automethod:: cmd2.Cmd.do_run
Quitting the application
diff --git a/examples/exampleSession.txt b/examples/exampleSession.txt
index 893d7b59..62c130f0 100644
--- a/examples/exampleSession.txt
+++ b/examples/exampleSession.txt
@@ -3,8 +3,8 @@
Documented commands (type help <topic>):
========================================
-_relative_load edit help list orate py run say shell show
-cmdenvironment eof history load pause quit save set shortcuts speak
+_relative_load edit history orate py run say shell show
+cmdenvironment help load pause quit save set shortcuts speak
(Cmd) help say
Repeats what you tell me to.
| history and list commands are largely redundant
The **history** and **list** commands largely have the same basic functionality of displaying commands from the history.
There is a partial overlap in the functionality where they both support some of the same features. But each also has at least one feature that the other doesn't have.
It would probably be more convenient for end users to combine all of the functionality into one common **history** command and remove the **list** command. | python-cmd2/cmd2 | diff --git a/tests/conftest.py b/tests/conftest.py
index 3977de5f..41bd007a 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -15,8 +15,8 @@ import cmd2
# Help text for base cmd2.Cmd application
BASE_HELP = """Documented commands (type help <topic>):
========================================
-_relative_load edit history load py run set shortcuts
-cmdenvironment help list pause quit save shell show
+_relative_load edit history pause quit save shell show
+cmdenvironment help load py run set shortcuts
"""
# Help text for the history command
@@ -24,7 +24,8 @@ HELP_HISTORY = """history [arg]: lists past commands issued
| no arg: list all
| arg is integer: list one history item, by index
- | arg is string: string search
+ | a..b, a:b, a:, ..b -> list history items by a span of indices (inclusive)
+ | arg is string: list all commands matching string search
| arg is /enclosed in forward-slashes/: regular expression search
Usage: history [options] (limit on which commands to include)
diff --git a/tests/test_cmd2.py b/tests/test_cmd2.py
index ce47ef47..5783d944 100644
--- a/tests/test_cmd2.py
+++ b/tests/test_cmd2.py
@@ -142,48 +142,62 @@ shortcuts
""")
assert out == expected
-
-def test_base_list(base_app):
+def test_history_with_string_argument(base_app):
run_cmd(base_app, 'help')
run_cmd(base_app, 'shortcuts')
- out = run_cmd(base_app, 'list')
+ run_cmd(base_app, 'help history')
+ out = run_cmd(base_app, 'history help')
expected = normalize("""
--------------------------[2]
-shortcuts
+-------------------------[1]
+help
+-------------------------[3]
+help history
""")
assert out == expected
-def test_list_with_string_argument(base_app):
+def test_history_with_integer_argument(base_app):
run_cmd(base_app, 'help')
run_cmd(base_app, 'shortcuts')
- run_cmd(base_app, 'help list')
- out = run_cmd(base_app, 'list help')
+ out = run_cmd(base_app, 'history 1')
expected = normalize("""
-------------------------[1]
help
--------------------------[3]
-help list
""")
assert out == expected
-def test_list_with_integer_argument(base_app):
+def test_history_with_integer_span(base_app):
run_cmd(base_app, 'help')
run_cmd(base_app, 'shortcuts')
- out = run_cmd(base_app, 'list 1')
+ run_cmd(base_app, 'help history')
+ out = run_cmd(base_app, 'history 1..2')
expected = normalize("""
-------------------------[1]
help
+-------------------------[2]
+shortcuts
""")
assert out == expected
+def test_history_with_span_start(base_app):
+ run_cmd(base_app, 'help')
+ run_cmd(base_app, 'shortcuts')
+ run_cmd(base_app, 'help history')
+ out = run_cmd(base_app, 'history 2:')
+ expected = normalize("""
+-------------------------[2]
+shortcuts
+-------------------------[3]
+help history
+""")
+ assert out == expected
-def test_list_with_integer_span(base_app):
+def test_history_with_span_end(base_app):
run_cmd(base_app, 'help')
run_cmd(base_app, 'shortcuts')
- run_cmd(base_app, 'help list')
- out = run_cmd(base_app, 'list 1..2')
+ run_cmd(base_app, 'help history')
+ out = run_cmd(base_app, 'history :2')
expected = normalize("""
-------------------------[1]
help
@@ -201,17 +215,13 @@ def test_base_cmdenvironment(base_app):
Commands may be terminated with: [';']
Command-line arguments allowed: True
Output redirection and pipes allowed: True
+ Parsing of @options commands:
+ Use POSIX-style argument parser (vs Windows): False
+ Strip Quotes when using Windows-style argument parser: True
+ Use a list of arguments instead of a single argument string: False
+
""")
- assert out[:4] == expected[:4]
- assert out[4].strip().startswith('Settable parameters: ')
-
- # Settable parameters can be listed in any order, so need to validate carefully using unordered sets
- settable_params = {'continuation_prompt', 'default_file_name', 'prompt', 'abbrev', 'quiet', 'case_insensitive',
- 'colors', 'echo', 'timing', 'editor', 'feedback_to_output', 'debug', 'autorun_on_edit',
- 'locals_in_py'}
- out_params = set(out[4].split("Settable parameters: ")[1].split())
- assert settable_params == out_params
-
+ assert out == expected
def test_base_load(base_app, request):
test_dir = os.path.dirname(request.module.__file__)
diff --git a/tests/test_transcript.py b/tests/test_transcript.py
index c31b519d..03fec92a 100644
--- a/tests/test_transcript.py
+++ b/tests/test_transcript.py
@@ -106,9 +106,8 @@ def test_base_with_transcript(_cmdline_app):
Documented commands (type help <topic>):
========================================
-_relative_load help load py save shell speak
-cmdenvironment history orate quit say shortcuts
-edit list pause run set show
+_relative_load edit history orate py run say shell show
+cmdenvironment help load pause quit save set shortcuts speak
(Cmd) help say
Repeats what you tell me to.
diff --git a/tests/transcript.txt b/tests/transcript.txt
index 013e2d0f..d0fd86a6 100644
--- a/tests/transcript.txt
+++ b/tests/transcript.txt
@@ -2,9 +2,8 @@
Documented commands (type help <topic>):
========================================
-_relative_load help load py save shell speak
-cmdenvironment history orate quit say shortcuts
-edit list pause run set show
+_relative_load edit history orate py run say shell show
+cmdenvironment help load pause quit save set shortcuts speak
(Cmd) help say
Repeats what you tell me to.
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 2
},
"num_modified_files": 4
} | 0.7 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"mock"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements/base.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | -e git+https://github.com/python-cmd2/cmd2.git@a0d829e4105cd431198feb43534adaf953b090ba#egg=cmd2
exceptiongroup==1.2.2
iniconfig==2.1.0
mock==5.2.0
packaging==24.2
pluggy==1.5.0
pyparsing==3.2.3
pytest==8.3.5
six==1.17.0
tomli==2.2.1
| name: cmd2
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- exceptiongroup==1.2.2
- iniconfig==2.1.0
- mock==5.2.0
- packaging==24.2
- pluggy==1.5.0
- pyparsing==3.2.3
- pytest==8.3.5
- six==1.17.0
- tomli==2.2.1
prefix: /opt/conda/envs/cmd2
| [
"tests/test_cmd2.py::test_base_help",
"tests/test_cmd2.py::test_base_help_history",
"tests/test_cmd2.py::test_history_with_integer_span",
"tests/test_cmd2.py::test_history_with_span_start",
"tests/test_cmd2.py::test_history_with_span_end",
"tests/test_cmd2.py::test_base_cmdenvironment",
"tests/test_cmd2.py::test_allow_redirection",
"tests/test_cmd2.py::test_input_redirection",
"tests/test_transcript.py::test_base_with_transcript",
"tests/test_transcript.py::test_transcript_from_cmdloop"
]
| [
"tests/test_cmd2.py::test_output_redirection"
]
| [
"tests/test_cmd2.py::test_ver",
"tests/test_cmd2.py::test_base_shortcuts",
"tests/test_cmd2.py::test_base_show",
"tests/test_cmd2.py::test_base_show_long",
"tests/test_cmd2.py::test_base_set",
"tests/test_cmd2.py::test_base_set_not_supported",
"tests/test_cmd2.py::test_base_shell",
"tests/test_cmd2.py::test_base_py",
"tests/test_cmd2.py::test_base_run_python_script",
"tests/test_cmd2.py::test_base_error",
"tests/test_cmd2.py::test_base_history",
"tests/test_cmd2.py::test_history_with_string_argument",
"tests/test_cmd2.py::test_history_with_integer_argument",
"tests/test_cmd2.py::test_base_load",
"tests/test_cmd2.py::test_base_load_default_file",
"tests/test_cmd2.py::test_base_relative_load",
"tests/test_cmd2.py::test_base_save",
"tests/test_cmd2.py::test_pipe_to_shell",
"tests/test_cmd2.py::test_send_to_paste_buffer",
"tests/test_cmd2.py::test_base_timing",
"tests/test_cmd2.py::test_base_debug",
"tests/test_cmd2.py::test_base_colorize",
"tests/test_cmd2.py::test_edit_no_editor",
"tests/test_cmd2.py::test_edit_file",
"tests/test_cmd2.py::test_edit_number",
"tests/test_cmd2.py::test_edit_blank",
"tests/test_cmd2.py::test_base_py_interactive",
"tests/test_cmd2.py::test_base_cmdloop_with_queue",
"tests/test_cmd2.py::test_base_cmdloop_without_queue",
"tests/test_cmd2.py::test_cmdloop_without_rawinput",
"tests/test_transcript.py::Cmd2TestCase::runTest",
"tests/test_transcript.py::TestMyAppCase::runTest",
"tests/test_transcript.py::test_optparser",
"tests/test_transcript.py::test_optparser_nosuchoption",
"tests/test_transcript.py::test_comment_stripping",
"tests/test_transcript.py::test_optarser_correct_args_with_quotes_and_midline_options",
"tests/test_transcript.py::test_optarser_options_with_spaces_in_quotes",
"tests/test_transcript.py::test_commands_at_invocation",
"tests/test_transcript.py::test_select_options",
"tests/test_transcript.py::test_multiline_command_transcript_with_comments_at_beginning",
"tests/test_transcript.py::test_invalid_syntax",
"tests/test_transcript.py::test_regex_transcript"
]
| []
| MIT License | 1,326 | [
"cmd2.py",
"examples/exampleSession.txt",
"docs/freefeatures.rst",
"CHANGES.md"
]
| [
"cmd2.py",
"examples/exampleSession.txt",
"docs/freefeatures.rst",
"CHANGES.md"
]
|
|
fact-project__pyfact-70 | 58e9bdbae66a65f16f900fe65560d3c83592c4b5 | 2017-06-03 17:52:44 | 58e9bdbae66a65f16f900fe65560d3c83592c4b5 | diff --git a/.travis.yml b/.travis.yml
index ff5dd0e..071380c 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -8,6 +8,7 @@ before_install:
# The next couple lines fix a crash with multiprocessing on Travis and are not specific to using Miniconda
- sudo rm -rf /dev/shm
- sudo ln -s /run/shm /dev/shm
+ - pip install --upgrade pip
install:
- pip install restructuredtext-lint sphinx pygments
diff --git a/fact/VERSION b/fact/VERSION
index d9df1bb..af88ba8 100644
--- a/fact/VERSION
+++ b/fact/VERSION
@@ -1,1 +1,1 @@
-0.11.0
+0.11.1
diff --git a/fact/io.py b/fact/io.py
index 5e5d175..26e6183 100644
--- a/fact/io.py
+++ b/fact/io.py
@@ -258,7 +258,7 @@ def to_h5py(filename, df, key='data', mode='a', dtypes=None, index=True, **kwarg
with h5py.File(filename, mode=mode) as f:
if key not in f:
- initialize_h5py(f, array.dtype, key=key, **kwargs)
+ initialize_h5py(f, array, key=key, **kwargs)
append_to_h5py(f, array, key=key)
@@ -293,7 +293,7 @@ def change_recarray_dtype(array, dtypes):
return array.astype(dt)
-def initialize_h5py(f, dtypes, key='events', **kwargs):
+def initialize_h5py(f, array, key='events', **kwargs):
'''
Create a group with name `key` and empty datasets for each
entry in dtypes.
@@ -302,9 +302,8 @@ def initialize_h5py(f, dtypes, key='events', **kwargs):
----------
f: h5py.File
the hdf5 file, opened either in write or append mode
- dtypes: numpy.dtype
- the numpy dtype object of a record or structured array describing
- the columns
+ array: numpy structured array
+ The data
key: str
the name for the hdf5 group to hold all datasets, default: data
@@ -312,13 +311,19 @@ def initialize_h5py(f, dtypes, key='events', **kwargs):
'''
group = f.create_group(key)
+ dtypes = array.dtype
for name in dtypes.names:
dtype = dtypes[name]
maxshape = [None] + list(dtype.shape)
shape = [0] + list(dtype.shape)
if dtype.base == object:
- dt = h5py.special_dtype(vlen=str)
+ if isinstance(array[name][0], list):
+ dt = np.array(array[name][0]).dtype
+ shape = [0, len(array[name][0])]
+ maxshape = [None, len(array[name][0])]
+ else:
+ dt = h5py.special_dtype(vlen=str)
elif dtype.type == np.datetime64:
# save dates as ISO string, create dummy date to get correct length
@@ -372,6 +377,10 @@ def append_to_h5py(f, array, key='events'):
if data.dtype.type == np.datetime64:
data = data.astype('S')
+ if data.dtype.base == object:
+ if isinstance(data[0], list):
+ data = np.array([o for o in data])
+
if data.ndim == 1:
dataset[n_existing_rows:] = data
| to_h5py converts lists in dataframes to strings
This is because if dtype is object, than str is assumend. | fact-project/pyfact | diff --git a/tests/test_io.py b/tests/test_io.py
index c034d91..472f8df 100644
--- a/tests/test_io.py
+++ b/tests/test_io.py
@@ -178,6 +178,21 @@ def test_write_data_h5py():
write_data(df, f.name, use_h5py=True)
+def test_write_lists_h5py():
+ from fact.io import to_h5py, read_h5py
+
+ df = pd.DataFrame({
+ 'x': [[1.0, 2.0], [3.0, 4.0]]
+ })
+
+ with tempfile.NamedTemporaryFile(suffix='.hdf5') as f:
+ to_h5py(f.name, df)
+
+ df = read_h5py(f.name, columns=['x'])
+
+ assert df['x_0'].iloc[0] == 1.0
+
+
def test_write_data_root():
from fact.io import write_data
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 3
} | 0.11 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"restructuredtext-lint",
"sphinx",
"pygments"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.6",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.13
astropy==4.1
attrs==22.2.0
Babel==2.11.0
cached-property==1.5.2
certifi==2021.5.30
charset-normalizer==2.0.12
cycler==0.11.0
docutils==0.18.1
greenlet==2.0.2
h5py==3.1.0
idna==3.10
imagesize==1.4.1
importlib-metadata==4.8.3
iniconfig==1.1.1
Jinja2==3.0.3
kiwisolver==1.3.1
MarkupSafe==2.0.1
matplotlib==3.3.4
numexpr==2.8.1
numpy==1.19.5
packaging==21.3
pandas==1.1.5
peewee==3.17.9
Pillow==8.4.0
pluggy==1.0.0
py==1.11.0
pycrypto==2.6.1
-e git+https://github.com/fact-project/pyfact.git@58e9bdbae66a65f16f900fe65560d3c83592c4b5#egg=pyfact
Pygments==2.14.0
pymongo==4.1.1
PyMySQL==1.0.2
pyparsing==3.1.4
pytest==7.0.1
python-dateutil==2.9.0.post0
pytz==2025.2
requests==2.27.1
restructuredtext-lint==1.4.0
scipy==1.5.4
simple-crypt==4.1.7
six==1.17.0
snowballstemmer==2.2.0
Sphinx==5.3.0
sphinxcontrib-applehelp==1.0.2
sphinxcontrib-devhelp==1.0.2
sphinxcontrib-htmlhelp==2.0.0
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==1.0.3
sphinxcontrib-serializinghtml==1.1.5
SQLAlchemy==1.4.54
tables==3.7.0
tomli==1.2.3
typing_extensions==4.1.1
urllib3==1.26.20
wrapt==1.16.0
zipp==3.6.0
| name: pyfact
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.13
- astropy==4.1
- attrs==22.2.0
- babel==2.11.0
- cached-property==1.5.2
- charset-normalizer==2.0.12
- cycler==0.11.0
- docutils==0.18.1
- greenlet==2.0.2
- h5py==3.1.0
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- jinja2==3.0.3
- kiwisolver==1.3.1
- markupsafe==2.0.1
- matplotlib==3.3.4
- numexpr==2.8.1
- numpy==1.19.5
- packaging==21.3
- pandas==1.1.5
- peewee==3.17.9
- pillow==8.4.0
- pluggy==1.0.0
- py==1.11.0
- pycrypto==2.6.1
- pygments==2.14.0
- pymongo==4.1.1
- pymysql==1.0.2
- pyparsing==3.1.4
- pytest==7.0.1
- python-dateutil==2.9.0.post0
- pytz==2025.2
- requests==2.27.1
- restructuredtext-lint==1.4.0
- scipy==1.5.4
- simple-crypt==4.1.7
- six==1.17.0
- snowballstemmer==2.2.0
- sphinx==5.3.0
- sphinxcontrib-applehelp==1.0.2
- sphinxcontrib-devhelp==1.0.2
- sphinxcontrib-htmlhelp==2.0.0
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==1.0.3
- sphinxcontrib-serializinghtml==1.1.5
- sqlalchemy==1.4.54
- tables==3.7.0
- tomli==1.2.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- wrapt==1.16.0
- zipp==3.6.0
prefix: /opt/conda/envs/pyfact
| [
"tests/test_io.py::test_write_lists_h5py"
]
| [
"tests/test_io.py::test_to_h5py",
"tests/test_io.py::test_to_h5py_string",
"tests/test_io.py::test_write_data_pandas_hdf",
"tests/test_io.py::test_write_data_h5py"
]
| [
"tests/test_io.py::test_to_h5py_datetime",
"tests/test_io.py::test_to_h5py_append",
"tests/test_io.py::test_to_h5py_append_second_group",
"tests/test_io.py::test_write_data_csv",
"tests/test_io.py::test_write_data_json",
"tests/test_io.py::test_write_data_jsonlines",
"tests/test_io.py::test_write_data_root"
]
| []
| null | 1,327 | [
".travis.yml",
"fact/io.py",
"fact/VERSION"
]
| [
".travis.yml",
"fact/io.py",
"fact/VERSION"
]
|
|
smok-serwis__coolamqp-25 | a379f33e87f4285a931031f4f69c70a5f30b46b7 | 2017-06-04 21:19:17 | 1b7c1619d9a65eabc4bb2502b098930e3d48a959 | diff --git a/coolamqp/framing/base.py b/coolamqp/framing/base.py
index 5f7ccf2..31753c9 100644
--- a/coolamqp/framing/base.py
+++ b/coolamqp/framing/base.py
@@ -92,6 +92,15 @@ class AMQPContentPropertyList(object):
# todo they are immutable, so they could just serialize themselves...
+ def get(self, property_name, default=None):
+ """
+ Return a particular property, or default if not defined
+ :param property_name: property name, unicode
+ :param default: default value
+ :return: memoryview or bytes
+ """
+ return getattr(self, property_name, default)
+
@staticmethod
def zero_property_flags(property_flags):
"""
diff --git a/coolamqp/framing/compilation/compile_definitions.py b/coolamqp/framing/compilation/compile_definitions.py
index 0109147..62a957d 100644
--- a/coolamqp/framing/compilation/compile_definitions.py
+++ b/coolamqp/framing/compilation/compile_definitions.py
@@ -55,7 +55,7 @@ binary string? It's a memoryview all right.
Only thing that isn't are field names in tables.
"""
-import struct, collections, warnings, logging, six
+import struct, collections, logging, six
from coolamqp.framing.base import AMQPClass, AMQPMethodPayload, AMQPContentPropertyList
from coolamqp.framing.field_table import enframe_table, deframe_table, frame_table_size
@@ -230,7 +230,7 @@ Field = collections.namedtuple('Field', ('name', 'type', 'basic_type', 'reserved
#
# If you do not know in advance what properties you will be using, it is correct to use
# this constructor.
-
+ if zpf in BasicContentPropertyList.PARTICULAR_CLASSES:
return %s.PARTICULAR_CLASSES[zpf](**kwargs)
else:
logger.debug('Property field (%s:%d) not seen yet, compiling', repr(zpf))
diff --git a/coolamqp/framing/definitions.py b/coolamqp/framing/definitions.py
index cab1e07..e472c3d 100644
--- a/coolamqp/framing/definitions.py
+++ b/coolamqp/framing/definitions.py
@@ -23,7 +23,7 @@ binary string? It's a memoryview all right.
Only thing that isn't are field names in tables.
"""
-import struct, collections, warnings, logging, six
+import struct, collections, logging, six
from coolamqp.framing.base import AMQPClass, AMQPMethodPayload, AMQPContentPropertyList
from coolamqp.framing.field_table import enframe_table, deframe_table, frame_table_size
@@ -2359,6 +2359,23 @@ class BasicContentPropertyList(AMQPContentPropertyList):
])
zpf = six.binary_type(zpf)
+# If you know in advance what properties you will be using, use typized constructors like
+#
+# runs once
+# my_type = BasicContentPropertyList.typize('content_type', 'content_encoding')
+#
+# runs many times
+# props = my_type('text/plain', 'utf8')
+#
+# instead of
+#
+# # runs many times
+# props = BasicContentPropertyList(content_type='text/plain', content_encoding='utf8')
+#
+# This way you will be faster.
+#
+# If you do not know in advance what properties you will be using, it is correct to use
+# this constructor.
if zpf in BasicContentPropertyList.PARTICULAR_CLASSES:
return BasicContentPropertyList.PARTICULAR_CLASSES[zpf](**kwargs)
else:
| Add some kind of .get() for attributes
Because doing
```python
try:
mode = message.properties.content_type.tobytes()
except AttributeError:
mode = b'application/x-pickle'
```
all over sucks | smok-serwis/coolamqp | diff --git a/tests/test_objects.py b/tests/test_objects.py
index 6a4c0c6..e3a109d 100644
--- a/tests/test_objects.py
+++ b/tests/test_objects.py
@@ -5,9 +5,7 @@ It sounds like a melody
from __future__ import print_function, absolute_import, division
import six
import unittest
-
-
-from coolamqp.objects import NodeDefinition
+from coolamqp.objects import NodeDefinition, MessageProperties
class TestObjects(unittest.TestCase):
@@ -23,3 +21,10 @@ class TestObjects(unittest.TestCase):
n1 = NodeDefinition(u'amqp://ala:ma@kota/')
self.assertEquals(n1.virtual_host, u'/')
+
+ def test_get_message_properties(self):
+ empty_p_msg = MessageProperties()
+ ce_p_msg = MessageProperties(content_encoding=b'wtf')
+
+ self.assertIsNone(empty_p_msg.get('content_encoding'), None)
+ self.assertEquals(ce_p_msg.get('content_encoding', b'wtf'), b'wtf')
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 3,
"test_score": 0
},
"num_modified_files": 3
} | 0.90 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"nose",
"coverage",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
-e git+https://github.com/smok-serwis/coolamqp.git@a379f33e87f4285a931031f4f69c70a5f30b46b7#egg=CoolAMQP
coverage==6.2
futures==2.2.0
importlib-metadata==4.8.3
iniconfig==1.1.1
monotonic==1.6
nose==1.3.7
packaging==21.3
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
six==1.17.0
tomli==1.2.3
typing_extensions==4.1.1
zipp==3.6.0
| name: coolamqp
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- coverage==6.2
- futures==2.2.0
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- monotonic==1.6
- nose==1.3.7
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- six==1.17.0
- tomli==1.2.3
- typing-extensions==4.1.1
- zipp==3.6.0
prefix: /opt/conda/envs/coolamqp
| [
"tests/test_objects.py::TestObjects::test_get_message_properties"
]
| []
| [
"tests/test_objects.py::TestObjects::test_node_definition_from_amqp"
]
| []
| MIT License | 1,328 | [
"coolamqp/framing/compilation/compile_definitions.py",
"coolamqp/framing/definitions.py",
"coolamqp/framing/base.py"
]
| [
"coolamqp/framing/compilation/compile_definitions.py",
"coolamqp/framing/definitions.py",
"coolamqp/framing/base.py"
]
|
|
python-cmd2__cmd2-117 | ccef0b4193297c0b88e8add373f6a49126ffe772 | 2017-06-04 23:45:14 | ddfd3d9a400ae81468e9abcc89fe690c30b7ec7f | diff --git a/cmd2.py b/cmd2.py
index 3755ef56..13169a17 100755
--- a/cmd2.py
+++ b/cmd2.py
@@ -1610,8 +1610,11 @@ class Cmd(cmd.Cmd):
if arg:
# If a character indicating a slice is present, retrieve a slice of the history
if '..' in arg or ':' in arg:
- # Get a slice of history
- history = self.history.span(arg)
+ try:
+ # Get a slice of history
+ history = self.history.span(arg)
+ except IndexError:
+ history = self.history.get(arg)
else:
# Get item(s) from history by index or string search
history = self.history.get(arg)
| Unhandled exception in history command introduced by recent changes
A bug was introduced by the recent changes which merged the list functionality into the history command.
When attempting to do a span of history indices, an index error can occur and that exception needs to be caught and default history behavior needs to occur at that point instead. | python-cmd2/cmd2 | diff --git a/tests/test_cmd2.py b/tests/test_cmd2.py
index 5783d944..2198d5c0 100644
--- a/tests/test_cmd2.py
+++ b/tests/test_cmd2.py
@@ -206,6 +206,17 @@ shortcuts
""")
assert out == expected
+def test_history_with_span_index_error(base_app):
+ run_cmd(base_app, 'help')
+ run_cmd(base_app, 'help history')
+ run_cmd(base_app, '!ls -hal :')
+ out = run_cmd(base_app, 'history "hal :"')
+ expected = normalize("""
+-------------------------[3]
+!ls -hal :
+""")
+ assert out == expected
+
def test_base_cmdenvironment(base_app):
out = run_cmd(base_app, 'cmdenvironment')
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 1
} | 0.7 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"mock",
"sphinx",
"sphinx-rtd-theme",
"pytest-xdist",
"pytest-cov"
],
"pre_install": null,
"python": "3.6",
"reqs_path": [
"requirements/base.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.13
attrs==22.2.0
Babel==2.11.0
certifi==2021.5.30
charset-normalizer==2.0.12
-e git+https://github.com/python-cmd2/cmd2.git@ccef0b4193297c0b88e8add373f6a49126ffe772#egg=cmd2
coverage==6.2
docutils==0.18.1
execnet==1.9.0
idna==3.10
imagesize==1.4.1
importlib-metadata==4.8.3
iniconfig==1.1.1
Jinja2==3.0.3
MarkupSafe==2.0.1
mock==5.2.0
packaging==21.3
pluggy==1.0.0
py==1.11.0
Pygments==2.14.0
pyparsing==3.1.4
pytest==7.0.1
pytest-cov==4.0.0
pytest-xdist==3.0.2
pytz==2025.2
requests==2.27.1
six==1.17.0
snowballstemmer==2.2.0
Sphinx==5.3.0
sphinx-rtd-theme==2.0.0
sphinxcontrib-applehelp==1.0.2
sphinxcontrib-devhelp==1.0.2
sphinxcontrib-htmlhelp==2.0.0
sphinxcontrib-jquery==4.1
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==1.0.3
sphinxcontrib-serializinghtml==1.1.5
tomli==1.2.3
typing_extensions==4.1.1
urllib3==1.26.20
zipp==3.6.0
| name: cmd2
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.13
- attrs==22.2.0
- babel==2.11.0
- charset-normalizer==2.0.12
- coverage==6.2
- docutils==0.18.1
- execnet==1.9.0
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- jinja2==3.0.3
- markupsafe==2.0.1
- mock==5.2.0
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pygments==2.14.0
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-cov==4.0.0
- pytest-xdist==3.0.2
- pytz==2025.2
- requests==2.27.1
- six==1.17.0
- snowballstemmer==2.2.0
- sphinx==5.3.0
- sphinx-rtd-theme==2.0.0
- sphinxcontrib-applehelp==1.0.2
- sphinxcontrib-devhelp==1.0.2
- sphinxcontrib-htmlhelp==2.0.0
- sphinxcontrib-jquery==4.1
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==1.0.3
- sphinxcontrib-serializinghtml==1.1.5
- tomli==1.2.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- zipp==3.6.0
prefix: /opt/conda/envs/cmd2
| [
"tests/test_cmd2.py::test_history_with_span_index_error"
]
| [
"tests/test_cmd2.py::test_output_redirection"
]
| [
"tests/test_cmd2.py::test_ver",
"tests/test_cmd2.py::test_base_help",
"tests/test_cmd2.py::test_base_help_history",
"tests/test_cmd2.py::test_base_shortcuts",
"tests/test_cmd2.py::test_base_show",
"tests/test_cmd2.py::test_base_show_long",
"tests/test_cmd2.py::test_base_set",
"tests/test_cmd2.py::test_base_set_not_supported",
"tests/test_cmd2.py::test_base_shell",
"tests/test_cmd2.py::test_base_py",
"tests/test_cmd2.py::test_base_run_python_script",
"tests/test_cmd2.py::test_base_error",
"tests/test_cmd2.py::test_base_history",
"tests/test_cmd2.py::test_history_with_string_argument",
"tests/test_cmd2.py::test_history_with_integer_argument",
"tests/test_cmd2.py::test_history_with_integer_span",
"tests/test_cmd2.py::test_history_with_span_start",
"tests/test_cmd2.py::test_history_with_span_end",
"tests/test_cmd2.py::test_base_cmdenvironment",
"tests/test_cmd2.py::test_base_load",
"tests/test_cmd2.py::test_base_load_default_file",
"tests/test_cmd2.py::test_base_relative_load",
"tests/test_cmd2.py::test_base_save",
"tests/test_cmd2.py::test_allow_redirection",
"tests/test_cmd2.py::test_input_redirection",
"tests/test_cmd2.py::test_pipe_to_shell",
"tests/test_cmd2.py::test_send_to_paste_buffer",
"tests/test_cmd2.py::test_base_timing",
"tests/test_cmd2.py::test_base_debug",
"tests/test_cmd2.py::test_base_colorize",
"tests/test_cmd2.py::test_edit_no_editor",
"tests/test_cmd2.py::test_edit_file",
"tests/test_cmd2.py::test_edit_number",
"tests/test_cmd2.py::test_edit_blank",
"tests/test_cmd2.py::test_base_py_interactive",
"tests/test_cmd2.py::test_base_cmdloop_with_queue",
"tests/test_cmd2.py::test_base_cmdloop_without_queue",
"tests/test_cmd2.py::test_cmdloop_without_rawinput"
]
| []
| MIT License | 1,329 | [
"cmd2.py"
]
| [
"cmd2.py"
]
|
|
tornadoweb__tornado-2074 | 62e47215ce12aee83f951758c96775a43e80475b | 2017-06-05 05:01:04 | 03f13800e854a6fc9e6efa2168e694d9599348bd | ploxiln: just one test failure on Python 2.7.8:
```
FAIL: test_connection_refused (tornado.test.iostream_test.TestIOStream)
...
tornado/test/iostream_test.py", line 240, in test_connection_refused
self.assertFalse(self.connect_called)
AssertionError: True is not false
``` | diff --git a/tornado/http1connection.py b/tornado/http1connection.py
index c6d3e336..b7a62b8c 100644
--- a/tornado/http1connection.py
+++ b/tornado/http1connection.py
@@ -357,6 +357,10 @@ class HTTP1Connection(httputil.HTTPConnection):
# Applications are discouraged from touching Transfer-Encoding,
# but if they do, leave it alone.
'Transfer-Encoding' not in headers)
+ # If connection to a 1.1 client will be closed, inform client
+ if (self._request_start_line.version == 'HTTP/1.1' and
+ self._disconnect_on_finish):
+ headers['Connection'] = 'close'
# If a 1.0 client asked for keep-alive, add the header.
if (self._request_start_line.version == 'HTTP/1.0' and
(self._request_headers.get('Connection', '').lower() ==
@@ -418,7 +422,7 @@ class HTTP1Connection(httputil.HTTPConnection):
def write(self, chunk, callback=None):
"""Implements `.HTTPConnection.write`.
- For backwards compatibility is is allowed but deprecated to
+ For backwards compatibility it is allowed but deprecated to
skip `write_headers` and instead call `write()` with a
pre-encoded header block.
"""
diff --git a/tornado/httpserver.py b/tornado/httpserver.py
index 7a7d08cd..95273a36 100644
--- a/tornado/httpserver.py
+++ b/tornado/httpserver.py
@@ -154,7 +154,6 @@ class HTTPServer(TCPServer, Configurable,
max_body_size=None, max_buffer_size=None,
trusted_downstream=None):
self.request_callback = request_callback
- self.no_keep_alive = no_keep_alive
self.xheaders = xheaders
self.protocol = protocol
self.conn_params = HTTP1ConnectionParameters(
| http1connection: Send `Connection: close` before closing connection
From https://github.com/tornadoweb/tornado/pull/1963#issuecomment-305983002
When the client sends `Connection: close`, Tornado (correctly) responds by closing the connection after sending the response. [RFC 7230 section 6.6](https://tools.ietf.org/html/rfc7230#section-6.6) says that a server SHOULD set the `Connection: close` header in the response when it does so. | tornadoweb/tornado | diff --git a/tornado/test/httpserver_test.py b/tornado/test/httpserver_test.py
index f5f91a9d..d8342f74 100644
--- a/tornado/test/httpserver_test.py
+++ b/tornado/test/httpserver_test.py
@@ -719,6 +719,7 @@ class KeepAliveTest(AsyncHTTPTestCase):
self.stream.read_until_close(callback=self.stop)
data = self.wait()
self.assertTrue(not data)
+ self.assertEqual(self.headers['Connection'], 'close')
self.close()
# keepalive is supported for http 1.0 too, but it's opt-in
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 2
} | 4.5 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"coverage",
"codecov",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.6",
"reqs_path": [
"requirements/base.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
charset-normalizer==2.0.12
codecov==2.1.13
coverage==6.2
idna==3.10
importlib-metadata==4.8.3
iniconfig==1.1.1
packaging==21.3
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
requests==2.27.1
tomli==1.2.3
-e git+https://github.com/tornadoweb/tornado.git@62e47215ce12aee83f951758c96775a43e80475b#egg=tornado
typing_extensions==4.1.1
urllib3==1.26.20
zipp==3.6.0
| name: tornado
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- charset-normalizer==2.0.12
- codecov==2.1.13
- coverage==6.2
- idna==3.10
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- requests==2.27.1
- tomli==1.2.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- zipp==3.6.0
prefix: /opt/conda/envs/tornado
| [
"tornado/test/httpserver_test.py::KeepAliveTest::test_request_close"
]
| [
"tornado/test/httpserver_test.py::HTTPServerRawTest::test_invalid_content_length",
"tornado/test/httpserver_test.py::HTTPServerRawTest::test_malformed_first_line",
"tornado/test/httpserver_test.py::HTTPServerRawTest::test_malformed_headers",
"tornado/test/httpserver_test.py::UnixSocketTest::test_unix_socket_bad_request",
"tornado/test/httpserver_test.py::BodyLimitsTest::test_body_size_override_reset",
"tornado/test/httpserver_test.py::BodyLimitsTest::test_large_body_buffered",
"tornado/test/httpserver_test.py::BodyLimitsTest::test_large_body_buffered_chunked",
"tornado/test/httpserver_test.py::BodyLimitsTest::test_large_body_streaming",
"tornado/test/httpserver_test.py::BodyLimitsTest::test_large_body_streaming_chunked",
"tornado/test/httpserver_test.py::BodyLimitsTest::test_timeout"
]
| [
"tornado/test/httpserver_test.py::SSLv23Test::test_error_logging",
"tornado/test/httpserver_test.py::SSLv23Test::test_large_post",
"tornado/test/httpserver_test.py::SSLv23Test::test_non_ssl_request",
"tornado/test/httpserver_test.py::SSLv23Test::test_ssl",
"tornado/test/httpserver_test.py::SSLv3Test::test_error_logging",
"tornado/test/httpserver_test.py::SSLv3Test::test_large_post",
"tornado/test/httpserver_test.py::SSLv3Test::test_non_ssl_request",
"tornado/test/httpserver_test.py::SSLv3Test::test_ssl",
"tornado/test/httpserver_test.py::TLSv1Test::test_error_logging",
"tornado/test/httpserver_test.py::TLSv1Test::test_large_post",
"tornado/test/httpserver_test.py::TLSv1Test::test_non_ssl_request",
"tornado/test/httpserver_test.py::TLSv1Test::test_ssl",
"tornado/test/httpserver_test.py::SSLContextTest::test_error_logging",
"tornado/test/httpserver_test.py::SSLContextTest::test_large_post",
"tornado/test/httpserver_test.py::SSLContextTest::test_non_ssl_request",
"tornado/test/httpserver_test.py::SSLContextTest::test_ssl",
"tornado/test/httpserver_test.py::BadSSLOptionsTest::test_missing_arguments",
"tornado/test/httpserver_test.py::BadSSLOptionsTest::test_missing_key",
"tornado/test/httpserver_test.py::HTTPConnectionTest::test_100_continue",
"tornado/test/httpserver_test.py::HTTPConnectionTest::test_multipart_form",
"tornado/test/httpserver_test.py::HTTPConnectionTest::test_newlines",
"tornado/test/httpserver_test.py::HTTPServerTest::test_double_slash",
"tornado/test/httpserver_test.py::HTTPServerTest::test_empty_post_parameters",
"tornado/test/httpserver_test.py::HTTPServerTest::test_empty_query_string",
"tornado/test/httpserver_test.py::HTTPServerTest::test_malformed_body",
"tornado/test/httpserver_test.py::HTTPServerTest::test_query_string_encoding",
"tornado/test/httpserver_test.py::HTTPServerTest::test_types",
"tornado/test/httpserver_test.py::HTTPServerRawTest::test_chunked_request_body",
"tornado/test/httpserver_test.py::HTTPServerRawTest::test_chunked_request_uppercase",
"tornado/test/httpserver_test.py::HTTPServerRawTest::test_empty_request",
"tornado/test/httpserver_test.py::XHeaderTest::test_ip_headers",
"tornado/test/httpserver_test.py::XHeaderTest::test_scheme_headers",
"tornado/test/httpserver_test.py::XHeaderTest::test_trusted_downstream",
"tornado/test/httpserver_test.py::SSLXHeaderTest::test_request_without_xprotocol",
"tornado/test/httpserver_test.py::ManualProtocolTest::test_manual_protocol",
"tornado/test/httpserver_test.py::UnixSocketTest::test_unix_socket",
"tornado/test/httpserver_test.py::KeepAliveTest::test_cancel_during_download",
"tornado/test/httpserver_test.py::KeepAliveTest::test_finish_while_closed",
"tornado/test/httpserver_test.py::KeepAliveTest::test_http10",
"tornado/test/httpserver_test.py::KeepAliveTest::test_http10_keepalive",
"tornado/test/httpserver_test.py::KeepAliveTest::test_http10_keepalive_extra_crlf",
"tornado/test/httpserver_test.py::KeepAliveTest::test_keepalive_chunked",
"tornado/test/httpserver_test.py::KeepAliveTest::test_pipelined_cancel",
"tornado/test/httpserver_test.py::KeepAliveTest::test_pipelined_requests",
"tornado/test/httpserver_test.py::KeepAliveTest::test_two_requests",
"tornado/test/httpserver_test.py::GzipTest::test_gzip",
"tornado/test/httpserver_test.py::GzipTest::test_uncompressed",
"tornado/test/httpserver_test.py::GzipUnsupportedTest::test_gzip_unsupported",
"tornado/test/httpserver_test.py::GzipUnsupportedTest::test_uncompressed",
"tornado/test/httpserver_test.py::StreamingChunkSizeTest::test_chunked_body",
"tornado/test/httpserver_test.py::StreamingChunkSizeTest::test_chunked_compressed",
"tornado/test/httpserver_test.py::StreamingChunkSizeTest::test_compressed_body",
"tornado/test/httpserver_test.py::StreamingChunkSizeTest::test_regular_body",
"tornado/test/httpserver_test.py::MaxHeaderSizeTest::test_large_headers",
"tornado/test/httpserver_test.py::MaxHeaderSizeTest::test_small_headers",
"tornado/test/httpserver_test.py::IdleTimeoutTest::test_idle_after_use",
"tornado/test/httpserver_test.py::IdleTimeoutTest::test_unused_connection",
"tornado/test/httpserver_test.py::BodyLimitsTest::test_large_body_streaming_chunked_override",
"tornado/test/httpserver_test.py::BodyLimitsTest::test_large_body_streaming_override",
"tornado/test/httpserver_test.py::BodyLimitsTest::test_small_body",
"tornado/test/httpserver_test.py::LegacyInterfaceTest::test_legacy_interface"
]
| []
| Apache License 2.0 | 1,330 | [
"tornado/http1connection.py",
"tornado/httpserver.py"
]
| [
"tornado/http1connection.py",
"tornado/httpserver.py"
]
|
typesafehub__conductr-cli-484 | c91b5dd3bee57bd813dfa26c9724edc13b3ce2c9 | 2017-06-05 13:40:46 | 39719b38ec6fc0f598756700a8a815b56bd8bc59 | diff --git a/conductr_cli/endpoint.py b/conductr_cli/endpoint.py
index b8048f5..4cf8d4c 100644
--- a/conductr_cli/endpoint.py
+++ b/conductr_cli/endpoint.py
@@ -112,7 +112,7 @@ class HttpRequest:
def hocon(self):
request_tree = ConfigTree()
- request_tree.put(self.match if self.match else 'path', self.value)
+ request_tree.put(self.match if self.match else 'path-beg', self.value)
if self.method:
request_tree.put('method', self.method)
if self.rewrite:
| bndl endpoint should default to --path-beg | typesafehub/conductr-cli | diff --git a/conductr_cli/test/test_bndl_utils.py b/conductr_cli/test/test_bndl_utils.py
index 70ab43c..72c200b 100644
--- a/conductr_cli/test/test_bndl_utils.py
+++ b/conductr_cli/test/test_bndl_utils.py
@@ -434,7 +434,7 @@ class TestBndlUtils(CliTestCase):
| http {
| requests = [
| {
- | path = "/"
+ | path-beg = "/"
| }
| ]
| }
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 0,
"test_score": 3
},
"num_modified_files": 1
} | 1.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"tox",
"pytest"
],
"pre_install": null,
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | argcomplete==3.6.1
arrow==1.3.0
cachetools==5.5.2
certifi==2025.1.31
chardet==5.2.0
charset-normalizer==3.4.1
colorama==0.4.6
-e git+https://github.com/typesafehub/conductr-cli.git@c91b5dd3bee57bd813dfa26c9724edc13b3ce2c9#egg=conductr_cli
distlib==0.3.9
exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
filelock==3.18.0
idna==3.10
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
jsonschema==2.6.0
packaging @ file:///croot/packaging_1734472117206/work
pager==3.3
platformdirs==4.3.7
pluggy @ file:///croot/pluggy_1733169602837/work
prettytable==0.7.2
psutil==5.9.8
Pygments==2.19.1
pyhocon==0.3.35
PyJWT==1.4.2
pyparsing==3.2.3
pyproject-api==1.9.0
pyreadline==2.1
pytest @ file:///croot/pytest_1738938843180/work
python-dateutil==2.9.0.post0
requests==2.32.3
requests-toolbelt==1.0.0
six==1.17.0
sseclient==0.0.14
toml==0.10.2
tomli==2.2.1
tox==4.25.0
types-python-dateutil==2.9.0.20241206
typing_extensions==4.13.0
urllib3==2.3.0
virtualenv==20.29.3
www-authenticate==0.9.2
| name: conductr-cli
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- argcomplete==3.6.1
- arrow==1.3.0
- cachetools==5.5.2
- certifi==2025.1.31
- chardet==5.2.0
- charset-normalizer==3.4.1
- colorama==0.4.6
- distlib==0.3.9
- filelock==3.18.0
- idna==3.10
- jsonschema==2.6.0
- pager==3.3
- platformdirs==4.3.7
- prettytable==0.7.2
- psutil==5.9.8
- pygments==2.19.1
- pyhocon==0.3.35
- pyjwt==1.4.2
- pyparsing==3.2.3
- pyproject-api==1.9.0
- pyreadline==2.1
- python-dateutil==2.9.0.post0
- requests==2.32.3
- requests-toolbelt==1.0.0
- six==1.17.0
- sseclient==0.0.14
- toml==0.10.2
- tomli==2.2.1
- tox==4.25.0
- types-python-dateutil==2.9.0.20241206
- typing-extensions==4.13.0
- urllib3==2.3.0
- virtualenv==20.29.3
- www-authenticate==0.9.2
prefix: /opt/conda/envs/conductr-cli
| [
"conductr_cli/test/test_bndl_utils.py::TestBndlUtils::test_load_bundle_args_into_conf"
]
| []
| [
"conductr_cli/test/test_bndl_utils.py::TestBndlUtils::test_detect_format_dir",
"conductr_cli/test/test_bndl_utils.py::TestBndlUtils::test_detect_format_stream",
"conductr_cli/test/test_bndl_utils.py::TestBndlUtils::test_digest_reader_writer",
"conductr_cli/test/test_bndl_utils.py::TestBndlUtils::test_first_mtime",
"conductr_cli/test/test_bndl_utils.py::TestBndlUtils::test_load_bundle_args_into_conf_with_generic_defaults",
"conductr_cli/test/test_bndl_utils.py::TestBndlUtils::test_load_bundle_args_into_conf_with_play_defaults"
]
| []
| Apache License 2.0 | 1,331 | [
"conductr_cli/endpoint.py"
]
| [
"conductr_cli/endpoint.py"
]
|
|
nipy__nipype-2065 | a63c52d97df65d316a5c97a40dd9c7e5a63d237c | 2017-06-05 17:17:48 | 14161a590a3166b5a9c0f4afd42ff1acf843a960 | diff --git a/README.rst b/README.rst
index aa41f34d6..5064198dd 100644
--- a/README.rst
+++ b/README.rst
@@ -33,7 +33,7 @@ NIPYPE: Neuroimaging in Python: Pipelines and Interfaces
.. image:: https://img.shields.io/badge/gitter-join%20chat%20%E2%86%92-brightgreen.svg?style=flat
:target: http://gitter.im/nipy/nipype
:alt: Chat
-
+
.. image:: https://zenodo.org/badge/DOI/10.5281/zenodo.581704.svg
:target: https://doi.org/10.5281/zenodo.581704
diff --git a/nipype/interfaces/mipav/developer.py b/nipype/interfaces/mipav/developer.py
index ac42f7c5a..141a7de1c 100644
--- a/nipype/interfaces/mipav/developer.py
+++ b/nipype/interfaces/mipav/developer.py
@@ -722,10 +722,10 @@ class JistIntensityMp2rageMaskingInputSpec(CommandLineInputSpec):
inSkip = traits.Enum("true", "false", desc="Skip zero values", argstr="--inSkip %s")
inMasking = traits.Enum("binary", "proba", desc="Whether to use a binary threshold or a weighted average based on the probability.", argstr="--inMasking %s")
xPrefExt = traits.Enum("nrrd", desc="Output File Type", argstr="--xPrefExt %s")
- outSignal = traits.Either(traits.Bool, File(), hash_files=False, desc="Signal Proba Image", argstr="--outSignal %s")
- outSignal2 = traits.Either(traits.Bool, File(), hash_files=False, desc="Signal Mask Image", argstr="--outSignal2 %s")
- outMasked = traits.Either(traits.Bool, File(), hash_files=False, desc="Masked T1 Map Image", argstr="--outMasked %s")
- outMasked2 = traits.Either(traits.Bool, File(), hash_files=False, desc="Masked Iso Image", argstr="--outMasked2 %s")
+ outSignal = traits.Either(traits.Bool, File(), hash_files=False, desc="Signal Proba Image", argstr="--outSignal_Proba %s")
+ outSignal2 = traits.Either(traits.Bool, File(), hash_files=False, desc="Signal Mask Image", argstr="--outSignal_Mask %s")
+ outMasked = traits.Either(traits.Bool, File(), hash_files=False, desc="Masked T1 Map Image", argstr="--outMasked_T1_Map %s")
+ outMasked2 = traits.Either(traits.Bool, File(), hash_files=False, desc="Masked Iso Image", argstr="--outMasked_T1weighted %s")
null = traits.Str(desc="Execution Time", argstr="--null %s")
xDefaultMem = traits.Int(desc="Set default maximum heap size", argstr="-xDefaultMem %d")
xMaxProcess = traits.Int(1, desc="Set default maximum number of processes.", argstr="-xMaxProcess %d", usedefault=True)
| JistIntensityMp2rageMasking interface incompatible with current CBS Tools release
For the command underlying the interface mipav.JistIntensityMp2rageMasking the parameter names have changed in the current CBS Tools release, which makes the interface fail. I meant to do a PR but then saw the mipav/developer.py file is autogenerated and should not be be edited.
These would be the required changes:
https://github.com/nipy/nipype/compare/master...juhuntenburg:fix/cbstools_params
| nipy/nipype | diff --git a/nipype/interfaces/mipav/tests/test_auto_JistIntensityMp2rageMasking.py b/nipype/interfaces/mipav/tests/test_auto_JistIntensityMp2rageMasking.py
index 95700af1b..0fd3ed52e 100644
--- a/nipype/interfaces/mipav/tests/test_auto_JistIntensityMp2rageMasking.py
+++ b/nipype/interfaces/mipav/tests/test_auto_JistIntensityMp2rageMasking.py
@@ -26,16 +26,16 @@ def test_JistIntensityMp2rageMasking_inputs():
),
null=dict(argstr='--null %s',
),
- outMasked=dict(argstr='--outMasked %s',
+ outMasked=dict(argstr='--outMasked_T1_Map %s',
hash_files=False,
),
- outMasked2=dict(argstr='--outMasked2 %s',
+ outMasked2=dict(argstr='--outMasked_T1weighted %s',
hash_files=False,
),
- outSignal=dict(argstr='--outSignal %s',
+ outSignal=dict(argstr='--outSignal_Proba %s',
hash_files=False,
),
- outSignal2=dict(argstr='--outSignal2 %s',
+ outSignal2=dict(argstr='--outSignal_Mask %s',
hash_files=False,
),
terminal_output=dict(nohash=True,
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 1,
"test_score": 3
},
"num_modified_files": 2
} | 0.13 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.6",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
click==8.0.4
configparser==5.2.0
decorator==4.4.2
funcsigs==1.0.2
future==1.0.0
importlib-metadata==4.8.3
iniconfig==1.1.1
isodate==0.6.1
lxml==5.3.1
mock==5.2.0
networkx==2.5.1
nibabel==3.2.2
-e git+https://github.com/nipy/nipype.git@a63c52d97df65d316a5c97a40dd9c7e5a63d237c#egg=nipype
numpy==1.19.5
packaging==21.3
pluggy==1.0.0
prov==2.0.1
py==1.11.0
pydotplus==2.0.2
pyparsing==3.1.4
pytest==7.0.1
python-dateutil==2.9.0.post0
rdflib==5.0.0
scipy==1.5.4
simplejson==3.20.1
six==1.17.0
tomli==1.2.3
traits==6.4.1
typing_extensions==4.1.1
zipp==3.6.0
| name: nipype
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- click==8.0.4
- configparser==5.2.0
- decorator==4.4.2
- funcsigs==1.0.2
- future==1.0.0
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- isodate==0.6.1
- lxml==5.3.1
- mock==5.2.0
- networkx==2.5.1
- nibabel==3.2.2
- numpy==1.19.5
- packaging==21.3
- pluggy==1.0.0
- prov==2.0.1
- py==1.11.0
- pydotplus==2.0.2
- pyparsing==3.1.4
- pytest==7.0.1
- python-dateutil==2.9.0.post0
- rdflib==5.0.0
- scipy==1.5.4
- simplejson==3.20.1
- six==1.17.0
- tomli==1.2.3
- traits==6.4.1
- typing-extensions==4.1.1
- zipp==3.6.0
prefix: /opt/conda/envs/nipype
| [
"nipype/interfaces/mipav/tests/test_auto_JistIntensityMp2rageMasking.py::test_JistIntensityMp2rageMasking_inputs"
]
| []
| [
"nipype/interfaces/mipav/tests/test_auto_JistIntensityMp2rageMasking.py::test_JistIntensityMp2rageMasking_outputs"
]
| []
| Apache License 2.0 | 1,332 | [
"README.rst",
"nipype/interfaces/mipav/developer.py"
]
| [
"README.rst",
"nipype/interfaces/mipav/developer.py"
]
|
|
borgbackup__borg-2617 | fb26b3c7286cbeb9687e3a062b868a4a413edf39 | 2017-06-05 22:42:37 | a439fa3e720c8bb2a82496768ffcce282fb7f7b7 | codecov-io: # [Codecov](https://codecov.io/gh/borgbackup/borg/pull/2617?src=pr&el=h1) Report
> Merging [#2617](https://codecov.io/gh/borgbackup/borg/pull/2617?src=pr&el=desc) into [master](https://codecov.io/gh/borgbackup/borg/commit/88669611b184b17fb6b72840e32d7a0cc97ce37b?src=pr&el=desc) will **decrease** coverage by `0.64%`.
> The diff coverage is `100%`.
[](https://codecov.io/gh/borgbackup/borg/pull/2617?src=pr&el=tree)
```diff
@@ Coverage Diff @@
## master #2617 +/- ##
==========================================
- Coverage 83.62% 82.98% -0.65%
==========================================
Files 22 22
Lines 8225 8227 +2
Branches 1395 1396 +1
==========================================
- Hits 6878 6827 -51
- Misses 962 1017 +55
+ Partials 385 383 -2
```
| [Impacted Files](https://codecov.io/gh/borgbackup/borg/pull/2617?src=pr&el=tree) | Coverage Δ | |
|---|---|---|
| [src/borg/version.py](https://codecov.io/gh/borgbackup/borg/pull/2617?src=pr&el=tree#diff-c3JjL2JvcmcvdmVyc2lvbi5weQ==) | `57.69% <100%> (+3.52%)` | :arrow_up: |
| [src/borg/xattr.py](https://codecov.io/gh/borgbackup/borg/pull/2617?src=pr&el=tree#diff-c3JjL2JvcmcveGF0dHIucHk=) | `56% <0%> (-23.5%)` | :arrow_down: |
| [src/borg/platform/\_\_init\_\_.py](https://codecov.io/gh/borgbackup/borg/pull/2617?src=pr&el=tree#diff-c3JjL2JvcmcvcGxhdGZvcm0vX19pbml0X18ucHk=) | `73.68% <0%> (-15.79%)` | :arrow_down: |
| [src/borg/platform/base.py](https://codecov.io/gh/borgbackup/borg/pull/2617?src=pr&el=tree#diff-c3JjL2JvcmcvcGxhdGZvcm0vYmFzZS5weQ==) | `72.61% <0%> (-10.72%)` | :arrow_down: |
| [src/borg/helpers.py](https://codecov.io/gh/borgbackup/borg/pull/2617?src=pr&el=tree#diff-c3JjL2JvcmcvaGVscGVycy5weQ==) | `87.11% <0%> (-0.18%)` | :arrow_down: |
| [src/borg/remote.py](https://codecov.io/gh/borgbackup/borg/pull/2617?src=pr&el=tree#diff-c3JjL2JvcmcvcmVtb3RlLnB5) | `76.73% <0%> (+0.19%)` | :arrow_up: |
| [src/borg/archive.py](https://codecov.io/gh/borgbackup/borg/pull/2617?src=pr&el=tree#diff-c3JjL2JvcmcvYXJjaGl2ZS5weQ==) | `81.72% <0%> (+0.55%)` | :arrow_up: |
------
[Continue to review full report at Codecov](https://codecov.io/gh/borgbackup/borg/pull/2617?src=pr&el=continue).
> **Legend** - [Click here to learn more](https://docs.codecov.io/docs/codecov-delta)
> `Δ = absolute <relative> (impact)`, `ø = not affected`, `? = missing data`
> Powered by [Codecov](https://codecov.io/gh/borgbackup/borg/pull/2617?src=pr&el=footer). Last update [8866961...7838fb1](https://codecov.io/gh/borgbackup/borg/pull/2617?src=pr&el=lastupdated). Read the [comment docs](https://docs.codecov.io/docs/pull-request-comments).
ThomasWaldmann: @RonnyPfannschmidt ok, parametrize is there. not sure what you wanted with the dict?
textshell: @RonnyPfannschmidt is there any upstream documentation on the various formats used in different setup tools versions? It seems that would be very useful to have a complete parser without surprises (not just for borg)
ThomasWaldmann: @textshell and if it is `1.1.0+dYYYMMMDD` now, was it `1.1.0-dYYYMMMDD` before setuptools 8.0?
textshell: I have no clue. Maybe it can't even generate a version like this. I tried looking in the implementation but really don't grok what is going on.
I guess we can take wild guesses, just chop off everything starting with the first `+` or try to get some more information.
I think i originally tried just going with choping off everything starting with `+`. Maybe just add that back to the code that new also understands other but disjunkt special cases?
Or maybe split on `.`, `+` and `-` and discard from the first item that does not match `([0-9]+)(a|b|rc)?([0-9]+)`?
enkore: Why are we using this automation? It does not seem to save time or troubles.
textshell: i don't do any borg release work. But i would not mind if borg would just have the version somewhere in a file tracked in git.
(Besides, looking at a random git commit in github and having some way to see what version span that commit came from is harder if the version is nowhere in the files, borg of course has a changelog that usually works to find the version)
One nice thing about a scm based version is that in theory it allows to see where something that is not a real version is roughly on the development time line. i.e. for debug output.
ThomasWaldmann: i like that the version is derived from git. that way, it is immediately obvious which version people run (not just roughly as the case if we would update manually now and then).
ThomasWaldmann: I switched to a different approach:
use a regex to start parsing from the left, only parse as long as it matches stuff we actually want.
and do not support x.y version numbers any more, we do not use them anyway and it is much simpler that way.
textshell: I wonder if we want to terminate the regex with something that constrains the following chars. currently a version like 1.1.0abc would parse as 1.1.0. But greediness currently ensures that all digits are picked up. So i'm not sure if something like `$|[^a-zA-Z0-9] would be of any practical use at the end of the regex.
Explicitly restricting the parser to major.minor.patch seems like a good simplification. Much less guessing. We just need to make sure not to have versions that don't fit without adjusting that code. Preferably by not having version that do not fit into that form.
ThomasWaldmann: well, practically we only have to parse what setuptools_scm would generate (or what we would put into some constant in the src).
textshell: indeed. We only need to prove against anything setuptools_scm did produce or might produce in the future.
I think this should work in the current form. | diff --git a/docs/changes.rst b/docs/changes.rst
index 51170348..6e3394cd 100644
--- a/docs/changes.rst
+++ b/docs/changes.rst
@@ -172,7 +172,6 @@ New features:
- add --debug-profile option (and also "borg debug convert-profile"), #2473
Fixes:
-
- hashindex: read/write indices >2 GiB on 32bit systems, better error
reporting, #2496
- repository URLs: implement IPv6 address support and also more informative
diff --git a/docs/faq.rst b/docs/faq.rst
index 71cdc014..cb9c3a2a 100644
--- a/docs/faq.rst
+++ b/docs/faq.rst
@@ -266,7 +266,6 @@ See :ref:`encrypted_repos` for more details.
.. _password_env:
.. note:: Be careful how you set the environment; using the ``env``
command, a ``system()`` call or using inline shell scripts
- (e.g. ``BORG_PASSPHRASE=hunter12 borg ...``)
might expose the credentials in the process list directly
and they will be readable to all users on a system. Using
``export`` in a shell script file should be safe, however, as
diff --git a/docs/quickstart.rst b/docs/quickstart.rst
index 43d62fc9..ffc42e35 100644
--- a/docs/quickstart.rst
+++ b/docs/quickstart.rst
@@ -169,8 +169,8 @@ may be surprised that the following ``export`` has no effect on your command::
export BORG_PASSPHRASE='complicated & long'
sudo ./yourborgwrapper.sh # still prompts for password
-For more information, refer to the sudo(8) man page and ``env_keep`` in
-the sudoers(5) man page.
+For more information, see sudo(8) man page. Hint: see ``env_keep`` in
+sudoers(5), or try ``sudo BORG_PASSPHRASE='yourphrase' borg`` syntax.
.. Tip::
To debug what your borg process is actually seeing, find its PID
diff --git a/src/borg/helpers.py b/src/borg/helpers.py
index 1e79f63a..db66b822 100644
--- a/src/borg/helpers.py
+++ b/src/borg/helpers.py
@@ -1995,23 +1995,6 @@ def secure_erase(path):
os.unlink(path)
-def truncate_and_unlink(path):
- """
- Truncate and then unlink *path*.
-
- Do not create *path* if it does not exist.
- Open *path* for truncation in r+b mode (=O_RDWR|O_BINARY).
-
- Use this when deleting potentially large files when recovering
- from a VFS error such as ENOSPC. It can help a full file system
- recover. Refer to the "File system interaction" section
- in repository.py for further explanations.
- """
- with open(path, 'r+b') as fd:
- fd.truncate()
- os.unlink(path)
-
-
def popen_with_error_handling(cmd_line: str, log_prefix='', **kwargs):
"""
Handle typical errors raised by subprocess.Popen. Return None if an error occurred,
diff --git a/src/borg/platform/base.py b/src/borg/platform/base.py
index be4b694e..0d2fb51b 100644
--- a/src/borg/platform/base.py
+++ b/src/borg/platform/base.py
@@ -1,8 +1,6 @@
import errno
import os
-from borg.helpers import truncate_and_unlink
-
"""
platform base module
====================
@@ -159,7 +157,7 @@ def __init__(self, path, binary=False):
def __enter__(self):
from .. import platform
try:
- truncate_and_unlink(self.tmppath)
+ os.unlink(self.tmppath)
except FileNotFoundError:
pass
self.fd = platform.SyncFile(self.tmppath, self.binary)
@@ -169,7 +167,7 @@ def __exit__(self, exc_type, exc_val, exc_tb):
from .. import platform
self.fd.close()
if exc_type is not None:
- truncate_and_unlink(self.tmppath)
+ os.unlink(self.tmppath)
return
os.replace(self.tmppath, self.path)
platform.sync_dir(os.path.dirname(self.path))
diff --git a/src/borg/repository.py b/src/borg/repository.py
index a5806373..597d3ca5 100644
--- a/src/borg/repository.py
+++ b/src/borg/repository.py
@@ -18,7 +18,7 @@
from .helpers import ProgressIndicatorPercent
from .helpers import bin_to_hex
from .helpers import hostname_is_unique
-from .helpers import secure_erase, truncate_and_unlink
+from .helpers import secure_erase
from .locking import Lock, LockError, LockErrorT
from .logger import create_logger
from .lrucache import LRUCache
@@ -83,30 +83,6 @@ class Repository:
dir/data/<X // SEGMENTS_PER_DIR>/<X>
dir/index.X
dir/hints.X
-
- File system interaction
- -----------------------
-
- LoggedIO generally tries to rely on common behaviours across transactional file systems.
-
- Segments that are deleted are truncated first, which avoids problems if the FS needs to
- allocate space to delete the dirent of the segment. This mostly affects CoW file systems,
- traditional journaling file systems have a fairly good grip on this problem.
-
- Note that deletion, i.e. unlink(2), is atomic on every file system that uses inode reference
- counts, which includes pretty much all of them. To remove a dirent the inodes refcount has
- to be decreased, but you can't decrease the refcount before removing the dirent nor can you
- decrease the refcount after removing the dirent. File systems solve this with a lock,
- and by ensuring it all stays within the same FS transaction.
-
- Truncation is generally not atomic in itself, and combining truncate(2) and unlink(2) is of
- course never guaranteed to be atomic. Truncation in a classic extent-based FS is done in
- roughly two phases, first the extents are removed then the inode is updated. (In practice
- this is of course way more complex).
-
- LoggedIO gracefully handles truncate/unlink splits as long as the truncate resulted in
- a zero length file. Zero length segments are considered to not exist, while LoggedIO.cleanup()
- will still get rid of them.
"""
class DoesNotExist(Error):
@@ -1135,8 +1111,6 @@ def segment_iterator(self, segment=None, reverse=False):
filenames = [filename for filename in filenames if filename.isdigit() and int(filename) <= segment]
filenames = sorted(filenames, key=int, reverse=reverse)
for filename in filenames:
- # Note: Do not filter out logically deleted segments (see "File system interaction" above),
- # since this is used by cleanup and txn state detection as well.
yield int(filename), os.path.join(data_path, dir, filename)
def get_latest_segment(self):
@@ -1158,7 +1132,7 @@ def cleanup(self, transaction_id):
self.segment = transaction_id + 1
for segment, filename in self.segment_iterator(reverse=True):
if segment > transaction_id:
- truncate_and_unlink(filename)
+ os.unlink(filename)
else:
break
@@ -1233,15 +1207,12 @@ def delete_segment(self, segment):
if segment in self.fds:
del self.fds[segment]
try:
- truncate_and_unlink(self.segment_filename(segment))
+ os.unlink(self.segment_filename(segment))
except FileNotFoundError:
pass
def segment_exists(self, segment):
- filename = self.segment_filename(segment)
- # When deleting segments, they are first truncated. If truncate(2) and unlink(2) are split
- # across FS transactions, then logically deleted segments will show up as truncated.
- return os.path.exists(filename) and os.path.getsize(filename)
+ return os.path.exists(self.segment_filename(segment))
def segment_size(self, segment):
return os.path.getsize(self.segment_filename(segment))
@@ -1287,7 +1258,6 @@ def recover_segment(self, segment, filename):
if segment in self.fds:
del self.fds[segment]
with open(filename, 'rb') as fd:
- # XXX: Rather use mmap, this loads the entire segment (up to 500 MB by default) into memory.
data = memoryview(fd.read())
os.rename(filename, filename + '.beforerecover')
logger.info('attempting to recover ' + filename)
diff --git a/src/borg/version.py b/src/borg/version.py
index 4eb0c77d..7e2e95b7 100644
--- a/src/borg/version.py
+++ b/src/borg/version.py
@@ -3,33 +3,33 @@
def parse_version(version):
"""
- simplistic parser for setuptools_scm versions
+ Simplistic parser for setuptools_scm versions.
- supports final versions and alpha ('a'), beta ('b') and rc versions. It just discards commits since last tag
- and git revision hash.
+ Supports final versions and alpha ('a'), beta ('b') and release candidate ('rc') versions.
+ It does not try to parse anything else than that, even if there is more in the version string.
Output is a version tuple containing integers. It ends with one or two elements that ensure that relational
- operators yield correct relations for alpha, beta and rc versions too. For final versions the last element
- is a -1, for prerelease versions the last two elements are a smaller negative number and the number of e.g.
- the beta.
-
- Note, this sorts version 1.0 before 1.0.0.
+ operators yield correct relations for alpha, beta and rc versions, too.
+ For final versions the last element is a -1.
+ For prerelease versions the last two elements are a smaller negative number and the number of e.g. the beta.
This version format is part of the remote protocol, don‘t change in breaking ways.
"""
-
- parts = version.split('+')[0].split('.')
- if parts[-1].startswith('dev'):
- del parts[-1]
- version = [int(segment) for segment in parts[:-1]]
-
- prerelease = re.fullmatch('([0-9]+)(a|b|rc)([0-9]+)', parts[-1])
- if prerelease:
- version_type = {'a': -4, 'b': -3, 'rc': -2}[prerelease.group(2)]
- version += [int(prerelease.group(1)), version_type, int(prerelease.group(3))]
+ version_re = r"""
+ (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+) # version, e.g. 1.2.33
+ (?P<prerelease>(?P<ptype>a|b|rc)(?P<pnum>\d+))? # optional prerelease, e.g. a1 or b2 or rc33
+ """
+ m = re.match(version_re, version, re.VERBOSE)
+ if m is None:
+ raise ValueError('Invalid version string %s' % version)
+ gd = m.groupdict()
+ version = [int(gd['major']), int(gd['minor']), int(gd['patch'])]
+ if m.lastgroup == 'prerelease':
+ p_type = {'a': -4, 'b': -3, 'rc': -2}[gd['ptype']]
+ p_num = int(gd['pnum'])
+ version += [p_type, p_num]
else:
- version += [int(parts[-1]), -1]
-
+ version += [-1]
return tuple(version)
| version parsing code blows up with a dirty workdir | borgbackup/borg | diff --git a/src/borg/testsuite/version.py b/src/borg/testsuite/version.py
new file mode 100644
index 00000000..b5f32e6e
--- /dev/null
+++ b/src/borg/testsuite/version.py
@@ -0,0 +1,38 @@
+import pytest
+
+from ..version import parse_version
+
+
[email protected]("version_str, version_tuple", [
+ # setuptools < 8.0 uses "-"
+ ('1.0.0a1.dev204-g8866961.d20170606', (1, 0, 0, -4, 1)),
+ ('1.0.0a1.dev204-g8866961', (1, 0, 0, -4, 1)),
+ ('1.0.0-d20170606', (1, 0, 0, -1)),
+ # setuptools >= 8.0 uses "+"
+ ('1.0.0a1.dev204+g8866961.d20170606', (1, 0, 0, -4, 1)),
+ ('1.0.0a1.dev204+g8866961', (1, 0, 0, -4, 1)),
+ ('1.0.0+d20170606', (1, 0, 0, -1)),
+ # pre-release versions:
+ ('1.0.0a1', (1, 0, 0, -4, 1)),
+ ('1.0.0a2', (1, 0, 0, -4, 2)),
+ ('1.0.0b3', (1, 0, 0, -3, 3)),
+ ('1.0.0rc4', (1, 0, 0, -2, 4)),
+ # release versions:
+ ('0.0.0', (0, 0, 0, -1)),
+ ('0.0.11', (0, 0, 11, -1)),
+ ('0.11.0', (0, 11, 0, -1)),
+ ('11.0.0', (11, 0, 0, -1)),
+])
+def test_parse_version(version_str, version_tuple):
+ assert parse_version(version_str) == version_tuple
+
+
+def test_parse_version_invalid():
+ with pytest.raises(ValueError):
+ assert parse_version('') # we require x.y.z versions
+ with pytest.raises(ValueError):
+ assert parse_version('1') # we require x.y.z versions
+ with pytest.raises(ValueError):
+ assert parse_version('1.2') # we require x.y.z versions
+ with pytest.raises(ValueError):
+ assert parse_version('crap')
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 1
},
"num_modified_files": 7
} | 1.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest pytest-xdist pytest-cov pytest-benchmark"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc libssl-dev libacl1-dev liblz4-dev libfuse-dev"
],
"python": "3.9",
"reqs_path": [
"requirements.d/development.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | -e git+https://github.com/borgbackup/borg.git@fb26b3c7286cbeb9687e3a062b868a4a413edf39#egg=borgbackup
cachetools==5.5.2
chardet==5.2.0
colorama==0.4.6
coverage==7.8.0
Cython==3.0.12
distlib==0.3.9
exceptiongroup==1.2.2
execnet==2.1.1
filelock==3.18.0
iniconfig==2.1.0
msgpack-python==0.5.6
packaging==24.2
platformdirs==4.3.7
pluggy==1.5.0
py-cpuinfo==9.0.0
pyproject-api==1.9.0
pytest==8.3.5
pytest-benchmark==5.1.0
pytest-cov==6.0.0
pytest-xdist==3.6.1
setuptools-scm==8.2.0
tomli==2.2.1
tox==4.25.0
typing_extensions==4.13.0
virtualenv==20.29.3
| name: borg
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- cachetools==5.5.2
- chardet==5.2.0
- colorama==0.4.6
- coverage==7.8.0
- cython==3.0.12
- distlib==0.3.9
- exceptiongroup==1.2.2
- execnet==2.1.1
- filelock==3.18.0
- iniconfig==2.1.0
- msgpack-python==0.5.6
- packaging==24.2
- platformdirs==4.3.7
- pluggy==1.5.0
- py-cpuinfo==9.0.0
- pyproject-api==1.9.0
- pytest==8.3.5
- pytest-benchmark==5.1.0
- pytest-cov==6.0.0
- pytest-xdist==3.6.1
- setuptools-scm==8.2.0
- tomli==2.2.1
- tox==4.25.0
- typing-extensions==4.13.0
- virtualenv==20.29.3
prefix: /opt/conda/envs/borg
| [
"src/borg/testsuite/version.py::test_parse_version[1.0.0a1.dev204-g8866961.d20170606-version_tuple0]",
"src/borg/testsuite/version.py::test_parse_version[1.0.0-d20170606-version_tuple2]",
"src/borg/testsuite/version.py::test_parse_version_invalid"
]
| []
| [
"src/borg/testsuite/version.py::test_parse_version[1.0.0a1.dev204-g8866961-version_tuple1]",
"src/borg/testsuite/version.py::test_parse_version[1.0.0a1.dev204+g8866961.d20170606-version_tuple3]",
"src/borg/testsuite/version.py::test_parse_version[1.0.0a1.dev204+g8866961-version_tuple4]",
"src/borg/testsuite/version.py::test_parse_version[1.0.0+d20170606-version_tuple5]",
"src/borg/testsuite/version.py::test_parse_version[1.0.0a1-version_tuple6]",
"src/borg/testsuite/version.py::test_parse_version[1.0.0a2-version_tuple7]",
"src/borg/testsuite/version.py::test_parse_version[1.0.0b3-version_tuple8]",
"src/borg/testsuite/version.py::test_parse_version[1.0.0rc4-version_tuple9]",
"src/borg/testsuite/version.py::test_parse_version[0.0.0-version_tuple10]",
"src/borg/testsuite/version.py::test_parse_version[0.0.11-version_tuple11]",
"src/borg/testsuite/version.py::test_parse_version[0.11.0-version_tuple12]",
"src/borg/testsuite/version.py::test_parse_version[11.0.0-version_tuple13]"
]
| []
| BSD License | 1,333 | [
"docs/quickstart.rst",
"src/borg/helpers.py",
"docs/faq.rst",
"docs/changes.rst",
"src/borg/repository.py",
"src/borg/platform/base.py",
"src/borg/version.py"
]
| [
"docs/quickstart.rst",
"src/borg/helpers.py",
"docs/faq.rst",
"docs/changes.rst",
"src/borg/repository.py",
"src/borg/platform/base.py",
"src/borg/version.py"
]
|
tornadoweb__tornado-2075 | 62e47215ce12aee83f951758c96775a43e80475b | 2017-06-06 08:44:56 | 03f13800e854a6fc9e6efa2168e694d9599348bd | diff --git a/tornado/netutil.py b/tornado/netutil.py
index 31bbe803..e58db35d 100644
--- a/tornado/netutil.py
+++ b/tornado/netutil.py
@@ -241,9 +241,18 @@ def add_accept_handler(sock, callback):
is different from the ``callback(fd, events)`` signature used for
`.IOLoop` handlers.
+ A callable is returned which, when called, will remove the `.IOLoop`
+ event handler and stop processing further incoming connections.
+
.. versionchanged:: 5.0
The ``io_loop`` argument (deprecated since version 4.1) has been removed.
+
+ .. versionchanged:: 5.0
+ A callable is returned (``None`` was returned before).
"""
+ io_loop = IOLoop.current()
+ removed = [False]
+
def accept_handler(fd, events):
# More connections may come in while we're handling callbacks;
# to prevent starvation of other tasks we must limit the number
@@ -257,6 +266,9 @@ def add_accept_handler(sock, callback):
# heuristic for the number of connections we can reasonably
# accept at once.
for i in xrange(_DEFAULT_BACKLOG):
+ if removed[0]:
+ # The socket was probably closed
+ return
try:
connection, address = sock.accept()
except socket.error as e:
@@ -272,7 +284,13 @@ def add_accept_handler(sock, callback):
raise
set_close_exec(connection.fileno())
callback(connection, address)
- IOLoop.current().add_handler(sock, accept_handler, IOLoop.READ)
+
+ def remove_handler():
+ io_loop.remove_handler(sock)
+ removed[0] = True
+
+ io_loop.add_handler(sock, accept_handler, IOLoop.READ)
+ return remove_handler
def is_valid_ip(ip):
diff --git a/tornado/tcpserver.py b/tornado/tcpserver.py
index b3cdaa2c..fea215f5 100644
--- a/tornado/tcpserver.py
+++ b/tornado/tcpserver.py
@@ -110,7 +110,8 @@ class TCPServer(object):
read_chunk_size=None):
self.io_loop = IOLoop.current()
self.ssl_options = ssl_options
- self._sockets = {} # fd -> socket object
+ self._sockets = {} # fd -> socket object
+ self._handlers = {} # fd -> remove_handler callable
self._pending_sockets = []
self._started = False
self._stopped = False
@@ -156,7 +157,8 @@ class TCPServer(object):
"""
for sock in sockets:
self._sockets[sock.fileno()] = sock
- add_accept_handler(sock, self._handle_connection)
+ self._handlers[sock.fileno()] = add_accept_handler(
+ sock, self._handle_connection)
def add_socket(self, socket):
"""Singular version of `add_sockets`. Takes a single socket object."""
@@ -233,7 +235,8 @@ class TCPServer(object):
self._stopped = True
for fd, sock in self._sockets.items():
assert sock.fileno() == fd
- self.io_loop.remove_handler(fd)
+ # Unregister socket from IOLoop
+ self._handlers.pop(fd)()
sock.close()
def handle_stream(self, stream, address):
| EBADF in accept() handler if TCPServer.stop() was called in the meantime
We've had sporadic issues where a EBADF error when calling accept() was being logged. See example here:
https://travis-ci.org/dask/distributed/jobs/238031120#L1596-L1607
After some debugging, I've come to the conclusion that this happens whenever a IO handler calls TCPServer.stop(), while another handler is pending for a connection request on that TCPServer's listening socket(s). TCPServer.stop() closes the listening socket immediately and, when the queued accept handler is called, it finds a closed socket fd. | tornadoweb/tornado | diff --git a/tornado/test/tcpserver_test.py b/tornado/test/tcpserver_test.py
index 9afb5420..2417992c 100644
--- a/tornado/test/tcpserver_test.py
+++ b/tornado/test/tcpserver_test.py
@@ -68,3 +68,48 @@ class TCPServerTest(AsyncTestCase):
server.add_socket(sock)
server.stop()
server.stop()
+
+ @gen_test
+ def test_stop_in_callback(self):
+ # Issue #2069: calling server.stop() in a loop callback should not
+ # raise EBADF when the loop handles other server connection
+ # requests in the same loop iteration
+
+ class TestServer(TCPServer):
+ @gen.coroutine
+ def handle_stream(self, stream, address):
+ server.stop()
+ yield stream.read_until_close()
+
+ sock, port = bind_unused_port()
+ server = TestServer()
+ server.add_socket(sock)
+ server_addr = ('localhost', port)
+ N = 40
+ clients = [IOStream(socket.socket()) for i in range(N)]
+ connected_clients = []
+
+ @gen.coroutine
+ def connect(c):
+ try:
+ yield c.connect(server_addr)
+ except EnvironmentError:
+ pass
+ else:
+ connected_clients.append(c)
+
+ yield [connect(c) for c in clients]
+
+ self.assertGreater(len(connected_clients), 0,
+ "all clients failed connecting")
+ try:
+ if len(connected_clients) == N:
+ # Ideally we'd make the test deterministic, but we're testing
+ # for a race condition in combination with the system's TCP stack...
+ self.skipTest("at least one client should fail connecting "
+ "for the test to be meaningful")
+ finally:
+ for c in connected_clients:
+ c.close()
+
+ # Here tearDown() would re-raise the EBADF encountered in the IO loop
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 1,
"test_score": 1
},
"num_modified_files": 2
} | 4.5 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pycurl>=7.19.3.1",
"twisted",
"pycares",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.6",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
Automat==22.10.0
certifi==2021.5.30
cffi==1.15.1
constantly==15.1.0
hyperlink==21.0.0
idna==3.10
importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work
incremental==22.10.0
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pycares==4.3.0
pycparser==2.21
pycurl==7.45.6
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
pytest==6.2.4
six==1.17.0
toml @ file:///tmp/build/80754af9/toml_1616166611790/work
-e git+https://github.com/tornadoweb/tornado.git@62e47215ce12aee83f951758c96775a43e80475b#egg=tornado
Twisted==22.4.0
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
zope.interface==5.5.2
| name: tornado
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=21.4.0=pyhd3eb1b0_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- importlib-metadata=4.8.1=py36h06a4308_0
- importlib_metadata=4.8.1=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- more-itertools=8.12.0=pyhd3eb1b0_0
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pip=21.2.2=py36h06a4308_0
- pluggy=0.13.1=py36h06a4308_0
- py=1.11.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pytest=6.2.4=py36h06a4308_2
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- toml=0.10.2=pyhd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zipp=3.6.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
- pip:
- automat==22.10.0
- cffi==1.15.1
- constantly==15.1.0
- hyperlink==21.0.0
- idna==3.10
- incremental==22.10.0
- pycares==4.3.0
- pycparser==2.21
- pycurl==7.45.6
- six==1.17.0
- twisted==22.4.0
- zope-interface==5.5.2
prefix: /opt/conda/envs/tornado
| [
"tornado/test/tcpserver_test.py::TCPServerTest::test_stop_in_callback"
]
| []
| [
"tornado/test/tcpserver_test.py::TCPServerTest::test_handle_stream_coroutine_logging",
"tornado/test/tcpserver_test.py::TCPServerTest::test_handle_stream_native_coroutine",
"tornado/test/tcpserver_test.py::TCPServerTest::test_stop_twice"
]
| []
| Apache License 2.0 | 1,334 | [
"tornado/netutil.py",
"tornado/tcpserver.py"
]
| [
"tornado/netutil.py",
"tornado/tcpserver.py"
]
|
|
jboss-dockerfiles__dogen-134 | a92bdcc194b4ca56c6691fbec7fe8e7b8cd19017 | 2017-06-06 09:03:05 | bc9263c5b683fdf901ac1646286ee3908b85dcdc | diff --git a/dogen/cli.py b/dogen/cli.py
index 2e8930d..6540695 100644
--- a/dogen/cli.py
+++ b/dogen/cli.py
@@ -55,7 +55,6 @@ class CLI(object):
parser.add_argument('--without-sources', '--ws', action='store_true', help='Do not process sources, only generate Dockerfile')
parser.add_argument('--skip-ssl-verification', action='store_true', help='Should we skip SSL verification when retrieving data?')
parser.add_argument('--scripts-path', help='Location of the scripts directory containing script packages.')
- parser.add_argument('--additional-script', action='append', help='Location of additional script (can be url). Can be specified multiple times.')
parser.add_argument('--template', help='Path to custom template (can be url)')
parser.add_argument('path', help="Path to yaml descriptor to process")
diff --git a/dogen/generator.py b/dogen/generator.py
index ba48876..52fa501 100644
--- a/dogen/generator.py
+++ b/dogen/generator.py
@@ -28,7 +28,6 @@ class Generator(object):
self.dockerfile = os.path.join(self.output, "Dockerfile")
self.template = args.template
self.scripts_path = args.scripts_path
- self.additional_scripts = args.additional_script
ssl_verify = None
if args.skip_ssl_verification:
@@ -133,11 +132,6 @@ class Generator(object):
if scripts and not self.scripts_path:
self.scripts_path = scripts
- additional_scripts = dogen_cfg.get('additional_scripts')
-
- if additional_scripts and not self.additional_scripts:
- self.additional_scripts = additional_scripts
-
if self.scripts_path and not os.path.exists(self.scripts_path):
raise Error("Provided scripts directory '%s' does not exist" % self.scripts_path)
@@ -164,24 +158,6 @@ class Generator(object):
shutil.copytree(src=src_path, dst=output_path)
self.log.debug("Done.")
- def _handle_additional_scripts(self):
- self.log.info("Additional scripts provided, installing them...")
- output_scripts = os.path.join(self.output, "scripts")
-
- if not os.path.exists(output_scripts):
- os.makedirs(output_scripts)
-
- for f in self.additional_scripts:
- self.log.debug("Handling '%s' file..." % f)
- if Tools.is_url(f):
- self._fetch_file(f, os.path.join(output_scripts, os.path.basename(f)))
- else:
- if not (os.path.exists(f) and os.path.isfile(f)):
- raise Error("File '%s' does not exist. Please make sure you specified correct path to a file when specifying additional scripts." % f)
-
- self.log.debug("Copying '%s' file to target scripts directory..." % f)
- shutil.copy(f, output_scripts)
-
def _validate_cfg(self):
"""
Open and parse the YAML configuration file and ensure it matches
@@ -241,10 +217,6 @@ class Generator(object):
else:
self.log.warn("No scripts will be copied, mistake?")
- # Additional scripts (not package scripts)
- if self.additional_scripts:
- self._handle_additional_scripts()
-
self.handle_sources()
self.render_from_template()
diff --git a/dogen/schema/kwalify_schema.yaml b/dogen/schema/kwalify_schema.yaml
index 50ba8a0..a54aa67 100644
--- a/dogen/schema/kwalify_schema.yaml
+++ b/dogen/schema/kwalify_schema.yaml
@@ -54,9 +54,6 @@ map:
ssl_verify: {type: bool}
template: {type: str}
scripts_path: {type: str}
- additional_scripts:
- seq:
- - {type: str}
scripts:
seq:
- map:
| Remove '--additional-script' argument
There is no use case for this option anymore. | jboss-dockerfiles/dogen | diff --git a/tests/test_unit_generate_configuration.py b/tests/test_unit_generate_configuration.py
index 4572a7d..531ee8e 100644
--- a/tests/test_unit_generate_configuration.py
+++ b/tests/test_unit_generate_configuration.py
@@ -19,8 +19,7 @@ class TestConfig(unittest.TestCase):
self.descriptor = tempfile.NamedTemporaryFile(delete=False)
self.descriptor.write(self.basic_config.encode())
self.args = argparse.Namespace(path=self.descriptor.name, output="target", without_sources=False,
- template=None, scripts_path=None, additional_script=None,
- skip_ssl_verification=None)
+ template=None, scripts_path=None, skip_ssl_verification=None)
def tearDown(self):
os.remove(self.descriptor.name)
@@ -31,7 +30,6 @@ class TestConfig(unittest.TestCase):
self.assertEqual(self.generator.descriptor, self.descriptor.name)
self.assertEqual(self.generator.template, None)
self.assertEqual(self.generator.scripts_path, None)
- self.assertEqual(self.generator.additional_scripts, None)
self.assertEqual(self.generator.without_sources, False)
# Set to True in the configure() method later
self.assertEqual(self.generator.ssl_verify, None)
@@ -121,23 +119,6 @@ class TestConfig(unittest.TestCase):
mock_patch.assert_called_with('custom-scripts')
self.assertEqual(generator.scripts_path, "custom-scripts")
- def test_custom_additional_scripts_in_descriptor(self):
- with self.descriptor as f:
- f.write("dogen:\n additional_scripts:\n - http://host/somescript".encode())
-
- generator = Generator(self.log, self.args)
- generator.configure()
- self.assertEqual(generator.additional_scripts, ["http://host/somescript"])
-
- def test_custom_additional_scripts_in_cli_should_override_in_descriptor(self):
- with self.descriptor as f:
- f.write("dogen:\n additional_scripts:\n - http://host/somescript".encode())
- args = self.args
- args.additional_script=["https://otherhost/otherscript"]
- generator = Generator(self.log, args)
- generator.configure()
- self.assertEqual(generator.additional_scripts, ["https://otherhost/otherscript"])
-
@mock.patch('dogen.generator.os.path.exists', return_value=True)
def helper_test_script_exec(self, exec_to_test, cfg, mock_patch):
"""Helper method for tests around script exec value"""
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 3
},
"num_modified_files": 3
} | 2.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"mock"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi==2025.1.31
charset-normalizer==3.4.1
coverage==7.8.0
docopt==0.6.2
-e git+https://github.com/jboss-dockerfiles/dogen.git@a92bdcc194b4ca56c6691fbec7fe8e7b8cd19017#egg=dogen
exceptiongroup==1.2.2
idna==3.10
iniconfig==2.1.0
Jinja2==3.1.6
MarkupSafe==3.0.2
mock==5.2.0
packaging==24.2
pluggy==1.5.0
pykwalify==1.8.0
pytest==8.3.5
pytest-cov==6.0.0
python-dateutil==2.9.0.post0
PyYAML==6.0.2
requests==2.32.3
ruamel.yaml==0.18.10
ruamel.yaml.clib==0.2.12
six==1.17.0
tomli==2.2.1
urllib3==2.3.0
| name: dogen
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- certifi==2025.1.31
- charset-normalizer==3.4.1
- coverage==7.8.0
- docopt==0.6.2
- exceptiongroup==1.2.2
- idna==3.10
- iniconfig==2.1.0
- jinja2==3.1.6
- markupsafe==3.0.2
- mock==5.2.0
- packaging==24.2
- pluggy==1.5.0
- pykwalify==1.8.0
- pytest==8.3.5
- pytest-cov==6.0.0
- python-dateutil==2.9.0.post0
- pyyaml==6.0.2
- requests==2.32.3
- ruamel-yaml==0.18.10
- ruamel-yaml-clib==0.2.12
- six==1.17.0
- tomli==2.2.1
- urllib3==2.3.0
prefix: /opt/conda/envs/dogen
| [
"tests/test_unit_generate_configuration.py::TestConfig::test_custom_script_exec",
"tests/test_unit_generate_configuration.py::TestConfig::test_custom_script_exec_not_env",
"tests/test_unit_generate_configuration.py::TestConfig::test_custom_script_user_not_default",
"tests/test_unit_generate_configuration.py::TestConfig::test_custom_script_user_not_env",
"tests/test_unit_generate_configuration.py::TestConfig::test_custom_scripts_dir_in_cli_should_override_in_descriptor",
"tests/test_unit_generate_configuration.py::TestConfig::test_custom_scripts_dir_in_descriptor",
"tests/test_unit_generate_configuration.py::TestConfig::test_custom_template_in_cli_should_override_in_descriptor",
"tests/test_unit_generate_configuration.py::TestConfig::test_custom_template_in_descriptor",
"tests/test_unit_generate_configuration.py::TestConfig::test_default_script_exec",
"tests/test_unit_generate_configuration.py::TestConfig::test_default_script_user",
"tests/test_unit_generate_configuration.py::TestConfig::test_default_values",
"tests/test_unit_generate_configuration.py::TestConfig::test_do_not_skip_ssl_verification_in_cli_false_should_override_descriptor",
"tests/test_unit_generate_configuration.py::TestConfig::test_do_not_skip_ssl_verification_in_descriptor",
"tests/test_unit_generate_configuration.py::TestConfig::test_env_provided_script_user",
"tests/test_unit_generate_configuration.py::TestConfig::test_env_supplied_script_exec",
"tests/test_unit_generate_configuration.py::TestConfig::test_fail_if_version_mismatch",
"tests/test_unit_generate_configuration.py::TestConfig::test_handling_sources",
"tests/test_unit_generate_configuration.py::TestConfig::test_handling_sources_when_local_file_exists_and_is_broken",
"tests/test_unit_generate_configuration.py::TestConfig::test_handling_sources_when_local_file_exists_and_is_correct",
"tests/test_unit_generate_configuration.py::TestConfig::test_handling_sources_with_cache_url",
"tests/test_unit_generate_configuration.py::TestConfig::test_handling_sources_with_cache_url_and_target_filename",
"tests/test_unit_generate_configuration.py::TestConfig::test_handling_sources_with_cache_url_with_filename_to_replace",
"tests/test_unit_generate_configuration.py::TestConfig::test_handling_sources_with_deprecated_md5sum",
"tests/test_unit_generate_configuration.py::TestConfig::test_handling_sources_with_multiple_hashes",
"tests/test_unit_generate_configuration.py::TestConfig::test_handling_sources_with_multiple_hashes_and_cache_url",
"tests/test_unit_generate_configuration.py::TestConfig::test_handling_sources_with_sha1",
"tests/test_unit_generate_configuration.py::TestConfig::test_handling_sources_with_sha256",
"tests/test_unit_generate_configuration.py::TestConfig::test_handling_sources_with_target_filename",
"tests/test_unit_generate_configuration.py::TestConfig::test_handling_sources_without_specified_md5sum",
"tests/test_unit_generate_configuration.py::TestConfig::test_no_scripts_defined",
"tests/test_unit_generate_configuration.py::TestConfig::test_scripts_dir_found_by_convention",
"tests/test_unit_generate_configuration.py::TestConfig::test_skip_ssl_verification_in_descriptor"
]
| []
| []
| []
| MIT License | 1,335 | [
"dogen/schema/kwalify_schema.yaml",
"dogen/generator.py",
"dogen/cli.py"
]
| [
"dogen/schema/kwalify_schema.yaml",
"dogen/generator.py",
"dogen/cli.py"
]
|
|
jboss-dockerfiles__dogen-135 | a155c8b0383d66f5a4f81501e855edfcb148cb3e | 2017-06-06 09:14:02 | bc9263c5b683fdf901ac1646286ee3908b85dcdc | dbecvarik: url should be deprecated only, otherwise it will make all of current descriptors unusable. | diff --git a/dogen/generator.py b/dogen/generator.py
index 52fa501..d59602d 100644
--- a/dogen/generator.py
+++ b/dogen/generator.py
@@ -58,7 +58,7 @@ class Generator(object):
r = requests.get(location, verify=self.ssl_verify, stream=True)
if r.status_code != 200:
- raise Exception("Could not download file from %s" % location)
+ raise Error("Could not download file from %s" % location)
with open(output, 'wb') as f:
for chunk in r.iter_content(chunk_size=1024):
@@ -253,10 +253,23 @@ class Generator(object):
self.cfg['artifacts'] = {}
for source in self.cfg['sources']:
- url = source['url']
- target = source.get('target')
+ url = source.get('url')
+ artifact = source.get('artifact')
+
+ if url:
+ self.log.warn("The 'url' key is deprecated, please use 'artifact' for specifying the %s artifact location" % url)
+
+ if artifact:
+ self.log.warn("You specified both: 'artifact' and 'url' for a source file, 'artifact': will be used: %s" % artifact)
+ else:
+ # Backward compatibility
+ artifact = url
- basename = os.path.basename(url)
+ if not artifact:
+ raise Error("Artifact location for one or more sources was not provided, please check your image descriptor!")
+
+ basename = os.path.basename(artifact)
+ target = source.get('target')
# In case we specify target name for the artifact - use it
if not target:
@@ -270,7 +283,7 @@ class Generator(object):
md5sum = source.get('md5sum')
if md5sum:
- self.log.warn("The 'md5sum' key is deprecated, please use 'md5' for %s. Or better switch to sha256 or sha1." % url)
+ self.log.warn("The 'md5sum' key is deprecated, please use 'md5' for %s. Or better switch to sha256 or sha1." % artifact)
# Backwards compatibility for md5sum
if not source.get('md5'):
@@ -295,17 +308,17 @@ class Generator(object):
sources_cache = os.environ.get("DOGEN_SOURCES_CACHE")
if sources_cache:
- url = sources_cache.replace('#filename#', basename)
+ artifact = sources_cache.replace('#filename#', basename)
if algorithms:
if len(algorithms) > 1:
- self.log.warn("You specified multiple algorithms for '%s' url, but only '%s' will be used to fetch it from cache" % (url, algorithms[0]))
+ self.log.warn("You specified multiple algorithms for '%s' artifact, but only '%s' will be used to fetch it from cache" % (artifact, algorithms[0]))
- url = url.replace('#hash#', source[algorithms[0]]).replace('#algorithm#', algorithms[0])
+ artifact = artifact.replace('#hash#', source[algorithms[0]]).replace('#algorithm#', algorithms[0])
- self.log.info("Using '%s' as cached location for artifact" % url)
+ self.log.info("Using '%s' as cached location for artifact" % artifact)
- self._fetch_file(url, filename)
+ self._fetch_file(artifact, filename)
if algorithms:
for algorithm in algorithms:
@@ -327,6 +340,6 @@ class Generator(object):
filesum = hash.hexdigest()
if filesum.lower() != checksum.lower():
- raise Exception("The %s computed for the '%s' file ('%s') doesn't match the '%s' value" % (algorithm, filename, filesum, checksum))
+ raise Error("The %s computed for the '%s' file ('%s') doesn't match the '%s' value" % (algorithm, filename, filesum, checksum))
self.log.debug("Hash is correct.")
diff --git a/dogen/schema/kwalify_schema.yaml b/dogen/schema/kwalify_schema.yaml
index a54aa67..b8035d1 100644
--- a/dogen/schema/kwalify_schema.yaml
+++ b/dogen/schema/kwalify_schema.yaml
@@ -66,8 +66,9 @@ map:
sources:
seq:
- map:
- url: {type: str}
- md5sum: {type: str}
+ url: {type: str} # deprecated, use artifact
+ artifact: {type: str}
+ md5sum: {type: str} # deprecated, use md5
md5: {type: str}
sha1: {type: str}
sha256: {type: str}
diff --git a/dogen/template_helper.py b/dogen/template_helper.py
index 3e88f03..e36693b 100644
--- a/dogen/template_helper.py
+++ b/dogen/template_helper.py
@@ -10,7 +10,7 @@ class TemplateHelper(object):
if target:
return target
- return os.path.basename(source['url'])
+ return os.path.basename(source['artifact'])
def cmd(self, arr):
"""
| Rename "url" in sources to "artifact"
It doesn't need to be a URL, it can be a filename only too. It's a bit misleading to call it "url". | jboss-dockerfiles/dogen | diff --git a/tests/schemas/good/openshift_amq_6.2_image.yaml b/tests/schemas/good/openshift_amq_6.2_image.yaml
index fa4f3cf..2cde6e8 100644
--- a/tests/schemas/good/openshift_amq_6.2_image.yaml
+++ b/tests/schemas/good/openshift_amq_6.2_image.yaml
@@ -51,10 +51,10 @@ scripts:
- package: os-amq-permissions
exec: install.sh
sources:
- - url: http://redacted/jolokia-jvm-1.3.2.redhat-1-agent.jar
+ - artifact: http://redacted/jolokia-jvm-1.3.2.redhat-1-agent.jar
md5: 1b996b9083f537917b307309b0e2f16d
- - url: http://redacted/openshift-activemq-plugin-1.0.0.Final-redhat-1.jar
+ - artifact: http://redacted/openshift-activemq-plugin-1.0.0.Final-redhat-1.jar
md5: 325fbbaff0f7dbea42203346d9c3bf98
- - url: http://redacted/jboss-dmr-1.2.2.Final-redhat-1.jar
+ - artifact: http://redacted/jboss-dmr-1.2.2.Final-redhat-1.jar
md5: 8df4cbf6f39c3bce21de16ad708084d5
diff --git a/tests/schemas/good/openshift_datagrid_6.5_image.yaml b/tests/schemas/good/openshift_datagrid_6.5_image.yaml
index b20b77e..67ff7a1 100644
--- a/tests/schemas/good/openshift_datagrid_6.5_image.yaml
+++ b/tests/schemas/good/openshift_datagrid_6.5_image.yaml
@@ -150,17 +150,17 @@ scripts:
exec: configure.sh
user: 185
sources:
- - url: http://redacted/jolokia-jvm-1.3.2.redhat-1-agent.jar
+ - artifact: http://redacted/jolokia-jvm-1.3.2.redhat-1-agent.jar
md5: 1b996b9083f537917b307309b0e2f16d
- - url: http://redacted/javax.json-1.0.4.jar
+ - artifact: http://redacted/javax.json-1.0.4.jar
md5: 569870f975deeeb6691fcb9bc02a9555
- - url: http://redacted/jboss-logmanager-ext-1.0.0.Alpha2-redhat-1.jar
+ - artifact: http://redacted/jboss-logmanager-ext-1.0.0.Alpha2-redhat-1.jar
md5: 7c743e35463db5f55f415dd666d705c5
- - url: http://redacted/openshift-ping-common-1.0.0.Final-redhat-1.jar
+ - artifact: http://redacted/openshift-ping-common-1.0.0.Final-redhat-1.jar
md5: bafa4db7efe4082d76cde2fa9499bf84
- - url: http://redacted/openshift-ping-dns-1.0.0.Final-redhat-1.jar
+ - artifact: http://redacted/openshift-ping-dns-1.0.0.Final-redhat-1.jar
md5: 71bbfdf795a2c65e4473df242f765490
- - url: http://redacted/openshift-ping-kube-1.0.0.Final-redhat-1.jar
+ - artifact: http://redacted/openshift-ping-kube-1.0.0.Final-redhat-1.jar
md5: 145add030a89c3ed588dce27b0f24999
- - url: http://redacted/oauth-20100527.jar
+ - artifact: http://redacted/oauth-20100527.jar
md5: 91c7c70579f95b7ddee95b2143a49b41
diff --git a/tests/schemas/good/openshift_eap_6.4_image.yaml b/tests/schemas/good/openshift_eap_6.4_image.yaml
index 2b24f39..0ab9029 100644
--- a/tests/schemas/good/openshift_eap_6.4_image.yaml
+++ b/tests/schemas/good/openshift_eap_6.4_image.yaml
@@ -97,23 +97,23 @@ scripts:
exec: configure.sh
user: 185
sources:
- - url: http://redacted/jolokia-jvm-1.3.2.redhat-1-agent.jar
+ - artifact: http://redacted/jolokia-jvm-1.3.2.redhat-1-agent.jar
md5: 1b996b9083f537917b307309b0e2f16d
- - url: http://redacted/javax.json-1.0.4.jar
+ - artifact: http://redacted/javax.json-1.0.4.jar
md5: 569870f975deeeb6691fcb9bc02a9555
- - url: http://redacted/jboss-logmanager-ext-1.0.0.Alpha2-redhat-1.jar
+ - artifact: http://redacted/jboss-logmanager-ext-1.0.0.Alpha2-redhat-1.jar
md5: 7c743e35463db5f55f415dd666d705c5
- - url: http://redacted/openshift-ping-common-1.0.0.Final-redhat-1.jar
+ - artifact: http://redacted/openshift-ping-common-1.0.0.Final-redhat-1.jar
md5: bafa4db7efe4082d76cde2fa9499bf84
- - url: http://redacted/openshift-ping-dns-1.0.0.Final-redhat-1.jar
+ - artifact: http://redacted/openshift-ping-dns-1.0.0.Final-redhat-1.jar
md5: 71bbfdf795a2c65e4473df242f765490
- - url: http://redacted/openshift-ping-kube-1.0.0.Final-redhat-1.jar
+ - artifact: http://redacted/openshift-ping-kube-1.0.0.Final-redhat-1.jar
md5: 145add030a89c3ed588dce27b0f24999
- - url: http://redacted/oauth-20100527.jar
+ - artifact: http://redacted/oauth-20100527.jar
md5: 91c7c70579f95b7ddee95b2143a49b41
- - url: http://redacted/activemq-rar-5.11.0.redhat-621084.rar
+ - artifact: http://redacted/activemq-rar-5.11.0.redhat-621084.rar
md5: 207e17ac8102c93233fe2764d1fe8499
- - url: http://redacted/rh-sso-7.0.0-eap6-adapter.zip
+ - artifact: http://redacted/rh-sso-7.0.0-eap6-adapter.zip
md5: 6fd81306ea4297307dcc5f51712e5f95
- - url: http://redacted/rh-sso-7.0.0-saml-eap6-adapter.zip
+ - artifact: http://redacted/rh-sso-7.0.0-saml-eap6-adapter.zip
md5: 3b953c114dd09f86e71e18cd57d8af56
diff --git a/tests/schemas/good/openshift_eap_7.0_image.yaml b/tests/schemas/good/openshift_eap_7.0_image.yaml
index 08f5a5c..7c0c4b4 100644
--- a/tests/schemas/good/openshift_eap_7.0_image.yaml
+++ b/tests/schemas/good/openshift_eap_7.0_image.yaml
@@ -106,23 +106,23 @@ scripts:
exec: configure.sh
user: 185
sources:
- - url: http://redacted/jolokia-jvm-1.3.2.redhat-1-agent.jar
+ - artifact: http://redacted/jolokia-jvm-1.3.2.redhat-1-agent.jar
md5: 1b996b9083f537917b307309b0e2f16d
- - url: http://redacted/javax.json-1.0.4.jar
+ - artifact: http://redacted/javax.json-1.0.4.jar
md5: 569870f975deeeb6691fcb9bc02a9555
- - url: http://redacted/jboss-logmanager-ext-1.0.0.Alpha2-redhat-1.jar
+ - artifact: http://redacted/jboss-logmanager-ext-1.0.0.Alpha2-redhat-1.jar
md5: 7c743e35463db5f55f415dd666d705c5
- - url: http://redacted/openshift-ping-common-1.0.0.Final-redhat-1.jar
+ - artifact: http://redacted/openshift-ping-common-1.0.0.Final-redhat-1.jar
md5: bafa4db7efe4082d76cde2fa9499bf84
- - url: http://redacted/openshift-ping-dns-1.0.0.Final-redhat-1.jar
+ - artifact: http://redacted/openshift-ping-dns-1.0.0.Final-redhat-1.jar
md5: 71bbfdf795a2c65e4473df242f765490
- - url: http://redacted/openshift-ping-kube-1.0.0.Final-redhat-1.jar
+ - artifact: http://redacted/openshift-ping-kube-1.0.0.Final-redhat-1.jar
md5: 145add030a89c3ed588dce27b0f24999
- - url: http://redacted/oauth-20100527.jar
+ - artifact: http://redacted/oauth-20100527.jar
md5: 91c7c70579f95b7ddee95b2143a49b41
- - url: http://redacted/activemq-rar-5.11.0.redhat-621084.rar
+ - artifact: http://redacted/activemq-rar-5.11.0.redhat-621084.rar
md5: 207e17ac8102c93233fe2764d1fe8499
- - url: http://redacted/rh-sso-7.0.0-eap7-adapter.zip
+ - artifact: http://redacted/rh-sso-7.0.0-eap7-adapter.zip
md5: 1542c1014d9ebc24522839a5fa8bee4d
- - url: http://redacted/rh-sso-7.0.0-saml-eap7-adapter.zip
+ - artifact: http://redacted/rh-sso-7.0.0-saml-eap7-adapter.zip
md5: ce858a47c707b362a968ffd5c66768dd
diff --git a/tests/schemas/good/openshift_fuse-camel_6.3_image.yaml b/tests/schemas/good/openshift_fuse-camel_6.3_image.yaml
index 953fa45..9c5855d 100644
--- a/tests/schemas/good/openshift_fuse-camel_6.3_image.yaml
+++ b/tests/schemas/good/openshift_fuse-camel_6.3_image.yaml
@@ -100,23 +100,23 @@ scripts:
user: 185
exec: configure.sh
sources:
- - url: http://redacted/jolokia-jvm-1.3.2.redhat-1-agent.jar
+ - artifact: http://redacted/jolokia-jvm-1.3.2.redhat-1-agent.jar
md5: 1b996b9083f537917b307309b0e2f16d
- - url: http://redacted/javax.json-1.0.4.jar
+ - artifact: http://redacted/javax.json-1.0.4.jar
md5: 569870f975deeeb6691fcb9bc02a9555
- - url: http://redacted/jboss-logmanager-ext-1.0.0.Alpha2-redhat-1.jar
+ - artifact: http://redacted/jboss-logmanager-ext-1.0.0.Alpha2-redhat-1.jar
md5: 7c743e35463db5f55f415dd666d705c5
- - url: http://redacted/openshift-ping-common-1.0.0.Final-redhat-1.jar
+ - artifact: http://redacted/openshift-ping-common-1.0.0.Final-redhat-1.jar
md5: bafa4db7efe4082d76cde2fa9499bf84
- - url: http://redacted/openshift-ping-dns-1.0.0.Final-redhat-1.jar
+ - artifact: http://redacted/openshift-ping-dns-1.0.0.Final-redhat-1.jar
md5: 71bbfdf795a2c65e4473df242f765490
- - url: http://redacted/openshift-ping-kube-1.0.0.Final-redhat-1.jar
+ - artifact: http://redacted/openshift-ping-kube-1.0.0.Final-redhat-1.jar
md5: 145add030a89c3ed588dce27b0f24999
- - url: http://redacted/oauth-20100527.jar
+ - artifact: http://redacted/oauth-20100527.jar
md5: 91c7c70579f95b7ddee95b2143a49b41
- - url: http://redacted/activemq-rar-5.11.0.redhat-621084.rar
+ - artifact: http://redacted/activemq-rar-5.11.0.redhat-621084.rar
md5: 207e17ac8102c93233fe2764d1fe8499
- - url: http://redacted/rh-sso-7.0.0-eap6-adapter.zip
+ - artifact: http://redacted/rh-sso-7.0.0-eap6-adapter.zip
md5: 6fd81306ea4297307dcc5f51712e5f95
- - url: http://redacted/rh-sso-7.0.0-saml-eap6-adapter.zip
+ - artifact: http://redacted/rh-sso-7.0.0-saml-eap6-adapter.zip
md5: 3b953c114dd09f86e71e18cd57d8af56
diff --git a/tests/schemas/good/openshift_kieserver_6.2_image.yaml b/tests/schemas/good/openshift_kieserver_6.2_image.yaml
index 3a1bd93..77134f3 100644
--- a/tests/schemas/good/openshift_kieserver_6.2_image.yaml
+++ b/tests/schemas/good/openshift_kieserver_6.2_image.yaml
@@ -129,12 +129,12 @@ scripts:
user: 185
exec: configure.sh
sources:
- - url: http://redacted/jboss-bpmsuite-6.2.1.GA-redhat-2-deployable-eap6.x.zip
+ - artifact: http://redacted/jboss-bpmsuite-6.2.1.GA-redhat-2-deployable-eap6.x.zip
md5: b63c7dfe82a44a140cce3a824c8c2e90
- - url: http://redacted/openshift-kieserver-common-1.0.2.Final-redhat-1.jar
+ - artifact: http://redacted/openshift-kieserver-common-1.0.2.Final-redhat-1.jar
md5: 5858103206d0bcc4695aad38a7430c75
- - url: http://redacted/openshift-kieserver-jms-1.0.2.Final-redhat-1.jar
+ - artifact: http://redacted/openshift-kieserver-jms-1.0.2.Final-redhat-1.jar
md5: 4a80b12399c49a1d274bbd1c62b49b65
- - url: http://redacted/openshift-kieserver-web-1.0.2.Final-redhat-1.jar
+ - artifact: http://redacted/openshift-kieserver-web-1.0.2.Final-redhat-1.jar
md5: adf602d027020b30cc5743d3d5d8e2f7
diff --git a/tests/schemas/good/openshift_kieserver_6.3_image.yaml b/tests/schemas/good/openshift_kieserver_6.3_image.yaml
index caf4912..010b945 100644
--- a/tests/schemas/good/openshift_kieserver_6.3_image.yaml
+++ b/tests/schemas/good/openshift_kieserver_6.3_image.yaml
@@ -130,13 +130,13 @@ scripts:
user: 185
exec: configure.sh
sources:
- - url: http://redacted/jboss-bpmsuite-6.3.0.GA-deployable-eap6.x.zip
+ - artifact: http://redacted/jboss-bpmsuite-6.3.0.GA-deployable-eap6.x.zip
md5: 4e283717b0f295adf7025971065d6db8
- - url: http://redacted/jboss-bpmsuite-6.3.0.GA-supplementary-tools.zip
+ - artifact: http://redacted/jboss-bpmsuite-6.3.0.GA-supplementary-tools.zip
md5: b3d135e2d297f1e89d9ff8357c1e9aac
- - url: http://redacted/openshift-kieserver-common-1.0.2.Final-redhat-1.jar
+ - artifact: http://redacted/openshift-kieserver-common-1.0.2.Final-redhat-1.jar
md5: 5858103206d0bcc4695aad38a7430c75
- - url: http://redacted/openshift-kieserver-jms-1.0.2.Final-redhat-1.jar
+ - artifact: http://redacted/openshift-kieserver-jms-1.0.2.Final-redhat-1.jar
md5: 4a80b12399c49a1d274bbd1c62b49b65
- - url: http://redacted/openshift-kieserver-web-1.0.2.Final-redhat-1.jar
+ - artifact: http://redacted/openshift-kieserver-web-1.0.2.Final-redhat-1.jar
md5: adf602d027020b30cc5743d3d5d8e2f7
diff --git a/tests/schemas/good/openshift_sso_7.0_image.yaml b/tests/schemas/good/openshift_sso_7.0_image.yaml
index 76cee4f..0c43530 100644
--- a/tests/schemas/good/openshift_sso_7.0_image.yaml
+++ b/tests/schemas/good/openshift_sso_7.0_image.yaml
@@ -96,18 +96,18 @@ scripts:
- package: os-sso
exec: configure.sh
sources:
- - url: http://redacted/jolokia-jvm-1.3.2.redhat-1-agent.jar
+ - artifact: http://redacted/jolokia-jvm-1.3.2.redhat-1-agent.jar
md5: 1b996b9083f537917b307309b0e2f16d
- - url: http://redacted/javax.json-1.0.4.jar
+ - artifact: http://redacted/javax.json-1.0.4.jar
md5: 569870f975deeeb6691fcb9bc02a9555
- - url: http://redacted/jboss-logmanager-ext-1.0.0.Alpha2-redhat-1.jar
+ - artifact: http://redacted/jboss-logmanager-ext-1.0.0.Alpha2-redhat-1.jar
md5: 7c743e35463db5f55f415dd666d705c5
- - url: http://redacted/openshift-ping-common-1.0.0.Final-redhat-1.jar
+ - artifact: http://redacted/openshift-ping-common-1.0.0.Final-redhat-1.jar
md5: bafa4db7efe4082d76cde2fa9499bf84
- - url: http://redacted/openshift-ping-dns-1.0.0.Final-redhat-1.jar
+ - artifact: http://redacted/openshift-ping-dns-1.0.0.Final-redhat-1.jar
md5: 71bbfdf795a2c65e4473df242f765490
- - url: http://redacted/openshift-ping-kube-1.0.0.Final-redhat-1.jar
+ - artifact: http://redacted/openshift-ping-kube-1.0.0.Final-redhat-1.jar
md5: 145add030a89c3ed588dce27b0f24999
- - url: http://redacted/oauth-20100527.jar
+ - artifact: http://redacted/oauth-20100527.jar
md5: 91c7c70579f95b7ddee95b2143a49b41
diff --git a/tests/schemas/good/openshift_webserver-tomcat7_3.0_image.yaml b/tests/schemas/good/openshift_webserver-tomcat7_3.0_image.yaml
index 4c4d54e..c7e5991 100644
--- a/tests/schemas/good/openshift_webserver-tomcat7_3.0_image.yaml
+++ b/tests/schemas/good/openshift_webserver-tomcat7_3.0_image.yaml
@@ -93,11 +93,11 @@ scripts:
user: 185
exec: configure.sh
sources:
- - url: http://redacted/jolokia-jvm-1.3.2.redhat-1-agent.jar
+ - artifact: http://redacted/jolokia-jvm-1.3.2.redhat-1-agent.jar
md5: 1b996b9083f537917b307309b0e2f16d
- - url: http://redacted/json-smart-1.1.1.jar
+ - artifact: http://redacted/json-smart-1.1.1.jar
md5: c382c9109020d001b96329c2057ba933
- - url: http://redacted/commons-lang-2.6.0.redhat-4.jar
+ - artifact: http://redacted/commons-lang-2.6.0.redhat-4.jar
md5: 0da0fbfb0ff2160df3a4832d28003361
- - url: http://redacted/jsonevent-layout-1.7-redhat-1.jar
+ - artifact: http://redacted/jsonevent-layout-1.7-redhat-1.jar
md5: 08f9aa037ac91c4aaa0d5dabf143a60e
diff --git a/tests/schemas/good/openshift_webserver-tomcat8_3.0_image.yaml b/tests/schemas/good/openshift_webserver-tomcat8_3.0_image.yaml
index 6a0b095..b7039a3 100644
--- a/tests/schemas/good/openshift_webserver-tomcat8_3.0_image.yaml
+++ b/tests/schemas/good/openshift_webserver-tomcat8_3.0_image.yaml
@@ -93,11 +93,11 @@ scripts:
user: 185
exec: configure.sh
sources:
- - url: http://redacted/jolokia-jvm-1.3.2.redhat-1-agent.jar
+ - artifact: http://redacted/jolokia-jvm-1.3.2.redhat-1-agent.jar
md5: 1b996b9083f537917b307309b0e2f16d
- - url: http://redacted/json-smart-1.1.1.jar
+ - artifact: http://redacted/json-smart-1.1.1.jar
md5: c382c9109020d001b96329c2057ba933
- - url: http://redacted/commons-lang-2.6.0.redhat-4.jar
+ - artifact: http://redacted/commons-lang-2.6.0.redhat-4.jar
md5: 0da0fbfb0ff2160df3a4832d28003361
- - url: http://redacted/jsonevent-layout-1.7-redhat-1.jar
+ - artifact: http://redacted/jsonevent-layout-1.7-redhat-1.jar
md5: 08f9aa037ac91c4aaa0d5dabf143a60e
diff --git a/tests/test_unit_generate_configuration.py b/tests/test_unit_generate_configuration.py
index 531ee8e..268f9d5 100644
--- a/tests/test_unit_generate_configuration.py
+++ b/tests/test_unit_generate_configuration.py
@@ -245,7 +245,7 @@ class TestConfig(unittest.TestCase):
@mock.patch('dogen.generator.Generator._fetch_file')
def test_handling_sources_without_specified_md5sum(self, mock_fetch_file):
with self.descriptor as f:
- f.write("sources:\n - url: http://somehost.com/file.zip".encode())
+ f.write("sources:\n - artifact: http://somehost.com/file.zip".encode())
generator = Generator(self.log, self.args)
generator.configure()
@@ -257,7 +257,7 @@ class TestConfig(unittest.TestCase):
@mock.patch('dogen.generator.Generator._fetch_file')
def test_handling_sources(self, mock_fetch_file, mock_check_sum):
with self.descriptor as f:
- f.write("sources:\n - url: http://somehost.com/file.zip\n md5: e9013fc202c87be48e3b302df10efc4b".encode())
+ f.write("sources:\n - artifact: http://somehost.com/file.zip\n md5: e9013fc202c87be48e3b302df10efc4b".encode())
generator = Generator(self.log, self.args)
generator.configure()
@@ -270,7 +270,7 @@ class TestConfig(unittest.TestCase):
@mock.patch('dogen.generator.Generator._fetch_file')
def test_handling_sources_with_multiple_hashes(self, mock_fetch_file, mock_check_sum):
with self.descriptor as f:
- f.write("sources:\n - url: http://somehost.com/file.zip\n md5: e9013fc202c87be48e3b302df10efc4b\n sha1: 105bfe02a86ba69be5506cd559a54c4b252fb132".encode())
+ f.write("sources:\n - artifact: http://somehost.com/file.zip\n md5: e9013fc202c87be48e3b302df10efc4b\n sha1: 105bfe02a86ba69be5506cd559a54c4b252fb132".encode())
generator = Generator(self.log, self.args)
generator.configure()
@@ -284,7 +284,7 @@ class TestConfig(unittest.TestCase):
@mock.patch('dogen.generator.Generator._fetch_file')
def test_handling_sources_with_multiple_hashes_and_cache_url(self, mock_fetch_file, mock_check_sum):
with self.descriptor as f:
- f.write("sources:\n - url: http://somehost.com/file.zip\n md5: e9013fc202c87be48e3b302df10efc4b\n sha1: 105bfe02a86ba69be5506cd559a54c4b252fb132".encode())
+ f.write("sources:\n - artifact: http://somehost.com/file.zip\n md5: e9013fc202c87be48e3b302df10efc4b\n sha1: 105bfe02a86ba69be5506cd559a54c4b252fb132".encode())
k = mock.patch.dict(os.environ, {'DOGEN_SOURCES_CACHE':'http://cache/get?#algorithm#=#hash#'})
k.start()
@@ -301,7 +301,7 @@ class TestConfig(unittest.TestCase):
@mock.patch('dogen.generator.Generator._fetch_file')
def test_handling_sources_with_deprecated_md5sum(self, mock_fetch_file, mock_check_sum):
with self.descriptor as f:
- f.write("sources:\n - url: http://somehost.com/file.zip\n md5sum: e9013fc202c87be48e3b302df10efc4b".encode())
+ f.write("sources:\n - artifact: http://somehost.com/file.zip\n md5sum: e9013fc202c87be48e3b302df10efc4b".encode())
generator = Generator(self.log, self.args)
generator.configure()
@@ -314,7 +314,7 @@ class TestConfig(unittest.TestCase):
@mock.patch('dogen.generator.Generator._fetch_file')
def test_handling_sources_with_sha1(self, mock_fetch_file, mock_check_sum):
with self.descriptor as f:
- f.write("sources:\n - url: http://somehost.com/file.zip\n sha1: 105bfe02a86ba69be5506cd559a54c4b252fb132".encode())
+ f.write("sources:\n - artifact: http://somehost.com/file.zip\n sha1: 105bfe02a86ba69be5506cd559a54c4b252fb132".encode())
generator = Generator(self.log, self.args)
generator.configure()
@@ -327,7 +327,7 @@ class TestConfig(unittest.TestCase):
@mock.patch('dogen.generator.Generator._fetch_file')
def test_handling_sources_with_sha256(self, mock_fetch_file, mock_check_sum):
with self.descriptor as f:
- f.write("sources:\n - url: http://somehost.com/file.zip\n sha256: 9912afca5a08e9e05174c5fbb7a9a1510283d5952f90796c6a3e8bc78217e2fb".encode())
+ f.write("sources:\n - artifact: http://somehost.com/file.zip\n sha256: 9912afca5a08e9e05174c5fbb7a9a1510283d5952f90796c6a3e8bc78217e2fb".encode())
generator = Generator(self.log, self.args)
generator.configure()
@@ -341,7 +341,7 @@ class TestConfig(unittest.TestCase):
@mock.patch('dogen.generator.Generator._fetch_file')
def test_handling_sources_when_local_file_exists_and_is_correct(self, mock_fetch_file, mock_check_sum, mock_path):
with self.descriptor as f:
- f.write("sources:\n - url: http://somehost.com/file.zip\n md5: e9013fc202c87be48e3b302df10efc4b".encode())
+ f.write("sources:\n - artifact: http://somehost.com/file.zip\n md5: e9013fc202c87be48e3b302df10efc4b".encode())
generator = Generator(self.log, self.args)
generator.configure()
@@ -354,7 +354,7 @@ class TestConfig(unittest.TestCase):
@mock.patch('dogen.generator.Generator._fetch_file')
def test_handling_sources_when_local_file_exists_and_is_broken(self, mock_fetch_file, mock_check_sum, mock_path):
with self.descriptor as f:
- f.write("sources:\n - url: http://somehost.com/file.zip\n md5: e9013fc202c87be48e3b302df10efc4b".encode())
+ f.write("sources:\n - artifact: http://somehost.com/file.zip\n md5: e9013fc202c87be48e3b302df10efc4b".encode())
generator = Generator(self.log, self.args)
generator.configure()
@@ -366,7 +366,7 @@ class TestConfig(unittest.TestCase):
@mock.patch('dogen.generator.Generator._fetch_file')
def test_handling_sources_with_target_filename(self, mock_fetch_file, mock_check_sum):
with self.descriptor as f:
- f.write("sources:\n - url: http://somehost.com/file.zip\n md5: e9013fc202c87be48e3b302df10efc4b\n target: target.zip".encode())
+ f.write("sources:\n - artifact: http://somehost.com/file.zip\n md5: e9013fc202c87be48e3b302df10efc4b\n target: target.zip".encode())
generator = Generator(self.log, self.args)
generator.configure()
@@ -378,7 +378,7 @@ class TestConfig(unittest.TestCase):
@mock.patch('dogen.generator.Generator._fetch_file')
def test_handling_sources_with_cache_url(self, mock_fetch_file, mock_check_sum):
with self.descriptor as f:
- f.write("sources:\n - url: http://somehost.com/file.zip\n md5: e9013fc202c87be48e3b302df10efc4b".encode())
+ f.write("sources:\n - artifact: http://somehost.com/file.zip\n md5: e9013fc202c87be48e3b302df10efc4b".encode())
k = mock.patch.dict(os.environ, {'DOGEN_SOURCES_CACHE':'http://cache/get?#algorithm#=#hash#'})
k.start()
@@ -393,7 +393,7 @@ class TestConfig(unittest.TestCase):
@mock.patch('dogen.generator.Generator._fetch_file')
def test_handling_sources_with_cache_url_and_target_filename(self, mock_fetch_file, mock_check_sum):
with self.descriptor as f:
- f.write("sources:\n - url: http://somehost.com/file.zip\n md5: e9013fc202c87be48e3b302df10efc4b\n target: target.zip".encode())
+ f.write("sources:\n - artifact: http://somehost.com/file.zip\n md5: e9013fc202c87be48e3b302df10efc4b\n target: target.zip".encode())
k = mock.patch.dict(os.environ, {'DOGEN_SOURCES_CACHE':'http://cache/get?#algorithm#=#hash#'})
k.start()
@@ -408,7 +408,7 @@ class TestConfig(unittest.TestCase):
@mock.patch('dogen.generator.Generator._fetch_file')
def test_handling_sources_with_cache_url_with_filename_to_replace(self, mock_fetch_file, mock_check_sum):
with self.descriptor as f:
- f.write("sources:\n - url: http://somehost.com/file.zip\n md5: e9013fc202c87be48e3b302df10efc4b\n target: target.zip".encode())
+ f.write("sources:\n - artifact: http://somehost.com/file.zip\n md5: e9013fc202c87be48e3b302df10efc4b\n target: target.zip".encode())
k = mock.patch.dict(os.environ, {'DOGEN_SOURCES_CACHE':'http://cache/#filename#'})
k.start()
@@ -418,3 +418,31 @@ class TestConfig(unittest.TestCase):
k.stop()
mock_fetch_file.assert_called_with('http://cache/file.zip', 'target/target.zip')
+
+ # https://github.com/jboss-dockerfiles/dogen/issues/132
+ @mock.patch('dogen.generator.Generator.check_sum')
+ @mock.patch('dogen.generator.Generator._fetch_file')
+ def test_handling_sources_with_url_and_artifact_keys_set(self, mock_fetch_file, mock_check_sum):
+ with self.descriptor as f:
+ f.write("sources:\n - url: http://something.com\n artifact: http://somehost.com/file.zip\n md5: e9013fc202c87be48e3b302df10efc4b\n target: target.zip".encode())
+
+ generator = Generator(self.log, self.args)
+ generator.configure()
+ generator.handle_sources()
+
+ self.log.warn.assert_any_call("The 'url' key is deprecated, please use 'artifact' for specifying the http://something.com artifact location")
+ self.log.warn.assert_any_call("You specified both: 'artifact' and 'url' for a source file, 'artifact': will be used: http://somehost.com/file.zip")
+ mock_fetch_file.assert_called_with('http://somehost.com/file.zip', 'target/target.zip')
+
+ @mock.patch('dogen.generator.Generator._fetch_file')
+ def test_handling_sources_should_fail_when_no_artifact_nor_url_is_provided(self, mock_fetch_file):
+ with self.descriptor as f:
+ f.write("sources:\n - md5: e9013fc202c87be48e3b302df10efc4b\n target: target.zip".encode())
+
+ generator = Generator(self.log, self.args)
+ generator.configure()
+
+ with self.assertRaises(Error) as cm:
+ generator.handle_sources()
+
+ self.assertEquals(str(cm.exception), "Artifact location for one or more sources was not provided, please check your image descriptor!")
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 0,
"test_score": 3
},
"num_modified_files": 3
} | 2.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"mock"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi==2025.1.31
charset-normalizer==3.4.1
coverage==7.8.0
docopt==0.6.2
-e git+https://github.com/jboss-dockerfiles/dogen.git@a155c8b0383d66f5a4f81501e855edfcb148cb3e#egg=dogen
exceptiongroup==1.2.2
idna==3.10
iniconfig==2.1.0
Jinja2==3.1.6
MarkupSafe==3.0.2
mock==5.2.0
packaging==24.2
pluggy==1.5.0
pykwalify==1.8.0
pytest==8.3.5
pytest-cov==6.0.0
python-dateutil==2.9.0.post0
PyYAML==6.0.2
requests==2.32.3
ruamel.yaml==0.18.10
ruamel.yaml.clib==0.2.12
six==1.17.0
tomli==2.2.1
urllib3==2.3.0
| name: dogen
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- certifi==2025.1.31
- charset-normalizer==3.4.1
- coverage==7.8.0
- docopt==0.6.2
- exceptiongroup==1.2.2
- idna==3.10
- iniconfig==2.1.0
- jinja2==3.1.6
- markupsafe==3.0.2
- mock==5.2.0
- packaging==24.2
- pluggy==1.5.0
- pykwalify==1.8.0
- pytest==8.3.5
- pytest-cov==6.0.0
- python-dateutil==2.9.0.post0
- pyyaml==6.0.2
- requests==2.32.3
- ruamel-yaml==0.18.10
- ruamel-yaml-clib==0.2.12
- six==1.17.0
- tomli==2.2.1
- urllib3==2.3.0
prefix: /opt/conda/envs/dogen
| [
"tests/test_unit_generate_configuration.py::TestConfig::test_handling_sources",
"tests/test_unit_generate_configuration.py::TestConfig::test_handling_sources_should_fail_when_no_artifact_nor_url_is_provided",
"tests/test_unit_generate_configuration.py::TestConfig::test_handling_sources_when_local_file_exists_and_is_broken",
"tests/test_unit_generate_configuration.py::TestConfig::test_handling_sources_when_local_file_exists_and_is_correct",
"tests/test_unit_generate_configuration.py::TestConfig::test_handling_sources_with_cache_url",
"tests/test_unit_generate_configuration.py::TestConfig::test_handling_sources_with_cache_url_and_target_filename",
"tests/test_unit_generate_configuration.py::TestConfig::test_handling_sources_with_cache_url_with_filename_to_replace",
"tests/test_unit_generate_configuration.py::TestConfig::test_handling_sources_with_deprecated_md5sum",
"tests/test_unit_generate_configuration.py::TestConfig::test_handling_sources_with_multiple_hashes",
"tests/test_unit_generate_configuration.py::TestConfig::test_handling_sources_with_multiple_hashes_and_cache_url",
"tests/test_unit_generate_configuration.py::TestConfig::test_handling_sources_with_sha1",
"tests/test_unit_generate_configuration.py::TestConfig::test_handling_sources_with_sha256",
"tests/test_unit_generate_configuration.py::TestConfig::test_handling_sources_with_target_filename",
"tests/test_unit_generate_configuration.py::TestConfig::test_handling_sources_with_url_and_artifact_keys_set",
"tests/test_unit_generate_configuration.py::TestConfig::test_handling_sources_without_specified_md5sum"
]
| []
| [
"tests/test_unit_generate_configuration.py::TestConfig::test_custom_script_exec",
"tests/test_unit_generate_configuration.py::TestConfig::test_custom_script_exec_not_env",
"tests/test_unit_generate_configuration.py::TestConfig::test_custom_script_user_not_default",
"tests/test_unit_generate_configuration.py::TestConfig::test_custom_script_user_not_env",
"tests/test_unit_generate_configuration.py::TestConfig::test_custom_scripts_dir_in_cli_should_override_in_descriptor",
"tests/test_unit_generate_configuration.py::TestConfig::test_custom_scripts_dir_in_descriptor",
"tests/test_unit_generate_configuration.py::TestConfig::test_custom_template_in_cli_should_override_in_descriptor",
"tests/test_unit_generate_configuration.py::TestConfig::test_custom_template_in_descriptor",
"tests/test_unit_generate_configuration.py::TestConfig::test_default_script_exec",
"tests/test_unit_generate_configuration.py::TestConfig::test_default_script_user",
"tests/test_unit_generate_configuration.py::TestConfig::test_default_values",
"tests/test_unit_generate_configuration.py::TestConfig::test_do_not_skip_ssl_verification_in_cli_false_should_override_descriptor",
"tests/test_unit_generate_configuration.py::TestConfig::test_do_not_skip_ssl_verification_in_descriptor",
"tests/test_unit_generate_configuration.py::TestConfig::test_env_provided_script_user",
"tests/test_unit_generate_configuration.py::TestConfig::test_env_supplied_script_exec",
"tests/test_unit_generate_configuration.py::TestConfig::test_fail_if_version_mismatch",
"tests/test_unit_generate_configuration.py::TestConfig::test_no_scripts_defined",
"tests/test_unit_generate_configuration.py::TestConfig::test_scripts_dir_found_by_convention",
"tests/test_unit_generate_configuration.py::TestConfig::test_skip_ssl_verification_in_descriptor"
]
| []
| MIT License | 1,336 | [
"dogen/schema/kwalify_schema.yaml",
"dogen/generator.py",
"dogen/template_helper.py"
]
| [
"dogen/schema/kwalify_schema.yaml",
"dogen/generator.py",
"dogen/template_helper.py"
]
|
jboss-dockerfiles__dogen-136 | c4cd03cb2352e4538e6bdc64800a2184ce1998e4 | 2017-06-06 09:22:34 | bc9263c5b683fdf901ac1646286ee3908b85dcdc | diff --git a/dogen/generator.py b/dogen/generator.py
index d59602d..50ed93c 100644
--- a/dogen/generator.py
+++ b/dogen/generator.py
@@ -226,6 +226,15 @@ class Generator(object):
self.log.info("Finished!")
def render_from_template(self):
+ maintainer = self.cfg.get('maintainer')
+
+ # https://github.com/jboss-dockerfiles/dogen/issues/129
+ if maintainer:
+ if not self.cfg.get('labels'):
+ self.cfg['labels'] = []
+
+ self.cfg['labels'].append({'name': 'maintainer', 'value': maintainer})
+
if self.template:
template_file = self.template
else:
diff --git a/dogen/templates/template.jinja b/dogen/templates/template.jinja
index 7b81139..4754bb0 100644
--- a/dogen/templates/template.jinja
+++ b/dogen/templates/template.jinja
@@ -18,11 +18,6 @@
FROM {{ helper.base_image(from, version) }}
-{% if maintainer %}
-MAINTAINER {{ maintainer }}
-
-{% endif -%}
-
# Environment variables
ENV JBOSS_IMAGE_NAME="{{name}}" \
JBOSS_IMAGE_VERSION="{{version}}" {% if helper.envs(envs) %}\{% for env in helper.envs(envs) %}
| Generate "maintainer" label instead using the MAINTAINER instruction
https://docs.docker.com/engine/reference/builder/#maintainer-deprecated
We should use `maintainer` label. | jboss-dockerfiles/dogen | diff --git a/tests/test_dockerfile.py b/tests/test_dockerfile.py
index 1c6fec8..33534bb 100644
--- a/tests/test_dockerfile.py
+++ b/tests/test_dockerfile.py
@@ -131,9 +131,6 @@ class TestDockerfile(unittest.TestCase):
# https://github.com/jboss-dockerfiles/dogen/issues/124
def test_debug_port(self):
- """
- Test that cmd: is mapped into a CMD instruction
- """
with open(self.yaml, 'ab') as f:
f.write("ports:\n - value: 8080\n - value: 9999\n expose: False".encode())
@@ -147,3 +144,19 @@ class TestDockerfile(unittest.TestCase):
dockerfile = f.read()
regex = re.compile(r'.*EXPOSE 8080$', re.MULTILINE)
self.assertRegexpMatches(dockerfile, regex)
+
+ # https://github.com/jboss-dockerfiles/dogen/issues/129
+ def test_generating_maintainer_label(self):
+ with open(self.yaml, 'ab') as f:
+ f.write("maintainer: Marek Goldmann".encode())
+
+ generator = Generator(self.log, self.args)
+ generator.configure()
+ generator.render_from_template()
+
+ self.assertEqual(generator.cfg['labels'], [{'name': 'maintainer', 'value': 'Marek Goldmann'}])
+
+ with open(os.path.join(self.target, "Dockerfile"), "r") as f:
+ dockerfile = f.read()
+ regex = re.compile(r'LABEL name=\"\$JBOSS_IMAGE_NAME\" \\\s+version=\"\$JBOSS_IMAGE_VERSION\" \\\s+architecture=\"x86_64\" \\\s+com.redhat.component=\"someimage\" \\\s+maintainer=\"Marek Goldmann\"', re.MULTILINE)
+ self.assertRegexpMatches(dockerfile, regex)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_hyperlinks",
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 2
} | 2.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"mock"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi==2025.1.31
charset-normalizer==3.4.1
coverage==7.8.0
docopt==0.6.2
-e git+https://github.com/jboss-dockerfiles/dogen.git@c4cd03cb2352e4538e6bdc64800a2184ce1998e4#egg=dogen
exceptiongroup==1.2.2
idna==3.10
iniconfig==2.1.0
Jinja2==3.1.6
MarkupSafe==3.0.2
mock==5.2.0
packaging==24.2
pluggy==1.5.0
pykwalify==1.8.0
pytest==8.3.5
pytest-cov==6.0.0
python-dateutil==2.9.0.post0
PyYAML==6.0.2
requests==2.32.3
ruamel.yaml==0.18.10
ruamel.yaml.clib==0.2.12
six==1.17.0
tomli==2.2.1
urllib3==2.3.0
| name: dogen
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- certifi==2025.1.31
- charset-normalizer==3.4.1
- coverage==7.8.0
- docopt==0.6.2
- exceptiongroup==1.2.2
- idna==3.10
- iniconfig==2.1.0
- jinja2==3.1.6
- markupsafe==3.0.2
- mock==5.2.0
- packaging==24.2
- pluggy==1.5.0
- pykwalify==1.8.0
- pytest==8.3.5
- pytest-cov==6.0.0
- python-dateutil==2.9.0.post0
- pyyaml==6.0.2
- requests==2.32.3
- ruamel-yaml==0.18.10
- ruamel-yaml-clib==0.2.12
- six==1.17.0
- tomli==2.2.1
- urllib3==2.3.0
prefix: /opt/conda/envs/dogen
| [
"tests/test_dockerfile.py::TestDockerfile::test_generating_maintainer_label"
]
| []
| [
"tests/test_dockerfile.py::TestDockerfile::test_debug_port",
"tests/test_dockerfile.py::TestDockerfile::test_default_cmd_user",
"tests/test_dockerfile.py::TestDockerfile::test_set_cmd",
"tests/test_dockerfile.py::TestDockerfile::test_set_cmd_user",
"tests/test_dockerfile.py::TestDockerfile::test_set_entrypoint",
"tests/test_dockerfile.py::TestDockerfile::test_volumes"
]
| []
| MIT License | 1,337 | [
"dogen/generator.py",
"dogen/templates/template.jinja"
]
| [
"dogen/generator.py",
"dogen/templates/template.jinja"
]
|
|
construct__construct-363 | a8a3645789c832a8ea6260728f81d36dddf899c4 | 2017-06-06 10:40:15 | 670eb6c1b6e9f6c9e6ea7102d08d115612818134 | diff --git a/construct/core.py b/construct/core.py
index ec37366..976384f 100644
--- a/construct/core.py
+++ b/construct/core.py
@@ -64,26 +64,26 @@ def singleton(cls):
def singletonfunction(func):
return func()
-def _read_stream(stream, length):
+def _read_stream(stream, length, unitname="bytes"):
# if not isinstance(length, int):
# raise TypeError("expected length to be int")
if length < 0:
raise ValueError("length must be >= 0", length)
data = stream.read(length)
if len(data) != length:
- raise FieldError("could not read enough bytes, expected %d, found %d" % (length, len(data)))
+ raise FieldError("could not read enough %s, expected %d, found %d" % (unitname, length, len(data)))
return data
-def _write_stream(stream, length, data):
+def _write_stream(stream, length, data, unitname="bytes"):
# if not isinstance(data, bytes):
# raise TypeError("expected data to be a bytes")
if length < 0:
raise ValueError("length must be >= 0", length)
if len(data) != length:
- raise FieldError("could not write bytes, expected %d, found %d" % (length, len(data)))
+ raise FieldError("could not write %s, expected %d, found %d" % (unitname, length, len(data)))
written = stream.write(data)
if written is not None and written != length:
- raise FieldError("could not write bytes, written %d, should %d" % (written, length))
+ raise FieldError("could not write %s, written %d, should %d" % (unitname, written, length))
#===============================================================================
@@ -481,7 +481,7 @@ def Bitwise(subcon):
>>> Bitwise(Octet).sizeof()
1
"""
- return Restreamed(subcon, bits2bytes, 8, bytes2bits, 1, lambda n: n//8)
+ return Restreamed(subcon, bits2bytes, 8, bytes2bits, 1, "bits", lambda n: n//8)
def Bytewise(subcon):
@@ -499,7 +499,7 @@ def Bytewise(subcon):
>>> Bitwise(Bytewise(Byte)).sizeof()
1
"""
- return Restreamed(subcon, bytes2bits, 1, bits2bytes, 8, lambda n: n*8)
+ return Restreamed(subcon, bytes2bits, 1, bits2bytes, 8, "bytes", lambda n: n*8)
class BytesInteger(Construct):
@@ -577,7 +577,7 @@ class BitsInteger(Construct):
self.bytesize = bytesize
def _parse(self, stream, context, path):
length = self.length(context) if callable(self.length) else self.length
- data = _read_stream(stream, length)
+ data = _read_stream(stream, length, "bits")
if self.swapped:
data = swapbytes(data, self.bytesize)
return bits2integer(data, self.signed)
@@ -1767,6 +1767,7 @@ class Restreamed(Subconstruct):
:param encoderunit: ratio as int, encoder takes that many bytes at once
:param decoder: a function that takes a b-string and returns a b-string (used when parsing)
:param decoderunit: ratio as int, decoder takes that many bytes at once
+ :param decoderunitname: English string that describes the units (plural) returned by the decoder. Used for error messages.
:param sizecomputer: a function that computes amount of bytes outputed by some bytes
Example::
@@ -1774,21 +1775,23 @@ class Restreamed(Subconstruct):
Bitwise <--> Restreamed(subcon, bits2bytes, 8, bytes2bits, 1, lambda n: n//8)
Bytewise <--> Restreamed(subcon, bytes2bits, 1, bits2bytes, 8, lambda n: n*8)
"""
- __slots__ = ["sizecomputer", "encoder", "encoderunit", "decoder", "decoderunit"]
- def __init__(self, subcon, encoder, encoderunit, decoder, decoderunit, sizecomputer):
+
+ __slots__ = ["sizecomputer", "encoder", "encoderunit", "decoder", "decoderunit", "decoderunitname"]
+ def __init__(self, subcon, encoder, encoderunit, decoder, decoderunit, decoderunitname, sizecomputer):
super(Restreamed, self).__init__(subcon)
self.encoder = encoder
self.encoderunit = encoderunit
self.decoder = decoder
self.decoderunit = decoderunit
+ self.decoderunitname = decoderunitname
self.sizecomputer = sizecomputer
def _parse(self, stream, context, path):
- stream2 = RestreamedBytesIO(stream, self.encoder, self.encoderunit, self.decoder, self.decoderunit)
+ stream2 = RestreamedBytesIO(stream, self.encoder, self.encoderunit, self.decoder, self.decoderunit, self.decoderunitname)
obj = self.subcon._parse(stream2, context, path)
stream2.close()
return obj
def _build(self, obj, stream, context, path):
- stream2 = RestreamedBytesIO(stream, self.encoder, self.encoderunit, self.decoder, self.decoderunit)
+ stream2 = RestreamedBytesIO(stream, self.encoder, self.encoderunit, self.decoder, self.decoderunit, self.decoderunitname)
buildret = self.subcon._build(obj, stream2, context, path)
stream2.close()
return buildret
@@ -2163,6 +2166,7 @@ def ByteSwapped(subcon):
return Restreamed(subcon,
lambda s: s[::-1], subcon.sizeof(),
lambda s: s[::-1], subcon.sizeof(),
+ "bytes",
lambda n: n)
@@ -2182,6 +2186,7 @@ def BitsSwapped(subcon):
return Restreamed(subcon,
lambda s: bits2bytes(bytes2bits(s)[::-1]), 1,
lambda s: bits2bytes(bytes2bits(s)[::-1]), 1,
+ "bits",
lambda n: n)
diff --git a/construct/lib/bitstream.py b/construct/lib/bitstream.py
index 76c34ca..5425c05 100644
--- a/construct/lib/bitstream.py
+++ b/construct/lib/bitstream.py
@@ -5,14 +5,15 @@ from sys import maxsize
class RestreamedBytesIO(object):
- __slots__ = ["substream", "encoder", "encoderunit", "decoder", "decoderunit", "rbuffer", "wbuffer","sincereadwritten"]
+ __slots__ = ["substream", "encoder", "encoderunit", "decoder", "decoderunit", "decoderunitname", "rbuffer", "wbuffer","sincereadwritten"]
- def __init__(self, substream, encoder, encoderunit, decoder, decoderunit):
+ def __init__(self, substream, encoder, encoderunit, decoder, decoderunit, decoderunitname):
self.substream = substream
self.encoder = encoder
self.encoderunit = encoderunit
self.decoder = decoder
self.decoderunit = decoderunit
+ self.decoderunitname = decoderunitname
self.rbuffer = b""
self.wbuffer = b""
self.sincereadwritten = 0
@@ -23,7 +24,7 @@ class RestreamedBytesIO(object):
while len(self.rbuffer) < count:
data = self.substream.read(self.decoderunit)
if data is None or len(data) == 0:
- raise IOError("Restreamed cannot satisfy read request of %d bytes" % count)
+ raise IOError("Restreamed cannot satisfy read request of %d %s" % (count, self.decoderunitname))
self.rbuffer += self.decoder(data)
data, self.rbuffer = self.rbuffer[:count], self.rbuffer[count:]
self.sincereadwritten += count
@@ -40,9 +41,9 @@ class RestreamedBytesIO(object):
def close(self):
if len(self.rbuffer):
- raise ValueError("closing stream but %d unread bytes remain, %d is decoded unit" % (len(self.rbuffer), self.decoderunit))
+ raise ValueError("closing stream but %d unread %s remain, %d is decoded unit" % (len(self.rbuffer), self.decoderunitname, self.decoderunit))
if len(self.wbuffer):
- raise ValueError("closing stream but %d unwritten bytes remain, %d is encoded unit" % (len(self.wbuffer), self.encoderunit))
+ raise ValueError("closing stream but %d unwritten %s remain, %d is encoded unit" % (len(self.wbuffer), self.decoderunitname, self.encoderunit))
def seekable(self):
return False
| Error message says 'bytes', not bits when there aren't enough bits for BitsInteger
If you try to read `x` more bits than are available using a BitsInteger, the error message incorrectly states that you are trying to request `x` extra bytes, not bits.
Example code:
```python
from construct import BitStruct, Bit, BitsInteger
MY_MESSAGE = BitStruct(
Bit[7],
BitsInteger(17)
)
MY_MESSAGE.parse(b'A')
```
```
Traceback (most recent call last):
File "test.py", line 8, in <module>
MY_MESSAGE.parse(b'A')
File "env\lib\site-packages\construct\core.py", line 165, in parse
return self.parse_stream(BytesIO(data), context, **kw)
File "env\lib\site-packages\construct\core.py", line 176, in parse_stream
return self._parse(stream, context, "parsing")
File "env\lib\site-packages\construct\core.py", line 1791, in _parse
obj = self.subcon._parse(self.stream2, context, path)
File "env\lib\site-packages\construct\core.py", line 849, in _parse
subobj = sc._parse(stream, context, path)
File "env\lib\site-packages\construct\core.py", line 576, in _parse
data = _read_stream(stream, length)
File "env\lib\site-packages\construct\core.py", line 72, in _read_stream
data = stream.read(length)
File "env\lib\site-packages\construct\lib\bitstream.py", line 26, in read
raise IOError("Restreamed cannot satisfy read request of %d bytes" % count)
OSError: Restreamed cannot satisfy read request of 17 bytes
```
While trying to parse a complicated object, I was confused as to what combination of my bit fields added to the number of bytes presented in the error message.
In the above example, it should report `OSError: Restreamed cannot satisfy read request of 17 bits` | construct/construct | diff --git a/tests/test_all.py b/tests/test_all.py
index 04eed61..ad0997d 100644
--- a/tests/test_all.py
+++ b/tests/test_all.py
@@ -1084,13 +1084,13 @@ class TestCore(unittest.TestCase):
Struct(ProbeInto(this.inner)).build({})
def test_restreamed(self):
- assert Restreamed(Int16ub, ident, 1, ident, 1, ident).parse(b"\x00\x01") == 1
- assert Restreamed(Int16ub, ident, 1, ident, 1, ident).build(1) == b"\x00\x01"
- assert Restreamed(Int16ub, ident, 1, ident, 1, ident).sizeof() == 2
- assert raises(Restreamed(VarInt, ident, 1, ident, 1, ident).sizeof) == SizeofError
- assert Restreamed(Bytes(2), None, None, lambda b: b*2, 1, None).parse(b"a") == b"aa"
- assert Restreamed(Bytes(1), lambda b: b*2, 1, None, None, None).build(b"a") == b"aa"
- assert Restreamed(Bytes(5), None, None, None, None, lambda n: n*2).sizeof() == 10
+ assert Restreamed(Int16ub, ident, 1, ident, 1, "bytes", ident).parse(b"\x00\x01") == 1
+ assert Restreamed(Int16ub, ident, 1, ident, 1, "bytes", ident).build(1) == b"\x00\x01"
+ assert Restreamed(Int16ub, ident, 1, ident, 1, "bytes", ident).sizeof() == 2
+ assert raises(Restreamed(VarInt, ident, 1, ident, 1, "bytes", ident).sizeof) == SizeofError
+ assert Restreamed(Bytes(2), None, None, lambda b: b*2, 1, "bytes", None).parse(b"a") == b"aa"
+ assert Restreamed(Bytes(1), lambda b: b*2, 1, None, None, "bytes", None).build(b"a") == b"aa"
+ assert Restreamed(Bytes(5), None, None, None, None, "bytes", lambda n: n*2).sizeof() == 10
def test_rebuffered(self):
data = b"0" * 1000
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 0,
"test_score": 2
},
"num_modified_files": 2
} | 2.8 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"numpy"
],
"pre_install": null,
"python": "3.6",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
-e git+https://github.com/construct/construct.git@a8a3645789c832a8ea6260728f81d36dddf899c4#egg=construct
coverage==6.2
importlib-metadata==4.8.3
iniconfig==1.1.1
numpy==1.19.5
packaging==21.3
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
pytest-cov==4.0.0
tomli==1.2.3
typing_extensions==4.1.1
zipp==3.6.0
| name: construct
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- coverage==6.2
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- numpy==1.19.5
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-cov==4.0.0
- tomli==1.2.3
- typing-extensions==4.1.1
- zipp==3.6.0
prefix: /opt/conda/envs/construct
| [
"tests/test_all.py::TestCore::test_restreamed"
]
| []
| [
"tests/test_all.py::TestCore::test_aligned",
"tests/test_all.py::TestCore::test_alignedstruct",
"tests/test_all.py::TestCore::test_array",
"tests/test_all.py::TestCore::test_bitsinteger",
"tests/test_all.py::TestCore::test_bitsswapped",
"tests/test_all.py::TestCore::test_bitsswapped_from_issue_145",
"tests/test_all.py::TestCore::test_bitstruct",
"tests/test_all.py::TestCore::test_bitstruct_from_issue_39",
"tests/test_all.py::TestCore::test_bitwise",
"tests/test_all.py::TestCore::test_byte",
"tests/test_all.py::TestCore::test_bytes",
"tests/test_all.py::TestCore::test_bytesinteger",
"tests/test_all.py::TestCore::test_byteswapped",
"tests/test_all.py::TestCore::test_byteswapped_from_issue_70",
"tests/test_all.py::TestCore::test_bytewise",
"tests/test_all.py::TestCore::test_check",
"tests/test_all.py::TestCore::test_checksum",
"tests/test_all.py::TestCore::test_compressed_bzip2",
"tests/test_all.py::TestCore::test_compressed_gzip",
"tests/test_all.py::TestCore::test_compressed_lzma",
"tests/test_all.py::TestCore::test_compressed_prefixed",
"tests/test_all.py::TestCore::test_compressed_zlib",
"tests/test_all.py::TestCore::test_computed",
"tests/test_all.py::TestCore::test_const",
"tests/test_all.py::TestCore::test_cstring",
"tests/test_all.py::TestCore::test_default",
"tests/test_all.py::TestCore::test_embeddedbitstruct",
"tests/test_all.py::TestCore::test_embeddedif_issue_296",
"tests/test_all.py::TestCore::test_embeddedswitch_issue_312",
"tests/test_all.py::TestCore::test_enum",
"tests/test_all.py::TestCore::test_error",
"tests/test_all.py::TestCore::test_expradapter",
"tests/test_all.py::TestCore::test_exprsymmetricadapter",
"tests/test_all.py::TestCore::test_exprvalidator",
"tests/test_all.py::TestCore::test_filter",
"tests/test_all.py::TestCore::test_flag",
"tests/test_all.py::TestCore::test_flagsenum",
"tests/test_all.py::TestCore::test_floats",
"tests/test_all.py::TestCore::test_floats_randomized",
"tests/test_all.py::TestCore::test_focusedseq",
"tests/test_all.py::TestCore::test_formatfield",
"tests/test_all.py::TestCore::test_formatfield_floats_randomized",
"tests/test_all.py::TestCore::test_formatfield_ints_randomized",
"tests/test_all.py::TestCore::test_from_issue_171",
"tests/test_all.py::TestCore::test_from_issue_175",
"tests/test_all.py::TestCore::test_from_issue_231",
"tests/test_all.py::TestCore::test_from_issue_244",
"tests/test_all.py::TestCore::test_from_issue_246",
"tests/test_all.py::TestCore::test_from_issue_269",
"tests/test_all.py::TestCore::test_from_issue_28",
"tests/test_all.py::TestCore::test_from_issue_298",
"tests/test_all.py::TestCore::test_from_issue_324",
"tests/test_all.py::TestCore::test_from_issue_357",
"tests/test_all.py::TestCore::test_from_issue_362",
"tests/test_all.py::TestCore::test_from_issue_60",
"tests/test_all.py::TestCore::test_from_issue_71",
"tests/test_all.py::TestCore::test_from_issue_76",
"tests/test_all.py::TestCore::test_from_issue_87",
"tests/test_all.py::TestCore::test_globally_encoded_strings",
"tests/test_all.py::TestCore::test_greedybytes",
"tests/test_all.py::TestCore::test_greedyrange",
"tests/test_all.py::TestCore::test_greedystring",
"tests/test_all.py::TestCore::test_hanging_issue_280",
"tests/test_all.py::TestCore::test_hex",
"tests/test_all.py::TestCore::test_hex_regression_188",
"tests/test_all.py::TestCore::test_hexdump",
"tests/test_all.py::TestCore::test_hexdump_regression_188",
"tests/test_all.py::TestCore::test_if",
"tests/test_all.py::TestCore::test_ifthenelse",
"tests/test_all.py::TestCore::test_indexing",
"tests/test_all.py::TestCore::test_ints",
"tests/test_all.py::TestCore::test_ints24",
"tests/test_all.py::TestCore::test_ipaddress_from_issue_95",
"tests/test_all.py::TestCore::test_lazybound",
"tests/test_all.py::TestCore::test_lazybound_node",
"tests/test_all.py::TestCore::test_lazyrange",
"tests/test_all.py::TestCore::test_lazysequence",
"tests/test_all.py::TestCore::test_lazysequence_nested_embedded",
"tests/test_all.py::TestCore::test_lazystruct",
"tests/test_all.py::TestCore::test_lazystruct_nested_embedded",
"tests/test_all.py::TestCore::test_namedtuple",
"tests/test_all.py::TestCore::test_nonbytes_checksum_issue_323",
"tests/test_all.py::TestCore::test_noneof",
"tests/test_all.py::TestCore::test_numpy",
"tests/test_all.py::TestCore::test_ondemand",
"tests/test_all.py::TestCore::test_ondemandpointer",
"tests/test_all.py::TestCore::test_oneof",
"tests/test_all.py::TestCore::test_operators",
"tests/test_all.py::TestCore::test_optional",
"tests/test_all.py::TestCore::test_padded",
"tests/test_all.py::TestCore::test_padding",
"tests/test_all.py::TestCore::test_pascalstring",
"tests/test_all.py::TestCore::test_pass",
"tests/test_all.py::TestCore::test_peek",
"tests/test_all.py::TestCore::test_pointer",
"tests/test_all.py::TestCore::test_prefixed",
"tests/test_all.py::TestCore::test_prefixedarray",
"tests/test_all.py::TestCore::test_probe",
"tests/test_all.py::TestCore::test_probeinto",
"tests/test_all.py::TestCore::test_range",
"tests/test_all.py::TestCore::test_rawcopy",
"tests/test_all.py::TestCore::test_rawcopy_issue_289",
"tests/test_all.py::TestCore::test_rawcopy_issue_358",
"tests/test_all.py::TestCore::test_rebuffered",
"tests/test_all.py::TestCore::test_rebuild",
"tests/test_all.py::TestCore::test_renamed",
"tests/test_all.py::TestCore::test_repeatuntil",
"tests/test_all.py::TestCore::test_seek",
"tests/test_all.py::TestCore::test_select",
"tests/test_all.py::TestCore::test_select_kwctor",
"tests/test_all.py::TestCore::test_sequence",
"tests/test_all.py::TestCore::test_sequence_nested_embedded",
"tests/test_all.py::TestCore::test_slicing",
"tests/test_all.py::TestCore::test_stopif",
"tests/test_all.py::TestCore::test_string",
"tests/test_all.py::TestCore::test_struct",
"tests/test_all.py::TestCore::test_struct_kwctor",
"tests/test_all.py::TestCore::test_struct_nested_embedded",
"tests/test_all.py::TestCore::test_struct_proper_context",
"tests/test_all.py::TestCore::test_struct_sizeof_context_nesting",
"tests/test_all.py::TestCore::test_switch",
"tests/test_all.py::TestCore::test_tell",
"tests/test_all.py::TestCore::test_terminated",
"tests/test_all.py::TestCore::test_union",
"tests/test_all.py::TestCore::test_union_issue_348",
"tests/test_all.py::TestCore::test_union_kwctor",
"tests/test_all.py::TestCore::test_varint",
"tests/test_all.py::TestCore::test_varint_randomized"
]
| []
| MIT License | 1,338 | [
"construct/core.py",
"construct/lib/bitstream.py"
]
| [
"construct/core.py",
"construct/lib/bitstream.py"
]
|
|
jboss-dockerfiles__dogen-139 | d916f2720cb1660517216f7804b7ddfcb97ebb65 | 2017-06-06 10:57:44 | bc9263c5b683fdf901ac1646286ee3908b85dcdc | diff --git a/dogen/generator.py b/dogen/generator.py
index 50ed93c..335db99 100644
--- a/dogen/generator.py
+++ b/dogen/generator.py
@@ -226,14 +226,16 @@ class Generator(object):
self.log.info("Finished!")
def render_from_template(self):
- maintainer = self.cfg.get('maintainer')
+ if not self.cfg.get('labels'):
+ self.cfg['labels'] = []
# https://github.com/jboss-dockerfiles/dogen/issues/129
- if maintainer:
- if not self.cfg.get('labels'):
- self.cfg['labels'] = []
+ # https://github.com/jboss-dockerfiles/dogen/issues/137
+ for label in ['maintainer', 'description']:
+ value = self.cfg.get(label)
- self.cfg['labels'].append({'name': 'maintainer', 'value': maintainer})
+ if value:
+ self.cfg['labels'].append({'name': label, 'value': value})
if self.template:
template_file = self.template
| Set the "description" label based on the value of "description" field | jboss-dockerfiles/dogen | diff --git a/tests/test_dockerfile.py b/tests/test_dockerfile.py
index 33534bb..2ffa5a6 100644
--- a/tests/test_dockerfile.py
+++ b/tests/test_dockerfile.py
@@ -160,3 +160,19 @@ class TestDockerfile(unittest.TestCase):
dockerfile = f.read()
regex = re.compile(r'LABEL name=\"\$JBOSS_IMAGE_NAME\" \\\s+version=\"\$JBOSS_IMAGE_VERSION\" \\\s+architecture=\"x86_64\" \\\s+com.redhat.component=\"someimage\" \\\s+maintainer=\"Marek Goldmann\"', re.MULTILINE)
self.assertRegexpMatches(dockerfile, regex)
+
+ # https://github.com/jboss-dockerfiles/dogen/issues/137
+ def test_generating_description_label(self):
+ with open(self.yaml, 'ab') as f:
+ f.write("description: This is a nice image".encode())
+
+ generator = Generator(self.log, self.args)
+ generator.configure()
+ generator.render_from_template()
+
+ self.assertEqual(generator.cfg['labels'], [{'name': 'description', 'value': 'This is a nice image'}])
+
+ with open(os.path.join(self.target, "Dockerfile"), "r") as f:
+ dockerfile = f.read()
+ regex = re.compile(r'LABEL name=\"\$JBOSS_IMAGE_NAME\" \\\s+version=\"\$JBOSS_IMAGE_VERSION\" \\\s+architecture=\"x86_64\" \\\s+com.redhat.component=\"someimage\" \\\s+description=\"This is a nice image\"', re.MULTILINE)
+ self.assertRegexpMatches(dockerfile, regex)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 3,
"test_score": 2
},
"num_modified_files": 1
} | 2.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"mock",
"pykwalify"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi==2025.1.31
charset-normalizer==3.4.1
coverage==7.8.0
docopt==0.6.2
-e git+https://github.com/jboss-dockerfiles/dogen.git@d916f2720cb1660517216f7804b7ddfcb97ebb65#egg=dogen
exceptiongroup==1.2.2
idna==3.10
iniconfig==2.1.0
Jinja2==3.1.6
MarkupSafe==3.0.2
mock==5.2.0
packaging==24.2
pluggy==1.5.0
pykwalify==1.8.0
pytest==8.3.5
pytest-cov==6.0.0
python-dateutil==2.9.0.post0
PyYAML==6.0.2
requests==2.32.3
ruamel.yaml==0.18.10
ruamel.yaml.clib==0.2.12
six==1.17.0
tomli==2.2.1
urllib3==2.3.0
| name: dogen
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- certifi==2025.1.31
- charset-normalizer==3.4.1
- coverage==7.8.0
- docopt==0.6.2
- exceptiongroup==1.2.2
- idna==3.10
- iniconfig==2.1.0
- jinja2==3.1.6
- markupsafe==3.0.2
- mock==5.2.0
- packaging==24.2
- pluggy==1.5.0
- pykwalify==1.8.0
- pytest==8.3.5
- pytest-cov==6.0.0
- python-dateutil==2.9.0.post0
- pyyaml==6.0.2
- requests==2.32.3
- ruamel-yaml==0.18.10
- ruamel-yaml-clib==0.2.12
- six==1.17.0
- tomli==2.2.1
- urllib3==2.3.0
prefix: /opt/conda/envs/dogen
| [
"tests/test_dockerfile.py::TestDockerfile::test_generating_description_label"
]
| []
| [
"tests/test_dockerfile.py::TestDockerfile::test_debug_port",
"tests/test_dockerfile.py::TestDockerfile::test_default_cmd_user",
"tests/test_dockerfile.py::TestDockerfile::test_generating_maintainer_label",
"tests/test_dockerfile.py::TestDockerfile::test_set_cmd",
"tests/test_dockerfile.py::TestDockerfile::test_set_cmd_user",
"tests/test_dockerfile.py::TestDockerfile::test_set_entrypoint",
"tests/test_dockerfile.py::TestDockerfile::test_volumes"
]
| []
| MIT License | 1,339 | [
"dogen/generator.py"
]
| [
"dogen/generator.py"
]
|
|
jboss-dockerfiles__dogen-140 | ac07abe63ec944d407e1d06669a09deeacc4e720 | 2017-06-06 12:34:35 | bc9263c5b683fdf901ac1646286ee3908b85dcdc | diff --git a/dogen/schema/kwalify_schema.yaml b/dogen/schema/kwalify_schema.yaml
index 79a6b09..05069f3 100644
--- a/dogen/schema/kwalify_schema.yaml
+++ b/dogen/schema/kwalify_schema.yaml
@@ -23,22 +23,14 @@ map:
value: {type: str, required: True}
description: {type: str}
envs:
- type: map
- mapping:
- information:
- type: seq
- sequence:
- - type: map
- mapping:
- name: {type: str, required: True}
- value: {type: any, required: True}
- description: {type: str}
- configuration:
- seq:
- - map:
- name: {type: str, required: True}
- example: {type: any}
- description: {type: str}
+ type: seq
+ sequence:
+ - type: map
+ mapping:
+ name: {type: str, required: True}
+ value: {type: any}
+ example: {type: any}
+ description: {type: str}
ports:
seq:
- map:
diff --git a/dogen/template_helper.py b/dogen/template_helper.py
index e36693b..09106a7 100644
--- a/dogen/template_helper.py
+++ b/dogen/template_helper.py
@@ -52,14 +52,9 @@ class TemplateHelper(object):
envs = []
- if 'information' in env_variables:
- for e in env_variables['information']:
- envs.append(e)
-
- if 'configuration' in env_variables:
- for e in env_variables['configuration']:
- if 'value' in e:
- envs.append(e)
+ for env in env_variables:
+ if env.get('value') is not None:
+ envs.append(env)
return envs
| Remove information and configuration sections in envs
Instead we should have a single `envs` section that contains an array of environment variables. The old "information" envs should have a new key added: `information` (or similar) with boolean value.
This makes it easier to understand the image.yaml. | jboss-dockerfiles/dogen | diff --git a/tests/schemas/good/openshift_amq_6.2_image.yaml b/tests/schemas/good/openshift_amq_6.2_image.yaml
index 2cde6e8..00c95c9 100644
--- a/tests/schemas/good/openshift_amq_6.2_image.yaml
+++ b/tests/schemas/good/openshift_amq_6.2_image.yaml
@@ -12,19 +12,18 @@ labels:
- name: "io.openshift.s2i.scripts-url"
value: "image:///usr/local/s2i"
envs:
- configuration:
- - name: AMQ_MESH_SERVICE_NAME
- example: broker-amq-tcp
- - name: AMQ_MESH_DISCOVERY_TYPE
- example: kube
- - name: AMQ_MESH_SERVICE_NAMESPACE
- example: my-project
- - name: AMQ_SPLIT
- example: true
- description: "Allow multiple instances to share the same Persistent Volume. If set to true each instance will use a separate area within the Persistent Volume as its data directory. Default is unset."
- - name: AMQ_LOCK_TIMEOUT
- example: 30
- description: "If allowing multiple instances to share the same Persistent Volume then wait for this timeout period (seconds) to see if an existing server will terminate before moving to the next instance. Default is 30 seconds."
+ - name: AMQ_MESH_SERVICE_NAME
+ example: broker-amq-tcp
+ - name: AMQ_MESH_DISCOVERY_TYPE
+ example: kube
+ - name: AMQ_MESH_SERVICE_NAMESPACE
+ example: my-project
+ - name: AMQ_SPLIT
+ example: true
+ description: "Allow multiple instances to share the same Persistent Volume. If set to true each instance will use a separate area within the Persistent Volume as its data directory. Default is unset."
+ - name: AMQ_LOCK_TIMEOUT
+ example: 30
+ description: "If allowing multiple instances to share the same Persistent Volume then wait for this timeout period (seconds) to see if an existing server will terminate before moving to the next instance. Default is 30 seconds."
ports:
- value: 8778
- value: 5672
diff --git a/tests/schemas/good/openshift_datagrid_6.5_image.yaml b/tests/schemas/good/openshift_datagrid_6.5_image.yaml
index 67ff7a1..1cabb3f 100644
--- a/tests/schemas/good/openshift_datagrid_6.5_image.yaml
+++ b/tests/schemas/good/openshift_datagrid_6.5_image.yaml
@@ -14,72 +14,70 @@ labels:
- name: "io.openshift.s2i.scripts-url"
value: "image:///usr/local/s2i"
envs:
- information:
- - name: "JBOSS_MODULES_SYSTEM_PKGS"
- value: "org.jboss.logmanager"
- configuration:
- - name: "OPENSHIFT_KUBE_PING_NAMESPACE"
- example: "myproject"
- description: "Clustering project namespace."
- - name: "OPENSHIFT_KUBE_PING_LABELS"
- example: "application=eap-app"
- description: "Clustering labels selector."
- - name: "JAVA_OPTS_APPEND"
- example: "-Dfoo=bar"
- - name: "JGROUPS_CLUSTER_PASSWORD"
- example: "miR0JaDR"
- description: "A password to control access to the JGroup. Needs to be set consistently cluster-wide. The image default is to use the `OPENSHIFT_KUBE_PING_LABELS` variable value, however the JBoss application templates generate and supply a random value."
- - name: CACHE_NAMES
- description: "List of caches to configure. Defaults to default,memcached"
- example: "addressbook,addressbook_indexed"
- - name: DEFAULT_CACHE
- description: "Indicates the default cache for this cache container."
- example: "addressbook"
- - name: CACHE_CONTAINER_START
- description: "Should this cache container be started on server startup, or lazily when requested by a service or deployment. Defaults to LAZY"
- example: "EAGER"
- - name: CACHE_CONTAINER_STATISTICS
- description: "Determines whether or not the cache container should collect statistics. Disable for optimal performance. Default is true"
- example: "false"
- - name: TRANSPORT_LOCK_TIMEOUT
- description: "Infinispan uses a distributed lock to maintain a coherent transaction log during state transfer or remd5suming, which means that only one cache can be doing state transfer or rehashing at the same time. This constraint is in place because more than one cache could be involved in a transaction. This timeout controls the time to wait to acquire a distributed lock. Defaults to 240000"
- example: "120000"
- - name: CONTAINER_SECURITY_IDENTITY_ROLE_MAPPER
- description: "Set a role mapper for this cache container. Valid values are: identity-role-mapper,common-name-role-mapper,cluster-role-mapper,custom-role-mapper"
- example: "identity-role-mapper"
- - name: CONTAINER_SECURITY_CUSTOM_ROLE_MAPPER_CLASS
- description: "Class of the custom principal to role mapper"
- example: "com.acme.CustomRoleMapper"
- - name: CONTAINER_SECURITY_ROLES
- description: "Defines role names and assigns permissions to them."
- example: "admin=ALL,reader=READ,writer=WRITE"
- - name: INFINISPAN_CONNECTORS
- description: "Comma separated list of connectors to configure. Defaults to hotrod,memcached,rest. Beware, if you enable authorization or authentication on your cache you should remove memcached as the protocol is insecure."
- example: "hotrod"
- - name: HOTROD_SERVICE_NAME
- description: "Name of the OpenShift service used to expose HotRod externally."
- example: "DATAGRID_APP_HOTROD"
- - name: HOTROD_AUTHENTICATION
- description: "If defined the hotrod-connectors will be configured with authentication in the ApplicationRealm."
- example: "true"
- - name: HOTROD_ENCRYPTION
- description: "If defined the hotrod-connectors will be configured with encryption in the ApplicationRealm."
- example: "true"
- - name: ENCRYPTION_REQUIRE_SSL_CLIENT_AUTH
- description: "Whether to require client certificate authentication. Defaults to false."
- example: "true"
- - name: MEMCACHED_CACHE
- description: "The name of the cache to use for the Memcached connector."
- example: "memcached"
- - name: REST_SECURITY_DOMAIN
- description: "The security domain to use for authentication/authorization purposes. Defaults to none (no authentication)"
- example: "other"
- - name: "USERNAME"
- example: "openshift"
- description: "Username for JDG user"
- - name: "PASSWORD"
- example: "p@ssw0rd"
- description: "Password for JDG user"
+ - name: "JBOSS_MODULES_SYSTEM_PKGS"
+ value: "org.jboss.logmanager"
+ - name: "OPENSHIFT_KUBE_PING_NAMESPACE"
+ example: "myproject"
+ description: "Clustering project namespace."
+ - name: "OPENSHIFT_KUBE_PING_LABELS"
+ example: "application=eap-app"
+ description: "Clustering labels selector."
+ - name: "JAVA_OPTS_APPEND"
+ example: "-Dfoo=bar"
+ - name: "JGROUPS_CLUSTER_PASSWORD"
+ example: "miR0JaDR"
+ description: "A password to control access to the JGroup. Needs to be set consistently cluster-wide. The image default is to use the `OPENSHIFT_KUBE_PING_LABELS` variable value, however the JBoss application templates generate and supply a random value."
+ - name: CACHE_NAMES
+ description: "List of caches to configure. Defaults to default,memcached"
+ example: "addressbook,addressbook_indexed"
+ - name: DEFAULT_CACHE
+ description: "Indicates the default cache for this cache container."
+ example: "addressbook"
+ - name: CACHE_CONTAINER_START
+ description: "Should this cache container be started on server startup, or lazily when requested by a service or deployment. Defaults to LAZY"
+ example: "EAGER"
+ - name: CACHE_CONTAINER_STATISTICS
+ description: "Determines whether or not the cache container should collect statistics. Disable for optimal performance. Default is true"
+ example: "false"
+ - name: TRANSPORT_LOCK_TIMEOUT
+ description: "Infinispan uses a distributed lock to maintain a coherent transaction log during state transfer or remd5suming, which means that only one cache can be doing state transfer or rehashing at the same time. This constraint is in place because more than one cache could be involved in a transaction. This timeout controls the time to wait to acquire a distributed lock. Defaults to 240000"
+ example: "120000"
+ - name: CONTAINER_SECURITY_IDENTITY_ROLE_MAPPER
+ description: "Set a role mapper for this cache container. Valid values are: identity-role-mapper,common-name-role-mapper,cluster-role-mapper,custom-role-mapper"
+ example: "identity-role-mapper"
+ - name: CONTAINER_SECURITY_CUSTOM_ROLE_MAPPER_CLASS
+ description: "Class of the custom principal to role mapper"
+ example: "com.acme.CustomRoleMapper"
+ - name: CONTAINER_SECURITY_ROLES
+ description: "Defines role names and assigns permissions to them."
+ example: "admin=ALL,reader=READ,writer=WRITE"
+ - name: INFINISPAN_CONNECTORS
+ description: "Comma separated list of connectors to configure. Defaults to hotrod,memcached,rest. Beware, if you enable authorization or authentication on your cache you should remove memcached as the protocol is insecure."
+ example: "hotrod"
+ - name: HOTROD_SERVICE_NAME
+ description: "Name of the OpenShift service used to expose HotRod externally."
+ example: "DATAGRID_APP_HOTROD"
+ - name: HOTROD_AUTHENTICATION
+ description: "If defined the hotrod-connectors will be configured with authentication in the ApplicationRealm."
+ example: "true"
+ - name: HOTROD_ENCRYPTION
+ description: "If defined the hotrod-connectors will be configured with encryption in the ApplicationRealm."
+ example: "true"
+ - name: ENCRYPTION_REQUIRE_SSL_CLIENT_AUTH
+ description: "Whether to require client certificate authentication. Defaults to false."
+ example: "true"
+ - name: MEMCACHED_CACHE
+ description: "The name of the cache to use for the Memcached connector."
+ example: "memcached"
+ - name: REST_SECURITY_DOMAIN
+ description: "The security domain to use for authentication/authorization purposes. Defaults to none (no authentication)"
+ example: "other"
+ - name: "USERNAME"
+ example: "openshift"
+ description: "Username for JDG user"
+ - name: "PASSWORD"
+ example: "p@ssw0rd"
+ description: "Password for JDG user"
ports:
- value: 8443
- value: 8778
diff --git a/tests/schemas/good/openshift_decisionserver_6.2_image.yaml b/tests/schemas/good/openshift_decisionserver_6.2_image.yaml
index 6fe16a8..3ff05e5 100644
--- a/tests/schemas/good/openshift_decisionserver_6.2_image.yaml
+++ b/tests/schemas/good/openshift_decisionserver_6.2_image.yaml
@@ -11,14 +11,13 @@ labels:
- name: "io.openshift.tags"
value: "builder,decisionserver,decisionserver6"
envs:
- information:
- - name: "KIE_SERVER_BPM_DISABLED"
- value: "true"
- description: "The BPM capability is disabled in this image."
- - name: "KIE_SERVER_BPM_UI_DISABLED"
- value: "true"
- description: "The BPM UI capability is disabled in this image."
- - name: "KIE_SERVER_BRP_DISABLED"
- value: "true"
- description: "The BRP capability is disabled in this image."
+ - name: "KIE_SERVER_BPM_DISABLED"
+ value: "true"
+ description: "The BPM capability is disabled in this image."
+ - name: "KIE_SERVER_BPM_UI_DISABLED"
+ value: "true"
+ description: "The BPM UI capability is disabled in this image."
+ - name: "KIE_SERVER_BRP_DISABLED"
+ value: "true"
+ description: "The BRP capability is disabled in this image."
diff --git a/tests/schemas/good/openshift_decisionserver_6.3_image.yaml b/tests/schemas/good/openshift_decisionserver_6.3_image.yaml
index 1eff437..28ae16f 100644
--- a/tests/schemas/good/openshift_decisionserver_6.3_image.yaml
+++ b/tests/schemas/good/openshift_decisionserver_6.3_image.yaml
@@ -11,14 +11,13 @@ labels:
- name: "io.openshift.tags"
value: "builder,decisionserver,decisionserver6"
envs:
- information:
- - name: "KIE_SERVER_BPM_DISABLED"
- value: "true"
- description: "The BPM capability is disabled in this image."
- - name: "KIE_SERVER_BPM_UI_DISABLED"
- value: "true"
- description: "The BPM UI capability is disabled in this image."
- - name: "KIE_SERVER_BRP_DISABLED"
- value: "true"
- description: "The BRP capability is disabled in this image."
+ - name: "KIE_SERVER_BPM_DISABLED"
+ value: "true"
+ description: "The BPM capability is disabled in this image."
+ - name: "KIE_SERVER_BPM_UI_DISABLED"
+ value: "true"
+ description: "The BPM UI capability is disabled in this image."
+ - name: "KIE_SERVER_BRP_DISABLED"
+ value: "true"
+ description: "The BRP capability is disabled in this image."
diff --git a/tests/schemas/good/openshift_eap_6.4_image.yaml b/tests/schemas/good/openshift_eap_6.4_image.yaml
index 0ab9029..44075d3 100644
--- a/tests/schemas/good/openshift_eap_6.4_image.yaml
+++ b/tests/schemas/good/openshift_eap_6.4_image.yaml
@@ -14,24 +14,22 @@ labels:
- name: "io.openshift.s2i.scripts-url"
value: "image:///usr/local/s2i"
envs:
- information:
- - name: "STI_BUILDER"
- value: "jee"
- - name: "JBOSS_MODULES_SYSTEM_PKGS"
- value: "org.jboss.logmanager,jdk.nashorn.api"
- configuration:
- - name: "OPENSHIFT_KUBE_PING_NAMESPACE"
- example: "myproject"
- description: "Clustering project namespace."
- - name: "OPENSHIFT_KUBE_PING_LABELS"
- example: "application=eap-app"
- description: "Clustering labels selector."
- - name: "JAVA_OPTS_APPEND"
- example: "-Dfoo=bar"
- description: "Server startup options."
- - name: "MQ_SIMPLE_DEFAULT_PHYSICAL_DESTINATION"
- example: "false"
- description: "For backwards compatability, set to true to use 'MyQueue' and 'MyTopic' as physical destination name defaults instead of 'queue/MyQueue' and 'topic/MyTopic'."
+ - name: "STI_BUILDER"
+ value: "jee"
+ - name: "JBOSS_MODULES_SYSTEM_PKGS"
+ value: "org.jboss.logmanager,jdk.nashorn.api"
+ - name: "OPENSHIFT_KUBE_PING_NAMESPACE"
+ example: "myproject"
+ description: "Clustering project namespace."
+ - name: "OPENSHIFT_KUBE_PING_LABELS"
+ example: "application=eap-app"
+ description: "Clustering labels selector."
+ - name: "JAVA_OPTS_APPEND"
+ example: "-Dfoo=bar"
+ description: "Server startup options."
+ - name: "MQ_SIMPLE_DEFAULT_PHYSICAL_DESTINATION"
+ example: "false"
+ description: "For backwards compatability, set to true to use 'MyQueue' and 'MyTopic' as physical destination name defaults instead of 'queue/MyQueue' and 'topic/MyTopic'."
ports:
- value: 8443
- value: 8778
diff --git a/tests/schemas/good/openshift_eap_7.0_image.yaml b/tests/schemas/good/openshift_eap_7.0_image.yaml
index 7c0c4b4..8d392ca 100644
--- a/tests/schemas/good/openshift_eap_7.0_image.yaml
+++ b/tests/schemas/good/openshift_eap_7.0_image.yaml
@@ -14,33 +14,31 @@ labels:
- name: "io.openshift.s2i.scripts-url"
value: "image:///usr/local/s2i"
envs:
- information:
- - name: "STI_BUILDER"
- value: "jee"
- - name: "JBOSS_MODULES_SYSTEM_PKGS"
- value: "org.jboss.logmanager,org.jboss.byteman,jdk.nashorn.api"
- configuration:
- - name: "OPENSHIFT_KUBE_PING_NAMESPACE"
- example: "myproject"
- description: "Clustering project namespace."
- - name: "OPENSHIFT_KUBE_PING_LABELS"
- example: "application=eap-app"
- description: "Clustering labels selector."
- - name: "JAVA_OPTS_APPEND"
- example: "-Dfoo=bar"
- description: "Server startup options."
- - name: "JBOSS_MODULES_SYSTEM_PKGS_APPEND"
- example: "org.jboss.byteman"
- description: "Comma-separated list of package names that will be appended to the JBOSS_MODULES_SYSTEM_PKGS environment variable."
- - name: "MQ_SIMPLE_DEFAULT_PHYSICAL_DESTINATION"
- example: "false"
- description: "For backwards compatability, set to true to use 'MyQueue' and 'MyTopic' as physical destination name defaults instead of 'queue/MyQueue' and 'topic/MyTopic'."
- - name: "DEFAULT_JMS_CONNECTION_FACTORY"
- example: "java:jboss/DefaultJMSConnectionFactory"
- description: "Specify the default JNDI binding for the JMS connection factory (jms-connection-factory='java:jboss/DefaultJMSConnectionFactory')."
- - name: "CLI_GRACEFUL_SHUTDOWN"
- example: "true"
- description: "If set to any non zero length value then the image will prevent shutdown with the TERM signal and will require execution of the shutdown command through jboss-cli."
+ - name: "STI_BUILDER"
+ value: "jee"
+ - name: "JBOSS_MODULES_SYSTEM_PKGS"
+ value: "org.jboss.logmanager,org.jboss.byteman,jdk.nashorn.api"
+ - name: "OPENSHIFT_KUBE_PING_NAMESPACE"
+ example: "myproject"
+ description: "Clustering project namespace."
+ - name: "OPENSHIFT_KUBE_PING_LABELS"
+ example: "application=eap-app"
+ description: "Clustering labels selector."
+ - name: "JAVA_OPTS_APPEND"
+ example: "-Dfoo=bar"
+ description: "Server startup options."
+ - name: "JBOSS_MODULES_SYSTEM_PKGS_APPEND"
+ example: "org.jboss.byteman"
+ description: "Comma-separated list of package names that will be appended to the JBOSS_MODULES_SYSTEM_PKGS environment variable."
+ - name: "MQ_SIMPLE_DEFAULT_PHYSICAL_DESTINATION"
+ example: "false"
+ description: "For backwards compatability, set to true to use 'MyQueue' and 'MyTopic' as physical destination name defaults instead of 'queue/MyQueue' and 'topic/MyTopic'."
+ - name: "DEFAULT_JMS_CONNECTION_FACTORY"
+ example: "java:jboss/DefaultJMSConnectionFactory"
+ description: "Specify the default JNDI binding for the JMS connection factory (jms-connection-factory='java:jboss/DefaultJMSConnectionFactory')."
+ - name: "CLI_GRACEFUL_SHUTDOWN"
+ example: "true"
+ description: "If set to any non zero length value then the image will prevent shutdown with the TERM signal and will require execution of the shutdown command through jboss-cli."
ports:
- value: 8443
- value: 8778
diff --git a/tests/schemas/good/openshift_fuse-camel_6.3_image.yaml b/tests/schemas/good/openshift_fuse-camel_6.3_image.yaml
index 9c5855d..7d577d5 100644
--- a/tests/schemas/good/openshift_fuse-camel_6.3_image.yaml
+++ b/tests/schemas/good/openshift_fuse-camel_6.3_image.yaml
@@ -14,24 +14,22 @@ labels:
- name: "io.openshift.s2i.scripts-url"
value: "image:///usr/local/s2i"
envs:
- information:
- - name: "STI_BUILDER"
- value: "jee"
- - name: "JBOSS_MODULES_SYSTEM_PKGS"
- value: "org.jboss.logmanager"
- configuration:
- - name: "OPENSHIFT_KUBE_PING_NAMESPACE"
- example: "myproject"
- description: "Clustering project namespace."
- - name: "OPENSHIFT_KUBE_PING_LABELS"
- example: "application=eap-app"
- description: "Clustering labels selector."
- - name: "JAVA_OPTS_APPEND"
- example: "-Dfoo=bar"
- description: "Server startup options."
- - name: "MQ_SIMPLE_DEFAULT_PHYSICAL_DESTINATION"
- example: "false"
- description: "For backwards compatability, set to true to use 'MyQueue' and 'MyTopic' as physical destination name defaults instead of 'queue/MyQueue' and 'topic/MyTopic'."
+ - name: "STI_BUILDER"
+ value: "jee"
+ - name: "JBOSS_MODULES_SYSTEM_PKGS"
+ value: "org.jboss.logmanager"
+ - name: "OPENSHIFT_KUBE_PING_NAMESPACE"
+ example: "myproject"
+ description: "Clustering project namespace."
+ - name: "OPENSHIFT_KUBE_PING_LABELS"
+ example: "application=eap-app"
+ description: "Clustering labels selector."
+ - name: "JAVA_OPTS_APPEND"
+ example: "-Dfoo=bar"
+ description: "Server startup options."
+ - name: "MQ_SIMPLE_DEFAULT_PHYSICAL_DESTINATION"
+ example: "false"
+ description: "For backwards compatability, set to true to use 'MyQueue' and 'MyTopic' as physical destination name defaults instead of 'queue/MyQueue' and 'topic/MyTopic'."
ports:
- value: 8443
- value: 8778
diff --git a/tests/schemas/good/openshift_kieserver_6.2_image.yaml b/tests/schemas/good/openshift_kieserver_6.2_image.yaml
index 77134f3..7c84769 100644
--- a/tests/schemas/good/openshift_kieserver_6.2_image.yaml
+++ b/tests/schemas/good/openshift_kieserver_6.2_image.yaml
@@ -11,104 +11,102 @@ labels:
- name: "io.openshift.tags"
value: "builder,kieserver,kieserver6"
envs:
- information:
- - name: "KIE_CONTAINER_REDIRECT_ENABLED"
- value: "false"
- description: "KIE Container alias ids will not be redirected to generated deployment ids in 6.2."
- configuration:
- - name: "KIE_CONTAINER_DEPLOYMENT"
- example: "containerId=groupId:artifactId:version|c2=g2:a2:v2'"
- description: "The KIE Containers to deploy. (required)"
- - name: "KIE_SERVER_BPM_DISABLED"
- example: "false"
- description: "Whether to disable the BPM capability. (optional)"
- - name: "KIE_SERVER_BPM_UI_DISABLED"
- example: "false"
- description: "Whether to disable the BPM UI capability. (optional)"
- - name: "KIE_SERVER_BRM_DISABLED"
- example: "false"
- description: "Whether to disable the BRM capability. (optional)"
- - name: "KIE_SERVER_BRP_DISABLED"
- example: "false"
- description: "Whether to disable the BRP capability. (optional)"
- - name: "KIE_SERVER_DOMAIN"
- example: "other"
- description: "JAAS LoginContext domain that shall be used to authenticate users when using JMS. (optional)"
- - name: "KIE_SERVER_EXECUTOR_POOL_SIZE"
- example: "1"
- description: "BPM Executor pool size. (optional)"
- - name: "KIE_SERVER_EXECUTOR_RETRY_COUNT"
- example: "3"
- description: "BPM Executor retry count. (optional)"
- - name: "KIE_SERVER_EXECUTOR_INTERVAL"
- example: "3"
- description: "BPM Executor interval. (optional)"
- - name: "KIE_SERVER_EXECUTOR_INITIAL_DELAY"
- example: "100"
- description: "BPM Executor initial dealy. (optional)"
- - name: "KIE_SERVER_EXECUTOR_TIMEUNIT"
- example: "SECONDS"
- description: "BPM Executor timeunit. (optional)"
- - name: "KIE_SERVER_EXECUTOR_JMS"
- example: "true"
- description: "Whether to use JMS for BPM Executor. (optional)"
- - name: "KIE_SERVER_EXECUTOR_JMS_QUEUE"
- example: "queue/KIE.SERVER.EXECUTOR"
- description: "BPM JNDI name of executor queue for JMS. (optional)"
- - name: "KIE_SERVER_EXECUTOR_JMS_TRANSACTED"
- example: "false"
- description: "Whether to transact JMS for BPM Executor. (optional)"
- - name: "KIE_SERVER_FILTER_CLASSES"
- example: "true"
- description: "Whether remoteable classes in the kjar should be filtered. (optional)"
- - name: "KIE_SERVER_HOST"
- example: "localhost"
- description: "The host name to access the KIE Server REST interface. (optional)"
- - name: "KIE_SERVER_HT_CALLBACK"
- example: "custom"
- description: "BPM Human task callback type (mvel, ldap, db, jaas, props, custom). (optional)"
- - name: "KIE_SERVER_HT_CUSTOM_CALLBACK"
- example: "org.custom.MyUserGroupCallback"
- description: "BPM Human task custom callback class (implements UserGroupCallback). (optional)"
- - name: "KIE_SERVER_HT_USERINFO"
- example: "custom"
- description: "BPM Human task UserInfo type (ldap, db, props, custom). (optional)"
- - name: "KIE_SERVER_HT_CUSTOM_USERINFO"
- example: "org.custom.MyUserInfo"
- description: "BPM Human task custom userinfo class (implements UserInfo). (optional)"
- - name: "KIE_SERVER_ID"
- example: "kieserver"
- description: "The KIE Server identifier. (optional)"
- - name: "KIE_SERVER_JMS_QUEUES_REQUEST"
- example: "queue/KIE.SERVER.REQUEST"
- description: "JNDI name of request queue for JMS. (optional)"
- - name: "KIE_SERVER_JMS_QUEUES_RESPONSE"
- example: "queue/KIE.SERVER.RESPONSE"
- description: "JNDI name of response queue for JMS. (optional)"
- - name: "KIE_SERVER_MBEANS_ENABLED"
- example: "true"
- description: "Whether the KIE Server JMX MBeans should be enabled. (optional)"
- - name: "KIE_SERVER_PASSWORD"
- example: "kieserver1!"
- description: "The password to access the KIE Server REST or JMS interface. Must be different than username; must not be root, admin, or administrator; must contain at least 8 characters, 1 alphabetic character(s), 1 digit(s), and 1 non-alphanumeric symbol(s). (optional)"
- - name: "KIE_SERVER_PERSISTENCE_DIALECT"
- example: "org.hibernate.dialect.MySQL5Dialect"
- description: "BPM Hibernate persistence dialect. (optional)"
- - name: "KIE_SERVER_PERSISTENCE_DS"
- example: "java:/jboss/datasources/ExampleDS"
- description: "BPM Hibernate persistence datasource; will default to DB_JNDI. (optional)"
- - name: "KIE_SERVER_PERSISTENCE_TM"
- example: "org.hibernate.service.jta.platform.internal.JBossAppServerJtaPlatform"
- description: "BPM Hibernate persistence transaction manager. (optional)"
- - name: "KIE_SERVER_PORT"
- example: "8080"
- description: "The port to access the KIE Server REST interface. (optional)"
- - name: "KIE_SERVER_PROTOCOL"
- example: "http"
- description: "The protocol to access the KIE Server REST interface. (optional)"
- - name: "KIE_SERVER_USER"
- example: "kieserver"
- description: "The user name to access the KIE Server REST or JMS interface. (optional)"
+ - name: "KIE_CONTAINER_REDIRECT_ENABLED"
+ value: "false"
+ description: "KIE Container alias ids will not be redirected to generated deployment ids in 6.2."
+ - name: "KIE_CONTAINER_DEPLOYMENT"
+ example: "containerId=groupId:artifactId:version|c2=g2:a2:v2'"
+ description: "The KIE Containers to deploy. (required)"
+ - name: "KIE_SERVER_BPM_DISABLED"
+ example: "false"
+ description: "Whether to disable the BPM capability. (optional)"
+ - name: "KIE_SERVER_BPM_UI_DISABLED"
+ example: "false"
+ description: "Whether to disable the BPM UI capability. (optional)"
+ - name: "KIE_SERVER_BRM_DISABLED"
+ example: "false"
+ description: "Whether to disable the BRM capability. (optional)"
+ - name: "KIE_SERVER_BRP_DISABLED"
+ example: "false"
+ description: "Whether to disable the BRP capability. (optional)"
+ - name: "KIE_SERVER_DOMAIN"
+ example: "other"
+ description: "JAAS LoginContext domain that shall be used to authenticate users when using JMS. (optional)"
+ - name: "KIE_SERVER_EXECUTOR_POOL_SIZE"
+ example: "1"
+ description: "BPM Executor pool size. (optional)"
+ - name: "KIE_SERVER_EXECUTOR_RETRY_COUNT"
+ example: "3"
+ description: "BPM Executor retry count. (optional)"
+ - name: "KIE_SERVER_EXECUTOR_INTERVAL"
+ example: "3"
+ description: "BPM Executor interval. (optional)"
+ - name: "KIE_SERVER_EXECUTOR_INITIAL_DELAY"
+ example: "100"
+ description: "BPM Executor initial dealy. (optional)"
+ - name: "KIE_SERVER_EXECUTOR_TIMEUNIT"
+ example: "SECONDS"
+ description: "BPM Executor timeunit. (optional)"
+ - name: "KIE_SERVER_EXECUTOR_JMS"
+ example: "true"
+ description: "Whether to use JMS for BPM Executor. (optional)"
+ - name: "KIE_SERVER_EXECUTOR_JMS_QUEUE"
+ example: "queue/KIE.SERVER.EXECUTOR"
+ description: "BPM JNDI name of executor queue for JMS. (optional)"
+ - name: "KIE_SERVER_EXECUTOR_JMS_TRANSACTED"
+ example: "false"
+ description: "Whether to transact JMS for BPM Executor. (optional)"
+ - name: "KIE_SERVER_FILTER_CLASSES"
+ example: "true"
+ description: "Whether remoteable classes in the kjar should be filtered. (optional)"
+ - name: "KIE_SERVER_HOST"
+ example: "localhost"
+ description: "The host name to access the KIE Server REST interface. (optional)"
+ - name: "KIE_SERVER_HT_CALLBACK"
+ example: "custom"
+ description: "BPM Human task callback type (mvel, ldap, db, jaas, props, custom). (optional)"
+ - name: "KIE_SERVER_HT_CUSTOM_CALLBACK"
+ example: "org.custom.MyUserGroupCallback"
+ description: "BPM Human task custom callback class (implements UserGroupCallback). (optional)"
+ - name: "KIE_SERVER_HT_USERINFO"
+ example: "custom"
+ description: "BPM Human task UserInfo type (ldap, db, props, custom). (optional)"
+ - name: "KIE_SERVER_HT_CUSTOM_USERINFO"
+ example: "org.custom.MyUserInfo"
+ description: "BPM Human task custom userinfo class (implements UserInfo). (optional)"
+ - name: "KIE_SERVER_ID"
+ example: "kieserver"
+ description: "The KIE Server identifier. (optional)"
+ - name: "KIE_SERVER_JMS_QUEUES_REQUEST"
+ example: "queue/KIE.SERVER.REQUEST"
+ description: "JNDI name of request queue for JMS. (optional)"
+ - name: "KIE_SERVER_JMS_QUEUES_RESPONSE"
+ example: "queue/KIE.SERVER.RESPONSE"
+ description: "JNDI name of response queue for JMS. (optional)"
+ - name: "KIE_SERVER_MBEANS_ENABLED"
+ example: "true"
+ description: "Whether the KIE Server JMX MBeans should be enabled. (optional)"
+ - name: "KIE_SERVER_PASSWORD"
+ example: "kieserver1!"
+ description: "The password to access the KIE Server REST or JMS interface. Must be different than username; must not be root, admin, or administrator; must contain at least 8 characters, 1 alphabetic character(s), 1 digit(s), and 1 non-alphanumeric symbol(s). (optional)"
+ - name: "KIE_SERVER_PERSISTENCE_DIALECT"
+ example: "org.hibernate.dialect.MySQL5Dialect"
+ description: "BPM Hibernate persistence dialect. (optional)"
+ - name: "KIE_SERVER_PERSISTENCE_DS"
+ example: "java:/jboss/datasources/ExampleDS"
+ description: "BPM Hibernate persistence datasource; will default to DB_JNDI. (optional)"
+ - name: "KIE_SERVER_PERSISTENCE_TM"
+ example: "org.hibernate.service.jta.platform.internal.JBossAppServerJtaPlatform"
+ description: "BPM Hibernate persistence transaction manager. (optional)"
+ - name: "KIE_SERVER_PORT"
+ example: "8080"
+ description: "The port to access the KIE Server REST interface. (optional)"
+ - name: "KIE_SERVER_PROTOCOL"
+ example: "http"
+ description: "The protocol to access the KIE Server REST interface. (optional)"
+ - name: "KIE_SERVER_USER"
+ example: "kieserver"
+ description: "The user name to access the KIE Server REST or JMS interface. (optional)"
ports:
- value: 8778
cmd:
diff --git a/tests/schemas/good/openshift_kieserver_6.3_image.yaml b/tests/schemas/good/openshift_kieserver_6.3_image.yaml
index 010b945..bea0c84 100644
--- a/tests/schemas/good/openshift_kieserver_6.3_image.yaml
+++ b/tests/schemas/good/openshift_kieserver_6.3_image.yaml
@@ -11,103 +11,102 @@ labels:
- name: "io.openshift.tags"
value: "builder,kieserver,kieserver6"
envs:
- configuration:
- - name: "KIE_CONTAINER_DEPLOYMENT"
- example: "containerId=groupId:artifactId:version|c2=g2:a2:v2'"
- description: "The KIE Containers to deploy. (required)"
- - name: "KIE_CONTAINER_REDIRECT_ENABLED"
- example: "true"
- description: "Whether KIE Container alias ids will be redirected to generated deployment ids. (optional)"
- - name: "KIE_SERVER_BPM_DISABLED"
- example: "false"
- description: "Whether to disable the BPM capability. (optional)"
- - name: "KIE_SERVER_BPM_UI_DISABLED"
- example: "false"
- description: "Whether to disable the BPM UI capability. (optional)"
- - name: "KIE_SERVER_BRM_DISABLED"
- example: "false"
- description: "Whether to disable the BRM capability. (optional)"
- - name: "KIE_SERVER_BRP_DISABLED"
- example: "false"
- description: "Whether to disable the BRP capability. (optional)"
- - name: "KIE_SERVER_DOMAIN"
- example: "other"
- description: "JAAS LoginContext domain that shall be used to authenticate users when using JMS. (optional)"
- - name: "KIE_SERVER_EXECUTOR_POOL_SIZE"
- example: "1"
- description: "BPM Executor pool size. (optional)"
- - name: "KIE_SERVER_EXECUTOR_RETRY_COUNT"
- example: "3"
- description: "BPM Executor retry count. (optional)"
- - name: "KIE_SERVER_EXECUTOR_INTERVAL"
- example: "3"
- description: "BPM Executor interval. (optional)"
- - name: "KIE_SERVER_EXECUTOR_INITIAL_DELAY"
- example: "100"
- description: "BPM Executor initial dealy. (optional)"
- - name: "KIE_SERVER_EXECUTOR_TIMEUNIT"
- example: "SECONDS"
- description: "BPM Executor timeunit. (optional)"
- - name: "KIE_SERVER_EXECUTOR_JMS"
- example: "true"
- description: "Whether to use JMS for BPM Executor. (optional)"
- - name: "KIE_SERVER_EXECUTOR_JMS_QUEUE"
- example: "queue/KIE.SERVER.EXECUTOR"
- description: "BPM JNDI name of executor queue for JMS. (optional)"
- - name: "KIE_SERVER_EXECUTOR_JMS_TRANSACTED"
- example: "false"
- description: "Whether to transact JMS for BPM Executor. (optional)"
- - name: "KIE_SERVER_FILTER_CLASSES"
- example: "true"
- description: "Whether remoteable classes in the kjar should be filtered. (optional)"
- - name: "KIE_SERVER_HOST"
- example: "localhost"
- description: "The host name to access the KIE Server REST interface. (optional)"
- - name: "KIE_SERVER_HT_CALLBACK"
- example: "custom"
- description: "BPM Human task callback type (mvel, ldap, db, jaas, props, custom). (optional)"
- - name: "KIE_SERVER_HT_CUSTOM_CALLBACK"
- example: "org.custom.MyUserGroupCallback"
- description: "BPM Human task custom callback class (implements UserGroupCallback). (optional)"
- - name: "KIE_SERVER_HT_USERINFO"
- example: "custom"
- description: "BPM Human task UserInfo type (ldap, db, props, custom). (optional)"
- - name: "KIE_SERVER_HT_CUSTOM_USERINFO"
- example: "org.custom.MyUserInfo"
- description: "BPM Human task custom userinfo class (implements UserInfo). (optional)"
- - name: "KIE_SERVER_ID"
- example: "kieserver"
- description: "The KIE Server identifier. (optional)"
- - name: "KIE_SERVER_JMS_QUEUES_REQUEST"
- example: "queue/KIE.SERVER.REQUEST"
- description: "JNDI name of request queue for JMS. (optional)"
- - name: "KIE_SERVER_JMS_QUEUES_RESPONSE"
- example: "queue/KIE.SERVER.RESPONSE"
- description: "JNDI name of response queue for JMS. (optional)"
- - name: "KIE_SERVER_MBEANS_ENABLED"
- example: "true"
- description: "Whether the KIE Server JMX MBeans should be enabled. (optional)"
- - name: "KIE_SERVER_PASSWORD"
- example: "kieserver1!"
- description: "The password to access the KIE Server REST or JMS interface. Must be different than username; must not be root, admin, or administrator; must contain at least 8 characters, 1 alphabetic character(s), 1 digit(s), and 1 non-alphanumeric symbol(s). (optional)"
- - name: "KIE_SERVER_PERSISTENCE_DIALECT"
- example: "org.hibernate.dialect.MySQL5Dialect"
- description: "BPM Hibernate persistence dialect. (optional)"
- - name: "KIE_SERVER_PERSISTENCE_DS"
- example: "java:/jboss/datasources/ExampleDS"
- description: "BPM Hibernate persistence datasource; will default to DB_JNDI. (optional)"
- - name: "KIE_SERVER_PERSISTENCE_TM"
- example: "org.hibernate.service.jta.platform.internal.JBossAppServerJtaPlatform"
- description: "BPM Hibernate persistence transaction manager. (optional)"
- - name: "KIE_SERVER_PORT"
- example: "8080"
- description: "The port to access the KIE Server REST interface. (optional)"
- - name: "KIE_SERVER_PROTOCOL"
- example: "http"
- description: "The protocol to access the KIE Server REST interface. (optional)"
- - name: "KIE_SERVER_USER"
- example: "kieserver"
- description: "The user name to access the KIE Server REST or JMS interface. (optional)"
+ - name: "KIE_CONTAINER_DEPLOYMENT"
+ example: "containerId=groupId:artifactId:version|c2=g2:a2:v2'"
+ description: "The KIE Containers to deploy. (required)"
+ - name: "KIE_CONTAINER_REDIRECT_ENABLED"
+ example: "true"
+ description: "Whether KIE Container alias ids will be redirected to generated deployment ids. (optional)"
+ - name: "KIE_SERVER_BPM_DISABLED"
+ example: "false"
+ description: "Whether to disable the BPM capability. (optional)"
+ - name: "KIE_SERVER_BPM_UI_DISABLED"
+ example: "false"
+ description: "Whether to disable the BPM UI capability. (optional)"
+ - name: "KIE_SERVER_BRM_DISABLED"
+ example: "false"
+ description: "Whether to disable the BRM capability. (optional)"
+ - name: "KIE_SERVER_BRP_DISABLED"
+ example: "false"
+ description: "Whether to disable the BRP capability. (optional)"
+ - name: "KIE_SERVER_DOMAIN"
+ example: "other"
+ description: "JAAS LoginContext domain that shall be used to authenticate users when using JMS. (optional)"
+ - name: "KIE_SERVER_EXECUTOR_POOL_SIZE"
+ example: "1"
+ description: "BPM Executor pool size. (optional)"
+ - name: "KIE_SERVER_EXECUTOR_RETRY_COUNT"
+ example: "3"
+ description: "BPM Executor retry count. (optional)"
+ - name: "KIE_SERVER_EXECUTOR_INTERVAL"
+ example: "3"
+ description: "BPM Executor interval. (optional)"
+ - name: "KIE_SERVER_EXECUTOR_INITIAL_DELAY"
+ example: "100"
+ description: "BPM Executor initial dealy. (optional)"
+ - name: "KIE_SERVER_EXECUTOR_TIMEUNIT"
+ example: "SECONDS"
+ description: "BPM Executor timeunit. (optional)"
+ - name: "KIE_SERVER_EXECUTOR_JMS"
+ example: "true"
+ description: "Whether to use JMS for BPM Executor. (optional)"
+ - name: "KIE_SERVER_EXECUTOR_JMS_QUEUE"
+ example: "queue/KIE.SERVER.EXECUTOR"
+ description: "BPM JNDI name of executor queue for JMS. (optional)"
+ - name: "KIE_SERVER_EXECUTOR_JMS_TRANSACTED"
+ example: "false"
+ description: "Whether to transact JMS for BPM Executor. (optional)"
+ - name: "KIE_SERVER_FILTER_CLASSES"
+ example: "true"
+ description: "Whether remoteable classes in the kjar should be filtered. (optional)"
+ - name: "KIE_SERVER_HOST"
+ example: "localhost"
+ description: "The host name to access the KIE Server REST interface. (optional)"
+ - name: "KIE_SERVER_HT_CALLBACK"
+ example: "custom"
+ description: "BPM Human task callback type (mvel, ldap, db, jaas, props, custom). (optional)"
+ - name: "KIE_SERVER_HT_CUSTOM_CALLBACK"
+ example: "org.custom.MyUserGroupCallback"
+ description: "BPM Human task custom callback class (implements UserGroupCallback). (optional)"
+ - name: "KIE_SERVER_HT_USERINFO"
+ example: "custom"
+ description: "BPM Human task UserInfo type (ldap, db, props, custom). (optional)"
+ - name: "KIE_SERVER_HT_CUSTOM_USERINFO"
+ example: "org.custom.MyUserInfo"
+ description: "BPM Human task custom userinfo class (implements UserInfo). (optional)"
+ - name: "KIE_SERVER_ID"
+ example: "kieserver"
+ description: "The KIE Server identifier. (optional)"
+ - name: "KIE_SERVER_JMS_QUEUES_REQUEST"
+ example: "queue/KIE.SERVER.REQUEST"
+ description: "JNDI name of request queue for JMS. (optional)"
+ - name: "KIE_SERVER_JMS_QUEUES_RESPONSE"
+ example: "queue/KIE.SERVER.RESPONSE"
+ description: "JNDI name of response queue for JMS. (optional)"
+ - name: "KIE_SERVER_MBEANS_ENABLED"
+ example: "true"
+ description: "Whether the KIE Server JMX MBeans should be enabled. (optional)"
+ - name: "KIE_SERVER_PASSWORD"
+ example: "kieserver1!"
+ description: "The password to access the KIE Server REST or JMS interface. Must be different than username; must not be root, admin, or administrator; must contain at least 8 characters, 1 alphabetic character(s), 1 digit(s), and 1 non-alphanumeric symbol(s). (optional)"
+ - name: "KIE_SERVER_PERSISTENCE_DIALECT"
+ example: "org.hibernate.dialect.MySQL5Dialect"
+ description: "BPM Hibernate persistence dialect. (optional)"
+ - name: "KIE_SERVER_PERSISTENCE_DS"
+ example: "java:/jboss/datasources/ExampleDS"
+ description: "BPM Hibernate persistence datasource; will default to DB_JNDI. (optional)"
+ - name: "KIE_SERVER_PERSISTENCE_TM"
+ example: "org.hibernate.service.jta.platform.internal.JBossAppServerJtaPlatform"
+ description: "BPM Hibernate persistence transaction manager. (optional)"
+ - name: "KIE_SERVER_PORT"
+ example: "8080"
+ description: "The port to access the KIE Server REST interface. (optional)"
+ - name: "KIE_SERVER_PROTOCOL"
+ example: "http"
+ description: "The protocol to access the KIE Server REST interface. (optional)"
+ - name: "KIE_SERVER_USER"
+ example: "kieserver"
+ description: "The user name to access the KIE Server REST or JMS interface. (optional)"
ports:
- value: 8778
cmd:
diff --git a/tests/schemas/good/openshift_processserver_6.3_image.yaml b/tests/schemas/good/openshift_processserver_6.3_image.yaml
index cc5b047..e2eef92 100644
--- a/tests/schemas/good/openshift_processserver_6.3_image.yaml
+++ b/tests/schemas/good/openshift_processserver_6.3_image.yaml
@@ -11,11 +11,10 @@ labels:
- name: "io.openshift.tags"
value: "builder,processserver,processserver6"
envs:
- information:
- - name: "KIE_SERVER_BPM_UI_DISABLED"
- value: "true"
- description: "The BPM UI capability is disabled in this image."
- - name: "KIE_SERVER_BRP_DISABLED"
- value: "true"
- description: "The BRP capability is disabled in this image."
+ - name: "KIE_SERVER_BPM_UI_DISABLED"
+ value: "true"
+ description: "The BPM UI capability is disabled in this image."
+ - name: "KIE_SERVER_BRP_DISABLED"
+ value: "true"
+ description: "The BRP capability is disabled in this image."
diff --git a/tests/schemas/good/openshift_sso_7.0_image.yaml b/tests/schemas/good/openshift_sso_7.0_image.yaml
index 0c43530..6ddc915 100644
--- a/tests/schemas/good/openshift_sso_7.0_image.yaml
+++ b/tests/schemas/good/openshift_sso_7.0_image.yaml
@@ -14,30 +14,28 @@ labels:
- name: "io.openshift.s2i.scripts-url"
value: "image:///usr/local/s2i"
envs:
- information:
- - name: "STI_BUILDER"
- value: "jee"
- - name: "JBOSS_MODULES_SYSTEM_PKGS"
- value: "org.jboss.logmanager,jdk.nashorn.api"
- configuration:
- - name: "OPENSHIFT_KUBE_PING_NAMESPACE"
- example: "myproject"
- description: "Clustering project namespace."
- - name: "OPENSHIFT_KUBE_PING_LABELS"
- example: "application=sso"
- description: "Clustering labels selector."
- - name: "JAVA_OPTS_APPEND"
- example: "-Dfoo=bar"
- description: "Server startup options."
- - name: "MQ_SIMPLE_DEFAULT_PHYSICAL_DESTINATION"
- example: "false"
- description: "For backwards compatability, set to true to use 'MyQueue' and 'MyTopic' as physical destination name defaults instead of 'queue/MyQueue' and 'topic/MyTopic'."
- - name: "SSO_ADMIN_USERNAME"
- example: "admin"
- description: "SSO administrator username"
- - name: "SSO_ADMIN_PASSWORD"
- example: "hardtoguess"
- description: "SSO administrator password"
+ - name: "STI_BUILDER"
+ value: "jee"
+ - name: "JBOSS_MODULES_SYSTEM_PKGS"
+ value: "org.jboss.logmanager,jdk.nashorn.api"
+ - name: "OPENSHIFT_KUBE_PING_NAMESPACE"
+ example: "myproject"
+ description: "Clustering project namespace."
+ - name: "OPENSHIFT_KUBE_PING_LABELS"
+ example: "application=sso"
+ description: "Clustering labels selector."
+ - name: "JAVA_OPTS_APPEND"
+ example: "-Dfoo=bar"
+ description: "Server startup options."
+ - name: "MQ_SIMPLE_DEFAULT_PHYSICAL_DESTINATION"
+ example: "false"
+ description: "For backwards compatability, set to true to use 'MyQueue' and 'MyTopic' as physical destination name defaults instead of 'queue/MyQueue' and 'topic/MyTopic'."
+ - name: "SSO_ADMIN_USERNAME"
+ example: "admin"
+ description: "SSO administrator username"
+ - name: "SSO_ADMIN_PASSWORD"
+ example: "hardtoguess"
+ description: "SSO administrator password"
ports:
- value: 8443
- value: 8778
diff --git a/tests/schemas/good/openshift_webserver-tomcat7_3.0_image.yaml b/tests/schemas/good/openshift_webserver-tomcat7_3.0_image.yaml
index c7e5991..ca482f1 100644
--- a/tests/schemas/good/openshift_webserver-tomcat7_3.0_image.yaml
+++ b/tests/schemas/good/openshift_webserver-tomcat7_3.0_image.yaml
@@ -14,31 +14,29 @@ labels:
- name: "io.openshift.s2i.scripts-url"
value: "image:///usr/local/s2i"
envs:
- information:
- - name: "STI_BUILDER"
- value: "jee"
- configuration:
- - name: JWS_ADMIN_USERNAME
- example: jwsadmin
- - name: JWS_ADMIN_PASSWORD
- example: p5sw0rdd
- - name: JWS_HTTPS_CERTIFICATE_DIR
- example: /opt/webserver/conf
- - name: JWS_HTTPS_CERTIFICATE
- example: server.crt
- - name: JWS_HTTPS_CERTIFICATE_KEY
- example: server.key
- - name: JWS_HTTPS_CERTIFICATE_PASSWORD
- example: $tr0nGPaSs?
- - name: CATALINA_OPTS_APPEND
- example: -Dfoo=bar
- - name: JWS_REALM_USERTABLE
- - name: JWS_REALM_USERNAME_COL
- - name: JWS_REALM_USERCRED_COL
- - name: JWS_REALM_USERROLE_TABLE
- - name: JWS_REALM_ROLENAME_COL
- - name: DB_SERVICE_PREFIX_MAPPING
- example: test-postgresql=TEST_POSTGRESQL,test-mysql=TEST_MYSQL
+ - name: "STI_BUILDER"
+ value: "jee"
+ - name: JWS_ADMIN_USERNAME
+ example: jwsadmin
+ - name: JWS_ADMIN_PASSWORD
+ example: p5sw0rdd
+ - name: JWS_HTTPS_CERTIFICATE_DIR
+ example: /opt/webserver/conf
+ - name: JWS_HTTPS_CERTIFICATE
+ example: server.crt
+ - name: JWS_HTTPS_CERTIFICATE_KEY
+ example: server.key
+ - name: JWS_HTTPS_CERTIFICATE_PASSWORD
+ example: $tr0nGPaSs?
+ - name: CATALINA_OPTS_APPEND
+ example: -Dfoo=bar
+ - name: JWS_REALM_USERTABLE
+ - name: JWS_REALM_USERNAME_COL
+ - name: JWS_REALM_USERCRED_COL
+ - name: JWS_REALM_USERROLE_TABLE
+ - name: JWS_REALM_ROLENAME_COL
+ - name: DB_SERVICE_PREFIX_MAPPING
+ example: test-postgresql=TEST_POSTGRESQL,test-mysql=TEST_MYSQL
ports:
- value: 8443
- value: 8778
diff --git a/tests/schemas/good/openshift_webserver-tomcat8_3.0_image.yaml b/tests/schemas/good/openshift_webserver-tomcat8_3.0_image.yaml
index b7039a3..83c9812 100644
--- a/tests/schemas/good/openshift_webserver-tomcat8_3.0_image.yaml
+++ b/tests/schemas/good/openshift_webserver-tomcat8_3.0_image.yaml
@@ -14,31 +14,29 @@ labels:
- name: "io.openshift.s2i.scripts-url"
value: "image:///usr/local/s2i"
envs:
- information:
- - name: "STI_BUILDER"
- value: "jee"
- configuration:
- - name: JWS_ADMIN_USERNAME
- example: jwsadmin
- - name: JWS_ADMIN_PASSWORD
- example: p5sw0rdd
- - name: JWS_HTTPS_CERTIFICATE_DIR
- example: /opt/webserver/conf
- - name: JWS_HTTPS_CERTIFICATE
- example: server.crt
- - name: JWS_HTTPS_CERTIFICATE_KEY
- example: server.key
- - name: JWS_HTTPS_CERTIFICATE_PASSWORD
- example: $tr0nGPaSs?
- - name: CATALINA_OPTS_APPEND
- example: -Dfoo=bar
- - name: JWS_REALM_USERTABLE
- - name: JWS_REALM_USERNAME_COL
- - name: JWS_REALM_USERCRED_COL
- - name: JWS_REALM_USERROLE_TABLE
- - name: JWS_REALM_ROLENAME_COL
- - name: DB_SERVICE_PREFIX_MAPPING
- example: test-postgresql=TEST_POSTGRESQL,test-mysql=TEST_MYSQL
+ - name: "STI_BUILDER"
+ value: "jee"
+ - name: JWS_ADMIN_USERNAME
+ example: jwsadmin
+ - name: JWS_ADMIN_PASSWORD
+ example: p5sw0rdd
+ - name: JWS_HTTPS_CERTIFICATE_DIR
+ example: /opt/webserver/conf
+ - name: JWS_HTTPS_CERTIFICATE
+ example: server.crt
+ - name: JWS_HTTPS_CERTIFICATE_KEY
+ example: server.key
+ - name: JWS_HTTPS_CERTIFICATE_PASSWORD
+ example: $tr0nGPaSs?
+ - name: CATALINA_OPTS_APPEND
+ example: -Dfoo=bar
+ - name: JWS_REALM_USERTABLE
+ - name: JWS_REALM_USERNAME_COL
+ - name: JWS_REALM_USERCRED_COL
+ - name: JWS_REALM_USERROLE_TABLE
+ - name: JWS_REALM_ROLENAME_COL
+ - name: DB_SERVICE_PREFIX_MAPPING
+ example: test-postgresql=TEST_POSTGRESQL,test-mysql=TEST_MYSQL
ports:
- value: 8443
- value: 8778
diff --git a/tests/test_dockerfile.py b/tests/test_dockerfile.py
index 2ffa5a6..3ab2885 100644
--- a/tests/test_dockerfile.py
+++ b/tests/test_dockerfile.py
@@ -176,3 +176,17 @@ class TestDockerfile(unittest.TestCase):
dockerfile = f.read()
regex = re.compile(r'LABEL name=\"\$JBOSS_IMAGE_NAME\" \\\s+version=\"\$JBOSS_IMAGE_VERSION\" \\\s+architecture=\"x86_64\" \\\s+com.redhat.component=\"someimage\" \\\s+description=\"This is a nice image\"', re.MULTILINE)
self.assertRegexpMatches(dockerfile, regex)
+
+ # https://github.com/jboss-dockerfiles/dogen/issues/127
+ def test_generating_env_variables(self):
+ with open(self.yaml, 'ab') as f:
+ f.write("envs:\n - name: INFO_ENV\n value: 0\n - name: CONFIG_ENV\n example: 1234\n - name: COMBINED_ENV\n value: set_value\n example: example_value\n description: This is a description".encode())
+
+ generator = Generator(self.log, self.args)
+ generator.configure()
+ generator.render_from_template()
+
+ with open(os.path.join(self.target, "Dockerfile"), "r") as f:
+ dockerfile = f.read()
+ regex = re.compile(r'ENV JBOSS_IMAGE_NAME=\"someimage\" \\\s+JBOSS_IMAGE_VERSION=\"1\" \\\s+INFO_ENV=\"0\" \\\s+COMBINED_ENV=\"set_value\" \n', re.MULTILINE)
+ self.assertRegexpMatches(dockerfile, regex)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 2,
"test_score": 3
},
"num_modified_files": 2
} | 2.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"mock"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi==2025.1.31
charset-normalizer==3.4.1
coverage==7.8.0
docopt==0.6.2
-e git+https://github.com/jboss-dockerfiles/dogen.git@ac07abe63ec944d407e1d06669a09deeacc4e720#egg=dogen
exceptiongroup==1.2.2
idna==3.10
iniconfig==2.1.0
Jinja2==3.1.6
MarkupSafe==3.0.2
mock==5.2.0
packaging==24.2
pluggy==1.5.0
pykwalify==1.8.0
pytest==8.3.5
pytest-cov==6.0.0
python-dateutil==2.9.0.post0
PyYAML==6.0.2
requests==2.32.3
ruamel.yaml==0.18.10
ruamel.yaml.clib==0.2.12
six==1.17.0
tomli==2.2.1
urllib3==2.3.0
| name: dogen
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- certifi==2025.1.31
- charset-normalizer==3.4.1
- coverage==7.8.0
- docopt==0.6.2
- exceptiongroup==1.2.2
- idna==3.10
- iniconfig==2.1.0
- jinja2==3.1.6
- markupsafe==3.0.2
- mock==5.2.0
- packaging==24.2
- pluggy==1.5.0
- pykwalify==1.8.0
- pytest==8.3.5
- pytest-cov==6.0.0
- python-dateutil==2.9.0.post0
- pyyaml==6.0.2
- requests==2.32.3
- ruamel-yaml==0.18.10
- ruamel-yaml-clib==0.2.12
- six==1.17.0
- tomli==2.2.1
- urllib3==2.3.0
prefix: /opt/conda/envs/dogen
| [
"tests/test_dockerfile.py::TestDockerfile::test_generating_env_variables"
]
| []
| [
"tests/test_dockerfile.py::TestDockerfile::test_debug_port",
"tests/test_dockerfile.py::TestDockerfile::test_default_cmd_user",
"tests/test_dockerfile.py::TestDockerfile::test_generating_description_label",
"tests/test_dockerfile.py::TestDockerfile::test_generating_maintainer_label",
"tests/test_dockerfile.py::TestDockerfile::test_set_cmd",
"tests/test_dockerfile.py::TestDockerfile::test_set_cmd_user",
"tests/test_dockerfile.py::TestDockerfile::test_set_entrypoint",
"tests/test_dockerfile.py::TestDockerfile::test_volumes"
]
| []
| MIT License | 1,340 | [
"dogen/schema/kwalify_schema.yaml",
"dogen/template_helper.py"
]
| [
"dogen/schema/kwalify_schema.yaml",
"dogen/template_helper.py"
]
|
|
opsdroid__opsdroid-165 | a3e93bda15e1debbc368f5c947a10ad5f1027fbe | 2017-06-06 15:13:13 | b67d32310da842dddddb9b907f731a03d0562a2f | coveralls:
[](https://coveralls.io/builds/11850868)
Coverage decreased (-1.1%) to 94.764% when pulling **94188fe0a3bc45018ef05f76be87a396347e41ee on jacobtomlinson:auto-gen-conf** into **a3e93bda15e1debbc368f5c947a10ad5f1027fbe on opsdroid:master**.
coveralls:
[](https://coveralls.io/builds/11851214)
Coverage decreased (-0.1%) to 95.777% when pulling **1435962988a19c2d69c8deff1984acf4772c9441 on jacobtomlinson:auto-gen-conf** into **a3e93bda15e1debbc368f5c947a10ad5f1027fbe on opsdroid:master**.
coveralls:
[](https://coveralls.io/builds/11851245)
Coverage decreased (-0.1%) to 95.777% when pulling **1435962988a19c2d69c8deff1984acf4772c9441 on jacobtomlinson:auto-gen-conf** into **a3e93bda15e1debbc368f5c947a10ad5f1027fbe on opsdroid:master**.
coveralls:
[](https://coveralls.io/builds/11851421)
Coverage increased (+0.2%) to 96.115% when pulling **6c3aa2538aefb511db794be759ebdaf290131a77 on jacobtomlinson:auto-gen-conf** into **a3e93bda15e1debbc368f5c947a10ad5f1027fbe on opsdroid:master**.
| diff --git a/README.md b/README.md
index 098e647..7b9ad7b 100644
--- a/README.md
+++ b/README.md
@@ -10,16 +10,14 @@ An open source python chat-ops bot framework.
```
pip3 install opsdroid
-mkdir ~/.opsdroid
-opsdroid --gen-config > ~/.opsdroid/configuration.yaml
opsdroid
```
## Configuration
-Configuration is done in a yaml file called `configuration.yaml`. See the [full reference](http://opsdroid.readthedocs.io/en/latest/configuration-reference/).
+Configuration is done in a yaml file called `configuration.yaml`. This will be created automatically for you in `~/.opsdroid`. See the [full reference](http://opsdroid.readthedocs.io/en/latest/configuration-reference/).
-Example:
+Example config:
```yaml
## _ _ _
diff --git a/docs/getting-started.md b/docs/getting-started.md
index f9b2692..1f7ebb7 100644
--- a/docs/getting-started.md
+++ b/docs/getting-started.md
@@ -22,6 +22,8 @@ For configuration you simply need to create a single YAML file named `configurat
* `~/.opsdroid/configuration.yaml`
* `/etc/opsdroid/configuration.yaml`
+ If none are found then `~/.opsdroid/configuration.yaml` will be created for you.
+
The opsdroid project itself is very simple and requires modules to give it functionality. In your configuration file you must specify the connector, skill and database* modules you wish to use and any options they may require.
**Connectors** are modules for connecting opsdroid to your specific chat service. **Skills** are modules which define what actions opsdroid should perform based on different chat messages. **Database** modules connect opsdroid to your chosen database and allows skills to store information between messages.
diff --git a/opsdroid/__main__.py b/opsdroid/__main__.py
index 423fcaa..5513e8f 100644
--- a/opsdroid/__main__.py
+++ b/opsdroid/__main__.py
@@ -1,12 +1,11 @@
"""Starts opsdroid."""
import sys
-import os
import logging
import argparse
from opsdroid.core import OpsDroid
-from opsdroid.const import LOG_FILENAME
+from opsdroid.const import LOG_FILENAME, EXAMPLE_CONFIG_FILE
from opsdroid.web import Web
@@ -88,10 +87,7 @@ def main():
args = parse_args(sys.argv[1:])
if args.gen_config:
- path = os.path.join(
- os.path.dirname(os.path.abspath(__file__)),
- "configuration/example_configuration.yaml")
- with open(path, 'r') as conf:
+ with open(EXAMPLE_CONFIG_FILE, 'r') as conf:
print(conf.read())
sys.exit(0)
diff --git a/opsdroid/configuration/example_configuration.yaml b/opsdroid/configuration/example_configuration.yaml
index 8b3d6af..abb841c 100644
--- a/opsdroid/configuration/example_configuration.yaml
+++ b/opsdroid/configuration/example_configuration.yaml
@@ -37,6 +37,7 @@
## Connector modules
connectors:
- name: shell
+ - name: websocket
## Database modules (optional)
# databases:
diff --git a/opsdroid/const.py b/opsdroid/const.py
index c826aed..3d5b8b4 100644
--- a/opsdroid/const.py
+++ b/opsdroid/const.py
@@ -1,9 +1,14 @@
"""Constants used by OpsDroid."""
+import os
__version__ = "0.8.0"
LOG_FILENAME = 'output.log'
DEFAULT_GIT_URL = "https://github.com/opsdroid/"
MODULES_DIRECTORY = "opsdroid-modules"
-DEFAULT_MODULES_PATH = "~/.opsdroid/modules"
+DEFAULT_ROOT_PATH = os.path.join(os.path.expanduser("~"), ".opsdroid")
+DEFAULT_MODULES_PATH = os.path.join(DEFAULT_ROOT_PATH, "modules")
+DEFAULT_CONFIG_PATH = os.path.join(DEFAULT_ROOT_PATH, "configuration.yaml")
DEFAULT_MODULE_BRANCH = "master"
+EXAMPLE_CONFIG_FILE = os.path.join(os.path.dirname(os.path.abspath(__file__)),
+ "configuration/example_configuration.yaml")
diff --git a/opsdroid/core.py b/opsdroid/core.py
index 0f43afd..5e3b545 100644
--- a/opsdroid/core.py
+++ b/opsdroid/core.py
@@ -4,7 +4,6 @@ import logging
import sys
import weakref
import asyncio
-import os.path
from opsdroid.memory import Memory
from opsdroid.connector import Connector
@@ -13,6 +12,7 @@ from opsdroid.loader import Loader
from opsdroid.parsers.regex import parse_regex
from opsdroid.parsers.apiai import parse_apiai
from opsdroid.parsers.crontab import parse_crontab
+from opsdroid.const import DEFAULT_CONFIG_PATH
_LOGGER = logging.getLogger(__name__)
@@ -89,8 +89,7 @@ class OpsDroid():
"""Load configuration."""
self.config = self.loader.load_config_file([
"./configuration.yaml",
- os.path.join(os.path.expanduser("~"),
- ".opsdroid/configuration.yaml"),
+ DEFAULT_CONFIG_PATH,
"/etc/opsdroid/configuration.yaml"
])
diff --git a/opsdroid/loader.py b/opsdroid/loader.py
index 34d1330..0f6e1a8 100644
--- a/opsdroid/loader.py
+++ b/opsdroid/loader.py
@@ -9,7 +9,7 @@ import importlib
import yaml
from opsdroid.const import (
DEFAULT_GIT_URL, MODULES_DIRECTORY, DEFAULT_MODULES_PATH,
- DEFAULT_MODULE_BRANCH)
+ DEFAULT_MODULE_BRANCH, DEFAULT_CONFIG_PATH, EXAMPLE_CONFIG_FILE)
_LOGGER = logging.getLogger(__name__)
@@ -100,6 +100,16 @@ class Loader:
_LOGGER.debug(str(line).strip())
process.wait()
+ @staticmethod
+ def create_default_config(config_path):
+ """Create a default config file based on the included example."""
+ _LOGGER.info("Creating %s.", config_path)
+ config_dir, _ = os.path.split(config_path)
+ if not os.path.isdir(config_dir):
+ os.makedirs(config_dir)
+ shutil.copyfile(EXAMPLE_CONFIG_FILE, config_path)
+ return config_path
+
def load_config_file(self, config_paths):
"""Load a yaml config file from path."""
config_path = ""
@@ -112,7 +122,8 @@ class Loader:
break
if not config_path:
- self.opsdroid.critical("No configuration files found", 1)
+ _LOGGER.info("No configuration files found.")
+ config_path = self.create_default_config(DEFAULT_CONFIG_PATH)
try:
with open(config_path, 'r') as stream:
| Generate default config automatically
Instead of running `opsdroid --gen-config` and piping the output into a file opsdroid should simply check for the existence of a config file and if it can't find one it should create one in `~/.opsdroid` using the default config file. | opsdroid/opsdroid | diff --git a/tests/test_loader.py b/tests/test_loader.py
index ae83378..155e262 100644
--- a/tests/test_loader.py
+++ b/tests/test_loader.py
@@ -29,10 +29,28 @@ class TestLoader(unittest.TestCase):
config = loader.load_config_file(["tests/configs/minimal.yaml"])
self.assertIsNotNone(config)
+ def test_create_default_config(self):
+ test_config_path = "/tmp/test_config_path/configuration.yaml"
+ opsdroid, loader = self.setup()
+
+ self.assertEqual(loader.create_default_config(test_config_path),
+ test_config_path)
+ self.assertTrue(os.path.isfile(test_config_path))
+ shutil.rmtree(os.path.split(test_config_path)[0])
+
+ def test_generate_config_if_none_exist(self):
+ opsdroid, loader = self.setup()
+ loader.create_default_config = mock.Mock(
+ return_value="tests/configs/minimal.yaml")
+ loader.load_config_file(["file_which_does_not_exist"])
+ self.assertTrue(loader.create_default_config.called)
+
def test_load_non_existant_config_file(self):
opsdroid, loader = self.setup()
- loader.opsdroid.critical = mock.Mock()
+ loader.create_default_config = mock.Mock(
+ return_value="/tmp/my_nonexistant_config")
loader.load_config_file(["file_which_does_not_exist"])
+ self.assertTrue(loader.create_default_config.called)
self.assertTrue(loader.opsdroid.critical.called)
def test_load_broken_config_file(self):
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 7
} | 0.8 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-xdist",
"pytest-mock",
"pytest-asyncio"
],
"pre_install": [
"apt-get update",
"apt-get install -y git gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | aiohttp==1.3.5
arrow==0.10.0
async-timeout==5.0.1
chardet==5.2.0
coverage==7.8.0
exceptiongroup==1.2.2
execnet==2.1.1
iniconfig==2.1.0
multidict==6.2.0
-e git+https://github.com/opsdroid/opsdroid.git@a3e93bda15e1debbc368f5c947a10ad5f1027fbe#egg=opsdroid
packaging==24.2
pluggy==1.5.0
pycron==0.40
pytest==8.3.5
pytest-asyncio==0.26.0
pytest-cov==6.0.0
pytest-mock==3.14.0
pytest-xdist==3.6.1
python-dateutil==2.9.0.post0
PyYAML==3.12
six==1.17.0
tomli==2.2.1
typing_extensions==4.13.0
yarl==0.9.8
| name: opsdroid
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- aiohttp==1.3.5
- arrow==0.10.0
- async-timeout==5.0.1
- chardet==5.2.0
- coverage==7.8.0
- exceptiongroup==1.2.2
- execnet==2.1.1
- iniconfig==2.1.0
- multidict==6.2.0
- packaging==24.2
- pluggy==1.5.0
- pycron==0.40
- pytest==8.3.5
- pytest-asyncio==0.26.0
- pytest-cov==6.0.0
- pytest-mock==3.14.0
- pytest-xdist==3.6.1
- python-dateutil==2.9.0.post0
- pyyaml==3.12
- six==1.17.0
- tomli==2.2.1
- typing-extensions==4.13.0
- yarl==0.9.8
prefix: /opt/conda/envs/opsdroid
| [
"tests/test_loader.py::TestLoader::test_create_default_config",
"tests/test_loader.py::TestLoader::test_generate_config_if_none_exist",
"tests/test_loader.py::TestLoader::test_load_non_existant_config_file"
]
| []
| [
"tests/test_loader.py::TestLoader::test_build_module_path",
"tests/test_loader.py::TestLoader::test_check_cache_leaves",
"tests/test_loader.py::TestLoader::test_check_cache_removes_dir",
"tests/test_loader.py::TestLoader::test_check_cache_removes_file",
"tests/test_loader.py::TestLoader::test_git_clone",
"tests/test_loader.py::TestLoader::test_import_module",
"tests/test_loader.py::TestLoader::test_import_module_failure",
"tests/test_loader.py::TestLoader::test_import_module_new",
"tests/test_loader.py::TestLoader::test_install_default_remote_module",
"tests/test_loader.py::TestLoader::test_install_existing_module",
"tests/test_loader.py::TestLoader::test_install_local_module_dir",
"tests/test_loader.py::TestLoader::test_install_local_module_failure",
"tests/test_loader.py::TestLoader::test_install_local_module_file",
"tests/test_loader.py::TestLoader::test_install_missing_local_module",
"tests/test_loader.py::TestLoader::test_install_specific_local_git_module",
"tests/test_loader.py::TestLoader::test_install_specific_local_path_module",
"tests/test_loader.py::TestLoader::test_install_specific_remote_module",
"tests/test_loader.py::TestLoader::test_load_broken_config_file",
"tests/test_loader.py::TestLoader::test_load_config",
"tests/test_loader.py::TestLoader::test_load_config_file",
"tests/test_loader.py::TestLoader::test_load_empty_config",
"tests/test_loader.py::TestLoader::test_load_modules",
"tests/test_loader.py::TestLoader::test_pip_install_deps"
]
| []
| Apache License 2.0 | 1,341 | [
"opsdroid/__main__.py",
"opsdroid/const.py",
"opsdroid/configuration/example_configuration.yaml",
"opsdroid/core.py",
"README.md",
"docs/getting-started.md",
"opsdroid/loader.py"
]
| [
"opsdroid/__main__.py",
"opsdroid/const.py",
"opsdroid/configuration/example_configuration.yaml",
"opsdroid/core.py",
"README.md",
"docs/getting-started.md",
"opsdroid/loader.py"
]
|
bokeh__bokeh-6394 | be0d7255b917dd915a6b6cee5e40073e5a31eba6 | 2017-06-06 17:16:35 | 44b63d65efec1e06fb565a9a81e0f2f21315e85a | bryevdv: @philippjfr I have updated the test I think it should be better now
philippjfr: Looks good and works for me. | diff --git a/bokeh/core/property/bases.py b/bokeh/core/property/bases.py
index c5d7c92c9..4aace587a 100644
--- a/bokeh/core/property/bases.py
+++ b/bokeh/core/property/bases.py
@@ -178,7 +178,19 @@ class Property(PropertyDescriptorFactory):
if isinstance(new, np.ndarray) or isinstance(old, np.ndarray):
return np.array_equal(new, old)
- return new == old
+ # this handles the special but common case where there is a dict with numpy
+ # arrays as values (e.g. the .data property of a ColumnDataSource)
+ if isinstance(new, dict) and isinstance(old, dict):
+ if set(new.keys()) != set(old.keys()):
+ return False
+ return all(self.matches(new[k], old[k]) for k in new)
+
+ try:
+ return new == old
+
+ # if the comparison fails for some reason, just punt and return no-match
+ except ValueError:
+ return False
def from_json(self, json, models=None):
''' Convert from JSON-compatible values into a value for this property.
| Modifying datasources broken
In bokeh master and presumably any rc/dev releases cut in the last few days, modifying a CDS is broken completely. This is because https://github.com/bokeh/bokeh/pull/6374 introduced a change in the way bokeh properties check whether an old and new value has changed. The approach there guards against checks on arrays, but since the ``.data`` property on a CDS is not itself an array it ends up doing a simple equality between the dictionary of arrays, which raises this error:
>ValueError�: The truth value of an array with more than one element is ambiguous. Use a.any() or a.all()
This occurs for any change to a ``ColumnDataSource.data``, which means that any server or notebook examples that modify a CDS in python are currently broken. | bokeh/bokeh | diff --git a/bokeh/core/property/tests/test_bases.py b/bokeh/core/property/tests/test_bases.py
index 14a1234bc..d7a0309ab 100644
--- a/bokeh/core/property/tests/test_bases.py
+++ b/bokeh/core/property/tests/test_bases.py
@@ -45,6 +45,14 @@ def test_property_assert_msg_funcs():
p.prepare_value(hp, "foo", 10)
assert str(e) == "bad True name, 10"
+def test_property_matches_basic_types(capsys):
+ p = pb.Property()
+ for x in [1, 1.2, "a", np.arange(4), None, False, True, {}, []]:
+ assert p.matches(x, x) is True
+ assert p.matches(x, "junk") is False
+ out, err = capsys.readouterr()
+ assert err == ""
+
def test_property_matches_compatible_arrays(capsys):
p = pb.Property()
a = np.arange(5)
@@ -65,3 +73,35 @@ def test_property_matches_incompatible_arrays(capsys):
out, err = capsys.readouterr()
# no way to suppress FutureWarning in this case
# assert err == ""
+
+def test_property_matches_dicts_with_array_values(capsys):
+ p = pb.Property()
+ d1 = dict(foo=np.arange(10))
+ d2 = dict(foo=np.arange(10))
+
+ assert p.matches(d1, d1) is True
+ assert p.matches(d1, d2) is True
+
+ # XXX not sure if this is preferable to have match, or not
+ assert p.matches(d1, dict(foo=list(range(10)))) is True
+
+ assert p.matches(d1, dict(foo=np.arange(11))) is False
+ assert p.matches(d1, dict(bar=np.arange(10))) is False
+ assert p.matches(d1, dict(bar=10)) is False
+ out, err = capsys.readouterr()
+ assert err == ""
+
+def test_property_matches_non_dict_containers_with_array_false(capsys):
+ p = pb.Property()
+ d1 = [np.arange(10)]
+ d2 = [np.arange(10)]
+ assert p.matches(d1, d1) is True # because object identity
+ assert p.matches(d1, d2) is False
+
+ t1 = (np.arange(10),)
+ t2 = (np.arange(10),)
+ assert p.matches(t1, t1) is True # because object identity
+ assert p.matches(t1, t2) is False
+
+ out, err = capsys.readouterr()
+ assert err == ""
| {
"commit_name": "merge_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 3
},
"num_modified_files": 1
} | 0.12 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install bokeh",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | bokeh==3.4.3
contourpy==1.3.0
exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
Jinja2==3.1.6
MarkupSafe==3.0.2
numpy==2.0.2
packaging @ file:///croot/packaging_1734472117206/work
pandas==2.2.3
pillow==11.1.0
pluggy @ file:///croot/pluggy_1733169602837/work
pytest @ file:///croot/pytest_1738938843180/work
python-dateutil==2.9.0.post0
pytz==2025.2
PyYAML==6.0.2
six==1.17.0
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
tornado==6.4.2
tzdata==2025.2
xyzservices==2025.1.0
| name: bokeh
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- bokeh==3.4.3
- contourpy==1.3.0
- jinja2==3.1.6
- markupsafe==3.0.2
- numpy==2.0.2
- pandas==2.2.3
- pillow==11.1.0
- python-dateutil==2.9.0.post0
- pytz==2025.2
- pyyaml==6.0.2
- six==1.17.0
- tornado==6.4.2
- tzdata==2025.2
- xyzservices==2025.1.0
prefix: /opt/conda/envs/bokeh
| [
"bokeh/core/property/tests/test_bases.py::test_property_matches_dicts_with_array_values",
"bokeh/core/property/tests/test_bases.py::test_property_matches_non_dict_containers_with_array_false"
]
| []
| [
"bokeh/core/property/tests/test_bases.py::test_property_assert_bools",
"bokeh/core/property/tests/test_bases.py::test_property_assert_functions",
"bokeh/core/property/tests/test_bases.py::test_property_assert_msg_funcs",
"bokeh/core/property/tests/test_bases.py::test_property_matches_basic_types",
"bokeh/core/property/tests/test_bases.py::test_property_matches_compatible_arrays",
"bokeh/core/property/tests/test_bases.py::test_property_matches_incompatible_arrays"
]
| []
| BSD 3-Clause "New" or "Revised" License | 1,342 | [
"bokeh/core/property/bases.py"
]
| [
"bokeh/core/property/bases.py"
]
|
Azure__WALinuxAgent-748 | 9bed39863b187154afbb805b74f647a484603f81 | 2017-06-06 19:23:36 | 6e9b985c1d7d564253a1c344bab01b45093103cd | diff --git a/azurelinuxagent/common/protocol/hostplugin.py b/azurelinuxagent/common/protocol/hostplugin.py
index 464fd354..bf62cb54 100644
--- a/azurelinuxagent/common/protocol/hostplugin.py
+++ b/azurelinuxagent/common/protocol/hostplugin.py
@@ -288,12 +288,23 @@ class HostPluginProtocol(object):
@staticmethod
def read_response_error(response):
- if response is None:
- return ''
- body = remove_bom(response.read())
- if PY_VERSION_MAJOR < 3 and body is not None:
- body = ustr(body, encoding='utf-8')
- return "{0}, {1}, {2}".format(
- response.status,
- response.reason,
- body)
+ result = ''
+ if response is not None:
+ try:
+ body = remove_bom(response.read())
+ result = "[{0}: {1}] {2}".format(response.status,
+ response.reason,
+ body)
+
+ # this result string is passed upstream to several methods
+ # which do a raise HttpError() or a format() of some kind;
+ # as a result it cannot have any unicode characters
+ if PY_VERSION_MAJOR < 3:
+ result = ustr(result, encoding='ascii', errors='ignore')
+ else:
+ result = result\
+ .encode(encoding='ascii', errors='ignore')\
+ .decode(encoding='ascii', errors='ignore')
+ except Exception:
+ logger.warn(traceback.format_exc())
+ return result
| 2.2.11 on CentOS 6.9 - HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
Hi Guys,
I have not been able to reproduce this issue myself but customer tried multiple times reinstalling the waagent and we are seeing these ascii codec errors in the logs. Could it be due to python version?
Here are some details, customer also tried with older waagents 2.2.0, 2.26, 2.2.10, but same issue happens.
___
# waagent -version
WALinuxAgent-2.2.11 running on centos 6.9
Python: 2.6.6
Goal state agent: 2.2.11
# cat /etc/*release
CentOS release 6.9 (Final)
LSB_VERSION=base-4.0-amd64:base-4.0-noarch:core-4.0-amd64:core-4.0-noarch:graphics-4.0-amd64:graphics-4.0-noarch:printing-4.0-amd64:printing-4.0-noarch
CentOS release 6.9 (Final)
CentOS release 6.9 (Final)
contents from /var/log/waagent.log:
2017/05/24 15:46:17.844766 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:46:17.861865 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:46:21.049203 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:46:21.090362 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:46:21.122899 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:46:21.139003 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:46:24.305469 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:46:24.331966 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:46:24.364467 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:46:24.380578 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:46:28.564198 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:46:28.591858 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:46:28.617763 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:46:28.639363 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:46:31.885319 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:46:31.911783 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:46:31.946363 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:46:31.964649 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:46:35.146591 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:46:35.189370 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:46:35.225146 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:46:35.242466 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:46:39.429445 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:46:39.455260 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:46:39.487014 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:46:39.503239 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:46:42.683012 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:46:42.710385 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:46:42.744597 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:46:42.753255 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:46:45.931898 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:46:45.962590 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:46:45.996475 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:46:46.015805 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:46:50.197470 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:46:50.223235 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:46:50.255621 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:46:50.273207 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:46:53.446002 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:46:53.484775 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:46:53.523703 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:46:53.541274 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:46:56.723500 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:46:56.749868 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:46:56.787001 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:46:56.795731 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:47:01.004477 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:47:01.031631 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:47:01.065891 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:47:01.074840 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:47:04.239667 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:47:04.265142 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:47:04.297960 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:47:04.316253 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:47:07.477140 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:47:07.502672 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:47:07.539391 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:47:07.548653 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:47:11.731418 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:47:11.758850 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:47:11.799020 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:47:11.823203 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:47:14.995817 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:47:15.030661 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:47:15.065515 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:47:15.083501 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:47:18.270243 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:47:18.300546 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:47:18.333954 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:47:18.351530 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:47:22.533407 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:47:22.558433 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:47:22.610613 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:47:22.628229 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:47:25.780685 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:47:25.808705 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:47:25.849293 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:47:25.858204 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:47:29.070364 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:47:29.113366 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:47:29.152417 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:47:29.170557 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:47:33.358345 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:47:33.385624 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:47:33.386848 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:47:33.393950 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:47:36.569030 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:47:36.596103 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:47:36.631035 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:47:36.647864 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:47:39.832556 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:47:39.863671 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:47:39.895851 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:47:39.914430 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:47:44.099506 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:47:44.124648 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:47:44.158748 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:47:44.177613 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:47:47.583472 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:47:47.608486 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:47:47.640467 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:47:47.657303 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:47:50.835735 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:47:50.874290 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:47:50.916490 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:47:50.945341 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:47:55.129633 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:47:55.156257 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:47:55.190141 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:47:55.215993 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:47:58.424382 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:47:58.451950 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:47:58.487127 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:47:58.504755 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:48:01.690875 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:48:01.715687 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:48:01.748547 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:48:01.765343 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:48:05.936563 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:48:05.964643 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:48:05.996995 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:48:06.017752 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:48:09.190781 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:48:09.218779 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:48:09.252314 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:48:09.277252 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:48:12.459783 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:48:12.484684 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:48:12.518163 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:48:12.535435 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:48:16.729958 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:48:16.754869 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:48:16.788728 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:48:16.805349 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:48:20.001452 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:48:20.029702 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:48:20.065718 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:48:20.088268 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:48:23.305548 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:48:23.331518 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:48:23.362826 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:48:23.380037 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:48:27.596824 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:48:27.623493 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:48:27.657310 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:48:27.673913 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:48:30.849483 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:48:30.875699 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:48:30.908237 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:48:30.920496 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:48:34.087479 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:48:34.114431 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:48:34.162600 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:48:34.179671 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:48:38.360868 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:48:38.388320 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:48:38.422843 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:48:38.430939 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:48:41.598481 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:48:41.624106 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:48:41.657094 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:48:41.685802 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:48:44.875318 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:48:44.916857 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:48:44.948256 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:48:44.964992 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:48:49.146943 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:48:49.223333 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:48:49.270700 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:48:49.289094 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:48:52.453305 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:48:52.483725 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:48:52.514236 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:48:52.541040 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:48:55.757322 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:48:55.794825 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:48:55.847756 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:48:55.868455 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:49:00.054734 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:49:00.090044 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:49:00.124402 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:49:00.141889 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:49:03.319632 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:49:03.345371 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:49:03.377599 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:49:03.408319 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:49:06.580183 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:49:06.605901 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:49:06.639361 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:49:06.656325 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:49:10.853238 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:49:10.898130 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:49:10.969166 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:49:11.043244 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:49:14.241737 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:49:14.370211 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:49:14.384266 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:49:14.395068 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:49:17.574396 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:49:17.602290 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:49:17.633727 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:49:17.649852 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:49:21.827386 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:49:21.853598 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:49:21.886466 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:49:21.903030 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:49:25.102639 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:49:25.132233 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:49:25.171485 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:49:25.180229 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:49:28.358046 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:49:28.383589 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:49:28.416060 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:49:28.432768 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:49:32.679130 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:49:32.722700 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:49:32.755549 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:49:32.772589 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:49:35.964000 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:49:35.989517 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:49:36.022212 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:49:36.039823 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:49:39.208352 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:49:39.236071 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:49:39.271353 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:49:39.299952 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:49:43.483750 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:49:43.516775 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:49:43.550003 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:49:43.570354 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:49:47.603632 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:49:47.635376 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:49:47.685295 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:49:47.702468 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:49:50.882551 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:49:50.908720 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:49:50.951725 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:49:50.972618 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:49:55.180153 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:49:55.206611 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:49:55.275126 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:49:55.298144 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:49:58.466913 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:49:58.494807 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:49:58.528384 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:49:58.546434 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:50:01.730089 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:50:01.757220 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:50:01.758602 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:50:01.768496 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:50:05.960692 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:50:06.009798 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:50:06.072462 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:50:06.083434 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:50:09.245194 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:50:09.271266 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:50:09.304154 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:50:09.312189 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:50:12.488104 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:50:12.515773 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:50:12.556066 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:50:12.572258 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:50:16.752931 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:50:16.779420 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:50:16.814980 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:50:16.822962 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:50:20.023086 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:50:20.048418 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:50:20.102101 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:50:20.126512 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:50:23.368280 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:50:23.394281 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:50:23.426482 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:50:23.442494 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:50:27.621318 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:50:27.646496 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:50:27.679599 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:50:27.696550 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:50:30.862777 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:50:30.890594 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:50:30.925860 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:50:30.943765 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:50:34.114088 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:50:34.141320 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:50:34.173661 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:50:34.189875 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:50:38.361132 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:50:38.388245 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:50:38.421441 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:50:38.437757 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:50:41.638507 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:50:41.664675 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:50:41.697331 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:50:41.715056 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:50:44.894232 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:50:44.921590 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:50:44.954569 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:50:44.971219 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:50:49.166333 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:50:49.190998 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:50:49.222526 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:50:49.239378 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:50:52.443256 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:50:52.469896 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:50:52.502156 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:50:52.518454 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:50:55.718757 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:50:55.747044 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:50:55.783661 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:50:55.802636 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:50:59.994033 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:51:00.021891 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:51:00.055730 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:51:00.073697 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:51:03.248109 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:51:03.275377 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:51:03.309335 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:51:03.327206 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:51:06.551271 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:51:06.591861 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:51:06.626499 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:51:06.643350 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:51:10.824589 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:51:10.853689 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:51:10.888139 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:51:10.913930 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:51:14.084725 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:51:14.111949 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:51:14.144392 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:51:14.160706 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:51:17.336614 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:51:17.364869 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:51:17.400119 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:51:17.422776 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:51:21.662768 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:51:21.688839 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:51:21.721831 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:51:21.738387 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:51:24.919905 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:51:24.966375 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:51:24.999268 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:51:25.020380 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:51:28.178949 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:51:28.205222 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:51:28.239041 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:51:28.258710 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:51:32.431694 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:51:32.460159 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:51:32.492851 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:51:32.511886 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:51:35.673963 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:51:35.724406 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:51:35.758572 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:51:35.775299 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:51:38.951270 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:51:38.977857 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:51:39.011253 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:51:39.028396 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
2017/05/24 15:51:43.205074 ERROR HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:51:43.231282 ERROR Event: name=WALinuxAgent, op=ReportStatus, message=HostGAPlugin: Exception Put VM status: 'ascii' codec can't encode characters in position 83-85: ordinal not in range(128)
2017/05/24 15:51:43.263837 WARNING HostGAPlugin: resetting default channel
2017/05/24 15:51:43.280415 INFO Event: name=WALinuxAgent, op=ProcessGoalState, message=
| Azure/WALinuxAgent | diff --git a/tests/protocol/test_hostplugin.py b/tests/protocol/test_hostplugin.py
index e203615a..b18b6914 100644
--- a/tests/protocol/test_hostplugin.py
+++ b/tests/protocol/test_hostplugin.py
@@ -19,7 +19,7 @@ import base64
import json
import sys
-
+from azurelinuxagent.common.future import ustr
if sys.version_info[0] == 3:
import http.client as httpclient
@@ -224,6 +224,61 @@ class TestHostPlugin(AgentTestCase):
test_goal_state,
exp_method, exp_url, exp_data)
+ def test_read_response_error(self):
+ """
+ Validate the read_response_error method handles encoding correctly
+ """
+ responses = ['message', b'message', '\x80message\x80']
+ response = MagicMock()
+ response.status = 'status'
+ response.reason = 'reason'
+ with patch.object(response, 'read') as patch_response:
+ for s in responses:
+ patch_response.return_value = s
+ result = hostplugin.HostPluginProtocol.read_response_error(response)
+ self.assertTrue('[status: reason]' in result)
+ self.assertTrue('message' in result)
+
+ def test_read_response_bytes(self):
+ response_bytes = '7b:0a:20:20:20:20:22:65:72:72:6f:72:43:6f:64:65:22:' \
+ '3a:20:22:54:68:65:20:62:6c:6f:62:20:74:79:70:65:20:' \
+ '69:73:20:69:6e:76:61:6c:69:64:20:66:6f:72:20:74:68:' \
+ '69:73:20:6f:70:65:72:61:74:69:6f:6e:2e:22:2c:0a:20:' \
+ '20:20:20:22:6d:65:73:73:61:67:65:22:3a:20:22:c3:af:' \
+ 'c2:bb:c2:bf:3c:3f:78:6d:6c:20:76:65:72:73:69:6f:6e:' \
+ '3d:22:31:2e:30:22:20:65:6e:63:6f:64:69:6e:67:3d:22:' \
+ '75:74:66:2d:38:22:3f:3e:3c:45:72:72:6f:72:3e:3c:43:' \
+ '6f:64:65:3e:49:6e:76:61:6c:69:64:42:6c:6f:62:54:79:' \
+ '70:65:3c:2f:43:6f:64:65:3e:3c:4d:65:73:73:61:67:65:' \
+ '3e:54:68:65:20:62:6c:6f:62:20:74:79:70:65:20:69:73:' \
+ '20:69:6e:76:61:6c:69:64:20:66:6f:72:20:74:68:69:73:' \
+ '20:6f:70:65:72:61:74:69:6f:6e:2e:0a:52:65:71:75:65:' \
+ '73:74:49:64:3a:63:37:34:32:39:30:63:62:2d:30:30:30:' \
+ '31:2d:30:30:62:35:2d:30:36:64:61:2d:64:64:36:36:36:' \
+ '61:30:30:30:22:2c:0a:20:20:20:20:22:64:65:74:61:69:' \
+ '6c:73:22:3a:20:22:22:0a:7d'.split(':')
+ expected_response = '[status: reason] {\n "errorCode": "The blob ' \
+ 'type is invalid for this operation.",\n ' \
+ '"message": "<?xml version="1.0" ' \
+ 'encoding="utf-8"?>' \
+ '<Error><Code>InvalidBlobType</Code><Message>The ' \
+ 'blob type is invalid for this operation.\n' \
+ 'RequestId:c74290cb-0001-00b5-06da-dd666a000",' \
+ '\n "details": ""\n}'
+
+ response_string = ''.join(chr(int(b, 16)) for b in response_bytes)
+ response = MagicMock()
+ response.status = 'status'
+ response.reason = 'reason'
+ with patch.object(response, 'read') as patch_response:
+ patch_response.return_value = response_string
+ result = hostplugin.HostPluginProtocol.read_response_error(response)
+ self.assertEqual(result, expected_response)
+ try:
+ raise HttpError("{0}".format(result))
+ except HttpError as e:
+ self.assertTrue(result in ustr(e))
+
def test_no_fallback(self):
"""
Validate fallback to upload status using HostGAPlugin is not happening
| {
"commit_name": "merge_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 1
} | 2.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"nose",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.4",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
certifi==2021.5.30
importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work
nose==1.3.7
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
pytest==6.2.4
toml @ file:///tmp/build/80754af9/toml_1616166611790/work
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
-e git+https://github.com/Azure/WALinuxAgent.git@9bed39863b187154afbb805b74f647a484603f81#egg=WALinuxAgent
zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
| name: WALinuxAgent
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=21.4.0=pyhd3eb1b0_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- importlib-metadata=4.8.1=py36h06a4308_0
- importlib_metadata=4.8.1=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- more-itertools=8.12.0=pyhd3eb1b0_0
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pip=21.2.2=py36h06a4308_0
- pluggy=0.13.1=py36h06a4308_0
- py=1.11.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pytest=6.2.4=py36h06a4308_2
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- toml=0.10.2=pyhd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zipp=3.6.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
- pip:
- nose==1.3.7
prefix: /opt/conda/envs/WALinuxAgent
| [
"tests/protocol/test_hostplugin.py::TestHostPlugin::test_read_response_bytes",
"tests/protocol/test_hostplugin.py::TestHostPlugin::test_read_response_error"
]
| []
| [
"tests/protocol/test_hostplugin.py::TestHostPlugin::test_fallback",
"tests/protocol/test_hostplugin.py::TestHostPlugin::test_fallback_failure",
"tests/protocol/test_hostplugin.py::TestHostPlugin::test_no_fallback",
"tests/protocol/test_hostplugin.py::TestHostPlugin::test_put_status_error_reporting",
"tests/protocol/test_hostplugin.py::TestHostPlugin::test_validate_block_blob",
"tests/protocol/test_hostplugin.py::TestHostPlugin::test_validate_get_extension_artifacts",
"tests/protocol/test_hostplugin.py::TestHostPlugin::test_validate_http_request",
"tests/protocol/test_hostplugin.py::TestHostPlugin::test_validate_page_blobs"
]
| []
| Apache License 2.0 | 1,343 | [
"azurelinuxagent/common/protocol/hostplugin.py"
]
| [
"azurelinuxagent/common/protocol/hostplugin.py"
]
|
|
jboss-dockerfiles__dogen-143 | a55fa36d7ae6d19688c93d42629fbd763b72419f | 2017-06-07 10:33:27 | bc9263c5b683fdf901ac1646286ee3908b85dcdc | diff --git a/dogen/cli.py b/dogen/cli.py
index 6540695..c70984c 100644
--- a/dogen/cli.py
+++ b/dogen/cli.py
@@ -14,6 +14,7 @@ from dogen.version import version
from dogen.errors import Error
from dogen.plugin import Plugin
+import colorlog
class MyParser(argparse.ArgumentParser):
@@ -25,14 +26,15 @@ class MyParser(argparse.ArgumentParser):
class CLI(object):
def __init__(self):
- self.log = logging.getLogger("dogen")
+ formatter = colorlog.ColoredFormatter(
+ '%(log_color)s%(asctime)s %(name)-12s %(levelname)-8s %(message)s')
handler = logging.StreamHandler()
- formatter = logging.Formatter(
- '%(asctime)s %(name)-12s %(levelname)-8s %(message)s')
handler.setFormatter(formatter)
+
+ self.log = logging.getLogger("dogen")
self.log.addHandler(handler)
- for package in ["requests.packages.urllib3", "pykwalify.core", "pykwalify.rule"]:
+ for package in ["requests.packages.urllib3", "pykwalify.rule"]:
log = logging.getLogger(package)
log.setLevel(logging.INFO)
@@ -86,7 +88,10 @@ class CLI(object):
except KeyboardInterrupt as e:
pass
except Error as e:
- self.log.exception(e)
+ if args.verbose:
+ self.log.exception(e)
+ else:
+ self.log.error(str(e))
sys.exit(1)
def get_plugins(self):
diff --git a/dogen/generator.py b/dogen/generator.py
index 335db99..0562cd8 100644
--- a/dogen/generator.py
+++ b/dogen/generator.py
@@ -48,12 +48,12 @@ class Generator(object):
self.ssl_verify to False.
"""
- self.log.info("Fetching '%s' file..." % location)
+ self.log.debug("Fetching '%s' file..." % location)
if not output:
output = tempfile.mktemp("-dogen")
- self.log.info("Fetched file will be saved as '%s'..." % os.path.basename(output))
+ self.log.debug("Fetched file will be saved as '%s'..." % os.path.basename(output))
r = requests.get(location, verify=self.ssl_verify, stream=True)
@@ -261,8 +261,12 @@ class Generator(object):
if 'sources' not in self.cfg or self.without_sources:
return []
+ self.log.info("Handling artifacts...")
self.cfg['artifacts'] = {}
+ sources_cache = os.environ.get("DOGEN_SOURCES_CACHE")
+ self.log.debug("Source cache will be used for all artifacts")
+
for source in self.cfg['sources']:
url = source.get('url')
artifact = source.get('artifact')
@@ -279,6 +283,8 @@ class Generator(object):
if not artifact:
raise Error("Artifact location for one or more sources was not provided, please check your image descriptor!")
+ self.log.info("Handling artifact '%s'" % artifact)
+
basename = os.path.basename(artifact)
target = source.get('target')
@@ -312,24 +318,28 @@ class Generator(object):
self.check_sum(filename, source[algorithm], algorithm)
passed = True
except Exception as e:
- self.log.warn(str(e))
+ self.log.debug(str(e))
+ self.log.warn("Local file doesn't match provided checksum, artifact '%s' will be downloaded again" % artifact)
passed = False
if not passed:
- sources_cache = os.environ.get("DOGEN_SOURCES_CACHE")
if sources_cache:
- artifact = sources_cache.replace('#filename#', basename)
+ cached_artifact = sources_cache.replace('#filename#', basename)
if algorithms:
if len(algorithms) > 1:
self.log.warn("You specified multiple algorithms for '%s' artifact, but only '%s' will be used to fetch it from cache" % (artifact, algorithms[0]))
- artifact = artifact.replace('#hash#', source[algorithms[0]]).replace('#algorithm#', algorithms[0])
-
- self.log.info("Using '%s' as cached location for artifact" % artifact)
+ cached_artifact = cached_artifact.replace('#hash#', source[algorithms[0]]).replace('#algorithm#', algorithms[0])
- self._fetch_file(artifact, filename)
+ try:
+ self._fetch_file(cached_artifact, filename)
+ except Exception as e:
+ self.log.warn("Could not download artifact from cached location: '%s': %s. Please make sure you set the correct value for DOGEN_SOURCES_CACHE (currently: '%s')." % (cached_artifact, str(e), sources_cache))
+ self._download_source(artifact, filename, source.get('hint'))
+ else:
+ self._download_source(artifact, filename, source.get('hint'))
if algorithms:
for algorithm in algorithms:
@@ -338,10 +348,24 @@ class Generator(object):
if algorithms:
self.cfg['artifacts'][target] = "%s:%s" % (algorithms[0], source[algorithms[0]])
else:
+ self.log.warn("No checksum was specified for artifact '%s'!" % artifact)
self.cfg['artifacts'][target] = None
+ def _download_source(self, artifact, filename, hint=None):
+ if Tools.is_url(artifact):
+ self.log.warn("Trying to download the '%s' artifact from original location" % artifact)
+ try:
+ self._fetch_file(artifact, filename)
+ except Exception as e:
+ raise Error("Could not download artifact from orignal location, reason: %s" % str(e))
+ else:
+ if hint:
+ self.log.info(hint)
+ self.log.info("Please download the '%s' artifact manually and save it as '%s'" % (artifact, filename))
+ raise Error("Artifact '%s' could not be fetched!" % artifact)
+
def check_sum(self, filename, checksum, algorithm):
- self.log.info("Checking '%s' %s hash..." % (os.path.basename(filename), algorithm))
+ self.log.debug("Checking '%s' %s hash..." % (os.path.basename(filename), algorithm))
hash = getattr(hashlib, algorithm)()
diff --git a/dogen/schema/kwalify_schema.yaml b/dogen/schema/kwalify_schema.yaml
index 05069f3..82c400d 100644
--- a/dogen/schema/kwalify_schema.yaml
+++ b/dogen/schema/kwalify_schema.yaml
@@ -64,6 +64,7 @@ map:
sha1: {type: str}
sha256: {type: str}
target: {type: str}
+ hint: {type: str}
packages:
seq:
- {type: str}
diff --git a/requirements.txt b/requirements.txt
index f7e4f1f..43c4660 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -3,3 +3,4 @@ Jinja2>=2.8
requests>=2.8.1
six>=1.10.0
pykwalify>=1.5.1
+colorlog>=2.10.0
| Add colored logging output
To make it easier to see important messages (especially when verbose output is turned on) - we should add color to log messages. | jboss-dockerfiles/dogen | diff --git a/tests/test_unit_generate_handle_files.py b/tests/test_unit_generate_handle_files.py
index eb85ab9..daf240a 100644
--- a/tests/test_unit_generate_handle_files.py
+++ b/tests/test_unit_generate_handle_files.py
@@ -1,6 +1,7 @@
import argparse
import unittest
import mock
+import os
import six
from dogen.generator import Generator
@@ -45,8 +46,8 @@ class TestFetchFile(unittest.TestCase):
mock_requests.assert_called_with('https://host/file.tmp', verify=None, stream=True)
mock_file().write.assert_called_once_with("file-content")
- self.log.info.assert_any_call("Fetching 'https://host/file.tmp' file...")
- self.log.info.assert_any_call("Fetched file will be saved as 'some-file'...")
+ self.log.debug.assert_any_call("Fetching 'https://host/file.tmp' file...")
+ self.log.debug.assert_any_call("Fetched file will be saved as 'some-file'...")
@mock.patch('dogen.generator.tempfile.mktemp', return_value="tmpfile")
@@ -64,8 +65,8 @@ class TestFetchFile(unittest.TestCase):
mock_requests.assert_called_with('https://host/file.tmp', verify=None, stream=True)
mock_file().write.assert_called_once_with("file-content")
- self.log.info.assert_any_call("Fetching 'https://host/file.tmp' file...")
- self.log.info.assert_any_call("Fetched file will be saved as 'tmpfile'...")
+ self.log.debug.assert_any_call("Fetching 'https://host/file.tmp' file...")
+ self.log.debug.assert_any_call("Fetched file will be saved as 'tmpfile'...")
class TestCustomTemplateHandling(unittest.TestCase):
def setUp(self):
@@ -113,3 +114,76 @@ class TestCustomTemplateHandling(unittest.TestCase):
fetch_file_mock.assert_called_with("http://host/custom-template")
self.assertEqual(self.generator.template, "some-tmp-file")
+
+class TestHandleSources(unittest.TestCase):
+ def setUp(self):
+ self.log = mock.Mock()
+ args = argparse.Namespace(path="image.yaml", output="target", without_sources=None,
+ template="http://host/custom-template", scripts_path=None,
+ additional_script=None, skip_ssl_verification=None)
+ self.generator = Generator(self.log, args)
+
+ def test_fetch_artifact_without_url_should_fail(self):
+ self.generator.cfg = {'sources': [{'artifact': 'jboss-eap.zip'}]}
+
+ with self.assertRaises(Error) as cm:
+ self.generator.handle_sources()
+
+ self.assertEquals(str(cm.exception), "Artifact 'jboss-eap.zip' could not be fetched!")
+
+ @mock.patch('dogen.generator.Generator._fetch_file', side_effect=Error("Blah"))
+ def test_fetch_artifact_should_fail_when_fetching_fails(self, mock_fetch_file):
+ self.generator.cfg = {'sources': [{'artifact': 'http://jboss-eap.zip'}]}
+
+ with self.assertRaises(Error) as cm:
+ self.generator.handle_sources()
+
+ self.assertEquals(str(cm.exception), "Could not download artifact from orignal location, reason: Blah")
+
+ @mock.patch('dogen.generator.Generator._fetch_file', side_effect=[Error("cached"), Error("original")])
+ def test_fetch_artifact_should_fail_when_cached_download_failed_and_original_failed_too(self, mock_fetch_file):
+ self.generator.cfg = {'sources': [{'artifact': 'http://host.com/jboss-eap.zip'}]}
+
+ k = mock.patch.dict(os.environ, {'DOGEN_SOURCES_CACHE':'http://cache/get?#algorithm#=#hash#'})
+ k.start()
+
+ with self.assertRaises(Error) as cm:
+ self.generator.handle_sources()
+
+ k.stop()
+
+ self.assertEquals(str(cm.exception), "Could not download artifact from orignal location, reason: original")
+ mock_fetch_file.assert_has_calls([mock.call('http://cache/get?#algorithm#=#hash#', 'target/jboss-eap.zip'), mock.call('http://host.com/jboss-eap.zip', 'target/jboss-eap.zip')])
+
+ @mock.patch('dogen.generator.Generator._fetch_file')
+ def test_fetch_artifact_should_fetch_file_from_cache(self, mock_fetch_file):
+ self.generator.cfg = {'sources': [{'artifact': 'http://host.com/jboss-eap.zip'}]}
+
+ k = mock.patch.dict(os.environ, {'DOGEN_SOURCES_CACHE':'http://cache/get?#filename#'})
+ k.start()
+ self.generator.handle_sources()
+ k.stop()
+
+ # No checksum provided and computed
+ self.assertEquals(self.generator.cfg['artifacts'], {'jboss-eap.zip': None})
+ mock_fetch_file.assert_called_with('http://cache/get?jboss-eap.zip', 'target/jboss-eap.zip')
+
+ @mock.patch('dogen.generator.Generator._fetch_file')
+ def test_fetch_artifact_should_fetch_file(self, mock_fetch_file):
+ self.generator.cfg = {'sources': [{'artifact': 'http://host.com/jboss-eap.zip'}]}
+ self.generator.handle_sources()
+ # No checksum provided and computed
+ self.assertEquals(self.generator.cfg['artifacts'], {'jboss-eap.zip': None})
+ mock_fetch_file.assert_called_with('http://host.com/jboss-eap.zip', 'target/jboss-eap.zip')
+
+ @mock.patch('dogen.generator.Generator._fetch_file', side_effect=[Error("cached"), None])
+ def test_fetch_artifact_should_download_from_original_location_if_cached_location_failed(self, mock_fetch_file):
+ self.generator.cfg = {'sources': [{'artifact': 'http://host.com/jboss-eap.zip'}]}
+
+ k = mock.patch.dict(os.environ, {'DOGEN_SOURCES_CACHE':'http://cache/get?#algorithm#=#hash#'})
+ k.start()
+ self.generator.handle_sources()
+ k.stop()
+
+ self.assertEquals(self.generator.cfg['artifacts'], {'jboss-eap.zip': None})
+ mock_fetch_file.assert_has_calls([mock.call('http://cache/get?#algorithm#=#hash#', 'target/jboss-eap.zip'), mock.call('http://host.com/jboss-eap.zip', 'target/jboss-eap.zip')])
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 2,
"test_score": 1
},
"num_modified_files": 4
} | 2.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"mock"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi==2025.1.31
charset-normalizer==3.4.1
coverage==7.8.0
docopt==0.6.2
-e git+https://github.com/jboss-dockerfiles/dogen.git@a55fa36d7ae6d19688c93d42629fbd763b72419f#egg=dogen
exceptiongroup==1.2.2
idna==3.10
iniconfig==2.1.0
Jinja2==3.1.6
MarkupSafe==3.0.2
mock==5.2.0
packaging==24.2
pluggy==1.5.0
pykwalify==1.8.0
pytest==8.3.5
pytest-cov==6.0.0
python-dateutil==2.9.0.post0
PyYAML==6.0.2
requests==2.32.3
ruamel.yaml==0.18.10
ruamel.yaml.clib==0.2.12
six==1.17.0
tomli==2.2.1
urllib3==2.3.0
| name: dogen
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- certifi==2025.1.31
- charset-normalizer==3.4.1
- coverage==7.8.0
- docopt==0.6.2
- exceptiongroup==1.2.2
- idna==3.10
- iniconfig==2.1.0
- jinja2==3.1.6
- markupsafe==3.0.2
- mock==5.2.0
- packaging==24.2
- pluggy==1.5.0
- pykwalify==1.8.0
- pytest==8.3.5
- pytest-cov==6.0.0
- python-dateutil==2.9.0.post0
- pyyaml==6.0.2
- requests==2.32.3
- ruamel-yaml==0.18.10
- ruamel-yaml-clib==0.2.12
- six==1.17.0
- tomli==2.2.1
- urllib3==2.3.0
prefix: /opt/conda/envs/dogen
| [
"tests/test_unit_generate_handle_files.py::TestFetchFile::test_fetching_with_filename",
"tests/test_unit_generate_handle_files.py::TestFetchFile::test_fetching_with_tmpfile",
"tests/test_unit_generate_handle_files.py::TestHandleSources::test_fetch_artifact_should_download_from_original_location_if_cached_location_failed",
"tests/test_unit_generate_handle_files.py::TestHandleSources::test_fetch_artifact_should_fail_when_cached_download_failed_and_original_failed_too",
"tests/test_unit_generate_handle_files.py::TestHandleSources::test_fetch_artifact_should_fail_when_fetching_fails",
"tests/test_unit_generate_handle_files.py::TestHandleSources::test_fetch_artifact_should_fetch_file",
"tests/test_unit_generate_handle_files.py::TestHandleSources::test_fetch_artifact_without_url_should_fail"
]
| []
| [
"tests/test_unit_generate_handle_files.py::TestURL::test_local_file",
"tests/test_unit_generate_handle_files.py::TestURL::test_remote_http_file",
"tests/test_unit_generate_handle_files.py::TestURL::test_remote_https_file",
"tests/test_unit_generate_handle_files.py::TestCustomTemplateHandling::test_do_not_fail_if_no_template_is_provided",
"tests/test_unit_generate_handle_files.py::TestCustomTemplateHandling::test_fetch_template_success",
"tests/test_unit_generate_handle_files.py::TestCustomTemplateHandling::test_fetch_template_with_error",
"tests/test_unit_generate_handle_files.py::TestHandleSources::test_fetch_artifact_should_fetch_file_from_cache"
]
| []
| MIT License | 1,344 | [
"dogen/schema/kwalify_schema.yaml",
"dogen/generator.py",
"requirements.txt",
"dogen/cli.py"
]
| [
"dogen/schema/kwalify_schema.yaml",
"dogen/generator.py",
"requirements.txt",
"dogen/cli.py"
]
|
|
BBN-Q__Auspex-98 | 783df41a1baaeb7a85611e5d599d07510df0d1ff | 2017-06-07 20:27:11 | bd5979590a940bcd22d0a124090ec9e05d3c050a | diff --git a/src/auspex/experiment.py b/src/auspex/experiment.py
index 936fa70f..6452b6f8 100644
--- a/src/auspex/experiment.py
+++ b/src/auspex/experiment.py
@@ -237,7 +237,6 @@ class Experiment(metaclass=MetaExperiment):
# Run the stream init
self.init_streams()
- self.update_descriptors()
def set_graph(self, edges):
unique_nodes = []
@@ -248,7 +247,6 @@ class Experiment(metaclass=MetaExperiment):
unique_nodes.append(ee.parent)
self.nodes = unique_nodes
self.graph = ExperimentGraph(edges, self.loop)
- self.update_descriptors()
def init_streams(self):
"""Establish the base descriptors for any internal data streams and connectors."""
@@ -386,6 +384,9 @@ class Experiment(metaclass=MetaExperiment):
self.instrs_connected = False
def run_sweeps(self):
+ # Propagate the descriptors through the network
+ self.update_descriptors()
+
#connect all instruments
if not self.instrs_connected:
self.connect_instruments()
@@ -495,7 +496,6 @@ class Experiment(metaclass=MetaExperiment):
for oc in self.output_connectors.values():
logger.debug("Adding axis %s to connector %s.", axis, oc.name)
oc.descriptor.add_axis(axis)
- self.update_descriptors()
def add_sweep(self, parameters, sweep_list, refine_func=None, callback_func=None, metadata=None):
ax = SweepAxis(parameters, sweep_list, refine_func=refine_func, callback_func=callback_func, metadata=metadata)
diff --git a/src/auspex/instruments/instrument.py b/src/auspex/instruments/instrument.py
index c00a8e5d..93874085 100644
--- a/src/auspex/instruments/instrument.py
+++ b/src/auspex/instruments/instrument.py
@@ -1,6 +1,6 @@
-# __all__ = ['Command', 'FloatCommand', 'StringCommand', 'IntCommand', 'RampCommand',
-# 'SCPICommand',
-# 'DigitizerChannel',
+# __all__ = ['Command', 'FloatCommand', 'StringCommand', 'IntCommand', 'RampCommand',
+# 'SCPICommand',
+# 'DigitizerChannel',
__all__ = ['Instrument'] # 'SCPIInstrument', 'CLibInstrument', 'MetaInstrument']
import numpy as np
@@ -226,24 +226,22 @@ class SCPIInstrument(Instrument):
# Load the dummy interface, unless we see that GPIB is in the resource string
if any([x in self.resource_name for x in ["GPIB", "USB", "SOCKET", "hislip", "inst0", "COM"]]):
interface_type = "VISA"
-
+
try:
if interface_type is None:
logger.debug("Instrument {} is using a generic instrument " +
"interface as none was provided.".format(self.name))
self.interface = Interface()
elif interface_type == "VISA":
- if "GPIB" in self.full_resource_name:
- pass
- elif any(is_valid_ipv4(substr) for substr in self.full_resource_name.split("::")) and "TCPIP" not in self.full_resource_name:
+ if any(is_valid_ipv4(substr) for substr in self.full_resource_name.split("::")) and "TCPIP" not in self.full_resource_name:
# assume single NIC for now
self.full_resource_name = "TCPIP0::" + self.full_resource_name
self.interface = VisaInterface(self.full_resource_name)
print(self.interface._resource)
logger.debug("A pyVISA interface {} was created for instrument {}.".format(str(self.interface._resource), self.name))
- elif interface_type == "Prologix":
+ elif interface_type == "Prologix":
self.interface = PrologixInterface(self.full_resource_name)
-
+
else:
raise ValueError("That interface type is not yet recognized.")
except:
| Defer update_descriptors() until run time
Many filters are quite confused about their lot in life before the experiment has all of its parameter sweeps added. Defer calling update_descriptors() until we actually want to run. | BBN-Q/Auspex | diff --git a/test/test_average.py b/test/test_average.py
index 5c015bd1..93bf241d 100644
--- a/test/test_average.py
+++ b/test/test_average.py
@@ -136,7 +136,7 @@ class AverageTestCase(unittest.TestCase):
exp = TestExperiment()
printer_partial = Print(name="Partial")
printer_final = Print(name="Final")
- avgr = Averager(name="TestAverager")
+ avgr = Averager(name="TestAverager", axis='freq_1')
edges = [(exp.chan1, avgr.sink),
(avgr.partial_average, printer_partial.sink),
@@ -144,8 +144,6 @@ class AverageTestCase(unittest.TestCase):
exp.set_graph(edges)
exp.add_sweep(exp.freq_1, np.linspace(0,9,10))
- avgr.axis.value = 'freq_1'
- avgr.update_descriptors()
exp.run_sweeps()
if __name__ == '__main__':
diff --git a/test/test_experiment.py b/test/test_experiment.py
index 64ba5297..6ec15025 100644
--- a/test/test_experiment.py
+++ b/test/test_experiment.py
@@ -138,7 +138,7 @@ class ExperimentTestCase(unittest.TestCase):
(pt.source, prnt.sink)]
exp.set_graph(edges)
-
+ exp.update_descriptors()
self.assertFalse(pt.sink.descriptor is None)
self.assertFalse(prnt.sink.descriptor is None)
self.assertTrue(exp.chan1.descriptor == pt.sink.descriptor)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 1
},
"num_modified_files": 2
} | 0.3 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.6",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
-e git+https://github.com/BBN-Q/Auspex.git@783df41a1baaeb7a85611e5d599d07510df0d1ff#egg=auspex
cached-property==1.5.2
certifi==2021.5.30
cffi==1.15.1
cycler==0.11.0
dataclasses==0.8
decorator==4.4.2
h5py==3.1.0
importlib-metadata==4.8.3
importlib-resources==5.4.0
iniconfig==1.1.1
kiwisolver==1.3.1
matplotlib==3.3.4
networkx==2.5.1
numpy==1.19.5
packaging==21.3
pandas==1.1.5
Pillow==8.4.0
pluggy==1.0.0
py==1.11.0
pycparser==2.21
pyparsing==3.1.4
pytest==7.0.1
python-dateutil==2.9.0.post0
pytz==2025.2
PyVISA==1.11.3
scipy==1.5.4
six==1.17.0
tomli==1.2.3
tqdm==4.64.1
typing_extensions==4.1.1
zipp==3.6.0
| name: Auspex
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- cached-property==1.5.2
- cffi==1.15.1
- cycler==0.11.0
- dataclasses==0.8
- decorator==4.4.2
- h5py==3.1.0
- importlib-metadata==4.8.3
- importlib-resources==5.4.0
- iniconfig==1.1.1
- kiwisolver==1.3.1
- matplotlib==3.3.4
- networkx==2.5.1
- numpy==1.19.5
- packaging==21.3
- pandas==1.1.5
- pillow==8.4.0
- pluggy==1.0.0
- py==1.11.0
- pycparser==2.21
- pyparsing==3.1.4
- pytest==7.0.1
- python-dateutil==2.9.0.post0
- pytz==2025.2
- pyvisa==1.11.3
- scipy==1.5.4
- six==1.17.0
- tomli==1.2.3
- tqdm==4.64.1
- typing-extensions==4.1.1
- zipp==3.6.0
prefix: /opt/conda/envs/Auspex
| [
"test/test_average.py::AverageTestCase::test_sameness"
]
| []
| [
"test/test_average.py::AverageTestCase::test_final_average_runs",
"test/test_average.py::AverageTestCase::test_final_variance_runs",
"test/test_average.py::AverageTestCase::test_partial_average_runs",
"test/test_experiment.py::ExperimentTestCase::test_compressed_streams",
"test/test_experiment.py::ExperimentTestCase::test_copy_descriptor",
"test/test_experiment.py::ExperimentTestCase::test_create_graph",
"test/test_experiment.py::ExperimentTestCase::test_depth",
"test/test_experiment.py::ExperimentTestCase::test_graph_parenting",
"test/test_experiment.py::ExperimentTestCase::test_instruments",
"test/test_experiment.py::ExperimentTestCase::test_parameters",
"test/test_experiment.py::ExperimentTestCase::test_run_simple_graph",
"test/test_experiment.py::ExperimentTestCase::test_run_simple_graph_branchout",
"test/test_experiment.py::ExperimentTestCase::test_update_descriptors"
]
| []
| Apache License 2.0 | 1,345 | [
"src/auspex/instruments/instrument.py",
"src/auspex/experiment.py"
]
| [
"src/auspex/instruments/instrument.py",
"src/auspex/experiment.py"
]
|
|
ambv__retype-3 | 3fb46555d76dd5432481936c4101f7f50b584b88 | 2017-06-08 12:07:19 | 3fb46555d76dd5432481936c4101f7f50b584b88 | diff --git a/retype.py b/retype.py
index eee4b8e..d59e1b0 100644
--- a/retype.py
+++ b/retype.py
@@ -12,6 +12,7 @@ from pathlib import Path
import re
import sys
import threading
+import tokenize
import traceback
import click
@@ -138,9 +139,9 @@ def retype_file(src, pyi_dir, targets, *, quiet=False, hg=False):
Type comments in sources are normalized to type annotations.
"""
- with open(src) as src_file:
- src_txt = src_file.read()
- src_node = lib2to3_parse(src_txt)
+ with tokenize.open(src) as src_buffer:
+ src_encoding = src_buffer.encoding
+ src_node = lib2to3_parse(src_buffer.read())
try:
with open((pyi_dir / src.name).with_suffix('.pyi')) as pyi_file:
pyi_txt = pyi_file.read()
@@ -156,7 +157,7 @@ def retype_file(src, pyi_dir, targets, *, quiet=False, hg=False):
reapply_all(pyi_ast.body, src_node)
fix_remaining_type_comments(src_node)
targets.mkdir(parents=True, exist_ok=True)
- with open(targets / src.name, 'w') as target_file:
+ with open(targets / src.name, 'w', encoding=src_encoding) as target_file:
target_file.write(lib2to3_unparse(src_node, hg=hg))
return targets / src.name
@@ -169,7 +170,11 @@ def lib2to3_parse(src_txt):
result = drv.parse_string(src_txt, True)
except ParseError as pe:
lineno, column = pe.context[1]
- faulty_line = src_txt.splitlines()[lineno - 1]
+ lines = src_txt.splitlines()
+ if src_txt[-1] != '\n':
+ faulty_line = "The source is missing a trailing newline."
+ else:
+ faulty_line = lines[lineno - 1]
raise ValueError(f"Cannot parse: {lineno}:{column}: {faulty_line}") from None
if isinstance(result, Leaf):
| lib2to3_parse assumes that the ParseError will always refer to an existing line
There seems to be a case where `ParseError` will report the line number after the last line number, causing an `IndexError` in retype:
Example file (core.py):
```
def get_message():
return '123'
```
Example stub (types/core.pyi):
```
def get_message() -> str: ...
```
```
$>retype --traceback core.py
error: core.py: list index out of range
Traceback (most recent call last):
File "retype.py", line 110, in retype_path
retype_file(src, pyi_dir, targets, quiet=quiet, hg=hg)
File "retype.py", line 132, in retype_file
src_node = lib2to3_parse(src_txt)
File "retype.py", line 161, in lib2to3_parse
faulty_line = src_txt.splitlines()[lineno - 1]
IndexError: list index out of range
```
I haven't gone digging yet to see why the `Driver` is failing to parse this, but it seems that this should be fixed as well.
**Tested using:**
- `Python 3.6.1 (v3.6.1:69c0db5, Mar 21 2017, 17:54:52) [MSC v.1900 32 bit (Intel)] on win32`
- `Python 3.6.1 (default, May 11 2017, 22:14:44) [GCC 4.9.2] on linux`
| ambv/retype | diff --git a/tests/test_retype.py b/tests/test_retype.py
index 5142782..dfb4d99 100644
--- a/tests/test_retype.py
+++ b/tests/test_retype.py
@@ -2134,6 +2134,17 @@ class PrintStmtTestCase(RetypeTestCase):
str(exception),
)
+class ParseErrorTestCase(RetypeTestCase):
+ def test_missing_trailing_newline_crash(self) -> None:
+ pyi_txt = "def f() -> None: ...\n"
+ src_txt = """
+ def f():
+ pass"""
+ exception = self.assertReapplyRaises(pyi_txt, src_txt, ValueError)
+ self.assertEqual(
+ 'Cannot parse: 4:0: The source is missing a trailing newline.',
+ str(exception),
+ )
class PostProcessTestCase(RetypeTestCase):
def test_straddling_variable_comments(self) -> None:
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 1
},
"num_modified_files": 1
} | 17.6 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | click==8.1.8
exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
packaging @ file:///croot/packaging_1734472117206/work
pluggy @ file:///croot/pluggy_1733169602837/work
pytest @ file:///croot/pytest_1738938843180/work
-e git+https://github.com/ambv/retype.git@3fb46555d76dd5432481936c4101f7f50b584b88#egg=retype
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
typed-ast==1.5.5
| name: retype
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- click==8.1.8
- typed-ast==1.5.5
prefix: /opt/conda/envs/retype
| [
"tests/test_retype.py::ParseErrorTestCase::test_missing_trailing_newline_crash"
]
| []
| [
"tests/test_retype.py::ImportTestCase::test_equal",
"tests/test_retype.py::ImportTestCase::test_matched1",
"tests/test_retype.py::ImportTestCase::test_matched2",
"tests/test_retype.py::ImportTestCase::test_matched3",
"tests/test_retype.py::ImportTestCase::test_src_empty",
"tests/test_retype.py::ImportTestCase::test_unmatched1",
"tests/test_retype.py::ImportTestCase::test_unmatched2",
"tests/test_retype.py::ImportTestCase::test_unmatched3",
"tests/test_retype.py::ImportTestCase::test_unmatched4",
"tests/test_retype.py::ImportTestCase::test_unmatched5",
"tests/test_retype.py::ImportTestCase::test_unmatched6",
"tests/test_retype.py::ImportTestCase::test_unmatched7",
"tests/test_retype.py::FromImportTestCase::test_equal",
"tests/test_retype.py::FromImportTestCase::test_matched1",
"tests/test_retype.py::FromImportTestCase::test_matched2",
"tests/test_retype.py::FromImportTestCase::test_matched3",
"tests/test_retype.py::FromImportTestCase::test_src_empty",
"tests/test_retype.py::FromImportTestCase::test_unmatched1",
"tests/test_retype.py::FromImportTestCase::test_unmatched2",
"tests/test_retype.py::FromImportTestCase::test_unmatched3",
"tests/test_retype.py::FromImportTestCase::test_unmatched4",
"tests/test_retype.py::FromImportTestCase::test_unmatched5",
"tests/test_retype.py::FromImportTestCase::test_unmatched6",
"tests/test_retype.py::FromImportTestCase::test_unmatched7",
"tests/test_retype.py::FromImportTestCase::test_unmatched8",
"tests/test_retype.py::FunctionReturnTestCase::test_complex_return_value",
"tests/test_retype.py::FunctionReturnTestCase::test_complex_return_value2",
"tests/test_retype.py::FunctionReturnTestCase::test_complex_return_value3",
"tests/test_retype.py::FunctionReturnTestCase::test_complex_return_value_spurious_type_comment",
"tests/test_retype.py::FunctionReturnTestCase::test_complex_return_value_type_comment",
"tests/test_retype.py::FunctionReturnTestCase::test_mismatched_return_value",
"tests/test_retype.py::FunctionReturnTestCase::test_missing_return_value_both",
"tests/test_retype.py::FunctionReturnTestCase::test_missing_return_value_both_incremental",
"tests/test_retype.py::FunctionReturnTestCase::test_missing_return_value_pyi",
"tests/test_retype.py::FunctionReturnTestCase::test_missing_return_value_pyi_incremental",
"tests/test_retype.py::FunctionReturnTestCase::test_missing_return_value_src",
"tests/test_retype.py::FunctionReturnTestCase::test_missing_return_value_src_incremental",
"tests/test_retype.py::FunctionArgumentTestCase::test_complex_ann",
"tests/test_retype.py::FunctionArgumentTestCase::test_complex_ann_with_default",
"tests/test_retype.py::FunctionArgumentTestCase::test_complex_sig1",
"tests/test_retype.py::FunctionArgumentTestCase::test_complex_sig1_type_comment",
"tests/test_retype.py::FunctionArgumentTestCase::test_complex_sig2",
"tests/test_retype.py::FunctionArgumentTestCase::test_complex_sig2_type_comment",
"tests/test_retype.py::FunctionArgumentTestCase::test_complex_sig3_type_comment",
"tests/test_retype.py::FunctionArgumentTestCase::test_complex_sig4_spurious_type_comment",
"tests/test_retype.py::FunctionArgumentTestCase::test_complex_sig4_type_comment",
"tests/test_retype.py::FunctionArgumentTestCase::test_complex_sig_async",
"tests/test_retype.py::FunctionArgumentTestCase::test_extra_arg1",
"tests/test_retype.py::FunctionArgumentTestCase::test_extra_arg2",
"tests/test_retype.py::FunctionArgumentTestCase::test_extra_arg_kwonly",
"tests/test_retype.py::FunctionArgumentTestCase::test_missing_ann_both",
"tests/test_retype.py::FunctionArgumentTestCase::test_missing_ann_both_incremental",
"tests/test_retype.py::FunctionArgumentTestCase::test_missing_ann_both_multiple_args_incremental",
"tests/test_retype.py::FunctionArgumentTestCase::test_missing_ann_pyi",
"tests/test_retype.py::FunctionArgumentTestCase::test_missing_ann_src",
"tests/test_retype.py::FunctionArgumentTestCase::test_missing_arg",
"tests/test_retype.py::FunctionArgumentTestCase::test_missing_arg2",
"tests/test_retype.py::FunctionArgumentTestCase::test_missing_arg_kwonly",
"tests/test_retype.py::FunctionArgumentTestCase::test_missing_default_arg_pyi",
"tests/test_retype.py::FunctionArgumentTestCase::test_missing_default_arg_src",
"tests/test_retype.py::FunctionArgumentTestCase::test_no_args",
"tests/test_retype.py::FunctionVariableTestCase::test_basic",
"tests/test_retype.py::FunctionVariableTestCase::test_complex",
"tests/test_retype.py::FunctionVariableTestCase::test_complex_type",
"tests/test_retype.py::FunctionVariableTestCase::test_default_type",
"tests/test_retype.py::FunctionVariableTestCase::test_no_value",
"tests/test_retype.py::FunctionVariableTestCase::test_type_mismatch",
"tests/test_retype.py::FunctionVariableTypeCommentTestCase::test_basic",
"tests/test_retype.py::FunctionVariableTypeCommentTestCase::test_complex",
"tests/test_retype.py::FunctionVariableTypeCommentTestCase::test_complex_type",
"tests/test_retype.py::FunctionVariableTypeCommentTestCase::test_default_type",
"tests/test_retype.py::FunctionVariableTypeCommentTestCase::test_no_value",
"tests/test_retype.py::FunctionVariableTypeCommentTestCase::test_no_value_type_comment",
"tests/test_retype.py::FunctionVariableTypeCommentTestCase::test_type_mismatch",
"tests/test_retype.py::MethodTestCase::test_basic",
"tests/test_retype.py::MethodTestCase::test_complex_sig1_type_comment",
"tests/test_retype.py::MethodTestCase::test_complex_sig2_type_comment",
"tests/test_retype.py::MethodTestCase::test_complex_sig3_type_comment",
"tests/test_retype.py::MethodTestCase::test_complex_sig4_type_comment",
"tests/test_retype.py::MethodTestCase::test_complex_sig5_type_comment",
"tests/test_retype.py::MethodTestCase::test_decorator_mismatch",
"tests/test_retype.py::MethodTestCase::test_decorator_mismatch2",
"tests/test_retype.py::MethodTestCase::test_decorator_mismatch3",
"tests/test_retype.py::MethodTestCase::test_function",
"tests/test_retype.py::MethodTestCase::test_missing_class",
"tests/test_retype.py::MethodTestCase::test_staticmethod",
"tests/test_retype.py::MethodTestCase::test_two_classes",
"tests/test_retype.py::MethodVariableTestCase::test_basic",
"tests/test_retype.py::MethodVariableTestCase::test_complex",
"tests/test_retype.py::MethodVariableTestCase::test_default_type",
"tests/test_retype.py::MethodVariableTestCase::test_no_value",
"tests/test_retype.py::MethodVariableTestCase::test_type_mismatch",
"tests/test_retype.py::MethodVariableTypeCommentTestCase::test_basic",
"tests/test_retype.py::MethodVariableTypeCommentTestCase::test_complex",
"tests/test_retype.py::MethodVariableTypeCommentTestCase::test_default_type",
"tests/test_retype.py::MethodVariableTypeCommentTestCase::test_no_value",
"tests/test_retype.py::MethodVariableTypeCommentTestCase::test_no_value_type_comment",
"tests/test_retype.py::MethodVariableTypeCommentTestCase::test_type_mismatch",
"tests/test_retype.py::ModuleLevelVariableTestCase::test_alias_basic",
"tests/test_retype.py::ModuleLevelVariableTestCase::test_alias_many",
"tests/test_retype.py::ModuleLevelVariableTestCase::test_alias_typevar",
"tests/test_retype.py::ModuleLevelVariableTestCase::test_alias_typevar_typing",
"tests/test_retype.py::ModuleLevelVariableTestCase::test_basic",
"tests/test_retype.py::ModuleLevelVariableTestCase::test_complex",
"tests/test_retype.py::ModuleLevelVariableTestCase::test_complex_with_imports",
"tests/test_retype.py::ModuleLevelVariableTestCase::test_default_type",
"tests/test_retype.py::ModuleLevelVariableTestCase::test_no_value",
"tests/test_retype.py::ModuleLevelVariableTestCase::test_type_mismatch",
"tests/test_retype.py::ClassVariableTestCase::test_basic",
"tests/test_retype.py::ClassVariableTestCase::test_complex",
"tests/test_retype.py::ClassVariableTestCase::test_default_type",
"tests/test_retype.py::ClassVariableTestCase::test_instance_fields_assignment_docstring",
"tests/test_retype.py::ClassVariableTestCase::test_instance_fields_assignment_no_docstring",
"tests/test_retype.py::ClassVariableTestCase::test_instance_fields_no_assignment",
"tests/test_retype.py::ClassVariableTestCase::test_instance_fields_no_assignment_docstring",
"tests/test_retype.py::ClassVariableTestCase::test_instance_fields_no_assignment_no_docstring",
"tests/test_retype.py::ClassVariableTestCase::test_no_value",
"tests/test_retype.py::ClassVariableTestCase::test_type_mismatch",
"tests/test_retype.py::SerializeTestCase::test_serialize_attribute",
"tests/test_retype.py::SerializeTestCase::test_serialize_name",
"tests/test_retype.py::PrintStmtTestCase::test_print_stmt_crash",
"tests/test_retype.py::PostProcessTestCase::test_straddling_function_signature_type_comments1",
"tests/test_retype.py::PostProcessTestCase::test_straddling_function_signature_type_comments2",
"tests/test_retype.py::PostProcessTestCase::test_straddling_function_signature_type_comments3",
"tests/test_retype.py::PostProcessTestCase::test_straddling_variable_comments"
]
| []
| MIT License | 1,346 | [
"retype.py"
]
| [
"retype.py"
]
|
|
slarse__pdfebc-22 | 95bdd8a5cc56f82d7f77678a88d50d7be9762995 | 2017-06-08 18:51:50 | 95bdd8a5cc56f82d7f77678a88d50d7be9762995 | codecov[bot]: # [Codecov](https://codecov.io/gh/slarse/pdfebc/pull/22?src=pr&el=h1) Report
> Merging [#22](https://codecov.io/gh/slarse/pdfebc/pull/22?src=pr&el=desc) into [master](https://codecov.io/gh/slarse/pdfebc/commit/95bdd8a5cc56f82d7f77678a88d50d7be9762995?src=pr&el=desc) will **decrease** coverage by `1.26%`.
> The diff coverage is `77.14%`.
[](https://codecov.io/gh/slarse/pdfebc/pull/22?src=pr&el=tree)
```diff
@@ Coverage Diff @@
## master #22 +/- ##
==========================================
- Coverage 70.5% 69.23% -1.27%
==========================================
Files 4 4
Lines 200 247 +47
==========================================
+ Hits 141 171 +30
- Misses 59 76 +17
```
| [Impacted Files](https://codecov.io/gh/slarse/pdfebc/pull/22?src=pr&el=tree) | Coverage Δ | |
|---|---|---|
| [pdfebc/main.py](https://codecov.io/gh/slarse/pdfebc/pull/22?src=pr&el=tree#diff-cGRmZWJjL21haW4ucHk=) | `0% <0%> (ø)` | :arrow_up: |
| [pdfebc/cli.py](https://codecov.io/gh/slarse/pdfebc/pull/22?src=pr&el=tree#diff-cGRmZWJjL2NsaS5weQ==) | `56.86% <20%> (-4.01%)` | :arrow_down: |
| [pdfebc/utils.py](https://codecov.io/gh/slarse/pdfebc/pull/22?src=pr&el=tree#diff-cGRmZWJjL3V0aWxzLnB5) | `94.87% <94.64%> (-3.93%)` | :arrow_down: |
------
[Continue to review full report at Codecov](https://codecov.io/gh/slarse/pdfebc/pull/22?src=pr&el=continue).
> **Legend** - [Click here to learn more](https://docs.codecov.io/docs/codecov-delta)
> `Δ = absolute <relative> (impact)`, `ø = not affected`, `? = missing data`
> Powered by [Codecov](https://codecov.io/gh/slarse/pdfebc/pull/22?src=pr&el=footer). Last update [95bdd8a...79197d3](https://codecov.io/gh/slarse/pdfebc/pull/22?src=pr&el=lastupdated). Read the [comment docs](https://docs.codecov.io/docs/pull-request-comments).
codecov[bot]: # [Codecov](https://codecov.io/gh/slarse/pdfebc/pull/22?src=pr&el=h1) Report
> Merging [#22](https://codecov.io/gh/slarse/pdfebc/pull/22?src=pr&el=desc) into [master](https://codecov.io/gh/slarse/pdfebc/commit/95bdd8a5cc56f82d7f77678a88d50d7be9762995?src=pr&el=desc) will **decrease** coverage by `1.26%`.
> The diff coverage is `77.14%`.
[](https://codecov.io/gh/slarse/pdfebc/pull/22?src=pr&el=tree)
```diff
@@ Coverage Diff @@
## master #22 +/- ##
==========================================
- Coverage 70.5% 69.23% -1.27%
==========================================
Files 4 4
Lines 200 247 +47
==========================================
+ Hits 141 171 +30
- Misses 59 76 +17
```
| [Impacted Files](https://codecov.io/gh/slarse/pdfebc/pull/22?src=pr&el=tree) | Coverage Δ | |
|---|---|---|
| [pdfebc/main.py](https://codecov.io/gh/slarse/pdfebc/pull/22?src=pr&el=tree#diff-cGRmZWJjL21haW4ucHk=) | `0% <0%> (ø)` | :arrow_up: |
| [pdfebc/cli.py](https://codecov.io/gh/slarse/pdfebc/pull/22?src=pr&el=tree#diff-cGRmZWJjL2NsaS5weQ==) | `56.86% <20%> (-4.01%)` | :arrow_down: |
| [pdfebc/utils.py](https://codecov.io/gh/slarse/pdfebc/pull/22?src=pr&el=tree#diff-cGRmZWJjL3V0aWxzLnB5) | `94.87% <94.64%> (-3.93%)` | :arrow_down: |
------
[Continue to review full report at Codecov](https://codecov.io/gh/slarse/pdfebc/pull/22?src=pr&el=continue).
> **Legend** - [Click here to learn more](https://docs.codecov.io/docs/codecov-delta)
> `Δ = absolute <relative> (impact)`, `ø = not affected`, `? = missing data`
> Powered by [Codecov](https://codecov.io/gh/slarse/pdfebc/pull/22?src=pr&el=footer). Last update [95bdd8a...79197d3](https://codecov.io/gh/slarse/pdfebc/pull/22?src=pr&el=lastupdated). Read the [comment docs](https://docs.codecov.io/docs/pull-request-comments).
| diff --git a/pdfebc/cli.py b/pdfebc/cli.py
index 9b38bd1..40bb5b3 100644
--- a/pdfebc/cli.py
+++ b/pdfebc/cli.py
@@ -9,6 +9,7 @@
"""
import argparse
import sys
+from . import utils
OUTPUT_DIR_DEFAULT = "pdfebc_out"
SOURCE_DIR_DEFAULT = "."
@@ -41,16 +42,20 @@ def create_argparser():
Returns:
argparse.ArgumentParser: The argument parser for pdfebc.
"""
+ config = utils.read_config()
+ out_dir_default = utils.try_get_conf(config, utils.DEFAULT_SECTION_KEY, utils.OUT_DEFAULT_DIR_KEY)
+ src_dir_default = utils.try_get_conf(config, utils.DEFAULT_SECTION_KEY, utils.SRC_DEFAULT_DIR_KEY)
+ gs_default_binary = utils.try_get_conf(config, utils.DEFAULT_SECTION_KEY, utils.GS_DEFAULT_BINARY_KEY)
parser = argparse.ArgumentParser(
description=DESCRIPTION
)
parser.add_argument(
OUTPUT_DIR_SHORT, OUTPUT_DIR_LONG, help=OUTPUT_DIR_HELP, type=str,
- default=OUTPUT_DIR_DEFAULT
+ default=out_dir_default
)
parser.add_argument(
SOURCE_DIR_SHORT, SOURCE_DIR_LONG, help=SOURCE_DIR_HELP, type=str,
- default=SOURCE_DIR_DEFAULT
+ default=src_dir_default
)
parser.add_argument(
NO_MAKEDIR_SHORT, NO_MAKEDIR_LONG, help=NO_MAKEDIR_HELP,
@@ -58,7 +63,7 @@ def create_argparser():
)
parser.add_argument(
GS_SHORT, GS_LONG, help=GS_HELP,
- type=str, default=GHOSTSCRIPT_BINARY_DEFAULT
+ type=str, default=gs_default_binary
)
parser.add_argument(
SEND_SHORT, SEND_LONG, help=SEND_HELP,
diff --git a/pdfebc/main.py b/pdfebc/main.py
index d5a12e7..6162f76 100644
--- a/pdfebc/main.py
+++ b/pdfebc/main.py
@@ -6,6 +6,7 @@ Author: Simon Larsén
import os
import shutil
import smtplib
+import sys
from . import cli, core, utils
AUTH_ERROR = """An authentication error has occured!
@@ -24,7 +25,14 @@ Please open an issue about this error at 'https://github.com/slarse/pdfebc/issue
def main():
"""Run PDFEBC."""
- parser = cli.create_argparser()
+ try:
+ parser = cli.create_argparser()
+ except utils.ConfigurationError as e:
+ print("FIRST TIME CONFIG NOT YET IMPLEMENTED")
+ print("FIX THE CONFIG FILE!")
+ print("EXITING!")
+ print(e)
+ sys.exit(1)
args = parser.parse_args()
if not args.nomakedir:
os.makedirs(args.outdir)
diff --git a/pdfebc/utils.py b/pdfebc/utils.py
index 90b2ca9..d4d3fdd 100644
--- a/pdfebc/utils.py
+++ b/pdfebc/utils.py
@@ -1,20 +1,18 @@
# -*- coding: utf-8 -*-
"""Module containing util functions for the pdfebc program.
-It uses Google's SMTP server for sending emails. If you wish to use another server, simply change
-the SMTP_SERVER variable to your preferred server.
+The SMTP server and port are configured in the config.cnf file.
Requires a config file called 'email.cnf' in the user conf directory specified by appdirs. In the
-case of Arch Linux, this is '$HOME/.config/pdfebc/email.cnf', but this may vary with distributions.
+case of Arch Linux, this is '$HOME/.config/pdfebc/config.cnf', but this may vary with distributions.
The config file should have the following format:
- |[email]
- |user = sender_email
- |pass = password
- |receiver = receiver_email
-
-All characters after the colon and whitespace (as much whitespace as you'd like) until
-EOL counts as the username/password.
+ |[EMAIL]
+ |user = <sender_email>
+ |pass = <password>
+ |receiver = <receiver_email>
+ |smtp_server = <smtp_server>
+ |smtp_port = <smtp_port>
.. module:: utils
:platform: Unix
@@ -29,22 +27,40 @@ from email.mime.text import MIMEText
from email.mime.application import MIMEApplication
from email.mime.multipart import MIMEMultipart
import appdirs
+from . import cli
-CONFIG_PATH = os.path.join(appdirs.user_config_dir('pdfebc'), 'email.cnf')
-SECTION_KEY = "email"
+CONFIG_FILENAME = 'config.cnf'
+CONFIG_PATH = os.path.join(appdirs.user_config_dir('pdfebc'), CONFIG_FILENAME)
+EMAIL_SECTION_KEY = "EMAIL"
PASSWORD_KEY = "pass"
USER_KEY = "user"
-RECIEVER_KEY = "receiver"
-SMTP_SERVER = "smtp.gmail.com"
+RECEIVER_KEY = "receiver"
+DEFAULT_SMTP_SERVER = "smtp.gmail.com"
+DEFAULT_SMTP_PORT = 587
+SMTP_SERVER_KEY = "smtp_server"
+SMTP_PORT_KEY = "smtp_port"
+EMAIL_SECTION_KEYS = {USER_KEY, PASSWORD_KEY, RECEIVER_KEY, SMTP_SERVER_KEY, SMTP_PORT_KEY}
+DEFAULT_SECTION_KEY = "DEFAULTS"
+GS_DEFAULT_BINARY_KEY = "gs_binary"
+SRC_DEFAULT_DIR_KEY = "src"
+OUT_DEFAULT_DIR_KEY = "out"
+DEFAULT_SECTION_KEYS = {GS_DEFAULT_BINARY_KEY, SRC_DEFAULT_DIR_KEY, OUT_DEFAULT_DIR_KEY}
+SECTION_KEYS = {EMAIL_SECTION_KEY: EMAIL_SECTION_KEYS,
+ DEFAULT_SECTION_KEY: DEFAULT_SECTION_KEYS}
SENDING_PRECONF = """Sending files ...
From: {}
To: {}
SMTP Server: {}
+SMTP Port: {}
+
Files:
{}"""
FILES_SENT = "Files successfully sent!"""
+class ConfigurationError(configparser.ParsingError):
+ pass
+
def create_email_config(user, password, receiver):
"""Create an email config.
@@ -57,10 +73,34 @@ def create_email_config(user, password, receiver):
configparser.ConfigParser: A ConfigParser
"""
config = configparser.ConfigParser()
- config[SECTION_KEY] = {
+ config[EMAIL_SECTION_KEY] = {
USER_KEY: user,
PASSWORD_KEY: password,
- RECIEVER_KEY: receiver}
+ RECEIVER_KEY: receiver}
+ return config
+
+def create_config(sections, section_contents):
+ """Create a config file from the provided sections and key value pairs.
+
+ Args:
+ sections (List[str]): A list of section keys.
+ key_value_pairs (Dict[str, str]): A list of of dictionaries. Must be as long as
+ the list of sections. That is to say, if there are two sections, there should be two
+ dicts.
+ Returns:
+ configparser.ConfigParser: A ConfigParser.
+ Raises:
+ ValueError
+ """
+ sections_length, section_contents_length = len(sections), len(section_contents)
+ if sections_length != section_contents_length:
+ raise ValueError("Mismatch between argument lengths.\n"
+ "len(sections) = {}\n"
+ "len(section_contents) = {}"
+ .format(sections_length, section_contents_length))
+ config = configparser.ConfigParser()
+ for section, section_content in zip(sections, section_contents):
+ config[section] = section_content
return config
def write_config(config, config_path=CONFIG_PATH):
@@ -75,15 +115,13 @@ def write_config(config, config_path=CONFIG_PATH):
with open(config_path, 'w', encoding='utf-8') as f:
config.write(f)
-def read_email_config(config_path=CONFIG_PATH):
- """Read the email config file.
+def read_config(config_path=CONFIG_PATH):
+ """Read the config information from the config file.
Args:
config_path (str): Relative path to the email config file.
-
Returns:
- (str, str, str): User email, user password and receiver email.
-
+ configparser.ConfigParser: A ConfigParser with the config information.
Raises:
IOError
"""
@@ -91,10 +129,37 @@ def read_email_config(config_path=CONFIG_PATH):
raise IOError("No config file found at %s" % config_path)
config = configparser.ConfigParser()
config.read(config_path)
- user = try_get_conf(config, SECTION_KEY, USER_KEY)
- password = try_get_conf(config, SECTION_KEY, PASSWORD_KEY)
- receiver = try_get_conf(config, SECTION_KEY, RECIEVER_KEY)
- return user, password, receiver
+ check_config(config)
+ return config
+
+def section_is_healthy(section, expected_keys):
+ """Check that the section contains all keys it should.
+
+ Args:
+ section (configparser.SectionProxy): A map-like object.
+ expected_keys (Iterable): A Set of keys that should be contained in the section.
+ Returns:
+ boolean: True if the section is healthy, false if not.
+ """
+ return set(section.keys()) == set(expected_keys)
+
+def check_config(config):
+ """Check that all sections of the config contain the keys that they should.
+
+ Args:
+ config (configparser.ConfigParser): A ConfigParser.
+ Raises:
+ ConfigurationError
+ """
+ for section, expected_section_keys in SECTION_KEYS.items():
+ try:
+ section_content = config[section]
+ if not section_is_healthy(section_content, expected_section_keys):
+ raise ConfigurationError("The {} section of the configuration file is badly formed!"
+ .format(section))
+ except KeyError as e:
+ raise ConfigurationError("Config file badly formed! Section {} is missing."
+ .format(section))
def try_get_conf(config, section, attribute):
"""Try to parse an attribute of the config file.
@@ -103,37 +168,34 @@ def try_get_conf(config, section, attribute):
config (configparser.ConfigParser): A ConfigParser.
section (str): The section of the config file to get information from.
attribute (str): The attribute of the section to fetch.
-
Returns:
str: The string corresponding to the section and attribute.
-
Raises:
- configparser.ParseError
+ ConfigurationError
"""
try:
return config[section][attribute]
except KeyError:
- raise configparser.ParsingError("""Config file badly formed!\n
- Failed to get attribute '%s' from section '%s'!""" % (attribute, section))
+ raise ConfigurationError("Config file badly formed!\n"
+ "Failed to get attribute '{}' from section '{}'!"
+ .format(attribute, section))
-def send_with_attachments(user, password, receiver, subject, message, filepaths):
+def send_with_attachments(subject, message, filepaths, config):
"""Send an email from the user (a gmail) to the receiver.
Args:
- user (str): The sender's email address.
- password (str): The password to the 'user' address.
- receiver (str): The receiver's email address.
subject (str): Subject of the email.
message (str): A message.
filepaths (list(str)): Filepaths to files to be attached.
+ config (configparser.ConfigParser): A ConfigParser.
"""
email_ = MIMEMultipart()
email_.attach(MIMEText(message))
email_["Subject"] = subject
- email_["From"] = user
- email_["To"] = receiver
+ email_["From"] = try_get_conf(config, EMAIL_SECTION_KEY, USER_KEY)
+ email_["To"] = try_get_conf(config, EMAIL_SECTION_KEY, RECEIVER_KEY)
attach_files(filepaths, email_)
- send_email(user, password, email_)
+ send_email(email_, config)
def attach_files(filepaths, email_):
@@ -150,15 +212,18 @@ def attach_files(filepaths, email_):
part["Content-Disposition"] = 'attachment; filename="%s"' % base
email_.attach(part)
-def send_email(user, password, email_):
+def send_email(email_, config):
"""Send an email.
Args:
- user (str): Sender's email address.
- password (str): Password to sender's email.
email_ (email.MIMEMultipart): The email to send.
+ config (configparser.ConfigParser): A ConfigParser.
"""
- server = smtplib.SMTP(SMTP_SERVER, 587)
+ smtp_server = try_get_conf(config, EMAIL_SECTION_KEY, SMTP_SERVER_KEY)
+ smtp_port = int(try_get_conf(config, EMAIL_SECTION_KEY, SMTP_PORT_KEY))
+ user = try_get_conf(config, EMAIL_SECTION_KEY, USER_KEY)
+ password = try_get_conf(config, EMAIL_SECTION_KEY, PASSWORD_KEY)
+ server = smtplib.SMTP(smtp_server, smtp_port)
server.starttls()
server.login(user, password)
server.send_message(email_)
@@ -170,12 +235,16 @@ def send_files_preconf(filepaths, config_path=CONFIG_PATH, status_callback=None)
Args:
filepaths (list(str)): A list of filepaths.
"""
- user, password, receiver = read_email_config(config_path)
+ config = read_config(config_path)
subject = "PDF files from pdfebc"
message = ""
- args = (user, receiver, SMTP_SERVER, '\n'.join(filepaths))
+ args = (try_get_conf(config, EMAIL_SECTION_KEY, USER_KEY),
+ try_get_conf(config, EMAIL_SECTION_KEY, RECEIVER_KEY),
+ try_get_conf(config, EMAIL_SECTION_KEY, SMTP_SERVER_KEY),
+ try_get_conf(config, EMAIL_SECTION_KEY, SMTP_PORT_KEY),
+ '\n'.join(filepaths))
if_callable_call_with_formatted_string(status_callback, SENDING_PRECONF, *args)
- send_with_attachments(user, password, receiver, subject, message, filepaths)
+ send_with_attachments(subject, message, filepaths, config)
if_callable_call_with_formatted_string(status_callback, FILES_SENT)
def valid_config_exists(config_path=CONFIG_PATH):
@@ -189,8 +258,8 @@ def valid_config_exists(config_path=CONFIG_PATH):
"""
if (os.path.isfile(config_path)):
try:
- read_email_config(config_path)
- except configparser.ParsingError:
+ read_config(config_path)
+ except ConfigurationError or IOError:
return False
else:
return False
| Move all configurations to a common config file
Right now, the `email.cnf` file only includes `user`, `password` and `receiver` values. It shold be renamed `config.cnf` and include EVERYTHING that the user may want to customize.
- [x] Everything currently in `email.cnf`
- [x] SMTP server and port
- [x] All defaults that are currently set in the CLI (e.g. `Ghostscript` binary default).
This is a breaking change and should be added last, before version 0.2.0 is released. | slarse/pdfebc | diff --git a/tests/test_utils.py b/tests/test_utils.py
index ff146e8..f35c7d9 100644
--- a/tests/test_utils.py
+++ b/tests/test_utils.py
@@ -17,9 +17,27 @@ class UtilsTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
- cls.expected_user = "test_user"
- cls.expected_password = "test_password"
- cls.expected_receiver = "test_receiver"
+ cls.user = 'test_user'
+ cls.password = 'test_password'
+ cls.receiver = 'test_receiver'
+ cls.smtp_server = 'test_server'
+ cls.smtp_port = 999
+ cls.user_key = pdfebc.utils.USER_KEY
+ cls.password_key = pdfebc.utils.PASSWORD_KEY
+ cls.receiver_key = pdfebc.utils.RECEIVER_KEY
+ cls.smtp_server_key = pdfebc.utils.SMTP_SERVER_KEY
+ cls.smtp_port_key = pdfebc.utils.SMTP_PORT_KEY
+ cls.gs_binary_default_key = pdfebc.utils.GS_DEFAULT_BINARY_KEY
+ cls.out_dir_default_key = pdfebc.utils.OUT_DEFAULT_DIR_KEY
+ cls.src_dir_default_key = pdfebc.utils.SRC_DEFAULT_DIR_KEY
+ cls.email_section_key = pdfebc.utils.EMAIL_SECTION_KEY
+ cls.default_section_key = pdfebc.utils.DEFAULT_SECTION_KEY
+ cls.email_section_keys = {cls.user_key, cls.password_key, cls.receiver_key,
+ cls.smtp_server_key, cls.smtp_port_key}
+ cls.default_section_keys = {cls.gs_binary_default_key, cls.src_dir_default_key, cls.out_dir_default_key}
+ cls.section_keys = {cls.email_section_key: cls.email_section_keys,
+ cls.default_section_key: cls.default_section_keys}
+
@classmethod
def setUp(cls):
@@ -30,12 +48,21 @@ class UtilsTest(unittest.TestCase):
encoding='utf-8', suffix=pdfebc.core.PDF_EXTENSION, mode='w', delete=False)
cls.attachment_filenames.append(file.name)
file.close()
- cls.valid_config = pdfebc.utils.create_email_config(cls.expected_user,
- cls.expected_password,
- cls.expected_receiver)
+ cls.valid_config = configparser.ConfigParser()
+ cls.valid_config[pdfebc.utils.EMAIL_SECTION_KEY] = ({
+ cls.user_key: cls.user,
+ cls.password_key: cls.password,
+ cls.receiver_key: cls.receiver,
+ cls.smtp_server_key: cls.smtp_server,
+ cls.smtp_port_key: cls.smtp_port})
+ cls.valid_config[pdfebc.utils.DEFAULT_SECTION_KEY] = {
+ cls.gs_binary_default_key: pdfebc.cli.GHOSTSCRIPT_BINARY_DEFAULT,
+ cls.src_dir_default_key: pdfebc.cli.SOURCE_DIR_DEFAULT,
+ cls.out_dir_default_key: pdfebc.cli.OUTPUT_DIR_DEFAULT}
cls.invalid_config = configparser.ConfigParser()
- cls.invalid_config[pdfebc.utils.SECTION_KEY] = {pdfebc.utils.USER_KEY: cls.expected_user,
- pdfebc.utils.PASSWORD_KEY: cls.expected_password}
+ cls.invalid_config[pdfebc.utils.EMAIL_SECTION_KEY] = {
+ cls.user_key: cls.user,
+ cls.password_key: cls.password}
@classmethod
def tearDown(cls):
@@ -44,42 +71,73 @@ class UtilsTest(unittest.TestCase):
for filename in cls.attachment_filenames:
os.unlink(filename)
+ def test_create_config(self):
+ sections = self.section_keys.keys()
+ section_contents = [{
+ self.user_key: self.user,
+ self.password_key: self.password,
+ self.receiver_key: self.receiver,
+ self.smtp_server_key: pdfebc.utils.DEFAULT_SMTP_SERVER,
+ self.smtp_port_key: str(pdfebc.utils.DEFAULT_SMTP_PORT)},
+ {self.gs_binary_default_key: pdfebc.cli.GHOSTSCRIPT_BINARY_DEFAULT,
+ self.src_dir_default_key: pdfebc.cli.SOURCE_DIR_DEFAULT,
+ self.out_dir_default_key: pdfebc.cli.OUTPUT_DIR_DEFAULT}]
+ config = pdfebc.utils.create_config(sections, section_contents)
+ for section, section_content in zip(sections, section_contents):
+ config_section = config[section]
+ for section_content_key, section_content_value in section_content.items():
+ self.assertEqual(section_content_value, config_section[section_content_key])
+
+ def test_create_config_too_few_sections(self):
+ sections = ["EMAIL"]
+ section_contents = [{1: 2, 3: 4}, {1: 2}]
+ with self.assertRaises(ValueError):
+ pdfebc.utils.create_config(sections, section_contents)
+
def test_write_valid_email_config(self):
self.temp_config_file.close()
pdfebc.utils.write_config(self.valid_config, self.temp_config_file.name)
config = configparser.ConfigParser()
with open(self.temp_config_file.name) as file:
config.read_file(file)
- section = config[pdfebc.utils.SECTION_KEY]
- self.assertEqual(self.expected_user, section[pdfebc.utils.USER_KEY])
- self.assertEqual(self.expected_password, section[pdfebc.utils.PASSWORD_KEY])
- self.assertEqual(self.expected_receiver, section[pdfebc.utils.RECIEVER_KEY])
+ section = config[pdfebc.utils.EMAIL_SECTION_KEY]
+ self.assertEqual(self.user, section[self.user_key])
+ self.assertEqual(self.password, section[self.password_key])
+ self.assertEqual(self.receiver, section[self.receiver_key])
def test_read_valid_email_config(self):
self.valid_config.write(self.temp_config_file)
self.temp_config_file.flush()
self.temp_config_file.close()
- actual_user, actual_password, actual_receiver = pdfebc.utils.read_email_config(self.temp_config_file.name)
- self.assertEqual(self.expected_user, actual_user)
- self.assertEqual(self.expected_password, actual_password)
- self.assertEqual(self.expected_receiver, actual_receiver)
+ email_section = pdfebc.utils.read_config(self.temp_config_file.name)[
+ pdfebc.utils.EMAIL_SECTION_KEY]
+ actual_user = email_section[self.user_key]
+ actual_password = email_section[self.password_key]
+ actual_receiver = email_section[self.receiver_key]
+ actual_smtp_server = email_section[self.smtp_server_key]
+ actual_smtp_port = int(email_section[self.smtp_port_key])
+ self.assertEqual(self.user, actual_user)
+ self.assertEqual(self.password, actual_password)
+ self.assertEqual(self.receiver, actual_receiver)
+ self.assertEqual(self.smtp_server, actual_smtp_server)
+ self.assertEqual(self.smtp_port, actual_smtp_port)
- def test_read_empty_email_config(self):
- with self.assertRaises(configparser.ParsingError) as context:
- pdfebc.utils.read_email_config(self.temp_config_file.name)
+ def test_read_empty_config(self):
+ with self.assertRaises(pdfebc.utils.ConfigurationError) as context:
+ pdfebc.utils.read_config(self.temp_config_file.name)
- def test_read_email_config_no_file(self):
+ def test_read_config_no_file(self):
with tempfile.NamedTemporaryFile() as tmp:
config_path = tmp.name
with self.assertRaises(IOError) as context:
- pdfebc.utils.read_email_config(config_path)
+ pdfebc.utils.read_config(config_path)
- def test_read_email_config_without_receiver(self):
+ def test_read_config_with_only_user_and_password(self):
self.invalid_config.write(self.temp_config_file)
self.temp_config_file.flush()
self.temp_config_file.close()
- with self.assertRaises(configparser.ParsingError) as context:
- pdfebc.utils.read_email_config(self.temp_config_file.name)
+ with self.assertRaises(pdfebc.utils.ConfigurationError) as context:
+ pdfebc.utils.read_config(self.temp_config_file.name)
def test_attach_valid_files(self):
email_ = MIMEMultipart()
@@ -94,43 +152,43 @@ class UtilsTest(unittest.TestCase):
@patch('smtplib.SMTP')
def test_send_valid_email(self, mock_smtp):
- mock_smtp_instance = mock_smtp()
- user, password, receiver = "test_user", "test_password", "test_receiver"
subject = "Test e-mail"
email_ = MIMEMultipart()
- email_['From'] = user
- email_['To'] = receiver
+ email_['From'] = self.user
+ email_['To'] = self.receiver
email_['Subject'] = subject
- pdfebc.utils.send_email(user, password, email_)
+ pdfebc.utils.send_email(email_, self.valid_config)
+ mock_smtp.assert_called_once_with(self.smtp_server, self.smtp_port)
+ mock_smtp_instance = mock_smtp()
mock_smtp_instance.starttls.assert_called_once()
- mock_smtp_instance.login.assert_called_once_with(user, password)
+ mock_smtp_instance.login.assert_called_once_with(self.user, self.password)
mock_smtp_instance.send_message.assert_called_once_with(email_)
mock_smtp_instance.quit.assert_called_once()
@patch('smtplib.SMTP')
def test_send_valid_email_with_attachments(self, mock_smtp):
- mock_smtp_instance = mock_smtp()
subject = "Test e-mail"
message = "Test e-mail body"
- pdfebc.utils.send_with_attachments(self.expected_user, self.expected_password,
- self.expected_receiver, subject, message,
- self.attachment_filenames)
+ pdfebc.utils.send_with_attachments(subject, message, self.attachment_filenames,
+ self.valid_config)
+ mock_smtp.assert_called_once_with(self.smtp_server, self.smtp_port)
+ mock_smtp_instance = mock_smtp()
mock_smtp_instance.starttls.assert_called_once()
- mock_smtp_instance.login.assert_called_once_with(self.expected_user, self.expected_password)
+ mock_smtp_instance.login.assert_called_once_with(self.user, self.password)
mock_smtp_instance.send_message.assert_called_once()
mock_smtp_instance.quit.assert_called_once()
def test_create_email_config(self):
- section_key = pdfebc.utils.SECTION_KEY
- user_key = pdfebc.utils.USER_KEY
- password_key = pdfebc.utils.PASSWORD_KEY
- receiver_key = pdfebc.utils.RECIEVER_KEY
+ section_key = pdfebc.utils.EMAIL_SECTION_KEY
+ user_key = self.user_key
+ password_key = self.password_key
+ receiver_key = self.receiver_key
actual_user = self.valid_config[section_key][user_key]
actual_password = self.valid_config[section_key][password_key]
actual_receiver = self.valid_config[section_key][receiver_key]
- self.assertEqual(self.expected_user, actual_user)
- self.assertEqual(self.expected_password, actual_password)
- self.assertEqual(self.expected_receiver, actual_receiver)
+ self.assertEqual(self.user, actual_user)
+ self.assertEqual(self.password, actual_password)
+ self.assertEqual(self.receiver, actual_receiver)
def test_valid_config_exists_no_config(self):
with tempfile.NamedTemporaryFile() as file:
@@ -143,7 +201,7 @@ class UtilsTest(unittest.TestCase):
config_path = self.temp_config_file.name
self.assertTrue(pdfebc.utils.valid_config_exists(config_path))
- def test_valid_config_exists_with_config_without_receiver(self):
+ def test_valid_config_exists_with_invalid_config(self):
self.invalid_config.write(self.temp_config_file)
self.temp_config_file.close()
config_path = self.temp_config_file.name
@@ -201,12 +259,12 @@ class UtilsTest(unittest.TestCase):
pdfebc.utils.send_files_preconf(self.attachment_filenames, config_path=self.temp_config_file.name,
status_callback=mock_status_callback)
mock_smtp_instance.starttls.assert_called_once()
- mock_smtp_instance.login.assert_called_once_with(self.expected_user, self.expected_password)
+ mock_smtp_instance.login.assert_called_once_with(self.user, self.password)
mock_smtp_instance.send_message.assert_called_once()
mock_smtp_instance.quit.assert_called_once()
expected_send_message = pdfebc.utils.SENDING_PRECONF.format(
- self.expected_user, self.expected_receiver,
- pdfebc.utils.SMTP_SERVER, '\n'.join(self.attachment_filenames))
+ self.user, self.receiver,
+ self.smtp_server, self.smtp_port, '\n'.join(self.attachment_filenames))
expected_sent_message = pdfebc.utils.FILES_SENT
mock_status_callback.assert_any_call(expected_send_message)
mock_status_callback.assert_any_call(expected_sent_message)
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 1,
"test_score": 3
},
"num_modified_files": 3
} | 0.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.16
appdirs==1.4.4
babel==2.17.0
certifi==2025.1.31
charset-normalizer==3.4.1
codecov==2.1.13
coverage==7.8.0
docutils==0.21.2
exceptiongroup==1.2.2
idna==3.10
imagesize==1.4.1
importlib_metadata==8.6.1
iniconfig==2.1.0
Jinja2==3.1.6
MarkupSafe==3.0.2
packaging==24.2
-e git+https://github.com/slarse/pdfebc.git@95bdd8a5cc56f82d7f77678a88d50d7be9762995#egg=pdfebc
pluggy==1.5.0
Pygments==2.19.1
pytest==8.3.5
pytest-cov==6.0.0
requests==2.32.3
snowballstemmer==2.2.0
Sphinx==7.4.7
sphinxcontrib-applehelp==2.0.0
sphinxcontrib-devhelp==2.0.0
sphinxcontrib-htmlhelp==2.1.0
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==2.0.0
sphinxcontrib-serializinghtml==2.0.0
tomli==2.2.1
urllib3==2.3.0
zipp==3.21.0
| name: pdfebc
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.16
- appdirs==1.4.4
- babel==2.17.0
- certifi==2025.1.31
- charset-normalizer==3.4.1
- codecov==2.1.13
- coverage==7.8.0
- docutils==0.21.2
- exceptiongroup==1.2.2
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- jinja2==3.1.6
- markupsafe==3.0.2
- packaging==24.2
- pluggy==1.5.0
- pygments==2.19.1
- pytest==8.3.5
- pytest-cov==6.0.0
- requests==2.32.3
- snowballstemmer==2.2.0
- sphinx==7.4.7
- sphinxcontrib-applehelp==2.0.0
- sphinxcontrib-devhelp==2.0.0
- sphinxcontrib-htmlhelp==2.1.0
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==2.0.0
- sphinxcontrib-serializinghtml==2.0.0
- tomli==2.2.1
- urllib3==2.3.0
- zipp==3.21.0
prefix: /opt/conda/envs/pdfebc
| [
"tests/test_utils.py::UtilsTest::test_attach_valid_files",
"tests/test_utils.py::UtilsTest::test_create_config",
"tests/test_utils.py::UtilsTest::test_create_config_too_few_sections",
"tests/test_utils.py::UtilsTest::test_create_email_config",
"tests/test_utils.py::UtilsTest::test_if_callable_call_with_formatted_string_not_callable_too_few_args",
"tests/test_utils.py::UtilsTest::test_if_callable_call_with_formatted_string_not_callable_valid_args",
"tests/test_utils.py::UtilsTest::test_if_callable_call_with_formatted_string_too_few_args",
"tests/test_utils.py::UtilsTest::test_if_callable_call_with_formatted_string_valid_args",
"tests/test_utils.py::UtilsTest::test_read_config_no_file",
"tests/test_utils.py::UtilsTest::test_read_config_with_only_user_and_password",
"tests/test_utils.py::UtilsTest::test_read_empty_config",
"tests/test_utils.py::UtilsTest::test_read_valid_email_config",
"tests/test_utils.py::UtilsTest::test_send_files_preconf_valid_files",
"tests/test_utils.py::UtilsTest::test_send_valid_email",
"tests/test_utils.py::UtilsTest::test_send_valid_email_with_attachments",
"tests/test_utils.py::UtilsTest::test_valid_config_exists_no_config",
"tests/test_utils.py::UtilsTest::test_valid_config_exists_with_invalid_config",
"tests/test_utils.py::UtilsTest::test_valid_config_exists_with_valid_config",
"tests/test_utils.py::UtilsTest::test_write_valid_email_config"
]
| []
| []
| []
| MIT License | 1,347 | [
"pdfebc/cli.py",
"pdfebc/utils.py",
"pdfebc/main.py"
]
| [
"pdfebc/cli.py",
"pdfebc/utils.py",
"pdfebc/main.py"
]
|
hgrecco__pint-527 | 240349f2eefc6acc3f7b2784e6064dca98a45f60 | 2017-06-09 02:38:23 | bc754ae302b0c03d1802daddcd76c103a5fdfb67 | diff --git a/.gitignore b/.gitignore
index e39c2e8..545148c 100644
--- a/.gitignore
+++ b/.gitignore
@@ -12,3 +12,6 @@ MANIFEST
# WebDAV file system cache files
.DAV/
+
+# tags files (from ctags)
+tags
diff --git a/pint/default_en.txt b/pint/default_en.txt
index 4035883..fba87d2 100644
--- a/pint/default_en.txt
+++ b/pint/default_en.txt
@@ -72,6 +72,10 @@ cmil = 5.067075e-10 * m ** 2 = circular_mils
darcy = 9.869233e-13 * m ** 2
hectare = 100 * are = ha
+# Concentration
+[concentration] = [substance] / [volume]
+molar = mol / (1e-3 * m ** 3) = M
+
# EM
esu = 1 * erg**0.5 * centimeter**0.5 = statcoulombs = statC = franklin = Fr
esu_per_second = 1 * esu / second = statampere
| Concentration Units
Hi,
Is there any reason concentration units (in particular molarity, mol / L) is not included as a default unit in Pint? It is very easy to define on its own, but it seems well-used enough to warrant inclusion in the defaults list. If there are no objections to adding it, I'm happy to submit a PR doing so. | hgrecco/pint | diff --git a/pint/testsuite/test_issues.py b/pint/testsuite/test_issues.py
index 3b06fd6..b5c494b 100644
--- a/pint/testsuite/test_issues.py
+++ b/pint/testsuite/test_issues.py
@@ -39,7 +39,6 @@ class TestIssues(QuantityTestCase):
def test_issue29(self):
ureg = UnitRegistry()
- ureg.define('molar = mole / liter = M')
t = 4 * ureg('mM')
self.assertEqual(t.magnitude, 4)
self.assertEqual(t._units, UnitsContainer(millimolar=1))
@@ -561,4 +560,4 @@ class TestIssuesNP(QuantityTestCase):
a = np.asarray([1, 2, 3])
q = [1, 2, 3] * ureg.dimensionless
p = (q ** q).m
- np.testing.assert_array_equal(p, a ** a)
\ No newline at end of file
+ np.testing.assert_array_equal(p, a ** a)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 2
} | 0.8 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest",
"coverage"
],
"pre_install": null,
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | coverage==7.8.0
exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
packaging @ file:///croot/packaging_1734472117206/work
-e git+https://github.com/hgrecco/pint.git@240349f2eefc6acc3f7b2784e6064dca98a45f60#egg=Pint
pluggy @ file:///croot/pluggy_1733169602837/work
pytest @ file:///croot/pytest_1738938843180/work
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
| name: pint
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- coverage==7.8.0
prefix: /opt/conda/envs/pint
| [
"pint/testsuite/test_issues.py::TestIssues::test_issue29"
]
| []
| [
"pint/testsuite/test_issues.py::TestIssues::test_alternative_angstrom_definition",
"pint/testsuite/test_issues.py::TestIssues::test_angstrom_creation",
"pint/testsuite/test_issues.py::TestIssues::test_issue104",
"pint/testsuite/test_issues.py::TestIssues::test_issue105",
"pint/testsuite/test_issues.py::TestIssues::test_issue121",
"pint/testsuite/test_issues.py::TestIssues::test_issue170",
"pint/testsuite/test_issues.py::TestIssues::test_issue52",
"pint/testsuite/test_issues.py::TestIssues::test_issue523",
"pint/testsuite/test_issues.py::TestIssues::test_issue54",
"pint/testsuite/test_issues.py::TestIssues::test_issue54_related",
"pint/testsuite/test_issues.py::TestIssues::test_issue61",
"pint/testsuite/test_issues.py::TestIssues::test_issue61_notNP",
"pint/testsuite/test_issues.py::TestIssues::test_issue66",
"pint/testsuite/test_issues.py::TestIssues::test_issue66b",
"pint/testsuite/test_issues.py::TestIssues::test_issue69",
"pint/testsuite/test_issues.py::TestIssues::test_issue85",
"pint/testsuite/test_issues.py::TestIssues::test_issue86",
"pint/testsuite/test_issues.py::TestIssues::test_issue93",
"pint/testsuite/test_issues.py::TestIssues::test_issues86b",
"pint/testsuite/test_issues.py::TestIssues::test_micro_creation"
]
| []
| BSD | 1,348 | [
".gitignore",
"pint/default_en.txt"
]
| [
".gitignore",
"pint/default_en.txt"
]
|
|
maximkulkin__lollipop-53 | f7c50ac54610b7965d41f28d0ee5ee5e24dea0ee | 2017-06-09 05:22:05 | 360bbc8f9c2b6203ab5af8a3cd051f852ba8dae3 | diff --git a/lollipop/types.py b/lollipop/types.py
index 5f3ac70..3ec8a50 100644
--- a/lollipop/types.py
+++ b/lollipop/types.py
@@ -1,6 +1,6 @@
from lollipop.errors import ValidationError, ValidationErrorBuilder, \
ErrorMessagesMixin, merge_errors
-from lollipop.utils import is_list, is_dict, make_context_aware, \
+from lollipop.utils import is_sequence, is_mapping, make_context_aware, \
constant, identity, OpenStruct
from lollipop.compat import string_types, int_types, iteritems, OrderedDict
import datetime
@@ -377,7 +377,7 @@ class List(Type):
self._fail('required')
# TODO: Make more intelligent check for collections
- if not is_list(data):
+ if not is_sequence(data):
self._fail('invalid')
errors_builder = ValidationErrorBuilder()
@@ -395,7 +395,7 @@ class List(Type):
if value is MISSING or value is None:
self._fail('required')
- if not is_list(value):
+ if not is_sequence(value):
self._fail('invalid')
errors_builder = ValidationErrorBuilder()
@@ -422,6 +422,7 @@ class Tuple(Type):
Example: ::
Tuple([String(), Integer(), Boolean()]).load(['foo', 123, False])
+ # => ('foo', 123, False)
:param list item_types: List of item types.
:param kwargs: Same keyword arguments as for :class:`Type`.
@@ -439,11 +440,13 @@ class Tuple(Type):
if data is MISSING or data is None:
self._fail('required')
- if not is_list(data):
+ if not is_sequence(data):
self._fail('invalid')
if len(data) != len(self.item_types):
- self._fail('invalid_length', expected_length=len(self.item_types))
+ self._fail('invalid_length',
+ expected_length=len(self.item_types),
+ actual_length=len(data))
errors_builder = ValidationErrorBuilder()
result = []
@@ -454,13 +457,13 @@ class Tuple(Type):
errors_builder.add_errors({idx: ve.messages})
errors_builder.raise_errors()
- return super(Tuple, self).load(result, *args, **kwargs)
+ return tuple(super(Tuple, self).load(result, *args, **kwargs))
def dump(self, value, *args, **kwargs):
if value is MISSING or value is None:
self._fail('required')
- if not is_list(value):
+ if not is_sequence(value):
self._fail('invalid')
if len(value) != len(self.item_types):
@@ -560,7 +563,7 @@ class OneOf(Type):
if data is MISSING or data is None:
self._fail('required')
- if is_dict(self.types) and self.load_hint:
+ if is_mapping(self.types) and self.load_hint:
type_id = self.load_hint(data)
if type_id not in self.types:
self._fail('unknown_type_id', type_id=type_id)
@@ -569,7 +572,8 @@ class OneOf(Type):
result = item_type.load(data, *args, **kwargs)
return super(OneOf, self).load(result, *args, **kwargs)
else:
- for item_type in (self.types.values() if is_dict(self.types) else self.types):
+ for item_type in (self.types.values()
+ if is_mapping(self.types) else self.types):
try:
result = item_type.load(data, *args, **kwargs)
return super(OneOf, self).load(result, *args, **kwargs)
@@ -582,7 +586,7 @@ class OneOf(Type):
if data is MISSING or data is None:
self._fail('required')
- if is_dict(self.types) and self.dump_hint:
+ if is_mapping(self.types) and self.dump_hint:
type_id = self.dump_hint(data)
if type_id not in self.types:
self._fail('unknown_type_id', type_id=type_id)
@@ -591,7 +595,8 @@ class OneOf(Type):
result = item_type.dump(data, *args, **kwargs)
return super(OneOf, self).dump(result, *args, **kwargs)
else:
- for item_type in (self.types.values() if is_dict(self.types) else self.types):
+ for item_type in (self.types.values()
+ if is_mapping(self.types) else self.types):
try:
result = item_type.dump(data, *args, **kwargs)
return super(OneOf, self).dump(result, *args, **kwargs)
@@ -668,7 +673,7 @@ class Dict(Type):
if data is MISSING or data is None:
self._fail('required')
- if not is_dict(data):
+ if not is_mapping(data):
self._fail('invalid')
errors_builder = ValidationErrorBuilder()
@@ -695,7 +700,7 @@ class Dict(Type):
if value is MISSING or value is None:
self._fail('required')
- if not is_dict(value):
+ if not is_mapping(value):
self._fail('invalid')
errors_builder = ValidationErrorBuilder()
@@ -1091,10 +1096,10 @@ class Object(Type):
if isinstance(bases_or_fields, Type):
bases = [bases_or_fields]
- if is_list(bases_or_fields) and \
+ if is_sequence(bases_or_fields) and \
all([isinstance(base, Type) for base in bases_or_fields]):
bases = bases_or_fields
- elif is_list(bases_or_fields) or is_dict(bases_or_fields):
+ elif is_sequence(bases_or_fields) or is_mapping(bases_or_fields):
if fields is None:
bases = []
fields = bases_or_fields
@@ -1108,9 +1113,9 @@ class Object(Type):
self._allow_extra_fields = allow_extra_fields
self._immutable = immutable
self._ordered = ordered
- if only is not None and not is_list(only):
+ if only is not None and not is_sequence(only):
only = [only]
- if exclude is not None and not is_list(exclude):
+ if exclude is not None and not is_sequence(exclude):
exclude = [exclude]
self._only = only
self._exclude = exclude
@@ -1155,7 +1160,8 @@ class Object(Type):
if fields is not None:
all_fields += [
(name, self._normalize_field(field))
- for name, field in (iteritems(fields) if is_dict(fields) else fields)
+ for name, field in (iteritems(fields)
+ if is_mapping(fields) else fields)
]
return OrderedDict(all_fields)
@@ -1164,7 +1170,7 @@ class Object(Type):
if data is MISSING or data is None:
self._fail('required')
- if not is_dict(data):
+ if not is_mapping(data):
self._fail('invalid')
errors_builder = ValidationErrorBuilder()
@@ -1213,7 +1219,7 @@ class Object(Type):
if data is None:
self._fail('required')
- if not is_dict(data):
+ if not is_mapping(data):
self._fail('invalid')
errors_builder = ValidationErrorBuilder()
@@ -1528,7 +1534,7 @@ def validated_type(base_type, name=None, validate=None):
"""
if validate is None:
validate = []
- if not is_list(validate):
+ if not is_sequence(validate):
validate = [validate]
class ValidatedSubtype(base_type):
diff --git a/lollipop/utils.py b/lollipop/utils.py
index 596706c..fa0bb4b 100644
--- a/lollipop/utils.py
+++ b/lollipop/utils.py
@@ -1,6 +1,7 @@
import inspect
import re
from lollipop.compat import DictMixin, iterkeys
+import collections
def identity(value):
@@ -14,14 +15,18 @@ def constant(value):
return func
-def is_list(value):
+def is_sequence(value):
"""Returns True if value supports list interface; False - otherwise"""
- return isinstance(value, list)
+ return isinstance(value, collections.Sequence)
-
-def is_dict(value):
+def is_mapping(value):
"""Returns True if value supports dict interface; False - otherwise"""
- return isinstance(value, dict)
+ return isinstance(value, collections.Mapping)
+
+
+# Backward compatibility
+is_list = is_sequence
+is_dict = is_mapping
def make_context_aware(func, numargs):
diff --git a/lollipop/validators.py b/lollipop/validators.py
index 8652d96..c29b1da 100644
--- a/lollipop/validators.py
+++ b/lollipop/validators.py
@@ -1,7 +1,7 @@
from lollipop.errors import ValidationError, ValidationErrorBuilder, \
ErrorMessagesMixin
from lollipop.compat import string_types, iteritems
-from lollipop.utils import make_context_aware, is_list, identity
+from lollipop.utils import make_context_aware, is_sequence, identity
import re
@@ -291,7 +291,7 @@ class Unique(Validator):
self._error_messages['unique'] = error
def __call__(self, value, context=None):
- if not is_list(value):
+ if not is_sequence(value):
self._fail('invalid')
seen = set()
@@ -318,12 +318,12 @@ class Each(Validator):
def __init__(self, validators, **kwargs):
super(Validator, self).__init__(**kwargs)
- if not is_list(validators):
+ if not is_sequence(validators):
validators = [validators]
self.validators = validators
def __call__(self, value, context=None):
- if not is_list(value):
+ if not is_sequence(value):
self._fail('invalid', data=value)
error_builder = ValidationErrorBuilder()
| Misleading Tuple type
Is it on purpose that the Tuple type dumps into / loads from a list instead of a tuple? If yes, I think the naming is a bit misleading…
```
Tuple([String(), Integer(), Boolean()]).load(('foo', 123, False))
Traceback (most recent call last):
File "<input>", line 1, in <module>
File "site-packages\lollipop\types.py", line 443, in load self._fail('invalid')
File "site-packages\lollipop\errors.py", line 63, in _fail raise ValidationError(msg)
lollipop.errors.ValidationError: Invalid data: 'Value should be list'
``` | maximkulkin/lollipop | diff --git a/tests/test_types.py b/tests/test_types.py
index 2ab07e1..e652fb6 100644
--- a/tests/test_types.py
+++ b/tests/test_types.py
@@ -509,7 +509,7 @@ class TestList(NameDescriptionTestsMixin, RequiredTestsMixin, ValidationTestsMix
def test_loading_non_list_value_raises_ValidationError(self):
with pytest.raises(ValidationError) as exc_info:
- List(String()).load('1, 2, 3')
+ List(String()).load(123)
assert exc_info.value.messages == List.default_error_messages['invalid']
def test_loading_list_value_with_items_of_incorrect_type_raises_ValidationError(self):
@@ -542,9 +542,13 @@ class TestList(NameDescriptionTestsMixin, RequiredTestsMixin, ValidationTestsMix
def test_dumping_list_value(self):
assert List(String()).dump(['foo', 'bar', 'baz']) == ['foo', 'bar', 'baz']
+ def test_dumping_sequence_value(self):
+ assert List(String()).dump(('foo', 'bar', 'baz')) == ['foo', 'bar', 'baz']
+ assert List(String()).dump('foobar') == ['f', 'o', 'o', 'b', 'a', 'r']
+
def test_dumping_non_list_value_raises_ValidationError(self):
with pytest.raises(ValidationError) as exc_info:
- List(String()).dump('1, 2, 3')
+ List(String()).dump(123)
assert exc_info.value.messages == List.default_error_messages['invalid']
def test_dumping_list_value_with_items_of_incorrect_type_raises_ValidationError(self):
@@ -563,15 +567,15 @@ class TestList(NameDescriptionTestsMixin, RequiredTestsMixin, ValidationTestsMix
class TestTuple(NameDescriptionTestsMixin, RequiredTestsMixin, ValidationTestsMixin):
tested_type = partial(Tuple, [Integer(), Integer()])
valid_data = [123, 456]
- valid_value = [123, 456]
+ valid_value = (123, 456)
def test_loading_tuple_with_values_of_same_type(self):
assert Tuple([Integer(), Integer()]).load([123, 456]) == \
- [123, 456]
+ (123, 456)
def test_loading_tuple_with_values_of_different_type(self):
assert Tuple([String(), Integer(), Boolean()]).load(['foo', 123, False]) == \
- ['foo', 123, False]
+ ('foo', 123, False)
def test_loading_non_tuple_value_raises_ValidationError(self):
with pytest.raises(ValidationError) as exc_info:
@@ -596,23 +600,35 @@ class TestTuple(NameDescriptionTestsMixin, RequiredTestsMixin, ValidationTestsMi
assert inner_type.load_context == context
def test_dump_tuple(self):
- assert Tuple([Integer(), Integer()]).dump([123, 456]) == [123, 456]
+ assert Tuple([String(), Integer()]).dump(('hello', 123)) == ['hello', 123]
+
+ def test_dump_sequence(self):
+ assert Tuple([String(), Integer()]).dump(['hello', 123]) == ['hello', 123]
def test_dumping_non_tuple_raises_ValidationError(self):
with pytest.raises(ValidationError) as exc_info:
- Tuple(String()).dump('foo')
+ Tuple([String()]).dump(123)
assert exc_info.value.messages == Tuple.default_error_messages['invalid']
+ def test_dumping_sequence_of_incorrect_length_raises_ValidationError(self):
+ with pytest.raises(ValidationError) as exc_info:
+ Tuple([String(), Integer()]).dump(['hello', 123, 456])
+ assert exc_info.value.messages == \
+ Tuple.default_error_messages['invalid_length'].format(
+ expected_length=2,
+ actual_length=3,
+ )
+
def test_dumping_tuple_with_items_of_incorrect_type_raises_ValidationError(self):
with pytest.raises(ValidationError) as exc_info:
- Tuple([String(), String()]).dump([123, 456])
+ Tuple([String(), String()]).dump(('hello', 456))
message = String.default_error_messages['invalid']
- assert exc_info.value.messages == {0: message, 1: message}
+ assert exc_info.value.messages == {1: message}
def test_dumping_tuple_passes_context_to_inner_type_dump(self):
inner_type = SpyType()
context = object()
- Tuple([inner_type, inner_type]).dump(['foo','foo'], context)
+ Tuple([inner_type, inner_type]).dump(('foo','foo'), context)
assert inner_type.dump_context == context
diff --git a/tests/test_validators.py b/tests/test_validators.py
index fa84610..5d6421a 100644
--- a/tests/test_validators.py
+++ b/tests/test_validators.py
@@ -318,7 +318,7 @@ class TestRegexp:
class TestUnique:
def test_raising_ValidationError_if_value_is_not_collection(self):
with raises(ValidationError) as exc_info:
- Unique()('foo')
+ Unique()(123)
assert exc_info.value.messages == Unique.default_error_messages['invalid']
def test_matching_empty_collection(self):
@@ -371,7 +371,7 @@ is_small = Predicate(lambda x: x <= 5, 'Value should be small')
class TestEach:
def test_raising_ValidationError_if_value_is_not_collection(self):
with raises(ValidationError) as exc_info:
- Each(lambda x: x)('foo')
+ Each(lambda x: x)(123)
assert exc_info.value.messages == Each.default_error_messages['invalid']
def test_matching_empty_collections(self):
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 0,
"test_score": 2
},
"num_modified_files": 3
} | 1.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest"
],
"pre_install": [],
"python": "3.6",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
certifi==2021.5.30
importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
-e git+https://github.com/maximkulkin/lollipop.git@f7c50ac54610b7965d41f28d0ee5ee5e24dea0ee#egg=lollipop
more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
pytest==6.2.4
toml @ file:///tmp/build/80754af9/toml_1616166611790/work
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
| name: lollipop
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=21.4.0=pyhd3eb1b0_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- importlib-metadata=4.8.1=py36h06a4308_0
- importlib_metadata=4.8.1=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- more-itertools=8.12.0=pyhd3eb1b0_0
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pip=21.2.2=py36h06a4308_0
- pluggy=0.13.1=py36h06a4308_0
- py=1.11.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pytest=6.2.4=py36h06a4308_2
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- toml=0.10.2=pyhd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zipp=3.6.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
prefix: /opt/conda/envs/lollipop
| [
"tests/test_types.py::TestList::test_dumping_sequence_value",
"tests/test_types.py::TestTuple::test_loading_does_not_raise_ValidationError_if_validators_succeed",
"tests/test_types.py::TestTuple::test_loading_tuple_with_values_of_same_type",
"tests/test_types.py::TestTuple::test_loading_tuple_with_values_of_different_type",
"tests/test_types.py::TestTuple::test_dump_tuple",
"tests/test_types.py::TestTuple::test_dumping_tuple_with_items_of_incorrect_type_raises_ValidationError",
"tests/test_types.py::TestTuple::test_dumping_tuple_passes_context_to_inner_type_dump"
]
| []
| [
"tests/test_types.py::TestString::test_name",
"tests/test_types.py::TestString::test_description",
"tests/test_types.py::TestString::test_loading_missing_value_raises_required_error",
"tests/test_types.py::TestString::test_loading_None_raises_required_error",
"tests/test_types.py::TestString::test_dumping_missing_value_raises_required_error",
"tests/test_types.py::TestString::test_dumping_None_raises_required_error",
"tests/test_types.py::TestString::test_loading_does_not_raise_ValidationError_if_validators_succeed",
"tests/test_types.py::TestString::test_loading_raises_ValidationError_if_validator_fails",
"tests/test_types.py::TestString::test_loading_raises_ValidationError_with_combined_messages_if_multiple_validators_fail",
"tests/test_types.py::TestString::test_loading_passes_context_to_validator",
"tests/test_types.py::TestString::test_validation_returns_None_if_validators_succeed",
"tests/test_types.py::TestString::test_validation_returns_errors_if_validator_fails",
"tests/test_types.py::TestString::test_validation_returns_combined_errors_if_multiple_validators_fail",
"tests/test_types.py::TestString::test_loading_string_value",
"tests/test_types.py::TestString::test_loading_non_string_value_raises_ValidationError",
"tests/test_types.py::TestString::test_dumping_string_value",
"tests/test_types.py::TestString::test_dumping_non_string_value_raises_ValidationError",
"tests/test_types.py::TestNumber::test_name",
"tests/test_types.py::TestNumber::test_description",
"tests/test_types.py::TestNumber::test_loading_missing_value_raises_required_error",
"tests/test_types.py::TestNumber::test_loading_None_raises_required_error",
"tests/test_types.py::TestNumber::test_dumping_missing_value_raises_required_error",
"tests/test_types.py::TestNumber::test_dumping_None_raises_required_error",
"tests/test_types.py::TestNumber::test_loading_does_not_raise_ValidationError_if_validators_succeed",
"tests/test_types.py::TestNumber::test_loading_raises_ValidationError_if_validator_fails",
"tests/test_types.py::TestNumber::test_loading_raises_ValidationError_with_combined_messages_if_multiple_validators_fail",
"tests/test_types.py::TestNumber::test_loading_passes_context_to_validator",
"tests/test_types.py::TestNumber::test_validation_returns_None_if_validators_succeed",
"tests/test_types.py::TestNumber::test_validation_returns_errors_if_validator_fails",
"tests/test_types.py::TestNumber::test_validation_returns_combined_errors_if_multiple_validators_fail",
"tests/test_types.py::TestNumber::test_loading_float_value",
"tests/test_types.py::TestNumber::test_loading_non_numeric_value_raises_ValidationError",
"tests/test_types.py::TestNumber::test_dumping_float_value",
"tests/test_types.py::TestNumber::test_dumping_non_numeric_value_raises_ValidationError",
"tests/test_types.py::TestInteger::test_loading_integer_value",
"tests/test_types.py::TestInteger::test_loading_long_value",
"tests/test_types.py::TestInteger::test_loading_non_numeric_value_raises_ValidationError",
"tests/test_types.py::TestInteger::test_dumping_integer_value",
"tests/test_types.py::TestInteger::test_dumping_long_value",
"tests/test_types.py::TestInteger::test_dumping_non_numeric_value_raises_ValidationError",
"tests/test_types.py::TestFloat::test_loading_float_value",
"tests/test_types.py::TestFloat::test_loading_non_numeric_value_raises_ValidationError",
"tests/test_types.py::TestFloat::test_dumping_float_value",
"tests/test_types.py::TestFloat::test_dumping_non_numeric_value_raises_ValidationError",
"tests/test_types.py::TestBoolean::test_name",
"tests/test_types.py::TestBoolean::test_description",
"tests/test_types.py::TestBoolean::test_loading_missing_value_raises_required_error",
"tests/test_types.py::TestBoolean::test_loading_None_raises_required_error",
"tests/test_types.py::TestBoolean::test_dumping_missing_value_raises_required_error",
"tests/test_types.py::TestBoolean::test_dumping_None_raises_required_error",
"tests/test_types.py::TestBoolean::test_loading_does_not_raise_ValidationError_if_validators_succeed",
"tests/test_types.py::TestBoolean::test_loading_raises_ValidationError_if_validator_fails",
"tests/test_types.py::TestBoolean::test_loading_raises_ValidationError_with_combined_messages_if_multiple_validators_fail",
"tests/test_types.py::TestBoolean::test_loading_passes_context_to_validator",
"tests/test_types.py::TestBoolean::test_validation_returns_None_if_validators_succeed",
"tests/test_types.py::TestBoolean::test_validation_returns_errors_if_validator_fails",
"tests/test_types.py::TestBoolean::test_validation_returns_combined_errors_if_multiple_validators_fail",
"tests/test_types.py::TestBoolean::test_loading_boolean_value",
"tests/test_types.py::TestBoolean::test_loading_non_boolean_value_raises_ValidationError",
"tests/test_types.py::TestBoolean::test_dumping_boolean_value",
"tests/test_types.py::TestBoolean::test_dumping_non_boolean_value_raises_ValidationError",
"tests/test_types.py::TestDateTime::test_name",
"tests/test_types.py::TestDateTime::test_description",
"tests/test_types.py::TestDateTime::test_loading_missing_value_raises_required_error",
"tests/test_types.py::TestDateTime::test_loading_None_raises_required_error",
"tests/test_types.py::TestDateTime::test_dumping_missing_value_raises_required_error",
"tests/test_types.py::TestDateTime::test_dumping_None_raises_required_error",
"tests/test_types.py::TestDateTime::test_loading_does_not_raise_ValidationError_if_validators_succeed",
"tests/test_types.py::TestDateTime::test_loading_raises_ValidationError_if_validator_fails",
"tests/test_types.py::TestDateTime::test_loading_raises_ValidationError_with_combined_messages_if_multiple_validators_fail",
"tests/test_types.py::TestDateTime::test_loading_passes_context_to_validator",
"tests/test_types.py::TestDateTime::test_validation_returns_None_if_validators_succeed",
"tests/test_types.py::TestDateTime::test_validation_returns_errors_if_validator_fails",
"tests/test_types.py::TestDateTime::test_validation_returns_combined_errors_if_multiple_validators_fail",
"tests/test_types.py::TestDateTime::test_loading_string_date",
"tests/test_types.py::TestDateTime::test_loading_using_predefined_format",
"tests/test_types.py::TestDateTime::test_loading_using_custom_format",
"tests/test_types.py::TestDateTime::test_loading_raises_ValidationError_if_value_is_not_string",
"tests/test_types.py::TestDateTime::test_loading_raises_ValidationError_if_value_string_does_not_match_date_format",
"tests/test_types.py::TestDateTime::test_customizing_error_message_if_value_string_does_not_match_date_format",
"tests/test_types.py::TestDateTime::test_loading_passes_deserialized_date_to_validator",
"tests/test_types.py::TestDateTime::test_dumping_date",
"tests/test_types.py::TestDateTime::test_dumping_using_predefined_format",
"tests/test_types.py::TestDateTime::test_dumping_using_custom_format",
"tests/test_types.py::TestDateTime::test_dumping_raises_ValidationError_if_value_is_not_string",
"tests/test_types.py::TestDateTime::test_customizing_error_message_if_value_is_not_string",
"tests/test_types.py::TestDate::test_name",
"tests/test_types.py::TestDate::test_description",
"tests/test_types.py::TestDate::test_loading_missing_value_raises_required_error",
"tests/test_types.py::TestDate::test_loading_None_raises_required_error",
"tests/test_types.py::TestDate::test_dumping_missing_value_raises_required_error",
"tests/test_types.py::TestDate::test_dumping_None_raises_required_error",
"tests/test_types.py::TestDate::test_loading_does_not_raise_ValidationError_if_validators_succeed",
"tests/test_types.py::TestDate::test_loading_raises_ValidationError_if_validator_fails",
"tests/test_types.py::TestDate::test_loading_raises_ValidationError_with_combined_messages_if_multiple_validators_fail",
"tests/test_types.py::TestDate::test_loading_passes_context_to_validator",
"tests/test_types.py::TestDate::test_validation_returns_None_if_validators_succeed",
"tests/test_types.py::TestDate::test_validation_returns_errors_if_validator_fails",
"tests/test_types.py::TestDate::test_validation_returns_combined_errors_if_multiple_validators_fail",
"tests/test_types.py::TestDate::test_loading_string_date",
"tests/test_types.py::TestDate::test_loading_using_predefined_format",
"tests/test_types.py::TestDate::test_loading_using_custom_format",
"tests/test_types.py::TestDate::test_loading_raises_ValidationError_if_value_is_not_string",
"tests/test_types.py::TestDate::test_loading_raises_ValidationError_if_value_string_does_not_match_date_format",
"tests/test_types.py::TestDate::test_customizing_error_message_if_value_string_does_not_match_date_format",
"tests/test_types.py::TestDate::test_loading_passes_deserialized_date_to_validator",
"tests/test_types.py::TestDate::test_dumping_date",
"tests/test_types.py::TestDate::test_dumping_using_predefined_format",
"tests/test_types.py::TestDate::test_dumping_using_custom_format",
"tests/test_types.py::TestDate::test_dumping_raises_ValidationError_if_value_is_not_string",
"tests/test_types.py::TestDate::test_customizing_error_message_if_value_is_not_string",
"tests/test_types.py::TestTime::test_name",
"tests/test_types.py::TestTime::test_description",
"tests/test_types.py::TestTime::test_loading_missing_value_raises_required_error",
"tests/test_types.py::TestTime::test_loading_None_raises_required_error",
"tests/test_types.py::TestTime::test_dumping_missing_value_raises_required_error",
"tests/test_types.py::TestTime::test_dumping_None_raises_required_error",
"tests/test_types.py::TestTime::test_loading_does_not_raise_ValidationError_if_validators_succeed",
"tests/test_types.py::TestTime::test_loading_raises_ValidationError_if_validator_fails",
"tests/test_types.py::TestTime::test_loading_raises_ValidationError_with_combined_messages_if_multiple_validators_fail",
"tests/test_types.py::TestTime::test_loading_passes_context_to_validator",
"tests/test_types.py::TestTime::test_validation_returns_None_if_validators_succeed",
"tests/test_types.py::TestTime::test_validation_returns_errors_if_validator_fails",
"tests/test_types.py::TestTime::test_validation_returns_combined_errors_if_multiple_validators_fail",
"tests/test_types.py::TestTime::test_loading_string_date",
"tests/test_types.py::TestTime::test_loading_using_custom_format",
"tests/test_types.py::TestTime::test_loading_raises_ValidationError_if_value_is_not_string",
"tests/test_types.py::TestTime::test_loading_raises_ValidationError_if_value_string_does_not_match_date_format",
"tests/test_types.py::TestTime::test_customizing_error_message_if_value_string_does_not_match_date_format",
"tests/test_types.py::TestTime::test_loading_passes_deserialized_date_to_validator",
"tests/test_types.py::TestTime::test_dumping_date",
"tests/test_types.py::TestTime::test_dumping_using_custom_format",
"tests/test_types.py::TestTime::test_dumping_raises_ValidationError_if_value_is_not_string",
"tests/test_types.py::TestTime::test_customizing_error_message_if_value_is_not_string",
"tests/test_types.py::TestList::test_name",
"tests/test_types.py::TestList::test_description",
"tests/test_types.py::TestList::test_loading_missing_value_raises_required_error",
"tests/test_types.py::TestList::test_loading_None_raises_required_error",
"tests/test_types.py::TestList::test_dumping_missing_value_raises_required_error",
"tests/test_types.py::TestList::test_dumping_None_raises_required_error",
"tests/test_types.py::TestList::test_loading_does_not_raise_ValidationError_if_validators_succeed",
"tests/test_types.py::TestList::test_loading_raises_ValidationError_if_validator_fails",
"tests/test_types.py::TestList::test_loading_raises_ValidationError_with_combined_messages_if_multiple_validators_fail",
"tests/test_types.py::TestList::test_loading_passes_context_to_validator",
"tests/test_types.py::TestList::test_validation_returns_None_if_validators_succeed",
"tests/test_types.py::TestList::test_validation_returns_errors_if_validator_fails",
"tests/test_types.py::TestList::test_validation_returns_combined_errors_if_multiple_validators_fail",
"tests/test_types.py::TestList::test_loading_list_value",
"tests/test_types.py::TestList::test_loading_non_list_value_raises_ValidationError",
"tests/test_types.py::TestList::test_loading_list_value_with_items_of_incorrect_type_raises_ValidationError",
"tests/test_types.py::TestList::test_loading_list_value_with_items_that_have_validation_errors_raises_ValidationError",
"tests/test_types.py::TestList::test_loading_does_not_validate_whole_list_if_items_have_errors",
"tests/test_types.py::TestList::test_loading_passes_context_to_inner_type_load",
"tests/test_types.py::TestList::test_dumping_list_value",
"tests/test_types.py::TestList::test_dumping_non_list_value_raises_ValidationError",
"tests/test_types.py::TestList::test_dumping_list_value_with_items_of_incorrect_type_raises_ValidationError",
"tests/test_types.py::TestList::test_dumping_passes_context_to_inner_type_dump",
"tests/test_types.py::TestTuple::test_name",
"tests/test_types.py::TestTuple::test_description",
"tests/test_types.py::TestTuple::test_loading_missing_value_raises_required_error",
"tests/test_types.py::TestTuple::test_loading_None_raises_required_error",
"tests/test_types.py::TestTuple::test_dumping_missing_value_raises_required_error",
"tests/test_types.py::TestTuple::test_dumping_None_raises_required_error",
"tests/test_types.py::TestTuple::test_loading_raises_ValidationError_if_validator_fails",
"tests/test_types.py::TestTuple::test_loading_raises_ValidationError_with_combined_messages_if_multiple_validators_fail",
"tests/test_types.py::TestTuple::test_loading_passes_context_to_validator",
"tests/test_types.py::TestTuple::test_validation_returns_None_if_validators_succeed",
"tests/test_types.py::TestTuple::test_validation_returns_errors_if_validator_fails",
"tests/test_types.py::TestTuple::test_validation_returns_combined_errors_if_multiple_validators_fail",
"tests/test_types.py::TestTuple::test_loading_non_tuple_value_raises_ValidationError",
"tests/test_types.py::TestTuple::test_loading_tuple_with_items_of_incorrect_type_raises_ValidationError",
"tests/test_types.py::TestTuple::test_loading_tuple_with_items_that_have_validation_errors_raises_ValidationErrors",
"tests/test_types.py::TestTuple::test_loading_passes_context_to_inner_type_load",
"tests/test_types.py::TestTuple::test_dump_sequence",
"tests/test_types.py::TestTuple::test_dumping_non_tuple_raises_ValidationError",
"tests/test_types.py::TestTuple::test_dumping_sequence_of_incorrect_length_raises_ValidationError",
"tests/test_types.py::TestDict::test_name",
"tests/test_types.py::TestDict::test_description",
"tests/test_types.py::TestDict::test_loading_missing_value_raises_required_error",
"tests/test_types.py::TestDict::test_loading_None_raises_required_error",
"tests/test_types.py::TestDict::test_dumping_missing_value_raises_required_error",
"tests/test_types.py::TestDict::test_dumping_None_raises_required_error",
"tests/test_types.py::TestDict::test_loading_does_not_raise_ValidationError_if_validators_succeed",
"tests/test_types.py::TestDict::test_loading_raises_ValidationError_if_validator_fails",
"tests/test_types.py::TestDict::test_loading_raises_ValidationError_with_combined_messages_if_multiple_validators_fail",
"tests/test_types.py::TestDict::test_loading_passes_context_to_validator",
"tests/test_types.py::TestDict::test_validation_returns_None_if_validators_succeed",
"tests/test_types.py::TestDict::test_validation_returns_errors_if_validator_fails",
"tests/test_types.py::TestDict::test_validation_returns_combined_errors_if_multiple_validators_fail",
"tests/test_types.py::TestDict::test_loading_dict_with_custom_key_type",
"tests/test_types.py::TestDict::test_loading_accepts_any_key_if_key_type_is_not_specified",
"tests/test_types.py::TestDict::test_loading_dict_with_values_of_the_same_type",
"tests/test_types.py::TestDict::test_loading_dict_with_values_of_different_types",
"tests/test_types.py::TestDict::test_loading_accepts_any_value_if_value_types_are_not_specified",
"tests/test_types.py::TestDict::test_loading_non_dict_value_raises_ValidationError",
"tests/test_types.py::TestDict::test_loading_dict_with_incorrect_key_value_raises_ValidationError",
"tests/test_types.py::TestDict::test_loading_dict_with_items_of_incorrect_type_raises_ValidationError",
"tests/test_types.py::TestDict::test_loading_dict_with_items_that_have_validation_errors_raises_ValidationError",
"tests/test_types.py::TestDict::test_loading_does_not_validate_whole_list_if_items_have_errors",
"tests/test_types.py::TestDict::test_loading_dict_with_incorrect_key_value_and_incorrect_value_raises_ValidationError_with_both_errors",
"tests/test_types.py::TestDict::test_loading_passes_context_to_inner_type_load",
"tests/test_types.py::TestDict::test_dumping_dict_with_custom_key_type",
"tests/test_types.py::TestDict::test_dumping_accepts_any_key_if_key_type_is_not_specified",
"tests/test_types.py::TestDict::test_dumping_dict_with_values_of_the_same_type",
"tests/test_types.py::TestDict::test_dumping_dict_with_values_of_different_types",
"tests/test_types.py::TestDict::test_dumping_accepts_any_value_if_value_types_are_not_specified",
"tests/test_types.py::TestDict::test_dumping_non_dict_value_raises_ValidationError",
"tests/test_types.py::TestDict::test_dumping_dict_with_incorrect_key_value_raises_ValidationError",
"tests/test_types.py::TestDict::test_dumping_dict_with_items_of_incorrect_type_raises_ValidationError",
"tests/test_types.py::TestDict::test_dumping_dict_with_incorrect_key_value_and_incorrect_value_raises_ValidationError_with_both_errors",
"tests/test_types.py::TestDict::test_dumping_passes_context_to_inner_type_dump",
"tests/test_types.py::TestOneOf::test_loading_values_of_one_of_listed_types",
"tests/test_types.py::TestOneOf::test_loading_raises_ValidationError_if_value_is_of_unlisted_type",
"tests/test_types.py::TestOneOf::test_loading_raises_ValidationError_if_deserialized_value_has_errors",
"tests/test_types.py::TestOneOf::test_loading_raises_ValidationError_if_type_hint_is_unknown",
"tests/test_types.py::TestOneOf::test_loading_with_type_hinting",
"tests/test_types.py::TestOneOf::test_loading_with_type_hinting_raises_ValidationError_if_deserialized_value_has_errors",
"tests/test_types.py::TestOneOf::test_dumping_values_of_one_of_listed_types",
"tests/test_types.py::TestOneOf::test_dumping_raises_ValidationError_if_value_is_of_unlisted_type",
"tests/test_types.py::TestOneOf::test_dumping_raises_ValidationError_if_type_hint_is_unknown",
"tests/test_types.py::TestOneOf::test_dumping_raises_ValidationError_if_serialized_value_has_errors",
"tests/test_types.py::TestOneOf::test_dumping_with_type_hinting",
"tests/test_types.py::TestOneOf::test_dumping_with_type_hinting_raises_ValidationError_if_deserialized_value_has_errors",
"tests/test_types.py::TestAttributeField::test_getting_value_returns_value_of_given_object_attribute",
"tests/test_types.py::TestAttributeField::test_getting_value_returns_value_of_configured_object_attribute",
"tests/test_types.py::TestAttributeField::test_getting_value_returns_value_of_field_name_transformed_with_given_name_transformation",
"tests/test_types.py::TestAttributeField::test_setting_value_sets_given_value_to_given_object_attribute",
"tests/test_types.py::TestAttributeField::test_setting_value_sets_given_value_to_configured_object_attribute",
"tests/test_types.py::TestAttributeField::test_setting_value_sets_given_value_to_field_name_transformed_with_given_name_transformation",
"tests/test_types.py::TestAttributeField::test_loading_value_with_field_type",
"tests/test_types.py::TestAttributeField::test_loading_given_attribute_regardless_of_attribute_override",
"tests/test_types.py::TestAttributeField::test_loading_missing_value_if_attribute_does_not_exist",
"tests/test_types.py::TestAttributeField::test_loading_passes_context_to_field_type_load",
"tests/test_types.py::TestAttributeField::test_dumping_given_attribute_from_object",
"tests/test_types.py::TestAttributeField::test_dumping_object_attribute_with_field_type",
"tests/test_types.py::TestAttributeField::test_dumping_a_different_attribute_from_object",
"tests/test_types.py::TestAttributeField::test_dumping_passes_context_to_field_type_dump",
"tests/test_types.py::TestMethodField::test_get_value_returns_result_of_calling_configured_method_on_object",
"tests/test_types.py::TestMethodField::test_get_value_returns_result_of_calling_method_calculated_by_given_function_on_object",
"tests/test_types.py::TestMethodField::test_get_value_returns_MISSING_if_get_method_is_not_specified",
"tests/test_types.py::TestMethodField::test_get_value_raises_ValueError_if_method_does_not_exist",
"tests/test_types.py::TestMethodField::test_get_value_raises_ValueError_if_property_is_not_callable",
"tests/test_types.py::TestMethodField::test_get_value_passes_context_to_method",
"tests/test_types.py::TestMethodField::test_set_value_calls_configure_method_on_object",
"tests/test_types.py::TestMethodField::test_set_value_calls_method_calculated_by_given_function_on_object",
"tests/test_types.py::TestMethodField::test_set_value_does_not_do_anything_if_set_method_is_not_specified",
"tests/test_types.py::TestMethodField::test_set_value_raises_ValueError_if_method_does_not_exist",
"tests/test_types.py::TestMethodField::test_set_value_raises_ValueError_if_property_is_not_callable",
"tests/test_types.py::TestMethodField::test_set_value_passes_context_to_method",
"tests/test_types.py::TestMethodField::test_loading_value_with_field_type",
"tests/test_types.py::TestMethodField::test_loading_value_returns_loaded_value",
"tests/test_types.py::TestMethodField::test_loading_value_passes_context_to_field_types_load",
"tests/test_types.py::TestMethodField::test_loading_value_into_existing_object_calls_field_types_load_into",
"tests/test_types.py::TestMethodField::test_loading_value_into_existing_object_calls_field_types_load_if_load_into_is_not_available",
"tests/test_types.py::TestMethodField::test_loading_value_into_existing_object_calls_field_types_load_if_old_value_is_None",
"tests/test_types.py::TestMethodField::test_loading_value_into_existing_object_calls_field_types_load_if_old_value_is_MISSING",
"tests/test_types.py::TestMethodField::test_loading_value_into_existing_object_passes_context_to_field_types_load_into",
"tests/test_types.py::TestMethodField::test_dumping_result_of_given_objects_method",
"tests/test_types.py::TestMethodField::test_dumping_result_of_objects_method_with_field_type",
"tests/test_types.py::TestMethodField::test_dumping_result_of_a_different_objects_method",
"tests/test_types.py::TestMethodField::test_dumping_raises_ValueError_if_given_method_does_not_exist",
"tests/test_types.py::TestMethodField::test_dumping_raises_ValueError_if_given_method_is_not_callable",
"tests/test_types.py::TestMethodField::test_dumping_passes_context_to_field_type_dump",
"tests/test_types.py::TestFunctionField::test_get_value_returns_result_of_calling_configured_function_with_object",
"tests/test_types.py::TestFunctionField::test_get_value_returns_MISSING_if_get_func_is_not_specified",
"tests/test_types.py::TestFunctionField::test_get_value_raises_ValueError_if_property_is_not_callable",
"tests/test_types.py::TestFunctionField::test_get_value_passes_context_to_func",
"tests/test_types.py::TestFunctionField::test_set_value_calls_configure_method_on_object",
"tests/test_types.py::TestFunctionField::test_set_value_does_not_do_anything_if_set_func_is_not_specified",
"tests/test_types.py::TestFunctionField::test_set_value_raises_ValueError_if_property_is_not_callable",
"tests/test_types.py::TestFunctionField::test_set_value_passes_context_to_func",
"tests/test_types.py::TestFunctionField::test_loading_value_with_field_type",
"tests/test_types.py::TestFunctionField::test_loading_value_returns_loaded_value",
"tests/test_types.py::TestFunctionField::test_loading_value_passes_context_to_field_types_load",
"tests/test_types.py::TestFunctionField::test_loading_value_into_existing_object_calls_field_types_load_into",
"tests/test_types.py::TestFunctionField::test_loading_value_into_existing_object_calls_field_types_load_if_load_into_is_not_available",
"tests/test_types.py::TestFunctionField::test_loading_value_into_existing_object_calls_field_types_load_if_old_value_is_None",
"tests/test_types.py::TestFunctionField::test_loading_value_into_existing_object_calls_field_types_load_if_old_value_is_MISSING",
"tests/test_types.py::TestFunctionField::test_loading_value_into_existing_object_passes_context_to_field_types_load_into",
"tests/test_types.py::TestFunctionField::test_dumping_result_of_given_function",
"tests/test_types.py::TestFunctionField::test_dumping_result_of_objects_method_with_field_type",
"tests/test_types.py::TestFunctionField::test_dumping_raises_ValueError_if_given_get_func_is_not_callable",
"tests/test_types.py::TestFunctionField::test_dumping_passes_context_to_field_type_dump",
"tests/test_types.py::TestConstant::test_name",
"tests/test_types.py::TestConstant::test_description",
"tests/test_types.py::TestConstant::test_loading_always_returns_missing",
"tests/test_types.py::TestConstant::test_loading_raises_ValidationError_if_loaded_value_is_not_a_constant_value_specified",
"tests/test_types.py::TestConstant::test_loading_value_with_inner_type_before_checking_value_correctness",
"tests/test_types.py::TestConstant::test_customizing_error_message_when_value_is_incorrect",
"tests/test_types.py::TestConstant::test_dumping_always_returns_given_value",
"tests/test_types.py::TestConstant::test_dumping_given_constant_with_field_type",
"tests/test_types.py::TestObject::test_name",
"tests/test_types.py::TestObject::test_description",
"tests/test_types.py::TestObject::test_loading_missing_value_raises_required_error",
"tests/test_types.py::TestObject::test_loading_None_raises_required_error",
"tests/test_types.py::TestObject::test_dumping_missing_value_raises_required_error",
"tests/test_types.py::TestObject::test_dumping_None_raises_required_error",
"tests/test_types.py::TestObject::test_loading_does_not_raise_ValidationError_if_validators_succeed",
"tests/test_types.py::TestObject::test_loading_raises_ValidationError_if_validator_fails",
"tests/test_types.py::TestObject::test_loading_raises_ValidationError_with_combined_messages_if_multiple_validators_fail",
"tests/test_types.py::TestObject::test_loading_passes_context_to_validator",
"tests/test_types.py::TestObject::test_validation_returns_None_if_validators_succeed",
"tests/test_types.py::TestObject::test_validation_returns_errors_if_validator_fails",
"tests/test_types.py::TestObject::test_validation_returns_combined_errors_if_multiple_validators_fail",
"tests/test_types.py::TestObject::test_default_field_type_is_unset_by_default",
"tests/test_types.py::TestObject::test_inheriting_default_field_type_from_first_base_class_that_has_it_set",
"tests/test_types.py::TestObject::test_constructor_is_unset_by_default",
"tests/test_types.py::TestObject::test_inheriting_constructor_from_first_base_class_that_has_it_set",
"tests/test_types.py::TestObject::test_allow_extra_fields_is_unset_by_default",
"tests/test_types.py::TestObject::test_inheriting_allow_extra_fields_from_first_base_class_that_has_it_set",
"tests/test_types.py::TestObject::test_immutable_is_unset_by_default",
"tests/test_types.py::TestObject::test_inheriting_immutable_from_first_base_class_that_has_it_set",
"tests/test_types.py::TestObject::test_ordered_is_unset_by_default",
"tests/test_types.py::TestObject::test_iheriting_ordered_from_first_base_class_that_has_it_set",
"tests/test_types.py::TestObject::test_loading_dict_value",
"tests/test_types.py::TestObject::test_loading_non_dict_values_raises_ValidationError",
"tests/test_types.py::TestObject::test_loading_bypasses_values_for_which_field_type_returns_missing_value",
"tests/test_types.py::TestObject::test_loading_dict_with_field_errors_raises_ValidationError_with_all_field_errors_merged",
"tests/test_types.py::TestObject::test_loading_dict_with_field_errors_does_not_run_whole_object_validators",
"tests/test_types.py::TestObject::test_loading_calls_field_load_passing_field_name_and_whole_data",
"tests/test_types.py::TestObject::test_loading_passes_context_to_inner_type_load",
"tests/test_types.py::TestObject::test_constructing_objects_with_default_constructor_on_load",
"tests/test_types.py::TestObject::test_constructing_custom_objects_on_load",
"tests/test_types.py::TestObject::test_load_ignores_extra_fields_by_default",
"tests/test_types.py::TestObject::test_load_raises_ValidationError_if_reporting_extra_fields",
"tests/test_types.py::TestObject::test_loading_inherited_fields",
"tests/test_types.py::TestObject::test_loading_multiple_inherited_fields",
"tests/test_types.py::TestObject::test_loading_raises_ValidationError_if_inherited_fields_have_errors",
"tests/test_types.py::TestObject::test_loading_only_specified_fields",
"tests/test_types.py::TestObject::test_loading_only_specified_fields_does_not_affect_own_fields",
"tests/test_types.py::TestObject::test_loading_all_but_specified_base_class_fields",
"tests/test_types.py::TestObject::test_loading_all_but_specified_fields_does_not_affect_own_fields",
"tests/test_types.py::TestObject::test_loading_values_into_existing_object",
"tests/test_types.py::TestObject::test_loading_values_into_existing_object_returns_that_object",
"tests/test_types.py::TestObject::test_loading_values_into_existing_object_passes_all_object_attributes_to_validators",
"tests/test_types.py::TestObject::test_loading_values_into_immutable_object_creates_a_copy",
"tests/test_types.py::TestObject::test_loading_values_into_immutable_object_does_not_modify_original_object",
"tests/test_types.py::TestObject::test_loading_values_into_nested_object_of_immutable_object_creates_copy_of_it_regardless_of_nested_objects_immutable_flag",
"tests/test_types.py::TestObject::test_loading_values_into_nested_object_of_immutable_object_does_not_modify_original_objects",
"tests/test_types.py::TestObject::test_loading_values_into_nested_objects_with_inplace_False_does_not_modify_original_objects",
"tests/test_types.py::TestObject::test_loading_values_into_existing_objects_ignores_missing_fields",
"tests/test_types.py::TestObject::test_loading_MISSING_into_existing_object_does_not_do_anything",
"tests/test_types.py::TestObject::test_loading_None_into_existing_objects_raises_ValidationError",
"tests/test_types.py::TestObject::test_loading_None_into_field_of_existing_object_passes_None_to_field",
"tests/test_types.py::TestObject::test_loading_values_into_existing_objects_raises_ValidationError_if_data_contains_errors",
"tests/test_types.py::TestObject::test_loading_values_into_existing_objects_raises_ValidationError_if_validator_fails",
"tests/test_types.py::TestObject::test_loading_values_into_existing_objects_annotates_field_errors_with_field_names",
"tests/test_types.py::TestObject::test_loading_values_into_existing_nested_objects",
"tests/test_types.py::TestObject::test_loading_values_into_existing_object_when_nested_object_does_not_exist",
"tests/test_types.py::TestObject::test_validating_data_for_existing_objects_returns_None_if_data_is_valid",
"tests/test_types.py::TestObject::test_validating_data_for_existing_objects_returns_errors_if_data_contains_errors",
"tests/test_types.py::TestObject::test_validating_data_for_existing_objects_returns_errors_if_validator_fails",
"tests/test_types.py::TestObject::test_validating_data_for_existing_objects_does_not_modify_original_objects",
"tests/test_types.py::TestObject::test_dumping_object_attributes",
"tests/test_types.py::TestObject::test_dumping_calls_field_dump_passing_field_name_and_whole_object",
"tests/test_types.py::TestObject::test_dumping_passes_context_to_inner_type_dump",
"tests/test_types.py::TestObject::test_dumping_inherited_fields",
"tests/test_types.py::TestObject::test_dumping_multiple_inherited_fields",
"tests/test_types.py::TestObject::test_dumping_only_specified_fields_of_base_classes",
"tests/test_types.py::TestObject::test_dumping_only_specified_fields_does_not_affect_own_fields",
"tests/test_types.py::TestObject::test_dumping_all_but_specified_base_class_fields",
"tests/test_types.py::TestObject::test_dumping_all_but_specified_fields_does_not_affect_own_fields",
"tests/test_types.py::TestObject::test_shortcut_for_specifying_constant_fields",
"tests/test_types.py::TestObject::test_dumping_fields_in_declared_order_if_ordered_is_True",
"tests/test_types.py::TestOptional::test_loading_value_calls_load_of_inner_type",
"tests/test_types.py::TestOptional::test_loading_missing_value_returns_None",
"tests/test_types.py::TestOptional::test_loading_None_returns_None",
"tests/test_types.py::TestOptional::test_loading_missing_value_does_not_call_inner_type_load",
"tests/test_types.py::TestOptional::test_loading_None_does_not_call_inner_type_load",
"tests/test_types.py::TestOptional::test_loading_passes_context_to_inner_type_load",
"tests/test_types.py::TestOptional::test_overriding_missing_value_on_load",
"tests/test_types.py::TestOptional::test_overriding_None_value_on_load",
"tests/test_types.py::TestOptional::test_using_function_to_override_value_on_load",
"tests/test_types.py::TestOptional::test_loading_passes_context_to_override_function",
"tests/test_types.py::TestOptional::test_dumping_value_calls_dump_of_inner_type",
"tests/test_types.py::TestOptional::test_dumping_missing_value_returns_None",
"tests/test_types.py::TestOptional::test_dumping_None_returns_None",
"tests/test_types.py::TestOptional::test_dumping_missing_value_does_not_call_inner_type_dump",
"tests/test_types.py::TestOptional::test_dumping_None_does_not_call_inner_type_dump",
"tests/test_types.py::TestOptional::test_dumping_passes_context_to_inner_type_dump",
"tests/test_types.py::TestOptional::test_overriding_missing_value_on_dump",
"tests/test_types.py::TestOptional::test_overriding_None_value_on_dump",
"tests/test_types.py::TestOptional::test_using_function_to_override_value_on_dump",
"tests/test_types.py::TestOptional::test_dumping_passes_context_to_override_function",
"tests/test_types.py::TestLoadOnly::test_name",
"tests/test_types.py::TestLoadOnly::test_description",
"tests/test_types.py::TestLoadOnly::test_loading_returns_inner_type_load_result",
"tests/test_types.py::TestLoadOnly::test_loading_passes_context_to_inner_type_load",
"tests/test_types.py::TestLoadOnly::test_dumping_always_returns_missing",
"tests/test_types.py::TestLoadOnly::test_dumping_does_not_call_inner_type_dump",
"tests/test_types.py::TestDumpOnly::test_name",
"tests/test_types.py::TestDumpOnly::test_description",
"tests/test_types.py::TestDumpOnly::test_loading_always_returns_missing",
"tests/test_types.py::TestDumpOnly::test_loading_does_not_call_inner_type_dump",
"tests/test_types.py::TestDumpOnly::test_dumping_returns_inner_type_dump_result",
"tests/test_types.py::TestDumpOnly::test_dumping_passes_context_to_inner_type_dump",
"tests/test_types.py::TestTransform::test_name",
"tests/test_types.py::TestTransform::test_description",
"tests/test_types.py::TestTransform::test_loading_calls_pre_load_with_original_value",
"tests/test_types.py::TestTransform::test_loading_calls_inner_type_load_with_result_of_pre_load",
"tests/test_types.py::TestTransform::test_loading_calls_post_load_with_result_of_inner_type_load",
"tests/test_types.py::TestTransform::test_transform_passes_context_to_inner_type_load",
"tests/test_types.py::TestTransform::test_transform_passes_context_to_pre_load",
"tests/test_types.py::TestTransform::test_transform_passes_context_to_post_load",
"tests/test_types.py::TestTransform::test_dumping_calls_pre_dump_with_original_value",
"tests/test_types.py::TestTransform::test_dumping_calls_inner_type_dump_with_result_of_pre_dump",
"tests/test_types.py::TestTransform::test_dumping_calls_post_dump_with_result_of_inner_type_dump",
"tests/test_types.py::TestTransform::test_transform_passes_context_to_inner_type_dump",
"tests/test_types.py::TestTransform::test_transform_passes_context_to_pre_dump",
"tests/test_types.py::TestTransform::test_transform_passes_context_to_post_dump",
"tests/test_types.py::TestValidatedType::test_returns_subclass_of_given_type",
"tests/test_types.py::TestValidatedType::test_returns_type_that_has_single_given_validator",
"tests/test_types.py::TestValidatedType::test_accepts_context_unaware_validators",
"tests/test_types.py::TestValidatedType::test_returns_type_that_has_multiple_given_validators",
"tests/test_types.py::TestValidatedType::test_specifying_more_validators_on_type_instantiation",
"tests/test_types.py::TestValidatedType::test_new_type_accepts_same_constructor_arguments_as_base_type",
"tests/test_validators.py::TestPredicate::test_matching_values",
"tests/test_validators.py::TestPredicate::test_raising_ValidationError_if_predicate_returns_False",
"tests/test_validators.py::TestPredicate::test_customizing_validation_error",
"tests/test_validators.py::TestPredicate::test_passing_context_to_predicate",
"tests/test_validators.py::TestRange::test_matching_min_value",
"tests/test_validators.py::TestRange::test_raising_ValidationError_when_matching_min_value_and_given_value_is_less",
"tests/test_validators.py::TestRange::test_customzing_min_error_message",
"tests/test_validators.py::TestRange::test_matching_max_value",
"tests/test_validators.py::TestRange::test_raising_ValidationError_when_matching_max_value_and_given_value_is_greater",
"tests/test_validators.py::TestRange::test_customzing_max_error_message",
"tests/test_validators.py::TestRange::test_matching_range",
"tests/test_validators.py::TestRange::test_raising_ValidationError_when_matching_range_and_given_value_is_less",
"tests/test_validators.py::TestRange::test_raising_ValidationError_when_matching_range_and_given_value_is_greater",
"tests/test_validators.py::TestRange::test_customzing_range_error_message",
"tests/test_validators.py::TestRange::test_customizing_all_error_messages_at_once",
"tests/test_validators.py::TestLength::test_matching_exact_value",
"tests/test_validators.py::TestLength::test_raising_ValidationError_when_matching_exact_value_and_given_value_does_not_match",
"tests/test_validators.py::TestLength::test_customizing_exact_error_message",
"tests/test_validators.py::TestLength::test_matching_min_value",
"tests/test_validators.py::TestLength::test_raising_ValidationError_when_matching_min_value_and_given_value_is_less",
"tests/test_validators.py::TestLength::test_customzing_min_error_message",
"tests/test_validators.py::TestLength::test_matching_max_value",
"tests/test_validators.py::TestLength::test_raising_ValidationError_when_matching_max_value_and_given_value_is_greater",
"tests/test_validators.py::TestLength::test_customzing_max_error_message",
"tests/test_validators.py::TestLength::test_matching_range",
"tests/test_validators.py::TestLength::test_raising_ValidationError_when_matching_range_and_given_value_is_less",
"tests/test_validators.py::TestLength::test_raising_ValidationError_when_matching_range_and_given_value_is_greater",
"tests/test_validators.py::TestLength::test_customzing_range_error_message",
"tests/test_validators.py::TestLength::test_customizing_all_error_messages_at_once",
"tests/test_validators.py::TestNoneOf::test_matching_values_other_than_given_values",
"tests/test_validators.py::TestNoneOf::test_raising_ValidationError_when_value_is_one_of_forbidden_values",
"tests/test_validators.py::TestNoneOf::test_customizing_error_message",
"tests/test_validators.py::TestAnyOf::test_matching_given_values",
"tests/test_validators.py::TestAnyOf::test_raising_ValidationError_when_value_is_other_than_given_values",
"tests/test_validators.py::TestAnyOf::test_customizing_error_message",
"tests/test_validators.py::TestRegexp::test_matching_by_string_regexp",
"tests/test_validators.py::TestRegexp::test_matching_by_string_regexp_with_flags",
"tests/test_validators.py::TestRegexp::test_matching_by_regexp",
"tests/test_validators.py::TestRegexp::test_matching_by_regexp_ignores_flags",
"tests/test_validators.py::TestRegexp::test_raising_ValidationError_if_given_string_does_not_match_string_regexp",
"tests/test_validators.py::TestRegexp::test_raising_ValidationError_if_given_string_does_not_match_regexp",
"tests/test_validators.py::TestRegexp::test_customizing_error_message",
"tests/test_validators.py::TestUnique::test_raising_ValidationError_if_value_is_not_collection",
"tests/test_validators.py::TestUnique::test_matching_empty_collection",
"tests/test_validators.py::TestUnique::test_matching_collection_of_unique_values",
"tests/test_validators.py::TestUnique::test_matching_collection_of_values_with_unique_custom_keys",
"tests/test_validators.py::TestUnique::test_raising_ValidationError_if_item_appears_more_than_once",
"tests/test_validators.py::TestUnique::test_raising_ValidationError_if_custom_key_appears_more_than_once",
"tests/test_validators.py::TestUnique::test_customizing_error_message",
"tests/test_validators.py::TestEach::test_raising_ValidationError_if_value_is_not_collection",
"tests/test_validators.py::TestEach::test_matching_empty_collections",
"tests/test_validators.py::TestEach::test_matching_collections_each_elemenet_of_which_matches_given_validators",
"tests/test_validators.py::TestEach::test_raising_ValidationError_if_single_validator_fails",
"tests/test_validators.py::TestEach::test_raising_ValidationError_if_any_item_fails_any_validator"
]
| []
| MIT License | 1,350 | [
"lollipop/validators.py",
"lollipop/utils.py",
"lollipop/types.py"
]
| [
"lollipop/validators.py",
"lollipop/utils.py",
"lollipop/types.py"
]
|
|
melexis__warnings-plugin-9 | 63ce3d59b74e0cbd3d4b6795d57d0ce22d8ae112 | 2017-06-09 08:01:32 | 0c7e730a491d32ad90f258439715fb6507be37f2 | diff --git a/setup.py b/setup.py
index 5672ae8..8f049c5 100644
--- a/setup.py
+++ b/setup.py
@@ -5,7 +5,7 @@ from os.path import basename, dirname, join, splitext
from setuptools import find_packages, setup
PROJECT_URL = 'https://github.com/melexis/warnings-plugin'
-VERSION = '0.0.4'
+VERSION = '0.0.5'
def read(*names, **kwargs):
diff --git a/src/mlx/warnings.py b/src/mlx/warnings.py
index 4c45974..3112a5f 100644
--- a/src/mlx/warnings.py
+++ b/src/mlx/warnings.py
@@ -6,7 +6,7 @@ import math
DOXYGEN_WARNING_REGEX = r"(?:(?:((?:[/.]|[A-Za-z]:).+?):(-?\d+):\s*([Ww]arning|[Ee]rror)|<.+>:-?\d+(?::\s*([Ww]arning|[Ee]rror))?): (.+(?:\n(?!\s*(?:[Nn]otice|[Ww]arning|[Ee]rror): )[^/<\n][^:\n][^/\n].+)*)|\s*([Nn]otice|[Ww]arning|[Ee]rror): (.+))$"
doxy_pattern = re.compile(DOXYGEN_WARNING_REGEX)
-SPHINX_WARNING_REGEX = r"^(.+?:\d+): (DEBUG|INFO|WARNING|ERROR|SEVERE): (.+)\n?$"
+SPHINX_WARNING_REGEX = r"^(.+?:(?:\d+|None)): (DEBUG|INFO|WARNING|ERROR|SEVERE): (.+)\n?$"
sphinx_pattern = re.compile(SPHINX_WARNING_REGEX)
| Some RST warnings are not recognized
Example of warning not recognized:
```
/path/to/file.rst:None: WARNING: Traceability: cannot link to XXX, item is not defined
```
Rumours say it is because of a missing line number, and more rumours say that this regex would work
```
^(.+?:None): (DEBUG|INFO|WARNING|ERROR|SEVERE): (.+)\n?$
```
Warning is from mlx.traceability plugin, which could print a line number iso the None (see issue https://github.com/melexis/sphinx-traceability-extension/issues/2).
| melexis/warnings-plugin | diff --git a/tests/test_sphinx.py b/tests/test_sphinx.py
index a9977fe..354a255 100644
--- a/tests/test_sphinx.py
+++ b/tests/test_sphinx.py
@@ -15,6 +15,11 @@ class TestSphinxWarnings(TestCase):
self.warnings.check_sphinx_warnings("/home/bljah/test/index.rst:5: WARNING: toctree contains reference to nonexisting document u'installation'")
self.assertEqual(self.warnings.return_sphinx_warnings(), 1)
+ def test_single_warning_no_line_number(self):
+ self.warnings.check_sphinx_warnings("/home/bljah/test/index.rst:5: WARNING: toctree contains reference to nonexisting document u'installation'")
+ self.warnings.check_sphinx_warnings("/home/bljah/test/index.rst:None: WARNING: toctree contains reference to nonexisting document u'installation'")
+ self.assertEqual(self.warnings.return_sphinx_warnings(), 2)
+
def test_single_warning_mixed(self):
self.warnings.check_sphinx_warnings('This1 should not be treated as warning')
self.warnings.check_sphinx_warnings("/home/bljah/test/index.rst:5: WARNING: toctree contains reference to nonexisting document u'installation'")
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 2
} | 0.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-travis-fold",
"mock"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.6",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
certifi==2021.5.30
coverage==6.2
importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
-e git+https://github.com/melexis/warnings-plugin.git@63ce3d59b74e0cbd3d4b6795d57d0ce22d8ae112#egg=mlx.warnings
mock==5.2.0
more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
pytest==6.2.4
pytest-cov==4.0.0
pytest-travis-fold==1.3.0
toml @ file:///tmp/build/80754af9/toml_1616166611790/work
tomli==1.2.3
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
| name: warnings-plugin
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=21.4.0=pyhd3eb1b0_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- importlib-metadata=4.8.1=py36h06a4308_0
- importlib_metadata=4.8.1=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- more-itertools=8.12.0=pyhd3eb1b0_0
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pip=21.2.2=py36h06a4308_0
- pluggy=0.13.1=py36h06a4308_0
- py=1.11.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pytest=6.2.4=py36h06a4308_2
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- toml=0.10.2=pyhd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zipp=3.6.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
- pip:
- coverage==6.2
- mock==5.2.0
- pytest-cov==4.0.0
- pytest-travis-fold==1.3.0
- tomli==1.2.3
prefix: /opt/conda/envs/warnings-plugin
| [
"tests/test_sphinx.py::TestSphinxWarnings::test_single_warning_no_line_number"
]
| []
| [
"tests/test_sphinx.py::TestSphinxWarnings::test_no_warning",
"tests/test_sphinx.py::TestSphinxWarnings::test_single_warning",
"tests/test_sphinx.py::TestSphinxWarnings::test_single_warning_mixed"
]
| []
| Apache License 2.0 | 1,351 | [
"setup.py",
"src/mlx/warnings.py"
]
| [
"setup.py",
"src/mlx/warnings.py"
]
|
|
Azure__azure-cli-3643 | c024331912bbf467725d76d437557ebc6e4aba17 | 2017-06-09 16:51:58 | eb12ac454cbe1ddb59c86cdf2045e1912660e750 | diff --git a/doc/configuring_your_machine.md b/doc/configuring_your_machine.md
index 7af498348..a4cbd442c 100644
--- a/doc/configuring_your_machine.md
+++ b/doc/configuring_your_machine.md
@@ -72,7 +72,7 @@ The repo has a launch.json file that will launch the version of Python that is f
#### OSX/Ubuntu (bash):
```Shell
- source <clone root>/env/scripts/activate
+ . <clone root>/env/bin/activate
```
#### Windows:
diff --git a/src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/_validators.py b/src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/_validators.py
index fd4342d86..5e0d8d925 100644
--- a/src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/_validators.py
+++ b/src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/_validators.py
@@ -679,8 +679,8 @@ def _validate_admin_password(password, os_type):
max_length = 72 if is_linux else 123
min_length = 12
if len(password) not in range(min_length, max_length + 1):
- raise CLIError('The pssword length must be between {} and {}'.format(min_length,
- max_length))
+ raise CLIError('The password length must be between {} and {}'.format(min_length,
+ max_length))
contains_lower = re.findall('[a-z]+', password)
contains_upper = re.findall('[A-Z]+', password)
contains_digit = re.findall('[0-9]+', password)
| The error message for too short/long password for VM is spelled incorrectly (pssword)
We even have a test that verifies that it is incorrectly spelled :)
| Azure/azure-cli | diff --git a/src/command_modules/azure-cli-vm/tests/test_vm_actions.py b/src/command_modules/azure-cli-vm/tests/test_vm_actions.py
index 800a04d14..45f0b6e8c 100644
--- a/src/command_modules/azure-cli-vm/tests/test_vm_actions.py
+++ b/src/command_modules/azure-cli-vm/tests/test_vm_actions.py
@@ -117,7 +117,7 @@ class TestActions(unittest.TestCase):
def test_validate_admin_password_linux(self):
# pylint: disable=line-too-long
- err_length = 'The pssword length must be between 12 and 72'
+ err_length = 'The password length must be between 12 and 72'
err_variety = 'Password must have the 3 of the following: 1 lower case character, 1 upper case character, 1 number and 1 special character'
self._verify_password_with_ex('te', 'linux', err_length)
@@ -129,7 +129,7 @@ class TestActions(unittest.TestCase):
def test_validate_admin_password_windows(self):
# pylint: disable=line-too-long
- err_length = 'The pssword length must be between 12 and 123'
+ err_length = 'The password length must be between 12 and 123'
err_variety = 'Password must have the 3 of the following: 1 lower case character, 1 upper case character, 1 number and 1 special character'
self._verify_password_with_ex('P1', 'windows', err_length)
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 2
} | 2.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "python scripts/dev_setup.py",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc libssl-dev libffi-dev"
],
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | adal==0.4.3
applicationinsights==0.10.0
argcomplete==1.8.0
astroid==2.11.7
attrs==22.2.0
autopep8==2.0.4
azure-batch==3.0.0
-e git+https://github.com/Azure/azure-cli.git@c024331912bbf467725d76d437557ebc6e4aba17#egg=azure_cli&subdirectory=src/azure-cli
-e git+https://github.com/Azure/azure-cli.git@c024331912bbf467725d76d437557ebc6e4aba17#egg=azure_cli_acr&subdirectory=src/command_modules/azure-cli-acr
-e git+https://github.com/Azure/azure-cli.git@c024331912bbf467725d76d437557ebc6e4aba17#egg=azure_cli_acs&subdirectory=src/command_modules/azure-cli-acs
-e git+https://github.com/Azure/azure-cli.git@c024331912bbf467725d76d437557ebc6e4aba17#egg=azure_cli_appservice&subdirectory=src/command_modules/azure-cli-appservice
-e git+https://github.com/Azure/azure-cli.git@c024331912bbf467725d76d437557ebc6e4aba17#egg=azure_cli_batch&subdirectory=src/command_modules/azure-cli-batch
-e git+https://github.com/Azure/azure-cli.git@c024331912bbf467725d76d437557ebc6e4aba17#egg=azure_cli_billing&subdirectory=src/command_modules/azure-cli-billing
-e git+https://github.com/Azure/azure-cli.git@c024331912bbf467725d76d437557ebc6e4aba17#egg=azure_cli_cdn&subdirectory=src/command_modules/azure-cli-cdn
-e git+https://github.com/Azure/azure-cli.git@c024331912bbf467725d76d437557ebc6e4aba17#egg=azure_cli_cloud&subdirectory=src/command_modules/azure-cli-cloud
-e git+https://github.com/Azure/azure-cli.git@c024331912bbf467725d76d437557ebc6e4aba17#egg=azure_cli_cognitiveservices&subdirectory=src/command_modules/azure-cli-cognitiveservices
-e git+https://github.com/Azure/azure-cli.git@c024331912bbf467725d76d437557ebc6e4aba17#egg=azure_cli_component&subdirectory=src/command_modules/azure-cli-component
-e git+https://github.com/Azure/azure-cli.git@c024331912bbf467725d76d437557ebc6e4aba17#egg=azure_cli_configure&subdirectory=src/command_modules/azure-cli-configure
-e git+https://github.com/Azure/azure-cli.git@c024331912bbf467725d76d437557ebc6e4aba17#egg=azure_cli_consumption&subdirectory=src/command_modules/azure-cli-consumption
-e git+https://github.com/Azure/azure-cli.git@c024331912bbf467725d76d437557ebc6e4aba17#egg=azure_cli_core&subdirectory=src/azure-cli-core
-e git+https://github.com/Azure/azure-cli.git@c024331912bbf467725d76d437557ebc6e4aba17#egg=azure_cli_cosmosdb&subdirectory=src/command_modules/azure-cli-cosmosdb
-e git+https://github.com/Azure/azure-cli.git@c024331912bbf467725d76d437557ebc6e4aba17#egg=azure_cli_dla&subdirectory=src/command_modules/azure-cli-dla
-e git+https://github.com/Azure/azure-cli.git@c024331912bbf467725d76d437557ebc6e4aba17#egg=azure_cli_dls&subdirectory=src/command_modules/azure-cli-dls
-e git+https://github.com/Azure/azure-cli.git@c024331912bbf467725d76d437557ebc6e4aba17#egg=azure_cli_feedback&subdirectory=src/command_modules/azure-cli-feedback
-e git+https://github.com/Azure/azure-cli.git@c024331912bbf467725d76d437557ebc6e4aba17#egg=azure_cli_find&subdirectory=src/command_modules/azure-cli-find
-e git+https://github.com/Azure/azure-cli.git@c024331912bbf467725d76d437557ebc6e4aba17#egg=azure_cli_interactive&subdirectory=src/command_modules/azure-cli-interactive
-e git+https://github.com/Azure/azure-cli.git@c024331912bbf467725d76d437557ebc6e4aba17#egg=azure_cli_iot&subdirectory=src/command_modules/azure-cli-iot
-e git+https://github.com/Azure/azure-cli.git@c024331912bbf467725d76d437557ebc6e4aba17#egg=azure_cli_keyvault&subdirectory=src/command_modules/azure-cli-keyvault
-e git+https://github.com/Azure/azure-cli.git@c024331912bbf467725d76d437557ebc6e4aba17#egg=azure_cli_lab&subdirectory=src/command_modules/azure-cli-lab
-e git+https://github.com/Azure/azure-cli.git@c024331912bbf467725d76d437557ebc6e4aba17#egg=azure_cli_monitor&subdirectory=src/command_modules/azure-cli-monitor
-e git+https://github.com/Azure/azure-cli.git@c024331912bbf467725d76d437557ebc6e4aba17#egg=azure_cli_network&subdirectory=src/command_modules/azure-cli-network
-e git+https://github.com/Azure/azure-cli.git@c024331912bbf467725d76d437557ebc6e4aba17#egg=azure_cli_nspkg&subdirectory=src/azure-cli-nspkg
-e git+https://github.com/Azure/azure-cli.git@c024331912bbf467725d76d437557ebc6e4aba17#egg=azure_cli_profile&subdirectory=src/command_modules/azure-cli-profile
-e git+https://github.com/Azure/azure-cli.git@c024331912bbf467725d76d437557ebc6e4aba17#egg=azure_cli_rdbms&subdirectory=src/command_modules/azure-cli-rdbms
-e git+https://github.com/Azure/azure-cli.git@c024331912bbf467725d76d437557ebc6e4aba17#egg=azure_cli_redis&subdirectory=src/command_modules/azure-cli-redis
-e git+https://github.com/Azure/azure-cli.git@c024331912bbf467725d76d437557ebc6e4aba17#egg=azure_cli_resource&subdirectory=src/command_modules/azure-cli-resource
-e git+https://github.com/Azure/azure-cli.git@c024331912bbf467725d76d437557ebc6e4aba17#egg=azure_cli_role&subdirectory=src/command_modules/azure-cli-role
-e git+https://github.com/Azure/azure-cli.git@c024331912bbf467725d76d437557ebc6e4aba17#egg=azure_cli_sf&subdirectory=src/command_modules/azure-cli-sf
-e git+https://github.com/Azure/azure-cli.git@c024331912bbf467725d76d437557ebc6e4aba17#egg=azure_cli_sql&subdirectory=src/command_modules/azure-cli-sql
-e git+https://github.com/Azure/azure-cli.git@c024331912bbf467725d76d437557ebc6e4aba17#egg=azure_cli_storage&subdirectory=src/command_modules/azure-cli-storage
-e git+https://github.com/Azure/azure-cli.git@c024331912bbf467725d76d437557ebc6e4aba17#egg=azure_cli_taskhelp&subdirectory=src/command_modules/azure-cli-taskhelp
-e git+https://github.com/Azure/azure-cli.git@c024331912bbf467725d76d437557ebc6e4aba17#egg=azure_cli_testsdk&subdirectory=src/azure-cli-testsdk
-e git+https://github.com/Azure/azure-cli.git@c024331912bbf467725d76d437557ebc6e4aba17#egg=azure_cli_utility_automation&subdirectory=scripts
-e git+https://github.com/Azure/azure-cli.git@c024331912bbf467725d76d437557ebc6e4aba17#egg=azure_cli_vm&subdirectory=src/command_modules/azure-cli-vm
azure-common==1.1.28
azure-core==1.24.2
azure-datalake-store==0.0.9
azure-graphrbac==0.30.0rc6
azure-keyvault==0.3.0
azure-mgmt-authorization==0.30.0rc6
azure-mgmt-batch==4.0.0
azure-mgmt-billing==0.1.0
azure-mgmt-cdn==0.30.2
azure-mgmt-cognitiveservices==1.0.0
azure-mgmt-compute==1.0.0rc1
azure-mgmt-consumption==0.1.0
azure-mgmt-containerregistry==0.2.1
azure-mgmt-datalake-analytics==0.1.4
azure-mgmt-datalake-nspkg==3.0.1
azure-mgmt-datalake-store==0.1.4
azure-mgmt-devtestlabs==2.0.0
azure-mgmt-dns==1.0.1
azure-mgmt-documentdb==0.1.3
azure-mgmt-iothub==0.2.2
azure-mgmt-keyvault==0.31.0
azure-mgmt-monitor==0.2.1
azure-mgmt-network==1.0.0rc3
azure-mgmt-nspkg==1.0.0
azure-mgmt-rdbms==0.1.0
azure-mgmt-redis==1.0.0
azure-mgmt-resource==1.1.0rc1
azure-mgmt-sql==0.5.1
azure-mgmt-storage==1.0.0rc1
azure-mgmt-trafficmanager==0.30.0
azure-mgmt-web==0.32.0
azure-monitor==0.3.0
azure-multiapi-storage==0.1.0
azure-nspkg==1.0.0
azure-servicefabric==5.6.130
certifi==2021.5.30
cffi==1.15.1
colorama==0.3.7
coverage==6.2
cryptography==40.0.2
flake8==5.0.4
futures==3.1.1
humanfriendly==2.4
idna==3.10
importlib-metadata==4.2.0
iniconfig==1.1.1
isodate==0.7.0
isort==5.10.1
jeepney==0.7.1
jmespath==0.10.0
keyring==23.4.1
lazy-object-proxy==1.7.1
mccabe==0.7.0
mock==5.2.0
msrest==0.4.29
msrestazure==0.4.34
nose==1.3.7
oauthlib==3.2.2
packaging==21.3
paramiko==2.0.2
pbr==6.1.1
pluggy==1.0.0
prompt-toolkit==3.0.36
py==1.11.0
pyasn1==0.5.1
pycodestyle==2.10.0
pycparser==2.21
pydocumentdb==2.3.5
pyflakes==2.5.0
Pygments==2.1.3
PyJWT==2.4.0
pylint==1.7.1
pyOpenSSL==16.2.0
pyparsing==3.1.4
pytest==7.0.1
python-dateutil==2.9.0.post0
pytz==2025.2
PyYAML==3.11
requests==2.9.1
requests-oauthlib==2.0.0
scp==0.15.0
SecretStorage==3.3.3
six==1.10.0
sshtunnel==0.4.0
tabulate==0.7.7
tomli==1.2.3
typed-ast==1.5.5
typing-extensions==4.1.1
urllib3==1.26.20
urllib3-secure-extra==0.1.0
vcrpy==1.10.3
vsts-cd-manager==1.0.2
wcwidth==0.2.13
Whoosh==2.7.4
wrapt==1.16.0
xmltodict==0.14.2
zipp==3.6.0
| name: azure-cli
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- adal==0.4.3
- applicationinsights==0.10.0
- argcomplete==1.8.0
- astroid==2.11.7
- attrs==22.2.0
- autopep8==2.0.4
- azure-batch==3.0.0
- azure-common==1.1.28
- azure-core==1.24.2
- azure-datalake-store==0.0.9
- azure-graphrbac==0.30.0rc6
- azure-keyvault==0.3.0
- azure-mgmt-authorization==0.30.0rc6
- azure-mgmt-batch==4.0.0
- azure-mgmt-billing==0.1.0
- azure-mgmt-cdn==0.30.2
- azure-mgmt-cognitiveservices==1.0.0
- azure-mgmt-compute==1.0.0rc1
- azure-mgmt-consumption==0.1.0
- azure-mgmt-containerregistry==0.2.1
- azure-mgmt-datalake-analytics==0.1.4
- azure-mgmt-datalake-nspkg==3.0.1
- azure-mgmt-datalake-store==0.1.4
- azure-mgmt-devtestlabs==2.0.0
- azure-mgmt-dns==1.0.1
- azure-mgmt-documentdb==0.1.3
- azure-mgmt-iothub==0.2.2
- azure-mgmt-keyvault==0.31.0
- azure-mgmt-monitor==0.2.1
- azure-mgmt-network==1.0.0rc3
- azure-mgmt-nspkg==1.0.0
- azure-mgmt-rdbms==0.1.0
- azure-mgmt-redis==1.0.0
- azure-mgmt-resource==1.1.0rc1
- azure-mgmt-sql==0.5.1
- azure-mgmt-storage==1.0.0rc1
- azure-mgmt-trafficmanager==0.30.0
- azure-mgmt-web==0.32.0
- azure-monitor==0.3.0
- azure-multiapi-storage==0.1.0
- azure-nspkg==1.0.0
- azure-servicefabric==5.6.130
- cffi==1.15.1
- colorama==0.3.7
- coverage==6.2
- cryptography==40.0.2
- flake8==5.0.4
- futures==3.1.1
- humanfriendly==2.4
- idna==3.10
- importlib-metadata==4.2.0
- iniconfig==1.1.1
- isodate==0.7.0
- isort==5.10.1
- jeepney==0.7.1
- jmespath==0.10.0
- keyring==23.4.1
- lazy-object-proxy==1.7.1
- mccabe==0.7.0
- mock==5.2.0
- msrest==0.4.29
- msrestazure==0.4.34
- nose==1.3.7
- oauthlib==3.2.2
- packaging==21.3
- paramiko==2.0.2
- pbr==6.1.1
- pip==9.0.1
- pluggy==1.0.0
- prompt-toolkit==3.0.36
- py==1.11.0
- pyasn1==0.5.1
- pycodestyle==2.10.0
- pycparser==2.21
- pydocumentdb==2.3.5
- pyflakes==2.5.0
- pygments==2.1.3
- pyjwt==2.4.0
- pylint==1.7.1
- pyopenssl==16.2.0
- pyparsing==3.1.4
- pytest==7.0.1
- python-dateutil==2.9.0.post0
- pytz==2025.2
- pyyaml==3.11
- requests==2.9.1
- requests-oauthlib==2.0.0
- scp==0.15.0
- secretstorage==3.3.3
- setuptools==30.4.0
- six==1.10.0
- sshtunnel==0.4.0
- tabulate==0.7.7
- tomli==1.2.3
- typed-ast==1.5.5
- typing-extensions==4.1.1
- urllib3==1.26.20
- urllib3-secure-extra==0.1.0
- vcrpy==1.10.3
- vsts-cd-manager==1.0.2
- wcwidth==0.2.13
- whoosh==2.7.4
- wrapt==1.16.0
- xmltodict==0.14.2
- zipp==3.6.0
prefix: /opt/conda/envs/azure-cli
| [
"src/command_modules/azure-cli-vm/tests/test_vm_actions.py::TestActions::test_validate_admin_password_linux",
"src/command_modules/azure-cli-vm/tests/test_vm_actions.py::TestActions::test_validate_admin_password_windows"
]
| []
| [
"src/command_modules/azure-cli-vm/tests/test_vm_actions.py::TestActions::test_figure_out_storage_source",
"src/command_modules/azure-cli-vm/tests/test_vm_actions.py::TestActions::test_generate_specfied_ssh_key_files",
"src/command_modules/azure-cli-vm/tests/test_vm_actions.py::TestActions::test_get_next_subnet_addr_suffix",
"src/command_modules/azure-cli-vm/tests/test_vm_actions.py::TestActions::test_parse_image_argument",
"src/command_modules/azure-cli-vm/tests/test_vm_actions.py::TestActions::test_source_storage_account_err_case",
"src/command_modules/azure-cli-vm/tests/test_vm_actions.py::TestActions::test_validate_admin_username_linux",
"src/command_modules/azure-cli-vm/tests/test_vm_actions.py::TestActions::test_validate_admin_username_windows"
]
| []
| MIT License | 1,352 | [
"doc/configuring_your_machine.md",
"src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/_validators.py"
]
| [
"doc/configuring_your_machine.md",
"src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/_validators.py"
]
|
|
Azure__azure-cli-3645 | fb9f2e795d88105549c8552dd7a38136a3fdfda0 | 2017-06-09 17:35:19 | eb12ac454cbe1ddb59c86cdf2045e1912660e750 | diff --git a/azure-cli.pyproj b/azure-cli.pyproj
index 3fdf7cd0a..ea34c0dd2 100644
--- a/azure-cli.pyproj
+++ b/azure-cli.pyproj
@@ -592,6 +592,8 @@
</Compile>
<Compile Include="command_modules\azure-cli-resource\azure_bdist_wheel.py" />
<Compile Include="command_modules\azure-cli-resource\tests\test_api_check.py" />
+ <Compile Include="command_modules\azure-cli-resource\tests\test_custom.py" />
+ <Compile Include="command_modules\azure-cli-resource\tests\test_locks.py" />
<Compile Include="command_modules\azure-cli-resource\tests\test_resource.py" />
<Compile Include="command_modules\azure-cli-resource\tests\test_resource_list_odata_filter.py" />
<Compile Include="command_modules\azure-cli-resource\tests\test_resource_validators.py" />
@@ -1119,7 +1121,6 @@
<Content Include="command_modules\azure-cli-resource\tests\sample_policy_rule.json" />
<Content Include="command_modules\azure-cli-resource\tests\crossrg_deploy.json" />
<Content Include="command_modules\azure-cli-resource\tests\simple_deploy.json" />
- <Content Include="command_modules\azure-cli-resource\tests\crossrg_deploy_parameters.json" />
<Content Include="command_modules\azure-cli-resource\tests\simple_deploy_parameters.json" />
<Content Include="command_modules\azure-cli-role\HISTORY.rst" />
<Content Include="command_modules\azure-cli-sf\HISTORY.rst" />
diff --git a/src/command_modules/azure-cli-interactive/azclishell/app.py b/src/command_modules/azure-cli-interactive/azclishell/app.py
index 81fd00581..628a98763 100644
--- a/src/command_modules/azure-cli-interactive/azclishell/app.py
+++ b/src/command_modules/azure-cli-interactive/azclishell/app.py
@@ -435,12 +435,6 @@ class Shell(object):
elif text.strip() == CLEAR_WORD:
outside = True
cmd = CLEAR_WORD
- if '--version' in text:
- try:
- continue_flag = True
- show_version_info_exit(sys.stdout)
- except SystemExit:
- pass
if text:
if text[0] == SELECT_SYMBOL['outside']:
cmd = text[1:]
@@ -459,7 +453,12 @@ class Shell(object):
elif text[0] == SELECT_SYMBOL['query']: # query previous output
continue_flag = self.handle_jmespath_query(text, continue_flag)
-
+ elif text[0] == '--version' or text[0] == '-v':
+ try:
+ continue_flag = True
+ show_version_info_exit(sys.stdout)
+ except SystemExit:
+ pass
elif "|" in text or ">" in text: # anything I don't parse, send off
outside = True
cmd = "az " + cmd
diff --git a/src/command_modules/azure-cli-resource/HISTORY.rst b/src/command_modules/azure-cli-resource/HISTORY.rst
index bcad7f816..6c6a7856b 100644
--- a/src/command_modules/azure-cli-resource/HISTORY.rst
+++ b/src/command_modules/azure-cli-resource/HISTORY.rst
@@ -5,7 +5,8 @@ Release History
unreleased
++++++++++++++++++
-Fix --resource-type parsing for the lock command to accept <resource-namespace>/<resource-type>
+* Fix --resource-type parsing for the lock command to accept <resource-namespace>/<resource-type>
+* Add support for specifying deployment parameters using KEY=VALUE syntax.
2.0.7 (2017-05-30)
++++++++++++++++++
diff --git a/src/command_modules/azure-cli-resource/azure/cli/command_modules/resource/_help.py b/src/command_modules/azure-cli-resource/azure/cli/command_modules/resource/_help.py
index 0809a3a5f..63b8529a5 100644
--- a/src/command_modules/azure-cli-resource/azure/cli/command_modules/resource/_help.py
+++ b/src/command_modules/azure-cli-resource/azure/cli/command_modules/resource/_help.py
@@ -281,6 +281,12 @@ helps['group deployment'] = """
helps['group deployment create'] = """
type: command
short-summary: Start a deployment.
+ parameters:
+ - name: --parameters
+ short-summary: Supply deployment parameter values.
+ long-summary: >
+ Parameters may be supplied from a parameters file, raw JSON (which can be loaded using `@<file path>` syntax, or as <KEY=VALUE> pairs. Parameters are evaluated in order, so when a value is assigned twice, the latter value will be used.
+ It is recommended that you supply your parameters file first, and then override selectively using KEY=VALUE syntax (example: --parameters params.json --parameters location=westus)
examples:
- name: Create a deployment from a remote template file.
text: >
@@ -288,6 +294,9 @@ helps['group deployment create'] = """
- name: Create a deployment from a local template file and use parameter values in a string.
text: >
az group deployment create -g MyResourceGroup --template-file azuredeploy.json --parameters "{\\"location\\": {\\"value\\": \\"westus\\"}}"
+ - name: Create a deployment from a local template, use a parameter file and selectively override parameters.
+ text: >
+ az group deployment create -g MyResourceGroup --template-file azuredeploy.json --parameters params.json --parameters MyValue=This [email protected]
"""
helps['group deployment export'] = """
type: command
@@ -296,6 +305,12 @@ helps['group deployment export'] = """
helps['group deployment validate'] = """
type: command
short-summary: Validate whether the specified template is syntactically correct and will be accepted by Azure Resource Manager.
+ parameters:
+ - name: --parameters
+ short-summary: Supply deployment parameter values.
+ long-summary: >
+ Parameters may be supplied from a parameters file, raw JSON (which can be loaded using `@<file path>` syntax, or as <KEY=VALUE> pairs. Parameters are evaluated in order, so when a value is assigned twice, the latter value will be used.
+ It is recommended that you supply your parameters file first, and then override selectively using KEY=VALUE syntax (example: --parameters params.json --parameters location=westus)
"""
helps['group deployment wait'] = """
type: command
diff --git a/src/command_modules/azure-cli-resource/azure/cli/command_modules/resource/_params.py b/src/command_modules/azure-cli-resource/azure/cli/command_modules/resource/_params.py
index 6f636d9cf..104f4cb98 100644
--- a/src/command_modules/azure-cli-resource/azure/cli/command_modules/resource/_params.py
+++ b/src/command_modules/azure-cli-resource/azure/cli/command_modules/resource/_params.py
@@ -15,7 +15,7 @@ from azure.cli.core.commands.parameters import (ignore_type, resource_group_name
enum_choice_list, no_wait_type, file_type)
from .custom import (get_policy_completion_list, get_policy_assignment_completion_list,
get_resource_types_completion_list, get_providers_completion_list)
-from ._validators import validate_deployment_name, validate_lock_parameters
+from ._validators import process_deployment_create_namespace, validate_lock_parameters, validate_deployment_parameters
# BASIC PARAMETER CONFIGURATION
@@ -72,15 +72,19 @@ register_cli_argument('policy assignment', 'policy', help='policy name or fully
register_cli_argument('group', 'tag', tag_type)
register_cli_argument('group', 'tags', tags_type)
register_cli_argument('group', 'resource_group_name', resource_group_name_type, options_list=('--name', '-n'))
+
register_cli_argument('group deployment', 'resource_group_name', arg_type=resource_group_name_type, completer=get_resource_group_completion_list)
register_cli_argument('group deployment', 'deployment_name', options_list=('--name', '-n'), required=True, help='The deployment name.')
-register_cli_argument('group deployment', 'parameters', action='append', completer=FilesCompleter(), help="provide deployment parameter values, either json string, or use `@<file path>` to load from a file. Can be repeated. If a the same parameter is present in multiple arguments, the last value wins.")
register_cli_argument('group deployment', 'template_file', completer=FilesCompleter(), type=file_type, help="a template file path in the file system")
register_cli_argument('group deployment', 'template_uri', help='a uri to a remote template file')
register_cli_argument('group deployment', 'mode', help='Incremental (only add resources to resource group) or Complete (remove extra resources from resource group)', **enum_choice_list(DeploymentMode))
+
register_cli_argument('group deployment create', 'deployment_name', options_list=('--name', '-n'), required=False,
- validator=validate_deployment_name, help='The deployment name. Default to template file base name')
+ validator=process_deployment_create_namespace, help='The deployment name. Default to template file base name')
+register_cli_argument('group deployment', 'parameters', action='append', nargs='+', completer=FilesCompleter(), validator=validate_deployment_parameters)
+
register_cli_argument('group deployment operation show', 'operation_ids', nargs='+', help='A list of operation ids to show')
+
register_cli_argument('group export', 'include_comments', action='store_true')
register_cli_argument('group export', 'include_parameter_default_value', action='store_true')
register_cli_argument('group create', 'rg_name', options_list=('--name', '-n'), help='name of the new resource group', completer=None)
diff --git a/src/command_modules/azure-cli-resource/azure/cli/command_modules/resource/_validators.py b/src/command_modules/azure-cli-resource/azure/cli/command_modules/resource/_validators.py
index 4348e2b7b..fd6531210 100644
--- a/src/command_modules/azure-cli-resource/azure/cli/command_modules/resource/_validators.py
+++ b/src/command_modules/azure-cli-resource/azure/cli/command_modules/resource/_validators.py
@@ -14,7 +14,7 @@ except ImportError:
from azure.cli.core.util import CLIError
-def validate_deployment_name(namespace):
+def _validate_deployment_name(namespace):
# If missing,try come out with a name associated with the template name
if namespace.deployment_name is None:
template_filename = None
@@ -29,6 +29,55 @@ def validate_deployment_name(namespace):
namespace.deployment_name = 'deployment1'
+def validate_deployment_parameters(namespace):
+
+ from azure.cli.core.util import shell_safe_json_parse, get_file_json
+
+ def _try_parse_json_object(value):
+ try:
+ parsed = shell_safe_json_parse(value)
+ return parsed.get('parameters', parsed)
+ except CLIError:
+ return None
+
+ def _try_load_file_object(value):
+ if os.path.isfile(value):
+ parsed = get_file_json(value, throw_on_empty=False)
+ return parsed.get('parameters', parsed)
+ return None
+
+ def _try_parse_key_value_object(parameters, value):
+ try:
+ key, value = value.split('=', 1)
+ except ValueError:
+ return False
+
+ try:
+ parameters[key] = {'value': shell_safe_json_parse(value)}
+ except (ValueError, CLIError):
+ parameters[key] = {'value': value}
+
+ return True
+
+ parameters = {}
+ for params in namespace.parameters or []:
+ for item in params:
+ if not _try_parse_key_value_object(parameters, item):
+ param_obj = _try_load_file_object(item) or _try_parse_json_object(item)
+ if not param_obj:
+ raise CLIError('Unable to parse parameter: {}'.format(item))
+ parameters.update(param_obj)
+
+ namespace.parameters = parameters
+
+
+def process_deployment_create_namespace(namespace):
+ if (namespace.template_uri and namespace.template_file) or \
+ (not namespace.template_uri and not namespace.template_file):
+ raise CLIError('incorrect usage: --template-file FILE | --template-uri URI')
+ _validate_deployment_name(namespace)
+
+
def internal_validate_lock_parameters(resource_group_name, resource_provider_namespace,
parent_resource_path, resource_type, resource_name):
if resource_group_name is None:
diff --git a/src/command_modules/azure-cli-resource/azure/cli/command_modules/resource/custom.py b/src/command_modules/azure-cli-resource/azure/cli/command_modules/resource/custom.py
index eac565632..7e578d808 100644
--- a/src/command_modules/azure-cli-resource/azure/cli/command_modules/resource/custom.py
+++ b/src/command_modules/azure-cli-resource/azure/cli/command_modules/resource/custom.py
@@ -293,19 +293,6 @@ def _prompt_for_parameters(missing_parameters):
return result
-def _merge_parameters(parameter_list):
- parameters = None
- for params in parameter_list or []:
- params_object = shell_safe_json_parse(params)
- if params_object:
- params_object = params_object.get('parameters', params_object)
- if parameters is None:
- parameters = params_object
- else:
- parameters.update(params_object)
- return parameters
-
-
def _get_missing_parameters(parameters, template, prompt_fn):
missing = _find_missing_parameters(parameters, template)
if missing:
@@ -331,19 +318,13 @@ def _urlretrieve(url):
def _deploy_arm_template_core(resource_group_name, # pylint: disable=too-many-arguments
template_file=None, template_uri=None, deployment_name=None,
- parameter_list=None, mode='incremental', validate_only=False,
+ parameters=None, mode='incremental', validate_only=False,
no_wait=False):
DeploymentProperties, TemplateLink = get_sdk(ResourceType.MGMT_RESOURCE_RESOURCES,
'DeploymentProperties',
'TemplateLink',
mod='models')
-
- if bool(template_uri) == bool(template_file):
- raise CLIError('please provide either template file path or uri, but not both')
-
- parameters = _merge_parameters(parameter_list)
- if parameters is None:
- parameters = {}
+ parameters = parameters or {}
template = None
template_link = None
template_obj = None
diff --git a/src/command_modules/azure-cli-storage/HISTORY.rst b/src/command_modules/azure-cli-storage/HISTORY.rst
index 10bcec411..89d769875 100644
--- a/src/command_modules/azure-cli-storage/HISTORY.rst
+++ b/src/command_modules/azure-cli-storage/HISTORY.rst
@@ -3,6 +3,11 @@
Release History
===============
+unreleased
++++++++++++++++++
+
+* Fix #3592: convert generator to a list in download batch command dry run mode
+
2.0.7 (2017-05-30)
++++++++++++++++++
diff --git a/src/command_modules/azure-cli-storage/azure/cli/command_modules/storage/blob.py b/src/command_modules/azure-cli-storage/azure/cli/command_modules/storage/blob.py
index 5f09afc71..70c62ae87 100644
--- a/src/command_modules/azure-cli-storage/azure/cli/command_modules/storage/blob.py
+++ b/src/command_modules/azure-cli-storage/azure/cli/command_modules/storage/blob.py
@@ -109,7 +109,7 @@ def storage_blob_download_batch(client, source, destination, source_container_na
The pattern is used for files globbing. The supported patterns are '*', '?', '[seq]',
and '[!seq]'.
"""
- source_blobs = collect_blobs(client, source_container_name, pattern)
+ source_blobs = list(collect_blobs(client, source_container_name, pattern))
if dryrun:
logger = get_az_logger(__name__)
| Public IP creation with --version parameter
### Description
When I run the following command, it should create an IPv4 IP address, but as output I get the versions of each Azure CLI module instead.
**Command**
network public-ip create --name linuxvm --resource-group clivm --allocation-method Dynamic --location southeastasia --version "IPv4" --verbose
**Output**
azure-cli (2.0.6)
acr (2.0.4)
acs (2.0.6)
appservice (0.1.6)
batch (2.0.4)
cdn (0.0.2)
cloud (2.0.2)
cognitiveservices (0.1.2)
command-modules-nspkg (2.0.0)
component (2.0.4)
configure (2.0.6)
core (2.0.6)
cosmosdb (0.1.6)
dla (0.0.6)
dls (0.0.6)
feedback (2.0.2)
find (0.2.2)
interactive (0.3.1)
iot (0.1.5)
keyvault (2.0.4)
lab (0.0.4)
monitor (0.0.4)
network (2.0.6)
nspkg (3.0.0)
profile (2.0.4)
rdbms (0.0.1)
redis (0.2.3)
resource (2.0.6)
role (2.0.4)
sf (1.0.1)
sql (2.0.3)
storage (2.0.6)
vm (2.0.6)
Python (Windows) 3.6.1 (v3.6.1:69c0db5, Mar 21 2017, 17:54:52) [MSC v.1900 32 bit (Intel)]
Python location 'C:\Program Files (x86)\Microsoft SDKs\Azure\CLI2\python.exe'
What I did was remove the version parameter and run the command again, and it works as I expected.
**Command**
az network public-ip create --name linuxvm-ip --resource-group clivm --location southeastasia --verbose
**Output**
```
{
"publicIp": {
"dnsSettings": null,
"etag": "W/\"e102dba5-471b-49c9-a6c6-55db94c410a8\"",
"id": "/subscriptions/<SubscriptionID>/resourceGroups/clivm/providers/Microsoft.Network/publicIPAddresses/linuxvm-ip",
"idleTimeoutInMinutes": 4,
"ipAddress": null,
"ipConfiguration": null,
"location": "southeastasia",
"name": "linuxvm-ip",
"provisioningState": "Succeeded",
"publicIpAddressVersion": "IPv4",
"publicIpAllocationMethod": "Dynamic",
"resourceGroup": "clivm",
"resourceGuid": "6c3bdf50-acbd-4395-8b81-20f73215f85c",
"tags": null,
"type": "Microsoft.Network/publicIPAddresses"
}
}
```
---
### Environment summary
**Install Method:** How did you install the CLI? (e.g. pip, interactive script, apt-get, Docker, MSI, nightly)
Answer here: MSI
**CLI Version:** What version of the CLI and modules are installed? (Use `az --version`)
Answer here: azure-cli (2.0.6)
**OS Version:** What OS and version are you using?
Answer here: Windows 10 Creators Update
**Shell Type:** What shell are you using? (e.g. bash, cmd.exe, Bash on Windows)
Answer here: CMD.exe
| Azure/azure-cli | diff --git a/src/azure-cli-testsdk/azure/cli/testsdk/__init__.py b/src/azure-cli-testsdk/azure/cli/testsdk/__init__.py
index bc8662e4b..ba11839ae 100644
--- a/src/azure-cli-testsdk/azure/cli/testsdk/__init__.py
+++ b/src/azure-cli-testsdk/azure/cli/testsdk/__init__.py
@@ -10,11 +10,11 @@ from .exceptions import CliTestError
from .checkers import (JMESPathCheck, JMESPathCheckExists, JMESPathCheckGreaterThan, NoneCheck,
StringCheck, StringContainCheck)
from .decorators import live_only, record_only, api_version_constraint
-from .utilities import get_sha1_hash, get_active_api_profile
+from .utilities import get_sha1_hash, get_active_api_profile, create_random_name
__all__ = ['ScenarioTest', 'LiveTest', 'ResourceGroupPreparer', 'StorageAccountPreparer',
'RoleBasedServicePrincipalPreparer', 'CliTestError', 'JMESPathCheck', 'JMESPathCheckExists', 'NoneCheck',
'live_only', 'record_only', 'StringCheck', 'StringContainCheck', 'get_sha1_hash', 'KeyVaultPreparer',
- 'JMESPathCheckGreaterThan', 'api_version_constraint', 'get_active_api_profile']
+ 'JMESPathCheckGreaterThan', 'api_version_constraint', 'get_active_api_profile', 'create_random_name']
__version__ = '0.1.0+dev'
diff --git a/src/command_modules/azure-cli-resource/tests/crossrg_deploy_parameters.json b/src/command_modules/azure-cli-resource/tests/crossrg_deploy_parameters.json
deleted file mode 100644
index 326d026b5..000000000
--- a/src/command_modules/azure-cli-resource/tests/crossrg_deploy_parameters.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "CrossRg": {
- "value": "crossrg5"
- }
-}
\ No newline at end of file
diff --git a/src/command_modules/azure-cli-resource/tests/recordings/latest/test_crossrg_deployment.yaml b/src/command_modules/azure-cli-resource/tests/recordings/latest/test_crossrg_deployment.yaml
new file mode 100644
index 000000000..a7b30f4f1
--- /dev/null
+++ b/src/command_modules/azure-cli-resource/tests/recordings/latest/test_crossrg_deployment.yaml
@@ -0,0 +1,358 @@
+interactions:
+- request:
+ body: '{"location": "westus", "tags": {"use": "az-test"}}'
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ CommandName: [group create]
+ Connection: [keep-alive]
+ Content-Length: ['50']
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
+ msrest_azure/0.4.7 resourcemanagementclient/1.1.0rc1 Azure-SDK-For-Python
+ AZURECLI/2.0.7+dev]
+ accept-language: [en-US]
+ method: PUT
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_cross_rg_alt000001?api-version=2017-05-10
+ response:
+ body: {string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_cross_rg_alt000001","name":"cli_test_cross_rg_alt000001","location":"westus","tags":{"use":"az-test"},"properties":{"provisioningState":"Succeeded"}}'}
+ headers:
+ cache-control: [no-cache]
+ content-length: ['328']
+ content-type: [application/json; charset=utf-8]
+ date: ['Thu, 08 Jun 2017 22:26:13 GMT']
+ expires: ['-1']
+ pragma: [no-cache]
+ strict-transport-security: [max-age=31536000; includeSubDomains]
+ x-ms-ratelimit-remaining-subscription-writes: ['1199']
+ status: {code: 201, message: Created}
+- request:
+ body: '{"location": "westus", "tags": {"use": "az-test"}}'
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ CommandName: [group create]
+ Connection: [keep-alive]
+ Content-Length: ['50']
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
+ msrest_azure/0.4.7 resourcemanagementclient/1.1.0rc1 Azure-SDK-For-Python
+ AZURECLI/2.0.7+dev]
+ accept-language: [en-US]
+ method: PUT
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_cross_rg_deploy000002?api-version=2017-05-10
+ response:
+ body: {string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_cross_rg_deploy000002","name":"cli_test_cross_rg_deploy000002","location":"westus","tags":{"use":"az-test"},"properties":{"provisioningState":"Succeeded"}}'}
+ headers:
+ cache-control: [no-cache]
+ content-length: ['328']
+ content-type: [application/json; charset=utf-8]
+ date: ['Thu, 08 Jun 2017 22:26:13 GMT']
+ expires: ['-1']
+ pragma: [no-cache]
+ strict-transport-security: [max-age=31536000; includeSubDomains]
+ x-ms-ratelimit-remaining-subscription-writes: ['1199']
+ status: {code: 201, message: Created}
+- request:
+ body: 'b''{"properties": {"template": {"parameters": {"StorageAccountName1": {"defaultValue":
+ "test1ddosdatest72", "type": "string"}, "CrossRg": {"defaultValue": "crossrg5",
+ "type": "string"}, "StorageAccountName2": {"defaultValue": "test1ddfosatdest73",
+ "type": "string"}}, "resources": [{"type": "Microsoft.Resources/deployments",
+ "apiVersion": "2017-05-10", "resourceGroup": "[parameters(\''CrossRg\'')]",
+ "properties": {"template": {"parameters": {}, "variables": {}, "resources":
+ [{"location": "West US", "apiVersion": "2015-06-15", "type": "Microsoft.Storage/storageAccounts",
+ "properties": {"accountType": "Standard_LRS"}, "name": "[parameters(\''StorageAccountName2\'')]"}],
+ "contentVersion": "1.0.0.0", "outputs": {"result": {"type": "string", "value":
+ "Hello World"}}, "$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#"},
+ "parameters": {}, "mode": "Incremental"}, "name": "nestedTemplate"}, {"location":
+ "West US", "apiVersion": "2015-06-15", "type": "Microsoft.Storage/storageAccounts",
+ "properties": {"accountType": "Standard_LRS"}, "name": "[parameters(\''StorageAccountName1\'')]"}],
+ "contentVersion": "1.0.0.0", "$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#",
+ "variables": {}}, "parameters": {"CrossRG": {"value": "cli_test_cross_rg_alt000001"},
+ "StorageAccountName1": {"value": "crossrgp5wppipax4z7o6e6w"}, "StorageAccountName2":
+ {"value": "crossrgzvmoqv4i6g5qorlsx"}}, "mode": "Incremental"}}'''
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ CommandName: [group deployment validate]
+ Connection: [keep-alive]
+ Content-Length: ['1523']
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
+ msrest_azure/0.4.7 resourcemanagementclient/1.1.0rc1 Azure-SDK-For-Python
+ AZURECLI/2.0.7+dev]
+ accept-language: [en-US]
+ method: POST
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_cross_rg_deploy000002/providers/Microsoft.Resources/deployments/mock-deployment/validate?api-version=2017-05-10
+ response:
+ body: {string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_cross_rg_deploy000002/providers/Microsoft.Resources/deployments/deployment_dry_run","name":"deployment_dry_run","properties":{"templateHash":"6460444355167324412","parameters":{"storageAccountName1":{"type":"String","value":"crossrgp5wppipax4z7o6e6w"},"crossRg":{"type":"String","value":"cli_test_cross_rg_alt000001"},"storageAccountName2":{"type":"String","value":"crossrgzvmoqv4i6g5qorlsx"}},"mode":"Incremental","provisioningState":"Succeeded","timestamp":"2017-06-08T22:26:14.8490106Z","duration":"PT0S","correlationId":"7b44a22d-229b-4ee2-9d82-d20752edfcc9","providers":[{"namespace":"Microsoft.Resources","resourceTypes":[{"resourceType":"deployments","locations":[null]}]},{"namespace":"Microsoft.Storage","resourceTypes":[{"resourceType":"storageAccounts","locations":["westus"]}]}],"dependencies":[],"validatedResources":[{"apiVersion":"2015-06-15","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_cross_rg_alt000001/providers/Microsoft.Storage/storageAccounts/crossrgzvmoqv4i6g5qorlsx","name":"crossrgzvmoqv4i6g5qorlsx","type":"Microsoft.Storage/storageAccounts","location":"West
+ US","properties":{"accountType":"Standard_LRS"}},{"apiVersion":"2015-06-15","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_cross_rg_deploy000002/providers/Microsoft.Storage/storageAccounts/crossrgp5wppipax4z7o6e6w","name":"crossrgp5wppipax4z7o6e6w","type":"Microsoft.Storage/storageAccounts","location":"West
+ US","properties":{"accountType":"Standard_LRS"}}]}}'}
+ headers:
+ cache-control: [no-cache]
+ content-length: ['1787']
+ content-type: [application/json; charset=utf-8]
+ date: ['Thu, 08 Jun 2017 22:26:15 GMT']
+ expires: ['-1']
+ pragma: [no-cache]
+ strict-transport-security: [max-age=31536000; includeSubDomains]
+ transfer-encoding: [chunked]
+ vary: [Accept-Encoding]
+ x-ms-ratelimit-remaining-subscription-writes: ['1196']
+ status: {code: 200, message: OK}
+- request:
+ body: 'b''{"properties": {"template": {"parameters": {"StorageAccountName1": {"defaultValue":
+ "test1ddosdatest72", "type": "string"}, "CrossRg": {"defaultValue": "crossrg5",
+ "type": "string"}, "StorageAccountName2": {"defaultValue": "test1ddfosatdest73",
+ "type": "string"}}, "resources": [{"type": "Microsoft.Resources/deployments",
+ "apiVersion": "2017-05-10", "resourceGroup": "[parameters(\''CrossRg\'')]",
+ "properties": {"template": {"parameters": {}, "variables": {}, "resources":
+ [{"location": "West US", "apiVersion": "2015-06-15", "type": "Microsoft.Storage/storageAccounts",
+ "properties": {"accountType": "Standard_LRS"}, "name": "[parameters(\''StorageAccountName2\'')]"}],
+ "contentVersion": "1.0.0.0", "outputs": {"result": {"type": "string", "value":
+ "Hello World"}}, "$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#"},
+ "parameters": {}, "mode": "Incremental"}, "name": "nestedTemplate"}, {"location":
+ "West US", "apiVersion": "2015-06-15", "type": "Microsoft.Storage/storageAccounts",
+ "properties": {"accountType": "Standard_LRS"}, "name": "[parameters(\''StorageAccountName1\'')]"}],
+ "contentVersion": "1.0.0.0", "$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#",
+ "variables": {}}, "parameters": {"CrossRG": {"value": "cli_test_cross_rg_alt000001"}},
+ "mode": "Incremental"}}'''
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ CommandName: [group deployment create]
+ Connection: [keep-alive]
+ Content-Length: ['1399']
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
+ msrest_azure/0.4.7 resourcemanagementclient/1.1.0rc1 Azure-SDK-For-Python
+ AZURECLI/2.0.7+dev]
+ accept-language: [en-US]
+ method: PUT
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_cross_rg_deploy000002/providers/Microsoft.Resources/deployments/mock-deployment?api-version=2017-05-10
+ response:
+ body: {string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_cross_rg_deploy000002/providers/Microsoft.Resources/deployments/azure-cli-crossrgdeployment","name":"azure-cli-crossrgdeployment","properties":{"templateHash":"6460444355167324412","parameters":{"storageAccountName1":{"type":"String","value":"test1ddosdatest72"},"crossRg":{"type":"String","value":"cli_test_cross_rg_alt000001"},"storageAccountName2":{"type":"String","value":"test1ddfosatdest73"}},"mode":"Incremental","provisioningState":"Accepted","timestamp":"2017-06-08T22:26:16.4590461Z","duration":"PT0.1156655S","correlationId":"e34cadc6-f5fe-479f-8a01-44e81bb41b24","providers":[{"namespace":"Microsoft.Resources","resourceTypes":[{"resourceType":"deployments","locations":[null]}]},{"namespace":"Microsoft.Storage","resourceTypes":[{"resourceType":"storageAccounts","locations":["westus"]}]}],"dependencies":[]}}'}
+ headers:
+ azure-asyncoperation: ['https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_cross_rg_deploy000002/providers/Microsoft.Resources/deployments/azure-cli-crossrgdeployment/operationStatuses/08587046461091342319?api-version=2017-05-10']
+ cache-control: [no-cache]
+ content-length: ['998']
+ content-type: [application/json; charset=utf-8]
+ date: ['Thu, 08 Jun 2017 22:26:15 GMT']
+ expires: ['-1']
+ pragma: [no-cache]
+ strict-transport-security: [max-age=31536000; includeSubDomains]
+ x-ms-ratelimit-remaining-subscription-writes: ['1199']
+ status: {code: 201, message: Created}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ CommandName: [group deployment create]
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
+ msrest_azure/0.4.7 resourcemanagementclient/1.1.0rc1 Azure-SDK-For-Python
+ AZURECLI/2.0.7+dev]
+ accept-language: [en-US]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_cross_rg_deploy000002/providers/Microsoft.Resources/deployments/mock-deployment/operationStatuses/08587046461091342319?api-version=2017-05-10
+ response:
+ body: {string: '{"status":"Running"}'}
+ headers:
+ cache-control: [no-cache]
+ content-length: ['20']
+ content-type: [application/json; charset=utf-8]
+ date: ['Thu, 08 Jun 2017 22:26:46 GMT']
+ expires: ['-1']
+ pragma: [no-cache]
+ strict-transport-security: [max-age=31536000; includeSubDomains]
+ vary: [Accept-Encoding]
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ CommandName: [group deployment create]
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
+ msrest_azure/0.4.7 resourcemanagementclient/1.1.0rc1 Azure-SDK-For-Python
+ AZURECLI/2.0.7+dev]
+ accept-language: [en-US]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_cross_rg_deploy000002/providers/Microsoft.Resources/deployments/mock-deployment/operationStatuses/08587046461091342319?api-version=2017-05-10
+ response:
+ body: {string: '{"status":"Succeeded"}'}
+ headers:
+ cache-control: [no-cache]
+ content-length: ['22']
+ content-type: [application/json; charset=utf-8]
+ date: ['Thu, 08 Jun 2017 22:27:17 GMT']
+ expires: ['-1']
+ pragma: [no-cache]
+ strict-transport-security: [max-age=31536000; includeSubDomains]
+ vary: [Accept-Encoding]
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ CommandName: [group deployment create]
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
+ msrest_azure/0.4.7 resourcemanagementclient/1.1.0rc1 Azure-SDK-For-Python
+ AZURECLI/2.0.7+dev]
+ accept-language: [en-US]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_cross_rg_deploy000002/providers/Microsoft.Resources/deployments/mock-deployment?api-version=2017-05-10
+ response:
+ body: {string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_cross_rg_deploy000002/providers/Microsoft.Resources/deployments/azure-cli-crossrgdeployment","name":"azure-cli-crossrgdeployment","properties":{"templateHash":"6460444355167324412","parameters":{"storageAccountName1":{"type":"String","value":"test1ddosdatest72"},"crossRg":{"type":"String","value":"cli_test_cross_rg_alt000001"},"storageAccountName2":{"type":"String","value":"test1ddfosatdest73"}},"mode":"Incremental","provisioningState":"Succeeded","timestamp":"2017-06-08T22:26:47.1155209Z","duration":"PT30.7721403S","correlationId":"e34cadc6-f5fe-479f-8a01-44e81bb41b24","providers":[{"namespace":"Microsoft.Resources","resourceTypes":[{"resourceType":"deployments","locations":[null]}]},{"namespace":"Microsoft.Storage","resourceTypes":[{"resourceType":"storageAccounts","locations":["westus"]}]}],"dependencies":[],"outputResources":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_cross_rg_alt000001/providers/Microsoft.Storage/storageAccounts/test1ddfosatdest73"},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_cross_rg_deploy000002/providers/Microsoft.Storage/storageAccounts/test1ddosdatest72"}]}}'}
+ headers:
+ cache-control: [no-cache]
+ content-length: ['1449']
+ content-type: [application/json; charset=utf-8]
+ date: ['Thu, 08 Jun 2017 22:27:16 GMT']
+ expires: ['-1']
+ pragma: [no-cache]
+ strict-transport-security: [max-age=31536000; includeSubDomains]
+ vary: [Accept-Encoding]
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ CommandName: [group deployment list]
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
+ msrest_azure/0.4.7 resourcemanagementclient/1.1.0rc1 Azure-SDK-For-Python
+ AZURECLI/2.0.7+dev]
+ accept-language: [en-US]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_cross_rg_deploy000002/providers/Microsoft.Resources/deployments/?api-version=2017-05-10
+ response:
+ body: {string: '{"value":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_cross_rg_deploy000002/providers/Microsoft.Resources/deployments/azure-cli-crossrgdeployment","name":"azure-cli-crossrgdeployment","properties":{"templateHash":"6460444355167324412","parameters":{"storageAccountName1":{"type":"String","value":"test1ddosdatest72"},"crossRg":{"type":"String","value":"cli_test_cross_rg_alt000001"},"storageAccountName2":{"type":"String","value":"test1ddfosatdest73"}},"mode":"Incremental","provisioningState":"Succeeded","timestamp":"2017-06-08T22:26:47.1155209Z","duration":"PT30.7721403S","correlationId":"e34cadc6-f5fe-479f-8a01-44e81bb41b24","providers":[{"namespace":"Microsoft.Resources","resourceTypes":[{"resourceType":"deployments","locations":[null]}]},{"namespace":"Microsoft.Storage","resourceTypes":[{"resourceType":"storageAccounts","locations":["westus"]}]}],"dependencies":[],"outputResources":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_cross_rg_alt000001/providers/Microsoft.Storage/storageAccounts/test1ddfosatdest73"},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_cross_rg_deploy000002/providers/Microsoft.Storage/storageAccounts/test1ddosdatest72"}]}}]}'}
+ headers:
+ cache-control: [no-cache]
+ content-length: ['1461']
+ content-type: [application/json; charset=utf-8]
+ date: ['Thu, 08 Jun 2017 22:27:17 GMT']
+ expires: ['-1']
+ pragma: [no-cache]
+ strict-transport-security: [max-age=31536000; includeSubDomains]
+ vary: [Accept-Encoding]
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ CommandName: [group deployment show]
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
+ msrest_azure/0.4.7 resourcemanagementclient/1.1.0rc1 Azure-SDK-For-Python
+ AZURECLI/2.0.7+dev]
+ accept-language: [en-US]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_cross_rg_deploy000002/providers/Microsoft.Resources/deployments/mock-deployment?api-version=2017-05-10
+ response:
+ body: {string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_cross_rg_deploy000002/providers/Microsoft.Resources/deployments/azure-cli-crossrgdeployment","name":"azure-cli-crossrgdeployment","properties":{"templateHash":"6460444355167324412","parameters":{"storageAccountName1":{"type":"String","value":"test1ddosdatest72"},"crossRg":{"type":"String","value":"cli_test_cross_rg_alt000001"},"storageAccountName2":{"type":"String","value":"test1ddfosatdest73"}},"mode":"Incremental","provisioningState":"Succeeded","timestamp":"2017-06-08T22:26:47.1155209Z","duration":"PT30.7721403S","correlationId":"e34cadc6-f5fe-479f-8a01-44e81bb41b24","providers":[{"namespace":"Microsoft.Resources","resourceTypes":[{"resourceType":"deployments","locations":[null]}]},{"namespace":"Microsoft.Storage","resourceTypes":[{"resourceType":"storageAccounts","locations":["westus"]}]}],"dependencies":[],"outputResources":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_cross_rg_alt000001/providers/Microsoft.Storage/storageAccounts/test1ddfosatdest73"},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_cross_rg_deploy000002/providers/Microsoft.Storage/storageAccounts/test1ddosdatest72"}]}}'}
+ headers:
+ cache-control: [no-cache]
+ content-length: ['1449']
+ content-type: [application/json; charset=utf-8]
+ date: ['Thu, 08 Jun 2017 22:27:17 GMT']
+ expires: ['-1']
+ pragma: [no-cache]
+ strict-transport-security: [max-age=31536000; includeSubDomains]
+ vary: [Accept-Encoding]
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ CommandName: [group deployment operation list]
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
+ msrest_azure/0.4.7 resourcemanagementclient/1.1.0rc1 Azure-SDK-For-Python
+ AZURECLI/2.0.7+dev]
+ accept-language: [en-US]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_cross_rg_deploy000002/deployments/mock-deployment/operations?api-version=2017-05-10
+ response:
+ body: {string: '{"value":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_cross_rg_deploy000002/providers/Microsoft.Resources/deployments/azure-cli-crossrgdeployment/operations/6B3E905A350A7C59","operationId":"6B3E905A350A7C59","properties":{"provisioningOperation":"Create","provisioningState":"Succeeded","timestamp":"2017-06-08T22:26:37.0270964Z","duration":"PT20.0283016S","trackingId":"172cb8bd-7886-49e3-877a-e5ef6d9a8c4a","serviceRequestId":"e34cadc6-f5fe-479f-8a01-44e81bb41b24","statusCode":"OK","targetResource":{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_cross_rg_deploy000002/providers/Microsoft.Storage/storageAccounts/test1ddosdatest72","resourceType":"Microsoft.Storage/storageAccounts","resourceName":"test1ddosdatest72"}}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_cross_rg_deploy000002/providers/Microsoft.Resources/deployments/azure-cli-crossrgdeployment/operations/CF64EB70E9F337E6","operationId":"CF64EB70E9F337E6","properties":{"provisioningOperation":"Create","provisioningState":"Succeeded","timestamp":"2017-06-08T22:26:46.9225479Z","duration":"PT29.9159043S","trackingId":"87c6f252-6875-4502-a74a-fda6f82dd85b","serviceRequestId":"093012e3-2474-4196-8031-cfae3b4b2220","statusCode":"OK","targetResource":{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_cross_rg_alt000001/providers/Microsoft.Resources/deployments/nestedTemplate","resourceType":"Microsoft.Resources/deployments","resourceName":"nestedTemplate"}}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_cross_rg_deploy000002/providers/Microsoft.Resources/deployments/azure-cli-crossrgdeployment/operations/08587046461091342319","operationId":"08587046461091342319","properties":{"provisioningOperation":"EvaluateDeploymentOutput","provisioningState":"Succeeded","timestamp":"2017-06-08T22:26:47.0894745Z","duration":"PT0.1183607S
","trackingId":"f2549d7d-9ae3-424a-b604-6774cdd45af2","statusCode":"OK","statusMessage":null}}]}'}
+ headers:
+ cache-control: [no-cache]
+ content-length: ['2306']
+ content-type: [application/json; charset=utf-8]
+ date: ['Thu, 08 Jun 2017 22:27:18 GMT']
+ expires: ['-1']
+ pragma: [no-cache]
+ strict-transport-security: [max-age=31536000; includeSubDomains]
+ vary: [Accept-Encoding]
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ CommandName: [group delete]
+ Connection: [keep-alive]
+ Content-Length: ['0']
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
+ msrest_azure/0.4.7 resourcemanagementclient/1.1.0rc1 Azure-SDK-For-Python
+ AZURECLI/2.0.7+dev]
+ accept-language: [en-US]
+ method: DELETE
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_cross_rg_deploy000002?api-version=2017-05-10
+ response:
+ body: {string: ''}
+ headers:
+ cache-control: [no-cache]
+ content-length: ['0']
+ date: ['Thu, 08 Jun 2017 22:27:18 GMT']
+ expires: ['-1']
+ location: ['https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/operationresults/eyJqb2JJZCI6IlJFU09VUkNFR1JPVVBERUxFVElPTkpPQi1DTEk6NUZURVNUOjVGQ1JPU1M6NUZSRzo1RkRFUExPWUhFSFJRVFlaWDZZWlk2Nnw5MDFFNjIyODI1NUZDNkFGLVdFU1RVUyIsImpvYkxvY2F0aW9uIjoid2VzdHVzIn0?api-version=2017-05-10']
+ pragma: [no-cache]
+ retry-after: ['15']
+ strict-transport-security: [max-age=31536000; includeSubDomains]
+ x-ms-ratelimit-remaining-subscription-writes: ['1199']
+ status: {code: 202, message: Accepted}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ CommandName: [group delete]
+ Connection: [keep-alive]
+ Content-Length: ['0']
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
+ msrest_azure/0.4.7 resourcemanagementclient/1.1.0rc1 Azure-SDK-For-Python
+ AZURECLI/2.0.7+dev]
+ accept-language: [en-US]
+ method: DELETE
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_cross_rg_alt000001?api-version=2017-05-10
+ response:
+ body: {string: ''}
+ headers:
+ cache-control: [no-cache]
+ content-length: ['0']
+ date: ['Thu, 08 Jun 2017 22:27:20 GMT']
+ expires: ['-1']
+ location: ['https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/operationresults/eyJqb2JJZCI6IlJFU09VUkNFR1JPVVBERUxFVElPTkpPQi1DTEk6NUZURVNUOjVGQ1JPU1M6NUZSRzo1RkFMVFNWV1YzWTZHRVZLT1JBUUVDS3xEOTNDMjc4QTZCOENERjFDLVdFU1RVUyIsImpvYkxvY2F0aW9uIjoid2VzdHVzIn0?api-version=2017-05-10']
+ pragma: [no-cache]
+ retry-after: ['15']
+ strict-transport-security: [max-age=31536000; includeSubDomains]
+ x-ms-ratelimit-remaining-subscription-writes: ['1196']
+ status: {code: 202, message: Accepted}
+version: 1
diff --git a/src/command_modules/azure-cli-resource/tests/recordings/latest/test_crossrgdeployment.yaml b/src/command_modules/azure-cli-resource/tests/recordings/latest/test_crossrgdeployment.yaml
deleted file mode 100644
index 5d855dc2b..000000000
--- a/src/command_modules/azure-cli-resource/tests/recordings/latest/test_crossrgdeployment.yaml
+++ /dev/null
@@ -1,290 +0,0 @@
-interactions:
-- request:
- body: '{"location": "westus", "tags": {"use": "az-test"}}'
- headers:
- Accept: [application/json]
- Accept-Encoding: ['gzip, deflate']
- CommandName: [group create]
- Connection: [keep-alive]
- Content-Length: ['50']
- Content-Type: [application/json; charset=utf-8]
- User-Agent: [python/3.4.3 (Windows-8-6.2.9200) requests/2.9.1 msrest/0.4.7 msrest_azure/0.4.7
- resourcemanagementclient/1.1.0rc1 Azure-SDK-For-Python AZURECLI/2.0.5+dev]
- accept-language: [en-US]
- method: PUT
- uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/clitest.rg000001?api-version=2017-05-10
- response:
- body: {string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001","name":"clitest.rg000001","location":"westus","tags":{"use":"az-test"},"properties":{"provisioningState":"Succeeded"}}'}
- headers:
- cache-control: [no-cache]
- content-length: ['328']
- content-type: [application/json; charset=utf-8]
- date: ['Tue, 09 May 2017 00:08:59 GMT']
- expires: ['-1']
- pragma: [no-cache]
- strict-transport-security: [max-age=31536000; includeSubDomains]
- x-ms-ratelimit-remaining-subscription-writes: ['1199']
- status: {code: 201, message: Created}
-- request:
- body: '{"properties": {"mode": "Incremental", "parameters": {"CrossRg": {"value":
- "crossrg5"}}, "template": {"resources": [{"resourceGroup": "[parameters(''CrossRg'')]",
- "name": "nestedTemplate", "apiVersion": "2017-05-10", "type": "Microsoft.Resources/deployments",
- "properties": {"mode": "Incremental", "parameters": {}, "template": {"contentVersion":
- "1.0.0.0", "resources": [{"name": "[parameters(''StorageAccountName2'')]", "location":
- "West US", "apiVersion": "2015-06-15", "type": "Microsoft.Storage/storageAccounts",
- "properties": {"accountType": "Standard_LRS"}}], "parameters": {}, "variables":
- {}, "$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#",
- "outputs": {"result": {"value": "Hello World", "type": "string"}}}}}, {"name":
- "[parameters(''StorageAccountName1'')]", "location": "West US", "apiVersion":
- "2015-06-15", "type": "Microsoft.Storage/storageAccounts", "properties": {"accountType":
- "Standard_LRS"}}], "parameters": {"StorageAccountName2": {"defaultValue": "test1ddfosatdest73",
- "type": "string"}, "CrossRg": {"defaultValue": "crossrg5", "type": "string"},
- "StorageAccountName1": {"defaultValue": "test1ddosdatest72", "type": "string"}},
- "contentVersion": "1.0.0.0", "variables": {}, "$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#"}}}'
- headers:
- Accept: [application/json]
- Accept-Encoding: ['gzip, deflate']
- CommandName: [group deployment validate]
- Connection: [keep-alive]
- Content-Length: ['1332']
- Content-Type: [application/json; charset=utf-8]
- User-Agent: [python/3.4.3 (Windows-8-6.2.9200) requests/2.9.1 msrest/0.4.7 msrest_azure/0.4.7
- resourcemanagementclient/1.1.0rc1 Azure-SDK-For-Python AZURECLI/2.0.5+dev]
- accept-language: [en-US]
- method: POST
- uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/clitest.rg000001/providers/Microsoft.Resources/deployments/mock-deployment/validate?api-version=2017-05-10
- response:
- body: {string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.Resources/deployments/deployment_dry_run","name":"deployment_dry_run","properties":{"templateHash":"15881258606218895679","parameters":{"storageAccountName2":{"type":"String","value":"test1ddfosatdest73"},"crossRg":{"type":"String","value":"crossrg5"},"storageAccountName1":{"type":"String","value":"test1ddosdatest72"}},"mode":"Incremental","provisioningState":"Succeeded","timestamp":"2017-05-09T00:09:01.5553623Z","duration":"PT0S","correlationId":"55b7baef-a55c-40d9-9194-2478bb6e9842","providers":[{"namespace":"Microsoft.Resources","resourceTypes":[{"resourceType":"deployments","locations":[null]}]},{"namespace":"Microsoft.Storage","resourceTypes":[{"resourceType":"storageAccounts","locations":["westus"]}]}],"dependencies":[],"validatedResources":[{"apiVersion":"2015-06-15","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/crossrg5/providers/Microsoft.Storage/storageAccounts/test1ddfosatdest73","name":"test1ddfosatdest73","type":"Microsoft.Storage/storageAccounts","location":"West
- US","properties":{"accountType":"Standard_LRS"}},{"apiVersion":"2015-06-15","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.Storage/storageAccounts/test1ddosdatest72","name":"test1ddosdatest72","type":"Microsoft.Storage/storageAccounts","location":"West
- US","properties":{"accountType":"Standard_LRS"}}]}}'}
- headers:
- cache-control: [no-cache]
- content-length: ['1615']
- content-type: [application/json; charset=utf-8]
- date: ['Tue, 09 May 2017 00:09:02 GMT']
- expires: ['-1']
- pragma: [no-cache]
- strict-transport-security: [max-age=31536000; includeSubDomains]
- transfer-encoding: [chunked]
- vary: [Accept-Encoding]
- x-ms-ratelimit-remaining-subscription-writes: ['1198']
- status: {code: 200, message: OK}
-- request:
- body: '{"properties": {"mode": "Incremental", "parameters": {"CrossRg": {"value":
- "crossrg5"}}, "template": {"resources": [{"resourceGroup": "[parameters(''CrossRg'')]",
- "name": "nestedTemplate", "apiVersion": "2017-05-10", "type": "Microsoft.Resources/deployments",
- "properties": {"mode": "Incremental", "parameters": {}, "template": {"contentVersion":
- "1.0.0.0", "resources": [{"name": "[parameters(''StorageAccountName2'')]", "location":
- "West US", "apiVersion": "2015-06-15", "type": "Microsoft.Storage/storageAccounts",
- "properties": {"accountType": "Standard_LRS"}}], "parameters": {}, "variables":
- {}, "$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#",
- "outputs": {"result": {"value": "Hello World", "type": "string"}}}}}, {"name":
- "[parameters(''StorageAccountName1'')]", "location": "West US", "apiVersion":
- "2015-06-15", "type": "Microsoft.Storage/storageAccounts", "properties": {"accountType":
- "Standard_LRS"}}], "parameters": {"StorageAccountName2": {"defaultValue": "test1ddfosatdest73",
- "type": "string"}, "CrossRg": {"defaultValue": "crossrg5", "type": "string"},
- "StorageAccountName1": {"defaultValue": "test1ddosdatest72", "type": "string"}},
- "contentVersion": "1.0.0.0", "variables": {}, "$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#"}}}'
- headers:
- Accept: [application/json]
- Accept-Encoding: ['gzip, deflate']
- CommandName: [group deployment create]
- Connection: [keep-alive]
- Content-Length: ['1332']
- Content-Type: [application/json; charset=utf-8]
- User-Agent: [python/3.4.3 (Windows-8-6.2.9200) requests/2.9.1 msrest/0.4.7 msrest_azure/0.4.7
- resourcemanagementclient/1.1.0rc1 Azure-SDK-For-Python AZURECLI/2.0.5+dev]
- accept-language: [en-US]
- method: PUT
- uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/clitest.rg000001/providers/Microsoft.Resources/deployments/mock-deployment?api-version=2017-05-10
- response:
- body: {string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.Resources/deployments/azure-cli-crossrgdeployment","name":"azure-cli-crossrgdeployment","properties":{"templateHash":"15881258606218895679","parameters":{"storageAccountName2":{"type":"String","value":"test1ddfosatdest73"},"crossRg":{"type":"String","value":"crossrg5"},"storageAccountName1":{"type":"String","value":"test1ddosdatest72"}},"mode":"Incremental","provisioningState":"Accepted","timestamp":"2017-05-09T00:09:03.6324823Z","duration":"PT0.1641726S","correlationId":"945b9424-c962-48d6-a889-95c77673a85b","providers":[{"namespace":"Microsoft.Resources","resourceTypes":[{"resourceType":"deployments","locations":[null]}]},{"namespace":"Microsoft.Storage","resourceTypes":[{"resourceType":"storageAccounts","locations":["westus"]}]}],"dependencies":[]}}'}
- headers:
- azure-asyncoperation: ['https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/clitest.rg000001/providers/Microsoft.Resources/deployments/azure-cli-crossrgdeployment/operationStatuses/08587073183420093176?api-version=2017-05-10']
- cache-control: [no-cache]
- content-length: ['932']
- content-type: [application/json; charset=utf-8]
- date: ['Tue, 09 May 2017 00:09:03 GMT']
- expires: ['-1']
- pragma: [no-cache]
- strict-transport-security: [max-age=31536000; includeSubDomains]
- x-ms-ratelimit-remaining-subscription-writes: ['1194']
- status: {code: 201, message: Created}
-- request:
- body: null
- headers:
- Accept: [application/json]
- Accept-Encoding: ['gzip, deflate']
- CommandName: [group deployment create]
- Connection: [keep-alive]
- Content-Type: [application/json; charset=utf-8]
- User-Agent: [python/3.4.3 (Windows-8-6.2.9200) requests/2.9.1 msrest/0.4.7 msrest_azure/0.4.7
- resourcemanagementclient/1.1.0rc1 Azure-SDK-For-Python AZURECLI/2.0.5+dev]
- accept-language: [en-US]
- method: GET
- uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/clitest.rg000001/providers/Microsoft.Resources/deployments/mock-deployment/operationStatuses/08587073183420093176?api-version=2017-05-10
- response:
- body: {string: '{"status":"Running"}'}
- headers:
- cache-control: [no-cache]
- content-length: ['20']
- content-type: [application/json; charset=utf-8]
- date: ['Tue, 09 May 2017 00:09:33 GMT']
- expires: ['-1']
- pragma: [no-cache]
- strict-transport-security: [max-age=31536000; includeSubDomains]
- vary: [Accept-Encoding]
- status: {code: 200, message: OK}
-- request:
- body: null
- headers:
- Accept: [application/json]
- Accept-Encoding: ['gzip, deflate']
- CommandName: [group deployment create]
- Connection: [keep-alive]
- Content-Type: [application/json; charset=utf-8]
- User-Agent: [python/3.4.3 (Windows-8-6.2.9200) requests/2.9.1 msrest/0.4.7 msrest_azure/0.4.7
- resourcemanagementclient/1.1.0rc1 Azure-SDK-For-Python AZURECLI/2.0.5+dev]
- accept-language: [en-US]
- method: GET
- uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/clitest.rg000001/providers/Microsoft.Resources/deployments/mock-deployment/operationStatuses/08587073183420093176?api-version=2017-05-10
- response:
- body: {string: '{"status":"Succeeded"}'}
- headers:
- cache-control: [no-cache]
- content-length: ['22']
- content-type: [application/json; charset=utf-8]
- date: ['Tue, 09 May 2017 00:10:03 GMT']
- expires: ['-1']
- pragma: [no-cache]
- strict-transport-security: [max-age=31536000; includeSubDomains]
- vary: [Accept-Encoding]
- status: {code: 200, message: OK}
-- request:
- body: null
- headers:
- Accept: [application/json]
- Accept-Encoding: ['gzip, deflate']
- CommandName: [group deployment create]
- Connection: [keep-alive]
- Content-Type: [application/json; charset=utf-8]
- User-Agent: [python/3.4.3 (Windows-8-6.2.9200) requests/2.9.1 msrest/0.4.7 msrest_azure/0.4.7
- resourcemanagementclient/1.1.0rc1 Azure-SDK-For-Python AZURECLI/2.0.5+dev]
- accept-language: [en-US]
- method: GET
- uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/clitest.rg000001/providers/Microsoft.Resources/deployments/mock-deployment?api-version=2017-05-10
- response:
- body: {string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.Resources/deployments/azure-cli-crossrgdeployment","name":"azure-cli-crossrgdeployment","properties":{"templateHash":"15881258606218895679","parameters":{"storageAccountName2":{"type":"String","value":"test1ddfosatdest73"},"crossRg":{"type":"String","value":"crossrg5"},"storageAccountName1":{"type":"String","value":"test1ddosdatest72"}},"mode":"Incremental","provisioningState":"Succeeded","timestamp":"2017-05-09T00:09:39.4989341Z","duration":"PT36.0306244S","correlationId":"945b9424-c962-48d6-a889-95c77673a85b","providers":[{"namespace":"Microsoft.Resources","resourceTypes":[{"resourceType":"deployments","locations":[null]}]},{"namespace":"Microsoft.Storage","resourceTypes":[{"resourceType":"storageAccounts","locations":["westus"]}]}],"dependencies":[],"outputResources":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.Storage/storageAccounts/test1ddosdatest72"},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/crossrg5/providers/Microsoft.Storage/storageAccounts/test1ddfosatdest73"}]}}'}
- headers:
- cache-control: [no-cache]
- content-length: ['1316']
- content-type: [application/json; charset=utf-8]
- date: ['Tue, 09 May 2017 00:10:03 GMT']
- expires: ['-1']
- pragma: [no-cache]
- strict-transport-security: [max-age=31536000; includeSubDomains]
- vary: [Accept-Encoding]
- status: {code: 200, message: OK}
-- request:
- body: null
- headers:
- Accept: [application/json]
- Accept-Encoding: ['gzip, deflate']
- CommandName: [group deployment list]
- Connection: [keep-alive]
- Content-Type: [application/json; charset=utf-8]
- User-Agent: [python/3.4.3 (Windows-8-6.2.9200) requests/2.9.1 msrest/0.4.7 msrest_azure/0.4.7
- resourcemanagementclient/1.1.0rc1 Azure-SDK-For-Python AZURECLI/2.0.5+dev]
- accept-language: [en-US]
- method: GET
- uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/clitest.rg000001/providers/Microsoft.Resources/deployments/?api-version=2017-05-10
- response:
- body: {string: '{"value":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.Resources/deployments/azure-cli-crossrgdeployment","name":"azure-cli-crossrgdeployment","properties":{"templateHash":"15881258606218895679","parameters":{"storageAccountName2":{"type":"String","value":"test1ddfosatdest73"},"crossRg":{"type":"String","value":"crossrg5"},"storageAccountName1":{"type":"String","value":"test1ddosdatest72"}},"mode":"Incremental","provisioningState":"Succeeded","timestamp":"2017-05-09T00:09:39.4989341Z","duration":"PT36.0306244S","correlationId":"945b9424-c962-48d6-a889-95c77673a85b","providers":[{"namespace":"Microsoft.Resources","resourceTypes":[{"resourceType":"deployments","locations":[null]}]},{"namespace":"Microsoft.Storage","resourceTypes":[{"resourceType":"storageAccounts","locations":["westus"]}]}],"dependencies":[],"outputResources":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.Storage/storageAccounts/test1ddosdatest72"},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/crossrg5/providers/Microsoft.Storage/storageAccounts/test1ddfosatdest73"}]}}]}'}
- headers:
- cache-control: [no-cache]
- content-length: ['1328']
- content-type: [application/json; charset=utf-8]
- date: ['Tue, 09 May 2017 00:10:05 GMT']
- expires: ['-1']
- pragma: [no-cache]
- strict-transport-security: [max-age=31536000; includeSubDomains]
- vary: [Accept-Encoding]
- status: {code: 200, message: OK}
-- request:
- body: null
- headers:
- Accept: [application/json]
- Accept-Encoding: ['gzip, deflate']
- CommandName: [group deployment show]
- Connection: [keep-alive]
- Content-Type: [application/json; charset=utf-8]
- User-Agent: [python/3.4.3 (Windows-8-6.2.9200) requests/2.9.1 msrest/0.4.7 msrest_azure/0.4.7
- resourcemanagementclient/1.1.0rc1 Azure-SDK-For-Python AZURECLI/2.0.5+dev]
- accept-language: [en-US]
- method: GET
- uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/clitest.rg000001/providers/Microsoft.Resources/deployments/mock-deployment?api-version=2017-05-10
- response:
- body: {string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.Resources/deployments/azure-cli-crossrgdeployment","name":"azure-cli-crossrgdeployment","properties":{"templateHash":"15881258606218895679","parameters":{"storageAccountName2":{"type":"String","value":"test1ddfosatdest73"},"crossRg":{"type":"String","value":"crossrg5"},"storageAccountName1":{"type":"String","value":"test1ddosdatest72"}},"mode":"Incremental","provisioningState":"Succeeded","timestamp":"2017-05-09T00:09:39.4989341Z","duration":"PT36.0306244S","correlationId":"945b9424-c962-48d6-a889-95c77673a85b","providers":[{"namespace":"Microsoft.Resources","resourceTypes":[{"resourceType":"deployments","locations":[null]}]},{"namespace":"Microsoft.Storage","resourceTypes":[{"resourceType":"storageAccounts","locations":["westus"]}]}],"dependencies":[],"outputResources":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.Storage/storageAccounts/test1ddosdatest72"},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/crossrg5/providers/Microsoft.Storage/storageAccounts/test1ddfosatdest73"}]}}'}
- headers:
- cache-control: [no-cache]
- content-length: ['1316']
- content-type: [application/json; charset=utf-8]
- date: ['Tue, 09 May 2017 00:10:05 GMT']
- expires: ['-1']
- pragma: [no-cache]
- strict-transport-security: [max-age=31536000; includeSubDomains]
- vary: [Accept-Encoding]
- status: {code: 200, message: OK}
-- request:
- body: null
- headers:
- Accept: [application/json]
- Accept-Encoding: ['gzip, deflate']
- CommandName: [group deployment operation list]
- Connection: [keep-alive]
- Content-Type: [application/json; charset=utf-8]
- User-Agent: [python/3.4.3 (Windows-8-6.2.9200) requests/2.9.1 msrest/0.4.7 msrest_azure/0.4.7
- resourcemanagementclient/1.1.0rc1 Azure-SDK-For-Python AZURECLI/2.0.5+dev]
- accept-language: [en-US]
- method: GET
- uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/clitest.rg000001/deployments/mock-deployment/operations?api-version=2017-05-10
- response:
- body: {string: '{"value":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.Resources/deployments/azure-cli-crossrgdeployment/operations/A342124B645FB82B","operationId":"A342124B645FB82B","properties":{"provisioningOperation":"Create","provisioningState":"Succeeded","timestamp":"2017-05-09T00:09:29.4790792Z","duration":"PT25.4957973S","trackingId":"8ed1b6ed-3209-4800-8f07-04593e24f558","serviceRequestId":"945b9424-c962-48d6-a889-95c77673a85b","statusCode":"OK","targetResource":{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.Storage/storageAccounts/test1ddosdatest72","resourceType":"Microsoft.Storage/storageAccounts","resourceName":"test1ddosdatest72"}}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.Resources/deployments/azure-cli-crossrgdeployment/operations/8B56071B2BB8A55C","operationId":"8B56071B2BB8A55C","properties":{"provisioningOperation":"Create","provisioningState":"Succeeded","timestamp":"2017-05-09T00:09:37.1004512Z","duration":"PT33.119443S","trackingId":"7bf79e18-9ef7-4cc3-bfae-688a63a76343","serviceRequestId":"685e1760-40fa-49f0-a170-0fbc4945bce3","statusCode":"OK","targetResource":{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/crossrg5/providers/Microsoft.Resources/deployments/nestedTemplate","resourceType":"Microsoft.Resources/deployments","resourceName":"nestedTemplate"}}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/clitest.rg000001/providers/Microsoft.Resources/deployments/azure-cli-crossrgdeployment/operations/08587073183420093176","operationId":"08587073183420093176","properties":{"provisioningOperation":"EvaluateDeploymentOutput","provisioningState":"Succeeded","timestamp":"2017-05-09T00:09:39.2608113Z","duration":"PT1.5100056S","trackingId":"b930c4d4-1174-4258-a7a0-fc01c721198e","statusCode":"OK","sta
tusMessage":null}}]}'}
- headers:
- cache-control: [no-cache]
- content-length: ['2238']
- content-type: [application/json; charset=utf-8]
- date: ['Tue, 09 May 2017 00:10:06 GMT']
- expires: ['-1']
- pragma: [no-cache]
- strict-transport-security: [max-age=31536000; includeSubDomains]
- vary: [Accept-Encoding]
- status: {code: 200, message: OK}
-- request:
- body: null
- headers:
- Accept: [application/json]
- Accept-Encoding: ['gzip, deflate']
- CommandName: [group delete]
- Connection: [keep-alive]
- Content-Length: ['0']
- Content-Type: [application/json; charset=utf-8]
- User-Agent: [python/3.4.3 (Windows-8-6.2.9200) requests/2.9.1 msrest/0.4.7 msrest_azure/0.4.7
- resourcemanagementclient/1.1.0rc1 Azure-SDK-For-Python AZURECLI/2.0.5+dev]
- accept-language: [en-US]
- method: DELETE
- uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/clitest.rg000001?api-version=2017-05-10
- response:
- body: {string: ''}
- headers:
- cache-control: [no-cache]
- content-length: ['0']
- date: ['Tue, 09 May 2017 00:10:06 GMT']
- expires: ['-1']
- location: ['https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/operationresults/eyJqb2JJZCI6IlJFU09VUkNFR1JPVVBERUxFVElPTkpPQi1DTElURVNUOjJFUkdPTlhZTEY3QkJDWFpUSEFTTFVOSFJJUVpaN0FKS0xGRVZJWnwyMTMzNjk4MUIyQTRERUVDLVdFU1RVUyIsImpvYkxvY2F0aW9uIjoid2VzdHVzIn0?api-version=2017-05-10']
- pragma: [no-cache]
- retry-after: ['15']
- strict-transport-security: [max-age=31536000; includeSubDomains]
- x-ms-ratelimit-remaining-subscription-writes: ['1197']
- status: {code: 202, message: Accepted}
-version: 1
diff --git a/src/command_modules/azure-cli-resource/tests/recordings/latest/test_group_deployment.yaml b/src/command_modules/azure-cli-resource/tests/recordings/latest/test_group_deployment.yaml
index 66d6e1d6a..5e8b598db 100644
--- a/src/command_modules/azure-cli-resource/tests/recordings/latest/test_group_deployment.yaml
+++ b/src/command_modules/azure-cli-resource/tests/recordings/latest/test_group_deployment.yaml
@@ -1,102 +1,221 @@
interactions:
- request:
- body: !!binary |
- eyJwcm9wZXJ0aWVzIjogeyJwYXJhbWV0ZXJzIjogeyJsb2NhdGlvbiI6IHsidmFsdWUiOiAid2Vz
- dHVzIn0sICJuYW1lIjogeyJ2YWx1ZSI6ICJhenVyZS1jbGktZGVwbG95LXRlc3QtbnNnMSJ9fSwg
- InRlbXBsYXRlIjogeyJ2YXJpYWJsZXMiOiB7fSwgIiRzY2hlbWEiOiAiaHR0cHM6Ly9zY2hlbWEu
- bWFuYWdlbWVudC5henVyZS5jb20vc2NoZW1hcy8yMDE1LTAxLTAxL2RlcGxveW1lbnRUZW1wbGF0
- ZS5qc29uIyIsICJyZXNvdXJjZXMiOiBbeyJuYW1lIjogIltwYXJhbWV0ZXJzKCduYW1lJyldIiwg
- ImRlcGVuZHNPbiI6IFtdLCAidHlwZSI6ICJNaWNyb3NvZnQuTmV0d29yay9uZXR3b3JrU2VjdXJp
- dHlHcm91cHMiLCAicHJvcGVydGllcyI6IHsic2VjdXJpdHlSdWxlcyI6IFtdfSwgImxvY2F0aW9u
- IjogIltwYXJhbWV0ZXJzKCdsb2NhdGlvbicpXSIsICJhcGlWZXJzaW9uIjogIjIwMTUtMDYtMTUi
- fV0sICJjb250ZW50VmVyc2lvbiI6ICIxLjAuMC4wIiwgInBhcmFtZXRlcnMiOiB7ImxvY2F0aW9u
- IjogeyJtZXRhZGF0YSI6IHsiZGVzY3JpcHRpb24iOiAiTG9jYXRpb24gZm9yIHRoZSBuZXR3b3Jr
- IHNlY3VyaXR5IGdyb3VwLiJ9LCAiZGVmYXVsdFZhbHVlIjogIltyZXNvdXJjZUdyb3VwKCkubG9j
- YXRpb25dIiwgInR5cGUiOiAic3RyaW5nIn0sICJuYW1lIjogeyJtZXRhZGF0YSI6IHsiZGVzY3Jp
- cHRpb24iOiAiTmFtZSBvZiB0aGUgbmV0d29yayBzZWN1cml0eSBncm91cC4ifSwgInR5cGUiOiAi
- c3RyaW5nIn19LCAib3V0cHV0cyI6IHsiTmV3TlNHIjogeyJ2YWx1ZSI6ICJbcmVmZXJlbmNlKHBh
- cmFtZXRlcnMoJ25hbWUnKSldIiwgInR5cGUiOiAib2JqZWN0In19fSwgIm1vZGUiOiAiSW5jcmVt
- ZW50YWwifX0=
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
+ msrest_azure/0.4.7 resourcemanagementclient/1.1.0rc1 Azure-SDK-For-Python
+ AZURECLI/TEST/2.0.7+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [984358f0-4c90-11e7-8df4-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/azure-cli-deployment-test?api-version=2017-05-10
+ response:
+ body: {string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test","name":"azure-cli-deployment-test","location":"westus","tags":{"use":"az-test"},"properties":{"provisioningState":"Succeeded"}}'}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Thu, 08 Jun 2017 21:22:36 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Vary: [Accept-Encoding]
+ content-length: ['240']
+ status: {code: 200, message: OK}
+- request:
+ body: '{"location": "westus", "properties": {"dhcpOptions": {}, "subnets": [{"properties":
+ {"addressPrefix": "10.0.0.0/24"}, "name": "subnet1"}], "addressSpace": {"addressPrefixes":
+ ["10.0.0.0/16"]}}, "tags": {}}'
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Length: ['205']
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
+ msrest_azure/0.4.7 networkmanagementclient/1.0.0rc3 Azure-SDK-For-Python
+ AZURECLI/TEST/2.0.7+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [9864d83e-4c90-11e7-a276-a0b3ccf7272a]
+ method: PUT
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Network/virtualNetworks/vnet1?api-version=2017-03-01
+ response:
+ body: {string: "{\r\n \"name\": \"vnet1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Network/virtualNetworks/vnet1\"\
+ ,\r\n \"etag\": \"W/\\\"afaa273d-fae6-4176-8e47-5f3f865704fe\\\"\",\r\n \
+ \ \"type\": \"Microsoft.Network/virtualNetworks\",\r\n \"location\": \"westus\"\
+ ,\r\n \"tags\": {},\r\n \"properties\": {\r\n \"provisioningState\":\
+ \ \"Updating\",\r\n \"resourceGuid\": \"240e9c40-73c7-4893-83a0-d30f69019829\"\
+ ,\r\n \"addressSpace\": {\r\n \"addressPrefixes\": [\r\n \"\
+ 10.0.0.0/16\"\r\n ]\r\n },\r\n \"dhcpOptions\": {\r\n \"dnsServers\"\
+ : []\r\n },\r\n \"subnets\": [\r\n {\r\n \"name\": \"subnet1\"\
+ ,\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/subnet1\"\
+ ,\r\n \"etag\": \"W/\\\"afaa273d-fae6-4176-8e47-5f3f865704fe\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"addressPrefix\": \"10.0.0.0/24\"\r\n }\r\n }\r\
+ \n ],\r\n \"virtualNetworkPeerings\": []\r\n }\r\n}"}
+ headers:
+ Azure-AsyncOperation: ['https://management.azure.com/subscriptions/0b1f6471-1bf0-4dda-aec3-cb9272f09590/providers/Microsoft.Network/locations/westus/operations/8c6d54b6-72de-49cb-87be-4a6bb7702d08?api-version=2017-03-01']
+ Cache-Control: [no-cache]
+ Content-Length: ['1072']
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Thu, 08 Jun 2017 21:22:36 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['3']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ x-ms-ratelimit-remaining-subscription-writes: ['1195']
+ status: {code: 201, message: Created}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
+ msrest_azure/0.4.7 networkmanagementclient/1.0.0rc3 Azure-SDK-For-Python
+ AZURECLI/TEST/2.0.7+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [9864d83e-4c90-11e7-a276-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/8c6d54b6-72de-49cb-87be-4a6bb7702d08?api-version=2017-03-01
+ response:
+ body: {string: "{\r\n \"status\": \"Succeeded\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Thu, 08 Jun 2017 21:22:40 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['29']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
+ msrest_azure/0.4.7 networkmanagementclient/1.0.0rc3 Azure-SDK-For-Python
+ AZURECLI/TEST/2.0.7+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [9864d83e-4c90-11e7-a276-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Network/virtualNetworks/vnet1?api-version=2017-03-01
+ response:
+ body: {string: "{\r\n \"name\": \"vnet1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Network/virtualNetworks/vnet1\"\
+ ,\r\n \"etag\": \"W/\\\"8a5432ce-5584-4583-be8a-b322c796f079\\\"\",\r\n \
+ \ \"type\": \"Microsoft.Network/virtualNetworks\",\r\n \"location\": \"westus\"\
+ ,\r\n \"tags\": {},\r\n \"properties\": {\r\n \"provisioningState\":\
+ \ \"Succeeded\",\r\n \"resourceGuid\": \"240e9c40-73c7-4893-83a0-d30f69019829\"\
+ ,\r\n \"addressSpace\": {\r\n \"addressPrefixes\": [\r\n \"\
+ 10.0.0.0/16\"\r\n ]\r\n },\r\n \"dhcpOptions\": {\r\n \"dnsServers\"\
+ : []\r\n },\r\n \"subnets\": [\r\n {\r\n \"name\": \"subnet1\"\
+ ,\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/subnet1\"\
+ ,\r\n \"etag\": \"W/\\\"8a5432ce-5584-4583-be8a-b322c796f079\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Succeeded\"\
+ ,\r\n \"addressPrefix\": \"10.0.0.0/24\"\r\n }\r\n }\r\
+ \n ],\r\n \"virtualNetworkPeerings\": []\r\n }\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Thu, 08 Jun 2017 21:22:41 GMT']
+ ETag: [W/"8a5432ce-5584-4583-be8a-b322c796f079"]
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['1074']
+ status: {code: 200, message: OK}
+- request:
+ body: '{"properties": {"mode": "Incremental", "template": {"resources": [{"dependsOn":
+ [], "properties": {"backendAddressPools": "[parameters(''backendAddressPools'')]",
+ "frontendIPConfigurations": [{"properties": {"privateIPAllocationMethod": "[parameters(''privateIPAllocationMethod'')]",
+ "subnet": {"id": "[parameters(''subnetId'')]"}}, "name": "LoadBalancerFrontEnd"}]},
+ "name": "[parameters(''name'')]", "location": "[parameters(''location'')]",
+ "apiVersion": "2016-03-30", "type": "Microsoft.Network/loadBalancers"}], "contentVersion":
+ "1.0.0.0", "parameters": {"location": {"type": "string"}, "privateIPAllocationMethod":
+ {"type": "string"}, "backendAddressPools": {"type": "array"}, "name": {"type":
+ "string"}, "subnetId": {"type": "string"}}, "$schema": "http://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#"},
+ "parameters": {"location": {"value": "westus"}, "privateIPAllocationMethod":
+ {"value": "Dynamic"}, "backendAddressPools": {"value": [{"name": "bepool1"},
+ {"name": "bepool2"}]}, "name": {"value": "test-lb"}, "subnetId": {"value": "/subscriptions/0b1f6471-1bf0-4dda-aec3-cb9272f09590/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/subnet1"}}}}'
headers:
Accept: [application/json]
Accept-Encoding: ['gzip, deflate']
Connection: [keep-alive]
- Content-Length: ['863']
+ Content-Length: ['1228']
Content-Type: [application/json; charset=utf-8]
- User-Agent: [python/3.5.0 (Windows-10.0.10586) requests/2.9.1 msrest/0.4.4 msrest_azure/0.4.3
- resourcemanagementclient/0.30.2 Azure-SDK-For-Python AZURECLI/TEST/0.1.0b8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
+ msrest_azure/0.4.7 resourcemanagementclient/1.1.0rc1 Azure-SDK-For-Python
+ AZURECLI/TEST/2.0.7+dev]
accept-language: [en-US]
- x-ms-client-request-id: [390fb6ac-a631-11e6-aef4-f4b7e2e85440]
+ x-ms-client-request-id: [9ad7925e-4c90-11e7-9479-a0b3ccf7272a]
method: POST
uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/azure-cli-deployment-test/providers/Microsoft.Resources/deployments/mock-deployment/validate?api-version=2017-05-10
response:
- body:
- string: !!binary |
- H4sIAAAAAAAEAO29B2AcSZYlJi9tynt/SvVK1+B0oQiAYBMk2JBAEOzBiM3mkuwdaUcjKasqgcpl
- VmVdZhZAzO2dvPfee++999577733ujudTif33/8/XGZkAWz2zkrayZ4hgKrIHz9+fB8/In7xR8Xs
- o0cf3W3Wk2ZaF6u2qJbN3Z3J7vmn+w92t3cn5zvb+7NZtp3l03vb08nDvQd75zsP7z/cuVvnTbWu
- p/nndbVeNXezH6zrfHtaFtuzfFVW14t82W63edPeXdXVZTHL6+buF8W0rprqvB2/0pebu661//vv
- P6uvf/96vfxo9NEyW+SEYvQ7Ar3K67bIm48e/eKPVllNbVvqCX+V1TTDcPB7e70CjNdtXSwv6L3L
- rFzjgyvCb9189EtML8Mtu8PjoW0vm4vdj34Jvb6oZmh1tpzWOXDMSnqXsLssGkKBQL1usxYtjqfT
- fNXmM/q6LRYEI1us6OO9nd1Pt3d3t3cevtnZf7Sz+2h/b/zg4f7D/d2fopazda1D+ejlm53X9Mm0
- quu85A/PMIH5bJJ/+mD2YPv+3uz+9v6nu5Pth3t7B9sP789ms/18Jzs/uE+vMUaYio8efe8X85ib
- VTYFXm5qXuTtVVW/pdZmht8QTeQN/xN6aSlNX+fTdV2018IJ9KIhPV4yNP7+L6H/aCj5Kl/O8uWU
- 5+x79AlRuJgRcWaWJ+jzX/xRtip+khAlKNQRkef+9g5RCIP4ueFYJcvd6Jh7sBiOsIfhrU0sRMwg
- FL1lf/SCoTG9pBTm6fXEodFXXq1LfPC97/8SmoJf8v8Aj69MlPIDAAA=
+ body: {string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Resources/deployments/deployment_dry_run","name":"deployment_dry_run","properties":{"templateHash":"18048340065001902796","parameters":{"location":{"type":"String","value":"westus"},"privateIPAllocationMethod":{"type":"String","value":"Dynamic"},"backendAddressPools":{"type":"Array","value":[{"name":"bepool1"},{"name":"bepool2"}]},"name":{"type":"String","value":"test-lb"},"subnetId":{"type":"String","value":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/subnet1"}},"mode":"Incremental","provisioningState":"Succeeded","timestamp":"2017-06-08T21:22:42.0844954Z","duration":"PT0S","correlationId":"522bdd66-c165-41e3-8913-93f10b835055","providers":[{"namespace":"Microsoft.Network","resourceTypes":[{"resourceType":"loadBalancers","locations":["westus"]}]}],"dependencies":[],"validatedResources":[{"apiVersion":"2016-03-30","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Network/loadBalancers/test-lb","name":"test-lb","type":"Microsoft.Network/loadBalancers","location":"westus","properties":{"backendAddressPools":[{"name":"bepool1"},{"name":"bepool2"}],"frontendIPConfigurations":[{"properties":{"privateIPAllocationMethod":"Dynamic","subnet":{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/subnet1"}},"name":"LoadBalancerFrontEnd"}]}}]}}'}
headers:
Cache-Control: [no-cache]
- Content-Encoding: [gzip]
- Content-Length: ['611']
Content-Type: [application/json; charset=utf-8]
- Date: ['Wed, 09 Nov 2016 04:01:42 GMT']
+ Date: ['Thu, 08 Jun 2017 21:22:41 GMT']
Expires: ['-1']
Pragma: [no-cache]
Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
Vary: [Accept-Encoding]
- x-ms-ratelimit-remaining-subscription-writes: ['1198']
+ content-length: ['1676']
+ x-ms-ratelimit-remaining-subscription-writes: ['1195']
status: {code: 200, message: OK}
- request:
- body: !!binary |
- eyJwcm9wZXJ0aWVzIjogeyJwYXJhbWV0ZXJzIjogeyJsb2NhdGlvbiI6IHsidmFsdWUiOiAid2Vz
- dHVzIn0sICJuYW1lIjogeyJ2YWx1ZSI6ICJhenVyZS1jbGktZGVwbG95LXRlc3QtbnNnMSJ9fSwg
- InRlbXBsYXRlIjogeyJ2YXJpYWJsZXMiOiB7fSwgIiRzY2hlbWEiOiAiaHR0cHM6Ly9zY2hlbWEu
- bWFuYWdlbWVudC5henVyZS5jb20vc2NoZW1hcy8yMDE1LTAxLTAxL2RlcGxveW1lbnRUZW1wbGF0
- ZS5qc29uIyIsICJyZXNvdXJjZXMiOiBbeyJuYW1lIjogIltwYXJhbWV0ZXJzKCduYW1lJyldIiwg
- ImRlcGVuZHNPbiI6IFtdLCAidHlwZSI6ICJNaWNyb3NvZnQuTmV0d29yay9uZXR3b3JrU2VjdXJp
- dHlHcm91cHMiLCAicHJvcGVydGllcyI6IHsic2VjdXJpdHlSdWxlcyI6IFtdfSwgImxvY2F0aW9u
- IjogIltwYXJhbWV0ZXJzKCdsb2NhdGlvbicpXSIsICJhcGlWZXJzaW9uIjogIjIwMTUtMDYtMTUi
- fV0sICJjb250ZW50VmVyc2lvbiI6ICIxLjAuMC4wIiwgInBhcmFtZXRlcnMiOiB7ImxvY2F0aW9u
- IjogeyJtZXRhZGF0YSI6IHsiZGVzY3JpcHRpb24iOiAiTG9jYXRpb24gZm9yIHRoZSBuZXR3b3Jr
- IHNlY3VyaXR5IGdyb3VwLiJ9LCAiZGVmYXVsdFZhbHVlIjogIltyZXNvdXJjZUdyb3VwKCkubG9j
- YXRpb25dIiwgInR5cGUiOiAic3RyaW5nIn0sICJuYW1lIjogeyJtZXRhZGF0YSI6IHsiZGVzY3Jp
- cHRpb24iOiAiTmFtZSBvZiB0aGUgbmV0d29yayBzZWN1cml0eSBncm91cC4ifSwgInR5cGUiOiAi
- c3RyaW5nIn19LCAib3V0cHV0cyI6IHsiTmV3TlNHIjogeyJ2YWx1ZSI6ICJbcmVmZXJlbmNlKHBh
- cmFtZXRlcnMoJ25hbWUnKSldIiwgInR5cGUiOiAib2JqZWN0In19fSwgIm1vZGUiOiAiSW5jcmVt
- ZW50YWwifX0=
+ body: '{"properties": {"mode": "Incremental", "template": {"resources": [{"dependsOn":
+ [], "properties": {"backendAddressPools": "[parameters(''backendAddressPools'')]",
+ "frontendIPConfigurations": [{"properties": {"privateIPAllocationMethod": "[parameters(''privateIPAllocationMethod'')]",
+ "subnet": {"id": "[parameters(''subnetId'')]"}}, "name": "LoadBalancerFrontEnd"}]},
+ "name": "[parameters(''name'')]", "location": "[parameters(''location'')]",
+ "apiVersion": "2016-03-30", "type": "Microsoft.Network/loadBalancers"}], "contentVersion":
+ "1.0.0.0", "parameters": {"location": {"type": "string"}, "privateIPAllocationMethod":
+ {"type": "string"}, "backendAddressPools": {"type": "array"}, "name": {"type":
+ "string"}, "subnetId": {"type": "string"}}, "$schema": "http://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#"},
+ "parameters": {"location": {"value": "westus"}, "privateIPAllocationMethod":
+ {"value": "Dynamic"}, "backendAddressPools": {"value": [{"name": "bepool1"},
+ {"name": "bepool2"}]}, "name": {"value": "test-lb"}, "subnetId": {"value": "/subscriptions/0b1f6471-1bf0-4dda-aec3-cb9272f09590/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/subnet1"}}}}'
headers:
Accept: [application/json]
Accept-Encoding: ['gzip, deflate']
Connection: [keep-alive]
- Content-Length: ['863']
+ Content-Length: ['1228']
Content-Type: [application/json; charset=utf-8]
- User-Agent: [python/3.5.0 (Windows-10.0.10586) requests/2.9.1 msrest/0.4.4 msrest_azure/0.4.3
- resourcemanagementclient/0.30.2 Azure-SDK-For-Python AZURECLI/TEST/0.1.0b8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
+ msrest_azure/0.4.7 resourcemanagementclient/1.1.0rc1 Azure-SDK-For-Python
+ AZURECLI/TEST/2.0.7+dev]
accept-language: [en-US]
- x-ms-client-request-id: [398ced0a-a631-11e6-af10-f4b7e2e85440]
+ x-ms-client-request-id: [9b58cf52-4c90-11e7-9b16-a0b3ccf7272a]
method: PUT
uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/azure-cli-deployment-test/providers/Microsoft.Resources/deployments/mock-deployment?api-version=2017-05-10
response:
- body: {string: '{"id":"/subscriptions/0b1f6471-1bf0-4dda-aec3-cb9272f09590/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Resources/deployments/azure-cli-deployment","name":"azure-cli-deployment","properties":{"parameters":{"location":{"type":"String","value":"westus"},"name":{"type":"String","value":"azure-cli-deploy-test-nsg1"}},"mode":"Incremental","provisioningState":"Accepted","timestamp":"2016-11-09T04:01:43.629944Z","duration":"PT0.1645357S","correlationId":"cbe97827-5a27-47bc-b9cb-241d4a2bc49f","providers":[{"namespace":"Microsoft.Network","resourceTypes":[{"resourceType":"networkSecurityGroups","locations":["westus"]}]}],"dependencies":[]}}'}
+ body: {string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Resources/deployments/azure-cli-deployment","name":"azure-cli-deployment","properties":{"templateHash":"18048340065001902796","parameters":{"location":{"type":"String","value":"westus"},"privateIPAllocationMethod":{"type":"String","value":"Dynamic"},"backendAddressPools":{"type":"Array","value":[{"name":"bepool1"},{"name":"bepool2"}]},"name":{"type":"String","value":"test-lb"},"subnetId":{"type":"String","value":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/subnet1"}},"mode":"Incremental","provisioningState":"Accepted","timestamp":"2017-06-08T21:22:43.2188091Z","duration":"PT0.2770006S","correlationId":"c18f33be-ece9-4c12-96b9-eabd6fd111ff","providers":[{"namespace":"Microsoft.Network","resourceTypes":[{"resourceType":"loadBalancers","locations":["westus"]}]}],"dependencies":[]}}'}
headers:
- Azure-AsyncOperation: ['https://management.azure.com/subscriptions/0b1f6471-1bf0-4dda-aec3-cb9272f09590/resourcegroups/azure-cli-deployment-test/providers/Microsoft.Resources/deployments/azure-cli-deployment/operationStatuses/08587229427820122645?api-version=2017-05-10']
+ Azure-AsyncOperation: ['https://management.azure.com/subscriptions/0b1f6471-1bf0-4dda-aec3-cb9272f09590/resourcegroups/azure-cli-deployment-test/providers/Microsoft.Resources/deployments/azure-cli-deployment/operationStatuses/08587046499225358060?api-version=2017-05-10']
Cache-Control: [no-cache]
- Content-Length: ['655']
+ Content-Length: ['1028']
Content-Type: [application/json; charset=utf-8]
- Date: ['Wed, 09 Nov 2016 04:01:43 GMT']
+ Date: ['Thu, 08 Jun 2017 21:22:42 GMT']
Expires: ['-1']
Pragma: [no-cache]
Strict-Transport-Security: [max-age=31536000; includeSubDomains]
- x-ms-ratelimit-remaining-subscription-writes: ['1198']
+ x-ms-ratelimit-remaining-subscription-writes: ['1197']
status: {code: 201, message: Created}
- request:
body: null
@@ -105,28 +224,24 @@ interactions:
Accept-Encoding: ['gzip, deflate']
Connection: [keep-alive]
Content-Type: [application/json; charset=utf-8]
- User-Agent: [python/3.5.0 (Windows-10.0.10586) requests/2.9.1 msrest/0.4.4 msrest_azure/0.4.3
- resourcemanagementclient/0.30.2 Azure-SDK-For-Python AZURECLI/TEST/0.1.0b8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
+ msrest_azure/0.4.7 resourcemanagementclient/1.1.0rc1 Azure-SDK-For-Python
+ AZURECLI/TEST/2.0.7+dev]
accept-language: [en-US]
- x-ms-client-request-id: [398ced0a-a631-11e6-af10-f4b7e2e85440]
+ x-ms-client-request-id: [9b58cf52-4c90-11e7-9b16-a0b3ccf7272a]
method: GET
- uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/azure-cli-deployment-test/providers/Microsoft.Resources/deployments/mock-deployment/operationStatuses/08587229427820122645?api-version=2017-05-10
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/azure-cli-deployment-test/providers/Microsoft.Resources/deployments/mock-deployment/operationStatuses/08587046499225358060?api-version=2017-05-10
response:
- body:
- string: !!binary |
- H4sIAAAAAAAEAO29B2AcSZYlJi9tynt/SvVK1+B0oQiAYBMk2JBAEOzBiM3mkuwdaUcjKasqgcpl
- VmVdZhZAzO2dvPfee++999577733ujudTif33/8/XGZkAWz2zkrayZ4hgKrIHz9+fB8/In7xR02b
- tevmo0cfvV5Pp3k+y2cf/ZL/ByCIe+QWAAAA
+ body: {string: '{"status":"Succeeded"}'}
headers:
Cache-Control: [no-cache]
- Content-Encoding: [gzip]
- Content-Length: ['141']
Content-Type: [application/json; charset=utf-8]
- Date: ['Wed, 09 Nov 2016 04:02:13 GMT']
+ Date: ['Thu, 08 Jun 2017 21:23:13 GMT']
Expires: ['-1']
Pragma: [no-cache]
Strict-Transport-Security: [max-age=31536000; includeSubDomains]
Vary: [Accept-Encoding]
+ content-length: ['22']
status: {code: 200, message: OK}
- request:
body: null
@@ -135,45 +250,24 @@ interactions:
Accept-Encoding: ['gzip, deflate']
Connection: [keep-alive]
Content-Type: [application/json; charset=utf-8]
- User-Agent: [python/3.5.0 (Windows-10.0.10586) requests/2.9.1 msrest/0.4.4 msrest_azure/0.4.3
- resourcemanagementclient/0.30.2 Azure-SDK-For-Python AZURECLI/TEST/0.1.0b8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
+ msrest_azure/0.4.7 resourcemanagementclient/1.1.0rc1 Azure-SDK-For-Python
+ AZURECLI/TEST/2.0.7+dev]
accept-language: [en-US]
- x-ms-client-request-id: [398ced0a-a631-11e6-af10-f4b7e2e85440]
+ x-ms-client-request-id: [9b58cf52-4c90-11e7-9b16-a0b3ccf7272a]
method: GET
uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/azure-cli-deployment-test/providers/Microsoft.Resources/deployments/mock-deployment?api-version=2017-05-10
response:
- body:
- string: !!binary |
- H4sIAAAAAAAEAO29B2AcSZYlJi9tynt/SvVK1+B0oQiAYBMk2JBAEOzBiM3mkuwdaUcjKasqgcpl
- VmVdZhZAzO2dvPfee++999577733ujudTif33/8/XGZkAWz2zkrayZ4hgKrIHz9+fB8/In7xR8Xs
- o0cf3W3Wk2ZaF6u2qJbN3Z3J7vmn+w92t3cn5zvb+7NZtp3l03vb08nDvQd75zsP7z/cuVvnTbWu
- p/nndbVeNXezH6zrfHtaFtuzfFVW14t82W63edPeXdXVZTHL6+buF8W0rprqvB2/0pebu651HMZH
- o4+W2SInJAe+JfCrvG6LvPno0S/+aJXV1Lql3vBXWU0zDAm/t9crQHnd1sXygt67zMo1PrgiHNfN
- R7/E9DPcsosAD2972VzsfvRL6PVFNUOrs+W0zoFbVtK7hN1l0RAKBOp1m7Vo8Xo9neb5LJ/R922x
- ICDZYkWf7+3sfrq9u7u98/DNzv6jnd1H9x+O93f373+6//CnqOlsXetgPnr5Zvf++OH+w937n376
- mr6aVnWdl/ztGeZzOskfPjjYe7B9P6N/9h9MptuTh9PJ9t7+7mw/25tM9x+e02uMHGbmo0ff+8U8
- /GaVTYGim6kXeXtV1W+ptZnwN0QeecP/hF5aStPX+XRdF+21MAa9aGYBLxlyf/+X0H80pnyVL2f5
- csrT9z36pFq3q3VLfxBC+dWL15/jN52RLyc/nU8x5zojNN0YwQbyGgw/XzOf59l073z3nCYxf/Bw
- e5/YfPvgHvH5ZPrpjGg22b2/e0BvNTqCV+vSoDXLz7N12ZqxmW8IyQxM89FxWVZXP0kUOFs+qdZL
- 9M093v1hS5bO111CBT8NwgOwGA6zMAlif4R3I8PK2+yCBvbdu7/vR3vZA6LaLnHYQ2Lb/dnuwfbD
- B/Rbdv5wL9892H/w4H72+35E7xCivowC7Q2TNsstxegbRiEtlhP0n7Z1dn5eTNPzulqkWVmmP/lF
- Q1+mP/ni9A29SqDbalqV9N636E+h48uqbl9lywv0g08JflssmSW7X8kLx7MZzUHzss7Pi3f0zU8W
- dbvOSiUtNfMg3Ng2o4E1NG4ZCH2wqosKFP7o0af3d3Z2CFpRE1sTLGp0JuOEQgl56xgz97zKZk+y
- MltO89rNyP9vGG3DGP/fxHXAMi0JzXSieNLrBP5ngfN6FKGWHpBucwC6gd92CcAN/PY0X17Ty474
- /39gsN6gftgcBQRYYxVCcsNU1JAA/SzwDr7xXop9bXkFyNHfPqsQr9D7N7AKUVTsw5fr1hD2/w/c
- EhvXD5thGIeU/CEmu2EX1UHER7B8bWV//f+RESSi85j7rHa2JNc+nJb/37BbZGz/L2U5gym9T/B/
- FngN33gvdb/2+r+BsTrWjkjLIwsYC7qP3qbvDNn//8BS/VH9sJkJGDDXdPmJWhKkHz7b4GvLL8CO
- /vbZhfiF3o+zy/d/CbHMRzQSCk5t8oIDQGaWD5/Fj9DF/wOany5SjxEAAA==
+ body: {string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Resources/deployments/azure-cli-deployment","name":"azure-cli-deployment","properties":{"templateHash":"18048340065001902796","parameters":{"location":{"type":"String","value":"westus"},"privateIPAllocationMethod":{"type":"String","value":"Dynamic"},"backendAddressPools":{"type":"Array","value":[{"name":"bepool1"},{"name":"bepool2"}]},"name":{"type":"String","value":"test-lb"},"subnetId":{"type":"String","value":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/subnet1"}},"mode":"Incremental","provisioningState":"Succeeded","timestamp":"2017-06-08T21:22:51.7379575Z","duration":"PT8.796149S","correlationId":"c18f33be-ece9-4c12-96b9-eabd6fd111ff","providers":[{"namespace":"Microsoft.Network","resourceTypes":[{"resourceType":"loadBalancers","locations":["westus"]}]}],"dependencies":[],"outputResources":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Network/loadBalancers/test-lb"}]}}'}
headers:
Cache-Control: [no-cache]
- Content-Encoding: [gzip]
- Content-Length: ['1126']
Content-Type: [application/json; charset=utf-8]
- Date: ['Wed, 09 Nov 2016 04:02:14 GMT']
+ Date: ['Thu, 08 Jun 2017 21:23:13 GMT']
Expires: ['-1']
Pragma: [no-cache]
Strict-Transport-Security: [max-age=31536000; includeSubDomains]
Vary: [Accept-Encoding]
+ content-length: ['1206']
status: {code: 200, message: OK}
- request:
body: null
@@ -182,45 +276,24 @@ interactions:
Accept-Encoding: ['gzip, deflate']
Connection: [keep-alive]
Content-Type: [application/json; charset=utf-8]
- User-Agent: [python/3.5.0 (Windows-10.0.10586) requests/2.9.1 msrest/0.4.4 msrest_azure/0.4.3
- resourcemanagementclient/0.30.2 Azure-SDK-For-Python AZURECLI/TEST/0.1.0b8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
+ msrest_azure/0.4.7 resourcemanagementclient/1.1.0rc1 Azure-SDK-For-Python
+ AZURECLI/TEST/2.0.7+dev]
accept-language: [en-US]
- x-ms-client-request-id: [4cb064fe-a631-11e6-a719-f4b7e2e85440]
+ x-ms-client-request-id: [ae78895c-4c90-11e7-adda-a0b3ccf7272a]
method: GET
uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/azure-cli-deployment-test/providers/Microsoft.Resources/deployments/?api-version=2017-05-10
response:
- body:
- string: !!binary |
- H4sIAAAAAAAEAO29B2AcSZYlJi9tynt/SvVK1+B0oQiAYBMk2JBAEOzBiM3mkuwdaUcjKasqgcpl
- VmVdZhZAzO2dvPfee++999577733ujudTif33/8/XGZkAWz2zkrayZ4hgKrIHz9+fB8/In7xR5dZ
- uc4/evS9X/xRMfvo0Ud3m/WkmdbFqi2qZXN3Z7J7/un+g93t3cn5zvb+bJZtZ/n03vZ08nDvwd75
- zsP7D3fu1nlTretp/nldrVfN3ewH6zrfnpbF9ixfldX1Il+2223etHdXdXVZzPK6uftFMa2rpjpv
- x6/05eauax2H8dHoo2W2IFw/GviWwK/yui3y5qNHv/ijVVZT65Z6w19lNc0wJPzeXq8A5XVbF8sL
- ek9J8NEV4bhuPvolpp/hll0EeHjby+Zi96NfQq8vqhlanS2ndQ7cspLeJewui4ZQIFCv26xFi9fr
- 6TTPZ/mMvm+LBQHJFiv6fG9n99Pt3d3tnYdvdvYf7ew+uv9wvL+7f//T/Yc/RU1n61oH89HLN7v3
- xw/3H+7e//TT1/TVtKrrvORvzzCf00n+8MHB3oPt+xn9s/9gMt2ePJxOtvf2d2f72d5kuv/wnF5j
- 5DAzzAkYfrPKpkDRzdSLvL2q6rfU2kz4GyKPvOF/Qi8tpenrfLqui/ZaGINeNLOAlwy5v/9L6D8a
- U77Kl7N8OeXp+x59Uq3b1bqlPwih/OrF68/xm87Il5OfzqeYc50Rmm6MYAN5DYafr5nP82y6d757
- TpOYP3i4vU9svn1wj/h8Mv10RjSb7N7fPaC3Gh3Bq3Vp0Jrl59m6bM3YzDeEZAam+ei4LKurnyQK
- nC2fVOsl+uYe7/6wJUvn6y6hgp8G4QFYDIdZmASxP8K7kWHlbXZBA/vu3d/3o73sAVFtlzjsIbHt
- /mz3YPvhA/otO3+4l+8e7D94cD/7fT+idwhRX0aB9oZJm+WWYvQNo5AWywn6T9s6Oz8vpul5XS3S
- rCzTn/yioS/Tn3xx+oZeJdBtNa1Keu9b9KfQ8WVVt6+y5QX6wacEvy2WzJLdr+SF49mM5qB5Wefn
- xTv65ieLul1npZKWmnkQbmyb0cAaGrcMhD5Y1UUFCn/06NP7Ozs7BK2oia0JFjU6k3FCoYS8dYyZ
- e15lsydZmS2nee1m5P83jLZhjP9v4jpgmZaEZjpRPOl1Av+zwHk9ilBLD0i3OQDdwG+7BOAGfnua
- L6/pZUf8/z8wWG9QP2yOAgKssQohuWEqakiAfhZ4B994L8W+trwC5Ohvn1WIV+j9G1iFKCr24ct1
- awj7/wduiY3rh80wjENK/hCT3bCL6iDiI1i+trK//v/ICBLRecx9VjtbkmsfTsv/b9gtMrb/l7Kc
- wZTeJ/g/C7yGb7yXul97/d/AWB1rR6TlkQWMBd1Hb9N3huz/f2Cp/qh+2MwEDJhruvxELQnSD59t
- 8LXlF2BHf/vsQvxC78fZ5fu/hFjmIxoJBac2ecEBIDPLh8/iR+ji+7/k/wE7X9EtmxEAAA==
+ body: {string: '{"value":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Resources/deployments/azure-cli-deployment","name":"azure-cli-deployment","properties":{"templateHash":"18048340065001902796","parameters":{"location":{"type":"String","value":"westus"},"privateIPAllocationMethod":{"type":"String","value":"Dynamic"},"backendAddressPools":{"type":"Array","value":[{"name":"bepool1"},{"name":"bepool2"}]},"name":{"type":"String","value":"test-lb"},"subnetId":{"type":"String","value":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/subnet1"}},"mode":"Incremental","provisioningState":"Succeeded","timestamp":"2017-06-08T21:22:51.7379575Z","duration":"PT8.796149S","correlationId":"c18f33be-ece9-4c12-96b9-eabd6fd111ff","providers":[{"namespace":"Microsoft.Network","resourceTypes":[{"resourceType":"loadBalancers","locations":["westus"]}]}],"dependencies":[],"outputResources":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Network/loadBalancers/test-lb"}]}}]}'}
headers:
Cache-Control: [no-cache]
- Content-Encoding: [gzip]
- Content-Length: ['1135']
Content-Type: [application/json; charset=utf-8]
- Date: ['Wed, 09 Nov 2016 04:02:14 GMT']
+ Date: ['Thu, 08 Jun 2017 21:23:14 GMT']
Expires: ['-1']
Pragma: [no-cache]
Strict-Transport-Security: [max-age=31536000; includeSubDomains]
Vary: [Accept-Encoding]
+ content-length: ['1218']
status: {code: 200, message: OK}
- request:
body: null
@@ -229,45 +302,24 @@ interactions:
Accept-Encoding: ['gzip, deflate']
Connection: [keep-alive]
Content-Type: [application/json; charset=utf-8]
- User-Agent: [python/3.5.0 (Windows-10.0.10586) requests/2.9.1 msrest/0.4.4 msrest_azure/0.4.3
- resourcemanagementclient/0.30.2 Azure-SDK-For-Python AZURECLI/TEST/0.1.0b8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
+ msrest_azure/0.4.7 resourcemanagementclient/1.1.0rc1 Azure-SDK-For-Python
+ AZURECLI/TEST/2.0.7+dev]
accept-language: [en-US]
- x-ms-client-request-id: [4cde65da-a631-11e6-b336-f4b7e2e85440]
+ x-ms-client-request-id: [aeb3b476-4c90-11e7-ba4f-a0b3ccf7272a]
method: GET
uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/azure-cli-deployment-test/providers/Microsoft.Resources/deployments/mock-deployment?api-version=2017-05-10
response:
- body:
- string: !!binary |
- H4sIAAAAAAAEAO29B2AcSZYlJi9tynt/SvVK1+B0oQiAYBMk2JBAEOzBiM3mkuwdaUcjKasqgcpl
- VmVdZhZAzO2dvPfee++999577733ujudTif33/8/XGZkAWz2zkrayZ4hgKrIHz9+fB8/In7xR8Xs
- o0cf3W3Wk2ZaF6u2qJbN3Z3J7vmn+w92t3cn5zvb+7NZtp3l03vb08nDvQd75zsP7z/cuVvnTbWu
- p/nndbVeNXezH6zrfHtaFtuzfFVW14t82W63edPeXdXVZTHL6+buF8W0rprqvB2/0pebu651HMZH
- o4+W2SInJAe+JfCrvG6LvPno0S/+aJXV1Lql3vBXWU0zDAm/t9crQHnd1sXygt67zMo1PrgiHNfN
- R7/E9DPcsosAD2972VzsfvRL6PVFNUOrs+W0zoFbVtK7hN1l0RAKBOp1m7Vo8Xo9neb5LJ/R922x
- ICDZYkWf7+3sfrq9u7u98/DNzv6jnd1H9x+O93f373+6//CnqOlsXetgPnr5Zvf++OH+w937n376
- mr6aVnWdl/ztGeZzOskfPjjYe7B9P6N/9h9MptuTh9PJ9t7+7mw/25tM9x+e02uMHGbmo0ff+8U8
- /GaVTYGim6kXeXtV1W+ptZnwN0QeecP/hF5aStPX+XRdF+21MAa9aGYBLxlyf/+X0H80pnyVL2f5
- csrT9z36pFq3q3VLfxBC+dWL15/jN52RLyc/nU8x5zojNN0YwQbyGgw/XzOf59l073z3nCYxf/Bw
- e5/YfPvgHvH5ZPrpjGg22b2/e0BvNTqCV+vSoDXLz7N12ZqxmW8IyQxM89FxWVZXP0kUOFs+qdZL
- 9M093v1hS5bO111CBT8NwgOwGA6zMAlif4R3I8PK2+yCBvbdu7/vR3vZA6LaLnHYQ2Lb/dnuwfbD
- B/Rbdv5wL9892H/w4H72+35E7xCivowC7Q2TNsstxegbRiEtlhP0n7Z1dn5eTNPzulqkWVmmP/lF
- Q1+mP/ni9A29SqDbalqV9N636E+h48uqbl9lywv0g08JflssmSW7X8kLx7MZzUHzss7Pi3f0zU8W
- dbvOSiUtNfMg3Ng2o4E1NG4ZCH2wqosKFP7o0af3d3Z2CFpRE1sTLGp0JuOEQgl56xgz97zKZk+y
- MltO89rNyP9vGG3DGP/fxHXAMi0JzXSieNLrBP5ngfN6FKGWHpBucwC6gd92CcAN/PY0X17Ty474
- /39gsN6gftgcBQRYYxVCcsNU1JAA/SzwDr7xXop9bXkFyNHfPqsQr9D7N7AKUVTsw5fr1hD2/w/c
- EhvXD5thGIeU/CEmu2EX1UHER7B8bWV//f+RESSi85j7rHa2JNc+nJb/37BbZGz/L2U5gym9T/B/
- FngN33gvdb/2+r+BsTrWjkjLIwsYC7qP3qbvDNn//8BS/VH9sJkJGDDXdPmJWhKkHz7b4GvLL8CO
- /vbZhfiF3o+zy/d/CbHMRzQSCk5t8oIDQGaWD5/Fj9DF/wOany5SjxEAAA==
+ body: {string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Resources/deployments/azure-cli-deployment","name":"azure-cli-deployment","properties":{"templateHash":"18048340065001902796","parameters":{"location":{"type":"String","value":"westus"},"privateIPAllocationMethod":{"type":"String","value":"Dynamic"},"backendAddressPools":{"type":"Array","value":[{"name":"bepool1"},{"name":"bepool2"}]},"name":{"type":"String","value":"test-lb"},"subnetId":{"type":"String","value":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/subnet1"}},"mode":"Incremental","provisioningState":"Succeeded","timestamp":"2017-06-08T21:22:51.7379575Z","duration":"PT8.796149S","correlationId":"c18f33be-ece9-4c12-96b9-eabd6fd111ff","providers":[{"namespace":"Microsoft.Network","resourceTypes":[{"resourceType":"loadBalancers","locations":["westus"]}]}],"dependencies":[],"outputResources":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Network/loadBalancers/test-lb"}]}}'}
headers:
Cache-Control: [no-cache]
- Content-Encoding: [gzip]
- Content-Length: ['1126']
Content-Type: [application/json; charset=utf-8]
- Date: ['Wed, 09 Nov 2016 04:02:15 GMT']
+ Date: ['Thu, 08 Jun 2017 21:23:14 GMT']
Expires: ['-1']
Pragma: [no-cache]
Strict-Transport-Security: [max-age=31536000; includeSubDomains]
Vary: [Accept-Encoding]
+ content-length: ['1206']
status: {code: 200, message: OK}
- request:
body: null
@@ -276,37 +328,23 @@ interactions:
Accept-Encoding: ['gzip, deflate']
Connection: [keep-alive]
Content-Type: [application/json; charset=utf-8]
- User-Agent: [python/3.5.0 (Windows-10.0.10586) requests/2.9.1 msrest/0.4.4 msrest_azure/0.4.3
- resourcemanagementclient/0.30.2 Azure-SDK-For-Python AZURECLI/TEST/0.1.0b8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
+ msrest_azure/0.4.7 resourcemanagementclient/1.1.0rc1 Azure-SDK-For-Python
+ AZURECLI/TEST/2.0.7+dev]
accept-language: [en-US]
- x-ms-client-request-id: [4d158cd8-a631-11e6-92db-f4b7e2e85440]
+ x-ms-client-request-id: [aeea1dd2-4c90-11e7-a097-a0b3ccf7272a]
method: GET
uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/azure-cli-deployment-test/deployments/mock-deployment/operations?api-version=2017-05-10
response:
- body:
- string: !!binary |
- H4sIAAAAAAAEAO29B2AcSZYlJi9tynt/SvVK1+B0oQiAYBMk2JBAEOzBiM3mkuwdaUcjKasqgcpl
- VmVdZhZAzO2dvPfee++999577733ujudTif33/8/XGZkAWz2zkrayZ4hgKrIHz9+fB8/In7xR5dZ
- uc4/evS9X/xRMfvo0Ud3m/WkmdbFqi2qZXN3Z7J7/un+g93t3cn5zvb+bJZtZ/n03vZ08nDvwd75
- zsP7D3fu1nlTretp/nldrVfN3ewH6zrfnpbF9ixfldX1Il+2223etHdXdXVZzPK6uftFMa2rpjpv
- x6/05eauax2Hcbda5XUmeN3/9MH+w2c7B88OHu4+ebp3/NHoI/vtGcYRaUC9U5O2yJuPHv1i/HVZ
- NNS8WF58SZ/zq/TiSZ1nbS7NbYPXLT579NHr9XSa57N8Rt+3xYLGlC1W9Pnezu6n27u72zsP3+zs
- P9rZfXT/YPzg4ae79+/v/RQ1na0t+JdvdvfHu0S3e3v3X9NXbZ1N31IPjHSeT/PdB7sPtiez2f3t
- /d3pp9vZ3t7e9t69/NN8bz+b5Pfv0TtNXl8W0/xV/ovWhAG/uTvN7u3em+xu39+fTbf3HxxMtic5
- vX6e7d7be/jw4V6+cx9v0jjWzUk1w2C+/L3okzarL/LWTAII83PDBi/y9qqq395dys/X+XRdF+31
- ACyGs71sLnZpCKbfN9crGsBHt4TpvfgiW+DFDZ38kl/yS0Y/V5Qxc/N+ArJzcP/gwd7ew/29BwfE
- nXt7n+6DAWwL5pqBRoQFNbtJUE6hOUgsntr+v1y3q3UrAOwr7y86D8f3Dh7cO7jXk5yd8f29T/d2
- HvQFh4R8en/v/NPtyV4+o1nId7YPHj68t/1wdjDb2d/LH+5Mdugd6rPD/vLJF3nTZBf04XJdlr/k
- l3z/l/w/15afSBcFAAA=
+ body: {string: '{"value":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Resources/deployments/azure-cli-deployment/operations/62E23BA3072F9A59","operationId":"62E23BA3072F9A59","properties":{"provisioningOperation":"Create","provisioningState":"Succeeded","timestamp":"2017-06-08T21:22:49.7320302Z","duration":"PT3.1487534S","trackingId":"25239975-d954-465b-897a-708ab8de526f","serviceRequestId":"1d49c05c-2558-47e1-84ac-d104ba980e53","statusCode":"Created","targetResource":{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Network/loadBalancers/test-lb","resourceType":"Microsoft.Network/loadBalancers","resourceName":"test-lb"}}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Resources/deployments/azure-cli-deployment/operations/08587046499225358060","operationId":"08587046499225358060","properties":{"provisioningOperation":"EvaluateDeploymentOutput","provisioningState":"Succeeded","timestamp":"2017-06-08T21:22:51.4958641Z","duration":"PT1.3552308S","trackingId":"52533323-6355-43b1-b5ca-d54f37c55fea","statusCode":"OK","statusMessage":null}}]}'}
headers:
Cache-Control: [no-cache]
- Content-Encoding: [gzip]
- Content-Length: ['698']
Content-Type: [application/json; charset=utf-8]
- Date: ['Wed, 09 Nov 2016 04:02:15 GMT']
+ Date: ['Thu, 08 Jun 2017 21:23:14 GMT']
Expires: ['-1']
Pragma: [no-cache]
Strict-Transport-Security: [max-age=31536000; includeSubDomains]
Vary: [Accept-Encoding]
+ content-length: ['1272']
status: {code: 200, message: OK}
version: 1
diff --git a/src/command_modules/azure-cli-resource/tests/recordings/latest/test_group_deployment_no_wait.yaml b/src/command_modules/azure-cli-resource/tests/recordings/latest/test_group_deployment_no_wait.yaml
index 0ccaacd4d..b204c5dcd 100644
--- a/src/command_modules/azure-cli-resource/tests/recordings/latest/test_group_deployment_no_wait.yaml
+++ b/src/command_modules/azure-cli-resource/tests/recordings/latest/test_group_deployment_no_wait.yaml
@@ -1,39 +1,40 @@
interactions:
- request:
- body: '{"properties": {"parameters": {"name": {"value": "azure-cli-deploy-test-nsg1"},
- "location": {"value": "westus"}}, "mode": "Incremental", "template": {"contentVersion":
- "1.0.0.0", "outputs": {"NewNSG": {"type": "object", "value": "[reference(parameters(''name''))]"}},
- "resources": [{"apiVersion": "2015-06-15", "name": "[parameters(''name'')]",
- "dependsOn": [], "properties": {"securityRules": []}, "location": "[parameters(''location'')]",
- "type": "Microsoft.Network/networkSecurityGroups"}], "variables": {}, "parameters":
- {"name": {"metadata": {"description": "Name of the network security group."},
- "type": "string"}, "location": {"metadata": {"description": "Location for the
- network security group."}, "defaultValue": "[resourceGroup().location]", "type":
- "string"}}, "$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#"}}}'
+ body: '{"properties": {"mode": "Incremental", "template": {"variables": {}, "parameters":
+ {"location": {"metadata": {"description": "Location for the network security
+ group."}, "type": "string", "defaultValue": "[resourceGroup().location]"}, "name":
+ {"metadata": {"description": "Name of the network security group."}, "type":
+ "string"}}, "resources": [{"dependsOn": [], "properties": {"securityRules":
+ []}, "name": "[parameters(''name'')]", "location": "[parameters(''location'')]",
+ "apiVersion": "2015-06-15", "type": "Microsoft.Network/networkSecurityGroups"}],
+ "outputs": {"NewNSG": {"value": "[reference(parameters(''name''))]", "type":
+ "object"}}, "contentVersion": "1.0.0.0", "$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#"},
+ "parameters": {"location": {"value": "westus"}, "name": {"value": "azure-cli-deploy-test-nsg1"}}}}'
headers:
Accept: [application/json]
Accept-Encoding: ['gzip, deflate']
Connection: [keep-alive]
Content-Length: ['863']
Content-Type: [application/json; charset=utf-8]
- User-Agent: [python/3.5.0 (Windows-10.0.14393) requests/2.9.1 msrest/0.4.4 msrest_azure/0.4.3
- resourcemanagementclient/0.30.2 Azure-SDK-For-Python AZURECLI/TEST/0.1.0b10]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
+ msrest_azure/0.4.7 resourcemanagementclient/1.1.0rc1 Azure-SDK-For-Python
+ AZURECLI/TEST/2.0.7+dev]
accept-language: [en-US]
- x-ms-client-request-id: [a850e48a-b742-11e6-8e81-64510658e3b3]
+ x-ms-client-request-id: [e13d2610-4c90-11e7-82c0-a0b3ccf7272a]
method: PUT
uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/azure-cli-deployment-test/providers/Microsoft.Resources/deployments/mock-deployment?api-version=2017-05-10
response:
- body: {string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Resources/deployments/azure-cli-deployment","name":"azure-cli-deployment","properties":{"parameters":{"name":{"type":"String","value":"azure-cli-deploy-test-nsg1"},"location":{"type":"String","value":"westus"}},"mode":"Incremental","provisioningState":"Accepted","timestamp":"2016-11-30T21:19:24.1179296Z","duration":"PT0.6497169S","correlationId":"03126bac-629c-4bb9-ac9c-8265ae96e2fa","providers":[{"namespace":"Microsoft.Network","resourceTypes":[{"resourceType":"networkSecurityGroups","locations":["westus"]}]}],"dependencies":[]}}'}
+ body: {string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Resources/deployments/azure-cli-deployment","name":"azure-cli-deployment","properties":{"templateHash":"3550658671258663593","parameters":{"location":{"type":"String","value":"westus"},"name":{"type":"String","value":"azure-cli-deploy-test-nsg1"}},"mode":"Incremental","provisioningState":"Accepted","timestamp":"2017-06-08T21:24:40.8557652Z","duration":"PT0.5740685S","correlationId":"1b0c61d7-cc95-4fb0-aedf-8e3b3b348968","providers":[{"namespace":"Microsoft.Network","resourceTypes":[{"resourceType":"networkSecurityGroups","locations":["westus"]}]}],"dependencies":[]}}'}
headers:
- Azure-AsyncOperation: ['https://management.azure.com/subscriptions/0b1f6471-1bf0-4dda-aec3-cb9272f09590/resourcegroups/azure-cli-deployment-test/providers/Microsoft.Resources/deployments/azure-cli-deployment/operationStatuses/08587210661220094393?api-version=2017-05-10']
+ Azure-AsyncOperation: ['https://management.azure.com/subscriptions/0b1f6471-1bf0-4dda-aec3-cb9272f09590/resourcegroups/azure-cli-deployment-test/providers/Microsoft.Resources/deployments/azure-cli-deployment/operationStatuses/08587046498051959183?api-version=2017-05-10']
Cache-Control: [no-cache]
- Content-Length: ['656']
+ Content-Length: ['699']
Content-Type: [application/json; charset=utf-8]
- Date: ['Wed, 30 Nov 2016 21:19:24 GMT']
+ Date: ['Thu, 08 Jun 2017 21:24:40 GMT']
Expires: ['-1']
Pragma: [no-cache]
Strict-Transport-Security: [max-age=31536000; includeSubDomains]
- x-ms-ratelimit-remaining-subscription-writes: ['1197']
+ x-ms-ratelimit-remaining-subscription-writes: ['1199']
status: {code: 201, message: Created}
- request:
body: null
@@ -42,23 +43,25 @@ interactions:
Accept-Encoding: ['gzip, deflate']
Connection: [keep-alive]
Content-Type: [application/json; charset=utf-8]
- User-Agent: [python/3.5.0 (Windows-10.0.14393) requests/2.9.1 msrest/0.4.4 msrest_azure/0.4.3
- resourcemanagementclient/0.30.2 Azure-SDK-For-Python AZURECLI/TEST/0.1.0b10]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
+ msrest_azure/0.4.7 resourcemanagementclient/1.1.0rc1 Azure-SDK-For-Python
+ AZURECLI/TEST/2.0.7+dev]
accept-language: [en-US]
- x-ms-client-request-id: [b12e74d4-b742-11e6-9c32-64510658e3b3]
+ x-ms-client-request-id: [e280db8a-4c90-11e7-ac9d-a0b3ccf7272a]
method: GET
uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/azure-cli-deployment-test/providers/Microsoft.Resources/deployments/mock-deployment?api-version=2017-05-10
response:
- body: {string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Resources/deployments/azure-cli-deployment","name":"azure-cli-deployment","properties":{"parameters":{"name":{"type":"String","value":"azure-cli-deploy-test-nsg1"},"location":{"type":"String","value":"westus"}},"mode":"Incremental","provisioningState":"Running","timestamp":"2016-11-30T21:19:24.7626283Z","duration":"PT1.2944156S","correlationId":"03126bac-629c-4bb9-ac9c-8265ae96e2fa","providers":[{"namespace":"Microsoft.Network","resourceTypes":[{"resourceType":"networkSecurityGroups","locations":["westus"]}]}],"dependencies":[]}}'}
+ body: {string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Resources/deployments/azure-cli-deployment","name":"azure-cli-deployment","properties":{"templateHash":"3550658671258663593","parameters":{"location":{"type":"String","value":"westus"},"name":{"type":"String","value":"azure-cli-deploy-test-nsg1"}},"mode":"Incremental","provisioningState":"Running","timestamp":"2017-06-08T21:24:41.5345896Z","duration":"PT1.2528929S","correlationId":"1b0c61d7-cc95-4fb0-aedf-8e3b3b348968","providers":[{"namespace":"Microsoft.Network","resourceTypes":[{"resourceType":"networkSecurityGroups","locations":["westus"]}]}],"dependencies":[]}}'}
headers:
Cache-Control: [no-cache]
Content-Type: [application/json; charset=utf-8]
- Date: ['Wed, 30 Nov 2016 21:19:34 GMT']
+ Date: ['Thu, 08 Jun 2017 21:24:41 GMT']
Expires: ['-1']
Pragma: [no-cache]
+ Retry-After: ['5']
Strict-Transport-Security: [max-age=31536000; includeSubDomains]
Vary: [Accept-Encoding]
- content-length: ['655']
+ content-length: ['698']
status: {code: 200, message: OK}
- request:
body: null
@@ -67,29 +70,30 @@ interactions:
Accept-Encoding: ['gzip, deflate']
Connection: [keep-alive]
Content-Type: [application/json; charset=utf-8]
- User-Agent: [python/3.5.0 (Windows-10.0.14393) requests/2.9.1 msrest/0.4.4 msrest_azure/0.4.3
- resourcemanagementclient/0.30.2 Azure-SDK-For-Python AZURECLI/TEST/0.1.0b10]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
+ msrest_azure/0.4.7 resourcemanagementclient/1.1.0rc1 Azure-SDK-For-Python
+ AZURECLI/TEST/2.0.7+dev]
accept-language: [en-US]
- x-ms-client-request-id: [c37219ba-b742-11e6-871c-64510658e3b3]
+ x-ms-client-request-id: [f49aa2c0-4c90-11e7-9a02-a0b3ccf7272a]
method: GET
uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/azure-cli-deployment-test/providers/Microsoft.Resources/deployments/mock-deployment?api-version=2017-05-10
response:
- body: {string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Resources/deployments/azure-cli-deployment","name":"azure-cli-deployment","properties":{"parameters":{"name":{"type":"String","value":"azure-cli-deploy-test-nsg1"},"location":{"type":"String","value":"westus"}},"mode":"Incremental","provisioningState":"Succeeded","timestamp":"2016-11-30T21:19:41.0645299Z","duration":"PT17.5963172S","correlationId":"03126bac-629c-4bb9-ac9c-8265ae96e2fa","providers":[{"namespace":"Microsoft.Network","resourceTypes":[{"resourceType":"networkSecurityGroups","locations":["westus"]}]}],"dependencies":[],"outputs":{"newNSG":{"type":"Object","value":{"provisioningState":"Succeeded","resourceGuid":"2bdf11b3-367f-4bdc-a1af-7e98197a2b2e","securityRules":[],"defaultSecurityRules":[{"name":"AllowVnetInBound","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Network/networkSecurityGroups/azure-cli-deploy-test-nsg1/defaultSecurityRules/AllowVnetInBound","etag":"W/\"8a782b4c-6ea8-4138-ac17-e1ef4170b1a7\"","properties":{"provisioningState":"Succeeded","description":"Allow
- inbound traffic from all VMs in VNET","protocol":"*","sourcePortRange":"*","destinationPortRange":"*","sourceAddressPrefix":"VirtualNetwork","destinationAddressPrefix":"VirtualNetwork","access":"Allow","priority":65000,"direction":"Inbound"}},{"name":"AllowAzureLoadBalancerInBound","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Network/networkSecurityGroups/azure-cli-deploy-test-nsg1/defaultSecurityRules/AllowAzureLoadBalancerInBound","etag":"W/\"8a782b4c-6ea8-4138-ac17-e1ef4170b1a7\"","properties":{"provisioningState":"Succeeded","description":"Allow
- inbound traffic from azure load balancer","protocol":"*","sourcePortRange":"*","destinationPortRange":"*","sourceAddressPrefix":"AzureLoadBalancer","destinationAddressPrefix":"*","access":"Allow","priority":65001,"direction":"Inbound"}},{"name":"DenyAllInBound","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Network/networkSecurityGroups/azure-cli-deploy-test-nsg1/defaultSecurityRules/DenyAllInBound","etag":"W/\"8a782b4c-6ea8-4138-ac17-e1ef4170b1a7\"","properties":{"provisioningState":"Succeeded","description":"Deny
- all inbound traffic","protocol":"*","sourcePortRange":"*","destinationPortRange":"*","sourceAddressPrefix":"*","destinationAddressPrefix":"*","access":"Deny","priority":65500,"direction":"Inbound"}},{"name":"AllowVnetOutBound","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Network/networkSecurityGroups/azure-cli-deploy-test-nsg1/defaultSecurityRules/AllowVnetOutBound","etag":"W/\"8a782b4c-6ea8-4138-ac17-e1ef4170b1a7\"","properties":{"provisioningState":"Succeeded","description":"Allow
- outbound traffic from all VMs to all VMs in VNET","protocol":"*","sourcePortRange":"*","destinationPortRange":"*","sourceAddressPrefix":"VirtualNetwork","destinationAddressPrefix":"VirtualNetwork","access":"Allow","priority":65000,"direction":"Outbound"}},{"name":"AllowInternetOutBound","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Network/networkSecurityGroups/azure-cli-deploy-test-nsg1/defaultSecurityRules/AllowInternetOutBound","etag":"W/\"8a782b4c-6ea8-4138-ac17-e1ef4170b1a7\"","properties":{"provisioningState":"Succeeded","description":"Allow
- outbound traffic from all VMs to Internet","protocol":"*","sourcePortRange":"*","destinationPortRange":"*","sourceAddressPrefix":"*","destinationAddressPrefix":"Internet","access":"Allow","priority":65001,"direction":"Outbound"}},{"name":"DenyAllOutBound","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Network/networkSecurityGroups/azure-cli-deploy-test-nsg1/defaultSecurityRules/DenyAllOutBound","etag":"W/\"8a782b4c-6ea8-4138-ac17-e1ef4170b1a7\"","properties":{"provisioningState":"Succeeded","description":"Deny
- all outbound traffic","protocol":"*","sourcePortRange":"*","destinationPortRange":"*","sourceAddressPrefix":"*","destinationAddressPrefix":"*","access":"Deny","priority":65500,"direction":"Outbound"}}]}}},"outputResources":[{"id":"Microsoft.Network/networkSecurityGroups/azure-cli-deploy-test-nsg1"}]}}'}
+ body: {string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Resources/deployments/azure-cli-deployment","name":"azure-cli-deployment","properties":{"templateHash":"3550658671258663593","parameters":{"location":{"type":"String","value":"westus"},"name":{"type":"String","value":"azure-cli-deploy-test-nsg1"}},"mode":"Incremental","provisioningState":"Succeeded","timestamp":"2017-06-08T21:24:54.0359729Z","duration":"PT13.7542762S","correlationId":"1b0c61d7-cc95-4fb0-aedf-8e3b3b348968","providers":[{"namespace":"Microsoft.Network","resourceTypes":[{"resourceType":"networkSecurityGroups","locations":["westus"]}]}],"dependencies":[],"outputs":{"newNSG":{"type":"Object","value":{"provisioningState":"Succeeded","resourceGuid":"89520818-a2bd-43ee-93ba-5408df6a5106","securityRules":[],"defaultSecurityRules":[{"name":"AllowVnetInBound","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Network/networkSecurityGroups/azure-cli-deploy-test-nsg1/defaultSecurityRules/AllowVnetInBound","etag":"W/\"c0f5e666-9d3b-41c2-a26d-b0818aeabbbb\"","properties":{"provisioningState":"Succeeded","description":"Allow
+ inbound traffic from all VMs in VNET","protocol":"*","sourcePortRange":"*","destinationPortRange":"*","sourceAddressPrefix":"VirtualNetwork","destinationAddressPrefix":"VirtualNetwork","access":"Allow","priority":65000,"direction":"Inbound"}},{"name":"AllowAzureLoadBalancerInBound","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Network/networkSecurityGroups/azure-cli-deploy-test-nsg1/defaultSecurityRules/AllowAzureLoadBalancerInBound","etag":"W/\"c0f5e666-9d3b-41c2-a26d-b0818aeabbbb\"","properties":{"provisioningState":"Succeeded","description":"Allow
+ inbound traffic from azure load balancer","protocol":"*","sourcePortRange":"*","destinationPortRange":"*","sourceAddressPrefix":"AzureLoadBalancer","destinationAddressPrefix":"*","access":"Allow","priority":65001,"direction":"Inbound"}},{"name":"DenyAllInBound","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Network/networkSecurityGroups/azure-cli-deploy-test-nsg1/defaultSecurityRules/DenyAllInBound","etag":"W/\"c0f5e666-9d3b-41c2-a26d-b0818aeabbbb\"","properties":{"provisioningState":"Succeeded","description":"Deny
+ all inbound traffic","protocol":"*","sourcePortRange":"*","destinationPortRange":"*","sourceAddressPrefix":"*","destinationAddressPrefix":"*","access":"Deny","priority":65500,"direction":"Inbound"}},{"name":"AllowVnetOutBound","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Network/networkSecurityGroups/azure-cli-deploy-test-nsg1/defaultSecurityRules/AllowVnetOutBound","etag":"W/\"c0f5e666-9d3b-41c2-a26d-b0818aeabbbb\"","properties":{"provisioningState":"Succeeded","description":"Allow
+ outbound traffic from all VMs to all VMs in VNET","protocol":"*","sourcePortRange":"*","destinationPortRange":"*","sourceAddressPrefix":"VirtualNetwork","destinationAddressPrefix":"VirtualNetwork","access":"Allow","priority":65000,"direction":"Outbound"}},{"name":"AllowInternetOutBound","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Network/networkSecurityGroups/azure-cli-deploy-test-nsg1/defaultSecurityRules/AllowInternetOutBound","etag":"W/\"c0f5e666-9d3b-41c2-a26d-b0818aeabbbb\"","properties":{"provisioningState":"Succeeded","description":"Allow
+ outbound traffic from all VMs to Internet","protocol":"*","sourcePortRange":"*","destinationPortRange":"*","sourceAddressPrefix":"*","destinationAddressPrefix":"Internet","access":"Allow","priority":65001,"direction":"Outbound"}},{"name":"DenyAllOutBound","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Network/networkSecurityGroups/azure-cli-deploy-test-nsg1/defaultSecurityRules/DenyAllOutBound","etag":"W/\"c0f5e666-9d3b-41c2-a26d-b0818aeabbbb\"","properties":{"provisioningState":"Succeeded","description":"Deny
+ all outbound traffic","protocol":"*","sourcePortRange":"*","destinationPortRange":"*","sourceAddressPrefix":"*","destinationAddressPrefix":"*","access":"Deny","priority":65500,"direction":"Outbound"}}]}}},"outputResources":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Network/networkSecurityGroups/azure-cli-deploy-test-nsg1"}]}}'}
headers:
Cache-Control: [no-cache]
Content-Type: [application/json; charset=utf-8]
- Date: ['Wed, 30 Nov 2016 21:20:05 GMT']
+ Date: ['Thu, 08 Jun 2017 21:25:12 GMT']
Expires: ['-1']
Pragma: [no-cache]
Strict-Transport-Security: [max-age=31536000; includeSubDomains]
Vary: [Accept-Encoding]
- content-length: ['4495']
+ content-length: ['4683']
status: {code: 200, message: OK}
- request:
body: null
@@ -98,28 +102,29 @@ interactions:
Accept-Encoding: ['gzip, deflate']
Connection: [keep-alive]
Content-Type: [application/json; charset=utf-8]
- User-Agent: [python/3.5.0 (Windows-10.0.14393) requests/2.9.1 msrest/0.4.4 msrest_azure/0.4.3
- resourcemanagementclient/0.30.2 Azure-SDK-For-Python AZURECLI/TEST/0.1.0b10]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
+ msrest_azure/0.4.7 resourcemanagementclient/1.1.0rc1 Azure-SDK-For-Python
+ AZURECLI/TEST/2.0.7+dev]
accept-language: [en-US]
- x-ms-client-request-id: [cb076a38-b742-11e6-a5d5-64510658e3b3]
+ x-ms-client-request-id: [f4cc42ca-4c90-11e7-aca5-a0b3ccf7272a]
method: GET
uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/azure-cli-deployment-test/providers/Microsoft.Resources/deployments/mock-deployment?api-version=2017-05-10
response:
- body: {string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Resources/deployments/azure-cli-deployment","name":"azure-cli-deployment","properties":{"parameters":{"name":{"type":"String","value":"azure-cli-deploy-test-nsg1"},"location":{"type":"String","value":"westus"}},"mode":"Incremental","provisioningState":"Succeeded","timestamp":"2016-11-30T21:19:41.0645299Z","duration":"PT17.5963172S","correlationId":"03126bac-629c-4bb9-ac9c-8265ae96e2fa","providers":[{"namespace":"Microsoft.Network","resourceTypes":[{"resourceType":"networkSecurityGroups","locations":["westus"]}]}],"dependencies":[],"outputs":{"newNSG":{"type":"Object","value":{"provisioningState":"Succeeded","resourceGuid":"2bdf11b3-367f-4bdc-a1af-7e98197a2b2e","securityRules":[],"defaultSecurityRules":[{"name":"AllowVnetInBound","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Network/networkSecurityGroups/azure-cli-deploy-test-nsg1/defaultSecurityRules/AllowVnetInBound","etag":"W/\"8a782b4c-6ea8-4138-ac17-e1ef4170b1a7\"","properties":{"provisioningState":"Succeeded","description":"Allow
- inbound traffic from all VMs in VNET","protocol":"*","sourcePortRange":"*","destinationPortRange":"*","sourceAddressPrefix":"VirtualNetwork","destinationAddressPrefix":"VirtualNetwork","access":"Allow","priority":65000,"direction":"Inbound"}},{"name":"AllowAzureLoadBalancerInBound","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Network/networkSecurityGroups/azure-cli-deploy-test-nsg1/defaultSecurityRules/AllowAzureLoadBalancerInBound","etag":"W/\"8a782b4c-6ea8-4138-ac17-e1ef4170b1a7\"","properties":{"provisioningState":"Succeeded","description":"Allow
- inbound traffic from azure load balancer","protocol":"*","sourcePortRange":"*","destinationPortRange":"*","sourceAddressPrefix":"AzureLoadBalancer","destinationAddressPrefix":"*","access":"Allow","priority":65001,"direction":"Inbound"}},{"name":"DenyAllInBound","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Network/networkSecurityGroups/azure-cli-deploy-test-nsg1/defaultSecurityRules/DenyAllInBound","etag":"W/\"8a782b4c-6ea8-4138-ac17-e1ef4170b1a7\"","properties":{"provisioningState":"Succeeded","description":"Deny
- all inbound traffic","protocol":"*","sourcePortRange":"*","destinationPortRange":"*","sourceAddressPrefix":"*","destinationAddressPrefix":"*","access":"Deny","priority":65500,"direction":"Inbound"}},{"name":"AllowVnetOutBound","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Network/networkSecurityGroups/azure-cli-deploy-test-nsg1/defaultSecurityRules/AllowVnetOutBound","etag":"W/\"8a782b4c-6ea8-4138-ac17-e1ef4170b1a7\"","properties":{"provisioningState":"Succeeded","description":"Allow
- outbound traffic from all VMs to all VMs in VNET","protocol":"*","sourcePortRange":"*","destinationPortRange":"*","sourceAddressPrefix":"VirtualNetwork","destinationAddressPrefix":"VirtualNetwork","access":"Allow","priority":65000,"direction":"Outbound"}},{"name":"AllowInternetOutBound","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Network/networkSecurityGroups/azure-cli-deploy-test-nsg1/defaultSecurityRules/AllowInternetOutBound","etag":"W/\"8a782b4c-6ea8-4138-ac17-e1ef4170b1a7\"","properties":{"provisioningState":"Succeeded","description":"Allow
- outbound traffic from all VMs to Internet","protocol":"*","sourcePortRange":"*","destinationPortRange":"*","sourceAddressPrefix":"*","destinationAddressPrefix":"Internet","access":"Allow","priority":65001,"direction":"Outbound"}},{"name":"DenyAllOutBound","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Network/networkSecurityGroups/azure-cli-deploy-test-nsg1/defaultSecurityRules/DenyAllOutBound","etag":"W/\"8a782b4c-6ea8-4138-ac17-e1ef4170b1a7\"","properties":{"provisioningState":"Succeeded","description":"Deny
- all outbound traffic","protocol":"*","sourcePortRange":"*","destinationPortRange":"*","sourceAddressPrefix":"*","destinationAddressPrefix":"*","access":"Deny","priority":65500,"direction":"Outbound"}}]}}},"outputResources":[{"id":"Microsoft.Network/networkSecurityGroups/azure-cli-deploy-test-nsg1"}]}}'}
+ body: {string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Resources/deployments/azure-cli-deployment","name":"azure-cli-deployment","properties":{"templateHash":"3550658671258663593","parameters":{"location":{"type":"String","value":"westus"},"name":{"type":"String","value":"azure-cli-deploy-test-nsg1"}},"mode":"Incremental","provisioningState":"Succeeded","timestamp":"2017-06-08T21:24:54.0359729Z","duration":"PT13.7542762S","correlationId":"1b0c61d7-cc95-4fb0-aedf-8e3b3b348968","providers":[{"namespace":"Microsoft.Network","resourceTypes":[{"resourceType":"networkSecurityGroups","locations":["westus"]}]}],"dependencies":[],"outputs":{"newNSG":{"type":"Object","value":{"provisioningState":"Succeeded","resourceGuid":"89520818-a2bd-43ee-93ba-5408df6a5106","securityRules":[],"defaultSecurityRules":[{"name":"AllowVnetInBound","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Network/networkSecurityGroups/azure-cli-deploy-test-nsg1/defaultSecurityRules/AllowVnetInBound","etag":"W/\"c0f5e666-9d3b-41c2-a26d-b0818aeabbbb\"","properties":{"provisioningState":"Succeeded","description":"Allow
+ inbound traffic from all VMs in VNET","protocol":"*","sourcePortRange":"*","destinationPortRange":"*","sourceAddressPrefix":"VirtualNetwork","destinationAddressPrefix":"VirtualNetwork","access":"Allow","priority":65000,"direction":"Inbound"}},{"name":"AllowAzureLoadBalancerInBound","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Network/networkSecurityGroups/azure-cli-deploy-test-nsg1/defaultSecurityRules/AllowAzureLoadBalancerInBound","etag":"W/\"c0f5e666-9d3b-41c2-a26d-b0818aeabbbb\"","properties":{"provisioningState":"Succeeded","description":"Allow
+ inbound traffic from azure load balancer","protocol":"*","sourcePortRange":"*","destinationPortRange":"*","sourceAddressPrefix":"AzureLoadBalancer","destinationAddressPrefix":"*","access":"Allow","priority":65001,"direction":"Inbound"}},{"name":"DenyAllInBound","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Network/networkSecurityGroups/azure-cli-deploy-test-nsg1/defaultSecurityRules/DenyAllInBound","etag":"W/\"c0f5e666-9d3b-41c2-a26d-b0818aeabbbb\"","properties":{"provisioningState":"Succeeded","description":"Deny
+ all inbound traffic","protocol":"*","sourcePortRange":"*","destinationPortRange":"*","sourceAddressPrefix":"*","destinationAddressPrefix":"*","access":"Deny","priority":65500,"direction":"Inbound"}},{"name":"AllowVnetOutBound","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Network/networkSecurityGroups/azure-cli-deploy-test-nsg1/defaultSecurityRules/AllowVnetOutBound","etag":"W/\"c0f5e666-9d3b-41c2-a26d-b0818aeabbbb\"","properties":{"provisioningState":"Succeeded","description":"Allow
+ outbound traffic from all VMs to all VMs in VNET","protocol":"*","sourcePortRange":"*","destinationPortRange":"*","sourceAddressPrefix":"VirtualNetwork","destinationAddressPrefix":"VirtualNetwork","access":"Allow","priority":65000,"direction":"Outbound"}},{"name":"AllowInternetOutBound","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Network/networkSecurityGroups/azure-cli-deploy-test-nsg1/defaultSecurityRules/AllowInternetOutBound","etag":"W/\"c0f5e666-9d3b-41c2-a26d-b0818aeabbbb\"","properties":{"provisioningState":"Succeeded","description":"Allow
+ outbound traffic from all VMs to Internet","protocol":"*","sourcePortRange":"*","destinationPortRange":"*","sourceAddressPrefix":"*","destinationAddressPrefix":"Internet","access":"Allow","priority":65001,"direction":"Outbound"}},{"name":"DenyAllOutBound","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Network/networkSecurityGroups/azure-cli-deploy-test-nsg1/defaultSecurityRules/DenyAllOutBound","etag":"W/\"c0f5e666-9d3b-41c2-a26d-b0818aeabbbb\"","properties":{"provisioningState":"Succeeded","description":"Deny
+ all outbound traffic","protocol":"*","sourcePortRange":"*","destinationPortRange":"*","sourceAddressPrefix":"*","destinationAddressPrefix":"*","access":"Deny","priority":65500,"direction":"Outbound"}}]}}},"outputResources":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Network/networkSecurityGroups/azure-cli-deploy-test-nsg1"}]}}'}
headers:
Cache-Control: [no-cache]
Content-Type: [application/json; charset=utf-8]
- Date: ['Wed, 30 Nov 2016 21:20:18 GMT']
+ Date: ['Thu, 08 Jun 2017 21:25:11 GMT']
Expires: ['-1']
Pragma: [no-cache]
Strict-Transport-Security: [max-age=31536000; includeSubDomains]
Vary: [Accept-Encoding]
- content-length: ['4495']
+ content-length: ['4683']
status: {code: 200, message: OK}
version: 1
diff --git a/src/command_modules/azure-cli-resource/tests/recordings/latest/test_group_deployment_thru_uri.yaml b/src/command_modules/azure-cli-resource/tests/recordings/latest/test_group_deployment_thru_uri.yaml
index 6c3e0101a..73fe2108e 100644
--- a/src/command_modules/azure-cli-resource/tests/recordings/latest/test_group_deployment_thru_uri.yaml
+++ b/src/command_modules/azure-cli-resource/tests/recordings/latest/test_group_deployment_thru_uri.yaml
@@ -30,9 +30,9 @@ interactions:
Content-Length: ['918']
Content-Security-Policy: [default-src 'none'; style-src 'unsafe-inline']
Content-Type: [text/plain; charset=utf-8]
- Date: ['Thu, 08 Jun 2017 17:04:48 GMT']
+ Date: ['Thu, 08 Jun 2017 21:23:17 GMT']
ETag: ['"f35114273a8c086d7190c66b2f904666791ad088"']
- Expires: ['Thu, 08 Jun 2017 17:09:48 GMT']
+ Expires: ['Thu, 08 Jun 2017 21:28:17 GMT']
Source-Age: ['0']
Strict-Transport-Security: [max-age=31536000]
Vary: ['Authorization,Accept-Encoding']
@@ -40,43 +40,43 @@ interactions:
X-Cache: [MISS]
X-Cache-Hits: ['0']
X-Content-Type-Options: [nosniff]
- X-Fastly-Request-ID: [949f2cf8eb9367de6223da2a64471022e1eca46c]
+ X-Fastly-Request-ID: [640255d5343ea951b7f1280a9037fbad0434ee15]
X-Frame-Options: [deny]
X-Geo-Block-List: ['']
- X-GitHub-Request-Id: ['5DB8:2F9C:447C6D:472F5E:593983B0']
- X-Served-By: [cache-dfw1845-DFW]
- X-Timer: ['S1496941489.712248,VS0,VE77']
+ X-GitHub-Request-Id: ['624C:2F9B:1A8A68:1BAC0F:5939C044']
+ X-Served-By: [cache-dfw1829-DFW]
+ X-Timer: ['S1496956997.219948,VS0,VE75']
X-XSS-Protection: [1; mode=block]
status: {code: 200, message: OK}
- request:
- body: '{"properties": {"parameters": {"location": {"value": "westus"}, "name":
- {"value": "azure-cli-deploy-test-nsg1"}}, "mode": "Incremental", "templateLink":
- {"uri": "https://raw.githubusercontent.com/Azure/azure-cli/master/src/command_modules/azure-cli-resource/tests/simple_deploy.json"}}}'
+ body: '{"properties": {"templateLink": {"uri": "https://raw.githubusercontent.com/Azure/azure-cli/master/src/command_modules/azure-cli-resource/tests/simple_deploy.json"},
+ "mode": "Incremental", "parameters": {"location": {"value": "westus"}, "name":
+ {"value": "azure-cli-deploy-test-nsg1"}}}}'
headers:
Accept: [application/json]
Accept-Encoding: ['gzip, deflate']
Connection: [keep-alive]
Content-Length: ['286']
Content-Type: [application/json; charset=utf-8]
- User-Agent: [python/3.5.3 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
+ User-Agent: [python/3.5.1 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
msrest_azure/0.4.7 resourcemanagementclient/1.1.0rc1 Azure-SDK-For-Python
AZURECLI/TEST/2.0.7+dev]
accept-language: [en-US]
- x-ms-client-request-id: [9455660a-4c6c-11e7-99f9-985fd3386693]
+ x-ms-client-request-id: [b038fb68-4c90-11e7-bd3d-a0b3ccf7272a]
method: PUT
uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/azure-cli-deployment-uri-test/providers/Microsoft.Resources/deployments/mock-deployment?api-version=2017-05-10
response:
- body: {string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-uri-test/providers/Microsoft.Resources/deployments/simple_deploy","name":"simple_deploy","properties":{"templateLink":{"uri":"https://raw.githubusercontent.com/Azure/azure-cli/master/src/command_modules/azure-cli-resource/tests/simple_deploy.json","contentVersion":"1.0.0.0"},"templateHash":"3550658671258663593","parameters":{"location":{"type":"String","value":"westus"},"name":{"type":"String","value":"azure-cli-deploy-test-nsg1"}},"mode":"Incremental","provisioningState":"Accepted","timestamp":"2017-06-08T17:04:50.5771758Z","duration":"PT0.2385447S","correlationId":"d58a557e-4bfd-4952-93a0-af907222dc8c","providers":[{"namespace":"Microsoft.Network","resourceTypes":[{"resourceType":"networkSecurityGroups","locations":["westus"]}]}],"dependencies":[]}}'}
+ body: {string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-uri-test/providers/Microsoft.Resources/deployments/simple_deploy","name":"simple_deploy","properties":{"templateLink":{"uri":"https://raw.githubusercontent.com/Azure/azure-cli/master/src/command_modules/azure-cli-resource/tests/simple_deploy.json","contentVersion":"1.0.0.0"},"templateHash":"3550658671258663593","parameters":{"location":{"type":"String","value":"westus"},"name":{"type":"String","value":"azure-cli-deploy-test-nsg1"}},"mode":"Incremental","provisioningState":"Accepted","timestamp":"2017-06-08T21:23:18.8662612Z","duration":"PT0.5096614S","correlationId":"4df72875-101b-46fe-8274-6bf4a65f63c1","providers":[{"namespace":"Microsoft.Network","resourceTypes":[{"resourceType":"networkSecurityGroups","locations":["westus"]}]}],"dependencies":[]}}'}
headers:
- Azure-AsyncOperation: ['https://management.azure.com/subscriptions/ae43b1e3-c35d-4c8c-bc0d-f148b4c52b78/resourcegroups/azure-cli-deployment-uri-test/providers/Microsoft.Resources/deployments/simple_deploy/operationStatuses/08587046653951389853?api-version=2017-05-10']
+ Azure-AsyncOperation: ['https://management.azure.com/subscriptions/0b1f6471-1bf0-4dda-aec3-cb9272f09590/resourcegroups/azure-cli-deployment-uri-test/providers/Microsoft.Resources/deployments/simple_deploy/operationStatuses/08587046498871210207?api-version=2017-05-10']
Cache-Control: [no-cache]
Content-Length: ['862']
Content-Type: [application/json; charset=utf-8]
- Date: ['Thu, 08 Jun 2017 17:04:49 GMT']
+ Date: ['Thu, 08 Jun 2017 21:23:18 GMT']
Expires: ['-1']
Pragma: [no-cache]
Strict-Transport-Security: [max-age=31536000; includeSubDomains]
- x-ms-ratelimit-remaining-subscription-writes: ['1199']
+ x-ms-ratelimit-remaining-subscription-writes: ['1195']
status: {code: 201, message: Created}
- request:
body: null
@@ -85,19 +85,19 @@ interactions:
Accept-Encoding: ['gzip, deflate']
Connection: [keep-alive]
Content-Type: [application/json; charset=utf-8]
- User-Agent: [python/3.5.3 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
+ User-Agent: [python/3.5.1 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
msrest_azure/0.4.7 resourcemanagementclient/1.1.0rc1 Azure-SDK-For-Python
AZURECLI/TEST/2.0.7+dev]
accept-language: [en-US]
- x-ms-client-request-id: [9455660a-4c6c-11e7-99f9-985fd3386693]
+ x-ms-client-request-id: [b038fb68-4c90-11e7-bd3d-a0b3ccf7272a]
method: GET
- uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/azure-cli-deployment-uri-test/providers/Microsoft.Resources/deployments/mock-deployment/operationStatuses/08587046653951389853?api-version=2017-05-10
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/azure-cli-deployment-uri-test/providers/Microsoft.Resources/deployments/mock-deployment/operationStatuses/08587046498871210207?api-version=2017-05-10
response:
body: {string: '{"status":"Succeeded"}'}
headers:
Cache-Control: [no-cache]
Content-Type: [application/json; charset=utf-8]
- Date: ['Thu, 08 Jun 2017 17:05:20 GMT']
+ Date: ['Thu, 08 Jun 2017 21:23:48 GMT']
Expires: ['-1']
Pragma: [no-cache]
Strict-Transport-Security: [max-age=31536000; includeSubDomains]
@@ -111,25 +111,25 @@ interactions:
Accept-Encoding: ['gzip, deflate']
Connection: [keep-alive]
Content-Type: [application/json; charset=utf-8]
- User-Agent: [python/3.5.3 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
+ User-Agent: [python/3.5.1 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
msrest_azure/0.4.7 resourcemanagementclient/1.1.0rc1 Azure-SDK-For-Python
AZURECLI/TEST/2.0.7+dev]
accept-language: [en-US]
- x-ms-client-request-id: [9455660a-4c6c-11e7-99f9-985fd3386693]
+ x-ms-client-request-id: [b038fb68-4c90-11e7-bd3d-a0b3ccf7272a]
method: GET
uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/azure-cli-deployment-uri-test/providers/Microsoft.Resources/deployments/mock-deployment?api-version=2017-05-10
response:
- body: {string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-uri-test/providers/Microsoft.Resources/deployments/simple_deploy","name":"simple_deploy","properties":{"templateLink":{"uri":"https://raw.githubusercontent.com/Azure/azure-cli/master/src/command_modules/azure-cli-resource/tests/simple_deploy.json","contentVersion":"1.0.0.0"},"templateHash":"3550658671258663593","parameters":{"location":{"type":"String","value":"westus"},"name":{"type":"String","value":"azure-cli-deploy-test-nsg1"}},"mode":"Incremental","provisioningState":"Succeeded","timestamp":"2017-06-08T17:05:02.3027397Z","duration":"PT11.9641086S","correlationId":"d58a557e-4bfd-4952-93a0-af907222dc8c","providers":[{"namespace":"Microsoft.Network","resourceTypes":[{"resourceType":"networkSecurityGroups","locations":["westus"]}]}],"dependencies":[],"outputs":{"newNSG":{"type":"Object","value":{"provisioningState":"Succeeded","resourceGuid":"fca6843c-35f6-4c7b-9be9-54dc3316e246","securityRules":[],"defaultSecurityRules":[{"name":"AllowVnetInBound","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-uri-test/providers/Microsoft.Network/networkSecurityGroups/azure-cli-deploy-test-nsg1/defaultSecurityRules/AllowVnetInBound","etag":"W/\"e17c9599-4f83-4c84-8bd2-8cf8ffa39079\"","properties":{"provisioningState":"Succeeded","description":"Allow
- inbound traffic from all VMs in VNET","protocol":"*","sourcePortRange":"*","destinationPortRange":"*","sourceAddressPrefix":"VirtualNetwork","destinationAddressPrefix":"VirtualNetwork","access":"Allow","priority":65000,"direction":"Inbound"}},{"name":"AllowAzureLoadBalancerInBound","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-uri-test/providers/Microsoft.Network/networkSecurityGroups/azure-cli-deploy-test-nsg1/defaultSecurityRules/AllowAzureLoadBalancerInBound","etag":"W/\"e17c9599-4f83-4c84-8bd2-8cf8ffa39079\"","properties":{"provisioningState":"Succeeded","description":"Allow
- inbound traffic from azure load balancer","protocol":"*","sourcePortRange":"*","destinationPortRange":"*","sourceAddressPrefix":"AzureLoadBalancer","destinationAddressPrefix":"*","access":"Allow","priority":65001,"direction":"Inbound"}},{"name":"DenyAllInBound","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-uri-test/providers/Microsoft.Network/networkSecurityGroups/azure-cli-deploy-test-nsg1/defaultSecurityRules/DenyAllInBound","etag":"W/\"e17c9599-4f83-4c84-8bd2-8cf8ffa39079\"","properties":{"provisioningState":"Succeeded","description":"Deny
- all inbound traffic","protocol":"*","sourcePortRange":"*","destinationPortRange":"*","sourceAddressPrefix":"*","destinationAddressPrefix":"*","access":"Deny","priority":65500,"direction":"Inbound"}},{"name":"AllowVnetOutBound","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-uri-test/providers/Microsoft.Network/networkSecurityGroups/azure-cli-deploy-test-nsg1/defaultSecurityRules/AllowVnetOutBound","etag":"W/\"e17c9599-4f83-4c84-8bd2-8cf8ffa39079\"","properties":{"provisioningState":"Succeeded","description":"Allow
- outbound traffic from all VMs to all VMs in VNET","protocol":"*","sourcePortRange":"*","destinationPortRange":"*","sourceAddressPrefix":"VirtualNetwork","destinationAddressPrefix":"VirtualNetwork","access":"Allow","priority":65000,"direction":"Outbound"}},{"name":"AllowInternetOutBound","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-uri-test/providers/Microsoft.Network/networkSecurityGroups/azure-cli-deploy-test-nsg1/defaultSecurityRules/AllowInternetOutBound","etag":"W/\"e17c9599-4f83-4c84-8bd2-8cf8ffa39079\"","properties":{"provisioningState":"Succeeded","description":"Allow
- outbound traffic from all VMs to Internet","protocol":"*","sourcePortRange":"*","destinationPortRange":"*","sourceAddressPrefix":"*","destinationAddressPrefix":"Internet","access":"Allow","priority":65001,"direction":"Outbound"}},{"name":"DenyAllOutBound","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-uri-test/providers/Microsoft.Network/networkSecurityGroups/azure-cli-deploy-test-nsg1/defaultSecurityRules/DenyAllOutBound","etag":"W/\"e17c9599-4f83-4c84-8bd2-8cf8ffa39079\"","properties":{"provisioningState":"Succeeded","description":"Deny
+ body: {string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-uri-test/providers/Microsoft.Resources/deployments/simple_deploy","name":"simple_deploy","properties":{"templateLink":{"uri":"https://raw.githubusercontent.com/Azure/azure-cli/master/src/command_modules/azure-cli-resource/tests/simple_deploy.json","contentVersion":"1.0.0.0"},"templateHash":"3550658671258663593","parameters":{"location":{"type":"String","value":"westus"},"name":{"type":"String","value":"azure-cli-deploy-test-nsg1"}},"mode":"Incremental","provisioningState":"Succeeded","timestamp":"2017-06-08T21:23:42.0167355Z","duration":"PT23.6601357S","correlationId":"4df72875-101b-46fe-8274-6bf4a65f63c1","providers":[{"namespace":"Microsoft.Network","resourceTypes":[{"resourceType":"networkSecurityGroups","locations":["westus"]}]}],"dependencies":[],"outputs":{"newNSG":{"type":"Object","value":{"provisioningState":"Succeeded","resourceGuid":"2f751d8f-968d-4a0d-ae29-80f88ac79f8f","securityRules":[],"defaultSecurityRules":[{"name":"AllowVnetInBound","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-uri-test/providers/Microsoft.Network/networkSecurityGroups/azure-cli-deploy-test-nsg1/defaultSecurityRules/AllowVnetInBound","etag":"W/\"60b1f29f-2d32-479a-ade4-6cf7900a8b6e\"","properties":{"provisioningState":"Succeeded","description":"Allow
+ inbound traffic from all VMs in VNET","protocol":"*","sourcePortRange":"*","destinationPortRange":"*","sourceAddressPrefix":"VirtualNetwork","destinationAddressPrefix":"VirtualNetwork","access":"Allow","priority":65000,"direction":"Inbound"}},{"name":"AllowAzureLoadBalancerInBound","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-uri-test/providers/Microsoft.Network/networkSecurityGroups/azure-cli-deploy-test-nsg1/defaultSecurityRules/AllowAzureLoadBalancerInBound","etag":"W/\"60b1f29f-2d32-479a-ade4-6cf7900a8b6e\"","properties":{"provisioningState":"Succeeded","description":"Allow
+ inbound traffic from azure load balancer","protocol":"*","sourcePortRange":"*","destinationPortRange":"*","sourceAddressPrefix":"AzureLoadBalancer","destinationAddressPrefix":"*","access":"Allow","priority":65001,"direction":"Inbound"}},{"name":"DenyAllInBound","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-uri-test/providers/Microsoft.Network/networkSecurityGroups/azure-cli-deploy-test-nsg1/defaultSecurityRules/DenyAllInBound","etag":"W/\"60b1f29f-2d32-479a-ade4-6cf7900a8b6e\"","properties":{"provisioningState":"Succeeded","description":"Deny
+ all inbound traffic","protocol":"*","sourcePortRange":"*","destinationPortRange":"*","sourceAddressPrefix":"*","destinationAddressPrefix":"*","access":"Deny","priority":65500,"direction":"Inbound"}},{"name":"AllowVnetOutBound","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-uri-test/providers/Microsoft.Network/networkSecurityGroups/azure-cli-deploy-test-nsg1/defaultSecurityRules/AllowVnetOutBound","etag":"W/\"60b1f29f-2d32-479a-ade4-6cf7900a8b6e\"","properties":{"provisioningState":"Succeeded","description":"Allow
+ outbound traffic from all VMs to all VMs in VNET","protocol":"*","sourcePortRange":"*","destinationPortRange":"*","sourceAddressPrefix":"VirtualNetwork","destinationAddressPrefix":"VirtualNetwork","access":"Allow","priority":65000,"direction":"Outbound"}},{"name":"AllowInternetOutBound","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-uri-test/providers/Microsoft.Network/networkSecurityGroups/azure-cli-deploy-test-nsg1/defaultSecurityRules/AllowInternetOutBound","etag":"W/\"60b1f29f-2d32-479a-ade4-6cf7900a8b6e\"","properties":{"provisioningState":"Succeeded","description":"Allow
+ outbound traffic from all VMs to Internet","protocol":"*","sourcePortRange":"*","destinationPortRange":"*","sourceAddressPrefix":"*","destinationAddressPrefix":"Internet","access":"Allow","priority":65001,"direction":"Outbound"}},{"name":"DenyAllOutBound","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-uri-test/providers/Microsoft.Network/networkSecurityGroups/azure-cli-deploy-test-nsg1/defaultSecurityRules/DenyAllOutBound","etag":"W/\"60b1f29f-2d32-479a-ade4-6cf7900a8b6e\"","properties":{"provisioningState":"Succeeded","description":"Deny
all outbound traffic","protocol":"*","sourcePortRange":"*","destinationPortRange":"*","sourceAddressPrefix":"*","destinationAddressPrefix":"*","access":"Deny","priority":65500,"direction":"Outbound"}}]}}},"outputResources":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-uri-test/providers/Microsoft.Network/networkSecurityGroups/azure-cli-deploy-test-nsg1"}]}}'}
headers:
Cache-Control: [no-cache]
Content-Type: [application/json; charset=utf-8]
- Date: ['Thu, 08 Jun 2017 17:05:21 GMT']
+ Date: ['Thu, 08 Jun 2017 21:23:48 GMT']
Expires: ['-1']
Pragma: [no-cache]
Strict-Transport-Security: [max-age=31536000; includeSubDomains]
@@ -143,25 +143,25 @@ interactions:
Accept-Encoding: ['gzip, deflate']
Connection: [keep-alive]
Content-Type: [application/json; charset=utf-8]
- User-Agent: [python/3.5.3 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
+ User-Agent: [python/3.5.1 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
msrest_azure/0.4.7 resourcemanagementclient/1.1.0rc1 Azure-SDK-For-Python
AZURECLI/TEST/2.0.7+dev]
accept-language: [en-US]
- x-ms-client-request-id: [a8c58ee8-4c6c-11e7-93a7-985fd3386693]
+ x-ms-client-request-id: [c3f07b82-4c90-11e7-920e-a0b3ccf7272a]
method: GET
uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/azure-cli-deployment-uri-test/providers/Microsoft.Resources/deployments/mock-deployment?api-version=2017-05-10
response:
- body: {string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-uri-test/providers/Microsoft.Resources/deployments/simple_deploy","name":"simple_deploy","properties":{"templateLink":{"uri":"https://raw.githubusercontent.com/Azure/azure-cli/master/src/command_modules/azure-cli-resource/tests/simple_deploy.json","contentVersion":"1.0.0.0"},"templateHash":"3550658671258663593","parameters":{"location":{"type":"String","value":"westus"},"name":{"type":"String","value":"azure-cli-deploy-test-nsg1"}},"mode":"Incremental","provisioningState":"Succeeded","timestamp":"2017-06-08T17:05:02.3027397Z","duration":"PT11.9641086S","correlationId":"d58a557e-4bfd-4952-93a0-af907222dc8c","providers":[{"namespace":"Microsoft.Network","resourceTypes":[{"resourceType":"networkSecurityGroups","locations":["westus"]}]}],"dependencies":[],"outputs":{"newNSG":{"type":"Object","value":{"provisioningState":"Succeeded","resourceGuid":"fca6843c-35f6-4c7b-9be9-54dc3316e246","securityRules":[],"defaultSecurityRules":[{"name":"AllowVnetInBound","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-uri-test/providers/Microsoft.Network/networkSecurityGroups/azure-cli-deploy-test-nsg1/defaultSecurityRules/AllowVnetInBound","etag":"W/\"e17c9599-4f83-4c84-8bd2-8cf8ffa39079\"","properties":{"provisioningState":"Succeeded","description":"Allow
- inbound traffic from all VMs in VNET","protocol":"*","sourcePortRange":"*","destinationPortRange":"*","sourceAddressPrefix":"VirtualNetwork","destinationAddressPrefix":"VirtualNetwork","access":"Allow","priority":65000,"direction":"Inbound"}},{"name":"AllowAzureLoadBalancerInBound","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-uri-test/providers/Microsoft.Network/networkSecurityGroups/azure-cli-deploy-test-nsg1/defaultSecurityRules/AllowAzureLoadBalancerInBound","etag":"W/\"e17c9599-4f83-4c84-8bd2-8cf8ffa39079\"","properties":{"provisioningState":"Succeeded","description":"Allow
- inbound traffic from azure load balancer","protocol":"*","sourcePortRange":"*","destinationPortRange":"*","sourceAddressPrefix":"AzureLoadBalancer","destinationAddressPrefix":"*","access":"Allow","priority":65001,"direction":"Inbound"}},{"name":"DenyAllInBound","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-uri-test/providers/Microsoft.Network/networkSecurityGroups/azure-cli-deploy-test-nsg1/defaultSecurityRules/DenyAllInBound","etag":"W/\"e17c9599-4f83-4c84-8bd2-8cf8ffa39079\"","properties":{"provisioningState":"Succeeded","description":"Deny
- all inbound traffic","protocol":"*","sourcePortRange":"*","destinationPortRange":"*","sourceAddressPrefix":"*","destinationAddressPrefix":"*","access":"Deny","priority":65500,"direction":"Inbound"}},{"name":"AllowVnetOutBound","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-uri-test/providers/Microsoft.Network/networkSecurityGroups/azure-cli-deploy-test-nsg1/defaultSecurityRules/AllowVnetOutBound","etag":"W/\"e17c9599-4f83-4c84-8bd2-8cf8ffa39079\"","properties":{"provisioningState":"Succeeded","description":"Allow
- outbound traffic from all VMs to all VMs in VNET","protocol":"*","sourcePortRange":"*","destinationPortRange":"*","sourceAddressPrefix":"VirtualNetwork","destinationAddressPrefix":"VirtualNetwork","access":"Allow","priority":65000,"direction":"Outbound"}},{"name":"AllowInternetOutBound","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-uri-test/providers/Microsoft.Network/networkSecurityGroups/azure-cli-deploy-test-nsg1/defaultSecurityRules/AllowInternetOutBound","etag":"W/\"e17c9599-4f83-4c84-8bd2-8cf8ffa39079\"","properties":{"provisioningState":"Succeeded","description":"Allow
- outbound traffic from all VMs to Internet","protocol":"*","sourcePortRange":"*","destinationPortRange":"*","sourceAddressPrefix":"*","destinationAddressPrefix":"Internet","access":"Allow","priority":65001,"direction":"Outbound"}},{"name":"DenyAllOutBound","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-uri-test/providers/Microsoft.Network/networkSecurityGroups/azure-cli-deploy-test-nsg1/defaultSecurityRules/DenyAllOutBound","etag":"W/\"e17c9599-4f83-4c84-8bd2-8cf8ffa39079\"","properties":{"provisioningState":"Succeeded","description":"Deny
+ body: {string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-uri-test/providers/Microsoft.Resources/deployments/simple_deploy","name":"simple_deploy","properties":{"templateLink":{"uri":"https://raw.githubusercontent.com/Azure/azure-cli/master/src/command_modules/azure-cli-resource/tests/simple_deploy.json","contentVersion":"1.0.0.0"},"templateHash":"3550658671258663593","parameters":{"location":{"type":"String","value":"westus"},"name":{"type":"String","value":"azure-cli-deploy-test-nsg1"}},"mode":"Incremental","provisioningState":"Succeeded","timestamp":"2017-06-08T21:23:42.0167355Z","duration":"PT23.6601357S","correlationId":"4df72875-101b-46fe-8274-6bf4a65f63c1","providers":[{"namespace":"Microsoft.Network","resourceTypes":[{"resourceType":"networkSecurityGroups","locations":["westus"]}]}],"dependencies":[],"outputs":{"newNSG":{"type":"Object","value":{"provisioningState":"Succeeded","resourceGuid":"2f751d8f-968d-4a0d-ae29-80f88ac79f8f","securityRules":[],"defaultSecurityRules":[{"name":"AllowVnetInBound","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-uri-test/providers/Microsoft.Network/networkSecurityGroups/azure-cli-deploy-test-nsg1/defaultSecurityRules/AllowVnetInBound","etag":"W/\"60b1f29f-2d32-479a-ade4-6cf7900a8b6e\"","properties":{"provisioningState":"Succeeded","description":"Allow
+ inbound traffic from all VMs in VNET","protocol":"*","sourcePortRange":"*","destinationPortRange":"*","sourceAddressPrefix":"VirtualNetwork","destinationAddressPrefix":"VirtualNetwork","access":"Allow","priority":65000,"direction":"Inbound"}},{"name":"AllowAzureLoadBalancerInBound","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-uri-test/providers/Microsoft.Network/networkSecurityGroups/azure-cli-deploy-test-nsg1/defaultSecurityRules/AllowAzureLoadBalancerInBound","etag":"W/\"60b1f29f-2d32-479a-ade4-6cf7900a8b6e\"","properties":{"provisioningState":"Succeeded","description":"Allow
+ inbound traffic from azure load balancer","protocol":"*","sourcePortRange":"*","destinationPortRange":"*","sourceAddressPrefix":"AzureLoadBalancer","destinationAddressPrefix":"*","access":"Allow","priority":65001,"direction":"Inbound"}},{"name":"DenyAllInBound","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-uri-test/providers/Microsoft.Network/networkSecurityGroups/azure-cli-deploy-test-nsg1/defaultSecurityRules/DenyAllInBound","etag":"W/\"60b1f29f-2d32-479a-ade4-6cf7900a8b6e\"","properties":{"provisioningState":"Succeeded","description":"Deny
+ all inbound traffic","protocol":"*","sourcePortRange":"*","destinationPortRange":"*","sourceAddressPrefix":"*","destinationAddressPrefix":"*","access":"Deny","priority":65500,"direction":"Inbound"}},{"name":"AllowVnetOutBound","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-uri-test/providers/Microsoft.Network/networkSecurityGroups/azure-cli-deploy-test-nsg1/defaultSecurityRules/AllowVnetOutBound","etag":"W/\"60b1f29f-2d32-479a-ade4-6cf7900a8b6e\"","properties":{"provisioningState":"Succeeded","description":"Allow
+ outbound traffic from all VMs to all VMs in VNET","protocol":"*","sourcePortRange":"*","destinationPortRange":"*","sourceAddressPrefix":"VirtualNetwork","destinationAddressPrefix":"VirtualNetwork","access":"Allow","priority":65000,"direction":"Outbound"}},{"name":"AllowInternetOutBound","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-uri-test/providers/Microsoft.Network/networkSecurityGroups/azure-cli-deploy-test-nsg1/defaultSecurityRules/AllowInternetOutBound","etag":"W/\"60b1f29f-2d32-479a-ade4-6cf7900a8b6e\"","properties":{"provisioningState":"Succeeded","description":"Allow
+ outbound traffic from all VMs to Internet","protocol":"*","sourcePortRange":"*","destinationPortRange":"*","sourceAddressPrefix":"*","destinationAddressPrefix":"Internet","access":"Allow","priority":65001,"direction":"Outbound"}},{"name":"DenyAllOutBound","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-uri-test/providers/Microsoft.Network/networkSecurityGroups/azure-cli-deploy-test-nsg1/defaultSecurityRules/DenyAllOutBound","etag":"W/\"60b1f29f-2d32-479a-ade4-6cf7900a8b6e\"","properties":{"provisioningState":"Succeeded","description":"Deny
all outbound traffic","protocol":"*","sourcePortRange":"*","destinationPortRange":"*","sourceAddressPrefix":"*","destinationAddressPrefix":"*","access":"Deny","priority":65500,"direction":"Outbound"}}]}}},"outputResources":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-uri-test/providers/Microsoft.Network/networkSecurityGroups/azure-cli-deploy-test-nsg1"}]}}'}
headers:
Cache-Control: [no-cache]
Content-Type: [application/json; charset=utf-8]
- Date: ['Thu, 08 Jun 2017 17:05:22 GMT']
+ Date: ['Thu, 08 Jun 2017 21:23:50 GMT']
Expires: ['-1']
Pragma: [no-cache]
Strict-Transport-Security: [max-age=31536000; includeSubDomains]
@@ -176,11 +176,11 @@ interactions:
Connection: [keep-alive]
Content-Length: ['0']
Content-Type: [application/json; charset=utf-8]
- User-Agent: [python/3.5.3 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
+ User-Agent: [python/3.5.1 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
msrest_azure/0.4.7 resourcemanagementclient/1.1.0rc1 Azure-SDK-For-Python
AZURECLI/TEST/2.0.7+dev]
accept-language: [en-US]
- x-ms-client-request-id: [a909d53a-4c6c-11e7-9433-985fd3386693]
+ x-ms-client-request-id: [c4178fec-4c90-11e7-abf6-a0b3ccf7272a]
method: DELETE
uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/azure-cli-deployment-uri-test/providers/Microsoft.Resources/deployments/mock-deployment?api-version=2017-05-10
response:
@@ -188,13 +188,13 @@ interactions:
headers:
Cache-Control: [no-cache]
Content-Length: ['0']
- Date: ['Thu, 08 Jun 2017 17:05:24 GMT']
+ Date: ['Thu, 08 Jun 2017 21:23:50 GMT']
Expires: ['-1']
- Location: ['https://management.azure.com/subscriptions/ae43b1e3-c35d-4c8c-bc0d-f148b4c52b78/operationresults/eyJqb2JJZCI6IkRlcGxveW1lbnREZWxldGlvbkpvYi1HTlMtQVpVUkU6MkRDTEk6MkRERVBMT1lNRU5UOjJEVVJJOjJEVEVTVDo1RjU2WjI6NUYtU0lNUExFOjVGREVQTE9ZLSIsImpvYkxvY2F0aW9uIjoid2VzdHVzIn0?api-version=2017-05-10']
+ Location: ['https://management.azure.com/subscriptions/0b1f6471-1bf0-4dda-aec3-cb9272f09590/operationresults/eyJqb2JJZCI6IkRlcGxveW1lbnREZWxldGlvbkpvYi1HTlMtQVpVUkU6MkRDTEk6MkRERVBMT1lNRU5UOjJEVVJJOjJEVEVTVDo1RlhVQzA6NUYtU0lNUExFOjVGREVQTE9ZLSIsImpvYkxvY2F0aW9uIjoid2VzdHVzIn0?api-version=2017-05-10']
Pragma: [no-cache]
Retry-After: ['15']
Strict-Transport-Security: [max-age=31536000; includeSubDomains]
- x-ms-ratelimit-remaining-subscription-writes: ['1199']
+ x-ms-ratelimit-remaining-subscription-writes: ['1198']
status: {code: 202, message: Accepted}
- request:
body: null
@@ -203,21 +203,21 @@ interactions:
Accept-Encoding: ['gzip, deflate']
Connection: [keep-alive]
Content-Type: [application/json; charset=utf-8]
- User-Agent: [python/3.5.3 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
+ User-Agent: [python/3.5.1 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
msrest_azure/0.4.7 resourcemanagementclient/1.1.0rc1 Azure-SDK-For-Python
AZURECLI/TEST/2.0.7+dev]
accept-language: [en-US]
- x-ms-client-request-id: [a909d53a-4c6c-11e7-9433-985fd3386693]
+ x-ms-client-request-id: [c4178fec-4c90-11e7-abf6-a0b3ccf7272a]
method: GET
- uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/operationresults/eyJqb2JJZCI6IkRlcGxveW1lbnREZWxldGlvbkpvYi1HTlMtQVpVUkU6MkRDTEk6MkRERVBMT1lNRU5UOjJEVVJJOjJEVEVTVDo1RjU2WjI6NUYtU0lNUExFOjVGREVQTE9ZLSIsImpvYkxvY2F0aW9uIjoid2VzdHVzIn0?api-version=2017-05-10
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/operationresults/eyJqb2JJZCI6IkRlcGxveW1lbnREZWxldGlvbkpvYi1HTlMtQVpVUkU6MkRDTEk6MkRERVBMT1lNRU5UOjJEVVJJOjJEVEVTVDo1RlhVQzA6NUYtU0lNUExFOjVGREVQTE9ZLSIsImpvYkxvY2F0aW9uIjoid2VzdHVzIn0?api-version=2017-05-10
response:
body: {string: ''}
headers:
Cache-Control: [no-cache]
Content-Length: ['0']
- Date: ['Thu, 08 Jun 2017 17:05:39 GMT']
+ Date: ['Thu, 08 Jun 2017 21:24:06 GMT']
Expires: ['-1']
- Location: ['https://management.azure.com/subscriptions/ae43b1e3-c35d-4c8c-bc0d-f148b4c52b78/operationresults/eyJqb2JJZCI6IkRlcGxveW1lbnREZWxldGlvbkpvYi1HTlMtQVpVUkU6MkRDTEk6MkRERVBMT1lNRU5UOjJEVVJJOjJEVEVTVDo1RjU2WjI6NUYtU0lNUExFOjVGREVQTE9ZLSIsImpvYkxvY2F0aW9uIjoid2VzdHVzIn0?api-version=2017-05-10']
+ Location: ['https://management.azure.com/subscriptions/0b1f6471-1bf0-4dda-aec3-cb9272f09590/operationresults/eyJqb2JJZCI6IkRlcGxveW1lbnREZWxldGlvbkpvYi1HTlMtQVpVUkU6MkRDTEk6MkRERVBMT1lNRU5UOjJEVVJJOjJEVEVTVDo1RlhVQzA6NUYtU0lNUExFOjVGREVQTE9ZLSIsImpvYkxvY2F0aW9uIjoid2VzdHVzIn0?api-version=2017-05-10']
Pragma: [no-cache]
Retry-After: ['15']
Strict-Transport-Security: [max-age=31536000; includeSubDomains]
@@ -229,18 +229,44 @@ interactions:
Accept-Encoding: ['gzip, deflate']
Connection: [keep-alive]
Content-Type: [application/json; charset=utf-8]
- User-Agent: [python/3.5.3 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
+ User-Agent: [python/3.5.1 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
msrest_azure/0.4.7 resourcemanagementclient/1.1.0rc1 Azure-SDK-For-Python
AZURECLI/TEST/2.0.7+dev]
accept-language: [en-US]
- x-ms-client-request-id: [a909d53a-4c6c-11e7-9433-985fd3386693]
+ x-ms-client-request-id: [c4178fec-4c90-11e7-abf6-a0b3ccf7272a]
method: GET
- uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/operationresults/eyJqb2JJZCI6IkRlcGxveW1lbnREZWxldGlvbkpvYi1HTlMtQVpVUkU6MkRDTEk6MkRERVBMT1lNRU5UOjJEVVJJOjJEVEVTVDo1RjU2WjI6NUYtU0lNUExFOjVGREVQTE9ZLSIsImpvYkxvY2F0aW9uIjoid2VzdHVzIn0?api-version=2017-05-10
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/operationresults/eyJqb2JJZCI6IkRlcGxveW1lbnREZWxldGlvbkpvYi1HTlMtQVpVUkU6MkRDTEk6MkRERVBMT1lNRU5UOjJEVVJJOjJEVEVTVDo1RlhVQzA6NUYtU0lNUExFOjVGREVQTE9ZLSIsImpvYkxvY2F0aW9uIjoid2VzdHVzIn0?api-version=2017-05-10
response:
body: {string: ''}
headers:
Cache-Control: [no-cache]
- Date: ['Thu, 08 Jun 2017 17:05:54 GMT']
+ Content-Length: ['0']
+ Date: ['Thu, 08 Jun 2017 21:24:21 GMT']
+ Expires: ['-1']
+ Location: ['https://management.azure.com/subscriptions/0b1f6471-1bf0-4dda-aec3-cb9272f09590/operationresults/eyJqb2JJZCI6IkRlcGxveW1lbnREZWxldGlvbkpvYi1HTlMtQVpVUkU6MkRDTEk6MkRERVBMT1lNRU5UOjJEVVJJOjJEVEVTVDo1RlhVQzA6NUYtU0lNUExFOjVGREVQTE9ZLSIsImpvYkxvY2F0aW9uIjoid2VzdHVzIn0?api-version=2017-05-10']
+ Pragma: [no-cache]
+ Retry-After: ['15']
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ status: {code: 202, message: Accepted}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
+ msrest_azure/0.4.7 resourcemanagementclient/1.1.0rc1 Azure-SDK-For-Python
+ AZURECLI/TEST/2.0.7+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [c4178fec-4c90-11e7-abf6-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/operationresults/eyJqb2JJZCI6IkRlcGxveW1lbnREZWxldGlvbkpvYi1HTlMtQVpVUkU6MkRDTEk6MkRERVBMT1lNRU5UOjJEVVJJOjJEVEVTVDo1RlhVQzA6NUYtU0lNUExFOjVGREVQTE9ZLSIsImpvYkxvY2F0aW9uIjoid2VzdHVzIn0?api-version=2017-05-10
+ response:
+ body: {string: ''}
+ headers:
+ Cache-Control: [no-cache]
+ Date: ['Thu, 08 Jun 2017 21:24:35 GMT']
Expires: ['-1']
Pragma: [no-cache]
Strict-Transport-Security: [max-age=31536000; includeSubDomains]
@@ -252,11 +278,11 @@ interactions:
Accept-Encoding: ['gzip, deflate']
Connection: [keep-alive]
Content-Type: [application/json; charset=utf-8]
- User-Agent: [python/3.5.3 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
+ User-Agent: [python/3.5.1 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
msrest_azure/0.4.7 resourcemanagementclient/1.1.0rc1 Azure-SDK-For-Python
AZURECLI/TEST/2.0.7+dev]
accept-language: [en-US]
- x-ms-client-request-id: [bcdd1a3e-4c6c-11e7-bb23-985fd3386693]
+ x-ms-client-request-id: [e0002888-4c90-11e7-a096-a0b3ccf7272a]
method: GET
uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/azure-cli-deployment-uri-test/providers/Microsoft.Resources/deployments/?api-version=2017-05-10
response:
@@ -264,7 +290,7 @@ interactions:
headers:
Cache-Control: [no-cache]
Content-Type: [application/json; charset=utf-8]
- Date: ['Thu, 08 Jun 2017 17:05:56 GMT']
+ Date: ['Thu, 08 Jun 2017 21:24:37 GMT']
Expires: ['-1']
Pragma: [no-cache]
Strict-Transport-Security: [max-age=31536000; includeSubDomains]
diff --git a/src/command_modules/azure-cli-resource/tests/simple_deploy_parameters.json b/src/command_modules/azure-cli-resource/tests/simple_deploy_parameters.json
index 5542aec0e..40c143fbf 100644
--- a/src/command_modules/azure-cli-resource/tests/simple_deploy_parameters.json
+++ b/src/command_modules/azure-cli-resource/tests/simple_deploy_parameters.json
@@ -1,8 +1,10 @@
{
- "location": {
- "value": "westus"
- },
- "name": {
- "value": "azure-cli-deploy-test-nsg1"
+ "parameters": {
+ "location": {
+ "value": "westus"
+ },
+ "name": {
+ "value": "azure-cli-deploy-test-nsg1"
+ }
}
}
\ No newline at end of file
diff --git a/src/command_modules/azure-cli-resource/tests/test-object.json b/src/command_modules/azure-cli-resource/tests/test-object.json
new file mode 100644
index 000000000..64799c0ed
--- /dev/null
+++ b/src/command_modules/azure-cli-resource/tests/test-object.json
@@ -0,0 +1,8 @@
+[
+ {
+ "name": "bepool1"
+ },
+ {
+ "name": "bepool2"
+ }
+]
\ No newline at end of file
diff --git a/src/command_modules/azure-cli-resource/tests/test-params.json b/src/command_modules/azure-cli-resource/tests/test-params.json
new file mode 100644
index 000000000..d393e2593
--- /dev/null
+++ b/src/command_modules/azure-cli-resource/tests/test-params.json
@@ -0,0 +1,15 @@
+{
+ "$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentParameters.json#",
+ "contentVersion": "1.0.0.0",
+ "parameters": {
+ "name": {
+ "value": "test-lb"
+ },
+ "location": {
+ "value": "westus"
+ },
+ "privateIPAllocationMethod": {
+ "value": "Dynamic"
+ }
+ }
+}
\ No newline at end of file
diff --git a/src/command_modules/azure-cli-resource/tests/test-template.json b/src/command_modules/azure-cli-resource/tests/test-template.json
new file mode 100644
index 000000000..8e5a5d5da
--- /dev/null
+++ b/src/command_modules/azure-cli-resource/tests/test-template.json
@@ -0,0 +1,44 @@
+{
+ "$schema": "http://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#",
+ "contentVersion": "1.0.0.0",
+ "parameters": {
+ "name": {
+ "type": "string"
+ },
+ "location": {
+ "type": "string"
+ },
+ "subnetId": {
+ "type": "string"
+ },
+ "privateIPAllocationMethod": {
+ "type": "string"
+ },
+ "backendAddressPools": {
+ "type": "array"
+ }
+ },
+ "resources": [
+ {
+ "apiVersion": "2016-03-30",
+ "dependsOn": [ ],
+ "location": "[parameters('location')]",
+ "name": "[parameters('name')]",
+ "properties": {
+ "frontendIPConfigurations": [
+ {
+ "name": "LoadBalancerFrontEnd",
+ "properties": {
+ "privateIPAllocationMethod": "[parameters('privateIPAllocationMethod')]",
+ "subnet": {
+ "id": "[parameters('subnetId')]"
+ }
+ }
+ }
+ ],
+ "backendAddressPools": "[parameters('backendAddressPools')]"
+ },
+ "type": "Microsoft.Network/loadBalancers"
+ }
+ ]
+}
\ No newline at end of file
diff --git a/src/command_modules/azure-cli-resource/tests/test_custom.py b/src/command_modules/azure-cli-resource/tests/test_custom.py
index fda9edf9b..6ef0754a4 100644
--- a/src/command_modules/azure-cli-resource/tests/test_custom.py
+++ b/src/command_modules/azure-cli-resource/tests/test_custom.py
@@ -6,11 +6,12 @@
import unittest
from azure.cli.command_modules.resource.custom import (
- _merge_parameters,
_get_missing_parameters,
_extract_lock_params
)
+from azure.cli.command_modules.resource._validators import validate_deployment_parameters
+
class TestCustom(unittest.TestCase):
def test_extract_parameters(self):
@@ -158,29 +159,32 @@ class TestCustom(unittest.TestCase):
self.assertDictEqual(out_params, expected)
- def test_resource_merge_parameters(self):
+ def test_deployment_parameters(self):
tests = [
{
"parameter_list": [],
- "expected": None,
+ "expected": {},
},
{
- "parameter_list": ['{"foo": "bar"}'],
+ "parameter_list": [['{"foo": "bar"}']],
"expected": {"foo": "bar"},
},
{
- "parameter_list": ['{"foo": "bar"}', '{"baz": "blat"}'],
+ "parameter_list": [['{"foo": "bar"}', '{"baz": "blat"}']],
"expected": {"foo": "bar", "baz": "blat"},
},
{
- "parameter_list": ['{"foo": "bar"}', '{"foo": "baz"}'],
+ "parameter_list": [['{"foo": "bar"}', '{"foo": "baz"}']],
"expected": {"foo": "baz"},
},
]
for test in tests:
- output = _merge_parameters(test['parameter_list'])
- self.assertEqual(output, test['expected'])
+ from argparse import Namespace
+ namespace = Namespace()
+ namespace.parameters = test['parameter_list']
+ validate_deployment_parameters(namespace)
+ self.assertEqual(namespace.parameters, test['expected'])
if __name__ == '__main__':
diff --git a/src/command_modules/azure-cli-resource/tests/test_resource.py b/src/command_modules/azure-cli-resource/tests/test_resource.py
index 303ce2b46..7c310420d 100644
--- a/src/command_modules/azure-cli-resource/tests/test_resource.py
+++ b/src/command_modules/azure-cli-resource/tests/test_resource.py
@@ -7,7 +7,7 @@ import os
import time
import unittest
-from azure.cli.testsdk import (ScenarioTest, ResourceGroupPreparer, JMESPathCheck as JCheck)
+from azure.cli.testsdk import (ScenarioTest, ResourceGroupPreparer, JMESPathCheck as JCheck, create_random_name)
# AZURE CLI RESOURCE TEST DEFINITIONS
from azure.cli.testsdk.vcr_test_base import (VCRTestBase, JMESPathCheck, NoneCheck,
BooleanCheck,
@@ -320,20 +320,24 @@ class DeploymentTest(ResourceGroupVCRTestBase):
def body(self):
curr_dir = os.path.dirname(os.path.realpath(__file__))
- template_file = os.path.join(curr_dir, 'simple_deploy.json').replace('\\', '\\\\')
- parameters_file = os.path.join(curr_dir, 'simple_deploy_parameters.json').replace('\\',
- '\\\\')
+ template_file = os.path.join(curr_dir, 'test-template.json').replace('\\', '\\\\')
+ parameters_file = os.path.join(curr_dir, 'test-params.json').replace('\\', '\\\\')
+ object_file = os.path.join(curr_dir, 'test-object.json').replace('\\', '\\\\')
deployment_name = 'azure-cli-deployment'
- self.cmd('group deployment validate -g {} --template-file {} --parameters @{}'.format(
- self.resource_group, template_file, parameters_file), checks=[
- JMESPathCheck('properties.provisioningState', 'Accepted')
+ subnet_id = self.cmd('network vnet create -g {} -n vnet1 --subnet-name subnet1'.format(self.resource_group))['newVNet']['subnets'][0]['id']
+
+ self.cmd('group deployment validate -g {} --template-file {} --parameters @"{}" --parameters subnetId="{}" --parameters backendAddressPools=@"{}"'.format(
+ self.resource_group, template_file, parameters_file, subnet_id, object_file), checks=[
+ JMESPathCheck('properties.provisioningState', 'Succeeded')
])
- self.cmd('group deployment create -g {} -n {} --template-file {} --parameters @{}'.format(
- self.resource_group, deployment_name, template_file, parameters_file), checks=[
+
+ self.cmd('group deployment create -g {} -n {} --template-file {} --parameters @"{}" --parameters subnetId="{}" --parameters backendAddressPools=@"{}"'.format(
+ self.resource_group, deployment_name, template_file, parameters_file, subnet_id, object_file), checks=[
JMESPathCheck('properties.provisioningState', 'Succeeded'),
JMESPathCheck('resourceGroup', self.resource_group),
])
+
self.cmd('group deployment list -g {}'.format(self.resource_group), checks=[
JMESPathCheck('[0].name', deployment_name),
JMESPathCheck('[0].resourceGroup', self.resource_group)
@@ -655,20 +659,22 @@ class ManagedAppScenarioTest(ScenarioTest):
class CrossRGDeploymentScenarioTest(ScenarioTest):
- @ResourceGroupPreparer()
- def test_crossrgdeployment(self, resource_group):
+
+ @ResourceGroupPreparer(name_prefix='cli_test_cross_rg_alt', parameter_name='resource_group_cross')
+ @ResourceGroupPreparer(name_prefix='cli_test_cross_rg_deploy')
+ def test_crossrg_deployment(self, resource_group, resource_group_cross):
curr_dir = os.path.dirname(os.path.realpath(__file__))
template_file = os.path.join(curr_dir, 'crossrg_deploy.json').replace('\\', '\\\\')
- parameters_file = os.path.join(curr_dir, 'crossrg_deploy_parameters.json').replace('\\',
- '\\\\')
deployment_name = 'azure-cli-crossrgdeployment'
+ storage_account_1 = create_random_name(prefix='crossrg')
+ storage_account_2 = create_random_name(prefix='crossrg')
- self.cmd('group deployment validate -g {} --template-file {} --parameters @{}'.format(
- resource_group, template_file, parameters_file), checks=[
+ self.cmd('group deployment validate -g {} --template-file {} --parameters CrossRG={} StorageAccountName1={} StorageAccountName2={}'.format(
+ resource_group, template_file, resource_group_cross, storage_account_1, storage_account_2), checks=[
JCheck('properties.provisioningState', 'Succeeded')
])
- self.cmd('group deployment create -g {} -n {} --template-file {} --parameters @{}'.format(
- resource_group, deployment_name, template_file, parameters_file), checks=[
+ self.cmd('group deployment create -g {} -n {} --template-file {} --parameters CrossRG={}'.format(
+ resource_group, deployment_name, template_file, resource_group_cross), checks=[
JCheck('properties.provisioningState', 'Succeeded'),
JCheck('resourceGroup', resource_group),
])
@@ -676,20 +682,15 @@ class CrossRGDeploymentScenarioTest(ScenarioTest):
JCheck('[0].name', deployment_name),
JCheck('[0].resourceGroup', resource_group)
])
- self.cmd('group deployment show -g {} -n {}'.format(resource_group, deployment_name),
- checks=[
- JCheck('name', deployment_name),
- JCheck('resourceGroup', resource_group)
- ])
- self.cmd('group deployment operation list -g {} -n {}'.format(resource_group,
- deployment_name), checks=[
+ self.cmd('group deployment show -g {} -n {}'.format(resource_group, deployment_name), checks=[
+ JCheck('name', deployment_name),
+ JCheck('resourceGroup', resource_group)
+ ])
+ self.cmd('group deployment operation list -g {} -n {}'.format(resource_group, deployment_name), checks=[
JCheck('length([])', 3),
JCheck('[0].resourceGroup', resource_group)
])
- def tear_down(self):
- self.cmd('group delete --name {} --no-wait --yes'.format('crossrg5'))
-
if __name__ == '__main__':
unittest.main()
diff --git a/src/command_modules/azure-cli-resource/tests/test_resource_validators.py b/src/command_modules/azure-cli-resource/tests/test_resource_validators.py
index eea7b941b..9ef089c86 100644
--- a/src/command_modules/azure-cli-resource/tests/test_resource_validators.py
+++ b/src/command_modules/azure-cli-resource/tests/test_resource_validators.py
@@ -10,7 +10,7 @@ from six import StringIO
from azure.cli.core.util import CLIError
from azure.cli.command_modules.resource._validators import (
- validate_deployment_name,
+ _validate_deployment_name,
validate_lock_parameters,
)
@@ -120,14 +120,14 @@ class Test_resource_validators(unittest.TestCase):
namespace.template_uri = 'https://templates/template123.json?foo=bar'
namespace.template_file = None
namespace.deployment_name = None
- validate_deployment_name(namespace)
+ _validate_deployment_name(namespace)
self.assertEqual('template123', namespace.deployment_name)
namespace = mock.MagicMock()
namespace.template_file = __file__
namespace.template_uri = None
namespace.deployment_name = None
- validate_deployment_name(namespace)
+ _validate_deployment_name(namespace)
file_base_name = os.path.basename(__file__)
file_base_name = file_base_name[:str.find(file_base_name, '.')]
@@ -138,7 +138,7 @@ class Test_resource_validators(unittest.TestCase):
namespace.template_file = '{"foo":"bar"}'
namespace.template_uri = None
namespace.deployment_name = None
- validate_deployment_name(namespace)
+ _validate_deployment_name(namespace)
self.assertEqual('deployment1', namespace.deployment_name)
diff --git a/test-object.json b/test-object.json
new file mode 100644
index 000000000..64799c0ed
--- /dev/null
+++ b/test-object.json
@@ -0,0 +1,8 @@
+[
+ {
+ "name": "bepool1"
+ },
+ {
+ "name": "bepool2"
+ }
+]
\ No newline at end of file
diff --git a/test-params.json b/test-params.json
new file mode 100644
index 000000000..7d14aa51f
--- /dev/null
+++ b/test-params.json
@@ -0,0 +1,15 @@
+{
+ "$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentParameters.json#",
+ "contentVersion": "1.0.0.0",
+ "parameters": {
+ "name": {
+ "value": "test-lb"
+ },
+ "location": {
+ "value": "westus"
+ },
+ "subnetId": {
+ "value": "/subscriptions/0b1f6471-1bf0-4dda-aec3-cb9272f09590/resourceGroups/tjp-arm/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/subnet1"
+ }
+ }
+}
\ No newline at end of file
diff --git a/test-template.json b/test-template.json
new file mode 100644
index 000000000..8e5a5d5da
--- /dev/null
+++ b/test-template.json
@@ -0,0 +1,44 @@
+{
+ "$schema": "http://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#",
+ "contentVersion": "1.0.0.0",
+ "parameters": {
+ "name": {
+ "type": "string"
+ },
+ "location": {
+ "type": "string"
+ },
+ "subnetId": {
+ "type": "string"
+ },
+ "privateIPAllocationMethod": {
+ "type": "string"
+ },
+ "backendAddressPools": {
+ "type": "array"
+ }
+ },
+ "resources": [
+ {
+ "apiVersion": "2016-03-30",
+ "dependsOn": [ ],
+ "location": "[parameters('location')]",
+ "name": "[parameters('name')]",
+ "properties": {
+ "frontendIPConfigurations": [
+ {
+ "name": "LoadBalancerFrontEnd",
+ "properties": {
+ "privateIPAllocationMethod": "[parameters('privateIPAllocationMethod')]",
+ "subnet": {
+ "id": "[parameters('subnetId')]"
+ }
+ }
+ }
+ ],
+ "backendAddressPools": "[parameters('backendAddressPools')]"
+ },
+ "type": "Microsoft.Network/loadBalancers"
+ }
+ ]
+}
\ No newline at end of file
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": -1,
"issue_text_score": 0,
"test_score": -1
},
"num_modified_files": 9
} | 2.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "python scripts/dev_setup.py",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc libssl-dev libffi-dev"
],
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | adal==0.4.3
applicationinsights==0.10.0
argcomplete==1.8.0
astroid==2.11.7
attrs==22.2.0
autopep8==2.0.4
azure-batch==3.0.0
-e git+https://github.com/Azure/azure-cli.git@fb9f2e795d88105549c8552dd7a38136a3fdfda0#egg=azure_cli&subdirectory=src/azure-cli
-e git+https://github.com/Azure/azure-cli.git@fb9f2e795d88105549c8552dd7a38136a3fdfda0#egg=azure_cli_acr&subdirectory=src/command_modules/azure-cli-acr
-e git+https://github.com/Azure/azure-cli.git@fb9f2e795d88105549c8552dd7a38136a3fdfda0#egg=azure_cli_acs&subdirectory=src/command_modules/azure-cli-acs
-e git+https://github.com/Azure/azure-cli.git@fb9f2e795d88105549c8552dd7a38136a3fdfda0#egg=azure_cli_appservice&subdirectory=src/command_modules/azure-cli-appservice
-e git+https://github.com/Azure/azure-cli.git@fb9f2e795d88105549c8552dd7a38136a3fdfda0#egg=azure_cli_batch&subdirectory=src/command_modules/azure-cli-batch
-e git+https://github.com/Azure/azure-cli.git@fb9f2e795d88105549c8552dd7a38136a3fdfda0#egg=azure_cli_billing&subdirectory=src/command_modules/azure-cli-billing
-e git+https://github.com/Azure/azure-cli.git@fb9f2e795d88105549c8552dd7a38136a3fdfda0#egg=azure_cli_cdn&subdirectory=src/command_modules/azure-cli-cdn
-e git+https://github.com/Azure/azure-cli.git@fb9f2e795d88105549c8552dd7a38136a3fdfda0#egg=azure_cli_cloud&subdirectory=src/command_modules/azure-cli-cloud
-e git+https://github.com/Azure/azure-cli.git@fb9f2e795d88105549c8552dd7a38136a3fdfda0#egg=azure_cli_cognitiveservices&subdirectory=src/command_modules/azure-cli-cognitiveservices
-e git+https://github.com/Azure/azure-cli.git@fb9f2e795d88105549c8552dd7a38136a3fdfda0#egg=azure_cli_component&subdirectory=src/command_modules/azure-cli-component
-e git+https://github.com/Azure/azure-cli.git@fb9f2e795d88105549c8552dd7a38136a3fdfda0#egg=azure_cli_configure&subdirectory=src/command_modules/azure-cli-configure
-e git+https://github.com/Azure/azure-cli.git@fb9f2e795d88105549c8552dd7a38136a3fdfda0#egg=azure_cli_consumption&subdirectory=src/command_modules/azure-cli-consumption
-e git+https://github.com/Azure/azure-cli.git@fb9f2e795d88105549c8552dd7a38136a3fdfda0#egg=azure_cli_core&subdirectory=src/azure-cli-core
-e git+https://github.com/Azure/azure-cli.git@fb9f2e795d88105549c8552dd7a38136a3fdfda0#egg=azure_cli_cosmosdb&subdirectory=src/command_modules/azure-cli-cosmosdb
-e git+https://github.com/Azure/azure-cli.git@fb9f2e795d88105549c8552dd7a38136a3fdfda0#egg=azure_cli_dla&subdirectory=src/command_modules/azure-cli-dla
-e git+https://github.com/Azure/azure-cli.git@fb9f2e795d88105549c8552dd7a38136a3fdfda0#egg=azure_cli_dls&subdirectory=src/command_modules/azure-cli-dls
-e git+https://github.com/Azure/azure-cli.git@fb9f2e795d88105549c8552dd7a38136a3fdfda0#egg=azure_cli_feedback&subdirectory=src/command_modules/azure-cli-feedback
-e git+https://github.com/Azure/azure-cli.git@fb9f2e795d88105549c8552dd7a38136a3fdfda0#egg=azure_cli_find&subdirectory=src/command_modules/azure-cli-find
-e git+https://github.com/Azure/azure-cli.git@fb9f2e795d88105549c8552dd7a38136a3fdfda0#egg=azure_cli_interactive&subdirectory=src/command_modules/azure-cli-interactive
-e git+https://github.com/Azure/azure-cli.git@fb9f2e795d88105549c8552dd7a38136a3fdfda0#egg=azure_cli_iot&subdirectory=src/command_modules/azure-cli-iot
-e git+https://github.com/Azure/azure-cli.git@fb9f2e795d88105549c8552dd7a38136a3fdfda0#egg=azure_cli_keyvault&subdirectory=src/command_modules/azure-cli-keyvault
-e git+https://github.com/Azure/azure-cli.git@fb9f2e795d88105549c8552dd7a38136a3fdfda0#egg=azure_cli_lab&subdirectory=src/command_modules/azure-cli-lab
-e git+https://github.com/Azure/azure-cli.git@fb9f2e795d88105549c8552dd7a38136a3fdfda0#egg=azure_cli_monitor&subdirectory=src/command_modules/azure-cli-monitor
-e git+https://github.com/Azure/azure-cli.git@fb9f2e795d88105549c8552dd7a38136a3fdfda0#egg=azure_cli_network&subdirectory=src/command_modules/azure-cli-network
-e git+https://github.com/Azure/azure-cli.git@fb9f2e795d88105549c8552dd7a38136a3fdfda0#egg=azure_cli_nspkg&subdirectory=src/azure-cli-nspkg
-e git+https://github.com/Azure/azure-cli.git@fb9f2e795d88105549c8552dd7a38136a3fdfda0#egg=azure_cli_profile&subdirectory=src/command_modules/azure-cli-profile
-e git+https://github.com/Azure/azure-cli.git@fb9f2e795d88105549c8552dd7a38136a3fdfda0#egg=azure_cli_rdbms&subdirectory=src/command_modules/azure-cli-rdbms
-e git+https://github.com/Azure/azure-cli.git@fb9f2e795d88105549c8552dd7a38136a3fdfda0#egg=azure_cli_redis&subdirectory=src/command_modules/azure-cli-redis
-e git+https://github.com/Azure/azure-cli.git@fb9f2e795d88105549c8552dd7a38136a3fdfda0#egg=azure_cli_resource&subdirectory=src/command_modules/azure-cli-resource
-e git+https://github.com/Azure/azure-cli.git@fb9f2e795d88105549c8552dd7a38136a3fdfda0#egg=azure_cli_role&subdirectory=src/command_modules/azure-cli-role
-e git+https://github.com/Azure/azure-cli.git@fb9f2e795d88105549c8552dd7a38136a3fdfda0#egg=azure_cli_sf&subdirectory=src/command_modules/azure-cli-sf
-e git+https://github.com/Azure/azure-cli.git@fb9f2e795d88105549c8552dd7a38136a3fdfda0#egg=azure_cli_sql&subdirectory=src/command_modules/azure-cli-sql
-e git+https://github.com/Azure/azure-cli.git@fb9f2e795d88105549c8552dd7a38136a3fdfda0#egg=azure_cli_storage&subdirectory=src/command_modules/azure-cli-storage
-e git+https://github.com/Azure/azure-cli.git@fb9f2e795d88105549c8552dd7a38136a3fdfda0#egg=azure_cli_taskhelp&subdirectory=src/command_modules/azure-cli-taskhelp
-e git+https://github.com/Azure/azure-cli.git@fb9f2e795d88105549c8552dd7a38136a3fdfda0#egg=azure_cli_testsdk&subdirectory=src/azure-cli-testsdk
-e git+https://github.com/Azure/azure-cli.git@fb9f2e795d88105549c8552dd7a38136a3fdfda0#egg=azure_cli_utility_automation&subdirectory=scripts
-e git+https://github.com/Azure/azure-cli.git@fb9f2e795d88105549c8552dd7a38136a3fdfda0#egg=azure_cli_vm&subdirectory=src/command_modules/azure-cli-vm
azure-common==1.1.28
azure-core==1.24.2
azure-datalake-store==0.0.9
azure-graphrbac==0.30.0rc6
azure-keyvault==0.3.0
azure-mgmt-authorization==0.30.0rc6
azure-mgmt-batch==4.0.0
azure-mgmt-billing==0.1.0
azure-mgmt-cdn==0.30.2
azure-mgmt-cognitiveservices==1.0.0
azure-mgmt-compute==1.0.0rc1
azure-mgmt-consumption==0.1.0
azure-mgmt-containerregistry==0.2.1
azure-mgmt-datalake-analytics==0.1.4
azure-mgmt-datalake-nspkg==3.0.1
azure-mgmt-datalake-store==0.1.4
azure-mgmt-devtestlabs==2.0.0
azure-mgmt-dns==1.0.1
azure-mgmt-documentdb==0.1.3
azure-mgmt-iothub==0.2.2
azure-mgmt-keyvault==0.31.0
azure-mgmt-monitor==0.2.1
azure-mgmt-network==1.0.0rc3
azure-mgmt-nspkg==1.0.0
azure-mgmt-rdbms==0.1.0
azure-mgmt-redis==1.0.0
azure-mgmt-resource==1.1.0rc1
azure-mgmt-sql==0.5.1
azure-mgmt-storage==1.0.0rc1
azure-mgmt-trafficmanager==0.30.0
azure-mgmt-web==0.32.0
azure-monitor==0.3.0
azure-multiapi-storage==0.1.0
azure-nspkg==1.0.0
azure-servicefabric==5.6.130
certifi==2021.5.30
cffi==1.15.1
colorama==0.3.7
coverage==6.2
cryptography==40.0.2
flake8==5.0.4
futures==3.1.1
humanfriendly==2.4
idna==3.10
importlib-metadata==4.2.0
iniconfig==1.1.1
isodate==0.7.0
isort==5.10.1
jeepney==0.7.1
jmespath==0.10.0
keyring==23.4.1
lazy-object-proxy==1.7.1
mccabe==0.7.0
mock==5.2.0
msrest==0.4.29
msrestazure==0.4.34
nose==1.3.7
oauthlib==3.2.2
packaging==21.3
paramiko==2.0.2
pbr==6.1.1
pluggy==1.0.0
prompt-toolkit==3.0.36
py==1.11.0
pyasn1==0.5.1
pycodestyle==2.10.0
pycparser==2.21
pydocumentdb==2.3.5
pyflakes==2.5.0
Pygments==2.1.3
PyJWT==2.4.0
pylint==1.7.1
pyOpenSSL==16.2.0
pyparsing==3.1.4
pytest==7.0.1
python-dateutil==2.9.0.post0
pytz==2025.2
PyYAML==3.11
requests==2.9.1
requests-oauthlib==2.0.0
scp==0.15.0
SecretStorage==3.3.3
six==1.10.0
sshtunnel==0.4.0
tabulate==0.7.7
tomli==1.2.3
typed-ast==1.5.5
typing-extensions==4.1.1
urllib3==1.26.20
urllib3-secure-extra==0.1.0
vcrpy==1.10.3
vsts-cd-manager==1.0.2
wcwidth==0.2.13
Whoosh==2.7.4
wrapt==1.16.0
xmltodict==0.14.2
zipp==3.6.0
| name: azure-cli
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- adal==0.4.3
- applicationinsights==0.10.0
- argcomplete==1.8.0
- astroid==2.11.7
- attrs==22.2.0
- autopep8==2.0.4
- azure-batch==3.0.0
- azure-common==1.1.28
- azure-core==1.24.2
- azure-datalake-store==0.0.9
- azure-graphrbac==0.30.0rc6
- azure-keyvault==0.3.0
- azure-mgmt-authorization==0.30.0rc6
- azure-mgmt-batch==4.0.0
- azure-mgmt-billing==0.1.0
- azure-mgmt-cdn==0.30.2
- azure-mgmt-cognitiveservices==1.0.0
- azure-mgmt-compute==1.0.0rc1
- azure-mgmt-consumption==0.1.0
- azure-mgmt-containerregistry==0.2.1
- azure-mgmt-datalake-analytics==0.1.4
- azure-mgmt-datalake-nspkg==3.0.1
- azure-mgmt-datalake-store==0.1.4
- azure-mgmt-devtestlabs==2.0.0
- azure-mgmt-dns==1.0.1
- azure-mgmt-documentdb==0.1.3
- azure-mgmt-iothub==0.2.2
- azure-mgmt-keyvault==0.31.0
- azure-mgmt-monitor==0.2.1
- azure-mgmt-network==1.0.0rc3
- azure-mgmt-nspkg==1.0.0
- azure-mgmt-rdbms==0.1.0
- azure-mgmt-redis==1.0.0
- azure-mgmt-resource==1.1.0rc1
- azure-mgmt-sql==0.5.1
- azure-mgmt-storage==1.0.0rc1
- azure-mgmt-trafficmanager==0.30.0
- azure-mgmt-web==0.32.0
- azure-monitor==0.3.0
- azure-multiapi-storage==0.1.0
- azure-nspkg==1.0.0
- azure-servicefabric==5.6.130
- cffi==1.15.1
- colorama==0.3.7
- coverage==6.2
- cryptography==40.0.2
- flake8==5.0.4
- futures==3.1.1
- humanfriendly==2.4
- idna==3.10
- importlib-metadata==4.2.0
- iniconfig==1.1.1
- isodate==0.7.0
- isort==5.10.1
- jeepney==0.7.1
- jmespath==0.10.0
- keyring==23.4.1
- lazy-object-proxy==1.7.1
- mccabe==0.7.0
- mock==5.2.0
- msrest==0.4.29
- msrestazure==0.4.34
- nose==1.3.7
- oauthlib==3.2.2
- packaging==21.3
- paramiko==2.0.2
- pbr==6.1.1
- pip==9.0.1
- pluggy==1.0.0
- prompt-toolkit==3.0.36
- py==1.11.0
- pyasn1==0.5.1
- pycodestyle==2.10.0
- pycparser==2.21
- pydocumentdb==2.3.5
- pyflakes==2.5.0
- pygments==2.1.3
- pyjwt==2.4.0
- pylint==1.7.1
- pyopenssl==16.2.0
- pyparsing==3.1.4
- pytest==7.0.1
- python-dateutil==2.9.0.post0
- pytz==2025.2
- pyyaml==3.11
- requests==2.9.1
- requests-oauthlib==2.0.0
- scp==0.15.0
- secretstorage==3.3.3
- setuptools==30.4.0
- six==1.10.0
- sshtunnel==0.4.0
- tabulate==0.7.7
- tomli==1.2.3
- typed-ast==1.5.5
- typing-extensions==4.1.1
- urllib3==1.26.20
- urllib3-secure-extra==0.1.0
- vcrpy==1.10.3
- vsts-cd-manager==1.0.2
- wcwidth==0.2.13
- whoosh==2.7.4
- wrapt==1.16.0
- xmltodict==0.14.2
- zipp==3.6.0
prefix: /opt/conda/envs/azure-cli
| [
"src/command_modules/azure-cli-resource/tests/test_custom.py::TestCustom::test_deployment_parameters",
"src/command_modules/azure-cli-resource/tests/test_custom.py::TestCustom::test_extract_parameters",
"src/command_modules/azure-cli-resource/tests/test_custom.py::TestCustom::test_resource_missing_parameters",
"src/command_modules/azure-cli-resource/tests/test_resource.py::ResourceGroupScenarioTest::test_resource_group",
"src/command_modules/azure-cli-resource/tests/test_resource.py::ResourceGroupNoWaitScenarioTest::test_resource_group_no_wait",
"src/command_modules/azure-cli-resource/tests/test_resource.py::ResourceScenarioTest::test_resource_scenario",
"src/command_modules/azure-cli-resource/tests/test_resource.py::ResourceIDScenarioTest::test_resource_id_scenario",
"src/command_modules/azure-cli-resource/tests/test_resource.py::ResourceCreateScenarioTest::test_resource_create",
"src/command_modules/azure-cli-resource/tests/test_resource.py::TagScenarioTest::test_tag_scenario",
"src/command_modules/azure-cli-resource/tests/test_resource.py::ProviderRegistrationTest::test_provider_registration",
"src/command_modules/azure-cli-resource/tests/test_resource.py::ProviderOperationTest::test_provider_operation",
"src/command_modules/azure-cli-resource/tests/test_resource.py::DeploymentTest::test_group_deployment",
"src/command_modules/azure-cli-resource/tests/test_resource.py::DeploymentnoWaitTest::test_group_deployment_no_wait",
"src/command_modules/azure-cli-resource/tests/test_resource.py::DeploymentThruUriTest::test_group_deployment_thru_uri",
"src/command_modules/azure-cli-resource/tests/test_resource.py::ResourceMoveScenarioTest::test_resource_move",
"src/command_modules/azure-cli-resource/tests/test_resource.py::FeatureScenarioTest::test_feature_list",
"src/command_modules/azure-cli-resource/tests/test_resource.py::PolicyScenarioTest::test_resource_policy",
"src/command_modules/azure-cli-resource/tests/test_resource_validators.py::Test_resource_validators::test_generate_deployment_name_from_file",
"src/command_modules/azure-cli-resource/tests/test_resource_validators.py::Test_resource_validators::test_validate_lock_params",
"src/command_modules/azure-cli-resource/tests/test_resource_validators.py::Test_resource_validators::test_validate_lock_params_invalid"
]
| [
"src/command_modules/azure-cli-resource/tests/test_resource.py::ManagedAppDefinitionScenarioTest::test_managedappdef",
"src/command_modules/azure-cli-resource/tests/test_resource.py::ManagedAppScenarioTest::test_managedapp",
"src/command_modules/azure-cli-resource/tests/test_resource.py::CrossRGDeploymentScenarioTest::test_crossrg_deployment"
]
| []
| []
| MIT License | 1,353 | [
"azure-cli.pyproj",
"src/command_modules/azure-cli-resource/azure/cli/command_modules/resource/_params.py",
"src/command_modules/azure-cli-storage/HISTORY.rst",
"src/command_modules/azure-cli-storage/azure/cli/command_modules/storage/blob.py",
"src/command_modules/azure-cli-resource/azure/cli/command_modules/resource/_validators.py",
"src/command_modules/azure-cli-interactive/azclishell/app.py",
"src/command_modules/azure-cli-resource/azure/cli/command_modules/resource/custom.py",
"src/command_modules/azure-cli-resource/HISTORY.rst",
"src/command_modules/azure-cli-resource/azure/cli/command_modules/resource/_help.py"
]
| [
"azure-cli.pyproj",
"src/command_modules/azure-cli-resource/azure/cli/command_modules/resource/_params.py",
"src/command_modules/azure-cli-storage/HISTORY.rst",
"src/command_modules/azure-cli-storage/azure/cli/command_modules/storage/blob.py",
"src/command_modules/azure-cli-resource/azure/cli/command_modules/resource/_validators.py",
"src/command_modules/azure-cli-interactive/azclishell/app.py",
"src/command_modules/azure-cli-resource/azure/cli/command_modules/resource/custom.py",
"src/command_modules/azure-cli-resource/HISTORY.rst",
"src/command_modules/azure-cli-resource/azure/cli/command_modules/resource/_help.py"
]
|
|
stchris__untangle-43 | 2119763f721b23588b6223f28264843f6fc9f750 | 2017-06-09 18:57:03 | cd441fd6c3375430cca9443aea10e927f3d7fe68 | diff --git a/untangle.py b/untangle.py
index 1263056..6d1e966 100755
--- a/untangle.py
+++ b/untangle.py
@@ -14,6 +14,7 @@
License: MIT License - http://www.opensource.org/licenses/mit-license.php
"""
import os
+import keyword
from xml.sax import make_parser, handler
try:
from StringIO import StringIO
@@ -133,6 +134,11 @@ class Handler(handler.ContentHandler):
name = name.replace('-', '_')
name = name.replace('.', '_')
name = name.replace(':', '_')
+
+ # adding trailing _ for keywords
+ if keyword.iskeyword(name):
+ name += '_'
+
attrs = dict()
for k, v in attributes.items():
attrs[k] = v
| If an XML tag is a Python keyword SyntaxError is raised on access
Namely, I had this issue with a class tag.
My first thought is to use aliases for these. | stchris/untangle | diff --git a/tests/tests.py b/tests/tests.py
index 76a1ef4..33558e3 100755
--- a/tests/tests.py
+++ b/tests/tests.py
@@ -93,6 +93,14 @@ class FromStringTestCase(unittest.TestCase):
self.assertEqual('child1', getattr(o.root, 'child')[0]['name'])
+ def test_python_keyword(self):
+ o = untangle.parse("<class><return/><pass/><None/></class>")
+ self.assert_(o is not None)
+ self.assert_(o.class_ is not None)
+ self.assert_(o.class_.return_ is not None)
+ self.assert_(o.class_.pass_ is not None)
+ self.assert_(o.class_.None_ is not None)
+
class InvalidTestCase(unittest.TestCase):
""" Test corner cases """
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 2
},
"num_modified_files": 1
} | 1.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.6",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
certifi==2021.5.30
importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
pytest==6.2.4
toml @ file:///tmp/build/80754af9/toml_1616166611790/work
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
-e git+https://github.com/stchris/untangle.git@2119763f721b23588b6223f28264843f6fc9f750#egg=untangle
zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
| name: untangle
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=21.4.0=pyhd3eb1b0_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- importlib-metadata=4.8.1=py36h06a4308_0
- importlib_metadata=4.8.1=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- more-itertools=8.12.0=pyhd3eb1b0_0
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pip=21.2.2=py36h06a4308_0
- pluggy=0.13.1=py36h06a4308_0
- py=1.11.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pytest=6.2.4=py36h06a4308_2
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- toml=0.10.2=pyhd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zipp=3.6.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
prefix: /opt/conda/envs/untangle
| [
"tests/tests.py::FromStringTestCase::test_python_keyword"
]
| []
| [
"tests/tests.py::FromStringTestCase::test_attribute_protocol",
"tests/tests.py::FromStringTestCase::test_basic",
"tests/tests.py::FromStringTestCase::test_basic_with_decl",
"tests/tests.py::FromStringTestCase::test_grouping",
"tests/tests.py::FromStringTestCase::test_single_root",
"tests/tests.py::FromStringTestCase::test_with_attributes",
"tests/tests.py::InvalidTestCase::test_empty_xml",
"tests/tests.py::InvalidTestCase::test_invalid_xml",
"tests/tests.py::InvalidTestCase::test_none_xml",
"tests/tests.py::PomXmlTestCase::test_lengths",
"tests/tests.py::PomXmlTestCase::test_parent",
"tests/tests.py::NamespaceTestCase::test_namespace",
"tests/tests.py::IterationTestCase::test_multiple_children",
"tests/tests.py::IterationTestCase::test_single_child",
"tests/tests.py::TwimlTestCase::test_twiml_dir",
"tests/tests.py::UnicodeTestCase::test_lengths",
"tests/tests.py::UnicodeTestCase::test_unicode_file",
"tests/tests.py::UnicodeTestCase::test_unicode_string",
"tests/tests.py::FileObjects::test_file_object",
"tests/tests.py::UntangleInObjectsTestCase::test_object",
"tests/tests.py::UrlStringTestCase::test_is_url",
"tests/tests.py::TestSaxHandler::test_cdata",
"tests/tests.py::TestSaxHandler::test_empty_handler",
"tests/tests.py::TestSaxHandler::test_handler",
"tests/tests.py::FigsTestCase::test_figs",
"tests/tests.py::ParserFeatureTestCase::test_invalid_external_dtd",
"tests/tests.py::ParserFeatureTestCase::test_invalid_feature",
"tests/tests.py::ParserFeatureTestCase::test_valid_feature"
]
| []
| MIT License | 1,354 | [
"untangle.py"
]
| [
"untangle.py"
]
|
|
aio-libs__aiosmtpd-112 | c030ae82759f82d1accb6872ce1702d6b655045d | 2017-06-10 04:48:51 | b87538bc1fc0137b5d188db938c9b386c71683a3 | diff --git a/aiosmtpd/docs/NEWS.rst b/aiosmtpd/docs/NEWS.rst
index b69ec8c..c53b1e6 100644
--- a/aiosmtpd/docs/NEWS.rst
+++ b/aiosmtpd/docs/NEWS.rst
@@ -11,6 +11,8 @@
ignored. **API BREAK** If you have a handler that implements
``handle_NOOP()``, it previously took zero arguments but now requires a
single argument. (Closes #107)
+* The command line options ``--version`` / ``-v`` has been added to print the
+ package's current version number. (Closes #111)
* General improvements in the ``Controller`` class. (Closes #104)
* When aiosmtpd handles a ``STARTTLS`` it must arrange for the original
transport to be closed when the wrapped transport is closed. This fixes a
diff --git a/aiosmtpd/main.py b/aiosmtpd/main.py
index 40fca4c..6e0c338 100644
--- a/aiosmtpd/main.py
+++ b/aiosmtpd/main.py
@@ -4,7 +4,7 @@ import signal
import asyncio
import logging
-from aiosmtpd.smtp import DATA_SIZE_DEFAULT, SMTP
+from aiosmtpd.smtp import DATA_SIZE_DEFAULT, SMTP, __version__
from argparse import ArgumentParser
from functools import partial
from importlib import import_module
@@ -28,6 +28,9 @@ def parseargs(args=None):
parser = ArgumentParser(
prog=PROGRAM,
description='An RFC 5321 SMTP server with extensions.')
+ parser.add_argument(
+ '-v', '--version', action='version',
+ version='%(prog)s {}'.format(__version__))
parser.add_argument(
'-n', '--nosetuid',
dest='setuid', default=True, action='store_false',
| Add CLI support for `--version`
Subject says it all. | aio-libs/aiosmtpd | diff --git a/aiosmtpd/tests/test_main.py b/aiosmtpd/tests/test_main.py
index 0eb7357..85518f1 100644
--- a/aiosmtpd/tests/test_main.py
+++ b/aiosmtpd/tests/test_main.py
@@ -6,7 +6,7 @@ import unittest
from aiosmtpd.handlers import Debugging
from aiosmtpd.main import main, parseargs
-from aiosmtpd.smtp import SMTP
+from aiosmtpd.smtp import SMTP, __version__
from contextlib import ExitStack
from functools import partial
from io import StringIO
@@ -309,3 +309,23 @@ class TestParseArgs(unittest.TestCase):
self.assertEqual(cm.exception.code, 2)
usage_lines = stderr.getvalue().splitlines()
self.assertEqual(usage_lines[-1][-24:], 'Invalid port number: foo')
+
+ def test_version(self):
+ stdout = StringIO()
+ with ExitStack() as resources:
+ resources.enter_context(patch('sys.stdout', stdout))
+ resources.enter_context(patch('aiosmtpd.main.PROGRAM', 'smtpd'))
+ cm = resources.enter_context(self.assertRaises(SystemExit))
+ parseargs(('--version',))
+ self.assertEqual(cm.exception.code, 0)
+ self.assertEqual(stdout.getvalue(), 'smtpd {}\n'.format(__version__))
+
+ def test_v(self):
+ stdout = StringIO()
+ with ExitStack() as resources:
+ resources.enter_context(patch('sys.stdout', stdout))
+ resources.enter_context(patch('aiosmtpd.main.PROGRAM', 'smtpd'))
+ cm = resources.enter_context(self.assertRaises(SystemExit))
+ parseargs(('-v',))
+ self.assertEqual(cm.exception.code, 0)
+ self.assertEqual(stdout.getvalue(), 'smtpd {}\n'.format(__version__))
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 3,
"test_score": 1
},
"num_modified_files": 2
} | 1.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | -e git+https://github.com/aio-libs/aiosmtpd.git@c030ae82759f82d1accb6872ce1702d6b655045d#egg=aiosmtpd
atpublic==5.1
exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
packaging @ file:///croot/packaging_1734472117206/work
pluggy @ file:///croot/pluggy_1733169602837/work
pytest @ file:///croot/pytest_1738938843180/work
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
| name: aiosmtpd
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- atpublic==5.1
prefix: /opt/conda/envs/aiosmtpd
| [
"aiosmtpd/tests/test_main.py::TestParseArgs::test_v",
"aiosmtpd/tests/test_main.py::TestParseArgs::test_version"
]
| []
| [
"aiosmtpd/tests/test_main.py::TestMain::test_debug_0",
"aiosmtpd/tests/test_main.py::TestMain::test_debug_1",
"aiosmtpd/tests/test_main.py::TestMain::test_debug_2",
"aiosmtpd/tests/test_main.py::TestMain::test_debug_3",
"aiosmtpd/tests/test_main.py::TestMain::test_n",
"aiosmtpd/tests/test_main.py::TestMain::test_nosetuid",
"aiosmtpd/tests/test_main.py::TestMain::test_setuid",
"aiosmtpd/tests/test_main.py::TestMain::test_setuid_no_pwd_module",
"aiosmtpd/tests/test_main.py::TestMain::test_setuid_permission_error",
"aiosmtpd/tests/test_main.py::TestLoop::test_loop",
"aiosmtpd/tests/test_main.py::TestLoop::test_loop_keyboard_interrupt",
"aiosmtpd/tests/test_main.py::TestLoop::test_s",
"aiosmtpd/tests/test_main.py::TestLoop::test_size",
"aiosmtpd/tests/test_main.py::TestLoop::test_smtputf8",
"aiosmtpd/tests/test_main.py::TestLoop::test_u",
"aiosmtpd/tests/test_main.py::TestParseArgs::test_bad_port_number",
"aiosmtpd/tests/test_main.py::TestParseArgs::test_default_host_port",
"aiosmtpd/tests/test_main.py::TestParseArgs::test_handler_from_cli",
"aiosmtpd/tests/test_main.py::TestParseArgs::test_handler_from_cli_exception",
"aiosmtpd/tests/test_main.py::TestParseArgs::test_handler_no_from_cli",
"aiosmtpd/tests/test_main.py::TestParseArgs::test_handler_no_from_cli_exception",
"aiosmtpd/tests/test_main.py::TestParseArgs::test_host_no_host",
"aiosmtpd/tests/test_main.py::TestParseArgs::test_host_no_port",
"aiosmtpd/tests/test_main.py::TestParseArgs::test_ipv6_host_port",
"aiosmtpd/tests/test_main.py::TestParseArgs::test_l",
"aiosmtpd/tests/test_main.py::TestParseArgs::test_listen"
]
| []
| Apache License 2.0 | 1,355 | [
"aiosmtpd/main.py",
"aiosmtpd/docs/NEWS.rst"
]
| [
"aiosmtpd/main.py",
"aiosmtpd/docs/NEWS.rst"
]
|
|
elastic__elasticsearch-dsl-py-674 | 7c7a8d486877a25f53e226a1a00f900dfebf43cb | 2017-06-10 07:06:14 | e8906dcd17eb2021bd191325817ff7541d838ea1 | diff --git a/.gitignore b/.gitignore
index 4d5d0eb..d08fec9 100644
--- a/.gitignore
+++ b/.gitignore
@@ -12,3 +12,4 @@ test_elasticsearch_dsl/htmlcov
docs/_build
.cache
venv
+.idea
diff --git a/Changelog.rst b/Changelog.rst
index 8aa4e7b..d35153a 100644
--- a/Changelog.rst
+++ b/Changelog.rst
@@ -3,6 +3,12 @@
Changelog
=========
+5.4.0 (dev)
+-----------
+ * fix ``ip_range`` aggregation and rename the class to ``IPRange``.
+ ``Iprange`` is kept for bw compatibility
+ * fix bug in loading an aggregation with meta data from dict
+
5.3.0 (2017-05-18)
------------------
* fix constant score query definition
diff --git a/README b/README
index 0422153..c6ac2fd 100644
--- a/README
+++ b/README
@@ -64,7 +64,7 @@ Let's have a typical search request written directly as a ``dict``:
"query": {
"bool": {
"must": [{"match": {"title": "python"}}],
- "must_not": [{"match": {"description": "beta"}}]
+ "must_not": [{"match": {"description": "beta"}}],
"filter": [{"term": {"category": "search"}}]
}
},
diff --git a/docs/faceted_search.rst b/docs/faceted_search.rst
index a08e649..169534c 100644
--- a/docs/faceted_search.rst
+++ b/docs/faceted_search.rst
@@ -54,9 +54,9 @@ There are several different facets available:
``HistogramFacet``
similar to ``DateHistogramFacet`` but for numerical values: ``HistogramFacet(field="rating", interval=2)``
-``Rangefacet``
+``RangeFacet``
allows you to define your own ranges for a numerical fields:
- ``Rangefacet(field="comment_count", ranges=[("few", (None, 2)), ("lots", (2, None))])``
+ ``RangeFacet(field="comment_count", ranges=[("few", (None, 2)), ("lots", (2, None))])``
Advanced
~~~~~~~~
diff --git a/elasticsearch_dsl/__init__.py b/elasticsearch_dsl/__init__.py
index 0a11f2c..74c7e2a 100644
--- a/elasticsearch_dsl/__init__.py
+++ b/elasticsearch_dsl/__init__.py
@@ -9,6 +9,6 @@ from .index import Index
from .analysis import analyzer, token_filter, char_filter, tokenizer
from .faceted_search import *
-VERSION = (5, 3, 0)
+VERSION = (5, 4, 0, 'dev')
__version__ = VERSION
__versionstr__ = '.'.join(map(str, VERSION))
diff --git a/elasticsearch_dsl/aggs.py b/elasticsearch_dsl/aggs.py
index fba5980..5a56172 100644
--- a/elasticsearch_dsl/aggs.py
+++ b/elasticsearch_dsl/aggs.py
@@ -17,14 +17,19 @@ def A(name_or_agg, filter=None, **params):
agg = name_or_agg.copy()
# pop out nested aggs
aggs = agg.pop('aggs', None)
+ # pop out meta data
+ meta = agg.pop('meta', None)
# should be {"terms": {"field": "tags"}}
if len(agg) != 1:
raise ValueError('A() can only accept dict with an aggregation ({"terms": {...}}). '
- 'Instead it got (%r)' % name_or_agg)
+ 'Instead it got (%r)' % name_or_agg)
agg_type, params = agg.popitem()
if aggs:
params = params.copy()
params['aggs'] = aggs
+ if meta:
+ params = params.copy()
+ params['meta'] = meta
return Agg.get_dsl_class(agg_type)(_expand__to_dot=False, **params)
# Terms(...) just return the nested agg
@@ -62,7 +67,7 @@ class AggBase(object):
return key in self._params.get('aggs', {})
def __getitem__(self, agg_name):
- agg = self._params.setdefault('aggs', {})[agg_name] # propagate KeyError
+ agg = self._params.setdefault('aggs', {})[agg_name] # propagate KeyError
# make sure we're not mutating a shared state - whenever accessing a
# bucket, return a shallow copy of it to be safe
@@ -171,8 +176,12 @@ class Histogram(Bucket):
def result(self, search, data):
return FieldBucketData(self, search, data)
+# TODO: remove in 6.0
class Iprange(Bucket):
- name = 'iprange'
+ name = 'ip_range'
+
+class IPRange(Bucket):
+ name = 'ip_range'
class Missing(Bucket):
name = 'missing'
diff --git a/elasticsearch_dsl/analysis.py b/elasticsearch_dsl/analysis.py
index b735365..8424283 100644
--- a/elasticsearch_dsl/analysis.py
+++ b/elasticsearch_dsl/analysis.py
@@ -1,7 +1,7 @@
from .utils import DslBase
__all__ = [
- 'tokenizer', 'analyzer', 'char_filter', 'token_filter'
+ 'tokenizer', 'analyzer', 'char_filter', 'token_filter', 'normalizer'
]
class AnalysisBase(object):
@@ -34,6 +34,26 @@ class CustomAnalysis(object):
d['type'] = self._builtin_type
return d
+class CustomAnalysisDefinition(CustomAnalysis):
+ def get_analysis_definition(self):
+ out = {self._type_name: {self._name: self.get_definition()}}
+
+ t = getattr(self, 'tokenizer', None)
+ if 'tokenizer' in self._param_defs and hasattr(t, 'get_definition'):
+ out['tokenizer'] = {t._name: t.get_definition()}
+
+ filters = dict((f._name, f.get_definition())
+ for f in self.filter if hasattr(f, 'get_definition'))
+ if filters:
+ out['filter'] = filters
+
+ char_filters = dict((f._name, f.get_definition())
+ for f in self.char_filter if hasattr(f, 'get_definition'))
+ if char_filters:
+ out['char_filter'] = char_filters
+
+ return out
+
class BuiltinAnalysis(object):
name = 'builtin'
def __init__(self, name):
@@ -52,33 +72,26 @@ class BuiltinAnalyzer(BuiltinAnalysis, Analyzer):
def get_analysis_definition(self):
return {}
-class CustomAnalyzer(CustomAnalysis, Analyzer):
+class CustomAnalyzer(CustomAnalysisDefinition, Analyzer):
_param_defs = {
'filter': {'type': 'token_filter', 'multi': True},
'char_filter': {'type': 'char_filter', 'multi': True},
'tokenizer': {'type': 'tokenizer'},
}
- def get_analysis_definition(self):
- out = {'analyzer': {self._name: self.get_definition()}}
-
- t = getattr(self, 'tokenizer', None)
- if hasattr(t, 'get_definition'):
- out['tokenizer'] = {t._name: t.get_definition()}
-
- filters = dict((f._name, f.get_definition())
- for f in self.filter if hasattr(f, 'get_definition'))
- if filters:
- out['filter'] = filters
-
-
- char_filters = dict((f._name, f.get_definition())
- for f in self.char_filter if hasattr(f, 'get_definition'))
- if char_filters:
- out['char_filter'] = char_filters
+class Normalizer(AnalysisBase, DslBase):
+ _type_name = 'normalizer'
+ name = None
- return out
+class BuiltinNormalizer(BuiltinAnalysis, Normalizer):
+ def get_analysis_definition(self):
+ return {}
+class CustomNormalizer(CustomAnalysisDefinition, Normalizer):
+ _param_defs = {
+ 'filter': {'type': 'token_filter', 'multi': True},
+ 'char_filter': {'type': 'char_filter', 'multi': True}
+ }
class Tokenizer(AnalysisBase, DslBase):
_type_name = 'tokenizer'
@@ -113,9 +126,9 @@ class CustomCharFilter(CustomAnalysis, CharFilter):
pass
-
# shortcuts for direct use
analyzer = Analyzer._type_shortcut
tokenizer = Tokenizer._type_shortcut
token_filter = TokenFilter._type_shortcut
char_filter = CharFilter._type_shortcut
+normalizer = Normalizer._type_shortcut
diff --git a/elasticsearch_dsl/document.py b/elasticsearch_dsl/document.py
index 1c4e5f9..a0a103f 100644
--- a/elasticsearch_dsl/document.py
+++ b/elasticsearch_dsl/document.py
@@ -25,16 +25,19 @@ META_FIELDS = frozenset((
'index', 'using', 'score',
)).union(DOC_META_FIELDS)
+
class MetaField(object):
def __init__(self, *args, **kwargs):
self.args, self.kwargs = args, kwargs
+
class DocTypeMeta(type):
def __new__(cls, name, bases, attrs):
# DocTypeMeta filters attrs in place
attrs['_doc_type'] = DocTypeOptions(name, bases, attrs)
return super(DocTypeMeta, cls).__new__(cls, name, bases, attrs)
+
class DocTypeOptions(object):
def __init__(self, name, bases, attrs):
meta = attrs.pop('Meta', None)
@@ -337,7 +340,7 @@ class DocType(ObjectBase):
meta['_source'] = d
return meta
- def update(self, using=None, index=None, **fields):
+ def update(self, using=None, index=None, detect_noop=True, doc_as_upsert=False, **fields):
"""
Partial update of the document, specify fields you wish to update and
both the instance and the document in elasticsearch will be updated::
@@ -356,21 +359,37 @@ class DocType(ObjectBase):
if not fields:
raise IllegalOperation('You cannot call update() without updating individual fields. '
'If you wish to update the entire object use save().')
+
es = self._get_connection(using)
- # update the data locally
+ # update given fields locally
merge(self._d_, fields)
+ # prepare data for ES
+ values = self.to_dict()
+
+ # if fields were given: partial update
+ doc = dict(
+ (k, values.get(k))
+ for k in fields.keys()
+ )
+
# extract parent, routing etc from meta
doc_meta = dict(
(k, self.meta[k])
for k in DOC_META_FIELDS
if k in self.meta
)
+ body = {
+ 'doc': doc,
+ 'doc_as_upsert': doc_as_upsert,
+ 'detect_noop': detect_noop,
+ }
+
meta = es.update(
index=self._get_index(index),
doc_type=self._doc_type.name,
- body={'doc': fields},
+ body=body,
**doc_meta
)
# update meta information from ES
diff --git a/elasticsearch_dsl/faceted_search.py b/elasticsearch_dsl/faceted_search.py
index d896625..75c5fb0 100644
--- a/elasticsearch_dsl/faceted_search.py
+++ b/elasticsearch_dsl/faceted_search.py
@@ -141,6 +141,10 @@ class DateHistogramFacet(Facet):
def get_value(self, bucket):
if not isinstance(bucket['key'], datetime):
+ # Elasticsearch returns key=None instead of 0 for date 1970-01-01,
+ # so we need to set key to 0 to avoid TypeError exception
+ if bucket['key'] is None:
+ bucket['key'] = 0
return datetime.utcfromtimestamp(int(bucket['key']) / 1000)
else:
return bucket['key']
diff --git a/elasticsearch_dsl/field.py b/elasticsearch_dsl/field.py
index 3fbc80d..129b53c 100644
--- a/elasticsearch_dsl/field.py
+++ b/elasticsearch_dsl/field.py
@@ -247,6 +247,7 @@ class Keyword(Field):
_param_defs = {
'fields': {'type': 'field', 'hash': True},
'search_analyzer': {'type': 'analyzer'},
+ 'normalizer': {'type': 'normalizer'}
}
name = 'keyword'
diff --git a/elasticsearch_dsl/index.py b/elasticsearch_dsl/index.py
index 7fcb168..634c00f 100644
--- a/elasticsearch_dsl/index.py
+++ b/elasticsearch_dsl/index.py
@@ -133,7 +133,7 @@ class Index(object):
def search(self):
"""
- Rteurn a :class:`~elasticsearch_dsl.Search` object searching over this
+ Return a :class:`~elasticsearch_dsl.Search` object searching over this
index and its ``DocType``\s.
"""
return Search(
diff --git a/elasticsearch_dsl/mapping.py b/elasticsearch_dsl/mapping.py
index c738ed2..385416d 100644
--- a/elasticsearch_dsl/mapping.py
+++ b/elasticsearch_dsl/mapping.py
@@ -57,7 +57,7 @@ class Mapping(object):
fields.append(Text(**self._meta['_all']))
for f in chain(fields, self.properties._collect_fields()):
- for analyzer_name in ('analyzer', 'search_analyzer', 'search_quote_analyzer'):
+ for analyzer_name in ('analyzer', 'normalizer', 'search_analyzer', 'search_quote_analyzer'):
if not hasattr(f, analyzer_name):
continue
analyzer = getattr(f, analyzer_name)
diff --git a/elasticsearch_dsl/search.py b/elasticsearch_dsl/search.py
index bb2706d..fbb2d24 100644
--- a/elasticsearch_dsl/search.py
+++ b/elasticsearch_dsl/search.py
@@ -135,13 +135,24 @@ class Request(object):
s = Search()
s = s.index('twitter-2015.01.01', 'twitter-2015.01.02')
+ s = s.index(['twitter-2015.01.01', 'twitter-2015.01.02'])
"""
# .index() resets
s = self._clone()
if not index:
s._index = None
else:
- s._index = (self._index or []) + list(index)
+ indexes = []
+ for i in index:
+ if isinstance(i, str):
+ indexes.append(i)
+ elif isinstance(i, list):
+ indexes += i
+ elif isinstance(i, tuple):
+ indexes += list(i)
+
+ s._index = (self._index or []) + indexes
+
return s
def _add_doc_type(self, doc_type):
@@ -566,7 +577,7 @@ class Search(Request):
d.update(self._extra)
- if not self._source in (None, {}):
+ if self._source not in (None, {}):
d['_source'] = self._source
if self._highlight:
@@ -654,7 +665,7 @@ class Search(Request):
index=self._index,
doc_type=self._doc_type,
**self._params
- ):
+ ):
callback = self._doc_type_map.get(hit['_type'], Hit)
callback = getattr(callback, 'from_es', callback)
yield callback(hit)
@@ -676,7 +687,6 @@ class Search(Request):
)
-
class MultiSearch(Request):
"""
Combine multiple :class:`~elasticsearch_dsl.Search` objects into a single
diff --git a/setup.py b/setup.py
index d45101d..bfd7f45 100644
--- a/setup.py
+++ b/setup.py
@@ -3,7 +3,7 @@ import sys
from os.path import join, dirname
from setuptools import setup, find_packages
-VERSION = (5, 3, 0)
+VERSION = (5, 4, 0, 'dev')
__version__ = VERSION
__versionstr__ = '.'.join(map(str, VERSION))
| Error with DateHistogramFacet, bucket['key'] is None instead of int()
I am using Doctype with Date() field. When I want to use DateHistogramFacet I get this error:
`TypeError: int() argument must be a string, a bytes-like object or a number, not 'NoneType'`
Error is raised in file faceted_search.py in DateHistogramFacet.get_value() function on line 144: `return datetime.utcfromtimestamp(int(bucket['key']) / 1000)`
`bucket` variable shows this values during error:
```
bucket = {Bucket} {'key_as_string': '1970-01-01T00:00:00.000Z', 'key': None, '...}
doc_count = {int} 84
key = {NoneType} None
key_as_string = {str} '1970-01-01T00:00:00.000Z'
```
Before calling get_value() function `data` variable looks like this:
```
[{'key_as_string': '1970-01-01T00:00:00.000Z', 'key': None, 'doc_count': 4},
{'key_as_string': '1970-02-01T00:00:00.000Z', 'key': 2678400000, 'doc_count': 3},
{'key_as_string': '1970-03-01T00:00:00.000Z', 'key': 5097600000, 'doc_count': 0},
{'key_as_string': '1970-04-01T00:00:00.000Z', 'key': 7776000000, 'doc_count': 5},
{'key_as_string': '1970-05-01T00:00:00.000Z', 'key': 10368000000, 'doc_count': 17},
{'key_as_string': '1970-06-01T00:00:00.000Z', 'key': 13046400000, 'doc_count': 4},
{'key_as_string': '1970-07-01T00:00:00.000Z', 'key': 15638400000, 'doc_count': 8},
{'key_as_string': '1970-08-01T00:00:00.000Z', 'key': 18316800000, 'doc_count': 2},
{'key_as_string': '1970-09-01T00:00:00.000Z', 'key': 20995200000, 'doc_count': 1},
{'key_as_string': '1970-10-01T00:00:00.000Z', 'key': 23587200000, 'doc_count': 0},
{'key_as_string': '1970-11-01T00:00:00.000Z', 'key': 26265600000, 'doc_count': 6},
{'key_as_string': '1970-12-01T00:00:00.000Z', 'key': 28857600000, 'doc_count': 34},
.....]
```
| elastic/elasticsearch-dsl-py | diff --git a/test_elasticsearch_dsl/test_aggs.py b/test_elasticsearch_dsl/test_aggs.py
index 4f7dd10..a35de40 100644
--- a/test_elasticsearch_dsl/test_aggs.py
+++ b/test_elasticsearch_dsl/test_aggs.py
@@ -18,6 +18,12 @@ def test_meta():
'meta': {'some': 'metadata'}
} == a.to_dict()
+def test_meta_from_dict():
+ max_score = aggs.Max(field='score')
+ a = aggs.A('terms', field='tags', aggs={'max_score': max_score}, meta={'some': 'metadata'})
+
+ assert aggs.A(a.to_dict()) == a
+
def test_A_creates_proper_agg():
a = aggs.A('terms', field='tags')
diff --git a/test_elasticsearch_dsl/test_analysis.py b/test_elasticsearch_dsl/test_analysis.py
index 5e9ad94..014c43d 100644
--- a/test_elasticsearch_dsl/test_analysis.py
+++ b/test_elasticsearch_dsl/test_analysis.py
@@ -19,6 +19,24 @@ def test_analyzer_has_definition():
'filter': ["lowercase"],
} == a.get_definition()
+def test_normalizer_serializes_as_name():
+ n = analysis.normalizer('my_normalizer')
+
+ assert 'my_normalizer' == n.to_dict()
+
+def test_normalizer_has_definition():
+ n = analysis.CustomNormalizer(
+ 'my_normalizer',
+ filter=['lowercase', 'asciifolding'],
+ char_filter=['quote']
+ )
+
+ assert {
+ 'type': 'custom',
+ 'filter': ['lowercase', 'asciifolding'],
+ 'char_filter': ['quote']
+ } == n.get_definition()
+
def test_tokenizer():
t = analysis.tokenizer('trigram', 'nGram', min_gram=3, max_gram=3)
diff --git a/test_elasticsearch_dsl/test_faceted_search.py b/test_elasticsearch_dsl/test_faceted_search.py
index a06d10d..c887629 100644
--- a/test_elasticsearch_dsl/test_faceted_search.py
+++ b/test_elasticsearch_dsl/test_faceted_search.py
@@ -1,4 +1,8 @@
-from elasticsearch_dsl.faceted_search import FacetedSearch, TermsFacet
+from datetime import datetime
+
+from elasticsearch_dsl.faceted_search import (FacetedSearch, TermsFacet,
+ DateHistogramFacet)
+
class BlogSearch(FacetedSearch):
doc_types = ['user', 'post']
@@ -131,3 +135,8 @@ def test_filters_are_applied_to_search_ant_relevant_facets():
'highlight': {'fields': {'body': {}, 'title': {}}}
} == d
+
+def test_date_histogram_facet_with_1970_01_01_date():
+ dhf = DateHistogramFacet()
+ assert dhf.get_value({'key': None}) == datetime(1970, 1, 1, 0, 0)
+ assert dhf.get_value({'key': 0}) == datetime(1970, 1, 1, 0, 0)
diff --git a/test_elasticsearch_dsl/test_mapping.py b/test_elasticsearch_dsl/test_mapping.py
index f42d866..367005f 100644
--- a/test_elasticsearch_dsl/test_mapping.py
+++ b/test_elasticsearch_dsl/test_mapping.py
@@ -62,7 +62,7 @@ def test_properties_can_iterate_over_all_the_fields():
assert set(('f1', 'f2', 'f3', 'f4')) == set(f.test_attr for f in m.properties._collect_fields())
-def test_mapping_can_collect_all_analyzers():
+def test_mapping_can_collect_all_analyzers_and_normalizers():
a1 = analysis.analyzer('my_analyzer1',
tokenizer='keyword',
filter=['lowercase', analysis.token_filter('my_filter1', 'stop', stopwords=['a', 'b'])],
@@ -74,6 +74,13 @@ def test_mapping_can_collect_all_analyzers():
filter=[analysis.token_filter('my_filter2', 'stop', stopwords=['c', 'd'])],
)
a5 = analysis.analyzer('my_analyzer3', tokenizer='keyword')
+ n1 = analysis.normalizer('my_normalizer1',
+ filter=['lowercase']
+ )
+ n2 = analysis.normalizer('my_normalizer2',
+ filter=['my_filter1', 'my_filter2', analysis.token_filter('my_filter3', 'stop', stopwords=['e', 'f'])]
+ )
+ n3 = analysis.normalizer('unknown_custom')
m = mapping.Mapping('article')
m.field('title', 'text', analyzer=a1,
@@ -85,6 +92,9 @@ def test_mapping_can_collect_all_analyzers():
m.field('comments', Nested(properties={
'author': Text(analyzer=a4)
}))
+ m.field('normalized_title', 'keyword', normalizer=n1)
+ m.field('normalized_comment', 'keyword', normalizer=n2)
+ m.field('unknown', 'keyword', normalizer=n3)
m.meta('_all', analyzer=a5)
assert {
@@ -93,9 +103,14 @@ def test_mapping_can_collect_all_analyzers():
'my_analyzer2': {'filter': ['my_filter2'], 'tokenizer': 'trigram', 'type': 'custom'},
'my_analyzer3': {'tokenizer': 'keyword', 'type': 'custom'},
},
+ 'normalizer': {
+ 'my_normalizer1': {'filter': ['lowercase'], 'type': 'custom'},
+ 'my_normalizer2': {'filter': ['my_filter1', 'my_filter2', 'my_filter3'], 'type': 'custom'},
+ },
'filter': {
'my_filter1': {'stopwords': ['a', 'b'], 'type': 'stop'},
'my_filter2': {'stopwords': ['c', 'd'], 'type': 'stop'},
+ 'my_filter3': {'stopwords': ['e', 'f'], 'type': 'stop'},
},
'tokenizer': {
'trigram': {'max_gram': 3, 'min_gram': 3, 'type': 'nGram'},
diff --git a/test_elasticsearch_dsl/test_search.py b/test_elasticsearch_dsl/test_search.py
index 24cd37d..bf7dd55 100644
--- a/test_elasticsearch_dsl/test_search.py
+++ b/test_elasticsearch_dsl/test_search.py
@@ -162,6 +162,16 @@ def test_search_index():
s2 = s.index('i3')
assert s._index == ['i', 'i2']
assert s2._index == ['i', 'i2', 'i3']
+ s = search.Search()
+ s = s.index(['i', 'i2'], 'i3')
+ assert s._index == ['i', 'i2', 'i3']
+ s2 = s.index('i4')
+ assert s._index == ['i', 'i2', 'i3']
+ assert s2._index == ['i', 'i2', 'i3', 'i4']
+ s2 = s.index(['i4'])
+ assert s2._index == ['i', 'i2', 'i3', 'i4']
+ s2 = s.index(('i4', 'i5'))
+ assert s2._index == ['i', 'i2', 'i3', 'i4', 'i5']
def test_search_doc_type():
s = search.Search(doc_type='i')
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 3
},
"num_modified_files": 14
} | 5.3 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[develop]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"mock",
"pytest",
"pytest-cov",
"pytz"
],
"pre_install": null,
"python": "3.6",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.13
attrs==22.2.0
Babel==2.11.0
certifi==2021.5.30
charset-normalizer==2.0.12
coverage==6.2
docutils==0.18.1
elasticsearch==5.5.3
-e git+https://github.com/elastic/elasticsearch-dsl-py.git@7c7a8d486877a25f53e226a1a00f900dfebf43cb#egg=elasticsearch_dsl
idna==3.10
imagesize==1.4.1
importlib-metadata==4.8.3
iniconfig==1.1.1
Jinja2==3.0.3
MarkupSafe==2.0.1
mock==5.2.0
packaging==21.3
pluggy==1.0.0
py==1.11.0
Pygments==2.14.0
pyparsing==3.1.4
pytest==7.0.1
pytest-cov==4.0.0
python-dateutil==2.9.0.post0
pytz==2025.2
requests==2.27.1
six==1.17.0
snowballstemmer==2.2.0
Sphinx==5.3.0
sphinx-rtd-theme==2.0.0
sphinxcontrib-applehelp==1.0.2
sphinxcontrib-devhelp==1.0.2
sphinxcontrib-htmlhelp==2.0.0
sphinxcontrib-jquery==4.1
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==1.0.3
sphinxcontrib-serializinghtml==1.1.5
tomli==1.2.3
typing_extensions==4.1.1
urllib3==1.26.20
zipp==3.6.0
| name: elasticsearch-dsl-py
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.13
- attrs==22.2.0
- babel==2.11.0
- charset-normalizer==2.0.12
- coverage==6.2
- docutils==0.18.1
- elasticsearch==5.5.3
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- jinja2==3.0.3
- markupsafe==2.0.1
- mock==5.2.0
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pygments==2.14.0
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-cov==4.0.0
- python-dateutil==2.9.0.post0
- pytz==2025.2
- requests==2.27.1
- six==1.17.0
- snowballstemmer==2.2.0
- sphinx==5.3.0
- sphinx-rtd-theme==2.0.0
- sphinxcontrib-applehelp==1.0.2
- sphinxcontrib-devhelp==1.0.2
- sphinxcontrib-htmlhelp==2.0.0
- sphinxcontrib-jquery==4.1
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==1.0.3
- sphinxcontrib-serializinghtml==1.1.5
- tomli==1.2.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- zipp==3.6.0
prefix: /opt/conda/envs/elasticsearch-dsl-py
| [
"test_elasticsearch_dsl/test_aggs.py::test_meta_from_dict",
"test_elasticsearch_dsl/test_analysis.py::test_normalizer_serializes_as_name",
"test_elasticsearch_dsl/test_analysis.py::test_normalizer_has_definition",
"test_elasticsearch_dsl/test_faceted_search.py::test_date_histogram_facet_with_1970_01_01_date",
"test_elasticsearch_dsl/test_mapping.py::test_mapping_can_collect_all_analyzers_and_normalizers",
"test_elasticsearch_dsl/test_search.py::test_search_index"
]
| []
| [
"test_elasticsearch_dsl/test_aggs.py::test_repr",
"test_elasticsearch_dsl/test_aggs.py::test_meta",
"test_elasticsearch_dsl/test_aggs.py::test_A_creates_proper_agg",
"test_elasticsearch_dsl/test_aggs.py::test_A_handles_nested_aggs_properly",
"test_elasticsearch_dsl/test_aggs.py::test_A_passes_aggs_through",
"test_elasticsearch_dsl/test_aggs.py::test_A_from_dict",
"test_elasticsearch_dsl/test_aggs.py::test_A_fails_with_incorrect_dict",
"test_elasticsearch_dsl/test_aggs.py::test_A_fails_with_agg_and_params",
"test_elasticsearch_dsl/test_aggs.py::test_buckets_are_nestable",
"test_elasticsearch_dsl/test_aggs.py::test_metric_inside_buckets",
"test_elasticsearch_dsl/test_aggs.py::test_buckets_equals_counts_subaggs",
"test_elasticsearch_dsl/test_aggs.py::test_buckets_to_dict",
"test_elasticsearch_dsl/test_aggs.py::test_nested_buckets_are_reachable_as_getitem",
"test_elasticsearch_dsl/test_aggs.py::test_nested_buckets_are_settable_as_getitem",
"test_elasticsearch_dsl/test_aggs.py::test_filter_can_be_instantiated_using_positional_args",
"test_elasticsearch_dsl/test_aggs.py::test_filter_aggregation_as_nested_agg",
"test_elasticsearch_dsl/test_aggs.py::test_filter_aggregation_with_nested_aggs",
"test_elasticsearch_dsl/test_aggs.py::test_filters_correctly_identifies_the_hash",
"test_elasticsearch_dsl/test_analysis.py::test_analyzer_serializes_as_name",
"test_elasticsearch_dsl/test_analysis.py::test_analyzer_has_definition",
"test_elasticsearch_dsl/test_analysis.py::test_tokenizer",
"test_elasticsearch_dsl/test_analysis.py::test_custom_analyzer_can_collect_custom_items",
"test_elasticsearch_dsl/test_faceted_search.py::test_query_is_created_properly",
"test_elasticsearch_dsl/test_faceted_search.py::test_query_is_created_properly_with_sort_tuple",
"test_elasticsearch_dsl/test_faceted_search.py::test_sort_string_backwards_compat",
"test_elasticsearch_dsl/test_faceted_search.py::test_filter_is_applied_to_search_but_not_relevant_facet",
"test_elasticsearch_dsl/test_faceted_search.py::test_filters_are_applied_to_search_ant_relevant_facets",
"test_elasticsearch_dsl/test_mapping.py::test_mapping_can_has_fields",
"test_elasticsearch_dsl/test_mapping.py::test_mapping_update_is_recursive",
"test_elasticsearch_dsl/test_mapping.py::test_properties_can_iterate_over_all_the_fields",
"test_elasticsearch_dsl/test_mapping.py::test_mapping_can_collect_multiple_analyzers",
"test_elasticsearch_dsl/test_mapping.py::test_even_non_custom_analyzers_can_have_params",
"test_elasticsearch_dsl/test_mapping.py::test_resolve_field_can_resolve_multifields",
"test_elasticsearch_dsl/test_search.py::test_expand__to_dot_is_respected",
"test_elasticsearch_dsl/test_search.py::test_execute_uses_cache",
"test_elasticsearch_dsl/test_search.py::test_iter_iterates_over_hits",
"test_elasticsearch_dsl/test_search.py::test_count_uses_cache",
"test_elasticsearch_dsl/test_search.py::test_cache_isnt_cloned",
"test_elasticsearch_dsl/test_search.py::test_search_starts_with_empty_query",
"test_elasticsearch_dsl/test_search.py::test_search_query_combines_query",
"test_elasticsearch_dsl/test_search.py::test_query_can_be_assigned_to",
"test_elasticsearch_dsl/test_search.py::test_query_can_be_wrapped",
"test_elasticsearch_dsl/test_search.py::test_using",
"test_elasticsearch_dsl/test_search.py::test_methods_are_proxied_to_the_query",
"test_elasticsearch_dsl/test_search.py::test_query_always_returns_search",
"test_elasticsearch_dsl/test_search.py::test_source_copied_on_clone",
"test_elasticsearch_dsl/test_search.py::test_aggs_get_copied_on_change",
"test_elasticsearch_dsl/test_search.py::test_search_doc_type",
"test_elasticsearch_dsl/test_search.py::test_doc_type_can_be_document_class",
"test_elasticsearch_dsl/test_search.py::test_sort",
"test_elasticsearch_dsl/test_search.py::test_slice",
"test_elasticsearch_dsl/test_search.py::test_index",
"test_elasticsearch_dsl/test_search.py::test_search_to_dict",
"test_elasticsearch_dsl/test_search.py::test_complex_example",
"test_elasticsearch_dsl/test_search.py::test_reverse",
"test_elasticsearch_dsl/test_search.py::test_from_dict_doesnt_need_query",
"test_elasticsearch_dsl/test_search.py::test_source",
"test_elasticsearch_dsl/test_search.py::test_source_on_clone",
"test_elasticsearch_dsl/test_search.py::test_source_on_clear",
"test_elasticsearch_dsl/test_search.py::test_suggest_accepts_global_text",
"test_elasticsearch_dsl/test_search.py::test_suggest",
"test_elasticsearch_dsl/test_search.py::test_exclude"
]
| []
| Apache License 2.0 | 1,356 | [
"docs/faceted_search.rst",
"elasticsearch_dsl/index.py",
"elasticsearch_dsl/field.py",
"setup.py",
"elasticsearch_dsl/document.py",
".gitignore",
"elasticsearch_dsl/__init__.py",
"elasticsearch_dsl/analysis.py",
"elasticsearch_dsl/aggs.py",
"Changelog.rst",
"elasticsearch_dsl/faceted_search.py",
"elasticsearch_dsl/search.py",
"elasticsearch_dsl/mapping.py",
"README"
]
| [
"docs/faceted_search.rst",
"elasticsearch_dsl/index.py",
"elasticsearch_dsl/field.py",
"setup.py",
"elasticsearch_dsl/document.py",
".gitignore",
"elasticsearch_dsl/__init__.py",
"elasticsearch_dsl/analysis.py",
"elasticsearch_dsl/aggs.py",
"Changelog.rst",
"elasticsearch_dsl/faceted_search.py",
"elasticsearch_dsl/search.py",
"elasticsearch_dsl/mapping.py",
"README"
]
|
|
Stranger6667__Flask-Postmark-9 | a717b345eb0a59aa10fcc5464dfd3b8d9e151824 | 2017-06-10 13:31:53 | a717b345eb0a59aa10fcc5464dfd3b8d9e151824 | codecov[bot]: # [Codecov](https://codecov.io/gh/Stranger6667/Flask-Postmark/pull/9?src=pr&el=h1) Report
> Merging [#9](https://codecov.io/gh/Stranger6667/Flask-Postmark/pull/9?src=pr&el=desc) into [master](https://codecov.io/gh/Stranger6667/Flask-Postmark/commit/a717b345eb0a59aa10fcc5464dfd3b8d9e151824?src=pr&el=desc) will **increase** coverage by `8.41%`.
> The diff coverage is `100%`.
[](https://codecov.io/gh/Stranger6667/Flask-Postmark/pull/9?src=pr&el=tree)
```diff
@@ Coverage Diff @@
## master #9 +/- ##
==========================================
+ Coverage 88.46% 96.87% +8.41%
==========================================
Files 2 2
Lines 26 32 +6
Branches 4 5 +1
==========================================
+ Hits 23 31 +8
+ Partials 3 1 -2
```
| [Impacted Files](https://codecov.io/gh/Stranger6667/Flask-Postmark/pull/9?src=pr&el=tree) | Coverage Δ | |
|---|---|---|
| [flask\_postmark/core.py](https://codecov.io/gh/Stranger6667/Flask-Postmark/pull/9?src=pr&el=tree#diff-Zmxhc2tfcG9zdG1hcmsvY29yZS5weQ==) | `96.66% <100%> (+9.16%)` | :arrow_up: |
------
[Continue to review full report at Codecov](https://codecov.io/gh/Stranger6667/Flask-Postmark/pull/9?src=pr&el=continue).
> **Legend** - [Click here to learn more](https://docs.codecov.io/docs/codecov-delta)
> `Δ = absolute <relative> (impact)`, `ø = not affected`, `? = missing data`
> Powered by [Codecov](https://codecov.io/gh/Stranger6667/Flask-Postmark/pull/9?src=pr&el=footer). Last update [a717b34...95135e4](https://codecov.io/gh/Stranger6667/Flask-Postmark/pull/9?src=pr&el=lastupdated). Read the [comment docs](https://docs.codecov.io/docs/pull-request-comments).
| diff --git a/docs/changelog.rst b/docs/changelog.rst
index e39741d..25e60ab 100644
--- a/docs/changelog.rst
+++ b/docs/changelog.rst
@@ -10,6 +10,7 @@ Added
~~~~~
- Register the extension on the app. `#6`_
+- Use ``self.app`` as a fallback. `#5`_
0.1.0 - 2017-05-11
------------------
@@ -20,3 +21,4 @@ Added
.. _#6: https://github.com/Stranger6667/Flask-Postmark/issues/6
+.. _#5: https://github.com/Stranger6667/Flask-Postmark/issues/5
diff --git a/flask_postmark/core.py b/flask_postmark/core.py
index ccbdd8a..d1fd990 100644
--- a/flask_postmark/core.py
+++ b/flask_postmark/core.py
@@ -5,6 +5,7 @@ from postmarker.core import PostmarkClient
class Postmark(object):
+ app = None
def __init__(self, app=None):
self.app = app
@@ -15,6 +16,11 @@ class Postmark(object):
app.extensions['postmark'] = self
app.teardown_appcontext(self.teardown)
+ def _get_app(self):
+ if not current_app and self.app:
+ return self.app
+ return current_app
+
def teardown(self, exception):
ctx = stack.top
if hasattr(ctx, 'postmark_client'):
@@ -25,7 +31,8 @@ class Postmark(object):
ctx = stack.top
if ctx is not None:
if not hasattr(ctx, 'postmark_client'):
- ctx.postmark_client = PostmarkClient.from_config(current_app.config, is_uppercase=True)
+ app = self._get_app()
+ ctx.postmark_client = PostmarkClient.from_config(app.config, is_uppercase=True)
return ctx.postmark_client
def send(self, *args, **kwargs):
| Don't store app unless you fall back to it
In `__init__`, you set `self.app = app`, but you never use this value. The pattern for this is to fall back to it if `current_app` is not set, as a convenience to not need an app context when not using deferred init. Some devs prefer to exclude that and only allow `with app.app_context()`.
It's up to you how you want to handle this. Either continue to require a context and remove `self.app`, or use something like the following:
~~~python
class Postmark:
app = None
def _get_app(self):
if not current_app and self.app:
return self.app
return current_app
def session(self):
self._get_app().config
~~~
| Stranger6667/Flask-Postmark | diff --git a/tests/conftest.py b/tests/conftest.py
index 6c44173..12d4e3e 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -24,6 +24,10 @@ def app(server_token, postmark_request):
postmark.send(**data)
return make_response()
+ @app.route('/is_same_client', methods=['POST'])
+ def is_same_client():
+ return json.dumps(postmark.client is postmark.client)
+
@app.route('/send_batch', methods=['POST'])
def send_batch():
data = request.get_json()
diff --git a/tests/test_postmark.py b/tests/test_postmark.py
index 8142fa1..ffd268a 100644
--- a/tests/test_postmark.py
+++ b/tests/test_postmark.py
@@ -44,8 +44,19 @@ class TestPostmark:
}
assert data == [expected, expected]
+ def test_is_same_client(self):
+ assert self.post('/is_same_client', DATA) is True
+
def test_empty_app(self, app):
assert len(app.teardown_appcontext_funcs) == 1
postmark = Postmark()
postmark.init_app(app)
assert len(app.teardown_appcontext_funcs) == 2
+
+ def test_get_app(self, app):
+ postmark = Postmark(app)
+ assert postmark._get_app() is app
+
+ def test_no_context(self, app):
+ postmark = Postmark(app)
+ assert postmark.client is None
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 1,
"test_score": 3
},
"num_modified_files": 2
} | 0.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"mock"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.6",
"reqs_path": [
"docs/requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
charset-normalizer==2.0.12
click==8.0.4
coverage==6.2
dataclasses==0.8
Flask==2.0.3
-e git+https://github.com/Stranger6667/Flask-Postmark.git@a717b345eb0a59aa10fcc5464dfd3b8d9e151824#egg=Flask_Postmark
idna==3.10
importlib-metadata==4.8.3
iniconfig==1.1.1
itsdangerous==2.0.1
Jinja2==3.0.3
MarkupSafe==2.0.1
mock==5.2.0
packaging==21.3
pluggy==1.0.0
postmarker==1.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
pytest-cov==4.0.0
requests==2.27.1
tomli==1.2.3
typing_extensions==4.1.1
urllib3==1.26.20
Werkzeug==2.0.3
zipp==3.6.0
| name: Flask-Postmark
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- charset-normalizer==2.0.12
- click==8.0.4
- coverage==6.2
- dataclasses==0.8
- flask==2.0.3
- idna==3.10
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- itsdangerous==2.0.1
- jinja2==3.0.3
- markupsafe==2.0.1
- mock==5.2.0
- packaging==21.3
- pluggy==1.0.0
- postmarker==1.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-cov==4.0.0
- requests==2.27.1
- tomli==1.2.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- werkzeug==2.0.3
- zipp==3.6.0
prefix: /opt/conda/envs/Flask-Postmark
| [
"tests/test_postmark.py::TestPostmark::test_get_app"
]
| [
"tests/test_postmark.py::TestPostmark::test_send"
]
| [
"tests/test_postmark.py::TestPostmark::test_token",
"tests/test_postmark.py::TestPostmark::test_send_batch",
"tests/test_postmark.py::TestPostmark::test_is_same_client",
"tests/test_postmark.py::TestPostmark::test_empty_app",
"tests/test_postmark.py::TestPostmark::test_no_context"
]
| []
| MIT License | 1,357 | [
"flask_postmark/core.py",
"docs/changelog.rst"
]
| [
"flask_postmark/core.py",
"docs/changelog.rst"
]
|
borgbackup__borg-2654 | 4b38a17ab515d57cb1d6dbf1aac0d3129fca6a13 | 2017-06-10 16:53:36 | a439fa3e720c8bb2a82496768ffcce282fb7f7b7 | codecov-io: # [Codecov](https://codecov.io/gh/borgbackup/borg/pull/2654?src=pr&el=h1) Report
> Merging [#2654](https://codecov.io/gh/borgbackup/borg/pull/2654?src=pr&el=desc) into [master](https://codecov.io/gh/borgbackup/borg/commit/86363dcd4b218ed2d81686d2fc5ff1f064b8a07a?src=pr&el=desc) will **decrease** coverage by `0.06%`.
> The diff coverage is `78.66%`.
[](https://codecov.io/gh/borgbackup/borg/pull/2654?src=pr&el=tree)
```diff
@@ Coverage Diff @@
## master #2654 +/- ##
==========================================
- Coverage 83.99% 83.92% -0.07%
==========================================
Files 23 23
Lines 8565 8696 +131
Branches 1445 1462 +17
==========================================
+ Hits 7194 7298 +104
- Misses 981 1003 +22
- Partials 390 395 +5
```
| [Impacted Files](https://codecov.io/gh/borgbackup/borg/pull/2654?src=pr&el=tree) | Coverage Δ | |
|---|---|---|
| [src/borg/repository.py](https://codecov.io/gh/borgbackup/borg/pull/2654?src=pr&el=tree#diff-c3JjL2JvcmcvcmVwb3NpdG9yeS5weQ==) | `87.51% <100%> (ø)` | :arrow_up: |
| [src/borg/archiver.py](https://codecov.io/gh/borgbackup/borg/pull/2654?src=pr&el=tree#diff-c3JjL2JvcmcvYXJjaGl2ZXIucHk=) | `82.99% <100%> (ø)` | :arrow_up: |
| [src/borg/helpers.py](https://codecov.io/gh/borgbackup/borg/pull/2654?src=pr&el=tree#diff-c3JjL2JvcmcvaGVscGVycy5weQ==) | `86.97% <60%> (+0.02%)` | :arrow_up: |
| [src/borg/cache.py](https://codecov.io/gh/borgbackup/borg/pull/2654?src=pr&el=tree#diff-c3JjL2JvcmcvY2FjaGUucHk=) | `85.97% <79.02%> (-1.87%)` | :arrow_down: |
| [src/borg/remote.py](https://codecov.io/gh/borgbackup/borg/pull/2654?src=pr&el=tree#diff-c3JjL2JvcmcvcmVtb3RlLnB5) | `80.06% <0%> (+0.33%)` | :arrow_up: |
------
[Continue to review full report at Codecov](https://codecov.io/gh/borgbackup/borg/pull/2654?src=pr&el=continue).
> **Legend** - [Click here to learn more](https://docs.codecov.io/docs/codecov-delta)
> `Δ = absolute <relative> (impact)`, `ø = not affected`, `? = missing data`
> Powered by [Codecov](https://codecov.io/gh/borgbackup/borg/pull/2654?src=pr&el=footer). Last update [86363dc...77bfc45](https://codecov.io/gh/borgbackup/borg/pull/2654?src=pr&el=lastupdated). Read the [comment docs](https://docs.codecov.io/docs/pull-request-comments).
ThomasWaldmann: See also #2357.
ThomasWaldmann: hmm, if there is no files cache, that means all backup data set files will have to be read, chunked, all chunks id-hashed and looked up, right?
this will have quite an impact for all bigger files (maybe not so much for tiny files where other processing overhead is bigger than the time needed to read/chunk/hash).
enkore: It would be possible to introduce a new files cache format (`files.v2`?) that addresses it by including full ChunkListEntries, which won't be in this PR.
See https://github.com/borgbackup/borg/issues/2357#issuecomment-292162083
textshell: I think this should at least not memorize broken csize into the archive items. (i.e. Item.get_size should ignore memorize=True if any chunk has a 0 csize.
textshell: I'm not really happy with something in a release creating archives with somewhat bogus metadata without a clear plan forward. This could mean that we are forced to carry code to resolve missing csize for a long time.
Also i think this really should not disable the files cache. Using the files cache or not should be an orthogonal choice. It feels it shouldn't be hard to make that possible.
enkore: I'd recommend you read #2313 and #2357.
enkore: Anything left to do here?
ThomasWaldmann: did you see my comment about "32" hardcoded?
enkore: That's in a different patch.
ThomasWaldmann: I'll have to look it through again.
But my impression was that the discussion with @textshell was not finished, was it?
enkore: I wrote all the answers done months ago. I don't see anything left to discuss.
enkore: Just for the sake of closure...
> I think this should at least not memorize broken csize into the archive items. (i.e. Item.get_size should ignore memorize=True if any chunk has a 0 csize.
That never happened. 36fcbd6
> This could mean that we are forced to carry code to resolve missing csize for a long time.
(1) Borg already generates inconsistent statistics (see recreate warnings) (2) it's literally just statistics. I do not see Borg carrying the extra code around to accommodate this, should it be dropped again.
> Also i think this really should not disable the files cache. Using the files cache or not should be an orthogonal choice. It feels it shouldn't be hard to make that possible.
https://github.com/borgbackup/borg/issues/2313#issuecomment-300438677 | diff --git a/docs/internals/frontends.rst b/docs/internals/frontends.rst
index 4000bede..c41d427e 100644
--- a/docs/internals/frontends.rst
+++ b/docs/internals/frontends.rst
@@ -504,6 +504,7 @@ Errors
Operations
- cache.begin_transaction
+ - cache.download_chunks, appears with ``borg create --no-cache-sync``
- cache.commit
- cache.sync
diff --git a/src/borg/archiver.py b/src/borg/archiver.py
index 4536b83d..3275bf0b 100644
--- a/src/borg/archiver.py
+++ b/src/borg/archiver.py
@@ -504,7 +504,7 @@ def create_inner(archive, cache):
t0_monotonic = time.monotonic()
if not dry_run:
with Cache(repository, key, manifest, do_files=args.cache_files, progress=args.progress,
- lock_wait=self.lock_wait) as cache:
+ lock_wait=self.lock_wait, permit_adhoc_cache=args.no_cache_sync) as cache:
archive = Archive(repository, key, manifest, args.location.archive, cache=cache,
create=True, checkpoint_interval=args.checkpoint_interval,
numeric_owner=args.numeric_owner, noatime=args.noatime, noctime=args.noctime,
@@ -2826,6 +2826,8 @@ def define_common_options(add_common_option):
help='only display items with the given status characters')
subparser.add_argument('--json', action='store_true',
help='output stats as JSON (implies --stats)')
+ subparser.add_argument('--no-cache-sync', dest='no_cache_sync', action='store_true',
+ help='experimental: do not synchronize the cache. Implies --no-files-cache.')
exclude_group = subparser.add_argument_group('Exclusion options')
exclude_group.add_argument('-e', '--exclude', dest='patterns',
diff --git a/src/borg/cache.py b/src/borg/cache.py
index 70d6f029..47b53deb 100644
--- a/src/borg/cache.py
+++ b/src/borg/cache.py
@@ -4,6 +4,7 @@
import stat
from binascii import unhexlify
from collections import namedtuple
+from time import perf_counter
import msgpack
@@ -30,6 +31,7 @@
from .locking import Lock
from .platform import SaveFile
from .remote import cache_if_remote
+from .repository import LIST_SCAN_LIMIT
FileCacheEntry = namedtuple('FileCacheEntry', 'age inode size mtime chunk_ids')
@@ -347,6 +349,69 @@ def destroy(repository, path=None):
os.remove(config) # kill config first
shutil.rmtree(path)
+ def __new__(cls, repository, key, manifest, path=None, sync=True, do_files=False, warn_if_unencrypted=True,
+ progress=False, lock_wait=None, permit_adhoc_cache=False):
+ def local():
+ return LocalCache(repository=repository, key=key, manifest=manifest, path=path, sync=sync,
+ do_files=do_files, warn_if_unencrypted=warn_if_unencrypted, progress=progress,
+ lock_wait=lock_wait)
+
+ def adhoc():
+ return AdHocCache(repository=repository, key=key, manifest=manifest)
+
+ if not permit_adhoc_cache:
+ return local()
+
+ # ad-hoc cache may be permitted, but if the local cache is in sync it'd be stupid to invalidate
+ # it by needlessly using the ad-hoc cache.
+ # Check if the local cache exists and is in sync.
+
+ cache_config = CacheConfig(repository, path, lock_wait)
+ if cache_config.exists():
+ with cache_config:
+ cache_in_sync = cache_config.manifest_id == manifest.id
+ # Don't nest cache locks
+ if cache_in_sync:
+ # Local cache is in sync, use it
+ logger.debug('Cache: choosing local cache (in sync)')
+ return local()
+ logger.debug('Cache: choosing ad-hoc cache (local cache does not exist or is not in sync)')
+ return adhoc()
+
+
+class CacheStatsMixin:
+ str_format = """\
+All archives: {0.total_size:>20s} {0.total_csize:>20s} {0.unique_csize:>20s}
+
+ Unique chunks Total chunks
+Chunk index: {0.total_unique_chunks:20d} {0.total_chunks:20d}"""
+
+ def __str__(self):
+ return self.str_format.format(self.format_tuple())
+
+ Summary = namedtuple('Summary', ['total_size', 'total_csize', 'unique_size', 'unique_csize', 'total_unique_chunks',
+ 'total_chunks'])
+
+ def stats(self):
+ # XXX: this should really be moved down to `hashindex.pyx`
+ stats = self.Summary(*self.chunks.summarize())._asdict()
+ return stats
+
+ def format_tuple(self):
+ stats = self.stats()
+ for field in ['total_size', 'total_csize', 'unique_csize']:
+ stats[field] = format_file_size(stats[field])
+ return self.Summary(**stats)
+
+ def chunks_stored_size(self):
+ return self.stats()['unique_csize']
+
+
+class LocalCache(CacheStatsMixin):
+ """
+ Persistent, local (client-side) cache.
+ """
+
def __init__(self, repository, key, manifest, path=None, sync=True, do_files=False, warn_if_unencrypted=True,
progress=False, lock_wait=None):
"""
@@ -394,31 +459,6 @@ def __enter__(self):
def __exit__(self, exc_type, exc_val, exc_tb):
self.close()
- def __str__(self):
- fmt = """\
-All archives: {0.total_size:>20s} {0.total_csize:>20s} {0.unique_csize:>20s}
-
- Unique chunks Total chunks
-Chunk index: {0.total_unique_chunks:20d} {0.total_chunks:20d}"""
- return fmt.format(self.format_tuple())
-
- Summary = namedtuple('Summary', ['total_size', 'total_csize', 'unique_size', 'unique_csize', 'total_unique_chunks',
- 'total_chunks'])
-
- def stats(self):
- # XXX: this should really be moved down to `hashindex.pyx`
- stats = self.Summary(*self.chunks.summarize())._asdict()
- return stats
-
- def format_tuple(self):
- stats = self.stats()
- for field in ['total_size', 'total_csize', 'unique_csize']:
- stats[field] = format_file_size(stats[field])
- return self.Summary(**stats)
-
- def chunks_stored_size(self):
- return self.stats()['unique_csize']
-
def create(self):
"""Create a new empty cache at `self.path`
"""
@@ -547,10 +587,14 @@ def sync(self):
archive indexes.
"""
archive_path = os.path.join(self.path, 'chunks.archive.d')
+ # An index of chunks whose size had to be fetched
+ chunks_fetched_size_index = ChunkIndex()
# Instrumentation
processed_item_metadata_bytes = 0
processed_item_metadata_chunks = 0
compact_chunks_archive_saved_space = 0
+ fetched_chunks_for_csize = 0
+ fetched_bytes_for_csize = 0
def mkpath(id, suffix=''):
id_hex = bin_to_hex(id)
@@ -588,6 +632,39 @@ def cleanup_cached_archive(id, cleanup_compact=True):
except FileNotFoundError:
pass
+ def fetch_missing_csize(chunk_idx):
+ """
+ Archives created with AdHocCache will have csize=0 in all chunk list entries whose
+ chunks were already in the repository.
+
+ Scan *chunk_idx* for entries where csize=0 and fill in the correct information.
+ """
+ nonlocal fetched_chunks_for_csize
+ nonlocal fetched_bytes_for_csize
+
+ all_missing_ids = chunk_idx.zero_csize_ids()
+ fetch_ids = []
+ if len(chunks_fetched_size_index):
+ for id_ in all_missing_ids:
+ already_fetched_entry = chunks_fetched_size_index.get(id_)
+ if already_fetched_entry:
+ entry = chunk_idx[id_]._replace(csize=already_fetched_entry.csize)
+ assert entry.size == already_fetched_entry.size, 'Chunk size mismatch'
+ chunk_idx[id_] = entry
+ else:
+ fetch_ids.append(id_)
+ else:
+ fetch_ids = all_missing_ids
+
+ # This is potentially a rather expensive operation, but it's hard to tell at this point
+ # if it's a problem in practice (hence the experimental status of --no-cache-sync).
+ for id_, data in zip(fetch_ids, decrypted_repository.repository.get_many(fetch_ids)):
+ entry = chunk_idx[id_]._replace(csize=len(data))
+ chunk_idx[id_] = entry
+ chunks_fetched_size_index[id_] = entry
+ fetched_chunks_for_csize += 1
+ fetched_bytes_for_csize += len(data)
+
def fetch_and_build_idx(archive_id, decrypted_repository, chunk_idx):
nonlocal processed_item_metadata_bytes
nonlocal processed_item_metadata_chunks
@@ -603,6 +680,7 @@ def fetch_and_build_idx(archive_id, decrypted_repository, chunk_idx):
processed_item_metadata_chunks += 1
sync.feed(data)
if self.do_cache:
+ fetch_missing_csize(chunk_idx)
write_archive_index(archive_id, chunk_idx)
def write_archive_index(archive_id, chunk_idx):
@@ -698,8 +776,13 @@ def create_master_idx(chunk_idx):
chunk_idx = chunk_idx or ChunkIndex(master_index_capacity)
logger.info('Fetching archive index for %s ...', archive_name)
fetch_and_build_idx(archive_id, decrypted_repository, chunk_idx)
+ if not self.do_cache:
+ fetch_missing_csize(chunk_idx)
pi.finish()
- logger.debug('Cache sync: processed %s bytes (%d chunks) of metadata',
+ logger.debug('Cache sync: had to fetch %s (%d chunks) because no archive had a csize set for them '
+ '(due to --no-cache-sync)',
+ format_file_size(fetched_bytes_for_csize), fetched_chunks_for_csize)
+ logger.debug('Cache sync: processed %s (%d chunks) of metadata',
format_file_size(processed_item_metadata_bytes), processed_item_metadata_chunks)
logger.debug('Cache sync: compact chunks.archive.d storage saved %s bytes',
format_file_size(compact_chunks_archive_saved_space))
@@ -843,3 +926,143 @@ def memorize_file(self, path_hash, st, ids):
entry = FileCacheEntry(age=0, inode=st.st_ino, size=st.st_size, mtime=int_to_bigint(mtime_ns), chunk_ids=ids)
self.files[path_hash] = msgpack.packb(entry)
self._newest_mtime = max(self._newest_mtime or 0, mtime_ns)
+
+
+class AdHocCache(CacheStatsMixin):
+ """
+ Ad-hoc, non-persistent cache.
+
+ Compared to the standard LocalCache the AdHocCache does not maintain accurate reference count,
+ nor does it provide a files cache (which would require persistence). Chunks that were not added
+ during the current AdHocCache lifetime won't have correct size/csize set (0 bytes) and will
+ have an infinite reference count (MAX_VALUE).
+ """
+
+ str_format = """\
+All archives: unknown unknown unknown
+
+ Unique chunks Total chunks
+Chunk index: {0.total_unique_chunks:20d} unknown"""
+
+ def __init__(self, repository, key, manifest, warn_if_unencrypted=True):
+ self.repository = repository
+ self.key = key
+ self.manifest = manifest
+ self._txn_active = False
+
+ self.security_manager = SecurityManager(repository)
+ self.security_manager.assert_secure(manifest, key)
+
+ logger.warning('Note: --no-cache-sync is an experimental feature.')
+
+ # Public API
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ pass
+
+ files = None
+ do_files = False
+
+ def file_known_and_unchanged(self, path_hash, st, ignore_inode=False):
+ return None
+
+ def memorize_file(self, path_hash, st, ids):
+ pass
+
+ def add_chunk(self, id, chunk, stats, overwrite=False, wait=True):
+ assert not overwrite, 'AdHocCache does not permit overwrites — trying to use it for recreate?'
+ if not self._txn_active:
+ self._begin_txn()
+ size = len(chunk)
+ refcount = self.seen_chunk(id, size)
+ if refcount:
+ return self.chunk_incref(id, stats, size_=size)
+ data = self.key.encrypt(chunk)
+ csize = len(data)
+ self.repository.put(id, data, wait=wait)
+ self.chunks.add(id, 1, size, csize)
+ stats.update(size, csize, not refcount)
+ return ChunkListEntry(id, size, csize)
+
+ def seen_chunk(self, id, size=None):
+ if not self._txn_active:
+ self._begin_txn()
+ entry = self.chunks.get(id, ChunkIndexEntry(0, None, None))
+ if entry.refcount and size and not entry.size:
+ # The LocalCache has existing size information and uses *size* to make an effort at detecting collisions.
+ # This is of course not possible for the AdHocCache.
+ # Here *size* is used to update the chunk's size information, which will be zero for existing chunks.
+ self.chunks[id] = entry._replace(size=size)
+ return entry.refcount
+
+ def chunk_incref(self, id, stats, size_=None):
+ if not self._txn_active:
+ self._begin_txn()
+ count, size, csize = self.chunks.incref(id)
+ stats.update(size or size_, csize, False)
+ # When size is 0 and size_ is not given, then this chunk has not been locally visited yet (seen_chunk with
+ # size or add_chunk); we can't add references to those (size=0 is invalid) and generally don't try to.
+ assert size or size_
+ return ChunkListEntry(id, size or size_, csize)
+
+ def chunk_decref(self, id, stats, wait=True):
+ if not self._txn_active:
+ self._begin_txn()
+ count, size, csize = self.chunks.decref(id)
+ if count == 0:
+ del self.chunks[id]
+ self.repository.delete(id, wait=wait)
+ stats.update(-size, -csize, True)
+ else:
+ stats.update(-size, -csize, False)
+
+ def commit(self):
+ if not self._txn_active:
+ return
+ self.security_manager.save(self.manifest, self.key)
+ self._txn_active = False
+
+ def rollback(self):
+ self._txn_active = False
+ del self.chunks
+
+ # Private API
+
+ def _begin_txn(self):
+ self._txn_active = True
+ # Explicitly set the initial hash table capacity to avoid performance issues
+ # due to hash table "resonance".
+ # Since we're creating an archive, add 10 % from the start.
+ num_chunks = len(self.repository)
+ capacity = int(num_chunks / ChunkIndex.MAX_LOAD_FACTOR * 1.1)
+ self.chunks = ChunkIndex(capacity)
+ pi = ProgressIndicatorPercent(total=num_chunks, msg='Downloading chunk list... %3.0f%%',
+ msgid='cache.download_chunks')
+ t0 = perf_counter()
+ num_requests = 0
+ marker = None
+ while True:
+ result = self.repository.list(limit=LIST_SCAN_LIMIT, marker=marker)
+ num_requests += 1
+ if not result:
+ break
+ pi.show(increase=len(result))
+ marker = result[-1]
+ # All chunks from the repository have a refcount of MAX_VALUE, which is sticky,
+ # therefore we can't/won't delete them. Chunks we added ourselves in this transaction
+ # (e.g. checkpoint archives) are tracked correctly.
+ init_entry = ChunkIndexEntry(refcount=ChunkIndex.MAX_VALUE, size=0, csize=0)
+ for id_ in result:
+ self.chunks[id_] = init_entry
+ assert len(self.chunks) == num_chunks
+ # LocalCache does not contain the manifest, either.
+ del self.chunks[self.manifest.MANIFEST_ID]
+ duration = perf_counter() - t0
+ pi.finish()
+ logger.debug('AdHocCache: downloaded %d chunk IDs in %.2f s (%d requests), ~%s/s',
+ num_chunks, duration, num_requests, format_file_size(num_chunks * 34 / duration))
+ # Chunk IDs in a list are encoded in 34 bytes: 1 byte msgpack header, 1 byte length, 32 ID bytes.
+ # Protocol overhead is neglected in this calculation.
diff --git a/src/borg/hashindex.pyx b/src/borg/hashindex.pyx
index b8e86d14..0d271ad6 100644
--- a/src/borg/hashindex.pyx
+++ b/src/borg/hashindex.pyx
@@ -8,8 +8,9 @@ from libc.stdint cimport uint32_t, UINT32_MAX, uint64_t
from libc.errno cimport errno
from cpython.exc cimport PyErr_SetFromErrnoWithFilename
from cpython.buffer cimport PyBUF_SIMPLE, PyObject_GetBuffer, PyBuffer_Release
+from cpython.bytes cimport PyBytes_FromStringAndSize
-API_VERSION = '1.1_05'
+API_VERSION = '1.1_06'
cdef extern from "_hashindex.c":
@@ -410,6 +411,22 @@ cdef class ChunkIndex(IndexBase):
break
self._add(key, <uint32_t*> (key + self.key_size))
+ def zero_csize_ids(self):
+ cdef void *key = NULL
+ cdef uint32_t *values
+ entries = []
+ while True:
+ key = hashindex_next_key(self.index, key)
+ if not key:
+ break
+ values = <uint32_t*> (key + self.key_size)
+ refcount = _le32toh(values[0])
+ assert refcount <= _MAX_VALUE, "invalid reference count"
+ if _le32toh(values[2]) == 0:
+ # csize == 0
+ entries.append(PyBytes_FromStringAndSize(<char*> key, self.key_size))
+ return entries
+
cdef class ChunkKeyIterator:
cdef ChunkIndex idx
diff --git a/src/borg/helpers.py b/src/borg/helpers.py
index 9100bedf..870d2021 100644
--- a/src/borg/helpers.py
+++ b/src/borg/helpers.py
@@ -131,7 +131,7 @@ class MandatoryFeatureUnsupported(Error):
def check_extension_modules():
from . import platform, compress, item
- if hashindex.API_VERSION != '1.1_05':
+ if hashindex.API_VERSION != '1.1_06':
raise ExtensionModuleError
if chunker.API_VERSION != '1.1_01':
raise ExtensionModuleError
@@ -2010,7 +2010,7 @@ def default(self, o):
from .repository import Repository
from .remote import RemoteRepository
from .archive import Archive
- from .cache import Cache
+ from .cache import LocalCache, AdHocCache
if isinstance(o, Repository) or isinstance(o, RemoteRepository):
return {
'id': bin_to_hex(o.id),
@@ -2018,11 +2018,15 @@ def default(self, o):
}
if isinstance(o, Archive):
return o.info()
- if isinstance(o, Cache):
+ if isinstance(o, LocalCache):
return {
'path': o.path,
'stats': o.stats(),
}
+ if isinstance(o, AdHocCache):
+ return {
+ 'stats': o.stats(),
+ }
return super().default(o)
diff --git a/src/borg/item.pyx b/src/borg/item.pyx
index 5ca93404..91fe57ee 100644
--- a/src/borg/item.pyx
+++ b/src/borg/item.pyx
@@ -189,6 +189,7 @@ class Item(PropDict):
If memorize is True, the computed size value will be stored into the item.
"""
attr = 'csize' if compressed else 'size'
+ assert not (compressed and memorize), 'Item does not have a csize field.'
try:
if from_chunks:
raise AttributeError
diff --git a/src/borg/repository.py b/src/borg/repository.py
index d15d51b1..f73e9cf5 100644
--- a/src/borg/repository.py
+++ b/src/borg/repository.py
@@ -34,7 +34,7 @@
TAG_DELETE = 1
TAG_COMMIT = 2
-LIST_SCAN_LIMIT = 10000 # repo.list() / .scan() result count limit the borg client uses
+LIST_SCAN_LIMIT = 100000 # repo.list() / .scan() result count limit the borg client uses
FreeSpace = partial(defaultdict, int)
| borg create without syncing cache
code exists, needs testing, integration, but not a priority now | borgbackup/borg | diff --git a/conftest.py b/conftest.py
index e85ae6ef..cc428be1 100644
--- a/conftest.py
+++ b/conftest.py
@@ -62,16 +62,16 @@ def pytest_report_header(config, startdir):
class DefaultPatches:
def __init__(self, request):
- self.org_cache_wipe_cache = borg.cache.Cache.wipe_cache
+ self.org_cache_wipe_cache = borg.cache.LocalCache.wipe_cache
def wipe_should_not_be_called(*a, **kw):
raise AssertionError("Cache wipe was triggered, if this is part of the test add @pytest.mark.allow_cache_wipe")
if 'allow_cache_wipe' not in request.keywords:
- borg.cache.Cache.wipe_cache = wipe_should_not_be_called
+ borg.cache.LocalCache.wipe_cache = wipe_should_not_be_called
request.addfinalizer(self.undo)
def undo(self):
- borg.cache.Cache.wipe_cache = self.org_cache_wipe_cache
+ borg.cache.LocalCache.wipe_cache = self.org_cache_wipe_cache
@pytest.fixture(autouse=True)
diff --git a/src/borg/testsuite/archiver.py b/src/borg/testsuite/archiver.py
index e9bcef5e..b0b0a9b7 100644
--- a/src/borg/testsuite/archiver.py
+++ b/src/borg/testsuite/archiver.py
@@ -34,7 +34,7 @@
from .. import xattr, helpers, platform
from ..archive import Archive, ChunkBuffer, flags_noatime, flags_normal
from ..archiver import Archiver, parse_storage_quota
-from ..cache import Cache
+from ..cache import Cache, LocalCache
from ..constants import * # NOQA
from ..crypto.low_level import bytes_to_long, num_aes_blocks
from ..crypto.key import KeyfileKeyBase, RepoKey, KeyfileKey, Passphrase, TAMRequiredError
@@ -1031,6 +1031,21 @@ def test_create_pattern_intermediate_folders_first(self):
assert out_list.index('d x/a') < out_list.index('- x/a/foo_a')
assert out_list.index('d x/b') < out_list.index('- x/b/foo_b')
+ def test_create_no_cache_sync(self):
+ self.create_test_files()
+ self.cmd('init', '--encryption=repokey', self.repository_location)
+ self.cmd('delete', '--cache-only', self.repository_location)
+ create_json = json.loads(self.cmd('create', '--no-cache-sync', self.repository_location + '::test', 'input',
+ '--json', '--error')) # ignore experimental warning
+ info_json = json.loads(self.cmd('info', self.repository_location + '::test', '--json'))
+ create_stats = create_json['cache']['stats']
+ info_stats = info_json['cache']['stats']
+ assert create_stats == info_stats
+ self.cmd('delete', '--cache-only', self.repository_location)
+ self.cmd('create', '--no-cache-sync', self.repository_location + '::test2', 'input')
+ self.cmd('info', self.repository_location)
+ self.cmd('check', self.repository_location)
+
def test_extract_pattern_opt(self):
self.cmd('init', '--encryption=repokey', self.repository_location)
self.create_regular_file('file1', size=1024 * 80)
@@ -1509,14 +1524,14 @@ def test_unknown_mandatory_feature_in_cache(self):
self.cmd('create', self.repository_location + '::test', 'input')
else:
called = False
- wipe_cache_safe = Cache.wipe_cache
+ wipe_cache_safe = LocalCache.wipe_cache
def wipe_wrapper(*args):
nonlocal called
called = True
wipe_cache_safe(*args)
- with patch.object(Cache, 'wipe_cache', wipe_wrapper):
+ with patch.object(LocalCache, 'wipe_cache', wipe_wrapper):
self.cmd('create', self.repository_location + '::test', 'input')
assert called
@@ -2223,7 +2238,7 @@ def check_cache(self):
manifest, key = Manifest.load(repository, Manifest.NO_OPERATION_CHECK)
with Cache(repository, key, manifest, sync=False) as cache:
original_chunks = cache.chunks
- cache.destroy(repository)
+ Cache.destroy(repository)
with Cache(repository, key, manifest) as cache:
correct_chunks = cache.chunks
assert original_chunks is not correct_chunks
diff --git a/src/borg/testsuite/cache.py b/src/borg/testsuite/cache.py
index 6f6452a1..6cce0cb7 100644
--- a/src/borg/testsuite/cache.py
+++ b/src/borg/testsuite/cache.py
@@ -1,11 +1,19 @@
import io
+import os.path
from msgpack import packb
import pytest
-from ..hashindex import ChunkIndex, CacheSynchronizer
from .hashindex import H
+from .key import TestKey
+from ..archive import Statistics
+from ..cache import AdHocCache
+from ..compress import CompressionSpec
+from ..crypto.key import RepoKey
+from ..hashindex import ChunkIndex, CacheSynchronizer
+from ..helpers import Manifest
+from ..repository import Repository
class TestCacheSynchronizer:
@@ -196,3 +204,76 @@ def test_refcount_one_below_max_value(self):
assert index[H(0)] == (ChunkIndex.MAX_VALUE, 1234, 5678)
sync.feed(data)
assert index[H(0)] == (ChunkIndex.MAX_VALUE, 1234, 5678)
+
+
+class TestAdHocCache:
+ @pytest.yield_fixture
+ def repository(self, tmpdir):
+ self.repository_location = os.path.join(str(tmpdir), 'repository')
+ with Repository(self.repository_location, exclusive=True, create=True) as repository:
+ repository.put(H(1), b'1234')
+ repository.put(Manifest.MANIFEST_ID, b'5678')
+ yield repository
+
+ @pytest.fixture
+ def key(self, repository, monkeypatch):
+ monkeypatch.setenv('BORG_PASSPHRASE', 'test')
+ key = RepoKey.create(repository, TestKey.MockArgs())
+ key.compressor = CompressionSpec('none').compressor
+ return key
+
+ @pytest.fixture
+ def manifest(self, repository, key):
+ Manifest(key, repository).write()
+ return Manifest.load(repository, key=key, operations=Manifest.NO_OPERATION_CHECK)[0]
+
+ @pytest.fixture
+ def cache(self, repository, key, manifest):
+ return AdHocCache(repository, key, manifest)
+
+ def test_does_not_contain_manifest(self, cache):
+ assert not cache.seen_chunk(Manifest.MANIFEST_ID)
+
+ def test_does_not_delete_existing_chunks(self, repository, cache):
+ assert cache.seen_chunk(H(1)) == ChunkIndex.MAX_VALUE
+ cache.chunk_decref(H(1), Statistics())
+ assert repository.get(H(1)) == b'1234'
+
+ def test_does_not_overwrite(self, cache):
+ with pytest.raises(AssertionError):
+ cache.add_chunk(H(1), b'5678', Statistics(), overwrite=True)
+
+ def test_seen_chunk_add_chunk_size(self, cache):
+ assert cache.add_chunk(H(1), b'5678', Statistics()) == (H(1), 4, 0)
+
+ def test_deletes_chunks_during_lifetime(self, cache, repository):
+ """E.g. checkpoint archives"""
+ cache.add_chunk(H(5), b'1010', Statistics())
+ assert cache.seen_chunk(H(5)) == 1
+ cache.chunk_decref(H(5), Statistics())
+ assert not cache.seen_chunk(H(5))
+ with pytest.raises(Repository.ObjectNotFound):
+ repository.get(H(5))
+
+ def test_files_cache(self, cache):
+ assert cache.file_known_and_unchanged(bytes(32), None) is None
+ assert not cache.do_files
+ assert cache.files is None
+
+ def test_txn(self, cache):
+ assert not cache._txn_active
+ cache.seen_chunk(H(5))
+ assert cache._txn_active
+ assert cache.chunks
+ cache.rollback()
+ assert not cache._txn_active
+ assert not hasattr(cache, 'chunks')
+
+ def test_incref_after_add_chunk(self, cache):
+ assert cache.add_chunk(H(3), b'5678', Statistics()) == (H(3), 4, 47)
+ assert cache.chunk_incref(H(3), Statistics()) == (H(3), 4, 47)
+
+ def test_existing_incref_after_add_chunk(self, cache):
+ """This case occurs with part files, see Archive.chunk_file."""
+ assert cache.add_chunk(H(1), b'5678', Statistics()) == (H(1), 4, 0)
+ assert cache.chunk_incref(H(1), Statistics()) == (H(1), 4, 0)
diff --git a/src/borg/testsuite/item.py b/src/borg/testsuite/item.py
index 785a962c..f9d72f87 100644
--- a/src/borg/testsuite/item.py
+++ b/src/borg/testsuite/item.py
@@ -154,6 +154,11 @@ def test_item_file_size():
ChunkListEntry(csize=1, size=2000, id=None),
])
assert item.get_size() == 3000
+ with pytest.raises(AssertionError):
+ item.get_size(compressed=True, memorize=True)
+ assert item.get_size(compressed=True) == 2
+ item.get_size(memorize=True)
+ assert item.size == 3000
def test_item_file_size_no_chunks():
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 3,
"test_score": 3
},
"num_modified_files": 7
} | 1.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-xdist",
"pytest-cov"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc libacl1-dev libacl1 libssl-dev liblz4-dev libzstd-dev build-essential pkg-config python3-pkgconfig"
],
"python": "3.9",
"reqs_path": [
"requirements.d/development.txt",
"requirements.d/docs.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.16
babel==2.17.0
-e git+https://github.com/borgbackup/borg.git@4b38a17ab515d57cb1d6dbf1aac0d3129fca6a13#egg=borgbackup
cachetools==5.5.2
certifi==2025.1.31
chardet==5.2.0
charset-normalizer==3.4.1
colorama==0.4.6
coverage==7.8.0
Cython==3.0.12
distlib==0.3.9
docutils==0.21.2
exceptiongroup==1.2.2
execnet==2.1.1
filelock==3.18.0
guzzle_sphinx_theme==0.7.11
idna==3.10
imagesize==1.4.1
importlib_metadata==8.6.1
iniconfig==2.1.0
Jinja2==3.1.6
MarkupSafe==3.0.2
msgpack-python==0.5.6
packaging==24.2
platformdirs==4.3.7
pluggy==1.5.0
py-cpuinfo==9.0.0
Pygments==2.19.1
pyproject-api==1.9.0
pytest==8.3.5
pytest-benchmark==5.1.0
pytest-cov==6.0.0
pytest-xdist==3.6.1
requests==2.32.3
setuptools-scm==8.2.0
snowballstemmer==2.2.0
Sphinx==7.4.7
sphinxcontrib-applehelp==2.0.0
sphinxcontrib-devhelp==2.0.0
sphinxcontrib-htmlhelp==2.1.0
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==2.0.0
sphinxcontrib-serializinghtml==2.0.0
tomli==2.2.1
tox==4.25.0
typing_extensions==4.13.0
urllib3==2.3.0
virtualenv==20.29.3
zipp==3.21.0
| name: borg
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.16
- babel==2.17.0
- cachetools==5.5.2
- certifi==2025.1.31
- chardet==5.2.0
- charset-normalizer==3.4.1
- colorama==0.4.6
- coverage==7.8.0
- cython==3.0.12
- distlib==0.3.9
- docutils==0.21.2
- exceptiongroup==1.2.2
- execnet==2.1.1
- filelock==3.18.0
- guzzle-sphinx-theme==0.7.11
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- jinja2==3.1.6
- markupsafe==3.0.2
- msgpack-python==0.5.6
- packaging==24.2
- platformdirs==4.3.7
- pluggy==1.5.0
- py-cpuinfo==9.0.0
- pygments==2.19.1
- pyproject-api==1.9.0
- pytest==8.3.5
- pytest-benchmark==5.1.0
- pytest-cov==6.0.0
- pytest-xdist==3.6.1
- requests==2.32.3
- setuptools-scm==8.2.0
- snowballstemmer==2.2.0
- sphinx==7.4.7
- sphinxcontrib-applehelp==2.0.0
- sphinxcontrib-devhelp==2.0.0
- sphinxcontrib-htmlhelp==2.1.0
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==2.0.0
- sphinxcontrib-serializinghtml==2.0.0
- tomli==2.2.1
- tox==4.25.0
- typing-extensions==4.13.0
- urllib3==2.3.0
- virtualenv==20.29.3
- zipp==3.21.0
prefix: /opt/conda/envs/borg
| [
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_compression_lz4_uncompressible",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_compression_lzma_uncompressible",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_compression_zlib_uncompressible",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_delete_double_force",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_help",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_init_interrupt",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_init_requires_encryption_option",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_unknown_unencrypted",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_usage",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_help",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_init_requires_encryption_option",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_usage",
"src/borg/testsuite/archiver.py::test_get_args",
"src/borg/testsuite/archiver.py::test_compare_chunk_contents",
"src/borg/testsuite/archiver.py::TestBuildFilter::test_basic",
"src/borg/testsuite/archiver.py::TestBuildFilter::test_empty",
"src/borg/testsuite/archiver.py::TestBuildFilter::test_strip_components",
"src/borg/testsuite/archiver.py::TestCommonOptions::test_simple",
"src/borg/testsuite/archiver.py::TestCommonOptions::test_flag_position_independence[-p-progress-True-before]",
"src/borg/testsuite/archiver.py::TestCommonOptions::test_flag_position_independence[-p-progress-True-after]",
"src/borg/testsuite/archiver.py::TestCommonOptions::test_flag_position_independence[-p-progress-True-both]",
"src/borg/testsuite/archiver.py::TestCommonOptions::test_flag_position_independence[--lock-wait=3-lock_wait-3-before]",
"src/borg/testsuite/archiver.py::TestCommonOptions::test_flag_position_independence[--lock-wait=3-lock_wait-3-after]",
"src/borg/testsuite/archiver.py::TestCommonOptions::test_flag_position_independence[--lock-wait=3-lock_wait-3-both]",
"src/borg/testsuite/archiver.py::TestCommonOptions::test_flag_position_independence[--no-files-cache-no_files_cache-False-before]",
"src/borg/testsuite/archiver.py::TestCommonOptions::test_flag_position_independence[--no-files-cache-no_files_cache-False-after]",
"src/borg/testsuite/archiver.py::TestCommonOptions::test_flag_position_independence[--no-files-cache-no_files_cache-False-both]",
"src/borg/testsuite/archiver.py::test_parse_storage_quota",
"src/borg/testsuite/archiver.py::test_help_formatting[benchmark",
"src/borg/testsuite/archiver.py::test_help_formatting[benchmark-parser1]",
"src/borg/testsuite/archiver.py::test_help_formatting[break-lock-parser2]",
"src/borg/testsuite/archiver.py::test_help_formatting[change-passphrase-parser3]",
"src/borg/testsuite/archiver.py::test_help_formatting[check-parser4]",
"src/borg/testsuite/archiver.py::test_help_formatting[create-parser5]",
"src/borg/testsuite/archiver.py::test_help_formatting[debug",
"src/borg/testsuite/archiver.py::test_help_formatting[debug-parser16]",
"src/borg/testsuite/archiver.py::test_help_formatting[delete-parser17]",
"src/borg/testsuite/archiver.py::test_help_formatting[diff-parser18]",
"src/borg/testsuite/archiver.py::test_help_formatting[export-tar-parser19]",
"src/borg/testsuite/archiver.py::test_help_formatting[extract-parser20]",
"src/borg/testsuite/archiver.py::test_help_formatting[help-parser21]",
"src/borg/testsuite/archiver.py::test_help_formatting[info-parser22]",
"src/borg/testsuite/archiver.py::test_help_formatting[init-parser23]",
"src/borg/testsuite/archiver.py::test_help_formatting[key",
"src/borg/testsuite/archiver.py::test_help_formatting[key-parser28]",
"src/borg/testsuite/archiver.py::test_help_formatting[list-parser29]",
"src/borg/testsuite/archiver.py::test_help_formatting[mount-parser30]",
"src/borg/testsuite/archiver.py::test_help_formatting[prune-parser31]",
"src/borg/testsuite/archiver.py::test_help_formatting[recreate-parser32]",
"src/borg/testsuite/archiver.py::test_help_formatting[rename-parser33]",
"src/borg/testsuite/archiver.py::test_help_formatting[serve-parser34]",
"src/borg/testsuite/archiver.py::test_help_formatting[umount-parser35]",
"src/borg/testsuite/archiver.py::test_help_formatting[upgrade-parser36]",
"src/borg/testsuite/archiver.py::test_help_formatting[with-lock-parser37]",
"src/borg/testsuite/archiver.py::test_help_formatting_helptexts[patterns-\\nFile",
"src/borg/testsuite/archiver.py::test_help_formatting_helptexts[placeholders-\\nRepository",
"src/borg/testsuite/archiver.py::test_help_formatting_helptexts[compression-\\nCompression",
"src/borg/testsuite/cache.py::TestKey::test_plaintext",
"src/borg/testsuite/cache.py::TestKey::test_roundtrip[PlaintextKey]",
"src/borg/testsuite/cache.py::TestKey::test_roundtrip[AuthenticatedKey]",
"src/borg/testsuite/cache.py::TestKey::test_roundtrip[Blake2AuthenticatedKey]",
"src/borg/testsuite/cache.py::TestKey::test_decrypt_decompress[PlaintextKey]",
"src/borg/testsuite/cache.py::TestKey::test_decrypt_decompress[AuthenticatedKey]",
"src/borg/testsuite/cache.py::TestKey::test_decrypt_decompress[Blake2AuthenticatedKey]",
"src/borg/testsuite/cache.py::TestKey::test_assert_id[PlaintextKey]",
"src/borg/testsuite/cache.py::TestKey::test_assert_id[AuthenticatedKey]",
"src/borg/testsuite/cache.py::TestKey::test_assert_id[KeyfileKey]",
"src/borg/testsuite/cache.py::TestKey::test_assert_id[RepoKey]",
"src/borg/testsuite/cache.py::TestKey::test_assert_id[Blake2KeyfileKey]",
"src/borg/testsuite/cache.py::TestKey::test_assert_id[Blake2RepoKey]",
"src/borg/testsuite/cache.py::TestKey::test_assert_id[Blake2AuthenticatedKey]",
"src/borg/testsuite/cache.py::TestKey::test_authenticated_encrypt",
"src/borg/testsuite/cache.py::TestKey::test_blake2_authenticated_encrypt",
"src/borg/testsuite/cache.py::TestCacheSynchronizer::test_no_chunks",
"src/borg/testsuite/cache.py::TestCacheSynchronizer::test_simple",
"src/borg/testsuite/cache.py::TestCacheSynchronizer::test_multiple",
"src/borg/testsuite/cache.py::TestCacheSynchronizer::test_corrupted[<lambda>0-elem0-Unexpected",
"src/borg/testsuite/cache.py::TestCacheSynchronizer::test_corrupted[<lambda>0-\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00-error1]",
"src/borg/testsuite/cache.py::TestCacheSynchronizer::test_corrupted[<lambda>0-1-Unexpected",
"src/borg/testsuite/cache.py::TestCacheSynchronizer::test_corrupted[<lambda>0-1.0-Unexpected",
"src/borg/testsuite/cache.py::TestCacheSynchronizer::test_corrupted[<lambda>0-True-Unexpected",
"src/borg/testsuite/cache.py::TestCacheSynchronizer::test_corrupted[<lambda>0-False-Unexpected",
"src/borg/testsuite/cache.py::TestCacheSynchronizer::test_corrupted[<lambda>0-None-Unexpected",
"src/borg/testsuite/cache.py::TestCacheSynchronizer::test_corrupted[<lambda>1-elem0-Unexpected",
"src/borg/testsuite/cache.py::TestCacheSynchronizer::test_corrupted[<lambda>1-\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00-error1]",
"src/borg/testsuite/cache.py::TestCacheSynchronizer::test_corrupted[<lambda>1-1-Unexpected",
"src/borg/testsuite/cache.py::TestCacheSynchronizer::test_corrupted[<lambda>1-1.0-Unexpected",
"src/borg/testsuite/cache.py::TestCacheSynchronizer::test_corrupted[<lambda>1-True-Unexpected",
"src/borg/testsuite/cache.py::TestCacheSynchronizer::test_corrupted[<lambda>1-False-Unexpected",
"src/borg/testsuite/cache.py::TestCacheSynchronizer::test_corrupted[<lambda>1-None-Unexpected",
"src/borg/testsuite/cache.py::TestCacheSynchronizer::test_corrupted[<lambda>2-elem0-Unexpected",
"src/borg/testsuite/cache.py::TestCacheSynchronizer::test_corrupted[<lambda>2-\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00-error1]",
"src/borg/testsuite/cache.py::TestCacheSynchronizer::test_corrupted[<lambda>2-1-Unexpected",
"src/borg/testsuite/cache.py::TestCacheSynchronizer::test_corrupted[<lambda>2-1.0-Unexpected",
"src/borg/testsuite/cache.py::TestCacheSynchronizer::test_corrupted[<lambda>2-True-Unexpected",
"src/borg/testsuite/cache.py::TestCacheSynchronizer::test_corrupted[<lambda>2-False-Unexpected",
"src/borg/testsuite/cache.py::TestCacheSynchronizer::test_corrupted[<lambda>2-None-Unexpected",
"src/borg/testsuite/cache.py::TestCacheSynchronizer::test_corrupted_ancillary[data0-Invalid",
"src/borg/testsuite/cache.py::TestCacheSynchronizer::test_corrupted_ancillary[data1-Invalid",
"src/borg/testsuite/cache.py::TestCacheSynchronizer::test_corrupted_ancillary[data2-Unexpected",
"src/borg/testsuite/cache.py::TestCacheSynchronizer::test_corrupted_ancillary[data3-Unexpected",
"src/borg/testsuite/cache.py::TestCacheSynchronizer::test_corrupted_ancillary[data4-Unexpected",
"src/borg/testsuite/cache.py::TestCacheSynchronizer::test_corrupted_refcount",
"src/borg/testsuite/cache.py::TestCacheSynchronizer::test_refcount_max_value",
"src/borg/testsuite/cache.py::TestCacheSynchronizer::test_refcount_one_below_max_value",
"src/borg/testsuite/item.py::test_item_empty",
"src/borg/testsuite/item.py::test_item_from_dict",
"src/borg/testsuite/item.py::test_item_from_kw",
"src/borg/testsuite/item.py::test_item_int_property",
"src/borg/testsuite/item.py::test_item_bigint_property",
"src/borg/testsuite/item.py::test_item_user_group_none",
"src/borg/testsuite/item.py::test_item_se_str_property",
"src/borg/testsuite/item.py::test_item_list_property",
"src/borg/testsuite/item.py::test_item_dict_property",
"src/borg/testsuite/item.py::test_unknown_property",
"src/borg/testsuite/item.py::test_item_file_size_no_chunks"
]
| [
"src/borg/testsuite/archiver.py::test_return_codes[python]",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_aes_counter_uniqueness_keyfile",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_aes_counter_uniqueness_passphrase",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_atime",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_bad_filters",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_basic_functionality",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_break_lock",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_change_passphrase",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_check_cache",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_comment",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_common_options",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_compression_auto_compressible",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_compression_auto_uncompressible",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_compression_lz4_compressible",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_compression_lzma_compressible",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_compression_none_compressible",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_compression_none_uncompressible",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_compression_zlib_compressible",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_corrupted_repository",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_create_dry_run",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_create_json",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_create_no_cache_sync",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_create_pattern",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_create_pattern_exclude_folder_but_recurse",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_create_pattern_exclude_folder_no_recurse",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_create_pattern_file",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_create_pattern_intermediate_folders_first",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_create_pattern_root",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_create_read_special_broken_symlink",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_create_topical",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_create_without_root",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_debug_dump_archive",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_debug_dump_archive_items",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_debug_dump_manifest",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_debug_dump_repo_objs",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_debug_profile",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_debug_put_get_delete_obj",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_delete",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_delete_force",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_delete_repo",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_exclude_caches",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_exclude_keep_tagged",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_exclude_keep_tagged_deprecation",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_exclude_normalization",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_exclude_tagged",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_export_tar",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_export_tar_gz",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_export_tar_strip_components",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_export_tar_strip_components_links",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_extract_hardlinks",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_extract_include_exclude",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_extract_include_exclude_regex",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_extract_include_exclude_regex_from_file",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_extract_list_output",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_extract_pattern_opt",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_extract_progress",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_extract_with_pattern",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_file_status",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_file_status_excluded",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_info",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_info_json",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_init_nested_repositories",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_key_export_keyfile",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_key_export_paperkey",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_key_export_qr",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_key_export_repokey",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_key_import_errors",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_list_chunk_counts",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_list_format",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_list_hash",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_list_json",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_list_json_args",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_list_prefix",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_list_repository_format",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_list_size",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_log_json",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_overwrite",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_path_normalization",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_progress_off",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_progress_on",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_prune_repository",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_prune_repository_glob",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_prune_repository_prefix",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_prune_repository_save_space",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_recreate_basic",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_recreate_dry_run",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_recreate_exclude_caches",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_recreate_exclude_keep_tagged",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_recreate_exclude_tagged",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_recreate_list_output",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_recreate_rechunkify",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_recreate_recompress",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_recreate_skips_nothing_to_do",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_recreate_subtree_hardlinks",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_recreate_target",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_recreate_target_rc",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_rename",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_repeated_files",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_repository_move",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_repository_swap_detection",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_repository_swap_detection2",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_repository_swap_detection2_no_cache",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_repository_swap_detection_no_cache",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_repository_swap_detection_repokey_blank_passphrase",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_security_dir_compat",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_sparse_file",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_strip_components",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_strip_components_links",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_symlink_extract",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_umask",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_unix_socket",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_unknown_feature_on_cache_sync",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_unknown_feature_on_change_passphrase",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_unknown_feature_on_create",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_unknown_feature_on_delete",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_unknown_feature_on_read",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_unknown_feature_on_rename",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_unknown_mandatory_feature_in_cache",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_unusual_filenames",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_with_lock",
"src/borg/testsuite/archiver.py::ArchiverCheckTestCase::test_attic013_acl_bug",
"src/borg/testsuite/archiver.py::ArchiverCheckTestCase::test_check_usage",
"src/borg/testsuite/archiver.py::ArchiverCheckTestCase::test_corrupted_manifest",
"src/borg/testsuite/archiver.py::ArchiverCheckTestCase::test_empty_repository",
"src/borg/testsuite/archiver.py::ArchiverCheckTestCase::test_extra_chunks",
"src/borg/testsuite/archiver.py::ArchiverCheckTestCase::test_manifest_rebuild_corrupted_chunk",
"src/borg/testsuite/archiver.py::ArchiverCheckTestCase::test_manifest_rebuild_duplicate_archive",
"src/borg/testsuite/archiver.py::ArchiverCheckTestCase::test_missing_archive_item_chunk",
"src/borg/testsuite/archiver.py::ArchiverCheckTestCase::test_missing_archive_metadata",
"src/borg/testsuite/archiver.py::ArchiverCheckTestCase::test_missing_file_chunk",
"src/borg/testsuite/archiver.py::ArchiverCheckTestCase::test_missing_manifest",
"src/borg/testsuite/archiver.py::ArchiverCheckTestCase::test_verify_data",
"src/borg/testsuite/archiver.py::ArchiverCheckTestCase::test_verify_data_unencrypted",
"src/borg/testsuite/archiver.py::ManifestAuthenticationTest::test_disable",
"src/borg/testsuite/archiver.py::ManifestAuthenticationTest::test_disable2",
"src/borg/testsuite/archiver.py::ManifestAuthenticationTest::test_fresh_init_tam_required",
"src/borg/testsuite/archiver.py::ManifestAuthenticationTest::test_not_required",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_aes_counter_uniqueness_keyfile",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_aes_counter_uniqueness_passphrase",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_atime",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_bad_filters",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_basic_functionality",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_break_lock",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_change_passphrase",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_check_cache",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_comment",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_common_options",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_compression_auto_compressible",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_compression_auto_uncompressible",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_compression_lz4_compressible",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_compression_lz4_uncompressible",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_compression_lzma_compressible",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_compression_lzma_uncompressible",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_compression_none_compressible",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_compression_none_uncompressible",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_compression_zlib_compressible",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_compression_zlib_uncompressible",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_corrupted_repository",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_create_dry_run",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_create_json",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_create_no_cache_sync",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_create_pattern",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_create_pattern_exclude_folder_but_recurse",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_create_pattern_exclude_folder_no_recurse",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_create_pattern_file",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_create_pattern_intermediate_folders_first",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_create_pattern_root",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_create_read_special_broken_symlink",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_create_topical",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_create_without_root",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_debug_dump_archive",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_debug_dump_archive_items",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_debug_dump_manifest",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_debug_dump_repo_objs",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_debug_profile",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_delete",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_delete_double_force",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_delete_force",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_delete_repo",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_exclude_caches",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_exclude_keep_tagged",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_exclude_keep_tagged_deprecation",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_exclude_normalization",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_exclude_tagged",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_export_tar",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_export_tar_gz",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_export_tar_strip_components",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_export_tar_strip_components_links",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_extract_hardlinks",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_extract_include_exclude",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_extract_include_exclude_regex",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_extract_include_exclude_regex_from_file",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_extract_list_output",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_extract_pattern_opt",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_extract_progress",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_extract_with_pattern",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_file_status",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_file_status_excluded",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_info",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_info_json",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_init_interrupt",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_init_nested_repositories",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_key_export_keyfile",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_key_export_paperkey",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_key_export_qr",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_key_export_repokey",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_key_import_errors",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_list_chunk_counts",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_list_format",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_list_hash",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_list_json",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_list_json_args",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_list_prefix",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_list_repository_format",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_list_size",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_log_json",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_overwrite",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_path_normalization",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_progress_off",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_progress_on",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_prune_repository",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_prune_repository_glob",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_prune_repository_prefix",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_prune_repository_save_space",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_recreate_basic",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_recreate_dry_run",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_recreate_exclude_caches",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_recreate_exclude_keep_tagged",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_recreate_exclude_tagged",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_recreate_list_output",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_recreate_rechunkify",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_recreate_recompress",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_recreate_skips_nothing_to_do",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_recreate_subtree_hardlinks",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_recreate_target",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_recreate_target_rc",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_remote_repo_restrict_to_path",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_remote_repo_restrict_to_repository",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_rename",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_repeated_files",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_repository_move",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_repository_swap_detection",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_repository_swap_detection2",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_repository_swap_detection2_no_cache",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_repository_swap_detection_no_cache",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_repository_swap_detection_repokey_blank_passphrase",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_security_dir_compat",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_sparse_file",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_strip_components",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_strip_components_doesnt_leak",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_strip_components_links",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_symlink_extract",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_umask",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_unix_socket",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_unknown_feature_on_cache_sync",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_unknown_feature_on_change_passphrase",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_unknown_feature_on_create",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_unknown_feature_on_delete",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_unknown_feature_on_read",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_unknown_feature_on_rename",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_unknown_mandatory_feature_in_cache",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_unknown_unencrypted",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_unusual_filenames",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_with_lock",
"src/borg/testsuite/archiver.py::ArchiverCorruptionTestCase::test_cache_chunks",
"src/borg/testsuite/archiver.py::ArchiverCorruptionTestCase::test_cache_files",
"src/borg/testsuite/archiver.py::ArchiverCorruptionTestCase::test_chunks_archive",
"src/borg/testsuite/archiver.py::ArchiverCorruptionTestCase::test_old_version_interfered",
"src/borg/testsuite/archiver.py::DiffArchiverTestCase::test_basic_functionality",
"src/borg/testsuite/archiver.py::DiffArchiverTestCase::test_sort_option",
"src/borg/testsuite/cache.py::TestKey::test_keyfile",
"src/borg/testsuite/cache.py::TestKey::test_keyfile_nonce_rollback_protection",
"src/borg/testsuite/cache.py::TestKey::test_keyfile_kfenv",
"src/borg/testsuite/cache.py::TestKey::test_keyfile2",
"src/borg/testsuite/cache.py::TestKey::test_keyfile2_kfenv",
"src/borg/testsuite/cache.py::TestKey::test_keyfile_blake2",
"src/borg/testsuite/cache.py::TestKey::test_passphrase",
"src/borg/testsuite/cache.py::TestKey::test_decrypt_integrity",
"src/borg/testsuite/cache.py::TestKey::test_roundtrip[KeyfileKey]",
"src/borg/testsuite/cache.py::TestKey::test_roundtrip[RepoKey]",
"src/borg/testsuite/cache.py::TestKey::test_roundtrip[Blake2KeyfileKey]",
"src/borg/testsuite/cache.py::TestKey::test_roundtrip[Blake2RepoKey]",
"src/borg/testsuite/cache.py::TestKey::test_decrypt_decompress[KeyfileKey]",
"src/borg/testsuite/cache.py::TestKey::test_decrypt_decompress[RepoKey]",
"src/borg/testsuite/cache.py::TestKey::test_decrypt_decompress[Blake2KeyfileKey]",
"src/borg/testsuite/cache.py::TestKey::test_decrypt_decompress[Blake2RepoKey]",
"src/borg/testsuite/item.py::test_item_file_size"
]
| []
| []
| BSD License | 1,358 | [
"src/borg/item.pyx",
"src/borg/helpers.py",
"docs/internals/frontends.rst",
"src/borg/repository.py",
"src/borg/archiver.py",
"src/borg/hashindex.pyx",
"src/borg/cache.py"
]
| [
"src/borg/item.pyx",
"src/borg/helpers.py",
"docs/internals/frontends.rst",
"src/borg/repository.py",
"src/borg/archiver.py",
"src/borg/hashindex.pyx",
"src/borg/cache.py"
]
|
d3dave__cough-3 | b2a4ebe62f953e35beff00a9a7d7426cc20f6350 | 2017-06-10 20:39:05 | b2a4ebe62f953e35beff00a9a7d7426cc20f6350 | diff --git a/cough/file.py b/cough/file.py
index 84338e7..d96aa8a 100644
--- a/cough/file.py
+++ b/cough/file.py
@@ -125,25 +125,17 @@ class ObjectModule:
return bytes(body_buffer)
def dump_sections(self):
+ data_buf_offset = FileHeader.struct.size + 40 * len(self.sections)
+ hdrs_buf = bytearray()
data_buf = bytearray()
- sections_offsets = []
- reloc_offsets = []
for sec in self.sections:
if sec.data:
- sections_offsets.append(len(data_buf))
+ sec.pointer_to_raw_data = data_buf_offset + len(data_buf)
data_buf += sec.data
if sec.relocations:
- reloc_offsets.append(len(data_buf))
+ sec.pointer_to_relocations = data_buf_offset + len(data_buf)
for reloc in sec.relocations:
data_buf += reloc.pack()
+ hdrs_buf += sec.get_header()
- sections_buffer_offset = FileHeader.struct.size + 40 * len(self.sections)
- hdrs_and_data_buf = bytearray()
- for i, sec in enumerate(self.sections):
- if sec.data:
- sec.pointer_to_raw_data = sections_buffer_offset + sections_offsets[i]
- if sec.relocations:
- sec.pointer_to_relocations = sections_buffer_offset + reloc_offsets[i]
- hdrs_and_data_buf += sec.get_header()
- hdrs_and_data_buf += data_buf
- return bytes(hdrs_and_data_buf)
+ return bytes(hdrs_buf + data_buf)
| Exception when adding an uninitialized section before an initialized section
file.py:133 is inconsistent with file.py:144 | d3dave/cough | diff --git a/tests/test_coff.py b/tests/test_coff.py
index 8ca9ce4..2193214 100644
--- a/tests/test_coff.py
+++ b/tests/test_coff.py
@@ -63,3 +63,16 @@ def test_reloc():
subprocess.run(['PowerShell.exe', BUILD_SCRIPT, file.name, '/out:' + '"' + exe_path + '"'], check=True)
proc = subprocess.run([exe_path], stdout=subprocess.PIPE, check=True)
assert proc.stdout == b'A'
+
+
+def test_uninit_before_init():
+ module = cough.ObjectModule()
+
+ sec_uninit = cough.Section(b'uninit', cough.SectionFlags.CNT_UNINITIALIZED_DATA)
+ sec_uninit.size_of_raw_data = 0x400
+ module.sections.append(sec_uninit)
+
+ sec_init = cough.Section(b'init', 0, b'\xAA\xBB\xCC\xDD')
+ module.sections.append(sec_init)
+
+ assert module.get_buffer()
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 3,
"test_score": 0
},
"num_modified_files": 1
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | -e git+https://github.com/d3dave/cough.git@b2a4ebe62f953e35beff00a9a7d7426cc20f6350#egg=cough
exceptiongroup==1.2.2
iniconfig==2.1.0
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
tomli==2.2.1
| name: cough
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- exceptiongroup==1.2.2
- iniconfig==2.1.0
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- tomli==2.2.1
prefix: /opt/conda/envs/cough
| [
"tests/test_coff.py::test_uninit_before_init"
]
| [
"tests/test_coff.py::test_coff",
"tests/test_coff.py::test_reloc"
]
| []
| []
| MIT License | 1,359 | [
"cough/file.py"
]
| [
"cough/file.py"
]
|
|
tornadoweb__tornado-2082 | 3163c606f06065b296df70bef5390afa548825d8 | 2017-06-10 22:11:08 | 03f13800e854a6fc9e6efa2168e694d9599348bd | diff --git a/docs/gen.rst b/docs/gen.rst
index 52c7f55d..cf42269f 100644
--- a/docs/gen.rst
+++ b/docs/gen.rst
@@ -21,7 +21,6 @@
.. autoexception:: Return
.. autofunction:: with_timeout
- .. autoexception:: TimeoutError
.. autofunction:: sleep
diff --git a/docs/releases/v4.2.0.rst b/docs/releases/v4.2.0.rst
index d9484379..93493ee1 100644
--- a/docs/releases/v4.2.0.rst
+++ b/docs/releases/v4.2.0.rst
@@ -49,7 +49,7 @@ be unlocked *without* acquiring it. This encouraged unorthodox patterns; in
Tornado, just use `~.Semaphore.acquire`.
Toro's ``Event.wait`` raised a ``Timeout`` exception after a timeout. In
-Tornado, `.Event.wait` raises `tornado.gen.TimeoutError`.
+Tornado, `.Event.wait` raises ``tornado.gen.TimeoutError``.
Toro's ``Condition.wait`` also raised ``Timeout``, but in Tornado, the `.Future`
returned by `.Condition.wait` resolves to False after a timeout::
diff --git a/tornado/gen.py b/tornado/gen.py
index 2e9e9a6a..5558f986 100644
--- a/tornado/gen.py
+++ b/tornado/gen.py
@@ -89,7 +89,7 @@ from tornado.concurrent import Future, TracebackFuture, is_future, chain_future
from tornado.ioloop import IOLoop
from tornado.log import app_log
from tornado import stack_context
-from tornado.util import PY3, raise_exc_info
+from tornado.util import PY3, raise_exc_info, TimeoutError
try:
try:
@@ -154,10 +154,6 @@ class ReturnValueIgnoredError(Exception):
pass
-class TimeoutError(Exception):
- """Exception raised by ``with_timeout``."""
-
-
def _value_from_stopiteration(e):
try:
# StopIteration has a value attribute beginning in py33.
@@ -871,10 +867,10 @@ def maybe_future(x):
def with_timeout(timeout, future, quiet_exceptions=()):
"""Wraps a `.Future` (or other yieldable object) in a timeout.
- Raises `TimeoutError` if the input future does not complete before
- ``timeout``, which may be specified in any form allowed by
- `.IOLoop.add_timeout` (i.e. a `datetime.timedelta` or an absolute time
- relative to `.IOLoop.time`)
+ Raises `tornado.util.TimeoutError` if the input future does not
+ complete before ``timeout``, which may be specified in any form
+ allowed by `.IOLoop.add_timeout` (i.e. a `datetime.timedelta` or
+ an absolute time relative to `.IOLoop.time`)
If the wrapped `.Future` fails after it has timed out, the exception
will be logged unless it is of a type contained in ``quiet_exceptions``
diff --git a/tornado/ioloop.py b/tornado/ioloop.py
index 0527e922..73d0cbdb 100644
--- a/tornado/ioloop.py
+++ b/tornado/ioloop.py
@@ -48,7 +48,7 @@ from tornado.concurrent import TracebackFuture, is_future
from tornado.log import app_log, gen_log
from tornado.platform.auto import set_close_exec, Waker
from tornado import stack_context
-from tornado.util import PY3, Configurable, errno_from_exception, timedelta_to_seconds
+from tornado.util import PY3, Configurable, errno_from_exception, timedelta_to_seconds, TimeoutError
try:
import signal
@@ -70,10 +70,6 @@ except ImportError:
_POLL_TIMEOUT = 3600.0
-class TimeoutError(Exception):
- pass
-
-
class IOLoop(Configurable):
"""A level-triggered I/O loop.
@@ -457,7 +453,7 @@ class IOLoop(Configurable):
The keyword-only argument ``timeout`` may be used to set
a maximum duration for the function. If the timeout expires,
- a `TimeoutError` is raised.
+ a `tornado.util.TimeoutError` is raised.
This method is useful in conjunction with `tornado.gen.coroutine`
to allow asynchronous calls in a ``main()`` function::
diff --git a/tornado/locks.py b/tornado/locks.py
index 4f9ecf6d..c883aa23 100644
--- a/tornado/locks.py
+++ b/tornado/locks.py
@@ -99,7 +99,7 @@ class Condition(_TimeoutGarbageCollector):
# Wait up to 1 second.
yield condition.wait(timeout=datetime.timedelta(seconds=1))
- The method raises `tornado.gen.TimeoutError` if there's no notification
+ The method raises `tornado.util.TimeoutError` if there's no notification
before the deadline.
"""
@@ -220,7 +220,7 @@ class Event(object):
def wait(self, timeout=None):
"""Block until the internal flag is true.
- Returns a Future, which raises `tornado.gen.TimeoutError` after a
+ Returns a Future, which raises `tornado.util.TimeoutError` after a
timeout.
"""
if timeout is None:
@@ -480,7 +480,7 @@ class Lock(object):
def acquire(self, timeout=None):
"""Attempt to lock. Returns a Future.
- Returns a Future, which raises `tornado.gen.TimeoutError` after a
+ Returns a Future, which raises `tornado.util.TimeoutError` after a
timeout.
"""
return self._block.acquire(timeout)
diff --git a/tornado/queues.py b/tornado/queues.py
index 00fa4e22..73589611 100644
--- a/tornado/queues.py
+++ b/tornado/queues.py
@@ -166,7 +166,7 @@ class Queue(object):
def put(self, item, timeout=None):
"""Put an item into the queue, perhaps waiting until there is room.
- Returns a Future, which raises `tornado.gen.TimeoutError` after a
+ Returns a Future, which raises `tornado.util.TimeoutError` after a
timeout.
``timeout`` may be a number denoting a time (on the same
@@ -204,7 +204,7 @@ class Queue(object):
"""Remove and return an item from the queue.
Returns a Future which resolves once an item is available, or raises
- `tornado.gen.TimeoutError` after a timeout.
+ `tornado.util.TimeoutError` after a timeout.
``timeout`` may be a number denoting a time (on the same
scale as `tornado.ioloop.IOLoop.time`, normally `time.time`), or a
@@ -258,7 +258,7 @@ class Queue(object):
def join(self, timeout=None):
"""Block until all items in the queue are processed.
- Returns a Future, which raises `tornado.gen.TimeoutError` after a
+ Returns a Future, which raises `tornado.util.TimeoutError` after a
timeout.
"""
return self._finished.wait(timeout)
diff --git a/tornado/util.py b/tornado/util.py
index bfd80beb..aee718ae 100644
--- a/tornado/util.py
+++ b/tornado/util.py
@@ -84,6 +84,16 @@ except ImportError:
is_finalizing = _get_emulated_is_finalizing()
+class TimeoutError(Exception):
+ """Exception raised by `.with_timeout` and `.IOLoop.run_sync`.
+
+ .. versionchanged:: 5.0:
+ Unified ``tornado.gen.TimeoutError`` and
+ ``tornado.ioloop.TimeoutError`` as ``tornado.util.TimeoutError``.
+ Both former names remain as aliases.
+ """
+
+
class ObjectDict(_ObjectDictBase):
"""Makes a dictionary behave like an object, with attribute-style access.
"""
| Too many TimeoutErrors
This would probably warrant unifying.
```
>>> ioloop.TimeoutError
tornado.ioloop.TimeoutError
>>> gen.TimeoutError
tornado.gen.TimeoutError
>>> gen.TimeoutError is ioloop.TimeoutError
False
```
| tornadoweb/tornado | diff --git a/tornado/test/import_test.py b/tornado/test/import_test.py
index 88d02e02..d8cf14a5 100644
--- a/tornado/test/import_test.py
+++ b/tornado/test/import_test.py
@@ -45,3 +45,9 @@ class ImportTest(unittest.TestCase):
pass
else:
import tornado.curl_httpclient
+
+ def test_import_aliases(self):
+ # Ensure we don't delete formerly-documented aliases accidentally.
+ import tornado.ioloop, tornado.gen, tornado.util
+ self.assertIs(tornado.ioloop.TimeoutError, tornado.util.TimeoutError)
+ self.assertIs(tornado.gen.TimeoutError, tornado.util.TimeoutError)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 0,
"test_score": 2
},
"num_modified_files": 7
} | 4.5 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"futures",
"mock",
"monotonic",
"trollius",
"sphinx",
"sphinx_rtd_theme",
"codecov",
"virtualenv",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.6",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.13
attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
Babel==2.11.0
certifi==2021.5.30
charset-normalizer==2.0.12
codecov==2.1.13
coverage==6.2
distlib==0.3.9
docutils==0.18.1
filelock==3.4.1
futures==2.2.0
idna==3.10
imagesize==1.4.1
importlib-metadata==4.8.3
importlib-resources==5.4.0
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
Jinja2==3.0.3
MarkupSafe==2.0.1
mock==5.2.0
monotonic==1.6
more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
platformdirs==2.4.0
pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work
py @ file:///opt/conda/conda-bld/py_1644396412707/work
Pygments==2.14.0
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
pytest==6.2.4
pytz==2025.2
requests==2.27.1
six==1.17.0
snowballstemmer==2.2.0
Sphinx==5.3.0
sphinx-rtd-theme==2.0.0
sphinxcontrib-applehelp==1.0.2
sphinxcontrib-devhelp==1.0.2
sphinxcontrib-htmlhelp==2.0.0
sphinxcontrib-jquery==4.1
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==1.0.3
sphinxcontrib-serializinghtml==1.1.5
toml @ file:///tmp/build/80754af9/toml_1616166611790/work
-e git+https://github.com/tornadoweb/tornado.git@3163c606f06065b296df70bef5390afa548825d8#egg=tornado
trollius==2.1.post2
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
urllib3==1.26.20
virtualenv==20.17.1
zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
| name: tornado
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=21.4.0=pyhd3eb1b0_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- importlib_metadata=4.8.1=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- more-itertools=8.12.0=pyhd3eb1b0_0
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pip=21.2.2=py36h06a4308_0
- pluggy=0.13.1=py36h06a4308_0
- py=1.11.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pytest=6.2.4=py36h06a4308_2
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- toml=0.10.2=pyhd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zipp=3.6.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.13
- babel==2.11.0
- charset-normalizer==2.0.12
- codecov==2.1.13
- coverage==6.2
- distlib==0.3.9
- docutils==0.18.1
- filelock==3.4.1
- futures==2.2.0
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==4.8.3
- importlib-resources==5.4.0
- jinja2==3.0.3
- markupsafe==2.0.1
- mock==5.2.0
- monotonic==1.6
- platformdirs==2.4.0
- pygments==2.14.0
- pytz==2025.2
- requests==2.27.1
- six==1.17.0
- snowballstemmer==2.2.0
- sphinx==5.3.0
- sphinx-rtd-theme==2.0.0
- sphinxcontrib-applehelp==1.0.2
- sphinxcontrib-devhelp==1.0.2
- sphinxcontrib-htmlhelp==2.0.0
- sphinxcontrib-jquery==4.1
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==1.0.3
- sphinxcontrib-serializinghtml==1.1.5
- trollius==2.1.post2
- urllib3==1.26.20
- virtualenv==20.17.1
prefix: /opt/conda/envs/tornado
| [
"tornado/test/import_test.py::ImportTest::test_import_aliases"
]
| []
| [
"tornado/test/import_test.py::ImportTest::test_import_everything",
"tornado/test/import_test.py::ImportTest::test_import_pycurl"
]
| []
| Apache License 2.0 | 1,360 | [
"tornado/gen.py",
"docs/releases/v4.2.0.rst",
"docs/gen.rst",
"tornado/locks.py",
"tornado/queues.py",
"tornado/ioloop.py",
"tornado/util.py"
]
| [
"tornado/gen.py",
"docs/releases/v4.2.0.rst",
"docs/gen.rst",
"tornado/locks.py",
"tornado/queues.py",
"tornado/ioloop.py",
"tornado/util.py"
]
|
|
jddeal__python-cmr-52 | 089b0d6c3c6f002ef9fb412e7dc44c9053ae5351 | 2017-06-11 23:31:39 | 089b0d6c3c6f002ef9fb412e7dc44c9053ae5351 | diff --git a/README.rst b/README.rst
index 5a0e177..91f2785 100644
--- a/README.rst
+++ b/README.rst
@@ -140,3 +140,31 @@ To inspect and retreive results from the API, the following methods are availabl
# retrieve all the granules possible for the query
>>> granules = api.get_all() # this is a shortcut for api.get(api.hits())
+
+
+By default the responses will return as json and be accessible as a list of python dictionaries.
+Other formats can be specified before making the request:
+
+::
+
+ >>> granules = api.format("echo10").get(100)
+
+The following formats are supported for both granule and collection queries:
+
+* json (default)
+* xml
+* echo10
+* iso
+* iso19115
+* csv
+* atom
+* kml
+* native
+
+Collection queries also support the following formats:
+
+* dif
+* dif10
+* opendata
+* umm_json
+* umm_json_vX_Y (ex: umm_json_v1_9)
diff --git a/cmr/queries.py b/cmr/queries.py
index ca2a024..bfc187c 100644
--- a/cmr/queries.py
+++ b/cmr/queries.py
@@ -8,6 +8,7 @@ except ImportError:
from urllib import pathname2url as quote
from datetime import datetime
+from re import search
from requests import get, exceptions
CMR_OPS = "https://cmr.earthdata.nasa.gov/search/"
@@ -21,6 +22,11 @@ class Query(object):
_base_url = ""
_route = ""
+ _format = "json"
+ _valid_formats_regex = [
+ "json", "xml", "echo10", "iso", "iso19115",
+ "csv", "atom", "kml", "native"
+ ]
def __init__(self, route, mode=CMR_OPS):
self.params = {}
@@ -50,11 +56,15 @@ class Query(object):
except exceptions.HTTPError as ex:
raise RuntimeError(ex.response.text)
- latest = response.json()['feed']['entry']
+ if self._format == "json":
+ latest = response.json()['feed']['entry']
+ else:
+ latest = response.text
+
if len(latest) == 0:
break
- results = results + latest
+ results.append(latest)
page += 1
return results
@@ -89,6 +99,26 @@ class Query(object):
return self.get(self.hits())
+ def format(self, output_format="json"):
+ """
+ Sets the format for the returned results.
+
+ :param output_format: Preferred output format
+ :returns: Query instance
+ """
+
+ if not output_format:
+ output_format = "json"
+
+ # check requested format against the valid format regex's
+ for _format in self._valid_formats_regex:
+ if search(_format, output_format):
+ self._format = output_format
+ return self
+
+ # if we got here, we didn't find a matching format
+ raise ValueError("Unsupported format '{}'".format(output_format))
+
def online_only(self, online_only):
"""
Only match granules that are listed online and not available for download.
@@ -367,7 +397,12 @@ class Query(object):
options_as_string = "&".join(formatted_options)
- return "{}?{}&{}".format(self._base_url, params_as_string, options_as_string)
+ return "{}.{}?{}&{}".format(
+ self._base_url,
+ self._format,
+ params_as_string,
+ options_as_string
+ )
def _valid_state(self):
"""
@@ -398,7 +433,7 @@ class GranuleQuery(Query):
"""
def __init__(self, mode=CMR_OPS):
- Query.__init__(self, "granules.json", mode)
+ Query.__init__(self, "granules", mode)
def orbit_number(self, orbit1, orbit2=None):
""""
@@ -524,7 +559,11 @@ class CollectionQuery(Query):
"""
def __init__(self, mode=CMR_OPS):
- Query.__init__(self, "collections.json", mode)
+ Query.__init__(self, "collections", mode)
+
+ self._valid_formats_regex.extend([
+ "dif", "dif10", "opendata", "umm_json", "umm_json_v[0-9]_[0-9]"
+ ])
def archive_center(self, center):
"""
| Support more result formats
Adding in support for the other MIME Types wouldn't be a bad idea or difficult. Both granule and collection queries support the following:
- json (default)
- xml
- echo10
- iso
- iso19115
- dif
- dif10
- csv
- atom
- kml
- native
Collection queries also support these formats:
- opendata
- umm_json
- umm_json_vX_Y (X/Y are numbers the user provides in the string) | jddeal/python-cmr | diff --git a/tests/test_collection.py b/tests/test_collection.py
index 1272c24..3241e79 100644
--- a/tests/test_collection.py
+++ b/tests/test_collection.py
@@ -17,3 +17,22 @@ class TestCollectionClass(unittest.TestCase):
self.assertIn("keyword", query.params)
self.assertEqual(query.params["keyword"], "AST_*")
+
+ def test_valid_formats(self):
+ query = CollectionQuery()
+ formats = [
+ "json", "xml", "echo10", "iso", "iso19115",
+ "csv", "atom", "kml", "native", "dif", "dif10",
+ "opendata", "umm_json", "umm_json_v1_1" "umm_json_v1_9"]
+
+ for _format in formats:
+ query.format(_format)
+ self.assertEqual(query._format, _format)
+
+ def test_invalid_format(self):
+ query = CollectionQuery()
+
+ with self.assertRaises(ValueError):
+ query.format("invalid")
+ query.format("jsonn")
+ query.format("iso19116")
diff --git a/tests/test_granule.py b/tests/test_granule.py
index 6c31372..11b481b 100644
--- a/tests/test_granule.py
+++ b/tests/test_granule.py
@@ -349,3 +349,19 @@ class TestGranuleClass(unittest.TestCase):
def test_invalid_mode_constructor(self):
with self.assertRaises(ValueError):
query = GranuleQuery(None)
+
+ def test_valid_formats(self):
+ query = GranuleQuery()
+ formats = ["json", "xml", "echo10", "iso", "iso19115", "csv", "atom", "kml", "native"]
+
+ for _format in formats:
+ query.format(_format)
+ self.assertEqual(query._format, _format)
+
+ def test_invalid_format(self):
+ query = GranuleQuery()
+
+ with self.assertRaises(ValueError):
+ query.format("invalid")
+ query.format("jsonn")
+ query.format("iso19116")
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 2
} | 0.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.6",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | appdirs==1.4.0
astroid==1.4.9
attrs==22.2.0
certifi==2021.5.30
importlib-metadata==4.8.3
iniconfig==1.1.1
isort==4.2.5
lazy-object-proxy==1.2.2
mccabe==0.5.3
packaging==16.8
pluggy==1.0.0
py==1.11.0
pylint==1.6.4
pyparsing==2.1.10
pytest==7.0.1
-e git+https://github.com/jddeal/python-cmr.git@089b0d6c3c6f002ef9fb412e7dc44c9053ae5351#egg=python_cmr
requests==2.12.4
six==1.10.0
tomli==1.2.3
typing_extensions==4.1.1
wrapt==1.10.8
zipp==3.6.0
| name: python-cmr
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- appdirs==1.4.0
- astroid==1.4.9
- attrs==22.2.0
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- isort==4.2.5
- lazy-object-proxy==1.2.2
- mccabe==0.5.3
- packaging==16.8
- pluggy==1.0.0
- py==1.11.0
- pylint==1.6.4
- pyparsing==2.1.10
- pytest==7.0.1
- requests==2.12.4
- six==1.10.0
- tomli==1.2.3
- typing-extensions==4.1.1
- wrapt==1.10.8
- zipp==3.6.0
prefix: /opt/conda/envs/python-cmr
| [
"tests/test_collection.py::TestCollectionClass::test_invalid_format",
"tests/test_collection.py::TestCollectionClass::test_valid_formats",
"tests/test_granule.py::TestGranuleClass::test_invalid_format",
"tests/test_granule.py::TestGranuleClass::test_valid_formats"
]
| []
| [
"tests/test_collection.py::TestCollectionClass::test_archive_center",
"tests/test_collection.py::TestCollectionClass::test_keyword",
"tests/test_granule.py::TestGranuleClass::test_bounding_box_invalid_set",
"tests/test_granule.py::TestGranuleClass::test_bounding_box_set",
"tests/test_granule.py::TestGranuleClass::test_cloud_cover_all",
"tests/test_granule.py::TestGranuleClass::test_cloud_cover_max_only",
"tests/test_granule.py::TestGranuleClass::test_cloud_cover_min_only",
"tests/test_granule.py::TestGranuleClass::test_cloud_cover_none",
"tests/test_granule.py::TestGranuleClass::test_day_night_flag_day_set",
"tests/test_granule.py::TestGranuleClass::test_day_night_flag_invalid_set",
"tests/test_granule.py::TestGranuleClass::test_day_night_flag_invalid_type_set",
"tests/test_granule.py::TestGranuleClass::test_day_night_flag_night_set",
"tests/test_granule.py::TestGranuleClass::test_day_night_flag_unspecified_set",
"tests/test_granule.py::TestGranuleClass::test_downloadable_invalid",
"tests/test_granule.py::TestGranuleClass::test_downloadable_set",
"tests/test_granule.py::TestGranuleClass::test_empty_granule_ur",
"tests/test_granule.py::TestGranuleClass::test_empty_instrument",
"tests/test_granule.py::TestGranuleClass::test_empty_platform",
"tests/test_granule.py::TestGranuleClass::test_entry_title_set",
"tests/test_granule.py::TestGranuleClass::test_granule_ur",
"tests/test_granule.py::TestGranuleClass::test_instrument",
"tests/test_granule.py::TestGranuleClass::test_invalid_mode",
"tests/test_granule.py::TestGranuleClass::test_invalid_mode_constructor",
"tests/test_granule.py::TestGranuleClass::test_invalid_spatial_state",
"tests/test_granule.py::TestGranuleClass::test_line_invalid_set",
"tests/test_granule.py::TestGranuleClass::test_line_set",
"tests/test_granule.py::TestGranuleClass::test_online_only_invalid",
"tests/test_granule.py::TestGranuleClass::test_online_only_set",
"tests/test_granule.py::TestGranuleClass::test_orbit_number_encode",
"tests/test_granule.py::TestGranuleClass::test_orbit_number_set",
"tests/test_granule.py::TestGranuleClass::test_platform",
"tests/test_granule.py::TestGranuleClass::test_point_invalid_set",
"tests/test_granule.py::TestGranuleClass::test_point_set",
"tests/test_granule.py::TestGranuleClass::test_polygon_invalid_set",
"tests/test_granule.py::TestGranuleClass::test_polygon_set",
"tests/test_granule.py::TestGranuleClass::test_short_name",
"tests/test_granule.py::TestGranuleClass::test_temporal_invalid_date_order",
"tests/test_granule.py::TestGranuleClass::test_temporal_invalid_strings",
"tests/test_granule.py::TestGranuleClass::test_temporal_invalid_types",
"tests/test_granule.py::TestGranuleClass::test_temporal_option_set",
"tests/test_granule.py::TestGranuleClass::test_temporal_set",
"tests/test_granule.py::TestGranuleClass::test_valid_spatial_state",
"tests/test_granule.py::TestGranuleClass::test_version"
]
| []
| MIT License | 1,361 | [
"README.rst",
"cmr/queries.py"
]
| [
"README.rst",
"cmr/queries.py"
]
|
|
jddeal__python-cmr-53 | 77d9eeb9310a34992cd7d0742f7b1903151737b2 | 2017-06-12 00:30:24 | 77d9eeb9310a34992cd7d0742f7b1903151737b2 | diff --git a/README.rst b/README.rst
index 91f2785..f2fb6d5 100644
--- a/README.rst
+++ b/README.rst
@@ -125,6 +125,22 @@ Collection searches support these methods (in addition to the shared methods abo
>>> api.keyword("M*D09")
+As an alternative to chaining methods together to set the parameters of your query, a
+method exists to allow you to pass your parameters as keyword arguments:
+
+::
+
+ # search for AST_L1T version 003 granules at latitude 42, longitude -100
+ >>> api.parameters(
+ short_name="AST_L1T",
+ version="003",
+ point=(-100, 42)
+ )
+
+Note: the kwarg key should match the name of a method from the above examples, and the value
+should be a tuple if it's a parameter that requires multiple values.
+
+
To inspect and retreive results from the API, the following methods are available:
::
diff --git a/cmr/queries.py b/cmr/queries.py
index bfc187c..7f56c22 100644
--- a/cmr/queries.py
+++ b/cmr/queries.py
@@ -8,6 +8,7 @@ except ImportError:
from urllib import pathname2url as quote
from datetime import datetime
+from inspect import getmembers, ismethod
from re import search
from requests import get, exceptions
@@ -99,6 +100,35 @@ class Query(object):
return self.get(self.hits())
+ def parameters(self, **kwargs):
+ """
+ Provide query parameters as keyword arguments. The keyword needs to match the name
+ of the method, and the value should either be the value or a tuple of values.
+
+ Example: parameters(short_name="AST_L1T", point=(42.5, -101.25))
+
+ :returns: Query instance
+ """
+
+ # build a dictionary of method names and their reference
+ methods = {}
+ for name, func in getmembers(self, predicate=ismethod):
+ methods[name] = func
+
+ for key, val in kwargs.items():
+
+ # verify the key matches one of our methods
+ if key not in methods:
+ raise ValueError("Unknown key {}".format(key))
+
+ # call the method
+ if isinstance(val, tuple):
+ methods[key](*val)
+ else:
+ methods[key](val)
+
+ return self
+
def format(self, output_format="json"):
"""
Sets the format for the returned results.
| Param() method that accepts keyword args
This might be kind of a neat alternative to using the different methods directly. For example:
```python
# current way
api.short_name("AST_L1T").version("006").point(-100, 42).query()
# param() way
api.params(
short_name="AST_L1T",
version="006",
point=(-100, 42)
).query()
```
And then in params() the class could inspect itself for those methods and delegate the parameters to the appropriate methods that we've already made. I'm thinking about this as an alternative to how we do it, not a replacement. | jddeal/python-cmr | diff --git a/tests/test_granule.py b/tests/test_granule.py
index 11b481b..338ca10 100644
--- a/tests/test_granule.py
+++ b/tests/test_granule.py
@@ -350,6 +350,22 @@ class TestGranuleClass(unittest.TestCase):
with self.assertRaises(ValueError):
query = GranuleQuery(None)
+ def test_valid_parameters(self):
+ query = GranuleQuery()
+
+ query.parameters(short_name="AST_L1T", version="003", point=(-100, 42))
+
+ self.assertEqual(query.params["short_name"], "AST_L1T")
+ self.assertEqual(query.params["version"], "003")
+ self.assertEqual(query.params["point"], "-100.0,42.0")
+
+ def test_invalid_parameters(self):
+ query = GranuleQuery()
+
+ with self.assertRaises(ValueError):
+ query.parameters(fake=123)
+ query.parameters(point=(-100, "badvalue"))
+
def test_valid_formats(self):
query = GranuleQuery()
formats = ["json", "xml", "echo10", "iso", "iso19115", "csv", "atom", "kml", "native"]
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 0,
"test_score": 2
},
"num_modified_files": 2
} | 0.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.6",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | appdirs==1.4.0
astroid==1.4.9
attrs==22.2.0
certifi==2021.5.30
coverage==6.2
importlib-metadata==4.8.3
iniconfig==1.1.1
isort==4.2.5
lazy-object-proxy==1.2.2
mccabe==0.5.3
packaging==16.8
pluggy==1.0.0
py==1.11.0
pylint==1.6.4
pyparsing==2.1.10
pytest==7.0.1
pytest-cov==4.0.0
-e git+https://github.com/jddeal/python-cmr.git@77d9eeb9310a34992cd7d0742f7b1903151737b2#egg=python_cmr
requests==2.12.4
six==1.10.0
tomli==1.2.3
typing_extensions==4.1.1
wrapt==1.10.8
zipp==3.6.0
| name: python-cmr
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- appdirs==1.4.0
- astroid==1.4.9
- attrs==22.2.0
- coverage==6.2
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- isort==4.2.5
- lazy-object-proxy==1.2.2
- mccabe==0.5.3
- packaging==16.8
- pluggy==1.0.0
- py==1.11.0
- pylint==1.6.4
- pyparsing==2.1.10
- pytest==7.0.1
- pytest-cov==4.0.0
- requests==2.12.4
- six==1.10.0
- tomli==1.2.3
- typing-extensions==4.1.1
- wrapt==1.10.8
- zipp==3.6.0
prefix: /opt/conda/envs/python-cmr
| [
"tests/test_granule.py::TestGranuleClass::test_invalid_parameters",
"tests/test_granule.py::TestGranuleClass::test_valid_parameters"
]
| []
| [
"tests/test_granule.py::TestGranuleClass::test_bounding_box_invalid_set",
"tests/test_granule.py::TestGranuleClass::test_bounding_box_set",
"tests/test_granule.py::TestGranuleClass::test_cloud_cover_all",
"tests/test_granule.py::TestGranuleClass::test_cloud_cover_max_only",
"tests/test_granule.py::TestGranuleClass::test_cloud_cover_min_only",
"tests/test_granule.py::TestGranuleClass::test_cloud_cover_none",
"tests/test_granule.py::TestGranuleClass::test_day_night_flag_day_set",
"tests/test_granule.py::TestGranuleClass::test_day_night_flag_invalid_set",
"tests/test_granule.py::TestGranuleClass::test_day_night_flag_invalid_type_set",
"tests/test_granule.py::TestGranuleClass::test_day_night_flag_night_set",
"tests/test_granule.py::TestGranuleClass::test_day_night_flag_unspecified_set",
"tests/test_granule.py::TestGranuleClass::test_downloadable_invalid",
"tests/test_granule.py::TestGranuleClass::test_downloadable_set",
"tests/test_granule.py::TestGranuleClass::test_empty_granule_ur",
"tests/test_granule.py::TestGranuleClass::test_empty_instrument",
"tests/test_granule.py::TestGranuleClass::test_empty_platform",
"tests/test_granule.py::TestGranuleClass::test_entry_title_set",
"tests/test_granule.py::TestGranuleClass::test_granule_ur",
"tests/test_granule.py::TestGranuleClass::test_instrument",
"tests/test_granule.py::TestGranuleClass::test_invalid_format",
"tests/test_granule.py::TestGranuleClass::test_invalid_mode",
"tests/test_granule.py::TestGranuleClass::test_invalid_mode_constructor",
"tests/test_granule.py::TestGranuleClass::test_invalid_spatial_state",
"tests/test_granule.py::TestGranuleClass::test_line_invalid_set",
"tests/test_granule.py::TestGranuleClass::test_line_set",
"tests/test_granule.py::TestGranuleClass::test_online_only_invalid",
"tests/test_granule.py::TestGranuleClass::test_online_only_set",
"tests/test_granule.py::TestGranuleClass::test_orbit_number_encode",
"tests/test_granule.py::TestGranuleClass::test_orbit_number_set",
"tests/test_granule.py::TestGranuleClass::test_platform",
"tests/test_granule.py::TestGranuleClass::test_point_invalid_set",
"tests/test_granule.py::TestGranuleClass::test_point_set",
"tests/test_granule.py::TestGranuleClass::test_polygon_invalid_set",
"tests/test_granule.py::TestGranuleClass::test_polygon_set",
"tests/test_granule.py::TestGranuleClass::test_short_name",
"tests/test_granule.py::TestGranuleClass::test_temporal_invalid_date_order",
"tests/test_granule.py::TestGranuleClass::test_temporal_invalid_strings",
"tests/test_granule.py::TestGranuleClass::test_temporal_invalid_types",
"tests/test_granule.py::TestGranuleClass::test_temporal_option_set",
"tests/test_granule.py::TestGranuleClass::test_temporal_set",
"tests/test_granule.py::TestGranuleClass::test_valid_formats",
"tests/test_granule.py::TestGranuleClass::test_valid_spatial_state",
"tests/test_granule.py::TestGranuleClass::test_version"
]
| []
| MIT License | 1,362 | [
"README.rst",
"cmr/queries.py"
]
| [
"README.rst",
"cmr/queries.py"
]
|
|
maximkulkin__lollipop-55 | 360bbc8f9c2b6203ab5af8a3cd051f852ba8dae3 | 2017-06-12 19:52:49 | 360bbc8f9c2b6203ab5af8a3cd051f852ba8dae3 | diff --git a/lollipop/types.py b/lollipop/types.py
index 3ec8a50..acb7f3b 100644
--- a/lollipop/types.py
+++ b/lollipop/types.py
@@ -679,16 +679,22 @@ class Dict(Type):
errors_builder = ValidationErrorBuilder()
result = {}
for k, v in iteritems(data):
- value_type = self.value_types.get(k)
- if value_type is None:
- continue
try:
k = self.key_type.load(k, *args, **kwargs)
except ValidationError as ve:
errors_builder.add_error(k, ve.messages)
+ if k is MISSING:
+ continue
+
+ value_type = self.value_types.get(k)
+ if value_type is None:
+ continue
+
try:
- result[k] = value_type.load(v, *args, **kwargs)
+ value = value_type.load(v, *args, **kwargs)
+ if value is not MISSING:
+ result[k] = value
except ValidationError as ve:
errors_builder.add_error(k, ve.messages)
@@ -715,8 +721,13 @@ class Dict(Type):
except ValidationError as ve:
errors_builder.add_error(k, ve.messages)
+ if k is MISSING:
+ continue
+
try:
- result[k] = value_type.dump(v, *args, **kwargs)
+ value = value_type.dump(v, *args, **kwargs)
+ if value is not MISSING:
+ result[k] = value
except ValidationError as ve:
errors_builder.add_error(k, ve.messages)
| Asymmetry in load/dump for Dict with key_type and dict as values_type
I think there is a problem with the Dict type when using both key_type verification and a values_type dictionary:
In Dict.dump, the key is used in its original / non-dumped form to lookup the value type. However, in Dict.load, the dumped key is used to lookup the value type.
This works fine when using native types as key type such as String and Integer since they map to the same loaded/dumped value. But it's causing a problem when using a more complex key type (e.g. in my case an Enum that dumps to a string).
I believe, in Dict.load, key_type.load should be called **before** the lookup of the value type, so that the lookup is again performed with the original / non-dumped value.
| maximkulkin/lollipop | diff --git a/tests/test_types.py b/tests/test_types.py
index e652fb6..6489dbf 100644
--- a/tests/test_types.py
+++ b/tests/test_types.py
@@ -641,6 +641,30 @@ class TestDict(NameDescriptionTestsMixin, RequiredTestsMixin, ValidationTestsMix
assert Dict(Any(), key_type=Integer())\
.load({'123': 'foo', '456': 'bar'}) == {123: 'foo', 456: 'bar'}
+ def test_loading_dict_with_custom_key_type_and_values_of_different_types(self):
+ assert Dict({1: Integer(), 2: String()}, key_type=Integer())\
+ .load({'1': '123', '2': 'bar'}) == {1: 123, 2: 'bar'}
+
+ def test_loading_skips_key_value_if_custom_key_type_loads_to_missing(self):
+ class CustomKeyType(String):
+ def load(self, data, *args, **kwargs):
+ if data == 'foo':
+ return MISSING
+ return super(CustomKeyType, self).load(data, *args, **kwargs)
+
+ assert Dict(String(), key_type=CustomKeyType())\
+ .load({'foo': 'hello', 'bar': 'goodbye'}) == {'bar': 'goodbye'}
+
+ def test_loading_skips_key_value_if_value_type_loads_to_missing(self):
+ class CustomValueType(String):
+ def load(self, data, *args, **kwargs):
+ if data == 'foo':
+ return MISSING
+ return super(CustomValueType, self).load(data, *args, **kwargs)
+
+ assert Dict(CustomValueType())\
+ .load({'key1': 'foo', 'key2': 'bar'}) == {'key2': 'bar'}
+
def test_loading_accepts_any_key_if_key_type_is_not_specified(self):
assert Dict(Any())\
.load({'123': 'foo', 456: 'bar'}) == {'123': 'foo', 456: 'bar'}
@@ -719,7 +743,27 @@ class TestDict(NameDescriptionTestsMixin, RequiredTestsMixin, ValidationTestsMix
def test_dumping_dict_with_values_of_different_types(self):
value = {'foo': 1, 'bar': 'hello', 'baz': True}
assert Dict({'foo': Integer(), 'bar': String(), 'baz': Boolean()})\
- .load(value) == value
+ .dump(value) == value
+
+ def test_dumping_skips_key_value_if_custom_key_type_loads_to_missing(self):
+ class CustomKeyType(String):
+ def dump(self, data, *args, **kwargs):
+ if data == 'foo':
+ return MISSING
+ return super(CustomKeyType, self).load(data, *args, **kwargs)
+
+ assert Dict(String(), key_type=CustomKeyType())\
+ .dump({'foo': 'hello', 'bar': 'goodbye'}) == {'bar': 'goodbye'}
+
+ def test_dumping_skips_key_value_if_value_type_loads_to_missing(self):
+ class CustomValueType(String):
+ def dump(self, data, *args, **kwargs):
+ if data == 'foo':
+ return MISSING
+ return super(CustomValueType, self).load(data, *args, **kwargs)
+
+ assert Dict(CustomValueType())\
+ .dump({'key1': 'foo', 'key2': 'bar'}) == {'key2': 'bar'}
def test_dumping_accepts_any_value_if_value_types_are_not_specified(self):
assert Dict()\
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 1
} | 1.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | exceptiongroup==1.2.2
iniconfig==2.1.0
-e git+https://github.com/maximkulkin/lollipop.git@360bbc8f9c2b6203ab5af8a3cd051f852ba8dae3#egg=lollipop
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
tomli==2.2.1
| name: lollipop
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- exceptiongroup==1.2.2
- iniconfig==2.1.0
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- tomli==2.2.1
prefix: /opt/conda/envs/lollipop
| [
"tests/test_types.py::TestDict::test_loading_dict_with_custom_key_type_and_values_of_different_types",
"tests/test_types.py::TestDict::test_loading_skips_key_value_if_custom_key_type_loads_to_missing",
"tests/test_types.py::TestDict::test_loading_skips_key_value_if_value_type_loads_to_missing",
"tests/test_types.py::TestDict::test_dumping_skips_key_value_if_custom_key_type_loads_to_missing",
"tests/test_types.py::TestDict::test_dumping_skips_key_value_if_value_type_loads_to_missing"
]
| []
| [
"tests/test_types.py::TestString::test_loading_does_not_raise_ValidationError_if_validators_succeed",
"tests/test_types.py::TestString::test_loading_raises_ValidationError_if_validator_fails",
"tests/test_types.py::TestString::test_loading_raises_ValidationError_with_combined_messages_if_multiple_validators_fail",
"tests/test_types.py::TestString::test_loading_passes_context_to_validator",
"tests/test_types.py::TestString::test_validation_returns_None_if_validators_succeed",
"tests/test_types.py::TestString::test_validation_returns_errors_if_validator_fails",
"tests/test_types.py::TestString::test_validation_returns_combined_errors_if_multiple_validators_fail",
"tests/test_types.py::TestString::test_loading_missing_value_raises_required_error",
"tests/test_types.py::TestString::test_loading_None_raises_required_error",
"tests/test_types.py::TestString::test_dumping_missing_value_raises_required_error",
"tests/test_types.py::TestString::test_dumping_None_raises_required_error",
"tests/test_types.py::TestString::test_name",
"tests/test_types.py::TestString::test_description",
"tests/test_types.py::TestString::test_loading_string_value",
"tests/test_types.py::TestString::test_loading_non_string_value_raises_ValidationError",
"tests/test_types.py::TestString::test_dumping_string_value",
"tests/test_types.py::TestString::test_dumping_non_string_value_raises_ValidationError",
"tests/test_types.py::TestNumber::test_loading_does_not_raise_ValidationError_if_validators_succeed",
"tests/test_types.py::TestNumber::test_loading_raises_ValidationError_if_validator_fails",
"tests/test_types.py::TestNumber::test_loading_raises_ValidationError_with_combined_messages_if_multiple_validators_fail",
"tests/test_types.py::TestNumber::test_loading_passes_context_to_validator",
"tests/test_types.py::TestNumber::test_validation_returns_None_if_validators_succeed",
"tests/test_types.py::TestNumber::test_validation_returns_errors_if_validator_fails",
"tests/test_types.py::TestNumber::test_validation_returns_combined_errors_if_multiple_validators_fail",
"tests/test_types.py::TestNumber::test_loading_missing_value_raises_required_error",
"tests/test_types.py::TestNumber::test_loading_None_raises_required_error",
"tests/test_types.py::TestNumber::test_dumping_missing_value_raises_required_error",
"tests/test_types.py::TestNumber::test_dumping_None_raises_required_error",
"tests/test_types.py::TestNumber::test_name",
"tests/test_types.py::TestNumber::test_description",
"tests/test_types.py::TestNumber::test_loading_float_value",
"tests/test_types.py::TestNumber::test_loading_non_numeric_value_raises_ValidationError",
"tests/test_types.py::TestNumber::test_dumping_float_value",
"tests/test_types.py::TestNumber::test_dumping_non_numeric_value_raises_ValidationError",
"tests/test_types.py::TestInteger::test_loading_integer_value",
"tests/test_types.py::TestInteger::test_loading_long_value",
"tests/test_types.py::TestInteger::test_loading_non_numeric_value_raises_ValidationError",
"tests/test_types.py::TestInteger::test_dumping_integer_value",
"tests/test_types.py::TestInteger::test_dumping_long_value",
"tests/test_types.py::TestInteger::test_dumping_non_numeric_value_raises_ValidationError",
"tests/test_types.py::TestFloat::test_loading_float_value",
"tests/test_types.py::TestFloat::test_loading_non_numeric_value_raises_ValidationError",
"tests/test_types.py::TestFloat::test_dumping_float_value",
"tests/test_types.py::TestFloat::test_dumping_non_numeric_value_raises_ValidationError",
"tests/test_types.py::TestBoolean::test_loading_does_not_raise_ValidationError_if_validators_succeed",
"tests/test_types.py::TestBoolean::test_loading_raises_ValidationError_if_validator_fails",
"tests/test_types.py::TestBoolean::test_loading_raises_ValidationError_with_combined_messages_if_multiple_validators_fail",
"tests/test_types.py::TestBoolean::test_loading_passes_context_to_validator",
"tests/test_types.py::TestBoolean::test_validation_returns_None_if_validators_succeed",
"tests/test_types.py::TestBoolean::test_validation_returns_errors_if_validator_fails",
"tests/test_types.py::TestBoolean::test_validation_returns_combined_errors_if_multiple_validators_fail",
"tests/test_types.py::TestBoolean::test_loading_missing_value_raises_required_error",
"tests/test_types.py::TestBoolean::test_loading_None_raises_required_error",
"tests/test_types.py::TestBoolean::test_dumping_missing_value_raises_required_error",
"tests/test_types.py::TestBoolean::test_dumping_None_raises_required_error",
"tests/test_types.py::TestBoolean::test_name",
"tests/test_types.py::TestBoolean::test_description",
"tests/test_types.py::TestBoolean::test_loading_boolean_value",
"tests/test_types.py::TestBoolean::test_loading_non_boolean_value_raises_ValidationError",
"tests/test_types.py::TestBoolean::test_dumping_boolean_value",
"tests/test_types.py::TestBoolean::test_dumping_non_boolean_value_raises_ValidationError",
"tests/test_types.py::TestDateTime::test_loading_does_not_raise_ValidationError_if_validators_succeed",
"tests/test_types.py::TestDateTime::test_loading_raises_ValidationError_if_validator_fails",
"tests/test_types.py::TestDateTime::test_loading_raises_ValidationError_with_combined_messages_if_multiple_validators_fail",
"tests/test_types.py::TestDateTime::test_loading_passes_context_to_validator",
"tests/test_types.py::TestDateTime::test_validation_returns_None_if_validators_succeed",
"tests/test_types.py::TestDateTime::test_validation_returns_errors_if_validator_fails",
"tests/test_types.py::TestDateTime::test_validation_returns_combined_errors_if_multiple_validators_fail",
"tests/test_types.py::TestDateTime::test_loading_missing_value_raises_required_error",
"tests/test_types.py::TestDateTime::test_loading_None_raises_required_error",
"tests/test_types.py::TestDateTime::test_dumping_missing_value_raises_required_error",
"tests/test_types.py::TestDateTime::test_dumping_None_raises_required_error",
"tests/test_types.py::TestDateTime::test_name",
"tests/test_types.py::TestDateTime::test_description",
"tests/test_types.py::TestDateTime::test_loading_string_date",
"tests/test_types.py::TestDateTime::test_loading_using_predefined_format",
"tests/test_types.py::TestDateTime::test_loading_using_custom_format",
"tests/test_types.py::TestDateTime::test_loading_raises_ValidationError_if_value_is_not_string",
"tests/test_types.py::TestDateTime::test_customizing_error_message_if_value_is_not_string",
"tests/test_types.py::TestDateTime::test_loading_raises_ValidationError_if_value_string_does_not_match_date_format",
"tests/test_types.py::TestDateTime::test_customizing_error_message_if_value_string_does_not_match_date_format",
"tests/test_types.py::TestDateTime::test_loading_passes_deserialized_date_to_validator",
"tests/test_types.py::TestDateTime::test_dumping_date",
"tests/test_types.py::TestDateTime::test_dumping_using_predefined_format",
"tests/test_types.py::TestDateTime::test_dumping_using_custom_format",
"tests/test_types.py::TestDateTime::test_dumping_raises_ValidationError_if_value_is_not_string",
"tests/test_types.py::TestDate::test_loading_does_not_raise_ValidationError_if_validators_succeed",
"tests/test_types.py::TestDate::test_loading_raises_ValidationError_if_validator_fails",
"tests/test_types.py::TestDate::test_loading_raises_ValidationError_with_combined_messages_if_multiple_validators_fail",
"tests/test_types.py::TestDate::test_loading_passes_context_to_validator",
"tests/test_types.py::TestDate::test_validation_returns_None_if_validators_succeed",
"tests/test_types.py::TestDate::test_validation_returns_errors_if_validator_fails",
"tests/test_types.py::TestDate::test_validation_returns_combined_errors_if_multiple_validators_fail",
"tests/test_types.py::TestDate::test_loading_missing_value_raises_required_error",
"tests/test_types.py::TestDate::test_loading_None_raises_required_error",
"tests/test_types.py::TestDate::test_dumping_missing_value_raises_required_error",
"tests/test_types.py::TestDate::test_dumping_None_raises_required_error",
"tests/test_types.py::TestDate::test_name",
"tests/test_types.py::TestDate::test_description",
"tests/test_types.py::TestDate::test_loading_string_date",
"tests/test_types.py::TestDate::test_loading_using_predefined_format",
"tests/test_types.py::TestDate::test_loading_using_custom_format",
"tests/test_types.py::TestDate::test_loading_raises_ValidationError_if_value_is_not_string",
"tests/test_types.py::TestDate::test_customizing_error_message_if_value_is_not_string",
"tests/test_types.py::TestDate::test_loading_raises_ValidationError_if_value_string_does_not_match_date_format",
"tests/test_types.py::TestDate::test_customizing_error_message_if_value_string_does_not_match_date_format",
"tests/test_types.py::TestDate::test_loading_passes_deserialized_date_to_validator",
"tests/test_types.py::TestDate::test_dumping_date",
"tests/test_types.py::TestDate::test_dumping_using_predefined_format",
"tests/test_types.py::TestDate::test_dumping_using_custom_format",
"tests/test_types.py::TestDate::test_dumping_raises_ValidationError_if_value_is_not_string",
"tests/test_types.py::TestTime::test_loading_does_not_raise_ValidationError_if_validators_succeed",
"tests/test_types.py::TestTime::test_loading_raises_ValidationError_if_validator_fails",
"tests/test_types.py::TestTime::test_loading_raises_ValidationError_with_combined_messages_if_multiple_validators_fail",
"tests/test_types.py::TestTime::test_loading_passes_context_to_validator",
"tests/test_types.py::TestTime::test_validation_returns_None_if_validators_succeed",
"tests/test_types.py::TestTime::test_validation_returns_errors_if_validator_fails",
"tests/test_types.py::TestTime::test_validation_returns_combined_errors_if_multiple_validators_fail",
"tests/test_types.py::TestTime::test_loading_missing_value_raises_required_error",
"tests/test_types.py::TestTime::test_loading_None_raises_required_error",
"tests/test_types.py::TestTime::test_dumping_missing_value_raises_required_error",
"tests/test_types.py::TestTime::test_dumping_None_raises_required_error",
"tests/test_types.py::TestTime::test_name",
"tests/test_types.py::TestTime::test_description",
"tests/test_types.py::TestTime::test_loading_string_date",
"tests/test_types.py::TestTime::test_loading_using_custom_format",
"tests/test_types.py::TestTime::test_loading_raises_ValidationError_if_value_is_not_string",
"tests/test_types.py::TestTime::test_customizing_error_message_if_value_is_not_string",
"tests/test_types.py::TestTime::test_loading_raises_ValidationError_if_value_string_does_not_match_date_format",
"tests/test_types.py::TestTime::test_customizing_error_message_if_value_string_does_not_match_date_format",
"tests/test_types.py::TestTime::test_loading_passes_deserialized_date_to_validator",
"tests/test_types.py::TestTime::test_dumping_date",
"tests/test_types.py::TestTime::test_dumping_using_custom_format",
"tests/test_types.py::TestTime::test_dumping_raises_ValidationError_if_value_is_not_string",
"tests/test_types.py::TestList::test_loading_does_not_raise_ValidationError_if_validators_succeed",
"tests/test_types.py::TestList::test_loading_raises_ValidationError_if_validator_fails",
"tests/test_types.py::TestList::test_loading_raises_ValidationError_with_combined_messages_if_multiple_validators_fail",
"tests/test_types.py::TestList::test_loading_passes_context_to_validator",
"tests/test_types.py::TestList::test_validation_returns_None_if_validators_succeed",
"tests/test_types.py::TestList::test_validation_returns_errors_if_validator_fails",
"tests/test_types.py::TestList::test_validation_returns_combined_errors_if_multiple_validators_fail",
"tests/test_types.py::TestList::test_loading_missing_value_raises_required_error",
"tests/test_types.py::TestList::test_loading_None_raises_required_error",
"tests/test_types.py::TestList::test_dumping_missing_value_raises_required_error",
"tests/test_types.py::TestList::test_dumping_None_raises_required_error",
"tests/test_types.py::TestList::test_name",
"tests/test_types.py::TestList::test_description",
"tests/test_types.py::TestList::test_loading_list_value",
"tests/test_types.py::TestList::test_loading_non_list_value_raises_ValidationError",
"tests/test_types.py::TestList::test_loading_list_value_with_items_of_incorrect_type_raises_ValidationError",
"tests/test_types.py::TestList::test_loading_list_value_with_items_that_have_validation_errors_raises_ValidationError",
"tests/test_types.py::TestList::test_loading_does_not_validate_whole_list_if_items_have_errors",
"tests/test_types.py::TestList::test_loading_passes_context_to_inner_type_load",
"tests/test_types.py::TestList::test_dumping_list_value",
"tests/test_types.py::TestList::test_dumping_sequence_value",
"tests/test_types.py::TestList::test_dumping_non_list_value_raises_ValidationError",
"tests/test_types.py::TestList::test_dumping_list_value_with_items_of_incorrect_type_raises_ValidationError",
"tests/test_types.py::TestList::test_dumping_passes_context_to_inner_type_dump",
"tests/test_types.py::TestTuple::test_loading_does_not_raise_ValidationError_if_validators_succeed",
"tests/test_types.py::TestTuple::test_loading_raises_ValidationError_if_validator_fails",
"tests/test_types.py::TestTuple::test_loading_raises_ValidationError_with_combined_messages_if_multiple_validators_fail",
"tests/test_types.py::TestTuple::test_loading_passes_context_to_validator",
"tests/test_types.py::TestTuple::test_validation_returns_None_if_validators_succeed",
"tests/test_types.py::TestTuple::test_validation_returns_errors_if_validator_fails",
"tests/test_types.py::TestTuple::test_validation_returns_combined_errors_if_multiple_validators_fail",
"tests/test_types.py::TestTuple::test_loading_missing_value_raises_required_error",
"tests/test_types.py::TestTuple::test_loading_None_raises_required_error",
"tests/test_types.py::TestTuple::test_dumping_missing_value_raises_required_error",
"tests/test_types.py::TestTuple::test_dumping_None_raises_required_error",
"tests/test_types.py::TestTuple::test_name",
"tests/test_types.py::TestTuple::test_description",
"tests/test_types.py::TestTuple::test_loading_tuple_with_values_of_same_type",
"tests/test_types.py::TestTuple::test_loading_tuple_with_values_of_different_type",
"tests/test_types.py::TestTuple::test_loading_non_tuple_value_raises_ValidationError",
"tests/test_types.py::TestTuple::test_loading_tuple_with_items_of_incorrect_type_raises_ValidationError",
"tests/test_types.py::TestTuple::test_loading_tuple_with_items_that_have_validation_errors_raises_ValidationErrors",
"tests/test_types.py::TestTuple::test_loading_passes_context_to_inner_type_load",
"tests/test_types.py::TestTuple::test_dump_tuple",
"tests/test_types.py::TestTuple::test_dump_sequence",
"tests/test_types.py::TestTuple::test_dumping_non_tuple_raises_ValidationError",
"tests/test_types.py::TestTuple::test_dumping_sequence_of_incorrect_length_raises_ValidationError",
"tests/test_types.py::TestTuple::test_dumping_tuple_with_items_of_incorrect_type_raises_ValidationError",
"tests/test_types.py::TestTuple::test_dumping_tuple_passes_context_to_inner_type_dump",
"tests/test_types.py::TestDict::test_loading_does_not_raise_ValidationError_if_validators_succeed",
"tests/test_types.py::TestDict::test_loading_raises_ValidationError_if_validator_fails",
"tests/test_types.py::TestDict::test_loading_raises_ValidationError_with_combined_messages_if_multiple_validators_fail",
"tests/test_types.py::TestDict::test_loading_passes_context_to_validator",
"tests/test_types.py::TestDict::test_validation_returns_None_if_validators_succeed",
"tests/test_types.py::TestDict::test_validation_returns_errors_if_validator_fails",
"tests/test_types.py::TestDict::test_validation_returns_combined_errors_if_multiple_validators_fail",
"tests/test_types.py::TestDict::test_loading_missing_value_raises_required_error",
"tests/test_types.py::TestDict::test_loading_None_raises_required_error",
"tests/test_types.py::TestDict::test_dumping_missing_value_raises_required_error",
"tests/test_types.py::TestDict::test_dumping_None_raises_required_error",
"tests/test_types.py::TestDict::test_name",
"tests/test_types.py::TestDict::test_description",
"tests/test_types.py::TestDict::test_loading_dict_with_custom_key_type",
"tests/test_types.py::TestDict::test_loading_accepts_any_key_if_key_type_is_not_specified",
"tests/test_types.py::TestDict::test_loading_dict_with_values_of_the_same_type",
"tests/test_types.py::TestDict::test_loading_dict_with_values_of_different_types",
"tests/test_types.py::TestDict::test_loading_accepts_any_value_if_value_types_are_not_specified",
"tests/test_types.py::TestDict::test_loading_non_dict_value_raises_ValidationError",
"tests/test_types.py::TestDict::test_loading_dict_with_incorrect_key_value_raises_ValidationError",
"tests/test_types.py::TestDict::test_loading_dict_with_items_of_incorrect_type_raises_ValidationError",
"tests/test_types.py::TestDict::test_loading_dict_with_items_that_have_validation_errors_raises_ValidationError",
"tests/test_types.py::TestDict::test_loading_does_not_validate_whole_list_if_items_have_errors",
"tests/test_types.py::TestDict::test_loading_dict_with_incorrect_key_value_and_incorrect_value_raises_ValidationError_with_both_errors",
"tests/test_types.py::TestDict::test_loading_passes_context_to_inner_type_load",
"tests/test_types.py::TestDict::test_dumping_dict_with_custom_key_type",
"tests/test_types.py::TestDict::test_dumping_accepts_any_key_if_key_type_is_not_specified",
"tests/test_types.py::TestDict::test_dumping_dict_with_values_of_the_same_type",
"tests/test_types.py::TestDict::test_dumping_dict_with_values_of_different_types",
"tests/test_types.py::TestDict::test_dumping_accepts_any_value_if_value_types_are_not_specified",
"tests/test_types.py::TestDict::test_dumping_non_dict_value_raises_ValidationError",
"tests/test_types.py::TestDict::test_dumping_dict_with_incorrect_key_value_raises_ValidationError",
"tests/test_types.py::TestDict::test_dumping_dict_with_items_of_incorrect_type_raises_ValidationError",
"tests/test_types.py::TestDict::test_dumping_dict_with_incorrect_key_value_and_incorrect_value_raises_ValidationError_with_both_errors",
"tests/test_types.py::TestDict::test_dumping_passes_context_to_inner_type_dump",
"tests/test_types.py::TestOneOf::test_loading_values_of_one_of_listed_types",
"tests/test_types.py::TestOneOf::test_loading_raises_ValidationError_if_value_is_of_unlisted_type",
"tests/test_types.py::TestOneOf::test_loading_raises_ValidationError_if_deserialized_value_has_errors",
"tests/test_types.py::TestOneOf::test_loading_raises_ValidationError_if_type_hint_is_unknown",
"tests/test_types.py::TestOneOf::test_loading_with_type_hinting",
"tests/test_types.py::TestOneOf::test_loading_with_type_hinting_raises_ValidationError_if_deserialized_value_has_errors",
"tests/test_types.py::TestOneOf::test_dumping_values_of_one_of_listed_types",
"tests/test_types.py::TestOneOf::test_dumping_raises_ValidationError_if_value_is_of_unlisted_type",
"tests/test_types.py::TestOneOf::test_dumping_raises_ValidationError_if_type_hint_is_unknown",
"tests/test_types.py::TestOneOf::test_dumping_raises_ValidationError_if_serialized_value_has_errors",
"tests/test_types.py::TestOneOf::test_dumping_with_type_hinting",
"tests/test_types.py::TestOneOf::test_dumping_with_type_hinting_raises_ValidationError_if_deserialized_value_has_errors",
"tests/test_types.py::TestAttributeField::test_getting_value_returns_value_of_given_object_attribute",
"tests/test_types.py::TestAttributeField::test_getting_value_returns_value_of_configured_object_attribute",
"tests/test_types.py::TestAttributeField::test_getting_value_returns_value_of_field_name_transformed_with_given_name_transformation",
"tests/test_types.py::TestAttributeField::test_setting_value_sets_given_value_to_given_object_attribute",
"tests/test_types.py::TestAttributeField::test_setting_value_sets_given_value_to_configured_object_attribute",
"tests/test_types.py::TestAttributeField::test_setting_value_sets_given_value_to_field_name_transformed_with_given_name_transformation",
"tests/test_types.py::TestAttributeField::test_loading_value_with_field_type",
"tests/test_types.py::TestAttributeField::test_loading_given_attribute_regardless_of_attribute_override",
"tests/test_types.py::TestAttributeField::test_loading_missing_value_if_attribute_does_not_exist",
"tests/test_types.py::TestAttributeField::test_loading_passes_context_to_field_type_load",
"tests/test_types.py::TestAttributeField::test_dumping_given_attribute_from_object",
"tests/test_types.py::TestAttributeField::test_dumping_object_attribute_with_field_type",
"tests/test_types.py::TestAttributeField::test_dumping_a_different_attribute_from_object",
"tests/test_types.py::TestAttributeField::test_dumping_passes_context_to_field_type_dump",
"tests/test_types.py::TestMethodField::test_get_value_returns_result_of_calling_configured_method_on_object",
"tests/test_types.py::TestMethodField::test_get_value_returns_result_of_calling_method_calculated_by_given_function_on_object",
"tests/test_types.py::TestMethodField::test_get_value_returns_MISSING_if_get_method_is_not_specified",
"tests/test_types.py::TestMethodField::test_get_value_raises_ValueError_if_method_does_not_exist",
"tests/test_types.py::TestMethodField::test_get_value_raises_ValueError_if_property_is_not_callable",
"tests/test_types.py::TestMethodField::test_get_value_passes_context_to_method",
"tests/test_types.py::TestMethodField::test_set_value_calls_configure_method_on_object",
"tests/test_types.py::TestMethodField::test_set_value_calls_method_calculated_by_given_function_on_object",
"tests/test_types.py::TestMethodField::test_set_value_does_not_do_anything_if_set_method_is_not_specified",
"tests/test_types.py::TestMethodField::test_set_value_raises_ValueError_if_method_does_not_exist",
"tests/test_types.py::TestMethodField::test_set_value_raises_ValueError_if_property_is_not_callable",
"tests/test_types.py::TestMethodField::test_set_value_passes_context_to_method",
"tests/test_types.py::TestMethodField::test_loading_value_with_field_type",
"tests/test_types.py::TestMethodField::test_loading_value_returns_loaded_value",
"tests/test_types.py::TestMethodField::test_loading_value_passes_context_to_field_types_load",
"tests/test_types.py::TestMethodField::test_loading_value_into_existing_object_calls_field_types_load_into",
"tests/test_types.py::TestMethodField::test_loading_value_into_existing_object_calls_field_types_load_if_load_into_is_not_available",
"tests/test_types.py::TestMethodField::test_loading_value_into_existing_object_calls_field_types_load_if_old_value_is_None",
"tests/test_types.py::TestMethodField::test_loading_value_into_existing_object_calls_field_types_load_if_old_value_is_MISSING",
"tests/test_types.py::TestMethodField::test_loading_value_into_existing_object_passes_context_to_field_types_load_into",
"tests/test_types.py::TestMethodField::test_dumping_result_of_given_objects_method",
"tests/test_types.py::TestMethodField::test_dumping_result_of_objects_method_with_field_type",
"tests/test_types.py::TestMethodField::test_dumping_result_of_a_different_objects_method",
"tests/test_types.py::TestMethodField::test_dumping_raises_ValueError_if_given_method_does_not_exist",
"tests/test_types.py::TestMethodField::test_dumping_raises_ValueError_if_given_method_is_not_callable",
"tests/test_types.py::TestMethodField::test_dumping_passes_context_to_field_type_dump",
"tests/test_types.py::TestFunctionField::test_get_value_returns_result_of_calling_configured_function_with_object",
"tests/test_types.py::TestFunctionField::test_get_value_returns_MISSING_if_get_func_is_not_specified",
"tests/test_types.py::TestFunctionField::test_get_value_raises_ValueError_if_property_is_not_callable",
"tests/test_types.py::TestFunctionField::test_get_value_passes_context_to_func",
"tests/test_types.py::TestFunctionField::test_set_value_calls_configure_method_on_object",
"tests/test_types.py::TestFunctionField::test_set_value_does_not_do_anything_if_set_func_is_not_specified",
"tests/test_types.py::TestFunctionField::test_set_value_raises_ValueError_if_property_is_not_callable",
"tests/test_types.py::TestFunctionField::test_set_value_passes_context_to_func",
"tests/test_types.py::TestFunctionField::test_loading_value_with_field_type",
"tests/test_types.py::TestFunctionField::test_loading_value_returns_loaded_value",
"tests/test_types.py::TestFunctionField::test_loading_value_passes_context_to_field_types_load",
"tests/test_types.py::TestFunctionField::test_loading_value_into_existing_object_calls_field_types_load_into",
"tests/test_types.py::TestFunctionField::test_loading_value_into_existing_object_calls_field_types_load_if_load_into_is_not_available",
"tests/test_types.py::TestFunctionField::test_loading_value_into_existing_object_calls_field_types_load_if_old_value_is_None",
"tests/test_types.py::TestFunctionField::test_loading_value_into_existing_object_calls_field_types_load_if_old_value_is_MISSING",
"tests/test_types.py::TestFunctionField::test_loading_value_into_existing_object_passes_context_to_field_types_load_into",
"tests/test_types.py::TestFunctionField::test_dumping_result_of_given_function",
"tests/test_types.py::TestFunctionField::test_dumping_result_of_objects_method_with_field_type",
"tests/test_types.py::TestFunctionField::test_dumping_raises_ValueError_if_given_get_func_is_not_callable",
"tests/test_types.py::TestFunctionField::test_dumping_passes_context_to_field_type_dump",
"tests/test_types.py::TestConstant::test_name",
"tests/test_types.py::TestConstant::test_description",
"tests/test_types.py::TestConstant::test_loading_always_returns_missing",
"tests/test_types.py::TestConstant::test_loading_raises_ValidationError_if_loaded_value_is_not_a_constant_value_specified",
"tests/test_types.py::TestConstant::test_loading_value_with_inner_type_before_checking_value_correctness",
"tests/test_types.py::TestConstant::test_customizing_error_message_when_value_is_incorrect",
"tests/test_types.py::TestConstant::test_dumping_always_returns_given_value",
"tests/test_types.py::TestConstant::test_dumping_given_constant_with_field_type",
"tests/test_types.py::TestObject::test_loading_does_not_raise_ValidationError_if_validators_succeed",
"tests/test_types.py::TestObject::test_loading_raises_ValidationError_if_validator_fails",
"tests/test_types.py::TestObject::test_loading_raises_ValidationError_with_combined_messages_if_multiple_validators_fail",
"tests/test_types.py::TestObject::test_loading_passes_context_to_validator",
"tests/test_types.py::TestObject::test_validation_returns_None_if_validators_succeed",
"tests/test_types.py::TestObject::test_validation_returns_errors_if_validator_fails",
"tests/test_types.py::TestObject::test_validation_returns_combined_errors_if_multiple_validators_fail",
"tests/test_types.py::TestObject::test_loading_missing_value_raises_required_error",
"tests/test_types.py::TestObject::test_loading_None_raises_required_error",
"tests/test_types.py::TestObject::test_dumping_missing_value_raises_required_error",
"tests/test_types.py::TestObject::test_dumping_None_raises_required_error",
"tests/test_types.py::TestObject::test_name",
"tests/test_types.py::TestObject::test_description",
"tests/test_types.py::TestObject::test_default_field_type_is_unset_by_default",
"tests/test_types.py::TestObject::test_inheriting_default_field_type_from_first_base_class_that_has_it_set",
"tests/test_types.py::TestObject::test_constructor_is_unset_by_default",
"tests/test_types.py::TestObject::test_inheriting_constructor_from_first_base_class_that_has_it_set",
"tests/test_types.py::TestObject::test_allow_extra_fields_is_unset_by_default",
"tests/test_types.py::TestObject::test_inheriting_allow_extra_fields_from_first_base_class_that_has_it_set",
"tests/test_types.py::TestObject::test_immutable_is_unset_by_default",
"tests/test_types.py::TestObject::test_inheriting_immutable_from_first_base_class_that_has_it_set",
"tests/test_types.py::TestObject::test_ordered_is_unset_by_default",
"tests/test_types.py::TestObject::test_iheriting_ordered_from_first_base_class_that_has_it_set",
"tests/test_types.py::TestObject::test_loading_dict_value",
"tests/test_types.py::TestObject::test_loading_non_dict_values_raises_ValidationError",
"tests/test_types.py::TestObject::test_loading_bypasses_values_for_which_field_type_returns_missing_value",
"tests/test_types.py::TestObject::test_loading_dict_with_field_errors_raises_ValidationError_with_all_field_errors_merged",
"tests/test_types.py::TestObject::test_loading_dict_with_field_errors_does_not_run_whole_object_validators",
"tests/test_types.py::TestObject::test_loading_calls_field_load_passing_field_name_and_whole_data",
"tests/test_types.py::TestObject::test_loading_passes_context_to_inner_type_load",
"tests/test_types.py::TestObject::test_constructing_objects_with_default_constructor_on_load",
"tests/test_types.py::TestObject::test_constructing_custom_objects_on_load",
"tests/test_types.py::TestObject::test_load_ignores_extra_fields_by_default",
"tests/test_types.py::TestObject::test_load_raises_ValidationError_if_reporting_extra_fields",
"tests/test_types.py::TestObject::test_loading_inherited_fields",
"tests/test_types.py::TestObject::test_loading_multiple_inherited_fields",
"tests/test_types.py::TestObject::test_loading_raises_ValidationError_if_inherited_fields_have_errors",
"tests/test_types.py::TestObject::test_loading_only_specified_fields",
"tests/test_types.py::TestObject::test_loading_only_specified_fields_does_not_affect_own_fields",
"tests/test_types.py::TestObject::test_loading_all_but_specified_base_class_fields",
"tests/test_types.py::TestObject::test_loading_all_but_specified_fields_does_not_affect_own_fields",
"tests/test_types.py::TestObject::test_loading_values_into_existing_object",
"tests/test_types.py::TestObject::test_loading_values_into_existing_object_returns_that_object",
"tests/test_types.py::TestObject::test_loading_values_into_existing_object_passes_all_object_attributes_to_validators",
"tests/test_types.py::TestObject::test_loading_values_into_immutable_object_creates_a_copy",
"tests/test_types.py::TestObject::test_loading_values_into_immutable_object_does_not_modify_original_object",
"tests/test_types.py::TestObject::test_loading_values_into_nested_object_of_immutable_object_creates_copy_of_it_regardless_of_nested_objects_immutable_flag",
"tests/test_types.py::TestObject::test_loading_values_into_nested_object_of_immutable_object_does_not_modify_original_objects",
"tests/test_types.py::TestObject::test_loading_values_into_nested_objects_with_inplace_False_does_not_modify_original_objects",
"tests/test_types.py::TestObject::test_loading_values_into_existing_objects_ignores_missing_fields",
"tests/test_types.py::TestObject::test_loading_MISSING_into_existing_object_does_not_do_anything",
"tests/test_types.py::TestObject::test_loading_None_into_existing_objects_raises_ValidationError",
"tests/test_types.py::TestObject::test_loading_None_into_field_of_existing_object_passes_None_to_field",
"tests/test_types.py::TestObject::test_loading_values_into_existing_objects_raises_ValidationError_if_data_contains_errors",
"tests/test_types.py::TestObject::test_loading_values_into_existing_objects_raises_ValidationError_if_validator_fails",
"tests/test_types.py::TestObject::test_loading_values_into_existing_objects_annotates_field_errors_with_field_names",
"tests/test_types.py::TestObject::test_loading_values_into_existing_nested_objects",
"tests/test_types.py::TestObject::test_loading_values_into_existing_object_when_nested_object_does_not_exist",
"tests/test_types.py::TestObject::test_validating_data_for_existing_objects_returns_None_if_data_is_valid",
"tests/test_types.py::TestObject::test_validating_data_for_existing_objects_returns_errors_if_data_contains_errors",
"tests/test_types.py::TestObject::test_validating_data_for_existing_objects_returns_errors_if_validator_fails",
"tests/test_types.py::TestObject::test_validating_data_for_existing_objects_does_not_modify_original_objects",
"tests/test_types.py::TestObject::test_dumping_object_attributes",
"tests/test_types.py::TestObject::test_dumping_calls_field_dump_passing_field_name_and_whole_object",
"tests/test_types.py::TestObject::test_dumping_passes_context_to_inner_type_dump",
"tests/test_types.py::TestObject::test_dumping_inherited_fields",
"tests/test_types.py::TestObject::test_dumping_multiple_inherited_fields",
"tests/test_types.py::TestObject::test_dumping_only_specified_fields_of_base_classes",
"tests/test_types.py::TestObject::test_dumping_only_specified_fields_does_not_affect_own_fields",
"tests/test_types.py::TestObject::test_dumping_all_but_specified_base_class_fields",
"tests/test_types.py::TestObject::test_dumping_all_but_specified_fields_does_not_affect_own_fields",
"tests/test_types.py::TestObject::test_shortcut_for_specifying_constant_fields",
"tests/test_types.py::TestObject::test_dumping_fields_in_declared_order_if_ordered_is_True",
"tests/test_types.py::TestOptional::test_loading_value_calls_load_of_inner_type",
"tests/test_types.py::TestOptional::test_loading_missing_value_returns_None",
"tests/test_types.py::TestOptional::test_loading_None_returns_None",
"tests/test_types.py::TestOptional::test_loading_missing_value_does_not_call_inner_type_load",
"tests/test_types.py::TestOptional::test_loading_None_does_not_call_inner_type_load",
"tests/test_types.py::TestOptional::test_loading_passes_context_to_inner_type_load",
"tests/test_types.py::TestOptional::test_overriding_missing_value_on_load",
"tests/test_types.py::TestOptional::test_overriding_None_value_on_load",
"tests/test_types.py::TestOptional::test_using_function_to_override_value_on_load",
"tests/test_types.py::TestOptional::test_loading_passes_context_to_override_function",
"tests/test_types.py::TestOptional::test_dumping_value_calls_dump_of_inner_type",
"tests/test_types.py::TestOptional::test_dumping_missing_value_returns_None",
"tests/test_types.py::TestOptional::test_dumping_None_returns_None",
"tests/test_types.py::TestOptional::test_dumping_missing_value_does_not_call_inner_type_dump",
"tests/test_types.py::TestOptional::test_dumping_None_does_not_call_inner_type_dump",
"tests/test_types.py::TestOptional::test_dumping_passes_context_to_inner_type_dump",
"tests/test_types.py::TestOptional::test_overriding_missing_value_on_dump",
"tests/test_types.py::TestOptional::test_overriding_None_value_on_dump",
"tests/test_types.py::TestOptional::test_using_function_to_override_value_on_dump",
"tests/test_types.py::TestOptional::test_dumping_passes_context_to_override_function",
"tests/test_types.py::TestLoadOnly::test_name",
"tests/test_types.py::TestLoadOnly::test_description",
"tests/test_types.py::TestLoadOnly::test_loading_returns_inner_type_load_result",
"tests/test_types.py::TestLoadOnly::test_loading_passes_context_to_inner_type_load",
"tests/test_types.py::TestLoadOnly::test_dumping_always_returns_missing",
"tests/test_types.py::TestLoadOnly::test_dumping_does_not_call_inner_type_dump",
"tests/test_types.py::TestDumpOnly::test_name",
"tests/test_types.py::TestDumpOnly::test_description",
"tests/test_types.py::TestDumpOnly::test_loading_always_returns_missing",
"tests/test_types.py::TestDumpOnly::test_loading_does_not_call_inner_type_dump",
"tests/test_types.py::TestDumpOnly::test_dumping_returns_inner_type_dump_result",
"tests/test_types.py::TestDumpOnly::test_dumping_passes_context_to_inner_type_dump",
"tests/test_types.py::TestTransform::test_name",
"tests/test_types.py::TestTransform::test_description",
"tests/test_types.py::TestTransform::test_loading_calls_pre_load_with_original_value",
"tests/test_types.py::TestTransform::test_loading_calls_inner_type_load_with_result_of_pre_load",
"tests/test_types.py::TestTransform::test_loading_calls_post_load_with_result_of_inner_type_load",
"tests/test_types.py::TestTransform::test_transform_passes_context_to_inner_type_load",
"tests/test_types.py::TestTransform::test_transform_passes_context_to_pre_load",
"tests/test_types.py::TestTransform::test_transform_passes_context_to_post_load",
"tests/test_types.py::TestTransform::test_dumping_calls_pre_dump_with_original_value",
"tests/test_types.py::TestTransform::test_dumping_calls_inner_type_dump_with_result_of_pre_dump",
"tests/test_types.py::TestTransform::test_dumping_calls_post_dump_with_result_of_inner_type_dump",
"tests/test_types.py::TestTransform::test_transform_passes_context_to_inner_type_dump",
"tests/test_types.py::TestTransform::test_transform_passes_context_to_pre_dump",
"tests/test_types.py::TestTransform::test_transform_passes_context_to_post_dump",
"tests/test_types.py::TestValidatedType::test_returns_subclass_of_given_type",
"tests/test_types.py::TestValidatedType::test_returns_type_that_has_single_given_validator",
"tests/test_types.py::TestValidatedType::test_accepts_context_unaware_validators",
"tests/test_types.py::TestValidatedType::test_returns_type_that_has_multiple_given_validators",
"tests/test_types.py::TestValidatedType::test_specifying_more_validators_on_type_instantiation",
"tests/test_types.py::TestValidatedType::test_new_type_accepts_same_constructor_arguments_as_base_type"
]
| []
| MIT License | 1,363 | [
"lollipop/types.py"
]
| [
"lollipop/types.py"
]
|
|
jboss-dockerfiles__dogen-149 | a475173ce00b2d6686c601ffc46a8d2bc3ed0a7f | 2017-06-13 12:32:57 | bc9263c5b683fdf901ac1646286ee3908b85dcdc | diff --git a/dogen/generator.py b/dogen/generator.py
index 0562cd8..415dcb0 100644
--- a/dogen/generator.py
+++ b/dogen/generator.py
@@ -313,9 +313,10 @@ class Generator(object):
algorithms.append(supported_algorithm)
try:
- if os.path.exists(filename) and algorithms:
- for algorithm in algorithms:
- self.check_sum(filename, source[algorithm], algorithm)
+ if os.path.exists(filename):
+ if algorithms:
+ for algorithm in algorithms:
+ self.check_sum(filename, source[algorithm], algorithm)
passed = True
except Exception as e:
self.log.debug(str(e))
@@ -361,7 +362,7 @@ class Generator(object):
else:
if hint:
self.log.info(hint)
- self.log.info("Please download the '%s' artifact manually and save it as '%s'" % (artifact, filename))
+ self.log.info("Please download the '%s' artifact manually and save it as '%s'" % (artifact, filename))
raise Error("Artifact '%s' could not be fetched!" % artifact)
def check_sum(self, filename, checksum, algorithm):
| Dogen fails if md5 is not set for 'local' artifacts
Using dogen 2.0. In my image.yaml, if I have the following:
````
sources:
- artifact: myfile.tar.gz
````
I run dogen mounting `pwd` into /tmp/output inside the dogen container, and put the myfile.tar.gz inside the build/ folder. Dogen is invoked with /tmp/output/build as the destination folder.
I would expect that the local file myfile.tar.gz is added to the resulting image. However, dogen throws an error of not being able to find the artifact. In addition, the helpful hint on where to download it, i.e. "Please download the 'myfile.tar.gz' artifact manually and save it as '/tmp/output/build/myfile.tar.gz' is missing unless I also add the 'hint' field.
If I do this:
```
sources:
- artifact: myfile.tar.gz
md5: <md5sum of myfile.tar.gz>
```
Then dogen is successful creating the Dockerfile.
I would like to be able to omit the md5 field and still be able to generate the Dockerfile, as this is useful for generating Dockerfiles for locally built artifacts.
A detailed example can be found here: https://pastebin.com/31fqRGUa
| jboss-dockerfiles/dogen | diff --git a/tests/test_unit_generate_handle_files.py b/tests/test_unit_generate_handle_files.py
index daf240a..e811ebb 100644
--- a/tests/test_unit_generate_handle_files.py
+++ b/tests/test_unit_generate_handle_files.py
@@ -155,6 +155,17 @@ class TestHandleSources(unittest.TestCase):
self.assertEquals(str(cm.exception), "Could not download artifact from orignal location, reason: original")
mock_fetch_file.assert_has_calls([mock.call('http://cache/get?#algorithm#=#hash#', 'target/jboss-eap.zip'), mock.call('http://host.com/jboss-eap.zip', 'target/jboss-eap.zip')])
+ @mock.patch('dogen.generator.Generator._fetch_file')
+ def test_fetch_artifact_should_fail_with_nice_message_when_artifact_without_url_is_not_found_locally(self, mock_fetch_file):
+ self.generator.cfg = {'sources': [{'artifact': 'jboss-eap.zip'}]}
+
+ with self.assertRaises(Error) as cm:
+ self.generator.handle_sources()
+
+ self.assertEquals(str(cm.exception), "Artifact 'jboss-eap.zip' could not be fetched!")
+ mock_fetch_file.assert_not_called()
+ self.log.info.assert_any_call("Please download the 'jboss-eap.zip' artifact manually and save it as 'target/jboss-eap.zip'")
+
@mock.patch('dogen.generator.Generator._fetch_file')
def test_fetch_artifact_should_fetch_file_from_cache(self, mock_fetch_file):
self.generator.cfg = {'sources': [{'artifact': 'http://host.com/jboss-eap.zip'}]}
@@ -169,7 +180,8 @@ class TestHandleSources(unittest.TestCase):
mock_fetch_file.assert_called_with('http://cache/get?jboss-eap.zip', 'target/jboss-eap.zip')
@mock.patch('dogen.generator.Generator._fetch_file')
- def test_fetch_artifact_should_fetch_file(self, mock_fetch_file):
+ @mock.patch('dogen.generator.os.path.exists', return_value=False)
+ def test_fetch_artifact_should_fetch_file(self, mock_path_exists, mock_fetch_file):
self.generator.cfg = {'sources': [{'artifact': 'http://host.com/jboss-eap.zip'}]}
self.generator.handle_sources()
# No checksum provided and computed
@@ -187,3 +199,12 @@ class TestHandleSources(unittest.TestCase):
self.assertEquals(self.generator.cfg['artifacts'], {'jboss-eap.zip': None})
mock_fetch_file.assert_has_calls([mock.call('http://cache/get?#algorithm#=#hash#', 'target/jboss-eap.zip'), mock.call('http://host.com/jboss-eap.zip', 'target/jboss-eap.zip')])
+
+ @mock.patch('dogen.generator.Generator._fetch_file')
+ @mock.patch('dogen.generator.os.path.exists', return_value=True)
+ def test_fetch_artifact_should_not_fetch_file_if_exists(self, mock_path_exists, mock_fetch_file):
+ self.generator.cfg = {'sources': [{'artifact': 'http://host.com/jboss-eap.zip'}]}
+ self.generator.handle_sources()
+ # No checksum provided and computed
+ self.assertEquals(self.generator.cfg['artifacts'], {'jboss-eap.zip': None})
+ mock_fetch_file.assert_not_called()
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 2
},
"num_modified_files": 1
} | 2.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"mock"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi==2025.1.31
charset-normalizer==3.4.1
colorlog==6.9.0
coverage==7.8.0
docopt==0.6.2
-e git+https://github.com/jboss-dockerfiles/dogen.git@a475173ce00b2d6686c601ffc46a8d2bc3ed0a7f#egg=dogen
exceptiongroup==1.2.2
idna==3.10
iniconfig==2.1.0
Jinja2==3.1.6
MarkupSafe==3.0.2
mock==5.2.0
packaging==24.2
pluggy==1.5.0
pykwalify==1.8.0
pytest==8.3.5
pytest-cov==6.0.0
python-dateutil==2.9.0.post0
PyYAML==6.0.2
requests==2.32.3
ruamel.yaml==0.18.10
ruamel.yaml.clib==0.2.12
six==1.17.0
tomli==2.2.1
urllib3==2.3.0
| name: dogen
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- certifi==2025.1.31
- charset-normalizer==3.4.1
- colorlog==6.9.0
- coverage==7.8.0
- docopt==0.6.2
- exceptiongroup==1.2.2
- idna==3.10
- iniconfig==2.1.0
- jinja2==3.1.6
- markupsafe==3.0.2
- mock==5.2.0
- packaging==24.2
- pluggy==1.5.0
- pykwalify==1.8.0
- pytest==8.3.5
- pytest-cov==6.0.0
- python-dateutil==2.9.0.post0
- pyyaml==6.0.2
- requests==2.32.3
- ruamel-yaml==0.18.10
- ruamel-yaml-clib==0.2.12
- six==1.17.0
- tomli==2.2.1
- urllib3==2.3.0
prefix: /opt/conda/envs/dogen
| [
"tests/test_unit_generate_handle_files.py::TestHandleSources::test_fetch_artifact_should_fail_with_nice_message_when_artifact_without_url_is_not_found_locally",
"tests/test_unit_generate_handle_files.py::TestHandleSources::test_fetch_artifact_should_not_fetch_file_if_exists"
]
| []
| [
"tests/test_unit_generate_handle_files.py::TestURL::test_local_file",
"tests/test_unit_generate_handle_files.py::TestURL::test_remote_http_file",
"tests/test_unit_generate_handle_files.py::TestURL::test_remote_https_file",
"tests/test_unit_generate_handle_files.py::TestFetchFile::test_fetching_with_filename",
"tests/test_unit_generate_handle_files.py::TestFetchFile::test_fetching_with_tmpfile",
"tests/test_unit_generate_handle_files.py::TestCustomTemplateHandling::test_do_not_fail_if_no_template_is_provided",
"tests/test_unit_generate_handle_files.py::TestCustomTemplateHandling::test_fetch_template_success",
"tests/test_unit_generate_handle_files.py::TestCustomTemplateHandling::test_fetch_template_with_error",
"tests/test_unit_generate_handle_files.py::TestHandleSources::test_fetch_artifact_should_download_from_original_location_if_cached_location_failed",
"tests/test_unit_generate_handle_files.py::TestHandleSources::test_fetch_artifact_should_fail_when_cached_download_failed_and_original_failed_too",
"tests/test_unit_generate_handle_files.py::TestHandleSources::test_fetch_artifact_should_fail_when_fetching_fails",
"tests/test_unit_generate_handle_files.py::TestHandleSources::test_fetch_artifact_should_fetch_file",
"tests/test_unit_generate_handle_files.py::TestHandleSources::test_fetch_artifact_should_fetch_file_from_cache",
"tests/test_unit_generate_handle_files.py::TestHandleSources::test_fetch_artifact_without_url_should_fail"
]
| []
| MIT License | 1,364 | [
"dogen/generator.py"
]
| [
"dogen/generator.py"
]
|
|
python-cmd2__cmd2-121 | af37a3414e232c268f9b57761670c051ab3fda32 | 2017-06-13 13:06:56 | ddfd3d9a400ae81468e9abcc89fe690c30b7ec7f | diff --git a/cmd2.py b/cmd2.py
index 7965ea3b..66741e91 100755
--- a/cmd2.py
+++ b/cmd2.py
@@ -1895,8 +1895,7 @@ Script should contain one command per line, just like command would be typed in
sys.argv = [sys.argv[0]] # the --test argument upsets unittest.main()
testcase = TestMyAppCase()
runner = unittest.TextTestRunner()
- result = runner.run(testcase)
- result.printErrors()
+ runner.run(testcase)
def _run_commands_at_invocation(self, callargs):
"""Runs commands provided as arguments on the command line when the application is started.
| Transcript testing prints errors twcie
Transcript test failures end up printing the failure twice. | python-cmd2/cmd2 | diff --git a/tests/test_transcript.py b/tests/test_transcript.py
index 03fec92a..b193a20d 100644
--- a/tests/test_transcript.py
+++ b/tests/test_transcript.py
@@ -270,7 +270,7 @@ def test_transcript_from_cmdloop(request, capsys):
# Check for the unittest "OK" condition for the 1 test which ran
expected_start = ".\n----------------------------------------------------------------------\nRan 1 test in"
- expected_end = "s\n\nOK\n\n"
+ expected_end = "s\n\nOK\n"
out, err = capsys.readouterr()
if six.PY3:
assert err.startswith(expected_start)
@@ -296,7 +296,7 @@ def test_multiline_command_transcript_with_comments_at_beginning(request, capsys
# Check for the unittest "OK" condition for the 1 test which ran
expected_start = ".\n----------------------------------------------------------------------\nRan 1 test in"
- expected_end = "s\n\nOK\n\n"
+ expected_end = "s\n\nOK\n"
out, err = capsys.readouterr()
if six.PY3:
assert err.startswith(expected_start)
@@ -329,7 +329,7 @@ def test_regex_transcript(request, capsys):
# Check for the unittest "OK" condition for the 1 test which ran
expected_start = ".\n----------------------------------------------------------------------\nRan 1 test in"
- expected_end = "s\n\nOK\n\n"
+ expected_end = "s\n\nOK\n"
out, err = capsys.readouterr()
if six.PY3:
assert err.startswith(expected_start)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 3,
"test_score": 2
},
"num_modified_files": 1
} | 0.7 | {
"env_vars": null,
"env_yml_path": [],
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"mock"
],
"pre_install": [],
"python": "3.9",
"reqs_path": [
"requirements/base.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | -e git+https://github.com/python-cmd2/cmd2.git@af37a3414e232c268f9b57761670c051ab3fda32#egg=cmd2
exceptiongroup==1.2.2
iniconfig==2.1.0
mock==5.2.0
packaging==24.2
pluggy==1.5.0
pyparsing==3.2.3
pytest==8.3.5
six==1.17.0
tomli==2.2.1
| name: cmd2
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- exceptiongroup==1.2.2
- iniconfig==2.1.0
- mock==5.2.0
- packaging==24.2
- pluggy==1.5.0
- pyparsing==3.2.3
- pytest==8.3.5
- six==1.17.0
- tomli==2.2.1
prefix: /opt/conda/envs/cmd2
| [
"tests/test_transcript.py::test_transcript_from_cmdloop",
"tests/test_transcript.py::test_multiline_command_transcript_with_comments_at_beginning",
"tests/test_transcript.py::test_regex_transcript"
]
| []
| [
"tests/test_transcript.py::Cmd2TestCase::runTest",
"tests/test_transcript.py::test_base_with_transcript",
"tests/test_transcript.py::TestMyAppCase::runTest",
"tests/test_transcript.py::test_optparser",
"tests/test_transcript.py::test_optparser_nosuchoption",
"tests/test_transcript.py::test_comment_stripping",
"tests/test_transcript.py::test_optarser_correct_args_with_quotes_and_midline_options",
"tests/test_transcript.py::test_optarser_options_with_spaces_in_quotes",
"tests/test_transcript.py::test_commands_at_invocation",
"tests/test_transcript.py::test_select_options",
"tests/test_transcript.py::test_invalid_syntax"
]
| []
| MIT License | 1,365 | [
"cmd2.py"
]
| [
"cmd2.py"
]
|
|
python-cmd2__cmd2-125 | 0af8018cdce37e40719e3f6c4023e9525b1e69de | 2017-06-13 18:59:29 | ddfd3d9a400ae81468e9abcc89fe690c30b7ec7f | diff --git a/CHANGES.md b/CHANGES.md
index 2324522f..c1686888 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -11,6 +11,7 @@ News
* Enhancements
* Added the ability to exclude commands from the help menu (**eof** included by default)
* Redundant list command removed and features merged into history command
+ * Added **pyscript** command which supports running Python scripts with arguments
0.7.2
-----
diff --git a/README.md b/README.md
index cd739773..8c5141b6 100755
--- a/README.md
+++ b/README.md
@@ -155,8 +155,9 @@ example/exampleSession.txt:
Documented commands (type help <topic>):
========================================
-_relative_load edit help list orate py run say shell show
-cmdenvironment eof history load pause quit save set shortcuts speak
+_relative_load help orate pyscript save shell speak
+cmdenvironment history pause quit say shortcuts
+edit load py run set show
(Cmd) help say
Repeats what you tell me to.
diff --git a/cmd2.py b/cmd2.py
index 3fe13f54..aabda96e 100755
--- a/cmd2.py
+++ b/cmd2.py
@@ -1583,6 +1583,42 @@ class Cmd(cmd.Cmd):
self._in_py = False
return self._should_quit
+ # noinspection PyUnusedLocal
+ @options([], arg_desc='<script_path> [script_arguments]')
+ def do_pyscript(self, arg, opts=None):
+ """\nRuns a python script file inside the console
+
+Console commands can be executed inside this script with cmd("your command")
+However, you cannot run nested "py" or "pyscript" commands from within this script
+Paths or arguments that contain spaces must be enclosed in quotes
+"""
+ if not arg:
+ self.perror("pyscript command requires at least 1 argument ...", traceback_war=False)
+ self.do_help('pyscript')
+ return
+
+ if not USE_ARG_LIST:
+ arg = shlex.split(arg, posix=POSIX_SHLEX)
+
+ # Get the absolute path of the script
+ script_path = os.path.abspath(os.path.expanduser(arg[0]))
+
+ # Save current command line arguments
+ orig_args = sys.argv
+
+ # Overwrite sys.argv to allow the script to take command line arguments
+ sys.argv = [script_path]
+ sys.argv.extend(arg[1:])
+
+ # Run the script
+ self.do_py("run('{}')".format(arg[0]))
+
+ # Restore command line arguments to original state
+ sys.argv = orig_args
+
+ # Enable tab completion of paths for pyscript command
+ complete_pyscript = path_complete
+
# Only include the do_ipy() method if IPython is available on the system
if ipython_available:
# noinspection PyMethodMayBeStatic,PyUnusedLocal
diff --git a/examples/arg_printer.py b/examples/arg_printer.py
new file mode 100755
index 00000000..42084d4e
--- /dev/null
+++ b/examples/arg_printer.py
@@ -0,0 +1,6 @@
+#!/usr/bin/env python
+# coding=utf-8
+import sys
+print("Running Python script {!r} which was called with {} arguments".format(sys.argv[0], len(sys.argv) - 1))
+for i, arg in enumerate(sys.argv[1:]):
+ print("arg {}: {!r}".format(i+1, arg))
diff --git a/examples/exampleSession.txt b/examples/exampleSession.txt
index 62c130f0..b2cf24c0 100644
--- a/examples/exampleSession.txt
+++ b/examples/exampleSession.txt
@@ -3,8 +3,9 @@
Documented commands (type help <topic>):
========================================
-_relative_load edit history orate py run say shell show
-cmdenvironment help load pause quit save set shortcuts speak
+_relative_load help orate pyscript save shell speak
+cmdenvironment history pause quit say shortcuts
+edit load py run set show
(Cmd) help say
Repeats what you tell me to.
| Add ability to pass arguments to a Python script
The **py** command supports running a Python script via "py run('script.py')". However, it does not currently support the ability to pass any arguments to the script.
This functionality would be a lot more flexible and reusable if arguments could be passed to the script.
So either either enhance the current py command with the ability to run a script with arguments or add a new command which supports the ability to run a script and pass it arguments. | python-cmd2/cmd2 | diff --git a/tests/conftest.py b/tests/conftest.py
index 41bd007a..77f525f7 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -15,8 +15,8 @@ import cmd2
# Help text for base cmd2.Cmd application
BASE_HELP = """Documented commands (type help <topic>):
========================================
-_relative_load edit history pause quit save shell show
-cmdenvironment help load py run set shortcuts
+_relative_load edit history pause pyscript run set shortcuts
+cmdenvironment help load py quit save shell show
"""
# Help text for the history command
diff --git a/tests/test_transcript.py b/tests/test_transcript.py
index b193a20d..5ac7d6fd 100644
--- a/tests/test_transcript.py
+++ b/tests/test_transcript.py
@@ -106,8 +106,9 @@ def test_base_with_transcript(_cmdline_app):
Documented commands (type help <topic>):
========================================
-_relative_load edit history orate py run say shell show
-cmdenvironment help load pause quit save set shortcuts speak
+_relative_load help orate pyscript save shell speak
+cmdenvironment history pause quit say shortcuts
+edit load py run set show
(Cmd) help say
Repeats what you tell me to.
diff --git a/tests/transcript.txt b/tests/transcript.txt
index d0fd86a6..582f08cb 100644
--- a/tests/transcript.txt
+++ b/tests/transcript.txt
@@ -2,8 +2,9 @@
Documented commands (type help <topic>):
========================================
-_relative_load edit history orate py run say shell show
-cmdenvironment help load pause quit save set shortcuts speak
+_relative_load help orate pyscript save shell speak
+cmdenvironment history pause quit say shortcuts
+edit load py run set show
(Cmd) help say
Repeats what you tell me to.
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_added_files",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 3
},
"num_modified_files": 4
} | 0.7 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"mock",
"sphinx",
"sphinx-rtd-theme",
"pytest-xdist",
"pytest-cov"
],
"pre_install": [
"pip install tox"
],
"python": "3.9",
"reqs_path": [
"requirements/base.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.16
babel==2.17.0
cachetools==5.5.2
certifi==2025.1.31
chardet==5.2.0
charset-normalizer==3.4.1
-e git+https://github.com/python-cmd2/cmd2.git@0af8018cdce37e40719e3f6c4023e9525b1e69de#egg=cmd2
colorama==0.4.6
coverage==7.8.0
distlib==0.3.9
docutils==0.21.2
exceptiongroup==1.2.2
execnet==2.1.1
filelock==3.18.0
idna==3.10
imagesize==1.4.1
importlib_metadata==8.6.1
iniconfig==2.1.0
Jinja2==3.1.6
MarkupSafe==3.0.2
mock==5.2.0
packaging==24.2
platformdirs==4.3.7
pluggy==1.5.0
Pygments==2.19.1
pyparsing==3.2.3
pyproject-api==1.9.0
pytest==8.3.5
pytest-cov==6.0.0
pytest-xdist==3.6.1
requests==2.32.3
six==1.17.0
snowballstemmer==2.2.0
Sphinx==7.4.7
sphinx-rtd-theme==3.0.2
sphinxcontrib-applehelp==2.0.0
sphinxcontrib-devhelp==2.0.0
sphinxcontrib-htmlhelp==2.1.0
sphinxcontrib-jquery==4.1
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==2.0.0
sphinxcontrib-serializinghtml==2.0.0
tomli==2.2.1
tox==4.25.0
typing_extensions==4.13.0
urllib3==2.3.0
virtualenv==20.29.3
zipp==3.21.0
| name: cmd2
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.16
- babel==2.17.0
- cachetools==5.5.2
- certifi==2025.1.31
- chardet==5.2.0
- charset-normalizer==3.4.1
- colorama==0.4.6
- coverage==7.8.0
- distlib==0.3.9
- docutils==0.21.2
- exceptiongroup==1.2.2
- execnet==2.1.1
- filelock==3.18.0
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- jinja2==3.1.6
- markupsafe==3.0.2
- mock==5.2.0
- packaging==24.2
- platformdirs==4.3.7
- pluggy==1.5.0
- pygments==2.19.1
- pyparsing==3.2.3
- pyproject-api==1.9.0
- pytest==8.3.5
- pytest-cov==6.0.0
- pytest-xdist==3.6.1
- requests==2.32.3
- six==1.17.0
- snowballstemmer==2.2.0
- sphinx==7.4.7
- sphinx-rtd-theme==3.0.2
- sphinxcontrib-applehelp==2.0.0
- sphinxcontrib-devhelp==2.0.0
- sphinxcontrib-htmlhelp==2.1.0
- sphinxcontrib-jquery==4.1
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==2.0.0
- sphinxcontrib-serializinghtml==2.0.0
- tomli==2.2.1
- tox==4.25.0
- typing-extensions==4.13.0
- urllib3==2.3.0
- virtualenv==20.29.3
- zipp==3.21.0
prefix: /opt/conda/envs/cmd2
| [
"tests/test_transcript.py::test_base_with_transcript",
"tests/test_transcript.py::test_transcript_from_cmdloop"
]
| []
| [
"tests/test_transcript.py::Cmd2TestCase::runTest",
"tests/test_transcript.py::TestMyAppCase::runTest",
"tests/test_transcript.py::test_optparser",
"tests/test_transcript.py::test_optparser_nosuchoption",
"tests/test_transcript.py::test_comment_stripping",
"tests/test_transcript.py::test_optarser_correct_args_with_quotes_and_midline_options",
"tests/test_transcript.py::test_optarser_options_with_spaces_in_quotes",
"tests/test_transcript.py::test_commands_at_invocation",
"tests/test_transcript.py::test_select_options",
"tests/test_transcript.py::test_multiline_command_transcript_with_comments_at_beginning",
"tests/test_transcript.py::test_invalid_syntax",
"tests/test_transcript.py::test_regex_transcript"
]
| []
| MIT License | 1,366 | [
"cmd2.py",
"CHANGES.md",
"examples/exampleSession.txt",
"examples/arg_printer.py",
"README.md"
]
| [
"cmd2.py",
"CHANGES.md",
"examples/exampleSession.txt",
"examples/arg_printer.py",
"README.md"
]
|
|
networkx__networkx-2473 | 90c1645ef6dd4f80a8a09fb4ec597d99ab57f1de | 2017-06-14 00:09:45 | 3f4fd85765bf2d88188cfd4c84d0707152e6cd1e | diff --git a/networkx/algorithms/cycles.py b/networkx/algorithms/cycles.py
index a82665e10..77d2c4e87 100644
--- a/networkx/algorithms/cycles.py
+++ b/networkx/algorithms/cycles.py
@@ -1,14 +1,18 @@
-"""
-========================
-Cycle finding algorithms
-========================
-"""
# Copyright (C) 2010-2012 by
# Aric Hagberg <[email protected]>
# Dan Schult <[email protected]>
# Pieter Swart <[email protected]>
# All rights reserved.
# BSD license.
+#
+# Authors: Jon Olav Vik <[email protected]>,
+# Dan Schult <[email protected]>
+# Aric Hagberg <[email protected]>
+"""
+========================
+Cycle finding algorithms
+========================
+"""
from collections import defaultdict
@@ -17,16 +21,13 @@ from networkx.utils import *
from networkx.algorithms.traversal.edgedfs import helper_funcs, edge_dfs
__all__ = [
- 'cycle_basis','simple_cycles','recursive_simple_cycles', 'find_cycle'
+ 'cycle_basis', 'simple_cycles', 'recursive_simple_cycles', 'find_cycle'
]
-__author__ = "\n".join(['Jon Olav Vik <[email protected]>',
- 'Dan Schult <[email protected]>',
- 'Aric Hagberg <[email protected]>'])
@not_implemented_for('directed')
@not_implemented_for('multigraph')
-def cycle_basis(G,root=None):
+def cycle_basis(G, root=None):
""" Returns a list of cycles which form a basis for cycles of G.
A basis for cycles of a network is a minimal collection of
@@ -49,10 +50,10 @@ def cycle_basis(G,root=None):
Examples
--------
- >>> G=nx.Graph()
+ >>> G = nx.Graph()
>>> nx.add_cycle(G, [0, 1, 2, 3])
>>> nx.add_cycle(G, [0, 3, 4, 5])
- >>> print(nx.cycle_basis(G,0))
+ >>> print(nx.cycle_basis(G, 0))
[[3, 4, 5, 0], [1, 2, 3, 0]]
Notes
@@ -68,36 +69,36 @@ def cycle_basis(G,root=None):
--------
simple_cycles
"""
- gnodes=set(G.nodes())
- cycles=[]
+ gnodes = set(G.nodes())
+ cycles = []
while gnodes: # loop over connected components
if root is None:
- root=gnodes.pop()
- stack=[root]
- pred={root:root}
- used={root:set()}
+ root = gnodes.pop()
+ stack = [root]
+ pred = {root: root}
+ used = {root: set()}
while stack: # walk the spanning tree finding cycles
- z=stack.pop() # use last-in so cycles easier to find
- zused=used[z]
+ z = stack.pop() # use last-in so cycles easier to find
+ zused = used[z]
for nbr in G[z]:
if nbr not in used: # new node
- pred[nbr]=z
+ pred[nbr] = z
stack.append(nbr)
- used[nbr]=set([z])
- elif nbr == z: # self loops
+ used[nbr] = set([z])
+ elif nbr == z: # self loops
cycles.append([z])
- elif nbr not in zused:# found a cycle
- pn=used[nbr]
- cycle=[nbr,z]
- p=pred[z]
+ elif nbr not in zused: # found a cycle
+ pn = used[nbr]
+ cycle = [nbr, z]
+ p = pred[z]
while p not in pn:
cycle.append(p)
- p=pred[p]
+ p = pred[p]
cycle.append(p)
cycles.append(cycle)
used[nbr].add(z)
- gnodes-=set(pred)
- root=None
+ gnodes -= set(pred)
+ root = None
return cycles
@@ -125,7 +126,8 @@ def simple_cycles(G):
Examples
--------
- >>> G = nx.DiGraph([(0, 0), (0, 1), (0, 2), (1, 2), (2, 0), (2, 1), (2, 2)])
+ >>> edges = [(0, 0), (0, 1), (0, 2), (1, 2), (2, 0), (2, 1), (2, 2)]
+ >>> G = nx.DiGraph(edges)
>>> len(list(nx.simple_cycles(G)))
5
@@ -161,10 +163,10 @@ def simple_cycles(G):
--------
cycle_basis
"""
- def _unblock(thisnode,blocked,B):
- stack=set([thisnode])
+ def _unblock(thisnode, blocked, B):
+ stack = set([thisnode])
while stack:
- node=stack.pop()
+ node = stack.pop()
if node in blocked:
blocked.remove(node)
stack.update(B[node])
@@ -173,51 +175,49 @@ def simple_cycles(G):
# Johnson's algorithm requires some ordering of the nodes.
# We assign the arbitrary ordering given by the strongly connected comps
# There is no need to track the ordering as each node removed as processed.
- subG = type(G)(G.edges()) # save the actual graph so we can mutate it here
- # We only take the edges because we do not want to
- # copy edge and node attributes here.
+ # Also we save the actual graph so we can mutate it. We only take the
+ # edges because we do not want to copy edge and node attributes here.
+ subG = type(G)(G.edges())
sccs = list(nx.strongly_connected_components(subG))
while sccs:
- scc=sccs.pop()
+ scc = sccs.pop()
# order of scc determines ordering of nodes
startnode = scc.pop()
# Processing node runs "circuit" routine from recursive version
- path=[startnode]
- blocked = set() # vertex: blocked from search?
- closed = set() # nodes involved in a cycle
+ path = [startnode]
+ blocked = set() # vertex: blocked from search?
+ closed = set() # nodes involved in a cycle
blocked.add(startnode)
- B=defaultdict(set) # graph portions that yield no elementary circuit
- stack=[ (startnode,list(subG[startnode])) ] # subG gives component nbrs
+ B = defaultdict(set) # graph portions that yield no elementary circuit
+ stack = [(startnode, list(subG[startnode]))] # subG gives comp nbrs
while stack:
- thisnode,nbrs = stack[-1]
+ thisnode, nbrs = stack[-1]
if nbrs:
nextnode = nbrs.pop()
-# print thisnode,nbrs,":",nextnode,blocked,B,path,stack,startnode
-# f=raw_input("pause")
if nextnode == startnode:
yield path[:]
closed.update(path)
-# print "Found a cycle",path,closed
+# print "Found a cycle", path, closed
elif nextnode not in blocked:
path.append(nextnode)
- stack.append( (nextnode,list(subG[nextnode])) )
+ stack.append((nextnode, list(subG[nextnode])))
closed.discard(nextnode)
blocked.add(nextnode)
continue
# done with nextnode... look for more neighbors
if not nbrs: # no more nbrs
if thisnode in closed:
- _unblock(thisnode,blocked,B)
+ _unblock(thisnode, blocked, B)
else:
for nbr in subG[thisnode]:
if thisnode not in B[nbr]:
B[nbr].add(thisnode)
stack.pop()
-# assert path[-1]==thisnode
+# assert path[-1] == thisnode
path.pop()
# done processing this node
subG.remove_node(startnode)
- H=subG.subgraph(scc) # make smaller to avoid work in SCC routine
+ H = subG.subgraph(scc) # make smaller to avoid work in SCC routine
sccs.extend(list(nx.strongly_connected_components(H)))
@@ -245,7 +245,8 @@ def recursive_simple_cycles(G):
Example:
- >>> G = nx.DiGraph([(0, 0), (0, 1), (0, 2), (1, 2), (2, 0), (2, 1), (2, 2)])
+ >>> edges = [(0, 0), (0, 1), (0, 2), (1, 2), (2, 0), (2, 1), (2, 2)]
+ >>> G = nx.DiGraph(edges)
>>> nx.recursive_simple_cycles(G)
[[0], [0, 1, 2], [0, 2], [1, 2], [2]]
@@ -279,10 +280,10 @@ def recursive_simple_cycles(G):
_unblock(B[thisnode].pop())
def circuit(thisnode, startnode, component):
- closed = False # set to True if elementary path is closed
+ closed = False # set to True if elementary path is closed
path.append(thisnode)
blocked[thisnode] = True
- for nextnode in component[thisnode]: # direct successors of thisnode
+ for nextnode in component[thisnode]: # direct successors of thisnode
if nextnode == startnode:
result.append(path[:])
closed = True
@@ -293,18 +294,18 @@ def recursive_simple_cycles(G):
_unblock(thisnode)
else:
for nextnode in component[thisnode]:
- if thisnode not in B[nextnode]: # TODO: use set for speedup?
+ if thisnode not in B[nextnode]: # TODO: use set for speedup?
B[nextnode].append(thisnode)
- path.pop() # remove thisnode from path
+ path.pop() # remove thisnode from path
return closed
- path = [] # stack of nodes in current path
- blocked = defaultdict(bool) # vertex: blocked from search?
- B = defaultdict(list) # graph portions that yield no elementary circuit
- result = [] # list to accumulate the circuits found
+ path = [] # stack of nodes in current path
+ blocked = defaultdict(bool) # vertex: blocked from search?
+ B = defaultdict(list) # graph portions that yield no elementary circuit
+ result = [] # list to accumulate the circuits found
# Johnson's algorithm requires some ordering of the nodes.
# They might not be sortable so we assign an arbitrary ordering.
- ordering=dict(zip(G,range(len(G))))
+ ordering = dict(zip(G, range(len(G))))
for s in ordering:
# Build the subgraph induced by s and following nodes in the ordering
subgraph = G.subgraph(node for node in G
@@ -312,18 +313,18 @@ def recursive_simple_cycles(G):
# Find the strongly connected component in the subgraph
# that contains the least node according to the ordering
strongcomp = nx.strongly_connected_components(subgraph)
- mincomp=min(strongcomp,
- key=lambda nodes: min(ordering[n] for n in nodes))
+ mincomp = min(strongcomp, key=lambda ns: min(ordering[n] for n in ns))
component = G.subgraph(mincomp)
if component:
# smallest node in the component according to the ordering
- startnode = min(component,key=ordering.__getitem__)
+ startnode = min(component, key=ordering.__getitem__)
for node in component:
blocked[node] = False
B[node][:] = []
- dummy=circuit(startnode, startnode, component)
+ dummy = circuit(startnode, startnode, component)
return result
+
def find_cycle(G, source=None, orientation='original'):
"""
Returns the edges of a cycle found via a directed, depth-first traversal.
@@ -379,7 +380,7 @@ def find_cycle(G, source=None, orientation='original'):
is also known as a polytree).
>>> import networkx as nx
- >>> G = nx.DiGraph([(0,1), (0,2), (1,2)])
+ >>> G = nx.DiGraph([(0, 1), (0, 2), (1, 2)])
>>> try:
... find_cycle(G, orientation='original')
... except:
@@ -391,32 +392,6 @@ def find_cycle(G, source=None, orientation='original'):
"""
out_edge, key, tailhead = helper_funcs(G, orientation)
- def prune(edges, active_nodes):
- # This edge results from backtracking.
- # Pop until we get a node whose head equals the current tail.
- # So for example, we might have:
- # [(0,1), (1,2), (2,3)], (1,4)
- # which must become:
- # [(0,1)], (1,4)
- while True:
- try:
- popped_edge = edges.pop()
- except IndexError:
- edges = []
- active_nodes = {tail}
- break
- else:
- popped_head = tailhead(popped_edge)[1]
- active_nodes.remove(popped_head)
-
- if edges:
- previous_head = tailhead(edges[-1])[1]
- if tail == previous_head:
- break
- else:
- previous_head = None
- return edges, active_nodes, previous_head
-
explored = set()
cycle = []
final_node = None
@@ -435,8 +410,31 @@ def find_cycle(G, source=None, orientation='original'):
for edge in edge_dfs(G, start_node, orientation):
# Determine if this edge is a continuation of the active path.
tail, head = tailhead(edge)
+ if head in explored:
+ # Then we've already explored it. No loop is possible.
+ continue
if previous_head is not None and tail != previous_head:
- edges, active_nodes, previous_head = prune(edges, active_nodes)
+ # This edge results from backtracking.
+ # Pop until we get a node whose head equals the current tail.
+ # So for example, we might have:
+ # (0, 1), (1, 2), (2, 3), (1, 4)
+ # which must become:
+ # (0, 1), (1, 4)
+ while True:
+ try:
+ popped_edge = edges.pop()
+ except IndexError:
+ edges = []
+ active_nodes = {tail}
+ break
+ else:
+ popped_head = tailhead(popped_edge)[1]
+ active_nodes.remove(popped_head)
+
+ if edges:
+ last_head = tailhead(edges[-1])[1]
+ if tail == last_head:
+ break
edges.append(edge)
if head in active_nodes:
@@ -445,11 +443,9 @@ def find_cycle(G, source=None, orientation='original'):
final_node = head
break
else:
- previous_head = head
seen.add(head)
active_nodes.add(head)
- if head in explored:
- edges, active_nodes, previous_head = prune(edges, active_nodes)
+ previous_head = head
if cycle:
break
@@ -469,4 +465,3 @@ def find_cycle(G, source=None, orientation='original'):
break
return cycle[i:]
-
| find_cycle() doesn't find cycle in cyclic digraph
```python
print("version: {}".format(nx.__version__))
dg = nx.DiGraph()
dg.add_edge(1, 0)
dg.add_edge(2, 0)
dg.add_edge(1, 2)
dg.add_edge(2, 1)
find_cycle(dg)
```
```
version: 1.11
Traceback (most recent call last):
File "<ipython-input-127-ea87764febbd>", line 7, in <module>
find_cycle(dg)
File "xxxx\AppData\Local\Continuum\Anaconda\lib\site-packages\networkx\algorithms\cycles.py", line 453, in find_cycle
raise nx.exception.NetworkXNoCycle('No cycle found.')
```
I believe the problem lies in that when `head` is a previously explored node all paths from the current `start_node` are ignored instead of just pruning from `head`
```python
elif head in explored:
# Then we've already explored it. No loop is possible.
break
```
Removing this condition fixes the bug as far as I can tell given the graphs I am working with. However, a more efficient solution that does proper pruning would be more efficient especially for larger graphs. | networkx/networkx | diff --git a/networkx/algorithms/tests/test_cycles.py b/networkx/algorithms/tests/test_cycles.py
index f4335d134..6f13c4c2c 100644
--- a/networkx/algorithms/tests/test_cycles.py
+++ b/networkx/algorithms/tests/test_cycles.py
@@ -7,55 +7,58 @@ from networkx.algorithms import find_cycle
FORWARD = nx.algorithms.edgedfs.FORWARD
REVERSE = nx.algorithms.edgedfs.REVERSE
+
class TestCycles:
def setUp(self):
- G=networkx.Graph()
- nx.add_cycle(G, [0,1,2,3])
- nx.add_cycle(G, [0,3,4,5])
- nx.add_cycle(G, [0,1,6,7,8])
- G.add_edge(8,9)
- self.G=G
-
- def is_cyclic_permutation(self,a,b):
- n=len(a)
- if len(b)!=n:
+ G = networkx.Graph()
+ nx.add_cycle(G, [0, 1, 2, 3])
+ nx.add_cycle(G, [0, 3, 4, 5])
+ nx.add_cycle(G, [0, 1, 6, 7, 8])
+ G.add_edge(8, 9)
+ self.G = G
+
+ def is_cyclic_permutation(self, a, b):
+ n = len(a)
+ if len(b) != n:
return False
- l=a+a
- return any(l[i:i+n]==b for i in range(2*n-n+1))
+ l = a + a
+ return any(l[i:i+n] == b for i in range(2 * n - n + 1))
def test_cycle_basis(self):
- G=self.G
- cy=networkx.cycle_basis(G,0)
- sort_cy= sorted( sorted(c) for c in cy )
- assert_equal(sort_cy, [[0,1,2,3],[0,1,6,7,8],[0,3,4,5]])
- cy=networkx.cycle_basis(G,1)
- sort_cy= sorted( sorted(c) for c in cy )
- assert_equal(sort_cy, [[0,1,2,3],[0,1,6,7,8],[0,3,4,5]])
- cy=networkx.cycle_basis(G,9)
- sort_cy= sorted( sorted(c) for c in cy )
- assert_equal(sort_cy, [[0,1,2,3],[0,1,6,7,8],[0,3,4,5]])
+ G = self.G
+ cy = networkx.cycle_basis(G, 0)
+ sort_cy = sorted(sorted(c) for c in cy)
+ assert_equal(sort_cy, [[0, 1, 2, 3], [0, 1, 6, 7, 8], [0, 3, 4, 5]])
+ cy = networkx.cycle_basis(G, 1)
+ sort_cy = sorted(sorted(c) for c in cy)
+ assert_equal(sort_cy, [[0, 1, 2, 3], [0, 1, 6, 7, 8], [0, 3, 4, 5]])
+ cy = networkx.cycle_basis(G, 9)
+ sort_cy = sorted(sorted(c) for c in cy)
+ assert_equal(sort_cy, [[0, 1, 2, 3], [0, 1, 6, 7, 8], [0, 3, 4, 5]])
# test disconnected graphs
nx.add_cycle(G, "ABC")
- cy=networkx.cycle_basis(G,9)
- sort_cy= sorted(sorted(c) for c in cy[:-1]) + [sorted(cy[-1])]
- assert_equal(sort_cy, [[0,1,2,3],[0,1,6,7,8],[0,3,4,5],['A','B','C']])
+ cy = networkx.cycle_basis(G, 9)
+ sort_cy = sorted(sorted(c) for c in cy[:-1]) + [sorted(cy[-1])]
+ assert_equal(sort_cy, [[0, 1, 2, 3], [0, 1, 6, 7, 8], [0, 3, 4, 5],
+ ['A', 'B', 'C']])
@raises(nx.NetworkXNotImplemented)
def test_cycle_basis(self):
- G=nx.DiGraph()
- cy=networkx.cycle_basis(G,0)
+ G = nx.DiGraph()
+ cy = networkx.cycle_basis(G, 0)
@raises(nx.NetworkXNotImplemented)
def test_cycle_basis(self):
- G=nx.MultiGraph()
- cy=networkx.cycle_basis(G,0)
+ G = nx.MultiGraph()
+ cy = networkx.cycle_basis(G, 0)
def test_simple_cycles(self):
- G = nx.DiGraph([(0, 0), (0, 1), (0, 2), (1, 2), (2, 0), (2, 1), (2, 2)])
- cc=sorted(nx.simple_cycles(G))
- ca=[[0], [0, 1, 2], [0, 2], [1, 2], [2]]
+ edges = [(0, 0), (0, 1), (0, 2), (1, 2), (2, 0), (2, 1), (2, 2)]
+ G = nx.DiGraph(edges)
+ cc = sorted(nx.simple_cycles(G))
+ ca = [[0], [0, 1, 2], [0, 2], [1, 2], [2]]
for c in cc:
- assert_true(any(self.is_cyclic_permutation(c,rc) for rc in ca))
+ assert_true(any(self.is_cyclic_permutation(c, rc) for rc in ca))
@raises(nx.NetworkXNotImplemented)
def test_simple_cycles_graph(self):
@@ -64,88 +67,89 @@ class TestCycles:
def test_unsortable(self):
# TODO What does this test do? das 6/2013
- G=nx.DiGraph()
- nx.add_cycle(G, ['a',1])
- c=list(nx.simple_cycles(G))
+ G = nx.DiGraph()
+ nx.add_cycle(G, ['a', 1])
+ c = list(nx.simple_cycles(G))
def test_simple_cycles_small(self):
G = nx.DiGraph()
- nx.add_cycle(G, [1,2,3])
- c=sorted(nx.simple_cycles(G))
- assert_equal(len(c),1)
- assert_true(self.is_cyclic_permutation(c[0],[1,2,3]))
- nx.add_cycle(G, [10,20,30])
- cc=sorted(nx.simple_cycles(G))
- ca=[[1,2,3],[10,20,30]]
+ nx.add_cycle(G, [1, 2, 3])
+ c = sorted(nx.simple_cycles(G))
+ assert_equal(len(c), 1)
+ assert_true(self.is_cyclic_permutation(c[0], [1, 2, 3]))
+ nx.add_cycle(G, [10, 20, 30])
+ cc = sorted(nx.simple_cycles(G))
+ ca = [[1, 2, 3], [10, 20, 30]]
for c in cc:
- assert_true(any(self.is_cyclic_permutation(c,rc) for rc in ca))
+ assert_true(any(self.is_cyclic_permutation(c, rc) for rc in ca))
def test_simple_cycles_empty(self):
G = nx.DiGraph()
- assert_equal(list(nx.simple_cycles(G)),[])
+ assert_equal(list(nx.simple_cycles(G)), [])
def test_complete_directed_graph(self):
# see table 2 in Johnson's paper
- ncircuits=[1,5,20,84,409,2365,16064]
- for n,c in zip(range(2,9),ncircuits):
- G=nx.DiGraph(nx.complete_graph(n))
- assert_equal(len(list(nx.simple_cycles(G))),c)
+ ncircuits = [1, 5, 20, 84, 409, 2365, 16064]
+ for n, c in zip(range(2, 9), ncircuits):
+ G = nx.DiGraph(nx.complete_graph(n))
+ assert_equal(len(list(nx.simple_cycles(G))), c)
- def worst_case_graph(self,k):
+ def worst_case_graph(self, k):
# see figure 1 in Johnson's paper
# this graph has excactly 3k simple cycles
- G=nx.DiGraph()
- for n in range(2,k+2):
- G.add_edge(1,n)
- G.add_edge(n,k+2)
- G.add_edge(2*k+1,1)
- for n in range(k+2,2*k+2):
- G.add_edge(n,2*k+2)
- G.add_edge(n,n+1)
- G.add_edge(2*k+3,k+2)
- for n in range(2*k+3,3*k+3):
- G.add_edge(2*k+2,n)
- G.add_edge(n,3*k+3)
- G.add_edge(3*k+3,2*k+2)
+ G = nx.DiGraph()
+ for n in range(2, k+2):
+ G.add_edge(1, n)
+ G.add_edge(n, k+2)
+ G.add_edge(2*k+1, 1)
+ for n in range(k+2, 2*k+2):
+ G.add_edge(n, 2*k+2)
+ G.add_edge(n, n+1)
+ G.add_edge(2*k+3, k+2)
+ for n in range(2*k+3, 3*k+3):
+ G.add_edge(2*k+2, n)
+ G.add_edge(n, 3*k+3)
+ G.add_edge(3*k+3, 2*k+2)
return G
def test_worst_case_graph(self):
# see figure 1 in Johnson's paper
- for k in range(3,10):
- G=self.worst_case_graph(k)
- l=len(list(nx.simple_cycles(G)))
- assert_equal(l,3*k)
+ for k in range(3, 10):
+ G = self.worst_case_graph(k)
+ l = len(list(nx.simple_cycles(G)))
+ assert_equal(l, 3*k)
def test_recursive_simple_and_not(self):
- for k in range(2,10):
- G=self.worst_case_graph(k)
- cc=sorted(nx.simple_cycles(G))
- rcc=sorted(nx.recursive_simple_cycles(G))
- assert_equal(len(cc),len(rcc))
+ for k in range(2, 10):
+ G = self.worst_case_graph(k)
+ cc = sorted(nx.simple_cycles(G))
+ rcc = sorted(nx.recursive_simple_cycles(G))
+ assert_equal(len(cc), len(rcc))
for c in cc:
- assert_true(any(self.is_cyclic_permutation(c,rc) for rc in rcc))
+ assert_true(any(self.is_cyclic_permutation(c, r) for r in rcc))
for rc in rcc:
- assert_true(any(self.is_cyclic_permutation(rc,c) for c in cc))
+ assert_true(any(self.is_cyclic_permutation(rc, c) for c in cc))
def test_simple_graph_with_reported_bug(self):
- G=nx.DiGraph()
- edges = [(0, 2), (0, 3), (1, 0), (1, 3), (2, 1), (2, 4), \
- (3, 2), (3, 4), (4, 0), (4, 1), (4, 5), (5, 0), \
- (5, 1), (5, 2), (5, 3)]
+ G = nx.DiGraph()
+ edges = [(0, 2), (0, 3), (1, 0), (1, 3), (2, 1), (2, 4),
+ (3, 2), (3, 4), (4, 0), (4, 1), (4, 5), (5, 0),
+ (5, 1), (5, 2), (5, 3)]
G.add_edges_from(edges)
- cc=sorted(nx.simple_cycles(G))
- assert_equal(len(cc),26)
- rcc=sorted(nx.recursive_simple_cycles(G))
- assert_equal(len(cc),len(rcc))
+ cc = sorted(nx.simple_cycles(G))
+ assert_equal(len(cc), 26)
+ rcc = sorted(nx.recursive_simple_cycles(G))
+ assert_equal(len(cc), len(rcc))
for c in cc:
- assert_true(any(self.is_cyclic_permutation(c,rc) for rc in rcc))
+ assert_true(any(self.is_cyclic_permutation(c, rc) for rc in rcc))
for rc in rcc:
- assert_true(any(self.is_cyclic_permutation(rc,c) for c in cc))
+ assert_true(any(self.is_cyclic_permutation(rc, c) for c in cc))
# These tests might fail with hash randomization since they depend on
# edge_dfs. For more information, see the comments in:
# networkx/algorithms/traversal/tests/test_edgedfs.py
+
class TestFindCycle(object):
def setUp(self):
self.nodes = [0, 1, 2, 3]
@@ -158,13 +162,13 @@ class TestFindCycle(object):
def test_digraph(self):
G = nx.DiGraph(self.edges)
x = list(find_cycle(G, self.nodes))
- x_= [(0, 1), (1, 0)]
+ x_ = [(0, 1), (1, 0)]
assert_equal(x, x_)
def test_multigraph(self):
G = nx.MultiGraph(self.edges)
x = list(find_cycle(G, self.nodes))
- x_ = [(0, 1, 0), (1, 0, 1)] # or (1, 0, 2)
+ x_ = [(0, 1, 0), (1, 0, 1)] # or (1, 0, 2)
# Hash randomization...could be any edge.
assert_equal(x[0], x_[0])
assert_equal(x[1][:2], x_[1][:2])
@@ -172,7 +176,7 @@ class TestFindCycle(object):
def test_multidigraph(self):
G = nx.MultiDiGraph(self.edges)
x = list(find_cycle(G, self.nodes))
- x_ = [(0, 1, 0), (1, 0, 0)] # (1, 0, 1)
+ x_ = [(0, 1, 0), (1, 0, 0)] # (1, 0, 1)
assert_equal(x[0], x_[0])
assert_equal(x[1][:2], x_[1][:2])
@@ -185,16 +189,16 @@ class TestFindCycle(object):
def test_multidigraph_ignore(self):
G = nx.MultiDiGraph(self.edges)
x = list(find_cycle(G, self.nodes, orientation='ignore'))
- x_ = [(0, 1, 0, FORWARD), (1, 0, 0, FORWARD)] # or (1, 0, 1, 1)
+ x_ = [(0, 1, 0, FORWARD), (1, 0, 0, FORWARD)] # or (1, 0, 1, 1)
assert_equal(x[0], x_[0])
assert_equal(x[1][:2], x_[1][:2])
assert_equal(x[1][3], x_[1][3])
def test_multidigraph_ignore2(self):
# Loop traversed an edge while ignoring its orientation.
- G = nx.MultiDiGraph([(0,1), (1,2), (1,2)])
- x = list(find_cycle(G, [0,1,2], orientation='ignore'))
- x_ = [(1,2,0,FORWARD), (1,2,1,REVERSE)]
+ G = nx.MultiDiGraph([(0, 1), (1, 2), (1, 2)])
+ x = list(find_cycle(G, [0, 1, 2], orientation='ignore'))
+ x_ = [(1, 2, 0, FORWARD), (1, 2, 1, REVERSE)]
assert_equal(x, x_)
def test_multidigraph_ignore2(self):
@@ -202,24 +206,23 @@ class TestFindCycle(object):
# The goal here is to cover the case when 2 to be researched from 4,
# when 4 is visited from the first time (so we must make sure that 4
# is not visited from 2, and hence, we respect the edge orientation).
- G = nx.MultiDiGraph([(0,1), (1,2), (2,3), (4,2)])
+ G = nx.MultiDiGraph([(0, 1), (1, 2), (2, 3), (4, 2)])
assert_raises(nx.exception.NetworkXNoCycle,
- find_cycle, G, [0,1,2,3,4], orientation='original')
+ find_cycle, G, [0, 1, 2, 3, 4], orientation='original')
def test_dag(self):
- G = nx.DiGraph([(0,1), (0,2), (1,2)])
+ G = nx.DiGraph([(0, 1), (0, 2), (1, 2)])
assert_raises(nx.exception.NetworkXNoCycle,
find_cycle, G, orientation='original')
x = list(find_cycle(G, orientation='ignore'))
- assert_equal(x, [(0,1,FORWARD), (1,2,FORWARD), (0,2,REVERSE)])
+ assert_equal(x, [(0, 1, FORWARD), (1, 2, FORWARD), (0, 2, REVERSE)])
def test_prev_explored(self):
# https://github.com/networkx/networkx/issues/2323
G = nx.DiGraph()
- G.add_edges_from([(1,0), (2,0), (1,2), (2,1)])
- assert_raises(nx.exception.NetworkXNoCycle,
- find_cycle, G, source=0)
+ G.add_edges_from([(1, 0), (2, 0), (1, 2), (2, 1)])
+ assert_raises(nx.NetworkXNoCycle, find_cycle, G, source=0)
x = list(nx.find_cycle(G, 1))
x_ = [(1, 2), (2, 1)]
assert_equal(x, x_)
@@ -227,3 +230,15 @@ class TestFindCycle(object):
x = list(nx.find_cycle(G, 2))
x_ = [(2, 1), (1, 2)]
assert_equal(x, x_)
+
+ x = list(nx.find_cycle(G))
+ x_ = [(1, 2), (2, 1)]
+ assert_equal(x, x_)
+
+ def test_no_cycle(self):
+ # https://github.com/networkx/networkx/issues/2439
+
+ G = nx.DiGraph()
+ G.add_edges_from([(1, 2), (2, 0), (3, 1), (3, 2)])
+ assert_raises(nx.NetworkXNoCycle, find_cycle, G, source=0)
+ assert_raises(nx.NetworkXNoCycle, find_cycle, G)
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 1
} | help | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"nose",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y libgdal-dev graphviz"
],
"python": "3.6",
"reqs_path": [
"requirements/default.txt",
"requirements/test.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
decorator==5.1.1
importlib-metadata==4.8.3
iniconfig==1.1.1
-e git+https://github.com/networkx/networkx.git@90c1645ef6dd4f80a8a09fb4ec597d99ab57f1de#egg=networkx
nose==1.3.7
packaging==21.3
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
tomli==1.2.3
typing_extensions==4.1.1
zipp==3.6.0
| name: networkx
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- decorator==5.1.1
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- nose==1.3.7
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- tomli==1.2.3
- typing-extensions==4.1.1
- zipp==3.6.0
prefix: /opt/conda/envs/networkx
| [
"networkx/algorithms/tests/test_cycles.py::TestFindCycle::test_no_cycle"
]
| [
"networkx/algorithms/tests/test_cycles.py::TestFindCycle::test_graph",
"networkx/algorithms/tests/test_cycles.py::TestFindCycle::test_digraph",
"networkx/algorithms/tests/test_cycles.py::TestFindCycle::test_multigraph",
"networkx/algorithms/tests/test_cycles.py::TestFindCycle::test_multidigraph",
"networkx/algorithms/tests/test_cycles.py::TestFindCycle::test_digraph_ignore",
"networkx/algorithms/tests/test_cycles.py::TestFindCycle::test_multidigraph_ignore"
]
| [
"networkx/algorithms/tests/test_cycles.py::TestCycles::test_cycle_basis",
"networkx/algorithms/tests/test_cycles.py::TestCycles::test_simple_cycles",
"networkx/algorithms/tests/test_cycles.py::TestCycles::test_simple_cycles_graph",
"networkx/algorithms/tests/test_cycles.py::TestCycles::test_unsortable",
"networkx/algorithms/tests/test_cycles.py::TestCycles::test_simple_cycles_small",
"networkx/algorithms/tests/test_cycles.py::TestCycles::test_simple_cycles_empty",
"networkx/algorithms/tests/test_cycles.py::TestCycles::test_complete_directed_graph",
"networkx/algorithms/tests/test_cycles.py::TestCycles::test_worst_case_graph",
"networkx/algorithms/tests/test_cycles.py::TestCycles::test_recursive_simple_and_not",
"networkx/algorithms/tests/test_cycles.py::TestCycles::test_simple_graph_with_reported_bug",
"networkx/algorithms/tests/test_cycles.py::TestFindCycle::test_multidigraph_ignore2",
"networkx/algorithms/tests/test_cycles.py::TestFindCycle::test_dag",
"networkx/algorithms/tests/test_cycles.py::TestFindCycle::test_prev_explored"
]
| []
| BSD 3-Clause | 1,367 | [
"networkx/algorithms/cycles.py"
]
| [
"networkx/algorithms/cycles.py"
]
|
|
seperman__deepdiff-75 | 0d11d0874ca6c540d86c7429f9072d774f7b4a94 | 2017-06-14 03:40:03 | d4918e17c555df1b46827f865c5c105097199f80 | maxrothman: Looks like I made a typo, which caused tests to fail. It should be fixed now, and when this merges, you can just use the "squash and merge" option to hide the "squash me" commit.
maxrothman: Never mind about the squashing thing, I handled it myself. Just getting this to pass tests now.
coveralls:
[](https://:/builds/12038008)
Coverage remained the same at 100.0% when pulling **f87c032ab810fa719e2625ba96e61913a9585565 on maxrothman:patch-1** into **0d11d0874ca6c540d86c7429f9072d774f7b4a94 on seperman:master**.
coveralls:
[](https://:/builds/12038178)
Coverage remained the same at 100.0% when pulling **b794c87090b0e63ef97b10cdd090697316e4b934 on maxrothman:patch-1** into **0d11d0874ca6c540d86c7429f9072d774f7b4a94 on seperman:master**.
| diff --git a/deepdiff/search.py b/deepdiff/search.py
index 26ba882..f9fc465 100644
--- a/deepdiff/search.py
+++ b/deepdiff/search.py
@@ -131,7 +131,10 @@ class DeepSearch(dict):
if is_namedtuple:
obj = obj._asdict()
else:
- obj = obj.__dict__
+ # Skip magic methods. Slightly hacky, but unless people are defining
+ # new magic methods they want to search, it should work fine.
+ obj = {i: getattr(obj, i) for i in dir(obj)
+ if not (i.startswith('__') and i.endswith('__'))}
except AttributeError:
try:
obj = {i: getattr(obj, i) for i in obj.__slots__}
| DeepSearch doesn't search for inherited class attributes
```python
class Foo(object):
bar = 'baz'
deepdiff.DeepSearch(Foo, 'baz')
#-> {}
```
The solution to this looks pretty simple: in `DeepSearch.__search_obj()`, change [this line](https://github.com/seperman/deepdiff/blob/master/deepdiff/search.py#L134) to `obj = {getattr(attr) for attr in dir(obj)}`. `dir()` traverses the inheritence tree, finding attributes not in `__dict__`. I'm happy to make a PR if this looks good to you. | seperman/deepdiff | diff --git a/tests/test_search.py b/tests/test_search.py
index 49929c6..6257875 100644
--- a/tests/test_search.py
+++ b/tests/test_search.py
@@ -286,6 +286,18 @@ class DeepSearchTestCase(unittest.TestCase):
result = {'matched_values': {'root'}}
self.assertEqual(DeepSearch(obj, item, verbose_level=1, case_sensitive=False), result)
+ def test_search_inherited_attributes(self):
+ class Parent(object):
+ a = 1
+
+ class Child(Parent):
+ b = 2
+
+ obj = Child()
+ item = 1
+ result = {'matched_values': {'root.a'}}
+ self.assertEqual(DeepSearch(obj, item, verbose_level=1), result)
+
class GrepTestCase(unittest.TestCase):
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 1
} | 3.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest",
"mock"
],
"pre_install": null,
"python": "3.6",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
certifi==2021.5.30
-e git+https://github.com/seperman/deepdiff.git@0d11d0874ca6c540d86c7429f9072d774f7b4a94#egg=deepdiff
importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
jsonpickle==2.2.0
mock==5.2.0
more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
pytest==6.2.4
toml @ file:///tmp/build/80754af9/toml_1616166611790/work
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
| name: deepdiff
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=21.4.0=pyhd3eb1b0_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- importlib-metadata=4.8.1=py36h06a4308_0
- importlib_metadata=4.8.1=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- more-itertools=8.12.0=pyhd3eb1b0_0
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pip=21.2.2=py36h06a4308_0
- pluggy=0.13.1=py36h06a4308_0
- py=1.11.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pytest=6.2.4=py36h06a4308_2
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- toml=0.10.2=pyhd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zipp=3.6.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
- pip:
- jsonpickle==2.2.0
- mock==5.2.0
prefix: /opt/conda/envs/deepdiff
| [
"tests/test_search.py::DeepSearchTestCase::test_search_inherited_attributes"
]
| []
| [
"tests/test_search.py::DeepSearchTestCase::test_bad_attribute",
"tests/test_search.py::DeepSearchTestCase::test_case_insensitive_of_str_in_list",
"tests/test_search.py::DeepSearchTestCase::test_case_insensitive_of_str_in_one_liner",
"tests/test_search.py::DeepSearchTestCase::test_case_sensitive_of_str_in_list",
"tests/test_search.py::DeepSearchTestCase::test_case_sensitive_of_str_in_one_liner",
"tests/test_search.py::DeepSearchTestCase::test_custom_object",
"tests/test_search.py::DeepSearchTestCase::test_custom_object_in_dictionary_verbose",
"tests/test_search.py::DeepSearchTestCase::test_custom_object_skip_path",
"tests/test_search.py::DeepSearchTestCase::test_custom_object_verbose",
"tests/test_search.py::DeepSearchTestCase::test_int_in_dictionary",
"tests/test_search.py::DeepSearchTestCase::test_loop",
"tests/test_search.py::DeepSearchTestCase::test_loop_in_lists",
"tests/test_search.py::DeepSearchTestCase::test_named_tuples_verbose",
"tests/test_search.py::DeepSearchTestCase::test_number_in_list",
"tests/test_search.py::DeepSearchTestCase::test_skip_dictionary_path",
"tests/test_search.py::DeepSearchTestCase::test_skip_list_path",
"tests/test_search.py::DeepSearchTestCase::test_skip_path1",
"tests/test_search.py::DeepSearchTestCase::test_skip_type_str",
"tests/test_search.py::DeepSearchTestCase::test_string_in_dictionary",
"tests/test_search.py::DeepSearchTestCase::test_string_in_dictionary_case_insensitive",
"tests/test_search.py::DeepSearchTestCase::test_string_in_dictionary_in_list_verbose",
"tests/test_search.py::DeepSearchTestCase::test_string_in_dictionary_key_case_insensitive_partial",
"tests/test_search.py::DeepSearchTestCase::test_string_in_dictionary_verbose",
"tests/test_search.py::DeepSearchTestCase::test_string_in_list",
"tests/test_search.py::DeepSearchTestCase::test_string_in_list_verbose",
"tests/test_search.py::DeepSearchTestCase::test_string_in_list_verbose2",
"tests/test_search.py::DeepSearchTestCase::test_string_in_list_verbose3",
"tests/test_search.py::DeepSearchTestCase::test_string_in_root",
"tests/test_search.py::DeepSearchTestCase::test_string_in_root_verbose",
"tests/test_search.py::DeepSearchTestCase::test_string_in_set_verbose",
"tests/test_search.py::DeepSearchTestCase::test_string_in_tuple",
"tests/test_search.py::DeepSearchTestCase::test_unknown_parameters",
"tests/test_search.py::GrepTestCase::test_grep_dict"
]
| []
| MIT License | 1,368 | [
"deepdiff/search.py"
]
| [
"deepdiff/search.py"
]
|
acorg__slurm-pipeline-23 | b10a0383fa91cf20ea1844a63eb58936d7906a7a | 2017-06-14 11:36:32 | b10a0383fa91cf20ea1844a63eb58936d7906a7a | diff --git a/Makefile b/Makefile
index 99c405f..08eef5b 100644
--- a/Makefile
+++ b/Makefile
@@ -1,4 +1,4 @@
-.PHONY: check, tcheck, pep8, pyflakes, lint, wc, clean, clobber, upload
+.PHONY: check, tcheck, examples-test, pep8, pyflakes, lint, wc, clean, clobber, upload
check:
python -m discover -v
@@ -6,6 +6,12 @@ check:
tcheck:
trial --rterrors test
+examples-test:
+ make -C examples/word-count run clean
+ make -C examples/word-count-with-skipping run clean
+ make -C examples/blast run clean
+ make -C examples/blast-with-force-and-simulate run clean
+
pep8:
find . -path './.tox' -prune -o -path './build' -prune -o -path './dist' -prune -o -name '*.py' -print0 | xargs -0 pep8
diff --git a/README.md b/README.md
index 8e85e06..230930e 100644
--- a/README.md
+++ b/README.md
@@ -248,7 +248,7 @@ the full list:
## Step script environment variables
The following environment variables are set when a step's script is
-exectued:
+executed:
* `SP_ORIGINAL_ARGS` contains the (space-separated) list of arguments
originally passed to `slurm-pipeline.py`. Most scripts will not need to
@@ -275,26 +275,42 @@ exectued:
invokes `sbatch` to guarantee that the execution of the script does not
begin until after the tasks from all dependent steps have finished
successfully.
+* `SP_NICE_ARG` contains a string that should be put on the command line when
+ calling `sbatch`. This sets the priority level of the SLURM jobs. The numeric
+ nice value can be set using the `--nice` option when running `slurm-pipeline.py`.
+ See `man sbatch` for details on nice values. Note that calling `sbatch` with
+ this value is not enforced. The `--nice` option simply provides a simple way
+ to specify a priority value on the command line and to pass it to scripts.
+ Scripts can always ignore it or use their own value. If no value is given,
+ `SP_NICE_ARG` will contain just the string `--nice`, which will tell SLURM
+ to use a default nice value. It is useful to use a default nice value as it
+ allows a regular user to later submit jobs with a higher priority (lower nice
+ value). A regular user cannot use a negative nice value, so if a default
+ nice value was not used, all jobs get nice value `0` which prevents the user
+ from submitting higher priority jobs later on.
+
+## Calling sbatch with SP_DEPENDENCY_ARG and SP_NICE_ARG
+
+The canonical way to use `SP_DEPENDENCY_ARG` and `SP_NICE_ARG` when calling
+`sbatch` in a step shell script is as follows:
- The canonical way to use `SP_DEPENDENCY_ARG` in a step script is as
- follows:
+```sh
+jobid=`sbatch $SP_DEPENDENCY_ARG $SP_NICE_ARG script.sh | cut -f4 -d' '`
+echo "TASK: $task $jobid"
+```
- ```sh
- jobid=`sbatch -n 1 $SP_DEPENDENCY_ARG submit.sh $task | cut -f4 -d' '`
- echo "TASK: $task $jobid"
- ```
+This calls `sbatch` with the dependency and nice arguments (if any) and
+gets the job id from the `sbatch` output (`sbatch` prints a line like
+`Submitted batch job 3779695`) and the `cut` in the above pulls out just
+the job id. The task name is then emitted, along with the job id.
- This calls `sbatch` with the dependency argument (if any) and
- simultaneously gets the job id from the `sbatch` output (`sbatch` prints a
- line like `Submitted batch job 3779695`) and the `cut` in the above pulls
- out just the job id. The task name is then emitted, along with the job id.
## Separation of concerns
`slurm-pipeline.py` doesn't interact with SLURM at all. Actually, the
-*only* thing it knows about SLURM is how to construct a dependency argument
-for `sbatch` (so it could in theory be generalized to support other
-workload managers).
+*only* things it knows about SLURM is how to construct dependency and nice
+arguments for `sbatch` (so it could in theory be generalized to support
+other workload managers).
To use `slurm-pipeline.py` you need to make a specification file such as
the one above to indicate the steps in your pipeline, their scripts, and
@@ -716,19 +732,20 @@ the `examples` directory.
SLURM allows users to submit scripts for later execution. Thus there are
two distinct phases of operation: the time of scheduling and the later
time(s) of script excecution. When using `slurm-pipeline.py` it is
-important to understand this distinction.
+very important to keep this distinction in mind.
The reason is that `slurm-pipeline.py` only examines the output of
-scheduling scripts for task names and job ids. If a scheduling script calls
-`sbatch` to execute a later script, the output of that later script cannot
-be checked for `TASK: xxx 97322` style output because `slurm-pipeline.py`
-is completely unaware of the existence of that script. Thus all tasks and
-job dependencies must be established at the time of scheduling.
+*scheduling* scripts for task names and job ids. If a scheduling script
+calls `sbatch` to execute a later script, the output of that later script
+(when it is finally run by SLURM) cannot be checked for `TASK: xxx 97322`
+style output because `slurm-pipeline.py` is completely unaware of the
+existence of that script. In other words, *all tasks and job dependencies
+must be established at the time of scheduling.*
Normally this is not an issue, as many pipelines fall nicely into the model
used by `slurm-pipeline.py`. But sometimes it is necessary to write a step
-script that performs a slow synchronous operation in order to emit
-tasks. For example, you might have a very large input file that you want to
+script that performs a slow synchronous operation in order to emit tasks.
+For example, you might have a very large input file that you want to
process in smaller pieces. You can use `split` to break the file into
pieces and emit task names such as `xaa`, `xab` etc, but you must do this
synchronously (i.e., in the step script, not in a script submitted to
diff --git a/bin/slurm-pipeline.py b/bin/slurm-pipeline.py
index d041de9..93fef8a 100755
--- a/bin/slurm-pipeline.py
+++ b/bin/slurm-pipeline.py
@@ -64,6 +64,12 @@ parser.add_argument(
'i.e., those with no dependencies, in the current specification may '
'begin.'))
+parser.add_argument(
+ '--nice', type=int,
+ help=('A numeric nice (priority) value, in the range -10000 (highest '
+ 'priority) to 10000 (lowest priority). Note that only privileged '
+ 'users can specify a negative adjustment.'))
+
args, scriptArgs = parser.parse_known_args()
sp = SlurmPipeline(args.specification)
@@ -73,7 +79,7 @@ startAfter = list(map(int, args.startAfter)) if args.startAfter else None
status = sp.schedule(
force=args.force, firstStep=args.firstStep, lastStep=args.lastStep,
sleep=args.sleep, scriptArgs=scriptArgs, skip=args.skip,
- startAfter=startAfter)
+ startAfter=startAfter, nice=args.nice)
statusAsJSON = sp.specificationToJSON(status)
diff --git a/examples/blast-with-force-and-simulate/README.md b/examples/blast-with-force-and-simulate/README.md
new file mode 100644
index 0000000..bf81acc
--- /dev/null
+++ b/examples/blast-with-force-and-simulate/README.md
@@ -0,0 +1,1 @@
+A description of this example can be found in the [top-level README](../../README.md) file.
diff --git a/examples/blast/2-run-blast.sh b/examples/blast/2-run-blast.sh
index 816264b..75a6760 100755
--- a/examples/blast/2-run-blast.sh
+++ b/examples/blast/2-run-blast.sh
@@ -2,7 +2,7 @@
./blast.sh -query $1 -outfmt "6 bitscore sseqid qseqid"
-# Clean up (remove the split FASTA file made by split-fasta.sh).
+# Clean up (remove the split FASTA file made by 1-split-fasta.sh).
rm $1
echo "TASK: $1"
diff --git a/examples/blast/README.md b/examples/blast/README.md
new file mode 100644
index 0000000..bf81acc
--- /dev/null
+++ b/examples/blast/README.md
@@ -0,0 +1,1 @@
+A description of this example can be found in the [top-level README](../../README.md) file.
diff --git a/examples/blast/blast.sh b/examples/blast/blast.sh
index 4943ed0..a44c43f 100755
--- a/examples/blast/blast.sh
+++ b/examples/blast/blast.sh
@@ -1,10 +1,9 @@
#!/bin/bash
-
# $1 = "--query" (to simulate running BLAST), which we just ignore.
# $3 = "--outfmt" (ditto).
-# $2 is given to us by run-blast.sh (it's one of the x?? FASTA files). Pull
+# $2 is given to us by 2-run-blast.sh (it's one of the x?? FASTA files). Pull
# out the query id so we can make fake BLAST output for it.
queryId=`head -n 1 $2 | cut -c2-`
diff --git a/examples/double-collect/README.md b/examples/double-collect/README.md
new file mode 100644
index 0000000..bf81acc
--- /dev/null
+++ b/examples/double-collect/README.md
@@ -0,0 +1,1 @@
+A description of this example can be found in the [top-level README](../../README.md) file.
diff --git a/examples/word-count-with-skipping/README.md b/examples/word-count-with-skipping/README.md
new file mode 100644
index 0000000..bf81acc
--- /dev/null
+++ b/examples/word-count-with-skipping/README.md
@@ -0,0 +1,1 @@
+A description of this example can be found in the [top-level README](../../README.md) file.
diff --git a/examples/word-count/README.md b/examples/word-count/README.md
new file mode 100644
index 0000000..bf81acc
--- /dev/null
+++ b/examples/word-count/README.md
@@ -0,0 +1,1 @@
+A description of this example can be found in the [top-level README](../../README.md) file.
diff --git a/setup.py b/setup.py
index da5b6b6..209d169 100644
--- a/setup.py
+++ b/setup.py
@@ -3,7 +3,7 @@
from setuptools import setup
setup(name='slurm-pipeline',
- version='1.1.2',
+ version='1.1.3',
packages=['slurm_pipeline'],
include_package_data=True,
url='https://github.com/acorg/slurm-pipeline',
diff --git a/slurm_pipeline/pipeline.py b/slurm_pipeline/pipeline.py
index 8ee6f8d..817c9ae 100644
--- a/slurm_pipeline/pipeline.py
+++ b/slurm_pipeline/pipeline.py
@@ -26,6 +26,11 @@ class SlurmPipeline(SlurmPipelineBase):
# job ids. The following regex just matches the first part of that.
TASK_NAME_LINE = re.compile('^TASK:\s+(\S+)\s*')
+ # Limits on the --nice argument to sbatch. In later SLURM versions the
+ # limits are +/-2147483645. See https://slurm.schedmd.com/sbatch.html
+ NICE_HIGHEST = -10000
+ NICE_LOWEST = 10000
+
@staticmethod
def checkSpecification(specification):
"""
@@ -54,7 +59,7 @@ class SlurmPipeline(SlurmPipelineBase):
SlurmPipelineBase.checkSpecification(specification)
def schedule(self, force=False, firstStep=None, lastStep=None, sleep=0.0,
- scriptArgs=None, skip=None, startAfter=None):
+ scriptArgs=None, skip=None, startAfter=None, nice=None):
"""
Schedule the running of our execution specification.
@@ -86,8 +91,12 @@ class SlurmPipeline(SlurmPipelineBase):
(either successully or unsuccessully, it doesn't matter) before
steps in the current specification may start. If C{None}, steps in
the current specification may start immediately.
+ @param nice: An C{int} nice (priority) value, in the range
+ self.NICE_HIGHEST to self.NICE_LOWEST. Note that only
+ privileged users can specify a negative adjustment.
@raise SchedulingError: If there is a problem with the first, last, or
- skipped steps, as determined by self._checkRuntime.
+ skipped steps, as determined by self._checkRuntime. ValueError if
+ C{nice} is not numeric or is out of its allowed range.
@return: A specification C{dict}. This is a copy of the original
specification, updated with information about this scheduling.
"""
@@ -97,11 +106,12 @@ class SlurmPipeline(SlurmPipelineBase):
if nSteps and lastStep is not None and firstStep is None:
firstStep = list(specification['steps'])[0]
skip = set(skip or ())
- self._checkRuntime(steps, firstStep, lastStep, skip)
+ self._checkRuntime(steps, firstStep, lastStep, skip, nice)
specification.update({
'force': force,
'firstStep': firstStep,
'lastStep': lastStep,
+ 'nice': nice,
'scheduledAt': time.time(),
'scriptArgs': scriptArgs,
'skip': skip,
@@ -110,6 +120,8 @@ class SlurmPipeline(SlurmPipelineBase):
})
environ['SP_FORCE'] = str(int(force))
+ environ['SP_NICE_ARG'] = (
+ '--nice' if nice is None else '--nice %d' % nice)
firstStepFound = lastStepFound = False
for stepIndex, stepName in enumerate(steps):
@@ -297,8 +309,8 @@ class SlurmPipeline(SlurmPipelineBase):
(taskName, jobIds, script, step['name']))
tasks[taskName].update(jobIds)
- @staticmethod
- def _checkRuntime(steps, firstStep=None, lastStep=None, skip=None):
+ def _checkRuntime(self, steps, firstStep=None, lastStep=None, skip=None,
+ nice=None):
"""
Check that a proposed scheduling makes sense.
@@ -309,9 +321,13 @@ class SlurmPipeline(SlurmPipelineBase):
@param lastStep: If not C{None}, the name of the last specification
step to execute.
@param skip: A C{set} of C{str} step names that should be skipped.
+ @param nice: An C{int} nice (priority) value, in the range
+ self.NICE_HIGHEST to self.NICE_LOWEST. Note that only privileged
+ users can specify a negative adjustment.
@raise SchedulingError: if the last step occurs before the first, if
- the last or first step are unknown, or if asked to skip a
- non-existent step.
+ the last or first step are unknown, if asked to skip a
+ non-existent step, or if C{nice} is not numeric or is out of its
+ allowed range (see above).
@return: An C{OrderedDict} keyed by specification step name,
with values that are step C{dict}s. This provides convenient /
direct access to steps by name.
@@ -326,6 +342,23 @@ class SlurmPipeline(SlurmPipelineBase):
raise SchedulingError(
'Last step %r not found in specification' % lastStep)
+ if nice is not None:
+ try:
+ nice = int(nice)
+ except ValueError:
+ raise SchedulingError(
+ 'Nice (priority) value %r is not numeric' % nice)
+ else:
+ if nice < self.NICE_HIGHEST or nice > self.NICE_LOWEST:
+ raise SchedulingError(
+ 'Nice (priority) value %r is outside the allowed '
+ '[%d, %d] range' %
+ (nice, self.NICE_HIGHEST, self.NICE_LOWEST))
+
+ if lastStep is not None and lastStep not in steps:
+ raise SchedulingError(
+ 'Last step %r not found in specification' % lastStep)
+
for step in steps.values():
if step['name'] == firstStep:
firstStepFound = True
| Add --nice command line option
To allow setting of priority.
| acorg/slurm-pipeline | diff --git a/test/test_pipeline.py b/test/test_pipeline.py
index 560145c..a665d01 100644
--- a/test/test_pipeline.py
+++ b/test/test_pipeline.py
@@ -871,6 +871,64 @@ class TestSlurmPipeline(TestCase):
env = subprocessMock.mock_calls[0][2]['env']
self.assertEqual('1', env['SP_FORCE'])
+ @patch('subprocess.check_output')
+ @patch('os.access')
+ @patch('os.path.exists')
+ def testDefaultNice(self, existsMock, accessMock, subprocessMock):
+ """
+ If no nice value is given to schedule, SP_NICE_ARG must be set to
+ '--nice' in the step execution environment.
+ """
+ subprocessMock.return_value = ''
+
+ sp = SlurmPipeline(
+ {
+ 'steps': [
+ {
+ 'name': 'name1',
+ 'script': 'script1',
+ },
+ ],
+ })
+ sp.schedule()
+
+ subprocessMock.assert_has_calls([
+ call(['script1'], cwd='.', universal_newlines=True,
+ stdin=DEVNULL, env=ANY),
+ ])
+
+ env = subprocessMock.mock_calls[0][2]['env']
+ self.assertEqual('--nice', env['SP_NICE_ARG'])
+
+ @patch('subprocess.check_output')
+ @patch('os.access')
+ @patch('os.path.exists')
+ def testSpecificNice(self, existsMock, accessMock, subprocessMock):
+ """
+ If a specific nice value is given to schedule, SP_NICE_ARG must be set
+ to the expected value in the step execution environment.
+ """
+ subprocessMock.return_value = ''
+
+ sp = SlurmPipeline(
+ {
+ 'steps': [
+ {
+ 'name': 'name1',
+ 'script': 'script1',
+ },
+ ],
+ })
+ sp.schedule(nice=40)
+
+ subprocessMock.assert_has_calls([
+ call(['script1'], cwd='.', universal_newlines=True,
+ stdin=DEVNULL, env=ANY),
+ ])
+
+ env = subprocessMock.mock_calls[0][2]['env']
+ self.assertEqual('--nice 40', env['SP_NICE_ARG'])
+
@patch('subprocess.check_output')
@patch('os.access')
@patch('os.path.exists')
@@ -1378,6 +1436,7 @@ class TestSlurmPipeline(TestCase):
{
'firstStep': 'name2',
'lastStep': None,
+ 'nice': None,
'force': True,
'scheduledAt': 10.0,
'scriptArgs': None,
@@ -1495,3 +1554,70 @@ class TestSlurmPipeline(TestCase):
env3 = subprocessMock.mock_calls[2][2]['env']
self.assertEqual('--dependency=afternotok:238?afternotok:560',
env3['SP_DEPENDENCY_ARG'])
+
+ @patch('subprocess.check_output')
+ @patch('os.access')
+ @patch('os.path.exists')
+ def testStringNice(self, existsMock, accessMock, subprocessMock):
+ """
+ If a string nice value is passed to schedule, a SchedulingError
+ must be raised.
+ """
+ subprocessMock.return_value = 'TASK: xxx 123\n'
+ sp = SlurmPipeline(
+ {
+ 'steps': [
+ {
+ 'name': 'name1',
+ 'script': 'script1',
+ },
+ ],
+ })
+ error = "^Nice \(priority\) value 'x' is not numeric$"
+ assertRaisesRegex(self, SchedulingError, error, sp.schedule, nice='x')
+
+ @patch('subprocess.check_output')
+ @patch('os.access')
+ @patch('os.path.exists')
+ def testNiceTooBig(self, existsMock, accessMock, subprocessMock):
+ """
+ If a nice value that is too big (> 10000) is passed to schedule, a
+ SchedulingError must be raised.
+ """
+ subprocessMock.return_value = 'TASK: xxx 123\n'
+ sp = SlurmPipeline(
+ {
+ 'steps': [
+ {
+ 'name': 'name1',
+ 'script': 'script1',
+ },
+ ],
+ })
+ error = ("^Nice \(priority\) value 10001 is outside the allowed "
+ "\[-10000, 10000\] range$")
+ assertRaisesRegex(self, SchedulingError, error, sp.schedule,
+ nice=10001)
+
+ @patch('subprocess.check_output')
+ @patch('os.access')
+ @patch('os.path.exists')
+ def testNiceTooSmall(self, existsMock, accessMock, subprocessMock):
+ """
+ If a nice value that is too small (< -10000) is passed to schedule, a
+ SchedulingError must be raised.
+ """
+ subprocessMock.return_value = 'TASK: xxx 123\n'
+ sp = SlurmPipeline(
+ {
+ 'steps': [
+ {
+ 'name': 'name1',
+ 'script': 'script1',
+ },
+ ],
+ })
+ error = ("^Nice \(priority\) value -10001 is outside the allowed "
+ "\[-10000, 10000\] range$")
+ assertRaisesRegex(self, SchedulingError, error, sp.schedule,
+ nice=-10001)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_added_files",
"has_many_modified_files",
"has_many_hunks",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 2,
"test_score": 3
},
"num_modified_files": 7
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements-3.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | discover==0.4.0
exceptiongroup==1.2.2
iniconfig==2.1.0
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
six==1.10.0
-e git+https://github.com/acorg/slurm-pipeline.git@b10a0383fa91cf20ea1844a63eb58936d7906a7a#egg=slurm_pipeline
tomli==2.2.1
| name: slurm-pipeline
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- discover==0.4.0
- exceptiongroup==1.2.2
- iniconfig==2.1.0
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- six==1.10.0
- tomli==2.2.1
prefix: /opt/conda/envs/slurm-pipeline
| [
"test/test_pipeline.py::TestSlurmPipeline::testDefaultNice",
"test/test_pipeline.py::TestSlurmPipeline::testJSON",
"test/test_pipeline.py::TestSlurmPipeline::testNiceTooBig",
"test/test_pipeline.py::TestSlurmPipeline::testNiceTooSmall",
"test/test_pipeline.py::TestSlurmPipeline::testSpecificNice",
"test/test_pipeline.py::TestSlurmPipeline::testStringNice"
]
| []
| [
"test/test_pipeline.py::TestSlurmPipeline::testAccessAndExistsAreCalled",
"test/test_pipeline.py::TestSlurmPipeline::testAccessFails",
"test/test_pipeline.py::TestSlurmPipeline::testAlreadyScheduled",
"test/test_pipeline.py::TestSlurmPipeline::testCollectStepWithEmptyDependencies",
"test/test_pipeline.py::TestSlurmPipeline::testCollectStepWithNoDependencies",
"test/test_pipeline.py::TestSlurmPipeline::testCwdWithRelativeScriptPath",
"test/test_pipeline.py::TestSlurmPipeline::testErrorStep",
"test/test_pipeline.py::TestSlurmPipeline::testErrorStepWithNoDependencies",
"test/test_pipeline.py::TestSlurmPipeline::testFirstStepAndLastStepDifferent",
"test/test_pipeline.py::TestSlurmPipeline::testFirstStepAndLastStepSame",
"test/test_pipeline.py::TestSlurmPipeline::testFirstStepAndNoLastStep",
"test/test_pipeline.py::TestSlurmPipeline::testFirstStepOnly",
"test/test_pipeline.py::TestSlurmPipeline::testForce",
"test/test_pipeline.py::TestSlurmPipeline::testLastStepBeforeFirstStep",
"test/test_pipeline.py::TestSlurmPipeline::testLastStepOnly",
"test/test_pipeline.py::TestSlurmPipeline::testNonexistentFirstStep",
"test/test_pipeline.py::TestSlurmPipeline::testNonexistentLastStep",
"test/test_pipeline.py::TestSlurmPipeline::testNonexistentScript",
"test/test_pipeline.py::TestSlurmPipeline::testRepeatedTaskJobId",
"test/test_pipeline.py::TestSlurmPipeline::testRepeatedTaskName",
"test/test_pipeline.py::TestSlurmPipeline::testScheduledTime",
"test/test_pipeline.py::TestSlurmPipeline::testScriptArgs",
"test/test_pipeline.py::TestSlurmPipeline::testSingleCollectorDependencyNoJobIds",
"test/test_pipeline.py::TestSlurmPipeline::testSingleCollectorDependencyTaskNamesAndJobIds",
"test/test_pipeline.py::TestSlurmPipeline::testSingleDependencySynchronousTaskNamesJobIdsAndCalls",
"test/test_pipeline.py::TestSlurmPipeline::testSingleDependencyTaskNamesJobIdsAndCalls",
"test/test_pipeline.py::TestSlurmPipeline::testSkipNone",
"test/test_pipeline.py::TestSlurmPipeline::testSkipNonexistentStep",
"test/test_pipeline.py::TestSlurmPipeline::testSkipNonexistentSteps",
"test/test_pipeline.py::TestSlurmPipeline::testSkipTwo",
"test/test_pipeline.py::TestSlurmPipeline::testSleep",
"test/test_pipeline.py::TestSlurmPipeline::testSleepNotCalledByDefault",
"test/test_pipeline.py::TestSlurmPipeline::testSleepNotCalledWhenZero",
"test/test_pipeline.py::TestSlurmPipeline::testStartAfter",
"test/test_pipeline.py::TestSlurmPipeline::testStepStdout",
"test/test_pipeline.py::TestSlurmPipeline::testStepsDict",
"test/test_pipeline.py::TestSlurmPipeline::testTaskScheduleTime",
"test/test_pipeline.py::TestSlurmPipeline::testTasksFollowingSchedule"
]
| []
| MIT License | 1,369 | [
"Makefile",
"examples/blast/blast.sh",
"examples/word-count/README.md",
"bin/slurm-pipeline.py",
"examples/double-collect/README.md",
"setup.py",
"slurm_pipeline/pipeline.py",
"examples/blast-with-force-and-simulate/README.md",
"examples/blast/2-run-blast.sh",
"README.md",
"examples/blast/README.md",
"examples/word-count-with-skipping/README.md"
]
| [
"Makefile",
"examples/blast/blast.sh",
"examples/word-count/README.md",
"bin/slurm-pipeline.py",
"examples/double-collect/README.md",
"setup.py",
"slurm_pipeline/pipeline.py",
"examples/blast-with-force-and-simulate/README.md",
"examples/blast/2-run-blast.sh",
"README.md",
"examples/blast/README.md",
"examples/word-count-with-skipping/README.md"
]
|
|
ethereum__pyrlp-42 | 8f16b5ebf3e009616689f2d95a3793affc88db2e | 2017-06-14 13:08:39 | 8f16b5ebf3e009616689f2d95a3793affc88db2e | diff --git a/rlp/lazy.py b/rlp/lazy.py
index e12583d..e9a0b14 100644
--- a/rlp/lazy.py
+++ b/rlp/lazy.py
@@ -100,13 +100,26 @@ class LazyList(Sequence):
return item
def __getitem__(self, i):
+ if isinstance(i, slice):
+ if i.step is not None:
+ raise TypeError("Step not supported")
+ start = i.start
+ stop = i.stop
+ else:
+ start = i
+ stop = i + 1
+
try:
- while len(self._elements) <= i:
- self.next()
+ while len(self._elements) < stop:
+ e = self.next()
except StopIteration:
assert self.index == self.end
- raise IndexError('Index %d out of range' % i)
- return self._elements[i]
+ raise IndexError('Index %s out of range' % i)
+
+ if isinstance(i, slice):
+ return self._elements[start:stop]
+ else:
+ return self._elements[start]
def __len__(self):
if not self._len:
| LazyList.__getitem__ assumes i is a number but it can be a slice
LazyList's __getitem__ implementation [assumes **i** is always a number](https://github.com/ethereum/pyrlp/blob/develop/rlp/lazy.py#L108), but it can [be a slice as well](https://docs.python.org/2/reference/datamodel.html#object.__getitem__)
The fix is trivial, but I want to investigate it further because it's not clear to me how that deserialize() call would cause __getitem__ to raise an IndexError, since it [checks the list's length before accessing the first element](https://github.com/ethereum/pyrlp/blob/develop/rlp/sedes/big_endian_int.py#L38)
Here's an example traceback I got from running the hive consensus simulator against pyethapp
```
TypeErrorTraceback (most recent call last)
<ipython-input-1-59579d93b561> in <module>()
----> 1 from importblock import Importer; Importer(eth).run()
/importblock.py in run(self)
22 data = open('/blocks/' + block, 'r').read()
23 block_data = rlp.decode_lazy(data)
---> 24 header = BlockHeader.deserialize(block_data[0])
25 transactions = rlp.sedes.CountableList(Transaction).deserialize(block_data[1])
26 uncles = rlp.sedes.CountableList(BlockHeader).deserialize(block_data[2])
/usr/lib/python2.7/site-packages/rlp-0.5.1-py2.7.egg/rlp/sedes/lists.pyc in deserialize(cls, serial, exclude, mutable, **kwargs)
243 def deserialize(cls, serial, exclude=None, mutable=False, **kwargs):
244 try:
--> 245 values = cls.get_sedes().deserialize(serial)
246 except ListDeserializationError as e:
247 raise ObjectDeserializationError(serial=serial, sedes=cls, list_exception=e)
/usr/lib/python2.7/site-packages/rlp-0.5.1-py2.7.egg/rlp/sedes/lists.pyc in deserialize(self, serial)
81 if not (sedes_consumed or elements_consumed):
82 try:
---> 83 result.append(sedes.deserialize(element))
84 except DeserializationError as e:
85 raise ListDeserializationError(serial=serial, element_exception=e, index=index)
/usr/lib/python2.7/site-packages/rlp-0.5.1-py2.7.egg/rlp/sedes/big_endian_int.pyc in deserialize(self, serial)
36 raise DeserializationError('Invalid serialization (wrong size)',
37 serial)
---> 38 if self.l is None and len(serial) > 0 and serial[0:1] == ascii_chr(0):
39 raise DeserializationError('Invalid serialization (not minimal '
40 'length)', serial)
/usr/lib/python2.7/site-packages/rlp-0.5.1-py2.7.egg/rlp/lazy.pyc in __getitem__(self, i)
106 except StopIteration:
107 assert self.index == self.end
--> 108 raise IndexError('Index %d out of range' % i)
109 return self._elements[i]
110
```
| ethereum/pyrlp | diff --git a/tests/test_lazy.py b/tests/test_lazy.py
index bf9d478..a969483 100644
--- a/tests/test_lazy.py
+++ b/tests/test_lazy.py
@@ -38,6 +38,17 @@ def test_string():
rlp.peek(rlp.encode(s), [0])
+def test_list_getitem():
+ l = rlp.decode_lazy(rlp.encode([1,2,3]), big_endian_int)
+ assert isinstance(l, rlp.lazy.LazyList)
+ assert l[0] == 1
+ assert l[1] == 2
+ assert l[2] == 3
+ assert l[0:3] == [1,2,3]
+ assert l[0:2] == [1,2]
+ assert l[0:1] == [1]
+
+
def test_nested_list():
l = ((), (b'a'), (b'b', b'c', b'd'))
dec = lambda: rlp.decode_lazy(rlp.encode(l))
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 1
} | 0.5 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"coverage"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | coverage==7.8.0
exceptiongroup==1.2.2
iniconfig==2.1.0
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
-e git+https://github.com/ethereum/pyrlp.git@8f16b5ebf3e009616689f2d95a3793affc88db2e#egg=rlp
tomli==2.2.1
| name: pyrlp
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- coverage==7.8.0
- exceptiongroup==1.2.2
- iniconfig==2.1.0
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- tomli==2.2.1
- wheel==0.23.0
prefix: /opt/conda/envs/pyrlp
| [
"tests/test_lazy.py::test_list_getitem"
]
| []
| [
"tests/test_lazy.py::test_empty_list",
"tests/test_lazy.py::test_string",
"tests/test_lazy.py::test_nested_list",
"tests/test_lazy.py::test_sedes",
"tests/test_lazy.py::test_peek"
]
| []
| MIT License | 1,370 | [
"rlp/lazy.py"
]
| [
"rlp/lazy.py"
]
|
|
typesafehub__conductr-cli-494 | 30063b71f1434a91351a4f263514bcdd71150c45 | 2017-06-14 22:00:02 | 39719b38ec6fc0f598756700a8a815b56bd8bc59 | diff --git a/conductr_cli/bndl_main.py b/conductr_cli/bndl_main.py
index f1b0c15..0d95348 100644
--- a/conductr_cli/bndl_main.py
+++ b/conductr_cli/bndl_main.py
@@ -53,6 +53,8 @@ class ComponentAction(argparse.Action):
namespace.start_command_dicts[-1]['component'] = value
elif namespace.component_action == 'volume':
namespace.volume_dicts[-1]['component'] = value
+ elif namespace.component_action == 'description':
+ namespace.description_dicts[-1]['component'] = value
class EndpointAction(argparse.Action):
@@ -147,6 +149,12 @@ class VolumeAction(argparse.Action):
namespace.volume_dicts[-1][dict_key] = value
+class DescriptionAction(argparse.Action):
+ def __call__(self, parser, namespace, value, option_strings):
+ namespace.component_action = 'description'
+ namespace.description_dicts.append({'description': value})
+
+
def process_args(args):
log = logging.getLogger(__name__)
@@ -173,32 +181,44 @@ def process_args(args):
if args.start_command_dicts:
args.start_commands = []
- start_command = type('', (), {})()
- start_command.start_command = args.start_command_dicts[-1]['start_command']
+ for start_command_dict in args.start_command_dicts:
+ start_command = type('', (), {})()
+ start_command.start_command = start_command_dict['start_command']
- if 'component' in args.start_command_dicts[-1]:
- start_command.component = args.start_command_dicts[-1]['component']
+ if 'component' in start_command_dict:
+ start_command.component = start_command_dict['component']
- args.start_commands.append(start_command)
+ args.start_commands.append(start_command)
if args.volume_dicts:
args.volumes = []
- volume = type('', (), {})()
- parts = args.volume_dicts[-1]['volume'].split('=', 1)
+ for volume_dict in args.volume_dicts:
+ volume = type('', (), {})()
+ parts = volume_dict['volume'].split('=', 1)
- if len(parts) != 2:
- log.error('bndl: volumes must be specified in the format NAME=MOUNT_POINT. '
- 'Example: bndl --volume myvol=/data')
- sys.exit(2)
+ if len(parts) != 2:
+ log.error('bndl: volumes must be specified in the format NAME=MOUNT_POINT. '
+ 'Example: bndl --volume myvol=/data')
+ sys.exit(2)
+
+ volume.name = parts[0]
+ volume.mount_point = parts[1]
+
+ if 'component' in volume_dict:
+ volume.component = volume_dict['component']
- volume.name = parts[0]
- volume.mount_point = parts[1]
+ args.volumes.append(volume)
- if 'component' in args.volume_dicts[-1]:
- volume.component = args.volume_dicts[-1]['component']
+ if args.description_dicts:
+ args.descriptions = []
- args.volumes.append(volume)
+ for description_dict in args.description_dicts:
+ description = type('', (), {})()
+ description.description = description_dict['description']
+ if 'component' in description_dict:
+ description.component = description_dict['component']
+ args.descriptions.append(description)
def add_conf_arguments(parser):
@@ -216,22 +236,27 @@ def add_conf_arguments(parser):
parser.add_argument('--component',
help='Specify the component that should be modified\n'
'Required when the bundle has more than one component\n'
- 'Used in conjunction with the following: --endpoint, --start-command',
+ 'Used in conjunction with the following: '
+ '--description, --endpoint, --start-command, --volume',
metavar='COMPONENT',
action=ComponentAction)
- parser.add_argument('--component-description',
- required=False,
- help='Description to use for the generated ConductR component\n'
- 'For use with docker and oci-image formats',
- dest='component_description')
-
parser.add_argument('--compatibility-version',
nargs='?',
required=False,
help='Sets the "compatibilityVersion" bundle.conf value',
dest='compatibility_version')
+ parser.add_argument('--description',
+ required=False,
+ help='Sets "description" for a component\n'
+ 'If the bundle has more than one component, you must specify --component\n'
+ 'Example: bndl --description "My service" --component service',
+ metavar='DESCRIPTION',
+ dest='description_dicts',
+ default=[],
+ action=DescriptionAction)
+
parser.add_argument('--disk-space',
nargs='?',
required=False,
diff --git a/conductr_cli/bndl_oci.py b/conductr_cli/bndl_oci.py
index e165750..fa2d407 100644
--- a/conductr_cli/bndl_oci.py
+++ b/conductr_cli/bndl_oci.py
@@ -1,5 +1,5 @@
from pyhocon import HOCONConverter, ConfigFactory, ConfigTree
-from conductr_cli.bndl_utils import load_bundle_args_into_conf, create_check_hocon
+from conductr_cli.bndl_utils import create_check_hocon
from conductr_cli.constants import BNDL_DEFAULT_CHECK_RETRY_COUNT, BNDL_DEFAULT_CHECK_RETRY_DELAY
import json
import os
@@ -9,10 +9,7 @@ import tempfile
def oci_image_bundle_conf(args, component_name, oci_manifest, oci_config):
- conf = ConfigFactory.parse_string('')
- load_bundle_args_into_conf(conf, args, args.with_defaults)
-
- annotations_tree = conf.get('annotations')
+ annotations_tree = ConfigTree()
if 'annotations' in oci_manifest and oci_manifest['annotations'] is not None:
for key in sorted(oci_manifest['annotations']):
@@ -23,7 +20,6 @@ def oci_image_bundle_conf(args, component_name, oci_manifest, oci_config):
endpoints_tree = ConfigTree()
oci_tree = ConfigTree()
- oci_tree.put('description', args.component_description)
oci_tree.put('file-system-type', 'oci-image')
oci_tree.put('start-command', [])
oci_tree.put('endpoints', endpoints_tree)
@@ -69,6 +65,8 @@ def oci_image_bundle_conf(args, component_name, oci_manifest, oci_config):
volumes.put(key, vol_path)
oci_tree.put('volumes', volumes)
+ conf = ConfigFactory.parse_string('')
+ conf.put('annotations', annotations_tree)
conf.put('components', components_tree)
return HOCONConverter.to_hocon(conf)
diff --git a/conductr_cli/bndl_utils.py b/conductr_cli/bndl_utils.py
index d2be060..1d05942 100644
--- a/conductr_cli/bndl_utils.py
+++ b/conductr_cli/bndl_utils.py
@@ -174,13 +174,11 @@ def escape_bash_double_quotes(input):
def load_bundle_args_into_conf(config, args, application_type):
- # this is unrolled because it's actually pretty complicated to get the order
- # correct given that some attributes need special handling and defaults
-
config_defaults = application_type.config_defaults(args.format.to_file_system_type()) if application_type else None
args_check_addresses = getattr(args, 'check_addresses', None)
args_compatibility_version = getattr(args, 'compatibility_version', None)
+ args_descriptions = getattr(args, 'descriptions', None)
args_disk_space = getattr(args, 'disk_space', None)
args_endpoints = getattr(args, 'endpoints', None)
args_memory = getattr(args, 'memory', None)
@@ -247,6 +245,15 @@ def load_bundle_args_into_conf(config, args, application_type):
config.put('compatibilityVersion', config_defaults['compatibilityVersion'])
# Component properties
+ if args_descriptions:
+ if 'components' not in config:
+ config.put('components', ConfigTree())
+
+ for description in args_descriptions:
+ component_name = detect_component(config, description, config_name)
+ description_key = 'components.{}.description'.format(component_name)
+ config.put(description_key, description.description)
+
if args_endpoints is not None:
if 'components' not in config:
config.put('components', ConfigTree())
| bndl generates bundle.conf with bug for description
Description should not be what it is below:
```
components {
cp-zookeeper {
description = <pyhocon.config_tree.NoneValue object at 0x7f452549aac8>
file-system-type = "oci-image"
start-command = []
endpoints {
cp-zookeeper-tcp-2181 {
bind-protocol = "tcp"
bind-port = 2181
service-name = "cp-zookeeper-tcp-2181"
}
cp-zookeeper-tcp-2888 {
bind-protocol = "tcp"
bind-port = 2888
service-name = "cp-zookeeper-tcp-2888"
}
cp-zookeeper-tcp-3888 {
bind-protocol = "tcp"
bind-port = 3888
service-name = "cp-zookeeper-tcp-3888"
}
}
volumes {
volume-etc-zookeeper-secrets = "/etc/zookeeper/secrets"
volume-var-lib-zookeeper-data = "/var/lib/zookeeper/data"
volume-var-lib-zookeeper-log = "/var/lib/zookeeper/log"
}
}
bundle-status {
description = "Status check for the bundle component"
file-system-type = "universal"
start-command = [
"check"
"--any-address"
"$CP_ZOOKEEPER_TCP_2181_HOST?retry-delay=10&retry-count=6"
"$CP_ZOOKEEPER_TCP_2888_HOST?retry-delay=10&retry-count=6"
"$CP_ZOOKEEPER_TCP_3888_HOST?retry-delay=10&retry-count=6"
]
endpoints {}
}
}``` | typesafehub/conductr-cli | diff --git a/conductr_cli/test/test_bndl_create.py b/conductr_cli/test/test_bndl_create.py
index 7a89b0b..a5bfb9c 100644
--- a/conductr_cli/test/test_bndl_create.py
+++ b/conductr_cli/test/test_bndl_create.py
@@ -106,7 +106,6 @@ class TestBndlCreate(CliTestCase):
'format': BndlFormat.OCI_IMAGE,
'image_tag': 'latest',
'output': tmpfile,
- 'component_description': '',
'use_shazar': True,
'use_default_endpoints': True,
'annotations': [],
@@ -149,7 +148,6 @@ class TestBndlCreate(CliTestCase):
'format': BndlFormat.OCI_IMAGE,
'image_tag': 'latest',
'output': tmpfile,
- 'component_description': '',
'use_shazar': False,
'use_default_endpoints': True,
'annotations': [],
@@ -195,7 +193,6 @@ class TestBndlCreate(CliTestCase):
'format': BndlFormat.OCI_IMAGE,
'image_tag': 'latest',
'output': tmpfile,
- 'component_description': '',
'use_shazar': True,
'use_default_endpoints': True,
'annotations': [],
@@ -218,7 +215,6 @@ class TestBndlCreate(CliTestCase):
'format': BndlFormat.OCI_IMAGE,
'image_tag': 'latest',
'output': tmpfile2,
- 'component_description': '',
'use_shazar': True,
'use_default_endpoints': True,
'annotations': [],
@@ -263,7 +259,6 @@ class TestBndlCreate(CliTestCase):
'format': BndlFormat.OCI_IMAGE,
'image_tag': 'latest',
'output': tmpfile,
- 'component_description': '',
'use_shazar': True,
'use_default_endpoints': True,
'annotations': [],
@@ -308,7 +303,6 @@ class TestBndlCreate(CliTestCase):
'format': BndlFormat.OCI_IMAGE,
'image_tag': 'latest',
'output': tmpfile,
- 'component_description': '',
'use_shazar': False,
'use_default_endpoints': True,
'annotations': [],
@@ -331,7 +325,6 @@ class TestBndlCreate(CliTestCase):
'format': BndlFormat.OCI_IMAGE,
'image_tag': 'latest',
'output': tmpfile2,
- 'component_description': '',
'use_shazar': False,
'use_default_endpoints': True,
'annotations': [],
@@ -530,6 +523,17 @@ class TestBndlCreate(CliTestCase):
'name': 'my-vol',
'mount_point': '/other-data',
'component': 'test1'
+ }),
+ create_attributes_object({
+ 'name': 'my-vol2',
+ 'mount_point': '/data',
+ 'component': 'test1'
+ })
+ ],
+ 'descriptions': [
+ create_attributes_object({
+ 'description': 'this is a test',
+ 'component': 'test2'
})
],
'with_defaults': None
@@ -558,6 +562,7 @@ class TestBndlCreate(CliTestCase):
| ]
| volumes {
| my-vol = "/other-data"
+ | my-vol2 = "/data"
| }
| }
| test2 {
@@ -565,6 +570,7 @@ class TestBndlCreate(CliTestCase):
| "abc"
| "test"
| ]
+ | description = "this is a test"
| volumes {
| my-vol = "/data"
| }
@@ -899,7 +905,6 @@ class TestBndlCreate(CliTestCase):
'format': BndlFormat.OCI_IMAGE,
'image_tag': 'latest',
'output': tmpfile,
- 'component_description': '',
'use_shazar': True,
'use_default_endpoints': True,
'use_default_volumes': True,
diff --git a/conductr_cli/test/test_bndl_main.py b/conductr_cli/test/test_bndl_main.py
index 48e968e..1ca6fb2 100644
--- a/conductr_cli/test/test_bndl_main.py
+++ b/conductr_cli/test/test_bndl_main.py
@@ -31,8 +31,6 @@ class TestBndl(CliTestCase):
'-o',
'/dev/null',
'--no-shazar',
- '--component-description',
- 'some description',
'--version',
'4',
'--compatibility-version',
@@ -81,7 +79,13 @@ class TestBndl(CliTestCase):
'--component',
'web-component',
'--volume',
- 'test2:/data2'
+ 'test2:/data2',
+ '--description',
+ 'this is a test',
+ '--description',
+ 'another test',
+ '--component',
+ 'web-component'
])
self.assertEqual(args.source, 'oci-image-dir')
@@ -91,7 +95,6 @@ class TestBndl(CliTestCase):
self.assertEqual(args.image_name, 'test')
self.assertEqual(args.output, '/dev/null')
self.assertFalse(args.use_shazar)
- self.assertEqual(args.component_description, 'some description')
self.assertEqual(args.version, '4')
self.assertEqual(args.compatibility_version, '5')
self.assertEqual(args.system, 'myapp')
@@ -126,6 +129,10 @@ class TestBndl(CliTestCase):
{'component': 'web-component', 'volume': 'test:/data'},
{'volume': 'test2:/data2'}
])
+ self.assertEqual(args.description_dicts, [
+ {'description': 'this is a test'},
+ {'description': 'another test', 'component': 'web-component'}
+ ])
def test_parser_acl_params(self):
parser = bndl_main.build_parser()
diff --git a/conductr_cli/test/test_bndl_oci.py b/conductr_cli/test/test_bndl_oci.py
index 05b6699..aa9ba6e 100644
--- a/conductr_cli/test/test_bndl_oci.py
+++ b/conductr_cli/test/test_bndl_oci.py
@@ -108,7 +108,6 @@ class TestBndlOci(CliTestCase):
'format': BndlFormat.OCI_IMAGE,
'name': 'world',
'tags': [],
- 'component_description': 'testing desc 1',
'image_tag': 'testing',
'use_default_endpoints': True,
'use_default_volumes': True,
@@ -121,7 +120,6 @@ class TestBndlOci(CliTestCase):
'name': 'world',
'tags': [],
'annotations': [],
- 'component_description': 'testing desc 2',
'version': '4',
'compatibility_version': '5',
'system': 'myapp',
@@ -149,23 +147,8 @@ class TestBndlOci(CliTestCase):
| }
| }
|}
- |compatibilityVersion = "0"
- |diskSpace = 1073741824
- |memory = 402653184
- |name = "world"
- |nrOfCpus = 0.1
- |roles = [
- | "web"
- |]
- |system = "world"
- |systemVersion = "0"
- |tags = [
- | "testing"
- |]
- |version = "1"
|components {
| my-component {
- | description = "testing desc 1"
| file-system-type = "oci-image"
| start-command = []
| endpoints {}
@@ -188,24 +171,8 @@ class TestBndlOci(CliTestCase):
| }
| }
|}
- |compatibilityVersion = "5"
- |diskSpace = "16384"
- |memory = "65536"
- |name = "world"
- |nrOfCpus = "8"
- |roles = [
- | "web"
- | "backend"
- |]
- |system = "myapp"
- |systemVersion = "3"
- |tags = [
- | "0.0.1"
- |]
- |version = "4"
|components {
| my-other-component {
- | description = "testing desc 2"
| file-system-type = "oci-image"
| start-command = []
| endpoints {}
@@ -217,7 +184,6 @@ class TestBndlOci(CliTestCase):
base_args = create_attributes_object({
'format': BndlFormat.OCI_IMAGE,
'name': 'world',
- 'component_description': 'testing desc 1',
'image_tag': 'testing',
'use_default_endpoints': True,
'use_default_check': True,
@@ -245,23 +211,8 @@ class TestBndlOci(CliTestCase):
| }
| }
|}
- |compatibilityVersion = "0"
- |diskSpace = 1073741824
- |memory = 402653184
- |name = "world"
- |nrOfCpus = 0.1
- |roles = [
- | "web"
- |]
- |system = "world"
- |systemVersion = "0"
- |tags = [
- | "testing"
- |]
- |version = "1"
|components {
| my-component {
- | description = "testing desc 1"
| file-system-type = "oci-image"
| start-command = []
| endpoints {
@@ -289,7 +240,6 @@ class TestBndlOci(CliTestCase):
base_args = create_attributes_object({
'format': BndlFormat.OCI_IMAGE,
'name': 'world',
- 'component_description': 'testing desc 1',
'image_tag': 'testing',
'use_default_endpoints': False,
'use_default_volumes': True,
@@ -316,23 +266,8 @@ class TestBndlOci(CliTestCase):
| }
| }
|}
- |compatibilityVersion = "0"
- |diskSpace = 1073741824
- |memory = 402653184
- |name = "world"
- |nrOfCpus = 0.1
- |roles = [
- | "web"
- |]
- |system = "world"
- |systemVersion = "0"
- |tags = [
- | "testing"
- |]
- |version = "1"
|components {
| my-component {
- | description = "testing desc 1"
| file-system-type = "oci-image"
| start-command = []
| endpoints {}
@@ -344,7 +279,6 @@ class TestBndlOci(CliTestCase):
base_args = create_attributes_object({
'format': BndlFormat.OCI_IMAGE,
'name': 'world',
- 'component_description': 'testing desc 1',
'image_tag': 'testing',
'use_default_endpoints': True,
'use_default_check': True,
@@ -372,23 +306,8 @@ class TestBndlOci(CliTestCase):
| }
| }
|}
- |compatibilityVersion = "0"
- |diskSpace = 1073741824
- |memory = 402653184
- |name = "world"
- |nrOfCpus = 0.1
- |roles = [
- | "web"
- |]
- |system = "world"
- |systemVersion = "0"
- |tags = [
- | "testing"
- |]
- |version = "1"
|components {
| my-component {
- | description = "testing desc 1"
| file-system-type = "oci-image"
| start-command = []
| endpoints {
@@ -419,10 +338,10 @@ class TestBndlOci(CliTestCase):
)
def test_oci_image_with_default_endpoints_no_check(self):
+ self.maxDiff = None
base_args = create_attributes_object({
'format': BndlFormat.OCI_IMAGE,
'name': 'world',
- 'component_description': 'testing desc 1',
'image_tag': 'testing',
'use_default_endpoints': True,
'use_default_check': False,
@@ -450,23 +369,8 @@ class TestBndlOci(CliTestCase):
| }
| }
|}
- |compatibilityVersion = "0"
- |diskSpace = 1073741824
- |memory = 402653184
- |name = "world"
- |nrOfCpus = 0.1
- |roles = [
- | "web"
- |]
- |system = "world"
- |systemVersion = "0"
- |tags = [
- | "testing"
- |]
- |version = "1"
|components {
| my-component {
- | description = "testing desc 1"
| file-system-type = "oci-image"
| start-command = []
| endpoints {
@@ -491,7 +395,6 @@ class TestBndlOci(CliTestCase):
base_args = create_attributes_object({
'format': BndlFormat.OCI_IMAGE,
'name': 'world',
- 'component_description': 'testing desc 1',
'image_tag': 'testing',
'use_default_endpoints': True,
'use_default_check': True,
@@ -529,23 +432,8 @@ class TestBndlOci(CliTestCase):
| }
| description = "hello world"
|}
- |compatibilityVersion = "0"
- |diskSpace = 1073741824
- |memory = 402653184
- |name = "world"
- |nrOfCpus = 0.1
- |roles = [
- | "web"
- |]
- |system = "world"
- |systemVersion = "0"
- |tags = [
- | "testing"
- |]
- |version = "1"
|components {
| my-component {
- | description = "testing desc 1"
| file-system-type = "oci-image"
| start-command = []
| endpoints {
diff --git a/conductr_cli/test/test_bndl_utils.py b/conductr_cli/test/test_bndl_utils.py
index 180194b..865fd14 100644
--- a/conductr_cli/test/test_bndl_utils.py
+++ b/conductr_cli/test/test_bndl_utils.py
@@ -168,7 +168,6 @@ class TestBndlUtils(CliTestCase):
base_args = create_attributes_object({
'name': 'world',
'format': BndlFormat.BUNDLE,
- 'component_description': 'testing desc 1',
'tags': ['testing'],
'annotations': {}
})
@@ -176,7 +175,6 @@ class TestBndlUtils(CliTestCase):
base_args_dup_tags = create_attributes_object({
'name': 'world',
'format': BndlFormat.BUNDLE,
- 'component_description': 'testing desc 1',
'tags': ['testing', 'testing', 'testing'],
'annotations': {}
})
@@ -184,7 +182,6 @@ class TestBndlUtils(CliTestCase):
extended_args = create_attributes_object({
'name': 'world',
'format': BndlFormat.BUNDLE,
- 'component_description': 'testing desc 2',
'version': '4',
'compatibility_version': '5',
'system': 'myapp',
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 3
} | 1.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest",
"flake8",
"rstcheck"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | annotated-types==0.7.0
argcomplete==3.6.1
arrow==1.3.0
certifi==2025.1.31
charset-normalizer==3.4.1
click==8.1.8
colorama==0.4.6
-e git+https://github.com/typesafehub/conductr-cli.git@30063b71f1434a91351a4f263514bcdd71150c45#egg=conductr_cli
docutils==0.21.2
exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
flake8==7.2.0
idna==3.10
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
jsonschema==2.6.0
markdown-it-py==3.0.0
mccabe==0.7.0
mdurl==0.1.2
packaging @ file:///croot/packaging_1734472117206/work
pager==3.3
pluggy @ file:///croot/pluggy_1733169602837/work
prettytable==0.7.2
psutil==5.9.8
pycodestyle==2.13.0
pydantic==2.11.1
pydantic_core==2.33.0
pyflakes==3.3.1
Pygments==2.19.1
pyhocon==0.3.35
PyJWT==1.4.2
pyparsing==3.2.3
pyreadline==2.1
pytest @ file:///croot/pytest_1738938843180/work
python-dateutil==2.9.0.post0
requests==2.32.3
requests-toolbelt==1.0.0
rich==14.0.0
rstcheck==6.2.4
rstcheck-core==1.2.1
shellingham==1.5.4
six==1.17.0
sseclient==0.0.14
toml==0.10.2
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
typer==0.15.2
types-python-dateutil==2.9.0.20241206
typing-inspection==0.4.0
typing_extensions==4.13.0
urllib3==2.3.0
www-authenticate==0.9.2
| name: conductr-cli
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- annotated-types==0.7.0
- argcomplete==3.6.1
- arrow==1.3.0
- certifi==2025.1.31
- charset-normalizer==3.4.1
- click==8.1.8
- colorama==0.4.6
- docutils==0.21.2
- flake8==7.2.0
- idna==3.10
- jsonschema==2.6.0
- markdown-it-py==3.0.0
- mccabe==0.7.0
- mdurl==0.1.2
- pager==3.3
- prettytable==0.7.2
- psutil==5.9.8
- pycodestyle==2.13.0
- pydantic==2.11.1
- pydantic-core==2.33.0
- pyflakes==3.3.1
- pygments==2.19.1
- pyhocon==0.3.35
- pyjwt==1.4.2
- pyparsing==3.2.3
- pyreadline==2.1
- python-dateutil==2.9.0.post0
- requests==2.32.3
- requests-toolbelt==1.0.0
- rich==14.0.0
- rstcheck==6.2.4
- rstcheck-core==1.2.1
- shellingham==1.5.4
- six==1.17.0
- sseclient==0.0.14
- toml==0.10.2
- typer==0.15.2
- types-python-dateutil==2.9.0.20241206
- typing-extensions==4.13.0
- typing-inspection==0.4.0
- urllib3==2.3.0
- www-authenticate==0.9.2
prefix: /opt/conda/envs/conductr-cli
| [
"conductr_cli/test/test_bndl_create.py::TestBndlCreate::test_deterministic_with_shazar",
"conductr_cli/test/test_bndl_create.py::TestBndlCreate::test_deterministic_without_shazar",
"conductr_cli/test/test_bndl_create.py::TestBndlCreate::test_oci_env",
"conductr_cli/test/test_bndl_create.py::TestBndlCreate::test_with_shazar",
"conductr_cli/test/test_bndl_create.py::TestBndlCreate::test_without_shazar",
"conductr_cli/test/test_bndl_main.py::TestBndl::test_parser_with_all_params",
"conductr_cli/test/test_bndl_oci.py::TestBndlOci::test_oci_image_bundle_conf",
"conductr_cli/test/test_bndl_oci.py::TestBndlOci::test_oci_image_bundle_conf_endpoints",
"conductr_cli/test/test_bndl_oci.py::TestBndlOci::test_oci_image_bundle_conf_no_endpoints",
"conductr_cli/test/test_bndl_oci.py::TestBndlOci::test_oci_image_with_check",
"conductr_cli/test/test_bndl_oci.py::TestBndlOci::test_oci_image_with_default_endpoints_no_check",
"conductr_cli/test/test_bndl_oci.py::TestBndlOci::test_oci_params"
]
| [
"conductr_cli/test/test_bndl_create.py::TestBndlCreate::test_bundle",
"conductr_cli/test/test_bndl_create.py::TestBndlCreate::test_bundle_conf",
"conductr_cli/test/test_bndl_create.py::TestBndlCreate::test_bundle_conf_dir",
"conductr_cli/test/test_bndl_create.py::TestBndlCreate::test_bundle_conf_no_name",
"conductr_cli/test/test_bndl_create.py::TestBndlCreate::test_bundle_configuration_arg_no_name",
"conductr_cli/test/test_bndl_create.py::TestBndlCreate::test_bundle_envs",
"conductr_cli/test/test_bndl_create.py::TestBndlCreate::test_bundle_envs_append",
"conductr_cli/test/test_bndl_create.py::TestBndlCreate::test_mtime_from_config",
"conductr_cli/test/test_bndl_create.py::TestBndlCreate::test_no_input_bundle",
"conductr_cli/test/test_bndl_create.py::TestBndlCreate::test_no_input_configuration",
"conductr_cli/test/test_bndl_create.py::TestBndlCreate::test_validation_excludes"
]
| [
"conductr_cli/test/test_bndl_create.py::TestBndlCreate::test_no_format",
"conductr_cli/test/test_bndl_create.py::TestBndlCreate::test_no_ref",
"conductr_cli/test/test_bndl_create.py::TestBndlCreate::test_not_oci",
"conductr_cli/test/test_bndl_main.py::TestBndl::test_parser_acl_params",
"conductr_cli/test/test_bndl_main.py::TestBndl::test_parser_no_args",
"conductr_cli/test/test_bndl_main.py::TestBndl::test_parser_with_min_params",
"conductr_cli/test/test_bndl_main.py::TestBndl::test_run_dash_rewrite",
"conductr_cli/test/test_bndl_main.py::TestBndl::test_warn_ambigous_bind_protocol",
"conductr_cli/test/test_bndl_main.py::TestBndl::test_warn_bad_file",
"conductr_cli/test/test_bndl_main.py::TestBndl::test_warn_bad_oci_image_format_no_layout",
"conductr_cli/test/test_bndl_main.py::TestBndl::test_warn_bad_oci_image_format_no_tag",
"conductr_cli/test/test_bndl_main.py::TestBndl::test_warn_output_tty",
"conductr_cli/test/test_bndl_oci.py::TestBndlOci::test_oci_image_unpack_dir_wrong_format",
"conductr_cli/test/test_bndl_oci.py::TestBndlOci::test_oci_image_unpack_nested_dir",
"conductr_cli/test/test_bndl_oci.py::TestBndlOci::test_oci_image_unpack_nested_tar",
"conductr_cli/test/test_bndl_oci.py::TestBndlOci::test_oci_image_unpack_tar_wrong_format",
"conductr_cli/test/test_bndl_oci.py::TestBndlOci::test_oci_image_unpack_toplevel_dir",
"conductr_cli/test/test_bndl_oci.py::TestBndlOci::test_oci_image_unpack_toplevel_tar",
"conductr_cli/test/test_bndl_utils.py::TestBndlUtils::test_detect_format_dir",
"conductr_cli/test/test_bndl_utils.py::TestBndlUtils::test_detect_format_stream",
"conductr_cli/test/test_bndl_utils.py::TestBndlUtils::test_digest_reader_writer",
"conductr_cli/test/test_bndl_utils.py::TestBndlUtils::test_escape_bash_double_quotes",
"conductr_cli/test/test_bndl_utils.py::TestBndlUtils::test_first_mtime",
"conductr_cli/test/test_bndl_utils.py::TestBndlUtils::test_load_bundle_args_into_conf",
"conductr_cli/test/test_bndl_utils.py::TestBndlUtils::test_load_bundle_args_into_conf_with_generic_defaults",
"conductr_cli/test/test_bndl_utils.py::TestBndlUtils::test_load_bundle_args_into_conf_with_play_defaults"
]
| []
| Apache License 2.0 | 1,371 | [
"conductr_cli/bndl_utils.py",
"conductr_cli/bndl_main.py",
"conductr_cli/bndl_oci.py"
]
| [
"conductr_cli/bndl_utils.py",
"conductr_cli/bndl_main.py",
"conductr_cli/bndl_oci.py"
]
|
|
uccser__verto-214 | 1c84fb7ec8168c2eaf1c9c83ac4f79dd28ca859a | 2017-06-15 00:49:48 | d6ab64cf8e769bdac5a533571ee949c8718a37bd | diff --git a/docs/source/processors/interactive.rst b/docs/source/processors/interactive.rst
index 7382d7e..a029332 100644
--- a/docs/source/processors/interactive.rst
+++ b/docs/source/processors/interactive.rst
@@ -74,7 +74,7 @@ Optional Tag Parameters
The set of filepaths can be accessed after conversion,
see :ref:`accessing_verto_data`.
-The default HTML for button links is:
+The default HTML for an interactive is:
.. literalinclude:: ../../../verto/html-templates/interactive.html
:language: css+jinja
diff --git a/verto/processors/InteractiveBlockProcessor.py b/verto/processors/InteractiveBlockProcessor.py
index 71ec87b..37a295f 100644
--- a/verto/processors/InteractiveBlockProcessor.py
+++ b/verto/processors/InteractiveBlockProcessor.py
@@ -62,10 +62,11 @@ class InteractiveBlockProcessor(GenericTagBlockProcessor):
text = argument_values.get('text', None)
parameters = argument_values.get('parameters', None)
+ # add to list of interactives
+ self.required.add(name)
+
if interactive_type == 'in-page':
self.scripts.add('interactive/{}/scripts.html'.format(name))
- if interactive_type != 'whole-page':
- self.required.add(name)
context = dict()
context['type'] = interactive_type
| Add all interactives to required files | uccser/verto | diff --git a/verto/tests/InteractiveTest.py b/verto/tests/InteractiveTest.py
index 77f464a..375e56b 100644
--- a/verto/tests/InteractiveTest.py
+++ b/verto/tests/InteractiveTest.py
@@ -6,6 +6,7 @@ from verto.VertoExtension import VertoExtension
from verto.processors.InteractiveBlockProcessor import InteractiveBlockProcessor
from verto.tests.ProcessorTest import ProcessorTest
+
class InteractiveTest(ProcessorTest):
'''The interactive processor is a simple tag with a complex
output that relies on external systems.
@@ -37,7 +38,7 @@ class InteractiveTest(ProcessorTest):
expected_string = self.read_test_file(self.processor_name, 'doc_example_in_page_usage_expected.html', strip=True)
self.assertEqual(expected_string, converted_test_string)
- required_files={
+ required_files = {
'interactives': {
'binary-cards'
},
@@ -50,7 +51,7 @@ class InteractiveTest(ProcessorTest):
self.assertEqual(self.verto_extension.required_files, required_files)
def test_doc_example_whole_page(self):
- '''Example of an whole-page interactive.
+ '''Example of a whole-page interactive.
'''
test_string = self.read_test_file(self.processor_name, 'doc_example_whole_page_usage.md')
blocks = self.to_blocks(test_string)
@@ -61,8 +62,8 @@ class InteractiveTest(ProcessorTest):
expected_string = self.read_test_file(self.processor_name, 'doc_example_whole_page_usage_expected.html', strip=True)
self.assertEqual(expected_string, converted_test_string)
- required_files={
- 'interactives': set(),
+ required_files = {
+ 'interactives': {"binary-cards"},
'images': {
'binary-cards/thumbnail.png'
},
@@ -83,7 +84,7 @@ class InteractiveTest(ProcessorTest):
expected_string = self.read_test_file(self.processor_name, 'doc_example_iframe_usage_expected.html', strip=True)
self.assertEqual(expected_string, converted_test_string)
- required_files={
+ required_files = {
'interactives': {
'binary-cards'
},
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 3,
"test_score": 0
},
"num_modified_files": 2
} | 0.6 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.6",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.13
attrs==22.2.0
Babel==2.11.0
certifi==2021.5.30
charset-normalizer==2.0.12
docutils==0.18.1
idna==3.10
imagesize==1.4.1
importlib-metadata==4.8.3
iniconfig==1.1.1
Jinja2==2.9.6
Markdown==2.6.8
MarkupSafe==2.0.1
packaging==21.3
pluggy==1.0.0
py==1.11.0
Pygments==2.14.0
pyparsing==3.1.4
pytest==7.0.1
python-slugify==1.2.4
pytz==2025.2
requests==2.27.1
six==1.17.0
snowballstemmer==2.2.0
Sphinx==1.6.2
sphinx-rtd-theme==0.2.4
sphinxcontrib-serializinghtml==1.1.5
sphinxcontrib-websupport==1.2.4
tomli==1.2.3
typing_extensions==4.1.1
Unidecode==1.3.8
urllib3==1.26.20
-e git+https://github.com/uccser/verto.git@1c84fb7ec8168c2eaf1c9c83ac4f79dd28ca859a#egg=verto
zipp==3.6.0
| name: verto
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.13
- attrs==22.2.0
- babel==2.11.0
- charset-normalizer==2.0.12
- docutils==0.18.1
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- jinja2==2.9.6
- markdown==2.6.8
- markupsafe==2.0.1
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pygments==2.14.0
- pyparsing==3.1.4
- pytest==7.0.1
- python-slugify==1.2.4
- pytz==2025.2
- requests==2.27.1
- setuptools==36.0.1
- six==1.17.0
- snowballstemmer==2.2.0
- sphinx==1.6.2
- sphinx-rtd-theme==0.2.4
- sphinxcontrib-serializinghtml==1.1.5
- sphinxcontrib-websupport==1.2.4
- tomli==1.2.3
- typing-extensions==4.1.1
- unidecode==1.3.8
- urllib3==1.26.20
- zipp==3.6.0
prefix: /opt/conda/envs/verto
| [
"verto/tests/InteractiveTest.py::InteractiveTest::test_doc_example_whole_page"
]
| []
| [
"verto/tests/InteractiveTest.py::InteractiveTest::test_doc_example_iframe",
"verto/tests/InteractiveTest.py::InteractiveTest::test_doc_example_in_page",
"verto/tests/InteractiveTest.py::InteractiveTest::test_doc_example_override_html"
]
| []
| MIT License | 1,372 | [
"docs/source/processors/interactive.rst",
"verto/processors/InteractiveBlockProcessor.py"
]
| [
"docs/source/processors/interactive.rst",
"verto/processors/InteractiveBlockProcessor.py"
]
|
|
rorodata__firefly-12 | 8f85f769b450eb45d9b4e3a338e988a042bf7459 | 2017-06-15 06:05:45 | 8f85f769b450eb45d9b4e3a338e988a042bf7459 | diff --git a/firefly/main.py b/firefly/main.py
index bb06d97..37eb583 100644
--- a/firefly/main.py
+++ b/firefly/main.py
@@ -7,7 +7,7 @@ from .server import FireflyServer
def parse_args():
p = argparse.ArgumentParser()
p.add_argument("-b", "--bind", dest="ADDRESS", default="127.0.0.1:8000")
- p.add_argument("function", help="function to serve")
+ p.add_argument("functions", nargs='+', help="functions to serve")
return p.parse_args()
def load_function(function_spec):
@@ -17,7 +17,14 @@ def load_function(function_spec):
mod_name, func_name = function_spec.rsplit(".", 1)
mod = importlib.import_module(mod_name)
func = getattr(mod, func_name)
- return func
+ return (func_name, func)
+
+def load_functions(function_specs):
+ return [load_function(function_spec) for function_spec in function_specs]
+
+def add_routes(app, functions):
+ for name, function in functions:
+ app.add_route('/'+name, function)
def main():
# ensure current directory is added to sys.path
@@ -25,10 +32,10 @@ def main():
sys.path.insert(0, "")
args = parse_args()
- function = load_function(args.function)
+ functions = load_functions(args.functions)
app = Firefly()
- app.add_route("/", function)
+ add_routes(app, functions)
server = FireflyServer(app, {"bind": args.ADDRESS})
server.run()
| Allow firefly to support multiple functions
It should take multiple functions as command line arguments and expose all of them.
```
$ firefly myfile.square myfile.cube
```
And use:
```
$ curl -d '{"x": 5}' http://localhost:8000/square
25
$ curl -d '{"x": 5}' http://localhost:8000/cube
125
``` | rorodata/firefly | diff --git a/tests/test_main.py b/tests/test_main.py
new file mode 100644
index 0000000..7ecfbf2
--- /dev/null
+++ b/tests/test_main.py
@@ -0,0 +1,8 @@
+import os
+from firefly.main import load_function
+
+def test_load_functions():
+ os.path.exists2 = os.path.exists
+ name, func = load_function("os.path.exists2")
+ assert name == "exists2"
+ assert func == os.path.exists
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_hyperlinks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 1
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.6",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
-e git+https://github.com/rorodata/firefly.git@8f85f769b450eb45d9b4e3a338e988a042bf7459#egg=Firefly
gunicorn==19.7.1
importlib-metadata==4.8.3
iniconfig==1.1.1
packaging==21.3
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
tomli==1.2.3
typing_extensions==4.1.1
WebOb==1.7.2
zipp==3.6.0
| name: firefly
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- gunicorn==19.7.1
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- tomli==1.2.3
- typing-extensions==4.1.1
- webob==1.7.2
- zipp==3.6.0
prefix: /opt/conda/envs/firefly
| [
"tests/test_main.py::test_load_functions"
]
| []
| []
| []
| Apache License 2.0 | 1,373 | [
"firefly/main.py"
]
| [
"firefly/main.py"
]
|
|
rorodata__firefly-13 | 6f199213a35bf87d17c594c023bf6ed4360f70a0 | 2017-06-15 09:17:43 | 6f199213a35bf87d17c594c023bf6ed4360f70a0 | diff --git a/firefly/app.py b/firefly/app.py
index c317648..d3c0da8 100644
--- a/firefly/app.py
+++ b/firefly/app.py
@@ -3,13 +3,28 @@ from webob.exc import HTTPNotFound
import json
from .validator import validate_args, ValidationError
from .utils import json_encode
+from .version import __version__
class Firefly(object):
def __init__(self):
self.mapping = {}
+ self.add_route('/', self.generate_index,internal=True)
- def add_route(self, path, function, **kwargs):
- self.mapping[path] = FireflyFunction(function, **kwargs)
+ def add_route(self, path, function, function_name=None, **kwargs):
+ self.mapping[path] = FireflyFunction(function, function_name, **kwargs)
+
+ def generate_function_list(self):
+ return {f.name: {"path": path, "doc": f.doc}
+ for path, f in self.mapping.items()
+ if f.options.get("internal") != True}
+
+ def generate_index(self):
+ help_dict = {
+ "app": "firefly",
+ "version": __version__,
+ "functions": self.generate_function_list()
+ }
+ return help_dict
def __call__(self, environ, start_response):
request = Request(environ)
@@ -25,10 +40,16 @@ class Firefly(object):
class FireflyFunction(object):
- def __init__(self, function, **kwargs):
+ def __init__(self, function, function_name=None, **options):
self.function = function
+ self.options = options
+ self.name = function_name or function.__name__
+ self.doc = function.__doc__ or ""
def __call__(self, request):
+ if self.options.get("internal", False):
+ return self.make_response(self.function())
+
kwargs = self.get_inputs(request)
try:
validate_args(self.function, kwargs)
diff --git a/firefly/main.py b/firefly/main.py
index 37eb583..dd4a4fb 100644
--- a/firefly/main.py
+++ b/firefly/main.py
@@ -24,7 +24,7 @@ def load_functions(function_specs):
def add_routes(app, functions):
for name, function in functions:
- app.add_route('/'+name, function)
+ app.add_route('/'+name, function, name)
def main():
# ensure current directory is added to sys.path
| Add the ability to find all the available functions served by a firefly service
The user should be able to find the functions available in a firefly service. Right now a server supports only one function, but that is going to change soon.
So, we need to identify the right way to expose the list of functions. It could either be on `/` or some other endpoint like `/_list`. Look at the other RPC implementations and see how they work. We don't have to follow them, but that would give a good idea. It would be better to provide the docstring and argument names along with the function listing.
Please discuss the plan here before implementing. | rorodata/firefly | diff --git a/tests/test_app.py b/tests/test_app.py
index edad1b9..183c914 100644
--- a/tests/test_app.py
+++ b/tests/test_app.py
@@ -1,13 +1,41 @@
from webob import Request, Response
-from firefly.app import FireflyFunction
+from firefly.app import Firefly, FireflyFunction
def square(a):
+ '''Computes square'''
return a**2
+class TestFirefly:
+ def test_generate_function_list(self):
+ firefly = Firefly()
+ assert firefly.generate_function_list() == {}
+
+ firefly.add_route("/square", square, "square")
+ returned_dict = {
+ "square": {
+ "path": "/square",
+ "doc": "Computes square"
+ }
+ }
+ assert firefly.generate_function_list() == returned_dict
+
+ def test_generate_function_list_for_func_name(self):
+ firefly = Firefly()
+ firefly.add_route("/sq2", square, "sq")
+ returned_dict = {
+ "sq": {
+ "path": "/sq2",
+ "doc": "Computes square"
+ }
+ }
+ assert firefly.generate_function_list() == returned_dict
+
+
+
class TestFireflyFunction:
def test_call(self):
func = FireflyFunction(square)
- request = Request.blank("/", POST='{"a": 3}')
+ request = Request.blank("/square", POST='{"a": 3}')
response = func(request)
assert response.status == '200 OK'
assert response.text == '9'
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 2
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | exceptiongroup==1.2.2
-e git+https://github.com/rorodata/firefly.git@6f199213a35bf87d17c594c023bf6ed4360f70a0#egg=Firefly
gunicorn==19.7.1
iniconfig==2.1.0
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
tomli==2.2.1
WebOb==1.7.2
| name: firefly
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- exceptiongroup==1.2.2
- gunicorn==19.7.1
- iniconfig==2.1.0
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- tomli==2.2.1
- webob==1.7.2
prefix: /opt/conda/envs/firefly
| [
"tests/test_app.py::TestFirefly::test_generate_function_list",
"tests/test_app.py::TestFirefly::test_generate_function_list_for_func_name"
]
| []
| [
"tests/test_app.py::TestFireflyFunction::test_call"
]
| []
| Apache License 2.0 | 1,374 | [
"firefly/app.py",
"firefly/main.py"
]
| [
"firefly/app.py",
"firefly/main.py"
]
|
|
frictionlessdata__goodtables-py-197 | 25db6e4c20efc0a626323ed3bbef23845acbaf47 | 2017-06-15 12:39:43 | 25db6e4c20efc0a626323ed3bbef23845acbaf47 | roll: @amercader
Please take a look. It addresses https://sentry.io/open-knowledge/goodtablesio-production/issues/287753774/ | diff --git a/goodtables/presets/table.py b/goodtables/presets/table.py
index 0248096..940a85d 100644
--- a/goodtables/presets/table.py
+++ b/goodtables/presets/table.py
@@ -39,6 +39,10 @@ def table(source, schema=None, **options):
warnings.append(
'Table schema "%s" has a validation error "%s"' %
(schema, str(error).splitlines()[0]))
+ except Exception as error:
+ warnings.append(
+ 'Table Schema "%s" has a loading error "%s"' %
+ (schema, error))
# Add table
if not warnings:
| Catch jsontableschema.exceptions.InvalidJSONError
# Overview
For `table` and `datapackage` datasets it has to be caught. See - https://sentry.io/open-knowledge/goodtablesio-production/issues/287753774/ | frictionlessdata/goodtables-py | diff --git a/tests/presets/test_table.py b/tests/presets/test_table.py
index 1281a86..91fe320 100644
--- a/tests/presets/test_table.py
+++ b/tests/presets/test_table.py
@@ -20,3 +20,10 @@ def test_preset_table_but_got_datapackage_issue_187():
assert len(warnings) == 1
assert len(tables) == 0
assert 'Use "datapackage" preset' in warnings[0]
+
+
+def test_preset_table_invalid_json_issue_196():
+ warnings, tables = presets.table('valid.csv', schema='data/invalid_json.json')
+ assert len(warnings) == 1
+ assert len(tables) == 0
+ assert 'has a loading error' in warnings[0]
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_hyperlinks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 1
},
"num_modified_files": 1
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install --upgrade -e .[develop,ods]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"mock",
"pyyaml",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.6",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
boto3==1.23.10
botocore==1.26.10
certifi==2021.5.30
chardet==5.0.0
charset-normalizer==2.0.12
click==6.7
datapackage==0.8.9
distlib==0.3.9
et-xmlfile==1.1.0
ezodf==0.3.2
filelock==3.4.1
future==0.18.3
-e git+https://github.com/frictionlessdata/goodtables-py.git@25db6e4c20efc0a626323ed3bbef23845acbaf47#egg=goodtables
greenlet==2.0.2
idna==3.10
ijson==3.3.0
importlib-metadata==4.8.3
importlib-resources==5.4.0
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
isodate==0.6.1
jmespath==0.10.0
jsonlines==3.1.0
jsonschema==2.6.0
jsontableschema==0.10.1
linear-tsv==1.1.0
lxml==3.8.0
mccabe==0.7.0
mock==5.2.0
more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work
openpyxl==3.1.3
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
platformdirs==2.4.0
pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pycodestyle==2.10.0
pydocstyle==6.3.0
pyflakes==3.0.1
pylama==7.7.1
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
pytest==6.2.4
python-dateutil==2.9.0.post0
PyYAML==6.0.1
requests==2.27.1
rfc3986==0.4.1
s3transfer==0.5.2
six==1.17.0
snowballstemmer==2.2.0
SQLAlchemy==1.4.54
tabulator==1.53.5
toml @ file:///tmp/build/80754af9/toml_1616166611790/work
tox==3.28.0
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
unicodecsv==0.14.1
urllib3==1.26.20
virtualenv==20.17.1
xlrd==2.0.1
zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
| name: goodtables-py
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=21.4.0=pyhd3eb1b0_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- importlib_metadata=4.8.1=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- more-itertools=8.12.0=pyhd3eb1b0_0
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pip=21.2.2=py36h06a4308_0
- pluggy=0.13.1=py36h06a4308_0
- py=1.11.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pytest=6.2.4=py36h06a4308_2
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- toml=0.10.2=pyhd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zipp=3.6.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
- pip:
- boto3==1.23.10
- botocore==1.26.10
- chardet==5.0.0
- charset-normalizer==2.0.12
- click==6.7
- datapackage==0.8.9
- distlib==0.3.9
- et-xmlfile==1.1.0
- ezodf==0.3.2
- filelock==3.4.1
- future==0.18.3
- greenlet==2.0.2
- idna==3.10
- ijson==3.3.0
- importlib-metadata==4.8.3
- importlib-resources==5.4.0
- isodate==0.6.1
- jmespath==0.10.0
- jsonlines==3.1.0
- jsonschema==2.6.0
- jsontableschema==0.10.1
- linear-tsv==1.1.0
- lxml==3.8.0
- mccabe==0.7.0
- mock==5.2.0
- openpyxl==3.1.3
- platformdirs==2.4.0
- pycodestyle==2.10.0
- pydocstyle==6.3.0
- pyflakes==3.0.1
- pylama==7.7.1
- python-dateutil==2.9.0.post0
- pyyaml==6.0.1
- requests==2.27.1
- rfc3986==0.4.1
- s3transfer==0.5.2
- six==1.17.0
- snowballstemmer==2.2.0
- sqlalchemy==1.4.54
- tabulator==1.53.5
- tox==3.28.0
- unicodecsv==0.14.1
- urllib3==1.26.20
- virtualenv==20.17.1
- xlrd==2.0.1
prefix: /opt/conda/envs/goodtables-py
| [
"tests/presets/test_table.py::test_preset_table_invalid_json_issue_196"
]
| []
| [
"tests/presets/test_table.py::test_preset_table",
"tests/presets/test_table.py::test_preset_table_but_got_datapackage_issue_187"
]
| []
| MIT License | 1,375 | [
"goodtables/presets/table.py"
]
| [
"goodtables/presets/table.py"
]
|
NeuralEnsemble__python-neo-341 | 9bd5f01a3b3dc311a89c0be3b0008e516fd3a098 | 2017-06-15 12:57:26 | f0285a7ab15ff6535d3e6736e0163c4fa6aea091 | diff --git a/neo/core/analogsignal.py b/neo/core/analogsignal.py
index 8e129a52..aa971c80 100644
--- a/neo/core/analogsignal.py
+++ b/neo/core/analogsignal.py
@@ -285,11 +285,11 @@ class AnalogSignal(BaseNeo, pq.Quantity):
Get the item or slice :attr:`i`.
'''
obj = super(AnalogSignal, self).__getitem__(i)
- if isinstance(i, int): # a single point in time across all channels
+ if isinstance(i, (int, np.integer)): # a single point in time across all channels
obj = pq.Quantity(obj.magnitude, units=obj.units)
elif isinstance(i, tuple):
j, k = i
- if isinstance(j, int): # extract a quantity array
+ if isinstance(j, (int, np.integer)): # extract a quantity array
obj = pq.Quantity(obj.magnitude, units=obj.units)
else:
if isinstance(j, slice):
@@ -303,7 +303,7 @@ class AnalogSignal(BaseNeo, pq.Quantity):
# in the general case, would need to return IrregularlySampledSignal(Array)
else:
raise TypeError("%s not supported" % type(j))
- if isinstance(k, int):
+ if isinstance(k, (int, np.integer)):
obj = obj.reshape(-1, 1)
if self.channel_index:
obj.channel_index = self.channel_index.__getitem__(k)
diff --git a/neo/core/irregularlysampledsignal.py b/neo/core/irregularlysampledsignal.py
index b37034fc..045fb24a 100644
--- a/neo/core/irregularlysampledsignal.py
+++ b/neo/core/irregularlysampledsignal.py
@@ -225,11 +225,11 @@ class IrregularlySampledSignal(BaseNeo, pq.Quantity):
Get the item or slice :attr:`i`.
'''
obj = super(IrregularlySampledSignal, self).__getitem__(i)
- if isinstance(i, int): # a single point in time across all channels
+ if isinstance(i, (int, np.integer)): # a single point in time across all channels
obj = pq.Quantity(obj.magnitude, units=obj.units)
elif isinstance(i, tuple):
j, k = i
- if isinstance(j, int): # a single point in time across some channels
+ if isinstance(j, (int, np.integer)): # a single point in time across some channels
obj = pq.Quantity(obj.magnitude, units=obj.units)
else:
if isinstance(j, slice):
@@ -238,7 +238,7 @@ class IrregularlySampledSignal(BaseNeo, pq.Quantity):
raise NotImplementedError("Arrays not yet supported")
else:
raise TypeError("%s not supported" % type(j))
- if isinstance(k, int):
+ if isinstance(k, (int, np.integer)):
obj = obj.reshape(-1, 1)
elif isinstance(i, slice):
obj.times = self.times.__getitem__(i)
| Slicing `AnalogSignal` with numpy.int64 gives an incorrect result
Example:
```python
>>> signal[:, 0].shape
(1000, 1)
>>> signal[:, np.int64(0)].shape
(1000,)
```
| NeuralEnsemble/python-neo | diff --git a/neo/test/coretest/test_analogsignal.py b/neo/test/coretest/test_analogsignal.py
index 0053e566..4b769fe3 100644
--- a/neo/test/coretest/test_analogsignal.py
+++ b/neo/test/coretest/test_analogsignal.py
@@ -313,28 +313,29 @@ class TestAnalogSignalArrayMethods(unittest.TestCase):
def test__slice_should_return_AnalogSignalArray(self):
# slice
- result = self.signal1[3:8, 0]
- self.assertIsInstance(result, AnalogSignal)
- assert_neo_object_is_compliant(result)
- self.assertEqual(result.name, 'spam') # should slicing really preserve name and description?
- self.assertEqual(result.description, 'eggs') # perhaps these should be modified to indicate the slice?
- self.assertEqual(result.file_origin, 'testfile.txt')
- self.assertEqual(result.annotations, {'arg1': 'test'})
-
- self.assertEqual(result.size, 5)
- self.assertEqual(result.sampling_period, self.signal1.sampling_period)
- self.assertEqual(result.sampling_rate, self.signal1.sampling_rate)
- self.assertEqual(result.t_start,
- self.signal1.t_start+3*result.sampling_period)
- self.assertEqual(result.t_stop,
- result.t_start + 5*result.sampling_period)
- assert_array_equal(result.magnitude, self.data1[3:8].reshape(-1, 1))
-
- # Test other attributes were copied over (in this case, defaults)
- self.assertEqual(result.file_origin, self.signal1.file_origin)
- self.assertEqual(result.name, self.signal1.name)
- self.assertEqual(result.description, self.signal1.description)
- self.assertEqual(result.annotations, self.signal1.annotations)
+ for index in (0, np.int64(0)):
+ result = self.signal1[3:8, index]
+ self.assertIsInstance(result, AnalogSignal)
+ assert_neo_object_is_compliant(result)
+ self.assertEqual(result.name, 'spam') # should slicing really preserve name and description?
+ self.assertEqual(result.description, 'eggs') # perhaps these should be modified to indicate the slice?
+ self.assertEqual(result.file_origin, 'testfile.txt')
+ self.assertEqual(result.annotations, {'arg1': 'test'})
+
+ self.assertEqual(result.size, 5)
+ self.assertEqual(result.sampling_period, self.signal1.sampling_period)
+ self.assertEqual(result.sampling_rate, self.signal1.sampling_rate)
+ self.assertEqual(result.t_start,
+ self.signal1.t_start+3*result.sampling_period)
+ self.assertEqual(result.t_stop,
+ result.t_start + 5*result.sampling_period)
+ assert_array_equal(result.magnitude, self.data1[3:8].reshape(-1, 1))
+
+ # Test other attributes were copied over (in this case, defaults)
+ self.assertEqual(result.file_origin, self.signal1.file_origin)
+ self.assertEqual(result.name, self.signal1.name)
+ self.assertEqual(result.description, self.signal1.description)
+ self.assertEqual(result.annotations, self.signal1.annotations)
def test__slice_should_let_access_to_parents_objects(self):
result = self.signal1.time_slice(1*pq.ms,3*pq.ms)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 2
} | 0.5 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.6",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
importlib-metadata==4.8.3
iniconfig==1.1.1
-e git+https://github.com/NeuralEnsemble/python-neo.git@9bd5f01a3b3dc311a89c0be3b0008e516fd3a098#egg=neo
nose==1.3.7
numpy==1.19.5
packaging==21.3
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
quantities==0.13.0
tomli==1.2.3
typing_extensions==4.1.1
zipp==3.6.0
| name: python-neo
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- nose==1.3.7
- numpy==1.19.5
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- quantities==0.13.0
- tomli==1.2.3
- typing-extensions==4.1.1
- zipp==3.6.0
prefix: /opt/conda/envs/python-neo
| [
"neo/test/coretest/test_analogsignal.py::TestAnalogSignalArrayMethods::test__slice_should_return_AnalogSignalArray"
]
| [
"neo/test/coretest/test_analogsignal.py::TestAnalogSignalCombination::test__add_quantity_should_preserve_data_complement",
"neo/test/coretest/test_analogsignal.py::TestAnalogSignalCombination::test__add_two_consistent_signals_should_preserve_data_complement",
"neo/test/coretest/test_analogsignal.py::TestAnalogSignalFunctions::test__pickle"
]
| [
"neo/test/coretest/test_analogsignal.py::Test__generate_datasets::test__fake_neo__cascade",
"neo/test/coretest/test_analogsignal.py::Test__generate_datasets::test__fake_neo__nocascade",
"neo/test/coretest/test_analogsignal.py::TestAnalogSignalConstructor::test__create2D_with_copy_false_should_return_view",
"neo/test/coretest/test_analogsignal.py::TestAnalogSignalConstructor::test__create_from_array_no_units_ValueError",
"neo/test/coretest/test_analogsignal.py::TestAnalogSignalConstructor::test__create_from_list",
"neo/test/coretest/test_analogsignal.py::TestAnalogSignalConstructor::test__create_from_np_array",
"neo/test/coretest/test_analogsignal.py::TestAnalogSignalConstructor::test__create_from_quantities_array",
"neo/test/coretest/test_analogsignal.py::TestAnalogSignalConstructor::test__create_from_quantities_array_inconsistent_units_ValueError",
"neo/test/coretest/test_analogsignal.py::TestAnalogSignalConstructor::test__create_inconsistent_sampling_rate_and_period_ValueError",
"neo/test/coretest/test_analogsignal.py::TestAnalogSignalConstructor::test__create_with_None_sampling_rate_should_raise_ValueError",
"neo/test/coretest/test_analogsignal.py::TestAnalogSignalConstructor::test__create_with_None_t_start_should_raise_ValueError",
"neo/test/coretest/test_analogsignal.py::TestAnalogSignalConstructor::test__create_with_additional_argument",
"neo/test/coretest/test_analogsignal.py::TestAnalogSignalConstructor::test__create_with_copy_false_should_return_view",
"neo/test/coretest/test_analogsignal.py::TestAnalogSignalConstructor::test__create_with_copy_true_should_return_copy",
"neo/test/coretest/test_analogsignal.py::TestAnalogSignalConstructor::test__create_without_sampling_rate_or_period_ValueError",
"neo/test/coretest/test_analogsignal.py::TestAnalogSignalProperties::test__compliant",
"neo/test/coretest/test_analogsignal.py::TestAnalogSignalProperties::test__duplicate_with_new_array",
"neo/test/coretest/test_analogsignal.py::TestAnalogSignalProperties::test__duration_getter",
"neo/test/coretest/test_analogsignal.py::TestAnalogSignalProperties::test__repr",
"neo/test/coretest/test_analogsignal.py::TestAnalogSignalProperties::test__sampling_period_getter",
"neo/test/coretest/test_analogsignal.py::TestAnalogSignalProperties::test__sampling_period_setter",
"neo/test/coretest/test_analogsignal.py::TestAnalogSignalProperties::test__sampling_period_setter_None_ValueError",
"neo/test/coretest/test_analogsignal.py::TestAnalogSignalProperties::test__sampling_period_setter_not_quantity_ValueError",
"neo/test/coretest/test_analogsignal.py::TestAnalogSignalProperties::test__sampling_rate_getter",
"neo/test/coretest/test_analogsignal.py::TestAnalogSignalProperties::test__sampling_rate_setter",
"neo/test/coretest/test_analogsignal.py::TestAnalogSignalProperties::test__sampling_rate_setter_None_ValueError",
"neo/test/coretest/test_analogsignal.py::TestAnalogSignalProperties::test__sampling_rate_setter_not_quantity_ValueError",
"neo/test/coretest/test_analogsignal.py::TestAnalogSignalProperties::test__t_start_setter_None_ValueError",
"neo/test/coretest/test_analogsignal.py::TestAnalogSignalProperties::test__t_stop_getter",
"neo/test/coretest/test_analogsignal.py::TestAnalogSignalProperties::test__times_getter",
"neo/test/coretest/test_analogsignal.py::TestAnalogSignalArrayMethods::test__comparison_with_inconsistent_units_should_raise_Exception",
"neo/test/coretest/test_analogsignal.py::TestAnalogSignalArrayMethods::test__compliant",
"neo/test/coretest/test_analogsignal.py::TestAnalogSignalArrayMethods::test__copy_should_let_access_to_parents_objects",
"neo/test/coretest/test_analogsignal.py::TestAnalogSignalArrayMethods::test__getitem_out_of_bounds_IndexError",
"neo/test/coretest/test_analogsignal.py::TestAnalogSignalArrayMethods::test__getitem_should_return_single_quantity",
"neo/test/coretest/test_analogsignal.py::TestAnalogSignalArrayMethods::test__rescale_new",
"neo/test/coretest/test_analogsignal.py::TestAnalogSignalArrayMethods::test__rescale_new_incompatible_ValueError",
"neo/test/coretest/test_analogsignal.py::TestAnalogSignalArrayMethods::test__rescale_same",
"neo/test/coretest/test_analogsignal.py::TestAnalogSignalArrayMethods::test__simple_statistics",
"neo/test/coretest/test_analogsignal.py::TestAnalogSignalArrayMethods::test__slice_should_change_sampling_period",
"neo/test/coretest/test_analogsignal.py::TestAnalogSignalArrayMethods::test__slice_should_let_access_to_parents_objects",
"neo/test/coretest/test_analogsignal.py::TestAnalogSignalArrayMethods::test__slice_should_modify_linked_channelindex",
"neo/test/coretest/test_analogsignal.py::TestAnalogSignalArrayMethods::test_as_array",
"neo/test/coretest/test_analogsignal.py::TestAnalogSignalArrayMethods::test_as_quantity",
"neo/test/coretest/test_analogsignal.py::TestAnalogSignalArrayMethods::test_comparison_operators",
"neo/test/coretest/test_analogsignal.py::TestAnalogSignalEquality::test__signals_with_different_data_complement_should_be_not_equal",
"neo/test/coretest/test_analogsignal.py::TestAnalogSignalCombination::test__add_const_quantity_should_preserve_data_complement",
"neo/test/coretest/test_analogsignal.py::TestAnalogSignalCombination::test__add_signals_with_inconsistent_data_complement_ValueError",
"neo/test/coretest/test_analogsignal.py::TestAnalogSignalCombination::test__compliant",
"neo/test/coretest/test_analogsignal.py::TestAnalogSignalCombination::test__divide_by_const_should_preserve_data_complement",
"neo/test/coretest/test_analogsignal.py::TestAnalogSignalCombination::test__mult_by_const_float_should_preserve_data_complement",
"neo/test/coretest/test_analogsignal.py::TestAnalogSignalCombination::test__subtract_const_should_preserve_data_complement",
"neo/test/coretest/test_analogsignal.py::TestAnalogSignalCombination::test__subtract_from_const_should_return_signal",
"neo/test/coretest/test_analogsignal.py::TestAnalogSignalSampling::test___get_sampling_rate__period_array_rate_none_TypeError",
"neo/test/coretest/test_analogsignal.py::TestAnalogSignalSampling::test___get_sampling_rate__period_none_rate_float_TypeError",
"neo/test/coretest/test_analogsignal.py::TestAnalogSignalSampling::test___get_sampling_rate__period_none_rate_none_ValueError",
"neo/test/coretest/test_analogsignal.py::TestAnalogSignalSampling::test___get_sampling_rate__period_none_rate_quant",
"neo/test/coretest/test_analogsignal.py::TestAnalogSignalSampling::test___get_sampling_rate__period_quant_rate_none",
"neo/test/coretest/test_analogsignal.py::TestAnalogSignalSampling::test___get_sampling_rate__period_rate_equivalent",
"neo/test/coretest/test_analogsignal.py::TestAnalogSignalSampling::test___get_sampling_rate__period_rate_not_equivalent_ValueError"
]
| []
| BSD 3-Clause "New" or "Revised" License | 1,376 | [
"neo/core/irregularlysampledsignal.py",
"neo/core/analogsignal.py"
]
| [
"neo/core/irregularlysampledsignal.py",
"neo/core/analogsignal.py"
]
|
|
Azure__azure-cli-3743 | e80e204a01bf4f32c17d8f89a955c3dd49eaa7ec | 2017-06-15 20:19:26 | eb12ac454cbe1ddb59c86cdf2045e1912660e750 | codecov-io: # [Codecov](https://codecov.io/gh/Azure/azure-cli/pull/3743?src=pr&el=h1) Report
> Merging [#3743](https://codecov.io/gh/Azure/azure-cli/pull/3743?src=pr&el=desc) into [master](https://codecov.io/gh/Azure/azure-cli/commit/ff20840c0278a29204adbb2c23c50d92feaa799f?src=pr&el=desc) will **not change** coverage.
> The diff coverage is `50%`.
[](https://codecov.io/gh/Azure/azure-cli/pull/3743?src=pr&el=tree)
```diff
@@ Coverage Diff @@
## master #3743 +/- ##
=======================================
Coverage 72.09% 72.09%
=======================================
Files 422 422
Lines 26259 26259
Branches 3995 3995
=======================================
Hits 18932 18932
Misses 6099 6099
Partials 1228 1228
```
| [Impacted Files](https://codecov.io/gh/Azure/azure-cli/pull/3743?src=pr&el=tree) | Coverage Δ | |
|---|---|---|
| [.../azure/cli/command\_modules/resource/\_validators.py](https://codecov.io/gh/Azure/azure-cli/pull/3743?src=pr&el=tree#diff-c3JjL2NvbW1hbmRfbW9kdWxlcy9henVyZS1jbGktcmVzb3VyY2UvYXp1cmUvY2xpL2NvbW1hbmRfbW9kdWxlcy9yZXNvdXJjZS9fdmFsaWRhdG9ycy5weQ==) | `71.95% <50%> (ø)` | :arrow_up: |
------
[Continue to review full report at Codecov](https://codecov.io/gh/Azure/azure-cli/pull/3743?src=pr&el=continue).
> **Legend** - [Click here to learn more](https://docs.codecov.io/docs/codecov-delta)
> `Δ = absolute <relative> (impact)`, `ø = not affected`, `? = missing data`
> Powered by [Codecov](https://codecov.io/gh/Azure/azure-cli/pull/3743?src=pr&el=footer). Last update [ff20840...2d89597](https://codecov.io/gh/Azure/azure-cli/pull/3743?src=pr&el=lastupdated). Read the [comment docs](https://docs.codecov.io/docs/pull-request-comments).
| diff --git a/azure-cli.pyproj b/azure-cli.pyproj
index ea34c0dd2..842d462d2 100644
--- a/azure-cli.pyproj
+++ b/azure-cli.pyproj
@@ -616,8 +616,6 @@
<Compile Include="command_modules\azure-cli-sf\azure\cli\command_modules\sf\_params.py" />
<Compile Include="command_modules\azure-cli-sf\azure\cli\command_modules\sf\__init__.py" />
<Compile Include="command_modules\azure-cli-sf\setup.py" />
- <Compile Include="command_modules\azure-cli-sf\tests\manual_scenario_sf_commands.py" />
- <Compile Include="command_modules\azure-cli-sf\tests\manual_sf_commands.py" />
<Compile Include="command_modules\azure-cli-sql\azure\cli\command_modules\sql\commands.py" />
<Compile Include="command_modules\azure-cli-sql\azure\cli\command_modules\sql\custom.py" />
<Compile Include="command_modules\azure-cli-sql\azure\cli\command_modules\sql\help.py" />
@@ -937,12 +935,10 @@
<Folder Include="command_modules\azure-cli-redis\azure\cli\command_modules\" />
<Folder Include="command_modules\azure-cli-redis\azure\cli\command_modules\redis\" />
<Folder Include="command_modules\azure-cli-redis\tests\" />
- <Folder Include="command_modules\azure-cli-resource\azure\cli\command_modules\resource\tests\" />
<Folder Include="command_modules\azure-cli-resource\tests\" />
<Folder Include="command_modules\azure-cli-role\tests\" />
<Folder Include="command_modules\azure-cli-redis\azure\cli\command_modules\redis\tests\" />
<Folder Include="command_modules\azure-cli-role\tests\" />
- <Folder Include="command_modules\azure-cli-role\tests\recordings\" />
<Folder Include="command_modules\azure-cli-sf\" />
<Folder Include="command_modules\azure-cli-sf\azure\" />
<Folder Include="command_modules\azure-cli-sf\azure\cli\" />
@@ -1122,6 +1118,9 @@
<Content Include="command_modules\azure-cli-resource\tests\crossrg_deploy.json" />
<Content Include="command_modules\azure-cli-resource\tests\simple_deploy.json" />
<Content Include="command_modules\azure-cli-resource\tests\simple_deploy_parameters.json" />
+ <Content Include="command_modules\azure-cli-resource\tests\test-object.json" />
+ <Content Include="command_modules\azure-cli-resource\tests\test-params.json" />
+ <Content Include="command_modules\azure-cli-resource\tests\test-template.json" />
<Content Include="command_modules\azure-cli-role\HISTORY.rst" />
<Content Include="command_modules\azure-cli-sf\HISTORY.rst" />
<Content Include="command_modules\azure-cli-sql\HISTORY.rst" />
@@ -1138,11 +1137,6 @@
<Content Include="command_modules\azure-cli-vm\tests\sample-public.json" />
<Content Include="command_modules\azure-cli-vm\tests\vmss_create_test_plan.md" />
<Content Include="command_modules\azure-cli-vm\tests\vm_create_test_plan.md" />
- <Content Include="command_modules\azure-cli-role\tests\recordings\test_application_set_scenario.yaml" />
- <Content Include="command_modules\azure-cli-role\tests\recordings\test_create_for_rbac_with_secret.yaml" />
- <Content Include="command_modules\azure-cli-role\tests\recordings\test_role_assignment_scenario.yaml" />
- <Content Include="command_modules\azure-cli-role\tests\recordings\test_role_create_scenario.yaml" />
- <Content Include="command_modules\azure-cli-role\tests\recordings\test_sp_create_scenario.yaml" />
</ItemGroup>
<ItemGroup>
<Interpreter Include="..\env\">
diff --git a/src/command_modules/azure-cli-resource/HISTORY.rst b/src/command_modules/azure-cli-resource/HISTORY.rst
index eb2eb204b..7713c8e11 100644
--- a/src/command_modules/azure-cli-resource/HISTORY.rst
+++ b/src/command_modules/azure-cli-resource/HISTORY.rst
@@ -1,9 +1,16 @@
.. :changelog:
Release History
+
===============
+
+unreleased
+++++++++++++++++++
+* `group deployment create`: Fixes issue where some parameter files were no longer recognized using @<file> syntax.
+
+
2.0.8 (2017-06-13)
-^^^^^^^^^^^^^^^^^^
+++++++++++++++++++
* Fix up some parsing and error messages. (#3584)
* Fix various pylint disable rules
* Fix --resource-type parsing for the lock command to accept <resource-namespace>/<resource-type>
diff --git a/src/command_modules/azure-cli-resource/azure/cli/command_modules/resource/_validators.py b/src/command_modules/azure-cli-resource/azure/cli/command_modules/resource/_validators.py
index fd6531210..c97a2867f 100644
--- a/src/command_modules/azure-cli-resource/azure/cli/command_modules/resource/_validators.py
+++ b/src/command_modules/azure-cli-resource/azure/cli/command_modules/resource/_validators.py
@@ -62,11 +62,11 @@ def validate_deployment_parameters(namespace):
parameters = {}
for params in namespace.parameters or []:
for item in params:
- if not _try_parse_key_value_object(parameters, item):
- param_obj = _try_load_file_object(item) or _try_parse_json_object(item)
- if not param_obj:
- raise CLIError('Unable to parse parameter: {}'.format(item))
+ param_obj = _try_load_file_object(item) or _try_parse_json_object(item)
+ if param_obj:
parameters.update(param_obj)
+ elif not _try_parse_key_value_object(parameters, item):
+ raise CLIError('Unable to parse parameter: {}'.format(item))
namespace.parameters = parameters
| Parameters file values not being read during deployment create
### Description
When using `az group deployment create` with the newest version of the Azure CLI it no longer respects the `--parameters @<file>` contents and prompts me to type out each value in the command line.
Tested same template/parameters JSON files on another machine with an older version of Azure CLI without issue.
---
### Environment summary
**Install Method:** How did you install the CLI? (e.g. pip, interactive script, apt-get, Docker, MSI, nightly)
Answer here: Linux install using curl -L <url> | bash
**CLI Version:** What version of the CLI and modules are installed? (Use `az --version`)
Answer here:
azure-cli (2.0.8)
acr (2.0.6)
acs (2.0.8)
appservice (0.1.8)
batch (3.0.1)
billing (0.1.1)
cdn (0.0.4)
cloud (2.0.4)
cognitiveservices (0.1.4)
command-modules-nspkg (2.0.0)
component (2.0.5)
configure (2.0.8)
consumption (0.1.1)
core (2.0.9)
cosmosdb (0.1.8)
dla (0.0.8)
dls (0.0.8)
feedback (2.0.4)
find (0.2.4)
interactive (0.3.4)
iot (0.1.7)
keyvault (2.0.6)
lab (0.0.6)
monitor (0.0.6)
network (2.0.8)
nspkg (3.0.0)
profile (2.0.6)
rdbms (0.0.3)
redis (0.2.5)
resource (2.0.8)
role (2.0.6)
sf (1.0.3)
sql (2.0.5)
storage (2.0.8)
vm (2.0.8)
Python (Linux) 2.7.5 (default, Feb 10 2017, 17:36:50)
[GCC 4.9.2 20141101 (Red Hat 4.9.2-1)]
Python location '/home/jayden/lib/azure-cli/bin/python'
**OS Version:** What OS and version are you using?
Answer here: Modified Fedora 18
**Shell Type:** What shell are you using? (e.g. bash, cmd.exe, Bash on Windows)
Answer here: bash
| Azure/azure-cli | diff --git a/src/command_modules/azure-cli-resource/tests/recordings/latest/test_group_deployment.yaml b/src/command_modules/azure-cli-resource/tests/recordings/latest/test_group_deployment.yaml
index 5e8b598db..36ac195af 100644
--- a/src/command_modules/azure-cli-resource/tests/recordings/latest/test_group_deployment.yaml
+++ b/src/command_modules/azure-cli-resource/tests/recordings/latest/test_group_deployment.yaml
@@ -1,350 +1,1070 @@
interactions:
+- request:
+ body: '{"tags": {"use": "az-test"}, "location": "westus"}'
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ CommandName: [group create]
+ Connection: [keep-alive]
+ Content-Length: ['50']
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
+ msrest_azure/0.4.7 resourcemanagementclient/1.1.0rc1 Azure-SDK-For-Python
+ AZURECLI/2.0.9+dev]
+ accept-language: [en-US]
+ method: PUT
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001?api-version=2017-05-10
+ response:
+ body: {string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_deployment000001","name":"cli_test_deployment000001","location":"westus","tags":{"use":"az-test"},"properties":{"provisioningState":"Succeeded"}}'}
+ headers:
+ cache-control: [no-cache]
+ content-length: ['328']
+ content-type: [application/json; charset=utf-8]
+ date: ['Thu, 15 Jun 2017 20:10:13 GMT']
+ expires: ['-1']
+ pragma: [no-cache]
+ strict-transport-security: [max-age=31536000; includeSubDomains]
+ x-ms-ratelimit-remaining-subscription-writes: ['1198']
+ status: {code: 201, message: Created}
- request:
body: null
headers:
Accept: [application/json]
Accept-Encoding: ['gzip, deflate']
+ CommandName: [network vnet create]
Connection: [keep-alive]
Content-Type: [application/json; charset=utf-8]
User-Agent: [python/3.5.1 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
msrest_azure/0.4.7 resourcemanagementclient/1.1.0rc1 Azure-SDK-For-Python
- AZURECLI/TEST/2.0.7+dev]
+ AZURECLI/2.0.9+dev]
accept-language: [en-US]
- x-ms-client-request-id: [984358f0-4c90-11e7-8df4-a0b3ccf7272a]
method: GET
- uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/azure-cli-deployment-test?api-version=2017-05-10
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001?api-version=2017-05-10
response:
- body: {string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test","name":"azure-cli-deployment-test","location":"westus","tags":{"use":"az-test"},"properties":{"provisioningState":"Succeeded"}}'}
+ body: {string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_deployment000001","name":"cli_test_deployment000001","location":"westus","tags":{"use":"az-test"},"properties":{"provisioningState":"Succeeded"}}'}
headers:
- Cache-Control: [no-cache]
- Content-Type: [application/json; charset=utf-8]
- Date: ['Thu, 08 Jun 2017 21:22:36 GMT']
- Expires: ['-1']
- Pragma: [no-cache]
- Strict-Transport-Security: [max-age=31536000; includeSubDomains]
- Vary: [Accept-Encoding]
- content-length: ['240']
+ cache-control: [no-cache]
+ content-length: ['328']
+ content-type: [application/json; charset=utf-8]
+ date: ['Thu, 15 Jun 2017 20:10:14 GMT']
+ expires: ['-1']
+ pragma: [no-cache]
+ strict-transport-security: [max-age=31536000; includeSubDomains]
+ vary: [Accept-Encoding]
status: {code: 200, message: OK}
- request:
- body: '{"location": "westus", "properties": {"dhcpOptions": {}, "subnets": [{"properties":
- {"addressPrefix": "10.0.0.0/24"}, "name": "subnet1"}], "addressSpace": {"addressPrefixes":
- ["10.0.0.0/16"]}}, "tags": {}}'
+ body: '{"tags": {}, "location": "westus", "properties": {"addressSpace": {"addressPrefixes":
+ ["10.0.0.0/16"]}, "dhcpOptions": {}, "subnets": [{"properties": {"addressPrefix":
+ "10.0.0.0/24"}, "name": "subnet1"}]}}'
headers:
Accept: [application/json]
Accept-Encoding: ['gzip, deflate']
+ CommandName: [network vnet create]
Connection: [keep-alive]
Content-Length: ['205']
Content-Type: [application/json; charset=utf-8]
User-Agent: [python/3.5.1 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
msrest_azure/0.4.7 networkmanagementclient/1.0.0rc3 Azure-SDK-For-Python
- AZURECLI/TEST/2.0.7+dev]
+ AZURECLI/2.0.9+dev]
accept-language: [en-US]
- x-ms-client-request-id: [9864d83e-4c90-11e7-a276-a0b3ccf7272a]
method: PUT
- uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Network/virtualNetworks/vnet1?api-version=2017-03-01
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_deployment000001/providers/Microsoft.Network/virtualNetworks/vnet1?api-version=2017-03-01
response:
- body: {string: "{\r\n \"name\": \"vnet1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Network/virtualNetworks/vnet1\"\
- ,\r\n \"etag\": \"W/\\\"afaa273d-fae6-4176-8e47-5f3f865704fe\\\"\",\r\n \
+ body: {string: "{\r\n \"name\": \"vnet1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_deployment000001/providers/Microsoft.Network/virtualNetworks/vnet1\"\
+ ,\r\n \"etag\": \"W/\\\"2e8849fc-07a0-48ba-8b8a-b03fb20355de\\\"\",\r\n \
\ \"type\": \"Microsoft.Network/virtualNetworks\",\r\n \"location\": \"westus\"\
,\r\n \"tags\": {},\r\n \"properties\": {\r\n \"provisioningState\":\
- \ \"Updating\",\r\n \"resourceGuid\": \"240e9c40-73c7-4893-83a0-d30f69019829\"\
+ \ \"Updating\",\r\n \"resourceGuid\": \"d09ae1e7-9c02-4ca6-a4aa-e763eb6f73c1\"\
,\r\n \"addressSpace\": {\r\n \"addressPrefixes\": [\r\n \"\
10.0.0.0/16\"\r\n ]\r\n },\r\n \"dhcpOptions\": {\r\n \"dnsServers\"\
: []\r\n },\r\n \"subnets\": [\r\n {\r\n \"name\": \"subnet1\"\
- ,\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/subnet1\"\
- ,\r\n \"etag\": \"W/\\\"afaa273d-fae6-4176-8e47-5f3f865704fe\\\"\"\
+ ,\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_deployment000001/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/subnet1\"\
+ ,\r\n \"etag\": \"W/\\\"2e8849fc-07a0-48ba-8b8a-b03fb20355de\\\"\"\
,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
,\r\n \"addressPrefix\": \"10.0.0.0/24\"\r\n }\r\n }\r\
\n ],\r\n \"virtualNetworkPeerings\": []\r\n }\r\n}"}
headers:
- Azure-AsyncOperation: ['https://management.azure.com/subscriptions/0b1f6471-1bf0-4dda-aec3-cb9272f09590/providers/Microsoft.Network/locations/westus/operations/8c6d54b6-72de-49cb-87be-4a6bb7702d08?api-version=2017-03-01']
- Cache-Control: [no-cache]
- Content-Length: ['1072']
- Content-Type: [application/json; charset=utf-8]
- Date: ['Thu, 08 Jun 2017 21:22:36 GMT']
- Expires: ['-1']
- Pragma: [no-cache]
- Retry-After: ['3']
- Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
- Strict-Transport-Security: [max-age=31536000; includeSubDomains]
- x-ms-ratelimit-remaining-subscription-writes: ['1195']
+ azure-asyncoperation: ['https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/ad102992-75fc-4bfa-adfa-ced6b93b3a71?api-version=2017-03-01']
+ cache-control: [no-cache]
+ content-length: ['1160']
+ content-type: [application/json; charset=utf-8]
+ date: ['Thu, 15 Jun 2017 20:10:14 GMT']
+ expires: ['-1']
+ pragma: [no-cache]
+ retry-after: ['3']
+ server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ strict-transport-security: [max-age=31536000; includeSubDomains]
+ x-ms-ratelimit-remaining-subscription-writes: ['1199']
status: {code: 201, message: Created}
- request:
body: null
headers:
Accept: [application/json]
Accept-Encoding: ['gzip, deflate']
+ CommandName: [network vnet create]
Connection: [keep-alive]
Content-Type: [application/json; charset=utf-8]
User-Agent: [python/3.5.1 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
msrest_azure/0.4.7 networkmanagementclient/1.0.0rc3 Azure-SDK-For-Python
- AZURECLI/TEST/2.0.7+dev]
+ AZURECLI/2.0.9+dev]
accept-language: [en-US]
- x-ms-client-request-id: [9864d83e-4c90-11e7-a276-a0b3ccf7272a]
method: GET
- uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/8c6d54b6-72de-49cb-87be-4a6bb7702d08?api-version=2017-03-01
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/ad102992-75fc-4bfa-adfa-ced6b93b3a71?api-version=2017-03-01
response:
body: {string: "{\r\n \"status\": \"Succeeded\"\r\n}"}
headers:
- Cache-Control: [no-cache]
- Content-Type: [application/json; charset=utf-8]
- Date: ['Thu, 08 Jun 2017 21:22:40 GMT']
- Expires: ['-1']
- Pragma: [no-cache]
- Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
- Strict-Transport-Security: [max-age=31536000; includeSubDomains]
- Transfer-Encoding: [chunked]
- Vary: [Accept-Encoding]
+ cache-control: [no-cache]
content-length: ['29']
+ content-type: [application/json; charset=utf-8]
+ date: ['Thu, 15 Jun 2017 20:10:17 GMT']
+ expires: ['-1']
+ pragma: [no-cache]
+ server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ strict-transport-security: [max-age=31536000; includeSubDomains]
+ transfer-encoding: [chunked]
+ vary: [Accept-Encoding]
status: {code: 200, message: OK}
- request:
body: null
headers:
Accept: [application/json]
Accept-Encoding: ['gzip, deflate']
+ CommandName: [network vnet create]
Connection: [keep-alive]
Content-Type: [application/json; charset=utf-8]
User-Agent: [python/3.5.1 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
msrest_azure/0.4.7 networkmanagementclient/1.0.0rc3 Azure-SDK-For-Python
- AZURECLI/TEST/2.0.7+dev]
+ AZURECLI/2.0.9+dev]
accept-language: [en-US]
- x-ms-client-request-id: [9864d83e-4c90-11e7-a276-a0b3ccf7272a]
method: GET
- uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Network/virtualNetworks/vnet1?api-version=2017-03-01
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_deployment000001/providers/Microsoft.Network/virtualNetworks/vnet1?api-version=2017-03-01
response:
- body: {string: "{\r\n \"name\": \"vnet1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Network/virtualNetworks/vnet1\"\
- ,\r\n \"etag\": \"W/\\\"8a5432ce-5584-4583-be8a-b322c796f079\\\"\",\r\n \
+ body: {string: "{\r\n \"name\": \"vnet1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_deployment000001/providers/Microsoft.Network/virtualNetworks/vnet1\"\
+ ,\r\n \"etag\": \"W/\\\"f3010535-1488-4c62-9947-dc69cccdd1c7\\\"\",\r\n \
\ \"type\": \"Microsoft.Network/virtualNetworks\",\r\n \"location\": \"westus\"\
,\r\n \"tags\": {},\r\n \"properties\": {\r\n \"provisioningState\":\
- \ \"Succeeded\",\r\n \"resourceGuid\": \"240e9c40-73c7-4893-83a0-d30f69019829\"\
+ \ \"Succeeded\",\r\n \"resourceGuid\": \"d09ae1e7-9c02-4ca6-a4aa-e763eb6f73c1\"\
,\r\n \"addressSpace\": {\r\n \"addressPrefixes\": [\r\n \"\
10.0.0.0/16\"\r\n ]\r\n },\r\n \"dhcpOptions\": {\r\n \"dnsServers\"\
: []\r\n },\r\n \"subnets\": [\r\n {\r\n \"name\": \"subnet1\"\
- ,\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/subnet1\"\
- ,\r\n \"etag\": \"W/\\\"8a5432ce-5584-4583-be8a-b322c796f079\\\"\"\
+ ,\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_deployment000001/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/subnet1\"\
+ ,\r\n \"etag\": \"W/\\\"f3010535-1488-4c62-9947-dc69cccdd1c7\\\"\"\
,\r\n \"properties\": {\r\n \"provisioningState\": \"Succeeded\"\
,\r\n \"addressPrefix\": \"10.0.0.0/24\"\r\n }\r\n }\r\
\n ],\r\n \"virtualNetworkPeerings\": []\r\n }\r\n}"}
headers:
- Cache-Control: [no-cache]
- Content-Type: [application/json; charset=utf-8]
- Date: ['Thu, 08 Jun 2017 21:22:41 GMT']
- ETag: [W/"8a5432ce-5584-4583-be8a-b322c796f079"]
- Expires: ['-1']
- Pragma: [no-cache]
- Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
- Strict-Transport-Security: [max-age=31536000; includeSubDomains]
- Transfer-Encoding: [chunked]
- Vary: [Accept-Encoding]
- content-length: ['1074']
+ cache-control: [no-cache]
+ content-length: ['1162']
+ content-type: [application/json; charset=utf-8]
+ date: ['Thu, 15 Jun 2017 20:10:17 GMT']
+ etag: [W/"f3010535-1488-4c62-9947-dc69cccdd1c7"]
+ expires: ['-1']
+ pragma: [no-cache]
+ server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ strict-transport-security: [max-age=31536000; includeSubDomains]
+ transfer-encoding: [chunked]
+ vary: [Accept-Encoding]
status: {code: 200, message: OK}
- request:
- body: '{"properties": {"mode": "Incremental", "template": {"resources": [{"dependsOn":
- [], "properties": {"backendAddressPools": "[parameters(''backendAddressPools'')]",
- "frontendIPConfigurations": [{"properties": {"privateIPAllocationMethod": "[parameters(''privateIPAllocationMethod'')]",
- "subnet": {"id": "[parameters(''subnetId'')]"}}, "name": "LoadBalancerFrontEnd"}]},
- "name": "[parameters(''name'')]", "location": "[parameters(''location'')]",
- "apiVersion": "2016-03-30", "type": "Microsoft.Network/loadBalancers"}], "contentVersion":
- "1.0.0.0", "parameters": {"location": {"type": "string"}, "privateIPAllocationMethod":
- {"type": "string"}, "backendAddressPools": {"type": "array"}, "name": {"type":
- "string"}, "subnetId": {"type": "string"}}, "$schema": "http://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#"},
- "parameters": {"location": {"value": "westus"}, "privateIPAllocationMethod":
- {"value": "Dynamic"}, "backendAddressPools": {"value": [{"name": "bepool1"},
- {"name": "bepool2"}]}, "name": {"value": "test-lb"}, "subnetId": {"value": "/subscriptions/0b1f6471-1bf0-4dda-aec3-cb9272f09590/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/subnet1"}}}}'
+ body: 'b''{"properties": {"template": {"contentVersion": "1.0.0.0", "parameters":
+ {"location": {"type": "string"}, "tags": {"type": "object"}, "privateIPAllocationMethod":
+ {"type": "string"}, "name": {"type": "string"}, "backendAddressPools": {"type":
+ "array"}, "subnetId": {"type": "string"}}, "resources": [{"dependsOn": [], "location":
+ "[parameters(\''location\'')]", "tags": "[parameters(\''tags\'')]", "name":
+ "[parameters(\''name\'')]", "properties": {"backendAddressPools": "[parameters(\''backendAddressPools\'')]",
+ "frontendIPConfigurations": [{"name": "LoadBalancerFrontEnd", "properties":
+ {"privateIPAllocationMethod": "[parameters(\''privateIPAllocationMethod\'')]",
+ "subnet": {"id": "[parameters(\''subnetId\'')]"}}}]}, "type": "Microsoft.Network/loadBalancers",
+ "apiVersion": "2016-03-30"}], "$schema": "http://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#"},
+ "parameters": {"location": {"value": "westus"}, "tags": {"value": {"key": "super=value"}},
+ "privateIPAllocationMethod": {"value": "Dynamic"}, "name": {"value": "test-lb"},
+ "subnetId": {"value": "/subscriptions/0b1f6471-1bf0-4dda-aec3-cb9272f09590/resourceGroups/cli_test_deployment000001/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/subnet1"},
+ "backendAddressPools": {"value": [{"name": "bepool1"}, {"name": "bepool2"}]}},
+ "mode": "Incremental"}}'''
headers:
Accept: [application/json]
Accept-Encoding: ['gzip, deflate']
+ CommandName: [group deployment validate]
Connection: [keep-alive]
- Content-Length: ['1228']
+ Content-Length: ['1375']
Content-Type: [application/json; charset=utf-8]
User-Agent: [python/3.5.1 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
msrest_azure/0.4.7 resourcemanagementclient/1.1.0rc1 Azure-SDK-For-Python
- AZURECLI/TEST/2.0.7+dev]
+ AZURECLI/2.0.9+dev]
accept-language: [en-US]
- x-ms-client-request-id: [9ad7925e-4c90-11e7-9479-a0b3ccf7272a]
method: POST
- uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/azure-cli-deployment-test/providers/Microsoft.Resources/deployments/mock-deployment/validate?api-version=2017-05-10
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Resources/deployments/mock-deployment/validate?api-version=2017-05-10
response:
- body: {string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Resources/deployments/deployment_dry_run","name":"deployment_dry_run","properties":{"templateHash":"18048340065001902796","parameters":{"location":{"type":"String","value":"westus"},"privateIPAllocationMethod":{"type":"String","value":"Dynamic"},"backendAddressPools":{"type":"Array","value":[{"name":"bepool1"},{"name":"bepool2"}]},"name":{"type":"String","value":"test-lb"},"subnetId":{"type":"String","value":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/subnet1"}},"mode":"Incremental","provisioningState":"Succeeded","timestamp":"2017-06-08T21:22:42.0844954Z","duration":"PT0S","correlationId":"522bdd66-c165-41e3-8913-93f10b835055","providers":[{"namespace":"Microsoft.Network","resourceTypes":[{"resourceType":"loadBalancers","locations":["westus"]}]}],"dependencies":[],"validatedResources":[{"apiVersion":"2016-03-30","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Network/loadBalancers/test-lb","name":"test-lb","type":"Microsoft.Network/loadBalancers","location":"westus","properties":{"backendAddressPools":[{"name":"bepool1"},{"name":"bepool2"}],"frontendIPConfigurations":[{"properties":{"privateIPAllocationMethod":"Dynamic","subnet":{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/subnet1"}},"name":"LoadBalancerFrontEnd"}]}}]}}'}
+ body: {string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_deployment000001/providers/Microsoft.Resources/deployments/deployment_dry_run","name":"deployment_dry_run","properties":{"templateHash":"6408713692666354622","parameters":{"location":{"type":"String","value":"westus"},"tags":{"type":"Object","value":{"key":"super=value"}},"privateIPAllocationMethod":{"type":"String","value":"Dynamic"},"name":{"type":"String","value":"test-lb"},"backendAddressPools":{"type":"Array","value":[{"name":"bepool1"},{"name":"bepool2"}]},"subnetId":{"type":"String","value":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_deployment000001/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/subnet1"}},"mode":"Incremental","provisioningState":"Succeeded","timestamp":"2017-06-15T20:10:19.3122369Z","duration":"PT0S","correlationId":"d30de1dd-6b13-4d1c-843e-158c7ed0317c","providers":[{"namespace":"Microsoft.Network","resourceTypes":[{"resourceType":"loadBalancers","locations":["westus"]}]}],"dependencies":[],"validatedResources":[{"apiVersion":"2016-03-30","id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb","name":"test-lb","type":"Microsoft.Network/loadBalancers","location":"westus","tags":{"key":"super=value"},"properties":{"backendAddressPools":[{"name":"bepool1"},{"name":"bepool2"}],"frontendIPConfigurations":[{"name":"LoadBalancerFrontEnd","properties":{"privateIPAllocationMethod":"Dynamic","subnet":{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_deployment000001/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/subnet1"}}}]}}]}}'}
headers:
- Cache-Control: [no-cache]
- Content-Type: [application/json; charset=utf-8]
- Date: ['Thu, 08 Jun 2017 21:22:41 GMT']
- Expires: ['-1']
- Pragma: [no-cache]
- Strict-Transport-Security: [max-age=31536000; includeSubDomains]
- Transfer-Encoding: [chunked]
- Vary: [Accept-Encoding]
- content-length: ['1676']
- x-ms-ratelimit-remaining-subscription-writes: ['1195']
+ cache-control: [no-cache]
+ content-length: ['1935']
+ content-type: [application/json; charset=utf-8]
+ date: ['Thu, 15 Jun 2017 20:10:19 GMT']
+ expires: ['-1']
+ pragma: [no-cache]
+ strict-transport-security: [max-age=31536000; includeSubDomains]
+ transfer-encoding: [chunked]
+ vary: [Accept-Encoding]
+ x-ms-ratelimit-remaining-subscription-writes: ['1198']
status: {code: 200, message: OK}
- request:
- body: '{"properties": {"mode": "Incremental", "template": {"resources": [{"dependsOn":
- [], "properties": {"backendAddressPools": "[parameters(''backendAddressPools'')]",
- "frontendIPConfigurations": [{"properties": {"privateIPAllocationMethod": "[parameters(''privateIPAllocationMethod'')]",
- "subnet": {"id": "[parameters(''subnetId'')]"}}, "name": "LoadBalancerFrontEnd"}]},
- "name": "[parameters(''name'')]", "location": "[parameters(''location'')]",
- "apiVersion": "2016-03-30", "type": "Microsoft.Network/loadBalancers"}], "contentVersion":
- "1.0.0.0", "parameters": {"location": {"type": "string"}, "privateIPAllocationMethod":
- {"type": "string"}, "backendAddressPools": {"type": "array"}, "name": {"type":
- "string"}, "subnetId": {"type": "string"}}, "$schema": "http://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#"},
- "parameters": {"location": {"value": "westus"}, "privateIPAllocationMethod":
- {"value": "Dynamic"}, "backendAddressPools": {"value": [{"name": "bepool1"},
- {"name": "bepool2"}]}, "name": {"value": "test-lb"}, "subnetId": {"value": "/subscriptions/0b1f6471-1bf0-4dda-aec3-cb9272f09590/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/subnet1"}}}}'
+ body: 'b''{"properties": {"template": {"contentVersion": "1.0.0.0", "parameters":
+ {"location": {"type": "string"}, "tags": {"type": "object"}, "privateIPAllocationMethod":
+ {"type": "string"}, "name": {"type": "string"}, "backendAddressPools": {"type":
+ "array"}, "subnetId": {"type": "string"}}, "resources": [{"dependsOn": [], "location":
+ "[parameters(\''location\'')]", "tags": "[parameters(\''tags\'')]", "name":
+ "[parameters(\''name\'')]", "properties": {"backendAddressPools": "[parameters(\''backendAddressPools\'')]",
+ "frontendIPConfigurations": [{"name": "LoadBalancerFrontEnd", "properties":
+ {"privateIPAllocationMethod": "[parameters(\''privateIPAllocationMethod\'')]",
+ "subnet": {"id": "[parameters(\''subnetId\'')]"}}}]}, "type": "Microsoft.Network/loadBalancers",
+ "apiVersion": "2016-03-30"}], "$schema": "http://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#"},
+ "parameters": {"location": {"value": "westus"}, "tags": {"value": {"key": "super=value"}},
+ "privateIPAllocationMethod": {"value": "Dynamic"}, "name": {"value": "test-lb"},
+ "subnetId": {"value": "/subscriptions/0b1f6471-1bf0-4dda-aec3-cb9272f09590/resourceGroups/cli_test_deployment000001/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/subnet1"},
+ "backendAddressPools": {"value": [{"name": "bepool1"}, {"name": "bepool2"}]}},
+ "mode": "Incremental"}}'''
headers:
Accept: [application/json]
Accept-Encoding: ['gzip, deflate']
+ CommandName: [group deployment create]
Connection: [keep-alive]
- Content-Length: ['1228']
+ Content-Length: ['1375']
Content-Type: [application/json; charset=utf-8]
User-Agent: [python/3.5.1 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
msrest_azure/0.4.7 resourcemanagementclient/1.1.0rc1 Azure-SDK-For-Python
- AZURECLI/TEST/2.0.7+dev]
+ AZURECLI/2.0.9+dev]
accept-language: [en-US]
- x-ms-client-request-id: [9b58cf52-4c90-11e7-9b16-a0b3ccf7272a]
method: PUT
- uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/azure-cli-deployment-test/providers/Microsoft.Resources/deployments/mock-deployment?api-version=2017-05-10
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Resources/deployments/mock-deployment?api-version=2017-05-10
response:
- body: {string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Resources/deployments/azure-cli-deployment","name":"azure-cli-deployment","properties":{"templateHash":"18048340065001902796","parameters":{"location":{"type":"String","value":"westus"},"privateIPAllocationMethod":{"type":"String","value":"Dynamic"},"backendAddressPools":{"type":"Array","value":[{"name":"bepool1"},{"name":"bepool2"}]},"name":{"type":"String","value":"test-lb"},"subnetId":{"type":"String","value":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/subnet1"}},"mode":"Incremental","provisioningState":"Accepted","timestamp":"2017-06-08T21:22:43.2188091Z","duration":"PT0.2770006S","correlationId":"c18f33be-ece9-4c12-96b9-eabd6fd111ff","providers":[{"namespace":"Microsoft.Network","resourceTypes":[{"resourceType":"loadBalancers","locations":["westus"]}]}],"dependencies":[]}}'}
+ body: {string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_deployment000001/providers/Microsoft.Resources/deployments/azure-cli-deployment","name":"azure-cli-deployment","properties":{"templateHash":"6408713692666354622","parameters":{"location":{"type":"String","value":"westus"},"tags":{"type":"Object","value":{"key":"super=value"}},"privateIPAllocationMethod":{"type":"String","value":"Dynamic"},"name":{"type":"String","value":"test-lb"},"backendAddressPools":{"type":"Array","value":[{"name":"bepool1"},{"name":"bepool2"}]},"subnetId":{"type":"String","value":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_deployment000001/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/subnet1"}},"mode":"Incremental","provisioningState":"Accepted","timestamp":"2017-06-15T20:10:20.2470721Z","duration":"PT0.2383683S","correlationId":"b8232cfd-5846-4949-a2b5-60475b16d780","providers":[{"namespace":"Microsoft.Network","resourceTypes":[{"resourceType":"loadBalancers","locations":["westus"]}]}],"dependencies":[]}}'}
headers:
- Azure-AsyncOperation: ['https://management.azure.com/subscriptions/0b1f6471-1bf0-4dda-aec3-cb9272f09590/resourcegroups/azure-cli-deployment-test/providers/Microsoft.Resources/deployments/azure-cli-deployment/operationStatuses/08587046499225358060?api-version=2017-05-10']
- Cache-Control: [no-cache]
- Content-Length: ['1028']
- Content-Type: [application/json; charset=utf-8]
- Date: ['Thu, 08 Jun 2017 21:22:42 GMT']
- Expires: ['-1']
- Pragma: [no-cache]
- Strict-Transport-Security: [max-age=31536000; includeSubDomains]
- x-ms-ratelimit-remaining-subscription-writes: ['1197']
+ azure-asyncoperation: ['https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Resources/deployments/azure-cli-deployment/operationStatuses/08587040494654689172?api-version=2017-05-10']
+ cache-control: [no-cache]
+ content-length: ['1170']
+ content-type: [application/json; charset=utf-8]
+ date: ['Thu, 15 Jun 2017 20:10:19 GMT']
+ expires: ['-1']
+ pragma: [no-cache]
+ strict-transport-security: [max-age=31536000; includeSubDomains]
+ x-ms-ratelimit-remaining-subscription-writes: ['1198']
status: {code: 201, message: Created}
- request:
body: null
headers:
Accept: [application/json]
Accept-Encoding: ['gzip, deflate']
+ CommandName: [group deployment create]
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
+ msrest_azure/0.4.7 monitorclient/0.3.0 Azure-SDK-For-Python AZURECLI/2.0.9+dev]
+ accept-language: [en-US]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/microsoft.insights/eventtypes/management/values?$filter=eventTimestamp%20ge%202017-05-05T04%3A10%3A20Z%20and%20correlationId%20eq%20%27b8232cfd-5846-4949-a2b5-60475b16d780%27&api-version=2015-04-01
+ response:
+ body: {string: '{"value":[]}'}
+ headers:
+ cache-control: [no-cache]
+ content-length: ['12']
+ content-type: [application/json; charset=utf-8]
+ date: ['Thu, 15 Jun 2017 20:10:20 GMT']
+ expires: ['-1']
+ pragma: [no-cache]
+ server: [Microsoft-IIS/8.5]
+ strict-transport-security: [max-age=31536000; includeSubDomains]
+ transfer-encoding: [chunked]
+ vary: ['Accept-Encoding,Accept-Encoding']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ CommandName: [group deployment create]
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
+ msrest_azure/0.4.7 monitorclient/0.3.0 Azure-SDK-For-Python AZURECLI/2.0.9+dev]
+ accept-language: [en-US]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/microsoft.insights/eventtypes/management/values?$filter=eventTimestamp%20ge%202017-05-05T04%3A10%3A22Z%20and%20correlationId%20eq%20%27b8232cfd-5846-4949-a2b5-60475b16d780%27&api-version=2015-04-01
+ response:
+ body: {string: '{"value":[]}'}
+ headers:
+ cache-control: [no-cache]
+ content-length: ['12']
+ content-type: [application/json; charset=utf-8]
+ date: ['Thu, 15 Jun 2017 20:10:22 GMT']
+ expires: ['-1']
+ pragma: [no-cache]
+ server: [Microsoft-IIS/8.5]
+ strict-transport-security: [max-age=31536000; includeSubDomains]
+ transfer-encoding: [chunked]
+ vary: ['Accept-Encoding,Accept-Encoding']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ CommandName: [group deployment create]
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
+ msrest_azure/0.4.7 monitorclient/0.3.0 Azure-SDK-For-Python AZURECLI/2.0.9+dev]
+ accept-language: [en-US]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/microsoft.insights/eventtypes/management/values?$filter=eventTimestamp%20ge%202017-05-05T04%3A10%3A23Z%20and%20correlationId%20eq%20%27b8232cfd-5846-4949-a2b5-60475b16d780%27&api-version=2015-04-01
+ response:
+ body: {string: '{"value":[]}'}
+ headers:
+ cache-control: [no-cache]
+ content-length: ['12']
+ content-type: [application/json; charset=utf-8]
+ date: ['Thu, 15 Jun 2017 20:10:24 GMT']
+ expires: ['-1']
+ pragma: [no-cache]
+ server: [Microsoft-IIS/8.5]
+ strict-transport-security: [max-age=31536000; includeSubDomains]
+ transfer-encoding: [chunked]
+ vary: ['Accept-Encoding,Accept-Encoding']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ CommandName: [group deployment create]
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
+ msrest_azure/0.4.7 monitorclient/0.3.0 Azure-SDK-For-Python AZURECLI/2.0.9+dev]
+ accept-language: [en-US]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/microsoft.insights/eventtypes/management/values?$filter=eventTimestamp%20ge%202017-05-05T04%3A10%3A25Z%20and%20correlationId%20eq%20%27b8232cfd-5846-4949-a2b5-60475b16d780%27&api-version=2015-04-01
+ response:
+ body: {string: '{"value":[]}'}
+ headers:
+ cache-control: [no-cache]
+ content-length: ['12']
+ content-type: [application/json; charset=utf-8]
+ date: ['Thu, 15 Jun 2017 20:10:25 GMT']
+ expires: ['-1']
+ pragma: [no-cache]
+ server: [Microsoft-IIS/8.5]
+ strict-transport-security: [max-age=31536000; includeSubDomains]
+ transfer-encoding: [chunked]
+ vary: ['Accept-Encoding,Accept-Encoding']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ CommandName: [group deployment create]
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
+ msrest_azure/0.4.7 monitorclient/0.3.0 Azure-SDK-For-Python AZURECLI/2.0.9+dev]
+ accept-language: [en-US]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/microsoft.insights/eventtypes/management/values?$filter=eventTimestamp%20ge%202017-05-05T04%3A10%3A27Z%20and%20correlationId%20eq%20%27b8232cfd-5846-4949-a2b5-60475b16d780%27&api-version=2015-04-01
+ response:
+ body: {string: '{"value":[]}'}
+ headers:
+ cache-control: [no-cache]
+ content-length: ['12']
+ content-type: [application/json; charset=utf-8]
+ date: ['Thu, 15 Jun 2017 20:10:26 GMT']
+ expires: ['-1']
+ pragma: [no-cache]
+ server: [Microsoft-IIS/8.5]
+ strict-transport-security: [max-age=31536000; includeSubDomains]
+ transfer-encoding: [chunked]
+ vary: ['Accept-Encoding,Accept-Encoding']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ CommandName: [group deployment create]
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
+ msrest_azure/0.4.7 monitorclient/0.3.0 Azure-SDK-For-Python AZURECLI/2.0.9+dev]
+ accept-language: [en-US]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/microsoft.insights/eventtypes/management/values?$filter=eventTimestamp%20ge%202017-05-05T04%3A10%3A28Z%20and%20correlationId%20eq%20%27b8232cfd-5846-4949-a2b5-60475b16d780%27&api-version=2015-04-01
+ response:
+ body: {string: '{"value":[]}'}
+ headers:
+ cache-control: [no-cache]
+ content-length: ['12']
+ content-type: [application/json; charset=utf-8]
+ date: ['Thu, 15 Jun 2017 20:10:28 GMT']
+ expires: ['-1']
+ pragma: [no-cache]
+ server: [Microsoft-IIS/8.5]
+ strict-transport-security: [max-age=31536000; includeSubDomains]
+ transfer-encoding: [chunked]
+ vary: ['Accept-Encoding,Accept-Encoding']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ CommandName: [group deployment create]
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
+ msrest_azure/0.4.7 monitorclient/0.3.0 Azure-SDK-For-Python AZURECLI/2.0.9+dev]
+ accept-language: [en-US]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/microsoft.insights/eventtypes/management/values?$filter=eventTimestamp%20ge%202017-05-05T04%3A10%3A30Z%20and%20correlationId%20eq%20%27b8232cfd-5846-4949-a2b5-60475b16d780%27&api-version=2015-04-01
+ response:
+ body: {string: '{"value":[]}'}
+ headers:
+ cache-control: [no-cache]
+ content-length: ['12']
+ content-type: [application/json; charset=utf-8]
+ date: ['Thu, 15 Jun 2017 20:10:30 GMT']
+ expires: ['-1']
+ pragma: [no-cache]
+ server: [Microsoft-IIS/8.5]
+ strict-transport-security: [max-age=31536000; includeSubDomains]
+ transfer-encoding: [chunked]
+ vary: ['Accept-Encoding,Accept-Encoding']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ CommandName: [group deployment create]
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
+ msrest_azure/0.4.7 monitorclient/0.3.0 Azure-SDK-For-Python AZURECLI/2.0.9+dev]
+ accept-language: [en-US]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/microsoft.insights/eventtypes/management/values?$filter=eventTimestamp%20ge%202017-05-05T04%3A10%3A31Z%20and%20correlationId%20eq%20%27b8232cfd-5846-4949-a2b5-60475b16d780%27&api-version=2015-04-01
+ response:
+ body: {string: '{"value":[]}'}
+ headers:
+ cache-control: [no-cache]
+ content-length: ['12']
+ content-type: [application/json; charset=utf-8]
+ date: ['Thu, 15 Jun 2017 20:10:31 GMT']
+ expires: ['-1']
+ pragma: [no-cache]
+ server: [Microsoft-IIS/8.5]
+ strict-transport-security: [max-age=31536000; includeSubDomains]
+ transfer-encoding: [chunked]
+ vary: ['Accept-Encoding,Accept-Encoding']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ CommandName: [group deployment create]
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
+ msrest_azure/0.4.7 monitorclient/0.3.0 Azure-SDK-For-Python AZURECLI/2.0.9+dev]
+ accept-language: [en-US]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/microsoft.insights/eventtypes/management/values?$filter=eventTimestamp%20ge%202017-05-05T04%3A10%3A33Z%20and%20correlationId%20eq%20%27b8232cfd-5846-4949-a2b5-60475b16d780%27&api-version=2015-04-01
+ response:
+ body: {string: '{"value":[]}'}
+ headers:
+ cache-control: [no-cache]
+ content-length: ['12']
+ content-type: [application/json; charset=utf-8]
+ date: ['Thu, 15 Jun 2017 20:10:33 GMT']
+ expires: ['-1']
+ pragma: [no-cache]
+ server: [Microsoft-IIS/8.5]
+ strict-transport-security: [max-age=31536000; includeSubDomains]
+ transfer-encoding: [chunked]
+ vary: ['Accept-Encoding,Accept-Encoding']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ CommandName: [group deployment create]
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
+ msrest_azure/0.4.7 monitorclient/0.3.0 Azure-SDK-For-Python AZURECLI/2.0.9+dev]
+ accept-language: [en-US]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/microsoft.insights/eventtypes/management/values?$filter=eventTimestamp%20ge%202017-05-05T04%3A10%3A35Z%20and%20correlationId%20eq%20%27b8232cfd-5846-4949-a2b5-60475b16d780%27&api-version=2015-04-01
+ response:
+ body: {string: '{"value":[]}'}
+ headers:
+ cache-control: [no-cache]
+ content-length: ['12']
+ content-type: [application/json; charset=utf-8]
+ date: ['Thu, 15 Jun 2017 20:10:34 GMT']
+ expires: ['-1']
+ pragma: [no-cache]
+ server: [Microsoft-IIS/8.5]
+ strict-transport-security: [max-age=31536000; includeSubDomains]
+ transfer-encoding: [chunked]
+ vary: ['Accept-Encoding,Accept-Encoding']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ CommandName: [group deployment create]
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
+ msrest_azure/0.4.7 monitorclient/0.3.0 Azure-SDK-For-Python AZURECLI/2.0.9+dev]
+ accept-language: [en-US]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/microsoft.insights/eventtypes/management/values?$filter=eventTimestamp%20ge%202017-05-05T04%3A10%3A36Z%20and%20correlationId%20eq%20%27b8232cfd-5846-4949-a2b5-60475b16d780%27&api-version=2015-04-01
+ response:
+ body: {string: '{"value":[]}'}
+ headers:
+ cache-control: [no-cache]
+ content-length: ['12']
+ content-type: [application/json; charset=utf-8]
+ date: ['Thu, 15 Jun 2017 20:10:36 GMT']
+ expires: ['-1']
+ pragma: [no-cache]
+ server: [Microsoft-IIS/8.5]
+ strict-transport-security: [max-age=31536000; includeSubDomains]
+ transfer-encoding: [chunked]
+ vary: ['Accept-Encoding,Accept-Encoding']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ CommandName: [group deployment create]
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
+ msrest_azure/0.4.7 monitorclient/0.3.0 Azure-SDK-For-Python AZURECLI/2.0.9+dev]
+ accept-language: [en-US]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/microsoft.insights/eventtypes/management/values?$filter=eventTimestamp%20ge%202017-05-05T04%3A10%3A38Z%20and%20correlationId%20eq%20%27b8232cfd-5846-4949-a2b5-60475b16d780%27&api-version=2015-04-01
+ response:
+ body: {string: '{"value":[{"authorization":{"action":"Microsoft.Resources/deployments/write","scope":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Resources/deployments/azure-cli-deployment"},"caller":"[email protected]","channels":"Operation","claims":{"aud":"https://management.core.windows.net/","iss":"https://sts.windows.net/54826b22-38d6-4fb2-bad9-b7b93a3e9c5a/","iat":"1497556604","nbf":"1497556604","exp":"1497560504","http://schemas.microsoft.com/claims/authnclassreference":"1","aio":"Y2ZgYHCpMd2zJSHh9sHaqmOLfZk6DMyjWdwmHe1d2/0hNSkhmAkA","http://schemas.microsoft.com/claims/authnmethodsreferences":"pwd","appid":"04b07795-8ddb-461a-bbee-02f9e1bf7b46","appidacr":"0","e_exp":"262800","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/surname":"Admin2","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/givenname":"Admin2","groups":"e4bb0b56-1014-40f8-88ab-3d8a8cb0e086,d758a069-52cc-47f6-bc00-961cea17be39","ipaddr":"167.220.0.229","name":"Admin2","http://schemas.microsoft.com/identity/claims/objectidentifier":"5963f50c-7c43-405c-af7e-53294de76abd","platf":"14","puid":"1003BFFD959F8423","http://schemas.microsoft.com/identity/claims/scope":"user_impersonation","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/nameidentifier":"sDgexRwCNIfY-hzQjjCDvZT7Izdfo4Syrr4x0dDNzR4","http://schemas.microsoft.com/identity/claims/tenantid":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/name":"[email protected]","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/upn":"[email protected]","ver":"1.0","wids":"62e90394-69f5-4237-9190-012177145e10"},"correlationId":"b8232cfd-5846-4949-a2b5-60475b16d780","description":"","eventDataId":"89d46fa2-00f4-4e20-acb3-92aeac6f8d3e","eventName":{"value":"BeginRequest","localizedValue":"Begin
+ request"},"category":{"value":"Administrative","localizedValue":"Administrative"},"httpRequest":{"clientRequestId":"a79b3190-5206-11e7-b4af-a0b3ccf7272a","clientIpAddress":"167.220.1.229","method":"PUT"},"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Resources/deployments/azure-cli-deployment/events/89d46fa2-00f4-4e20-acb3-92aeac6f8d3e/ticks/636331542194986937","level":"Informational","resourceGroupName":"cli_test_deployment000001","resourceProviderName":{"value":"Microsoft.Resources","localizedValue":"Microsoft
+ Resources"},"resourceId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Resources/deployments/azure-cli-deployment","resourceType":{"value":"Microsoft.Resources/deployments","localizedValue":"Microsoft.Resources/deployments"},"operationId":"b8232cfd-5846-4949-a2b5-60475b16d780","operationName":{"value":"Microsoft.Resources/deployments/write","localizedValue":"Microsoft.Resources/deployments/write"},"status":{"value":"Started","localizedValue":"Started"},"subStatus":{"value":"","localizedValue":""},"eventTimestamp":"2017-06-15T20:10:19.4986937Z","submissionTimestamp":"2017-06-15T20:10:30.6284676Z","subscriptionId":"0b1f6471-1bf0-4dda-aec3-cb9272f09590","tenantId":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a"}]}'}
+ headers:
+ cache-control: [no-cache]
+ content-length: ['3477']
+ content-type: [application/json; charset=utf-8]
+ date: ['Thu, 15 Jun 2017 20:10:38 GMT']
+ expires: ['-1']
+ pragma: [no-cache]
+ server: [Microsoft-IIS/8.5]
+ strict-transport-security: [max-age=31536000; includeSubDomains]
+ transfer-encoding: [chunked]
+ vary: ['Accept-Encoding,Accept-Encoding']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ CommandName: [group deployment create]
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
+ msrest_azure/0.4.7 monitorclient/0.3.0 Azure-SDK-For-Python AZURECLI/2.0.9+dev]
+ accept-language: [en-US]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/microsoft.insights/eventtypes/management/values?$filter=eventTimestamp%20ge%202017-05-05T04%3A10%3A39Z%20and%20correlationId%20eq%20%27b8232cfd-5846-4949-a2b5-60475b16d780%27&api-version=2015-04-01
+ response:
+ body: {string: '{"value":[{"authorization":{"action":"Microsoft.Resources/deployments/write","scope":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Resources/deployments/azure-cli-deployment"},"caller":"[email protected]","channels":"Operation","claims":{"aud":"https://management.core.windows.net/","iss":"https://sts.windows.net/54826b22-38d6-4fb2-bad9-b7b93a3e9c5a/","iat":"1497556604","nbf":"1497556604","exp":"1497560504","http://schemas.microsoft.com/claims/authnclassreference":"1","aio":"Y2ZgYHCpMd2zJSHh9sHaqmOLfZk6DMyjWdwmHe1d2/0hNSkhmAkA","http://schemas.microsoft.com/claims/authnmethodsreferences":"pwd","appid":"04b07795-8ddb-461a-bbee-02f9e1bf7b46","appidacr":"0","e_exp":"262800","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/surname":"Admin2","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/givenname":"Admin2","groups":"e4bb0b56-1014-40f8-88ab-3d8a8cb0e086,d758a069-52cc-47f6-bc00-961cea17be39","ipaddr":"167.220.0.229","name":"Admin2","http://schemas.microsoft.com/identity/claims/objectidentifier":"5963f50c-7c43-405c-af7e-53294de76abd","platf":"14","puid":"1003BFFD959F8423","http://schemas.microsoft.com/identity/claims/scope":"user_impersonation","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/nameidentifier":"sDgexRwCNIfY-hzQjjCDvZT7Izdfo4Syrr4x0dDNzR4","http://schemas.microsoft.com/identity/claims/tenantid":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/name":"[email protected]","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/upn":"[email protected]","ver":"1.0","wids":"62e90394-69f5-4237-9190-012177145e10"},"correlationId":"b8232cfd-5846-4949-a2b5-60475b16d780","description":"","eventDataId":"89d46fa2-00f4-4e20-acb3-92aeac6f8d3e","eventName":{"value":"BeginRequest","localizedValue":"Begin
+ request"},"category":{"value":"Administrative","localizedValue":"Administrative"},"httpRequest":{"clientRequestId":"a79b3190-5206-11e7-b4af-a0b3ccf7272a","clientIpAddress":"167.220.1.229","method":"PUT"},"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Resources/deployments/azure-cli-deployment/events/89d46fa2-00f4-4e20-acb3-92aeac6f8d3e/ticks/636331542194986937","level":"Informational","resourceGroupName":"cli_test_deployment000001","resourceProviderName":{"value":"Microsoft.Resources","localizedValue":"Microsoft
+ Resources"},"resourceId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Resources/deployments/azure-cli-deployment","resourceType":{"value":"Microsoft.Resources/deployments","localizedValue":"Microsoft.Resources/deployments"},"operationId":"b8232cfd-5846-4949-a2b5-60475b16d780","operationName":{"value":"Microsoft.Resources/deployments/write","localizedValue":"Microsoft.Resources/deployments/write"},"status":{"value":"Started","localizedValue":"Started"},"subStatus":{"value":"","localizedValue":""},"eventTimestamp":"2017-06-15T20:10:19.4986937Z","submissionTimestamp":"2017-06-15T20:10:30.6284676Z","subscriptionId":"0b1f6471-1bf0-4dda-aec3-cb9272f09590","tenantId":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a"}]}'}
+ headers:
+ cache-control: [no-cache]
+ content-length: ['3477']
+ content-type: [application/json; charset=utf-8]
+ date: ['Thu, 15 Jun 2017 20:10:39 GMT']
+ expires: ['-1']
+ pragma: [no-cache]
+ server: [Microsoft-IIS/8.5]
+ strict-transport-security: [max-age=31536000; includeSubDomains]
+ transfer-encoding: [chunked]
+ vary: ['Accept-Encoding,Accept-Encoding']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ CommandName: [group deployment create]
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
+ msrest_azure/0.4.7 monitorclient/0.3.0 Azure-SDK-For-Python AZURECLI/2.0.9+dev]
+ accept-language: [en-US]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/microsoft.insights/eventtypes/management/values?$filter=eventTimestamp%20ge%202017-05-05T04%3A10%3A41Z%20and%20correlationId%20eq%20%27b8232cfd-5846-4949-a2b5-60475b16d780%27&api-version=2015-04-01
+ response:
+ body: {string: '{"value":[{"authorization":{"action":"Microsoft.Resources/deployments/write","scope":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Resources/deployments/azure-cli-deployment"},"caller":"[email protected]","channels":"Operation","claims":{"aud":"https://management.core.windows.net/","iss":"https://sts.windows.net/54826b22-38d6-4fb2-bad9-b7b93a3e9c5a/","iat":"1497556604","nbf":"1497556604","exp":"1497560504","http://schemas.microsoft.com/claims/authnclassreference":"1","aio":"Y2ZgYHCpMd2zJSHh9sHaqmOLfZk6DMyjWdwmHe1d2/0hNSkhmAkA","http://schemas.microsoft.com/claims/authnmethodsreferences":"pwd","appid":"04b07795-8ddb-461a-bbee-02f9e1bf7b46","appidacr":"0","e_exp":"262800","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/surname":"Admin2","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/givenname":"Admin2","groups":"e4bb0b56-1014-40f8-88ab-3d8a8cb0e086,d758a069-52cc-47f6-bc00-961cea17be39","ipaddr":"167.220.0.229","name":"Admin2","http://schemas.microsoft.com/identity/claims/objectidentifier":"5963f50c-7c43-405c-af7e-53294de76abd","platf":"14","puid":"1003BFFD959F8423","http://schemas.microsoft.com/identity/claims/scope":"user_impersonation","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/nameidentifier":"sDgexRwCNIfY-hzQjjCDvZT7Izdfo4Syrr4x0dDNzR4","http://schemas.microsoft.com/identity/claims/tenantid":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/name":"[email protected]","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/upn":"[email protected]","ver":"1.0","wids":"62e90394-69f5-4237-9190-012177145e10"},"correlationId":"b8232cfd-5846-4949-a2b5-60475b16d780","description":"","eventDataId":"74a75f6d-5f5d-4a25-99f5-34381d6dedae","eventName":{"value":"EndRequest","localizedValue":"End
+ request"},"category":{"value":"Administrative","localizedValue":"Administrative"},"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Resources/deployments/azure-cli-deployment/events/74a75f6d-5f5d-4a25-99f5-34381d6dedae/ticks/636331542214496826","level":"Informational","resourceGroupName":"cli_test_deployment000001","resourceProviderName":{"value":"Microsoft.Resources","localizedValue":"Microsoft
+ Resources"},"resourceId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Resources/deployments/azure-cli-deployment","resourceType":{"value":"Microsoft.Resources/deployments","localizedValue":"Microsoft.Resources/deployments"},"operationId":"5504e6a1-ff15-47b0-8cb8-cf4cf0e4321a","operationName":{"value":"Microsoft.Resources/deployments/write","localizedValue":"Microsoft.Resources/deployments/write"},"status":{"value":"Succeeded","localizedValue":"Succeeded"},"subStatus":{"value":"","localizedValue":""},"eventTimestamp":"2017-06-15T20:10:21.4496826Z","submissionTimestamp":"2017-06-15T20:10:40.24568Z","subscriptionId":"0b1f6471-1bf0-4dda-aec3-cb9272f09590","tenantId":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a"},{"authorization":{"action":"Microsoft.Network/loadBalancers/write","scope":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb"},"caller":"[email 
protected]","channels":"Operation","claims":{"aud":"https://management.core.windows.net/","iss":"https://sts.windows.net/54826b22-38d6-4fb2-bad9-b7b93a3e9c5a/","iat":"1497556604","nbf":"1497556604","exp":"1497560504","http://schemas.microsoft.com/claims/authnclassreference":"1","aio":"Y2ZgYHCpMd2zJSHh9sHaqmOLfZk6DMyjWdwmHe1d2/0hNSkhmAkA","http://schemas.microsoft.com/claims/authnmethodsreferences":"pwd","appid":"04b07795-8ddb-461a-bbee-02f9e1bf7b46","appidacr":"0","e_exp":"262800","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/surname":"Admin2","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/givenname":"Admin2","groups":"e4bb0b56-1014-40f8-88ab-3d8a8cb0e086,d758a069-52cc-47f6-bc00-961cea17be39","ipaddr":"167.220.0.229","name":"Admin2","http://schemas.microsoft.com/identity/claims/objectidentifier":"5963f50c-7c43-405c-af7e-53294de76abd","platf":"14","puid":"1003BFFD959F8423","http://schemas.microsoft.com/identity/claims/scope":"user_impersonation","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/nameidentifier":"sDgexRwCNIfY-hzQjjCDvZT7Izdfo4Syrr4x0dDNzR4","http://schemas.microsoft.com/identity/claims/tenantid":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/name":"[email protected]","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/upn":"[email protected]","ver":"1.0","wids":"62e90394-69f5-4237-9190-012177145e10"},"correlationId":"b8232cfd-5846-4949-a2b5-60475b16d780","description":"","eventDataId":"8c76c4e6-ab23-4f0f-9e72-45dcf55fbb95","eventName":{"value":"EndRequest","localizedValue":"End
+ request"},"category":{"value":"Administrative","localizedValue":"Administrative"},"httpRequest":{"clientRequestId":"f6b7e036-afe7-451f-8002-f7050a3f24c3","clientIpAddress":"167.220.1.229","method":"PUT"},"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb/events/8c76c4e6-ab23-4f0f-9e72-45dcf55fbb95/ticks/636331542212153096","level":"Informational","resourceGroupName":"cli_test_deployment000001","resourceProviderName":{"value":"Microsoft.Network","localizedValue":"Microsoft.Network"},"resourceId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb","resourceType":{"value":"Microsoft.Network/loadBalancers","localizedValue":"Microsoft.Network/loadBalancers"},"operationId":"739eb912-937c-4fed-a9be-2bb7934d4d41","operationName":{"value":"Microsoft.Network/loadBalancers/write","localizedValue":"Microsoft.Network/loadBalancers/write"},"properties":{"statusCode":"Created","serviceRequestId":"f01a0be4-f105-42e3-af90-e63ad09c45fb","responseBody":"{\"name\":\"test-lb\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb\",\"etag\":\"W/\\\"deb1081f-8468-4ee2-89a3-00e28114da5d\\\"\",\"type\":\"Microsoft.Network/loadBalancers\",\"location\":\"westus\",\"tags\":{\"key\":\"super=value\"},\"properties\":{\"provisioningState\":\"Succeeded\",\"resourceGuid\":\"5dfd3e9a-ce5b-431e-be12-87e156d8ea66\",\"frontendIPConfigurations\":[{\"name\":\"LoadBalancerFrontEnd\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb/frontendIPConfigurations/LoadBalancerFrontEnd\",\"etag\":\"W/\\\"deb1081f-8468-4ee2-89a3-00e28114da5d\\\"\",\"properties\":{\"provisioningState\":\"Succeeded\",\"privateIPAddress\":\"10.0.0.4
\",\"privateIPAllocationMethod\":\"Dynamic\",\"subnet\":{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_deployment000001/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/subnet1\"}}}],\"backendAddressPools\":[{\"name\":\"bepool1\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb/backendAddressPools/bepool1\",\"etag\":\"W/\\\"deb1081f-8468-4ee2-89a3-00e28114da5d\\\"\",\"properties\":{\"provisioningState\":\"Succeeded\"}},{\"name\":\"bepool2\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb/backendAddressPools/bepool2\",\"etag\":\"W/\\\"deb1081f-8468-4ee2-89a3-00e28114da5d\\\"\",\"properties\":{\"provisioningState\":\"Succeeded\"}}],\"loadBalancingRules\":[],\"probes\":[],\"inboundNatRules\":[],\"outboundNatRules\":[],\"inboundNatPools\":[]}}"},"status":{"value":"Succeeded","localizedValue":"Succeeded"},"subStatus":{"value":"Created","localizedValue":"Created
+ (HTTP Status Code: 201)"},"eventTimestamp":"2017-06-15T20:10:21.2153096Z","submissionTimestamp":"2017-06-15T20:10:40.24568Z","subscriptionId":"0b1f6471-1bf0-4dda-aec3-cb9272f09590","tenantId":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a"},{"authorization":{"action":"Microsoft.Network/loadBalancers/write","scope":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb"},"caller":"[email protected]","channels":"Operation","claims":{"aud":"https://management.core.windows.net/","iss":"https://sts.windows.net/54826b22-38d6-4fb2-bad9-b7b93a3e9c5a/","iat":"1497556604","nbf":"1497556604","exp":"1497560504","http://schemas.microsoft.com/claims/authnclassreference":"1","aio":"Y2ZgYHCpMd2zJSHh9sHaqmOLfZk6DMyjWdwmHe1d2/0hNSkhmAkA","http://schemas.microsoft.com/claims/authnmethodsreferences":"pwd","appid":"04b07795-8ddb-461a-bbee-02f9e1bf7b46","appidacr":"0","e_exp":"262800","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/surname":"Admin2","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/givenname":"Admin2","groups":"e4bb0b56-1014-40f8-88ab-3d8a8cb0e086,d758a069-52cc-47f6-bc00-961cea17be39","ipaddr":"167.220.0.229","name":"Admin2","http://schemas.microsoft.com/identity/claims/objectidentifier":"5963f50c-7c43-405c-af7e-53294de76abd","platf":"14","puid":"1003BFFD959F8423","http://schemas.microsoft.com/identity/claims/scope":"user_impersonation","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/nameidentifier":"sDgexRwCNIfY-hzQjjCDvZT7Izdfo4Syrr4x0dDNzR4","http://schemas.microsoft.com/identity/claims/tenantid":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/name":"[email protected]","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/upn":"[email 
protected]","ver":"1.0","wids":"62e90394-69f5-4237-9190-012177145e10"},"correlationId":"b8232cfd-5846-4949-a2b5-60475b16d780","description":"","eventDataId":"59544866-db93-4a1c-a670-4188f831748f","eventName":{"value":"BeginRequest","localizedValue":"Begin
+ request"},"category":{"value":"Administrative","localizedValue":"Administrative"},"httpRequest":{"clientRequestId":"f6b7e036-afe7-451f-8002-f7050a3f24c3","clientIpAddress":"167.220.1.229","method":"PUT"},"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb/events/59544866-db93-4a1c-a670-4188f831748f/ticks/636331542209653184","level":"Informational","resourceGroupName":"cli_test_deployment000001","resourceProviderName":{"value":"Microsoft.Network","localizedValue":"Microsoft.Network"},"resourceId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb","resourceType":{"value":"Microsoft.Network/loadBalancers","localizedValue":"Microsoft.Network/loadBalancers"},"operationId":"739eb912-937c-4fed-a9be-2bb7934d4d41","operationName":{"value":"Microsoft.Network/loadBalancers/write","localizedValue":"Microsoft.Network/loadBalancers/write"},"properties":{"requestbody":"{\"location\":\"westus\",\"tags\":{\"key\":\"super=value\"},\"properties\":{\"backendAddressPools\":[{\"name\":\"bepool1\"},{\"name\":\"bepool2\"}],\"frontendIPConfigurations\":[{\"name\":\"LoadBalancerFrontEnd\",\"properties\":{\"privateIPAllocationMethod\":\"Dynamic\",\"subnet\":{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_deployment000001/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/subnet1\"}}}]}}"},"status":{"value":"Started","localizedValue":"Started"},"subStatus":{"value":"","localizedValue":""},"eventTimestamp":"2017-06-15T20:10:20.9653184Z","submissionTimestamp":"2017-06-15T20:10:40.24568Z","subscriptionId":"0b1f6471-1bf0-4dda-aec3-cb9272f09590","tenantId":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a"},{"authorization":{"action":"Microsoft.Resources/deployments/write","scope":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/pro
viders/Microsoft.Resources/deployments/azure-cli-deployment"},"caller":"[email protected]","channels":"Operation","claims":{"aud":"https://management.core.windows.net/","iss":"https://sts.windows.net/54826b22-38d6-4fb2-bad9-b7b93a3e9c5a/","iat":"1497556604","nbf":"1497556604","exp":"1497560504","http://schemas.microsoft.com/claims/authnclassreference":"1","aio":"Y2ZgYHCpMd2zJSHh9sHaqmOLfZk6DMyjWdwmHe1d2/0hNSkhmAkA","http://schemas.microsoft.com/claims/authnmethodsreferences":"pwd","appid":"04b07795-8ddb-461a-bbee-02f9e1bf7b46","appidacr":"0","e_exp":"262800","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/surname":"Admin2","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/givenname":"Admin2","groups":"e4bb0b56-1014-40f8-88ab-3d8a8cb0e086,d758a069-52cc-47f6-bc00-961cea17be39","ipaddr":"167.220.0.229","name":"Admin2","http://schemas.microsoft.com/identity/claims/objectidentifier":"5963f50c-7c43-405c-af7e-53294de76abd","platf":"14","puid":"1003BFFD959F8423","http://schemas.microsoft.com/identity/claims/scope":"user_impersonation","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/nameidentifier":"sDgexRwCNIfY-hzQjjCDvZT7Izdfo4Syrr4x0dDNzR4","http://schemas.microsoft.com/identity/claims/tenantid":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/name":"[email protected]","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/upn":"[email protected]","ver":"1.0","wids":"62e90394-69f5-4237-9190-012177145e10"},"correlationId":"b8232cfd-5846-4949-a2b5-60475b16d780","description":"","eventDataId":"89d46fa2-00f4-4e20-acb3-92aeac6f8d3e","eventName":{"value":"BeginRequest","localizedValue":"Begin
+ request"},"category":{"value":"Administrative","localizedValue":"Administrative"},"httpRequest":{"clientRequestId":"a79b3190-5206-11e7-b4af-a0b3ccf7272a","clientIpAddress":"167.220.1.229","method":"PUT"},"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Resources/deployments/azure-cli-deployment/events/89d46fa2-00f4-4e20-acb3-92aeac6f8d3e/ticks/636331542194986937","level":"Informational","resourceGroupName":"cli_test_deployment000001","resourceProviderName":{"value":"Microsoft.Resources","localizedValue":"Microsoft
+ Resources"},"resourceId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Resources/deployments/azure-cli-deployment","resourceType":{"value":"Microsoft.Resources/deployments","localizedValue":"Microsoft.Resources/deployments"},"operationId":"b8232cfd-5846-4949-a2b5-60475b16d780","operationName":{"value":"Microsoft.Resources/deployments/write","localizedValue":"Microsoft.Resources/deployments/write"},"status":{"value":"Started","localizedValue":"Started"},"subStatus":{"value":"","localizedValue":""},"eventTimestamp":"2017-06-15T20:10:19.4986937Z","submissionTimestamp":"2017-06-15T20:10:30.6284676Z","subscriptionId":"0b1f6471-1bf0-4dda-aec3-cb9272f09590","tenantId":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a"}]}'}
+ headers:
+ cache-control: [no-cache]
+ content-length: ['16465']
+ content-type: [application/json; charset=utf-8]
+ date: ['Thu, 15 Jun 2017 20:10:41 GMT']
+ expires: ['-1']
+ pragma: [no-cache]
+ server: [Microsoft-IIS/8.5]
+ strict-transport-security: [max-age=31536000; includeSubDomains]
+ transfer-encoding: [chunked]
+ vary: ['Accept-Encoding,Accept-Encoding']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ CommandName: [group deployment create]
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
+ msrest_azure/0.4.7 monitorclient/0.3.0 Azure-SDK-For-Python AZURECLI/2.0.9+dev]
+ accept-language: [en-US]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/microsoft.insights/eventtypes/management/values?$filter=eventTimestamp%20ge%202017-05-05T04%3A10%3A42Z%20and%20correlationId%20eq%20%27b8232cfd-5846-4949-a2b5-60475b16d780%27&api-version=2015-04-01
+ response:
+ body: {string: '{"value":[{"authorization":{"action":"Microsoft.Resources/deployments/write","scope":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Resources/deployments/azure-cli-deployment"},"caller":"[email protected]","channels":"Operation","claims":{"aud":"https://management.core.windows.net/","iss":"https://sts.windows.net/54826b22-38d6-4fb2-bad9-b7b93a3e9c5a/","iat":"1497556604","nbf":"1497556604","exp":"1497560504","http://schemas.microsoft.com/claims/authnclassreference":"1","aio":"Y2ZgYHCpMd2zJSHh9sHaqmOLfZk6DMyjWdwmHe1d2/0hNSkhmAkA","http://schemas.microsoft.com/claims/authnmethodsreferences":"pwd","appid":"04b07795-8ddb-461a-bbee-02f9e1bf7b46","appidacr":"0","e_exp":"262800","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/surname":"Admin2","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/givenname":"Admin2","groups":"e4bb0b56-1014-40f8-88ab-3d8a8cb0e086,d758a069-52cc-47f6-bc00-961cea17be39","ipaddr":"167.220.0.229","name":"Admin2","http://schemas.microsoft.com/identity/claims/objectidentifier":"5963f50c-7c43-405c-af7e-53294de76abd","platf":"14","puid":"1003BFFD959F8423","http://schemas.microsoft.com/identity/claims/scope":"user_impersonation","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/nameidentifier":"sDgexRwCNIfY-hzQjjCDvZT7Izdfo4Syrr4x0dDNzR4","http://schemas.microsoft.com/identity/claims/tenantid":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/name":"[email protected]","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/upn":"[email protected]","ver":"1.0","wids":"62e90394-69f5-4237-9190-012177145e10"},"correlationId":"b8232cfd-5846-4949-a2b5-60475b16d780","description":"","eventDataId":"74a75f6d-5f5d-4a25-99f5-34381d6dedae","eventName":{"value":"EndRequest","localizedValue":"End
+ request"},"category":{"value":"Administrative","localizedValue":"Administrative"},"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Resources/deployments/azure-cli-deployment/events/74a75f6d-5f5d-4a25-99f5-34381d6dedae/ticks/636331542214496826","level":"Informational","resourceGroupName":"cli_test_deployment000001","resourceProviderName":{"value":"Microsoft.Resources","localizedValue":"Microsoft
+ Resources"},"resourceId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Resources/deployments/azure-cli-deployment","resourceType":{"value":"Microsoft.Resources/deployments","localizedValue":"Microsoft.Resources/deployments"},"operationId":"5504e6a1-ff15-47b0-8cb8-cf4cf0e4321a","operationName":{"value":"Microsoft.Resources/deployments/write","localizedValue":"Microsoft.Resources/deployments/write"},"status":{"value":"Succeeded","localizedValue":"Succeeded"},"subStatus":{"value":"","localizedValue":""},"eventTimestamp":"2017-06-15T20:10:21.4496826Z","submissionTimestamp":"2017-06-15T20:10:40.24568Z","subscriptionId":"0b1f6471-1bf0-4dda-aec3-cb9272f09590","tenantId":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a"},{"authorization":{"action":"Microsoft.Network/loadBalancers/write","scope":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb"},"caller":"[email 
protected]","channels":"Operation","claims":{"aud":"https://management.core.windows.net/","iss":"https://sts.windows.net/54826b22-38d6-4fb2-bad9-b7b93a3e9c5a/","iat":"1497556604","nbf":"1497556604","exp":"1497560504","http://schemas.microsoft.com/claims/authnclassreference":"1","aio":"Y2ZgYHCpMd2zJSHh9sHaqmOLfZk6DMyjWdwmHe1d2/0hNSkhmAkA","http://schemas.microsoft.com/claims/authnmethodsreferences":"pwd","appid":"04b07795-8ddb-461a-bbee-02f9e1bf7b46","appidacr":"0","e_exp":"262800","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/surname":"Admin2","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/givenname":"Admin2","groups":"e4bb0b56-1014-40f8-88ab-3d8a8cb0e086,d758a069-52cc-47f6-bc00-961cea17be39","ipaddr":"167.220.0.229","name":"Admin2","http://schemas.microsoft.com/identity/claims/objectidentifier":"5963f50c-7c43-405c-af7e-53294de76abd","platf":"14","puid":"1003BFFD959F8423","http://schemas.microsoft.com/identity/claims/scope":"user_impersonation","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/nameidentifier":"sDgexRwCNIfY-hzQjjCDvZT7Izdfo4Syrr4x0dDNzR4","http://schemas.microsoft.com/identity/claims/tenantid":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/name":"[email protected]","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/upn":"[email protected]","ver":"1.0","wids":"62e90394-69f5-4237-9190-012177145e10"},"correlationId":"b8232cfd-5846-4949-a2b5-60475b16d780","description":"","eventDataId":"8c76c4e6-ab23-4f0f-9e72-45dcf55fbb95","eventName":{"value":"EndRequest","localizedValue":"End
+ request"},"category":{"value":"Administrative","localizedValue":"Administrative"},"httpRequest":{"clientRequestId":"f6b7e036-afe7-451f-8002-f7050a3f24c3","clientIpAddress":"167.220.1.229","method":"PUT"},"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb/events/8c76c4e6-ab23-4f0f-9e72-45dcf55fbb95/ticks/636331542212153096","level":"Informational","resourceGroupName":"cli_test_deployment000001","resourceProviderName":{"value":"Microsoft.Network","localizedValue":"Microsoft.Network"},"resourceId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb","resourceType":{"value":"Microsoft.Network/loadBalancers","localizedValue":"Microsoft.Network/loadBalancers"},"operationId":"739eb912-937c-4fed-a9be-2bb7934d4d41","operationName":{"value":"Microsoft.Network/loadBalancers/write","localizedValue":"Microsoft.Network/loadBalancers/write"},"properties":{"statusCode":"Created","serviceRequestId":"f01a0be4-f105-42e3-af90-e63ad09c45fb","responseBody":"{\"name\":\"test-lb\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb\",\"etag\":\"W/\\\"deb1081f-8468-4ee2-89a3-00e28114da5d\\\"\",\"type\":\"Microsoft.Network/loadBalancers\",\"location\":\"westus\",\"tags\":{\"key\":\"super=value\"},\"properties\":{\"provisioningState\":\"Succeeded\",\"resourceGuid\":\"5dfd3e9a-ce5b-431e-be12-87e156d8ea66\",\"frontendIPConfigurations\":[{\"name\":\"LoadBalancerFrontEnd\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb/frontendIPConfigurations/LoadBalancerFrontEnd\",\"etag\":\"W/\\\"deb1081f-8468-4ee2-89a3-00e28114da5d\\\"\",\"properties\":{\"provisioningState\":\"Succeeded\",\"privateIPAddress\":\"10.0.0.4
\",\"privateIPAllocationMethod\":\"Dynamic\",\"subnet\":{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_deployment000001/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/subnet1\"}}}],\"backendAddressPools\":[{\"name\":\"bepool1\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb/backendAddressPools/bepool1\",\"etag\":\"W/\\\"deb1081f-8468-4ee2-89a3-00e28114da5d\\\"\",\"properties\":{\"provisioningState\":\"Succeeded\"}},{\"name\":\"bepool2\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb/backendAddressPools/bepool2\",\"etag\":\"W/\\\"deb1081f-8468-4ee2-89a3-00e28114da5d\\\"\",\"properties\":{\"provisioningState\":\"Succeeded\"}}],\"loadBalancingRules\":[],\"probes\":[],\"inboundNatRules\":[],\"outboundNatRules\":[],\"inboundNatPools\":[]}}"},"status":{"value":"Succeeded","localizedValue":"Succeeded"},"subStatus":{"value":"Created","localizedValue":"Created
+ (HTTP Status Code: 201)"},"eventTimestamp":"2017-06-15T20:10:21.2153096Z","submissionTimestamp":"2017-06-15T20:10:40.24568Z","subscriptionId":"0b1f6471-1bf0-4dda-aec3-cb9272f09590","tenantId":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a"},{"authorization":{"action":"Microsoft.Network/loadBalancers/write","scope":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb"},"caller":"[email protected]","channels":"Operation","claims":{"aud":"https://management.core.windows.net/","iss":"https://sts.windows.net/54826b22-38d6-4fb2-bad9-b7b93a3e9c5a/","iat":"1497556604","nbf":"1497556604","exp":"1497560504","http://schemas.microsoft.com/claims/authnclassreference":"1","aio":"Y2ZgYHCpMd2zJSHh9sHaqmOLfZk6DMyjWdwmHe1d2/0hNSkhmAkA","http://schemas.microsoft.com/claims/authnmethodsreferences":"pwd","appid":"04b07795-8ddb-461a-bbee-02f9e1bf7b46","appidacr":"0","e_exp":"262800","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/surname":"Admin2","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/givenname":"Admin2","groups":"e4bb0b56-1014-40f8-88ab-3d8a8cb0e086,d758a069-52cc-47f6-bc00-961cea17be39","ipaddr":"167.220.0.229","name":"Admin2","http://schemas.microsoft.com/identity/claims/objectidentifier":"5963f50c-7c43-405c-af7e-53294de76abd","platf":"14","puid":"1003BFFD959F8423","http://schemas.microsoft.com/identity/claims/scope":"user_impersonation","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/nameidentifier":"sDgexRwCNIfY-hzQjjCDvZT7Izdfo4Syrr4x0dDNzR4","http://schemas.microsoft.com/identity/claims/tenantid":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/name":"[email protected]","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/upn":"[email 
protected]","ver":"1.0","wids":"62e90394-69f5-4237-9190-012177145e10"},"correlationId":"b8232cfd-5846-4949-a2b5-60475b16d780","description":"","eventDataId":"59544866-db93-4a1c-a670-4188f831748f","eventName":{"value":"BeginRequest","localizedValue":"Begin
+ request"},"category":{"value":"Administrative","localizedValue":"Administrative"},"httpRequest":{"clientRequestId":"f6b7e036-afe7-451f-8002-f7050a3f24c3","clientIpAddress":"167.220.1.229","method":"PUT"},"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb/events/59544866-db93-4a1c-a670-4188f831748f/ticks/636331542209653184","level":"Informational","resourceGroupName":"cli_test_deployment000001","resourceProviderName":{"value":"Microsoft.Network","localizedValue":"Microsoft.Network"},"resourceId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb","resourceType":{"value":"Microsoft.Network/loadBalancers","localizedValue":"Microsoft.Network/loadBalancers"},"operationId":"739eb912-937c-4fed-a9be-2bb7934d4d41","operationName":{"value":"Microsoft.Network/loadBalancers/write","localizedValue":"Microsoft.Network/loadBalancers/write"},"properties":{"requestbody":"{\"location\":\"westus\",\"tags\":{\"key\":\"super=value\"},\"properties\":{\"backendAddressPools\":[{\"name\":\"bepool1\"},{\"name\":\"bepool2\"}],\"frontendIPConfigurations\":[{\"name\":\"LoadBalancerFrontEnd\",\"properties\":{\"privateIPAllocationMethod\":\"Dynamic\",\"subnet\":{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_deployment000001/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/subnet1\"}}}]}}"},"status":{"value":"Started","localizedValue":"Started"},"subStatus":{"value":"","localizedValue":""},"eventTimestamp":"2017-06-15T20:10:20.9653184Z","submissionTimestamp":"2017-06-15T20:10:40.24568Z","subscriptionId":"0b1f6471-1bf0-4dda-aec3-cb9272f09590","tenantId":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a"},{"authorization":{"action":"Microsoft.Resources/deployments/write","scope":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/pro
viders/Microsoft.Resources/deployments/azure-cli-deployment"},"caller":"[email protected]","channels":"Operation","claims":{"aud":"https://management.core.windows.net/","iss":"https://sts.windows.net/54826b22-38d6-4fb2-bad9-b7b93a3e9c5a/","iat":"1497556604","nbf":"1497556604","exp":"1497560504","http://schemas.microsoft.com/claims/authnclassreference":"1","aio":"Y2ZgYHCpMd2zJSHh9sHaqmOLfZk6DMyjWdwmHe1d2/0hNSkhmAkA","http://schemas.microsoft.com/claims/authnmethodsreferences":"pwd","appid":"04b07795-8ddb-461a-bbee-02f9e1bf7b46","appidacr":"0","e_exp":"262800","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/surname":"Admin2","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/givenname":"Admin2","groups":"e4bb0b56-1014-40f8-88ab-3d8a8cb0e086,d758a069-52cc-47f6-bc00-961cea17be39","ipaddr":"167.220.0.229","name":"Admin2","http://schemas.microsoft.com/identity/claims/objectidentifier":"5963f50c-7c43-405c-af7e-53294de76abd","platf":"14","puid":"1003BFFD959F8423","http://schemas.microsoft.com/identity/claims/scope":"user_impersonation","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/nameidentifier":"sDgexRwCNIfY-hzQjjCDvZT7Izdfo4Syrr4x0dDNzR4","http://schemas.microsoft.com/identity/claims/tenantid":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/name":"[email protected]","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/upn":"[email protected]","ver":"1.0","wids":"62e90394-69f5-4237-9190-012177145e10"},"correlationId":"b8232cfd-5846-4949-a2b5-60475b16d780","description":"","eventDataId":"89d46fa2-00f4-4e20-acb3-92aeac6f8d3e","eventName":{"value":"BeginRequest","localizedValue":"Begin
+ request"},"category":{"value":"Administrative","localizedValue":"Administrative"},"httpRequest":{"clientRequestId":"a79b3190-5206-11e7-b4af-a0b3ccf7272a","clientIpAddress":"167.220.1.229","method":"PUT"},"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Resources/deployments/azure-cli-deployment/events/89d46fa2-00f4-4e20-acb3-92aeac6f8d3e/ticks/636331542194986937","level":"Informational","resourceGroupName":"cli_test_deployment000001","resourceProviderName":{"value":"Microsoft.Resources","localizedValue":"Microsoft
+ Resources"},"resourceId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Resources/deployments/azure-cli-deployment","resourceType":{"value":"Microsoft.Resources/deployments","localizedValue":"Microsoft.Resources/deployments"},"operationId":"b8232cfd-5846-4949-a2b5-60475b16d780","operationName":{"value":"Microsoft.Resources/deployments/write","localizedValue":"Microsoft.Resources/deployments/write"},"status":{"value":"Started","localizedValue":"Started"},"subStatus":{"value":"","localizedValue":""},"eventTimestamp":"2017-06-15T20:10:19.4986937Z","submissionTimestamp":"2017-06-15T20:10:30.6284676Z","subscriptionId":"0b1f6471-1bf0-4dda-aec3-cb9272f09590","tenantId":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a"}]}'}
+ headers:
+ cache-control: [no-cache]
+ content-length: ['16465']
+ content-type: [application/json; charset=utf-8]
+ date: ['Thu, 15 Jun 2017 20:10:43 GMT']
+ expires: ['-1']
+ pragma: [no-cache]
+ server: [Microsoft-IIS/8.5]
+ strict-transport-security: [max-age=31536000; includeSubDomains]
+ transfer-encoding: [chunked]
+ vary: ['Accept-Encoding,Accept-Encoding']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ CommandName: [group deployment create]
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
+ msrest_azure/0.4.7 monitorclient/0.3.0 Azure-SDK-For-Python AZURECLI/2.0.9+dev]
+ accept-language: [en-US]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/microsoft.insights/eventtypes/management/values?$filter=eventTimestamp%20ge%202017-05-05T04%3A10%3A44Z%20and%20correlationId%20eq%20%27b8232cfd-5846-4949-a2b5-60475b16d780%27&api-version=2015-04-01
+ response:
+ body: {string: '{"value":[{"authorization":{"action":"Microsoft.Resources/deployments/write","scope":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Resources/deployments/azure-cli-deployment"},"caller":"[email protected]","channels":"Operation","claims":{"aud":"https://management.core.windows.net/","iss":"https://sts.windows.net/54826b22-38d6-4fb2-bad9-b7b93a3e9c5a/","iat":"1497556604","nbf":"1497556604","exp":"1497560504","http://schemas.microsoft.com/claims/authnclassreference":"1","aio":"Y2ZgYHCpMd2zJSHh9sHaqmOLfZk6DMyjWdwmHe1d2/0hNSkhmAkA","http://schemas.microsoft.com/claims/authnmethodsreferences":"pwd","appid":"04b07795-8ddb-461a-bbee-02f9e1bf7b46","appidacr":"0","e_exp":"262800","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/surname":"Admin2","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/givenname":"Admin2","groups":"e4bb0b56-1014-40f8-88ab-3d8a8cb0e086,d758a069-52cc-47f6-bc00-961cea17be39","ipaddr":"167.220.0.229","name":"Admin2","http://schemas.microsoft.com/identity/claims/objectidentifier":"5963f50c-7c43-405c-af7e-53294de76abd","platf":"14","puid":"1003BFFD959F8423","http://schemas.microsoft.com/identity/claims/scope":"user_impersonation","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/nameidentifier":"sDgexRwCNIfY-hzQjjCDvZT7Izdfo4Syrr4x0dDNzR4","http://schemas.microsoft.com/identity/claims/tenantid":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/name":"[email protected]","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/upn":"[email protected]","ver":"1.0","wids":"62e90394-69f5-4237-9190-012177145e10"},"correlationId":"b8232cfd-5846-4949-a2b5-60475b16d780","description":"","eventDataId":"74a75f6d-5f5d-4a25-99f5-34381d6dedae","eventName":{"value":"EndRequest","localizedValue":"End
+ request"},"category":{"value":"Administrative","localizedValue":"Administrative"},"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Resources/deployments/azure-cli-deployment/events/74a75f6d-5f5d-4a25-99f5-34381d6dedae/ticks/636331542214496826","level":"Informational","resourceGroupName":"cli_test_deployment000001","resourceProviderName":{"value":"Microsoft.Resources","localizedValue":"Microsoft
+ Resources"},"resourceId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Resources/deployments/azure-cli-deployment","resourceType":{"value":"Microsoft.Resources/deployments","localizedValue":"Microsoft.Resources/deployments"},"operationId":"5504e6a1-ff15-47b0-8cb8-cf4cf0e4321a","operationName":{"value":"Microsoft.Resources/deployments/write","localizedValue":"Microsoft.Resources/deployments/write"},"status":{"value":"Succeeded","localizedValue":"Succeeded"},"subStatus":{"value":"","localizedValue":""},"eventTimestamp":"2017-06-15T20:10:21.4496826Z","submissionTimestamp":"2017-06-15T20:10:40.24568Z","subscriptionId":"0b1f6471-1bf0-4dda-aec3-cb9272f09590","tenantId":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a"},{"authorization":{"action":"Microsoft.Network/loadBalancers/write","scope":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb"},"caller":"[email 
protected]","channels":"Operation","claims":{"aud":"https://management.core.windows.net/","iss":"https://sts.windows.net/54826b22-38d6-4fb2-bad9-b7b93a3e9c5a/","iat":"1497556604","nbf":"1497556604","exp":"1497560504","http://schemas.microsoft.com/claims/authnclassreference":"1","aio":"Y2ZgYHCpMd2zJSHh9sHaqmOLfZk6DMyjWdwmHe1d2/0hNSkhmAkA","http://schemas.microsoft.com/claims/authnmethodsreferences":"pwd","appid":"04b07795-8ddb-461a-bbee-02f9e1bf7b46","appidacr":"0","e_exp":"262800","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/surname":"Admin2","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/givenname":"Admin2","groups":"e4bb0b56-1014-40f8-88ab-3d8a8cb0e086,d758a069-52cc-47f6-bc00-961cea17be39","ipaddr":"167.220.0.229","name":"Admin2","http://schemas.microsoft.com/identity/claims/objectidentifier":"5963f50c-7c43-405c-af7e-53294de76abd","platf":"14","puid":"1003BFFD959F8423","http://schemas.microsoft.com/identity/claims/scope":"user_impersonation","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/nameidentifier":"sDgexRwCNIfY-hzQjjCDvZT7Izdfo4Syrr4x0dDNzR4","http://schemas.microsoft.com/identity/claims/tenantid":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/name":"[email protected]","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/upn":"[email protected]","ver":"1.0","wids":"62e90394-69f5-4237-9190-012177145e10"},"correlationId":"b8232cfd-5846-4949-a2b5-60475b16d780","description":"","eventDataId":"8c76c4e6-ab23-4f0f-9e72-45dcf55fbb95","eventName":{"value":"EndRequest","localizedValue":"End
+ request"},"category":{"value":"Administrative","localizedValue":"Administrative"},"httpRequest":{"clientRequestId":"f6b7e036-afe7-451f-8002-f7050a3f24c3","clientIpAddress":"167.220.1.229","method":"PUT"},"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb/events/8c76c4e6-ab23-4f0f-9e72-45dcf55fbb95/ticks/636331542212153096","level":"Informational","resourceGroupName":"cli_test_deployment000001","resourceProviderName":{"value":"Microsoft.Network","localizedValue":"Microsoft.Network"},"resourceId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb","resourceType":{"value":"Microsoft.Network/loadBalancers","localizedValue":"Microsoft.Network/loadBalancers"},"operationId":"739eb912-937c-4fed-a9be-2bb7934d4d41","operationName":{"value":"Microsoft.Network/loadBalancers/write","localizedValue":"Microsoft.Network/loadBalancers/write"},"properties":{"statusCode":"Created","serviceRequestId":"f01a0be4-f105-42e3-af90-e63ad09c45fb","responseBody":"{\"name\":\"test-lb\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb\",\"etag\":\"W/\\\"deb1081f-8468-4ee2-89a3-00e28114da5d\\\"\",\"type\":\"Microsoft.Network/loadBalancers\",\"location\":\"westus\",\"tags\":{\"key\":\"super=value\"},\"properties\":{\"provisioningState\":\"Succeeded\",\"resourceGuid\":\"5dfd3e9a-ce5b-431e-be12-87e156d8ea66\",\"frontendIPConfigurations\":[{\"name\":\"LoadBalancerFrontEnd\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb/frontendIPConfigurations/LoadBalancerFrontEnd\",\"etag\":\"W/\\\"deb1081f-8468-4ee2-89a3-00e28114da5d\\\"\",\"properties\":{\"provisioningState\":\"Succeeded\",\"privateIPAddress\":\"10.0.0.4
\",\"privateIPAllocationMethod\":\"Dynamic\",\"subnet\":{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_deployment000001/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/subnet1\"}}}],\"backendAddressPools\":[{\"name\":\"bepool1\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb/backendAddressPools/bepool1\",\"etag\":\"W/\\\"deb1081f-8468-4ee2-89a3-00e28114da5d\\\"\",\"properties\":{\"provisioningState\":\"Succeeded\"}},{\"name\":\"bepool2\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb/backendAddressPools/bepool2\",\"etag\":\"W/\\\"deb1081f-8468-4ee2-89a3-00e28114da5d\\\"\",\"properties\":{\"provisioningState\":\"Succeeded\"}}],\"loadBalancingRules\":[],\"probes\":[],\"inboundNatRules\":[],\"outboundNatRules\":[],\"inboundNatPools\":[]}}"},"status":{"value":"Succeeded","localizedValue":"Succeeded"},"subStatus":{"value":"Created","localizedValue":"Created
+ (HTTP Status Code: 201)"},"eventTimestamp":"2017-06-15T20:10:21.2153096Z","submissionTimestamp":"2017-06-15T20:10:40.24568Z","subscriptionId":"0b1f6471-1bf0-4dda-aec3-cb9272f09590","tenantId":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a"},{"authorization":{"action":"Microsoft.Network/loadBalancers/write","scope":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb"},"caller":"[email protected]","channels":"Operation","claims":{"aud":"https://management.core.windows.net/","iss":"https://sts.windows.net/54826b22-38d6-4fb2-bad9-b7b93a3e9c5a/","iat":"1497556604","nbf":"1497556604","exp":"1497560504","http://schemas.microsoft.com/claims/authnclassreference":"1","aio":"Y2ZgYHCpMd2zJSHh9sHaqmOLfZk6DMyjWdwmHe1d2/0hNSkhmAkA","http://schemas.microsoft.com/claims/authnmethodsreferences":"pwd","appid":"04b07795-8ddb-461a-bbee-02f9e1bf7b46","appidacr":"0","e_exp":"262800","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/surname":"Admin2","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/givenname":"Admin2","groups":"e4bb0b56-1014-40f8-88ab-3d8a8cb0e086,d758a069-52cc-47f6-bc00-961cea17be39","ipaddr":"167.220.0.229","name":"Admin2","http://schemas.microsoft.com/identity/claims/objectidentifier":"5963f50c-7c43-405c-af7e-53294de76abd","platf":"14","puid":"1003BFFD959F8423","http://schemas.microsoft.com/identity/claims/scope":"user_impersonation","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/nameidentifier":"sDgexRwCNIfY-hzQjjCDvZT7Izdfo4Syrr4x0dDNzR4","http://schemas.microsoft.com/identity/claims/tenantid":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/name":"[email protected]","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/upn":"[email 
protected]","ver":"1.0","wids":"62e90394-69f5-4237-9190-012177145e10"},"correlationId":"b8232cfd-5846-4949-a2b5-60475b16d780","description":"","eventDataId":"59544866-db93-4a1c-a670-4188f831748f","eventName":{"value":"BeginRequest","localizedValue":"Begin
+ request"},"category":{"value":"Administrative","localizedValue":"Administrative"},"httpRequest":{"clientRequestId":"f6b7e036-afe7-451f-8002-f7050a3f24c3","clientIpAddress":"167.220.1.229","method":"PUT"},"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb/events/59544866-db93-4a1c-a670-4188f831748f/ticks/636331542209653184","level":"Informational","resourceGroupName":"cli_test_deployment000001","resourceProviderName":{"value":"Microsoft.Network","localizedValue":"Microsoft.Network"},"resourceId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb","resourceType":{"value":"Microsoft.Network/loadBalancers","localizedValue":"Microsoft.Network/loadBalancers"},"operationId":"739eb912-937c-4fed-a9be-2bb7934d4d41","operationName":{"value":"Microsoft.Network/loadBalancers/write","localizedValue":"Microsoft.Network/loadBalancers/write"},"properties":{"requestbody":"{\"location\":\"westus\",\"tags\":{\"key\":\"super=value\"},\"properties\":{\"backendAddressPools\":[{\"name\":\"bepool1\"},{\"name\":\"bepool2\"}],\"frontendIPConfigurations\":[{\"name\":\"LoadBalancerFrontEnd\",\"properties\":{\"privateIPAllocationMethod\":\"Dynamic\",\"subnet\":{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_deployment000001/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/subnet1\"}}}]}}"},"status":{"value":"Started","localizedValue":"Started"},"subStatus":{"value":"","localizedValue":""},"eventTimestamp":"2017-06-15T20:10:20.9653184Z","submissionTimestamp":"2017-06-15T20:10:40.24568Z","subscriptionId":"0b1f6471-1bf0-4dda-aec3-cb9272f09590","tenantId":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a"},{"authorization":{"action":"Microsoft.Resources/deployments/write","scope":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/pro
viders/Microsoft.Resources/deployments/azure-cli-deployment"},"caller":"[email protected]","channels":"Operation","claims":{"aud":"https://management.core.windows.net/","iss":"https://sts.windows.net/54826b22-38d6-4fb2-bad9-b7b93a3e9c5a/","iat":"1497556604","nbf":"1497556604","exp":"1497560504","http://schemas.microsoft.com/claims/authnclassreference":"1","aio":"Y2ZgYHCpMd2zJSHh9sHaqmOLfZk6DMyjWdwmHe1d2/0hNSkhmAkA","http://schemas.microsoft.com/claims/authnmethodsreferences":"pwd","appid":"04b07795-8ddb-461a-bbee-02f9e1bf7b46","appidacr":"0","e_exp":"262800","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/surname":"Admin2","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/givenname":"Admin2","groups":"e4bb0b56-1014-40f8-88ab-3d8a8cb0e086,d758a069-52cc-47f6-bc00-961cea17be39","ipaddr":"167.220.0.229","name":"Admin2","http://schemas.microsoft.com/identity/claims/objectidentifier":"5963f50c-7c43-405c-af7e-53294de76abd","platf":"14","puid":"1003BFFD959F8423","http://schemas.microsoft.com/identity/claims/scope":"user_impersonation","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/nameidentifier":"sDgexRwCNIfY-hzQjjCDvZT7Izdfo4Syrr4x0dDNzR4","http://schemas.microsoft.com/identity/claims/tenantid":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/name":"[email protected]","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/upn":"[email protected]","ver":"1.0","wids":"62e90394-69f5-4237-9190-012177145e10"},"correlationId":"b8232cfd-5846-4949-a2b5-60475b16d780","description":"","eventDataId":"b73560a9-5d20-4dce-8bdf-538b84e5aea1","eventName":{"value":"EndRequest","localizedValue":"End
+ request"},"category":{"value":"Administrative","localizedValue":"Administrative"},"httpRequest":{"clientRequestId":"a79b3190-5206-11e7-b4af-a0b3ccf7272a","clientIpAddress":"167.220.1.229","method":"PUT"},"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Resources/deployments/azure-cli-deployment/events/b73560a9-5d20-4dce-8bdf-538b84e5aea1/ticks/636331542203426587","level":"Informational","resourceGroupName":"cli_test_deployment000001","resourceProviderName":{"value":"Microsoft.Resources","localizedValue":"Microsoft
+ Resources"},"resourceId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Resources/deployments/azure-cli-deployment","resourceType":{"value":"Microsoft.Resources/deployments","localizedValue":"Microsoft.Resources/deployments"},"operationId":"b8232cfd-5846-4949-a2b5-60475b16d780","operationName":{"value":"Microsoft.Resources/deployments/write","localizedValue":"Microsoft.Resources/deployments/write"},"properties":{"statusCode":"Created","serviceRequestId":null},"status":{"value":"Accepted","localizedValue":"Accepted"},"subStatus":{"value":"Created","localizedValue":"Created
+ (HTTP Status Code: 201)"},"eventTimestamp":"2017-06-15T20:10:20.3426587Z","submissionTimestamp":"2017-06-15T20:10:41.3300109Z","subscriptionId":"0b1f6471-1bf0-4dda-aec3-cb9272f09590","tenantId":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a"},{"authorization":{"action":"Microsoft.Resources/deployments/write","scope":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Resources/deployments/azure-cli-deployment"},"caller":"[email protected]","channels":"Operation","claims":{"aud":"https://management.core.windows.net/","iss":"https://sts.windows.net/54826b22-38d6-4fb2-bad9-b7b93a3e9c5a/","iat":"1497556604","nbf":"1497556604","exp":"1497560504","http://schemas.microsoft.com/claims/authnclassreference":"1","aio":"Y2ZgYHCpMd2zJSHh9sHaqmOLfZk6DMyjWdwmHe1d2/0hNSkhmAkA","http://schemas.microsoft.com/claims/authnmethodsreferences":"pwd","appid":"04b07795-8ddb-461a-bbee-02f9e1bf7b46","appidacr":"0","e_exp":"262800","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/surname":"Admin2","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/givenname":"Admin2","groups":"e4bb0b56-1014-40f8-88ab-3d8a8cb0e086,d758a069-52cc-47f6-bc00-961cea17be39","ipaddr":"167.220.0.229","name":"Admin2","http://schemas.microsoft.com/identity/claims/objectidentifier":"5963f50c-7c43-405c-af7e-53294de76abd","platf":"14","puid":"1003BFFD959F8423","http://schemas.microsoft.com/identity/claims/scope":"user_impersonation","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/nameidentifier":"sDgexRwCNIfY-hzQjjCDvZT7Izdfo4Syrr4x0dDNzR4","http://schemas.microsoft.com/identity/claims/tenantid":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/name":"[email protected]","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/upn":"[email 
protected]","ver":"1.0","wids":"62e90394-69f5-4237-9190-012177145e10"},"correlationId":"b8232cfd-5846-4949-a2b5-60475b16d780","description":"","eventDataId":"89d46fa2-00f4-4e20-acb3-92aeac6f8d3e","eventName":{"value":"BeginRequest","localizedValue":"Begin
+ request"},"category":{"value":"Administrative","localizedValue":"Administrative"},"httpRequest":{"clientRequestId":"a79b3190-5206-11e7-b4af-a0b3ccf7272a","clientIpAddress":"167.220.1.229","method":"PUT"},"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Resources/deployments/azure-cli-deployment/events/89d46fa2-00f4-4e20-acb3-92aeac6f8d3e/ticks/636331542194986937","level":"Informational","resourceGroupName":"cli_test_deployment000001","resourceProviderName":{"value":"Microsoft.Resources","localizedValue":"Microsoft
+ Resources"},"resourceId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Resources/deployments/azure-cli-deployment","resourceType":{"value":"Microsoft.Resources/deployments","localizedValue":"Microsoft.Resources/deployments"},"operationId":"b8232cfd-5846-4949-a2b5-60475b16d780","operationName":{"value":"Microsoft.Resources/deployments/write","localizedValue":"Microsoft.Resources/deployments/write"},"status":{"value":"Started","localizedValue":"Started"},"subStatus":{"value":"","localizedValue":""},"eventTimestamp":"2017-06-15T20:10:19.4986937Z","submissionTimestamp":"2017-06-15T20:10:30.6284676Z","subscriptionId":"0b1f6471-1bf0-4dda-aec3-cb9272f09590","tenantId":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a"}]}'}
+ headers:
+ cache-control: [no-cache]
+ content-length: ['20029']
+ content-type: [application/json; charset=utf-8]
+ date: ['Thu, 15 Jun 2017 20:10:43 GMT']
+ expires: ['-1']
+ pragma: [no-cache]
+ server: [Microsoft-IIS/8.5]
+ strict-transport-security: [max-age=31536000; includeSubDomains]
+ transfer-encoding: [chunked]
+ vary: ['Accept-Encoding,Accept-Encoding']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ CommandName: [group deployment create]
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
+ msrest_azure/0.4.7 monitorclient/0.3.0 Azure-SDK-For-Python AZURECLI/2.0.9+dev]
+ accept-language: [en-US]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/microsoft.insights/eventtypes/management/values?$filter=eventTimestamp%20ge%202017-05-05T04%3A10%3A45Z%20and%20correlationId%20eq%20%27b8232cfd-5846-4949-a2b5-60475b16d780%27&api-version=2015-04-01
+ response:
+ body: {string: '{"value":[{"authorization":{"action":"Microsoft.Resources/deployments/write","scope":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Resources/deployments/azure-cli-deployment"},"caller":"[email protected]","channels":"Operation","claims":{"aud":"https://management.core.windows.net/","iss":"https://sts.windows.net/54826b22-38d6-4fb2-bad9-b7b93a3e9c5a/","iat":"1497556604","nbf":"1497556604","exp":"1497560504","http://schemas.microsoft.com/claims/authnclassreference":"1","aio":"Y2ZgYHCpMd2zJSHh9sHaqmOLfZk6DMyjWdwmHe1d2/0hNSkhmAkA","http://schemas.microsoft.com/claims/authnmethodsreferences":"pwd","appid":"04b07795-8ddb-461a-bbee-02f9e1bf7b46","appidacr":"0","e_exp":"262800","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/surname":"Admin2","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/givenname":"Admin2","groups":"e4bb0b56-1014-40f8-88ab-3d8a8cb0e086,d758a069-52cc-47f6-bc00-961cea17be39","ipaddr":"167.220.0.229","name":"Admin2","http://schemas.microsoft.com/identity/claims/objectidentifier":"5963f50c-7c43-405c-af7e-53294de76abd","platf":"14","puid":"1003BFFD959F8423","http://schemas.microsoft.com/identity/claims/scope":"user_impersonation","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/nameidentifier":"sDgexRwCNIfY-hzQjjCDvZT7Izdfo4Syrr4x0dDNzR4","http://schemas.microsoft.com/identity/claims/tenantid":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/name":"[email protected]","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/upn":"[email protected]","ver":"1.0","wids":"62e90394-69f5-4237-9190-012177145e10"},"correlationId":"b8232cfd-5846-4949-a2b5-60475b16d780","description":"","eventDataId":"74a75f6d-5f5d-4a25-99f5-34381d6dedae","eventName":{"value":"EndRequest","localizedValue":"End
+ request"},"category":{"value":"Administrative","localizedValue":"Administrative"},"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Resources/deployments/azure-cli-deployment/events/74a75f6d-5f5d-4a25-99f5-34381d6dedae/ticks/636331542214496826","level":"Informational","resourceGroupName":"cli_test_deployment000001","resourceProviderName":{"value":"Microsoft.Resources","localizedValue":"Microsoft
+ Resources"},"resourceId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Resources/deployments/azure-cli-deployment","resourceType":{"value":"Microsoft.Resources/deployments","localizedValue":"Microsoft.Resources/deployments"},"operationId":"5504e6a1-ff15-47b0-8cb8-cf4cf0e4321a","operationName":{"value":"Microsoft.Resources/deployments/write","localizedValue":"Microsoft.Resources/deployments/write"},"status":{"value":"Succeeded","localizedValue":"Succeeded"},"subStatus":{"value":"","localizedValue":""},"eventTimestamp":"2017-06-15T20:10:21.4496826Z","submissionTimestamp":"2017-06-15T20:10:40.24568Z","subscriptionId":"0b1f6471-1bf0-4dda-aec3-cb9272f09590","tenantId":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a"},{"authorization":{"action":"Microsoft.Network/loadBalancers/write","scope":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb"},"caller":"[email 
protected]","channels":"Operation","claims":{"aud":"https://management.core.windows.net/","iss":"https://sts.windows.net/54826b22-38d6-4fb2-bad9-b7b93a3e9c5a/","iat":"1497556604","nbf":"1497556604","exp":"1497560504","http://schemas.microsoft.com/claims/authnclassreference":"1","aio":"Y2ZgYHCpMd2zJSHh9sHaqmOLfZk6DMyjWdwmHe1d2/0hNSkhmAkA","http://schemas.microsoft.com/claims/authnmethodsreferences":"pwd","appid":"04b07795-8ddb-461a-bbee-02f9e1bf7b46","appidacr":"0","e_exp":"262800","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/surname":"Admin2","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/givenname":"Admin2","groups":"e4bb0b56-1014-40f8-88ab-3d8a8cb0e086,d758a069-52cc-47f6-bc00-961cea17be39","ipaddr":"167.220.0.229","name":"Admin2","http://schemas.microsoft.com/identity/claims/objectidentifier":"5963f50c-7c43-405c-af7e-53294de76abd","platf":"14","puid":"1003BFFD959F8423","http://schemas.microsoft.com/identity/claims/scope":"user_impersonation","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/nameidentifier":"sDgexRwCNIfY-hzQjjCDvZT7Izdfo4Syrr4x0dDNzR4","http://schemas.microsoft.com/identity/claims/tenantid":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/name":"[email protected]","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/upn":"[email protected]","ver":"1.0","wids":"62e90394-69f5-4237-9190-012177145e10"},"correlationId":"b8232cfd-5846-4949-a2b5-60475b16d780","description":"","eventDataId":"8c76c4e6-ab23-4f0f-9e72-45dcf55fbb95","eventName":{"value":"EndRequest","localizedValue":"End
+ request"},"category":{"value":"Administrative","localizedValue":"Administrative"},"httpRequest":{"clientRequestId":"f6b7e036-afe7-451f-8002-f7050a3f24c3","clientIpAddress":"167.220.1.229","method":"PUT"},"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb/events/8c76c4e6-ab23-4f0f-9e72-45dcf55fbb95/ticks/636331542212153096","level":"Informational","resourceGroupName":"cli_test_deployment000001","resourceProviderName":{"value":"Microsoft.Network","localizedValue":"Microsoft.Network"},"resourceId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb","resourceType":{"value":"Microsoft.Network/loadBalancers","localizedValue":"Microsoft.Network/loadBalancers"},"operationId":"739eb912-937c-4fed-a9be-2bb7934d4d41","operationName":{"value":"Microsoft.Network/loadBalancers/write","localizedValue":"Microsoft.Network/loadBalancers/write"},"properties":{"statusCode":"Created","serviceRequestId":"f01a0be4-f105-42e3-af90-e63ad09c45fb","responseBody":"{\"name\":\"test-lb\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb\",\"etag\":\"W/\\\"deb1081f-8468-4ee2-89a3-00e28114da5d\\\"\",\"type\":\"Microsoft.Network/loadBalancers\",\"location\":\"westus\",\"tags\":{\"key\":\"super=value\"},\"properties\":{\"provisioningState\":\"Succeeded\",\"resourceGuid\":\"5dfd3e9a-ce5b-431e-be12-87e156d8ea66\",\"frontendIPConfigurations\":[{\"name\":\"LoadBalancerFrontEnd\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb/frontendIPConfigurations/LoadBalancerFrontEnd\",\"etag\":\"W/\\\"deb1081f-8468-4ee2-89a3-00e28114da5d\\\"\",\"properties\":{\"provisioningState\":\"Succeeded\",\"privateIPAddress\":\"10.0.0.4
\",\"privateIPAllocationMethod\":\"Dynamic\",\"subnet\":{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_deployment000001/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/subnet1\"}}}],\"backendAddressPools\":[{\"name\":\"bepool1\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb/backendAddressPools/bepool1\",\"etag\":\"W/\\\"deb1081f-8468-4ee2-89a3-00e28114da5d\\\"\",\"properties\":{\"provisioningState\":\"Succeeded\"}},{\"name\":\"bepool2\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb/backendAddressPools/bepool2\",\"etag\":\"W/\\\"deb1081f-8468-4ee2-89a3-00e28114da5d\\\"\",\"properties\":{\"provisioningState\":\"Succeeded\"}}],\"loadBalancingRules\":[],\"probes\":[],\"inboundNatRules\":[],\"outboundNatRules\":[],\"inboundNatPools\":[]}}"},"status":{"value":"Succeeded","localizedValue":"Succeeded"},"subStatus":{"value":"Created","localizedValue":"Created
+ (HTTP Status Code: 201)"},"eventTimestamp":"2017-06-15T20:10:21.2153096Z","submissionTimestamp":"2017-06-15T20:10:40.24568Z","subscriptionId":"0b1f6471-1bf0-4dda-aec3-cb9272f09590","tenantId":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a"},{"authorization":{"action":"Microsoft.Network/loadBalancers/write","scope":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb"},"caller":"[email protected]","channels":"Operation","claims":{"aud":"https://management.core.windows.net/","iss":"https://sts.windows.net/54826b22-38d6-4fb2-bad9-b7b93a3e9c5a/","iat":"1497556604","nbf":"1497556604","exp":"1497560504","http://schemas.microsoft.com/claims/authnclassreference":"1","aio":"Y2ZgYHCpMd2zJSHh9sHaqmOLfZk6DMyjWdwmHe1d2/0hNSkhmAkA","http://schemas.microsoft.com/claims/authnmethodsreferences":"pwd","appid":"04b07795-8ddb-461a-bbee-02f9e1bf7b46","appidacr":"0","e_exp":"262800","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/surname":"Admin2","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/givenname":"Admin2","groups":"e4bb0b56-1014-40f8-88ab-3d8a8cb0e086,d758a069-52cc-47f6-bc00-961cea17be39","ipaddr":"167.220.0.229","name":"Admin2","http://schemas.microsoft.com/identity/claims/objectidentifier":"5963f50c-7c43-405c-af7e-53294de76abd","platf":"14","puid":"1003BFFD959F8423","http://schemas.microsoft.com/identity/claims/scope":"user_impersonation","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/nameidentifier":"sDgexRwCNIfY-hzQjjCDvZT7Izdfo4Syrr4x0dDNzR4","http://schemas.microsoft.com/identity/claims/tenantid":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/name":"[email protected]","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/upn":"[email 
protected]","ver":"1.0","wids":"62e90394-69f5-4237-9190-012177145e10"},"correlationId":"b8232cfd-5846-4949-a2b5-60475b16d780","description":"","eventDataId":"59544866-db93-4a1c-a670-4188f831748f","eventName":{"value":"BeginRequest","localizedValue":"Begin
+ request"},"category":{"value":"Administrative","localizedValue":"Administrative"},"httpRequest":{"clientRequestId":"f6b7e036-afe7-451f-8002-f7050a3f24c3","clientIpAddress":"167.220.1.229","method":"PUT"},"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb/events/59544866-db93-4a1c-a670-4188f831748f/ticks/636331542209653184","level":"Informational","resourceGroupName":"cli_test_deployment000001","resourceProviderName":{"value":"Microsoft.Network","localizedValue":"Microsoft.Network"},"resourceId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb","resourceType":{"value":"Microsoft.Network/loadBalancers","localizedValue":"Microsoft.Network/loadBalancers"},"operationId":"739eb912-937c-4fed-a9be-2bb7934d4d41","operationName":{"value":"Microsoft.Network/loadBalancers/write","localizedValue":"Microsoft.Network/loadBalancers/write"},"properties":{"requestbody":"{\"location\":\"westus\",\"tags\":{\"key\":\"super=value\"},\"properties\":{\"backendAddressPools\":[{\"name\":\"bepool1\"},{\"name\":\"bepool2\"}],\"frontendIPConfigurations\":[{\"name\":\"LoadBalancerFrontEnd\",\"properties\":{\"privateIPAllocationMethod\":\"Dynamic\",\"subnet\":{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_deployment000001/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/subnet1\"}}}]}}"},"status":{"value":"Started","localizedValue":"Started"},"subStatus":{"value":"","localizedValue":""},"eventTimestamp":"2017-06-15T20:10:20.9653184Z","submissionTimestamp":"2017-06-15T20:10:40.24568Z","subscriptionId":"0b1f6471-1bf0-4dda-aec3-cb9272f09590","tenantId":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a"},{"authorization":{"action":"Microsoft.Resources/deployments/write","scope":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/pro
viders/Microsoft.Resources/deployments/azure-cli-deployment"},"caller":"[email protected]","channels":"Operation","claims":{"aud":"https://management.core.windows.net/","iss":"https://sts.windows.net/54826b22-38d6-4fb2-bad9-b7b93a3e9c5a/","iat":"1497556604","nbf":"1497556604","exp":"1497560504","http://schemas.microsoft.com/claims/authnclassreference":"1","aio":"Y2ZgYHCpMd2zJSHh9sHaqmOLfZk6DMyjWdwmHe1d2/0hNSkhmAkA","http://schemas.microsoft.com/claims/authnmethodsreferences":"pwd","appid":"04b07795-8ddb-461a-bbee-02f9e1bf7b46","appidacr":"0","e_exp":"262800","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/surname":"Admin2","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/givenname":"Admin2","groups":"e4bb0b56-1014-40f8-88ab-3d8a8cb0e086,d758a069-52cc-47f6-bc00-961cea17be39","ipaddr":"167.220.0.229","name":"Admin2","http://schemas.microsoft.com/identity/claims/objectidentifier":"5963f50c-7c43-405c-af7e-53294de76abd","platf":"14","puid":"1003BFFD959F8423","http://schemas.microsoft.com/identity/claims/scope":"user_impersonation","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/nameidentifier":"sDgexRwCNIfY-hzQjjCDvZT7Izdfo4Syrr4x0dDNzR4","http://schemas.microsoft.com/identity/claims/tenantid":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/name":"[email protected]","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/upn":"[email protected]","ver":"1.0","wids":"62e90394-69f5-4237-9190-012177145e10"},"correlationId":"b8232cfd-5846-4949-a2b5-60475b16d780","description":"","eventDataId":"b73560a9-5d20-4dce-8bdf-538b84e5aea1","eventName":{"value":"EndRequest","localizedValue":"End
+ request"},"category":{"value":"Administrative","localizedValue":"Administrative"},"httpRequest":{"clientRequestId":"a79b3190-5206-11e7-b4af-a0b3ccf7272a","clientIpAddress":"167.220.1.229","method":"PUT"},"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Resources/deployments/azure-cli-deployment/events/b73560a9-5d20-4dce-8bdf-538b84e5aea1/ticks/636331542203426587","level":"Informational","resourceGroupName":"cli_test_deployment000001","resourceProviderName":{"value":"Microsoft.Resources","localizedValue":"Microsoft
+ Resources"},"resourceId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Resources/deployments/azure-cli-deployment","resourceType":{"value":"Microsoft.Resources/deployments","localizedValue":"Microsoft.Resources/deployments"},"operationId":"b8232cfd-5846-4949-a2b5-60475b16d780","operationName":{"value":"Microsoft.Resources/deployments/write","localizedValue":"Microsoft.Resources/deployments/write"},"properties":{"statusCode":"Created","serviceRequestId":null},"status":{"value":"Accepted","localizedValue":"Accepted"},"subStatus":{"value":"Created","localizedValue":"Created
+ (HTTP Status Code: 201)"},"eventTimestamp":"2017-06-15T20:10:20.3426587Z","submissionTimestamp":"2017-06-15T20:10:41.3300109Z","subscriptionId":"0b1f6471-1bf0-4dda-aec3-cb9272f09590","tenantId":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a"},{"authorization":{"action":"Microsoft.Resources/deployments/write","scope":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Resources/deployments/azure-cli-deployment"},"caller":"[email protected]","channels":"Operation","claims":{"aud":"https://management.core.windows.net/","iss":"https://sts.windows.net/54826b22-38d6-4fb2-bad9-b7b93a3e9c5a/","iat":"1497556604","nbf":"1497556604","exp":"1497560504","http://schemas.microsoft.com/claims/authnclassreference":"1","aio":"Y2ZgYHCpMd2zJSHh9sHaqmOLfZk6DMyjWdwmHe1d2/0hNSkhmAkA","http://schemas.microsoft.com/claims/authnmethodsreferences":"pwd","appid":"04b07795-8ddb-461a-bbee-02f9e1bf7b46","appidacr":"0","e_exp":"262800","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/surname":"Admin2","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/givenname":"Admin2","groups":"e4bb0b56-1014-40f8-88ab-3d8a8cb0e086,d758a069-52cc-47f6-bc00-961cea17be39","ipaddr":"167.220.0.229","name":"Admin2","http://schemas.microsoft.com/identity/claims/objectidentifier":"5963f50c-7c43-405c-af7e-53294de76abd","platf":"14","puid":"1003BFFD959F8423","http://schemas.microsoft.com/identity/claims/scope":"user_impersonation","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/nameidentifier":"sDgexRwCNIfY-hzQjjCDvZT7Izdfo4Syrr4x0dDNzR4","http://schemas.microsoft.com/identity/claims/tenantid":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/name":"[email protected]","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/upn":"[email 
protected]","ver":"1.0","wids":"62e90394-69f5-4237-9190-012177145e10"},"correlationId":"b8232cfd-5846-4949-a2b5-60475b16d780","description":"","eventDataId":"89d46fa2-00f4-4e20-acb3-92aeac6f8d3e","eventName":{"value":"BeginRequest","localizedValue":"Begin
+ request"},"category":{"value":"Administrative","localizedValue":"Administrative"},"httpRequest":{"clientRequestId":"a79b3190-5206-11e7-b4af-a0b3ccf7272a","clientIpAddress":"167.220.1.229","method":"PUT"},"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Resources/deployments/azure-cli-deployment/events/89d46fa2-00f4-4e20-acb3-92aeac6f8d3e/ticks/636331542194986937","level":"Informational","resourceGroupName":"cli_test_deployment000001","resourceProviderName":{"value":"Microsoft.Resources","localizedValue":"Microsoft
+ Resources"},"resourceId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Resources/deployments/azure-cli-deployment","resourceType":{"value":"Microsoft.Resources/deployments","localizedValue":"Microsoft.Resources/deployments"},"operationId":"b8232cfd-5846-4949-a2b5-60475b16d780","operationName":{"value":"Microsoft.Resources/deployments/write","localizedValue":"Microsoft.Resources/deployments/write"},"status":{"value":"Started","localizedValue":"Started"},"subStatus":{"value":"","localizedValue":""},"eventTimestamp":"2017-06-15T20:10:19.4986937Z","submissionTimestamp":"2017-06-15T20:10:30.6284676Z","subscriptionId":"0b1f6471-1bf0-4dda-aec3-cb9272f09590","tenantId":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a"}]}'}
+ headers:
+ cache-control: [no-cache]
+ content-length: ['20029']
+ content-type: [application/json; charset=utf-8]
+ date: ['Thu, 15 Jun 2017 20:10:44 GMT']
+ expires: ['-1']
+ pragma: [no-cache]
+ server: [Microsoft-IIS/8.5]
+ strict-transport-security: [max-age=31536000; includeSubDomains]
+ transfer-encoding: [chunked]
+ vary: ['Accept-Encoding,Accept-Encoding']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ CommandName: [group deployment create]
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
+ msrest_azure/0.4.7 monitorclient/0.3.0 Azure-SDK-For-Python AZURECLI/2.0.9+dev]
+ accept-language: [en-US]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/microsoft.insights/eventtypes/management/values?$filter=eventTimestamp%20ge%202017-05-05T04%3A10%3A46Z%20and%20correlationId%20eq%20%27b8232cfd-5846-4949-a2b5-60475b16d780%27&api-version=2015-04-01
+ response:
+ body: {string: '{"value":[{"authorization":{"action":"Microsoft.Resources/deployments/write","scope":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Resources/deployments/azure-cli-deployment"},"caller":"[email protected]","channels":"Operation","claims":{"aud":"https://management.core.windows.net/","iss":"https://sts.windows.net/54826b22-38d6-4fb2-bad9-b7b93a3e9c5a/","iat":"1497556604","nbf":"1497556604","exp":"1497560504","http://schemas.microsoft.com/claims/authnclassreference":"1","aio":"Y2ZgYHCpMd2zJSHh9sHaqmOLfZk6DMyjWdwmHe1d2/0hNSkhmAkA","http://schemas.microsoft.com/claims/authnmethodsreferences":"pwd","appid":"04b07795-8ddb-461a-bbee-02f9e1bf7b46","appidacr":"0","e_exp":"262800","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/surname":"Admin2","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/givenname":"Admin2","groups":"e4bb0b56-1014-40f8-88ab-3d8a8cb0e086,d758a069-52cc-47f6-bc00-961cea17be39","ipaddr":"167.220.0.229","name":"Admin2","http://schemas.microsoft.com/identity/claims/objectidentifier":"5963f50c-7c43-405c-af7e-53294de76abd","platf":"14","puid":"1003BFFD959F8423","http://schemas.microsoft.com/identity/claims/scope":"user_impersonation","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/nameidentifier":"sDgexRwCNIfY-hzQjjCDvZT7Izdfo4Syrr4x0dDNzR4","http://schemas.microsoft.com/identity/claims/tenantid":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/name":"[email protected]","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/upn":"[email protected]","ver":"1.0","wids":"62e90394-69f5-4237-9190-012177145e10"},"correlationId":"b8232cfd-5846-4949-a2b5-60475b16d780","description":"","eventDataId":"74a75f6d-5f5d-4a25-99f5-34381d6dedae","eventName":{"value":"EndRequest","localizedValue":"End
+ request"},"category":{"value":"Administrative","localizedValue":"Administrative"},"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Resources/deployments/azure-cli-deployment/events/74a75f6d-5f5d-4a25-99f5-34381d6dedae/ticks/636331542214496826","level":"Informational","resourceGroupName":"cli_test_deployment000001","resourceProviderName":{"value":"Microsoft.Resources","localizedValue":"Microsoft
+ Resources"},"resourceId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Resources/deployments/azure-cli-deployment","resourceType":{"value":"Microsoft.Resources/deployments","localizedValue":"Microsoft.Resources/deployments"},"operationId":"5504e6a1-ff15-47b0-8cb8-cf4cf0e4321a","operationName":{"value":"Microsoft.Resources/deployments/write","localizedValue":"Microsoft.Resources/deployments/write"},"status":{"value":"Succeeded","localizedValue":"Succeeded"},"subStatus":{"value":"","localizedValue":""},"eventTimestamp":"2017-06-15T20:10:21.4496826Z","submissionTimestamp":"2017-06-15T20:10:40.24568Z","subscriptionId":"0b1f6471-1bf0-4dda-aec3-cb9272f09590","tenantId":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a"},{"authorization":{"action":"Microsoft.Network/loadBalancers/write","scope":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb"},"caller":"[email 
protected]","channels":"Operation","claims":{"aud":"https://management.core.windows.net/","iss":"https://sts.windows.net/54826b22-38d6-4fb2-bad9-b7b93a3e9c5a/","iat":"1497556604","nbf":"1497556604","exp":"1497560504","http://schemas.microsoft.com/claims/authnclassreference":"1","aio":"Y2ZgYHCpMd2zJSHh9sHaqmOLfZk6DMyjWdwmHe1d2/0hNSkhmAkA","http://schemas.microsoft.com/claims/authnmethodsreferences":"pwd","appid":"04b07795-8ddb-461a-bbee-02f9e1bf7b46","appidacr":"0","e_exp":"262800","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/surname":"Admin2","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/givenname":"Admin2","groups":"e4bb0b56-1014-40f8-88ab-3d8a8cb0e086,d758a069-52cc-47f6-bc00-961cea17be39","ipaddr":"167.220.0.229","name":"Admin2","http://schemas.microsoft.com/identity/claims/objectidentifier":"5963f50c-7c43-405c-af7e-53294de76abd","platf":"14","puid":"1003BFFD959F8423","http://schemas.microsoft.com/identity/claims/scope":"user_impersonation","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/nameidentifier":"sDgexRwCNIfY-hzQjjCDvZT7Izdfo4Syrr4x0dDNzR4","http://schemas.microsoft.com/identity/claims/tenantid":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/name":"[email protected]","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/upn":"[email protected]","ver":"1.0","wids":"62e90394-69f5-4237-9190-012177145e10"},"correlationId":"b8232cfd-5846-4949-a2b5-60475b16d780","description":"","eventDataId":"8c76c4e6-ab23-4f0f-9e72-45dcf55fbb95","eventName":{"value":"EndRequest","localizedValue":"End
+ request"},"category":{"value":"Administrative","localizedValue":"Administrative"},"httpRequest":{"clientRequestId":"f6b7e036-afe7-451f-8002-f7050a3f24c3","clientIpAddress":"167.220.1.229","method":"PUT"},"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb/events/8c76c4e6-ab23-4f0f-9e72-45dcf55fbb95/ticks/636331542212153096","level":"Informational","resourceGroupName":"cli_test_deployment000001","resourceProviderName":{"value":"Microsoft.Network","localizedValue":"Microsoft.Network"},"resourceId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb","resourceType":{"value":"Microsoft.Network/loadBalancers","localizedValue":"Microsoft.Network/loadBalancers"},"operationId":"739eb912-937c-4fed-a9be-2bb7934d4d41","operationName":{"value":"Microsoft.Network/loadBalancers/write","localizedValue":"Microsoft.Network/loadBalancers/write"},"properties":{"statusCode":"Created","serviceRequestId":"f01a0be4-f105-42e3-af90-e63ad09c45fb","responseBody":"{\"name\":\"test-lb\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb\",\"etag\":\"W/\\\"deb1081f-8468-4ee2-89a3-00e28114da5d\\\"\",\"type\":\"Microsoft.Network/loadBalancers\",\"location\":\"westus\",\"tags\":{\"key\":\"super=value\"},\"properties\":{\"provisioningState\":\"Succeeded\",\"resourceGuid\":\"5dfd3e9a-ce5b-431e-be12-87e156d8ea66\",\"frontendIPConfigurations\":[{\"name\":\"LoadBalancerFrontEnd\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb/frontendIPConfigurations/LoadBalancerFrontEnd\",\"etag\":\"W/\\\"deb1081f-8468-4ee2-89a3-00e28114da5d\\\"\",\"properties\":{\"provisioningState\":\"Succeeded\",\"privateIPAddress\":\"10.0.0.4
\",\"privateIPAllocationMethod\":\"Dynamic\",\"subnet\":{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_deployment000001/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/subnet1\"}}}],\"backendAddressPools\":[{\"name\":\"bepool1\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb/backendAddressPools/bepool1\",\"etag\":\"W/\\\"deb1081f-8468-4ee2-89a3-00e28114da5d\\\"\",\"properties\":{\"provisioningState\":\"Succeeded\"}},{\"name\":\"bepool2\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb/backendAddressPools/bepool2\",\"etag\":\"W/\\\"deb1081f-8468-4ee2-89a3-00e28114da5d\\\"\",\"properties\":{\"provisioningState\":\"Succeeded\"}}],\"loadBalancingRules\":[],\"probes\":[],\"inboundNatRules\":[],\"outboundNatRules\":[],\"inboundNatPools\":[]}}"},"status":{"value":"Succeeded","localizedValue":"Succeeded"},"subStatus":{"value":"Created","localizedValue":"Created
+ (HTTP Status Code: 201)"},"eventTimestamp":"2017-06-15T20:10:21.2153096Z","submissionTimestamp":"2017-06-15T20:10:40.24568Z","subscriptionId":"0b1f6471-1bf0-4dda-aec3-cb9272f09590","tenantId":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a"},{"authorization":{"action":"Microsoft.Network/loadBalancers/write","scope":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb"},"caller":"[email protected]","channels":"Operation","claims":{"aud":"https://management.core.windows.net/","iss":"https://sts.windows.net/54826b22-38d6-4fb2-bad9-b7b93a3e9c5a/","iat":"1497556604","nbf":"1497556604","exp":"1497560504","http://schemas.microsoft.com/claims/authnclassreference":"1","aio":"Y2ZgYHCpMd2zJSHh9sHaqmOLfZk6DMyjWdwmHe1d2/0hNSkhmAkA","http://schemas.microsoft.com/claims/authnmethodsreferences":"pwd","appid":"04b07795-8ddb-461a-bbee-02f9e1bf7b46","appidacr":"0","e_exp":"262800","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/surname":"Admin2","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/givenname":"Admin2","groups":"e4bb0b56-1014-40f8-88ab-3d8a8cb0e086,d758a069-52cc-47f6-bc00-961cea17be39","ipaddr":"167.220.0.229","name":"Admin2","http://schemas.microsoft.com/identity/claims/objectidentifier":"5963f50c-7c43-405c-af7e-53294de76abd","platf":"14","puid":"1003BFFD959F8423","http://schemas.microsoft.com/identity/claims/scope":"user_impersonation","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/nameidentifier":"sDgexRwCNIfY-hzQjjCDvZT7Izdfo4Syrr4x0dDNzR4","http://schemas.microsoft.com/identity/claims/tenantid":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/name":"[email protected]","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/upn":"[email 
protected]","ver":"1.0","wids":"62e90394-69f5-4237-9190-012177145e10"},"correlationId":"b8232cfd-5846-4949-a2b5-60475b16d780","description":"","eventDataId":"59544866-db93-4a1c-a670-4188f831748f","eventName":{"value":"BeginRequest","localizedValue":"Begin
+ request"},"category":{"value":"Administrative","localizedValue":"Administrative"},"httpRequest":{"clientRequestId":"f6b7e036-afe7-451f-8002-f7050a3f24c3","clientIpAddress":"167.220.1.229","method":"PUT"},"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb/events/59544866-db93-4a1c-a670-4188f831748f/ticks/636331542209653184","level":"Informational","resourceGroupName":"cli_test_deployment000001","resourceProviderName":{"value":"Microsoft.Network","localizedValue":"Microsoft.Network"},"resourceId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb","resourceType":{"value":"Microsoft.Network/loadBalancers","localizedValue":"Microsoft.Network/loadBalancers"},"operationId":"739eb912-937c-4fed-a9be-2bb7934d4d41","operationName":{"value":"Microsoft.Network/loadBalancers/write","localizedValue":"Microsoft.Network/loadBalancers/write"},"properties":{"requestbody":"{\"location\":\"westus\",\"tags\":{\"key\":\"super=value\"},\"properties\":{\"backendAddressPools\":[{\"name\":\"bepool1\"},{\"name\":\"bepool2\"}],\"frontendIPConfigurations\":[{\"name\":\"LoadBalancerFrontEnd\",\"properties\":{\"privateIPAllocationMethod\":\"Dynamic\",\"subnet\":{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_deployment000001/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/subnet1\"}}}]}}"},"status":{"value":"Started","localizedValue":"Started"},"subStatus":{"value":"","localizedValue":""},"eventTimestamp":"2017-06-15T20:10:20.9653184Z","submissionTimestamp":"2017-06-15T20:10:40.24568Z","subscriptionId":"0b1f6471-1bf0-4dda-aec3-cb9272f09590","tenantId":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a"},{"authorization":{"action":"Microsoft.Resources/deployments/write","scope":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/pro
viders/Microsoft.Resources/deployments/azure-cli-deployment"},"caller":"[email protected]","channels":"Operation","claims":{"aud":"https://management.core.windows.net/","iss":"https://sts.windows.net/54826b22-38d6-4fb2-bad9-b7b93a3e9c5a/","iat":"1497556604","nbf":"1497556604","exp":"1497560504","http://schemas.microsoft.com/claims/authnclassreference":"1","aio":"Y2ZgYHCpMd2zJSHh9sHaqmOLfZk6DMyjWdwmHe1d2/0hNSkhmAkA","http://schemas.microsoft.com/claims/authnmethodsreferences":"pwd","appid":"04b07795-8ddb-461a-bbee-02f9e1bf7b46","appidacr":"0","e_exp":"262800","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/surname":"Admin2","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/givenname":"Admin2","groups":"e4bb0b56-1014-40f8-88ab-3d8a8cb0e086,d758a069-52cc-47f6-bc00-961cea17be39","ipaddr":"167.220.0.229","name":"Admin2","http://schemas.microsoft.com/identity/claims/objectidentifier":"5963f50c-7c43-405c-af7e-53294de76abd","platf":"14","puid":"1003BFFD959F8423","http://schemas.microsoft.com/identity/claims/scope":"user_impersonation","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/nameidentifier":"sDgexRwCNIfY-hzQjjCDvZT7Izdfo4Syrr4x0dDNzR4","http://schemas.microsoft.com/identity/claims/tenantid":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/name":"[email protected]","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/upn":"[email protected]","ver":"1.0","wids":"62e90394-69f5-4237-9190-012177145e10"},"correlationId":"b8232cfd-5846-4949-a2b5-60475b16d780","description":"","eventDataId":"b73560a9-5d20-4dce-8bdf-538b84e5aea1","eventName":{"value":"EndRequest","localizedValue":"End
+ request"},"category":{"value":"Administrative","localizedValue":"Administrative"},"httpRequest":{"clientRequestId":"a79b3190-5206-11e7-b4af-a0b3ccf7272a","clientIpAddress":"167.220.1.229","method":"PUT"},"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Resources/deployments/azure-cli-deployment/events/b73560a9-5d20-4dce-8bdf-538b84e5aea1/ticks/636331542203426587","level":"Informational","resourceGroupName":"cli_test_deployment000001","resourceProviderName":{"value":"Microsoft.Resources","localizedValue":"Microsoft
+ Resources"},"resourceId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Resources/deployments/azure-cli-deployment","resourceType":{"value":"Microsoft.Resources/deployments","localizedValue":"Microsoft.Resources/deployments"},"operationId":"b8232cfd-5846-4949-a2b5-60475b16d780","operationName":{"value":"Microsoft.Resources/deployments/write","localizedValue":"Microsoft.Resources/deployments/write"},"properties":{"statusCode":"Created","serviceRequestId":null},"status":{"value":"Accepted","localizedValue":"Accepted"},"subStatus":{"value":"Created","localizedValue":"Created
+ (HTTP Status Code: 201)"},"eventTimestamp":"2017-06-15T20:10:20.3426587Z","submissionTimestamp":"2017-06-15T20:10:41.3300109Z","subscriptionId":"0b1f6471-1bf0-4dda-aec3-cb9272f09590","tenantId":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a"},{"authorization":{"action":"Microsoft.Resources/deployments/write","scope":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Resources/deployments/azure-cli-deployment"},"caller":"[email protected]","channels":"Operation","claims":{"aud":"https://management.core.windows.net/","iss":"https://sts.windows.net/54826b22-38d6-4fb2-bad9-b7b93a3e9c5a/","iat":"1497556604","nbf":"1497556604","exp":"1497560504","http://schemas.microsoft.com/claims/authnclassreference":"1","aio":"Y2ZgYHCpMd2zJSHh9sHaqmOLfZk6DMyjWdwmHe1d2/0hNSkhmAkA","http://schemas.microsoft.com/claims/authnmethodsreferences":"pwd","appid":"04b07795-8ddb-461a-bbee-02f9e1bf7b46","appidacr":"0","e_exp":"262800","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/surname":"Admin2","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/givenname":"Admin2","groups":"e4bb0b56-1014-40f8-88ab-3d8a8cb0e086,d758a069-52cc-47f6-bc00-961cea17be39","ipaddr":"167.220.0.229","name":"Admin2","http://schemas.microsoft.com/identity/claims/objectidentifier":"5963f50c-7c43-405c-af7e-53294de76abd","platf":"14","puid":"1003BFFD959F8423","http://schemas.microsoft.com/identity/claims/scope":"user_impersonation","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/nameidentifier":"sDgexRwCNIfY-hzQjjCDvZT7Izdfo4Syrr4x0dDNzR4","http://schemas.microsoft.com/identity/claims/tenantid":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/name":"[email protected]","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/upn":"[email 
protected]","ver":"1.0","wids":"62e90394-69f5-4237-9190-012177145e10"},"correlationId":"b8232cfd-5846-4949-a2b5-60475b16d780","description":"","eventDataId":"89d46fa2-00f4-4e20-acb3-92aeac6f8d3e","eventName":{"value":"BeginRequest","localizedValue":"Begin
+ request"},"category":{"value":"Administrative","localizedValue":"Administrative"},"httpRequest":{"clientRequestId":"a79b3190-5206-11e7-b4af-a0b3ccf7272a","clientIpAddress":"167.220.1.229","method":"PUT"},"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Resources/deployments/azure-cli-deployment/events/89d46fa2-00f4-4e20-acb3-92aeac6f8d3e/ticks/636331542194986937","level":"Informational","resourceGroupName":"cli_test_deployment000001","resourceProviderName":{"value":"Microsoft.Resources","localizedValue":"Microsoft
+ Resources"},"resourceId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Resources/deployments/azure-cli-deployment","resourceType":{"value":"Microsoft.Resources/deployments","localizedValue":"Microsoft.Resources/deployments"},"operationId":"b8232cfd-5846-4949-a2b5-60475b16d780","operationName":{"value":"Microsoft.Resources/deployments/write","localizedValue":"Microsoft.Resources/deployments/write"},"status":{"value":"Started","localizedValue":"Started"},"subStatus":{"value":"","localizedValue":""},"eventTimestamp":"2017-06-15T20:10:19.4986937Z","submissionTimestamp":"2017-06-15T20:10:30.6284676Z","subscriptionId":"0b1f6471-1bf0-4dda-aec3-cb9272f09590","tenantId":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a"}]}'}
+ headers:
+ cache-control: [no-cache]
+ content-length: ['20029']
+ content-type: [application/json; charset=utf-8]
+ date: ['Thu, 15 Jun 2017 20:10:47 GMT']
+ expires: ['-1']
+ pragma: [no-cache]
+ server: [Microsoft-IIS/8.5]
+ strict-transport-security: [max-age=31536000; includeSubDomains]
+ transfer-encoding: [chunked]
+ vary: ['Accept-Encoding,Accept-Encoding']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ CommandName: [group deployment create]
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
+ msrest_azure/0.4.7 monitorclient/0.3.0 Azure-SDK-For-Python AZURECLI/2.0.9+dev]
+ accept-language: [en-US]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/microsoft.insights/eventtypes/management/values?$filter=eventTimestamp%20ge%202017-05-05T04%3A10%3A48Z%20and%20correlationId%20eq%20%27b8232cfd-5846-4949-a2b5-60475b16d780%27&api-version=2015-04-01
+ response:
+ body: {string: '{"value":[{"authorization":{"action":"Microsoft.Resources/deployments/write","scope":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Resources/deployments/azure-cli-deployment"},"caller":"[email protected]","channels":"Operation","claims":{"aud":"https://management.core.windows.net/","iss":"https://sts.windows.net/54826b22-38d6-4fb2-bad9-b7b93a3e9c5a/","iat":"1497556604","nbf":"1497556604","exp":"1497560504","http://schemas.microsoft.com/claims/authnclassreference":"1","aio":"Y2ZgYHCpMd2zJSHh9sHaqmOLfZk6DMyjWdwmHe1d2/0hNSkhmAkA","http://schemas.microsoft.com/claims/authnmethodsreferences":"pwd","appid":"04b07795-8ddb-461a-bbee-02f9e1bf7b46","appidacr":"0","e_exp":"262800","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/surname":"Admin2","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/givenname":"Admin2","groups":"e4bb0b56-1014-40f8-88ab-3d8a8cb0e086,d758a069-52cc-47f6-bc00-961cea17be39","ipaddr":"167.220.0.229","name":"Admin2","http://schemas.microsoft.com/identity/claims/objectidentifier":"5963f50c-7c43-405c-af7e-53294de76abd","platf":"14","puid":"1003BFFD959F8423","http://schemas.microsoft.com/identity/claims/scope":"user_impersonation","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/nameidentifier":"sDgexRwCNIfY-hzQjjCDvZT7Izdfo4Syrr4x0dDNzR4","http://schemas.microsoft.com/identity/claims/tenantid":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/name":"[email protected]","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/upn":"[email protected]","ver":"1.0","wids":"62e90394-69f5-4237-9190-012177145e10"},"correlationId":"b8232cfd-5846-4949-a2b5-60475b16d780","description":"","eventDataId":"74a75f6d-5f5d-4a25-99f5-34381d6dedae","eventName":{"value":"EndRequest","localizedValue":"End
+ request"},"category":{"value":"Administrative","localizedValue":"Administrative"},"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Resources/deployments/azure-cli-deployment/events/74a75f6d-5f5d-4a25-99f5-34381d6dedae/ticks/636331542214496826","level":"Informational","resourceGroupName":"cli_test_deployment000001","resourceProviderName":{"value":"Microsoft.Resources","localizedValue":"Microsoft
+ Resources"},"resourceId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Resources/deployments/azure-cli-deployment","resourceType":{"value":"Microsoft.Resources/deployments","localizedValue":"Microsoft.Resources/deployments"},"operationId":"5504e6a1-ff15-47b0-8cb8-cf4cf0e4321a","operationName":{"value":"Microsoft.Resources/deployments/write","localizedValue":"Microsoft.Resources/deployments/write"},"status":{"value":"Succeeded","localizedValue":"Succeeded"},"subStatus":{"value":"","localizedValue":""},"eventTimestamp":"2017-06-15T20:10:21.4496826Z","submissionTimestamp":"2017-06-15T20:10:40.24568Z","subscriptionId":"0b1f6471-1bf0-4dda-aec3-cb9272f09590","tenantId":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a"},{"authorization":{"action":"Microsoft.Network/loadBalancers/write","scope":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb"},"caller":"[email 
protected]","channels":"Operation","claims":{"aud":"https://management.core.windows.net/","iss":"https://sts.windows.net/54826b22-38d6-4fb2-bad9-b7b93a3e9c5a/","iat":"1497556604","nbf":"1497556604","exp":"1497560504","http://schemas.microsoft.com/claims/authnclassreference":"1","aio":"Y2ZgYHCpMd2zJSHh9sHaqmOLfZk6DMyjWdwmHe1d2/0hNSkhmAkA","http://schemas.microsoft.com/claims/authnmethodsreferences":"pwd","appid":"04b07795-8ddb-461a-bbee-02f9e1bf7b46","appidacr":"0","e_exp":"262800","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/surname":"Admin2","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/givenname":"Admin2","groups":"e4bb0b56-1014-40f8-88ab-3d8a8cb0e086,d758a069-52cc-47f6-bc00-961cea17be39","ipaddr":"167.220.0.229","name":"Admin2","http://schemas.microsoft.com/identity/claims/objectidentifier":"5963f50c-7c43-405c-af7e-53294de76abd","platf":"14","puid":"1003BFFD959F8423","http://schemas.microsoft.com/identity/claims/scope":"user_impersonation","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/nameidentifier":"sDgexRwCNIfY-hzQjjCDvZT7Izdfo4Syrr4x0dDNzR4","http://schemas.microsoft.com/identity/claims/tenantid":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/name":"[email protected]","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/upn":"[email protected]","ver":"1.0","wids":"62e90394-69f5-4237-9190-012177145e10"},"correlationId":"b8232cfd-5846-4949-a2b5-60475b16d780","description":"","eventDataId":"8c76c4e6-ab23-4f0f-9e72-45dcf55fbb95","eventName":{"value":"EndRequest","localizedValue":"End
+ request"},"category":{"value":"Administrative","localizedValue":"Administrative"},"httpRequest":{"clientRequestId":"f6b7e036-afe7-451f-8002-f7050a3f24c3","clientIpAddress":"167.220.1.229","method":"PUT"},"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb/events/8c76c4e6-ab23-4f0f-9e72-45dcf55fbb95/ticks/636331542212153096","level":"Informational","resourceGroupName":"cli_test_deployment000001","resourceProviderName":{"value":"Microsoft.Network","localizedValue":"Microsoft.Network"},"resourceId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb","resourceType":{"value":"Microsoft.Network/loadBalancers","localizedValue":"Microsoft.Network/loadBalancers"},"operationId":"739eb912-937c-4fed-a9be-2bb7934d4d41","operationName":{"value":"Microsoft.Network/loadBalancers/write","localizedValue":"Microsoft.Network/loadBalancers/write"},"properties":{"statusCode":"Created","serviceRequestId":"f01a0be4-f105-42e3-af90-e63ad09c45fb","responseBody":"{\"name\":\"test-lb\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb\",\"etag\":\"W/\\\"deb1081f-8468-4ee2-89a3-00e28114da5d\\\"\",\"type\":\"Microsoft.Network/loadBalancers\",\"location\":\"westus\",\"tags\":{\"key\":\"super=value\"},\"properties\":{\"provisioningState\":\"Succeeded\",\"resourceGuid\":\"5dfd3e9a-ce5b-431e-be12-87e156d8ea66\",\"frontendIPConfigurations\":[{\"name\":\"LoadBalancerFrontEnd\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb/frontendIPConfigurations/LoadBalancerFrontEnd\",\"etag\":\"W/\\\"deb1081f-8468-4ee2-89a3-00e28114da5d\\\"\",\"properties\":{\"provisioningState\":\"Succeeded\",\"privateIPAddress\":\"10.0.0.4
\",\"privateIPAllocationMethod\":\"Dynamic\",\"subnet\":{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_deployment000001/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/subnet1\"}}}],\"backendAddressPools\":[{\"name\":\"bepool1\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb/backendAddressPools/bepool1\",\"etag\":\"W/\\\"deb1081f-8468-4ee2-89a3-00e28114da5d\\\"\",\"properties\":{\"provisioningState\":\"Succeeded\"}},{\"name\":\"bepool2\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb/backendAddressPools/bepool2\",\"etag\":\"W/\\\"deb1081f-8468-4ee2-89a3-00e28114da5d\\\"\",\"properties\":{\"provisioningState\":\"Succeeded\"}}],\"loadBalancingRules\":[],\"probes\":[],\"inboundNatRules\":[],\"outboundNatRules\":[],\"inboundNatPools\":[]}}"},"status":{"value":"Succeeded","localizedValue":"Succeeded"},"subStatus":{"value":"Created","localizedValue":"Created
+ (HTTP Status Code: 201)"},"eventTimestamp":"2017-06-15T20:10:21.2153096Z","submissionTimestamp":"2017-06-15T20:10:40.24568Z","subscriptionId":"0b1f6471-1bf0-4dda-aec3-cb9272f09590","tenantId":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a"},{"authorization":{"action":"Microsoft.Network/loadBalancers/write","scope":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb"},"caller":"[email protected]","channels":"Operation","claims":{"aud":"https://management.core.windows.net/","iss":"https://sts.windows.net/54826b22-38d6-4fb2-bad9-b7b93a3e9c5a/","iat":"1497556604","nbf":"1497556604","exp":"1497560504","http://schemas.microsoft.com/claims/authnclassreference":"1","aio":"Y2ZgYHCpMd2zJSHh9sHaqmOLfZk6DMyjWdwmHe1d2/0hNSkhmAkA","http://schemas.microsoft.com/claims/authnmethodsreferences":"pwd","appid":"04b07795-8ddb-461a-bbee-02f9e1bf7b46","appidacr":"0","e_exp":"262800","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/surname":"Admin2","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/givenname":"Admin2","groups":"e4bb0b56-1014-40f8-88ab-3d8a8cb0e086,d758a069-52cc-47f6-bc00-961cea17be39","ipaddr":"167.220.0.229","name":"Admin2","http://schemas.microsoft.com/identity/claims/objectidentifier":"5963f50c-7c43-405c-af7e-53294de76abd","platf":"14","puid":"1003BFFD959F8423","http://schemas.microsoft.com/identity/claims/scope":"user_impersonation","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/nameidentifier":"sDgexRwCNIfY-hzQjjCDvZT7Izdfo4Syrr4x0dDNzR4","http://schemas.microsoft.com/identity/claims/tenantid":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/name":"[email protected]","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/upn":"[email 
protected]","ver":"1.0","wids":"62e90394-69f5-4237-9190-012177145e10"},"correlationId":"b8232cfd-5846-4949-a2b5-60475b16d780","description":"","eventDataId":"59544866-db93-4a1c-a670-4188f831748f","eventName":{"value":"BeginRequest","localizedValue":"Begin
+ request"},"category":{"value":"Administrative","localizedValue":"Administrative"},"httpRequest":{"clientRequestId":"f6b7e036-afe7-451f-8002-f7050a3f24c3","clientIpAddress":"167.220.1.229","method":"PUT"},"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb/events/59544866-db93-4a1c-a670-4188f831748f/ticks/636331542209653184","level":"Informational","resourceGroupName":"cli_test_deployment000001","resourceProviderName":{"value":"Microsoft.Network","localizedValue":"Microsoft.Network"},"resourceId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb","resourceType":{"value":"Microsoft.Network/loadBalancers","localizedValue":"Microsoft.Network/loadBalancers"},"operationId":"739eb912-937c-4fed-a9be-2bb7934d4d41","operationName":{"value":"Microsoft.Network/loadBalancers/write","localizedValue":"Microsoft.Network/loadBalancers/write"},"properties":{"requestbody":"{\"location\":\"westus\",\"tags\":{\"key\":\"super=value\"},\"properties\":{\"backendAddressPools\":[{\"name\":\"bepool1\"},{\"name\":\"bepool2\"}],\"frontendIPConfigurations\":[{\"name\":\"LoadBalancerFrontEnd\",\"properties\":{\"privateIPAllocationMethod\":\"Dynamic\",\"subnet\":{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_deployment000001/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/subnet1\"}}}]}}"},"status":{"value":"Started","localizedValue":"Started"},"subStatus":{"value":"","localizedValue":""},"eventTimestamp":"2017-06-15T20:10:20.9653184Z","submissionTimestamp":"2017-06-15T20:10:40.24568Z","subscriptionId":"0b1f6471-1bf0-4dda-aec3-cb9272f09590","tenantId":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a"},{"authorization":{"action":"Microsoft.Resources/deployments/write","scope":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/pro
viders/Microsoft.Resources/deployments/azure-cli-deployment"},"caller":"[email protected]","channels":"Operation","claims":{"aud":"https://management.core.windows.net/","iss":"https://sts.windows.net/54826b22-38d6-4fb2-bad9-b7b93a3e9c5a/","iat":"1497556604","nbf":"1497556604","exp":"1497560504","http://schemas.microsoft.com/claims/authnclassreference":"1","aio":"Y2ZgYHCpMd2zJSHh9sHaqmOLfZk6DMyjWdwmHe1d2/0hNSkhmAkA","http://schemas.microsoft.com/claims/authnmethodsreferences":"pwd","appid":"04b07795-8ddb-461a-bbee-02f9e1bf7b46","appidacr":"0","e_exp":"262800","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/surname":"Admin2","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/givenname":"Admin2","groups":"e4bb0b56-1014-40f8-88ab-3d8a8cb0e086,d758a069-52cc-47f6-bc00-961cea17be39","ipaddr":"167.220.0.229","name":"Admin2","http://schemas.microsoft.com/identity/claims/objectidentifier":"5963f50c-7c43-405c-af7e-53294de76abd","platf":"14","puid":"1003BFFD959F8423","http://schemas.microsoft.com/identity/claims/scope":"user_impersonation","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/nameidentifier":"sDgexRwCNIfY-hzQjjCDvZT7Izdfo4Syrr4x0dDNzR4","http://schemas.microsoft.com/identity/claims/tenantid":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/name":"[email protected]","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/upn":"[email protected]","ver":"1.0","wids":"62e90394-69f5-4237-9190-012177145e10"},"correlationId":"b8232cfd-5846-4949-a2b5-60475b16d780","description":"","eventDataId":"b73560a9-5d20-4dce-8bdf-538b84e5aea1","eventName":{"value":"EndRequest","localizedValue":"End
+ request"},"category":{"value":"Administrative","localizedValue":"Administrative"},"httpRequest":{"clientRequestId":"a79b3190-5206-11e7-b4af-a0b3ccf7272a","clientIpAddress":"167.220.1.229","method":"PUT"},"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Resources/deployments/azure-cli-deployment/events/b73560a9-5d20-4dce-8bdf-538b84e5aea1/ticks/636331542203426587","level":"Informational","resourceGroupName":"cli_test_deployment000001","resourceProviderName":{"value":"Microsoft.Resources","localizedValue":"Microsoft
+ Resources"},"resourceId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Resources/deployments/azure-cli-deployment","resourceType":{"value":"Microsoft.Resources/deployments","localizedValue":"Microsoft.Resources/deployments"},"operationId":"b8232cfd-5846-4949-a2b5-60475b16d780","operationName":{"value":"Microsoft.Resources/deployments/write","localizedValue":"Microsoft.Resources/deployments/write"},"properties":{"statusCode":"Created","serviceRequestId":null},"status":{"value":"Accepted","localizedValue":"Accepted"},"subStatus":{"value":"Created","localizedValue":"Created
+ (HTTP Status Code: 201)"},"eventTimestamp":"2017-06-15T20:10:20.3426587Z","submissionTimestamp":"2017-06-15T20:10:41.3300109Z","subscriptionId":"0b1f6471-1bf0-4dda-aec3-cb9272f09590","tenantId":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a"},{"authorization":{"action":"Microsoft.Resources/deployments/write","scope":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Resources/deployments/azure-cli-deployment"},"caller":"[email protected]","channels":"Operation","claims":{"aud":"https://management.core.windows.net/","iss":"https://sts.windows.net/54826b22-38d6-4fb2-bad9-b7b93a3e9c5a/","iat":"1497556604","nbf":"1497556604","exp":"1497560504","http://schemas.microsoft.com/claims/authnclassreference":"1","aio":"Y2ZgYHCpMd2zJSHh9sHaqmOLfZk6DMyjWdwmHe1d2/0hNSkhmAkA","http://schemas.microsoft.com/claims/authnmethodsreferences":"pwd","appid":"04b07795-8ddb-461a-bbee-02f9e1bf7b46","appidacr":"0","e_exp":"262800","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/surname":"Admin2","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/givenname":"Admin2","groups":"e4bb0b56-1014-40f8-88ab-3d8a8cb0e086,d758a069-52cc-47f6-bc00-961cea17be39","ipaddr":"167.220.0.229","name":"Admin2","http://schemas.microsoft.com/identity/claims/objectidentifier":"5963f50c-7c43-405c-af7e-53294de76abd","platf":"14","puid":"1003BFFD959F8423","http://schemas.microsoft.com/identity/claims/scope":"user_impersonation","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/nameidentifier":"sDgexRwCNIfY-hzQjjCDvZT7Izdfo4Syrr4x0dDNzR4","http://schemas.microsoft.com/identity/claims/tenantid":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/name":"[email protected]","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/upn":"[email 
protected]","ver":"1.0","wids":"62e90394-69f5-4237-9190-012177145e10"},"correlationId":"b8232cfd-5846-4949-a2b5-60475b16d780","description":"","eventDataId":"89d46fa2-00f4-4e20-acb3-92aeac6f8d3e","eventName":{"value":"BeginRequest","localizedValue":"Begin
+ request"},"category":{"value":"Administrative","localizedValue":"Administrative"},"httpRequest":{"clientRequestId":"a79b3190-5206-11e7-b4af-a0b3ccf7272a","clientIpAddress":"167.220.1.229","method":"PUT"},"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Resources/deployments/azure-cli-deployment/events/89d46fa2-00f4-4e20-acb3-92aeac6f8d3e/ticks/636331542194986937","level":"Informational","resourceGroupName":"cli_test_deployment000001","resourceProviderName":{"value":"Microsoft.Resources","localizedValue":"Microsoft
+ Resources"},"resourceId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Resources/deployments/azure-cli-deployment","resourceType":{"value":"Microsoft.Resources/deployments","localizedValue":"Microsoft.Resources/deployments"},"operationId":"b8232cfd-5846-4949-a2b5-60475b16d780","operationName":{"value":"Microsoft.Resources/deployments/write","localizedValue":"Microsoft.Resources/deployments/write"},"status":{"value":"Started","localizedValue":"Started"},"subStatus":{"value":"","localizedValue":""},"eventTimestamp":"2017-06-15T20:10:19.4986937Z","submissionTimestamp":"2017-06-15T20:10:30.6284676Z","subscriptionId":"0b1f6471-1bf0-4dda-aec3-cb9272f09590","tenantId":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a"}]}'}
+ headers:
+ cache-control: [no-cache]
+ content-length: ['20029']
+ content-type: [application/json; charset=utf-8]
+ date: ['Thu, 15 Jun 2017 20:10:48 GMT']
+ expires: ['-1']
+ pragma: [no-cache]
+ server: [Microsoft-IIS/8.5]
+ strict-transport-security: [max-age=31536000; includeSubDomains]
+ transfer-encoding: [chunked]
+ vary: ['Accept-Encoding,Accept-Encoding']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ CommandName: [group deployment create]
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
+ msrest_azure/0.4.7 monitorclient/0.3.0 Azure-SDK-For-Python AZURECLI/2.0.9+dev]
+ accept-language: [en-US]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/microsoft.insights/eventtypes/management/values?$filter=eventTimestamp%20ge%202017-05-05T04%3A10%3A49Z%20and%20correlationId%20eq%20%27b8232cfd-5846-4949-a2b5-60475b16d780%27&api-version=2015-04-01
+ response:
+ body: {string: '{"value":[{"authorization":{"action":"Microsoft.Resources/deployments/write","scope":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Resources/deployments/azure-cli-deployment"},"caller":"[email protected]","channels":"Operation","claims":{"aud":"https://management.core.windows.net/","iss":"https://sts.windows.net/54826b22-38d6-4fb2-bad9-b7b93a3e9c5a/","iat":"1497556604","nbf":"1497556604","exp":"1497560504","http://schemas.microsoft.com/claims/authnclassreference":"1","aio":"Y2ZgYHCpMd2zJSHh9sHaqmOLfZk6DMyjWdwmHe1d2/0hNSkhmAkA","http://schemas.microsoft.com/claims/authnmethodsreferences":"pwd","appid":"04b07795-8ddb-461a-bbee-02f9e1bf7b46","appidacr":"0","e_exp":"262800","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/surname":"Admin2","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/givenname":"Admin2","groups":"e4bb0b56-1014-40f8-88ab-3d8a8cb0e086,d758a069-52cc-47f6-bc00-961cea17be39","ipaddr":"167.220.0.229","name":"Admin2","http://schemas.microsoft.com/identity/claims/objectidentifier":"5963f50c-7c43-405c-af7e-53294de76abd","platf":"14","puid":"1003BFFD959F8423","http://schemas.microsoft.com/identity/claims/scope":"user_impersonation","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/nameidentifier":"sDgexRwCNIfY-hzQjjCDvZT7Izdfo4Syrr4x0dDNzR4","http://schemas.microsoft.com/identity/claims/tenantid":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/name":"[email protected]","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/upn":"[email protected]","ver":"1.0","wids":"62e90394-69f5-4237-9190-012177145e10"},"correlationId":"b8232cfd-5846-4949-a2b5-60475b16d780","description":"","eventDataId":"74a75f6d-5f5d-4a25-99f5-34381d6dedae","eventName":{"value":"EndRequest","localizedValue":"End
+ request"},"category":{"value":"Administrative","localizedValue":"Administrative"},"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Resources/deployments/azure-cli-deployment/events/74a75f6d-5f5d-4a25-99f5-34381d6dedae/ticks/636331542214496826","level":"Informational","resourceGroupName":"cli_test_deployment000001","resourceProviderName":{"value":"Microsoft.Resources","localizedValue":"Microsoft
+ Resources"},"resourceId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Resources/deployments/azure-cli-deployment","resourceType":{"value":"Microsoft.Resources/deployments","localizedValue":"Microsoft.Resources/deployments"},"operationId":"5504e6a1-ff15-47b0-8cb8-cf4cf0e4321a","operationName":{"value":"Microsoft.Resources/deployments/write","localizedValue":"Microsoft.Resources/deployments/write"},"status":{"value":"Succeeded","localizedValue":"Succeeded"},"subStatus":{"value":"","localizedValue":""},"eventTimestamp":"2017-06-15T20:10:21.4496826Z","submissionTimestamp":"2017-06-15T20:10:40.24568Z","subscriptionId":"0b1f6471-1bf0-4dda-aec3-cb9272f09590","tenantId":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a"},{"authorization":{"action":"Microsoft.Network/loadBalancers/write","scope":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb"},"caller":"[email 
protected]","channels":"Operation","claims":{"aud":"https://management.core.windows.net/","iss":"https://sts.windows.net/54826b22-38d6-4fb2-bad9-b7b93a3e9c5a/","iat":"1497556604","nbf":"1497556604","exp":"1497560504","http://schemas.microsoft.com/claims/authnclassreference":"1","aio":"Y2ZgYHCpMd2zJSHh9sHaqmOLfZk6DMyjWdwmHe1d2/0hNSkhmAkA","http://schemas.microsoft.com/claims/authnmethodsreferences":"pwd","appid":"04b07795-8ddb-461a-bbee-02f9e1bf7b46","appidacr":"0","e_exp":"262800","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/surname":"Admin2","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/givenname":"Admin2","groups":"e4bb0b56-1014-40f8-88ab-3d8a8cb0e086,d758a069-52cc-47f6-bc00-961cea17be39","ipaddr":"167.220.0.229","name":"Admin2","http://schemas.microsoft.com/identity/claims/objectidentifier":"5963f50c-7c43-405c-af7e-53294de76abd","platf":"14","puid":"1003BFFD959F8423","http://schemas.microsoft.com/identity/claims/scope":"user_impersonation","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/nameidentifier":"sDgexRwCNIfY-hzQjjCDvZT7Izdfo4Syrr4x0dDNzR4","http://schemas.microsoft.com/identity/claims/tenantid":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/name":"[email protected]","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/upn":"[email protected]","ver":"1.0","wids":"62e90394-69f5-4237-9190-012177145e10"},"correlationId":"b8232cfd-5846-4949-a2b5-60475b16d780","description":"","eventDataId":"8c76c4e6-ab23-4f0f-9e72-45dcf55fbb95","eventName":{"value":"EndRequest","localizedValue":"End
+ request"},"category":{"value":"Administrative","localizedValue":"Administrative"},"httpRequest":{"clientRequestId":"f6b7e036-afe7-451f-8002-f7050a3f24c3","clientIpAddress":"167.220.1.229","method":"PUT"},"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb/events/8c76c4e6-ab23-4f0f-9e72-45dcf55fbb95/ticks/636331542212153096","level":"Informational","resourceGroupName":"cli_test_deployment000001","resourceProviderName":{"value":"Microsoft.Network","localizedValue":"Microsoft.Network"},"resourceId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb","resourceType":{"value":"Microsoft.Network/loadBalancers","localizedValue":"Microsoft.Network/loadBalancers"},"operationId":"739eb912-937c-4fed-a9be-2bb7934d4d41","operationName":{"value":"Microsoft.Network/loadBalancers/write","localizedValue":"Microsoft.Network/loadBalancers/write"},"properties":{"statusCode":"Created","serviceRequestId":"f01a0be4-f105-42e3-af90-e63ad09c45fb","responseBody":"{\"name\":\"test-lb\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb\",\"etag\":\"W/\\\"deb1081f-8468-4ee2-89a3-00e28114da5d\\\"\",\"type\":\"Microsoft.Network/loadBalancers\",\"location\":\"westus\",\"tags\":{\"key\":\"super=value\"},\"properties\":{\"provisioningState\":\"Succeeded\",\"resourceGuid\":\"5dfd3e9a-ce5b-431e-be12-87e156d8ea66\",\"frontendIPConfigurations\":[{\"name\":\"LoadBalancerFrontEnd\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb/frontendIPConfigurations/LoadBalancerFrontEnd\",\"etag\":\"W/\\\"deb1081f-8468-4ee2-89a3-00e28114da5d\\\"\",\"properties\":{\"provisioningState\":\"Succeeded\",\"privateIPAddress\":\"10.0.0.4
\",\"privateIPAllocationMethod\":\"Dynamic\",\"subnet\":{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_deployment000001/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/subnet1\"}}}],\"backendAddressPools\":[{\"name\":\"bepool1\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb/backendAddressPools/bepool1\",\"etag\":\"W/\\\"deb1081f-8468-4ee2-89a3-00e28114da5d\\\"\",\"properties\":{\"provisioningState\":\"Succeeded\"}},{\"name\":\"bepool2\",\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb/backendAddressPools/bepool2\",\"etag\":\"W/\\\"deb1081f-8468-4ee2-89a3-00e28114da5d\\\"\",\"properties\":{\"provisioningState\":\"Succeeded\"}}],\"loadBalancingRules\":[],\"probes\":[],\"inboundNatRules\":[],\"outboundNatRules\":[],\"inboundNatPools\":[]}}"},"status":{"value":"Succeeded","localizedValue":"Succeeded"},"subStatus":{"value":"Created","localizedValue":"Created
+ (HTTP Status Code: 201)"},"eventTimestamp":"2017-06-15T20:10:21.2153096Z","submissionTimestamp":"2017-06-15T20:10:40.24568Z","subscriptionId":"0b1f6471-1bf0-4dda-aec3-cb9272f09590","tenantId":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a"},{"authorization":{"action":"Microsoft.Network/loadBalancers/write","scope":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb"},"caller":"[email protected]","channels":"Operation","claims":{"aud":"https://management.core.windows.net/","iss":"https://sts.windows.net/54826b22-38d6-4fb2-bad9-b7b93a3e9c5a/","iat":"1497556604","nbf":"1497556604","exp":"1497560504","http://schemas.microsoft.com/claims/authnclassreference":"1","aio":"Y2ZgYHCpMd2zJSHh9sHaqmOLfZk6DMyjWdwmHe1d2/0hNSkhmAkA","http://schemas.microsoft.com/claims/authnmethodsreferences":"pwd","appid":"04b07795-8ddb-461a-bbee-02f9e1bf7b46","appidacr":"0","e_exp":"262800","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/surname":"Admin2","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/givenname":"Admin2","groups":"e4bb0b56-1014-40f8-88ab-3d8a8cb0e086,d758a069-52cc-47f6-bc00-961cea17be39","ipaddr":"167.220.0.229","name":"Admin2","http://schemas.microsoft.com/identity/claims/objectidentifier":"5963f50c-7c43-405c-af7e-53294de76abd","platf":"14","puid":"1003BFFD959F8423","http://schemas.microsoft.com/identity/claims/scope":"user_impersonation","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/nameidentifier":"sDgexRwCNIfY-hzQjjCDvZT7Izdfo4Syrr4x0dDNzR4","http://schemas.microsoft.com/identity/claims/tenantid":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/name":"[email protected]","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/upn":"[email 
protected]","ver":"1.0","wids":"62e90394-69f5-4237-9190-012177145e10"},"correlationId":"b8232cfd-5846-4949-a2b5-60475b16d780","description":"","eventDataId":"59544866-db93-4a1c-a670-4188f831748f","eventName":{"value":"BeginRequest","localizedValue":"Begin
+ request"},"category":{"value":"Administrative","localizedValue":"Administrative"},"httpRequest":{"clientRequestId":"f6b7e036-afe7-451f-8002-f7050a3f24c3","clientIpAddress":"167.220.1.229","method":"PUT"},"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb/events/59544866-db93-4a1c-a670-4188f831748f/ticks/636331542209653184","level":"Informational","resourceGroupName":"cli_test_deployment000001","resourceProviderName":{"value":"Microsoft.Network","localizedValue":"Microsoft.Network"},"resourceId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb","resourceType":{"value":"Microsoft.Network/loadBalancers","localizedValue":"Microsoft.Network/loadBalancers"},"operationId":"739eb912-937c-4fed-a9be-2bb7934d4d41","operationName":{"value":"Microsoft.Network/loadBalancers/write","localizedValue":"Microsoft.Network/loadBalancers/write"},"properties":{"requestbody":"{\"location\":\"westus\",\"tags\":{\"key\":\"super=value\"},\"properties\":{\"backendAddressPools\":[{\"name\":\"bepool1\"},{\"name\":\"bepool2\"}],\"frontendIPConfigurations\":[{\"name\":\"LoadBalancerFrontEnd\",\"properties\":{\"privateIPAllocationMethod\":\"Dynamic\",\"subnet\":{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_deployment000001/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/subnet1\"}}}]}}"},"status":{"value":"Started","localizedValue":"Started"},"subStatus":{"value":"","localizedValue":""},"eventTimestamp":"2017-06-15T20:10:20.9653184Z","submissionTimestamp":"2017-06-15T20:10:40.24568Z","subscriptionId":"0b1f6471-1bf0-4dda-aec3-cb9272f09590","tenantId":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a"},{"authorization":{"action":"Microsoft.Resources/deployments/write","scope":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/pro
viders/Microsoft.Resources/deployments/azure-cli-deployment"},"caller":"[email protected]","channels":"Operation","claims":{"aud":"https://management.core.windows.net/","iss":"https://sts.windows.net/54826b22-38d6-4fb2-bad9-b7b93a3e9c5a/","iat":"1497556604","nbf":"1497556604","exp":"1497560504","http://schemas.microsoft.com/claims/authnclassreference":"1","aio":"Y2ZgYHCpMd2zJSHh9sHaqmOLfZk6DMyjWdwmHe1d2/0hNSkhmAkA","http://schemas.microsoft.com/claims/authnmethodsreferences":"pwd","appid":"04b07795-8ddb-461a-bbee-02f9e1bf7b46","appidacr":"0","e_exp":"262800","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/surname":"Admin2","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/givenname":"Admin2","groups":"e4bb0b56-1014-40f8-88ab-3d8a8cb0e086,d758a069-52cc-47f6-bc00-961cea17be39","ipaddr":"167.220.0.229","name":"Admin2","http://schemas.microsoft.com/identity/claims/objectidentifier":"5963f50c-7c43-405c-af7e-53294de76abd","platf":"14","puid":"1003BFFD959F8423","http://schemas.microsoft.com/identity/claims/scope":"user_impersonation","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/nameidentifier":"sDgexRwCNIfY-hzQjjCDvZT7Izdfo4Syrr4x0dDNzR4","http://schemas.microsoft.com/identity/claims/tenantid":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/name":"[email protected]","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/upn":"[email protected]","ver":"1.0","wids":"62e90394-69f5-4237-9190-012177145e10"},"correlationId":"b8232cfd-5846-4949-a2b5-60475b16d780","description":"","eventDataId":"b73560a9-5d20-4dce-8bdf-538b84e5aea1","eventName":{"value":"EndRequest","localizedValue":"End
+ request"},"category":{"value":"Administrative","localizedValue":"Administrative"},"httpRequest":{"clientRequestId":"a79b3190-5206-11e7-b4af-a0b3ccf7272a","clientIpAddress":"167.220.1.229","method":"PUT"},"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Resources/deployments/azure-cli-deployment/events/b73560a9-5d20-4dce-8bdf-538b84e5aea1/ticks/636331542203426587","level":"Informational","resourceGroupName":"cli_test_deployment000001","resourceProviderName":{"value":"Microsoft.Resources","localizedValue":"Microsoft
+ Resources"},"resourceId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Resources/deployments/azure-cli-deployment","resourceType":{"value":"Microsoft.Resources/deployments","localizedValue":"Microsoft.Resources/deployments"},"operationId":"b8232cfd-5846-4949-a2b5-60475b16d780","operationName":{"value":"Microsoft.Resources/deployments/write","localizedValue":"Microsoft.Resources/deployments/write"},"properties":{"statusCode":"Created","serviceRequestId":null},"status":{"value":"Accepted","localizedValue":"Accepted"},"subStatus":{"value":"Created","localizedValue":"Created
+ (HTTP Status Code: 201)"},"eventTimestamp":"2017-06-15T20:10:20.3426587Z","submissionTimestamp":"2017-06-15T20:10:41.3300109Z","subscriptionId":"0b1f6471-1bf0-4dda-aec3-cb9272f09590","tenantId":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a"},{"authorization":{"action":"Microsoft.Resources/deployments/write","scope":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Resources/deployments/azure-cli-deployment"},"caller":"[email protected]","channels":"Operation","claims":{"aud":"https://management.core.windows.net/","iss":"https://sts.windows.net/54826b22-38d6-4fb2-bad9-b7b93a3e9c5a/","iat":"1497556604","nbf":"1497556604","exp":"1497560504","http://schemas.microsoft.com/claims/authnclassreference":"1","aio":"Y2ZgYHCpMd2zJSHh9sHaqmOLfZk6DMyjWdwmHe1d2/0hNSkhmAkA","http://schemas.microsoft.com/claims/authnmethodsreferences":"pwd","appid":"04b07795-8ddb-461a-bbee-02f9e1bf7b46","appidacr":"0","e_exp":"262800","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/surname":"Admin2","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/givenname":"Admin2","groups":"e4bb0b56-1014-40f8-88ab-3d8a8cb0e086,d758a069-52cc-47f6-bc00-961cea17be39","ipaddr":"167.220.0.229","name":"Admin2","http://schemas.microsoft.com/identity/claims/objectidentifier":"5963f50c-7c43-405c-af7e-53294de76abd","platf":"14","puid":"1003BFFD959F8423","http://schemas.microsoft.com/identity/claims/scope":"user_impersonation","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/nameidentifier":"sDgexRwCNIfY-hzQjjCDvZT7Izdfo4Syrr4x0dDNzR4","http://schemas.microsoft.com/identity/claims/tenantid":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/name":"[email protected]","http://schemas.xmlsoap.org/ws/2005/05/identity/claims/upn":"[email 
protected]","ver":"1.0","wids":"62e90394-69f5-4237-9190-012177145e10"},"correlationId":"b8232cfd-5846-4949-a2b5-60475b16d780","description":"","eventDataId":"89d46fa2-00f4-4e20-acb3-92aeac6f8d3e","eventName":{"value":"BeginRequest","localizedValue":"Begin
+ request"},"category":{"value":"Administrative","localizedValue":"Administrative"},"httpRequest":{"clientRequestId":"a79b3190-5206-11e7-b4af-a0b3ccf7272a","clientIpAddress":"167.220.1.229","method":"PUT"},"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Resources/deployments/azure-cli-deployment/events/89d46fa2-00f4-4e20-acb3-92aeac6f8d3e/ticks/636331542194986937","level":"Informational","resourceGroupName":"cli_test_deployment000001","resourceProviderName":{"value":"Microsoft.Resources","localizedValue":"Microsoft
+ Resources"},"resourceId":"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Resources/deployments/azure-cli-deployment","resourceType":{"value":"Microsoft.Resources/deployments","localizedValue":"Microsoft.Resources/deployments"},"operationId":"b8232cfd-5846-4949-a2b5-60475b16d780","operationName":{"value":"Microsoft.Resources/deployments/write","localizedValue":"Microsoft.Resources/deployments/write"},"status":{"value":"Started","localizedValue":"Started"},"subStatus":{"value":"","localizedValue":""},"eventTimestamp":"2017-06-15T20:10:19.4986937Z","submissionTimestamp":"2017-06-15T20:10:30.6284676Z","subscriptionId":"0b1f6471-1bf0-4dda-aec3-cb9272f09590","tenantId":"54826b22-38d6-4fb2-bad9-b7b93a3e9c5a"}]}'}
+ headers:
+ cache-control: [no-cache]
+ content-length: ['20029']
+ content-type: [application/json; charset=utf-8]
+ date: ['Thu, 15 Jun 2017 20:10:49 GMT']
+ expires: ['-1']
+ pragma: [no-cache]
+ server: [Microsoft-IIS/8.5]
+ strict-transport-security: [max-age=31536000; includeSubDomains]
+ transfer-encoding: [chunked]
+ vary: ['Accept-Encoding,Accept-Encoding']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ CommandName: [group deployment create]
Connection: [keep-alive]
Content-Type: [application/json; charset=utf-8]
User-Agent: [python/3.5.1 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
msrest_azure/0.4.7 resourcemanagementclient/1.1.0rc1 Azure-SDK-For-Python
- AZURECLI/TEST/2.0.7+dev]
+ AZURECLI/2.0.9+dev]
accept-language: [en-US]
- x-ms-client-request-id: [9b58cf52-4c90-11e7-9b16-a0b3ccf7272a]
method: GET
- uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/azure-cli-deployment-test/providers/Microsoft.Resources/deployments/mock-deployment/operationStatuses/08587046499225358060?api-version=2017-05-10
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Resources/deployments/mock-deployment/operationStatuses/08587040494654689172?api-version=2017-05-10
response:
body: {string: '{"status":"Succeeded"}'}
headers:
- Cache-Control: [no-cache]
- Content-Type: [application/json; charset=utf-8]
- Date: ['Thu, 08 Jun 2017 21:23:13 GMT']
- Expires: ['-1']
- Pragma: [no-cache]
- Strict-Transport-Security: [max-age=31536000; includeSubDomains]
- Vary: [Accept-Encoding]
+ cache-control: [no-cache]
content-length: ['22']
+ content-type: [application/json; charset=utf-8]
+ date: ['Thu, 15 Jun 2017 20:10:49 GMT']
+ expires: ['-1']
+ pragma: [no-cache]
+ strict-transport-security: [max-age=31536000; includeSubDomains]
+ vary: [Accept-Encoding]
status: {code: 200, message: OK}
- request:
body: null
headers:
Accept: [application/json]
Accept-Encoding: ['gzip, deflate']
+ CommandName: [group deployment create]
Connection: [keep-alive]
Content-Type: [application/json; charset=utf-8]
User-Agent: [python/3.5.1 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
msrest_azure/0.4.7 resourcemanagementclient/1.1.0rc1 Azure-SDK-For-Python
- AZURECLI/TEST/2.0.7+dev]
+ AZURECLI/2.0.9+dev]
accept-language: [en-US]
- x-ms-client-request-id: [9b58cf52-4c90-11e7-9b16-a0b3ccf7272a]
method: GET
- uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/azure-cli-deployment-test/providers/Microsoft.Resources/deployments/mock-deployment?api-version=2017-05-10
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Resources/deployments/mock-deployment?api-version=2017-05-10
response:
- body: {string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Resources/deployments/azure-cli-deployment","name":"azure-cli-deployment","properties":{"templateHash":"18048340065001902796","parameters":{"location":{"type":"String","value":"westus"},"privateIPAllocationMethod":{"type":"String","value":"Dynamic"},"backendAddressPools":{"type":"Array","value":[{"name":"bepool1"},{"name":"bepool2"}]},"name":{"type":"String","value":"test-lb"},"subnetId":{"type":"String","value":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/subnet1"}},"mode":"Incremental","provisioningState":"Succeeded","timestamp":"2017-06-08T21:22:51.7379575Z","duration":"PT8.796149S","correlationId":"c18f33be-ece9-4c12-96b9-eabd6fd111ff","providers":[{"namespace":"Microsoft.Network","resourceTypes":[{"resourceType":"loadBalancers","locations":["westus"]}]}],"dependencies":[],"outputResources":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Network/loadBalancers/test-lb"}]}}'}
+ body: {string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_deployment000001/providers/Microsoft.Resources/deployments/azure-cli-deployment","name":"azure-cli-deployment","properties":{"templateHash":"6408713692666354622","parameters":{"location":{"type":"String","value":"westus"},"tags":{"type":"Object","value":{"key":"super=value"}},"privateIPAllocationMethod":{"type":"String","value":"Dynamic"},"name":{"type":"String","value":"test-lb"},"backendAddressPools":{"type":"Array","value":[{"name":"bepool1"},{"name":"bepool2"}]},"subnetId":{"type":"String","value":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_deployment000001/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/subnet1"}},"mode":"Incremental","provisioningState":"Succeeded","timestamp":"2017-06-15T20:10:21.4099596Z","duration":"PT1.4012558S","correlationId":"b8232cfd-5846-4949-a2b5-60475b16d780","providers":[{"namespace":"Microsoft.Network","resourceTypes":[{"resourceType":"loadBalancers","locations":["westus"]}]}],"dependencies":[],"outputResources":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb"}]}}'}
headers:
- Cache-Control: [no-cache]
+ cache-control: [no-cache]
+ content-length: ['1393']
+ content-type: [application/json; charset=utf-8]
+ date: ['Thu, 15 Jun 2017 20:10:50 GMT']
+ expires: ['-1']
+ pragma: [no-cache]
+ strict-transport-security: [max-age=31536000; includeSubDomains]
+ vary: [Accept-Encoding]
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ CommandName: [network lb show]
+ Connection: [keep-alive]
Content-Type: [application/json; charset=utf-8]
- Date: ['Thu, 08 Jun 2017 21:23:13 GMT']
- Expires: ['-1']
- Pragma: [no-cache]
- Strict-Transport-Security: [max-age=31536000; includeSubDomains]
- Vary: [Accept-Encoding]
- content-length: ['1206']
+ User-Agent: [python/3.5.1 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
+ msrest_azure/0.4.7 networkmanagementclient/1.0.0rc3 Azure-SDK-For-Python
+ AZURECLI/2.0.9+dev]
+ accept-language: [en-US]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb?api-version=2017-03-01
+ response:
+ body: {string: "{\r\n \"name\": \"test-lb\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb\"\
+ ,\r\n \"etag\": \"W/\\\"deb1081f-8468-4ee2-89a3-00e28114da5d\\\"\",\r\n \
+ \ \"type\": \"Microsoft.Network/loadBalancers\",\r\n \"location\": \"westus\"\
+ ,\r\n \"tags\": {\r\n \"key\": \"super=value\"\r\n },\r\n \"properties\"\
+ : {\r\n \"provisioningState\": \"Succeeded\",\r\n \"resourceGuid\":\
+ \ \"5dfd3e9a-ce5b-431e-be12-87e156d8ea66\",\r\n \"frontendIPConfigurations\"\
+ : [\r\n {\r\n \"name\": \"LoadBalancerFrontEnd\",\r\n \"\
+ id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb/frontendIPConfigurations/LoadBalancerFrontEnd\"\
+ ,\r\n \"etag\": \"W/\\\"deb1081f-8468-4ee2-89a3-00e28114da5d\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Succeeded\"\
+ ,\r\n \"privateIPAddress\": \"10.0.0.4\",\r\n \"privateIPAllocationMethod\"\
+ : \"Dynamic\",\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_deployment000001/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/subnet1\"\
+ \r\n }\r\n }\r\n }\r\n ],\r\n \"backendAddressPools\"\
+ : [\r\n {\r\n \"name\": \"bepool1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb/backendAddressPools/bepool1\"\
+ ,\r\n \"etag\": \"W/\\\"deb1081f-8468-4ee2-89a3-00e28114da5d\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Succeeded\"\
+ \r\n }\r\n },\r\n {\r\n \"name\": \"bepool2\",\r\n\
+ \ \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb/backendAddressPools/bepool2\"\
+ ,\r\n \"etag\": \"W/\\\"deb1081f-8468-4ee2-89a3-00e28114da5d\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Succeeded\"\
+ \r\n }\r\n }\r\n ],\r\n \"loadBalancingRules\": [],\r\n\
+ \ \"probes\": [],\r\n \"inboundNatRules\": [],\r\n \"outboundNatRules\"\
+ : [],\r\n \"inboundNatPools\": []\r\n }\r\n}"}
+ headers:
+ cache-control: [no-cache]
+ content-length: ['2420']
+ content-type: [application/json; charset=utf-8]
+ date: ['Thu, 15 Jun 2017 20:10:50 GMT']
+ etag: [W/"deb1081f-8468-4ee2-89a3-00e28114da5d"]
+ expires: ['-1']
+ pragma: [no-cache]
+ server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ strict-transport-security: [max-age=31536000; includeSubDomains]
+ transfer-encoding: [chunked]
+ vary: [Accept-Encoding]
status: {code: 200, message: OK}
- request:
body: null
headers:
Accept: [application/json]
Accept-Encoding: ['gzip, deflate']
+ CommandName: [group deployment list]
Connection: [keep-alive]
Content-Type: [application/json; charset=utf-8]
User-Agent: [python/3.5.1 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
msrest_azure/0.4.7 resourcemanagementclient/1.1.0rc1 Azure-SDK-For-Python
- AZURECLI/TEST/2.0.7+dev]
+ AZURECLI/2.0.9+dev]
accept-language: [en-US]
- x-ms-client-request-id: [ae78895c-4c90-11e7-adda-a0b3ccf7272a]
method: GET
- uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/azure-cli-deployment-test/providers/Microsoft.Resources/deployments/?api-version=2017-05-10
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Resources/deployments/?api-version=2017-05-10
response:
- body: {string: '{"value":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Resources/deployments/azure-cli-deployment","name":"azure-cli-deployment","properties":{"templateHash":"18048340065001902796","parameters":{"location":{"type":"String","value":"westus"},"privateIPAllocationMethod":{"type":"String","value":"Dynamic"},"backendAddressPools":{"type":"Array","value":[{"name":"bepool1"},{"name":"bepool2"}]},"name":{"type":"String","value":"test-lb"},"subnetId":{"type":"String","value":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/subnet1"}},"mode":"Incremental","provisioningState":"Succeeded","timestamp":"2017-06-08T21:22:51.7379575Z","duration":"PT8.796149S","correlationId":"c18f33be-ece9-4c12-96b9-eabd6fd111ff","providers":[{"namespace":"Microsoft.Network","resourceTypes":[{"resourceType":"loadBalancers","locations":["westus"]}]}],"dependencies":[],"outputResources":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Network/loadBalancers/test-lb"}]}}]}'}
+ body: {string: '{"value":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_deployment000001/providers/Microsoft.Resources/deployments/azure-cli-deployment","name":"azure-cli-deployment","properties":{"templateHash":"6408713692666354622","parameters":{"location":{"type":"String","value":"westus"},"tags":{"type":"Object","value":{"key":"super=value"}},"privateIPAllocationMethod":{"type":"String","value":"Dynamic"},"name":{"type":"String","value":"test-lb"},"backendAddressPools":{"type":"Array","value":[{"name":"bepool1"},{"name":"bepool2"}]},"subnetId":{"type":"String","value":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_deployment000001/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/subnet1"}},"mode":"Incremental","provisioningState":"Succeeded","timestamp":"2017-06-15T20:10:21.4099596Z","duration":"PT1.4012558S","correlationId":"b8232cfd-5846-4949-a2b5-60475b16d780","providers":[{"namespace":"Microsoft.Network","resourceTypes":[{"resourceType":"loadBalancers","locations":["westus"]}]}],"dependencies":[],"outputResources":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb"}]}}]}'}
headers:
- Cache-Control: [no-cache]
- Content-Type: [application/json; charset=utf-8]
- Date: ['Thu, 08 Jun 2017 21:23:14 GMT']
- Expires: ['-1']
- Pragma: [no-cache]
- Strict-Transport-Security: [max-age=31536000; includeSubDomains]
- Vary: [Accept-Encoding]
- content-length: ['1218']
+ cache-control: [no-cache]
+ content-length: ['1405']
+ content-type: [application/json; charset=utf-8]
+ date: ['Thu, 15 Jun 2017 20:10:50 GMT']
+ expires: ['-1']
+ pragma: [no-cache]
+ strict-transport-security: [max-age=31536000; includeSubDomains]
+ vary: [Accept-Encoding]
status: {code: 200, message: OK}
- request:
body: null
headers:
Accept: [application/json]
Accept-Encoding: ['gzip, deflate']
+ CommandName: [group deployment show]
Connection: [keep-alive]
Content-Type: [application/json; charset=utf-8]
User-Agent: [python/3.5.1 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
msrest_azure/0.4.7 resourcemanagementclient/1.1.0rc1 Azure-SDK-For-Python
- AZURECLI/TEST/2.0.7+dev]
+ AZURECLI/2.0.9+dev]
accept-language: [en-US]
- x-ms-client-request-id: [aeb3b476-4c90-11e7-ba4f-a0b3ccf7272a]
method: GET
- uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/azure-cli-deployment-test/providers/Microsoft.Resources/deployments/mock-deployment?api-version=2017-05-10
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/providers/Microsoft.Resources/deployments/mock-deployment?api-version=2017-05-10
response:
- body: {string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Resources/deployments/azure-cli-deployment","name":"azure-cli-deployment","properties":{"templateHash":"18048340065001902796","parameters":{"location":{"type":"String","value":"westus"},"privateIPAllocationMethod":{"type":"String","value":"Dynamic"},"backendAddressPools":{"type":"Array","value":[{"name":"bepool1"},{"name":"bepool2"}]},"name":{"type":"String","value":"test-lb"},"subnetId":{"type":"String","value":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/subnet1"}},"mode":"Incremental","provisioningState":"Succeeded","timestamp":"2017-06-08T21:22:51.7379575Z","duration":"PT8.796149S","correlationId":"c18f33be-ece9-4c12-96b9-eabd6fd111ff","providers":[{"namespace":"Microsoft.Network","resourceTypes":[{"resourceType":"loadBalancers","locations":["westus"]}]}],"dependencies":[],"outputResources":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Network/loadBalancers/test-lb"}]}}'}
+ body: {string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_deployment000001/providers/Microsoft.Resources/deployments/azure-cli-deployment","name":"azure-cli-deployment","properties":{"templateHash":"6408713692666354622","parameters":{"location":{"type":"String","value":"westus"},"tags":{"type":"Object","value":{"key":"super=value"}},"privateIPAllocationMethod":{"type":"String","value":"Dynamic"},"name":{"type":"String","value":"test-lb"},"backendAddressPools":{"type":"Array","value":[{"name":"bepool1"},{"name":"bepool2"}]},"subnetId":{"type":"String","value":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_deployment000001/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/subnet1"}},"mode":"Incremental","provisioningState":"Succeeded","timestamp":"2017-06-15T20:10:21.4099596Z","duration":"PT1.4012558S","correlationId":"b8232cfd-5846-4949-a2b5-60475b16d780","providers":[{"namespace":"Microsoft.Network","resourceTypes":[{"resourceType":"loadBalancers","locations":["westus"]}]}],"dependencies":[],"outputResources":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb"}]}}'}
headers:
- Cache-Control: [no-cache]
- Content-Type: [application/json; charset=utf-8]
- Date: ['Thu, 08 Jun 2017 21:23:14 GMT']
- Expires: ['-1']
- Pragma: [no-cache]
- Strict-Transport-Security: [max-age=31536000; includeSubDomains]
- Vary: [Accept-Encoding]
- content-length: ['1206']
+ cache-control: [no-cache]
+ content-length: ['1393']
+ content-type: [application/json; charset=utf-8]
+ date: ['Thu, 15 Jun 2017 20:10:51 GMT']
+ expires: ['-1']
+ pragma: [no-cache]
+ strict-transport-security: [max-age=31536000; includeSubDomains]
+ vary: [Accept-Encoding]
status: {code: 200, message: OK}
- request:
body: null
headers:
Accept: [application/json]
Accept-Encoding: ['gzip, deflate']
+ CommandName: [group deployment operation list]
Connection: [keep-alive]
Content-Type: [application/json; charset=utf-8]
User-Agent: [python/3.5.1 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
msrest_azure/0.4.7 resourcemanagementclient/1.1.0rc1 Azure-SDK-For-Python
- AZURECLI/TEST/2.0.7+dev]
+ AZURECLI/2.0.9+dev]
accept-language: [en-US]
- x-ms-client-request-id: [aeea1dd2-4c90-11e7-a097-a0b3ccf7272a]
method: GET
- uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/azure-cli-deployment-test/deployments/mock-deployment/operations?api-version=2017-05-10
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001/deployments/mock-deployment/operations?api-version=2017-05-10
response:
- body: {string: '{"value":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Resources/deployments/azure-cli-deployment/operations/62E23BA3072F9A59","operationId":"62E23BA3072F9A59","properties":{"provisioningOperation":"Create","provisioningState":"Succeeded","timestamp":"2017-06-08T21:22:49.7320302Z","duration":"PT3.1487534S","trackingId":"25239975-d954-465b-897a-708ab8de526f","serviceRequestId":"1d49c05c-2558-47e1-84ac-d104ba980e53","statusCode":"Created","targetResource":{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Network/loadBalancers/test-lb","resourceType":"Microsoft.Network/loadBalancers","resourceName":"test-lb"}}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/azure-cli-deployment-test/providers/Microsoft.Resources/deployments/azure-cli-deployment/operations/08587046499225358060","operationId":"08587046499225358060","properties":{"provisioningOperation":"EvaluateDeploymentOutput","provisioningState":"Succeeded","timestamp":"2017-06-08T21:22:51.4958641Z","duration":"PT1.3552308S","trackingId":"52533323-6355-43b1-b5ca-d54f37c55fea","statusCode":"OK","statusMessage":null}}]}'}
+ body: {string: '{"value":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_deployment000001/providers/Microsoft.Resources/deployments/azure-cli-deployment/operations/BDF6DEB6CF066808","operationId":"BDF6DEB6CF066808","properties":{"provisioningOperation":"Create","provisioningState":"Succeeded","timestamp":"2017-06-15T20:10:21.2323261Z","duration":"PT0.4173229S","trackingId":"54c6f4d1-3680-4636-a172-e3a85ddb5f91","serviceRequestId":"f01a0be4-f105-42e3-af90-e63ad09c45fb","statusCode":"Created","targetResource":{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_deployment000001/providers/Microsoft.Network/loadBalancers/test-lb","resourceType":"Microsoft.Network/loadBalancers","resourceName":"test-lb"}}},{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_deployment000001/providers/Microsoft.Resources/deployments/azure-cli-deployment/operations/08587040494654689172","operationId":"08587040494654689172","properties":{"provisioningOperation":"EvaluateDeploymentOutput","provisioningState":"Succeeded","timestamp":"2017-06-15T20:10:21.3811157Z","duration":"PT0.1108897S","trackingId":"5504e6a1-ff15-47b0-8cb8-cf4cf0e4321a","statusCode":"OK","statusMessage":null}}]}'}
headers:
- Cache-Control: [no-cache]
- Content-Type: [application/json; charset=utf-8]
- Date: ['Thu, 08 Jun 2017 21:23:14 GMT']
- Expires: ['-1']
- Pragma: [no-cache]
- Strict-Transport-Security: [max-age=31536000; includeSubDomains]
- Vary: [Accept-Encoding]
- content-length: ['1272']
+ cache-control: [no-cache]
+ content-length: ['1404']
+ content-type: [application/json; charset=utf-8]
+ date: ['Thu, 15 Jun 2017 20:10:51 GMT']
+ expires: ['-1']
+ pragma: [no-cache]
+ strict-transport-security: [max-age=31536000; includeSubDomains]
+ vary: [Accept-Encoding]
status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ CommandName: [group delete]
+ Connection: [keep-alive]
+ Content-Length: ['0']
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.15063-SP0) requests/2.9.1 msrest/0.4.7
+ msrest_azure/0.4.7 resourcemanagementclient/1.1.0rc1 Azure-SDK-For-Python
+ AZURECLI/2.0.9+dev]
+ accept-language: [en-US]
+ method: DELETE
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_deployment000001?api-version=2017-05-10
+ response:
+ body: {string: ''}
+ headers:
+ cache-control: [no-cache]
+ content-length: ['0']
+ date: ['Thu, 15 Jun 2017 20:10:52 GMT']
+ expires: ['-1']
+ location: ['https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/operationresults/eyJqb2JJZCI6IlJFU09VUkNFR1JPVVBERUxFVElPTkpPQi1DTEk6NUZURVNUOjVGREVQTE9ZTUVOVFAzQzdCQlVVNUVPU0tPV0ZNMlE0QzRLTXxGQTIwQkM5MDBBMzBFRDFCLVdFU1RVUyIsImpvYkxvY2F0aW9uIjoid2VzdHVzIn0?api-version=2017-05-10']
+ pragma: [no-cache]
+ retry-after: ['15']
+ strict-transport-security: [max-age=31536000; includeSubDomains]
+ x-ms-ratelimit-remaining-subscription-writes: ['1198']
+ status: {code: 202, message: Accepted}
version: 1
diff --git a/src/command_modules/azure-cli-resource/tests/test-params.json b/src/command_modules/azure-cli-resource/tests/test-params.json
index d393e2593..ef8b30fdc 100644
--- a/src/command_modules/azure-cli-resource/tests/test-params.json
+++ b/src/command_modules/azure-cli-resource/tests/test-params.json
@@ -10,6 +10,11 @@
},
"privateIPAllocationMethod": {
"value": "Dynamic"
+ },
+ "tags": {
+ "value": {
+ "key": "super=value"
+ }
}
}
}
\ No newline at end of file
diff --git a/src/command_modules/azure-cli-resource/tests/test-template.json b/src/command_modules/azure-cli-resource/tests/test-template.json
index 8e5a5d5da..cdd7f5a8b 100644
--- a/src/command_modules/azure-cli-resource/tests/test-template.json
+++ b/src/command_modules/azure-cli-resource/tests/test-template.json
@@ -16,6 +16,9 @@
},
"backendAddressPools": {
"type": "array"
+ },
+ "tags": {
+ "type": "object"
}
},
"resources": [
@@ -38,6 +41,7 @@
],
"backendAddressPools": "[parameters('backendAddressPools')]"
},
+ "tags": "[parameters('tags')]",
"type": "Microsoft.Network/loadBalancers"
}
]
diff --git a/src/command_modules/azure-cli-resource/tests/test_custom.py b/src/command_modules/azure-cli-resource/tests/test_custom.py
index 6ef0754a4..877a0fa8a 100644
--- a/src/command_modules/azure-cli-resource/tests/test_custom.py
+++ b/src/command_modules/azure-cli-resource/tests/test_custom.py
@@ -176,7 +176,7 @@ class TestCustom(unittest.TestCase):
{
"parameter_list": [['{"foo": "bar"}', '{"foo": "baz"}']],
"expected": {"foo": "baz"},
- },
+ }
]
for test in tests:
diff --git a/src/command_modules/azure-cli-resource/tests/test_resource.py b/src/command_modules/azure-cli-resource/tests/test_resource.py
index 7c310420d..debe7e4a6 100644
--- a/src/command_modules/azure-cli-resource/tests/test_resource.py
+++ b/src/command_modules/azure-cli-resource/tests/test_resource.py
@@ -310,47 +310,43 @@ class ProviderOperationTest(VCRTestBase):
])
-class DeploymentTest(ResourceGroupVCRTestBase):
- def __init__(self, test_method):
- super(DeploymentTest, self).__init__(__file__, test_method,
- resource_group='azure-cli-deployment-test')
-
- def test_group_deployment(self):
- self.execute()
+class DeploymentTest(ScenarioTest):
- def body(self):
+ @ResourceGroupPreparer(name_prefix='cli_test_deployment')
+ def test_group_deployment(self, resource_group):
curr_dir = os.path.dirname(os.path.realpath(__file__))
template_file = os.path.join(curr_dir, 'test-template.json').replace('\\', '\\\\')
parameters_file = os.path.join(curr_dir, 'test-params.json').replace('\\', '\\\\')
object_file = os.path.join(curr_dir, 'test-object.json').replace('\\', '\\\\')
deployment_name = 'azure-cli-deployment'
- subnet_id = self.cmd('network vnet create -g {} -n vnet1 --subnet-name subnet1'.format(self.resource_group))['newVNet']['subnets'][0]['id']
+ subnet_id = self.cmd('network vnet create -g {} -n vnet1 --subnet-name subnet1'.format(resource_group)).get_output_in_json()['newVNet']['subnets'][0]['id']
self.cmd('group deployment validate -g {} --template-file {} --parameters @"{}" --parameters subnetId="{}" --parameters backendAddressPools=@"{}"'.format(
- self.resource_group, template_file, parameters_file, subnet_id, object_file), checks=[
- JMESPathCheck('properties.provisioningState', 'Succeeded')
+ resource_group, template_file, parameters_file, subnet_id, object_file), checks=[
+ JCheck('properties.provisioningState', 'Succeeded')
])
self.cmd('group deployment create -g {} -n {} --template-file {} --parameters @"{}" --parameters subnetId="{}" --parameters backendAddressPools=@"{}"'.format(
- self.resource_group, deployment_name, template_file, parameters_file, subnet_id, object_file), checks=[
- JMESPathCheck('properties.provisioningState', 'Succeeded'),
- JMESPathCheck('resourceGroup', self.resource_group),
+ resource_group, deployment_name, template_file, parameters_file, subnet_id, object_file), checks=[
+ JCheck('properties.provisioningState', 'Succeeded'),
+ JCheck('resourceGroup', resource_group),
+ ])
+ self.cmd('network lb show -g {} -n test-lb'.format(resource_group), checks=[
+ JCheck('tags', {'key': 'super=value'})
])
- self.cmd('group deployment list -g {}'.format(self.resource_group), checks=[
- JMESPathCheck('[0].name', deployment_name),
- JMESPathCheck('[0].resourceGroup', self.resource_group)
+ self.cmd('group deployment list -g {}'.format(resource_group), checks=[
+ JCheck('[0].name', deployment_name),
+ JCheck('[0].resourceGroup', resource_group)
])
- self.cmd('group deployment show -g {} -n {}'.format(self.resource_group, deployment_name),
- checks=[
- JMESPathCheck('name', deployment_name),
- JMESPathCheck('resourceGroup', self.resource_group)
- ])
- self.cmd('group deployment operation list -g {} -n {}'.format(self.resource_group,
- deployment_name), checks=[
- JMESPathCheck('length([])', 2),
- JMESPathCheck('[0].resourceGroup', self.resource_group)
+ self.cmd('group deployment show -g {} -n {}'.format(resource_group, deployment_name), checks=[
+ JCheck('name', deployment_name),
+ JCheck('resourceGroup', resource_group)
+ ])
+ self.cmd('group deployment operation list -g {} -n {}'.format(resource_group, deployment_name), checks=[
+ JCheck('length([])', 2),
+ JCheck('[0].resourceGroup', resource_group)
])
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": -1,
"issue_text_score": 0,
"test_score": -1
},
"num_modified_files": 3
} | 2.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "python scripts/dev_setup.py",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc libssl-dev libffi-dev"
],
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | adal==0.4.3
applicationinsights==0.10.0
argcomplete==1.8.0
astroid==2.11.7
attrs==22.2.0
autopep8==2.0.4
azure-batch==3.0.0
-e git+https://github.com/Azure/azure-cli.git@e80e204a01bf4f32c17d8f89a955c3dd49eaa7ec#egg=azure_cli&subdirectory=src/azure-cli
-e git+https://github.com/Azure/azure-cli.git@e80e204a01bf4f32c17d8f89a955c3dd49eaa7ec#egg=azure_cli_acr&subdirectory=src/command_modules/azure-cli-acr
-e git+https://github.com/Azure/azure-cli.git@e80e204a01bf4f32c17d8f89a955c3dd49eaa7ec#egg=azure_cli_acs&subdirectory=src/command_modules/azure-cli-acs
-e git+https://github.com/Azure/azure-cli.git@e80e204a01bf4f32c17d8f89a955c3dd49eaa7ec#egg=azure_cli_appservice&subdirectory=src/command_modules/azure-cli-appservice
-e git+https://github.com/Azure/azure-cli.git@e80e204a01bf4f32c17d8f89a955c3dd49eaa7ec#egg=azure_cli_batch&subdirectory=src/command_modules/azure-cli-batch
-e git+https://github.com/Azure/azure-cli.git@e80e204a01bf4f32c17d8f89a955c3dd49eaa7ec#egg=azure_cli_billing&subdirectory=src/command_modules/azure-cli-billing
-e git+https://github.com/Azure/azure-cli.git@e80e204a01bf4f32c17d8f89a955c3dd49eaa7ec#egg=azure_cli_cdn&subdirectory=src/command_modules/azure-cli-cdn
-e git+https://github.com/Azure/azure-cli.git@e80e204a01bf4f32c17d8f89a955c3dd49eaa7ec#egg=azure_cli_cloud&subdirectory=src/command_modules/azure-cli-cloud
-e git+https://github.com/Azure/azure-cli.git@e80e204a01bf4f32c17d8f89a955c3dd49eaa7ec#egg=azure_cli_cognitiveservices&subdirectory=src/command_modules/azure-cli-cognitiveservices
-e git+https://github.com/Azure/azure-cli.git@e80e204a01bf4f32c17d8f89a955c3dd49eaa7ec#egg=azure_cli_component&subdirectory=src/command_modules/azure-cli-component
-e git+https://github.com/Azure/azure-cli.git@e80e204a01bf4f32c17d8f89a955c3dd49eaa7ec#egg=azure_cli_configure&subdirectory=src/command_modules/azure-cli-configure
-e git+https://github.com/Azure/azure-cli.git@e80e204a01bf4f32c17d8f89a955c3dd49eaa7ec#egg=azure_cli_consumption&subdirectory=src/command_modules/azure-cli-consumption
-e git+https://github.com/Azure/azure-cli.git@e80e204a01bf4f32c17d8f89a955c3dd49eaa7ec#egg=azure_cli_core&subdirectory=src/azure-cli-core
-e git+https://github.com/Azure/azure-cli.git@e80e204a01bf4f32c17d8f89a955c3dd49eaa7ec#egg=azure_cli_cosmosdb&subdirectory=src/command_modules/azure-cli-cosmosdb
-e git+https://github.com/Azure/azure-cli.git@e80e204a01bf4f32c17d8f89a955c3dd49eaa7ec#egg=azure_cli_dla&subdirectory=src/command_modules/azure-cli-dla
-e git+https://github.com/Azure/azure-cli.git@e80e204a01bf4f32c17d8f89a955c3dd49eaa7ec#egg=azure_cli_dls&subdirectory=src/command_modules/azure-cli-dls
-e git+https://github.com/Azure/azure-cli.git@e80e204a01bf4f32c17d8f89a955c3dd49eaa7ec#egg=azure_cli_feedback&subdirectory=src/command_modules/azure-cli-feedback
-e git+https://github.com/Azure/azure-cli.git@e80e204a01bf4f32c17d8f89a955c3dd49eaa7ec#egg=azure_cli_find&subdirectory=src/command_modules/azure-cli-find
-e git+https://github.com/Azure/azure-cli.git@e80e204a01bf4f32c17d8f89a955c3dd49eaa7ec#egg=azure_cli_interactive&subdirectory=src/command_modules/azure-cli-interactive
-e git+https://github.com/Azure/azure-cli.git@e80e204a01bf4f32c17d8f89a955c3dd49eaa7ec#egg=azure_cli_iot&subdirectory=src/command_modules/azure-cli-iot
-e git+https://github.com/Azure/azure-cli.git@e80e204a01bf4f32c17d8f89a955c3dd49eaa7ec#egg=azure_cli_keyvault&subdirectory=src/command_modules/azure-cli-keyvault
-e git+https://github.com/Azure/azure-cli.git@e80e204a01bf4f32c17d8f89a955c3dd49eaa7ec#egg=azure_cli_lab&subdirectory=src/command_modules/azure-cli-lab
-e git+https://github.com/Azure/azure-cli.git@e80e204a01bf4f32c17d8f89a955c3dd49eaa7ec#egg=azure_cli_monitor&subdirectory=src/command_modules/azure-cli-monitor
-e git+https://github.com/Azure/azure-cli.git@e80e204a01bf4f32c17d8f89a955c3dd49eaa7ec#egg=azure_cli_network&subdirectory=src/command_modules/azure-cli-network
-e git+https://github.com/Azure/azure-cli.git@e80e204a01bf4f32c17d8f89a955c3dd49eaa7ec#egg=azure_cli_nspkg&subdirectory=src/azure-cli-nspkg
-e git+https://github.com/Azure/azure-cli.git@e80e204a01bf4f32c17d8f89a955c3dd49eaa7ec#egg=azure_cli_profile&subdirectory=src/command_modules/azure-cli-profile
-e git+https://github.com/Azure/azure-cli.git@e80e204a01bf4f32c17d8f89a955c3dd49eaa7ec#egg=azure_cli_rdbms&subdirectory=src/command_modules/azure-cli-rdbms
-e git+https://github.com/Azure/azure-cli.git@e80e204a01bf4f32c17d8f89a955c3dd49eaa7ec#egg=azure_cli_redis&subdirectory=src/command_modules/azure-cli-redis
-e git+https://github.com/Azure/azure-cli.git@e80e204a01bf4f32c17d8f89a955c3dd49eaa7ec#egg=azure_cli_resource&subdirectory=src/command_modules/azure-cli-resource
-e git+https://github.com/Azure/azure-cli.git@e80e204a01bf4f32c17d8f89a955c3dd49eaa7ec#egg=azure_cli_role&subdirectory=src/command_modules/azure-cli-role
-e git+https://github.com/Azure/azure-cli.git@e80e204a01bf4f32c17d8f89a955c3dd49eaa7ec#egg=azure_cli_sf&subdirectory=src/command_modules/azure-cli-sf
-e git+https://github.com/Azure/azure-cli.git@e80e204a01bf4f32c17d8f89a955c3dd49eaa7ec#egg=azure_cli_sql&subdirectory=src/command_modules/azure-cli-sql
-e git+https://github.com/Azure/azure-cli.git@e80e204a01bf4f32c17d8f89a955c3dd49eaa7ec#egg=azure_cli_storage&subdirectory=src/command_modules/azure-cli-storage
-e git+https://github.com/Azure/azure-cli.git@e80e204a01bf4f32c17d8f89a955c3dd49eaa7ec#egg=azure_cli_taskhelp&subdirectory=src/command_modules/azure-cli-taskhelp
-e git+https://github.com/Azure/azure-cli.git@e80e204a01bf4f32c17d8f89a955c3dd49eaa7ec#egg=azure_cli_testsdk&subdirectory=src/azure-cli-testsdk
-e git+https://github.com/Azure/azure-cli.git@e80e204a01bf4f32c17d8f89a955c3dd49eaa7ec#egg=azure_cli_utility_automation&subdirectory=scripts
-e git+https://github.com/Azure/azure-cli.git@e80e204a01bf4f32c17d8f89a955c3dd49eaa7ec#egg=azure_cli_vm&subdirectory=src/command_modules/azure-cli-vm
azure-common==1.1.28
azure-core==1.24.2
azure-datalake-store==0.0.9
azure-devtools==1.2.0
azure-graphrbac==0.30.0rc6
azure-keyvault==0.3.0
azure-mgmt-authorization==0.30.0rc6
azure-mgmt-batch==4.0.0
azure-mgmt-billing==0.1.0
azure-mgmt-cdn==0.30.2
azure-mgmt-cognitiveservices==1.0.0
azure-mgmt-compute==1.0.0rc1
azure-mgmt-consumption==0.1.0
azure-mgmt-containerregistry==0.2.1
azure-mgmt-datalake-analytics==0.1.4
azure-mgmt-datalake-nspkg==3.0.1
azure-mgmt-datalake-store==0.1.4
azure-mgmt-devtestlabs==2.0.0
azure-mgmt-dns==1.0.1
azure-mgmt-documentdb==0.1.3
azure-mgmt-iothub==0.2.2
azure-mgmt-keyvault==0.31.0
azure-mgmt-monitor==0.2.1
azure-mgmt-network==1.0.0rc3
azure-mgmt-nspkg==1.0.0
azure-mgmt-rdbms==0.1.0
azure-mgmt-redis==1.0.0
azure-mgmt-resource==1.1.0rc1
azure-mgmt-sql==0.5.1
azure-mgmt-storage==1.0.0rc1
azure-mgmt-trafficmanager==0.30.0
azure-mgmt-web==0.32.0
azure-monitor==0.3.0
azure-multiapi-storage==0.1.0
azure-nspkg==1.0.0
azure-servicefabric==5.6.130
certifi==2021.5.30
cffi==1.15.1
colorama==0.3.7
ConfigArgParse==1.7
coverage==6.2
cryptography==40.0.2
flake8==5.0.4
futures==3.1.1
humanfriendly==2.4
idna==3.10
importlib-metadata==4.2.0
iniconfig==1.1.1
isodate==0.7.0
isort==5.10.1
jeepney==0.7.1
jmespath==0.10.0
keyring==23.4.1
lazy-object-proxy==1.7.1
mccabe==0.7.0
mock==5.2.0
msrest==0.4.29
msrestazure==0.4.34
multidict==5.2.0
nose==1.3.7
oauthlib==3.2.2
packaging==21.3
paramiko==2.0.2
pbr==6.1.1
pluggy==1.0.0
prompt-toolkit==3.0.36
py==1.11.0
pyasn1==0.5.1
pycodestyle==2.10.0
pycparser==2.21
pydocumentdb==2.3.5
pyflakes==2.5.0
Pygments==2.1.3
PyJWT==2.4.0
pylint==1.7.1
pyOpenSSL==16.2.0
pyparsing==3.1.4
pytest==7.0.1
python-dateutil==2.9.0.post0
pytz==2025.2
PyYAML==3.11
requests==2.9.1
requests-oauthlib==2.0.0
scp==0.15.0
SecretStorage==3.3.3
six==1.10.0
sshtunnel==0.4.0
tabulate==0.7.7
tomli==1.2.3
typed-ast==1.5.5
typing-extensions==4.1.1
urllib3==1.26.20
urllib3-secure-extra==0.1.0
vcrpy==1.10.3
vsts-cd-manager==1.0.2
wcwidth==0.2.13
Whoosh==2.7.4
wrapt==1.16.0
xmltodict==0.14.2
yarl==1.7.2
zipp==3.6.0
| name: azure-cli
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- adal==0.4.3
- applicationinsights==0.10.0
- argcomplete==1.8.0
- astroid==2.11.7
- attrs==22.2.0
- autopep8==2.0.4
- azure-batch==3.0.0
- azure-common==1.1.28
- azure-core==1.24.2
- azure-datalake-store==0.0.9
- azure-devtools==1.2.0
- azure-graphrbac==0.30.0rc6
- azure-keyvault==0.3.0
- azure-mgmt-authorization==0.30.0rc6
- azure-mgmt-batch==4.0.0
- azure-mgmt-billing==0.1.0
- azure-mgmt-cdn==0.30.2
- azure-mgmt-cognitiveservices==1.0.0
- azure-mgmt-compute==1.0.0rc1
- azure-mgmt-consumption==0.1.0
- azure-mgmt-containerregistry==0.2.1
- azure-mgmt-datalake-analytics==0.1.4
- azure-mgmt-datalake-nspkg==3.0.1
- azure-mgmt-datalake-store==0.1.4
- azure-mgmt-devtestlabs==2.0.0
- azure-mgmt-dns==1.0.1
- azure-mgmt-documentdb==0.1.3
- azure-mgmt-iothub==0.2.2
- azure-mgmt-keyvault==0.31.0
- azure-mgmt-monitor==0.2.1
- azure-mgmt-network==1.0.0rc3
- azure-mgmt-nspkg==1.0.0
- azure-mgmt-rdbms==0.1.0
- azure-mgmt-redis==1.0.0
- azure-mgmt-resource==1.1.0rc1
- azure-mgmt-sql==0.5.1
- azure-mgmt-storage==1.0.0rc1
- azure-mgmt-trafficmanager==0.30.0
- azure-mgmt-web==0.32.0
- azure-monitor==0.3.0
- azure-multiapi-storage==0.1.0
- azure-nspkg==1.0.0
- azure-servicefabric==5.6.130
- cffi==1.15.1
- colorama==0.3.7
- configargparse==1.7
- coverage==6.2
- cryptography==40.0.2
- flake8==5.0.4
- futures==3.1.1
- humanfriendly==2.4
- idna==3.10
- importlib-metadata==4.2.0
- iniconfig==1.1.1
- isodate==0.7.0
- isort==5.10.1
- jeepney==0.7.1
- jmespath==0.10.0
- keyring==23.4.1
- lazy-object-proxy==1.7.1
- mccabe==0.7.0
- mock==5.2.0
- msrest==0.4.29
- msrestazure==0.4.34
- multidict==5.2.0
- nose==1.3.7
- oauthlib==3.2.2
- packaging==21.3
- paramiko==2.0.2
- pbr==6.1.1
- pip==9.0.1
- pluggy==1.0.0
- prompt-toolkit==3.0.36
- py==1.11.0
- pyasn1==0.5.1
- pycodestyle==2.10.0
- pycparser==2.21
- pydocumentdb==2.3.5
- pyflakes==2.5.0
- pygments==2.1.3
- pyjwt==2.4.0
- pylint==1.7.1
- pyopenssl==16.2.0
- pyparsing==3.1.4
- pytest==7.0.1
- python-dateutil==2.9.0.post0
- pytz==2025.2
- pyyaml==3.11
- requests==2.9.1
- requests-oauthlib==2.0.0
- scp==0.15.0
- secretstorage==3.3.3
- setuptools==30.4.0
- six==1.10.0
- sshtunnel==0.4.0
- tabulate==0.7.7
- tomli==1.2.3
- typed-ast==1.5.5
- typing-extensions==4.1.1
- urllib3==1.26.20
- urllib3-secure-extra==0.1.0
- vcrpy==1.10.3
- vsts-cd-manager==1.0.2
- wcwidth==0.2.13
- whoosh==2.7.4
- wrapt==1.16.0
- xmltodict==0.14.2
- yarl==1.7.2
- zipp==3.6.0
prefix: /opt/conda/envs/azure-cli
| [
"src/command_modules/azure-cli-resource/tests/test_resource.py::DeploymentTest::test_group_deployment"
]
| []
| [
"src/command_modules/azure-cli-resource/tests/test_custom.py::TestCustom::test_deployment_parameters",
"src/command_modules/azure-cli-resource/tests/test_custom.py::TestCustom::test_extract_parameters",
"src/command_modules/azure-cli-resource/tests/test_custom.py::TestCustom::test_resource_missing_parameters",
"src/command_modules/azure-cli-resource/tests/test_resource.py::ResourceGroupScenarioTest::test_resource_group",
"src/command_modules/azure-cli-resource/tests/test_resource.py::ResourceGroupNoWaitScenarioTest::test_resource_group_no_wait",
"src/command_modules/azure-cli-resource/tests/test_resource.py::ResourceScenarioTest::test_resource_scenario",
"src/command_modules/azure-cli-resource/tests/test_resource.py::ResourceIDScenarioTest::test_resource_id_scenario",
"src/command_modules/azure-cli-resource/tests/test_resource.py::ResourceCreateScenarioTest::test_resource_create",
"src/command_modules/azure-cli-resource/tests/test_resource.py::TagScenarioTest::test_tag_scenario",
"src/command_modules/azure-cli-resource/tests/test_resource.py::ProviderRegistrationTest::test_provider_registration",
"src/command_modules/azure-cli-resource/tests/test_resource.py::ProviderOperationTest::test_provider_operation",
"src/command_modules/azure-cli-resource/tests/test_resource.py::DeploymentnoWaitTest::test_group_deployment_no_wait",
"src/command_modules/azure-cli-resource/tests/test_resource.py::DeploymentThruUriTest::test_group_deployment_thru_uri",
"src/command_modules/azure-cli-resource/tests/test_resource.py::ResourceMoveScenarioTest::test_resource_move",
"src/command_modules/azure-cli-resource/tests/test_resource.py::FeatureScenarioTest::test_feature_list",
"src/command_modules/azure-cli-resource/tests/test_resource.py::PolicyScenarioTest::test_resource_policy",
"src/command_modules/azure-cli-resource/tests/test_resource.py::ManagedAppDefinitionScenarioTest::test_managedappdef",
"src/command_modules/azure-cli-resource/tests/test_resource.py::ManagedAppScenarioTest::test_managedapp",
"src/command_modules/azure-cli-resource/tests/test_resource.py::CrossRGDeploymentScenarioTest::test_crossrg_deployment"
]
| []
| MIT License | 1,377 | [
"azure-cli.pyproj",
"src/command_modules/azure-cli-resource/azure/cli/command_modules/resource/_validators.py",
"src/command_modules/azure-cli-resource/HISTORY.rst"
]
| [
"azure-cli.pyproj",
"src/command_modules/azure-cli-resource/azure/cli/command_modules/resource/_validators.py",
"src/command_modules/azure-cli-resource/HISTORY.rst"
]
|
weecology__retriever-935 | f56fcfd29b9a0a86ffe0fb180ae4d2a3c6b2b841 | 2017-06-16 00:19:12 | 17b7b9352ed687d599dbc5bc4fe44eab3f58bf7e | ethanwhite: Tests are all passing (locally at least). It would be worth testing out a few other datasets using both flat file and database formats to confirm that this didn't break any edge cases.
henrykironde: I think mysql parameter should be ?
henrykironde: Not sure but will do some tests later
ethanwhite: > I think mysql parameter should be ?
Yeah, I thought so originally too, but couldn't get to it work and then looked at the docs again.
> Not sure but will do some tests later
Looks like the tests are failing on CI so no need to worry about this one until I get that sorted out.
ethanwhite: Looks like I broke something on Python 2 (hooray for CI!). I'll take a look when I get the chance and update later.
ethanwhite: Looks like it's just MySQL on Python 2, so some sort of lovely edge case. It fails when trying to insert the first line of `mammal-masses` (to provide an example). First line is the insert statement, second line is the first of the sets of values to be inserted by `executemany`, and then the error message:
```
Creating table mammal_masses.MammalMasses...
INSERT INTO mammal_masses.MammalMasses (continent, status, sporder, family, genus, species, log_mass_g, comb_mass_g, reference) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s)
[u'AF', u'extant', u'Artiodactyla', u'Bovidae', u'Addax', u'nasomaculatus', '4.85', '70000.3', u'60']
INSERT INTO mammal_masses.MammalMasses (continent, status, sporder, family, genus, species, log_mass_g, comb_mass_g, reference) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s)
(1064, u"You have an error in your SQL syntax; check the manual that corresponds to your MySQL server version for the right syntax to use near '4.85'', b''70000.3'', '60'),('AF', 'extant', 'Artiodactyla', 'Bovidae', 'Aepycer' at line 1")
```
The error message has `b''70000.3''` instead of `b"70000.3"`. Not sure if this is the source of the issue or just a display quick. | diff --git a/retriever/engines/csvengine.py b/retriever/engines/csvengine.py
index 303017e..d4a1961 100644
--- a/retriever/engines/csvengine.py
+++ b/retriever/engines/csvengine.py
@@ -53,9 +53,13 @@ class engine(Engine):
"""Write a line to the output file"""
self.output_file.writerows(statement)
+ def executemany(self, statement, values, commit=True):
+ """Write a line to the output file"""
+ self.output_file.writerows(statement)
+
def format_insert_value(self, value, datatype):
"""Formats a value for an insert statement"""
- v = Engine.format_insert_value(self, value, datatype, escape=False, processed=True)
+ v = Engine.format_insert_value(self, value, datatype)
if v == 'null':
return ""
try:
diff --git a/retriever/engines/jsonengine.py b/retriever/engines/jsonengine.py
index 360d172..873c927 100644
--- a/retriever/engines/jsonengine.py
+++ b/retriever/engines/jsonengine.py
@@ -67,9 +67,13 @@ class engine(Engine):
"""Write a line to the output file"""
self.output_file.writelines(statement)
+ def executemany(self, statement, values, commit=True):
+ """Write a line to the output file"""
+ self.output_file.writelines(statement)
+
def format_insert_value(self, value, datatype):
"""Formats a value for an insert statement"""
- v = Engine.format_insert_value(self, value, datatype, escape=False, processed=True)
+ v = Engine.format_insert_value(self, value, datatype)
if v == 'null':
return ""
try:
diff --git a/retriever/engines/msaccess.py b/retriever/engines/msaccess.py
index c133f18..860deb7 100644
--- a/retriever/engines/msaccess.py
+++ b/retriever/engines/msaccess.py
@@ -51,10 +51,6 @@ class engine(Engine):
dropstatement = "DROP %s %s" % (objecttype, objectname)
return dropstatement
- def escape_single_quotes(self, value):
- """Escapes the single quotes in the value"""
- return value.replace("'", "''")
-
def insert_data_from_file(self, filename):
"""Perform a bulk insert."""
self.get_cursor()
diff --git a/retriever/engines/mysql.py b/retriever/engines/mysql.py
index 05c669c..a6c0db7 100644
--- a/retriever/engines/mysql.py
+++ b/retriever/engines/mysql.py
@@ -19,6 +19,7 @@ class engine(Engine):
"bool": "BOOL",
}
max_int = 4294967295
+ placeholder = "%s"
required_opts = [("user",
"Enter your MySQL username",
"root"),
diff --git a/retriever/engines/postgres.py b/retriever/engines/postgres.py
index 1df9bc9..e30e3dd 100644
--- a/retriever/engines/postgres.py
+++ b/retriever/engines/postgres.py
@@ -17,6 +17,7 @@ class engine(Engine):
"bool": "boolean",
}
max_int = 2147483647
+ placeholder = "%s"
required_opts = [("user",
"Enter your PostgreSQL username",
"postgres"),
@@ -63,10 +64,6 @@ class engine(Engine):
statement += " CASCADE;"
return statement.replace(" DATABASE ", " SCHEMA ")
- def escape_single_quotes(self, value):
- """Escapes single quotes in the value"""
- return value.replace("'", "''")
-
def insert_data_from_file(self, filename):
"""Use PostgreSQL's "COPY FROM" statement to perform a bulk insert."""
self.get_cursor()
diff --git a/retriever/engines/sqlite.py b/retriever/engines/sqlite.py
index ca6b4c1..9b8630a 100644
--- a/retriever/engines/sqlite.py
+++ b/retriever/engines/sqlite.py
@@ -17,6 +17,7 @@ class engine(Engine):
"char": "TEXT",
"bool": "INTEGER",
}
+ placeholder = "?"
required_opts = [("file",
"Enter the filename of your SQLite database",
os.path.join(DATA_DIR, "sqlite.db"),
@@ -31,10 +32,6 @@ class engine(Engine):
a separate connection."""
return None
- def escape_single_quotes(self, line):
- """Escapes single quotes in the line"""
- return line.replace("'", "''")
-
def get_bulk_insert_statement(self):
"""Get insert statement for bulk inserts
diff --git a/retriever/engines/xmlengine.py b/retriever/engines/xmlengine.py
index ed90c19..d0f970a 100644
--- a/retriever/engines/xmlengine.py
+++ b/retriever/engines/xmlengine.py
@@ -64,10 +64,14 @@ class engine(Engine):
"""Write a line to the output file"""
self.output_file.writelines(statement)
+ def executemany(self, statement, values, commit=True):
+ """Write a line to the output file"""
+ self.output_file.writelines(statement)
+
def format_insert_value(self, value, datatype):
"""Formats a value for an insert statement"""
- v = Engine.format_insert_value(self, value, datatype, escape=False, processed=True)
- if v == 'null':
+ v = Engine.format_insert_value(self, value, datatype)
+ if v == None:
return ""
try:
if len(v) > 1 and v[0] == v[-1] == "'":
diff --git a/retriever/lib/datapackage.py b/retriever/lib/datapackage.py
index de40c43..dfff62d 100644
--- a/retriever/lib/datapackage.py
+++ b/retriever/lib/datapackage.py
@@ -97,26 +97,6 @@ def get_contains_pk(dialect):
dialect['contains_pk'] = val
-def get_escape_single_quotes(dialect):
- """Set escape_single_quotes property"""
- val = clean_input("escape_single_quotes (bool = True/False) (press return to skip): ",
- ignore_empty=True, dtype=bool)
- if val == "" or val == []:
- # return and dont add key to dialect dict if empty val
- return
- dialect['escape_single_quotes'] = val
-
-
-def get_escape_double_quotes(dialect):
- """Set escape_double_quotes property"""
- val = clean_input("escape_double_quotes (bool = True/False) (press return to skip): ",
- ignore_empty=True, dtype=bool)
- if val == "" or val == []:
- # return and dont add key to dialect dict if empty val
- return
- dialect['escape_double_quotes'] = val
-
-
def get_fixed_width(dialect):
"""Set fixed_width property"""
val = clean_input("fixed_width (bool = True/False) (press return to skip): ",
@@ -195,8 +175,6 @@ def create_json():
get_delimiter(table['dialect'])
get_do_not_bulk_insert(table['dialect'])
get_contains_pk(table['dialect'])
- get_escape_single_quotes(table['dialect'])
- get_escape_double_quotes(table['dialect'])
get_fixed_width(table['dialect'])
get_header_rows(table['dialect'])
diff --git a/retriever/lib/engine.py b/retriever/lib/engine.py
index 117fc9f..5cded86 100644
--- a/retriever/lib/engine.py
+++ b/retriever/lib/engine.py
@@ -112,15 +112,15 @@ class Engine(object):
if self.debug:
print(cleanvalues)
raise
- multiple_values = []
try:
- self.execute(insert_stmt, commit=False)
+ self.executemany(insert_stmt, multiple_values, commit=False)
prompt = "Progress: " + str(count_iter) + " / " + str(real_line_length) + " rows inserted into " + self.table_name() + " totaling " + str(total) + ":"
sys.stdout.write(prompt + "\b" * len(prompt))
sys.stdout.flush()
except:
print(insert_stmt)
raise
+ multiple_values = []
else:
multiple_values.append(cleanvalues)
count_iter += 1
@@ -470,20 +470,18 @@ class Engine(object):
dropstatement = "DROP %s IF EXISTS %s" % (objecttype, objectname)
return dropstatement
- def escape_single_quotes(self, value):
- """Escapes single quotes in the value"""
- return value.replace("'", "\\'")
-
- def escape_double_quotes(self, value):
- """Escapes double quotes in the value"""
- return value.replace('"', '\\"')
-
def execute(self, statement, commit=True):
"""Executes the given statement"""
self.cursor.execute(statement)
if commit:
self.connection.commit()
+ def executemany(self, statement, values, commit=True):
+ """Executes the given statement with multiple values"""
+ self.cursor.executemany(statement, values)
+ if commit:
+ self.connection.commit()
+
def exists(self, script):
"""Checks to see if the given table exists"""
return all([self.table_exists(
@@ -516,7 +514,7 @@ class Engine(object):
"""Returns the full path of a file in the archive directory."""
return os.path.join(self.format_data_dir(), filename)
- def format_insert_value(self, value, datatype, escape=True, processed=False):
+ def format_insert_value(self, value, datatype):
"""Format a value for an insert statement based on data type
Different data types need to be formated differently to be properly
@@ -527,14 +525,7 @@ class Engine(object):
2. Harmonizing null indicators
3. Cleaning up badly formatted integers
4. Obtaining consistent float representations of decimals
-
- The optional `escape` argument controls whether additional quotes in
- strings are escaped, as needed for SQL database management systems
- (escape=True), or not escaped, as needed for flat file based engines
- (escape=False).
-
- The optional processed argument indicates that the engine has it's own
- escaping mechanism. i.e the csv engine which uses its own dialect"""
+ """
datatype = datatype.split('-')[-1]
strvalue = str(value).strip()
@@ -544,41 +535,32 @@ class Engine(object):
strvalue = strvalue[1:-1]
missing_values = ("null", "none")
if strvalue.lower() in missing_values:
- return "null"
+ return None
elif datatype in ("int", "bigint", "bool"):
if strvalue:
intvalue = strvalue.split('.')[0]
if intvalue:
return int(intvalue)
else:
- return "null"
+ return None
else:
- return "null"
+ return None
elif datatype in ("double", "decimal"):
if strvalue.strip():
try:
decimals = float(str(strvalue))
- return str(decimals)
+ return decimals
except:
- return "null"
+ return None
else:
- return "null"
+ return None
elif datatype == "char":
if strvalue.lower() in missing_values:
- return "null"
- if escape:
- # automatically escape quotes in string fields
- if hasattr(self.table, "escape_double_quotes") and self.table.escape_double_quotes:
- strvalue = self.escape_double_quotes(strvalue)
- if hasattr(self.table, "escape_single_quotes") and self.table.escape_single_quotes:
- strvalue = self.escape_single_quotes(strvalue)
- return "'" + strvalue + "'"
- if processed:
- return strvalue
+ return None
else:
- return "'" + strvalue + "'"
+ return strvalue
else:
- return "null"
+ return None
def get_cursor(self):
"""Gets the db cursor."""
@@ -645,15 +627,19 @@ class Engine(object):
columns = self.table.get_insert_columns()
types = self.table.get_column_datatypes()
columncount = len(self.table.get_insert_columns(join=False, create=False))
- insert_stmt = "INSERT INTO {} ({}) VALUES ".format(self.table_name(), columns)
for row in values:
row_length = len(row)
# Add None with appropriate value type for empty cells
for i in range(columncount - row_length):
row.append(self.format_insert_value(None, types[row_length + i]))
- insert_stmt += " (" + ", ".join([str(val) for val in row]) + "), "
- insert_stmt = insert_stmt.rstrip(", ") + ";"
+ insert_stmt = "INSERT INTO " + self.table_name()
+ insert_stmt += " (" + columns + ")"
+ insert_stmt += " VALUES ("
+ for i in range(0, columncount):
+ insert_stmt += "{}, ".format(self.placeholder)
+ insert_stmt = insert_stmt.rstrip(", ") + ")"
+
if self.debug:
print(insert_stmt)
return insert_stmt
diff --git a/retriever/lib/table.py b/retriever/lib/table.py
index 3b28911..25ff7c6 100644
--- a/retriever/lib/table.py
+++ b/retriever/lib/table.py
@@ -26,8 +26,6 @@ class Table(object):
self.record_id = 0
self.columns = []
self.replace_columns = []
- self.escape_single_quotes = True
- self.escape_double_quotes = True
self.cleaned_columns = False
for key, item in list(kwargs.items()):
setattr(self, key, item[0] if isinstance(item, tuple) else item)
diff --git a/scripts/mt_st_helens_veg.json b/scripts/mt_st_helens_veg.json
index 8fbe5f5..33e03ba 100644
--- a/scripts/mt_st_helens_veg.json
+++ b/scripts/mt_st_helens_veg.json
@@ -143,8 +143,7 @@
},
{
"dialect": {
- "do_not_bulk_insert": "True",
- "escape_single_quotes": "True"
+ "do_not_bulk_insert": "True"
},
"name": "species",
"schema": {},
@@ -166,5 +165,5 @@
"species_plot_year": "https://ndownloader.figshare.com/files/5613783",
"structure_plot_year": "https://ndownloader.figshare.com/files/5613786"
},
- "version": "1.2.0"
+ "version": "1.2.1"
}
diff --git a/version.txt b/version.txt
index b3e3451..492f2bf 100644
--- a/version.txt
+++ b/version.txt
@@ -46,7 +46,7 @@ mapped_plant_quads_ks.json,1.2.0
mapped_plant_quads_mt.json,1.2.0
marine_recruitment_data.json,1.0.0
mediter_basin_plant_traits.json,1.0.0
-mt_st_helens_veg.json,1.2.0
+mt_st_helens_veg.json,1.2.1
nematode_traits.json,1.0.0
ngreatplains-flowering-dates.json,1.0.0
npn.py,2.1.0
| SQL INSERT statements use custom quoting rather than parameter binding
Any reason you can't use parameter binding for binding values to SQL statements such as `INSERT (...) VALUES (...)`? For most SQL engines, it's both faster and safer than using custom quoting and direct interpolation of values. It's certainly a lot less code (because the SQL engine does most of the type conversion and quoting).
| weecology/retriever | diff --git a/test/test_retriever.py b/test/test_retriever.py
index 0af9764..f155d9a 100644
--- a/test/test_retriever.py
+++ b/test/test_retriever.py
@@ -127,16 +127,6 @@ def test_drop_statement():
assert test_engine.drop_statement('TABLE', 'tablename') == "DROP TABLE IF EXISTS tablename"
-def test_escape_single_quotes():
- """Test escaping of single quotes"""
- assert test_engine.escape_single_quotes("1,2,3,'a'") == "1,2,3,\\'a\\'"
-
-
-def test_escape_double_quotes():
- """Test escaping of double quotes"""
- assert test_engine.escape_double_quotes('"a",1,2,3') == '\\"a\\",1,2,3'
-
-
def test_extract_values_fixed_width():
"""Test extraction of values from line of fixed width data"""
test_engine.table.fixed_width = [5, 2, 2, 3, 4]
@@ -181,18 +171,18 @@ def test_format_insert_value_int():
def test_format_insert_value_double():
"""Test formatting of values for insert statements"""
- assert test_engine.format_insert_value(26.22, 'double') == '26.22'
+ assert test_engine.format_insert_value(26.22, 'double') == 26.22
def test_format_insert_value_string_simple():
"""Test formatting of values for insert statements"""
- assert test_engine.format_insert_value('simple text', 'char') == "'simple text'"
+ assert test_engine.format_insert_value('simple text', 'char') == "simple text"
def test_format_insert_value_string_complex():
"""Test formatting of values for insert statements"""
assert test_engine.format_insert_value('my notes: "have extra, stuff"',
- 'char') == '\'my notes: \\"have extra, stuff\\"\''
+ 'char') == 'my notes: "have extra, stuff"'
def test_getmd5_lines():
@@ -407,4 +397,3 @@ def test_add_schema():
add_schema(table_dict, table)
assert(table_dict == result)
-
\ No newline at end of file
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 2,
"test_score": 1
},
"num_modified_files": 12
} | 1.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-xdist",
"pytest-mock",
"pytest-asyncio"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.6",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.13
argcomplete==3.1.2
attrs==22.2.0
Babel==2.11.0
certifi==2021.5.30
charset-normalizer==2.0.12
coverage==6.2
docutils==0.18.1
execnet==1.9.0
future==1.0.0
idna==3.10
imagesize==1.4.1
importlib-metadata==4.8.3
iniconfig==1.1.1
Jinja2==3.0.3
MarkupSafe==2.0.1
numpydoc==1.1.0
packaging==21.3
pluggy==1.0.0
pockets==0.9.1
psycopg2==2.7.7
py==1.11.0
Pygments==2.14.0
PyMySQL==1.0.2
pyparsing==3.1.4
pytest==7.0.1
pytest-asyncio==0.16.0
pytest-cov==4.0.0
pytest-mock==3.6.1
pytest-xdist==3.0.2
pytz==2025.2
requests==2.27.1
-e git+https://github.com/weecology/retriever.git@f56fcfd29b9a0a86ffe0fb180ae4d2a3c6b2b841#egg=retriever
six==1.17.0
snowballstemmer==2.2.0
Sphinx==5.3.0
sphinx-py3doc-enhanced-theme==2.4.0
sphinxcontrib-applehelp==1.0.2
sphinxcontrib-devhelp==1.0.2
sphinxcontrib-htmlhelp==2.0.0
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-napoleon==0.7
sphinxcontrib-qthelp==1.0.3
sphinxcontrib-serializinghtml==1.1.5
tomli==1.2.3
typing_extensions==4.1.1
urllib3==1.26.20
xlrd==2.0.1
zipp==3.6.0
| name: retriever
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.13
- argcomplete==3.1.2
- attrs==22.2.0
- babel==2.11.0
- charset-normalizer==2.0.12
- coverage==6.2
- docutils==0.18.1
- execnet==1.9.0
- future==1.0.0
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- jinja2==3.0.3
- markupsafe==2.0.1
- numpydoc==1.1.0
- packaging==21.3
- pluggy==1.0.0
- pockets==0.9.1
- psycopg2==2.7.7
- py==1.11.0
- pygments==2.14.0
- pymysql==1.0.2
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-asyncio==0.16.0
- pytest-cov==4.0.0
- pytest-mock==3.6.1
- pytest-xdist==3.0.2
- pytz==2025.2
- requests==2.27.1
- six==1.17.0
- snowballstemmer==2.2.0
- sphinx==5.3.0
- sphinx-py3doc-enhanced-theme==2.4.0
- sphinxcontrib-applehelp==1.0.2
- sphinxcontrib-devhelp==1.0.2
- sphinxcontrib-htmlhelp==2.0.0
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-napoleon==0.7
- sphinxcontrib-qthelp==1.0.3
- sphinxcontrib-serializinghtml==1.1.5
- tomli==1.2.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- xlrd==2.0.1
- zipp==3.6.0
prefix: /opt/conda/envs/retriever
| [
"test/test_retriever.py::test_format_insert_value_double",
"test/test_retriever.py::test_format_insert_value_string_simple",
"test/test_retriever.py::test_format_insert_value_string_complex"
]
| []
| [
"test/test_retriever.py::test_auto_get_columns",
"test/test_retriever.py::test_auto_get_datatypes",
"test/test_retriever.py::test_auto_get_columns_extra_whitespace",
"test/test_retriever.py::test_auto_get_columns_cleanup",
"test/test_retriever.py::test_auto_get_delimiter_comma",
"test/test_retriever.py::test_auto_get_delimiter_tab",
"test/test_retriever.py::test_auto_get_delimiter_semicolon",
"test/test_retriever.py::test_correct_invalid_value_string",
"test/test_retriever.py::test_correct_invalid_value_number",
"test/test_retriever.py::test_correct_invalid_value_exception",
"test/test_retriever.py::test_create_db_statement",
"test/test_retriever.py::test_database_name",
"test/test_retriever.py::test_drop_statement",
"test/test_retriever.py::test_extract_values_fixed_width",
"test/test_retriever.py::test_find_file_absent",
"test/test_retriever.py::test_find_file_present",
"test/test_retriever.py::test_format_data_dir",
"test/test_retriever.py::test_format_filename",
"test/test_retriever.py::test_format_insert_value_int",
"test/test_retriever.py::test_getmd5_lines",
"test/test_retriever.py::test_getmd5_path",
"test/test_retriever.py::test_json2csv",
"test/test_retriever.py::test_xml2csv",
"test/test_retriever.py::test_sort_file",
"test/test_retriever.py::test_sort_csv",
"test/test_retriever.py::test_is_empty_null_string",
"test/test_retriever.py::test_is_empty_empty_list",
"test/test_retriever.py::test_is_empty_not_null_string",
"test/test_retriever.py::test_is_empty_not_empty_list",
"test/test_retriever.py::test_clean_input_empty_input_ignore_empty",
"test/test_retriever.py::test_clean_input_empty_input_not_ignore_empty",
"test/test_retriever.py::test_clean_input_string_input",
"test/test_retriever.py::test_clean_input_empty_list_ignore_empty",
"test/test_retriever.py::test_clean_input_empty_list_not_ignore_empty",
"test/test_retriever.py::test_clean_input_not_empty_list",
"test/test_retriever.py::test_clean_input_bool",
"test/test_retriever.py::test_clean_input_not_bool",
"test/test_retriever.py::test_add_dialect",
"test/test_retriever.py::test_add_schema"
]
| []
| MIT License | 1,378 | [
"retriever/engines/xmlengine.py",
"retriever/engines/postgres.py",
"retriever/engines/msaccess.py",
"retriever/lib/datapackage.py",
"retriever/lib/engine.py",
"scripts/mt_st_helens_veg.json",
"retriever/engines/sqlite.py",
"retriever/lib/table.py",
"retriever/engines/jsonengine.py",
"version.txt",
"retriever/engines/csvengine.py",
"retriever/engines/mysql.py"
]
| [
"retriever/engines/xmlengine.py",
"retriever/engines/postgres.py",
"retriever/engines/msaccess.py",
"retriever/lib/datapackage.py",
"retriever/lib/engine.py",
"scripts/mt_st_helens_veg.json",
"retriever/engines/sqlite.py",
"retriever/lib/table.py",
"retriever/engines/jsonengine.py",
"version.txt",
"retriever/engines/csvengine.py",
"retriever/engines/mysql.py"
]
|
tBuLi__symfit-132 | 6c49b46ae7377c3e639461a9970e18b49d263852 | 2017-06-16 09:06:09 | 6c49b46ae7377c3e639461a9970e18b49d263852 | diff --git a/symfit/core/argument.py b/symfit/core/argument.py
index 8ef7024..faa0d8e 100644
--- a/symfit/core/argument.py
+++ b/symfit/core/argument.py
@@ -88,8 +88,13 @@ class Parameter(Argument):
self.value = value
self.fixed = fixed
if not self.fixed:
- self.min = min
- self.max = max
+ if min is not None and max is not None and min > max:
+ print(min, max)
+ raise ValueError(
+ 'The value of `min` should be less than or equal to the value of `max`.')
+ else:
+ self.min = min
+ self.max = max
class Variable(Argument):
diff --git a/symfit/core/fit.py b/symfit/core/fit.py
index ddc73ca..c0f59c0 100644
--- a/symfit/core/fit.py
+++ b/symfit/core/fit.py
@@ -405,7 +405,16 @@ class BaseModel(Mapping):
"""
:return: List of tuples of all bounds on parameters.
"""
- return [(np.nextafter(p.value, 0), p.value) if p.fixed else (p.min, p.max) for p in self.params]
+ bounds = []
+ for p in self.params:
+ if p.fixed:
+ if p.value >= 0.0:
+ bounds.append([np.nextafter(p.value, 0), p.value])
+ else:
+ bounds.append([p.value, np.nextafter(p.value, 0)])
+ else:
+ bounds.append([p.min, p.max])
+ return bounds
@property
def shared_parameters(self):
@@ -1365,11 +1374,10 @@ class Minimize(BaseFit):
else:
return np.array(ans)
- def execute(self, method='SLSQP', *args, **kwargs):
+ def execute(self, *args, **kwargs):
ans = minimize(
self.error_func,
self.initial_guesses,
- method=method,
args=(self.independent_data, self.dependent_data, self.sigma_data,),
bounds=self.model.bounds,
constraints=self.scipy_constraints,
| Setting a negative fixed parameter
I am needing to set a negative fixed parameter for a model. This may be an issue or user error. I started the following stack exchange question.
https://stackoverflow.com/questions/44562190/setting-a-negative-fixed-parameter-in-the-symfit-python-package
Sorry for leaving a comment on Git, but both Stack Exchange and Git removed the ability to send comments so I figured this would be the best way to alert those who work on the project as Stack Exchange does not have a Symfit tag.
| tBuLi/symfit | diff --git a/tests/test_minimize.py b/tests/test_minimize.py
index 9961702..e64884c 100644
--- a/tests/test_minimize.py
+++ b/tests/test_minimize.py
@@ -57,7 +57,7 @@ class TestMinimize(unittest.TestCase):
res = minimize(func, [-1.0,1.0], args=(-1.0,), jac=func_deriv,
method='SLSQP', options={'disp': False})
fit = Maximize(model)
- fit_result = fit.execute()
+ fit_result = fit.execute(method='SLSQP')
self.assertAlmostEqual(fit_result.value(x), res.x[0])
self.assertAlmostEqual(fit_result.value(y), res.x[1])
@@ -69,7 +69,7 @@ class TestMinimize(unittest.TestCase):
fit = Maximize(model, constraints=constraints)
self.assertEqual(fit.constraints[0].constraint_type, Ge)
self.assertEqual(fit.constraints[1].constraint_type, Eq)
- fit_result = fit.execute()
+ fit_result = fit.execute(method='SLSQP')
self.assertAlmostEqual(fit_result.value(x), res.x[0])
self.assertAlmostEqual(fit_result.value(y), res.x[1])
diff --git a/tests/test_model.py b/tests/test_model.py
index df42195..526d432 100644
--- a/tests/test_model.py
+++ b/tests/test_model.py
@@ -8,7 +8,7 @@ import sympy
import numpy as np
from scipy.optimize import curve_fit
-from symfit import Variable, Parameter, Fit, FitResults, LinearLeastSquares, parameters, variables, NumericalLeastSquares, NonLinearLeastSquares, Model, TaylorModel
+from symfit import Variable, Parameter, Fit, FitResults, LinearLeastSquares, parameters, variables, NumericalLeastSquares, NonLinearLeastSquares, Model, TaylorModel, exp
from symfit.core.support import seperate_symbols, sympy_to_py
from symfit.distributions import Gaussian
@@ -45,6 +45,30 @@ class TestModel(unittest.TestCase):
self.assertEqual(model.dependent_vars, list(model.keys()))
+ def test_bounds(self):
+ """
+ The bounds of an object should always be such that lower < upper.
+ :return:
+ """
+ a = Parameter(value= - 2.482092e-01, fixed=True)
+ # a = Parameter()
+ try:
+ b = Parameter(value=5.0, min=6.0, max=4.0)
+ except ValueError:
+ b = Parameter(value=5.0, min=4.0, max=6.0)
+ c = Parameter(value=2.219756e+02, fixed=True)
+ x = Variable()
+
+ # build the model
+ model = Model(a + b * (1 - exp(-c / x)))
+ print(model.bounds)
+ for bounds in model.bounds:
+ if None in bounds:
+ pass
+ else:
+ # Both are set
+ min, max = bounds
+ self.assertGreaterEqual(max, min)
if __name__ == '__main__':
unittest.main()
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 2,
"test_score": 3
},
"num_modified_files": 2
} | 0.3 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[contrib]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.6",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
cycler==0.11.0
importlib-metadata==4.8.3
iniconfig==1.1.1
kiwisolver==1.3.1
matplotlib==3.3.4
mpmath==1.3.0
numpy==1.19.5
packaging==21.3
Pillow==8.4.0
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
python-dateutil==2.9.0.post0
scipy==1.5.4
six==1.17.0
-e git+https://github.com/tBuLi/symfit.git@6c49b46ae7377c3e639461a9970e18b49d263852#egg=symfit
sympy==1.9
tomli==1.2.3
typing_extensions==4.1.1
zipp==3.6.0
| name: symfit
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- cycler==0.11.0
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- kiwisolver==1.3.1
- matplotlib==3.3.4
- mpmath==1.3.0
- numpy==1.19.5
- packaging==21.3
- pillow==8.4.0
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- python-dateutil==2.9.0.post0
- scipy==1.5.4
- six==1.17.0
- sympy==1.9
- tomli==1.2.3
- typing-extensions==4.1.1
- zipp==3.6.0
prefix: /opt/conda/envs/symfit
| [
"tests/test_model.py::TestModel::test_bounds"
]
| [
"tests/test_minimize.py::TestMinimize::test_constraint_types",
"tests/test_minimize.py::TestMinimize::test_minimize"
]
| [
"tests/test_model.py::TestModel::test_model_as_dict",
"tests/test_model.py::TestModel::test_order"
]
| []
| MIT License | 1,379 | [
"symfit/core/argument.py",
"symfit/core/fit.py"
]
| [
"symfit/core/argument.py",
"symfit/core/fit.py"
]
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.