query (string, lengths 9–9.05k) | document (string, lengths 10–222k) | metadata (dict) | negatives (sequence, length 30) | negative_scores (sequence, length 30) | document_score (string, lengths 4–10) | document_rank (string, 2 classes) |
---|---|---|---|---|---|---|
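Each row below is a (query, document, negatives) triplet for contrastive retrieval training, per the `objective` metadata. A hypothetical loading sketch (the file name is a placeholder; the `datasets` API call itself is standard):

```python
# Hypothetical loading sketch -- "triplets.jsonl" is a placeholder file name.
from datasets import load_dataset

ds = load_dataset("json", data_files="triplets.jsonl", split="train")
row = ds[0]
# Every row pairs one query with one positive document and 30 scored negatives.
assert len(row["negatives"]) == 30 and len(row["negative_scores"]) == 30
print(row["query"][:60], "| score:", row["document_score"], "| rank:", row["document_rank"])
```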
Deletes the enablement for given skillId/stage and customerId (retrieved from Auth token). | def delete_skill_enablement_v1(self, skill_id, stage, **kwargs):
    # type: (str, str, **Any) -> Union[ApiResponse, object, StandardizedError_f5106a89, BadRequestError_f854b05]
    operation_name = "delete_skill_enablement_v1"
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'skill_id' is set
    if ('skill_id' not in params) or (params['skill_id'] is None):
        raise ValueError(
            "Missing the required parameter `skill_id` when calling `" + operation_name + "`")
    # verify the required parameter 'stage' is set
    if ('stage' not in params) or (params['stage'] is None):
        raise ValueError(
            "Missing the required parameter `stage` when calling `" + operation_name + "`")
    resource_path = '/v1/skills/{skillId}/stages/{stage}/enablement'
    resource_path = resource_path.replace('{format}', 'json')
    path_params = {} # type: Dict
    if 'skill_id' in params:
        path_params['skillId'] = params['skill_id']
    if 'stage' in params:
        path_params['stage'] = params['stage']
    query_params = [] # type: List
    header_params = [] # type: List
    body_params = None
    header_params.append(('Content-type', 'application/json'))
    header_params.append(('User-Agent', self.user_agent))
    # Response Type
    full_response = False
    if 'full_response' in params:
        full_response = params['full_response']
    # Authentication setting
    access_token = self._lwa_service_client.get_access_token_from_refresh_token()
    authorization_value = "Bearer " + access_token
    header_params.append(('Authorization', authorization_value))
    error_definitions = [] # type: List
    error_definitions.append(ServiceClientResponse(response_type=None, status_code=204, message="No Content; Confirms that enablement is successfully deleted."))
    error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.bad_request_error.BadRequestError", status_code=400, message="Server cannot process the request due to a client error."))
    error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=401, message="The auth token is invalid/expired or doesn't have access to the resource."))
    error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.bad_request_error.BadRequestError", status_code=403, message="The operation being requested is not allowed."))
    error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=404, message="The resource being requested is not found."))
    error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=429, message="Exceed the permitted request limit. Throttling criteria includes total requests, per API, ClientId, and CustomerId."))
    error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=500, message="Internal Server Error."))
    error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=503, message="Service Unavailable."))
    api_response = self.invoke(
        method="DELETE",
        endpoint=self._api_endpoint,
        path=resource_path,
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        response_definitions=error_definitions,
        response_type=None)
    if full_response:
        return api_response
    return None | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def delete(self, customerguid, jobguid=\"\", executionparams=None):",
"def delete(self, stage_id):\n was_killed = self.staging_service.kill_process_of_stage_order(stage_id)\n if was_killed:\n self.set_status(NO_CONTENT)\n else:\n self.set_status(INTERNAL_SERVER_ERROR,\n reason=\"Could not kill stage order with id: {}, either it wasn't in a state \"\n \"which allows it to be killed, or the pid associated with the stage order \"\n \"did not allow itself to be killed. Consult the server logs for an exact \"\n \"reason.\")",
"def delete_preset(self, preset_id, REQUEST=None):\r\n\r\n raise NotImplementedError",
"def delete(self, context, id_):\n try:\n db_resource_mgr_data = self.db_api.get_resource_manager(\n context, id_)\n act_res_data = self._get_resources(context,\n db_resource_mgr_data)\n for act in act_res_data:\n if act[\"state\"] in [eon_const.EON_RESOURCE_STATE_ACTIVATED,\n eon_const.EON_RESOURCE_STATE_PROVISIONED]:\n msg = _(\"Found resources in activated or provisioned \"\n \"state\")\n raise exception.DeleteException(err=msg)\n\n _resource_data = _make_response(\n db_resource_mgr_data)\n LOG.info(\"Details for the ID %s is: %s\" % (\n id_, logging.mask_password(_resource_data)))\n driver_obj = driver.load_resource_mgr_driver(\n db_resource_mgr_data['type'])\n driver_obj.validate_delete(db_resource_mgr_data)\n\n driver_obj.delete_vc_pass_through(context, db_resource_mgr_data)\n self.db_api.delete_resource_manager(context, id_)\n except exception.NotFound as e:\n msg = \"Failed to delete resource manager %s. Error: %s\" % (\n _resource_data.get('name'), e.message)\n LOG.exception(msg)\n raise e",
"def delete(self, request, app_id, addon_name):\n addon = Addon.objects.get(app__app_id=app_id, display_name=addon_name)\n provider = get_provider_from_provider_name(addon.provider_name)\n result = provider.deprovision(addon.provider_uuid)\n manager = StateMachineManager()\n with manager.transition(addon.id, AddonEvent.deprovision_success):\n pass\n manager.start_task(addon.id)\n return self.respond({'message': result['message']})",
"def deleteCustomer(self, **params):\n self.__requireParams(params, ['id'])\n return self.__req('delete_customer', params)",
"def delete(parameters, session):\n from Modules.Classes.ExperimentalScenario import ExperimentalScenario\n # Received --> [id_exeriment]\n # Retrieve all scenarios associated with target experiment\n exp_sc = session.query(ExperimentalScenario).filter(ExperimentalScenario.experiment_id == parameters[0]).all()\n for item in exp_sc:\n # Retrieve all ExperimentalScenarioPattern association for current experimental scenario\n exp_scenarios_pat = session.query(ExperimentalScenarioPattern).filter(and_(\n ExperimentalScenarioPattern.experimental_scenario_id == item.id,\n ExperimentalScenarioPattern.pattern_type == 2)).all()\n for item2 in exp_scenarios_pat:\n session.delete(item2)\n session.commit()\n session.close()\n msg_rspt = Message(action=2, comment='Register deleted successfully')\n return msg_rspt",
"def delete(self, **kwargs):\n self._plans.delete(**kwargs)",
"def delete_customer(customer_id):\n try:\n with database.transaction():\n customer = Customer.get(Customer.customer_id == customer_id)\n customer.delete_instance()\n customer.save()\n except Exception as unknown_error:\n print(f'Error. Could not delete customer {customer_id}. {unknown_error}')",
"def salesforce_delete(self, obj_name, obj_id):\n self.builtin.log(\"Deleting {} with Id {}\".format(obj_name, obj_id))\n obj_class = getattr(self.cumulusci.sf, obj_name)\n obj_class.delete(obj_id)\n self.remove_session_record(obj_name, obj_id)",
"def delete(child_session, stack_name, AdministratorAccountId, failed_accounts, account):\n try:\n\n # TODO Check whether this role exists in a cfn stack\n # TODO if the role exists, check if the trust relationship is correct\n cfn = child_session.client('cloudformation')\n response = cfn.delete_stack(StackName=stack_name)\n response = cfn.delete_stack(StackName='cfn-stack-set-role')\n if Iam_helpers.check_iam_role_exists(child_session,\n 'AWSCloudFormationStackSetExecutionRole'):\n print(f\"Found AWSCloudFormationStackSetExecutionRole\")\n except Exception as e:\n print(e)\n failed_accounts.append(account)\n\n return",
"def delete_customer(customer_id):\n try:\n remove_user = cm.Customers.get(cm.Customers.customer_id == customer_id)\n remove_user.delete_instance()\n except cm.DoesNotExist:\n logging.info(\"Customer successfully deleted from database.\")",
"def delete_demo(exploration_id):\n exploration = get_exploration_by_id(exploration_id, strict=False)\n if not exploration:\n # This exploration does not exist, so it cannot be deleted.\n logging.info('Exploration with id %s was not deleted, because it '\n 'does not exist.' % exploration_id)\n else:\n delete_exploration(ADMIN_COMMITTER_ID, exploration_id)",
"def delete_customer(customer_id):\n del_query = Customer.get(Customer.customer_id == customer_id)\n return bool(del_query.delete_instance())",
"def delete_stage(stage):\n folder = stage_folder(stage)\n shutil.rmtree(folder) # delete old\n ensure_path(folder) # create new",
"def delete(customer):\n if isinstance(customer, resources.Customer):\n customer = customer.id\n\n http_client = HttpClient()\n http_client.delete(routes.url(routes.CUSTOMER_RESOURCE, resource_id=customer))",
"def delete_customer(customer_id):\n found = search_customer(customer_id)\n if found is None:\n LOGGER.warning('Could not find customer for delete with id %d.',\n customer_id)\n else:\n found.delete_instance()",
"def delete(cls, aws_cloud_account_id: str):\n\t\tpass",
"def set_skill_enablement_v1(self, skill_id, stage, **kwargs):\n # type: (str, str, **Any) -> Union[ApiResponse, object, StandardizedError_f5106a89, BadRequestError_f854b05]\n operation_name = \"set_skill_enablement_v1\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'skill_id' is set\n if ('skill_id' not in params) or (params['skill_id'] is None):\n raise ValueError(\n \"Missing the required parameter `skill_id` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'stage' is set\n if ('stage' not in params) or (params['stage'] is None):\n raise ValueError(\n \"Missing the required parameter `stage` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v1/skills/{skillId}/stages/{stage}/enablement'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'skill_id' in params:\n path_params['skillId'] = params['skill_id']\n if 'stage' in params:\n path_params['stage'] = params['stage']\n\n query_params = [] # type: List\n\n header_params = [] # type: List\n\n body_params = None\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=None, status_code=204, message=\"No Content; Confirms that enablement is successfully created/updated.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=400, message=\"Server cannot process the request due to a client error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=401, message=\"The auth token is invalid/expired or doesn't have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=403, message=\"The operation being requested is not allowed.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=404, message=\"The resource being requested is not found.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=409, message=\"The request could not be completed due to a conflict with the current state of the target resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=429, message=\"Exceed the permitted request limit. 
Throttling criteria includes total requests, per API, ClientId, and CustomerId.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=500, message=\"Internal Server Error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=503, message=\"Service Unavailable.\"))\n\n api_response = self.invoke(\n method=\"PUT\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=None)\n\n if full_response:\n return api_response\n \n return None",
"def delete_customer(connection, customer_id):\n connection.command_path = 'customer/{0}'.format(customer_id)\n extra_headers = {connection.header_key: connection.token}\n url = connection.build_url()\n verify_ssl = connection.verify_ssl\n res = requests.delete(url, headers=extra_headers, verify=verify_ssl)\n if res.status_code == 204:\n return True\n raise CustomerDeletionException(res.content)",
"def customer_delete(request, slug,id):\n \n company =get_object_or_404(Company,slug=slug)\n edit = validate_user_company_access_or_redirect(request,company)\n\n if request.method == 'POST':\n return HttpResponseRedirect('/company/'+str(slug))\n else: \n #verifies if the company exists if not returns a 404 page\n customer_reference = get_object_or_404(Customer, id=id,company=company)\n\n #deletes the view and redirects to the page.\n customer_reference.delete()\n return HttpResponseRedirect('/company/'+str(slug))",
"def test_teams_remove_customer_from_workgroup_v1(self):\n pass",
"def delete_from_provider(self, builder, provider, credentials, target, parameters):",
"def delete(self, **params):\n return self._api.delete_customer(self.id, **params)",
"def delete_customer(customer_id):\n try:\n with database.transaction():\n customer = Customer.get(Customer.customer_id == customer_id)\n customer.delete_instance()\n customer.save()\n logger.info(f\"Successfully deleted customer {customer_id}\")\n except Exception as unknown_error:\n logger.error(\n f\"Error. Failed to delete customer {customer_id}. {unknown_error}\"\n )\n print(\n f'Error. Could not delete customer {customer_id}. {unknown_error}'\n )",
"def delete_customer(cls, api, id, **params):\n return api.delete_customer(id, **params)",
"def delete_customer(customer_id):\n LOGGER.info(\"Deleting customer %s\", customer_id)\n try:\n db_customer = Customers.get(Customers.customer_id == customer_id)\n db_customer.delete_instance()\n LOGGER.info(\"Customer %s deleted\", customer_id)\n except DoesNotExist as e_val:\n LOGGER.warning(\n \"Customer %s does not exist: Delete operation ignored\", customer_id\n )\n LOGGER.warning(e_val)",
"def delete(self):\n key = self.request.get('key')\n\n if not self.assert_xsrf_token_or_fail(\n self.request, 'delete-lesson', {'key': key}):\n return\n\n if not CourseOutlineRights.can_delete(self):\n transforms.send_json_response(\n self, 401, 'Access denied.', {'key': key})\n return\n\n course = courses.Course(self)\n lesson = course.find_lesson_by_id(None, key)\n if not lesson:\n transforms.send_json_response(\n self, 404, 'Object not found.', {'key': key})\n return\n\n assert course.delete_lesson(lesson)\n course.save()\n\n transforms.send_json_response(self, 200, 'Deleted.')",
"def delete_user_entitlement(self, user_id):\n route_values = {}\n if user_id is not None:\n route_values['userId'] = self._serialize.url('user_id', user_id, 'str')\n self._send(http_method='DELETE',\n location_id='8480c6eb-ce60-47e9-88df-eca3c801638b',\n version='6.0-preview.3',\n route_values=route_values)",
"def delete_specific_amenity(amenity_id):\n amenity = storage.get('Amenity', amenity_id)\n if not amenity:\n abort(404)\n storage.delete(amenity)\n storage.save()\n return make_response(jsonify({}), 200)"
] | [
"0.602979",
"0.58418995",
"0.54291016",
"0.54278046",
"0.5400935",
"0.5389526",
"0.535687",
"0.5331528",
"0.5319464",
"0.5280911",
"0.5278951",
"0.52693886",
"0.5259713",
"0.52176064",
"0.52004325",
"0.5174218",
"0.5165518",
"0.5154062",
"0.51538026",
"0.514276",
"0.51390976",
"0.51019484",
"0.5098316",
"0.50949043",
"0.5086414",
"0.50825137",
"0.50596285",
"0.50578946",
"0.5057243",
"0.5035782"
] | 0.68601894 | 0 |
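For orientation, the positive `document` in this row is the ASK SMAPI SDK method behind `DELETE /v1/skills/{skillId}/stages/{stage}/enablement`. A minimal invocation sketch, assuming the public `ask-smapi-sdk` builder and placeholder credentials (none of this appears in the row itself):

```python
# Hedged usage sketch for delete_skill_enablement_v1; the credential values
# are placeholders and the builder comes from the public ask-smapi-sdk package.
from ask_smapi_sdk import StandardSmapiClientBuilder

smapi_client = StandardSmapiClientBuilder(
    client_id="<lwa-client-id>",          # Login with Amazon security profile
    client_secret="<lwa-client-secret>",
    refresh_token="<lwa-refresh-token>",  # the customerId is derived from this token
).client()

# DELETE /v1/skills/{skillId}/stages/{stage}/enablement
# The method returns None on 204 No Content; full_response=True yields the
# raw ApiResponse (status code + headers) instead.
response = smapi_client.delete_skill_enablement_v1(
    skill_id="amzn1.ask.skill.00000000-0000-0000-0000-000000000000",
    stage="development",
    full_response=True,
)
print(response.status_code)  # expected: 204 once the enablement is gone
```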
Checks whether an enablement exist for given skillId/stage and customerId (retrieved from Auth token) | def get_skill_enablement_status_v1(self, skill_id, stage, **kwargs):
    # type: (str, str, **Any) -> Union[ApiResponse, object, StandardizedError_f5106a89, BadRequestError_f854b05]
    operation_name = "get_skill_enablement_status_v1"
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'skill_id' is set
    if ('skill_id' not in params) or (params['skill_id'] is None):
        raise ValueError(
            "Missing the required parameter `skill_id` when calling `" + operation_name + "`")
    # verify the required parameter 'stage' is set
    if ('stage' not in params) or (params['stage'] is None):
        raise ValueError(
            "Missing the required parameter `stage` when calling `" + operation_name + "`")
    resource_path = '/v1/skills/{skillId}/stages/{stage}/enablement'
    resource_path = resource_path.replace('{format}', 'json')
    path_params = {} # type: Dict
    if 'skill_id' in params:
        path_params['skillId'] = params['skill_id']
    if 'stage' in params:
        path_params['stage'] = params['stage']
    query_params = [] # type: List
    header_params = [] # type: List
    body_params = None
    header_params.append(('Content-type', 'application/json'))
    header_params.append(('User-Agent', self.user_agent))
    # Response Type
    full_response = False
    if 'full_response' in params:
        full_response = params['full_response']
    # Authentication setting
    access_token = self._lwa_service_client.get_access_token_from_refresh_token()
    authorization_value = "Bearer " + access_token
    header_params.append(('Authorization', authorization_value))
    error_definitions = [] # type: List
    error_definitions.append(ServiceClientResponse(response_type=None, status_code=204, message="No Content; Confirms that enablement resource exists for given skillId & stage."))
    error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.bad_request_error.BadRequestError", status_code=400, message="Server cannot process the request due to a client error."))
    error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=401, message="The auth token is invalid/expired or doesn't have access to the resource."))
    error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.bad_request_error.BadRequestError", status_code=403, message="The operation being requested is not allowed."))
    error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=404, message="The resource being requested is not found."))
    error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=429, message="Exceed the permitted request limit. Throttling criteria includes total requests, per API, ClientId, and CustomerId."))
    error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=500, message="Internal Server Error."))
    error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=503, message="Service Unavailable."))
    api_response = self.invoke(
        method="GET",
        endpoint=self._api_endpoint,
        path=resource_path,
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        response_definitions=error_definitions,
        response_type=None)
    if full_response:
        return api_response
    return None | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def set_skill_enablement_v1(self, skill_id, stage, **kwargs):\n # type: (str, str, **Any) -> Union[ApiResponse, object, StandardizedError_f5106a89, BadRequestError_f854b05]\n operation_name = \"set_skill_enablement_v1\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'skill_id' is set\n if ('skill_id' not in params) or (params['skill_id'] is None):\n raise ValueError(\n \"Missing the required parameter `skill_id` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'stage' is set\n if ('stage' not in params) or (params['stage'] is None):\n raise ValueError(\n \"Missing the required parameter `stage` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v1/skills/{skillId}/stages/{stage}/enablement'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'skill_id' in params:\n path_params['skillId'] = params['skill_id']\n if 'stage' in params:\n path_params['stage'] = params['stage']\n\n query_params = [] # type: List\n\n header_params = [] # type: List\n\n body_params = None\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=None, status_code=204, message=\"No Content; Confirms that enablement is successfully created/updated.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=400, message=\"Server cannot process the request due to a client error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=401, message=\"The auth token is invalid/expired or doesn't have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=403, message=\"The operation being requested is not allowed.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=404, message=\"The resource being requested is not found.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=409, message=\"The request could not be completed due to a conflict with the current state of the target resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=429, message=\"Exceed the permitted request limit. 
Throttling criteria includes total requests, per API, ClientId, and CustomerId.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=500, message=\"Internal Server Error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=503, message=\"Service Unavailable.\"))\n\n api_response = self.invoke(\n method=\"PUT\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=None)\n\n if full_response:\n return api_response\n \n return None",
"def delete_skill_enablement_v1(self, skill_id, stage, **kwargs):\n # type: (str, str, **Any) -> Union[ApiResponse, object, StandardizedError_f5106a89, BadRequestError_f854b05]\n operation_name = \"delete_skill_enablement_v1\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'skill_id' is set\n if ('skill_id' not in params) or (params['skill_id'] is None):\n raise ValueError(\n \"Missing the required parameter `skill_id` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'stage' is set\n if ('stage' not in params) or (params['stage'] is None):\n raise ValueError(\n \"Missing the required parameter `stage` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v1/skills/{skillId}/stages/{stage}/enablement'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'skill_id' in params:\n path_params['skillId'] = params['skill_id']\n if 'stage' in params:\n path_params['stage'] = params['stage']\n\n query_params = [] # type: List\n\n header_params = [] # type: List\n\n body_params = None\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=None, status_code=204, message=\"No Content; Confirms that enablement is successfully deleted.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=400, message=\"Server cannot process the request due to a client error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=401, message=\"The auth token is invalid/expired or doesn't have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=403, message=\"The operation being requested is not allowed.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=404, message=\"The resource being requested is not found.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=429, message=\"Exceed the permitted request limit. 
Throttling criteria includes total requests, per API, ClientId, and CustomerId.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=500, message=\"Internal Server Error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=503, message=\"Service Unavailable.\"))\n\n api_response = self.invoke(\n method=\"DELETE\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=None)\n\n if full_response:\n return api_response\n \n return None",
"def checkPlayerSkillExists(self, userid, skillName):\r\n if not isinstance(userid, int):\r\n userid = self.getUserIdFromSteamId(userid)\r\n self.execute(\"SELECT level FROM Skill WHERE UserID=? AND name=?\", userid, skillName)\r\n return bool( self.fetchone())",
"def test_func(self):\n return (Student.objects.filter(user=self.request.user).exists())",
"def is_enrolled(cls, user, course_key):\r\n try:\r\n record = CourseEnrollment.objects.get(user=user, course_id=course_key)\r\n return record.is_active\r\n except cls.DoesNotExist:\r\n return False",
"def _enabled_entity_exists(self) -> bool:\n return self.entity_exists(self._enabled_toggle_entity_id)",
"def is_customer_id_exist(customer_id) -> bool:\n with MY_CONNECTION as connection:\n cursor = connection.cursor()\n cursor.execute(\"SELECT exists(SELECT 1 FROM Customers WHERE id_customer=?)\", (customer_id,))\n return cursor.fetchone()[0] == 1",
"def test_instance_profile_exists(self) -> None:\n self.assertTrue(self.validate_instance_profile('s3-access-role', is_prod=self.prod_env))",
"def entrance_exam(self):\n status = False\n tool = ProgrammingTool.create(self.PROGRAMMING_TOOL)\n if tool.connect(self.target_name):\n status = entrance_exam(tool, self.register_map)\n tool.disconnect()\n\n return status == EntranceExamErrors.OK",
"def is_ixtools_account_present(self, item_name):\n self.click_element(self.ixtools_account_arrow_dropdown_locator)\n ixtools_account_locator = (By.XPATH, \"//ul[contains(@id, 'ddlCustomerVendorAccount')]/li[text()='%s']\" % item_name)\n return self.is_element_present(ixtools_account_locator)",
"def _do_check(self):\n try:\n #breakpoint()\n ApplicationsItem.objects.exists()\n #print (\"Checking\")\n return True\n\n except Exception:\n client.captureException()\n return False",
"def exist_import_record(session, kg_table_id, kg_table_primary_key):\n\n try:\n team = session.query(DocumentSourceRecord).filter_by(kg_table_id=kg_table_id,\n kg_table_primary_key=kg_table_primary_key).first()\n if team:\n return True\n else:\n return False\n except Exception:\n traceback.print_exc()\n return False",
"def test_enabled(self):\n # OSA script should have been installed in setUp function, which sets\n # enabled to True by default.\n self.assertTrue(self.run_function(\"assistive.enabled\", [OSA_SCRIPT]))\n # Disable OSA Script\n self.run_function(\"assistive.enable\", [OSA_SCRIPT, False])\n # Assert against new disabled status\n self.assertFalse(self.run_function(\"assistive.enabled\", [OSA_SCRIPT]))",
"def is_resource_enabled(resource):\n return use_resources is not None and resource in use_resources",
"def has_permission(self, request, view):\r\n api_key = getattr(settings, \"EDX_API_KEY\", None)\r\n return (\r\n (settings.DEBUG and api_key is None) or\r\n (api_key is not None and request.META.get(\"HTTP_X_EDX_API_KEY\") == api_key)\r\n )",
"def _exists(isamAppliance, id):\n exists = False\n ret_obj = get_all(isamAppliance)\n\n for snmp in ret_obj['data']:\n if snmp['id'] == id:\n exists = True\n break\n\n return exists",
"def _onchange_restrict_access(self, stage_id):\n print('----------',self.env.uid)\n # if self.env.uid != 1 :\n raise exceptions.Warning('You are not allowed to change the stages, Please contact the Administrator')\n return True\n return {}",
"def isValidrequest(cls, mgr, fid, op, tmpcls, slot, session=None):\n ormop = clsmanager.getConfigOperation(op)\n if session is not None:\n cls.getclsoptions(tmpcls, session)\n if ormop in optionsdict[tmpcls]['OPTIONS']:\n if cls.getClsStageSupported(tmpcls, op, slot) is True:\n inputs = mgr.get(fid, tmpcls, op, slot, session)\n if len(inputs) > 0:\n return True\n return False",
"def check_enable_mode(self, *args, **kwargs):\n pass",
"def enable_metrics(self, rest_api_id, stage_name):\n print \"[INFO] Enabling INFO Level Logs on stage %s\" % stage_name\n self.api_client.update_stage(\n restApiId=rest_api_id,\n stageName=stage_name,\n patchOperations=[\n {\n 'op': 'replace',\n 'path': '/*/*/logging/loglevel',\n 'value': \"INFO\"\n },\n ]\n )\n print \"[INFO] Enabling Detailed Metrics on stage %s\" % stage_name\n self.api_client.update_stage(\n restApiId=rest_api_id,\n stageName=stage_name,\n patchOperations=[\n {\n 'op': 'replace',\n 'path': '/*/*/metrics/enabled',\n 'value': \"true\"\n },\n ]\n )\n print \"[INFO] Enabling Datatrace on stage %s\" % stage_name\n self.api_client.update_stage(\n restApiId=rest_api_id,\n stageName=stage_name,\n patchOperations=[\n {\n 'op': 'replace',\n 'path': '/*/*/logging/dataTrace',\n 'value': \"true\"\n },\n ]\n )\n\n return True",
"def test_exists_true(self):\n self.assertTrue(PrepSample.exists(self.sample_id, self.prep_template))",
"def exists_intent_action(self, intent_keyword):\n pass",
"def entry_exists(conn, aid):\n # Select row with mid\n cursor = conn.execute(\"SELECT * FROM AffirmedAssignments WHERE aid=?\", (aid,))\n row = cursor.fetchone()\n\n if row is None:\n # Assignments entry does not exist.\n return False\n\n # Assignments entry exists\n return True",
"def __contains__(self, tool_name):\n return tool_name in self._tool_data",
"def AgentProfileAvailable(self, agent_id):\n membership = getToolByName(self.context, 'portal_membership')\n agent = membership.getMemberById(agent_id)\n\n if(len(agent.getProperty(\"agent_profile_en\"))>0 or len(agent.getProperty(\"agent_profile_es\"))>0 or len(agent.getProperty(\"agent_profile_de\"))>0): \n return True\n else:\n return False",
"def validate(self, name: str, expansion: str) -> bool:\n\n try:\n logger.info(f'Validating card {name} existence against external API...')\n self._get_card(name=name)\n except requests.HTTPError:\n logger.error(f'There is no such card named {name}!')\n return False\n\n try:\n logger.info(f'Validating card {name} existence in expansion set {expansion}')\n sets = self.get_card_sets(name=name)\n if expansion in sets:\n logger.debug(f'{expansion} found in {sets}')\n return True\n else:\n logger.error(f'{expansion} not found in {sets}')\n return False\n except requests.HTTPError:\n logger.error(f'There is not such card {name} in expansion set {expansion}')\n return False",
"def test_purchase_request(self):\n self.purchase_request.button_to_approve()\n account_analytic_ids = [line.analytic_account_id\n for line in self.purchase_request.line_ids]\n self.assertEqual(len(account_analytic_ids),\n len(self.purchase_request.line_ids),\n 'All PR Line do not have Analytic Account')",
"def isActive(self, handler):\n if self.plugin_manager:\n enable_manager = self.plugin_manager.EnableManager()\n enable_manager.initFrom(self.c,self.handler_path) \n return handler.__module__ in enable_manager.actives\n else:\n return True",
"def _exists(isvgAppliance, uuid):\n exists = False\n ret_obj = get_all(isvgAppliance)\n\n for snmp in ret_obj['data']['snmpObjects']:\n if snmp['uuid'] == uuid:\n exists = True\n break\n\n return exists",
"def is_employee():\n return _is_member('uw_employee')"
] | [
"0.6090357",
"0.561119",
"0.54877293",
"0.51553667",
"0.5133626",
"0.507918",
"0.50128716",
"0.5011236",
"0.49842843",
"0.49654028",
"0.4948384",
"0.49361253",
"0.4929839",
"0.49206465",
"0.490764",
"0.48858306",
"0.48345122",
"0.48265818",
"0.48164898",
"0.48089364",
"0.4788811",
"0.47815105",
"0.4780199",
"0.47800633",
"0.47546777",
"0.47490528",
"0.47486052",
"0.4744081",
"0.4738114",
"0.47250012"
] | 0.6131015 | 0 |
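The GET variant above doubles as an existence check: a 204 means the enablement exists, while non-2xx codes surface as exceptions from the SDK's `invoke`. A sketch under that assumption (the `ServiceException` import path is an assumption, not shown in the row):

```python
# Hedged sketch: treating get_skill_enablement_status_v1 as a boolean probe.
# Assumption: the SDK raises ServiceException (ask_sdk_model_runtime) on
# non-2xx responses, carrying the HTTP status code.
from ask_sdk_model_runtime import ServiceException

def skill_is_enabled(client, skill_id, stage="development"):
    """True iff an enablement exists for skill_id/stage for this customer."""
    try:
        client.get_skill_enablement_status_v1(skill_id=skill_id, stage=stage)
        return True           # 204 No Content: enablement resource exists
    except ServiceException as exc:
        if exc.status_code == 404:
            return False      # no enablement for this skill/stage
        raise                 # 401/403/429/5xx: let the caller handle it
```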
Creates/Updates the enablement for given skillId/stage and customerId (retrieved from Auth token) | def set_skill_enablement_v1(self, skill_id, stage, **kwargs):
    # type: (str, str, **Any) -> Union[ApiResponse, object, StandardizedError_f5106a89, BadRequestError_f854b05]
    operation_name = "set_skill_enablement_v1"
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'skill_id' is set
    if ('skill_id' not in params) or (params['skill_id'] is None):
        raise ValueError(
            "Missing the required parameter `skill_id` when calling `" + operation_name + "`")
    # verify the required parameter 'stage' is set
    if ('stage' not in params) or (params['stage'] is None):
        raise ValueError(
            "Missing the required parameter `stage` when calling `" + operation_name + "`")
    resource_path = '/v1/skills/{skillId}/stages/{stage}/enablement'
    resource_path = resource_path.replace('{format}', 'json')
    path_params = {} # type: Dict
    if 'skill_id' in params:
        path_params['skillId'] = params['skill_id']
    if 'stage' in params:
        path_params['stage'] = params['stage']
    query_params = [] # type: List
    header_params = [] # type: List
    body_params = None
    header_params.append(('Content-type', 'application/json'))
    header_params.append(('User-Agent', self.user_agent))
    # Response Type
    full_response = False
    if 'full_response' in params:
        full_response = params['full_response']
    # Authentication setting
    access_token = self._lwa_service_client.get_access_token_from_refresh_token()
    authorization_value = "Bearer " + access_token
    header_params.append(('Authorization', authorization_value))
    error_definitions = [] # type: List
    error_definitions.append(ServiceClientResponse(response_type=None, status_code=204, message="No Content; Confirms that enablement is successfully created/updated."))
    error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.bad_request_error.BadRequestError", status_code=400, message="Server cannot process the request due to a client error."))
    error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=401, message="The auth token is invalid/expired or doesn't have access to the resource."))
    error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.bad_request_error.BadRequestError", status_code=403, message="The operation being requested is not allowed."))
    error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=404, message="The resource being requested is not found."))
    error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=409, message="The request could not be completed due to a conflict with the current state of the target resource."))
    error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=429, message="Exceed the permitted request limit. Throttling criteria includes total requests, per API, ClientId, and CustomerId."))
    error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=500, message="Internal Server Error."))
    error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=503, message="Service Unavailable."))
    api_response = self.invoke(
        method="PUT",
        endpoint=self._api_endpoint,
        path=resource_path,
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        response_definitions=error_definitions,
        response_type=None)
    if full_response:
        return api_response
    return None | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def delete_skill_enablement_v1(self, skill_id, stage, **kwargs):\n # type: (str, str, **Any) -> Union[ApiResponse, object, StandardizedError_f5106a89, BadRequestError_f854b05]\n operation_name = \"delete_skill_enablement_v1\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'skill_id' is set\n if ('skill_id' not in params) or (params['skill_id'] is None):\n raise ValueError(\n \"Missing the required parameter `skill_id` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'stage' is set\n if ('stage' not in params) or (params['stage'] is None):\n raise ValueError(\n \"Missing the required parameter `stage` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v1/skills/{skillId}/stages/{stage}/enablement'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'skill_id' in params:\n path_params['skillId'] = params['skill_id']\n if 'stage' in params:\n path_params['stage'] = params['stage']\n\n query_params = [] # type: List\n\n header_params = [] # type: List\n\n body_params = None\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=None, status_code=204, message=\"No Content; Confirms that enablement is successfully deleted.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=400, message=\"Server cannot process the request due to a client error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=401, message=\"The auth token is invalid/expired or doesn't have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=403, message=\"The operation being requested is not allowed.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=404, message=\"The resource being requested is not found.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=429, message=\"Exceed the permitted request limit. 
Throttling criteria includes total requests, per API, ClientId, and CustomerId.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=500, message=\"Internal Server Error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=503, message=\"Service Unavailable.\"))\n\n api_response = self.invoke(\n method=\"DELETE\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=None)\n\n if full_response:\n return api_response\n \n return None",
"def get_skill_enablement_status_v1(self, skill_id, stage, **kwargs):\n # type: (str, str, **Any) -> Union[ApiResponse, object, StandardizedError_f5106a89, BadRequestError_f854b05]\n operation_name = \"get_skill_enablement_status_v1\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'skill_id' is set\n if ('skill_id' not in params) or (params['skill_id'] is None):\n raise ValueError(\n \"Missing the required parameter `skill_id` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'stage' is set\n if ('stage' not in params) or (params['stage'] is None):\n raise ValueError(\n \"Missing the required parameter `stage` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v1/skills/{skillId}/stages/{stage}/enablement'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'skill_id' in params:\n path_params['skillId'] = params['skill_id']\n if 'stage' in params:\n path_params['stage'] = params['stage']\n\n query_params = [] # type: List\n\n header_params = [] # type: List\n\n body_params = None\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=None, status_code=204, message=\"No Content; Confirms that enablement resource exists for given skillId & stage.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=400, message=\"Server cannot process the request due to a client error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=401, message=\"The auth token is invalid/expired or doesn't have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=403, message=\"The operation being requested is not allowed.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=404, message=\"The resource being requested is not found.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=429, message=\"Exceed the permitted request limit. 
Throttling criteria includes total requests, per API, ClientId, and CustomerId.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=500, message=\"Internal Server Error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=503, message=\"Service Unavailable.\"))\n\n api_response = self.invoke(\n method=\"GET\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=None)\n\n if full_response:\n return api_response\n \n return None",
"def create_stage(self, ApiId: str, StageName: str, AccessLogSettings: Dict = None, ClientCertificateId: str = None, DefaultRouteSettings: Dict = None, DeploymentId: str = None, Description: str = None, RouteSettings: Dict = None, StageVariables: Dict = None) -> Dict:\n pass",
"def reset_entitlement_for_product_v1(self, product_id, stage, **kwargs):\n # type: (str, str, **Any) -> Union[ApiResponse, object, Error_fbe913d9, BadRequestError_f854b05]\n operation_name = \"reset_entitlement_for_product_v1\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'product_id' is set\n if ('product_id' not in params) or (params['product_id'] is None):\n raise ValueError(\n \"Missing the required parameter `product_id` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'stage' is set\n if ('stage' not in params) or (params['stage'] is None):\n raise ValueError(\n \"Missing the required parameter `stage` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v1/inSkillProducts/{productId}/stages/{stage}/entitlement'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'product_id' in params:\n path_params['productId'] = params['product_id']\n if 'stage' in params:\n path_params['stage'] = params['stage']\n\n query_params = [] # type: List\n\n header_params = [] # type: List\n\n body_params = None\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=None, status_code=204, message=\"Success. No content.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=400, message=\"Bad request. Returned when a required parameter is not present, badly formatted. \"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=401, message=\"The auth token is invalid/expired or doesn't have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=403, message=\"Request is forbidden.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=404, message=\"Requested resource not found.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=412, message=\"Precondition failed.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=429, message=\"Too many requests received.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=500, message=\"Internal Server Error\"))\n\n api_response = self.invoke(\n method=\"DELETE\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=None)\n\n if full_response:\n return api_response\n \n return None",
"def test_activate_customer(self):\n # create a customer to activate\n body = {\n \"name\": \"Kendall\",\n \"address\": \"333 Bedford Street\",\n \"phone_number\": \"555-555-3333\",\n \"email\": \"[email protected]\",\n \"credit_card\": \"VISA\"\n }\n resp_create = self.app.post('/customers',\n json=body,\n content_type='application/json')\n self.assertEqual(resp_create.status_code, status.HTTP_201_CREATED)\n self.assertEqual(resp_create.get_json()['active'], True)\n customer_id = resp_create.get_json()[\"id\"]\n\n # deactivate the customer\n logging.debug(customer_id)\n resp_deactivate = self.app.put(\"/customers/{}/deactivate\".format(customer_id),\n json=body,\n content_type=\"application/json\")\n self.assertEqual(resp_deactivate.status_code, status.HTTP_200_OK)\n self.assertEqual(resp_deactivate.get_json()[\"active\"], False)\n\n # activate the customer\n logging.debug(customer_id)\n resp_activate = self.app.put(\"/customers/{}/activate\".format(customer_id),\n json=body,\n content_type=\"application/json\")\n self.assertEqual(resp_activate.status_code, status.HTTP_200_OK)\n self.assertEqual(resp_activate.get_json()[\"active\"], True)",
"def add_stage(self, stage_name: str) -> \"CdkStage\":\n return jsii.invoke(self, \"addStage\", [stage_name])",
"def createDeveloper(self):\n self.createUser()\n self.user.is_developer = True\n self.user.put()",
"def GroundExcelAddEnemySightPointAdd(builder, EnemySightPointAdd):\n return AddEnemySightPointAdd(builder, EnemySightPointAdd)",
"def enable_metrics(self, rest_api_id, stage_name):\n print \"[INFO] Enabling INFO Level Logs on stage %s\" % stage_name\n self.api_client.update_stage(\n restApiId=rest_api_id,\n stageName=stage_name,\n patchOperations=[\n {\n 'op': 'replace',\n 'path': '/*/*/logging/loglevel',\n 'value': \"INFO\"\n },\n ]\n )\n print \"[INFO] Enabling Detailed Metrics on stage %s\" % stage_name\n self.api_client.update_stage(\n restApiId=rest_api_id,\n stageName=stage_name,\n patchOperations=[\n {\n 'op': 'replace',\n 'path': '/*/*/metrics/enabled',\n 'value': \"true\"\n },\n ]\n )\n print \"[INFO] Enabling Datatrace on stage %s\" % stage_name\n self.api_client.update_stage(\n restApiId=rest_api_id,\n stageName=stage_name,\n patchOperations=[\n {\n 'op': 'replace',\n 'path': '/*/*/logging/dataTrace',\n 'value': \"true\"\n },\n ]\n )\n\n return True",
"def test_create(self, client, job, agent_token):\n stage_url = '{base}/stages/teststage'.format(base=job_url_for(job))\n response = client.put(\n stage_url,\n headers={'x_dockci_api_key': agent_token},\n data={'success': 'true'},\n )\n\n assert response.status_code == 200 # TODO 201\n\n response_data = json.loads(response.data.decode())\n assert response_data.pop('success') == True\n\n response = client.get(stage_url)\n response_data = json.loads(response.data.decode())\n assert response_data.pop('success') == True",
"def customer_created_handler(event):\n obj = event.obj\n\n # submit customer after creation\n obj.workflow.submit()",
"def upsert_job(new_job, bulk_request):\n return SalesforceBulkJob('upsert', 'Lead', external_id_field='The_External_ID__c')",
"def test_approve(self):\n\n username,userpass = self.testdata.find_account_for('toolsubmitter')\n\n self.utils.account.login_as(username,userpass)\n\n self.contribtool.approve(TOOLNAME,TOOLLICENSEDATA)",
"def __init__(__self__, *,\n alexa_skill_id: pulumi.Input[str],\n is_enabled: pulumi.Input[bool]):\n pulumi.set(__self__, \"alexa_skill_id\", alexa_skill_id)\n pulumi.set(__self__, \"is_enabled\", is_enabled)",
"def update(self, customerguid, name=\"\", login=\"\", password=\"\", email=\"\", address=\"\", vat=\"\", jobguid=\"\", executionparams=None):",
"def GroundExcelAddEnemySightPointRate(builder, EnemySightPointRate):\n return AddEnemySightPointRate(builder, EnemySightPointRate)",
"def test_update(self, client, stage, agent_token):\n stage_url = stage_url_for(stage)\n response = client.put(\n stage_url,\n headers={'x_dockci_api_key': agent_token},\n data={'success': 'false'},\n )\n\n assert response.status_code == 200\n\n response_data = json.loads(response.data.decode())\n assert response_data.pop('success') == False\n\n response = client.get(stage_url)\n response_data = json.loads(response.data.decode())\n assert response_data.pop('success') == False",
"def purchase_indent_jao_approve(request, request_id):\n # Check if logged in user is JAO\n if not request.user.groups.filter(name='JrAO_AccountsDepartment').exists():\n raise PermissionDenied\n\n current_employee = request.user.employee_set.all()[0]\n purchase_indent_request = get_object_or_404(PurchaseIndentRequest, pk=request_id)\n form = PurchaseIndentBudgetDetailsForm(request.POST, instance=purchase_indent_request)\n\n if form.is_valid():\n if request.POST.get('Approve'):\n if not can_proceed(purchase_indent_request.jao_approve):\n raise PermissionDenied\n\n purchase_indent_request.jao_approve()\n purchase_indent_request.save()\n\n remark = request.POST.get('remark')\n transition_record = TransitionHistory(\n approver=current_employee,\n form=purchase_indent_request,\n from_state=STATE.APPROVED_BY_HOD,\n to_state=STATE.APPROVED_BY_JAO,\n remark=remark\n )\n transition_record.save()\n messages.success(request, 'The Purchase Indent form was Approved')\n\n elif request.POST.get('Reject'):\n if not can_proceed(purchase_indent_request.reject):\n raise PermissionDenied\n\n purchase_indent_request.reject()\n purchase_indent_request.save()\n\n remark = request.POST.get('remark')\n transition_record = TransitionHistory(\n approver=current_employee,\n form=purchase_indent_request,\n from_state=STATE.APPROVED_BY_HOD,\n to_state=STATE.REJECT,\n remark=remark\n )\n transition_record.save()\n messages.warning(request, 'The Purchase Indent form was Rejected')\n\n return redirect('purchase:purchase-requests-pending')\n else:\n return render(request, 'purchase/purchase_indent/show_jao.html',\n {'purchase_indent_request': purchase_indent_request}, {'form': form})",
"def addSkill(self, skillName, maxLevel, creditStart, creditIncrement):\r\n self.skills[skillName] = SkillObject(skillName, maxLevel, creditStart, creditIncrement)\r\n self.orderedSkills.append(skillName)",
"def create_export_request_for_skill_v1(self, skill_id, stage, **kwargs):\n # type: (str, str, **Any) -> Union[ApiResponse, object, StandardizedError_f5106a89, BadRequestError_f854b05]\n operation_name = \"create_export_request_for_skill_v1\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'skill_id' is set\n if ('skill_id' not in params) or (params['skill_id'] is None):\n raise ValueError(\n \"Missing the required parameter `skill_id` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'stage' is set\n if ('stage' not in params) or (params['stage'] is None):\n raise ValueError(\n \"Missing the required parameter `stage` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v1/skills/{skillId}/stages/{stage}/exports'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'skill_id' in params:\n path_params['skillId'] = params['skill_id']\n if 'stage' in params:\n path_params['stage'] = params['stage']\n\n query_params = [] # type: List\n\n header_params = [] # type: List\n\n body_params = None\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=None, status_code=202, message=\"Accepted.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=400, message=\"Server cannot process the request due to a client error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=401, message=\"The auth token is invalid/expired or doesn't have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=404, message=\"The resource being requested is not found.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=409, message=\"The request could not be completed due to a conflict with the current state of the target resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=429, message=\"Exceeds the permitted request limit. 
Throttling criteria includes total requests, per API, ClientId, and CustomerId.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=500, message=\"Internal Server Error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=503, message=\"Service Unavailable.\"))\n\n api_response = self.invoke(\n method=\"POST\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=None)\n\n if full_response:\n return api_response\n \n return None",
"def enroll(cls, user, course_key, mode=\"honor\"):\r\n enrollment = cls.get_or_create_enrollment(user, course_key)\r\n enrollment.update_enrollment(is_active=True, mode=mode)\r\n return enrollment",
"def enable_user(request):\n user_id = request.POST.get('user_id')\n if user_id is None:\n response = {'status': -1, 'status_message': 'No user with id {} exists'.format(user_id)}\n return HttpResponse(json.dumps(response))\n try:\n user_obj = User.objects.get(id=user_id)\n except User.DoesNotExist:\n response = {'status': -1, 'status_message': 'No user with id {} exists'.format(user_id)}\n return HttpResponse(json.dumps(response))\n user_obj.is_active = True\n user_obj.save()\n response = {'status': 1, 'status_message': 'Success'}\n return HttpResponse(json.dumps(response))",
"def GroundExcelAddPlayerSightPointAdd(builder, PlayerSightPointAdd):\n return AddPlayerSightPointAdd(builder, PlayerSightPointAdd)",
"def invoke_skill_end_point_v2(self, skill_id, stage, **kwargs):\n # type: (str, str, **Any) -> Union[ApiResponse, object, BadRequestError_765e0ac6, InvocationsApiResponse_3d7e3234, Error_ea6c1a5a]\n operation_name = \"invoke_skill_end_point_v2\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'skill_id' is set\n if ('skill_id' not in params) or (params['skill_id'] is None):\n raise ValueError(\n \"Missing the required parameter `skill_id` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'stage' is set\n if ('stage' not in params) or (params['stage'] is None):\n raise ValueError(\n \"Missing the required parameter `stage` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v2/skills/{skillId}/stages/{stage}/invocations'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'skill_id' in params:\n path_params['skillId'] = params['skill_id']\n if 'stage' in params:\n path_params['stage'] = params['stage']\n\n query_params = [] # type: List\n\n header_params = [] # type: List\n\n body_params = None\n if 'invocations_api_request' in params:\n body_params = params['invocations_api_request']\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.skill.invocations.invocations_api_response.InvocationsApiResponse\", status_code=200, message=\"Skill was invoked.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.bad_request_error.BadRequestError\", status_code=400, message=\"Bad request due to invalid or missing data.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.error.Error\", status_code=401, message=\"The auth token is invalid/expired or doesn't have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.bad_request_error.BadRequestError\", status_code=403, message=\"API user does not have permission to call this API or is currently in a state that does not allow invocation of this skill. 
\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.error.Error\", status_code=404, message=\"The specified skill does not exist.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.error.Error\", status_code=429, message=\"API user has exceeded the permitted request rate.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.error.Error\", status_code=500, message=\"Internal service error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.error.Error\", status_code=503, message=\"Service Unavailable.\"))\n\n api_response = self.invoke(\n method=\"POST\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=\"ask_smapi_model.v2.skill.invocations.invocations_api_response.InvocationsApiResponse\")\n\n if full_response:\n return api_response\n return api_response.body",
"def purchase_indent_approve(request, request_id):\n purchase_indent_request = get_object_or_404(PurchaseIndentRequest, pk=request_id)\n current_employee = request.user.employee_set.all()[0]\n\n if purchase_indent_request.state == 'Submitted':\n if purchase_indent_request.indenter.department.hod_id != current_employee.id:\n raise PermissionDenied\n return render(request, 'purchase/purchase_indent/show_hod.html',\n {'purchase_indent_request': purchase_indent_request})\n\n elif purchase_indent_request.state == 'Approved by Head of Department':\n if not request.user.groups.filter(name='JrAO_AccountsDepartment').exists():\n raise PermissionDenied\n form = PurchaseIndentBudgetDetailsForm()\n\n return render(request, 'purchase/purchase_indent/show_jao.html',\n {'purchase_indent_request': purchase_indent_request, 'form': form})\n\n elif purchase_indent_request.state == 'Approved by Junior Accounts Officer':\n if not request.user.groups.filter(name='DR_AccountsDepartment').exists():\n raise PermissionDenied\n return render(request, 'purchase/purchase_indent/show_dr.html',\n {'purchase_indent_request': purchase_indent_request})\n\n else:\n return PermissionDenied",
"def activate():\n try:\n body = request.get_json()\n\n activate_token = body[\"activate_token\"]\n password = body[\"password\"]\n\n if len(password) < 3 or len(password) > 50:\n return bad_request()\n\n if not models.token_exists(activate_token):\n\n return bad_request()\n\n student_hash = models.create_hash(password)\n models.save_hash(student_hash, activate_token)\n\n except KeyError:\n return bad_request()\n except Exception as e:\n print(e)\n return server_error()\n\n return created()",
"def enable_account(id, value):\n account_repo = AccountRepository(db)\n acc: Account = account_repo.get_by_id(id)\n if acc is None:\n click.echo(\"Account with specified ID does not exists.\")\n return ERROR_ACCOUNT_DOES_NOT_EXIST\n\n acc.enabled = value\n account_repo.enable_account(acc, value)\n msg = \"Account n°\" + str(id) + \" enabled is now \" + str(value) + \".\"\n click.echo(msg)",
"def add_customer(customer_id, name, lastname, home_address,\n phone_number, email_address, status, credit_limit):\n try:\n with database.transaction():\n customer = Customer.create(\n customer_id=customer_id,\n name=name,\n lastname=lastname,\n home_address=home_address,\n phone_number=phone_number,\n email_address=email_address,\n status=status,\n credit_limit=credit_limit,\n )\n customer.save()\n except Exception as unknown_error:\n print(unknown_error)",
"def _activate(self, context, id_, resource_inventory, data):\n LOG.info(\"[%s] Activation started \" % id_)\n resource_driver = driver.load_resource_driver(\n resource_inventory[eon_const.EON_RESOURCE_TYPE])\n try:\n run_playbook = data.get(eon_const.RUN_PLAYBOOK, True)\n input_model_data = data.get(eon_const.INPUT_MODEL)\n resource_driver.activate(context,\n id_,\n data,\n resource_inventory=resource_inventory,\n input_model_info=input_model_data,\n run_playbook=run_playbook)\n LOG.info(\"[%s] Activation finished successfully\" % id_)\n try:\n message = {\"resource_id\": id_,\n \"resource_state\": eon_const.EON_RESOURCE_STATE_ACTIVATED,\n \"resource_details\": resource_inventory, }\n message_notifier.notify(context,\n message_notifier.EVENT_PRIORITY_INFO,\n message_notifier.EVENT_TYPE[\n eon_const.EON_RESOURCE_STATE_ACTIVATED],\n message)\n except Exception as ex:\n LOG.exception(\n \"Exception while notifying the message : %s\" % ex)\n except Exception as e:\n LOG.exception(e)\n try:\n self.db_api.delete_property(context, id_,\n eon_const.HYPERVISOR_ID)\n except exception.NotFound:\n pass # ignore\n raise exception.ActivationFailure(\n resource_name=resource_inventory['id'],\n err=str(e.message))",
"def put(self, customer_id):\n data = request.json\n return edit_customer(customer_id=customer_id, data=data)"
] | [
"0.60831606",
"0.58041424",
"0.54635733",
"0.5185661",
"0.5182032",
"0.51508754",
"0.5087371",
"0.50694853",
"0.49154156",
"0.4913449",
"0.4885336",
"0.48156297",
"0.4790354",
"0.47659588",
"0.47657377",
"0.4749646",
"0.4744482",
"0.4742327",
"0.47352886",
"0.47029427",
"0.46841154",
"0.46626905",
"0.46548888",
"0.4644814",
"0.46282703",
"0.46172586",
"0.4605537",
"0.45961937",
"0.45872787",
"0.4572719"
] | 0.72542006 | 0 |
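Each record that follows pairs a docstring-style query with an SDK method built on one pattern: validate the required path parameters, declare a list of ServiceClientResponse entries keyed by HTTP status code, then dispatch through self.invoke. Below is a minimal sketch of how such a status-code table can be matched against a response; the ServiceClientResponse shape is inferred from the constructor calls in the records, and resolve_response is a hypothetical helper for illustration, not the quoted SDK's dispatch logic.

# Sketch only: ServiceClientResponse mirrors the (response_type, status_code,
# message) constructor shape used in the records; resolve_response is a
# hypothetical helper, not part of the quoted SDK.
class ServiceClientResponse(object):
    def __init__(self, response_type, status_code, message):
        self.response_type = response_type
        self.status_code = status_code
        self.message = message

def resolve_response(status_code, error_definitions):
    # Return the first definition whose status code matches, else None.
    for definition in error_definitions:
        if definition.status_code == status_code:
            return definition
    return None

definitions = [
    ServiceClientResponse(None, 202, "Accepted."),
    ServiceClientResponse("ask_smapi_model.v1.bad_request_error.BadRequestError",
                          400, "Server cannot process the request due to a client error."),
]
matched = resolve_response(400, definitions)
print(matched.message if matched else "unmapped status code")
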
Creates a new export for a skill with the given skillId and stage. | def create_export_request_for_skill_v1(self, skill_id, stage, **kwargs):
# type: (str, str, **Any) -> Union[ApiResponse, object, StandardizedError_f5106a89, BadRequestError_f854b05]
operation_name = "create_export_request_for_skill_v1"
params = locals()
for key, val in six.iteritems(params['kwargs']):
params[key] = val
del params['kwargs']
# verify the required parameter 'skill_id' is set
if ('skill_id' not in params) or (params['skill_id'] is None):
raise ValueError(
"Missing the required parameter `skill_id` when calling `" + operation_name + "`")
# verify the required parameter 'stage' is set
if ('stage' not in params) or (params['stage'] is None):
raise ValueError(
"Missing the required parameter `stage` when calling `" + operation_name + "`")
resource_path = '/v1/skills/{skillId}/stages/{stage}/exports'
resource_path = resource_path.replace('{format}', 'json')
path_params = {} # type: Dict
if 'skill_id' in params:
path_params['skillId'] = params['skill_id']
if 'stage' in params:
path_params['stage'] = params['stage']
query_params = [] # type: List
header_params = [] # type: List
body_params = None
header_params.append(('Content-type', 'application/json'))
header_params.append(('User-Agent', self.user_agent))
# Response Type
full_response = False
if 'full_response' in params:
full_response = params['full_response']
# Authentication setting
access_token = self._lwa_service_client.get_access_token_from_refresh_token()
authorization_value = "Bearer " + access_token
header_params.append(('Authorization', authorization_value))
error_definitions = [] # type: List
error_definitions.append(ServiceClientResponse(response_type=None, status_code=202, message="Accepted."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.bad_request_error.BadRequestError", status_code=400, message="Server cannot process the request due to a client error."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=401, message="The auth token is invalid/expired or doesn't have access to the resource."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=404, message="The resource being requested is not found."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=409, message="The request could not be completed due to a conflict with the current state of the target resource."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=429, message="Exceeds the permitted request limit. Throttling criteria includes total requests, per API, ClientId, and CustomerId."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=500, message="Internal Server Error."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=503, message="Service Unavailable."))
api_response = self.invoke(
method="POST",
endpoint=self._api_endpoint,
path=resource_path,
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
response_definitions=error_definitions,
response_type=None)
if full_response:
return api_response
return None | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def create_stage(self, ApiId: str, StageName: str, AccessLogSettings: Dict = None, ClientCertificateId: str = None, DefaultRouteSettings: Dict = None, DeploymentId: str = None, Description: str = None, RouteSettings: Dict = None, StageVariables: Dict = None) -> Dict:\n pass",
"def copy_stage(self, stack_id, rest_api_id, from_stage, to_stage_name):\n to_stage_variables = {}\n\n # Add lambda alias as a suffix to stage variables\n for k, v in from_stage[\"variables\"].iteritems():\n to_stage_variables[k] = v.replace(\":%s\" % from_stage[\"stageName\"], \":%s\" % to_stage_name)\n # if lambda function is a variable add permission to invoke\n if (\":%s\" % from_stage[\"stageName\"]) in v:\n self.add_lambda_permission(function_name=to_stage_variables[k],\n region=self.session.region_name,\n account_id=stack_id,\n rest_api_id=rest_api_id\n )\n # Create New Stage\n self.api_client.create_stage(\n restApiId=rest_api_id,\n stageName=to_stage_name,\n deploymentId=from_stage['deploymentId'],\n description=to_stage_name,\n # cacheClusterEnabled=True|False,\n # cacheClusterSize='0.5'|'1.6'|'6.1'|'13.5'|'28.4'|'58.2'|'118'|'237',\n variables=to_stage_variables\n # documentationVersion='string'\n )\n\n return True",
"def add_stage(self, stage_name: str) -> \"CdkStage\":\n return jsii.invoke(self, \"addStage\", [stage_name])",
"def stage(self, stage_id):\r\n return pipelines.Stage(self, stage_id)",
"def create_deployment(self, ApiId: str, Description: str = None, StageName: str = None) -> Dict:\n pass",
"def _create_deployment(self) -> aws.apigateway.Stage:\n deployment = aws.apigateway.Deployment(\n f\"{self.rest_api._name}-deployment\",\n rest_api=self.rest_api.id,\n # TODO: Still want to have a triggers function\n opts=pulumi.ResourceOptions(\n parent=self, depends_on=[p.lambda_integration for p in self.proxies]\n ),\n )\n\n stage = aws.apigateway.Stage(\n f\"{self.rest_api._name}-prod-stage\",\n deployment=deployment.id,\n rest_api=self.rest_api.id,\n stage_name=\"prod\",\n opts=pulumi.ResourceOptions(parent=self),\n )\n\n return stage",
"def _create_source_stage(\n self, stage_name: str, output: codepipeline.Artifact):\n secret_token = ''\n repo = ''\n owner = ''\n github_action = codepipeline_actions.GitHubSourceAction(\n action_name='Github_Source',\n owner=owner,\n repo=repo,\n oauth_token=secret_token,\n output=output\n )\n return {\n 'stageName': stage_name,\n 'actions': [github_action]\n }",
"def set_stage(stage):\n try:\n filename = os.path.join(get_var('SITE'), \".stage\")\n f = open(filename, \"w\")\n f.write(\"%s\\n\" % stage)\n f.close()\n logger.debug(\"set stage: %s\" % (stage))\n except:\n raise AssertionError(\"Unable to save setup/teardown stage! %s\" % (sys.exc_info()[1]))\n return stage",
"def get_sample_stage(sample_id, stage_id):\n s = get_resource(Sample.query.filter_by(obfuscated_id=sample_id))\n return get_resource(SampleStage.query.filter_by(_sample_id=s.id))",
"def GroundExcelAddStageFileName(builder, StageFileName):\n return AddStageFileName(builder, StageFileName)",
"def test_create(self, client, job, agent_token):\n stage_url = '{base}/stages/teststage'.format(base=job_url_for(job))\n response = client.put(\n stage_url,\n headers={'x_dockci_api_key': agent_token},\n data={'success': 'true'},\n )\n\n assert response.status_code == 200 # TODO 201\n\n response_data = json.loads(response.data.decode())\n assert response_data.pop('success') == True\n\n response = client.get(stage_url)\n response_data = json.loads(response.data.decode())\n assert response_data.pop('success') == True",
"def delete_skill_enablement_v1(self, skill_id, stage, **kwargs):\n # type: (str, str, **Any) -> Union[ApiResponse, object, StandardizedError_f5106a89, BadRequestError_f854b05]\n operation_name = \"delete_skill_enablement_v1\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'skill_id' is set\n if ('skill_id' not in params) or (params['skill_id'] is None):\n raise ValueError(\n \"Missing the required parameter `skill_id` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'stage' is set\n if ('stage' not in params) or (params['stage'] is None):\n raise ValueError(\n \"Missing the required parameter `stage` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v1/skills/{skillId}/stages/{stage}/enablement'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'skill_id' in params:\n path_params['skillId'] = params['skill_id']\n if 'stage' in params:\n path_params['stage'] = params['stage']\n\n query_params = [] # type: List\n\n header_params = [] # type: List\n\n body_params = None\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=None, status_code=204, message=\"No Content; Confirms that enablement is successfully deleted.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=400, message=\"Server cannot process the request due to a client error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=401, message=\"The auth token is invalid/expired or doesn't have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=403, message=\"The operation being requested is not allowed.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=404, message=\"The resource being requested is not found.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=429, message=\"Exceed the permitted request limit. 
Throttling criteria includes total requests, per API, ClientId, and CustomerId.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=500, message=\"Internal Server Error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=503, message=\"Service Unavailable.\"))\n\n api_response = self.invoke(\n method=\"DELETE\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=None)\n\n if full_response:\n return api_response\n \n return None",
"def stage(self, stage):\n self._stage = stage\n self._layer = Sdf.Layer.CreateAnonymous()\n self._stage.GetSessionLayer().subLayerPaths.append(self._layer.identifier)",
"def _add_stage(self, name):\n def stage_func(self, *args, **kwargs):\n \"\"\" Stage function.\n\n :param args: Positional arguments.\n :param kwargs: Keyword arguments.\n :return: Pipeline (for method chaining).\n \"\"\"\n self._pipe.append(Stage(name, args, kwargs))\n return self\n\n setattr(Pipeline, name, stage_func)",
"def skill(ctx: Context, public_id: PublicId):\n _eject_item(ctx, \"skill\", public_id)",
"def set_skill_enablement_v1(self, skill_id, stage, **kwargs):\n # type: (str, str, **Any) -> Union[ApiResponse, object, StandardizedError_f5106a89, BadRequestError_f854b05]\n operation_name = \"set_skill_enablement_v1\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'skill_id' is set\n if ('skill_id' not in params) or (params['skill_id'] is None):\n raise ValueError(\n \"Missing the required parameter `skill_id` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'stage' is set\n if ('stage' not in params) or (params['stage'] is None):\n raise ValueError(\n \"Missing the required parameter `stage` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v1/skills/{skillId}/stages/{stage}/enablement'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'skill_id' in params:\n path_params['skillId'] = params['skill_id']\n if 'stage' in params:\n path_params['stage'] = params['stage']\n\n query_params = [] # type: List\n\n header_params = [] # type: List\n\n body_params = None\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=None, status_code=204, message=\"No Content; Confirms that enablement is successfully created/updated.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=400, message=\"Server cannot process the request due to a client error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=401, message=\"The auth token is invalid/expired or doesn't have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=403, message=\"The operation being requested is not allowed.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=404, message=\"The resource being requested is not found.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=409, message=\"The request could not be completed due to a conflict with the current state of the target resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=429, message=\"Exceed the permitted request limit. 
Throttling criteria includes total requests, per API, ClientId, and CustomerId.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=500, message=\"Internal Server Error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=503, message=\"Service Unavailable.\"))\n\n api_response = self.invoke(\n method=\"PUT\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=None)\n\n if full_response:\n return api_response\n \n return None",
"def experimentExport(request, experiment_id):\n json_data = ExperimentAdmin.exportToJSON(experiment_id)\n response = HttpResponse(json.dumps(json_data), content_type='application/json')\n response['Content-Disposition'] = 'attachment; filename=\\\"' + Experiment.objects.get(id=experiment_id).exp_name + '.json\\\"'\n return response",
"def stage_by_id(self, stage_id):\n response = self._session.get(\n path=self._session.urljoin(\n self.STAGES_RESOURCE_PATH,\n '{}.xml'.format(stage_id)\n ).format(\n base_api=self.base_api\n ),\n headers={'Accept': 'application/xml'},\n )\n\n return response.text",
"def export_pipeline(scikit_pipeline):\n steps_obj = {'steps':[]}\n for name, md in scikit_pipeline.steps:\n steps_obj['steps'].append({\n 'name': name,\n 'class_name': fullname(md),\n 'params': md.get_params()\n })\n\n return steps_obj",
"def create_beta_test_v1(self, skill_id, **kwargs):\n # type: (str, **Any) -> Union[ApiResponse, object, Error_fbe913d9, BadRequestError_f854b05]\n operation_name = \"create_beta_test_v1\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'skill_id' is set\n if ('skill_id' not in params) or (params['skill_id'] is None):\n raise ValueError(\n \"Missing the required parameter `skill_id` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v1/skills/{skillId}/betaTest'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'skill_id' in params:\n path_params['skillId'] = params['skill_id']\n\n query_params = [] # type: List\n\n header_params = [] # type: List\n\n body_params = None\n if 'create_test_body' in params:\n body_params = params['create_test_body']\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=None, status_code=201, message=\"Success. Return a URL to track the resource in 'Location' header.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=400, message=\"Server cannot process the request due to a client error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=401, message=\"The auth token is invalid/expired or doesn't have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=404, message=\"The resource being requested is not found.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=409, message=\"The request could not be completed due to a conflict with the current state of the target resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=429, message=\"Exceed the permitted request limit. Throttling criteria includes total requests, per API, ClientId, and CustomerId.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=500, message=\"Internal Server Error.\"))\n\n api_response = self.invoke(\n method=\"POST\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=None)\n\n if full_response:\n return api_response\n \n return None",
"def GroundExcelAddStageTopography_(builder, StageTopography_):\n return AddStageTopography_(builder, StageTopography_)",
"def register_stage(key, module):\n register(key, module, stage_dict)",
"def stage(self, stage: osbuild.Stage):",
"def export(\n self,\n source: TestReport,\n export_context: Optional[ExportContext] = None,\n ) -> Optional[Dict]:\n\n export_context = verify_export_context(\n exporter=self, export_context=export_context\n )\n result = None\n json_path = pathlib.Path(self.cfg.json_path).resolve()\n\n if len(source):\n json_path.parent.mkdir(parents=True, exist_ok=True)\n\n test_plan_schema = TestReportSchema()\n data = test_plan_schema.dump(source)\n attachments_dir = json_path.parent / ATTACHMENTS\n\n # Save the Testplan report.\n if self.cfg.split_json_report:\n (\n structure_filename,\n assertions_filename,\n ) = gen_attached_report_names(json_path)\n structure_filepath = attachments_dir / structure_filename\n assertions_filepath = attachments_dir / assertions_filename\n\n meta, structure, assertions = self.split_json_report(data)\n attachments_dir.mkdir(parents=True, exist_ok=True)\n\n with open(structure_filepath, \"w\") as json_file:\n json.dump(structure, json_file)\n with open(assertions_filepath, \"w\") as json_file:\n json.dump(assertions, json_file)\n\n meta[\"attachments\"] = save_attachments(\n report=source, directory=attachments_dir\n )\n meta[\"version\"] = 2\n meta[\"attachments\"][structure_filename] = str(\n structure_filepath\n )\n meta[\"attachments\"][assertions_filename] = str(\n assertions_filepath\n )\n meta[\"structure_file\"] = structure_filename\n meta[\"assertions_file\"] = assertions_filename\n\n with open(json_path, \"w\") as json_file:\n json.dump(meta, json_file)\n else:\n data[\"attachments\"] = save_attachments(\n report=source, directory=attachments_dir\n )\n data[\"version\"] = 1\n\n with open(json_path, \"w\") as json_file:\n json.dump(data, json_file)\n\n self.logger.user_info(\"JSON generated at %s\", json_path)\n result = {\"json\": self.cfg.json_path}\n else:\n self.logger.user_info(\n \"Skipping JSON creation for empty report: %s\", source.name\n )\n return result",
"def create_dataset(request):\n body = json.loads(request.body)\n try:\n org = Organization.objects.get(pk=body['organization_id'])\n except Organization.DoesNotExist:\n return {\"status\": 'error',\n 'message': 'organization_id not provided'}\n record = ImportRecord.objects.create(\n name=body['name'],\n app=\"seed\",\n start_time=datetime.datetime.now(),\n created_at=datetime.datetime.now(),\n last_modified_by=request.user,\n super_organization=org,\n owner=request.user,\n )\n\n return {\n 'status': 'success',\n 'id': record.pk,\n 'name': record.name,\n }",
"def invoke_skill_end_point_v2(self, skill_id, stage, **kwargs):\n # type: (str, str, **Any) -> Union[ApiResponse, object, BadRequestError_765e0ac6, InvocationsApiResponse_3d7e3234, Error_ea6c1a5a]\n operation_name = \"invoke_skill_end_point_v2\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'skill_id' is set\n if ('skill_id' not in params) or (params['skill_id'] is None):\n raise ValueError(\n \"Missing the required parameter `skill_id` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'stage' is set\n if ('stage' not in params) or (params['stage'] is None):\n raise ValueError(\n \"Missing the required parameter `stage` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v2/skills/{skillId}/stages/{stage}/invocations'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'skill_id' in params:\n path_params['skillId'] = params['skill_id']\n if 'stage' in params:\n path_params['stage'] = params['stage']\n\n query_params = [] # type: List\n\n header_params = [] # type: List\n\n body_params = None\n if 'invocations_api_request' in params:\n body_params = params['invocations_api_request']\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.skill.invocations.invocations_api_response.InvocationsApiResponse\", status_code=200, message=\"Skill was invoked.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.bad_request_error.BadRequestError\", status_code=400, message=\"Bad request due to invalid or missing data.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.error.Error\", status_code=401, message=\"The auth token is invalid/expired or doesn't have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.bad_request_error.BadRequestError\", status_code=403, message=\"API user does not have permission to call this API or is currently in a state that does not allow invocation of this skill. 
\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.error.Error\", status_code=404, message=\"The specified skill does not exist.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.error.Error\", status_code=429, message=\"API user has exceeded the permitted request rate.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.error.Error\", status_code=500, message=\"Internal service error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.error.Error\", status_code=503, message=\"Service Unavailable.\"))\n\n api_response = self.invoke(\n method=\"POST\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=\"ask_smapi_model.v2.skill.invocations.invocations_api_response.InvocationsApiResponse\")\n\n if full_response:\n return api_response\n return api_response.body",
"def create_export_object(xform, export_type, options):\n export_options = get_export_options(options)\n return Export(\n xform=xform,\n export_type=export_type,\n options=export_options,\n created_on=timezone.now(),\n )",
"def createStageWithNewLayer():\n\n # Simply create a proxy shape. Since it does not have a USD file associated\n # (in the .filePath attribute), the proxy shape base will create an empty\n # stage in memory. This will create the session and root layer as well.\n if hasattr(mayaUsd, 'ufe') and hasattr(mayaUsd.ufe, 'createStageWithNewLayer'):\n shapeNode = mayaUsd.ufe.createStageWithNewLayer('|world')\n cmds.select(shapeNode, replace=True)\n return shapeNode\n else:\n shapeNode = cmds.createNode('mayaUsdProxyShape', skipSelect=True, name='stageShape1')\n cmds.connectAttr('time1.outTime', shapeNode+'.time')\n cmds.select(shapeNode, replace=True)\n fullPath = cmds.ls(shapeNode, long=True)\n return fullPath[0]",
"def slo_create(obj, product_name, title, description, slo_file):\n client = get_client(obj)\n\n product = client.product_list(name=product_name)\n if not product:\n fatal_error('Product {} does not exist'.format(product_name))\n\n product = product[0]\n\n with Action('Creating SLO for product: {}'.format(product_name), nl=True) as act:\n if slo_file:\n slo = json.load(slo_file)\n else:\n slo = {'title': title, 'description': description}\n\n validate_slo(slo, act)\n\n if not act.errors:\n new_slo = client.slo_create(product, slo['title'], slo.get('description', ''))\n\n print(json.dumps(new_slo, indent=4))\n\n for target in slo.get('targets', []):\n t = client.target_create(new_slo, target['sli_uri'], target_from=target['from'], target_to=target['to'])\n act.ok('Created a new target')\n print(json.dumps(t, indent=4))",
"def write_to_usd(self, file_path, scene_path, shader='UsdPreviewSurface', bound_prims=None, time=None,\n texture_dir='', texture_file_prefix=''):\n assert os.path.splitext(file_path)[1] in ['.usd', '.usda'], f'Invalid file path \"{file_path}\".'\n assert shader in self.shaders, f'Shader {shader} is not support. Choose from {list(self.shaders.keys())}.'\n if os.path.exists(file_path):\n stage = Usd.Stage.Open(file_path)\n else:\n stage = usd.create_stage(file_path)\n if time is None:\n time = Usd.TimeCode.Default()\n\n writer = self.shaders[shader]['writer']\n return writer(stage, file_path, scene_path, bound_prims, time, texture_dir, texture_file_prefix)"
] | [
"0.5956866",
"0.55754095",
"0.5476709",
"0.5302441",
"0.5278265",
"0.52532685",
"0.5133455",
"0.51053697",
"0.5054077",
"0.5003153",
"0.49137205",
"0.4842088",
"0.4833242",
"0.4788251",
"0.47626925",
"0.47567585",
"0.47514442",
"0.4733472",
"0.47299838",
"0.47062933",
"0.46693534",
"0.4668571",
"0.46480513",
"0.46303385",
"0.46157867",
"0.46141377",
"0.4578438",
"0.45775875",
"0.45527896",
"0.4528028"
] | 0.73437005 | 0 |
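For context on the export record above: the 202 Accepted definition carries no response body and the method returns None on success, so callers who need the status line or headers pass full_response=True. A hedged usage sketch follows, assuming the companion SDK exposes a StandardSmapiClientBuilder and that the returned ApiResponse wrapper carries a status_code attribute; the credentials are placeholders.

# Hedged usage sketch; StandardSmapiClientBuilder and ApiResponse.status_code
# are assumptions about the surrounding SDK, not shown in this record.
from ask_smapi_sdk import StandardSmapiClientBuilder  # assumed import

smapi_client = StandardSmapiClientBuilder(
    client_id="amzn1.application-oa2-client.XXXX",  # placeholder
    client_secret="XXXX",                           # placeholder
    refresh_token="Atzr|XXXX",                      # placeholder
).client()

api_response = smapi_client.create_export_request_for_skill_v1(
    skill_id="amzn1.ask.skill.XXXX",  # placeholder skill id
    stage="development",
    full_response=True,  # return the ApiResponse wrapper instead of None
)
print(api_response.status_code)  # 202 expected, per the Accepted definition above
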
Get the list of in-skill products for the skillId. | def get_isp_list_for_skill_id_v1(self, skill_id, stage, **kwargs):
# type: (str, str, **Any) -> Union[ApiResponse, object, Error_fbe913d9, BadRequestError_f854b05, ListInSkillProductResponse_505e7307]
operation_name = "get_isp_list_for_skill_id_v1"
params = locals()
for key, val in six.iteritems(params['kwargs']):
params[key] = val
del params['kwargs']
# verify the required parameter 'skill_id' is set
if ('skill_id' not in params) or (params['skill_id'] is None):
raise ValueError(
"Missing the required parameter `skill_id` when calling `" + operation_name + "`")
# verify the required parameter 'stage' is set
if ('stage' not in params) or (params['stage'] is None):
raise ValueError(
"Missing the required parameter `stage` when calling `" + operation_name + "`")
resource_path = '/v1/skills/{skillId}/stages/{stage}/inSkillProducts'
resource_path = resource_path.replace('{format}', 'json')
path_params = {} # type: Dict
if 'skill_id' in params:
path_params['skillId'] = params['skill_id']
if 'stage' in params:
path_params['stage'] = params['stage']
query_params = [] # type: List
if 'next_token' in params:
query_params.append(('nextToken', params['next_token']))
if 'max_results' in params:
query_params.append(('maxResults', params['max_results']))
header_params = [] # type: List
body_params = None
header_params.append(('Content-type', 'application/json'))
header_params.append(('User-Agent', self.user_agent))
# Response Type
full_response = False
if 'full_response' in params:
full_response = params['full_response']
# Authentication setting
access_token = self._lwa_service_client.get_access_token_from_refresh_token()
authorization_value = "Bearer " + access_token
header_params.append(('Authorization', authorization_value))
error_definitions = [] # type: List
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.isp.list_in_skill_product_response.ListInSkillProductResponse", status_code=200, message="Response contains list of in-skill products for the specified skillId and stage."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.bad_request_error.BadRequestError", status_code=400, message="Bad request. Returned when a required parameter is not present, badly formatted. "))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.error.Error", status_code=401, message="The auth token is invalid/expired or doesn't have access to the resource."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.error.Error", status_code=404, message="Requested resource not found."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.error.Error", status_code=429, message="Too many requests received."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.error.Error", status_code=500, message="Internal Server Error"))
api_response = self.invoke(
method="GET",
endpoint=self._api_endpoint,
path=resource_path,
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
response_definitions=error_definitions,
response_type="ask_smapi_model.v1.isp.list_in_skill_product_response.ListInSkillProductResponse")
if full_response:
return api_response
return api_response.body | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_products(self):\n return [item.code for item in self._products]",
"def list_products(self):\n url = self.base_url\n # TODO add filtering support when holvi api supports it\n obdata = self.connection.make_get(url)\n return ProductList(obdata, self)",
"def list_products(self):\n return self._make_get_request(self._urls['products'])",
"def product_list(id):\r\n\r\n db = get_db()\r\n product_list = db.execute(\r\n \"SELECT product_id, product_name, quantity FROM product WHERE for_business = ? AND quantity > 0\",\r\n (id,),\r\n ).fetchall()\r\n return product_list",
"def products(self):\n return list(Product.select())",
"def getListOfProducts(self, *args):\n return _libsbml.Reaction_getListOfProducts(self, *args)",
"def skills():\n with app.app_context():\n results = Skill.query.all()\n return SkillsResponse(skills=results).json(), 200",
"def get_skills(self):\n return self.skills[:]",
"def get_products_by_id(product_id):\n rs_api = woo_request_helper().get_details(wc_endpoint='products/{}'.format(product_id))\n return rs_api",
"def products(self):\n return self._products",
"def get_product_by_id(productId): # noqa: E501\n return 'do some magic!'",
"def products(self):\n response = requests.get(self._url(self._PRODUCTS_PATH), headers=self._headers)\n return response.json()",
"def get_products(self):\n con = dbcon()\n cur = con.cursor()\n cur.execute(\"SELECT * FROM products;\")\n res = cur.fetchall()\n if res:\n prdcts=[]\n for prodct_item in res:\n picked_prdct = {\n 'product_id':prodct_item[0],\n 'product_name':prodct_item[1],\n 'price':prodct_item[2],\n 'quantity':prodct_item[3]\n }\n prdcts.append(picked_prdct)\n return jsonify({\"Products\": prdcts}), 200\n return jsonify({\"message\":\"No products in store\"})",
"def getSkills(self):\n return self.skills",
"def source_products(self, uuid):\n return self._backend.source_products(uuid)",
"def products(self):\r\n return self._products",
"def listProducts(self):\n response = self.productClient.list_products(parent=self.locationPath)\n return [ProductSearch.Product._fromResponse(self.productSearch, x) for x in response]",
"def specific_product(self, product_id):\n for product in self.products_list:\n if product['product_id'] == product_id:\n return jsonify({\"Product\":product}), 200",
"def get_products(self, query_args={}):\n endpoint = '/v3/educator/products'\n result = self.request(endpoint, query_args)\n\n products = []\n for data in result.response:\n # Dynamically load product instance.\n class_name = data.type.capitalize()\n product = Product.instance(class_name, data)\n products.append(product)\n\n return products",
"def data_skill_list(self):\n data_skill_list = []\n for skill in self.data_skill:\n if 'name' in skill.keys():\n data_skill_list.append(skill['name'])\n return data_skill_list",
"def get_product(self, page_size=10, pages_number=1):\n products = []\n params = self.params.copy()\n params[\"page_size\"] = page_size\n\n try:\n response = requests.get(self.url, params=params, timeout=3)\n response.json()\n except requests.ConnectionError:\n print(\"Error when fetching the API\")\n for i in range(pages_number):\n params[\"page\"] = i + 1\n response = requests.get(self.url, params=params)\n if response.status_code == 200:\n products.extend(response.json()[\"products\"])\n return products",
"async def all_skills_data(self) -> AllSkillsData:\n return AllSkillsData(**await self.get(\"/skill/all\"))",
"def loadproducts(lid):\r\n db = get_db()\r\n\r\n b_id = session.get(\"user_id\")\r\n product_list = {}\r\n\r\n if lid == \"Products\":\r\n query = \"SELECT product_id, product_name FROM product WHERE for_business = ? AND quantity > 0\"\r\n warehouses = db.execute(query, (b_id,)).fetchall()\r\n for products in warehouses:\r\n product_list[products[0]] = products[1]\r\n else:\r\n query = \"SELECT prod_id FROM warehouse where loc_id = ? AND b_id = ?\"\r\n warehouses = db.execute(query, (lid, b_id,)).fetchall()\r\n for products in warehouses:\r\n product_name = db.execute(\r\n \"SELECT product_name FROM product WHERE product_id = ? AND for_business = ?\",\r\n (products[\"prod_id\"], b_id,),\r\n ).fetchone()\r\n product_list[products[\"prod_id\"]] = product_name[\"product_name\"]\r\n\r\n return jsonify(product_list)",
"def get_product_list_async(self, observations):\n\n # getting the obsid list\n if type(observations) == Row:\n observations = observations[\"obsid\"]\n if np.isscalar(observations):\n observations = [observations]\n if type(observations) == Table:\n observations = observations['obsid']\n\n service = 'Mast.Caom.Products'\n params = {'obsid': ','.join(observations)}\n\n return self.service_request_async(service, params)",
"def get_all_products(self):\n\t\tpass",
"def ListProducts(self):\n return copy.deepcopy(self._products)",
"def ListProducts(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')",
"def get_product_list(include_details = True):\n \n json_obj = requests.get(api_base_url + 'products')\n products_list = json.loads(json_obj.content)['products']\n d = OrderedDict(zip([x.pop('product') for x in products_list], \n products_list))\n if include_details: return d\n return d.keys()",
"def associate_isp_with_skill_v1(self, product_id, skill_id, **kwargs):\n # type: (str, str, **Any) -> Union[ApiResponse, object, Error_fbe913d9, BadRequestError_f854b05]\n operation_name = \"associate_isp_with_skill_v1\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'product_id' is set\n if ('product_id' not in params) or (params['product_id'] is None):\n raise ValueError(\n \"Missing the required parameter `product_id` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'skill_id' is set\n if ('skill_id' not in params) or (params['skill_id'] is None):\n raise ValueError(\n \"Missing the required parameter `skill_id` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v1/inSkillProducts/{productId}/skills/{skillId}'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'product_id' in params:\n path_params['productId'] = params['product_id']\n if 'skill_id' in params:\n path_params['skillId'] = params['skill_id']\n\n query_params = [] # type: List\n\n header_params = [] # type: List\n\n body_params = None\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=None, status_code=204, message=\"Success. No content.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=400, message=\"Bad request. Returned when a required parameter is not present, badly formatted. \"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=401, message=\"The auth token is invalid/expired or doesn't have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=403, message=\"Request is forbidden.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=404, message=\"Requested resource not found.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=429, message=\"Too many requests received.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=500, message=\"Internal Server Error\"))\n\n api_response = self.invoke(\n method=\"PUT\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=None)\n\n if full_response:\n return api_response\n \n return None",
"def ajax_av_subproducts_list(request, product_id):\n product = Product.service.get_my_product(request.user, product_id)\n all_in_range = Product.service.get_available_subproducts(\n owner=request.user,\n package_level=product.package_level\n )\n return jsonify(data=[\n {\n 'DT_RowId': product.id,\n 'gtin': product.gtin,\n 'package_level': product.package_level.unit_descriptor,\n 'description': product.description\n } for product in all_in_range\n ])"
] | [
"0.59632087",
"0.59197265",
"0.590533",
"0.5828244",
"0.57245255",
"0.5663034",
"0.56141776",
"0.55580974",
"0.55575114",
"0.5537327",
"0.553261",
"0.55187964",
"0.5504587",
"0.54901576",
"0.54774684",
"0.54679954",
"0.54339874",
"0.5425092",
"0.5394326",
"0.53756917",
"0.53679717",
"0.53398174",
"0.533483",
"0.5305466",
"0.52929854",
"0.52907366",
"0.5284797",
"0.52836573",
"0.5283569",
"0.5278553"
] | 0.70336187 | 0 |
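The nextToken and maxResults query parameters in the record above imply cursor pagination. Below is a hedged sketch of walking every page, assuming the client exposes the method as defined and that the ListInSkillProductResponse body carries next_token and in_skill_products attributes (neither name is confirmed by this record).

# Hedged pagination sketch for the in-skill products record above.
def iter_in_skill_products(smapi_client, skill_id, stage="development"):
    next_token = None
    while True:
        kwargs = {"max_results": 50}
        # Only pass next_token once the service has returned one; the record
        # above appends a 'nextToken' query parameter whenever the kwarg is set.
        if next_token:
            kwargs["next_token"] = next_token
        page = smapi_client.get_isp_list_for_skill_id_v1(skill_id, stage, **kwargs)
        # Attribute names below are assumptions about the response model.
        for summary in getattr(page, "in_skill_products", None) or []:
            yield summary
        next_token = getattr(page, "next_token", None)
        if not next_token:
            break
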
Delete AccountLinking information of a skill for the given stage. | def delete_account_linking_info_v1(self, skill_id, stage_v2, **kwargs):
# type: (str, str, **Any) -> Union[ApiResponse, object, StandardizedError_f5106a89, BadRequestError_f854b05]
operation_name = "delete_account_linking_info_v1"
params = locals()
for key, val in six.iteritems(params['kwargs']):
params[key] = val
del params['kwargs']
# verify the required parameter 'skill_id' is set
if ('skill_id' not in params) or (params['skill_id'] is None):
raise ValueError(
"Missing the required parameter `skill_id` when calling `" + operation_name + "`")
# verify the required parameter 'stage_v2' is set
if ('stage_v2' not in params) or (params['stage_v2'] is None):
raise ValueError(
"Missing the required parameter `stage_v2` when calling `" + operation_name + "`")
resource_path = '/v1/skills/{skillId}/stages/{stageV2}/accountLinkingClient'
resource_path = resource_path.replace('{format}', 'json')
path_params = {} # type: Dict
if 'skill_id' in params:
path_params['skillId'] = params['skill_id']
if 'stage_v2' in params:
path_params['stageV2'] = params['stage_v2']
query_params = [] # type: List
header_params = [] # type: List
body_params = None
header_params.append(('Content-type', 'application/json'))
header_params.append(('User-Agent', self.user_agent))
# Response Type
full_response = False
if 'full_response' in params:
full_response = params['full_response']
# Authentication setting
access_token = self._lwa_service_client.get_access_token_from_refresh_token()
authorization_value = "Bearer " + access_token
header_params.append(('Authorization', authorization_value))
error_definitions = [] # type: List
error_definitions.append(ServiceClientResponse(response_type=None, status_code=204, message="Success. No content."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=401, message="The auth token is invalid/expired or doesn't have access to the resource."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.bad_request_error.BadRequestError", status_code=403, message="The operation being requested is not allowed."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=404, message="The specified skill/stage/accountLinkingClient doesn't exist."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=429, message="Exceed the permitted request limit. Throttling criteria includes total requests, per API, ClientId, and CustomerId."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=500, message="Internal Server Error."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=503, message="Service Unavailable."))
api_response = self.invoke(
method="DELETE",
endpoint=self._api_endpoint,
path=resource_path,
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
response_definitions=error_definitions,
response_type=None)
if full_response:
return api_response
return None | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def delete_stage(stage):\n folder = stage_folder(stage)\n shutil.rmtree(folder) # delete old\n ensure_path(folder) # create new",
"def delete_account(self, account):\n \n pass",
"def delprofile(variable, account):\n stm = shared_morphene_instance()\n if mph.rpc is not None:\n mph.rpc.rpcconnect()\n\n if not account:\n account = mph.config[\"default_account\"]\n if not unlock_wallet(stm):\n return\n acc = Account(account, morphene_instance=stm)\n json_metadata = Profile(acc[\"json_metadata\"])\n\n for var in variable:\n json_metadata.remove(var)\n\n tx = acc.update_account_profile(json_metadata)\n tx = json.dumps(tx, indent=4)\n print(tx)",
"def delete_secret_link(link_id):\n\n Secret_Link.objects.filter(link_id=link_id).delete()",
"def delete_account(self):\n signals.before_gameaccount_deleted.send(gameaccount=self.gameaccount)\n db.delete(self.gameaccount)",
"def delete_skill(id, skill):\n with app.app_context():\n user = User.query.get(id)\n if user is None:\n return \"User not found\", 404\n skill_db = Skill.query.filter_by(name=skill).first()\n if skill_db is None:\n return \"Skill not found\", 404\n user.skills.remove(skill_db)\n user_response = UsersResponse(\n users=[\n {\n \"id\": user.id,\n \"name\": user.name,\n \"skills\": [skill.name for skill in user.skills]\n }\n ]\n )\n db.session.commit()\n return user_response.json(), 200",
"def delete(self):\n if jwthandler.authorize_action(self, 1) == False:\n return None\n\n userdata = jwthandler.decode_userdata(self.request.headers[\"Authorization\"])\n\n body_categories = {\"link_id\": 1}\n link_dict = errorutil.check_fields(self.request.body.decode(), body_categories, self)\n\n if link_dict == False or linkutil.delete_link(link_dict[\"link_id\"], self) == False:\n return None\n\n formatted_message = loggerhandler.form_delete_message_dictionary(userdata, \n \"link\", \n link_dict[\"link_id\"])\n\n\n loggerhandler.log_message(\"delete\", formatted_message)\n\n self.write({\"message\":\"Success\"})",
"def __delitem__(self, skillName):\r\n self.removeSkill(skillName)",
"def delete_link(self, word):\n meaning = self.word2meaning[word]\n print(str(self.unique_id) + \" forgot \" +\n str(word) + \" for \" + str(meaning))\n del self.word2meaning[word]\n del self.meaning2word[meaning]\n del self.wordsuccess[word]\n\n # If the agent was the only one using the word, delete the word\n if len(self.model.vocabulary[meaning][word]) == 1:\n del self.model.vocabulary[meaning][word]\n # Else simply remove the agent\n else:\n self.model.vocabulary[meaning][word].remove(self.unique_id)",
"def delete(account):\n account.stripe_account.delete()\n account.delete()",
"def remove_study_version_dbgap_link(apps, schema_editor):\n SourceStudyVersion = apps.get_model('trait_browser', 'SourceStudyVersion')\n for ssv in SourceStudyVersion.objects.all():\n ssv.dbgap_link = ''\n ssv.save()",
"def delete_deployment(request, deployment, **_kwargs):\n pass",
"def dropSkill(skill, db):\n skill_data = db.execute(\n 'SELECT * FROM mystatus WHERE skill = ?', (str(skill), )).fetchone()\n if not skill_data:\n return colored(\"ERROR: Skill {S} is not in your skill set!\".format(S=str(skill)), \"red\", \"on_white\")\n db.execute(\n 'DELETE FROM mystatus WHERE skill = ?', (str(skill), ))\n db.commit()\n return colored(\"Drop skill: \" + str(skill), 'cyan')",
"def delete_account(self):\n Credential.account_list.remove(self)",
"async def unlink(self, ctx):\n # Remove all link tokens and spotify details for this user\n remove_tokens(ctx.author.id)\n remove_spotify_details(ctx.author.id)\n await ctx.reply(\"All your linked accounts were removed, if you had any!\")",
"def delete_analysis_scheme(DomainName=None, AnalysisSchemeName=None):\n pass",
"def del_awcomment(request, pk):\n comment = get_object_or_404(AwardComment, pk=pk)\n comment.delete()\n award = comment.award\n url = '../../' + str(comment.award.pk)\n return redirect(url)",
"def skill(ctx: Context, public_id: PublicId):\n _eject_item(ctx, \"skill\", public_id)",
"def delete_network_profile(arn=None):\n pass",
"def delete_provisioning(self, identifier):\n return self.client.call(\"SoftLayer_Provisioning_Hook\", \"deleteObject\", id=identifier)",
"def award_delete(request, slug,id):\n \n company =get_object_or_404(Company,slug=slug)\n edit = validate_user_company_access_or_redirect(request,company)\n\n if request.method == 'POST':\n return HttpResponseRedirect('/company/'+str(slug))\n else: \n #verifies if the company exists if not returns a 404 page\n award_reference = get_object_or_404(Award, id=id,company=company)\n\n #deletes the view and redirects to the page.\n award_reference.delete()\n return HttpResponseRedirect('/company/'+str(slug))",
"def remove_skills_from_profile(profile_id=None, skills_id=None):\n # get specific objects\n profile = storage.get(\"Profile\", profile_id)\n skills = storage.get(\"Skills\", skills_id)\n if profile is not None and skills is not None:\n # check every skill in profile\n for profile_skill in profile.skills:\n # if the given skill matches skill in profile, remove it\n if profile_skill.id == skills.id:\n profile.skills.remove(skills)\n # save to update database\n profile.save()\n return jsonify({}), 200\n\n # if id not in database, abort\n abort(404)",
"def delete_account(request):\n ubanks = request.user.userbank.all()\n for ubank in ubanks:\n ubank.delete()\n user = request.user\n log_out(request)\n user.delete()\n return HttpResponse(\"Account succesfully deleted\")",
"def delete_phone_asset(self, asset_id):\n return self.delete_asset(asset_id, 'PHONE')",
"def delete(profile, environment, force=True):\n client = boto3client.get(\"elasticbeanstalk\", profile)\n params = {}\n params[\"EnvironmentName\"] = environment\n params[\"TerminateResources\"] = force\n return client.terminate_environment(**params)",
"def delete(self, **kwargs):\n self._plans.delete(**kwargs)",
"def delete(self):\n _url = (\n f\"{self.connector.base_url}/projects/{self.project_id}/links/{self.link_id}\"\n )\n\n self.connector.http_call(\"delete\", _url)\n\n self.project_id = None\n self.link_id = None",
"def delete_marketing(request):\n marketing = request.session.get(CONSENT_USER_INFORMATION_KEY_SESSION, None)\n\n if marketing:\n del request.session[CONSENT_USER_INFORMATION_KEY_SESSION]",
"def unload(self, skillName):\r\n es.unload(\"%s/skills/%s\" % (info.basename, skillName))",
"def delete_object(self, account, container, object):#opposite to get\n \n pass"
] | [
"0.59574074",
"0.5869852",
"0.55502105",
"0.5394746",
"0.53713995",
"0.53487486",
"0.5339688",
"0.5338711",
"0.5329208",
"0.5319457",
"0.5267009",
"0.5248969",
"0.5230982",
"0.5224596",
"0.5199011",
"0.5187306",
"0.51706123",
"0.515707",
"0.5131738",
"0.51230943",
"0.5116736",
"0.5115954",
"0.5092514",
"0.5088374",
"0.5086476",
"0.5082604",
"0.507281",
"0.5053496",
"0.5022201",
"0.50221664"
] | 0.63780856 | 0 |
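A minimal usage sketch for the delete_account_linking_info_v1 record above, assuming the smapi_client built earlier; the skill id is a placeholder, and ServiceException is the runtime error type raised by the invoke() path for the non-2xx responses listed in error_definitions:

from ask_sdk_model_runtime.exceptions import ServiceException

try:
    # Returns None on HTTP 204 (account linking client deleted).
    smapi_client.delete_account_linking_info_v1(
        skill_id='amzn1.ask.skill.00000000-0000-0000-0000-000000000000',
        stage_v2='development')
except ServiceException as e:
    # status_code maps to the 401/403/404/429/5xx definitions above.
    print(e.status_code, e.message)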
Get AccountLinking information for the skill. | def get_account_linking_info_v1(self, skill_id, stage_v2, **kwargs):
# type: (str, str, **Any) -> Union[ApiResponse, object, AccountLinkingResponse_b1f92882, StandardizedError_f5106a89, BadRequestError_f854b05]
operation_name = "get_account_linking_info_v1"
params = locals()
for key, val in six.iteritems(params['kwargs']):
params[key] = val
del params['kwargs']
# verify the required parameter 'skill_id' is set
if ('skill_id' not in params) or (params['skill_id'] is None):
raise ValueError(
"Missing the required parameter `skill_id` when calling `" + operation_name + "`")
# verify the required parameter 'stage_v2' is set
if ('stage_v2' not in params) or (params['stage_v2'] is None):
raise ValueError(
"Missing the required parameter `stage_v2` when calling `" + operation_name + "`")
resource_path = '/v1/skills/{skillId}/stages/{stageV2}/accountLinkingClient'
resource_path = resource_path.replace('{format}', 'json')
path_params = {} # type: Dict
if 'skill_id' in params:
path_params['skillId'] = params['skill_id']
if 'stage_v2' in params:
path_params['stageV2'] = params['stage_v2']
query_params = [] # type: List
header_params = [] # type: List
body_params = None
header_params.append(('Content-type', 'application/json'))
header_params.append(('User-Agent', self.user_agent))
# Response Type
full_response = False
if 'full_response' in params:
full_response = params['full_response']
# Authentication setting
access_token = self._lwa_service_client.get_access_token_from_refresh_token()
authorization_value = "Bearer " + access_token
header_params.append(('Authorization', authorization_value))
error_definitions = [] # type: List
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.account_linking.account_linking_response.AccountLinkingResponse", status_code=200, message="Returns AccountLinking response of the skill."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=401, message="The auth token is invalid/expired or doesn't have access to the resource."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.bad_request_error.BadRequestError", status_code=403, message="The operation being requested is not allowed."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=404, message="The resource being requested is not found."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=429, message="Exceeds the permitted request limit. Throttling criteria includes total requests, per API, ClientId, and CustomerId."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=500, message="Internal Server Error."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=503, message="Service Unavailable."))
api_response = self.invoke(
method="GET",
endpoint=self._api_endpoint,
path=resource_path,
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
response_definitions=error_definitions,
response_type="ask_smapi_model.v1.skill.account_linking.account_linking_response.AccountLinkingResponse")
if full_response:
return api_response
return api_response.body | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_account_details(self):\n pass",
"def account_info(self):\n url, params, headers = self.request(\"/account/info\", method='GET')\n\n return self.rest_client.GET(url, headers)",
"def update_account_linking_info_v1(self, skill_id, stage_v2, account_linking_request, **kwargs):\n # type: (str, str, AccountLinkingRequest_cac174e, **Any) -> Union[ApiResponse, object, StandardizedError_f5106a89, BadRequestError_f854b05]\n operation_name = \"update_account_linking_info_v1\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'skill_id' is set\n if ('skill_id' not in params) or (params['skill_id'] is None):\n raise ValueError(\n \"Missing the required parameter `skill_id` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'stage_v2' is set\n if ('stage_v2' not in params) or (params['stage_v2'] is None):\n raise ValueError(\n \"Missing the required parameter `stage_v2` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'account_linking_request' is set\n if ('account_linking_request' not in params) or (params['account_linking_request'] is None):\n raise ValueError(\n \"Missing the required parameter `account_linking_request` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v1/skills/{skillId}/stages/{stageV2}/accountLinkingClient'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'skill_id' in params:\n path_params['skillId'] = params['skill_id']\n if 'stage_v2' in params:\n path_params['stageV2'] = params['stage_v2']\n\n query_params = [] # type: List\n\n header_params = [] # type: List\n if 'if_match' in params:\n header_params.append(('If-Match', params['if_match']))\n\n body_params = None\n if 'account_linking_request' in params:\n body_params = params['account_linking_request']\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=None, status_code=204, message=\"Success\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=400, message=\"Server cannot process the request due to a client error e.g. 
Authorization Url is invalid.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=401, message=\"The auth token is invalid/expired or doesn't have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=403, message=\"The operation being requested is not allowed.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=404, message=\"The resource being requested is not found.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=412, message=\"Precondition failed.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=429, message=\"Exceeds the permitted request limit. Throttling criteria includes total requests, per API, ClientId, and CustomerId.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=500, message=\"Internal Server Error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=503, message=\"Service Unavailable.\"))\n\n api_response = self.invoke(\n method=\"PUT\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=None)\n\n if full_response:\n return api_response\n \n return None",
"def get_account_info(self):\n resp = requests.get(\n self.URL + 'info/',\n headers={'Authorization': 'Token ' + self.api_key}\n )\n\n return self.__handle_response(resp)",
"def account_information(self) -> MetatraderAccountInformation:\n return self._accountInformation",
"def accounts_info(self):\r\n param = {}\r\n param['appid'] = self.apiKey\r\n param['nonce'] = int(time.time() * 1000)\r\n param['timestamp'] = int(time.time())\r\n return self.__signed_GET('/api/v1/account/all', param, self.timeout)",
"def get_account(self):\n return self._account",
"def get_account(self):\n return self._account",
"def get_account(self, account):\n \n pass",
"def account_look_up_info(self):\n return self._account_look_up_info",
"def get_account_info(self):\n resource = self.domain + \"/account\"\n self.logger.debug(\"Pulling data from {0}\".format(resource))\n response = self.session.get(resource)\n\n if response.status_code != requests.codes.ok:\n return response.raise_for_status()\n data = response.text\n root = Et.fromstring(data)\n bf = BadgerFish(dict_type=dict)\n account_info = bf.data(root)\n return account_info",
"def linkedin(self):\n return self._linkedin",
"def _get_linkedin_accounts(self, linkedin_access_token):\n response = requests.get(\n 'https://api.linkedin.com/v2/me?projection='\n + '(id,localizedLastName,localizedFirstName,'\n + 'profilePicture(displayImage~:playableStreams))',\n headers={\n 'Authorization': 'Bearer ' + linkedin_access_token,\n 'cache-control': 'no-cache',\n 'X-Restli-Protocol-Version': '2.0.0'\n }\n ).json()\n\n if ('id' in response and 'localizedLastName' in response\n and 'localizedFirstName' in response):\n linkedin_account_id = 'urn:li:person:' + response['id']\n\n try:\n image_url = response['profilePicture']['displayImage~']['elements'][0]['identifiers'][0]['identifier']\n linkedin_profile_image = base64.b64encode(requests.get(image_url).content)\n except Exception:\n linkedin_profile_image = ''\n\n # TODO - STD: add each companies page\n return [{\n 'name': response['localizedLastName'] + ' ' + response['localizedFirstName'],\n 'linkedin_account_id': linkedin_account_id,\n 'linkedin_access_token': linkedin_access_token,\n 'image': linkedin_profile_image\n }]\n\n return []",
"def account_info(request):\r\n user = request.user\r\n\r\n return _api_response(request, user.safe_data())",
"def get_account_information(self):\n self.account_information = retry(lambda: self.client\n .futures_account_v2())\n return self.account_information",
"def delete_account_linking_info_v1(self, skill_id, stage_v2, **kwargs):\n # type: (str, str, **Any) -> Union[ApiResponse, object, StandardizedError_f5106a89, BadRequestError_f854b05]\n operation_name = \"delete_account_linking_info_v1\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'skill_id' is set\n if ('skill_id' not in params) or (params['skill_id'] is None):\n raise ValueError(\n \"Missing the required parameter `skill_id` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'stage_v2' is set\n if ('stage_v2' not in params) or (params['stage_v2'] is None):\n raise ValueError(\n \"Missing the required parameter `stage_v2` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v1/skills/{skillId}/stages/{stageV2}/accountLinkingClient'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'skill_id' in params:\n path_params['skillId'] = params['skill_id']\n if 'stage_v2' in params:\n path_params['stageV2'] = params['stage_v2']\n\n query_params = [] # type: List\n\n header_params = [] # type: List\n\n body_params = None\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=None, status_code=204, message=\"Success. No content.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=401, message=\"The auth token is invalid/expired or doesn't have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=403, message=\"The operation being requested is not allowed.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=404, message=\"The specified skill/stage/accountLinkingClient doesn't exist.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=429, message=\"Exceed the permitted request limit. Throttling criteria includes total requests, per API, ClientId, and CustomerId.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=500, message=\"Internal Server Error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=503, message=\"Service Unavailable.\"))\n\n api_response = self.invoke(\n method=\"DELETE\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=None)\n\n if full_response:\n return api_response\n \n return None",
"def account(self):\n return self.request('/account')",
"def batch_get_account_user_link(\n account_id: str, account_user_link_id: str, transport: str = None\n):\n client = AnalyticsAdminServiceClient(transport=transport)\n response = client.batch_get_user_links(\n BatchGetUserLinksRequest(\n parent=f\"accounts/{account_id}\",\n names=[f\"accounts/{account_id}/userLinks/{account_user_link_id}\"],\n )\n )\n\n print(\"Result:\")\n for user_link in response.user_links:\n print(user_link)\n print()",
"def get_account(self, accountid):\n payload = {'appkey': self._lr_object._get_api_key(), 'appsecret': self._lr_object._get_api_secret(),\n 'accountid': accountid}\n url = SECURE_API_URL + \"raas/v1/account\"\n return self._lr_object._get_json(url, payload)",
"def get_account():\n\n # get user\n user = g.user\n\n # response\n return jsonify({'user_account': UserAccountAdminSchema().dump(user)}), 200",
"def account(self) -> str:\n return self._account",
"def account(self) -> str:\n return self._account",
"def getaccountaddress(self, account):\n return self.proxy.getaccountaddress(account)",
"def account_credential_details(self) -> Sequence['outputs.AccountCredentialDetailsResponse']:\n return pulumi.get(self, \"account_credential_details\")",
"def account_credential_details(self) -> Sequence['outputs.AccountCredentialDetailsResponse']:\n return pulumi.get(self, \"account_credential_details\")",
"def get_account_details(account_id, writer, key):\n query = iroha.query(\n \"GetAccountDetail\", account_id=account_id, writer=writer, key=key\n )\n ic.sign_query(query, user_private_key)\n response = net.send_query(query)\n data = json.loads(response.account_detail_response.detail)\n pprint(data)",
"def get_accounts(self):\n return self.accounts",
"def get_account():\n\n bus = session_bus()\n\n goa_manager = bus.get_object(GOA_NAME, GOA_PATH)\n\n goa_objects = goa_manager.GetManagedObjects(dbus_interface=OBJECT_MANAGER)\n\n accounts = [\n obj for obj in goa_objects\n if obj != GOA_MANAGER_PATH\n ]\n\n if len(accounts) > 1:\n sys.exit(\"More than one account found.\")\n\n (account_path,) = accounts\n\n return bus.get_object(GOA_NAME, account_path)",
"def get_relationship(self, guid):\n results = None\n atlas_endpoint = self.endpoint_url + f\"/relationship/guid/{guid}\"\n\n getResponse = requests.get(\n atlas_endpoint,\n headers=self.authentication.get_authentication_headers()\n )\n\n results = self._handle_response(getResponse)\n\n return results",
"def get_account_alias(self):\r\n return self.get_response('ListAccountAliases', {},\r\n list_marker='AccountAliases')"
] | [
"0.6023335",
"0.5835163",
"0.5693071",
"0.5611704",
"0.55987525",
"0.55865586",
"0.54766816",
"0.54766816",
"0.539679",
"0.53627443",
"0.5356714",
"0.531904",
"0.52245855",
"0.52083486",
"0.51816183",
"0.51422125",
"0.5121313",
"0.5116706",
"0.5106151",
"0.5069737",
"0.5050836",
"0.5050836",
"0.501784",
"0.5009983",
"0.5009983",
"0.5006482",
"0.5005322",
"0.49789855",
"0.49529853",
"0.49475667"
] | 0.65804845 | 0 |
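Likewise, a hedged sketch of calling get_account_linking_info_v1 from the record above. The attribute names on the returned AccountLinkingResponse (client_id, authorization_url) are assumed from the accountLinkingClient schema and should be verified against the installed ask_smapi_model version:

# Returns an AccountLinkingResponse on HTTP 200.
linking = smapi_client.get_account_linking_info_v1(
    skill_id='amzn1.ask.skill.00000000-0000-0000-0000-000000000000',
    stage_v2='development')
# Assumed fields mirroring the account linking schema.
print(linking.client_id, linking.authorization_url)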
Create AccountLinking information for the skill. | def update_account_linking_info_v1(self, skill_id, stage_v2, account_linking_request, **kwargs):
# type: (str, str, AccountLinkingRequest_cac174e, **Any) -> Union[ApiResponse, object, StandardizedError_f5106a89, BadRequestError_f854b05]
operation_name = "update_account_linking_info_v1"
params = locals()
for key, val in six.iteritems(params['kwargs']):
params[key] = val
del params['kwargs']
# verify the required parameter 'skill_id' is set
if ('skill_id' not in params) or (params['skill_id'] is None):
raise ValueError(
"Missing the required parameter `skill_id` when calling `" + operation_name + "`")
# verify the required parameter 'stage_v2' is set
if ('stage_v2' not in params) or (params['stage_v2'] is None):
raise ValueError(
"Missing the required parameter `stage_v2` when calling `" + operation_name + "`")
# verify the required parameter 'account_linking_request' is set
if ('account_linking_request' not in params) or (params['account_linking_request'] is None):
raise ValueError(
"Missing the required parameter `account_linking_request` when calling `" + operation_name + "`")
resource_path = '/v1/skills/{skillId}/stages/{stageV2}/accountLinkingClient'
resource_path = resource_path.replace('{format}', 'json')
path_params = {} # type: Dict
if 'skill_id' in params:
path_params['skillId'] = params['skill_id']
if 'stage_v2' in params:
path_params['stageV2'] = params['stage_v2']
query_params = [] # type: List
header_params = [] # type: List
if 'if_match' in params:
header_params.append(('If-Match', params['if_match']))
body_params = None
if 'account_linking_request' in params:
body_params = params['account_linking_request']
header_params.append(('Content-type', 'application/json'))
header_params.append(('User-Agent', self.user_agent))
# Response Type
full_response = False
if 'full_response' in params:
full_response = params['full_response']
# Authentication setting
access_token = self._lwa_service_client.get_access_token_from_refresh_token()
authorization_value = "Bearer " + access_token
header_params.append(('Authorization', authorization_value))
error_definitions = [] # type: List
error_definitions.append(ServiceClientResponse(response_type=None, status_code=204, message="Success"))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.bad_request_error.BadRequestError", status_code=400, message="Server cannot process the request due to a client error e.g. Authorization Url is invalid."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=401, message="The auth token is invalid/expired or doesn't have access to the resource."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.bad_request_error.BadRequestError", status_code=403, message="The operation being requested is not allowed."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=404, message="The resource being requested is not found."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=412, message="Precondition failed."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=429, message="Exceeds the permitted request limit. Throttling criteria includes total requests, per API, ClientId, and CustomerId."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=500, message="Internal Server Error."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=503, message="Service Unavailable."))
api_response = self.invoke(
method="PUT",
endpoint=self._api_endpoint,
path=resource_path,
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
response_definitions=error_definitions,
response_type=None)
if full_response:
return api_response
return None | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_account_linking_info_v1(self, skill_id, stage_v2, **kwargs):\n # type: (str, str, **Any) -> Union[ApiResponse, object, AccountLinkingResponse_b1f92882, StandardizedError_f5106a89, BadRequestError_f854b05]\n operation_name = \"get_account_linking_info_v1\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'skill_id' is set\n if ('skill_id' not in params) or (params['skill_id'] is None):\n raise ValueError(\n \"Missing the required parameter `skill_id` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'stage_v2' is set\n if ('stage_v2' not in params) or (params['stage_v2'] is None):\n raise ValueError(\n \"Missing the required parameter `stage_v2` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v1/skills/{skillId}/stages/{stageV2}/accountLinkingClient'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'skill_id' in params:\n path_params['skillId'] = params['skill_id']\n if 'stage_v2' in params:\n path_params['stageV2'] = params['stage_v2']\n\n query_params = [] # type: List\n\n header_params = [] # type: List\n\n body_params = None\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.account_linking.account_linking_response.AccountLinkingResponse\", status_code=200, message=\"Returns AccountLinking response of the skill.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=401, message=\"The auth token is invalid/expired or doesn't have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=403, message=\"The operation being requested is not allowed.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=404, message=\"The resource being requested is not found.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=429, message=\"Exceeds the permitted request limit. 
Throttling criteria includes total requests, per API, ClientId, and CustomerId.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=500, message=\"Internal Server Error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=503, message=\"Service Unavailable.\"))\n\n api_response = self.invoke(\n method=\"GET\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=\"ask_smapi_model.v1.skill.account_linking.account_linking_response.AccountLinkingResponse\")\n\n if full_response:\n return api_response\n return api_response.body",
"def create_account_user_link(\n account_id: str, email_address: str, transport: str = None\n):\n client = AnalyticsAdminServiceClient(transport=transport)\n user_link = client.create_user_link(\n CreateUserLinkRequest(\n parent=f\"accounts/{account_id}\",\n user_link=UserLink(\n email_address=email_address, direct_roles=[\"predefinedRoles/read\"]\n ),\n notify_new_user=True,\n )\n )\n\n print(\"Result:\")\n print(user_link)",
"def newaccount(accountname, account, owner, active, memo, posting, create_claimed_account):\n stm = shared_morphene_instance()\n if mph.rpc is not None:\n mph.rpc.rpcconnect()\n if not account:\n account = mph.config[\"default_account\"]\n if not unlock_wallet(stm):\n return\n acc = Account(account, morphene_instance=stm)\n if owner is None or active is None or memo is None or posting is None:\n password = click.prompt(\"Keys were not given - Passphrase is used to create keys\\n New Account Passphrase\", confirmation_prompt=True, hide_input=True)\n if not password:\n print(\"You cannot chose an empty password\")\n return\n if create_claimed_account:\n tx = mph.create_claimed_account(accountname, creator=acc, password=password)\n else:\n tx = mph.create_account(accountname, creator=acc, password=password)\n else:\n if create_claimed_account:\n tx = mph.create_claimed_account(accountname, creator=acc, owner_key=owner, active_key=active, memo_key=memo, posting_key=posting)\n else:\n tx = mph.create_account(accountname, creator=acc, owner_key=owner, active_key=active, memo_key=memo, posting_key=posting) \n tx = json.dumps(tx, indent=4)\n print(tx)",
"def create_account():\n user_id = get_jwt_identity()\n user = User.filter(id=user_id)[0]\n data = json.loads(request.data)\n\n if 'title' not in data:\n return jsonify_response({\"errors\": \"`title` field is required.\"}, 400)\n\n held_accounts = user.get_held_accounts(user.id)\n if held_accounts:\n user_accounts = \",\".join(f\"'{i}'\" for i in held_accounts)\n user_account_names_q = \\\n f\"g.V().hasLabel('{Account.LABEL}')\" + \\\n f\".has('id', within({user_accounts}))\" + \\\n f\".values('title')\"\n user_account_names = client.submit(user_account_names_q).all().result()\n\n if data[\"title\"] in user_account_names:\n return jsonify_response(\n {\"errors\": \"Users with the title already exist\"}, 400)\n\n account = Account.create(title=data[\"title\"])\n edge = UserHoldsAccount.create(user=user.id, account=account.id,\n relationType=\"secondary\")\n\n response = {\n \"title\": account.title\n }\n return jsonify_response(response, 201)",
"def link_account(self, accountid, provider, providerid):\n auth = 'appkey='+ self._lr_object._get_api_key()+ '&appsecret='+ self._lr_object._get_api_secret()\n payload = {'accountid': accountid, 'provider': provider, 'providerid': providerid}\n url = SECURE_API_URL + \"raas/v1/account/link\" + \"?\" + auth\n return self._lr_object._post_json(url, payload)",
"def create_account():\n if not request.json or not 'name' in request.json:\n abort(400)\n account = {\n 'id': accounts[-1]['id'] + 1, #last id + 1\n 'name': request.json['name'],\n 'surname': request.json['surname'],\n 'product': request.json.get('product', \"\"),\n 'balance': request.json.get('balance', 0.00)\n }\n\n accounts.append(account)\n\n return json.dumps({'New Account': account}, ensure_ascii=False), 201, {'Content-Type': 'text/css; charset=utf-8'}",
"def create_account():\n account = w3.eth.account.create()\n return account",
"def create_with_account(ctx, amount, save_to, skale_amount, eth_amount, type):\n skale = ctx.obj['skale']\n print(save_to)\n for i in range(amount):\n wallet, private_key = create_account(skale, skale_amount, eth_amount)\n schain_info = create_schain(skale, wallet, type)\n save_info(i, schain_info, wallet, private_key, save_to)\n logger.info(LONG_LINE)\n show_all_schain_ids(skale)",
"def delete_account_linking_info_v1(self, skill_id, stage_v2, **kwargs):\n # type: (str, str, **Any) -> Union[ApiResponse, object, StandardizedError_f5106a89, BadRequestError_f854b05]\n operation_name = \"delete_account_linking_info_v1\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'skill_id' is set\n if ('skill_id' not in params) or (params['skill_id'] is None):\n raise ValueError(\n \"Missing the required parameter `skill_id` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'stage_v2' is set\n if ('stage_v2' not in params) or (params['stage_v2'] is None):\n raise ValueError(\n \"Missing the required parameter `stage_v2` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v1/skills/{skillId}/stages/{stageV2}/accountLinkingClient'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'skill_id' in params:\n path_params['skillId'] = params['skill_id']\n if 'stage_v2' in params:\n path_params['stageV2'] = params['stage_v2']\n\n query_params = [] # type: List\n\n header_params = [] # type: List\n\n body_params = None\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=None, status_code=204, message=\"Success. No content.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=401, message=\"The auth token is invalid/expired or doesn't have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=403, message=\"The operation being requested is not allowed.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=404, message=\"The specified skill/stage/accountLinkingClient doesn't exist.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=429, message=\"Exceed the permitted request limit. Throttling criteria includes total requests, per API, ClientId, and CustomerId.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=500, message=\"Internal Server Error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=503, message=\"Service Unavailable.\"))\n\n api_response = self.invoke(\n method=\"DELETE\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=None)\n\n if full_response:\n return api_response\n \n return None",
"def create_account(row, issue_map):\n acc_type = account_type(row)\n name = row['PROJ_NAME1']\n if Account.objects.filter(name=name).first():\n name = name + ' (' + row['PROJ_NO'] + ')'\n account = Account(name=name, code=row['PROJ_NO'], category=acc_type)\n if acc_type == Account.PROJECT:\n create_pcpp(account, row, issue_map)\n else:\n create_campaign(account, row, name, acc_type)",
"def new_account(firstname, lastname, pin):\n pass",
"def create(self, account):\n model = models.load('Account', account)\n\n return self.client.create_account(model=model)",
"def associate_member_account(memberAccountId=None):\n pass",
"def createProfile(self):\n if self.profile:\n return\n from soc.modules.gsoc.models.profile import GSoCProfile\n user = self.createUser()\n properties = {'link_id': user.link_id, 'student_info': None, 'user': user,\n 'parent': user, 'scope': self.program, 'status': 'active'}\n self.profile = seeder_logic.seed(GSoCProfile, properties)",
"def create(self, vals):\n res = super(AccountAnalyticAccount, self).create(vals)\n if self._context.get('is_landlord_rent'):\n res.code = self.env['ir.sequence'].next_by_code(\n 'landlord.rent')\n if res.is_landlord_rent:\n res.write({'is_property': False})\n if 'property_id' in vals:\n prop_brw = self.env['account.asset'].browse(\n vals['property_id'])\n if not prop_brw.property_owner:\n prop_brw.write(\n {'property_owner': vals.get('property_owner_id')})\n return res",
"def create_link(self, word, meaning):\n print(str(self.unique_id) + \" learned \" +\n str(word) + \" for \" + str(meaning))\n self.meaning2word[meaning] = word\n self.word2meaning[word] = meaning\n self.wordsuccess[word] = []\n\n if meaning not in self.model.vocabulary:\n self.model.vocabulary[meaning] = {}\n\n # If word not in vocabulary, add it\n if word not in self.model.vocabulary[meaning]:\n self.model.vocabulary[meaning][word] = [self.unique_id]\n # Else append this agent to its users\n else:\n self.model.vocabulary[meaning][word].append(self.unique_id)",
"def add_account(insert_dict):\n return ar.add_account(insert_dict)",
"def create_account(self, account_tree):\n\n # Couple this object to the account object in order\n # to access the request_xml methods and other account info\n account_data = dict()\n account_data['client'] = self\n\n for param in account_tree.iter('CardData'):\n name = param.get('name',\"NA\")\n if name != \"NA\":\n account_data[name] = param.text\n\n for summary_element in account_tree.iter('AccountSummaryData'):\n key = 'value' if 'value' in summary_element.attrib else 'formattedValue'\n name = summary_element.get('name',\"NA\")\n if name != \"NA\":\n account_data[name] = summary_element.attrib[key]\n\n # Extract the loyalty programmes from the XML\n # for element in account_tree.findall('LoyaltyData/RewardsData/param'):\n # name = element.attrib['label']\n # value = element.attrib['formattedValue'].replace(',', '')\n # loyalty_programme = LoyaltyProgramme(name, value)\n # self.loyalty_programmes.append(loyalty_programme)\n\n\n return CardAccount(account_data)",
"def create(self, data):\n url = self.base_url + '/v2/account/create/'\n return self._call_vendasta(url, data)",
"def create_link(self, key, link, default):\n\n setting = self.new_link(key, link, default)\n setting.create()\n return setting",
"def createLink(self, source, destination):\n log(\"creating link\")\n\n if \"flix\" in source:\n return \"%s\" % +OSUtils.createLink(source, destination)\n return \"0\"",
"def create_account(self, user):\n tx = self.iroha.transaction(\n [\n self.iroha.command(\n \"CreateAccount\",\n account_name=user.gov_id,\n domain_id=\"afyamkononi\",\n public_key=user.public_key,\n )\n ]\n )\n IrohaCrypto.sign_transaction(tx, self.creator_account_details.private_key)\n return self.send_transaction_and_return_status(tx)",
"def create_custom_audience(self, account_id, name, subtype=None,\n description=None, rule=None, opt_out_link=None,\n retention_days=30, batch=False):\n path = \"act_%s/customaudiences\" % account_id\n args = {\n 'name': name,\n }\n if subtype:\n args['subtype'] = subtype\n if description:\n args['description'] = description\n if rule:\n args['rule'] = json.dumps(rule)\n if opt_out_link:\n args['opt_out_link'] = opt_out_link\n if retention_days:\n args['retention_days'] = retention_days\n return self.make_request(path, 'POST', args, batch=batch)",
"def create_links(self, name):\n for target, linknames in self._link_map.iteritems():\n for linkname in linknames:\n self._api.path.mock_copy_paths(target, linkname)\n self._api.python(\n name,\n self._resource,\n args = [\n '--link-json',\n self._api.json.input({str(target) : linkname\n for target, linkname in self._link_map.iteritems()\n }),\n ],\n infra_step=True)",
"def create_account():\n\n return render_template('account.html')",
"def create_an_account_for_user(list_of_all_accounts_known, starting_account_balance_amount):\n #account_numbers, balance_ammounts = zip(*zip(list_of_all_accounts_known)) # Error - cannot unzip single object, only list of two elements\n last_unique_account_ID = list_of_all_accounts_known[len(list_of_all_accounts_known) - 1].account_id\n new_account = ATMBankAccount(str(int(last_unique_account_ID) + 1), str(starting_account_balance_amount))\n return list_of_all_accounts_known.append(new_account)",
"def add_skills_to_profile():\n # get specific objects\n profile = storage.get(\"Profile\", profile_id)\n skills = storage.get(\"Skills\", skills_id)\n if profile is not None and skills is not None:\n # check every skill in profile\n for profile_skill in profile.skills:\n # if the given skill is already linked to profile, return\n if profile_skill.id == skills.id:\n return jsonify(skills.to_dict()), 200\n # if skill is not in profile, append skill and save\n profile.skills.append(skills)\n profile.save()\n return jsonify(skills.to_dict()), 201\n\n # if id not in database, abort\n abort(404)",
"async def create_accounts(self):\n self._logger.info(\"Creating accounts...\")\n\n validator_peer_id = ((self.my_id - 1) % self.num_validators) + 1\n host, _ = self.experiment.get_peer_ip_port_by_id(validator_peer_id)\n horizon_uri = \"http://%s:%d\" % (host, 19000 + validator_peer_id)\n\n root_keypair = Keypair.from_secret(\"SDJ5AQWLIAYT22TCYSKOQALI3SNUMPAR63SEL73ASALDP6PYDN54FARM\")\n async with Server(horizon_url=horizon_uri, client=AiohttpClient()) as server:\n root_account = await server.load_account(root_keypair.public_key)\n self.root_seq_num = root_account.sequence\n self._logger.info(\"Setting root sequence number to %d\", self.root_seq_num)\n\n builder = TransactionBuilder(\n source_account=root_account,\n network_passphrase=\"Standalone Pramati Network ; Oct 2018\"\n )\n\n async def append_create_account_op(builder, root_keypair, receiver_pub_key, amount):\n builder.append_create_account_op(receiver_pub_key, amount, root_keypair.public_key)\n if len(builder.operations) == 100:\n self._logger.info(\"Sending create transaction ops...\")\n tx = builder.build()\n tx.sign(root_keypair)\n response = requests.get(\"http://%s:%d/tx?blob=%s\" % (host, 11000 + validator_peer_id,\n quote_plus(tx.to_xdr())))\n self._logger.info(\"Received response for create accounts request: %s\", response.text)\n\n await sleep(2)\n\n self.root_seq_num += 1\n\n partial_root_acc = Account(root_keypair.public_key, self.root_seq_num)\n builder = TransactionBuilder(\n source_account=partial_root_acc,\n network_passphrase=\"Standalone Pramati Network ; Oct 2018\"\n )\n\n return builder\n\n for client_index in range(self.num_validators + 1, self.num_validators + self.num_clients + 1):\n receiver_keypair = Keypair.random()\n builder = await append_create_account_op(builder, root_keypair, receiver_keypair.public_key, \"10000000\")\n self.experiment.send_message(client_index, b\"receive_account_seed\", receiver_keypair.secret.encode())\n\n # Create the sender accounts\n for account_ind in range(self.num_accounts_per_client):\n sender_keypair = Keypair.random()\n builder = await append_create_account_op(builder, root_keypair, sender_keypair.public_key, \"10000000\")\n self.experiment.send_message(client_index, b\"send_account_seed_%d\" % account_ind,\n sender_keypair.secret.encode())\n\n # Send the remaining operations\n if builder.operations:\n self._logger.info(\"Sending remaining create transaction ops...\")\n tx = builder.build()\n tx.sign(root_keypair)\n response = requests.get(\"http://%s:%d/tx?blob=%s\" % (host, 11000 + validator_peer_id,\n quote_plus(tx.to_xdr())))\n self._logger.info(\"Received response for create accounts request: %s\", response.text)\n self.root_seq_num += 1",
"def addSkill(self, skillName, maxLevel, creditStart, creditIncrement):\r\n self.skills[skillName] = SkillObject(skillName, maxLevel, creditStart, creditIncrement)\r\n self.orderedSkills.append(skillName)",
"def create_account_alias(self, alias):\r\n params = {'AccountAlias': alias}\r\n return self.get_response('CreateAccountAlias', params)"
] | [
"0.5784822",
"0.56779265",
"0.5625924",
"0.55338156",
"0.53889227",
"0.5382797",
"0.5333097",
"0.52733284",
"0.5263881",
"0.52172035",
"0.5188085",
"0.5145356",
"0.5105096",
"0.50658125",
"0.50535816",
"0.50370497",
"0.502856",
"0.49960983",
"0.49535173",
"0.49480695",
"0.49304116",
"0.48853722",
"0.48629603",
"0.48569125",
"0.48553202",
"0.48525855",
"0.483703",
"0.48285377",
"0.48279515",
"0.48274353"
] | 0.5760728 | 1 |
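To complete the picture, a sketch of the update call from the record above. The AccountLinkingRequest constructor keywords (object_type, authorization_url, access_token_url, client_id, scopes) are assumed from the generated ask_smapi_model classes and should be checked against the installed version; all URLs and ids are placeholders:

from ask_smapi_model.v1.skill.account_linking.account_linking_request import AccountLinkingRequest
from ask_smapi_model.v1.skill.account_linking.account_linking_type import AccountLinkingType

request = AccountLinkingRequest(
    object_type=AccountLinkingType.AUTH_CODE,
    authorization_url='https://example.com/oauth/authorize',  # placeholder
    access_token_url='https://example.com/oauth/token',       # placeholder
    client_id='my-oauth-client-id',                           # placeholder
    scopes=['profile'])

# Returns None on HTTP 204; an if_match kwarg can guard against concurrent edits
# (it is sent as the If-Match header, per the method body above).
smapi_client.update_account_linking_info_v1(
    skill_id='amzn1.ask.skill.00000000-0000-0000-0000-000000000000',
    stage_v2='development',
    account_linking_request=request)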
Creates a new clone locale workflow for a skill with the given skillId, source locale, and target locales. In a single workflow, a locale can be cloned to multiple target locales; however, only one such workflow can be in progress at any given time. | def clone_locale_v1(self, skill_id, stage_v2, clone_locale_request, **kwargs):
# type: (str, str, CloneLocaleRequest_2e00cdf4, **Any) -> Union[ApiResponse, object, StandardizedError_f5106a89, BadRequestError_f854b05]
operation_name = "clone_locale_v1"
params = locals()
for key, val in six.iteritems(params['kwargs']):
params[key] = val
del params['kwargs']
# verify the required parameter 'skill_id' is set
if ('skill_id' not in params) or (params['skill_id'] is None):
raise ValueError(
"Missing the required parameter `skill_id` when calling `" + operation_name + "`")
# verify the required parameter 'stage_v2' is set
if ('stage_v2' not in params) or (params['stage_v2'] is None):
raise ValueError(
"Missing the required parameter `stage_v2` when calling `" + operation_name + "`")
# verify the required parameter 'clone_locale_request' is set
if ('clone_locale_request' not in params) or (params['clone_locale_request'] is None):
raise ValueError(
"Missing the required parameter `clone_locale_request` when calling `" + operation_name + "`")
resource_path = '/v1/skills/{skillId}/stages/{stageV2}/cloneLocale'
resource_path = resource_path.replace('{format}', 'json')
path_params = {} # type: Dict
if 'skill_id' in params:
path_params['skillId'] = params['skill_id']
if 'stage_v2' in params:
path_params['stageV2'] = params['stage_v2']
query_params = [] # type: List
header_params = [] # type: List
body_params = None
if 'clone_locale_request' in params:
body_params = params['clone_locale_request']
header_params.append(('Content-type', 'application/json'))
header_params.append(('User-Agent', self.user_agent))
# Response Type
full_response = False
if 'full_response' in params:
full_response = params['full_response']
# Authentication setting
access_token = self._lwa_service_client.get_access_token_from_refresh_token()
authorization_value = "Bearer " + access_token
header_params.append(('Authorization', authorization_value))
error_definitions = [] # type: List
error_definitions.append(ServiceClientResponse(response_type=None, status_code=202, message="Accepted."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.bad_request_error.BadRequestError", status_code=400, message="Server cannot process the request due to a client error."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=401, message="The auth token is invalid/expired or doesn't have access to the resource."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.bad_request_error.BadRequestError", status_code=403, message="The operation being requested is not allowed."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=404, message="The resource being requested is not found."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=409, message="The request could not be completed due to a conflict with the current state of the target resource."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=429, message="Exceeds the permitted request limit. Throttling criteria includes total requests, per API, ClientId, and CustomerId."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=500, message="Internal Server Error."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=503, message="Service Unavailable."))
api_response = self.invoke(
method="POST",
endpoint=self._api_endpoint,
path=resource_path,
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
response_definitions=error_definitions,
response_type=None)
if full_response:
return api_response
return None | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_clone_locale_status_v1(self, skill_id, stage_v2, clone_locale_request_id, **kwargs):\n # type: (str, str, str, **Any) -> Union[ApiResponse, object, StandardizedError_f5106a89, BadRequestError_f854b05, CloneLocaleStatusResponse_8b6e06ed]\n operation_name = \"get_clone_locale_status_v1\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'skill_id' is set\n if ('skill_id' not in params) or (params['skill_id'] is None):\n raise ValueError(\n \"Missing the required parameter `skill_id` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'stage_v2' is set\n if ('stage_v2' not in params) or (params['stage_v2'] is None):\n raise ValueError(\n \"Missing the required parameter `stage_v2` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'clone_locale_request_id' is set\n if ('clone_locale_request_id' not in params) or (params['clone_locale_request_id'] is None):\n raise ValueError(\n \"Missing the required parameter `clone_locale_request_id` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v1/skills/{skillId}/stages/{stageV2}/cloneLocaleRequests/{cloneLocaleRequestId}'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'skill_id' in params:\n path_params['skillId'] = params['skill_id']\n if 'stage_v2' in params:\n path_params['stageV2'] = params['stage_v2']\n if 'clone_locale_request_id' in params:\n path_params['cloneLocaleRequestId'] = params['clone_locale_request_id']\n\n query_params = [] # type: List\n\n header_params = [] # type: List\n\n body_params = None\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.clone_locale_status_response.CloneLocaleStatusResponse\", status_code=200, message=\"OK.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=400, message=\"Server cannot process the request due to a client error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=401, message=\"The auth token is invalid/expired or doesn't have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=403, message=\"The operation being requested is not allowed.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=404, message=\"The resource being requested is not found.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=429, message=\"Exceeds the permitted request limit. 
Throttling criteria includes total requests, per API, ClientId, and CustomerId.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=500, message=\"Internal Server Error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=503, message=\"Service Unavailable.\"))\n\n api_response = self.invoke(\n method=\"GET\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=\"ask_smapi_model.v1.skill.clone_locale_status_response.CloneLocaleStatusResponse\")\n\n if full_response:\n return api_response\n return api_response.body",
"def clone_workflow(workflow, reana_spec, restart_type):\n try:\n cloned_workflow = Workflow(\n id_=str(uuid4()),\n name=workflow.name,\n owner_id=workflow.owner_id,\n reana_specification=reana_spec or workflow.reana_specification,\n type_=restart_type or workflow.type_,\n logs=\"\",\n workspace_path=workflow.workspace_path,\n restart=True,\n run_number=workflow.run_number,\n )\n Session.add(cloned_workflow)\n Session.object_session(cloned_workflow).commit()\n return cloned_workflow\n except SQLAlchemyError as e:\n message = \"Database connection failed, please retry.\"\n logging.error(\n f\"Error while creating {cloned_workflow.id_}: {message}\\n{e}\", exc_info=True\n )",
"def copy_workflow(self, old_playbook_name, new_playbook_name, old_workflow_name, new_workflow_name):\n workflow = self.get_workflow(old_playbook_name, old_workflow_name)\n workflow_copy = deepcopy(workflow)\n workflow_copy.playbook_name = new_playbook_name\n workflow_copy.name = new_workflow_name\n workflow_copy.playbook_name = new_playbook_name\n\n key = _WorkflowKey(new_playbook_name, new_workflow_name)\n self.workflows[key] = workflow_copy\n logger.info('Workflow copied from {0}-{1} to {2}-{3}'.format(old_playbook_name, old_workflow_name,\n new_playbook_name, new_workflow_name))",
"def clone_stack(SourceStackId=None, Name=None, Region=None, VpcId=None, Attributes=None, ServiceRoleArn=None, DefaultInstanceProfileArn=None, DefaultOs=None, HostnameTheme=None, DefaultAvailabilityZone=None, DefaultSubnetId=None, CustomJson=None, ConfigurationManager=None, ChefConfiguration=None, UseCustomCookbooks=None, UseOpsworksSecurityGroups=None, CustomCookbooksSource=None, DefaultSshKeyName=None, ClonePermissions=None, CloneAppIds=None, DefaultRootDeviceType=None, AgentVersion=None):\n pass",
"def ssp_lu_clone_simple(cs, cluster_id):\n\n cluster = cs.cluster.get(cluster_id)\n ssp_id = cluster.sharedstoragepool_id()\n ssp = cs.sharedstoragepool.get(ssp_id)\n # add LogicalUnits\n n = cs.sharedstoragepool.create_unique_name\n (source_lu, ssp) = ssp.update_append_lu(n(\"P2Z-SOURCE-DM\"), 10, thin=True,\n logicalunittype=\"VirtualIO_Image\")\n (target_lu, ssp) = ssp.update_append_lu(n(\"P2Z-TARGET-DM\"), 10, thin=True)\n\n print (\"Created Source LU: >%s< and Target LU: >%s<\" %\n (source_lu.unique_device_id, target_lu.unique_device_id))\n\n cluster = cs.cluster.get(cluster_id)\n status, job_id = cluster.lu_linked_clone(\n source_lu.unique_device_id,\n target_lu.unique_device_id)\n\n print (\"lu_linked_clone job_id: >%s<, completed w/ status: >%s<\" %\n (job_id, status,))\n\n ssp = ssp.update_del_lus([target_lu.unique_device_id,\n source_lu.unique_device_id])\n\n print (\"Deleted Source LU: >%s< and Target LU: >%s<\" %\n (source_lu.unique_device_id, target_lu.unique_device_id))",
"def create(self, request, *args, **kwargs):\n # check if the lesson is allowed to copy\n lesson_to_copy = get_object_or_404(Lesson.objects.all(), id=request.data.get('oldLessonId'))\n self.check_object_permissions(self.request, lesson_to_copy)\n # check if the project is allowed to edit\n to_project = get_object_or_404(Project.objects.all(), id=request.data.get('newProjectId'))\n self.check_object_permissions(self.request, to_project)\n # copy lesson and return\n new_lesson = lesson_to_copy.copy_lesson_to_project(to_project)\n new_lesson.change_parent_updated_field(new_lesson.updated)\n serializer = self.get_serializer(new_lesson)\n headers = self.get_success_headers(serializer.data)\n return Response(serializer.data, status=status.HTTP_201_CREATED, headers=headers)",
"def copy_translations(self, oldinstance, language=None):\n query = CourseRunTranslation.objects.filter(master=oldinstance)\n if language:\n query = query.filter(language_code=language)\n\n for translation_object in query:\n try:\n target_pk = CourseRunTranslation.objects.filter(\n master=self, language_code=translation_object.language_code\n ).values_list(\"pk\", flat=True)[0]\n except IndexError:\n translation_object.pk = None\n else:\n translation_object.pk = target_pk\n translation_object.master = self\n translation_object.save()",
"def clone_lc(client, lc, name, image_id):\n PARAMS_TO_CLONE = [\n 'KeyName',\n 'SecurityGroups',\n 'ClassicLinkVPCId',\n 'ClassicLinkVPCSecurityGroups',\n 'UserData',\n 'InstanceType',\n 'BlockDeviceMappings',\n 'InstanceMonitoring',\n 'SpotPrice',\n 'IamInstanceProfile',\n 'EbsOptimized',\n 'AssociatePublicIpAddress',\n 'PlacementTenancy',\n ]\n try:\n params = {\n key: lc[key] for key in PARAMS_TO_CLONE if key in lc\n }\n except KeyError:\n print(list(lc.keys()))\n raise\n # We need special handling for kernel ID and ramdisk ID.\n if lc['KernelId']:\n params['KernelId'] = lc['KernelId']\n if lc['RamdiskId']:\n params['RamdiskId'] = lc['RamdiskId']\n client.create_launch_configuration(\n LaunchConfigurationName=name,\n ImageId=image_id,\n **params\n )\n return client.describe_launch_configurations(\n LaunchConfigurationNames=[name],\n )['LaunchConfigurations'][0]",
"def copy_workspace(self, user_id, source_alias=None, source_uuid=None, target_alias=None):\n uuid_mapping = self._get_uuid_mapping_object(user_id)\n if source_alias == 'default_workspace':\n source_uuid = uuid_mapping.get_uuid(source_alias, 'default') \n elif source_alias:\n source_uuid = uuid_mapping.get_uuid(source_alias, user_id) \n \n if not source_uuid:\n self._logger.warning('No alias named \"{}\"'.format(source_alias))\n return False\n \n if not source_alias:\n source_alias = uuid_mapping.get_alias(source_uuid)\n \n # Add UUID for workspace in uuid_mapping \n target_uuid = uuid_mapping.add_new_uuid_for_alias(target_alias, user_id)\n if not target_uuid:\n self._logger.debug('Could not add workspace with alias \"{}\". Workspace already exists!'.format(target_alias)) \n return False\n \n # Copy all directories and files in workspace \n source_workspace_path = '/'.join([self.workspace_directory, source_uuid])\n target_workspace_path = '/'.join([self.workspace_directory, target_uuid])\n \n print('source_workspace_path:', source_workspace_path)\n print('target_workspace_path:', target_workspace_path)\n \n \n self._logger.debug('Trying to copy workspace \"{}\" with alias \"{}\". Copy has alias \"{}\"'.format(source_uuid, source_alias, target_alias))\n \n # Copy files\n shutil.copytree(source_workspace_path, target_workspace_path)\n \n \"\"\"\n No data is loaded yet \n Now we need to change uuid for subsets. \n Do this by creating an UUID mapping object the subset and: \n 1: rename in mapping file\n 2: rename subset folder\n \"\"\" \n target_subset_uuid_mapping_file = '{}/subsets/uuid_mapping.txt'.format(target_workspace_path) \n uuid_object = core.UUIDmapping(target_subset_uuid_mapping_file)\n \n uuid_list = uuid_object.get_uuid_list_for_user(user_id)\n for u_id in uuid_list:\n new_uuid = uuid_object.set_new_uuid(u_id)\n current_subset_path = '{}/subsets/{}'.format(target_workspace_path, u_id)\n new_subset_path = '{}/subsets/{}'.format(target_workspace_path, new_uuid)\n os.rename(current_subset_path, new_subset_path)\n \n status = uuid_mapping.get_status(unique_id=target_uuid) # Check in case default is changed\n \n return {'alias': target_alias,\n 'uuid': target_uuid,\n \t 'status': status}",
"def create(*, db_session, workflow_in: WorkflowCreate) -> Workflow:\n project = project_service.get_by_name_or_raise(\n db_session=db_session, project_in=workflow_in.project\n )\n plugin_instance = plugin_service.get_instance(\n db_session=db_session, plugin_instance_id=workflow_in.plugin_instance.id\n )\n workflow = Workflow(\n **workflow_in.dict(exclude={\"plugin_instance\", \"project\"}),\n plugin_instance=plugin_instance,\n project=project,\n )\n\n db_session.add(workflow)\n db_session.commit()\n return workflow",
"def project_clone(request, proj_id=None):\n\n if not proj_id or not request.user.is_authenticated():\n raise Http404\n\n project = get_object_or_404(Project, id=proj_id)\n\n if project.user != request.user and project.is_private:\n raise Http404\n\n project.pk = None\n project.user = request.user\n project.save()\n\n for scenario in Scenario.objects \\\n .filter(project_id=proj_id) \\\n .order_by('created_at'):\n scenario.pk = None\n scenario.project = project\n scenario.save()\n\n return redirect('/project/{0}'.format(project.id))",
"def _reconstruct_workflow(workflow_record, hints, requirements, inputs, outputs):\n rec = workflow_record[\"w\"]\n return Workflow(name=rec[\"name\"], hints=hints, requirements=requirements, inputs=inputs,\n outputs=outputs, workflow_id=rec[\"id\"])",
"def clone(self):\n\n if not self.can_clone:\n return None\n\n _clone = Issue.objects.create(\n sla=self.sla,\n service=self.service,\n title=self.title + \" [clone]\",\n text=self.text,\n assignee=self.assignee,\n contact=self.contact,\n status=self.status\n )\n\n for comment in self.comments.all():\n comment_clone = _clone.comments.create(comment=comment.comment)\n comment_clone.date = comment.date\n comment_clone.save()\n\n self.status_history.create(name=self.status,\n issue=_clone,\n comment=\"Cloned status\")\n\n return _clone",
"def clone_create(self, volume, source_path, ignore_streams=None, destination_path=None, ignore_locks=None, bypass_license_check=None, qos_policy_group_name=None, block_ranges=None, space_reserve=None, snapshot_name=None, destination_exists=None):\n return self.request( \"clone-create\", {\n 'ignore_streams': [ ignore_streams, 'ignore-streams', [ bool, 'None' ], False ],\n 'destination_path': [ destination_path, 'destination-path', [ basestring, 'None' ], False ],\n 'ignore_locks': [ ignore_locks, 'ignore-locks', [ bool, 'None' ], False ],\n 'bypass_license_check': [ bypass_license_check, 'bypass-license-check', [ bool, 'None' ], False ],\n 'volume': [ volume, 'volume', [ basestring, 'None' ], False ],\n 'qos_policy_group_name': [ qos_policy_group_name, 'qos-policy-group-name', [ basestring, 'None' ], False ],\n 'block_ranges': [ block_ranges, 'block-ranges', [ BlockRange, 'None' ], True ],\n 'space_reserve': [ space_reserve, 'space-reserve', [ bool, 'None' ], False ],\n 'snapshot_name': [ snapshot_name, 'snapshot-name', [ basestring, 'None' ], False ],\n 'source_path': [ source_path, 'source-path', [ basestring, 'None' ], False ],\n 'destination_exists': [ destination_exists, 'destination-exists', [ bool, 'None' ], False ],\n }, {\n } )",
"def adc_api_workflow_create():\n workflow_json = request.get_json(force=True)\n\n return jsonify(adc.workflow_create(workflow_json=workflow_json))",
"def clone(context, request):\n if request.has_permission('create'):\n return {\n 'name': 'clone',\n 'title': 'Clone',\n 'profile': '/profiles/{ti.name}.json'.format(ti=context.type_info),\n 'href': '{item_uri}#!clone'.format(item_uri=request.resource_path(context)),\n }",
"def copy_skeleton(self, newdomain=None):\n if not newdomain:\n newdomain = self.parent\n \n args = [Parameter(p.name, p.type) for p in self.args]\n adict = dict((a.name, a) for a in args)\n agents = [adict[a.name] for a in self.agents]\n vars = [adict[a.name] for a in self.vars]\n params = [a for a in args if a not in agents and a not in vars]\n \n return MAPLAction(self.name, agents, params, vars, None, None, None, [], newdomain)",
"def clone(self, new_vm_name, new_vm_root_path, bCreateTemplate = False):\n\t\treturn Job(SDK.PrlVm_Clone(self.handle, new_vm_name, new_vm_root_path, bCreateTemplate)[0])",
"def _copy_from_template(\n self, source_structures, source_keys, dest_structure, new_parent_block_key, user_id, head_validation\n ):\n new_blocks = set()\n\n new_children = list() # ordered list of the new children of new_parent_block_key\n\n for usage_key in source_keys:\n src_course_key = usage_key.course_key\n hashable_source_id = src_course_key.for_version(None)\n block_key = BlockKey(usage_key.block_type, usage_key.block_id)\n source_structure = source_structures[src_course_key]\n\n if block_key not in source_structure['blocks']:\n raise ItemNotFoundError(usage_key)\n source_block_info = source_structure['blocks'][block_key]\n\n # Compute a new block ID. This new block ID must be consistent when this\n # method is called with the same (source_key, dest_structure) pair\n unique_data = \"{}:{}:{}\".format(\n str(hashable_source_id).encode(\"utf-8\"),\n block_key.id,\n new_parent_block_key.id,\n )\n new_block_id = hashlib.sha1(unique_data.encode('utf-8')).hexdigest()[:20]\n new_block_key = BlockKey(block_key.type, new_block_id)\n\n # Now clone block_key to new_block_key:\n new_block_info = copy.deepcopy(source_block_info)\n # Note that new_block_info now points to the same definition ID entry as source_block_info did\n existing_block_info = dest_structure['blocks'].get(new_block_key, BlockData())\n # Inherit the Scope.settings values from 'fields' to 'defaults'\n new_block_info.defaults = new_block_info.fields\n\n # <workaround>\n # CAPA modules store their 'markdown' value (an alternate representation of their content)\n # in Scope.settings rather than Scope.content :-/\n # markdown is a field that really should not be overridable - it fundamentally changes the content.\n # capa modules also use a custom editor that always saves their markdown field to the metadata,\n # even if it hasn't changed, which breaks our override system.\n # So until capa modules are fixed, we special-case them and remove their markdown fields,\n # forcing the inherited version to use XML only.\n if usage_key.block_type == 'problem' and 'markdown' in new_block_info.defaults:\n del new_block_info.defaults['markdown']\n # </workaround>\n\n # Preserve any existing overrides\n new_block_info.fields = existing_block_info.fields\n\n if 'children' in new_block_info.defaults:\n del new_block_info.defaults['children'] # Will be set later\n\n new_block_info.edit_info = existing_block_info.edit_info\n new_block_info.edit_info.previous_version = new_block_info.edit_info.update_version\n new_block_info.edit_info.update_version = dest_structure['_id']\n # Note we do not set 'source_version' - it's only used for copying identical blocks\n # from draft to published as part of publishing workflow.\n # Setting it to the source_block_info structure version here breaks split_draft's has_changes() method.\n new_block_info.edit_info.edited_by = user_id\n new_block_info.edit_info.edited_on = datetime.datetime.now(UTC)\n new_block_info.edit_info.original_usage = str(usage_key.replace(branch=None, version_guid=None))\n new_block_info.edit_info.original_usage_version = source_block_info.edit_info.update_version\n dest_structure['blocks'][new_block_key] = new_block_info\n\n children = source_block_info.fields.get('children')\n if children:\n children = [src_course_key.make_usage_key(child.type, child.id) for child in children]\n new_blocks |= self._copy_from_template(\n source_structures, children, dest_structure, new_block_key, user_id, head_validation\n )\n\n new_blocks.add(new_block_key)\n # And add new_block_key to the list of 
new_parent_block_key's new children:\n new_children.append(new_block_key)\n\n # Update the children of new_parent_block_key\n dest_structure['blocks'][new_parent_block_key].fields['children'] = new_children\n\n return new_blocks",
"def request_workspace_add(self, request):\n user_id = request['user_id']\n alias = request['alias'] \n source_uuid = request['source'] \n# print('###', user_id)\n# print('###', alias)\n# print('###', source_uuid)\n \n response = self.copy_workspace(user_id, source_uuid=source_uuid, target_alias=alias)\n \n return response",
"def target_create(obj, product_name, slo_id, sli_name, target_from, target_to, target_file):\n client = get_client(obj)\n\n product = client.product_list(name=product_name)\n if not product:\n fatal_error('Product {} does not exist'.format(product_name))\n\n product = product[0]\n\n slo = client.slo_list(product, id=slo_id)\n if not slo:\n fatal_error('SLO {} does not exist'.format(slo_id))\n\n slo = slo[0]\n\n product = client.product_list(name=slo['product_name'])[0]\n\n sli = client.sli_list(product=product, name=sli_name)\n if not sli or not sli_name:\n fatal_error('SLI {} does not exist'.format(sli_name))\n sli = sli[0]\n\n with Action(\n 'Creating Targets for SLO: {} for product: {}'.format(slo['title'], slo['product_name']), nl=True) as act:\n if target_file:\n target = json.load(target_file)\n else:\n target = {'sli_uri': sli['uri'], 'from': target_from, 'to': target_to}\n\n validate_target(target, act)\n\n if not act.errors:\n t = client.target_create(slo, target['sli_uri'], target_from=target.get('from'), target_to=target.get('to'))\n\n print(json.dumps(t, indent=4))",
"def create_cloned_project_with_mapping_types(project_id, author_id, mapping_types):\n test_project = Project.clone(project_id, author_id)\n test_project.mapping_types = mapping_types\n test_project.status = ProjectStatus.PUBLISHED.value\n test_project.save()\n return test_project",
"def make_env(self, env_id, seed, logger_dir=None, reward_scale=1.0, mpi_rank=0, subrank=0, info_keywords=()):\n scenario = scenarios.load('{}.py'.format(env_id)).Scenario()\n world = scenario.make_world()\n env_dict = {\n \"world\": world,\n 'reset_callback': scenario.reset_world,\n 'reward_callback': scenario.reward, \n 'observation_callback': scenario.observation,\n 'info_callback': None,\n 'done_callback': scenario.done, \n 'shared_viewer': True\n }\n env = gym.make('MultiAgent-v0', **env_dict)\n env.seed(seed + subrank if seed is not None else None)\n env = Monitor(env,\n logger_dir and os.path.join(logger_dir, str(mpi_rank) + '.' + str(subrank)),\n allow_early_resets=True,\n info_keywords=info_keywords)\n env = ClipActionsWrapper(env)\n if reward_scale != 1.0:\n from baselines.common.retro_wrappers import RewardScaler\n env = RewardScaler(env, reward_scale)\n return env",
"def replicate(self, source, target, **params):\n replicator = cloudant.replicator.Replication(self.cloudant_client)\n source_db = Database(self.cloudant_client, source)\n target_db = Database(self.cloudant_client, target)\n return replicator.create_replication(source_db, target_db, **params)",
"def clone( m, orig):\r\n if m.ObjType not in (1, 6): return\r\n if not orig: return\r\n \r\n if m.ObjType == 6: # Target is a Folder\r\n if orig.ObjType == 6: cloned = m.CopyFolderDisp( orig) # Orig is Folder too\r\n else: cloned = m.CopyFCODisp( orig) # Orig is FCO\r\n elif m.ObjType == 1:\r\n cloned = m.CopyFCODisp( orig, metaRole( orig)) # Target is Model, Orig is FCO\r\n \r\n if cloned:\r\n \tcloned.Name = \"Cloned\" + orig.Name\r\n return cloned",
"def construct_master(self, master_scenarios):\n lr_instance = self.tsdro.lr_instance\n\n self.master, self.stage1_vars = lr_instance.construct_stage1()\n if self.method == \"RO\":\n self.wass_mult = 0\n else:\n self.wass_mult = self.master.addVar(name=\"wass_multiplier\", lb=0)\n self.epi_vars = self.master.addVars(self.tsdro.samples.keys(), lb=0,\n name=\"epi_vars\")\n objexpr_stage1 = lr_instance.get_objective_stage1(self.stage1_vars)\n objexpr_stage2 = quicksum(\n self.tsdro.probs[sample_name] * self.epi_vars[sample_name]\n for sample_name in self.tsdro.samples.keys())\n\n self.objexpr_master = (objexpr_stage1\n + self.tsdro.wass_rad * self.wass_mult\n + objexpr_stage2)\n self.master.setObjective(self.objexpr_master, GRB.MINIMIZE)\n\n for scenario_name in master_scenarios:\n scenario = self.tsdro.scenarios[scenario_name]\n curr_vars, _ = lr_instance.add_stage2(\n self.master, self.stage1_vars, scenario, scenario_name)\n for sample_name, sample in self.tsdro.samples.items():\n objexpr_stage2 = lr_instance.get_objective_stage2(curr_vars,\n scenario)\n if scenario_name == sample_name or self.method == \"RO\":\n scenario_distance = 0\n else:\n scenario_distance = sc.get_scenario_distance(\n scenario, sample, lr_instance)\n rhs = objexpr_stage2 - self.wass_mult * scenario_distance\n self.master.addLConstr(self.epi_vars[sample_name], \">\", rhs,\n name=(\"epi_constr_\" + str(scenario_name)\n + \"_\" + str(sample_name)))\n self.stage2_vars[scenario_name] = curr_vars\n return",
"def clone():\n require('PROJECT_NAME')\n require('PROJECT_REPO')\n require('MERCURIAL_BIN')\n\n # Create the \"apps\" directory if it does not exist.\n run('mkdir -p {}'.format(utils.home('apps')))\n\n if files.exists(utils.home('apps', env.PROJECT_NAME)):\n delete()\n\n with cd(utils.home('apps')):\n run('{0} clone {1} {2}'.format(env.MERCURIAL_BIN,\n env.PROJECT_REPO,\n env.PROJECT_NAME))",
"def clone_project(\n project_name,\n from_project,\n project_description=None,\n copy_annotation_classes=True,\n copy_settings=True,\n copy_workflow=True,\n copy_contributors=False\n):\n try:\n get_project_metadata_bare(project_name)\n except SANonExistingProjectNameException:\n pass\n else:\n raise SAExistingProjectNameException(\n 0, \"Project with name \" + project_name +\n \" already exists. Please use unique names for projects to use with SDK.\"\n )\n metadata = get_project_metadata(\n from_project, copy_annotation_classes, copy_settings, copy_workflow,\n copy_contributors\n )\n metadata[\"name\"] = project_name\n if project_description is not None:\n metadata[\"description\"] = project_description\n\n return create_project_from_metadata(metadata)",
"def clone(self, params: JSONDict, workspace: Workspace) -> LocalStateResourceMixin:\n workspace._get_local_scratch_space_for_resource(params[\"name\"], create_if_not_present=True)\n return ApiResource(params[\"name\"], params[\"role\"], workspace)",
"def clone(self, *args):\n return _SALOMERuntime.InputPresetPort_clone(self, *args)"
] | [
"0.52280587",
"0.51885384",
"0.49525946",
"0.48021084",
"0.4720899",
"0.46277058",
"0.45962957",
"0.45327118",
"0.44870907",
"0.44661537",
"0.44243488",
"0.43685842",
"0.42493874",
"0.42013294",
"0.4179492",
"0.41775367",
"0.4125034",
"0.41231197",
"0.4115265",
"0.41032803",
"0.40850574",
"0.40781075",
"0.40765458",
"0.40666538",
"0.40637484",
"0.40539542",
"0.40438464",
"0.40351242",
"0.4013867",
"0.39873695"
] | 0.6417149 | 0 |
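Note on the calling convention shared by the SMAPI client methods in these records: by default each method returns the deserialized response body (or None for operations whose success status is 202/204 with no body), while passing full_response=True returns the ApiResponse wrapper with status code and headers. A minimal usage sketch follows; the StandardSmapiClientBuilder import and the credential placeholders are assumptions about the companion ask-smapi-sdk package, not something this dataset confirms.

# Sketch only: the builder name and all credentials below are assumed placeholders.
from ask_smapi_sdk import StandardSmapiClientBuilder

client = StandardSmapiClientBuilder(
    client_id="<client-id>",          # hypothetical placeholder
    client_secret="<client-secret>",  # hypothetical placeholder
    refresh_token="<refresh-token>",  # hypothetical placeholder
).client()

# Default mode: the deserialized body (None for 202/204-style operations).
status = client.get_clone_locale_status_v1("<skill-id>", "development", "<request-id>")

# full_response=True: the ApiResponse wrapper, exposing headers and status code.
response = client.get_clone_locale_status_v1(
    "<skill-id>", "development", "<request-id>", full_response=True)
print(response.status_code, response.headers)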
Returns the status of the clone locale workflow identified by the given cloneLocaleRequestId. | def get_clone_locale_status_v1(self, skill_id, stage_v2, clone_locale_request_id, **kwargs):
# type: (str, str, str, **Any) -> Union[ApiResponse, object, StandardizedError_f5106a89, BadRequestError_f854b05, CloneLocaleStatusResponse_8b6e06ed]
operation_name = "get_clone_locale_status_v1"
params = locals()
for key, val in six.iteritems(params['kwargs']):
params[key] = val
del params['kwargs']
# verify the required parameter 'skill_id' is set
if ('skill_id' not in params) or (params['skill_id'] is None):
raise ValueError(
"Missing the required parameter `skill_id` when calling `" + operation_name + "`")
# verify the required parameter 'stage_v2' is set
if ('stage_v2' not in params) or (params['stage_v2'] is None):
raise ValueError(
"Missing the required parameter `stage_v2` when calling `" + operation_name + "`")
# verify the required parameter 'clone_locale_request_id' is set
if ('clone_locale_request_id' not in params) or (params['clone_locale_request_id'] is None):
raise ValueError(
"Missing the required parameter `clone_locale_request_id` when calling `" + operation_name + "`")
resource_path = '/v1/skills/{skillId}/stages/{stageV2}/cloneLocaleRequests/{cloneLocaleRequestId}'
resource_path = resource_path.replace('{format}', 'json')
path_params = {} # type: Dict
if 'skill_id' in params:
path_params['skillId'] = params['skill_id']
if 'stage_v2' in params:
path_params['stageV2'] = params['stage_v2']
if 'clone_locale_request_id' in params:
path_params['cloneLocaleRequestId'] = params['clone_locale_request_id']
query_params = [] # type: List
header_params = [] # type: List
body_params = None
header_params.append(('Content-type', 'application/json'))
header_params.append(('User-Agent', self.user_agent))
# Response Type
full_response = False
if 'full_response' in params:
full_response = params['full_response']
# Authentication setting
access_token = self._lwa_service_client.get_access_token_from_refresh_token()
authorization_value = "Bearer " + access_token
header_params.append(('Authorization', authorization_value))
error_definitions = [] # type: List
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.clone_locale_status_response.CloneLocaleStatusResponse", status_code=200, message="OK."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.bad_request_error.BadRequestError", status_code=400, message="Server cannot process the request due to a client error."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=401, message="The auth token is invalid/expired or doesn't have access to the resource."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.bad_request_error.BadRequestError", status_code=403, message="The operation being requested is not allowed."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=404, message="The resource being requested is not found."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=429, message="Exceeds the permitted request limit. Throttling criteria includes total requests, per API, ClientId, and CustomerId."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=500, message="Internal Server Error."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=503, message="Service Unavailable."))
api_response = self.invoke(
method="GET",
endpoint=self._api_endpoint,
path=resource_path,
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
response_definitions=error_definitions,
response_type="ask_smapi_model.v1.skill.clone_locale_status_response.CloneLocaleStatusResponse")
if full_response:
return api_response
return api_response.body | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def clone_list_status(self, clone_id=None):\n return self.request( \"clone-list-status\", {\n 'clone_id': [ clone_id, 'clone-id', [ CloneIdInfo, 'None' ], False ],\n }, {\n 'status': [ OpsInfo, True ],\n } )",
"def clone_locale_v1(self, skill_id, stage_v2, clone_locale_request, **kwargs):\n # type: (str, str, CloneLocaleRequest_2e00cdf4, **Any) -> Union[ApiResponse, object, StandardizedError_f5106a89, BadRequestError_f854b05]\n operation_name = \"clone_locale_v1\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'skill_id' is set\n if ('skill_id' not in params) or (params['skill_id'] is None):\n raise ValueError(\n \"Missing the required parameter `skill_id` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'stage_v2' is set\n if ('stage_v2' not in params) or (params['stage_v2'] is None):\n raise ValueError(\n \"Missing the required parameter `stage_v2` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'clone_locale_request' is set\n if ('clone_locale_request' not in params) or (params['clone_locale_request'] is None):\n raise ValueError(\n \"Missing the required parameter `clone_locale_request` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v1/skills/{skillId}/stages/{stageV2}/cloneLocale'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'skill_id' in params:\n path_params['skillId'] = params['skill_id']\n if 'stage_v2' in params:\n path_params['stageV2'] = params['stage_v2']\n\n query_params = [] # type: List\n\n header_params = [] # type: List\n\n body_params = None\n if 'clone_locale_request' in params:\n body_params = params['clone_locale_request']\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=None, status_code=202, message=\"Accepted.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=400, message=\"Server cannot process the request due to a client error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=401, message=\"The auth token is invalid/expired or doesn't have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=403, message=\"The operation being requested is not allowed.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=404, message=\"The resource being requested is not found.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=409, message=\"The request could not be completed due to a conflict with the current state of the target resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=429, message=\"Exceeds the permitted request limit. 
Throttling criteria includes total requests, per API, ClientId, and CustomerId.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=500, message=\"Internal Server Error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=503, message=\"Service Unavailable.\"))\n\n api_response = self.invoke(\n method=\"POST\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=None)\n\n if full_response:\n return api_response\n \n return None",
"def workflow_status(self):\n return self._workflow_status",
"def clone(self):\n\n if not self.can_clone:\n return None\n\n _clone = Issue.objects.create(\n sla=self.sla,\n service=self.service,\n title=self.title + \" [clone]\",\n text=self.text,\n assignee=self.assignee,\n contact=self.contact,\n status=self.status\n )\n\n for comment in self.comments.all():\n comment_clone = _clone.comments.create(comment=comment.comment)\n comment_clone.date = comment.date\n comment_clone.save()\n\n self.status_history.create(name=self.status,\n issue=_clone,\n comment=\"Cloned status\")\n\n return _clone",
"def mesos_status(self, submissionId):\n get_tasks = self.driver.getTasks()['get_tasks']\n task_state = None\n\n tasks = get_tasks['tasks'] + get_tasks.get('completed_tasks')\n tasks_list = list(filter(lambda x: x['task_id']['value'] == submissionId, tasks))\n if len(tasks_list) > 0:\n task = tasks_list[0]\n task_state = task['state']\n self._log.debug(\"Task state = \" + task_state)\n else:\n self._log.debug(\"Task not found\")\n\n return task_state",
"def workflow_state(self):\n return self.getattr('workflow_state')",
"def updateRcloneJobStatus():\n global jobIds, jobStatusGauge\n\n # Check if the jobs are running, update the variables\n for jobName, jobId in jobIds.items():\n jobIsRunning = getRcloneJobRunning(jobId)\n jobIds[jobName] = jobId if jobIsRunning else None\n jobStatusGauge.labels(rclone_job=jobName).set(1 if jobIsRunning else 0)",
"def get_workflow_status(github_token: str, workflow_id: str) -> Tuple[str, str, str]:\n\n # get the workflow run status\n workflow_url = GET_WORKFLOW_URL.format(workflow_id)\n res = requests.get(workflow_url,\n headers={'Authorization': f'Bearer {github_token}'},\n verify=False)\n if res.status_code != 200:\n logging.critical(\n f'Failed to gets private repo workflow, request to {workflow_url} failed with error: {str(res.content)}')\n sys.exit(1)\n\n # parse response\n try:\n workflow = json.loads(res.content)\n except ValueError:\n logging.exception('Enable to parse private repo workflows response')\n sys.exit(1)\n\n # get the workflow job from the response to know what step is in progress now\n jobs = workflow.get('jobs', [])\n\n if not jobs:\n logging.critical(f'Failed to gets private repo workflow jobs, build url: {WORKFLOW_HTML_URL}/{workflow_id}')\n sys.exit(1)\n\n curr_job = jobs[0]\n job_status = curr_job.get('status')\n job_conclusion = curr_job.get('conclusion')\n\n if job_status == 'completed':\n return 'completed', job_conclusion, ''\n\n # check for failure steps\n failure_steps = [step for step in jobs[0].get('steps') if step.get('conclusion') == 'failure']\n if failure_steps:\n return 'completed', 'failure', failure_steps[0].get('name')\n\n # if the job is still in progress - get the current step\n curr_step = next((step for step in jobs[0].get('steps') if step.get('status') == 'in_progress'), None)\n if not curr_step:\n logging.info('All the steps completed waiting for job to get updated, and finish')\n return job_status, job_conclusion, 'unknown'\n return job_status, job_conclusion, curr_step.get('name')",
"def get_status(self):\n data = self.client._perform_json(\n \"GET\", \"/projects/%s/recipes/%s/status\" % (self.project_key, self.recipe_name))\n return DSSRecipeStatus(self.client, data)",
"def get_active_milestone(self, contract_reference):\n data = {}\n\n url = 'fp/milestones/statuses/active/contracts/{0}'.format(contract_reference)\n return self.get(url, data)",
"def get_status(self):\n\t\treturn call_sdk_function('PrlJob_GetStatus', self.handle)",
"def status(self):\n return self.job_proto.status",
"def status(self) -> pulumi.Output['outputs.JobStatus']:\n return pulumi.get(self, \"status\")",
"def get_workflow_state(self):\n state = self._gdb_interface.get_workflow_state()\n return state",
"def get_job_state(self, job_origin_id):",
"def get_workflow_state(self):\n return self._read_transaction(tx.get_workflow_state)",
"def success_code(self):\n if self._results is None:\n return None\n return self._results.fields['omci_message'].fields['success']",
"def get_pir_status(self):\n response = self.parent.pir.status()\n return response[0]",
"def _get_module_status(status, i):\n\n # iterate through modules and find the one that was run previously\n for module_status in status.data.values():\n i_current = module_status.get('pipeline_index', -99)\n if str(i) == str(i_current):\n out = module_status\n break\n\n return out",
"def workflow_details(self) -> Optional[pulumi.Input['ServerWorkflowDetailsArgs']]:\n return pulumi.get(self, \"workflow_details\")",
"def workflow_details(self) -> Optional[pulumi.Input['ServerWorkflowDetailsArgs']]:\n return pulumi.get(self, \"workflow_details\")",
"def status(self):\n return self.m.status",
"def get_state(self):\n\t\treturn Job(SDK.PrlVm_GetState(self.handle)[0])",
"def status_code(self) -> int:\n return pulumi.get(self, \"status_code\")",
"def status(self):\n return self.get(self._names[\"status\"])",
"def get_status(self):\n return self._status",
"def get_job_state(self, response) -> JobState:\n return response.state",
"def status(self):\n if self._get_hcell().get(\"UNTRANSLATED\"):\n return \"Status: error (ctx needs translation)\"\n cell = self._get_cell()\n return cell.status",
"def export_getRequestStatus(self,requestName):\n\n if type(requestName) in StringTypes:\n result = requestDB._getRequestAttribute('RequestID',requestName=requestName)\n if not result['OK']:\n return result\n requestID = result['Value']\n else:\n requestID = requestName\n\n result = requestDB.getRequestStatus(requestID)\n return result",
"def get_status(self):\n statuses = dict(ACTIVITY_STATUS_CHOICES)\n return statuses.get(self.status, \"N/A\")"
] | [
"0.60118484",
"0.5969789",
"0.55662626",
"0.52257687",
"0.4927066",
"0.4838041",
"0.4720488",
"0.4703005",
"0.45899856",
"0.45641983",
"0.4541535",
"0.45344925",
"0.4407275",
"0.43921122",
"0.4388319",
"0.43734744",
"0.43686804",
"0.43637437",
"0.4355599",
"0.43407077",
"0.43407077",
"0.43212867",
"0.43127427",
"0.4301044",
"0.42928278",
"0.4289763",
"0.42820373",
"0.4279303",
"0.42638043",
"0.4259222"
] | 0.72772926 | 0 |
Gets the `InteractionModel` for the skill in the given stage. The path params skillId, stageV2, and locale are required. | def get_interaction_model_v1(self, skill_id, stage_v2, locale, **kwargs):
# type: (str, str, str, **Any) -> Union[ApiResponse, object, StandardizedError_f5106a89, BadRequestError_f854b05, InteractionModelData_487fc9ea]
operation_name = "get_interaction_model_v1"
params = locals()
for key, val in six.iteritems(params['kwargs']):
params[key] = val
del params['kwargs']
# verify the required parameter 'skill_id' is set
if ('skill_id' not in params) or (params['skill_id'] is None):
raise ValueError(
"Missing the required parameter `skill_id` when calling `" + operation_name + "`")
# verify the required parameter 'stage_v2' is set
if ('stage_v2' not in params) or (params['stage_v2'] is None):
raise ValueError(
"Missing the required parameter `stage_v2` when calling `" + operation_name + "`")
# verify the required parameter 'locale' is set
if ('locale' not in params) or (params['locale'] is None):
raise ValueError(
"Missing the required parameter `locale` when calling `" + operation_name + "`")
resource_path = '/v1/skills/{skillId}/stages/{stageV2}/interactionModel/locales/{locale}'
resource_path = resource_path.replace('{format}', 'json')
path_params = {} # type: Dict
if 'skill_id' in params:
path_params['skillId'] = params['skill_id']
if 'stage_v2' in params:
path_params['stageV2'] = params['stage_v2']
if 'locale' in params:
path_params['locale'] = params['locale']
query_params = [] # type: List
header_params = [] # type: List
body_params = None
header_params.append(('Content-type', 'application/json'))
header_params.append(('User-Agent', self.user_agent))
# Response Type
full_response = False
if 'full_response' in params:
full_response = params['full_response']
# Authentication setting
access_token = self._lwa_service_client.get_access_token_from_refresh_token()
authorization_value = "Bearer " + access_token
header_params.append(('Authorization', authorization_value))
error_definitions = [] # type: List
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.interaction_model.interaction_model_data.InteractionModelData", status_code=200, message="Returns interaction model object on success."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.bad_request_error.BadRequestError", status_code=400, message="Server cannot process the request due to a client error."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=401, message="The auth token is invalid/expired or doesn't have access to the resource."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.bad_request_error.BadRequestError", status_code=403, message="The operation being requested is not allowed."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=404, message="The specified skill doesn't exist or there is no model defined for the locale."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=429, message="Exceeds the permitted request limit. Throttling criteria includes total requests, per API, ClientId, and CustomerId."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=500, message="Internal Server Error."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=503, message="Service Unavailable."))
api_response = self.invoke(
method="GET",
endpoint=self._api_endpoint,
path=resource_path,
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
response_definitions=error_definitions,
response_type="ask_smapi_model.v1.skill.interaction_model.interaction_model_data.InteractionModelData")
if full_response:
return api_response
return api_response.body | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_interaction_model_metadata_v1(self, skill_id, stage_v2, locale, **kwargs):\n # type: (str, str, str, **Any) -> Union[ApiResponse, object, StandardizedError_f5106a89, BadRequestError_f854b05]\n operation_name = \"get_interaction_model_metadata_v1\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'skill_id' is set\n if ('skill_id' not in params) or (params['skill_id'] is None):\n raise ValueError(\n \"Missing the required parameter `skill_id` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'stage_v2' is set\n if ('stage_v2' not in params) or (params['stage_v2'] is None):\n raise ValueError(\n \"Missing the required parameter `stage_v2` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'locale' is set\n if ('locale' not in params) or (params['locale'] is None):\n raise ValueError(\n \"Missing the required parameter `locale` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v1/skills/{skillId}/stages/{stageV2}/interactionModel/locales/{locale}'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'skill_id' in params:\n path_params['skillId'] = params['skill_id']\n if 'stage_v2' in params:\n path_params['stageV2'] = params['stage_v2']\n if 'locale' in params:\n path_params['locale'] = params['locale']\n\n query_params = [] # type: List\n\n header_params = [] # type: List\n\n body_params = None\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=None, status_code=204, message=\"Success. There is no content but returns etag.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=400, message=\"Server cannot process the request due to a client error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=401, message=\"The auth token is invalid/expired or doesn't have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=403, message=\"The operation being requested is not allowed.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=404, message=\"The specified skill or stage or locale does not exist\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=429, message=\"Exceeds the permitted request limit. 
Throttling criteria includes total requests, per API, ClientId, and CustomerId.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=500, message=\"Internal Server Error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=503, message=\"Service Unavailable.\"))\n\n api_response = self.invoke(\n method=\"HEAD\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=None)\n\n if full_response:\n return api_response\n \n return None",
"def set_interaction_model_v1(self, skill_id, stage_v2, locale, interaction_model, **kwargs):\n # type: (str, str, str, InteractionModelData_487fc9ea, **Any) -> Union[ApiResponse, object, StandardizedError_f5106a89, BadRequestError_f854b05]\n operation_name = \"set_interaction_model_v1\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'skill_id' is set\n if ('skill_id' not in params) or (params['skill_id'] is None):\n raise ValueError(\n \"Missing the required parameter `skill_id` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'stage_v2' is set\n if ('stage_v2' not in params) or (params['stage_v2'] is None):\n raise ValueError(\n \"Missing the required parameter `stage_v2` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'locale' is set\n if ('locale' not in params) or (params['locale'] is None):\n raise ValueError(\n \"Missing the required parameter `locale` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'interaction_model' is set\n if ('interaction_model' not in params) or (params['interaction_model'] is None):\n raise ValueError(\n \"Missing the required parameter `interaction_model` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v1/skills/{skillId}/stages/{stageV2}/interactionModel/locales/{locale}'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'skill_id' in params:\n path_params['skillId'] = params['skill_id']\n if 'stage_v2' in params:\n path_params['stageV2'] = params['stage_v2']\n if 'locale' in params:\n path_params['locale'] = params['locale']\n\n query_params = [] # type: List\n\n header_params = [] # type: List\n if 'if_match' in params:\n header_params.append(('If-Match', params['if_match']))\n\n body_params = None\n if 'interaction_model' in params:\n body_params = params['interaction_model']\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=None, status_code=202, message=\"Returns build status location link on success.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=400, message=\"Server cannot process the request due to a client error e.g. 
the input interaction model is invalid.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=401, message=\"The auth token is invalid/expired or doesn't have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=403, message=\"The operation being requested is not allowed.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=404, message=\"The specified skill or stage or locale does not exist.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=412, message=\"Precondition failed.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=429, message=\"Exceeds the permitted request limit. Throttling criteria includes total requests, per API, ClientId, and CustomerId.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=500, message=\"Internal Server Error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=503, message=\"Service Unavailable.\"))\n\n api_response = self.invoke(\n method=\"PUT\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=None)\n\n if full_response:\n return api_response\n \n return None",
"def get_interaction_model_version_v1(self, skill_id, stage_v2, locale, version, **kwargs):\n # type: (str, str, str, str, **Any) -> Union[ApiResponse, object, StandardizedError_f5106a89, BadRequestError_f854b05, InteractionModelData_487fc9ea]\n operation_name = \"get_interaction_model_version_v1\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'skill_id' is set\n if ('skill_id' not in params) or (params['skill_id'] is None):\n raise ValueError(\n \"Missing the required parameter `skill_id` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'stage_v2' is set\n if ('stage_v2' not in params) or (params['stage_v2'] is None):\n raise ValueError(\n \"Missing the required parameter `stage_v2` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'locale' is set\n if ('locale' not in params) or (params['locale'] is None):\n raise ValueError(\n \"Missing the required parameter `locale` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'version' is set\n if ('version' not in params) or (params['version'] is None):\n raise ValueError(\n \"Missing the required parameter `version` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v1/skills/{skillId}/stages/{stageV2}/interactionModel/locales/{locale}/versions/{version}'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'skill_id' in params:\n path_params['skillId'] = params['skill_id']\n if 'stage_v2' in params:\n path_params['stageV2'] = params['stage_v2']\n if 'locale' in params:\n path_params['locale'] = params['locale']\n if 'version' in params:\n path_params['version'] = params['version']\n\n query_params = [] # type: List\n\n header_params = [] # type: List\n\n body_params = None\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.interaction_model.interaction_model_data.InteractionModelData\", status_code=200, message=\"Returns interaction model object on success.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=400, message=\"Server cannot process the request due to a client error e.g. 
the input interaction model is invalid.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=401, message=\"The auth token is invalid/expired or doesn't have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=403, message=\"The operation being requested is not allowed.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=404, message=\"The specified skill doesn't exist or there is no model defined for the locale or version.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=429, message=\"Exceeds the permitted request limit. Throttling criteria includes total requests, per API, ClientId, and CustomerId.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=500, message=\"Internal Server Error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=503, message=\"Service Unavailable.\"))\n\n api_response = self.invoke(\n method=\"GET\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=\"ask_smapi_model.v1.skill.interaction_model.interaction_model_data.InteractionModelData\")\n\n if full_response:\n return api_response\n return api_response.body",
"def get_skill(skillpath):\n return Skill.query.filter_by(path=skillpath).first()",
"def get_isp_associated_skills_v1(self, product_id, stage, **kwargs):\n # type: (str, str, **Any) -> Union[ApiResponse, object, AssociatedSkillResponse_12067635, Error_fbe913d9, BadRequestError_f854b05]\n operation_name = \"get_isp_associated_skills_v1\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'product_id' is set\n if ('product_id' not in params) or (params['product_id'] is None):\n raise ValueError(\n \"Missing the required parameter `product_id` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'stage' is set\n if ('stage' not in params) or (params['stage'] is None):\n raise ValueError(\n \"Missing the required parameter `stage` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v1/inSkillProducts/{productId}/stages/{stage}/skills'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'product_id' in params:\n path_params['productId'] = params['product_id']\n if 'stage' in params:\n path_params['stage'] = params['stage']\n\n query_params = [] # type: List\n if 'next_token' in params:\n query_params.append(('nextToken', params['next_token']))\n if 'max_results' in params:\n query_params.append(('maxResults', params['max_results']))\n\n header_params = [] # type: List\n\n body_params = None\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.isp.associated_skill_response.AssociatedSkillResponse\", status_code=200, message=\"Returns skills associated with the in-skill product.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=400, message=\"Bad request. Returned when a required parameter is not present, badly formatted. \"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=401, message=\"The auth token is invalid/expired or doesn't have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=404, message=\"Requested resource not found.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=429, message=\"Too many requests received.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=500, message=\"Internal Server Error\"))\n\n api_response = self.invoke(\n method=\"GET\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=\"ask_smapi_model.v1.isp.associated_skill_response.AssociatedSkillResponse\")\n\n if full_response:\n return api_response\n return api_response.body",
"def get_skill(self, utterance, lang=\"en-us\"):\n intent = self.get_intent(utterance, lang)\n if not intent:\n return None\n # theoretically skill_id might be missing\n if intent.get(\"skill_id\"):\n return intent[\"skill_id\"]\n # retrieve skill from munged intent name\n if intent.get(\"intent_name\"): # padatious + adapt\n return intent[\"name\"].split(\":\")[0]\n if intent.get(\"intent_type\"): # adapt\n return intent[\"intent_type\"].split(\":\")[0]\n return None # raise some error here maybe? this should never happen",
"def get_sample_stage(sample_id, stage_id):\n s = get_resource(Sample.query.filter_by(obfuscated_id=sample_id))\n return get_resource(SampleStage.query.filter_by(_sample_id=s.id))",
"def get_model(recipe, **overrides):\n\n # \"model\", \"stages__*__model\"\n if isinstance(overrides, dict):\n recipe = {**recipe, **overrides} # override parameters\n\n return get_instance(**recipe) # expand (shallow copy)",
"def list_interaction_model_versions_v1(self, skill_id, stage_v2, locale, **kwargs):\n # type: (str, str, str, **Any) -> Union[ApiResponse, object, StandardizedError_f5106a89, BadRequestError_f854b05, ListResponse_cb936759]\n operation_name = \"list_interaction_model_versions_v1\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'skill_id' is set\n if ('skill_id' not in params) or (params['skill_id'] is None):\n raise ValueError(\n \"Missing the required parameter `skill_id` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'stage_v2' is set\n if ('stage_v2' not in params) or (params['stage_v2'] is None):\n raise ValueError(\n \"Missing the required parameter `stage_v2` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'locale' is set\n if ('locale' not in params) or (params['locale'] is None):\n raise ValueError(\n \"Missing the required parameter `locale` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v1/skills/{skillId}/stages/{stageV2}/interactionModel/locales/{locale}/versions'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'skill_id' in params:\n path_params['skillId'] = params['skill_id']\n if 'stage_v2' in params:\n path_params['stageV2'] = params['stage_v2']\n if 'locale' in params:\n path_params['locale'] = params['locale']\n\n query_params = [] # type: List\n if 'next_token' in params:\n query_params.append(('nextToken', params['next_token']))\n if 'max_results' in params:\n query_params.append(('maxResults', params['max_results']))\n if 'sort_direction' in params:\n query_params.append(('sortDirection', params['sort_direction']))\n if 'sort_field' in params:\n query_params.append(('sortField', params['sort_field']))\n\n header_params = [] # type: List\n\n body_params = None\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.interaction_model.version.list_response.ListResponse\", status_code=200, message=\"Returns list of interactionModel versions of a skill for the vendor.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=400, message=\"Server cannot process the request due to a client error e.g. 
the input interaction model is invalid.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=401, message=\"The auth token is invalid/expired or doesn't have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=403, message=\"The operation being requested is not allowed.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=404, message=\"The specified skill doesn't exist or there is no model defined for the locale.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=429, message=\"Exceeds the permitted request limit. Throttling criteria includes total requests, per API, ClientId, and CustomerId.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=500, message=\"Internal Server Error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=503, message=\"Service Unavailable.\"))\n\n api_response = self.invoke(\n method=\"GET\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=\"ask_smapi_model.v1.skill.interaction_model.version.list_response.ListResponse\")\n\n if full_response:\n return api_response\n return api_response.body",
"def invoke_skill_end_point_v2(self, skill_id, stage, **kwargs):\n # type: (str, str, **Any) -> Union[ApiResponse, object, BadRequestError_765e0ac6, InvocationsApiResponse_3d7e3234, Error_ea6c1a5a]\n operation_name = \"invoke_skill_end_point_v2\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'skill_id' is set\n if ('skill_id' not in params) or (params['skill_id'] is None):\n raise ValueError(\n \"Missing the required parameter `skill_id` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'stage' is set\n if ('stage' not in params) or (params['stage'] is None):\n raise ValueError(\n \"Missing the required parameter `stage` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v2/skills/{skillId}/stages/{stage}/invocations'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'skill_id' in params:\n path_params['skillId'] = params['skill_id']\n if 'stage' in params:\n path_params['stage'] = params['stage']\n\n query_params = [] # type: List\n\n header_params = [] # type: List\n\n body_params = None\n if 'invocations_api_request' in params:\n body_params = params['invocations_api_request']\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.skill.invocations.invocations_api_response.InvocationsApiResponse\", status_code=200, message=\"Skill was invoked.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.bad_request_error.BadRequestError\", status_code=400, message=\"Bad request due to invalid or missing data.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.error.Error\", status_code=401, message=\"The auth token is invalid/expired or doesn't have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.bad_request_error.BadRequestError\", status_code=403, message=\"API user does not have permission to call this API or is currently in a state that does not allow invocation of this skill. 
\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.error.Error\", status_code=404, message=\"The specified skill does not exist.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.error.Error\", status_code=429, message=\"API user has exceeded the permitted request rate.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.error.Error\", status_code=500, message=\"Internal service error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.error.Error\", status_code=503, message=\"Service Unavailable.\"))\n\n api_response = self.invoke(\n method=\"POST\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=\"ask_smapi_model.v2.skill.invocations.invocations_api_response.InvocationsApiResponse\")\n\n if full_response:\n return api_response\n return api_response.body",
"def get_experiment_v1(self, skill_id, experiment_id, **kwargs):\n # type: (str, str, **Any) -> Union[ApiResponse, object, GetExperimentResponse_fcd92c35, StandardizedError_f5106a89, BadRequestError_f854b05]\n operation_name = \"get_experiment_v1\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'skill_id' is set\n if ('skill_id' not in params) or (params['skill_id'] is None):\n raise ValueError(\n \"Missing the required parameter `skill_id` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'experiment_id' is set\n if ('experiment_id' not in params) or (params['experiment_id'] is None):\n raise ValueError(\n \"Missing the required parameter `experiment_id` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v1/skills/{skillId}/experiments/{experimentId}'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'skill_id' in params:\n path_params['skillId'] = params['skill_id']\n if 'experiment_id' in params:\n path_params['experimentId'] = params['experiment_id']\n\n query_params = [] # type: List\n\n header_params = [] # type: List\n\n body_params = None\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.experiment.get_experiment_response.GetExperimentResponse\", status_code=200, message=\"Returned skill experiment.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=401, message=\"The auth token is invalid/expired or doesn't have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=403, message=\"The operation being requested is not allowed.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=404, message=\"The resource being requested is not found.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=429, message=\"Exceeds the permitted request limit. 
Throttling criteria includes total requests, per API, ClientId, and CustomerId.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=500, message=\"Internal Server Error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=503, message=\"Service Unavailable.\"))\n\n api_response = self.invoke(\n method=\"GET\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=\"ask_smapi_model.v1.skill.experiment.get_experiment_response.GetExperimentResponse\")\n\n if full_response:\n return api_response\n return api_response.body",
"def get_skill_simulation_v2(self, skill_id, stage, simulation_id, **kwargs):\n # type: (str, str, str, **Any) -> Union[ApiResponse, object, SimulationsApiResponse_e4ad17d, BadRequestError_765e0ac6, Error_ea6c1a5a]\n operation_name = \"get_skill_simulation_v2\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'skill_id' is set\n if ('skill_id' not in params) or (params['skill_id'] is None):\n raise ValueError(\n \"Missing the required parameter `skill_id` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'stage' is set\n if ('stage' not in params) or (params['stage'] is None):\n raise ValueError(\n \"Missing the required parameter `stage` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'simulation_id' is set\n if ('simulation_id' not in params) or (params['simulation_id'] is None):\n raise ValueError(\n \"Missing the required parameter `simulation_id` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v2/skills/{skillId}/stages/{stage}/simulations/{simulationId}'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'skill_id' in params:\n path_params['skillId'] = params['skill_id']\n if 'stage' in params:\n path_params['stage'] = params['stage']\n if 'simulation_id' in params:\n path_params['simulationId'] = params['simulation_id']\n\n query_params = [] # type: List\n\n header_params = [] # type: List\n\n body_params = None\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.skill.simulations.simulations_api_response.SimulationsApiResponse\", status_code=200, message=\"Successfully retrieved skill simulation information.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.error.Error\", status_code=401, message=\"The auth token is invalid/expired or doesn't have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.bad_request_error.BadRequestError\", status_code=403, message=\"API user does not have permission or is currently in a state that does not allow calls to this API. \"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.error.Error\", status_code=404, message=\"The specified skill or simulation does not exist. The error response will contain a description that indicates the specific resource type that was not found. 
\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.error.Error\", status_code=429, message=\"API user has exceeded the permitted request rate.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.error.Error\", status_code=500, message=\"Internal service error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.error.Error\", status_code=503, message=\"Service Unavailable.\"))\n\n api_response = self.invoke(\n method=\"GET\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=\"ask_smapi_model.v2.skill.simulations.simulations_api_response.SimulationsApiResponse\")\n\n if full_response:\n return api_response\n return api_response.body",
"def create_export_request_for_skill_v1(self, skill_id, stage, **kwargs):\n # type: (str, str, **Any) -> Union[ApiResponse, object, StandardizedError_f5106a89, BadRequestError_f854b05]\n operation_name = \"create_export_request_for_skill_v1\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'skill_id' is set\n if ('skill_id' not in params) or (params['skill_id'] is None):\n raise ValueError(\n \"Missing the required parameter `skill_id` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'stage' is set\n if ('stage' not in params) or (params['stage'] is None):\n raise ValueError(\n \"Missing the required parameter `stage` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v1/skills/{skillId}/stages/{stage}/exports'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'skill_id' in params:\n path_params['skillId'] = params['skill_id']\n if 'stage' in params:\n path_params['stage'] = params['stage']\n\n query_params = [] # type: List\n\n header_params = [] # type: List\n\n body_params = None\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=None, status_code=202, message=\"Accepted.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=400, message=\"Server cannot process the request due to a client error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=401, message=\"The auth token is invalid/expired or doesn't have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=404, message=\"The resource being requested is not found.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=409, message=\"The request could not be completed due to a conflict with the current state of the target resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=429, message=\"Exceeds the permitted request limit. 
Throttling criteria includes total requests, per API, ClientId, and CustomerId.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=500, message=\"Internal Server Error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=503, message=\"Service Unavailable.\"))\n\n api_response = self.invoke(\n method=\"POST\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=None)\n\n if full_response:\n return api_response\n \n return None",
"def _request_model(self, instance, success, get_embedded=True):\n coll = self.get_collection('_model')\n if get_embedded:\n callback = partial(self._get_embedded_model_names,\n instance=instance,\n success=success)\n else:\n callback = success\n\n try:\n instance['_model']\n except KeyError:\n raise tornado.web.HTTPError(400, 'Missing model key')\n coll.find_one({'_id': instance['_model']},\n callback=callback)",
"def GetModel(self, request, context):\n context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)",
"def get_objective(self):\n # Note that this makes the generic objectives call to Handcar\n # without specifying the objectiveBank:\n url_str = (self._base_url + '/objectives/' +\n self._my_map['objectiveId'])\n return Objective(self._load_json(url_str))",
"def get_model(self):\n return self.chain.model",
"def model(self) -> Optional[str]:\n return pulumi.get(self, \"model\")",
"def simulate_skill_v2(self, skill_id, stage, simulations_api_request, **kwargs):\n # type: (str, str, SimulationsApiRequest_ae2e6503, **Any) -> Union[ApiResponse, object, SimulationsApiResponse_e4ad17d, BadRequestError_765e0ac6, Error_ea6c1a5a]\n operation_name = \"simulate_skill_v2\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'skill_id' is set\n if ('skill_id' not in params) or (params['skill_id'] is None):\n raise ValueError(\n \"Missing the required parameter `skill_id` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'stage' is set\n if ('stage' not in params) or (params['stage'] is None):\n raise ValueError(\n \"Missing the required parameter `stage` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'simulations_api_request' is set\n if ('simulations_api_request' not in params) or (params['simulations_api_request'] is None):\n raise ValueError(\n \"Missing the required parameter `simulations_api_request` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v2/skills/{skillId}/stages/{stage}/simulations'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'skill_id' in params:\n path_params['skillId'] = params['skill_id']\n if 'stage' in params:\n path_params['stage'] = params['stage']\n\n query_params = [] # type: List\n\n header_params = [] # type: List\n\n body_params = None\n if 'simulations_api_request' in params:\n body_params = params['simulations_api_request']\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.skill.simulations.simulations_api_response.SimulationsApiResponse\", status_code=200, message=\"Skill simulation has successfully began.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.bad_request_error.BadRequestError\", status_code=400, message=\"Bad request due to invalid or missing data.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.error.Error\", status_code=401, message=\"The auth token is invalid/expired or doesn't have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.bad_request_error.BadRequestError\", status_code=403, message=\"API user does not have permission to call this API or is currently in a state that does not allow simulation of this skill. \"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.error.Error\", status_code=404, message=\"The specified skill does not exist.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.error.Error\", status_code=409, message=\"This requests conflicts with another one currently being processed. 
\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.error.Error\", status_code=429, message=\"API user has exceeded the permitted request rate.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.error.Error\", status_code=500, message=\"Internal service error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.error.Error\", status_code=503, message=\"Service Unavailable.\"))\n\n api_response = self.invoke(\n method=\"POST\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=\"ask_smapi_model.v2.skill.simulations.simulations_api_response.SimulationsApiResponse\")\n\n if full_response:\n return api_response\n return api_response.body",
"def model(self) -> Optional['outputs.DeploymentModelResponse']:\n return pulumi.get(self, \"model\")",
"def model(self) -> 'outputs.ModelDefinitionResponse':\n return pulumi.get(self, \"model\")",
"def skill(self):\n return self._get(\"skill\")",
"def get_isp_list_for_skill_id_v1(self, skill_id, stage, **kwargs):\n # type: (str, str, **Any) -> Union[ApiResponse, object, Error_fbe913d9, BadRequestError_f854b05, ListInSkillProductResponse_505e7307]\n operation_name = \"get_isp_list_for_skill_id_v1\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'skill_id' is set\n if ('skill_id' not in params) or (params['skill_id'] is None):\n raise ValueError(\n \"Missing the required parameter `skill_id` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'stage' is set\n if ('stage' not in params) or (params['stage'] is None):\n raise ValueError(\n \"Missing the required parameter `stage` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v1/skills/{skillId}/stages/{stage}/inSkillProducts'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'skill_id' in params:\n path_params['skillId'] = params['skill_id']\n if 'stage' in params:\n path_params['stage'] = params['stage']\n\n query_params = [] # type: List\n if 'next_token' in params:\n query_params.append(('nextToken', params['next_token']))\n if 'max_results' in params:\n query_params.append(('maxResults', params['max_results']))\n\n header_params = [] # type: List\n\n body_params = None\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.isp.list_in_skill_product_response.ListInSkillProductResponse\", status_code=200, message=\"Response contains list of in-skill products for the specified skillId and stage.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=400, message=\"Bad request. Returned when a required parameter is not present, badly formatted. \"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=401, message=\"The auth token is invalid/expired or doesn't have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=404, message=\"Requested resource not found.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=429, message=\"Too many requests received.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=500, message=\"Internal Server Error\"))\n\n api_response = self.invoke(\n method=\"GET\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=\"ask_smapi_model.v1.isp.list_in_skill_product_response.ListInSkillProductResponse\")\n\n if full_response:\n return api_response\n return api_response.body",
"def get_skill_from_id(skill_id):\n return Skill.query.filter_by(id=skill_id).first()",
"def getCurrentModel(self) -> Optional[Type[Fit]]:\n current_module = self.fitting_modules[self.module_combo.currentText()]\n model_selected = self.model_list.currentItem()\n if model_selected is None:\n return None\n model = getattr(current_module, model_selected.text())\n return model",
"def get_model(model):\n all_models = cmd.get_object_list()\n\n if len(all_models) == 0:\n logging.parser_error('No models are opened.')\n return\n\n model = model.lower()\n\n if model and (model in all_models):\n return model\n\n if len(all_models) > 1:\n logging.parser_error(\"Please specify which model you want to use. {}\".format(all_models))\n return\n\n return all_models[0]",
"def _lookup_model(self, name, experiment_dict):\n if experiment_dict.get(name) is None:\n return None\n return self._lookups[name][experiment_dict[name]]",
"def get_model(self, name):\n bundle_name, model_name = name.split(\".\")\n bundle = self.bundles[bundle_name]\n model = bundle.models[name]\n return model",
"def get_utterance_data_v1(self, skill_id, stage, **kwargs):\n # type: (str, str, **Any) -> Union[ApiResponse, object, StandardizedError_f5106a89, BadRequestError_f854b05, IntentRequests_35db15c7]\n operation_name = \"get_utterance_data_v1\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'skill_id' is set\n if ('skill_id' not in params) or (params['skill_id'] is None):\n raise ValueError(\n \"Missing the required parameter `skill_id` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'stage' is set\n if ('stage' not in params) or (params['stage'] is None):\n raise ValueError(\n \"Missing the required parameter `stage` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v1/skills/{skillId}/history/intentRequests'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'skill_id' in params:\n path_params['skillId'] = params['skill_id']\n\n query_params = [] # type: List\n if 'next_token' in params:\n query_params.append(('nextToken', params['next_token']))\n if 'max_results' in params:\n query_params.append(('maxResults', params['max_results']))\n if 'sort_direction' in params:\n query_params.append(('sortDirection', params['sort_direction']))\n if 'sort_field' in params:\n query_params.append(('sortField', params['sort_field']))\n if 'stage' in params:\n query_params.append(('stage', params['stage']))\n if 'locale' in params:\n query_params.append(('locale', params['locale']))\n if 'dialog_act_name' in params:\n query_params.append(('dialogAct.name', params['dialog_act_name']))\n if 'intent_confidence_bin' in params:\n query_params.append(('intent.confidence.bin', params['intent_confidence_bin']))\n if 'intent_name' in params:\n query_params.append(('intent.name', params['intent_name']))\n if 'intent_slots_name' in params:\n query_params.append(('intent.slots.name', params['intent_slots_name']))\n if 'interaction_type' in params:\n query_params.append(('interactionType', params['interaction_type']))\n if 'publication_status' in params:\n query_params.append(('publicationStatus', params['publication_status']))\n if 'utterance_text' in params:\n query_params.append(('utteranceText', params['utterance_text']))\n\n header_params = [] # type: List\n\n body_params = None\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.history.intent_requests.IntentRequests\", status_code=200, message=\"Returns a list of utterance items for the given skill.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=400, message=\"Bad Request.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=401, message=\"Unauthorized.\"))\n 
error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=404, message=\"Skill Not Found.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=429, message=\"Exceeds the permitted request limit. Throttling criteria includes total requests, per API, ClientId, and CustomerId.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=500, message=\"Internal Server Error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=503, message=\"Service Unavailable.\"))\n\n api_response = self.invoke(\n method=\"GET\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=\"ask_smapi_model.v1.skill.history.intent_requests.IntentRequests\")\n\n if full_response:\n return api_response\n return api_response.body",
"def intent_model(self) -> ControllerIntentModel:\n return self._intent_model"
] | [
"0.5837195",
"0.5794024",
"0.556514",
"0.5178054",
"0.51161706",
"0.4783202",
"0.47309718",
"0.4666063",
"0.46613026",
"0.46403983",
"0.45909518",
"0.4589444",
"0.4541329",
"0.45185766",
"0.4510075",
"0.4501116",
"0.44332525",
"0.436137",
"0.43485343",
"0.4298251",
"0.4294916",
"0.42889518",
"0.42652035",
"0.42628402",
"0.4258941",
"0.41894856",
"0.41825",
"0.41647038",
"0.41561672",
"0.41499233"
] | 0.6707876 | 0 |
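The generated SMAPI client methods collected above share one calling convention: required path parameters are positional, optional query parameters travel through **kwargs, and passing full_response=True returns the raw ApiResponse (status code and headers) instead of only the parsed body. A minimal usage sketch, assuming a service client instance named client that defines get_isp_associated_skills_v1 as shown above; the product id is a placeholder, not a real identifier:

# Hedged sketch: `client` and the product id below are assumptions, not part of the dataset.
response = client.get_isp_associated_skills_v1(
    product_id="amzn1.adg.product.placeholder",  # hypothetical in-skill product id
    stage="development",
    max_results=10,           # forwarded as the maxResults query parameter
    full_response=True,       # request the full ApiResponse wrapper
)
print(response.status_code)   # HTTP status, e.g. 200 on success
print(response.body)          # AssociatedSkillResponse model instance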
Get the list of interactionModel versions of a skill for the vendor. | def list_interaction_model_versions_v1(self, skill_id, stage_v2, locale, **kwargs):
# type: (str, str, str, **Any) -> Union[ApiResponse, object, StandardizedError_f5106a89, BadRequestError_f854b05, ListResponse_cb936759]
operation_name = "list_interaction_model_versions_v1"
params = locals()
for key, val in six.iteritems(params['kwargs']):
params[key] = val
del params['kwargs']
# verify the required parameter 'skill_id' is set
if ('skill_id' not in params) or (params['skill_id'] is None):
raise ValueError(
"Missing the required parameter `skill_id` when calling `" + operation_name + "`")
# verify the required parameter 'stage_v2' is set
if ('stage_v2' not in params) or (params['stage_v2'] is None):
raise ValueError(
"Missing the required parameter `stage_v2` when calling `" + operation_name + "`")
# verify the required parameter 'locale' is set
if ('locale' not in params) or (params['locale'] is None):
raise ValueError(
"Missing the required parameter `locale` when calling `" + operation_name + "`")
resource_path = '/v1/skills/{skillId}/stages/{stageV2}/interactionModel/locales/{locale}/versions'
resource_path = resource_path.replace('{format}', 'json')
path_params = {} # type: Dict
if 'skill_id' in params:
path_params['skillId'] = params['skill_id']
if 'stage_v2' in params:
path_params['stageV2'] = params['stage_v2']
if 'locale' in params:
path_params['locale'] = params['locale']
query_params = [] # type: List
if 'next_token' in params:
query_params.append(('nextToken', params['next_token']))
if 'max_results' in params:
query_params.append(('maxResults', params['max_results']))
if 'sort_direction' in params:
query_params.append(('sortDirection', params['sort_direction']))
if 'sort_field' in params:
query_params.append(('sortField', params['sort_field']))
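        # nextToken/maxResults page through the version list; sortDirection/sortField control ordering.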
header_params = [] # type: List
body_params = None
header_params.append(('Content-type', 'application/json'))
header_params.append(('User-Agent', self.user_agent))
# Response Type
full_response = False
if 'full_response' in params:
full_response = params['full_response']
# Authentication setting
access_token = self._lwa_service_client.get_access_token_from_refresh_token()
authorization_value = "Bearer " + access_token
header_params.append(('Authorization', authorization_value))
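        # The Authorization header carries an LWA access token derived from the configured refresh token.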
error_definitions = [] # type: List
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.interaction_model.version.list_response.ListResponse", status_code=200, message="Returns list of interactionModel versions of a skill for the vendor."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.bad_request_error.BadRequestError", status_code=400, message="Server cannot process the request due to a client error e.g. the input interaction model is invalid."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=401, message="The auth token is invalid/expired or doesn't have access to the resource."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.bad_request_error.BadRequestError", status_code=403, message="The operation being requested is not allowed."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=404, message="The specified skill doesn't exist or there is no model defined for the locale."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=429, message="Exceeds the permitted request limit. Throttling criteria includes total requests, per API, ClientId, and CustomerId."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=500, message="Internal Server Error."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=503, message="Service Unavailable."))
api_response = self.invoke(
method="GET",
endpoint=self._api_endpoint,
path=resource_path,
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
response_definitions=error_definitions,
response_type="ask_smapi_model.v1.skill.interaction_model.version.list_response.ListResponse")
if full_response:
return api_response
return api_response.body | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def ListModelVersions(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')",
"def list_versions_for_skill_v1(self, skill_id, **kwargs):\n # type: (str, **Any) -> Union[ApiResponse, object, ListSkillVersionsResponse_7522147d, StandardizedError_f5106a89, BadRequestError_f854b05]\n operation_name = \"list_versions_for_skill_v1\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'skill_id' is set\n if ('skill_id' not in params) or (params['skill_id'] is None):\n raise ValueError(\n \"Missing the required parameter `skill_id` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v1/skills/{skillId}/versions'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'skill_id' in params:\n path_params['skillId'] = params['skill_id']\n\n query_params = [] # type: List\n if 'next_token' in params:\n query_params.append(('nextToken', params['next_token']))\n if 'max_results' in params:\n query_params.append(('maxResults', params['max_results']))\n\n header_params = [] # type: List\n\n body_params = None\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.list_skill_versions_response.ListSkillVersionsResponse\", status_code=200, message=\"Successfully retrieved skill versions\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=400, message=\"Server cannot process the request due to a client error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=401, message=\"The auth token is invalid/expired or doesn't have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=403, message=\"The operation being requested is not allowed.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=404, message=\"The resource being requested is not found.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=429, message=\"Exceeds the permitted request limit. 
Throttling criteria includes total requests, per API, ClientId, and CustomerId.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=500, message=\"Internal Server Error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=503, message=\"Service Unavailable.\"))\n\n api_response = self.invoke(\n method=\"GET\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=\"ask_smapi_model.v1.skill.list_skill_versions_response.ListSkillVersionsResponse\")\n\n if full_response:\n return api_response\n return api_response.body",
"def get_versions(self):\n raise NotImplementedError",
"def list_versions(self, project_id, model_id):\n endpoint = \"/project/{}/model/{}/version\".format(project_id, model_id)\n return self._get(endpoint, _ModelVersionSchema(many=True))",
"def get_versions():\n ret_obj = {'versions': picard_versions(current_app)}\n return make_response(jsonify(ret_obj), 200)",
"def get_interaction_model_version_v1(self, skill_id, stage_v2, locale, version, **kwargs):\n # type: (str, str, str, str, **Any) -> Union[ApiResponse, object, StandardizedError_f5106a89, BadRequestError_f854b05, InteractionModelData_487fc9ea]\n operation_name = \"get_interaction_model_version_v1\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'skill_id' is set\n if ('skill_id' not in params) or (params['skill_id'] is None):\n raise ValueError(\n \"Missing the required parameter `skill_id` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'stage_v2' is set\n if ('stage_v2' not in params) or (params['stage_v2'] is None):\n raise ValueError(\n \"Missing the required parameter `stage_v2` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'locale' is set\n if ('locale' not in params) or (params['locale'] is None):\n raise ValueError(\n \"Missing the required parameter `locale` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'version' is set\n if ('version' not in params) or (params['version'] is None):\n raise ValueError(\n \"Missing the required parameter `version` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v1/skills/{skillId}/stages/{stageV2}/interactionModel/locales/{locale}/versions/{version}'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'skill_id' in params:\n path_params['skillId'] = params['skill_id']\n if 'stage_v2' in params:\n path_params['stageV2'] = params['stage_v2']\n if 'locale' in params:\n path_params['locale'] = params['locale']\n if 'version' in params:\n path_params['version'] = params['version']\n\n query_params = [] # type: List\n\n header_params = [] # type: List\n\n body_params = None\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.interaction_model.interaction_model_data.InteractionModelData\", status_code=200, message=\"Returns interaction model object on success.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=400, message=\"Server cannot process the request due to a client error e.g. 
the input interaction model is invalid.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=401, message=\"The auth token is invalid/expired or doesn't have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=403, message=\"The operation being requested is not allowed.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=404, message=\"The specified skill doesn't exist or there is no model defined for the locale or version.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=429, message=\"Exceeds the permitted request limit. Throttling criteria includes total requests, per API, ClientId, and CustomerId.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=500, message=\"Internal Server Error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=503, message=\"Service Unavailable.\"))\n\n api_response = self.invoke(\n method=\"GET\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=\"ask_smapi_model.v1.skill.interaction_model.interaction_model_data.InteractionModelData\")\n\n if full_response:\n return api_response\n return api_response.body",
"def getVersions(self):\n logger.debug(\"Func: getVersions\")\n\n try:\n return self._currentSceneInfo[\"Versions\"]\n except:\n return []",
"def do_list_versions(**kwargs):\n mle = MLEngineHook()\n model_name = kwargs['dag_run'].conf.get('model_name')\n model_versions = mle.list_versions(PROJECT, model_name)\n kwargs['ti'].xcom_push(key='model_versions', value=model_versions)",
"def get_model_versions(cause_id, age_start, age_end, model_version_type_id):\n call = \"\"\"\n SELECT model_version_id FROM cod.model_version\n WHERE cause_id = {c}\n AND age_start = {a_start} AND age_end = {a_end}\n AND model_version_type_id = {mvt}\n AND gbd_round_id > 5 AND status = 1\n \"\"\".format(c=cause_id, a_start=age_start, a_end=age_end,\n mvt=model_version_type_id)\n model_versions = query(call, conn_def='codem')['model_version_id'].tolist()\n return model_versions",
"def model(self):\n return ProductVersion",
"def versions(self):\n return self._versions",
"def select_versions(self):\n return []",
"def versions(self) -> pulumi.Output[List['outputs.RegionInstanceGroupManagerVersion']]:\n return pulumi.get(self, \"versions\")",
"def ListVersions(self, request, context):\n context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)",
"def versions(self) -> Dict[str, str]:\n self.__logger.debug('Eva.versions called')\n return self.__http_client.api_versions()",
"def versions(self, name):\n if not len(self):\n self.update()\n return [version for version in self if os.path.basename(version) == name]",
"def list_versions(self):\n version_url = self._get_base_version_url()\n\n resp, body = self.raw_request(version_url, 'GET')\n # NOTE: We need a raw_request() here instead of request() call because\n # \"list API versions\" API doesn't require an authentication and we can\n # skip it with raw_request() call.\n self._error_checker(resp, body)\n\n body = json.loads(body)\n self.validate_response(schema.list_versions, resp, body)\n return rest_client.ResponseBody(resp, body)",
"def get_github_chandra_models_version_info():\n with urlopen('https://api.github.com/repos/sot/chandra_models/tags') as url:\n response = url.read()\n tags = json.loads(response.decode('utf-8'))\n\n with urlopen('https://api.github.com/repos/sot/chandra_models/branches') as url:\n response = url.read()\n branches = json.loads(response.decode('utf-8'))\n\n all_versions_info = {t[\"name\"]: t for t in tags}\n all_versions_info.update({b[\"name\"]: b for b in branches})\n return all_versions_info",
"def ListModels(self, request, context):\n context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)",
"def versions(self) -> List['RadsProjectVersion']:\n logger.debug(f\"retrieve versions of {self}\")\n listing = self.storage.request_text(f\"{self.path}/releaselisting\")\n return [RadsProjectVersion(self, RadsVersion(l)) for l in listing.splitlines()]",
"def vendor_list():\n return ['nxos', 'eos', 'cumulus']",
"def models(self):\n models = []\n for bundle in self.bundles.values():\n models.extend(list(bundle.models.values()))\n\n return models",
"def versionIdentifiers(self):\n for bid in self.boundIdentifiers:\n yield from self[bid:owl.versionIRI]",
"def all(self):\r\n if self._versions is None or \\\r\n len(self._versions) == 0:\r\n url = \"%s/versions\" % self._url\r\n params = {'f':'json'}\r\n res = self._con.get(url, params)\r\n self._versions = []\r\n if 'versions' in res:\r\n for v in res['versions']:\r\n guid = v['versionGuid'][1:-1]\r\n vurl = \"%s/versions/%s\" % (self._url, guid)\r\n self._versions.append(Version(url=vurl,\r\n flc=self._flc,\r\n gis=self._gis))\r\n return self._versions\r\n return self._versions",
"def available_versions(self):\n return list(sorted(self.onxs))",
"def api_versions(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"api_versions\")",
"def api_versions(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"api_versions\")",
"def api_versions(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"api_versions\")",
"def api_versions(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"api_versions\")",
"def index(self, request):\n versions = []\n for key, data in VERSIONS.items():\n v = BaseVersion(\n data[\"id\"],\n data[\"status\"],\n request.application_url,\n data[\"updated\"])\n versions.append(v)\n return wsgi.Result(VersionsDataView(versions))"
] | [
"0.5731109",
"0.5648163",
"0.554995",
"0.5511418",
"0.5418477",
"0.5396421",
"0.53299284",
"0.53104043",
"0.5279472",
"0.5241545",
"0.52111715",
"0.5177248",
"0.5130413",
"0.51191485",
"0.50555",
"0.5019464",
"0.4986843",
"0.49674267",
"0.49502325",
"0.49276555",
"0.49149188",
"0.4845661",
"0.48441693",
"0.48303908",
"0.48296916",
"0.4825074",
"0.4825074",
"0.4825074",
"0.4825074",
"0.4809101"
] | 0.6314473 | 0 |
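Because list_interaction_model_versions_v1 forwards next_token and max_results as the nextToken/maxResults query parameters, callers can page through a skill's model versions. A minimal sketch under stated assumptions: client is a constructed service client exposing the method defined in the document above, the skill id is a placeholder, and the attribute holding the continuation token on the returned ListResponse is assumed (not confirmed by the source) to be named next_token:

next_token = None
while True:
    page = client.list_interaction_model_versions_v1(
        skill_id="amzn1.ask.skill.placeholder",  # hypothetical skill id
        stage_v2="development",
        locale="en-US",
        next_token=next_token,    # None requests the first page
        max_results=50,
        sort_direction="desc",
    )
    print(page)                                     # ListResponse for this page
    next_token = getattr(page, "next_token", None)  # attribute name is an assumption
    if not next_token:
        break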
Gets the specified version of the `InteractionModel` of a skill for the vendor. Use `~current` as the version parameter to get the current version of the model. | def get_interaction_model_version_v1(self, skill_id, stage_v2, locale, version, **kwargs):
# type: (str, str, str, str, **Any) -> Union[ApiResponse, object, StandardizedError_f5106a89, BadRequestError_f854b05, InteractionModelData_487fc9ea]
operation_name = "get_interaction_model_version_v1"
params = locals()
for key, val in six.iteritems(params['kwargs']):
params[key] = val
del params['kwargs']
# verify the required parameter 'skill_id' is set
if ('skill_id' not in params) or (params['skill_id'] is None):
raise ValueError(
"Missing the required parameter `skill_id` when calling `" + operation_name + "`")
# verify the required parameter 'stage_v2' is set
if ('stage_v2' not in params) or (params['stage_v2'] is None):
raise ValueError(
"Missing the required parameter `stage_v2` when calling `" + operation_name + "`")
# verify the required parameter 'locale' is set
if ('locale' not in params) or (params['locale'] is None):
raise ValueError(
"Missing the required parameter `locale` when calling `" + operation_name + "`")
# verify the required parameter 'version' is set
if ('version' not in params) or (params['version'] is None):
raise ValueError(
"Missing the required parameter `version` when calling `" + operation_name + "`")
resource_path = '/v1/skills/{skillId}/stages/{stageV2}/interactionModel/locales/{locale}/versions/{version}'
resource_path = resource_path.replace('{format}', 'json')
path_params = {} # type: Dict
if 'skill_id' in params:
path_params['skillId'] = params['skill_id']
if 'stage_v2' in params:
path_params['stageV2'] = params['stage_v2']
if 'locale' in params:
path_params['locale'] = params['locale']
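        # 'version' is either an explicit model version or the literal '~current', which resolves to the stage's current model (see the query above).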
if 'version' in params:
path_params['version'] = params['version']
query_params = [] # type: List
header_params = [] # type: List
body_params = None
header_params.append(('Content-type', 'application/json'))
header_params.append(('User-Agent', self.user_agent))
# Response Type
full_response = False
if 'full_response' in params:
full_response = params['full_response']
# Authentication setting
access_token = self._lwa_service_client.get_access_token_from_refresh_token()
authorization_value = "Bearer " + access_token
header_params.append(('Authorization', authorization_value))
error_definitions = [] # type: List
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.interaction_model.interaction_model_data.InteractionModelData", status_code=200, message="Returns interaction model object on success."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.bad_request_error.BadRequestError", status_code=400, message="Server cannot process the request due to a client error e.g. the input interaction model is invalid."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=401, message="The auth token is invalid/expired or doesn't have access to the resource."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.bad_request_error.BadRequestError", status_code=403, message="The operation being requested is not allowed."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=404, message="The specified skill doesn't exist or there is no model defined for the locale or version."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=429, message="Exceeds the permitted request limit. Throttling criteria includes total requests, per API, ClientId, and CustomerId."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=500, message="Internal Server Error."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=503, message="Service Unavailable."))
api_response = self.invoke(
method="GET",
endpoint=self._api_endpoint,
path=resource_path,
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
response_definitions=error_definitions,
response_type="ask_smapi_model.v1.skill.interaction_model.interaction_model_data.InteractionModelData")
if full_response:
return api_response
return api_response.body | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def GetModelVersion(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')",
"def get_version():\n global __model\n return __model.__version__",
"def model(self):\n return ProductVersion",
"def model_version(self) -> str:\n return pulumi.get(self, \"model_version\")",
"def get_version(self, project_id, model_id, version_id):\n endpoint = \"/project/{}/model/{}/version/{}\".format(\n project_id, model_id, version_id\n )\n return self._get(endpoint, _ModelVersionSchema())",
"def model(self) -> Optional['outputs.DeploymentModelResponse']:\n return pulumi.get(self, \"model\")",
"def version(self) -> 'outputs.VersionResponse':\n return pulumi.get(self, \"version\")",
"def get_version(self):\n return self.__make_api_call('get/version')",
"def get_current_version(self):\n raise NotImplementedError(\"get_current_version is not implemented\")",
"def get_version(self):\n\n r = self._create_operation_request(self, method=\"GET\")\n root_info = send_session_request(self._session, r).json()\n return root_info[\"currentVersion\"]",
"def get_version(self):\n\t\treturn call_sdk_function('PrlApi_GetVersion')",
"def version(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"version\")",
"def version(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"version\")",
"def version(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"version\")",
"def version(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"version\")",
"def version(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"version\")",
"def version(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"version\")",
"def version(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"version\")",
"def version(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"version\")",
"def version(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"version\")",
"def version(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"version\")",
"def version(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"version\")",
"def version(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"version\")",
"def version(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"version\")",
"def version(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"version\")",
"def version(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"version\")",
"def model_version_id(self) -> Optional[str]:\n return pulumi.get(self, \"model_version_id\")",
"def version(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"version\")",
"def get_version(model_instance, version):\n version_field = get_version_fieldname(model_instance)\n kwargs = {'pk': model_instance.pk, version_field: version}\n return model_instance.__class__.objects.get(**kwargs)",
"def GetVersion(self, request, context):\n context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)"
] | [
"0.609386",
"0.60471684",
"0.59729415",
"0.5941532",
"0.57474136",
"0.5744456",
"0.5716244",
"0.5673265",
"0.5659888",
"0.55117637",
"0.5506471",
"0.5478209",
"0.5478209",
"0.5478209",
"0.5478209",
"0.5478209",
"0.5478209",
"0.5478209",
"0.5478209",
"0.5478209",
"0.5478209",
"0.5478209",
"0.5478209",
"0.5478209",
"0.5478209",
"0.5478209",
"0.5469697",
"0.5429649",
"0.54254735",
"0.5389754"
] | 0.63348377 | 0 |
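
A minimal usage sketch for the `get_interaction_model_version_v1` record above. The builder import assumes the companion ask-smapi-sdk package; the credential strings, the skill id, and the `smapi_client` name are placeholders introduced here, not part of the record.

from ask_smapi_sdk import StandardSmapiClientBuilder

# Assumed construction path: StandardSmapiClientBuilder ships with the
# companion ask-smapi-sdk package; every credential value below is a placeholder.
smapi_client = StandardSmapiClientBuilder(
    client_id="<lwa-client-id>",
    client_secret="<lwa-client-secret>",
    refresh_token="<lwa-refresh-token>").client()

# "~current" resolves to the live model version, per the docstring above.
model_data = smapi_client.get_interaction_model_version_v1(
    skill_id="<skill-id>",
    stage_v2="development",
    locale="en-US",
    version="~current")
print(model_data.version)  # InteractionModelData carries the resolved version string
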
Returns the skill manifest for the given skillId and stage. | def get_skill_manifest_v1(self, skill_id, stage_v2, **kwargs):
# type: (str, str, **Any) -> Union[ApiResponse, object, SkillManifestEnvelope_fc0e823b, StandardizedError_f5106a89, BadRequestError_f854b05]
operation_name = "get_skill_manifest_v1"
params = locals()
for key, val in six.iteritems(params['kwargs']):
params[key] = val
del params['kwargs']
# verify the required parameter 'skill_id' is set
if ('skill_id' not in params) or (params['skill_id'] is None):
raise ValueError(
"Missing the required parameter `skill_id` when calling `" + operation_name + "`")
# verify the required parameter 'stage_v2' is set
if ('stage_v2' not in params) or (params['stage_v2'] is None):
raise ValueError(
"Missing the required parameter `stage_v2` when calling `" + operation_name + "`")
resource_path = '/v1/skills/{skillId}/stages/{stageV2}/manifest'
resource_path = resource_path.replace('{format}', 'json')
path_params = {} # type: Dict
if 'skill_id' in params:
path_params['skillId'] = params['skill_id']
if 'stage_v2' in params:
path_params['stageV2'] = params['stage_v2']
query_params = [] # type: List
header_params = [] # type: List
body_params = None
header_params.append(('Content-type', 'application/json'))
header_params.append(('User-Agent', self.user_agent))
# Response Type
full_response = False
if 'full_response' in params:
full_response = params['full_response']
# Authentication setting
access_token = self._lwa_service_client.get_access_token_from_refresh_token()
authorization_value = "Bearer " + access_token
header_params.append(('Authorization', authorization_value))
error_definitions = [] # type: List
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.manifest.skill_manifest_envelope.SkillManifestEnvelope", status_code=200, message="Response contains the latest version of skill manifest."))
error_definitions.append(ServiceClientResponse(response_type=None, status_code=303, message="See Other"))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.bad_request_error.BadRequestError", status_code=400, message="Server cannot process the request due to a client error."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=401, message="The auth token is invalid/expired or doesn't have access to the resource."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.bad_request_error.BadRequestError", status_code=403, message="The operation being requested is not allowed."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=404, message="The resource being requested is not found."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=429, message="Exceed the permitted request limit. Throttling criteria includes total requests, per API, ClientId, and CustomerId."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=500, message="Internal Server Error."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=503, message="Service Unavailable."))
api_response = self.invoke(
method="GET",
endpoint=self._api_endpoint,
path=resource_path,
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
response_definitions=error_definitions,
response_type="ask_smapi_model.v1.skill.manifest.skill_manifest_envelope.SkillManifestEnvelope")
if full_response:
return api_response
return api_response.body | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def update_skill_manifest_v1(self, skill_id, stage_v2, update_skill_request, **kwargs):\n # type: (str, str, SkillManifestEnvelope_fc0e823b, **Any) -> Union[ApiResponse, object, StandardizedError_f5106a89, BadRequestError_f854b05]\n operation_name = \"update_skill_manifest_v1\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'skill_id' is set\n if ('skill_id' not in params) or (params['skill_id'] is None):\n raise ValueError(\n \"Missing the required parameter `skill_id` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'stage_v2' is set\n if ('stage_v2' not in params) or (params['stage_v2'] is None):\n raise ValueError(\n \"Missing the required parameter `stage_v2` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'update_skill_request' is set\n if ('update_skill_request' not in params) or (params['update_skill_request'] is None):\n raise ValueError(\n \"Missing the required parameter `update_skill_request` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v1/skills/{skillId}/stages/{stageV2}/manifest'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'skill_id' in params:\n path_params['skillId'] = params['skill_id']\n if 'stage_v2' in params:\n path_params['stageV2'] = params['stage_v2']\n\n query_params = [] # type: List\n\n header_params = [] # type: List\n if 'if_match' in params:\n header_params.append(('If-Match', params['if_match']))\n\n body_params = None\n if 'update_skill_request' in params:\n body_params = params['update_skill_request']\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=None, status_code=202, message=\"Accepted; Returns a URL to track the status in 'Location' header.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=400, message=\"Server cannot process the request due to a client error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=401, message=\"The auth token is invalid/expired or doesn't have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=403, message=\"The operation being requested is not allowed.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=404, message=\"The resource being requested is not found.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=409, message=\"The request could not be completed due to a conflict with the current state of the target resource.\"))\n 
error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=412, message=\"Precondition failed.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=429, message=\"Exceed the permitted request limit. Throttling criteria includes total requests, per API, ClientId, and CustomerId.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=500, message=\"Internal Server Error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=503, message=\"Service Unavailable.\"))\n\n api_response = self.invoke(\n method=\"PUT\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=None)\n\n if full_response:\n return api_response\n \n return None",
"def get_sample_stage(sample_id, stage_id):\n s = get_resource(Sample.query.filter_by(obfuscated_id=sample_id))\n return get_resource(SampleStage.query.filter_by(_sample_id=s.id))",
"def get_isp_associated_skills_v1(self, product_id, stage, **kwargs):\n # type: (str, str, **Any) -> Union[ApiResponse, object, AssociatedSkillResponse_12067635, Error_fbe913d9, BadRequestError_f854b05]\n operation_name = \"get_isp_associated_skills_v1\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'product_id' is set\n if ('product_id' not in params) or (params['product_id'] is None):\n raise ValueError(\n \"Missing the required parameter `product_id` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'stage' is set\n if ('stage' not in params) or (params['stage'] is None):\n raise ValueError(\n \"Missing the required parameter `stage` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v1/inSkillProducts/{productId}/stages/{stage}/skills'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'product_id' in params:\n path_params['productId'] = params['product_id']\n if 'stage' in params:\n path_params['stage'] = params['stage']\n\n query_params = [] # type: List\n if 'next_token' in params:\n query_params.append(('nextToken', params['next_token']))\n if 'max_results' in params:\n query_params.append(('maxResults', params['max_results']))\n\n header_params = [] # type: List\n\n body_params = None\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.isp.associated_skill_response.AssociatedSkillResponse\", status_code=200, message=\"Returns skills associated with the in-skill product.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=400, message=\"Bad request. Returned when a required parameter is not present, badly formatted. \"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=401, message=\"The auth token is invalid/expired or doesn't have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=404, message=\"Requested resource not found.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=429, message=\"Too many requests received.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=500, message=\"Internal Server Error\"))\n\n api_response = self.invoke(\n method=\"GET\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=\"ask_smapi_model.v1.isp.associated_skill_response.AssociatedSkillResponse\")\n\n if full_response:\n return api_response\n return api_response.body",
"def stage_by_id(self, stage_id):\n response = self._session.get(\n path=self._session.urljoin(\n self.STAGES_RESOURCE_PATH,\n '{}.xml'.format(stage_id)\n ).format(\n base_api=self.base_api\n ),\n headers={'Accept': 'application/xml'},\n )\n\n return response.text",
"def get_skill_from_id(skill_id):\n return Skill.query.filter_by(id=skill_id).first()",
"def get_manifest(self):\n url = f'samples/{self.uuid}/manifest'\n return self.knex.get(url)",
"def create_export_request_for_skill_v1(self, skill_id, stage, **kwargs):\n # type: (str, str, **Any) -> Union[ApiResponse, object, StandardizedError_f5106a89, BadRequestError_f854b05]\n operation_name = \"create_export_request_for_skill_v1\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'skill_id' is set\n if ('skill_id' not in params) or (params['skill_id'] is None):\n raise ValueError(\n \"Missing the required parameter `skill_id` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'stage' is set\n if ('stage' not in params) or (params['stage'] is None):\n raise ValueError(\n \"Missing the required parameter `stage` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v1/skills/{skillId}/stages/{stage}/exports'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'skill_id' in params:\n path_params['skillId'] = params['skill_id']\n if 'stage' in params:\n path_params['stage'] = params['stage']\n\n query_params = [] # type: List\n\n header_params = [] # type: List\n\n body_params = None\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=None, status_code=202, message=\"Accepted.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=400, message=\"Server cannot process the request due to a client error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=401, message=\"The auth token is invalid/expired or doesn't have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=404, message=\"The resource being requested is not found.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=409, message=\"The request could not be completed due to a conflict with the current state of the target resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=429, message=\"Exceeds the permitted request limit. 
Throttling criteria includes total requests, per API, ClientId, and CustomerId.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=500, message=\"Internal Server Error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=503, message=\"Service Unavailable.\"))\n\n api_response = self.invoke(\n method=\"POST\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=None)\n\n if full_response:\n return api_response\n \n return None",
"def stage(self, stage_id):\r\n return pipelines.Stage(self, stage_id)",
"def getSkill(userId, skill=-1) -> list:\n # fetch user\n try:\n user = fetchUser(userId=userId)\n except:\n user = []\n\n skill_temp = -1\n # get skills if user is found\n if (len(user) != 0):\n for u in user:\n if (skill != -1):\n for entry in u[\"skills\"]:\n if (skill == entry[\"id\"]):\n skill_temp = entry\n if (skill_temp == -1):\n return \"No such skill exist for the given user\"\n else:\n return skill_temp\n else:\n skill_temp = u[\"skills\"]\n for i in skill_temp:\n name = getSkillName(i['id'])\n i['name'] = name\n return skill_temp",
"def get_stage():\n try:\n filename = os.path.join(get_var('SITE'), \".stage\")\n f = open(filename, \"r\")\n stage = f.readline().strip()\n f.close()\n logger.debug(\"get stage: %s\" % (stage))\n return stage\n except:\n return reset_stage()",
"def getSkill(self, skillName):\r\n if self.__contains__(skillName):\r\n return self.skills[skillName]\r\n return None",
"def download_job_manifest(bucket: Bucket, job_id: str) -> JobManifest:\n path = f\"thor_jobs/v1/job-{job_id}/manifest.json\"\n as_str = bucket.blob(path).download_as_string()\n return JobManifest.from_str(as_str)",
"def manifest(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:\n return pulumi.get(self, \"manifest\")",
"def get_manifest_data(bucket,team, dataset,manifest_key):\n dynamo_config = DynamoConfiguration()\n dynamo_interface = DynamoInterface(dynamo_config)\n s3_interface = S3Interface()\n local_path = s3_interface.download_object(bucket, manifest_key)\n ddb_keys=[]\n items=[]\n with open(local_path, \"r\") as raw_file:\n file_names = [file_name.strip().split(\"/\")[-1]\n for file_name in raw_file]\n for file in file_names:\n ddb_keys.append({\n \"dataset_name\": team+\"-\"+dataset,\n \"manifest_file_name\": manifest_key.split(\"/\")[-1], \"datafile_name\": file\n })\n for ddb_key in ddb_keys:\n try:\n items.append(dynamo_interface.get_item_from_manifests_control_table(\n ddb_key[\"dataset_name\"], ddb_key[\"manifest_file_name\"], ddb_key[\"datafile_name\"]))\n except KeyError:\n logger.error(\"The manifest file has not been processed in Stage A\")\n raise Exception(\"Manifest File has not been processed in Stage A\")\n\n return items",
"def manifest(self) -> pulumi.Output[Optional[Mapping[str, Any]]]:\n return pulumi.get(self, \"manifest\")",
"def skills():\n with app.app_context():\n results = Skill.query.all()\n return SkillsResponse(skills=results).json(), 200",
"def get_skill(skillpath):\n return Skill.query.filter_by(path=skillpath).first()",
"def get_manifest(self):\r\n if os.path.exists(self.manifestfile):\r\n return Manifest(json.loads(file(self.manifestfile).read()))\r\n return Manifest({})",
"def gen_manifest(stage_dir):\n manifest = {'files': []}\n\n for root, dirs, files in os.walk(stage_dir):\n for file_ in files:\n fullpath = os.path.join(root, file_)\n contents = open(fullpath, 'rb').read()\n sha1 = hashlib.sha1(contents).hexdigest()\n filename = os.path.relpath(fullpath, stage_dir)\n mode = get_permission(fullpath)\n manifest['files'].append({'path': filename, 'sha1': sha1,\n 'mode': mode})\n return manifest",
"def get(self, stage_id):\n stage_order = self.staging_service.get_stage_order_by_id(stage_id)\n if stage_order:\n self.write_json({'status': stage_order.status.name, 'size': stage_order.size})\n else:\n self.set_status(NOT_FOUND, reason='No stage order with id: {} found.'.format(stage_id))",
"def skill(self):\n return self._get(\"skill\")",
"def stage(self) -> Stage:\n if self._stage is None:\n stage_key = \"STAGE\"\n stage_str = self._get_env(stage_key)\n assert stage_str in _STAGES, (\n f\"the {stage_key} environment variable value must be one of \"\n f\"{_STAGES}, {stage_str=}\"\n )\n self._stage = Stage[stage_str]\n\n return self._stage",
"def get_stage(stage_string):\n stage_choices = {\n 'PUT_START' : MigrationRequest.PUT_START,\n 'PUT_BUILDING' : MigrationRequest.PUT_BUILDING,\n 'PUT_PENDING' : MigrationRequest.PUT_PENDING,\n 'PUT_PACKING' : MigrationRequest.PUT_PACKING,\n 'PUTTING' : MigrationRequest.PUTTING,\n 'VERIFY_PENDING' : MigrationRequest.VERIFY_PENDING,\n 'VERIFY_GETTING' : MigrationRequest.VERIFY_GETTING,\n 'VERIFYING' : MigrationRequest.VERIFYING,\n 'PUT_TIDY' : MigrationRequest.PUT_TIDY,\n 'PUT_COMPLETED' : MigrationRequest.PUT_COMPLETED,\n\n 'GET_START' : MigrationRequest.GET_START,\n 'GET_PENDING' : MigrationRequest.GET_PENDING,\n 'GETTING' : MigrationRequest.GETTING,\n 'GET_UNPACKING' : MigrationRequest.GET_UNPACKING,\n 'GET_RESTORE' : MigrationRequest.GET_RESTORE,\n 'GET_TIDY' : MigrationRequest.GET_TIDY,\n 'GET_COMPLETED' : MigrationRequest.GET_COMPLETED,\n\n 'DELETE_START' : MigrationRequest.DELETE_START,\n 'DELETE_PENDING' : MigrationRequest.DELETE_PENDING,\n 'DELETING' : MigrationRequest.DELETING,\n 'DELETE_TIDY' : MigrationRequest.DELETE_TIDY,\n 'DELETE_COMPLETED' : MigrationRequest.DELETE_COMPLETED,\n\n 'FAILED' : MigrationRequest.FAILED,\n 'FAILED_COMPLETED' : MigrationRequest.FAILED_COMPLETED\n }\n return(stage_choices[stage_string])",
"def add_stage(self, stage_name: str) -> \"CdkStage\":\n return jsii.invoke(self, \"addStage\", [stage_name])",
"def get_manifest(path: str):\n base_url = urlparse(path.strip(\"/\"))\n if base_url.scheme != \"s3\":\n raise click.UsageError(\n f\"URL scheme should be s3, but received {base_url.geturl()}\"\n )\n\n s3 = boto3.resource(\"s3\")\n manifest_filenames = [\"lecida__manifest.yml\", \"manifest.yml\"]\n\n def read_s3(base_url: ParseResult, filename: str) -> Optional[bytes]:\n try:\n obj = s3.Object(\n bucket_name=base_url.netloc,\n key=base_url.path.strip(\"/\") + f\"/{filename}\"\n )\n return obj.get()['Body'].read()\n except ClientError as e:\n # Only allow NoSuchKey errors, blow up on any other errors\n if e.response['Error']['Code'] == 'NoSuchKey':\n return None\n raise e\n\n body: Optional[bytes] = None\n for mf in manifest_filenames:\n body = read_s3(base_url, mf)\n if body is not None:\n break\n if body is None:\n raise click.ClickException(\n f\"Can't find any manifest files ({manifest_filenames}) in {path}\"\n )\n\n click.secho(\n f\"Found manifest in {base_url.geturl()}/{mf}\", fg='green', err=True\n )\n click.echo(body.decode(\"utf-8\"))",
"def copy_stage(self, stack_id, rest_api_id, from_stage, to_stage_name):\n to_stage_variables = {}\n\n # Add lambda alias as a suffix to stage variables\n for k, v in from_stage[\"variables\"].iteritems():\n to_stage_variables[k] = v.replace(\":%s\" % from_stage[\"stageName\"], \":%s\" % to_stage_name)\n # if lambda function is a variable add permission to invoke\n if (\":%s\" % from_stage[\"stageName\"]) in v:\n self.add_lambda_permission(function_name=to_stage_variables[k],\n region=self.session.region_name,\n account_id=stack_id,\n rest_api_id=rest_api_id\n )\n # Create New Stage\n self.api_client.create_stage(\n restApiId=rest_api_id,\n stageName=to_stage_name,\n deploymentId=from_stage['deploymentId'],\n description=to_stage_name,\n # cacheClusterEnabled=True|False,\n # cacheClusterSize='0.5'|'1.6'|'6.1'|'13.5'|'28.4'|'58.2'|'118'|'237',\n variables=to_stage_variables\n # documentationVersion='string'\n )\n\n return True",
"def _collect_stages(self) -> Set[str]:\n # Get the stage name associated with the previous deployment and update stage\n # Stage needs to be flushed so that new changes will be visible immediately\n api_resource = get_resource_by_id(self._stacks, ResourceIdentifier(self._api_identifier))\n stage_resources = get_resource_ids_by_type(self._stacks, AWS_APIGATEWAY_STAGE)\n deployment_resources = get_resource_ids_by_type(self._stacks, AWS_APIGATEWAY_DEPLOYMENT)\n\n stages = set()\n # If it is a SAM resource, get the StageName property\n if api_resource:\n if api_resource.get(\"Type\") == AWS_SERVERLESS_API:\n # The customer defined stage name\n stage_name = api_resource.get(\"Properties\", {}).get(\"StageName\")\n if stage_name:\n stages.add(cast(str, stage_name))\n\n # The stage called \"Stage\"\n if stage_name != \"Stage\":\n response_sta = cast(Dict, self._api_client.get_stages(restApiId=self._api_physical_id))\n for item in response_sta.get(\"item\"): # type: ignore\n if item.get(\"stageName\") == \"Stage\":\n stages.add(\"Stage\")\n\n # For both SAM and ApiGateway resource, check if any refs from stage resources\n for stage_resource in stage_resources:\n # RestApiId is a required field in stage\n stage_dict = get_resource_by_id(self._stacks, stage_resource)\n if not stage_dict:\n continue\n rest_api_id = stage_dict.get(\"Properties\", {}).get(\"RestApiId\")\n dep_id = stage_dict.get(\"Properties\", {}).get(\"DeploymentId\")\n # If the stage doesn't have a deployment associated then no need to update\n if dep_id is None:\n continue\n # If the stage's deployment ID is not static and the rest API ID matchs, then update\n for deployment_resource in deployment_resources:\n if deployment_resource.resource_iac_id == dep_id and rest_api_id == self._api_identifier:\n stages.add(cast(str, stage_dict.get(\"Properties\", {}).get(\"StageName\")))\n break\n\n return stages",
"def get_skill(self, utterance, lang=\"en-us\"):\n intent = self.get_intent(utterance, lang)\n if not intent:\n return None\n # theoretically skill_id might be missing\n if intent.get(\"skill_id\"):\n return intent[\"skill_id\"]\n # retrieve skill from munged intent name\n if intent.get(\"intent_name\"): # padatious + adapt\n return intent[\"name\"].split(\":\")[0]\n if intent.get(\"intent_type\"): # adapt\n return intent[\"intent_type\"].split(\":\")[0]\n return None # raise some error here maybe? this should never happen",
"def skills(self):\n if \"skills\" in self._prop_dict:\n return self._prop_dict[\"skills\"]\n else:\n return None",
"def get_isp_list_for_skill_id_v1(self, skill_id, stage, **kwargs):\n # type: (str, str, **Any) -> Union[ApiResponse, object, Error_fbe913d9, BadRequestError_f854b05, ListInSkillProductResponse_505e7307]\n operation_name = \"get_isp_list_for_skill_id_v1\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'skill_id' is set\n if ('skill_id' not in params) or (params['skill_id'] is None):\n raise ValueError(\n \"Missing the required parameter `skill_id` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'stage' is set\n if ('stage' not in params) or (params['stage'] is None):\n raise ValueError(\n \"Missing the required parameter `stage` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v1/skills/{skillId}/stages/{stage}/inSkillProducts'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'skill_id' in params:\n path_params['skillId'] = params['skill_id']\n if 'stage' in params:\n path_params['stage'] = params['stage']\n\n query_params = [] # type: List\n if 'next_token' in params:\n query_params.append(('nextToken', params['next_token']))\n if 'max_results' in params:\n query_params.append(('maxResults', params['max_results']))\n\n header_params = [] # type: List\n\n body_params = None\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.isp.list_in_skill_product_response.ListInSkillProductResponse\", status_code=200, message=\"Response contains list of in-skill products for the specified skillId and stage.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=400, message=\"Bad request. Returned when a required parameter is not present, badly formatted. \"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=401, message=\"The auth token is invalid/expired or doesn't have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=404, message=\"Requested resource not found.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=429, message=\"Too many requests received.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=500, message=\"Internal Server Error\"))\n\n api_response = self.invoke(\n method=\"GET\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=\"ask_smapi_model.v1.isp.list_in_skill_product_response.ListInSkillProductResponse\")\n\n if full_response:\n return api_response\n return api_response.body"
] | [
"0.58649564",
"0.5746228",
"0.5555346",
"0.54651296",
"0.5207292",
"0.51761097",
"0.510083",
"0.50791603",
"0.49944592",
"0.48343024",
"0.4796685",
"0.47939762",
"0.4792742",
"0.47889155",
"0.47459278",
"0.47426957",
"0.47282284",
"0.47118324",
"0.46940255",
"0.4669653",
"0.46601176",
"0.4653115",
"0.46469292",
"0.46090722",
"0.45976877",
"0.4571195",
"0.45491546",
"0.45352867",
"0.45340118",
"0.45089525"
] | 0.7073502 | 0 |
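
The manifest getter above follows the same calling convention; here is a hedged sketch reusing the assumed `smapi_client` from the previous sketch, this time with `full_response=True` to keep the ApiResponse wrapper (status code, headers, body) instead of only the deserialized envelope.

# Hedged sketch: fetch the live-stage manifest and inspect the wrapper fields
# documented in this record. `smapi_client` is the assumed client built above.
response = smapi_client.get_skill_manifest_v1(
    skill_id="<skill-id>",
    stage_v2="live",
    full_response=True)
print(response.status_code)  # 200 on success, per the error_definitions above
envelope = response.body     # SkillManifestEnvelope when the call succeeds
print(envelope.manifest)     # the SkillManifest payload itself
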
Updates the skill manifest for the given skillId and stage. | def update_skill_manifest_v1(self, skill_id, stage_v2, update_skill_request, **kwargs):
# type: (str, str, SkillManifestEnvelope_fc0e823b, **Any) -> Union[ApiResponse, object, StandardizedError_f5106a89, BadRequestError_f854b05]
operation_name = "update_skill_manifest_v1"
params = locals()
for key, val in six.iteritems(params['kwargs']):
params[key] = val
del params['kwargs']
# verify the required parameter 'skill_id' is set
if ('skill_id' not in params) or (params['skill_id'] is None):
raise ValueError(
"Missing the required parameter `skill_id` when calling `" + operation_name + "`")
# verify the required parameter 'stage_v2' is set
if ('stage_v2' not in params) or (params['stage_v2'] is None):
raise ValueError(
"Missing the required parameter `stage_v2` when calling `" + operation_name + "`")
# verify the required parameter 'update_skill_request' is set
if ('update_skill_request' not in params) or (params['update_skill_request'] is None):
raise ValueError(
"Missing the required parameter `update_skill_request` when calling `" + operation_name + "`")
resource_path = '/v1/skills/{skillId}/stages/{stageV2}/manifest'
resource_path = resource_path.replace('{format}', 'json')
path_params = {} # type: Dict
if 'skill_id' in params:
path_params['skillId'] = params['skill_id']
if 'stage_v2' in params:
path_params['stageV2'] = params['stage_v2']
query_params = [] # type: List
header_params = [] # type: List
if 'if_match' in params:
header_params.append(('If-Match', params['if_match']))
body_params = None
if 'update_skill_request' in params:
body_params = params['update_skill_request']
header_params.append(('Content-type', 'application/json'))
header_params.append(('User-Agent', self.user_agent))
# Response Type
full_response = False
if 'full_response' in params:
full_response = params['full_response']
# Authentication setting
access_token = self._lwa_service_client.get_access_token_from_refresh_token()
authorization_value = "Bearer " + access_token
header_params.append(('Authorization', authorization_value))
error_definitions = [] # type: List
error_definitions.append(ServiceClientResponse(response_type=None, status_code=202, message="Accepted; Returns a URL to track the status in 'Location' header."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.bad_request_error.BadRequestError", status_code=400, message="Server cannot process the request due to a client error."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=401, message="The auth token is invalid/expired or doesn't have access to the resource."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.bad_request_error.BadRequestError", status_code=403, message="The operation being requested is not allowed."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=404, message="The resource being requested is not found."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=409, message="The request could not be completed due to a conflict with the current state of the target resource."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=412, message="Precondition failed."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=429, message="Exceed the permitted request limit. Throttling criteria includes total requests, per API, ClientId, and CustomerId."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=500, message="Internal Server Error."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=503, message="Service Unavailable."))
api_response = self.invoke(
method="PUT",
endpoint=self._api_endpoint,
path=resource_path,
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
response_definitions=error_definitions,
response_type=None)
if full_response:
return api_response
return None | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_skill_manifest_v1(self, skill_id, stage_v2, **kwargs):\n # type: (str, str, **Any) -> Union[ApiResponse, object, SkillManifestEnvelope_fc0e823b, StandardizedError_f5106a89, BadRequestError_f854b05]\n operation_name = \"get_skill_manifest_v1\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'skill_id' is set\n if ('skill_id' not in params) or (params['skill_id'] is None):\n raise ValueError(\n \"Missing the required parameter `skill_id` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'stage_v2' is set\n if ('stage_v2' not in params) or (params['stage_v2'] is None):\n raise ValueError(\n \"Missing the required parameter `stage_v2` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v1/skills/{skillId}/stages/{stageV2}/manifest'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'skill_id' in params:\n path_params['skillId'] = params['skill_id']\n if 'stage_v2' in params:\n path_params['stageV2'] = params['stage_v2']\n\n query_params = [] # type: List\n\n header_params = [] # type: List\n\n body_params = None\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.manifest.skill_manifest_envelope.SkillManifestEnvelope\", status_code=200, message=\"Response contains the latest version of skill manifest.\"))\n error_definitions.append(ServiceClientResponse(response_type=None, status_code=303, message=\"See Other\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=400, message=\"Server cannot process the request due to a client error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=401, message=\"The auth token is invalid/expired or doesn't have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=403, message=\"The operation being requested is not allowed.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=404, message=\"The resource being requested is not found.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=429, message=\"Exceed the permitted request limit. 
Throttling criteria includes total requests, per API, ClientId, and CustomerId.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=500, message=\"Internal Server Error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=503, message=\"Service Unavailable.\"))\n\n api_response = self.invoke(\n method=\"GET\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=\"ask_smapi_model.v1.skill.manifest.skill_manifest_envelope.SkillManifestEnvelope\")\n\n if full_response:\n return api_response\n return api_response.body",
"def upload_skills_data(self, data):\n if not isinstance(data, dict):\n raise ValueError('data must be of type dict')\n\n _data = deepcopy(data) # Make sure the input data isn't modified\n # Strip the skills.json down to the bare essentials\n to_send = {}\n if 'blacklist' in _data:\n to_send['blacklist'] = _data['blacklist']\n else:\n LOG.warning('skills manifest lacks blacklist entry')\n to_send['blacklist'] = []\n\n # Make sure skills doesn't contain duplicates (keep only last)\n if 'skills' in _data:\n skills = {s['name']: s for s in _data['skills']}\n to_send['skills'] = [skills[key] for key in skills]\n else:\n LOG.warning('skills manifest lacks skills entry')\n to_send['skills'] = []\n\n for s in to_send['skills']:\n # Remove optional fields backend objects to\n if 'update' in s:\n s.pop('update')\n\n # Finalize skill_gid with uuid if needed\n s['skill_gid'] = s.get('skill_gid', '').replace(\n '@|', '@{}|'.format(self.identity.uuid))\n\n self.request({\n \"method\": \"PUT\",\n \"path\": \"/\" + UUID + \"/skillJson\",\n \"json\": to_send\n })",
"def set_skill_enablement_v1(self, skill_id, stage, **kwargs):\n # type: (str, str, **Any) -> Union[ApiResponse, object, StandardizedError_f5106a89, BadRequestError_f854b05]\n operation_name = \"set_skill_enablement_v1\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'skill_id' is set\n if ('skill_id' not in params) or (params['skill_id'] is None):\n raise ValueError(\n \"Missing the required parameter `skill_id` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'stage' is set\n if ('stage' not in params) or (params['stage'] is None):\n raise ValueError(\n \"Missing the required parameter `stage` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v1/skills/{skillId}/stages/{stage}/enablement'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'skill_id' in params:\n path_params['skillId'] = params['skill_id']\n if 'stage' in params:\n path_params['stage'] = params['stage']\n\n query_params = [] # type: List\n\n header_params = [] # type: List\n\n body_params = None\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=None, status_code=204, message=\"No Content; Confirms that enablement is successfully created/updated.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=400, message=\"Server cannot process the request due to a client error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=401, message=\"The auth token is invalid/expired or doesn't have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=403, message=\"The operation being requested is not allowed.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=404, message=\"The resource being requested is not found.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=409, message=\"The request could not be completed due to a conflict with the current state of the target resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=429, message=\"Exceed the permitted request limit. 
Throttling criteria includes total requests, per API, ClientId, and CustomerId.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=500, message=\"Internal Server Error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=503, message=\"Service Unavailable.\"))\n\n api_response = self.invoke(\n method=\"PUT\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=None)\n\n if full_response:\n return api_response\n \n return None",
"def updateSkillForPlayer(self, userid, name, level):\r\n if not isinstance(userid, int):\r\n userid = self.getUserIdFromSteamId(userid)\r\n self.execute(\"UPDATE Skill SET level=? WHERE UserID=? AND name=?\", level, userid, name)",
"def skill(ctx: Context, public_id: PublicId):\n _eject_item(ctx, \"skill\", public_id)",
"def upgrade_skill(self, skill_string):\r\n skill = self.__skills[skill_string]\r\n skill.skill_level += 1\r\n\r\n # Downgrading enabled the first time a skill is upgraded.\r\n if skill.skill_level == 1:\r\n self.skill_down_enable(skill_string)\r\n\r\n # Updates the UI and skill point value\r\n self.update_skill_level_info(skill_string)\r\n self.deduct_skill_points(skill.points_to_up)\r\n self.update_skill_info_box(skill_string)\r\n\r\n # Checks other requirements.\r\n for skill_string2 in self.__skills:\r\n self.check_skill_requirements(skill_string2)",
"def test_skills_updated(self):\n assert self.skill_config.skills == {self.new_skill_id}",
"def test_skills_updated(self):\n assert self.agent_config.skills == {self.new_skill_id}",
"def addSkill(self, newskill):\n self.skills.append( newskill )",
"def copy_stage(self, stack_id, rest_api_id, from_stage, to_stage_name):\n to_stage_variables = {}\n\n # Add lambda alias as a suffix to stage variables\n for k, v in from_stage[\"variables\"].iteritems():\n to_stage_variables[k] = v.replace(\":%s\" % from_stage[\"stageName\"], \":%s\" % to_stage_name)\n # if lambda function is a variable add permission to invoke\n if (\":%s\" % from_stage[\"stageName\"]) in v:\n self.add_lambda_permission(function_name=to_stage_variables[k],\n region=self.session.region_name,\n account_id=stack_id,\n rest_api_id=rest_api_id\n )\n # Create New Stage\n self.api_client.create_stage(\n restApiId=rest_api_id,\n stageName=to_stage_name,\n deploymentId=from_stage['deploymentId'],\n description=to_stage_name,\n # cacheClusterEnabled=True|False,\n # cacheClusterSize='0.5'|'1.6'|'6.1'|'13.5'|'28.4'|'58.2'|'118'|'237',\n variables=to_stage_variables\n # documentationVersion='string'\n )\n\n return True",
"def update_isp_for_product_v1(self, product_id, stage, update_in_skill_product_request, **kwargs):\n # type: (str, str, UpdateInSkillProductRequest_ee975cf1, **Any) -> Union[ApiResponse, object, Error_fbe913d9, BadRequestError_f854b05]\n operation_name = \"update_isp_for_product_v1\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'product_id' is set\n if ('product_id' not in params) or (params['product_id'] is None):\n raise ValueError(\n \"Missing the required parameter `product_id` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'stage' is set\n if ('stage' not in params) or (params['stage'] is None):\n raise ValueError(\n \"Missing the required parameter `stage` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'update_in_skill_product_request' is set\n if ('update_in_skill_product_request' not in params) or (params['update_in_skill_product_request'] is None):\n raise ValueError(\n \"Missing the required parameter `update_in_skill_product_request` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v1/inSkillProducts/{productId}/stages/{stage}'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'product_id' in params:\n path_params['productId'] = params['product_id']\n if 'stage' in params:\n path_params['stage'] = params['stage']\n\n query_params = [] # type: List\n\n header_params = [] # type: List\n if 'if_match' in params:\n header_params.append(('If-Match', params['if_match']))\n\n body_params = None\n if 'update_in_skill_product_request' in params:\n body_params = params['update_in_skill_product_request']\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=None, status_code=204, message=\"Success.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=400, message=\"Bad request. Returned when a required parameter is not present, badly formatted. 
\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=401, message=\"The auth token is invalid/expired or doesn't have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=403, message=\"Request is forbidden.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=404, message=\"Requested resource not found.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=412, message=\"Precondition failed.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=429, message=\"Too many requests received.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=500, message=\"Internal Server Error\"))\n\n api_response = self.invoke(\n method=\"PUT\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=None)\n\n if full_response:\n return api_response\n \n return None",
"def update_manifest(self, filename: Optional[str] = None, manifest: Optional[Dict[str, str]] = None) -> None:\n filename = filename or self.manifest_filename\n manifest = manifest or {}\n self.log.debug(f\"Updating manifest '{manifest}' to file '{filename}'\")\n with open(filename, \"w\") as f:\n json.dump(manifest, f, indent=2)",
"def modifySkill(skill, db, pwr):\n skill_data = db.execute(\n 'SELECT * FROM mystatus WHERE skill = ?', (str(skill), )).fetchone()\n if not skill_data:\n return colored(\"ERROR: Skill {S} is not in your skill set!\".format(S=str(skill)), \"red\", \"on_white\")\n pwr = int(pwr)\n if pwr < 0:\n return colored(\"ERROR: Power value should alwasy be positive.\", \"red\", \"on_white\")\n db.execute(\n 'UPDATE mystatus SET power = ? WHERE skill = ?', (str(pwr), str(skill)))\n db.commit()\n return colored(\"{S}\\' power is modified from {OLD} -> {NEW}\".format(\n S=str(skill), OLD=str(skill_data['power']), NEW=str(pwr)), 'cyan')",
"def update_stack(StackId=None, Name=None, Attributes=None, ServiceRoleArn=None, DefaultInstanceProfileArn=None, DefaultOs=None, HostnameTheme=None, DefaultAvailabilityZone=None, DefaultSubnetId=None, CustomJson=None, ConfigurationManager=None, ChefConfiguration=None, UseCustomCookbooks=None, CustomCookbooksSource=None, DefaultSshKeyName=None, DefaultRootDeviceType=None, UseOpsworksSecurityGroups=None, AgentVersion=None):\n pass",
"def get_isp_associated_skills_v1(self, product_id, stage, **kwargs):\n # type: (str, str, **Any) -> Union[ApiResponse, object, AssociatedSkillResponse_12067635, Error_fbe913d9, BadRequestError_f854b05]\n operation_name = \"get_isp_associated_skills_v1\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'product_id' is set\n if ('product_id' not in params) or (params['product_id'] is None):\n raise ValueError(\n \"Missing the required parameter `product_id` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'stage' is set\n if ('stage' not in params) or (params['stage'] is None):\n raise ValueError(\n \"Missing the required parameter `stage` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v1/inSkillProducts/{productId}/stages/{stage}/skills'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'product_id' in params:\n path_params['productId'] = params['product_id']\n if 'stage' in params:\n path_params['stage'] = params['stage']\n\n query_params = [] # type: List\n if 'next_token' in params:\n query_params.append(('nextToken', params['next_token']))\n if 'max_results' in params:\n query_params.append(('maxResults', params['max_results']))\n\n header_params = [] # type: List\n\n body_params = None\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.isp.associated_skill_response.AssociatedSkillResponse\", status_code=200, message=\"Returns skills associated with the in-skill product.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=400, message=\"Bad request. Returned when a required parameter is not present, badly formatted. \"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=401, message=\"The auth token is invalid/expired or doesn't have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=404, message=\"Requested resource not found.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=429, message=\"Too many requests received.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=500, message=\"Internal Server Error\"))\n\n api_response = self.invoke(\n method=\"GET\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=\"ask_smapi_model.v1.isp.associated_skill_response.AssociatedSkillResponse\")\n\n if full_response:\n return api_response\n return api_response.body",
"def delete_skill_enablement_v1(self, skill_id, stage, **kwargs):\n # type: (str, str, **Any) -> Union[ApiResponse, object, StandardizedError_f5106a89, BadRequestError_f854b05]\n operation_name = \"delete_skill_enablement_v1\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'skill_id' is set\n if ('skill_id' not in params) or (params['skill_id'] is None):\n raise ValueError(\n \"Missing the required parameter `skill_id` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'stage' is set\n if ('stage' not in params) or (params['stage'] is None):\n raise ValueError(\n \"Missing the required parameter `stage` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v1/skills/{skillId}/stages/{stage}/enablement'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'skill_id' in params:\n path_params['skillId'] = params['skill_id']\n if 'stage' in params:\n path_params['stage'] = params['stage']\n\n query_params = [] # type: List\n\n header_params = [] # type: List\n\n body_params = None\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=None, status_code=204, message=\"No Content; Confirms that enablement is successfully deleted.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=400, message=\"Server cannot process the request due to a client error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=401, message=\"The auth token is invalid/expired or doesn't have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=403, message=\"The operation being requested is not allowed.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=404, message=\"The resource being requested is not found.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=429, message=\"Exceed the permitted request limit. 
Throttling criteria includes total requests, per API, ClientId, and CustomerId.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=500, message=\"Internal Server Error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=503, message=\"Service Unavailable.\"))\n\n api_response = self.invoke(\n method=\"DELETE\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=None)\n\n if full_response:\n return api_response\n \n return None",
"def add_skills_to_profile():\n # get specific objects\n profile = storage.get(\"Profile\", profile_id)\n skills = storage.get(\"Skills\", skills_id)\n if profile is not None and skills is not None:\n # check every skill in profile\n for profile_skill in profile.skills:\n # if the given skill is already linked to profile, return\n if profile_skill.id == skills.id:\n return jsonify(skills.to_dict()), 200\n # if skill is not in profile, append skill and save\n profile.skills.append(skills)\n profile.save()\n return jsonify(skills.to_dict()), 201\n\n # if id not in database, abort\n abort(404)",
"def update_user(id):\n with app.app_context():\n user = User.query.get(id)\n if user is None:\n return \"User not found\", 404\n skills = validate_skills(request.get_json().get(\"skills\"))\n if not skills:\n return \"Invalid skills\", 400\n\n for skill in skills:\n skill_db = Skill.query.filter_by(name=skill).first()\n if skill_db is None:\n skill_db = Skill(name=skill)\n db.session.add(skill_db)\n \n user.skills = [\n skill for skill in Skill.query.filter(Skill.name.in_(skills)).all()\n ]\n \n users_response = UsersResponse(\n users=[\n {\n \"id\": user.id,\n \"name\": user.name,\n \"skills\": [skill.name for skill in user.skills]\n }\n ]\n )\n db.session.commit()\n return users_response.json(), 200",
"def updateSkillPoints(skill, db, delta):\n skill_data = db.execute(\n 'SELECT * FROM mystatus WHERE skill = ?', (str(skill), )).fetchone()\n if not skill_data:\n return colored(\"ERROR: Skill {S} is not in your skill set!\".format(S=str(skill)), \"red\", \"on_white\")\n new_points = max(0, skill_data['points'] + int(delta))\n db.execute(\n 'UPDATE mystatus SET points = ? WHERE skill = ?', (str(new_points), str(skill)))\n db.commit()\n return colored(\"{S}\\' power is updated from {OLD} -> {NEW}\".format(\n S=str(skill), OLD=str(skill_data['points']), NEW=str(new_points)), 'cyan')",
"def simulate_skill_v2(self, skill_id, stage, simulations_api_request, **kwargs):\n # type: (str, str, SimulationsApiRequest_ae2e6503, **Any) -> Union[ApiResponse, object, SimulationsApiResponse_e4ad17d, BadRequestError_765e0ac6, Error_ea6c1a5a]\n operation_name = \"simulate_skill_v2\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'skill_id' is set\n if ('skill_id' not in params) or (params['skill_id'] is None):\n raise ValueError(\n \"Missing the required parameter `skill_id` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'stage' is set\n if ('stage' not in params) or (params['stage'] is None):\n raise ValueError(\n \"Missing the required parameter `stage` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'simulations_api_request' is set\n if ('simulations_api_request' not in params) or (params['simulations_api_request'] is None):\n raise ValueError(\n \"Missing the required parameter `simulations_api_request` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v2/skills/{skillId}/stages/{stage}/simulations'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'skill_id' in params:\n path_params['skillId'] = params['skill_id']\n if 'stage' in params:\n path_params['stage'] = params['stage']\n\n query_params = [] # type: List\n\n header_params = [] # type: List\n\n body_params = None\n if 'simulations_api_request' in params:\n body_params = params['simulations_api_request']\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.skill.simulations.simulations_api_response.SimulationsApiResponse\", status_code=200, message=\"Skill simulation has successfully began.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.bad_request_error.BadRequestError\", status_code=400, message=\"Bad request due to invalid or missing data.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.error.Error\", status_code=401, message=\"The auth token is invalid/expired or doesn't have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.bad_request_error.BadRequestError\", status_code=403, message=\"API user does not have permission to call this API or is currently in a state that does not allow simulation of this skill. \"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.error.Error\", status_code=404, message=\"The specified skill does not exist.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.error.Error\", status_code=409, message=\"This requests conflicts with another one currently being processed. 
\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.error.Error\", status_code=429, message=\"API user has exceeded the permitted request rate.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.error.Error\", status_code=500, message=\"Internal service error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.error.Error\", status_code=503, message=\"Service Unavailable.\"))\n\n api_response = self.invoke(\n method=\"POST\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=\"ask_smapi_model.v2.skill.simulations.simulations_api_response.SimulationsApiResponse\")\n\n if full_response:\n return api_response\n return api_response.body",
"def put(self, id):\n data = request.json\n update_scenario(id, data)\n return None, 204",
"def update_manifest(self, dst):\n # Read the current manifest into memory\n mpath = os.path.join(os.path.dirname(dst), \"manifest.json\")\n try:\n with open(mpath, 'r') as f:\n manifest = json.load(f)\n except IOError:\n manifest = {}\n\n name, _ = os.path.splitext(os.path.basename(dst))\n # Update the manifest record\n manifest[name] = {\n \"url\": os.path.basename(dst),\n \"signature\": sha256sum(dst),\n }\n\n # Write the manifest back to disk\n with open(mpath, 'w') as f:\n json.dump(manifest, f, indent=2)",
"def invoke_skill_end_point_v2(self, skill_id, stage, **kwargs):\n # type: (str, str, **Any) -> Union[ApiResponse, object, BadRequestError_765e0ac6, InvocationsApiResponse_3d7e3234, Error_ea6c1a5a]\n operation_name = \"invoke_skill_end_point_v2\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'skill_id' is set\n if ('skill_id' not in params) or (params['skill_id'] is None):\n raise ValueError(\n \"Missing the required parameter `skill_id` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'stage' is set\n if ('stage' not in params) or (params['stage'] is None):\n raise ValueError(\n \"Missing the required parameter `stage` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v2/skills/{skillId}/stages/{stage}/invocations'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'skill_id' in params:\n path_params['skillId'] = params['skill_id']\n if 'stage' in params:\n path_params['stage'] = params['stage']\n\n query_params = [] # type: List\n\n header_params = [] # type: List\n\n body_params = None\n if 'invocations_api_request' in params:\n body_params = params['invocations_api_request']\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.skill.invocations.invocations_api_response.InvocationsApiResponse\", status_code=200, message=\"Skill was invoked.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.bad_request_error.BadRequestError\", status_code=400, message=\"Bad request due to invalid or missing data.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.error.Error\", status_code=401, message=\"The auth token is invalid/expired or doesn't have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.bad_request_error.BadRequestError\", status_code=403, message=\"API user does not have permission to call this API or is currently in a state that does not allow invocation of this skill. 
\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.error.Error\", status_code=404, message=\"The specified skill does not exist.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.error.Error\", status_code=429, message=\"API user has exceeded the permitted request rate.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.error.Error\", status_code=500, message=\"Internal service error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.error.Error\", status_code=503, message=\"Service Unavailable.\"))\n\n api_response = self.invoke(\n method=\"POST\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=\"ask_smapi_model.v2.skill.invocations.invocations_api_response.InvocationsApiResponse\")\n\n if full_response:\n return api_response\n return api_response.body",
"def reset_entitlement_for_product_v1(self, product_id, stage, **kwargs):\n # type: (str, str, **Any) -> Union[ApiResponse, object, Error_fbe913d9, BadRequestError_f854b05]\n operation_name = \"reset_entitlement_for_product_v1\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'product_id' is set\n if ('product_id' not in params) or (params['product_id'] is None):\n raise ValueError(\n \"Missing the required parameter `product_id` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'stage' is set\n if ('stage' not in params) or (params['stage'] is None):\n raise ValueError(\n \"Missing the required parameter `stage` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v1/inSkillProducts/{productId}/stages/{stage}/entitlement'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'product_id' in params:\n path_params['productId'] = params['product_id']\n if 'stage' in params:\n path_params['stage'] = params['stage']\n\n query_params = [] # type: List\n\n header_params = [] # type: List\n\n body_params = None\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=None, status_code=204, message=\"Success. No content.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=400, message=\"Bad request. Returned when a required parameter is not present, badly formatted. \"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=401, message=\"The auth token is invalid/expired or doesn't have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=403, message=\"Request is forbidden.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=404, message=\"Requested resource not found.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=412, message=\"Precondition failed.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=429, message=\"Too many requests received.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=500, message=\"Internal Server Error\"))\n\n api_response = self.invoke(\n method=\"DELETE\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=None)\n\n if full_response:\n return api_response\n \n return None",
"def add_stage(self, stage_name: str) -> \"CdkStage\":\n return jsii.invoke(self, \"addStage\", [stage_name])",
"def update_manifest(builder):\r\n\r\n manifest_path = join(builder.Config.SourceRootPath, builder.Config.WMAppManifest)\r\n dom = parse(manifest_path)\r\n\r\n #import pdb;pdb.set_trace()\r\n #version = make_version_string(builder)\r\n version = builder.AppVersion\r\n\r\n update_manifest_with_values(dom,\r\n Title = builder.CustomCfg.Title,\r\n #ProductID = builder.CustomCfg.ProductID,\r\n #PublisherID = builder.Config.PublisherID,\r\n Version = version,\r\n Languages = getattr(builder.CustomCfg, \"Languages\", None ) )\r\n\r\n with open(manifest_path, 'wb') as f:\r\n data = dom.toprettyxml(indent = \" \")\r\n # toprettyxml adds extra new lines\r\n lines = [ x for x in data.split(\"\\n\") if len(x.strip()) > 0]\r\n data = \"\\n\".join(lines)\r\n f.write(data)\r\n\r\n return True",
"def addSkill(skill, db, **kwargs):\n skill_data = db.execute(\n 'SELECT * FROM mystatus WHERE skill = ?', (str(skill), )).fetchone()\n if skill_data:\n return colored(\"ERROR: Skill {S} is already in the skill set!\".format(S=str(skill)), \"red\", \"on_white\")\n db.execute(\n 'INSERT INTO mystatus (skill, power, points)'\n 'VALUES (?, ?, ?)', (str(skill), str(kwargs['power']), \"0\"))\n db.commit()\n return colored(\"Add new skill: \" + str(skill), 'cyan')",
"def addSkill(self, skillName, maxLevel, creditStart, creditIncrement):\r\n self.skills[skillName] = SkillObject(skillName, maxLevel, creditStart, creditIncrement)\r\n self.orderedSkills.append(skillName)",
"def handle_patch_deployment(project_id, deployment_id):\n kwargs = request.get_json(force=True)\n kwargs = {to_snake_case(k): v for k, v in kwargs.items()}\n experiment = update_deployment(uuid=deployment_id,\n project_id=project_id,\n **kwargs)\n return jsonify(experiment)",
"def update_beta_test_v1(self, skill_id, **kwargs):\n # type: (str, **Any) -> Union[ApiResponse, object, Error_fbe913d9, BadRequestError_f854b05]\n operation_name = \"update_beta_test_v1\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'skill_id' is set\n if ('skill_id' not in params) or (params['skill_id'] is None):\n raise ValueError(\n \"Missing the required parameter `skill_id` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v1/skills/{skillId}/betaTest'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'skill_id' in params:\n path_params['skillId'] = params['skill_id']\n\n query_params = [] # type: List\n\n header_params = [] # type: List\n\n body_params = None\n if 'create_test_body' in params:\n body_params = params['create_test_body']\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=None, status_code=204, message=\"Success. No content.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=400, message=\"Server cannot process the request due to a client error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=401, message=\"The auth token is invalid/expired or doesn't have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=404, message=\"The resource being requested is not found.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=409, message=\"Thrown if user tries to request a new simulation while the old simulation is in progress.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=429, message=\"Exceed the permitted request limit. Throttling criteria includes total requests, per API, ClientId, and CustomerId.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=500, message=\"Internal Server Error.\"))\n\n api_response = self.invoke(\n method=\"PUT\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=None)\n\n if full_response:\n return api_response\n \n return None"
] | [
"0.58502585",
"0.5430659",
"0.53458565",
"0.53242207",
"0.524239",
"0.51418185",
"0.50488377",
"0.50345784",
"0.5030059",
"0.5013375",
"0.49769396",
"0.49239406",
"0.48924637",
"0.48853952",
"0.4850959",
"0.48124957",
"0.4753419",
"0.4743147",
"0.47148094",
"0.4696485",
"0.46933028",
"0.4688511",
"0.46844578",
"0.46787736",
"0.46746072",
"0.46554977",
"0.46136048",
"0.4610533",
"0.4601574",
"0.4586538"
] | 0.7284281 | 0 |
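A minimal usage sketch for the enablement-deletion operation that appears in the negatives above (all names here are assumptions: `smapi_client` stands for an already-configured SMAPI service client built elsewhere with valid LWA refresh-token credentials, and the skill ID is a placeholder):

# Hypothetical call; a successful deletion returns HTTP 204, which this method surfaces as None.
skill_id = "amzn1.ask.skill.00000000-0000-0000-0000-000000000000"  # placeholder ID
smapi_client.delete_skill_enablement_v1(skill_id=skill_id, stage="development")

# Passing full_response=True (checked via the 'full_response' kwarg in the method body)
# returns the ApiResponse wrapper instead of the bare body, exposing status and headers.
api_response = smapi_client.delete_skill_enablement_v1(
    skill_id=skill_id, stage="development", full_response=True)

The stage value is a path parameter, so the same call targets either the development or live enablement depending on what is passed.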
Get the status of skill resource and its subresources for a given skillId. | def get_skill_status_v1(self, skill_id, **kwargs):
# type: (str, **Any) -> Union[ApiResponse, object, SkillStatus_4fdd647b, StandardizedError_f5106a89, BadRequestError_f854b05]
operation_name = "get_skill_status_v1"
params = locals()
for key, val in six.iteritems(params['kwargs']):
params[key] = val
del params['kwargs']
# verify the required parameter 'skill_id' is set
if ('skill_id' not in params) or (params['skill_id'] is None):
raise ValueError(
"Missing the required parameter `skill_id` when calling `" + operation_name + "`")
resource_path = '/v1/skills/{skillId}/status'
resource_path = resource_path.replace('{format}', 'json')
path_params = {} # type: Dict
if 'skill_id' in params:
path_params['skillId'] = params['skill_id']
query_params = [] # type: List
if 'resource' in params:
query_params.append(('resource', params['resource']))
header_params = [] # type: List
body_params = None
header_params.append(('Content-type', 'application/json'))
header_params.append(('User-Agent', self.user_agent))
# Response Type
full_response = False
if 'full_response' in params:
full_response = params['full_response']
# Authentication setting
access_token = self._lwa_service_client.get_access_token_from_refresh_token()
authorization_value = "Bearer " + access_token
header_params.append(('Authorization', authorization_value))
error_definitions = [] # type: List
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.skill_status.SkillStatus", status_code=200, message="Returns status for skill resource and sub-resources."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.bad_request_error.BadRequestError", status_code=400, message="Server cannot process the request due to a client error."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=401, message="The auth token is invalid/expired or doesn't have access to the resource."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.bad_request_error.BadRequestError", status_code=403, message="The operation being requested is not allowed."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=404, message="The resource being requested is not found."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=429, message="Exceeds the permitted request limit. Throttling criteria includes total requests, per API, ClientId, and CustomerId."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=500, message="Internal Server Error."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=503, message="Service Unavailable."))
api_response = self.invoke(
method="GET",
endpoint=self._api_endpoint,
path=resource_path,
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
response_definitions=error_definitions,
response_type="ask_smapi_model.v1.skill.skill_status.SkillStatus")
if full_response:
return api_response
return api_response.body | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def skills():\n with app.app_context():\n results = Skill.query.all()\n return SkillsResponse(skills=results).json(), 200",
"def getSkill(userId, skill=-1) -> list:\n # fetch user\n try:\n user = fetchUser(userId=userId)\n except:\n user = []\n\n skill_temp = -1\n # get skills if user is found\n if (len(user) != 0):\n for u in user:\n if (skill != -1):\n for entry in u[\"skills\"]:\n if (skill == entry[\"id\"]):\n skill_temp = entry\n if (skill_temp == -1):\n return \"No such skill exist for the given user\"\n else:\n return skill_temp\n else:\n skill_temp = u[\"skills\"]\n for i in skill_temp:\n name = getSkillName(i['id'])\n i['name'] = name\n return skill_temp",
"def skills(self):\n if \"skills\" in self._prop_dict:\n return self._prop_dict[\"skills\"]\n else:\n return None",
"def skill(self):\n return self._get(\"skill\")",
"def get_skill_from_id(skill_id):\n return Skill.query.filter_by(id=skill_id).first()",
"def getSkills(self):\n return self.skills",
"def getSkillLevel(self, skillName):\r\n if skillName in self.currentSkills:\r\n return self.currentSkills[skillName]\r\n return 0",
"async def all_skills_data(self) -> AllSkillsData:\n return AllSkillsData(**await self.get(\"/skill/all\"))",
"def getSkillLevel(self, userid, skillName):\r\n if not isinstance(userid, int):\r\n userid = self.getUserIdFromSteamId(userid)\r\n\r\n self.execute(\"SELECT level FROM Skill WHERE UserID=? AND name=?\",\r\n userid, skillName)\r\n value = self.cursor.fetchone()\r\n if value is None:\r\n return None\r\n return value[0]",
"def get_skill_settings(self):\n return self.request({\n \"method\": \"GET\",\n \"path\": \"/\" + UUID + \"/skill/settings\",\n })",
"def getSkill(self, skillName):\r\n if self.__contains__(skillName):\r\n return self.skills[skillName]\r\n return None",
"def get_status_by_id(cls, request, id):\n return request.dbsession.query(cls).get(id).status",
"def get_isp_associated_skills_v1(self, product_id, stage, **kwargs):\n # type: (str, str, **Any) -> Union[ApiResponse, object, AssociatedSkillResponse_12067635, Error_fbe913d9, BadRequestError_f854b05]\n operation_name = \"get_isp_associated_skills_v1\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'product_id' is set\n if ('product_id' not in params) or (params['product_id'] is None):\n raise ValueError(\n \"Missing the required parameter `product_id` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'stage' is set\n if ('stage' not in params) or (params['stage'] is None):\n raise ValueError(\n \"Missing the required parameter `stage` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v1/inSkillProducts/{productId}/stages/{stage}/skills'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'product_id' in params:\n path_params['productId'] = params['product_id']\n if 'stage' in params:\n path_params['stage'] = params['stage']\n\n query_params = [] # type: List\n if 'next_token' in params:\n query_params.append(('nextToken', params['next_token']))\n if 'max_results' in params:\n query_params.append(('maxResults', params['max_results']))\n\n header_params = [] # type: List\n\n body_params = None\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.isp.associated_skill_response.AssociatedSkillResponse\", status_code=200, message=\"Returns skills associated with the in-skill product.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=400, message=\"Bad request. Returned when a required parameter is not present, badly formatted. \"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=401, message=\"The auth token is invalid/expired or doesn't have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=404, message=\"Requested resource not found.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=429, message=\"Too many requests received.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=500, message=\"Internal Server Error\"))\n\n api_response = self.invoke(\n method=\"GET\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=\"ask_smapi_model.v1.isp.associated_skill_response.AssociatedSkillResponse\")\n\n if full_response:\n return api_response\n return api_response.body",
"def delete_skill_v1(self, skill_id, **kwargs):\n # type: (str, **Any) -> Union[ApiResponse, object, StandardizedError_f5106a89, BadRequestError_f854b05]\n operation_name = \"delete_skill_v1\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'skill_id' is set\n if ('skill_id' not in params) or (params['skill_id'] is None):\n raise ValueError(\n \"Missing the required parameter `skill_id` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v1/skills/{skillId}'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'skill_id' in params:\n path_params['skillId'] = params['skill_id']\n\n query_params = [] # type: List\n\n header_params = [] # type: List\n\n body_params = None\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=None, status_code=204, message=\"Success. No content.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=401, message=\"The auth token is invalid/expired or doesn't have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=403, message=\"The operation being requested is not allowed.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=404, message=\"The resource being requested is not found.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=429, message=\"Exceeds the permitted request limit. Throttling criteria includes total requests, per API, ClientId, and CustomerId.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=500, message=\"Internal Server Error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=503, message=\"Service Unavailable.\"))\n\n api_response = self.invoke(\n method=\"DELETE\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=None)\n\n if full_response:\n return api_response\n \n return None",
"def get_skill(skillpath):\n return Skill.query.filter_by(path=skillpath).first()",
"def list_versions_for_skill_v1(self, skill_id, **kwargs):\n # type: (str, **Any) -> Union[ApiResponse, object, ListSkillVersionsResponse_7522147d, StandardizedError_f5106a89, BadRequestError_f854b05]\n operation_name = \"list_versions_for_skill_v1\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'skill_id' is set\n if ('skill_id' not in params) or (params['skill_id'] is None):\n raise ValueError(\n \"Missing the required parameter `skill_id` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v1/skills/{skillId}/versions'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'skill_id' in params:\n path_params['skillId'] = params['skill_id']\n\n query_params = [] # type: List\n if 'next_token' in params:\n query_params.append(('nextToken', params['next_token']))\n if 'max_results' in params:\n query_params.append(('maxResults', params['max_results']))\n\n header_params = [] # type: List\n\n body_params = None\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.list_skill_versions_response.ListSkillVersionsResponse\", status_code=200, message=\"Successfully retrieved skill versions\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=400, message=\"Server cannot process the request due to a client error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=401, message=\"The auth token is invalid/expired or doesn't have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=403, message=\"The operation being requested is not allowed.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=404, message=\"The resource being requested is not found.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=429, message=\"Exceeds the permitted request limit. 
Throttling criteria includes total requests, per API, ClientId, and CustomerId.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=500, message=\"Internal Server Error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=503, message=\"Service Unavailable.\"))\n\n api_response = self.invoke(\n method=\"GET\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=\"ask_smapi_model.v1.skill.list_skill_versions_response.ListSkillVersionsResponse\")\n\n if full_response:\n return api_response\n return api_response.body",
"def get_skill_enablement_status_v1(self, skill_id, stage, **kwargs):\n # type: (str, str, **Any) -> Union[ApiResponse, object, StandardizedError_f5106a89, BadRequestError_f854b05]\n operation_name = \"get_skill_enablement_status_v1\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'skill_id' is set\n if ('skill_id' not in params) or (params['skill_id'] is None):\n raise ValueError(\n \"Missing the required parameter `skill_id` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'stage' is set\n if ('stage' not in params) or (params['stage'] is None):\n raise ValueError(\n \"Missing the required parameter `stage` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v1/skills/{skillId}/stages/{stage}/enablement'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'skill_id' in params:\n path_params['skillId'] = params['skill_id']\n if 'stage' in params:\n path_params['stage'] = params['stage']\n\n query_params = [] # type: List\n\n header_params = [] # type: List\n\n body_params = None\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=None, status_code=204, message=\"No Content; Confirms that enablement resource exists for given skillId & stage.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=400, message=\"Server cannot process the request due to a client error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=401, message=\"The auth token is invalid/expired or doesn't have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=403, message=\"The operation being requested is not allowed.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=404, message=\"The resource being requested is not found.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=429, message=\"Exceed the permitted request limit. 
Throttling criteria includes total requests, per API, ClientId, and CustomerId.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=500, message=\"Internal Server Error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=503, message=\"Service Unavailable.\"))\n\n api_response = self.invoke(\n method=\"GET\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=None)\n\n if full_response:\n return api_response\n \n return None",
"def get_skills(self):\n return self.skills[:]",
"def get_status_of_id(sku_id):\n if not sku_id:\n return None\n\n status_query = list(sku_database.find({\"SKU_unit\": int(sku_id)}, {'_id': 0, 'Status': 1}))\n status = status_query[0][\"Status\"]\n return status",
"def get_status(self, scenario_id):\n table = self.get_execute_table()\n try:\n return table.loc[int(scenario_id), \"status\"]\n except KeyError:\n raise Exception(f\"Scenario not found in execute list, id = {scenario_id}\")",
"def related_resources(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['KlusterletStatusRelatedResourcesArgs']]]]:\n return pulumi.get(self, \"related_resources\")",
"def get_recent_level(user_id, skill_id):\n return Association.query.filter(Association.skill_id == skill_id,\n Association.users_id == user_id).all()[-1].level",
"def test_get_skill_progress(self):\n self._build_sample_graph()\n self._add_student_and_progress()\n tracker = SkillCompletionTracker()\n result = tracker.get_skills_progress(\n self.student, [self.sa.id, self.sb.id, self.sc.id])\n self.assertEqual(SkillCompletionTracker.COMPLETED,\n result[self.sa.id][0])\n self.assertEqual(SkillCompletionTracker.IN_PROGRESS,\n result[self.sb.id][0])\n self.assertEqual(SkillCompletionTracker.NOT_ATTEMPTED,\n result[self.sc.id][0])",
"def load_skill_list(skills_to_load):\n if exists(SKILLS_DIR):\n # checking skills dir and getting all priority skills there\n skill_list = [folder for folder in filter(\n lambda x: os.path.isdir(os.path.join(SKILLS_DIR, x)),\n os.listdir(SKILLS_DIR)) if folder in skills_to_load]\n for skill_folder in skill_list:\n skill = {\"id\": hash(os.path.join(SKILLS_DIR, skill_folder))}\n skill[\"path\"] = os.path.join(SKILLS_DIR, skill_folder)\n # checking if is a skill\n if not MainModule + \".py\" in os.listdir(skill[\"path\"]):\n continue\n # getting the newest modified date of skill\n last_mod = _get_last_modified_date(skill[\"path\"])\n skill[\"last_modified\"] = last_mod\n # loading skill\n skill[\"loaded\"] = True\n skill[\"instance\"] = load_skill(\n create_skill_descriptor(skill[\"path\"]),\n ws, skill[\"id\"])\n loaded_skills[skill_folder] = skill",
"def get_status(person_group_id):\n url = 'persongroups/{}/training'.format(person_group_id)\n\n return util.request('GET', url)",
"def list_skills_for_vendor_v1(self, vendor_id, **kwargs):\n # type: (str, **Any) -> Union[ApiResponse, object, ListSkillResponse_527462d0, StandardizedError_f5106a89, BadRequestError_f854b05]\n operation_name = \"list_skills_for_vendor_v1\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'vendor_id' is set\n if ('vendor_id' not in params) or (params['vendor_id'] is None):\n raise ValueError(\n \"Missing the required parameter `vendor_id` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v1/skills'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n\n query_params = [] # type: List\n if 'vendor_id' in params:\n query_params.append(('vendorId', params['vendor_id']))\n if 'next_token' in params:\n query_params.append(('nextToken', params['next_token']))\n if 'max_results' in params:\n query_params.append(('maxResults', params['max_results']))\n if 'skill_id' in params:\n query_params.append(('skillId', params['skill_id']))\n\n header_params = [] # type: List\n\n body_params = None\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.list_skill_response.ListSkillResponse\", status_code=200, message=\"Returns list of skills for the vendor.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=400, message=\"Server cannot process the request due to a client error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=401, message=\"The auth token is invalid/expired or doesn't have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=429, message=\"Exceeds the permitted request limit. Throttling criteria includes total requests, per API, ClientId, and CustomerId.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=500, message=\"Internal Server Error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=503, message=\"Service Unavailable.\"))\n\n api_response = self.invoke(\n method=\"GET\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=\"ask_smapi_model.v1.skill.list_skill_response.ListSkillResponse\")\n\n if full_response:\n return api_response\n return api_response.body",
"def get_ability_skill(cursor, skill):\n cursor.execute('SELECT id FROM skills WHERE identifier = ?', (skill,))\n data = cursor.fetchone()\n try:\n return data[0]\n except TypeError:\n l.error(\"The Skill {} doesn't exists.\".format(skill))\n return 0",
"def get_resources(self, resource_id):\n url = \"%s/resource/%s\" % ('None', resource_id)\n resp, body = self.get(url)\n self.expected_success(200, resp.status)\n body = json.loads(body)\n return service_client.ResponseBody(resp, body)",
"def get_project_job_status(id):\n user = current_user\n\n if user.get_id() is not None:\n _tasks = user.get_project_tasks_in_progress(id)\n running_task_dicts = get_running_task_dicts(_tasks)\n\n _tasks = user.get_finished_project_tasks(id)\n finished_task_dicts = get_finished_task_dicts(_tasks)\n\n response_object = {\n 'running_tasks': running_task_dicts,\n 'finished_tasks': finished_task_dicts\n }\n else:\n response_object = {'status': 'error'}\n # print(jsonify(response_object))\n return jsonify(response_object)",
"def get_isp_list_for_skill_id_v1(self, skill_id, stage, **kwargs):\n # type: (str, str, **Any) -> Union[ApiResponse, object, Error_fbe913d9, BadRequestError_f854b05, ListInSkillProductResponse_505e7307]\n operation_name = \"get_isp_list_for_skill_id_v1\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'skill_id' is set\n if ('skill_id' not in params) or (params['skill_id'] is None):\n raise ValueError(\n \"Missing the required parameter `skill_id` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'stage' is set\n if ('stage' not in params) or (params['stage'] is None):\n raise ValueError(\n \"Missing the required parameter `stage` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v1/skills/{skillId}/stages/{stage}/inSkillProducts'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'skill_id' in params:\n path_params['skillId'] = params['skill_id']\n if 'stage' in params:\n path_params['stage'] = params['stage']\n\n query_params = [] # type: List\n if 'next_token' in params:\n query_params.append(('nextToken', params['next_token']))\n if 'max_results' in params:\n query_params.append(('maxResults', params['max_results']))\n\n header_params = [] # type: List\n\n body_params = None\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.isp.list_in_skill_product_response.ListInSkillProductResponse\", status_code=200, message=\"Response contains list of in-skill products for the specified skillId and stage.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=400, message=\"Bad request. Returned when a required parameter is not present, badly formatted. \"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=401, message=\"The auth token is invalid/expired or doesn't have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=404, message=\"Requested resource not found.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=429, message=\"Too many requests received.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=500, message=\"Internal Server Error\"))\n\n api_response = self.invoke(\n method=\"GET\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=\"ask_smapi_model.v1.isp.list_in_skill_product_response.ListInSkillProductResponse\")\n\n if full_response:\n return api_response\n return api_response.body"
] | [
"0.58685875",
"0.5861188",
"0.56543297",
"0.5603727",
"0.558455",
"0.55775243",
"0.5556538",
"0.541805",
"0.53480744",
"0.5343017",
"0.53027284",
"0.5294493",
"0.5292273",
"0.5287303",
"0.52758276",
"0.5232404",
"0.51425916",
"0.5097631",
"0.5069692",
"0.50124115",
"0.49964172",
"0.498886",
"0.4978473",
"0.4974107",
"0.49497822",
"0.4921186",
"0.49138385",
"0.48919022",
"0.48844332",
"0.488122"
] | 0.7183226 | 0 |
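By the same token, a sketch of polling build status with the skill-status operation above (again assuming the hypothetical `smapi_client` and placeholder `skill_id` from the earlier sketch; the optional `resource` keyword maps directly to the query parameter the method appends):

# Hypothetical call; returns a SkillStatus model, per the response_type declared in the method.
skill_status = smapi_client.get_skill_status_v1(skill_id=skill_id, resource="manifest")
# Each requested sub-resource carries its own last-update status that can be polled
# until the build settles; field names come from ask_smapi_model.v1.skill.skill_status.

Omitting `resource` asks for the status of the skill resource and all of its sub-resources at once, which is the behavior the query text describes.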
Submit the skill for certification. | def submit_skill_for_certification_v1(self, skill_id, **kwargs):
# type: (str, **Any) -> Union[ApiResponse, object, StandardizedError_f5106a89, BadRequestError_f854b05]
operation_name = "submit_skill_for_certification_v1"
params = locals()
for key, val in six.iteritems(params['kwargs']):
params[key] = val
del params['kwargs']
# verify the required parameter 'skill_id' is set
if ('skill_id' not in params) or (params['skill_id'] is None):
raise ValueError(
"Missing the required parameter `skill_id` when calling `" + operation_name + "`")
resource_path = '/v1/skills/{skillId}/submit'
resource_path = resource_path.replace('{format}', 'json')
path_params = {} # type: Dict
if 'skill_id' in params:
path_params['skillId'] = params['skill_id']
query_params = [] # type: List
header_params = [] # type: List
body_params = None
if 'submit_skill_for_certification_request' in params:
body_params = params['submit_skill_for_certification_request']
header_params.append(('Content-type', 'application/json'))
header_params.append(('User-Agent', self.user_agent))
# Response Type
full_response = False
if 'full_response' in params:
full_response = params['full_response']
# Authentication setting
access_token = self._lwa_service_client.get_access_token_from_refresh_token()
authorization_value = "Bearer " + access_token
header_params.append(('Authorization', authorization_value))
error_definitions = [] # type: List
error_definitions.append(ServiceClientResponse(response_type=None, status_code=202, message="Success. There is no content but returns Location in the header."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.bad_request_error.BadRequestError", status_code=400, message="Server cannot process the request due to a client error."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=401, message="The auth token is invalid/expired or doesn't have access to the resource."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.bad_request_error.BadRequestError", status_code=403, message="The operation being requested is not allowed."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=404, message="The resource being requested is not found."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=429, message="Exceeds the permitted request limit. Throttling criteria includes total requests, per API, ClientId, and CustomerId."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=500, message="Internal Server Error."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=503, message="Service Unavailable."))
api_response = self.invoke(
method="POST",
endpoint=self._api_endpoint,
path=resource_path,
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
response_definitions=error_definitions,
response_type=None)
if full_response:
return api_response
return None | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_approve(self):\n\n username,userpass = self.testdata.find_account_for('toolsubmitter')\n\n self.utils.account.login_as(username,userpass)\n\n self.contribtool.approve(TOOLNAME,TOOLLICENSEDATA)",
"def request_certificate(request):\r\n if request.method == \"POST\":\r\n if request.user.is_authenticated():\r\n xqci = XQueueCertInterface()\r\n username = request.user.username\r\n student = User.objects.get(username=username)\r\n course_key = SlashSeparatedCourseKey.from_deprecated_string(request.POST.get('course_id'))\r\n course = modulestore().get_course(course_key, depth=2)\r\n\r\n status = certificate_status_for_student(student, course_key)['status']\r\n if status in [CertificateStatuses.unavailable, CertificateStatuses.notpassing, CertificateStatuses.error]:\r\n logger.info('Grading and certification requested for user {} in course {} via /request_certificate call'.format(username, course_key))\r\n status = xqci.add_cert(student, course_key, course=course)\r\n return HttpResponse(json.dumps({'add_status': status}), mimetype='application/json')\r\n return HttpResponse(json.dumps({'add_status': 'ERRORANONYMOUSUSER'}), mimetype='application/json')",
"def submit(request):\n if not request.user.is_authenticated():\n return proceed(request)\n # If dev has already agreed, continue to next step.\n user = UserProfile.objects.get(pk=request.user.id)\n if not user.read_dev_agreement:\n return redirect('submit.app.terms')\n return manifest(request)",
"def SubmitHIT(self, sandbox = 'false'):\n\n if sandbox is 'true':\n self.host = 'mechanicalturk.sandbox.amazonaws.com'\n\n conn = MTurkConnection(host = self.host, aws_access_key_id = self.AWS_KEY, aws_secret_access_key = self.AWS_SECRET)\n\n answer_specification = AnswerSpecification(SelectionAnswer(style = self.answer_style, selections = self.answer_options))\n\n questions = []\n for i in self.question_list:\n\t questions.append(Question(identifier=i[1], content = QuestionContent(i[0]), answer_spec = answer_specification))\n\n question_form = QuestionForm(questions)\n\n self.hit_response = conn.create_hit(question = question_form,\n lifetime = self.lifetime,\n max_assignments = self.assignment_count,\n title = self.title,\n description = self.description,\n keywords = self.keywords,\n reward = self.reward,\n duration = self.duration,\n approval_delay = self.approval_delay,\n annotation = self.annotation)\n\n # Returns the HITId as a unicode string\n self.HITId = self.hit_response.HITId\n return self.HITId",
"def post(\n self,\n email,\n company_name,\n location,\n job_profile,\n salary,\n username,\n password,\n security_question,\n security_answer,\n notes,\n date_applied,\n status,\n):",
"def trigger_assessment():\n\n data_api_client.req.assessments().post(data={\n 'assessment': {\n 'brief_id': request.form['brief_id'],\n 'domain_name': request.form['domain_name'],\n 'supplier_code': request.form['supplier_code']\n },\n 'update_details': {\n 'updated_by': ''\n }\n })\n\n return redirect(url_for('.assessments_review'))",
"def approve(self):\n self.approved = True\n self.quest_node['approved'] = True\n graph.push(self.quest_node)\n self.payout()",
"def purchased_callback(self):\r\n try:\r\n verification_attempt = SoftwareSecurePhotoVerification.active_for_user(self.course_enrollment.user)\r\n verification_attempt.submit()\r\n except Exception as e:\r\n log.exception(\r\n \"Could not submit verification attempt for enrollment {}\".format(self.course_enrollment)\r\n )\r\n self.course_enrollment.change_mode(self.mode)\r\n self.course_enrollment.activate()",
"def submit_essay(self, expected_assessment_type, expected_prompt):\r\n\r\n # Check the assessment type and prompt\r\n self.assertEqual(self.open_response.assessment_type, expected_assessment_type)\r\n self.assertIn(expected_prompt, self.open_response.prompt)\r\n\r\n # Enter a submission, which will trigger a pre-defined response from the XQueue stub.\r\n self.open_response.set_response(self.submission)\r\n\r\n # Save the response and expect some UI feedback\r\n self.open_response.save_response()\r\n self.assertEqual(\r\n self.open_response.alert_message,\r\n \"Answer saved, but not yet submitted.\"\r\n )\r\n\r\n # Submit the response\r\n self.open_response.submit_response()",
"async def submit(self, ctx: commands.Context):\n if ctx.subcommand_passed is None:\n await ctx.send(\"See the subcommands (+help submit) for the categories!\")",
"def submit(request, session, **kwargs):\n\n from ..models import (\n FacilityTransaction,\n Allocation,\n FollowupRequest,\n Instrument,\n )\n\n instrument = (\n Instrument.query_records_accessible_by(request.requester)\n .join(Allocation)\n .join(FollowupRequest)\n .filter(FollowupRequest.id == request.id)\n .first()\n )\n\n name = request.obj.tns_name\n if name is None:\n request.status = 'No TNS name'\n else:\n try:\n lc = Table.read(\n f\"{lightcurve_url}/lc_{name}_cleaned\",\n format='ascii',\n header_start=1,\n )\n\n if 'BTJD' not in list(lc.columns):\n request.status = f\"TESS alert {name} could not be ingested: {lightcurve_url}/lc_{name}_cleaned\"\n else:\n IOLoop.current().run_in_executor(\n None,\n lambda: commit_photometry(\n lc, request.id, instrument.id, request.requester.id\n ),\n )\n\n except FileNotFoundError:\n request.status = f\"TESS alert {name} not found.\"\n except Exception:\n request.status = f\"TESS alert {name} could not be ingested: {lightcurve_url}/lc_{name}_cleaned\"\n\n transaction = FacilityTransaction(\n request=None,\n response=None,\n followup_request=request,\n initiator_id=request.last_modified_by_id,\n )\n\n session.add(transaction)",
"async def submit(client, event,\n submission_reference_url: ('str', 'Please give a link to your submission'),\n ):\n if (event.guild is not None):\n return Embed('Error', 'Please use this channel in a private channel.')\n \n if not event.user.has_roole(ROLE__SUPPORT__VERIFIED):\n return Embed('Permission denied', f'You must have {ROLE__SUPPORT__VERIFIED.mention} role to invoke this '\n f'command.')\n \n if datetime.utcnow() >= QUALIFIER_DEADLINE:\n return Embed('Oh No!', 'Qualifier over', color = COLOR__EVENT)\n \n user = event.user\n await client.message_create(CHANNEL__SUPPORT__EVENT, f'{user:f}, [{user.id}] submitted:\\n'\n f'`{submission_reference_url}`')\n \n return Embed('Success', 'Noice', color = COLOR__EVENT)",
"def submit():\n record = Submission()\n if session.has_key('id'):\n record.uid = session['id']\n else:\n return redirect(url_for('start'))\n\n #check to make sure they haven't submitted the test multiple times\n if (Submission.hasDuplicate(record.uid)):\n flash('You have already submitted your test')\n\n #save the last answers if any\n user_test = session['test']\n for k, v in request.form.iteritems():\n user_test[k] = v\n session['test'] = user_test\n \n #grade the test\n record = gradeTest(record, user_test)\n\n #save the record a keyerror should never happen\n try:\n record.save()\n except KeyError:\n #log on record save failure\n app.logger.error('Record %d failed to save.', record.uid)\n\n session['score'] = record.score\n\n return redirect(url_for('results'))",
"def handle_simpleenroll(self):\n content_length = int(self.headers['Content-Length'])\n csr = self.rfile.read(content_length)\n\n cert = sign_certificate(csr)\n\n self.set_est_rsp_header(len(cert))\n\n self.wfile.write(cert.encode('utf-8'))",
"def test_create_enrollment_term(self):\n # This method utilises the POST request method and will make changes to the Canvas instance. This needs consideration.\n pass",
"def cat_int_pay():\n print(colors.Color.BLUE + \"Make the payment with digital certificate\" + colors.Color.END)\n pay_and_certificate = urllib.parse.quote(\n 'identitats.aoc.cat/o/oauth2/auth?response_type=code&client_id=tramits.'\n 'transit.cat&redirect_uri=https'\n '://multestransit.gencat.cat/sctPagaments/AppJava/loginIdCat&scope='\n 'autenticacio_usuari&access_type=online'\n '&approval_pompt=false&state=ca_ES')\n print('https://' + pay_and_certificate)\n print(colors.Color.BLUE + \"Make the payment without digital certificate\"\n + colors.Color.END)\n pay_without_certificate = urllib.parse.quote(\n 'multestransit.gencat.cat/sctPagaments/AppJava/views/expedients/cerca.'\n 'xhtml?set-locale=ca_ES')\n print('https://' + pay_without_certificate)",
"def submit_loan_request(self):\n try:\n payload = self.get_data()\n self.mainwindow.show_dialog(\"Processing loan request\",\n 'Your request is being processed and '\n 'your documents are being uploaded.')\n if self.mainwindow.api.create_loan_request(self.mainwindow.app.user, payload):\n if self.mainwindow.api.failed_documents:\n self.mainwindow.show_dialog(\"Documents error\", 'Some of the documents could not be sent.')\n else:\n self.mainwindow.show_dialog(\"Loan request created\", 'Your loan request has been sent.')\n else:\n self.mainwindow.show_dialog(\"Loan request error\", 'You can only have a single loan request.')\n except ValueError:\n self.mainwindow.show_dialog(\"Loan request error\", 'You didn\\'t enter the required information.')",
"async def skill(self, ctx, *, skill: str):\n\n try:\n skill = self.get_entry('Skill', skill.lower())\n except RuntimeError as e:\n return await ctx.send(e)\n\n name = skill['Name']\n\n embed = discord.Embed(title=name)\n embed.set_thumbnail(url='attachment://skill.png')\n embed.add_field(name='Learned', value=skill['Class/Rank'], inline=False)\n embed.add_field(name='Effect', value=skill['Effect'])\n\n await ctx.send(file=discord.File(f'xenox/skills/{name}.png', 'skill.png'), embed=embed)",
"def submitRequest(self, json):\n uID = json.get('uID')\n request = True\n approval = \"Wait\"\n if uID:\n\n RequestsDAO().insertRequest(uID, request, approval)\n mapped_result = self.buildRequestToDict(uID, request, approval)\n return jsonify(TURN=mapped_result), 201\n\n else:\n return jsonify(Error=\"Unexpected attributes in post request\"), 400",
"def proceed(request):\n if request.user.is_authenticated():\n return submit(request)\n agreement_form = forms.DevAgreementForm({'read_dev_agreement': True},\n instance=None, request=request)\n return render(request, 'submit/terms.html',\n {'step': 'terms', 'agreement_form': agreement_form,\n 'proceed': True})",
"def _submit_request_fun(self, agent, community, pduType, \\\n encoded_oids=[], encoded_vals=[]):\n # Create SNMP session\n ses = session.session(agent, community)\n question = ses.encode_request(pduType, encoded_oids, encoded_vals)\n\n # Submit question to bulkrole manager\n self.mgr.append(((agent, self.port), question, ses))",
"def _post(self, data=None, headers=None):\n return self.api.send_http_request_v2(method=\"POST\", url=\"https://auth.iqoption.com/api/v2/verify/2fa\",data=json.dumps(data), headers=headers)",
"def add_cert():\n\n if not g.user:\n flash(\"Please login to access\", \"danger\")\n return redirect(\"/\")\n \n if g.user.is_admin == False:\n flash (\"Unauthorized\", \"danger\")\n return redirect(\"/login\")\n\n form = Cert_Form()\n\n if form.validate_on_submit():\n cert = Cert(\n cert_name = form.cert_name.data,\n hours = form.hours.data,\n is_required = form.is_required.data,\n expire = form.expire.data,\n good_for_time = form.good_for_time.data,\n good_for_unit = form.good_for_unit.data,\n \n )\n db.session.add(cert)\n db.session.commit()\n\n flash(\"Certification Added!\", \"success\")\n return redirect(\"/administrator\")\n\n else: \n\n return render_template(\"/admin/add_cert.html\", form = form)",
"def test_successful_program_certificate_generation(self):\n final_grade = FinalGradeFactory.create(\n user=self.user,\n course_run=self.run_1,\n passed=True,\n status='complete',\n grade=0.8\n )\n CourseRunGradingStatus.objects.create(course_run=self.run_1, status='complete')\n with mute_signals(post_save):\n MicromastersCourseCertificate.objects.create(course=final_grade.course_run.course, user=self.user)\n\n cert_qset = MicromastersProgramCertificate.objects.filter(user=self.user, program=self.program)\n assert cert_qset.exists() is False\n api.generate_program_certificate(self.user, self.program)\n assert cert_qset.exists() is True",
"def test_decision_maker_hand_tx_ready_for_signing(self):\n tx_message = TransactionMessage(\n performative=TransactionMessage.Performative.PROPOSE_FOR_SIGNING,\n skill_callback_ids=[PublicId(\"author\", \"a_skill\", \"0.1.0\")],\n tx_id=self.tx_id,\n tx_sender_addr=self.tx_sender_addr,\n tx_counterparty_addr=self.tx_counterparty_addr,\n tx_amount_by_currency_id={\"FET\": -20},\n tx_sender_fee=0,\n tx_counterparty_fee=0,\n tx_quantities_by_good_id={\"good_id\": 0},\n ledger_id=self.ledger_id,\n info=self.info,\n signing_payload={\"key\": b\"some_bytes\"},\n )\n self.decision_maker.handle(tx_message)\n assert not self.decision_maker.message_out_queue.empty()",
"def submitToReview(self, obj):\n self.wftool.doActionFor(obj, \"submit\")",
"def submitRequest(self, request):\n reqmgr = RequestManagerImpl()\n workflow = reqmgr.makeRequest( self.endpoint, request.getRequestDict() )\n workflowName = workflow['RequestName']\n reqmgr.approveRequest( self.endpoint, workflow )\n reqmgr.assignRequest( self.endpoint, workflow, request.getTargetTeam() )\n request.setWorkflowName( workflowName )\n return request",
"def do_POST(self):\r\n if 'grade' in self.path and self._send_graded_result().status_code == 200:\r\n status_message = 'LTI consumer (edX) responded with XML content:<br>' + self.server.grade_data['TC answer']\r\n content = self._create_content(status_message)\r\n self.send_response(200, content)\r\n elif 'lti2_outcome' in self.path and self._send_lti2_outcome().status_code == 200:\r\n status_message = 'LTI consumer (edX) responded with HTTP {}<br>'.format(\r\n self.server.grade_data['status_code'])\r\n content = self._create_content(status_message)\r\n self.send_response(200, content)\r\n elif 'lti2_delete' in self.path and self._send_lti2_delete().status_code == 200:\r\n status_message = 'LTI consumer (edX) responded with HTTP {}<br>'.format(\r\n self.server.grade_data['status_code'])\r\n content = self._create_content(status_message)\r\n self.send_response(200, content)\r\n # Respond to request with correct lti endpoint\r\n elif self._is_correct_lti_request():\r\n params = {k: v for k, v in self.post_dict.items() if k != 'oauth_signature'}\r\n\r\n if self._check_oauth_signature(params, self.post_dict.get('oauth_signature', \"\")):\r\n status_message = \"This is LTI tool. Success.\"\r\n\r\n # Set data for grades what need to be stored as server data\r\n if 'lis_outcome_service_url' in self.post_dict:\r\n self.server.grade_data = {\r\n 'callback_url': self.post_dict.get('lis_outcome_service_url').replace('https', 'http'),\r\n 'sourcedId': self.post_dict.get('lis_result_sourcedid')\r\n }\r\n\r\n submit_url = '//{}:{}'.format(*self.server.server_address)\r\n content = self._create_content(status_message, submit_url)\r\n self.send_response(200, content)\r\n\r\n else:\r\n content = self._create_content(\"Wrong LTI signature\")\r\n self.send_response(200, content)\r\n else:\r\n content = self._create_content(\"Invalid request URL\")\r\n self.send_response(500, content)",
"def send_request(self):\r\n # If AUTOMATIC_VERIFY_STUDENT_IDENTITY_FOR_TESTING is True, we want to\r\n # skip posting anything to Software Secure. We actually don't even\r\n # create the message because that would require encryption and message\r\n # signing that rely on settings.VERIFY_STUDENT values that aren't set\r\n # in dev. So we just pretend like we successfully posted\r\n if settings.FEATURES.get('AUTOMATIC_VERIFY_STUDENT_IDENTITY_FOR_TESTING'):\r\n fake_response = requests.Response()\r\n fake_response.status_code = 200\r\n return fake_response\r\n\r\n headers, body = self.create_request()\r\n response = requests.post(\r\n settings.VERIFY_STUDENT[\"SOFTWARE_SECURE\"][\"API_URL\"],\r\n headers=headers,\r\n data=json.dumps(body, indent=2, sort_keys=True, ensure_ascii=False).encode('utf-8'),\r\n verify=False\r\n )\r\n log.debug(\"Sent request to Software Secure for {}\".format(self.receipt_id))\r\n log.debug(\"Headers:\\n{}\\n\\n\".format(headers))\r\n log.debug(\"Body:\\n{}\\n\\n\".format(body))\r\n log.debug(\"Return code: {}\".format(response.status_code))\r\n log.debug(\"Return message:\\n\\n{}\\n\\n\".format(response.text))\r\n\r\n return response",
"def submit(self):\n data = self.getFSNDataDict()\n if data != []:\n MOSES.addToPiggyBank(data, self.user_id, self.password)"
] | [
"0.62475556",
"0.57329047",
"0.56955576",
"0.5541429",
"0.5416008",
"0.5341066",
"0.52661514",
"0.5255306",
"0.52315617",
"0.5223188",
"0.52101547",
"0.5195959",
"0.51856583",
"0.51673204",
"0.51597625",
"0.5145797",
"0.51179224",
"0.5105908",
"0.51035655",
"0.50881016",
"0.5087675",
"0.5080794",
"0.5069492",
"0.506557",
"0.50601757",
"0.5056025",
"0.5035926",
"0.5024432",
"0.50227803",
"0.5006038"
] | 0.6781027 | 0 |
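Every client method in this dump follows the same shape: build headers, fetch an LWA access token, register per-status response models, and delegate to invoke(). The method ending above returns None on its 202 response, so a caller who needs the Location header it advertises must request the full ApiResponse. Below is a minimal sketch, not taken from the source: the client object, method name, and skill id are placeholders (the method's own signature falls outside this excerpt), and the ApiResponse attribute names are assumed from the SDK's response model.

# Hypothetical call illustrating the shared full_response pattern.
# ApiResponse is assumed to expose status_code, headers (a list of
# (name, value) pairs), and body.
api_response = client.some_skill_operation_v1(
    skill_id='amzn1.ask.skill.1234',   # placeholder skill id
    full_response=True)                # return ApiResponse instead of None
if api_response.status_code == 202:
    location = next((value for name, value in api_response.headers
                     if name.lower() == 'location'), None)
    print('Poll this URL for the outcome:', location)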
Retrieve a list of all skill versions associated with this skill ID. | def list_versions_for_skill_v1(self, skill_id, **kwargs):
# type: (str, **Any) -> Union[ApiResponse, object, ListSkillVersionsResponse_7522147d, StandardizedError_f5106a89, BadRequestError_f854b05]
operation_name = "list_versions_for_skill_v1"
params = locals()
for key, val in six.iteritems(params['kwargs']):
params[key] = val
del params['kwargs']
# verify the required parameter 'skill_id' is set
if ('skill_id' not in params) or (params['skill_id'] is None):
raise ValueError(
"Missing the required parameter `skill_id` when calling `" + operation_name + "`")
resource_path = '/v1/skills/{skillId}/versions'
resource_path = resource_path.replace('{format}', 'json')
path_params = {} # type: Dict
if 'skill_id' in params:
path_params['skillId'] = params['skill_id']
query_params = [] # type: List
if 'next_token' in params:
query_params.append(('nextToken', params['next_token']))
if 'max_results' in params:
query_params.append(('maxResults', params['max_results']))
header_params = [] # type: List
body_params = None
header_params.append(('Content-type', 'application/json'))
header_params.append(('User-Agent', self.user_agent))
# Response Type
full_response = False
if 'full_response' in params:
full_response = params['full_response']
# Authentication setting
access_token = self._lwa_service_client.get_access_token_from_refresh_token()
authorization_value = "Bearer " + access_token
header_params.append(('Authorization', authorization_value))
error_definitions = [] # type: List
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.list_skill_versions_response.ListSkillVersionsResponse", status_code=200, message="Successfully retrieved skill versions"))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.bad_request_error.BadRequestError", status_code=400, message="Server cannot process the request due to a client error."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=401, message="The auth token is invalid/expired or doesn't have access to the resource."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.bad_request_error.BadRequestError", status_code=403, message="The operation being requested is not allowed."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=404, message="The resource being requested is not found."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=429, message="Exceeds the permitted request limit. Throttling criteria includes total requests, per API, ClientId, and CustomerId."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=500, message="Internal Server Error."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=503, message="Service Unavailable."))
api_response = self.invoke(
method="GET",
endpoint=self._api_endpoint,
path=resource_path,
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
response_definitions=error_definitions,
response_type="ask_smapi_model.v1.skill.list_skill_versions_response.ListSkillVersionsResponse")
if full_response:
return api_response
return api_response.body | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def ListVersions(self, request, context):\n context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)",
"def get_versions(self):\n raise NotImplementedError",
"def get_versions():\n ret_obj = {'versions': picard_versions(current_app)}\n return make_response(jsonify(ret_obj), 200)",
"def versions(self) -> List['RadsProjectVersion']:\n logger.debug(f\"retrieve versions of {self}\")\n listing = self.storage.request_text(f\"{self.path}/releaselisting\")\n return [RadsProjectVersion(self, RadsVersion(l)) for l in listing.splitlines()]",
"def getVersions(self):\n logger.debug(\"Func: getVersions\")\n\n try:\n return self._currentSceneInfo[\"Versions\"]\n except:\n return []",
"def versions(self, stored=False) -> List['RadsSolutionVersion']:\n\n if stored:\n fspath = self.storage.fspath(self.path)\n if not os.path.isdir(fspath):\n return [] # solution not in storage\n listing = []\n for path in os.listdir(fspath):\n if not os.path.isdir(os.path.join(fspath, path)):\n continue\n listing.append(path)\n else:\n logger.debug(f\"retrieve versions of {self}\")\n listing = self.storage.request_text(f\"{self.path}/releaselisting\").splitlines()\n return sorted(RadsSolutionVersion(self, RadsVersion(l)) for l in listing)",
"def all(self):\r\n if self._versions is None or \\\r\n len(self._versions) == 0:\r\n url = \"%s/versions\" % self._url\r\n params = {'f':'json'}\r\n res = self._con.get(url, params)\r\n self._versions = []\r\n if 'versions' in res:\r\n for v in res['versions']:\r\n guid = v['versionGuid'][1:-1]\r\n vurl = \"%s/versions/%s\" % (self._url, guid)\r\n self._versions.append(Version(url=vurl,\r\n flc=self._flc,\r\n gis=self._gis))\r\n return self._versions\r\n return self._versions",
"def list_interaction_model_versions_v1(self, skill_id, stage_v2, locale, **kwargs):\n # type: (str, str, str, **Any) -> Union[ApiResponse, object, StandardizedError_f5106a89, BadRequestError_f854b05, ListResponse_cb936759]\n operation_name = \"list_interaction_model_versions_v1\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'skill_id' is set\n if ('skill_id' not in params) or (params['skill_id'] is None):\n raise ValueError(\n \"Missing the required parameter `skill_id` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'stage_v2' is set\n if ('stage_v2' not in params) or (params['stage_v2'] is None):\n raise ValueError(\n \"Missing the required parameter `stage_v2` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'locale' is set\n if ('locale' not in params) or (params['locale'] is None):\n raise ValueError(\n \"Missing the required parameter `locale` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v1/skills/{skillId}/stages/{stageV2}/interactionModel/locales/{locale}/versions'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'skill_id' in params:\n path_params['skillId'] = params['skill_id']\n if 'stage_v2' in params:\n path_params['stageV2'] = params['stage_v2']\n if 'locale' in params:\n path_params['locale'] = params['locale']\n\n query_params = [] # type: List\n if 'next_token' in params:\n query_params.append(('nextToken', params['next_token']))\n if 'max_results' in params:\n query_params.append(('maxResults', params['max_results']))\n if 'sort_direction' in params:\n query_params.append(('sortDirection', params['sort_direction']))\n if 'sort_field' in params:\n query_params.append(('sortField', params['sort_field']))\n\n header_params = [] # type: List\n\n body_params = None\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.interaction_model.version.list_response.ListResponse\", status_code=200, message=\"Returns list of interactionModel versions of a skill for the vendor.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=400, message=\"Server cannot process the request due to a client error e.g. 
the input interaction model is invalid.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=401, message=\"The auth token is invalid/expired or doesn't have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=403, message=\"The operation being requested is not allowed.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=404, message=\"The specified skill doesn't exist or there is no model defined for the locale.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=429, message=\"Exceeds the permitted request limit. Throttling criteria includes total requests, per API, ClientId, and CustomerId.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=500, message=\"Internal Server Error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=503, message=\"Service Unavailable.\"))\n\n api_response = self.invoke(\n method=\"GET\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=\"ask_smapi_model.v1.skill.interaction_model.version.list_response.ListResponse\")\n\n if full_response:\n return api_response\n return api_response.body",
"def list_versions(self):\n version_url = self._get_base_version_url()\n\n resp, body = self.raw_request(version_url, 'GET')\n # NOTE: We need a raw_request() here instead of request() call because\n # \"list API versions\" API doesn't require an authentication and we can\n # skip it with raw_request() call.\n self._error_checker(resp, body)\n\n body = json.loads(body)\n self.validate_response(schema.list_versions, resp, body)\n return rest_client.ResponseBody(resp, body)",
"def select_versions(self):\n return []",
"def list_versions(self, project_id, model_id):\n endpoint = \"/project/{}/model/{}/version\".format(project_id, model_id)\n return self._get(endpoint, _ModelVersionSchema(many=True))",
"def list_versions(self, service_id):\n return [self.fastly_cache[service_id]['service_details']]",
"def versions(self):\n return self._versions",
"def api_versions(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"api_versions\")",
"def api_versions(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"api_versions\")",
"def api_versions(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"api_versions\")",
"def api_versions(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"api_versions\")",
"def versions(self) -> Dict[str, str]:\n self.__logger.debug('Eva.versions called')\n return self.__http_client.api_versions()",
"def versions(self, name):\n if not len(self):\n self.update()\n return [version for version in self if os.path.basename(version) == name]",
"def list_all_dataset_versions(self):\n assert self.dataset_id, 'dataset_id required!'\n return self._datasets_request('GET', dataset_id=self.dataset_id, versions_request=True)",
"def available_versions(self, **kwargs):\n return self.raw_version_data(**kwargs)",
"def index(self, request):\n versions = []\n for key, data in VERSIONS.items():\n v = BaseVersion(\n data[\"id\"],\n data[\"status\"],\n request.application_url,\n data[\"updated\"])\n versions.append(v)\n return wsgi.Result(VersionsDataView(versions))",
"def versions():\n result = timeline.versions()\n if result:\n click.echo('\\n'.join(result))",
"def list_versions(quartus_versions):\n for key in quartus_versions.keys():\n print(key)",
"def ListVersions(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')",
"def list_dataset_version(self, version_id):\n assert self.dataset_id, 'dataset_id required!'\n return self._datasets_request('GET', dataset_id=self.dataset_id, versions_request=True,\n version_id=version_id)",
"def active_versions(self, request, **kwargs):\n project = self.get_project_for_user_or_404(\n kwargs[self.lookup_field]\n )\n versions = project.versions.filter(active=True, privacy_level=PUBLIC)\n return Response({\n 'versions': VersionSerializer(versions, many=True).data,\n })",
"def skills():\n with app.app_context():\n results = Skill.query.all()\n return SkillsResponse(skills=results).json(), 200",
"def admission_review_versions(self) -> Sequence[str]:\n return pulumi.get(self, \"admission_review_versions\")",
"def admission_review_versions(self) -> Sequence[str]:\n return pulumi.get(self, \"admission_review_versions\")"
] | [
"0.6214144",
"0.618844",
"0.6146266",
"0.6140041",
"0.61164033",
"0.60716397",
"0.60430825",
"0.5990867",
"0.59689295",
"0.59645283",
"0.5942911",
"0.5922136",
"0.58993137",
"0.57575744",
"0.57575744",
"0.57575744",
"0.57575744",
"0.5735697",
"0.5722875",
"0.56572497",
"0.56403875",
"0.5594554",
"0.5591867",
"0.55707884",
"0.55044574",
"0.54880923",
"0.548766",
"0.5480002",
"0.5456102",
"0.5456102"
] | 0.75883734 | 0 |
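A minimal usage sketch for list_versions_for_skill_v1 above, assuming a configured ask-smapi-sdk client named client; the skill id is a placeholder, and the response field names (skill_versions, next_token) are inferred from the ListSkillVersionsResponse model name, not confirmed by this excerpt. Note the method forwards a next_token kwarg verbatim, so the token is only passed once the service returns one.

# Page through all versions of a skill via the nextToken/maxResults
# query parameters the method accepts.
page_kwargs = {}
while True:
    response = client.list_versions_for_skill_v1(
        skill_id='amzn1.ask.skill.1234',   # placeholder skill id
        max_results=50,
        **page_kwargs)
    for version in (response.skill_versions or []):   # assumed field name
        print(version)
    token = getattr(response, 'next_token', None)     # assumed field name
    if not token:
        break
    page_kwargs = {'next_token': token}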
Withdraws the skill from certification. | def withdraw_skill_from_certification_v1(self, skill_id, withdraw_request, **kwargs):
# type: (str, WithdrawRequest_d09390b7, **Any) -> Union[ApiResponse, object, StandardizedError_f5106a89, BadRequestError_f854b05]
operation_name = "withdraw_skill_from_certification_v1"
params = locals()
for key, val in six.iteritems(params['kwargs']):
params[key] = val
del params['kwargs']
# verify the required parameter 'skill_id' is set
if ('skill_id' not in params) or (params['skill_id'] is None):
raise ValueError(
"Missing the required parameter `skill_id` when calling `" + operation_name + "`")
# verify the required parameter 'withdraw_request' is set
if ('withdraw_request' not in params) or (params['withdraw_request'] is None):
raise ValueError(
"Missing the required parameter `withdraw_request` when calling `" + operation_name + "`")
resource_path = '/v1/skills/{skillId}/withdraw'
resource_path = resource_path.replace('{format}', 'json')
path_params = {} # type: Dict
if 'skill_id' in params:
path_params['skillId'] = params['skill_id']
query_params = [] # type: List
header_params = [] # type: List
body_params = None
if 'withdraw_request' in params:
body_params = params['withdraw_request']
header_params.append(('Content-type', 'application/json'))
header_params.append(('User-Agent', self.user_agent))
# Response Type
full_response = False
if 'full_response' in params:
full_response = params['full_response']
# Authentication setting
access_token = self._lwa_service_client.get_access_token_from_refresh_token()
authorization_value = "Bearer " + access_token
header_params.append(('Authorization', authorization_value))
error_definitions = [] # type: List
error_definitions.append(ServiceClientResponse(response_type=None, status_code=204, message="Success."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.bad_request_error.BadRequestError", status_code=400, message="Server cannot process the request due to a client error."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=401, message="The auth token is invalid/expired or doesn't have access to the resource."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.bad_request_error.BadRequestError", status_code=403, message="The operation being requested is not allowed."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=404, message="The resource being requested is not found."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=429, message="Exceeds the permitted request limit. Throttling criteria includes total requests, per API, ClientId, and CustomerId."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=500, message="Internal Server Error."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.skill.standardized_error.StandardizedError", status_code=503, message="Service Unavailable."))
api_response = self.invoke(
method="POST",
endpoint=self._api_endpoint,
path=resource_path,
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
response_definitions=error_definitions,
response_type=None)
if full_response:
return api_response
return None | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def safeWithdrawal(self):\n if self._after_dead_line():\n # each contributor can withdraw the amount they contributed if the goal was not reached\n if not self._funding_goal_reached.get():\n amount = self._balances[self.msg.sender]\n self._balances[self.msg.sender] = 0\n if amount > 0:\n if self.icx.send(self.msg.sender, amount):\n self.FundTransfer(self.msg.sender, amount, False)\n Logger.debug(f'FundTransfer({self.msg.sender}, {amount}, False)', TAG)\n else:\n self._balances[self.msg.sender] = amount\n\n # The sales target has been met. Owner can withdraw the contribution.\n if self._funding_goal_reached.get() and self._addr_beneficiary.get() == self.msg.sender:\n if self.icx.send(self._addr_beneficiary.get(), self._amount_raised.get()):\n self.FundTransfer(self._addr_beneficiary.get(), self._amount_raised.get(), False)\n Logger.debug(f'FundTransfer({self._addr_beneficiary.get()},'\n f'{self._amount_raised.get()}, False)', TAG)\n # reset amount_raised\n self._amount_raised.set(0)\n else:\n # if the transfer to beneficiary fails, unlock contributors balance\n Logger.debug(f'Failed to send to beneficiary!', TAG)\n self._funding_goal_reached.set(False)",
"async def legwithdraw(self, ctx):\n\n new_value = await self.toggle_dm_setting(ctx.author.id, \"leg_session_withdraw\")\n\n if new_value:\n message = f\":white_check_mark: You will now receive DMs when you are a member of the \" \\\n f\"{self.bot.mk.LEGISLATURE_CABINET_NAME} and someone withdraws their Bill or Motion. \" \\\n f\"Note that you will never get a DM when a member of the \" \\\n f\"{self.bot.mk.LEGISLATURE_CABINET_NAME} is the one withdrawing.\"\n\n else:\n message = f\":white_check_mark: You will no longer receive DMs when you are a member of the \" \\\n f\"{self.bot.mk.LEGISLATURE_CABINET_NAME} and someone withdraws their Bill or Motion.\"\n\n await ctx.send(message)",
"def withdraw(self,withdrawal_money):\r\n if self.balance < withdrawal_money:\r\n print(\"Funds are insufficient\")\r\n \r\n else:\r\n self.balance -= withdrawal_money\r\n print(\"Withdrawal Accepted\")",
"def withdraw(self, amount):\n self.deposit(-amount)",
"def withdrawal(cls, amount):\n if amount >= 0 and cls.is_logged_in():\n cls.__current_acct.__transaction(-amount)\n else:\n print('withdrawal error')",
"def register_withdraw(self, withdraw_intent): \n if withdraw_intent > 0:\n self.teo.register_withdraw(self, withdraw_intent)",
"def withdraw(self, responder):\n self._apply_decision(self.Status.WITHDRAWN, responder)",
"def withdraw(self, amount):\n self.balance -= amount",
"def withdraw(account, amount):\n pass",
"def do_withdraw(self, args):\n \n amount = float(input(\"How much? \"))\n \n balance = self.cur.execute(\"SELECT * FROM balance ORDER BY date DESC\").fetchone()[2]\n if amount > balance:\n print(\"Insufficient funds! Withdrawl canceled.\")\n print(\"Use the `balance` command to check your account balance\")\n return\n \n balance -= amount\n now = time()\n self.cur.execute(\"INSERT INTO withdrawls VALUES (?,?)\", (now, amount))\n self.cur.execute(\"INSERT INTO balance VALUES (?,?,?)\", (now, 0.0, balance))\n self.db.commit()\n print(\"Withdrawl complete. Your new balance is $%.2f\" % balance)",
"async def resign(self, ctx):\n currency = await bank.get_currency_name(ctx.guild)\n await self.config.user(ctx.author).gameRole.set(\"User\")\n await ctx.send(\n f\"{ctx.author} has spent 10,000 {currency}- to resign from their current job.\"\n )",
"def withdrawal(self, amount):\n if self.balance - amount < self.minimum_balance:\n print \"This would take you below your minimum balance.\"\n return\n else:\n self.balance -= amount\n print \"Please take your cash.\"\n print \"Your balance is now $%d.\" % self.balance\n self.transactions.append((\"Withdrawal\", amount))",
"def confirm(userid, choice, popupid):\r\n if choice:\r\n players[userid].resetSkills()",
"def perform(self, context):\r\n context.owner.spendPower(self.power)",
"def withdrawn(self, withdrawn):\n\n self._withdrawn = withdrawn",
"def deduct_skill_points(self, amount):\r\n self.__skill_points -= amount\r\n self.__skill_points_indicator.configure(\r\n text=\"Available skill points: \" + str(self.__skill_points))",
"def withdraw_money():\n print(\"\\n\")\n print(messages.account_credentials)\n u_id = pyip.inputInt(\"Your Id: \", greaterThan=0)\n password = pyip.inputPassword(\"Your Password: \")\n\n credentials = {\"id\":u_id, \"password\":password}\n result = BankOperationsBackend.withdraw_money(credentials)\n start_again() if result else BankOperationsUi.withdraw_money()",
"def withdraw(self, amount):\n self.withdrw = amount\n \n if (self.balance-self.withdrw) < 0:\n self.balance = self.balance - 5 - self.withdrw\n self.fee += 5\n else:\n self.balance -= self.withdrw",
"def withdraw(self, amount, trigger_transaction, trans=None):\n\n #\n # validates the amount is positive\n self.validate_amount(amount)\n\n #\n # Validate the user has the amount for the withdraw\n if not self.check_sufficient_funds(amount):\n raise OverdraftException(self.user.username)\n\n #\n # creates the transaction\n category = TransactionType.objects.get(pk=TransactionTypeConstants.BonusCashWithdraw.value)\n\n #\n # makes the amount negative because it is a withdrawal\n self.create(category, -amount, trans)\n self.transaction_detail.trigger_transaction = trigger_transaction\n self.transaction_detail.save()\n\n Logger.log(ErrorCodes.INFO,\"Bonus Cash Withdraw\", self.user.username+\" withdrew \"+str(amount)+\" \"+self.accountName+\" from their account.\")",
"def withdraw(self, amount):\n\n print(\"\\nWithdrawal - {self.name}\".format(self=self))\n\n # retrieves the available balance in the account\n availableBalance = self.getAvailableBalance()\n \n # checks for negative amount value \n if amount < 0:\n print(\"Cannot withdraw £{0:.2f}\".format(amount))\n print(\"Deposit amount cannot be a negative value.\")\n\n # checks whether amount requested is greater than the available balance\n elif amount > availableBalance:\n print(\"Cannot withdraw £{0:.2f}\".format(amount))\n print(\"Insufficient funds.\")\n\n # subtracts amount from account balance\n else:\n self.balance -= amount\n print(\"{0} has withdrew £{1:.2f}. New balance is £{2:.2f}\".format(self.name, amount, self.balance))",
"def withdraw(self, amount):\n if self.overdrawn:\n print('You have overdrawn, please add more money!')\n return self.balance\n self.balance = self.balance - amount\n return self.balance",
"def save(self, *args, **kwargs):\n wallet = self.wallet.withdraw(self.value)\n super(Payment, self).save(*args, **kwargs)",
"def rule_withdraw(self, st_acct, st_amount, st_idx):\n if self.active_token_ids.get(st_acct):\n # choose from the caller's valid NFT token IDs, if there are any\n idx = int(st_idx * len(self.active_token_ids[st_acct]))\n token_id = self.active_token_ids[st_acct][idx]\n else:\n # if the caller does not own any NFTs, choose from any token ID\n token_ids = self._all_token_ids()\n idx = int(st_idx * len(token_ids))\n token_id = token_ids[idx]\n\n amount = int(st_amount * 10 ** 18)\n if self.active_token_ids.get(st_acct):\n # when the action is possible, don't exceed the max underlying balance\n balance = self.swap.token_info(token_id)[\"underlying_balance\"]\n amount = min(amount, balance)\n\n if self.active_token_ids.get(st_acct):\n self.swap.withdraw(token_id, amount, {\"from\": st_acct})\n if balance == amount:\n self.active_token_ids[st_acct].remove(token_id)\n self.used_token_ids.append(token_id)\n else:\n with brownie.reverts():\n self.swap.withdraw(token_id, amount, {\"from\": st_acct})",
"def checkSkillForSelling(userid, choice, popupid, resend=True, gainCredits = True):\r\n player = players[userid]\r\n skill = skills[choice]\r\n level = player[skill.name]\r\n creditsGained = int( ( (level - 1) * int(skill.creditIncrement) + int(skill.startCredit)) * float(sellPercentage) / 100.)\r\n if creditsGained > 0 or gainCredits is not True:\r\n \"\"\" Only do the purchase if credits are obtained \"\"\"\r\n player[skill.name] -= 1\r\n if gainCredits is True:\r\n player['credits'] += creditsGained\r\n \r\n \"\"\" If the skill downgrade sound is not blank, emmit it from the player \"\"\"\r\n if str(skillDowngradeSound):\r\n es.emitsound('player', userid, str(skillDowngradeSound), '0.7', '0.5')\r\n \r\n \"\"\" Notify the user of their sale \"\"\"\r\n tokens = {}\r\n tokens['level'] = player[skill.name]\r\n tokens['skill'] = skill.name \r\n tell(userid, 'skill downgrade', tokens)\r\n \r\n \"\"\" Fire the skill downgrade event \"\"\"\r\n values = {}\r\n values[\"userid\"] = (\"setint\", userid)\r\n values[\"level\"] = (\"setint\", player[skill.name])\r\n values[\"gained\"] = (\"setint\", creditsGained)\r\n values[\"skill\"] = (\"setstring\", skill.name)\r\n gamethread.delayed(0, fireEvent, (\"sourcerpg_skilldowngrade\", values))\r\n \r\n if resend is True:\r\n buildSellMenu(userid)",
"def do_withdraw(self,args):\n try:\n address = raw_input(\"Enter the address you want to withdraw to: \")\n totalbalance = prompt(\"Do you want to withdraw your ENTIRE balance?\",False)\n if totalbalance == False:\n amount = D(raw_input(\"Enter the amount of BTC to withdraw: \"))\n else:\n amount,_ = bal()\n \n result = bitstamp.bitcoin_withdraw(address,amount)\n if result:\n print \"%s BTC successfully sent to %s\" % (amount,address)\n else:\n print \"There was an error withdrawing.\"\n except Exception as e:\n traceback.print_exc()\n print \"Unexpected Error: %s\" % e\n self.onecmd('help withdraw')",
"def withdraw(self, currency, amount, address):\n pass",
"def withdraw(self, cr, uid, ids, amount, context=None):\n record = self.browse(cr, uid, ids, context=context)[0]\n current_amount = record.current_amount\n withdraw_amount = record.withdraw_amount\n if amount > current_amount:\n raise osv.except_osv(_('Constraint Error'), _(\"The the amount is greater than the Current Money!\"))\n\n record.write({'current_amount':current_amount - amount,\n 'withdraw_amount':withdraw_amount + amount })\n return True",
"def leave_combat(self):\n self.__regen()",
"def Withdrawal(self):\n self.amount = (int)(raw_input (\" Enter your withdrawal amount \"))\n return self.amount",
"def withdraw(self, amount):\n message = self.account.withdraw(float(amount))\n if message:\n return message\n else:\n self.myView.displayAccount()\n return \"success\""
] | [
"0.6243882",
"0.61899906",
"0.58751225",
"0.58357584",
"0.58351916",
"0.58030045",
"0.5802118",
"0.5768247",
"0.5725085",
"0.5640627",
"0.5611762",
"0.5605854",
"0.5592408",
"0.55664814",
"0.5524832",
"0.5522101",
"0.5470772",
"0.54591274",
"0.544937",
"0.5436389",
"0.5402934",
"0.5400564",
"0.5392518",
"0.5374922",
"0.5351793",
"0.533468",
"0.52982414",
"0.5268509",
"0.5266874",
"0.52653396"
] | 0.6694325 | 0 |
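A minimal usage sketch for withdraw_skill_from_certification_v1, assuming a configured client named client; the WithdrawRequest import path follows the ask_smapi_model naming convention visible in the method's type hints, and the 'TEST_SKILL' reason string is an assumption about the allowed reason values.

from ask_smapi_model.v1.skill.withdraw_request import WithdrawRequest

# On success the call returns None (HTTP 204); pass full_response=True
# if the raw ApiResponse is needed.
request = WithdrawRequest(reason='TEST_SKILL',               # assumed value
                          message='Submitted during a test run.')
client.withdraw_skill_from_certification_v1(
    skill_id='amzn1.ask.skill.1234',   # placeholder skill id
    withdraw_request=request)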
Get the list of vendor information. | def get_vendor_list_v1(self, **kwargs):
# type: (**Any) -> Union[ApiResponse, object, Error_fbe913d9, Vendors_f5f1b90b]
operation_name = "get_vendor_list_v1"
params = locals()
for key, val in six.iteritems(params['kwargs']):
params[key] = val
del params['kwargs']
resource_path = '/v1/vendors'
resource_path = resource_path.replace('{format}', 'json')
path_params = {} # type: Dict
query_params = [] # type: List
header_params = [] # type: List
body_params = None
header_params.append(('Content-type', 'application/json'))
header_params.append(('User-Agent', self.user_agent))
# Response Type
full_response = False
if 'full_response' in params:
full_response = params['full_response']
# Authentication setting
access_token = self._lwa_service_client.get_access_token_from_refresh_token()
authorization_value = "Bearer " + access_token
header_params.append(('Authorization', authorization_value))
error_definitions = [] # type: List
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.vendor_management.vendors.Vendors", status_code=200, message="Return vendor information on success."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.error.Error", status_code=401, message="The auth token is invalid/expired or doesn't have access to the resource."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.error.Error", status_code=429, message="Exceed the permitted request limit. Throttling criteria includes total requests, per API, ClientId, and CustomerId."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.error.Error", status_code=500, message="Internal Server Error."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v1.error.Error", status_code=503, message="Service Unavailable."))
api_response = self.invoke(
method="GET",
endpoint=self._api_endpoint,
path=resource_path,
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
response_definitions=error_definitions,
response_type="ask_smapi_model.v1.vendor_management.vendors.Vendors")
if full_response:
return api_response
return api_response.body | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def vendor_list():\n return ['nxos', 'eos', 'cumulus']",
"def do_command(self, args):\n vendorops = dbops.Vendors()\n listing = vendorops.list(args)\n ordering = ['vendor_name']\n do_list(listing, ordering)",
"def test_listVendorWithNoParams(self):\r\n result = self.client.listVendors({'i_customer': 1})\r\n assert result['result']=='OK'",
"def get_vendors(self, count: int = 10) -> list:\n return list(itertools.islice(self.client.vendors.get_all_generator(), count))",
"def vendor(self):\n return self._vendor",
"def vendor(self) -> str:\n return self.properties[DBUS_ATTR_VENDOR]",
"def get_vendors_and_products_seen(cls, cb):\n url = \"/device_control/v3/orgs/{0}/products\".format(cb.credentials.org_key)\n resp = cb.get_object(url)\n return resp.get(\"results\", [])",
"def GetVendorCount(self):\n regionVectorData = self.VectorData[self.SelectedRegion]\n return regionVectorData['Vendors']",
"def list_devices(cls):\n # get all matching devices\n return usb.core.find(\n find_all=True,\n custom_match=lambda dev: (\n dev.idVendor == cls.vendor_id and dev.idProduct in cls.product_ids\n ),\n )",
"def bios_vendor(self):\n\t\treturn self.__info_dict['info']['bios_vendor']['value']",
"def vendor_name(self):\n return self._device.vendor",
"def get_drivers():\n return [str(d) for d in drivers.values()]",
"def list_drivers():\n return jsonify(drivers)",
"def device_info(self):\n\n return {\n \"identifiers\": {(DOMAIN, self._vin)}\n }",
"def readPCIList(self):\n\n self.vendors = {}\n self.devices = {}\n\n vendorId = None\n vendorName = None\n for line in PCIList.split('\\n'):\n stripped = line.lstrip()\n if not stripped or stripped[0] == ';':\n continue\n if line[0] != '\\t':\n # A vendor line.\n vendorId, vendorName = line.split('\\t', 1)\n vendorId = int(vendorId, 16)\n self.vendors[vendorId] = vendorName.strip()\n else:\n # A device line, continuing the previous vendor.\n deviceId, deviceName = line[1:].split('\\t', 1)\n deviceId = deviceId.split(' ', 1)[0]\n try:\n deviceId = int(deviceId, 16)\n except:\n deviceId = None\n self.devices[(vendorId, deviceId)] = deviceName.strip()\n\n self.addExtraDevices()",
"def product_vendor(request, id=None):\n data = {}\n error = {\n 'status': False,\n 'name': None,\n 'text': None,\n 'level': None,\n 'debug': None\n }\n limit, error = get_limit(request, error)\n\n try:\n product_list = Product.objects.filter(\n productpreparation__vendorproduct__vendor__id__exact=id)[:limit]\n except Exception as e:\n data['error'] = {\n 'status': True,\n 'name': 'Vendor Not Found',\n 'text': 'Vendor with id %s not found!' % id,\n 'level': 'Error',\n 'debug': \"{0}: {1}\".format(type(e).__name__, str(e))\n }\n data['products'] = []\n return HttpResponse(\n json.dumps(data),\n content_type=\"application/json\"\n )\n\n serializer = FreshSerializer()\n\n if not product_list:\n error = {\n \"status\": True,\n \"name\": \"No Products\",\n \"text\": \"No Products found\",\n \"level\": \"Information\",\n \"debug\": \"\"\n }\n\n data = {\n \"products\": json.loads(serializer.serialize(product_list)),\n \"error\": error\n }\n\n return HttpResponse(json.dumps(data), content_type=\"application/json\")",
"def __str__(self):\n return '%s' % (self.vendor)",
"def get_public_vendors(self, components):\n # TODO: Assuming first server is good - need to make fallback logic\n return self.session.get_any(\"{base}{request_url}\".format(base=self.servers[0],\n request_url=F\"/Destiny2//Vendors/\"))",
"def get_vendor(self, result, host, mac):\n if \"vendor\" in result['scan'][host] and mac in result['scan'][host]['vendor']:\n return result['scan'][host]['vendor'][mac]\n else:\n return \"\"",
"def get_vendor_price_lists_details(self):\n try:\n self.vendor_price_lists_dict = self.get_grid_row_details(self.customer_price_list_grid_div_id, self.vendor_price_lists_dict)\n return True\n except:\n return False",
"def get(self):\n devs = Device.query.all()\n lista = []\n for d in devs:\n lista.append(d.json())\n return lista",
"def do_list(self, _):\n devices = []\n for source in self._target.devices:\n devices.append({\n 'name': source.device['name'],\n 'path': source.device['path'],\n })\n return devices",
"def list_devices(self):\n return [x for x in self.devices.keys()]",
"def list_devices():\r\n DeviceManagerCLI.BuildDeviceList()\r\n return DeviceManagerCLI.GetDeviceList()",
"def vendor_id(self):\n return self._device.vendor_id",
"def vendor(n, vendors_from_inn):\n inns = list(map(str, vendors_from_inn.keys()))\n\n for i in inns:\n if str(n).startswith(i):\n return vendors_from_inn[int(i)]",
"def get_vendor(mac):\r\n return p.get_manuf(mac) or 'None'",
"def get_devices(self):\n return self.api_request('GET', self.url + '/device', {})",
"def device_info(self):\n return {\n \"identifiers\": {(DOMAIN, self.unique_id)},\n \"name\": self.name,\n \"manufacturer\": \"Brightech\",\n }",
"def VendorId(self):\n\t\treturn self._get_attribute('vendorId')"
] | [
"0.73159623",
"0.708257",
"0.66518545",
"0.659405",
"0.6470233",
"0.6464649",
"0.6369177",
"0.63546693",
"0.63237035",
"0.6315904",
"0.619651",
"0.6145371",
"0.61172646",
"0.60784054",
"0.60150975",
"0.5963091",
"0.5943793",
"0.59241074",
"0.5872928",
"0.58535665",
"0.58413684",
"0.58406246",
"0.5785656",
"0.57838225",
"0.5776948",
"0.57681835",
"0.57289666",
"0.57209796",
"0.5691059",
"0.5680511"
] | 0.720569 | 1 |
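A minimal usage sketch for get_vendor_list_v1, assuming a configured client named client; the vendors/id/name field names mirror the SMAPI vendors payload and should be treated as assumptions here.

# No required parameters; deserializes into the Vendors model on HTTP 200.
vendors_response = client.get_vendor_list_v1()
for vendor in (vendors_response.vendors or []):   # assumed field names
    print(vendor.id, vendor.name)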
Invokes the Lambda or third party HTTPS endpoint for the given skill against a given stage. This is a synchronous API that invokes the Lambda or third party HTTPS endpoint for a given skill. A successful response will contain information about which endpoint was called and the payload sent to and received from the endpoint. In cases where requests to this API result in an error, the response will contain an error code and a description of the problem. In cases where invoking the skill endpoint specifically fails, the response will contain a status attribute indicating that a failure occurred and details about what was sent to the endpoint. The skill must belong to and be enabled by the user of this API. Also, note that calls to the skill endpoint will time out after 10 seconds. This API is currently designed in a way that allows extension to an asynchronous API if a significantly bigger timeout is required. | def invoke_skill_end_point_v2(self, skill_id, stage, **kwargs):
# type: (str, str, **Any) -> Union[ApiResponse, object, BadRequestError_765e0ac6, InvocationsApiResponse_3d7e3234, Error_ea6c1a5a]
operation_name = "invoke_skill_end_point_v2"
params = locals()
for key, val in six.iteritems(params['kwargs']):
params[key] = val
del params['kwargs']
# verify the required parameter 'skill_id' is set
if ('skill_id' not in params) or (params['skill_id'] is None):
raise ValueError(
"Missing the required parameter `skill_id` when calling `" + operation_name + "`")
# verify the required parameter 'stage' is set
if ('stage' not in params) or (params['stage'] is None):
raise ValueError(
"Missing the required parameter `stage` when calling `" + operation_name + "`")
resource_path = '/v2/skills/{skillId}/stages/{stage}/invocations'
resource_path = resource_path.replace('{format}', 'json')
path_params = {} # type: Dict
if 'skill_id' in params:
path_params['skillId'] = params['skill_id']
if 'stage' in params:
path_params['stage'] = params['stage']
query_params = [] # type: List
header_params = [] # type: List
body_params = None
if 'invocations_api_request' in params:
body_params = params['invocations_api_request']
header_params.append(('Content-type', 'application/json'))
header_params.append(('User-Agent', self.user_agent))
# Response Type
full_response = False
if 'full_response' in params:
full_response = params['full_response']
# Authentication setting
access_token = self._lwa_service_client.get_access_token_from_refresh_token()
authorization_value = "Bearer " + access_token
header_params.append(('Authorization', authorization_value))
error_definitions = [] # type: List
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v2.skill.invocations.invocations_api_response.InvocationsApiResponse", status_code=200, message="Skill was invoked."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v2.bad_request_error.BadRequestError", status_code=400, message="Bad request due to invalid or missing data."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v2.error.Error", status_code=401, message="The auth token is invalid/expired or doesn't have access to the resource."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v2.bad_request_error.BadRequestError", status_code=403, message="API user does not have permission to call this API or is currently in a state that does not allow invocation of this skill. "))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v2.error.Error", status_code=404, message="The specified skill does not exist."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v2.error.Error", status_code=429, message="API user has exceeded the permitted request rate."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v2.error.Error", status_code=500, message="Internal service error."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v2.error.Error", status_code=503, message="Service Unavailable."))
api_response = self.invoke(
method="POST",
endpoint=self._api_endpoint,
path=resource_path,
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
response_definitions=error_definitions,
response_type="ask_smapi_model.v2.skill.invocations.invocations_api_response.InvocationsApiResponse")
if full_response:
return api_response
return api_response.body | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def invoke_skill_v1(self, skill_id, invoke_skill_request, **kwargs):\n # type: (str, InvokeSkillRequest_8cf8aff9, **Any) -> Union[ApiResponse, object, InvokeSkillResponse_6f32f451, StandardizedError_f5106a89, BadRequestError_f854b05]\n operation_name = \"invoke_skill_v1\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'skill_id' is set\n if ('skill_id' not in params) or (params['skill_id'] is None):\n raise ValueError(\n \"Missing the required parameter `skill_id` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'invoke_skill_request' is set\n if ('invoke_skill_request' not in params) or (params['invoke_skill_request'] is None):\n raise ValueError(\n \"Missing the required parameter `invoke_skill_request` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v1/skills/{skillId}/invocations'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'skill_id' in params:\n path_params['skillId'] = params['skill_id']\n\n query_params = [] # type: List\n\n header_params = [] # type: List\n\n body_params = None\n if 'invoke_skill_request' in params:\n body_params = params['invoke_skill_request']\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.invocations.invoke_skill_response.InvokeSkillResponse\", status_code=200, message=\"Skill was invoked.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=400, message=\"Bad request due to invalid or missing data.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=401, message=\"The auth token is invalid/expired or doesn't have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=403, message=\"API user does not have permission to call this API or is currently in a state that does not allow invocation of this skill. 
\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=404, message=\"The specified skill does not exist.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=429, message=\"API user has exceeded the permitted request rate.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=503, message=\"Service Unavailable.\"))\n\n api_response = self.invoke(\n method=\"POST\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=\"ask_smapi_model.v1.skill.invocations.invoke_skill_response.InvokeSkillResponse\")\n\n if full_response:\n return api_response\n return api_response.body",
"def lambda_handler(event, context):\n logger.info(\"event.session.application.applicationId=\" +\n event['session']['application']['applicationId'])\n\n \"\"\"Check that this is being called by our skill\"\"\"\n logger.info(\"Calling app: \"+str(event['session']['application']['applicationId']))\n if (event['session']['application']['applicationId'] !=\n \"amzn1.ask.skill.\"+skill_id):\n logger.error(\"Invalid application ID\")\n raise ValueError(\"Invalid Application ID\")\n\n if event['session']['new']:\n on_session_started(event, {'requestId': event['request']['requestId']},\n event['session'])\n\n if event['request']['type'] == \"LaunchRequest\":\n return on_launch(event, event['request'], event['session'])\n elif event['request']['type'] == \"IntentRequest\":\n return on_intent(event, event['request'], event['session'])\n elif event['request']['type'] == \"SessionEndedRequest\":\n return on_session_ended(event, event['request'], event['session'])\n\n # Otherwise deal with it gracefully\n logger.info(\"Unexpected request type:\")\n logger.info(json.dumps(event))\n return build_response({}, build_speechlet_response(\"Leeds Bins\", \"Welcome to Leeds Bins. Now you can find out which waste bins to take out when. Try asking: what's my next collection.\", None, False))",
"def simulate_skill_v2(self, skill_id, stage, simulations_api_request, **kwargs):\n # type: (str, str, SimulationsApiRequest_ae2e6503, **Any) -> Union[ApiResponse, object, SimulationsApiResponse_e4ad17d, BadRequestError_765e0ac6, Error_ea6c1a5a]\n operation_name = \"simulate_skill_v2\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'skill_id' is set\n if ('skill_id' not in params) or (params['skill_id'] is None):\n raise ValueError(\n \"Missing the required parameter `skill_id` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'stage' is set\n if ('stage' not in params) or (params['stage'] is None):\n raise ValueError(\n \"Missing the required parameter `stage` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'simulations_api_request' is set\n if ('simulations_api_request' not in params) or (params['simulations_api_request'] is None):\n raise ValueError(\n \"Missing the required parameter `simulations_api_request` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v2/skills/{skillId}/stages/{stage}/simulations'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'skill_id' in params:\n path_params['skillId'] = params['skill_id']\n if 'stage' in params:\n path_params['stage'] = params['stage']\n\n query_params = [] # type: List\n\n header_params = [] # type: List\n\n body_params = None\n if 'simulations_api_request' in params:\n body_params = params['simulations_api_request']\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.skill.simulations.simulations_api_response.SimulationsApiResponse\", status_code=200, message=\"Skill simulation has successfully began.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.bad_request_error.BadRequestError\", status_code=400, message=\"Bad request due to invalid or missing data.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.error.Error\", status_code=401, message=\"The auth token is invalid/expired or doesn't have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.bad_request_error.BadRequestError\", status_code=403, message=\"API user does not have permission to call this API or is currently in a state that does not allow simulation of this skill. \"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.error.Error\", status_code=404, message=\"The specified skill does not exist.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.error.Error\", status_code=409, message=\"This requests conflicts with another one currently being processed. 
\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.error.Error\", status_code=429, message=\"API user has exceeded the permitted request rate.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.error.Error\", status_code=500, message=\"Internal service error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.error.Error\", status_code=503, message=\"Service Unavailable.\"))\n\n api_response = self.invoke(\n method=\"POST\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=\"ask_smapi_model.v2.skill.simulations.simulations_api_response.SimulationsApiResponse\")\n\n if full_response:\n return api_response\n return api_response.body",
"def get_isp_associated_skills_v1(self, product_id, stage, **kwargs):\n # type: (str, str, **Any) -> Union[ApiResponse, object, AssociatedSkillResponse_12067635, Error_fbe913d9, BadRequestError_f854b05]\n operation_name = \"get_isp_associated_skills_v1\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'product_id' is set\n if ('product_id' not in params) or (params['product_id'] is None):\n raise ValueError(\n \"Missing the required parameter `product_id` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'stage' is set\n if ('stage' not in params) or (params['stage'] is None):\n raise ValueError(\n \"Missing the required parameter `stage` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v1/inSkillProducts/{productId}/stages/{stage}/skills'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'product_id' in params:\n path_params['productId'] = params['product_id']\n if 'stage' in params:\n path_params['stage'] = params['stage']\n\n query_params = [] # type: List\n if 'next_token' in params:\n query_params.append(('nextToken', params['next_token']))\n if 'max_results' in params:\n query_params.append(('maxResults', params['max_results']))\n\n header_params = [] # type: List\n\n body_params = None\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.isp.associated_skill_response.AssociatedSkillResponse\", status_code=200, message=\"Returns skills associated with the in-skill product.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=400, message=\"Bad request. Returned when a required parameter is not present, badly formatted. \"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=401, message=\"The auth token is invalid/expired or doesn't have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=404, message=\"Requested resource not found.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=429, message=\"Too many requests received.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=500, message=\"Internal Server Error\"))\n\n api_response = self.invoke(\n method=\"GET\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=\"ask_smapi_model.v1.isp.associated_skill_response.AssociatedSkillResponse\")\n\n if full_response:\n return api_response\n return api_response.body",
"def lambda_handler(event, context):\n\n # Get Params\n pipeline = event['detail']['pipeline']\n executionId = event['detail']['execution-id']\n status = event['detail']['state']\n stage = event['detail']['stage']\n \n if stage.lower() == \"Source\".lower():\n return '200 OK'\n\n # Get Pipeline execution details.\n client = boto3.client('codepipeline')\n pipelineDetails = client.get_pipeline_execution(\n pipelineName=pipeline,\n pipelineExecutionId=executionId\n )\n\n # Format Slack Message.\n message = '*Commit*\\n' + pipelineDetails['pipelineExecution']['artifactRevisions'][0]['revisionSummary']\n message = message + '\\n*Link*\\n' + pipelineDetails['pipelineExecution']['artifactRevisions'][0]['revisionUrl']\n color = {\n 'SUCCEEDED': 'good',\n 'FAILED': 'danger',\n 'STARTED': '#4682B4'\n }.get(status, '#2a2a2a')\n\n # JSON Payload.\n payload = {\n 'channel': CHANNEL,\n 'attachments': [\n {\n 'color': color,\n 'text': message,\n 'fields': [\n {\n 'title': 'Stage',\n 'value': stage.title(),\n 'short': 'true'\n },\n {\n 'title': 'Status',\n 'value': status.title(),\n 'short': 'true'\n }\n ],\n }\n ]\n }\n # Construct the HTTP request that will be sent to the Slack API.\n request = urllib.request.Request(\n SLACK_URL,\n method='POST',\n data=json.dumps(payload).encode('utf-8')\n )\n # Add Headers.\n request.add_header('Authorization', 'Bearer ' + TOKEN)\n request.add_header('Content-Type', 'application/json')\n \n # Make the Request.\n urllib.request.urlopen(request).read()\n\n # Return Success.\n return '200 OK'",
"def invoke_url(self) -> pulumi.Output[str]:\n return self.stage.invoke_url # type: ignore[no-any-return]",
"def lambda_handler(event, context):\r\n if 'session' in event:\r\n print(\"event.session.application.applicationId=\" +\r\n event['session']['application']['applicationId'])\r\n\r\n \"\"\"\r\n Uncomment this if statement and populate with your skill's application ID to\r\n prevent someone else from configuring a skill that sends requests to this\r\n function.\r\n \"\"\"\r\n if ('session' in event and (event['session']['application']['applicationId'] !=\r\n \"amzn1.ask.skill.57119d91-fb3c-487f-be53-4e7fac12fb83\")):\r\n raise ValueError(\"Invalid Application ID\")\r\n\r\n \"\"\"if event['session']['new']:\r\n on_session_started({'requestId': event['request']['requestId']},\r\n event['session'])\"\"\"\r\n\r\n if event['request']['type'] == \"LaunchRequest\":\r\n return on_launch(event['request'], event['session'])\r\n elif event['request']['type'] == \"IntentRequest\":\r\n return on_intent(event['request'], event['session'])\r\n elif event['request']['type'] == \"SessionEndedRequest\":\r\n return on_session_ended(event['request'], event['session'])\r\n elif event['request']['type'] == 'UPDATE':\r\n return saveCoffeeMachineStatus(event['request'])\r\n elif event['request']['type'] == \"GLASS\":\r\n return glassStatus(event['request'])\r\n elif event['request']['type'] == \"WATER\":\r\n return waterStatus(event['request'])\r\n elif event['request']['type'] == \"COFFEE\":\r\n return coffeeStatus(event['request'])\r\n elif event['request']['type'] == \"ON_OFF\":\r\n return on_off_status(event['request'])\r\n elif event['request']['type'] == \"ONLINE\":\r\n return online_status_f(event['request'])\r\n elif event['request']['type'] == 'BUSY':\r\n return busyStatus(event['request'])",
"def handler(event, context):\n\n \"\"\"\n Uncomment this if statement and populate with your skill's application ID to\n prevent someone else from configuring a skill that sends requests to this\n function.\n \"\"\"\n\n # if (event['session']['application']['applicationId'] !=\n # \"amzn1.echo-sdk-ams.app.[unique-value-here]\"):\n # raise ValueError(\"Invalid Application ID\")\n\n if event['session']['new']:\n on_session_started({'requestId': event['request']['requestId']},\n event['session'])\n\n if event['request']['type'] == \"LaunchRequest\":\n return on_launch(event['request'], event['session'])\n elif event['request']['type'] == \"IntentRequest\":\n return on_intent(event['request'], event['session'])\n elif event['request']['type'] == \"SessionEndedRequest\":\n return on_session_ended(event['request'], event['session'])",
"def start_beta_test_v1(self, skill_id, **kwargs):\n # type: (str, **Any) -> Union[ApiResponse, object, Error_fbe913d9, BadRequestError_f854b05]\n operation_name = \"start_beta_test_v1\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'skill_id' is set\n if ('skill_id' not in params) or (params['skill_id'] is None):\n raise ValueError(\n \"Missing the required parameter `skill_id` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v1/skills/{skillId}/betaTest/start'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'skill_id' in params:\n path_params['skillId'] = params['skill_id']\n\n query_params = [] # type: List\n\n header_params = [] # type: List\n\n body_params = None\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=None, status_code=202, message=\"Accept. Return a URL to track the resource in 'Location' header.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=400, message=\"Server cannot process the request due to a client error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=401, message=\"The auth token is invalid/expired or doesn't have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=404, message=\"The resource being requested is not found.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=409, message=\"The request could not be completed due to a conflict with the current state of the target resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=429, message=\"Exceed the permitted request limit. Throttling criteria includes total requests, per API, ClientId, and CustomerId.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=500, message=\"Internal Server Error.\"))\n\n api_response = self.invoke(\n method=\"POST\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=None)\n\n if full_response:\n return api_response\n \n return None",
"def end_beta_test_v1(self, skill_id, **kwargs):\n # type: (str, **Any) -> Union[ApiResponse, object, Error_fbe913d9, BadRequestError_f854b05]\n operation_name = \"end_beta_test_v1\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'skill_id' is set\n if ('skill_id' not in params) or (params['skill_id'] is None):\n raise ValueError(\n \"Missing the required parameter `skill_id` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v1/skills/{skillId}/betaTest/end'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'skill_id' in params:\n path_params['skillId'] = params['skill_id']\n\n query_params = [] # type: List\n\n header_params = [] # type: List\n\n body_params = None\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=None, status_code=202, message=\"Accept. Return a URL to track the resource in 'Location' header.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=400, message=\"Server cannot process the request due to a client error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=401, message=\"The auth token is invalid/expired or doesn't have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=404, message=\"The resource being requested is not found.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=409, message=\"The request could not be completed due to a conflict with the current state of the target resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=429, message=\"Exceed the permitted request limit. Throttling criteria includes total requests, per API, ClientId, and CustomerId.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=500, message=\"Internal Server Error.\"))\n\n api_response = self.invoke(\n method=\"POST\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=None)\n\n if full_response:\n return api_response\n \n return None",
"def lambda_handler(event, context):\n print(\"event.session.application.applicationId=\" +\n event['session']['application']['applicationId'])\n\n \"\"\"\n Uncomment this if statement and populate with your skill's application ID to\n prevent someone else from configuring a skill that sends requests to this\n function.\n \"\"\"\n # if (event['session']['application']['applicationId'] !=\n # \"amzn1.echo-sdk-ams.app.[unique-value-here]\"):\n # raise ValueError(\"Invalid Application ID\")\n\n if event['session']['new']:\n on_session_started({'requestId': event['request']['requestId']},\n event['session'])\n league = brasileirao.get()\n if event['request']['type'] == \"LaunchRequest\":\n return on_launch(event['request'], event['session'])\n elif event['request']['type'] == \"IntentRequest\":\n return on_intent(event['request'], event['session'], league)\n elif event['request']['type'] == \"SessionEndedRequest\":\n return on_session_ended(event['request'], event['session'])",
"def set_skill_enablement_v1(self, skill_id, stage, **kwargs):\n # type: (str, str, **Any) -> Union[ApiResponse, object, StandardizedError_f5106a89, BadRequestError_f854b05]\n operation_name = \"set_skill_enablement_v1\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'skill_id' is set\n if ('skill_id' not in params) or (params['skill_id'] is None):\n raise ValueError(\n \"Missing the required parameter `skill_id` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'stage' is set\n if ('stage' not in params) or (params['stage'] is None):\n raise ValueError(\n \"Missing the required parameter `stage` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v1/skills/{skillId}/stages/{stage}/enablement'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'skill_id' in params:\n path_params['skillId'] = params['skill_id']\n if 'stage' in params:\n path_params['stage'] = params['stage']\n\n query_params = [] # type: List\n\n header_params = [] # type: List\n\n body_params = None\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=None, status_code=204, message=\"No Content; Confirms that enablement is successfully created/updated.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=400, message=\"Server cannot process the request due to a client error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=401, message=\"The auth token is invalid/expired or doesn't have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=403, message=\"The operation being requested is not allowed.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=404, message=\"The resource being requested is not found.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=409, message=\"The request could not be completed due to a conflict with the current state of the target resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=429, message=\"Exceed the permitted request limit. 
Throttling criteria includes total requests, per API, ClientId, and CustomerId.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=500, message=\"Internal Server Error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=503, message=\"Service Unavailable.\"))\n\n api_response = self.invoke(\n method=\"PUT\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=None)\n\n if full_response:\n return api_response\n \n return None",
"def lambda_handler(event, context):\n print(\"Incoming request...\")\n\n \"\"\"\n Uncomment this if statement and populate with your skill's application ID to\n prevent someone else from configuring a skill that sends requests to this\n function.\n \"\"\"\n if (event['session']['application']['applicationId'] !=\n \"amzn1.ask.skill.2994421a-75ef-4502-9d4a-bf83f20a7ade\"):\n raise ValueError(\"Invalid Application ID\")\n\n if event['session']['new']:\n on_session_started({'requestId': event['request']['requestId']},\n event['session'])\n\n if event['request']['type'] == \"LaunchRequest\":\n return on_launch(event['request'], event['session'])\n elif event['request']['type'] == \"IntentRequest\":\n return on_intent(event['request'], event['session'])\n elif event['request']['type'] == \"SessionEndedRequest\":\n return on_session_ended(event['request'], event['session'])",
"def lambda_handler(event, context):\r\n print(\"Incoming request...\")\r\n\r\n \"\"\"\r\n Uncomment this if statement and populate with your skill's application ID to\r\n prevent someone else from configuring a skill that sends requests to this\r\n function.\r\n \"\"\"\r\n # if (event['session']['application']['applicationId'] !=\r\n # \"amzn1.echo-sdk-ams.app.[unique-value-here]\"):\r\n # raise ValueError(\"Invalid Application ID\")\r\n\r\n if event['session']['new']:\r\n on_session_started({'requestId': event['request']['requestId']},\r\n event['session'])\r\n\r\n if event['request']['type'] == \"LaunchRequest\":\r\n return on_launch(event['request'], event['session'])\r\n elif event['request']['type'] == \"IntentRequest\":\r\n return on_intent(event['request'], event['session'])\r\n elif event['request']['type'] == \"SessionEndedRequest\":\r\n return on_session_ended(event['request'], event['session'])",
"def lambda_handler(event, context):\n\n operations = {\n 'POST': main,\n }\n\n if event.get('httpMethod', False):\n operation = event['httpMethod']\n else:\n operation = \"not available\"\n\n payload = base64.b64decode(event['body'])\n try:\n payload = json.loads(payload)\n except TypeError:\n pass\n\n if operation in operations:\n return respond(None, operations[operation](payload))\n else:\n return respond(ValueError(f'Unsupported method {operation}'))",
"def lambda_handler(event, context):\n print('HANDLING EVENT')\n print(\"event.session.application.applicationId=\" +\n event['session']['application']['applicationId'])\n\n \"\"\"\n Uncomment this if statement and populate with your skill's application ID to\n prevent someone else from configuring a skill that sends requests to this\n function.\n \"\"\"\n # if (event['session']['application']['applicationId'] !=\n # \"amzn1.echo-sdk-ams.app.[unique-value-here]\"):\n # raise ValueError(\"Invalid Application ID\")\n\n\n if event['session']['new']:\n on_session_started({'requestId': event['request']['requestId']},\n event['session'])\n\n if event['request']['type'] == \"LaunchRequest\":\n return on_launch(event['request'], event['session'])\n elif event['request']['type'] == \"IntentRequest\":\n return on_intent(event['request'], event['session'])\n elif event['request']['type'] == \"SessionEndedRequest\":\n return on_session_ended(event['request'], event['session'])",
"def lambda_handler(event, context):\n print(\"event.session.application.applicationId=\" +\n event['session']['application']['applicationId'])\n\n \"\"\"\n Uncomment this if statement and populate with your skill's application ID to\n prevent someone else from configuring a skill that sends requests to this\n function.\n \"\"\"\n #if (event['session']['application']['applicationId'] != \"<APPLICATION_ID>\"):\n # raise ValueError(\"Invalid Application ID\")\n\n\n if event['request']['type'] == \"IntentRequest\":\n return on_intent(event['request'], event['session'])",
"def get_skill_enablement_status_v1(self, skill_id, stage, **kwargs):\n # type: (str, str, **Any) -> Union[ApiResponse, object, StandardizedError_f5106a89, BadRequestError_f854b05]\n operation_name = \"get_skill_enablement_status_v1\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'skill_id' is set\n if ('skill_id' not in params) or (params['skill_id'] is None):\n raise ValueError(\n \"Missing the required parameter `skill_id` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'stage' is set\n if ('stage' not in params) or (params['stage'] is None):\n raise ValueError(\n \"Missing the required parameter `stage` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v1/skills/{skillId}/stages/{stage}/enablement'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'skill_id' in params:\n path_params['skillId'] = params['skill_id']\n if 'stage' in params:\n path_params['stage'] = params['stage']\n\n query_params = [] # type: List\n\n header_params = [] # type: List\n\n body_params = None\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=None, status_code=204, message=\"No Content; Confirms that enablement resource exists for given skillId & stage.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=400, message=\"Server cannot process the request due to a client error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=401, message=\"The auth token is invalid/expired or doesn't have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=403, message=\"The operation being requested is not allowed.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=404, message=\"The resource being requested is not found.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=429, message=\"Exceed the permitted request limit. 
Throttling criteria includes total requests, per API, ClientId, and CustomerId.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=500, message=\"Internal Server Error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=503, message=\"Service Unavailable.\"))\n\n api_response = self.invoke(\n method=\"GET\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=None)\n\n if full_response:\n return api_response\n \n return None",
"def then_app_running_stage(context):\n result = context.result\n result | should.equal('Success').desc(\"Application is reachable in the Stage stage.\")",
"def lambda_handler(event, context):\n print(\"event.session.application.applicationId=\" +\n event['session']['application']['applicationId'])\n\n \"\"\"\n Uncomment this if statement and populate with your skill's application ID to\n prevent someone else from configuring a skill that sends requests to this\n function.\n \"\"\"\n # if (event['session']['application']['applicationId'] !=\n # \"amzn1.echo-sdk-ams.app.[unique-value-here]\"):\n # raise ValueError(\"Invalid Application ID\")\n\n if event['session']['new']:\n on_session_started({'requestId': event['request']['requestId']},\n event['session'])\n\n if event['request']['type'] == \"LaunchRequest\":\n return on_launch(event['request'], event['session'])\n elif event['request']['type'] == \"IntentRequest\":\n return on_intent(event['request'], event['session'])\n elif event['request']['type'] == \"SessionEndedRequest\":\n return on_session_ended(event['request'], event['session'])\n else:\n print (\"********************** Unknown Request\")",
"def lambda_handler(Event, Context):\n if 'StateMachineArn' in Event.keys():\n step_function_arn = Event['StateMachineArn']\n r = step_function_client.start_execution(\n stateMachineArn=step_function_arn,\n input=json.dumps({\"last_updated\": \"\"}))\n\n else:\n stepfunctions = [os.getenv(\"CHARGEBEEDOWNLOADARN\"), os.getenv(\"EXCHANGERATESDOWNLOADARN\")]\n\n for stepfunction in stepfunctions:\n step_function_arn = stepfunction\n r = step_function_client.start_execution(\n stateMachineArn=step_function_arn,\n input=json.dumps({\"last_updated\": \"\"}))",
"def lambda_handler(event, context):\n print(\"event.session.application.applicationId=\" +\n event['session']['application']['applicationId'])\n\n \"\"\"\n Uncomment this if statement and populate with your skill's application ID to\n prevent someone else from configuring a skill that sends requests to this\n function.\n \"\"\"\n # if (event['session']['application']['applicationId'] !=\n # \"amzn1.echo-sdk-ams.app.[unique-value-here]\"):\n # raise ValueError(\"Invalid Application ID\")\n\n if event['session']['new']:\n on_session_started({'requestId': event['request']['requestId']},\n event['session'])\n\n if event['request']['type'] == \"LaunchRequest\":\n return on_launch(event['request'], event['session'], state)\n elif event['request']['type'] == \"IntentRequest\":\n return on_intent(event['request'], event['session'], state)\n elif event['request']['type'] == \"SessionEndedRequest\":\n return on_session_ended(event['request'], event['session'])",
"def lambda_handler(event, context):\n print(\"event.session.application.applicationId=\" + event['session']['application']['applicationId'])\n\n \"\"\"\n Uncomment this if statement and populate with your skill's application ID to\n prevent someone else from configuring a skill that sends requests to this\n function.\n \"\"\"\n # if (event['session']['application']['applicationId'] !=\n # \"amzn1.echo-sdk-ams.app.[unique-value-here]\"):\n # raise ValueError(\"Invalid Application ID\")\n\n if event['session']['new']:\n on_session_started({'requestId': event['request']['requestId']}, event['session'])\n\n if event['request']['type'] == \"LaunchRequest\":\n return on_launch(event['request'], event['session'])\n elif event['request']['type'] == \"IntentRequest\":\n return on_intent(event['request'], event['session'])\n elif event['request']['type'] == \"SessionEndedRequest\":\n return on_session_ended(event['request'], event['session'])",
"def lambda_handler(event, context):\n print(\"event.session.application.applicationId=\" +\n event['session']['application']['applicationId'])\n\n \"\"\"\n Uncomment this if statement and populate with your skill's application ID to\n prevent someone else from configuring a skill that sends requests to this\n function.\n \"\"\"\n # if (event['session']['application']['applicationId'] !=\n # \"amzn1.echo-sdk-ams.app.[unique-value-here]\"):\n # raise ValueError(\"Invalid Application ID\")\n\n if event['session']['new']:\n on_session_started({'requestId': event['request']['requestId']},\n event['session'])\n\n if event['request']['type'] == \"LaunchRequest\":\n return on_intent(event['request'], event['session'])\n elif event['request']['type'] == \"IntentRequest\":\n return on_intent(event['request'], event['session'])\n elif event['request']['type'] == \"SessionEndedRequest\":\n return on_session_ended(event['request'], event['session'])",
"def skill(ctx: Context, public_id: PublicId):\n _eject_item(ctx, \"skill\", public_id)",
"def lambda_handler(event, context):\n print(\"event.session.application.applicationId=\" +\n event['session']['application']['applicationId'])\n\n \"\"\"\n Uncomment this if statement and populate with your skill's application ID to\n prevent someone else from configuring a skill that sends requests to this\n function.\n \"\"\"\n if (event['session']['application']['applicationId'] !=\n \"amzn1.ask.skill.xxxx\"):\n #Set Alexa Skill ID\n raise ValueError(\"Invalid Application ID\")\n\n if event['session']['new']:\n on_session_started({'requestId': event['request']['requestId']},\n event['session'])\n\n if event['request']['type'] == \"LaunchRequest\":\n return on_launch(event['request'], event['session'])\n elif event['request']['type'] == \"IntentRequest\":\n return on_intent(event['request'], event['session'])\n elif event['request']['type'] == \"SessionEndedRequest\":\n return on_session_ended(event['request'], event['session'])",
"def create_export_request_for_skill_v1(self, skill_id, stage, **kwargs):\n # type: (str, str, **Any) -> Union[ApiResponse, object, StandardizedError_f5106a89, BadRequestError_f854b05]\n operation_name = \"create_export_request_for_skill_v1\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'skill_id' is set\n if ('skill_id' not in params) or (params['skill_id'] is None):\n raise ValueError(\n \"Missing the required parameter `skill_id` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'stage' is set\n if ('stage' not in params) or (params['stage'] is None):\n raise ValueError(\n \"Missing the required parameter `stage` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v1/skills/{skillId}/stages/{stage}/exports'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'skill_id' in params:\n path_params['skillId'] = params['skill_id']\n if 'stage' in params:\n path_params['stage'] = params['stage']\n\n query_params = [] # type: List\n\n header_params = [] # type: List\n\n body_params = None\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=None, status_code=202, message=\"Accepted.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=400, message=\"Server cannot process the request due to a client error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=401, message=\"The auth token is invalid/expired or doesn't have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=404, message=\"The resource being requested is not found.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=409, message=\"The request could not be completed due to a conflict with the current state of the target resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=429, message=\"Exceeds the permitted request limit. 
Throttling criteria includes total requests, per API, ClientId, and CustomerId.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=500, message=\"Internal Server Error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=503, message=\"Service Unavailable.\"))\n\n api_response = self.invoke(\n method=\"POST\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=None)\n\n if full_response:\n return api_response\n \n return None",
"def delete_skill_enablement_v1(self, skill_id, stage, **kwargs):\n # type: (str, str, **Any) -> Union[ApiResponse, object, StandardizedError_f5106a89, BadRequestError_f854b05]\n operation_name = \"delete_skill_enablement_v1\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'skill_id' is set\n if ('skill_id' not in params) or (params['skill_id'] is None):\n raise ValueError(\n \"Missing the required parameter `skill_id` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'stage' is set\n if ('stage' not in params) or (params['stage'] is None):\n raise ValueError(\n \"Missing the required parameter `stage` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v1/skills/{skillId}/stages/{stage}/enablement'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'skill_id' in params:\n path_params['skillId'] = params['skill_id']\n if 'stage' in params:\n path_params['stage'] = params['stage']\n\n query_params = [] # type: List\n\n header_params = [] # type: List\n\n body_params = None\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=None, status_code=204, message=\"No Content; Confirms that enablement is successfully deleted.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=400, message=\"Server cannot process the request due to a client error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=401, message=\"The auth token is invalid/expired or doesn't have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=403, message=\"The operation being requested is not allowed.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=404, message=\"The resource being requested is not found.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=429, message=\"Exceed the permitted request limit. 
Throttling criteria includes total requests, per API, ClientId, and CustomerId.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=500, message=\"Internal Server Error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=503, message=\"Service Unavailable.\"))\n\n api_response = self.invoke(\n method=\"DELETE\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=None)\n\n if full_response:\n return api_response\n \n return None",
"def lambda_handler(event, context):\n print(\"event.session.application.applicationId=\" +\n event['session']['application']['applicationId'])\n\n \"\"\"\n Uncomment this if statement and populate with your skill's application ID to\n prevent someone else from configuring a skill that sends requests to this\n function.\n \"\"\"\n # if (event['session']['application']['applicationId'] !=\n # \"amzn1.echo-sdk-ams.app.[unique-value-here]\"):\n # raise ValueError(\"Invalid Application ID\")\n\n if event['session']['new']:\n on_session_started({'requestId': event['request']['requestId']},\n event['session'])\n\n if event['request']['type'] == \"LaunchRequest\":\n return on_launch(event['request'], event['session'])\n elif event['request']['type'] == \"IntentRequest\":\n return on_intent(event['request'], event['session'])\n elif event['request']['type'] == \"SessionEndedRequest\":\n return on_session_ended(event['request'], event['session'])",
"def lambda_handler(event, context):\n print(\"event.session.application.applicationId=\" +\n event['session']['application']['applicationId'])\n\n \"\"\"\n Uncomment this if statement and populate with your skill's application ID to\n prevent someone else from configuring a skill that sends requests to this\n function.\n \"\"\"\n # if (event['session']['application']['applicationId'] !=\n # \"amzn1.echo-sdk-ams.app.[unique-value-here]\"):\n # raise ValueError(\"Invalid Application ID\")\n\n if event['session']['new']:\n on_session_started({'requestId': event['request']['requestId']},\n event['session'])\n\n if event['request']['type'] == \"LaunchRequest\":\n return on_launch(event['request'], event['session'])\n elif event['request']['type'] == \"IntentRequest\":\n return on_intent(event['request'], event['session'])\n elif event['request']['type'] == \"SessionEndedRequest\":\n return on_session_ended(event['request'], event['session'])"
] | [
"0.5358969",
"0.5293235",
"0.528769",
"0.50172585",
"0.49989742",
"0.49976173",
"0.49679202",
"0.49613965",
"0.48948103",
"0.48303246",
"0.47997808",
"0.4771908",
"0.4771333",
"0.47675943",
"0.47169495",
"0.47111094",
"0.47069886",
"0.47030997",
"0.46804383",
"0.46728724",
"0.46634692",
"0.46537963",
"0.4650149",
"0.4648687",
"0.46477118",
"0.46411216",
"0.46391428",
"0.4615398",
"0.46142733",
"0.46142733"
] | 0.70120853 | 0 |
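A minimal usage sketch for the simulate_skill_v2 row below, assuming the public ask-smapi-sdk client builder and the v2 simulations models; the builder name, the model constructor arguments, and the placeholder credentials and skill id are assumptions and may differ across SDK versions.

# Hypothetical sketch (assumed ask-smapi-sdk names; verify against your installed version).
from ask_smapi_sdk import StandardSmapiClientBuilder
from ask_smapi_model.v2.skill.simulations.simulations_api_request import SimulationsApiRequest
from ask_smapi_model.v2.skill.simulations.input import Input
from ask_smapi_model.v2.skill.simulations.device import Device

# Build a SkillManagementServiceClient from LWA credentials (placeholders, not real values).
client = StandardSmapiClientBuilder(
    client_id="<lwa-client-id>",
    client_secret="<lwa-client-secret>",
    refresh_token="<lwa-refresh-token>").client()

# The request carries the utterance text a customer would say to Alexa.
request = SimulationsApiRequest(
    input=Input(content="open hello world"),
    device=Device(locale="en-US"))

# full_response=True returns the ApiResponse wrapper rather than just the body,
# exposing the Location header that points at the asynchronous simulation resource.
response = client.simulate_skill_v2(
    skill_id="amzn1.ask.skill.00000000-0000-0000-0000-000000000000",  # hypothetical skill id
    stage="development",
    simulations_api_request=request,
    full_response=True)
print(dict(response.headers).get("Location"))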
Simulate executing a skill with the given id against a given stage. This is an asynchronous API that simulates a skill execution in the Alexa ecosystem given an utterance text of what a customer would say to Alexa. A successful response will contain a header with the location of the simulation resource. In cases where requests to this API result in an error, the response will contain an error code and a description of the problem. The skill being simulated must belong to and be enabled by the user of this API. Concurrent requests per user are currently not supported. | def simulate_skill_v2(self, skill_id, stage, simulations_api_request, **kwargs):
# type: (str, str, SimulationsApiRequest_ae2e6503, **Any) -> Union[ApiResponse, object, SimulationsApiResponse_e4ad17d, BadRequestError_765e0ac6, Error_ea6c1a5a]
operation_name = "simulate_skill_v2"
params = locals()
for key, val in six.iteritems(params['kwargs']):
params[key] = val
del params['kwargs']
# verify the required parameter 'skill_id' is set
if ('skill_id' not in params) or (params['skill_id'] is None):
raise ValueError(
"Missing the required parameter `skill_id` when calling `" + operation_name + "`")
# verify the required parameter 'stage' is set
if ('stage' not in params) or (params['stage'] is None):
raise ValueError(
"Missing the required parameter `stage` when calling `" + operation_name + "`")
# verify the required parameter 'simulations_api_request' is set
if ('simulations_api_request' not in params) or (params['simulations_api_request'] is None):
raise ValueError(
"Missing the required parameter `simulations_api_request` when calling `" + operation_name + "`")
resource_path = '/v2/skills/{skillId}/stages/{stage}/simulations'
resource_path = resource_path.replace('{format}', 'json')
path_params = {} # type: Dict
if 'skill_id' in params:
path_params['skillId'] = params['skill_id']
if 'stage' in params:
path_params['stage'] = params['stage']
query_params = [] # type: List
header_params = [] # type: List
body_params = None
if 'simulations_api_request' in params:
body_params = params['simulations_api_request']
header_params.append(('Content-type', 'application/json'))
header_params.append(('User-Agent', self.user_agent))
# Response Type
full_response = False
if 'full_response' in params:
full_response = params['full_response']
# Authentication setting
access_token = self._lwa_service_client.get_access_token_from_refresh_token()
authorization_value = "Bearer " + access_token
header_params.append(('Authorization', authorization_value))
error_definitions = [] # type: List
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v2.skill.simulations.simulations_api_response.SimulationsApiResponse", status_code=200, message="Skill simulation has successfully began."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v2.bad_request_error.BadRequestError", status_code=400, message="Bad request due to invalid or missing data."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v2.error.Error", status_code=401, message="The auth token is invalid/expired or doesn't have access to the resource."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v2.bad_request_error.BadRequestError", status_code=403, message="API user does not have permission to call this API or is currently in a state that does not allow simulation of this skill. "))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v2.error.Error", status_code=404, message="The specified skill does not exist."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v2.error.Error", status_code=409, message="This requests conflicts with another one currently being processed. "))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v2.error.Error", status_code=429, message="API user has exceeded the permitted request rate."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v2.error.Error", status_code=500, message="Internal service error."))
error_definitions.append(ServiceClientResponse(response_type="ask_smapi_model.v2.error.Error", status_code=503, message="Service Unavailable."))
api_response = self.invoke(
method="POST",
endpoint=self._api_endpoint,
path=resource_path,
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
response_definitions=error_definitions,
response_type="ask_smapi_model.v2.skill.simulations.simulations_api_response.SimulationsApiResponse")
if full_response:
return api_response
return api_response.body | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def simulate_skill_v1(self, skill_id, simulations_api_request, **kwargs):\n # type: (str, SimulationsApiRequest_606eed02, **Any) -> Union[ApiResponse, object, Error_fbe913d9, BadRequestError_f854b05, SimulationsApiResponse_328955bc]\n operation_name = \"simulate_skill_v1\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'skill_id' is set\n if ('skill_id' not in params) or (params['skill_id'] is None):\n raise ValueError(\n \"Missing the required parameter `skill_id` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'simulations_api_request' is set\n if ('simulations_api_request' not in params) or (params['simulations_api_request'] is None):\n raise ValueError(\n \"Missing the required parameter `simulations_api_request` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v1/skills/{skillId}/simulations'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'skill_id' in params:\n path_params['skillId'] = params['skill_id']\n\n query_params = [] # type: List\n\n header_params = [] # type: List\n\n body_params = None\n if 'simulations_api_request' in params:\n body_params = params['simulations_api_request']\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.simulations.simulations_api_response.SimulationsApiResponse\", status_code=200, message=\"Skill simulation has successfully began.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=400, message=\"Bad request due to invalid or missing data.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=401, message=\"The auth token is invalid/expired or doesn't have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=403, message=\"API user does not have permission to call this API or is currently in a state that does not allow simulation of this skill. \"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=404, message=\"The specified skill does not exist.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=409, message=\"This requests conflicts with another one currently being processed. 
\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=429, message=\"API user has exceeded the permitted request rate.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=500, message=\"Internal service error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=503, message=\"Service Unavailable.\"))\n\n api_response = self.invoke(\n method=\"POST\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=\"ask_smapi_model.v1.skill.simulations.simulations_api_response.SimulationsApiResponse\")\n\n if full_response:\n return api_response\n return api_response.body",
"def invoke_skill_end_point_v2(self, skill_id, stage, **kwargs):\n # type: (str, str, **Any) -> Union[ApiResponse, object, BadRequestError_765e0ac6, InvocationsApiResponse_3d7e3234, Error_ea6c1a5a]\n operation_name = \"invoke_skill_end_point_v2\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'skill_id' is set\n if ('skill_id' not in params) or (params['skill_id'] is None):\n raise ValueError(\n \"Missing the required parameter `skill_id` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'stage' is set\n if ('stage' not in params) or (params['stage'] is None):\n raise ValueError(\n \"Missing the required parameter `stage` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v2/skills/{skillId}/stages/{stage}/invocations'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'skill_id' in params:\n path_params['skillId'] = params['skill_id']\n if 'stage' in params:\n path_params['stage'] = params['stage']\n\n query_params = [] # type: List\n\n header_params = [] # type: List\n\n body_params = None\n if 'invocations_api_request' in params:\n body_params = params['invocations_api_request']\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.skill.invocations.invocations_api_response.InvocationsApiResponse\", status_code=200, message=\"Skill was invoked.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.bad_request_error.BadRequestError\", status_code=400, message=\"Bad request due to invalid or missing data.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.error.Error\", status_code=401, message=\"The auth token is invalid/expired or doesn't have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.bad_request_error.BadRequestError\", status_code=403, message=\"API user does not have permission to call this API or is currently in a state that does not allow invocation of this skill. 
\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.error.Error\", status_code=404, message=\"The specified skill does not exist.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.error.Error\", status_code=429, message=\"API user has exceeded the permitted request rate.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.error.Error\", status_code=500, message=\"Internal service error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.error.Error\", status_code=503, message=\"Service Unavailable.\"))\n\n api_response = self.invoke(\n method=\"POST\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=\"ask_smapi_model.v2.skill.invocations.invocations_api_response.InvocationsApiResponse\")\n\n if full_response:\n return api_response\n return api_response.body",
"def start_beta_test_v1(self, skill_id, **kwargs):\n # type: (str, **Any) -> Union[ApiResponse, object, Error_fbe913d9, BadRequestError_f854b05]\n operation_name = \"start_beta_test_v1\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'skill_id' is set\n if ('skill_id' not in params) or (params['skill_id'] is None):\n raise ValueError(\n \"Missing the required parameter `skill_id` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v1/skills/{skillId}/betaTest/start'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'skill_id' in params:\n path_params['skillId'] = params['skill_id']\n\n query_params = [] # type: List\n\n header_params = [] # type: List\n\n body_params = None\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=None, status_code=202, message=\"Accept. Return a URL to track the resource in 'Location' header.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=400, message=\"Server cannot process the request due to a client error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=401, message=\"The auth token is invalid/expired or doesn't have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=404, message=\"The resource being requested is not found.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=409, message=\"The request could not be completed due to a conflict with the current state of the target resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=429, message=\"Exceed the permitted request limit. Throttling criteria includes total requests, per API, ClientId, and CustomerId.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=500, message=\"Internal Server Error.\"))\n\n api_response = self.invoke(\n method=\"POST\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=None)\n\n if full_response:\n return api_response\n \n return None",
"def get_skill_simulation_v2(self, skill_id, stage, simulation_id, **kwargs):\n # type: (str, str, str, **Any) -> Union[ApiResponse, object, SimulationsApiResponse_e4ad17d, BadRequestError_765e0ac6, Error_ea6c1a5a]\n operation_name = \"get_skill_simulation_v2\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'skill_id' is set\n if ('skill_id' not in params) or (params['skill_id'] is None):\n raise ValueError(\n \"Missing the required parameter `skill_id` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'stage' is set\n if ('stage' not in params) or (params['stage'] is None):\n raise ValueError(\n \"Missing the required parameter `stage` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'simulation_id' is set\n if ('simulation_id' not in params) or (params['simulation_id'] is None):\n raise ValueError(\n \"Missing the required parameter `simulation_id` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v2/skills/{skillId}/stages/{stage}/simulations/{simulationId}'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'skill_id' in params:\n path_params['skillId'] = params['skill_id']\n if 'stage' in params:\n path_params['stage'] = params['stage']\n if 'simulation_id' in params:\n path_params['simulationId'] = params['simulation_id']\n\n query_params = [] # type: List\n\n header_params = [] # type: List\n\n body_params = None\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.skill.simulations.simulations_api_response.SimulationsApiResponse\", status_code=200, message=\"Successfully retrieved skill simulation information.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.error.Error\", status_code=401, message=\"The auth token is invalid/expired or doesn't have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.bad_request_error.BadRequestError\", status_code=403, message=\"API user does not have permission or is currently in a state that does not allow calls to this API. \"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.error.Error\", status_code=404, message=\"The specified skill or simulation does not exist. The error response will contain a description that indicates the specific resource type that was not found. 
\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.error.Error\", status_code=429, message=\"API user has exceeded the permitted request rate.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.error.Error\", status_code=500, message=\"Internal service error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v2.error.Error\", status_code=503, message=\"Service Unavailable.\"))\n\n api_response = self.invoke(\n method=\"GET\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=\"ask_smapi_model.v2.skill.simulations.simulations_api_response.SimulationsApiResponse\")\n\n if full_response:\n return api_response\n return api_response.body",
"def run(self,identity,params=None, headers=None):\n path = self._sub_url_params('/scenario_simulators/:identity/actions/run', {\n \n 'identity': identity,\n })\n \n if params is not None:\n params = {'data': params}\n response = self._perform_request('POST', path, params, headers,\n retry_failures=False)\n return self._resource_for(response)",
"def invoke_skill_v1(self, skill_id, invoke_skill_request, **kwargs):\n # type: (str, InvokeSkillRequest_8cf8aff9, **Any) -> Union[ApiResponse, object, InvokeSkillResponse_6f32f451, StandardizedError_f5106a89, BadRequestError_f854b05]\n operation_name = \"invoke_skill_v1\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'skill_id' is set\n if ('skill_id' not in params) or (params['skill_id'] is None):\n raise ValueError(\n \"Missing the required parameter `skill_id` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'invoke_skill_request' is set\n if ('invoke_skill_request' not in params) or (params['invoke_skill_request'] is None):\n raise ValueError(\n \"Missing the required parameter `invoke_skill_request` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v1/skills/{skillId}/invocations'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'skill_id' in params:\n path_params['skillId'] = params['skill_id']\n\n query_params = [] # type: List\n\n header_params = [] # type: List\n\n body_params = None\n if 'invoke_skill_request' in params:\n body_params = params['invoke_skill_request']\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.invocations.invoke_skill_response.InvokeSkillResponse\", status_code=200, message=\"Skill was invoked.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=400, message=\"Bad request due to invalid or missing data.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=401, message=\"The auth token is invalid/expired or doesn't have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=403, message=\"API user does not have permission to call this API or is currently in a state that does not allow invocation of this skill. 
\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=404, message=\"The specified skill does not exist.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=429, message=\"API user has exceeded the permitted request rate.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=503, message=\"Service Unavailable.\"))\n\n api_response = self.invoke(\n method=\"POST\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=\"ask_smapi_model.v1.skill.invocations.invoke_skill_response.InvokeSkillResponse\")\n\n if full_response:\n return api_response\n return api_response.body",
"def get_skill_simulation_v1(self, skill_id, simulation_id, **kwargs):\n # type: (str, str, **Any) -> Union[ApiResponse, object, Error_fbe913d9, BadRequestError_f854b05, SimulationsApiResponse_328955bc]\n operation_name = \"get_skill_simulation_v1\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'skill_id' is set\n if ('skill_id' not in params) or (params['skill_id'] is None):\n raise ValueError(\n \"Missing the required parameter `skill_id` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'simulation_id' is set\n if ('simulation_id' not in params) or (params['simulation_id'] is None):\n raise ValueError(\n \"Missing the required parameter `simulation_id` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v1/skills/{skillId}/simulations/{simulationId}'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'skill_id' in params:\n path_params['skillId'] = params['skill_id']\n if 'simulation_id' in params:\n path_params['simulationId'] = params['simulation_id']\n\n query_params = [] # type: List\n\n header_params = [] # type: List\n\n body_params = None\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.simulations.simulations_api_response.SimulationsApiResponse\", status_code=200, message=\"Successfully retrieved skill simulation information.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=401, message=\"The auth token is invalid/expired or doesn't have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=403, message=\"API user does not have permission or is currently in a state that does not allow calls to this API. \"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=404, message=\"The specified skill or simulation does not exist. The error response will contain a description that indicates the specific resource type that was not found. 
\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=429, message=\"API user has exceeded the permitted request rate.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=500, message=\"Internal service error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=503, message=\"Service Unavailable.\"))\n\n api_response = self.invoke(\n method=\"GET\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=\"ask_smapi_model.v1.skill.simulations.simulations_api_response.SimulationsApiResponse\")\n\n if full_response:\n return api_response\n return api_response.body",
"def test_skills(\n self, mock_get_ai_details, mock_get_ai, mock_get_categories\n ):\n\n mock_get_ai.return_value = self.ai\n mock_get_ai_details.return_value = self.ai_details\n\n mock_get_ai_details.return_value['skills'] = [\n {'name': 'bot 1'},\n {'name': 'bot 2'},\n {'name': 'bot 3'},\n {'name': 'bot 4'},\n {'name': 'bot 5'},\n {'name': 'bot 6'},\n ]\n\n response = self.client.get(reverse(\n 'studio:edit_bot',\n kwargs={'aiid': self.ai['aiid']}\n ))\n\n self.assertContains(response, 'bot 1')\n self.assertContains(response, 'bot 2')\n self.assertContains(response, 'bot 3')\n self.assertContains(response, 'bot 4')\n self.assertContains(response, 'bot 5')\n self.assertNotContains(response, 'bot 6')\n self.assertNotContains(response, 'Speed up your bot building process by '\n 'starting with one of our Templates from the store.')",
"def get_isp_associated_skills_v1(self, product_id, stage, **kwargs):\n # type: (str, str, **Any) -> Union[ApiResponse, object, AssociatedSkillResponse_12067635, Error_fbe913d9, BadRequestError_f854b05]\n operation_name = \"get_isp_associated_skills_v1\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'product_id' is set\n if ('product_id' not in params) or (params['product_id'] is None):\n raise ValueError(\n \"Missing the required parameter `product_id` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'stage' is set\n if ('stage' not in params) or (params['stage'] is None):\n raise ValueError(\n \"Missing the required parameter `stage` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v1/inSkillProducts/{productId}/stages/{stage}/skills'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'product_id' in params:\n path_params['productId'] = params['product_id']\n if 'stage' in params:\n path_params['stage'] = params['stage']\n\n query_params = [] # type: List\n if 'next_token' in params:\n query_params.append(('nextToken', params['next_token']))\n if 'max_results' in params:\n query_params.append(('maxResults', params['max_results']))\n\n header_params = [] # type: List\n\n body_params = None\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.isp.associated_skill_response.AssociatedSkillResponse\", status_code=200, message=\"Returns skills associated with the in-skill product.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=400, message=\"Bad request. Returned when a required parameter is not present, badly formatted. \"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=401, message=\"The auth token is invalid/expired or doesn't have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=404, message=\"Requested resource not found.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=429, message=\"Too many requests received.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=500, message=\"Internal Server Error\"))\n\n api_response = self.invoke(\n method=\"GET\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=\"ask_smapi_model.v1.isp.associated_skill_response.AssociatedSkillResponse\")\n\n if full_response:\n return api_response\n return api_response.body",
"def test_get_skill_name(self):\n result = self.runner.invoke(\n cli,\n [*CLI_LOG_OPTION, \"config\", \"get\", \"skills.dummy.name\"],\n standalone_mode=False,\n )\n assert result.exit_code == 0\n assert result.output == \"dummy\\n\"",
"def run(self, aws_credentials_secret: str = \"AWS_CREDENTIALS\"):\n\n ## get AWS credentials\n aws_credentials = Secret(aws_credentials_secret).get()\n aws_access_key = aws_credentials[\"ACCESS_KEY\"]\n aws_secret_access_key = aws_credentials[\"SECRET_ACCESS_KEY\"]\n step_client = boto3.client(\n \"stepfunctions\",\n aws_access_key_id=aws_access_key,\n aws_secret_access_key=aws_secret_access_key,\n )\n\n response = step_client.start_execution(\n stateMachineArn=self.state_machine_arn,\n name=self.execution_name,\n input=self.execution_input,\n )\n\n return response",
"def put(self, id):\n data = request.json\n update_scenario(id, data)\n return None, 204",
"def end_beta_test_v1(self, skill_id, **kwargs):\n # type: (str, **Any) -> Union[ApiResponse, object, Error_fbe913d9, BadRequestError_f854b05]\n operation_name = \"end_beta_test_v1\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'skill_id' is set\n if ('skill_id' not in params) or (params['skill_id'] is None):\n raise ValueError(\n \"Missing the required parameter `skill_id` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v1/skills/{skillId}/betaTest/end'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'skill_id' in params:\n path_params['skillId'] = params['skill_id']\n\n query_params = [] # type: List\n\n header_params = [] # type: List\n\n body_params = None\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=None, status_code=202, message=\"Accept. Return a URL to track the resource in 'Location' header.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=400, message=\"Server cannot process the request due to a client error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=401, message=\"The auth token is invalid/expired or doesn't have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=404, message=\"The resource being requested is not found.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=409, message=\"The request could not be completed due to a conflict with the current state of the target resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=429, message=\"Exceed the permitted request limit. Throttling criteria includes total requests, per API, ClientId, and CustomerId.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=500, message=\"Internal Server Error.\"))\n\n api_response = self.invoke(\n method=\"POST\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=None)\n\n if full_response:\n return api_response\n \n return None",
"def simulate(scene_name):\n fixtures = simulate_fixtures()\n scene = build_scene_from_fixtures(fixtures, scene_name)\n write_to_json(scene, scene_name)",
"def test_dispatch_launch(self):\n @self.skill.launch\n def sample_func():\n \"\"\"Decorated function.\"\"\"\n self.skill.response.sessionAttributes['run'] = True\n self.skill.request.request.type = 'LaunchRequest'\n self.skill.dispatch()\n self.assertTrue(self.skill.response.sessionAttributes['run'])",
"def skill(ctx: Context, public_id: PublicId):\n _eject_item(ctx, \"skill\", public_id)",
"def test_dispatch_intent(self):\n @self.skill.intent('test_intent')\n def sample_func():\n \"\"\"Decorated function.\"\"\"\n self.skill.response.sessionAttributes['run'] = True\n self.skill.request.request.type = 'IntentRequest'\n self.skill.request.request.intent = interface.Intent()\n self.skill.request.request.intent.name = 'test_intent'\n self.skill.dispatch()\n self.assertTrue(self.skill.response.sessionAttributes['run'])",
"def update_beta_test_v1(self, skill_id, **kwargs):\n # type: (str, **Any) -> Union[ApiResponse, object, Error_fbe913d9, BadRequestError_f854b05]\n operation_name = \"update_beta_test_v1\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'skill_id' is set\n if ('skill_id' not in params) or (params['skill_id'] is None):\n raise ValueError(\n \"Missing the required parameter `skill_id` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v1/skills/{skillId}/betaTest'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'skill_id' in params:\n path_params['skillId'] = params['skill_id']\n\n query_params = [] # type: List\n\n header_params = [] # type: List\n\n body_params = None\n if 'create_test_body' in params:\n body_params = params['create_test_body']\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=None, status_code=204, message=\"Success. No content.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=400, message=\"Server cannot process the request due to a client error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=401, message=\"The auth token is invalid/expired or doesn't have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=404, message=\"The resource being requested is not found.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=409, message=\"Thrown if user tries to request a new simulation while the old simulation is in progress.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=429, message=\"Exceed the permitted request limit. Throttling criteria includes total requests, per API, ClientId, and CustomerId.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.error.Error\", status_code=500, message=\"Internal Server Error.\"))\n\n api_response = self.invoke(\n method=\"PUT\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=None)\n\n if full_response:\n return api_response\n \n return None",
"def set_skill_enablement_v1(self, skill_id, stage, **kwargs):\n # type: (str, str, **Any) -> Union[ApiResponse, object, StandardizedError_f5106a89, BadRequestError_f854b05]\n operation_name = \"set_skill_enablement_v1\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'skill_id' is set\n if ('skill_id' not in params) or (params['skill_id'] is None):\n raise ValueError(\n \"Missing the required parameter `skill_id` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'stage' is set\n if ('stage' not in params) or (params['stage'] is None):\n raise ValueError(\n \"Missing the required parameter `stage` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v1/skills/{skillId}/stages/{stage}/enablement'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'skill_id' in params:\n path_params['skillId'] = params['skill_id']\n if 'stage' in params:\n path_params['stage'] = params['stage']\n\n query_params = [] # type: List\n\n header_params = [] # type: List\n\n body_params = None\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=None, status_code=204, message=\"No Content; Confirms that enablement is successfully created/updated.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=400, message=\"Server cannot process the request due to a client error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=401, message=\"The auth token is invalid/expired or doesn't have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=403, message=\"The operation being requested is not allowed.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=404, message=\"The resource being requested is not found.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=409, message=\"The request could not be completed due to a conflict with the current state of the target resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=429, message=\"Exceed the permitted request limit. 
Throttling criteria includes total requests, per API, ClientId, and CustomerId.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=500, message=\"Internal Server Error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=503, message=\"Service Unavailable.\"))\n\n api_response = self.invoke(\n method=\"PUT\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=None)\n\n if full_response:\n return api_response\n \n return None",
"def on_intent(intent_request, session):\n\n print(\"on_intent requestId=\" + intent_request['requestId'] +\n \", sessionId=\" + session['sessionId'])\n\n intent = intent_request['intent']\n intent_name = intent_request['intent']['name']\n\n # Dispatch to your skill's intent handlers\n if intent_name == \"<YOUR INTENT NAME HERE>\":\n # Update the wordsmith_data variable with your data. Use key, value\n # pairs where the key is the column name in Wordsmith and the value is\n # the value contained in that column\n wordsmith_data = { 'column1': 'value1', 'column2': 'value2' }\n narrative = wordsmith.generate(WORDSMITH_API_KEY, WORDSMITH_PROJECT_SLUG, WORDSMITH_TEMPLATE_SLUG, wordsmith_data)\n if 'errors' not in narrative:\n return build_response(session.get('attributes', {}), build_speechlet_response('Wordsmith Generated Response', narrative['data']['content'],\n '<REPROMPT TEXT HERE>', True))\n else:\n if not isinstance(narrative['errors'], list) :\n return build_response(session.get('attributes', {}), build_speechlet_response('Wordsmith Generation Error', 'Wordsmith reported the following error: {}'.format(narrative['errors']['detail']),\n '<REPROMPT TEXT HERE>', True))\n else:\n details = ', '.join([e['details'] for e in narrative['errors']])\n return build_response(session.get('attributes', {}), build_speechlet_response('Wordsmith Generation Error', 'Wordsmith reported the following error: {}'.format(details),\n '<REPROMPT TEXT HERE>', True))\n elif intent_name == \"AMAZON.HelpIntent\":\n return get_welcome_response()\n elif intent_name == \"AMAZON.CancelIntent\" or intent_name == \"AMAZON.StopIntent\":\n return handle_session_end_request()\n else:\n raise ValueError(\"Invalid intent\")",
"def handler(event, context):\n\n \"\"\"\n Uncomment this if statement and populate with your skill's application ID to\n prevent someone else from configuring a skill that sends requests to this\n function.\n \"\"\"\n\n # if (event['session']['application']['applicationId'] !=\n # \"amzn1.echo-sdk-ams.app.[unique-value-here]\"):\n # raise ValueError(\"Invalid Application ID\")\n\n if event['session']['new']:\n on_session_started({'requestId': event['request']['requestId']},\n event['session'])\n\n if event['request']['type'] == \"LaunchRequest\":\n return on_launch(event['request'], event['session'])\n elif event['request']['type'] == \"IntentRequest\":\n return on_intent(event['request'], event['session'])\n elif event['request']['type'] == \"SessionEndedRequest\":\n return on_session_ended(event['request'], event['session'])",
"def test_single_skill_request(self):\n self._add_aggregates()\n actions.login(ADMIN_EMAIL)\n get_url = '%s?%s' % (self.URL, urllib.urlencode({\n 'ids': [self.skill_ids[0]]}, True))\n\n response = self.get(get_url)\n self.assertEqual(200, response.status_int)\n payload = transforms.loads(response.body)['payload']\n\n expected_header = ['Date', str(self.skill_ids[0])]\n expected_data = [[self.day1, 1], [self.day2, 2]]\n result = transforms.loads(payload)\n self.assertEqual(expected_header, result['column_headers'])\n self.assertEqual(len(expected_data), len(result['data']))\n for row in expected_data:\n self.assertIn(row, result['data'])",
"def alexa_skill_id(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"alexa_skill_id\")",
"def create_export_request_for_skill_v1(self, skill_id, stage, **kwargs):\n # type: (str, str, **Any) -> Union[ApiResponse, object, StandardizedError_f5106a89, BadRequestError_f854b05]\n operation_name = \"create_export_request_for_skill_v1\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'skill_id' is set\n if ('skill_id' not in params) or (params['skill_id'] is None):\n raise ValueError(\n \"Missing the required parameter `skill_id` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'stage' is set\n if ('stage' not in params) or (params['stage'] is None):\n raise ValueError(\n \"Missing the required parameter `stage` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v1/skills/{skillId}/stages/{stage}/exports'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'skill_id' in params:\n path_params['skillId'] = params['skill_id']\n if 'stage' in params:\n path_params['stage'] = params['stage']\n\n query_params = [] # type: List\n\n header_params = [] # type: List\n\n body_params = None\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=None, status_code=202, message=\"Accepted.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=400, message=\"Server cannot process the request due to a client error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=401, message=\"The auth token is invalid/expired or doesn't have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=404, message=\"The resource being requested is not found.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=409, message=\"The request could not be completed due to a conflict with the current state of the target resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=429, message=\"Exceeds the permitted request limit. 
Throttling criteria includes total requests, per API, ClientId, and CustomerId.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=500, message=\"Internal Server Error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=503, message=\"Service Unavailable.\"))\n\n api_response = self.invoke(\n method=\"POST\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=None)\n\n if full_response:\n return api_response\n \n return None",
"def run_sample(self):\n # there will be validation failures for sample data\n self.validate_req(ignore_failure=True)\n runner_fn = self.model_runner.execute_model_for_sample_data\n return self.do_handle_request(runner_fn)",
"def on_launch(launch_request, session):\n\n print(\"on_launch requestId=\" + launch_request['requestId'] +\n \", sessionId=\" + session['sessionId'])\n # Dispatch to your skill's launch\n return get_welcome_response(session)",
"def on_launch(launch_request, session):\n print(\"on_launch requestId=\" + launch_request['requestId'] +\n \", sessionId=\" + session['sessionId'])\n # Dispatch to your skill's launch\n return get_welcome_response()",
"def on_launch(launch_request, session):\n print(\"on_launch requestId=\" + launch_request['requestId'] +\n \", sessionId=\" + session['sessionId'])\n # Dispatch to your skill's launch\n return get_welcome_response()",
"def on_launch(launch_request, session):\n print(\"on_launch requestId=\" + launch_request['requestId'] + \", sessionId=\" + session['sessionId'])\n \n # Dispatch to your skill's launch\n return get_welcome_response(session)",
"def on_launch(launch_request, session):\r\n print(\"on_launch requestId=\" + launch_request['requestId'] +\r\n \", sessionId=\" + session['sessionId'])\r\n # Dispatch to your skill's launch\r\n return get_welcome_response()"
] | [
"0.62365896",
"0.568461",
"0.554595",
"0.5485227",
"0.540054",
"0.5173351",
"0.5050576",
"0.50475633",
"0.4991332",
"0.49681288",
"0.49511895",
"0.4949384",
"0.4929767",
"0.48983842",
"0.48752064",
"0.4865798",
"0.48548737",
"0.4825328",
"0.48094267",
"0.4808317",
"0.48056155",
"0.47995868",
"0.47939706",
"0.47699893",
"0.476321",
"0.47561085",
"0.47527394",
"0.47527394",
"0.47465435",
"0.47411633"
] | 0.7121819 | 0 |
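The SMAPI negatives above all share one generated service-client shape: validate required parameters, build the resource path, attach an LWA bearer token, register per-status-code response definitions, and dispatch through self.invoke. A minimal usage sketch of that pattern follows; `smapi_client` and both IDs are hypothetical placeholders, not values taken from this dataset:

```python
# Hypothetical call against the generated client pattern shown above.
# `smapi_client` is assumed to be an already-constructed service client whose
# _lwa_service_client can mint an access token from a stored refresh token.
response = smapi_client.get_skill_simulation_v1(
    skill_id="amzn1.ask.skill.00000000-0000-0000-0000-000000000000",  # placeholder
    simulation_id="00000000-0000-0000-0000-000000000000",             # placeholder
    full_response=True,  # return the full ApiResponse instead of just its body
)
print(response.headers)  # the ApiResponse wrapper exposes headers and status
print(response.body)     # deserialized SimulationsApiResponse model
```

With the default `full_response=False`, these methods return `api_response.body` directly (or `None` for 202/204-style operations), as the snippets above show.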
Brute force, O(N^3). square_matrix is a matrix of 0s and 1s, where 0 means black and 1 means white; return the maximum black sub-square as a tuple (start_r, start_c, size) | def maximum_sub_square(square_matrix):
if not square_matrix:
return (0, 0, 0)
n = len(square_matrix)
start_r, start_c, size = 0, 0, 0
for i in range(n):
        # if no square larger than the current best can fit starting at row i, stop searching
if i + size >= n:
break
        # O(n^2): find the largest black square whose top edge is on row i
new_c, new_size = get_max_black_square(square_matrix, i, size)
if new_size > size:
start_r = i
start_c = new_c
size = new_size
return (start_r, start_c, size) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def find_largest_square(serial):\n max_size = 1\n max_square = None\n max_power = -float('inf')\n\n # Precompute all single cell powers\n powers = []\n for y in range(300):\n powers.append([])\n for x in range(300):\n powers[y].append(cell_power(x+1, y+1, serial))\n\n # Memoize the total powers of squares of previous steps\n previous_power = copy.deepcopy(powers)\n\n for size in range(1, 300):\n x = y = 1\n while x + size <= 300:\n while y + size <= 300:\n power = previous_power[y-1][x-1]\n if size != 1:\n # Add the new row/column\n for i in range(x, x + size):\n power += powers[y+size-2][i-1]\n # Do not add the corner twice\n for j in range(y, y + size - 1):\n power += powers[j-1][x+size-2]\n # Update the map\n previous_power[y-1][x-1] = power\n\n if power > max_power:\n max_power = power\n max_square = (x, y)\n max_size = size\n y += 1\n x += 1\n y = 1\n\n return max_square, max_size",
"def largestSquare(arr):\n c = arr # Cache for storing computations\n for i in c:\n print(i)\n # Won't touch arr[0] or arr[n][0] because they can't be bottom right corners\n for row in range(1, len(arr)):\n for col in range(1, len(arr[0])):\n if arr[row][col] > 0: # 0s can't make squares\n c[row][col] = min(c[row-1][col-1], c[row][col-1], c[row-1][col]) + arr[row][col]\n # Minimum of surrounding squares + current square = maximum size square\n print(\"-\" *20)\n for i in c:\n print(i)\n return max([max(i) for i in c])",
"def find_chessboard_squares(image, min_square_dim):\n\n return compute_chessboard_squares(find_chessboard(image, min_square_dim))",
"def l2_square_from_inner_product(matrix):\n return np.diag(matrix)",
"def square(i, j):\n return map(sq_start, [i, j, i + 1, j + 1])",
"def python_square_matrix(matrix):\n\n transposed_matrix = np.zeros([matrix.shape[0],matrix.shape[0]])\n start = time.time()\n # for i in range(matrix.shape[0]):\n # for j in range(matrix.shape[0]):\n # transposed_matrix[i,j] = matrix[j,i]\n transposed_matrix = np.transpose(matrix)\n end = time.time()-start\n\n #Testing\n if not(np.allclose(transposed_matrix,np.transpose(matrix))):\n print(transposed_matrix)\n\n # print('python transpose time: %.2E' % end)\n return [transposed_matrix, end]",
"def test_large_square(self):\n for n in [10, 20, 30, 40, 50]:\n A = np.arange(n*n)\n A = np.reshape(A, (n, n))\n x = Variable(n, n)\n p = Problem(Minimize(at.square(x[0, 0])),\n [x >= A])\n result = p.solve()\n self.assertAlmostEqual(result, 0)",
"def create(matrix):\n limit_y = len(matrix)\n limit_x = len(matrix[0])\n\n for y in range(1, limit_y):\n bit.create(matrix[y])\n\n for x in range(1, limit_x):\n for y in range(1, limit_y):\n k = y + (y & -y)\n if k < limit_y:\n matrix[k][x] += matrix[y][x]",
"def make_magic_square(N): # part a\n if N % 2 == 0:\n print('N must be odd.')\n my_magic_square = np.zeros((N, N))\n i = 0\n j = np.ceil(N / 2.).astype(int)\n n = 1\n while n <= N**2:\n my_magic_square[i, j] = n\n n += 1\n i_next =\n j_next =\n if my_magic_square[i_next, j_next] > 0:\n i =\n else:\n i =\n j =\n return my_magic_square",
"def max_rectangle_in_sparse_matrix(matrix):\n\t# keeps track of the previous line heights\n\tprev_line_histogram = None\n\n\t# holds the best result found so far\n\tmax_area = 0\n\n\tfor i, row in enumerate(matrix):\n\t\tline_histogram = []\n\t\tfor j, value in enumerate(row):\n\n\t\t\tif bool(value):\n\t\t\t\tprev_line_height = prev_line_histogram[j] if prev_line_histogram else 0\n\t\t\t\thistogram_height = prev_line_height + 1\n\t\t\telse:\n\t\t\t\thistogram_height = 0\n\n\t\t\tline_histogram.append(histogram_height)\n\n\t\tmax_area = max(max_area, max_area_under_histogram(line_histogram))\n\t\tprev_line_histogram = line_histogram\n\n\treturn max_area",
"def max_submatrix(matrix):\n\tmax_sum = 0\n\n\tfor i in xrange(len(matrix)):\n\t\trunning_sum = [0] * len(matrix[0])\n\t\tfor j in xrange(i, len(matrix)):\n\t\t\t# compute the running sum\n\t\t\tfor k in xrange(len(matrix[0])):\n\t\t\t\trunning_sum[k] += matrix[j][k]\n\n\t\t\tmax_sum = max(max_subarray(running_sum), max_sum)\n\n\treturn max_sum",
"def weight_matrix(board, color):\n if next_player(board, color) is None:\n return count_colors(board, color) * 2 ** 24\n s = 0\n other = cinv(color)\n for i, square in enumerate(board):\n if square == color:\n s += SQUARE_WEIGHTS[i]\n elif square == other:\n s -= SQUARE_WEIGHTS[i]\n return s",
"def compute_largest_diagonal2_product(grid):\n max_product = 0\n for row in range(len(grid) - 1 , 2 , -1):\n for column in range(len(grid) - 3):\n current_product = 1\n for j in range(4):\n current_product *= grid[row - j][column + j]\n if current_product > max_product:\n max_product = current_product\n\n if current_product == 70600674:\n print(row , column)\n return max_product",
"def total_power_square(x, y, serial, size):\n result = 0\n for i in range(x, x + size):\n for j in range(y, y + size):\n result += cell_power(i, j, serial)\n return result",
"def phantom_squares(n_points,S):\n \n #Rescaling according to image size \n S[:,0] = S[:,0]*n_points/2\n S[:,1] = S[:,1]*n_points/2\n S[:,2] = S[:,2]*n_points/2\n S[:,3] = S[:,3]*math.pi/180\n \n x,y = np.meshgrid(np.arange(0,n_points)-n_points//2 ,np.arange(0,n_points)-n_points//2 ) \n nrow,ncol = S.shape\n phantom1 = np.zeros((y.shape[0], y.shape[1], nrow)) \n\n for k in range(nrow): #itero sui quadrati\n x_new = x - S[k,0]\n y_new = y - S[k,1]\n\n u = abs(x_new*math.cos(S[k,3])+y_new*math.sin(S[k,3]))\n v = abs(-x_new*math.sin(S[k,3])+y_new*math.cos(S[k,3]))\n\n cond = np.maximum(u,v)\n\n for i in range(x.shape[0]):\n for j in range(x.shape[1]):\n if (cond[i,j] < S[k,2]/2):\n phantom1[i,j,k] = S[k,4]; # gray scale\n else:\n phantom1[i,j,k] = 0.0;\n #endif\n #endfor\n #endfor\n #endfor\n\n phantom1 = phantom1.sum(axis=2)\n phantom = np.flipud(phantom1)\n return phantom",
"def find_empty_squares(board):\n for i in range(len(board)):\n for j in range(len(board[0])):\n if board[i][j] == 0:\n return (i,j) #row , column\n\n #if there are no blank squres\n return None",
"def test(matrix, n, m):\r\n t_matrix = zip(*matrix)\r\n for i in range(0, n):\r\n for j in range(0, m):\r\n cur = matrix[i][j]\r\n max_row = max(matrix[i])\r\n max_col = max(t_matrix[j])\r\n if cur < max_row and cur < max_col:\r\n return 'NO'\r\n return 'YES'",
"def is_square(matrix):\n return is_matrix(matrix) and matrix.shape[0] == matrix.shape[1]",
"def maximalRectangle(self, matrix: List[List[str]]) -> int:\n if not matrix or not matrix[0]:\n return 0\n n, m = len(matrix), len(matrix[0])\n columns_height = [0] * m\n res = 0\n for i in range(n):\n \n for j in range(m):\n if matrix[i][j] == '1':\n columns_height[j] += 1\n else:\n columns_height[j] = 0\n # then do #84 for each round\n res = max(res, self.largestRectangleArea(columns_height))\n \n return res",
"def inner_product_to_L2_square(matrix):\n\n length = matrix.shape[0]\n squared_norm = np.reshape(np.diag(matrix), (length, 1))\n\n return squared_norm + np.transpose(squared_norm) - 2 * matrix",
"def is_perfect_square():",
"def maxit(board):\n maxval = -2\n\n row_index = None\n col_index = None\n # if terminal board, terminate the function.\n if terminal(board) == True:\n result = utility(board)\n return (result, 0, 0) \n # for each possible move, calculate its utility, saving the maximum.\n for i in range(0, 3):\n for j in range(0, 3):\n if board[i][j] == EMPTY:\n board[i][j] = X\n (m, mini, minj) = minit(board)\n if m > maxval:\n maxval=m\n row_index=i\n col_index=j\n board[i][j] = EMPTY\n return (maxval, row_index, col_index)",
"def score_cell(i,j):\r\n match = 1\r\n mismatch = -1\r\n ins_del = -1\r\n \r\n up = matrix[i-1][j]+ ins_del\r\n left = matrix[i][j-1]+ ins_del\r\n \r\n if self.s1[i-1] == self.s2[j-1]:\r\n diag = matrix[i-1][j-1] + match\r\n else:\r\n diag = matrix[i-1][j-1] + mismatch\r\n \r\n return max(up, left, diag)",
"def possible(matrix: List[List[int]], x: int, y: int, n:int) -> bool:\n\n # Check for problem in row\n for i in range(0, 9):\n if matrix[x][i] == n:\n return False\n\n # Check for problem in column\n for j in range(0, 9):\n if matrix[j][y] == n:\n return False\n \n # Initial indexes for inner square\n x0 = (x // 3) * 3\n y0 = (y // 3) * 3\n\n # Check for problem in inner square\n for i in range(0, 3):\n for j in range(0, 3):\n if matrix[x0 + i][y0 + j] == n:\n return False\n \n return True",
"def score_cell(i,j):\r\n match = 3\r\n mismatch = -3\r\n ins_del = -2\r\n \r\n up = matrix[i-1][j]+ ins_del\r\n left = matrix[i][j-1]+ ins_del\r\n \r\n if self.s1[i-1] == self.s2[j-1]:\r\n diag = matrix[i-1][j-1] + match\r\n else:\r\n diag = matrix[i-1][j-1] + mismatch\r\n \r\n return max(up, left, diag)",
"def fn(i, j):\n ans = 1\n for ii, jj in (i-1, j), (i, j-1), (i, j+1), (i+1, j): \n if 0 <= ii < m and 0 <= jj < n and matrix[i][j] < matrix[ii][jj]: \n ans = max(ans, 1 + fn(ii, jj))\n return ans",
"def get_cross_size_grid(n, m, grid):\n grid = [[int(c == '#') for c in row] for row in grid]\n acc = [[[0] * 4 for _ in range(m)] for _ in range(n)]\n for i in range(n):\n acc[i][0][L] = grid[i][0]\n acc[i][-1][R] = grid[i][-1]\n for j in range(1, m):\n val = grid[i][j]\n acc[i][j][L] = acc[i][j-1][L] + val if val else 0\n val = grid[i][-j-1]\n acc[i][-j-1][R] = acc[i][-j][R] + val if val else 0\n for j in range(m):\n acc[0][j][T] = grid[0][j]\n acc[-1][j][B] = grid[-1][j]\n for i in range(1, n):\n val = grid[i][j]\n acc[i][j][T] = acc[i-1][j][T] + val if val else 0\n val = grid[-i-1][j]\n acc[-i-1][j][B] = acc[-i][j][B] + val if val else 0\n\n for i in range(n):\n for j in range(m):\n grid[i][j] = min(acc[i][j])\n return grid",
"def fn(i, j):\n if grid[i][j] <= 0: return 0\n grid[i][j] *= -1 # mark as visited \n ans = 0\n for ii, jj in (i-1, j), (i, j-1), (i, j+1), (i+1, j): \n if 0 <= ii < m and 0 <= jj < n: \n ans = max(ans, fn(ii, jj) - grid[i][j])\n grid[i][j] *= -1 # backtracking \n return ans",
"def make_square(x, size):\n return [ [x, -size/2, size/2],\n\t\t\t [x, size/2, size/2],\n [x, size/2, -size/2],\n\t\t\t [x, -size/2, -size/2]]",
"def make_sudoku(size):\r\n def mutate_list_1(lst, size):\r\n \"\"\"Helper function for removing part of a list from the beginning and add it to the end.\"\"\"\r\n count = 0\r\n while count < size:\r\n elem = lst[0]\r\n lst.remove(elem)\r\n lst.append(elem)\r\n count += 1\r\n return lst\r\n\r\n def mutate_list_2(lst):\r\n \"\"\"Helper function for removing element from the beginning of a list and add it to the end.\"\"\"\r\n elem = lst[0]\r\n lst.remove(elem)\r\n lst.append(elem)\r\n return lst\r\n\r\n count = 0\r\n matrix_length = size ** 2 # define a size of matrix\r\n matrix = [[] * matrix_length] # create an empty matrix\r\n matrix[0] = range(1, matrix_length + 1) # set a first row to a range from 1 to size ** 2\r\n while count < matrix_length - 1:\r\n l = matrix[count][:] # create a new list object that is a copy of previous row in a matrix\r\n if (count + 1) % size == 0: # check if a row in inner square of a matrix\r\n l = matrix[count - (size-1)][:] # if it is, l set to the first row of previous square\r\n matrix.append(mutate_list_2(l))\r\n else:\r\n matrix.append(mutate_list_1(l, size)) # mutate l and add it to the matrix\r\n count += 1\r\n\r\n\r\n return matrix"
] | [
"0.6787707",
"0.6666938",
"0.6283164",
"0.60780114",
"0.5999622",
"0.59710145",
"0.5955869",
"0.5921515",
"0.5901971",
"0.5876056",
"0.57982016",
"0.5794456",
"0.5760254",
"0.5742327",
"0.5723517",
"0.57230926",
"0.5693841",
"0.5693019",
"0.5653902",
"0.5641276",
"0.5639415",
"0.56121063",
"0.5609348",
"0.5563183",
"0.5553292",
"0.55530447",
"0.55438673",
"0.55346906",
"0.5495922",
"0.54818666"
] | 0.8229806 | 0 |
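The maximum_sub_square record above calls a helper, get_max_black_square(square_matrix, i, size), that the record does not include. A minimal sketch under assumptions: the signature is taken from the call site, "black" is read as cell value 0 per the query, and only filled squares strictly larger than the current best are considered. The naive all-zero check makes this cruder than the O(N^3) the query claims; it is illustrative, not the author's implementation:

```python
def is_black_square(m, r, c, s):
    # True if the s x s block with top-left corner (r, c) is all black (all zeros).
    return all(m[i][j] == 0 for i in range(r, r + s) for j in range(c, c + s))


def get_max_black_square(square_matrix, row, size):
    # For squares whose top edge lies on `row`, return (start_col, best_size),
    # where best_size > `size` only if some all-black square beats the current
    # best; otherwise (0, size) so the caller keeps its running maximum.
    n = len(square_matrix)
    best_c = 0
    for c in range(n):
        s = size + 1  # smaller squares cannot improve the answer
        # growing s is safe: if the s x s square is not all black, no larger
        # square anchored at the same corner can be all black either
        while row + s <= n and c + s <= n and is_black_square(square_matrix, row, c, s):
            best_c, size = c, s
            s += 1
    return best_c, size
```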
Returns any single Fusion resource by URI. `generic keywords` | def fusion_api_get_resource(self, uri, api=None, headers=None):
if api:
headers = self.fusion_client._set_req_api_version(api=api)
elif not headers:
headers = self.fusion_client._headers
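        # build the absolute URI from the client's host before issuing the GET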
uri = 'https://%s%s' % (self.fusion_client._host, uri)
return self.fusion_client.get(uri, headers) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def fusion_api_get_rack(self, uri=None, param='', api=None, headers=None):\n return self.rack.get(uri=uri, api=api, headers=headers, param=param)",
"def fusion_api_get_li(self, uri=None, api=None, headers=None, param=''):\n return self.li.get(uri=uri, api=api, headers=headers, param=param)",
"def lookup():",
"def do_get(self, **kwargs):\n # TODO(danielrsmith): This request gives two independent return types\n # based on whether a feature_id was specified. Determine the best\n # way to handle this in a strictly-typed manner and implement it.\n feature_id = kwargs.get('feature_id', None)\n if feature_id:\n return self.get_one_feature(feature_id)\n return self.do_search()",
"def fusion_api_get_lig(self, uri=None, param='', api=None, headers=None):\n return self.lig.get(uri=uri, param=param, api=api, headers=headers)",
"def getSpecific(self, keyword, key):",
"def foodtruckByKeyword(request, format=None):\n\t#print \"foodtruckByID\"\n\n\tif request.method == 'GET':\n\t\tif not 'keyword' in request.GET or not request.GET['keyword']:\n\t\t\treturn Response(status=status.HTTP_400_BAD_REQUEST)\n\t\telse:\n\t\t\tkeyword = request.GET['keyword']\n\t\tfoodtrucks = FoodTruck.objects.filter(Q(applicant__icontains=keyword) | Q(fooditems__icontains=keyword))\n\n\t\tserializer = FoodTruckSerializer(foodtrucks, many=True)\n\t\treturn Response(serializer.data)",
"def fusion_api_index_resource(self, uri=None, api=None, headers=None):\n return self.index_resource.get(uri, api, headers)",
"def fusion_api_get_lsg(self, uri=None, param='', api=None, headers=None):\n return self.lsg.get(uri=uri, param=param, api=api, headers=headers)",
"def get(self, identifier, **kwargs):\n\n all_data = self._load()\n # if matches\n for feature in all_data['features']:\n if str(feature.get('id')) == identifier:\n return feature\n # default, no match\n err = f'item {identifier} not found'\n LOGGER.error(err)\n raise ProviderItemNotFoundError(err)",
"def lookup(name):",
"def lookup(name):",
"def fusion_api_get_connection_templates(self, uri=None, param='', api=None, headers=None):\n return self.ct.get(uri=uri, api=api, headers=headers, param=param)",
"def fusion_api_get_fabric(self, uri=None, param='', api=None, headers=None):\n return self.fabric.get(uri=uri, api=api, headers=headers, param=param)",
"def lookup(geo):\r\n\r\n # check cache for geo\r\n if geo in lookup.cache:\r\n return lookup.cache[geo]\r\n\r\n # get feed from Google\r\n feed = feedparser.parse(\"http://news.google.com/news?geo={}&output=rss\".format(urllib.parse.quote(geo, safe=\"\")))\r\n\r\n # if no items in feed, get feed from Onion\r\n if not feed[\"items\"]:\r\n feed = feedparser.parse(\"http://www.theonion.com/feeds/rss\")\r\n\r\n # cache results\r\n lookup.cache[geo] = [{\"link\": item[\"link\"], \"title\": item[\"title\"]} for item in feed[\"items\"]]\r\n\r\n # return results\r\n return lookup.cache[geo]",
"def get(self, identifier):\n all_data = self._load()\n for feature in all_data['features']:\n if feature['id'] == identifier:\n return {\n 'type': 'FeatureCollection',\n 'features': [feature]}\n\n # default, no match\n raise RuntimeError(\"Should be a 404 error\")",
"def get_uri(cls, uri='/api/', search_spec='name', search_string='test'):\n response = cls.find(\n uri=uri,\n search_spec=search_spec,\n search_string=search_string,\n extended_fetch=False\n )\n\n uri_list = {}\n if len(response.json()) == 1:\n # Create a dict out of the response\n uri_list = {response.json()[0]['description']: response.json()[0]['URI']}\n else:\n # Create a dict out of the response\n for value in response.json():\n uri_list.update({value['description']: value['URI']})\n\n return uri_list",
"def fusion_api_get_interconnect(self, uri=None, param='', api=None, headers=None):\n return self.ic.get(uri=uri, api=api, headers=headers, param=param)",
"def get_resource(self, *args, **kwargs):\n target_uri = self._build_uri(**kwargs)\n resource_type = None\n if args:\n resource_type = args[2]\n elif not args and kwargs:\n resource_type = kwargs.get('resource_level')\n return self.get_request(\n target_uri, resource_type, kwargs.get('params'))",
"def _discoverLocation(self, uri):",
"def lookup(self, **kwargs):\n raise NotImplementedError()",
"def getContentTypeFeatured(ct):",
"def _get_source(self, uri: str) -> Optional[_Source]:\n\n for source in self._sources:\n if uri == source.uri:\n return source\n\n return None",
"def select_uri(uris):\n return sorted(uris, key=uri_sort_key)[0]",
"def term_from_uri(uri):\n if uri is None:\n return None\n # This insures that if we get a Literal with an integer value (as we\n # do for modification positions), it will get converted to a string,\n # not an integer.\n if isinstance(uri, rdflib.Literal):\n uri = str(uri.toPython())\n # This is to handle URIs like\n # http://www.openbel.org/bel/namespace//MAPK%20Erk1/3%20Family\n # or\n # http://www.openbel.org/bel/namespace/MAPK%20Erk1/3%20Family\n # In the current implementation, the order of the patterns\n # matters.\n patterns = ['http://www.openbel.org/bel/namespace//(.*)',\n 'http://www.openbel.org/vocabulary//(.*)',\n 'http://www.openbel.org/bel//(.*)',\n 'http://www.openbel.org/bel/namespace/(.*)',\n 'http://www.openbel.org/vocabulary/(.*)',\n 'http://www.openbel.org/bel/(.*)']\n for pr in patterns:\n match = re.match(pr, uri)\n if match is not None:\n term = match.groups()[0]\n term = unquote(term)\n return term\n # If none of the patterns match then the URI is actually a simple term\n # for instance a site: \"341\" or a substitution: \"sub(V,600,E)\"\n return uri",
"def uri_dispatch(uri):\n\n return uri_dispatch_map[os.path.splitext(uri)[1]]",
"def get_by_unique_key(self, unique_key, name, default=None, item_type=None):\n ignored(item_type) # TODO: unique keys are globally unique - could modify baked_query_unique_key to change this\n session = self.DBSession()\n try:\n key = baked_query_unique_key(session).params(name=unique_key, value=name).one()\n except NoResultFound:\n return default\n else:\n return key.resource",
"def fusion_api_get_repository(self, uri=None, param='', api=None, headers=None):\n return self.repository.get(uri=uri, param=param, api=api, headers=headers)",
"def get_keywords_for_movie(url):\n pass",
"def lookup(self):\r\n return resources.Lookup(self)"
] | [
"0.5364149",
"0.5301263",
"0.5261827",
"0.5252303",
"0.5217312",
"0.5171613",
"0.5139872",
"0.5135265",
"0.5113561",
"0.50387925",
"0.5027206",
"0.5027206",
"0.50230473",
"0.5008003",
"0.4985036",
"0.4965123",
"0.4952138",
"0.49429497",
"0.4936138",
"0.49227104",
"0.4913706",
"0.4903873",
"0.4899616",
"0.48580647",
"0.48194948",
"0.47876552",
"0.47822323",
"0.47801277",
"0.47761235",
"0.47729614"
] | 0.555259 | 0 |
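
The positive document in the row above is a thin pass-through: it resolves headers, prefixes the appliance host, and issues a GET. A minimal usage sketch follows; the library instance name `fusion` and the enclosure URI are invented for illustration, while the `resp['name']` access mirrors how other keywords in this dataset consume the returned dict:

    # Hypothetical usage of the generic GET keyword shown above.
    fusion = FusionLibrary()  # assumed keyword-library instance, not from the source
    resp = fusion.fusion_api_get_resource('/rest/enclosures/abc-123')  # any REST path
    print(resp['name'])  # keywords elsewhere in this dump read 'name' from the result
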
Deletes any single fusion resource by uri | def fusion_api_delete_resource(self, uri, api=None, headers=None):
if api:
headers = self.fusion_client._set_req_api_version(api=api)
elif not headers:
headers = self.fusion_client._headers
uri = 'https://%s%s' % (self.fusion_client._host, uri)
return self.fusion_client.delete(uri, headers) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def delete(self, _uri):\n print(\"Deleting '%s'\"%(_uri))\n response = self.__httpsRequest('DELETE', _uri, '')",
"def fusion_api_delete_lsg(self, name=None, uri=None, api=None, headers=None):\n return self.lsg.delete(name=name, uri=uri, api=api, headers=headers)",
"def delete(cls, uri):\n return cls._perform_request(uri, 'DELETE')",
"def fusion_api_delete_rack_manager(self, uri, name=None, param='', api=None, headers=None):\n return self.rackmanager.delete(uri=uri, name=name, param=param, api=api, headers=headers)",
"def fusion_api_delete_storage_pool(self, uri=None, api=None, headers=None):\n return self.pool.delete(uri=uri, api=api, headers=headers)",
"def fusion_api_delete_lig(self, name=None, uri=None, api=None, headers=None, etag=None):\n return self.lig.delete(name=name, uri=uri, api=api, headers=headers, etag=etag)",
"def delete(self, uri, where, selectionArgs):\n pass",
"def fusion_api_delete_repository(self, uri, api=None, headers=None):\n return self.repository.delete(uri=uri, api=api, headers=headers)",
"def delete(self, uri, **kwargs):\n return self.session.delete(uri, **kwargs)",
"def fusion_api_delete_sas_li(self, name=None, uri=None, api=None, headers=None):\n return self.sasli.delete(name=name, uri=uri, api=api, headers=headers)",
"def fusion_api_remove_rack(self, name=None, uri=None, api=None, headers=None):\n return self.rack.delete(name, uri, api, headers)",
"def fusion_api_remove_enclosure(self, name=None, uri=None, param='', api=None, headers=None):\n return self.enclosure.delete(name=name, uri=uri, param=param, api=api, headers=headers)",
"def delete():",
"def fusion_api_delete_fabric(self, name=None, uri=None, api=None, headers=None):\n return self.fabric.delete(name, uri, api, headers)",
"def fusion_api_delete_logical_enclosure(self, name=None, uri=None, param='', api=None, headers=None):\n return self.logical_enclosure.delete(name=name, uri=uri, param=param, api=api, headers=headers)",
"def delete_from_backend(uri, **kwargs):\n\n parsed_uri = urlparse.urlparse(uri)\n scheme = parsed_uri.scheme\n\n backend_class = get_backend_class(scheme)\n\n if hasattr(backend_class, 'delete'):\n return backend_class.delete(parsed_uri, **kwargs)",
"def fusion_api_delete_ls(self, name=None, uri=None, api=None, headers=None):\n return self.ls.delete(name=name, uri=uri, api=api, headers=headers)",
"def fusion_api_delete_sas_lig(self, name=None, uri=None, api=None, headers=None):\n return self.saslig.delete(name=name, uri=uri, api=api, headers=headers)",
"def _delete(self, uri, headers=None):\n if self.openam_url[-1:] == '/':\n openam_path = self.openam_url + uri\n else:\n openam_path = self.openam_url + \"/\" + uri\n\n try:\n data = requests.delete(openam_path, headers=headers, timeout=self.timeout, verify=self.verify)\n except requests.exceptions.RequestException as e:\n data = {'error': e}\n return data",
"def fusion_api_delete_scope(self, uri=None, api=None, headers=None):\n return self.scope.delete(uri, api, headers)",
"def delete(openstack_resource):\n openstack_resource.delete()",
"def delete_node(self, uri):\n if self.sm.already_exists('nodes', uri):\n self.sm.delete_node(uri)\n else:\n raise VOSpaceError(404, \"The specified node does not exist.\")",
"def delete(self, host, file):",
"def delete(self, uri, body=None, headers=None, auth=False):\n return self.send_request('DELETE', uri, body, headers, auth)",
"def fusion_api_delete_storage_system(self, uri=None, api=None, headers=None):\n return self.system.delete(uri=uri, api=api, headers=headers)",
"def rm(self, uri):\n path = osaka.utils.get_uri_path(uri)\n try:\n osaka.utils.LOGGER.debug(\"Removing {0} as a file\".format(uri))\n self.webdav.delete(path)\n except Exception as e:\n osaka.utils.LOGGER.debug(\n \"Removing {0} as a directory, file encountered error {1}\".format(uri, e)\n )\n self.webdav.rmdir(path)",
"def sorl_delete(**kwargs):\n from sorl.thumbnail import delete\n delete(kwargs['file'])",
"def fusion_api_delete_uplink_set(self, name=None, uri=None, api=None, headers=None):\n return self.uplink_set.delete(name, uri, api, headers)",
"def delete(\n self, uri, resource=None, logon_required=True, renew_session=True):\n try:\n self._urihandler.delete(self._hmc, uri, logon_required)\n except HTTPError as exc:\n new_exc = zhmcclient.HTTPError(exc.response())\n new_exc.__cause__ = None\n raise new_exc # zhmcclient.HTTPError\n except ConnectionError as exc:\n new_exc = zhmcclient.ConnectionError(exc.message, None)\n new_exc.__cause__ = None\n raise new_exc # zhmcclient.ConnectionError",
"def delete(uri, backend, context=None):\n if backend:\n loc = location.get_location_from_uri_and_backend(\n uri, backend, conf=CONF)\n store = get_store_from_store_identifier(backend)\n return store.delete(loc, context=context)\n\n LOG.warning('Backend is not set to image, searching all backends based on '\n 'location URI.')\n\n backends = CONF.enabled_backends\n for backend in backends:\n try:\n if not uri.startswith(backends[backend]):\n continue\n\n loc = location.get_location_from_uri_and_backend(\n uri, backend, conf=CONF)\n store = get_store_from_store_identifier(backend)\n return store.delete(loc, context=context)\n except (exceptions.NotFound, exceptions.UnknownScheme):\n continue\n\n raise exceptions.NotFound(_(\"Image not found in any configured backend\"))"
] | [
"0.7476854",
"0.70417976",
"0.69936746",
"0.698172",
"0.69173676",
"0.6905657",
"0.67658347",
"0.6746503",
"0.6724385",
"0.6719894",
"0.6693471",
"0.6664271",
"0.6633366",
"0.66274506",
"0.6536663",
"0.6512758",
"0.6498299",
"0.6482712",
"0.6479271",
"0.6465669",
"0.64460176",
"0.6383703",
"0.63630563",
"0.63536817",
"0.6349774",
"0.63462526",
"0.6343717",
"0.63389903",
"0.6330066",
"0.632212"
] | 0.75143975 | 0 |
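
The GET and DELETE rows above differ only in the final client call; the header/URI preamble is identical. A sketch of that shared pattern factored into one dispatcher, assuming a client object shaped like the `fusion_client` both documents use (attributes `_host`, `_headers`, `_set_req_api_version`, and `get`/`delete` methods):

    def fusion_api_request(client, method, uri, api=None, headers=None):
        # Same header resolution both keywords above perform.
        if api:
            headers = client._set_req_api_version(api=api)
        elif not headers:
            headers = client._headers
        url = 'https://%s%s' % (client._host, uri)
        # method is 'get' or 'delete'; both take (url, headers) in the rows above.
        return getattr(client, method)(url, headers)
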
Compares the TOP LEVEL keys and values (using regex) of a response Dict vs. a validation Dict and returns a dictionary containing overall success, as well as a detailed list of keys/values/success. | def fusion_api_validate_response(self, respDict, valDict):
success = True
returnDict = {}
keys = []
for key in valDict:
if not valDict[key]:
continue
# logger._log_to_console_and_log_file('key: %s' % (key))
# use .get so a missing response key falls through to the failure branch below instead of raising KeyError
keyDict = {'key': key, 'expected': valDict[key], 'actual': respDict.get(key), 'success': True}
if key in respDict:
pattern = re.compile(str(valDict[key]))
# if not re.search(str(valDict[key]), str(respDict[key])):
# t = re.compile('(?i)Warning|Unknown|Terminated|Killed|Error|Completed')
if not re.search(pattern, str(respDict[key])):
success = False
keyDict['success'] = False
else:
success = False
keyDict['success'] = False
keys.append(keyDict)
returnDict['success'] = success
returnDict['keys'] = keys
return returnDict | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _check_response(self, response_contents, correct_jsons):\r\n for username, content in response_contents.items():\r\n\r\n # Used in debugger for comparing objects.\r\n # self.maxDiff = None\r\n\r\n # We should compare top_words for manually,\r\n # because they are unsorted.\r\n keys_to_compare = set(content.keys()).difference(set(['top_words']))\r\n self.assertDictEqual(\r\n {k: content[k] for k in keys_to_compare},\r\n {k: correct_jsons[username][k] for k in keys_to_compare})\r\n\r\n # comparing top_words:\r\n top_words_content = sorted(\r\n content['top_words'],\r\n key=itemgetter('text')\r\n )\r\n top_words_correct = sorted(\r\n correct_jsons[username]['top_words'],\r\n key=itemgetter('text')\r\n )\r\n self.assertListEqual(top_words_content, top_words_correct)",
"def validate_get_response(response, status, count, job_templates, keys=None):\n assert (response[\"status\"]) == status\n json_response = json.loads(response[\"body\"])\n assert (json_response[\"count\"]) == count\n results = json_response[\"results\"]\n for item in results:\n matching_item = find_by_id(item[\"id\"], job_templates)\n if not keys:\n keys = list(matching_item.keys())\n assert sorted(keys) == sorted(list(item.keys()))\n compare(item, matching_item, keys)",
"def validate_post_response(response, status, job, keys=None):\n assert (response[\"status\"]) == status\n json_response = json.loads(response[\"body\"])\n if not keys:\n keys = list(job.keys())\n assert sorted(keys) == sorted(list(json_response.keys()))\n compare(json_response, job, keys)",
"def response_validator(url_dict, host_name_ip, api_endpoint):\r\n for key, value in url_dict.items():\r\n url_framed = url_framer_or_formatter(value.strip(),host_name_ip) + api_endpoint\r\n logger.debug(\"{} Executing request for {}::{} {}\".format(\"#\" * 20, key,url_framed, \"#\" * 20))\r\n status_code, response_data, error_msg = common_http_validator(method='GET', url=url_framed)\r\n if status_code == 200:\r\n logger.debug(\"{} ok status obtained with response message as {}\".format(status_code,json.loads(response_data)['status']))\r\n else:\r\n logger.debug(\"{} status with response as {} and exception message as {}\".format(status_code,response_data,error_msg))\r\n\r\n logger.debug(\"{} Request execution completed for {}::{} {}\".format(\"#\" * 20, key,url_framed, \"#\" * 20))",
"def test_comparing(self):\n for test in self.test_dict_data:\n self.assertEqual(dottedDict(test[0]), test[1])",
"def form_services_data(responses, necessary_code):\n\n services_data = {}\n\n for key in responses:\n if responses[key][1] != necessary_code:\n services_data[key] = parse_errors(responses[key][0])\n\n else:\n services_data[key] = responses[key][0]['check_result']\n\n return services_data",
"def fusion_api_validate_response_follow(self, expected, response, uriCache={}, wordy=False, depth=0,\n disable_dict_sorting=False,\n disable_list_sorting=False,\n called_by_logged=False):\n\n tabs = '\\t' * depth\n\n try:\n TEST_NAME = BuiltIn().get_variable_value(\"${TEST NAME}\")\n except:\n TEST_NAME = \"Suite Setup\"\n\n SUITE_NAME = BuiltIn().get_variable_value(\"${SUITE NAME}\")\n\n keyValueErrors = 0\n if BuiltIn().get_variable_value(\"${VALIDATE_ENTIRE_DTO}\"):\n VALIDATE_ENTIRE_DTO = BuiltIn().get_variable_value(\"${VALIDATE_ENTIRE_DTO}\")\n else:\n VALIDATE_ENTIRE_DTO = False\n\n CALLED_BY_LOGGED_MESSAGE = \"First fail in this Fusion API Validate Response Follow.\\nSuite Name: %s, TEST CASE: %s at key: %s, depth: %s\"\n\n for key in expected.keys():\n if wordy:\n logger.info((\"%sKey: %s\" % (tabs, key)), also_console=False)\n\n if expected[key] is None and response[key] is None:\n logger.info((\"%sExpected and response are None: Key %s\" % (tabs, key)), also_console=False)\n elif expected[key] is None and response[key] is not None:\n if (isinstance(response[key], str) or isinstance(response[key], unicode)) and response[key] == '':\n logger.info((\"%sExpected is None and response is empty string: Key %s\" % (tabs, key)), also_console=False)\n else:\n if not called_by_logged:\n logger.warn(CALLED_BY_LOGGED_MESSAGE % (SUITE_NAME, TEST_NAME, key, depth))\n called_by_logged = True\n logger.warn(\"%sExpected is None but something returned in response: Key %s\" % (tabs, key))\n if VALIDATE_ENTIRE_DTO:\n keyValueErrors += 1\n else:\n if depth == 0:\n return False\n else:\n return False, called_by_logged\n elif expected[key] is not None and response[key] is None:\n if (isinstance(expected[key], str) or isinstance(expected[key], unicode)) and expected[key] == '':\n logger.info((\n \"%sExpected is empty string and response is None: Key %s\" % (tabs, key)), also_console=False)\n else:\n if not called_by_logged:\n logger.warn(CALLED_BY_LOGGED_MESSAGE % (SUITE_NAME, TEST_NAME, key, depth))\n called_by_logged = True\n logger.warn(\"%sExpected something but response is None: Key %s\" % (tabs, key))\n if VALIDATE_ENTIRE_DTO:\n keyValueErrors += 1\n else:\n if depth == 0:\n return False\n else:\n return False, called_by_logged\n\n if isinstance(expected[key], list):\n if len(expected[key]) == 0 and len(response[key]) == 0:\n continue\n elif len(expected[key]) == 0 and len(response[key]) != 0:\n if not called_by_logged:\n logger.warn(CALLED_BY_LOGGED_MESSAGE % (SUITE_NAME, TEST_NAME, key, depth))\n called_by_logged = True\n logger.warn(\"%sFor key %s, Expected is empty but actual is not\" % (tabs, key))\n if VALIDATE_ENTIRE_DTO:\n keyValueErrors += 1\n else:\n if depth == 0:\n return False\n else:\n return False, called_by_logged\n elif len(expected[key]) != 0 and len(response[key]) == 0:\n if not called_by_logged:\n logger.warn(CALLED_BY_LOGGED_MESSAGE % (SUITE_NAME, TEST_NAME, key, depth))\n called_by_logged = True\n logger.warn(\"%sFor key %s, Actual is empty but expcted is not\" % (tabs, key))\n if VALIDATE_ENTIRE_DTO:\n keyValueErrors += 1\n else:\n if depth == 0:\n return False\n else:\n return False, called_by_logged\n\n if (key in response) and isinstance(response[key], list) and (len(expected[key]) == len(response[key])):\n # Lists of dictionaries can return in any order. 
Try to sort\n if isinstance(expected[key][0], dict):\n # logger.info((\"Pre sort Res: %s\" % response[key][0])\n # logger.info((\"Pre sort Exp: %s\" % expected[key][0])\n if not disable_dict_sorting:\n if \"name\" in expected[key][0] and expected[key][0][\"name\"] is not None:\n if wordy:\n logger.info((\"%sSorting List of Dict by: name\") % tabs, also_console=False)\n response[key] = sorted(response[key], key=itemgetter('name'))\n expected[key] = sorted(expected[key], key=itemgetter('name'))\n if \"userName\" in expected[key][0] and expected[key][0][\"userName\"] is not None:\n if wordy:\n logger.info((\"%sSorting List of Dict by: userName\") % tabs, also_console=False)\n response[key] = sorted(response[key], key=itemgetter('userName'))\n expected[key] = sorted(expected[key], key=itemgetter('userName'))\n elif \"portName\" in expected[key][0] and expected[key][0][\"portName\"] is not None:\n if wordy:\n logger.info((\"%sSorting List of Dict by: portName\") % tabs, also_console=False)\n response[key] = sorted(response[key], key=itemgetter('portName'))\n expected[key] = sorted(expected[key], key=itemgetter('portName'))\n elif \"bayNumber\" in expected[key][0] and expected[key][0][\"bayNumber\"] is not None:\n if wordy:\n logger.info((\"%sSorting List of Dict by: bayNumber\") % tabs, also_console=False)\n response[key] = sorted(response[key], key=itemgetter('bayNumber'))\n expected[key] = sorted(expected[key], key=itemgetter('bayNumber'))\n elif \"enclosureIndex\" in expected[key][0] and expected[key][0][\"enclosureIndex\"] is not None:\n if wordy:\n logger.info((\"%sSorting List of Dict by: enclosureIndex\") % tabs, also_console=False)\n # First sort on logicalLocation as a dict if it exists, then enclosureIndex.\n # Order of first sort is maintained in the second sort\n if \"logicalLocation\" in expected[key][0] and expected[key][0][\"logicalLocation\"] is not None:\n response[key] = sorted(response[key], key=itemgetter('logicalLocation'))\n expected[key] = sorted(expected[key], key=itemgetter('logicalLocation'))\n response[key] = sorted(response[key], key=itemgetter('enclosureIndex'))\n expected[key] = sorted(expected[key], key=itemgetter('enclosureIndex'))\n elif \"connectionId\" in expected[key][0] and expected[key][0][\"connectionId\"] is not None:\n if wordy:\n logger.info((\"%sSorting List of Dict by: connectionId\") % tabs, also_console=False)\n response[key] = sorted(response[key], key=itemgetter('connectionId'))\n expected[key] = sorted(expected[key], key=itemgetter('connectionId'))\n elif \"id\" in expected[key][0] and expected[key][0][\"id\"] is not None:\n if wordy:\n logger.info((\"%sSorting List of Dict by: id\") % tabs, also_console=False)\n response[key] = sorted(response[key], key=itemgetter('id'))\n expected[key] = sorted(expected[key], key=itemgetter('id'))\n elif \"relativeValue\" in expected[key][0] and expected[key][0][\"relativeValue\"] is not None:\n if wordy:\n logger.info((\"%sSorting List of Dict by: relativeValue\") % tabs, also_console=False)\n response[key] = sorted(response[key], key=itemgetter('relativeValue'))\n expected[key] = sorted(expected[key], key=itemgetter('relativeValue'))\n elif \"serialNumber\" in expected[key][0] and expected[key][0][\"serialNumber\"] is not None:\n if wordy:\n logger.info((\"%sSorting List of Dict by: serialNumber\") % tabs, also_console=False)\n response[key] = sorted(response[key], key=itemgetter('serialNumber'))\n expected[key] = sorted(expected[key], key=itemgetter('serialNumber'))\n elif \"deviceSlot\" in expected[key][0] 
and expected[key][0][\"deviceSlot\"] is not None:\n if wordy:\n logger.info((\"%sSorting List of Dict by: deviceSlot\") % tabs, also_console=False)\n response[key] = sorted(response[key], key=itemgetter('deviceSlot'))\n expected[key] = sorted(expected[key], key=itemgetter('deviceSlot'))\n elif \"type\" in expected[key][0] and expected[key][0][\"type\"] is not None:\n if wordy:\n logger.info((\"%sSorting List of Dict by: type\") % tabs, also_console=False)\n response[key] = sorted(response[key], key=itemgetter('type'))\n expected[key] = sorted(expected[key], key=itemgetter('type'))\n elif \"iSCSIBootAttemptInstance\" in expected[key][0] and expected[key][0][\"iSCSIBootAttemptInstance\"] is not None:\n if wordy:\n logger.info((\"%sSorting List of Dict by: iSCSIBootAttemptInstance\") % tabs, also_console=False)\n response[key] = sorted(response[key], key=itemgetter('iSCSIBootAttemptInstance'))\n expected[key] = sorted(expected[key], key=itemgetter('iSCSIBootAttemptInstance'))\n elif \"iSCSIAttemptInstance\" in expected[key][0] and expected[key][0][\"iSCSIAttemptInstance\"] is not None:\n if wordy:\n logger.info((\"%sSorting List of Dict by: iSCSIAttemptInstance\") % tabs, also_console=False)\n response[key] = sorted(response[key], key=itemgetter('iSCSIAttemptInstance'))\n expected[key] = sorted(expected[key], key=itemgetter('iSCSIAttemptInstance'))\n else:\n # sort on a key with \"name\" in it, if it has an actual value\n randomkey = \"changeme\"\n for namekey in expected[key][0].keys():\n if (re.match(r'.*name', namekey, re.I)) and (expected[key][0][namekey] is not None):\n randomkey = namekey\n break\n\n # if randomkey not changed then just sort on a random key and hope for the best\n if randomkey == \"changeme\":\n randomkey = random.choice(expected[key][0].keys())\n if wordy:\n logger.info((\"%sSorting List of Dict by random: %s\" % (tabs, namekey)), also_console=False)\n response[key] = sorted(response[key], key=itemgetter(randomkey))\n expected[key] = sorted(expected[key], key=itemgetter(randomkey))\n\n for i in xrange(0, len(expected[key])):\n if isinstance(expected[key][i], dict) or isinstance(expected[key][i], list):\n results, called_by_logged = self.fusion_api_validate_response_follow(expected[key][i], response[key][i], uriCache, wordy, depth + 1, called_by_logged=called_by_logged)\n if not results:\n if VALIDATE_ENTIRE_DTO:\n keyValueErrors += 1\n else:\n if depth == 0:\n return False\n else:\n return False, called_by_logged\n elif isinstance(expected[key][i], int):\n if expected[key][i] == response[key][i]:\n if wordy:\n logger.info((\"%ssimple %s == %s\" % (tabs, expected[key][i], response[key][i])), also_console=False)\n else:\n if not called_by_logged:\n logger.warn(CALLED_BY_LOGGED_MESSAGE % (SUITE_NAME, TEST_NAME, key, depth))\n called_by_logged = True\n logger.warn(\"%ssimple %s != %s\" % (tabs, expected[key][i], response[key][i]))\n if VALIDATE_ENTIRE_DTO:\n keyValueErrors += 1\n else:\n if depth == 0:\n return False\n else:\n return False, called_by_logged\n else:\n words = expected[key][i].split(\":\")\n if len(words) < 2:\n match = False\n if not disable_list_sorting:\n for j in xrange(0, len(response[key])):\n if expected[key][i] == response[key][j]:\n logger.info((\"%sfound item in list. 
Will remove 1 matching item: [%s]\" % (tabs, expected[key][i])), also_console=False)\n response[key].pop(j)\n match = True\n break\n if not match:\n if not called_by_logged:\n logger.warn(CALLED_BY_LOGGED_MESSAGE % (SUITE_NAME, TEST_NAME, key, depth))\n called_by_logged = True\n logger.warn(\"%sDidn't find item in list: [%s]\" % (tabs, expected[key][i]))\n if VALIDATE_ENTIRE_DTO:\n keyValueErrors += 1\n else:\n if depth == 0:\n return False\n else:\n return False, called_by_logged\n elif disable_list_sorting:\n if expected[key][i] == response[key][i]:\n logger.info((\"%sFound matching item: [%s]\" % (tabs, expected[key][i])), also_console=False)\n match = True\n break\n if not match:\n if not called_by_logged:\n logger.warn(CALLED_BY_LOGGED_MESSAGE % (SUITE_NAME, TEST_NAME, key, depth))\n called_by_logged = True\n logger.warn(\"%sActual value [%s] doesn't match expected value [%s]\" % (tabs, response[key][i], expected[key][i]))\n if VALIDATE_ENTIRE_DTO:\n keyValueErrors += 1\n else:\n if depth == 0:\n return False\n else:\n return False, called_by_logged\n elif words[0] == \"REGEX\":\n match = False\n exp = \":\".join(words[1:])\n for j in xrange(0, len(response[key])):\n if re.search(exp, response[key][j], re.M | re.I):\n logger.info((\"%sfound item in list: [%s]\" % (tabs, exp)), also_console=False)\n match = True\n break\n if not match:\n if not called_by_logged:\n logger.warn(CALLED_BY_LOGGED_MESSAGE % (SUITE_NAME, TEST_NAME, key, depth))\n called_by_logged = True\n logger.warn(\"%sDidn't match item in list: [%s]\" % (tabs, exp))\n if VALIDATE_ENTIRE_DTO:\n keyValueErrors += 1\n else:\n if depth == 0:\n return False\n else:\n return False, called_by_logged\n else:\n match = False\n for j in xrange(0, len(response[key])):\n if re.search('/rest/', response[key][j]):\n resp = self.fusion_api_get_resource(str(response[key][j]))\n if resp['name'] == words[1]:\n logger.info((\"%sfound item in list: [%s]\" % (tabs, words[1])), also_console=False)\n match = True\n break\n elif expected[key][i] == response[key][j]:\n if wordy:\n logger.info((\"%ssimple %s == %s\" % (tabs, expected[key][i], response[key][i])), also_console=False)\n match = True\n break\n if not match:\n if not called_by_logged:\n logger.warn(CALLED_BY_LOGGED_MESSAGE % (SUITE_NAME, TEST_NAME, key, depth))\n called_by_logged = True\n logger.warn(\"%sDidn't find item via uri lookup %s: [exp: %s != ret: %s]\" % (tabs, str(response[key][j]), words[1], resp['name']))\n if VALIDATE_ENTIRE_DTO:\n keyValueErrors += 1\n else:\n if depth == 0:\n return False\n else:\n return False, called_by_logged\n else:\n if not called_by_logged:\n logger.warn(CALLED_BY_LOGGED_MESSAGE % (SUITE_NAME, TEST_NAME, key, depth))\n called_by_logged = True\n logger.warn(\"%sList item not in Res or diff len list: %s\" % (tabs, key))\n if VALIDATE_ENTIRE_DTO:\n keyValueErrors += 1\n else:\n if depth == 0:\n return False\n else:\n return False, called_by_logged\n\n continue\n\n if isinstance(expected[key], dict):\n if key in response:\n results, called_by_logged = self.fusion_api_validate_response_follow(expected[key], response[key], uriCache, wordy, depth + 1, called_by_logged=called_by_logged)\n if not results:\n if VALIDATE_ENTIRE_DTO:\n keyValueErrors += 1\n else:\n if depth == 0:\n return False\n else:\n return False, called_by_logged\n continue\n else:\n if not called_by_logged:\n logger.warn(CALLED_BY_LOGGED_MESSAGE % (SUITE_NAME, TEST_NAME, key, depth))\n called_by_logged = True\n logger.warn(\"%sDict item not in Res: %s\" % (tabs, key))\n if 
VALIDATE_ENTIRE_DTO:\n keyValueErrors += 1\n else:\n if depth == 0:\n return False\n else:\n return False, called_by_logged\n\n # only str, int, bool, unicode left\n if key in response:\n if (isinstance(response[key], str) or isinstance(response[key], unicode)) and re.search(r'/rest/', response[key], re.I):\n words = expected[key].split(\":\")\n compare_as_is = False\n compare_as_regex = False\n if len(words) < 2:\n if wordy:\n logger.info((\"%sExpected did not split into type,name: %s\" % (tabs, expected[key])), also_console=False)\n logger.info((\"%swill compare as is.\") % tabs, also_console=False)\n compare_as_is = True\n exp_name = words[0]\n else:\n if key == 'serverHardwareTypeUri':\n if words[0] == \"SHT\":\n logger.info((\"%sSHT lookup. Call 'Get Server Hardware Type URI By Name And Mezz' for: %s\" % (tabs, expected[key])), also_console=False)\n sht_uri = BuiltIn().run_keyword(\"Get Server Hardware Type URI By Name And Mezz\", \":\".join(words[1:]))\n else: # support for SHT Uri lookup by ServerHardware (SH:wpst14, bay 1)\n logger.info((\"SH lookup for SHT: %s\" % expected[key]), also_console=False)\n sh_resp = BuiltIn().run_keyword(\"Get Resource\", expected[key])\n sht_uri = sh_resp['serverHardwareTypeUri']\n\n sht_resp = self.fusion_api_get_resource(sht_uri)\n exp_name = sht_resp['name']\n else:\n if words[0] == \"REGEX\":\n compare_as_regex = True\n exp_name = \":\".join(words[1:])\n\n if wordy:\n logger.info((\"%sResponse has URI, get uri: %s\" % (tabs, response[key])), also_console=False)\n logger.info((\"%sExpecting name: %s\" % (tabs, exp_name)), also_console=False)\n\n if compare_as_regex:\n found = re.search(exp_name, response[key], re.M | re.I)\n msg = \"[\" + key + \"] \" + exp_name + \" vs \" + response[key]\n if found:\n if wordy:\n logger.info((\"%sregex match %s\" % (tabs, msg)), also_console=False)\n continue\n else:\n if not called_by_logged:\n logger.warn(CALLED_BY_LOGGED_MESSAGE % (SUITE_NAME, TEST_NAME, key, depth))\n called_by_logged = True\n logger.warn(\"%sregex not match %s\" % (tabs, msg))\n if VALIDATE_ENTIRE_DTO:\n keyValueErrors += 1\n else:\n if depth == 0:\n return False\n else:\n return False, called_by_logged\n elif compare_as_is:\n msg = \"[\" + key + \"] \" + expected[key] + \" vs \" + response[key]\n if expected[key] != response[key]:\n if not called_by_logged:\n logger.warn(CALLED_BY_LOGGED_MESSAGE % (SUITE_NAME, TEST_NAME, key, depth))\n called_by_logged = True\n logger.warn(\"%ssimple != %s\" % (tabs, msg))\n if VALIDATE_ENTIRE_DTO:\n keyValueErrors += 1\n else:\n if depth == 0:\n return False\n else:\n return False, called_by_logged\n else:\n if wordy:\n logger.info((\"%ssimple == %s\" % (tabs, msg)), also_console=False)\n continue\n else:\n if response[key] in uriCache:\n if wordy:\n msg = \"[\" + key + \"] \" + response[key] + \" --> \" + uriCache[response[key]]\n logger.info((\"%suriCache lookup %s\" % (tabs, msg)), also_console=False)\n resp_name = uriCache[response[key]]\n else:\n resp = self.fusion_api_get_resource(str(response[key]))\n resp_name = resp['name']\n uriCache[response[key]] = resp_name\n if wordy:\n msg = response[key] + \" --> \" + resp_name\n logger.info((\"%sGET uri and save in cache %s\" % (tabs, msg)), also_console=False)\n\n if resp_name != exp_name:\n msg = \"[\" + key + \"] \" + exp_name + \" vs \" + resp_name\n if not called_by_logged:\n logger.warn(CALLED_BY_LOGGED_MESSAGE % (SUITE_NAME, TEST_NAME, key, depth))\n called_by_logged = True\n logger.warn(\"%sExpected Name does not match URI name: %s\" % (tabs, 
msg))\n if VALIDATE_ENTIRE_DTO:\n keyValueErrors += 1\n else:\n if depth == 0:\n return False\n else:\n return False, called_by_logged\n elif (isinstance(expected[key], str) or isinstance(expected[key], unicode)) and (expected[key].find(\"REGEX:\") > -1):\n words = expected[key].split(\":\")\n pattern = \":\".join(words[1:])\n found = re.search(pattern, str(response[key]), re.M | re.I)\n msg = \"[\" + key + \"] \" + pattern + \" vs \" + str(response[key])\n if found:\n if wordy:\n logger.info((\"%sregex match %s\" % (tabs, msg)), also_console=False)\n continue\n else:\n logger.warn(\"%sregex not match %s\" % (tabs, msg))\n if VALIDATE_ENTIRE_DTO:\n keyValueErrors += 1\n else:\n if depth == 0:\n return False\n else:\n return False, called_by_logged\n elif (isinstance(expected[key], str) or isinstance(expected[key], unicode)) and (expected[key].find(\"RANGE:\") > -1):\n words = expected[key].split(\":\")\n wmin = words[1]\n wmax = words[2]\n msg = wmin + \" - \" + wmax + \":\" + str(response[key])\n if (int(response[key]) >= int(wmin)) and (int(response[key]) <= int(wmax)):\n if wordy:\n logger.info((\"%s%s Value in Range: %s\" % (tabs, key, msg)), also_console=False)\n continue\n else:\n logger.warn(\"%s%s Value NOT in Range: %s\" % (tabs, key, msg))\n if VALIDATE_ENTIRE_DTO:\n keyValueErrors += 1\n else:\n if depth == 0:\n return False\n else:\n return False, called_by_logged\n\n elif str(expected[key]) != str(response[key]):\n msg = \"[\" + key + \"] \" + str(expected[key]) + \" vs \" + str(response[key])\n if \"name\" != key and \"name\" in expected:\n msg = \"@ dict name=%s : %s\" % (str(expected[\"name\"]), msg)\n if not called_by_logged:\n logger.warn(CALLED_BY_LOGGED_MESSAGE % (SUITE_NAME, TEST_NAME, key, depth))\n called_by_logged = True\n logger.warn(\"%ssimple != %s\" % (tabs, msg))\n if VALIDATE_ENTIRE_DTO:\n keyValueErrors += 1\n else:\n if depth == 0:\n return False\n else:\n return False, called_by_logged\n else:\n if wordy:\n logger.info((\"%sExpected match response: %s\" % (tabs, expected[key])), also_console=False)\n\n else:\n if not called_by_logged:\n logger.warn(CALLED_BY_LOGGED_MESSAGE % (SUITE_NAME, TEST_NAME, key, depth))\n called_by_logged = True\n logger.warn(\"%sResponse does not have key %s:\" % (tabs, key))\n if VALIDATE_ENTIRE_DTO:\n keyValueErrors += 1\n else:\n if depth == 0:\n return False\n else:\n return False, called_by_logged\n\n if keyValueErrors:\n logger.warn(\"%sDTO had %s failing keys:\" % (tabs, keyValueErrors))\n if depth == 0:\n return False\n else:\n return False, called_by_logged\n\n else:\n if depth == 0:\n return True\n else:\n return True, called_by_logged",
"def _verify_response(self, text_response, orig_otp, orig_nonce):\n response_dict = dict([line.strip(' ').split('=', 1) for line in\n re.split(r'\\r\\n', text_response)\n if line.strip()])\n\n if 'otp' in response_dict and response_dict['otp'] != orig_otp:\n raise YubiKeyVerificationError(\n \"Received response that does not match the OTP that was \"\n \"sent to be verified.\")\n\n if 'nonce' in response_dict and response_dict['nonce'] != orig_nonce:\n raise YubiKeyVerificationError(\n \"Received response that does not match the OTP that was \"\n \"sent to be verified.\")\n\n if self.api_key is not None:\n sig = sign_query(response_dict, self.api_key)\n if response_dict['h'].decode('base64') != sig.decode('base64'):\n raise YubiKeyVerificationError(\n \"Received a response whose signature is invalid\")\n\n return response_dict",
"def test_call_result_as_dict(self):\r\n exp_assignments = rdp_test1_expected_dict\r\n min_confidence = self.default_app.Params['Confidence']\r\n\r\n # Since there is some variation in the assignments, run\r\n # 10 trials and make sure we get the expected result at least once\r\n num_trials = 10\r\n unverified_seq_ids = set(exp_assignments.keys())\r\n for i in range(num_trials):\r\n obs_assignments = self.default_app(self.tmp_seq_filepath)\r\n for seq_id in list(unverified_seq_ids):\r\n obs_assignment, obs_confidence = obs_assignments[seq_id]\r\n exp_assignment, exp_confidence = exp_assignments[seq_id]\r\n self.assertTrue(obs_confidence >= min_confidence)\r\n if obs_assignment == exp_assignment:\r\n unverified_seq_ids.remove(seq_id)\r\n if not unverified_seq_ids:\r\n break\r\n\r\n messages = []\r\n for seq_id in unverified_seq_ids:\r\n messages.append(\r\n \"Unable to verify %s in %s trials\" % (seq_id, num_trials))\r\n messages.append(\" Expected: %s\" % exp_assignments[seq_id][0])\r\n messages.append(\" Observed: %s\" % obs_assignments[seq_id][0])\r\n messages.append(\" Confidence: %s\" % obs_assignments[seq_id][1])\r\n\r\n # make sure all taxonomic results were correct at least once\r\n self.assertFalse(unverified_seq_ids, msg='\\n'.join(messages))",
"def matches_p1(info: Mapping[str, int],\n test_result: Mapping[str, int]) -> bool:\n for key, value in info.items():\n if test_result[key] != value:\n return False\n return True",
"def validate_dict(data_dict, entity):\r\n fields = []\r\n for key, value in data_dict.items():\r\n if not value:\r\n fields.append(key)\r\n continue\r\n if len(fields) > 0:\r\n return provide_field_value(entity, fields)\r\n elif key == hqAddKey:\r\n status = validate_hqadd(value)\r\n if not status == ok_str:\r\n return status\r\n elif key == logoUrlKey:\r\n status = validate_logourl(value)\r\n if not status == ok_str:\r\n return status\r\n elif key == type_key:\r\n status = validate_officeType(value)\r\n if not status == ok_str:\r\n return status\r\n elif key == name_key:\r\n status = None\r\n if entity == party_key:\r\n status = validate_partyname(value)\r\n elif entity == office_key:\r\n status = validate_officeName(value)\r\n if not status == ok_str:\r\n return status\r\n if fields:\r\n return provide_field_value(entity, fields)\r\n return ok_str",
"def check_for_dict(check):",
"def test_create_results_dict_1(self):\n dict = find_domains.create_results_dict(self.rps_results)\n with self.subTest():\n self.assertEqual(len(dict.keys()), 4)\n with self.subTest():\n self.assertEqual(len(dict[\"ABCDE\"]), 2)\n with self.subTest():\n self.assertEqual(len(dict[\"FGHIJ\"]), 2)",
"def verify_response_dict(api_key, response):\n LOGGER.debug('Verifying WSAPI response signature')\n\n # Remove signature from the response\n r = dict(response)\n del r['h']\n\n # Convert to HTML query as that is used by Yubico to sign the response\n query = sorted_urlencode(list(r.iteritems()))\n\n # We unquote it because it's not the HTTP quoted version\n query = urllib.unquote_plus(query)\n\n status = sign(api_key, query) == response['h']\n LOGGER.debug('Signature result ' + str(status))\n return status",
"def test_response(self):\n for i, response in enumerate(RESPONSES):\n with self.subTest(i=i):\n self.assertDictContainsSubset(response, dict(self.responses[i].data))",
"def _process_validator_results(ret, level, object_data, obj):\n\n # The first object in the tuple is the one being validated\n if isinstance(obj, tuple):\n real_obj = obj[0]\n else:\n real_obj = obj\n\n if not ret:\n is_valid = True\n return is_valid\n\n if isinstance(ret, string_types):\n ledger.add_message(ret, level, object_data)\n is_valid = False\n\n elif isinstance(ret, dict):\n for field_name, error in list(ret.items()):\n # verbose_field_name = ledger.map_field_name_to_verbose_name(obj, field_name)\n object_data_with_field = object_data.copy()\n object_data_with_field[\"field\"] = field_name\n if field_name_mapper is None:\n # raise RuntimeError(\"A field_name_mapper was not supplied to this validator.\")\n verbose_name = None\n else:\n verbose_name = field_name_mapper(real_obj, field_name)\n if verbose_name is None:\n from titlecase import titlecase\n\n verbose_name = titlecase(\" \".join(field_name.split(\"_\")))\n\n object_data_with_field[\"verbose_name\"] = verbose_name\n if include_field_name_in_message:\n error = \"{}: {}\".format(verbose_name, error)\n else:\n error = \"{}\".format(error)\n ledger.add_message(error, level, object_data_with_field)\n is_valid = False\n\n else:\n for validator_ret_item in ret:\n if isinstance(validator_ret_item, str):\n ledger.add_message(validator_ret_item, level, object_data)\n is_valid = False\n elif isinstance(validator_ret_item, dict):\n for field_name, error in list(validator_ret_item.items()):\n # verbose_field_name = ledger.map_field_name_to_verbose_name(obj, field_name)\n object_data_with_field = object_data.copy()\n object_data_with_field[\"field\"] = field_name\n verbose_name = field_name_mapper(real_obj, field_name)\n if verbose_name is None:\n from titlecase import titlecase\n\n verbose_name = titlecase(\" \".join(field_name.split(\"_\")))\n\n object_data_with_field[\"verbose_name\"] = verbose_name\n if include_field_name_in_message:\n error = \"{}: {}\".format(verbose_name, error)\n else:\n error = \"{}\".format(error)\n \n ledger.add_message(error, level, object_data_with_field)\n is_valid = False\n\n return is_valid",
"def diff_json(response_data, assert_data):\n if isinstance(response_data, dict):\n \"\"\" dict format \"\"\"\n for key in assert_data:\n if key not in response_data:\n info = \"❌ Response data has no key: {}\".format(key)\n print(info)\n AssertInfo.data.append(info)\n for key in response_data:\n if key in assert_data:\n \"\"\" recursion \"\"\"\n diff_json(response_data[key], assert_data[key])\n else:\n info = \"💡 Assert data has not key: {}\".format(key)\n print(info)\n elif isinstance(response_data, list):\n \"\"\" list format \"\"\"\n if len(response_data) == 0:\n print(\"response is []\")\n if len(response_data) != len(assert_data):\n print(\"list len: '{}' != '{}'\".format(len(response_data), len(assert_data)))\n\n if isinstance(response_data[0], dict):\n response_data = sorted(response_data, key=lambda x: x[list(response_data[0].keys())[0]])\n else:\n response_data = sorted(response_data)\n if isinstance(assert_data[0], dict):\n assert_data = sorted(assert_data, key=lambda x: x[list(assert_data[0].keys())[0]])\n else:\n assert_data = sorted(assert_data)\n\n for src_list, dst_list in zip(response_data, assert_data):\n \"\"\" recursion \"\"\"\n diff_json(src_list, dst_list)\n else:\n if str(response_data) != str(assert_data):\n info = \"❌ Value are not equal: {}\".format(response_data)\n print(info)\n AssertInfo.data.append(info)",
"def field_compare(self, test_case_results, new_results):\n dict_test = {} if not test_case_results else test_case_results\n dict_new = {} if not new_results else new_results\n\n for key in dict_test:\n if key not in dict_new:\n self.missing.append(u\"{}: {!r}\".format(key, dict_test[key]))\n elif set(dict_test[key]) != set(dict_new[key]):\n self.missing.append(u\"{}: {!r}\".format(key, dict_test[key]))\n\n for key in dict_new:\n if key not in dict_test:\n self.unexpected.append(u\"{}: {!r}\".format(key, dict_new[key]))\n elif set(dict_new[key]) != set(dict_test[key]):\n self.unexpected.append(u\"{}: {!r}\".format(key, dict_new[key]))",
"def __verify_root(self):\n output = self.output\n for key in self.data:\n if key != self.root:\n output[\"status\"] = False\n output[\"message\"] = \"{0} is not is the correct format.\"\n print(\"-- An Error Occurred -- {0}\".format(output[\"message\"]))\n break\n return output",
"def json(self) -> dict:\n search = {\n 'criteria': self.search_criteria,\n 'matchesExact': {\n 'avgIndexDiff': self.avg_index_diff(TestSearchResult.MatchType.EXACT.value),\n 'firstFailIndex': self.fail_index(TestSearchResult.MatchType.EXACT.value),\n 'missedMatches': self.missed_matches(TestSearchResult.MatchType.EXACT.value),\n 'resultsApi': self.get_results(\n TestSearchResult.MatchType.EXACT.value, TestSearchResult.Source.API.value),\n 'resultsLegacy': self.get_results(\n TestSearchResult.MatchType.EXACT.value, TestSearchResult.Source.LEGACY.value)\n },\n 'matchesSimilar': {\n 'avgIndexDiff': self.avg_index_diff(TestSearchResult.MatchType.SIMILAR.value),\n 'firstFailIndex': self.fail_index(TestSearchResult.MatchType.SIMILAR.value),\n 'missedMatches': self.missed_matches(TestSearchResult.MatchType.SIMILAR.value),\n 'resultsApi': self.get_results(\n TestSearchResult.MatchType.SIMILAR.value, TestSearchResult.Source.API.value),\n 'resultsLegacy': self.get_results(\n TestSearchResult.MatchType.SIMILAR.value, TestSearchResult.Source.LEGACY.value)\n },\n 'runTime': self.run_time,\n }\n\n search['matchesExact']['passed'] = (\n len(search['matchesExact']['missedMatches']) == 0 and\n search['matchesExact']['firstFailIndex'] == -1\n )\n search['matchesSimilar']['passed'] = (\n len(search['matchesSimilar']['missedMatches']) == 0 and\n search['matchesSimilar']['firstFailIndex'] == -1\n )\n\n return search",
"def _validate_dict_data(self, expected, actual):\n for k, v in expected.iteritems():\n if k in actual:\n if (isinstance(v, basestring) or\n isinstance(v, bool) or\n isinstance(v, (int, long))):\n if v != actual[k]:\n return \"{}:{}\".format(k, actual[k])\n elif not v(actual[k]):\n return \"{}:{}\".format(k, actual[k])\n else:\n return \"key '{}' does not exist\".format(k)\n return None",
"def condensed(self):\n condensed = {}\n key_format = '{0}_{1}_{2}'\n for basekey, basevalue in sorted(self._errors.iteritems()):\n for localkey, localvalue in sorted(basevalue.iteritems()):\n for msgkey, msglist in sorted(localvalue.iteritems()):\n new_key = key_format.format(basekey, localkey, msgkey)\n condensed[new_key] = msglist\n return condensed",
"def CompareValuesDict(dic1, dic2, accepteddiff=0.04, debug =False):\n identical = True\n for key in dic1:\n val1 = dic1[key]\n val2 = dic2.get(key,None)\n if val2:\n diff = np.abs(val1-val2)\n if debug:\n print (key, diff)\n if diff > accepteddiff:\n identical = False\n print(key, val1, val2, diff, accepteddiff)\n if debug:\n print (\"Comparing \", identical)\n return identical",
"def test_postive_get_auth_horizon_check_keys(self):\n r = self.res.get('/auth/config/'+utils.partner, headers=utils.headers)\n logging.info(\"Return response is '%s'\" % r)\n # convert string to dictionary\n rd = ast.literal_eval(r)\n logging.info(\"Return response in dictionary format is '%s'\" % rd)\n self.assertEqual(self.res.response.status, 200)\n keys = ['type', 'web_endpoint', 'client_endpoint', 'org_name']\n self.assertTrue(utils.is_same_array(keys, rd.keys()), \"Keys are not correct!\")",
"def checkResultsCorrection(self, result, valid_keys):\n for key in result:\n if key not in valid_keys:\n print(\"[ERROR] Key '%s' does not exist.\" % key)\n return False\n return True",
"def validate_state(retval):\n success = True\n for data in retval.itervalues():\n for result in data.itervalues():\n if not result.get('result'):\n success = False\n break\n return success",
"def create_response_dict(split_response, response_dict):\n\n for res in split_response:\n split_sub_response = res.split('=')\n if split_sub_response[0] == \"VendorTxCode\":\n response_dict['payment_id'] = split_sub_response[1]\n if split_sub_response[0] == \"VPSTxId\":\n response_dict['Payment_gateway_reference_id'] = split_sub_response[1][1:-1]\n if split_sub_response[0] == \"Status\":\n if split_sub_response[1] == \"OK\" or split_sub_response[1] == \"ABORT\":\n response_dict['status'] = split_sub_response[1]\n else:\n response_dict['status'] = \"FAILED\"\n if split_sub_response[0] == \"Amount\":\n response_dict['Amount'] = split_sub_response[1]",
"def test_compare_data():\n orgInputDict_XX = {\n \"AA_02\": 5.5, \n \"AA_04\": 10,\n \"AA_05\": \"it is a string\", \n \"AA_03\": [\n {\n \"BB_02\": 30, \n \"BB_03\": \"string again\", \n \"BB_01\": [\n {\n \"CC_02\": 55,\n \"CC_03\": \"T_T\",\n \"CC_01\": \"what ever\"\n },\n {\n \"CC_02\": 35,\n \"CC_03\": \"T_T\",\n \"CC_01\": \"what ever\"\n },\n {\n \"CC_02\": 25,\n \"CC_03\": \"T_T\",\n \"CC_01\": \"what ever\"\n }\n ]\n }, \n {\n \"BB_02\": 20, \n \"BB_03\": \"string again\", \n \"BB_01\": [\n {\n \"CC_02\": 25,\n \"CC_03\": \"T_T\",\n \"CC_01\": \"what ever\"\n },\n {\n \"CC_02\": 35,\n \"CC_03\": \"T_T\",\n \"CC_01\": \"what ever\"\n },\n {\n \"CC_02\": 55,\n \"CC_03\": \"T_T\",\n \"CC_01\": \"what ever\"\n }\n ]\n }, \n {\n \"BB_02\": 10, \n \"BB_03\": \"string again\", \n \"BB_01\": [\n {\n \"CC_02\": 25,\n \"CC_03\": \"T_T\",\n \"CC_01\": \"what ever\"\n },\n {\n \"CC_02\": 35,\n \"CC_03\": \"T_T\",\n \"CC_01\": \"what ever\"\n },\n {\n \"CC_02\": 55,\n \"CC_03\": \"T_T\",\n \"CC_01\": \"what ever\"\n }\n ]\n }\n ], \n \"AA_01\": {\n \"y\": 10, \n \"z\": 15,\n \"x\": 5\n }\n }\n\n orgInputDict_YY = {\n \"AA_01\": {\n \"x\": 5, \n \"y\": 10, \n \"z\": 15\n }, \n \"AA_02\": 5.5, \n \"AA_03\": [\n {\n \"BB_02\": 10, \n \"BB_03\": \"string again\", \n \"BB_01\": [\n {\n \"CC_01\": \"what ever\", \n \"CC_02\": 25,\n \"CC_03\": \"T_T\"\n },\n {\n \"CC_01\": \"what ever\", \n \"CC_02\": 35,\n \"CC_03\": \"T_T\"\n },\n {\n \"CC_01\": \"what ever\", \n \"CC_02\": 55,\n \"CC_03\": \"T_T\"\n }\n ]\n }, \n {\n \"BB_02\": 20, \n \"BB_03\": \"string again\", \n \"BB_01\": [\n {\n \"CC_01\": \"what ever\", \n \"CC_02\": 25,\n \"CC_03\": \"T_T\",\n },\n {\n \"CC_01\": \"what ever\", \n \"CC_02\": 35,\n \"CC_03\": \"T_T\",\n },\n {\n \"CC_01\": \"what ever\", \n \"CC_02\": 55,\n \"CC_03\": \"T_T\",\n }\n ]\n }, \n {\n \"BB_02\": 30, \n \"BB_03\": \"AAAAstring again\", \n \"BB_01\": [\n {\n \"CC_01\": \"what ever\", \n \"CC_02\": 25,\n \"CC_03\": \"T_T\"\n },\n {\n \"CC_01\": \"what ever\", \n \"CC_02\": 35,\n \"CC_03\": \"T_T\"\n },\n {\n \"CC_01\": \"what ever\", \n \"CC_02\": 55,\n \"CC_03\": \"T_T\"\n }\n ]\n },\n ], \n \"AA_04\": 1000,\n \"AA_05\": \"it is a string\"\n }\n\n compare_data(orgInputDict_XX, orgInputDict_YY, \"tryTest\")",
"def status_check() -> dict:\r\n avail_books = [k for k, v in books_di.keys() if v[-1] == \"\"]\r\n avail_students = [i for i, j in students_di.keys() if j[-1] == \"\"]\r\n reserved_books = [k for k, v in books_di.keys() if v[-1] != \"\"]\r\n reserved_students = [i for i, j in students_di.keys() if j[-1] != \"\"]\r\n total_book = books_di.keys()\r\n total_students = students_di.keys()\r\n return {\"available_books\": avail_books, \"available_students\": avail_students, \"total_book\": total_book,\r\n \"total_students\": total_students, \"reserved_books\": reserved_books, \"reserved_students\": reserved_students}",
"def test_invalid_value_age(self):\r\n expected = {1: {'ID': 'A233', 'Gender': 'M', 'Age': '22', 'Sales': '245', 'BMI': 'Normal', 'Salary': '23',\r\n 'Birthday': '24-06-1995'}, 2: {'ID': 'A244', 'Gender': 'M', 'Age': '30', 'Sales': '666',\r\n 'BMI': 'Underweight', 'Salary': '23', 'Birthday': '05-05-1988'},\r\n 3: {'ID': 'A253', 'Gender': 'M', 'Age': '35', 'Sales': '456', 'BMI': 'Obesity', 'Salary': '23',\r\n 'Birthday': '01-08-1983'}, 4: {'ID': 'A262', 'Gender': 'M', 'Age': '24', 'Sales': '999',\r\n 'BMI': 'Normal', 'Salary': '23', 'Birthday': '24-05-1993'}}\r\n data = {1: {'ID': 'A233', 'Gender': 'M', 'Age': '22', 'Sales': '245', 'BMI': 'Normal', 'Salary': '23',\r\n 'Birthday': '24-06-1995'}, 2: {'ID': 'A244', 'Gender': 'M', 'Age': '30', 'Sales': '666',\r\n 'BMI': 'Underweight', 'Salary': '23', 'Birthday': '05-05-1988'},\r\n 3: {'ID': 'A253', 'Gender': 'M', 'Age': '35', 'Sales': '456', 'BMI': 'Obesity', 'Salary': '23',\r\n 'Birthday': '01-08-1983'}, 4: {'ID': 'A262', 'Gender': 'M', 'Age': '24', 'Sales': '999',\r\n 'BMI': 'Normal', 'Salary': '23', 'Birthday': '24-05-1993'},\r\n 5: {'ID': 'A233', 'Gender': 'F', 'Age': '692', 'Sales': '245', 'BMI': 'Normal', 'Salary': '23',\r\n 'Birthday': '24-06-1995'}}\r\n result = Validator.save_dict(data)\r\n self.assertDictEqual(expected, result)"
] | [
"0.6134816",
"0.609543",
"0.6012108",
"0.5904799",
"0.5892711",
"0.5874886",
"0.58204603",
"0.58195275",
"0.5783584",
"0.5708685",
"0.56933355",
"0.5689764",
"0.56611013",
"0.56599575",
"0.5631075",
"0.5600514",
"0.5553418",
"0.55305266",
"0.55140865",
"0.5511245",
"0.55012983",
"0.54825443",
"0.5479334",
"0.54660976",
"0.54434663",
"0.5441978",
"0.5441921",
"0.54333615",
"0.5428082",
"0.5375544"
] | 0.780489 | 0 |
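
To make the top-level comparison in `fusion_api_validate_response` concrete, here is a self-contained rerun of its core loop on invented sample data; the output mirrors the keyword's return shape ({'success': bool, 'keys': [...]}):

    import re

    respDict = {'status': 'Warning: link down', 'state': 'Configured'}
    valDict = {'status': 'Warning|Error', 'state': 'Configured'}

    keys, success = [], True
    for key, expected in valDict.items():
        if not expected:  # the keyword skips empty expectations
            continue
        ok = key in respDict and re.search(str(expected), str(respDict[key])) is not None
        success = success and ok
        keys.append({'key': key, 'expected': expected,
                     'actual': respDict.get(key), 'success': ok})
    result = {'success': success, 'keys': keys}
    # result['success'] is True here: both values match their patterns
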
Updates an alert. [Arguments] | def fusion_api_update_alert(self, body, uri, api=None, headers=None):
return self.alert.update(body, uri, api, headers) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def alert(self, alert):\n\n self._alert = alert",
"def javaScriptAlert(self, frame, message):\n\n self._robot._alert = message\n self._robot.popup_messages = message\n logger.debug(\"alert('%s')\" % message)",
"def alert(bot, update, args, job_queue):\n continue_on = 1\n chat_id = update.message.chat_id\n message_id = update.message.message_id\n user = str(update.message.from_user)\n if not args:\n update.message.reply_text('please enter a time')\n return\n if '|' in args:\n message = ' '.join(args)\n argstemp = message.split('|')\n due = alerts.lastDitchAttempt(argstemp[0])\n if due > 0:\n argstemp.pop(0)\n message = ' '.join(argstemp)\n continue_on = -1\n if continue_on == 1:\n due = alerts.parseADate(args[0])\n if due <= 0:\n due = alerts.regexmatch(args[0])\n args.pop(0)\n message = ' '.join(args)\n if due <= 0:\n update.message.reply_text('Sorry that is not a valid time')\n return\n\n # Add job to queue\n my_context = '' + str(chat_id) + ':' + str(message_id)\n job = Job(alarm, due, repeat=False, context=my_context)\n USERS[my_context] = user\n MESSAGES[my_context] = message\n TIMERS[my_context] = job\n job_queue.run_once(alarm, due, context=my_context)\n current_time = datetime.now()\n due = int((current_time - datetime(1970, 1, 1)).total_seconds() + due)\n fileIO.writeAlertJob(\"alerts\", str(chat_id),\n str(message_id), user, due, message)\n set_for = alerts.timeSetFor(due)\n bot.sendMessage(update.message.chat_id, 'Timer successfully set for: ' + str(set_for) +\n '\\nYour ID is:' + str(message_id))",
"def alert_expr(self, alert_expr):\n\n self._alert_expr = alert_expr",
"async def alert(self, entry):\n\n if self.outputs.get('log.enabled'):\n rssalertbot.alerts.alert_log(self, self.outputs.get('log'), entry)\n\n if self.outputs.get('email.enabled'):\n rssalertbot.alerts.alert_email(self, self.outputs.get('email'), entry)\n\n if self.outputs.get('slack.enabled'):\n await rssalertbot.alerts.alert_slack(self, self.outputs.get('slack'), entry)",
"def alert(self, txt, title=\"Alert\"):\r\n self.message(txt, title)",
"def javaScriptAlert(self, frame, message):\n print 'Alert:', message",
"def alert(self, name, url):\n email = \"\"\n if app.is_checked.get():\n email = app.email_addr_entry.get()\n SendEmail.sendEmail(email, name, url)\n\n # tempWin = tk.Tk() # Temporary, invisible window to use as a popup's root\n # # This way the root will always be in the same thread as the popup\n # tempWin.withdraw()\n # popup = ItemAlertDialogue(tempWin, \"Item Restocked!\", name, url)\n\n kwargs = {\n \"title\": \"Item Stock Tracker\",\n \"ticker\": \"~Item Stock Tracker~\",\n \"app_name\": \"Item Stock Tracker\",\n \"timeout\": 10,\n \"message\": name + \" is restocked! \",\n }\n plyer.notification.notify(**kwargs)\n\n popup = ItemAlertDialogue(self, \"Item Restocked!\", name, url)",
"def alert_id(self, alert_id):\n\n self._alert_id = alert_id",
"def alert(self, alert_str):\n # Make sure alerts have the same type\n alert_str = str(alert_str)\n self._output_object.add_alert(\n html_tag(plain_to_html(alert_str), alert_str, self.proc)\n )\n self.alerts.append((alert_str, self.proc))",
"def test_edit_alert_by_id(self):\n pass",
"def alert(title: str, text: str, *, level: str = \"warning\", ID: str = None):\n if level not in (\"info\", \"warning\"):\n raise ValueError(\"Level must be among 'info', 'warning'\")\n if alert.has_disable_been_called:\n raise RuntimeError(\n \"The function alert() is called after disable_old_alert() has generated \"\n \"the javascript code to handle hidding closed alerts. This breaks the \"\n \"system completely, make sure disable_old_alerts is called last\"\n )\n if ID is None:\n alert_id = alert.numid\n alert.numid += 1\n else:\n alert_id = str(ID)\n alert.strid.append(alert_id)\n\n indent = \" \" * 4 * 4\n msg = format_html(f\"<div>{text!s}</div>\").replace(\"\\n\", \"\\n\" + indent)\n return textwrap.dedent(\n f\"\"\"\\\n <input type=\"hidden\" class=\"alert-hidder\" name=\"attr_alert-{alert_id}\" value=\"0\"/>\n <div class=\"alert alert-{level}\">\n <div>\n <h3> {level.title()} - {title}</h3>\n {msg}\n </div>\n <label class=\"fakebutton\">\n <input type=\"checkbox\" name=\"attr_alert-{alert_id}\" value=\"1\" /> ×\n </label>\n </div>\"\"\"\n )",
"def upsert_alert(\n self,\n alerts: str,\n project_id: str = PROVIDE_PROJECT_ID,\n retry: Retry | _MethodDefault = DEFAULT,\n timeout: float | None = None,\n metadata: Sequence[tuple[str, str]] = (),\n ) -> None:\n policy_client = self._get_policy_client()\n channel_client = self._get_channel_client()\n\n record = json.loads(alerts)\n existing_policies = [\n policy[\"name\"] for policy in self.list_alert_policies(project_id=project_id, format_=\"dict\")\n ]\n existing_channels = [\n channel[\"name\"]\n for channel in self.list_notification_channels(project_id=project_id, format_=\"dict\")\n ]\n policies_ = []\n channels = []\n for channel in record.get(\"channels\", []):\n channels.append(NotificationChannel(**channel))\n for policy in record.get(\"policies\", []):\n policies_.append(AlertPolicy(**policy))\n\n channel_name_map = {}\n\n for channel in channels:\n channel.verification_status = (\n monitoring_v3.NotificationChannel.VerificationStatus.VERIFICATION_STATUS_UNSPECIFIED\n )\n\n if channel.name in existing_channels:\n channel_client.update_notification_channel(\n request={\"notification_channel\": channel},\n retry=retry,\n timeout=timeout,\n metadata=metadata,\n )\n else:\n old_name = channel.name\n channel.name = None\n new_channel = channel_client.create_notification_channel(\n request={\"name\": f\"projects/{project_id}\", \"notification_channel\": channel},\n retry=retry,\n timeout=timeout,\n metadata=metadata,\n )\n channel_name_map[old_name] = new_channel.name\n\n for policy in policies_:\n policy.creation_record = None\n policy.mutation_record = None\n\n for i, channel in enumerate(policy.notification_channels):\n new_channel = channel_name_map.get(channel)\n if new_channel:\n policy.notification_channels[i] = new_channel\n\n if policy.name in existing_policies:\n with contextlib.suppress(InvalidArgument):\n policy_client.update_alert_policy(\n request={\"alert_policy\": policy},\n retry=retry,\n timeout=timeout,\n metadata=metadata,\n )\n else:\n policy.name = None\n for condition in policy.conditions:\n condition.name = None\n policy_client.create_alert_policy(\n request={\"name\": f\"projects/{project_id}\", \"alert_policy\": policy},\n retry=retry,\n timeout=timeout,\n metadata=metadata,\n )",
"def test_set_alert(self):\n alert = dweepy.set_alert(\n self.my_thing_id,\n ['[email protected]', '[email protected]'],\n test_alert_condition,\n test_key,\n )\n self.assertEqual(alert['condition'], test_alert_condition)",
"def update(*args):",
"def update_alerts(self, display_dict):\n self._go_to_first_blank(self.box2)\n if display_dict[\"status_code\"] == 1:\n y = self._go_to_first_blank(self.box2)\n self.box2.addstr(y, 4, self.HIGH_TRAFFIC_TEMPLATE.format(display_dict[\"debit\"]))\n self.box2.addstr(y + 1, 4, \"Triggered at: {time}\".format(time=time.strftime(\"%H:%M:%S\", time.localtime())))\n elif display_dict[\"status_code\"] == 0:\n y = self._go_to_first_blank(self.box2)\n self.box2.addstr(y - 1, 4, self.RECOVER.format(time.strftime(\"%H:%M:%S\", time.localtime()),\n display_dict[\"debit\"]))",
"async def send_update_metric(self, title: str, update_func: str) -> None:\n msg, sending_dialogue = self.dialogues.create(\n counterparty=self.prometheus_address,\n performative=PrometheusMessage.Performative.UPDATE_METRIC,\n title=title,\n callable=update_func,\n value=1.0,\n labels={},\n )\n assert sending_dialogue is not None\n assert sending_dialogue.last_message is not None\n\n envelope = Envelope(\n to=msg.to,\n sender=msg.sender,\n message=msg,\n )\n await self.prometheus_con.send(envelope)",
"def update(self, msg):\n pass",
"def add_alerts(self):",
"def requestAlert(self, text=\"Error\", buttons=None):\n\t\tself.alerts.append((text, buttons))",
"def do_update(self, addon):\n self.update_error.emit(\n addon,\n NotImplementedError('An updater is not installed.')\n )",
"def update_note(self, alert_id, note):\n return self._alert_service.update_note(alert_id, note)",
"def update(self, *args, **kwargs):",
"def update_remote_alert(client: Client, delta: Dict[str, Any],\n inc_status: IncidentStatus, incident_id: str):\n # XSOAR incident was closed - closing the mirrored prisma alert\n if inc_status == IncidentStatus.DONE and whether_to_close_in_prisma_cloud(client.close_alert, delta):\n demisto.debug(f'Closing incident with remote ID {incident_id} in remote system.')\n close_alert_in_prisma_cloud(client, [incident_id], delta)\n demisto.debug(f'Remote Incident: {incident_id} was updated successfully.')\n\n # XSOAR incident was re-opened - re-opening the mirrored prisma alert\n elif inc_status == IncidentStatus.ACTIVE and whether_to_reopen_in_prisma_cloud(client.close_alert, delta):\n demisto.debug(f'Reopening incident with remote ID {incident_id} in remote system.')\n reopen_alert_in_prisma_cloud(client, [incident_id])\n demisto.debug(f'Remote Incident: {incident_id} was updated successfully.')\n\n else:\n demisto.debug(f\"Skipping the update of remote incident {incident_id} as it has not closed or re-opened in XSOAR.\")",
"def update():\n return 'update api in put'",
"def update(self, args):\n pass",
"def send_alert(alert_object):\n # Placeholder -- alert creation date UTC\n # Eventually this will come from the alert\n\n if alert_object.sent:\n raise RuntimeError(f'Refusing to send alert '\n f'{alert_object.alert[\"objectId\"]},'\n f' alert has already been sent out.')\n\n\n ac = alert_object.created_at\n alert_date = f'{ac.year}{ac.month:02d}{ac.day:02d}'\n alert = alert_object.to_dict()\n\n imtype = alert['candidate']['alert_type']\n if imtype == 'single':\n schema = combine_schemas(\n [\"schema_single/candidate.avsc\", \"schema_single/light_curve.avsc\",\n \"schema_single/alert.avsc\"])\n topicname = \"ztf_%s_programid2_zuds\" %alert_date\n send(topicname, [alert], schema)\n elif imtype == 'stack':\n schema = combine_schemas(\n [\"schema_stack/candidate.avsc\", \"schema_stack/light_curve.avsc\",\n \"schema_stack/alert.avsc\"])\n topicname = \"ztf_%s_programid2_zuds_stack\" %alert_date\n send(topicname, [alert], schema)",
"def update(self, *args, **kw):\n pass",
"def alert(data: Any) -> None:\n\n root = Container()\n root += Label(\"[210 italic bold]Alert!\")\n root += Label()\n root += Label(str(data))\n\n root.center()\n root.print()\n getch()\n root.wipe()",
"def alert(self, message):\n try:\n self.send_message(message)\n except Exception as err:\n logger.exception(\n f\"Slack notification to {self.username} failed with {err.__repr__()}\"\n )"
] | [
"0.69158286",
"0.6260183",
"0.61714727",
"0.5955488",
"0.5913151",
"0.5898235",
"0.58023983",
"0.5789328",
"0.578736",
"0.5762784",
"0.57449377",
"0.5690385",
"0.5680187",
"0.56483424",
"0.56139004",
"0.55455244",
"0.5529444",
"0.5529032",
"0.54850477",
"0.54788685",
"0.5411761",
"0.5407084",
"0.53675836",
"0.53548485",
"0.5350087",
"0.5344657",
"0.5344373",
"0.5340075",
"0.53337044",
"0.53276974"
] | 0.7288903 | 0 |
Deletes an alert in bulk based on uri. If uri is not specified, ALL alerts are deleted [Arguments] | def fusion_api_delete_alert(self, uri=None, api=None, headers=None, param=''):
return self.alert.delete(uri, api, headers, param=param) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def delete(self, _uri):\n print(\"Deleting '%s'\"%(_uri))\n response = self.__httpsRequest('DELETE', _uri, '')",
"def delete(self, args, intent):\n if 'all' in args.keys() and args['all'] == True:\n try:\n db = get_db('expressions')\n db_results = db.delete_all_intent_expressions(intent)\n expressions = [x[1] for x in db_results]\n resp = jsonify(intent=intent, expressions=expressions)\n return resp\n except DatabaseError as error:\n resp = jsonify(error=error.value)\n resp.status_code = 500\n return resp\n except DatabaseInputError as error:\n resp = jsonify(error=error.value)\n resp.status_code = 400\n return resp \n elif args['expressions']:\n try:\n db = get_db('expressions')\n db_results = db.delete_expressions_from_intent(intent, args['expressions'])\n expressions = [x[1] for x in db_results]\n resp = jsonify(intent=intent, expressions=expressions, deleted_expressions=args['expressions'])\n return resp\n except DatabaseError as error:\n resp = jsonify(error=error.value)\n resp.status_code = 500\n return resp\n except DatabaseInputError as error:\n resp = jsonify(error=error.value)\n resp.status_code = 400\n return resp",
"def delete(cls, uri):\n return cls._perform_request(uri, 'DELETE')",
"def delete(self, uri, **kwargs):\n return self.session.delete(uri, **kwargs)",
"def delete(self, uri, body=None, headers=None, auth=False):\n return self.send_request('DELETE', uri, body, headers, auth)",
"def delete(self, uri, where, selectionArgs):\n pass",
"def delete_alarm():\r\n name = request.args.get('alarm_item')\r\n logging.info(\"Alarm deleted in delete_alarm(): \" + name)\r\n for alarm in alarms:\r\n if alarm['title'] == name:\r\n alarms.remove(alarm)",
"def _delete(self, uri, headers=None):\n if self.openam_url[-1:] == '/':\n openam_path = self.openam_url + uri\n else:\n openam_path = self.openam_url + \"/\" + uri\n\n try:\n data = requests.delete(openam_path, headers=headers, timeout=self.timeout, verify=self.verify)\n except requests.exceptions.RequestException as e:\n data = {'error': e}\n return data",
"def delete(self, uri: str, **kwargs) -> Dict:\n response = self.session.delete(url=self._url(uri), **kwargs)\n return self._process_response(response)",
"def do_delete_request(self, uri, headers, timeout_ms):\n return self._do_request('DELETE', uri, headers, None, timeout_ms, None)",
"def delete_email_from_all_addressooks(self, email):\n logger.info(\"Function call: delete_email_from_all_addressooks for '{}'\".format(email, ))\n return self.__handle_error('Empty email') if not email else self.__handle_result(self.__send_request('emails/{}'.format(email, ), 'DELETE'))",
"def delete_legislation(self, expr_uri):\n resp = self.session.delete(self.url + expr_uri, timeout=self.timeout)\n self.check_for_error(resp)",
"def delete(self, args):\n try:\n db = get_db('intents')\n intents = db.delete_intent(args['intent'])\n resp = jsonify(intents=intents)\n resp.status_code = 200\n return resp\n except DatabaseError as error:\n resp = jsonify(error=error)\n resp.status_code = 500\n return resp\n except DatabaseInputError as error:\n resp = jsonify(error=error)\n resp.status_code = 400\n return resp",
"def delete_via_api(sync_object, es_url_data, elasticsearch_host, folder, debug=False, dry_run=False):\n header(\"Deleting ({0})...\\n{1}\".format(sync_object, elasticsearch_host))\n sub_header(folder)\n\n files = get_local_files(folder)\n\n for filename in files:\n file_title = os.path.basename(os.path.splitext(filename)[0])\n print(file_title)\n\n es_url = '/'.join([elasticsearch_host,\n es_url_data['index'],\n es_url_data['type']])\n es_url = es_url.rstrip('/')\n\n url = \"%s/%s\" % (es_url,\n file_title)\n\n elasticsearch_api_request(url, 'DELETE', debug=debug, dry_run=dry_run)",
"def __delete_cmd_parse(self, _cmd: dict):\n for sitem in _cmd:\n #\n # alle sets durch\n # {\"delete\":[{\"alert\":\"alert-04\"}]}\n #\n alert_name = sitem['alert']\n self.log.debug(\"found alert {} with DELETE command\".format(alert_name))\n if alert_name in self.config:\n ConfigFileObj.config_lock.acquire()\n del self.config[alert_name]\n ConfigFileObj.config_lock.release()\n self.config_hash['version'] = self.__get_hashstr(self.config)\n if self.on_config_change is not None:\n self.on_config_change(int(time()))\n return json.dumps({'ok': \"alert {} is deleted in config...\".format(alert_name)}).encode(\n encoding='utf-8')\n else:\n self.log.fatal(\"to delete alert {} is not found in config...\".format(alert_name))\n return json.dumps({'error': \"to delete alert {} is not found in config...\".format(alert_name)}).encode(\n encoding='utf-8')\n # ENDE __set_cmd_parse",
"def delete_files(self, urls=None, delete_all=True):\n locations_to_delete = []\n if not urls and delete_all:\n locations_to_delete = self.indexed_file_locations\n else:\n locations_to_delete = list(map(IndexedFileLocation.from_url, urls))\n response = (\"No URLs to delete\", 200)\n for location in locations_to_delete:\n bucket = location.bucket_name()\n\n file_suffix = \"\"\n try:\n file_suffix = location.file_name()\n except Exception as e:\n logger.info(e)\n file_suffix = self.file_id\n\n logger.info(\n \"Attempting to delete file named {} from bucket {}.\".format(\n file_suffix, bucket\n )\n )\n response = location.delete(bucket, file_suffix)\n\n # check status code not in 200s\n response_status_code = response[1]\n\n if response_status_code > 399:\n break\n return response",
"def bulk_delete(self, **kwargs: Any) -> Response:\n tags = kwargs[\"rison\"]\n try:\n DeleteTagsCommand(tags).run()\n return self.response(200, message=f\"Deleted {len(tags)} tags\")\n except TagNotFoundError:\n return self.response_404()\n except TagInvalidError as ex:\n return self.response(422, message=f\"Invalid tag parameters: {tags}. {ex}\")\n except TagDeleteFailedError as ex:\n return self.response_422(message=str(ex))",
"def delete(event, context):\n\n if type(event) == str:\n event = json.loads(event)\n\n for record in event['Records']:\n data = json.loads(record['Sns']['Message'])\n\n item = table.get_item(\n Key={\n 'id':data['id']\n }\n )\n\n if 'Item' in item:\n result = table.update_item(\n Key={ 'id': data['id']},\n ExpressionAttributeNames={\n '#is_deleted': 'deleted'\n },\n ExpressionAttributeValues={\n ':delete_flg': True,\n ':updatedAt': str(dt.datetime.utcnow())\n },\n UpdateExpression='SET #is_deleted = :delete_flg, modifiedAt = :updatedAt',\n ReturnValues='ALL_NEW'\n )\n\n\n response = {\n 'statusCode': 200\n }\n\n return response",
"def del_alarm(self, index_list):\n params = [\n ('totaldelnum', len(index_list)),\n ]\n params += [('index', int(i)) for i in index_list]\n\n self.get(COMMAND_UIC, 'DelAlarm', params)",
"def delete_bulk_orders(self, event_ids=None, market_ids=None, runner_ids=None, offer_ids=None, session=None):\n params = clean_locals(locals())\n date_time_sent = datetime.datetime.utcnow()\n method = 'offers'\n response = self.request('DELETE', self.client.urn_edge, method, data=params, session=session)\n date_time_received = datetime.datetime.utcnow()\n return self.process_response(\n response.json().get('offers', []), resources.Order, date_time_sent, date_time_received\n )",
"def DELETE(self, uri):\n def body(conn, cur):\n self.enforce_right('owner', uri)\n if web.ctx.ermrest_history_snaptime is not None:\n raise exception.Forbidden('deletion of catalog at previous revision')\n if web.ctx.ermrest_history_snaprange is not None:\n # should not be possible bug check anyway...\n raise NotImplementedError('deletion of catalog with snapshot range')\n self.set_http_etag( web.ctx.ermrest_catalog_model.etag() )\n self.http_check_preconditions(method='DELETE')\n self.emit_headers()\n return True\n\n def post_commit(destroy):\n web.ctx.ermrest_registry.unregister(self.catalog_id)\n web.ctx.status = '204 No Content'\n return ''\n\n return self.perform(body, post_commit)",
"def delete(self):\n rest_params = common.get_restful_params(self.request.uri)\n if rest_params is None:\n common.echo_json_response(self, 405, \"Not Implemented: Use /agents/ interface\")\n return\n\n if \"agents\" not in rest_params:\n common.echo_json_response(self, 400, \"uri not supported\")\n return\n\n agent_id = rest_params[\"agents\"]\n\n if agent_id is None:\n common.echo_json_response(self, 400, \"uri not supported\")\n logger.warning('DELETE returning 400 response. uri not supported: ' + self.request.path)\n\n agent = self.db.get_agent(agent_id)\n\n if agent is None:\n common.echo_json_response(self, 404, \"agent id not found\")\n logger.info('DELETE returning 404 response. agent id: ' + agent_id + ' not found.')\n return\n\n op_state = agent['operational_state']\n if op_state == cloud_verifier_common.CloudAgent_Operational_State.SAVED or \\\n op_state == cloud_verifier_common.CloudAgent_Operational_State.FAILED or \\\n op_state == cloud_verifier_common.CloudAgent_Operational_State.TERMINATED or \\\n op_state == cloud_verifier_common.CloudAgent_Operational_State.TENANT_FAILED or \\\n op_state == cloud_verifier_common.CloudAgent_Operational_State.INVALID_QUOTE:\n self.db.remove_agent(agent_id)\n common.echo_json_response(self, 200, \"Success\")\n logger.info('DELETE returning 200 response for agent id: ' + agent_id)\n else:\n self.db.update_agent(agent_id, 'operational_state',cloud_verifier_common.CloudAgent_Operational_State.TERMINATED)\n common.echo_json_response(self, 202, \"Accepted\")\n logger.info('DELETE returning 202 response for agent id: ' + agent_id)",
"def fusion_api_delete_vsn_range(self, name=None, uri=None, api=None, headers=None):\n return self.vsnrange.delete(name, uri, api, headers)",
"def delete(socket, args, config, library, cmd=False):\n files=args['<nameid>']\n ignore=args['--ignore']\n\n for nameid in files:\n receipt = library.get_receipt( nameid )\n if not receipt:\n if cmd: print \"Could not find receipt for:\",nameid\n if not ignore: return False\n continue\n\n if receipt.get_oid() == None:\n if cmd: print \"You do not have deletion permission for:\",nameid\n if not ignore: return False\n continue\n\n if cmd: print \"Delete\", receipt.get_filename(), \"?\"\n\n response = raw_input(\"Are you sure? [y/N]\")\n if response.lower() not in ['yes','y']:\n print \"File was not deleted.\"\n return False\n\n if delete_file( socket, receipt ):\n #Succeeded, so remove receipt from library\n library.remove_receipt( nameid )\n\n if cmd: print \"Deletion succeeded!\"\n elif cmd: print \"Deletion failed!\"\n\n # Return Success.\n return True",
"def deleteDocument(cred, documentPaths):\n for documentPath in documentPaths:\n url = cred.base_url + \"documents/\" + documentPath\n\n makeRequest(cred, url, 'DELETE')",
"def fusion_api_delete_storage_pool(self, uri=None, api=None, headers=None):\n return self.pool.delete(uri=uri, api=api, headers=headers)",
"def delete_request(\n self,\n alias,\n uri,\n data=None,\n json=None,\n params=None,\n headers=None,\n allow_redirects=None,\n timeout=None):\n session = self._cache.switch(alias)\n data = utils.format_data_according_to_header(session, data, headers)\n # XXX workaround to restore library default behaviour. Not needed in new keywords\n redir = True if allow_redirects is None else allow_redirects\n\n response = self._common_request(\n \"delete\",\n session,\n uri,\n data=data,\n json=json,\n params=params,\n headers=headers,\n allow_redirects=redir,\n timeout=timeout)\n\n return response",
"def fusion_api_delete_ls(self, name=None, uri=None, api=None, headers=None):\n return self.ls.delete(name=name, uri=uri, api=api, headers=headers)",
"def build_delete_by_guids_request(\n *,\n guids: List[str],\n **kwargs: Any\n) -> HttpRequest:\n accept = \"application/json\"\n\n # Construct URL\n url = kwargs.pop(\"template_url\", '/atlas/v2/entity/bulk')\n\n # Construct parameters\n query_parameters = kwargs.pop(\"params\", {}) # type: Dict[str, Any]\n query_parameters['guids'] = [_SERIALIZER.query(\"guids\", q, 'str') if q is not None else '' for q in guids]\n\n # Construct headers\n header_parameters = kwargs.pop(\"headers\", {}) # type: Dict[str, Any]\n header_parameters['Accept'] = _SERIALIZER.header(\"accept\", accept, 'str')\n\n return HttpRequest(\n method=\"DELETE\",\n url=url,\n params=query_parameters,\n headers=header_parameters,\n **kwargs\n )",
"def delete(self, ids):\r\n params = base.get_params(None, locals())\r\n request = http.Request('DELETE', self.get_url(), params)\r\n return request, parsers.parse_json"
] | [
"0.6291771",
"0.62217516",
"0.6171571",
"0.61333543",
"0.60093033",
"0.5914589",
"0.58051556",
"0.5781954",
"0.56324255",
"0.55792224",
"0.55427295",
"0.5505852",
"0.54336566",
"0.5425172",
"0.5400669",
"0.53584784",
"0.5339873",
"0.5333717",
"0.53255534",
"0.53215915",
"0.5303926",
"0.5227082",
"0.5165218",
"0.51557016",
"0.5154014",
"0.51465386",
"0.51295936",
"0.51285833",
"0.5126607",
"0.5126037"
] | 0.65942943 | 0 |
Deletes an alert's changelog based on alertId. [Arguments] | def fusion_api_delete_alert_changelog(self, alertId=None, api=None, headers=None):
return self.alert.delete(alertId, api, headers) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_delete_alert_by_id(self):\n pass",
"def delete(\n id: int = typer.Argument(\n ...,\n help=\"ID of the log entry\"\n )\n):\n manager = LogBookManager()\n deleted, message = manager.delete(id)\n\n if deleted:\n typer.echo(\n typer.style(message, fg=typer.colors.GREEN, bold=True)\n )\n else:\n typer.echo(\n typer.style(message, fg=typer.colors.RED, bold=True)\n )",
"def delete_activity(self, activity_log_id):\n self._db.execute(\"\"\"\n DELETE FROM exception_log\n WHERE activity_log = ?\"\"\", (activity_log_id, ))",
"def fusion_api_delete_alert(self, uri=None, api=None, headers=None, param=''):\n return self.alert.delete(uri, api, headers, param=param)",
"def removeAlertFromDb(self):\n sql_query = \"DELETE FROM Alert WHERE symbol='\" + self.symbol + \"' AND cross='\" + self.cross + \"' AND level=\" + str(self.level)\n db.exec_query(sql_query)",
"def cli_jira_webhook_delete(ctx, webhook_id):\n jira_webhook_path = \"rest/webhooks/1.0/webhook\"\n _url = f'{jira_webhook_path}/{webhook_id}'\n _res = ctx.obj['connect'].delete(_url, headers=json_headers, auth=True)\n ctx.obj['writer'].out(_res)",
"def delete(self, context, artifact_id):\n session = api.get_session()\n api.delete(context, artifact_id, session)",
"def clear_alert(self, context, storage_id, alert):\n # Currently not implemented\n pass",
"def cli(ctx,id):\n if not id:\n id = click.prompt(\n click.style('You didn\"t provide the id of the note to delete. Please provide one',fg=\"white\",bg=\"red\"), type=int)\n db = ctx.database()\n cursor = db.cursor()\n query = \"SELECT * from `notes` where id = {}\".format(id)\n cursor.execute(query)\n notes = cursor.fetchall()\n\n if notes:\n if click.confirm(click.style('Are you sure?',fg=\"magenta\")):\n query = \"DELETE from `notes` where id = {}\".format(id)\n cursor.execute(query)\n db.commit()\n click.secho(\"Note with id {} has been deleted\".format(id),fg=\"white\",bg=\"green\")\n else:\n click.secho(\"Nothing deleted. Delete action aborted.\",fg=\"white\",bg=\"green\")\n return\n click.secho(\"No note found with id {}. Delete action aborted.\".format(id),fg=\"white\",bg=\"red\")",
"def history_delete(name, version):\n if click.confirm(format_text('Deleting service history is a permanent action, are you sure you want to delete '\n 'this record?',\n TextStyle.WARNING)):\n click.echo(remove_service_history(name, version))",
"def delete_snapshot(SnapshotId=None):\n pass",
"def delete(log, args):\n log('dataset id: {highlight}{id}{reset}',\n highlight=Fore.GREEN,\n id=args.id,\n reset=Style.RESET_ALL)\n log.warn('delete dataset command coming soon.')",
"def delete(self):\n request = self.triggers_service.delete(path=self._path)\n request.execute()",
"def delete (self, webhook_id: str) -> NoReturn:\r\n try:\r\n return self.api(\r\n method=\"DELETE\",\r\n endpoint=f\"all/{config('TWITTER_ENV_NAME')}/webhooks/{webhook_id}.json\",\r\n )\r\n except Exception as e:\r\n raise e",
"def rpc_campaign_alerts_unsubscribe(self, campaign_id):\n\t\tusername = self.basic_auth_user\n\t\tsession = db_manager.Session()\n\t\tquery = session.query(db_models.AlertSubscription)\n\t\tquery = query.filter_by(campaign_id=campaign_id, user_id=username)\n\t\tsubscription = query.first()\n\t\tif subscription:\n\t\t\tsession.delete(subscription)\n\t\t\tsession.commit()\n\t\tsession.close()\n\t\treturn",
"def delete_bug(self,id):\n self.execute(TABELLE['bugs']['delete'],(id,))",
"def test_remove_alert(self):\n dweepy.set_alert(\n self.my_thing_id,\n ['[email protected]', '[email protected]'],\n test_alert_condition,\n test_key,\n )\n dweepy.remove_alert(self.my_thing_id, test_key)",
"def delete_backup(BackupId=None):\n pass",
"def delete_exception(self, exception_log_id):\n self._db.execute(\"\"\"\n DELETE FROM exception_log\n WHERE exception_log_id = ?\"\"\", (exception_log_id, ))",
"def delete(self):\n backend = self._get_backend()\n if not backend:\n raise NotImplementedError(\"No deleting backend provided\")\n backend.logbook_destroy(self.uuid)",
"def cmd_analysis_delete(logger, session, analysis_id):\n\n aname = session.query(sample.Analysis.name).filter(sample.Analysis.id == analysis_id).one()[0]\n\n review_comment = (\n session.query(workflow.InterpretationLog.review_comment)\n .join(workflow.AnalysisInterpretation)\n .filter(workflow.AnalysisInterpretation.analysis_id == analysis_id)\n .filter(~workflow.InterpretationLog.review_comment.is_(None))\n .order_by(workflow.InterpretationLog.date_created.desc())\n .limit(1)\n .one_or_none()\n )\n\n if review_comment:\n overview_comment = \"overview comment '{}'\".format(review_comment[0])\n else:\n overview_comment = \"no overview comment\"\n\n workflow_status = (\n session.query(\n workflow.AnalysisInterpretation.status, workflow.AnalysisInterpretation.workflow_status\n )\n .filter(workflow.AnalysisInterpretation.analysis_id == analysis_id)\n .order_by(workflow.AnalysisInterpretation.id.desc())\n .limit(1)\n .one()\n )\n\n workflow_status = \"{} ({})\".format(*workflow_status)\n\n answer = input(\n \"Are you sure you want to delete analysis {} with {} in workflow status: {}\\nType 'y' to confirm.\\n\".format(\n aname, overview_comment, workflow_status\n )\n )\n\n if answer == \"y\":\n try:\n delete_analysis(session, analysis_id)\n session.commit()\n logger.echo(\"Analysis {} ({}) deleted successfully\".format(analysis_id, aname))\n except Exception:\n logger.exception(\"Something went wrong while deleting analysis {}\".format(analysis_id))\n else:\n logger.echo(\"Lacking confirmation, aborting...\")",
"def delete_trigger(self, trigger_id):\n self._delete(path=\"triggers/{}\".format(trigger_id))",
"def log_delete(sender, instance, **kwargs):\n if instance.pk is not None:\n changes = model_instance_diff(instance, None)\n\n log_entry = LogEntry.objects.log_create(\n instance,\n action=LogEntry.Action.DELETE,\n changes=json.dumps(changes),\n )\n log_created.send(\n sender=LogEntry,\n old_instance=instance,\n new_instance=None,\n log_instance=log_entry,\n )",
"async def deleteAudit(self, auditid) -> DeleteAuditResponse:\n # send request to grpc server\n # async with Channel(self.host, self.port) as channel:\n # self.stub = UsersStub(channel)\n return await self.stub.DeleteAudit(\n DeleteAuditRequest(_id=auditid\n ))",
"def cncl_auditlog_entry_report(session):\n url = session.get_url('audit', 'main')\n\n req = re.Request('DELETE', url)\n\n return session.send_recv(req, 'Cancelled request to create audit log report.')",
"def endace_delete_archive_task_command(app, args):\r\n jobid = args.get(\"jobid\")\r\n if not re.fullmatch(r'[0-9a-zA-Z\\-]+', jobid) is None:\r\n\r\n # calling delete archive task function of app instance\r\n result = app.delete_archive_task(jobid)\r\n\r\n # create entry context to return to Demisto\r\n output = {'Endace.Archive.Delete(val.JobID == obj.JobID)': result}\r\n table_header = [\"Task\", \"JobID\", \"Status\", \"Error\"]\r\n readable_output = tableToMarkdown('EndaceResult', result, headers=table_header, removeNull=False)\r\n raw_response = result\r\n return readable_output, output, raw_response\r\n else:\r\n raise ValueError(\"Incorrect JOB ID provided\")",
"def delete_alarm():\r\n name = request.args.get('alarm_item')\r\n logging.info(\"Alarm deleted in delete_alarm(): \" + name)\r\n for alarm in alarms:\r\n if alarm['title'] == name:\r\n alarms.remove(alarm)",
"def remove(bot, update, args):\n try:\n my_context_id = ''.join(args)\n my_context = str(update.message.chat_id) + ':' + my_context_id\n user = str(update.message.from_user)\n if my_context not in TIMERS:\n update.message.reply_text(\n 'You have no active timer with code:' + my_context_id)\n return\n if user != USERS[my_context]:\n update.message.reply_text(\"You did not enter that alert!\")\n return\n job = TIMERS[my_context]\n job.schedule_removal()\n del TIMERS[my_context]\n del MESSAGES[my_context]\n del USERS[my_context]\n bot.sendMessage(update.message.chat_id, 'Timer successfully removed! Removed message id: ' +\n my_context_id)\n\n except (IndexError, ValueError):\n update.message.reply_text('Usage: /alert <seconds>')",
"def delete(self, id):\n return self._post(\n request=ApiActions.DELETE.value,\n uri=ApiUri.HOOKS.value,\n params={'id': id}\n )",
"def delete(self, audit_uuid):\n audit_query = AuditTable.delete().where(AuditTable.uuid == audit_uuid)\n if audit_query.execute() == 0:\n abort(404, \"Not Found\")\n else:\n return {}"
] | [
"0.6262019",
"0.5973016",
"0.58163327",
"0.5728264",
"0.56225884",
"0.56118125",
"0.5533732",
"0.55323935",
"0.54949886",
"0.5468852",
"0.54665476",
"0.54625833",
"0.5449366",
"0.53097075",
"0.5272038",
"0.52292824",
"0.5226958",
"0.5222145",
"0.52169687",
"0.52126294",
"0.5178258",
"0.5166924",
"0.51664054",
"0.5161799",
"0.51404315",
"0.5128807",
"0.5123592",
"0.51210415",
"0.5108166",
"0.50934327"
] | 0.82354224 | 0 |
Update the device read community string. This results in an update of the community string on all the rackmounted servers and devices in an enclosure. [Arguments] | def fusion_api_update_appliance_read_community_string(self, body, api=None, headers=None):
return self.rc.update(body, api, headers) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def DeviceCommunity(self, **kwargs):\n\n return self.api_request(self._get_method_fullname(\"DeviceCommunity\"), kwargs)",
"def community_changes(self, community):\n pass",
"def DeviceCommunitySecure(self, **kwargs):\n\n return self.api_request(self._get_method_fullname(\"DeviceCommunitySecure\"), kwargs)",
"def _set_extended_community(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name=\"extended-community\", rest_name=\"extended-community\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-bgp-operational', defining_module='brocade-bgp-operational', yang_type='string', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"extended_community must be of a type compatible with string\"\"\",\n 'defined-type': \"string\",\n 'generated-type': \"\"\"YANGDynClass(base=unicode, is_leaf=True, yang_name=\"extended-community\", rest_name=\"extended-community\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-bgp-operational', defining_module='brocade-bgp-operational', yang_type='string', is_config=False)\"\"\",\n })\n\n self.__extended_community = t\n if hasattr(self, '_set'):\n self._set()",
"def community(self, community):\n\n self._community = community",
"def fusion_api_get_appliance_read_community_string(self, api=None, headers=None):\n return self.rc.get(api=api, headers=headers)",
"def get_snmp_information(self):\n\n snmp_output = self._send_command('/snmp print')\n snmp_community_output = self._send_command(\n '/snmp community print terse')\n\n snmp = parse_output(snmp_output)\n community_list = parse_terse_output(snmp_community_output)\n\n community = {}\n\n for item in community_list:\n community.setdefault(item.get('name'), {\n 'acl': item.get('addresses'),\n 'mode': u'rw' if item.get('write-access') == 'yes' else u'ro'\n })\n\n return {\n 'contact': snmp.get('contact'),\n 'location': snmp.get('location'),\n 'community': community,\n 'chassis_id': ''\n }",
"def do_set_online(self):\n controller_name = self._tel.csp.controller\n controller = con_config.get_device_proxy(controller_name, fast_load=True)\n self._log(f\"Setting adminMode for {controller_name} to '0' (ONLINE)\")\n controller.write_attribute(\"adminmode\", 0)\n for index in range(1, self.nr_of_subarrays + 1):\n subarray_name = self._tel.csp.subarray(index)\n subarray = con_config.get_device_proxy(subarray_name, fast_load=True)\n self._log(f\"Setting adminMode for {subarray_name} to '0' (ONLINE)\")\n subarray.write_attribute(\"adminmode\", 0)",
"def get_weak_snmp_community(devices):\n\n tables = nipper_xml.findall(\"./report/part/[@ref='SECURITYAUDIT']/section/[@ref='SNMP.WEAK.COMMUNITY']/\"\n \"section[@ref='FINDING']/table\")\n\n snmp = {}\n\n # Data Structure\n # <device ID> : {}\n # <Community String>: {}\n # <Weakness>: weakness\n # <Host>: host\n\n for table in tables:\n for device in devices:\n if device in table.get('title').split():\n headings = []\n data = []\n for heading in table.findall('./headings/heading'):\n headings.append(heading.text)\n for row in table.findall('./tablebody/tablerow'):\n for item in row.findall('.tablecell'):\n data.append(item.find('./item').text)\n if DEBUG:\n print info + \"SNMP Weak Community String:\"\n print \"\\t\" + info + \"Headers: %s\" % headings\n print \"\\t\" + info + \"Data: %s\" % data\n raw_input(warn + \"Press enter to continue...\")\n if device not in snmp:\n snmp[device] = {}\n c = headings.index('Community')\n w = headings.index('Weakness')\n snmp[device][data[c]] = {headings[w]: data[w]}\n if 'Host' in headings:\n h = headings.index('Host')\n snmp[device][data[c]].update({headings[h]: data[h]})\n\n if DEBUG:\n print \"SNMP Weak Community String Dict:\"\n print info, snmp\n raw_input(warn + \"Press enter to continue...\")\n\n return snmp",
"def set_community(self, community):\n assert community is None or isinstance(community, Community), type(community)\n self._community = community\n if community:\n self._dispersy = community.dispersy",
"def update_partition(c, r_d, numnodes):\n host, port, f = ClusterCFG.parse_uri(c)\n\n # Create our socket.\n sock = Network.open_client(host, port)\n if ErrorHandle.is_error(sock):\n return ErrorHandle.wrap_error_tag('Socket could not be established.')\n\n # Pickle our command list ('K', f, r_d, numnodes), and send our message.\n Network.write(sock, ['K', f, r_d, numnodes])\n\n # Wait for a response to be sent back, and record this response.\n net_handler = lambda e: Network.close_wrapper(e, ErrorHandle.default_handler, sock)\n response = Network.read(sock, net_handler)\n\n # If an error exists, return the error.\n if ErrorHandle.is_error(response):\n return response\n\n # Otherwise, return the success message.\n return 'Success'",
"def update_compliance(self):\r\n self.compliance_voltage = str(self.ComplianceVoltage.value())\r\n #self.cmd = None\r\n if self.connected:\r\n self.cmd = \"CURR:COMP \" + self.compliance_voltage\r\n self.I_source.write(self.cmd)",
"async def update_cog(self):\n\n # get the model data for the role assigner object\n data = await self.get_objects(\n model=RoleAssigner, filter={\"bot__name\": str(self.bot_name)}\n )\n\n # role assigner object\n data = data[0]\n\n # fetch the discord message\n guild_id = await self.get_deep_data(data, \"bot__server__uid\")\n\n guild = self.get_guild(int(guild_id))\n channel = self.get_channel(guild, int(data.message.cuid))\n message = await channel.fetch_message(int(data.message.uid))\n self.message_id = int(data.message.uid)\n\n # update the message\n await message.edit(content=\"_ _\", embed=self.create_message_embed(data))\n\n await self.update_reactions(message, data)",
"def ipmitool_shell_reconnect(self):\n\n if not self.connected:\n self.ipmitool_shell_connect()\n if self.crate.crate_resetting and not self.crate.fru_rescan:\n print(\"ipmitool_shell_reconnect: 30 s wait to allow MCH to update sensor list\")\n time.sleep(30.0)\n # Reread the card list\n print(\"ipmitool_shell_reconnect: Updating card and sensor list\")\n self.crate.populate_fru_list()\n # Reset flags\n if self.crate.fru_rescan:\n self.crate.fru_rescan = False\n if self.crate.crate_resetting:\n self.crate.crate_resetting = False\n print(\"ipmitool_shell_reconnect: Lists updated\")\n print(\"ipmitool_shell_reconnect: Reading data values, this will take a few seconds\")\n self.comms_timeout = False",
"def add_communites(self):\n\n query = '''\n MATCH (c1:)-[r:INTERACTS]->(c2:)\n RETURN c1.name, c2.name, r.weight AS weight\n '''\n ig = IGraph.TupleList(self.graph.run(query), weights=True)\n\n clusters = IGraph.community_walktrap(ig, weights=\"weight\").as_clustering()\n\n nodes = [{\"name\": node[\"name\"]} for node in ig.vs]\n for node in nodes:\n idx = ig.vs.find(name=node[\"name\"]).index\n node[\"community\"] = clusters.membership[idx]\n\n write_clusters_query = '''\n UNWIND {nodes} AS n\n MATCH (c:) WHERE c.name = n.name\n SET c.community = toInt(n.community)\n '''\n\n self.graph.run(write_clusters_query, nodes=nodes)",
"def community_in(self, community_in):\n\n self._community_in = community_in",
"def community(self):\n return self._community",
"def community_id(self, community_id):\n\n self._community_id = community_id",
"def _clse(self, adb_info):\n msg = AdbMessage(constants.CLSE, adb_info.local_id, adb_info.remote_id)\n self._io_manager.send(msg, adb_info)\n self._read_until([constants.CLSE], adb_info)",
"def update(self):\n result = modbus.HUB.read_coils(self._slave, self._coil, 1)\n if not result:\n _LOGGER.error(\n 'No response from modbus slave %s coil %s',\n self._slave,\n self._coil)\n return\n self._is_on = bool(result.bits[0])",
"def readNetstring(self, size=2048):\n # Read until we have at least 4 bytes\n while not self.recvLength():\n self.__data += self.sock.recv(size)\n while not self.recvData():\n self.__data += self.sock.recv(size)\n while not self.recvComma():\n self.__data += self.sock.recv(size)\n string = self.__buffer\n self.__buffer = ''\n if self.verbose:\n print \"controller:\",string\n return string",
"def updateString(olddata,newdata,concater):\r\n\r\n if olddata==\"\":\r\n return str(newdata)\r\n else:\r\n return str(olddata + concater + newdata)",
"def update_clients():\n if rabbit.leader_node_is_ready() or rabbit.client_node_is_ready():\n for rid in relation_ids('amqp'):\n for unit in related_units(rid):\n amqp_changed(relation_id=rid, remote_unit=unit)",
"def _set_send_community(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=send_community.send_community, is_container='container', presence=False, yang_name=\"send-community\", rest_name=\"send-community\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Send community attribute to this neighbor', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"send_community must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=send_community.send_community, is_container='container', presence=False, yang_name=\"send-community\", rest_name=\"send-community\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Send community attribute to this neighbor', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__send_community = t\n if hasattr(self, '_set'):\n self._set()",
"async def update(self):\n \n logging.info('updating state...')\n info = await self.send_command(\"$dat\", \"upd01-\")\n if not info:\n msg = \"Unable to get data about windows and scenes from Gateway\"\n return msg\n\n logging.debug('hub response is :')\n logging.debug(info)\n prefix = None\n lines = re.split(r'[\\n\\r]+', info)\n\n for line in lines:\n line = line.strip()\n if not prefix:\n prefix = line[:2]\n elif not line.startswith(prefix):\n continue\n else:\n line = line[2:]\n\n if line.startswith(\"$cr\"):\n # name of room\n room_id = line[3:5]\n room_name = line.split('-')[-1].strip()\n if(not room_name in self.rooms):\n logging.debug('creating room '+room_name)\n self.rooms[room_name] = HunterDouglasPlatinumRoom(hub=self, name=room_name, id=int(room_id))\n elif line.startswith(\"$cm\"):\n # name of scene\n scene_id = line[3:5]\n scene_name = line.split('-')[-1].strip()\n if(not scene_name in self.scenes):\n logging.debug('creating scene '+scene_name)\n self.scenes[scene_name] = HunterDouglasPlatinumScene(hub=self, name=scene_name, id=int(scene_id))\n elif line.startswith(\"$cs\"):\n # name of a shade\n parts = line.split('-')\n shade_id = line[3:5]\n shade_name = parts[-1].strip()\n room_id = parts[1]\n if(not shade_name in self.shades):\n logging.debug('creating shade '+shade_name)\n self.shades[shade_name] = HunterDouglasPlatinumShade(hub=self, name=shade_name, id=int(shade_id), room=int(room_id))\n elif line.startswith(\"$cp\"):\n # state of a shade\n shade_id = line[3:5]\n state = line[-4:-1]\n state = int(state)\n shade = self.get_shade(id=int(shade_id))\n logging.debug('updating shade state for shade '+shade_id+' to '+str(state)+' for shade '+str(shade))\n if shade:\n shade.set_state(state)\n return \"\"",
"def undo_set_online(self):\n controller_name = self._tel.csp.controller\n controller = con_config.get_device_proxy(controller_name, fast_load=True)\n self._log(f\"Setting adminMode for {controller_name} to '1' (OFFLINE)\")\n controller.write_attribute(\"adminmode\", 1)\n for index in range(1, self.nr_of_subarrays + 1):\n subarray_name = self._tel.csp.subarray(index)\n subarray = con_config.get_device_proxy(subarray_name, fast_load=True)\n self._log(f\"Setting adminMode for {subarray_name} to '1' (OFFLINE)\")\n subarray.write_attribute(\"adminmode\", 1)",
"def _notify_read(self, cuds_object):",
"def assign_communities(graph):\n communities = nx.algorithms.community\\\n .greedy_modularity_communities(nx.Graph(graph))\n for node in graph.nodes:\n graph.nodes[node]['community'] = [i for i,c in enumerate(communities)\n if node in c][0]\n graph.graph['modularity'] = nx.algorithms.community.quality\\\n .modularity(nx.Graph(graph),\n communities)",
"def update(self):\n self._device = self._geizhals.parse()",
"def UpdateOdom(self, msg):\n self.odom = msg"
] | [
"0.51543146",
"0.50361717",
"0.48905048",
"0.48387185",
"0.48207247",
"0.48181173",
"0.48101094",
"0.46338174",
"0.4612225",
"0.46057886",
"0.4605115",
"0.45638037",
"0.44801793",
"0.44793186",
"0.44500574",
"0.44484317",
"0.43733254",
"0.4364182",
"0.43548748",
"0.43480814",
"0.43065023",
"0.42727503",
"0.4271444",
"0.42627043",
"0.42602825",
"0.42581812",
"0.42550063",
"0.4236117",
"0.42241046",
"0.42216036"
] | 0.61829317 | 0 |
Retrieves the global community string. [Example] ${resp} = Fusion Api Get Appliance Read Community String | | | def fusion_api_get_appliance_read_community_string(self, api=None, headers=None):
return self.rc.get(api=api, headers=headers) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def community(self):\n return self._community",
"def fusion_api_update_appliance_read_community_string(self, body, api=None, headers=None):\n return self.rc.update(body, api, headers)",
"def _get_extended_community(self):\n return self.__extended_community",
"def get_latest_community(self):\n return GetLatestCommunityRequest(self)",
"def get_community_info(self, id=None):\n req = GetCommunityInfoRequest(self)\n req.id = id\n return req",
"def DeviceCommunity(self, **kwargs):\n\n return self.api_request(self._get_method_fullname(\"DeviceCommunity\"), kwargs)",
"def _community(G, u, community):\n node_u = G.node[u]\n try:\n return node_u[community]\n except KeyError:\n raise nx.NetworkXAlgorithmError('No community information')",
"def test_get_community(self):\n get_response = lambda: self.client.get(self.url)\n\n self.assert_authentication_required(get_response)\n self.assert_membership_required(get_response)\n\n # alice is a regular group member, she sees the data:\n self.login_as(\"alice\")\n with self.assertNumQueries(4): # (3) member check (4) get data\n response = get_response()\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n self.assertEqual(list(response.data.keys()), self.expected_keys)\n self.assertEqual(response.data[\"name\"], self.GROUP)",
"def get_snmp_information(self):\n\n snmp_output = self._send_command('/snmp print')\n snmp_community_output = self._send_command(\n '/snmp community print terse')\n\n snmp = parse_output(snmp_output)\n community_list = parse_terse_output(snmp_community_output)\n\n community = {}\n\n for item in community_list:\n community.setdefault(item.get('name'), {\n 'acl': item.get('addresses'),\n 'mode': u'rw' if item.get('write-access') == 'yes' else u'ro'\n })\n\n return {\n 'contact': snmp.get('contact'),\n 'location': snmp.get('location'),\n 'community': community,\n 'chassis_id': ''\n }",
"async def snmp_v2c_get(\n self,\n address: str,\n community: str,\n oid: str,\n timeout: Optional[int] = 10,\n return_error: bool = False,\n ):\n self.logger.debug(\"SNMP v2c GET %s %s\", address, oid)\n message = \"\"\n try:\n result = await snmp_get(\n address=address,\n oids=oid,\n community=community,\n version=SNMP_v2c,\n tos=config.activator.tos,\n timeout=timeout,\n )\n self.logger.debug(\"SNMP GET %s %s returns %s\", address, oid, result)\n result = smart_text(result, errors=\"replace\") if result else result\n except SNMPError as e:\n metrics[\"error\", (\"type\", \"snmp_v2_error\")] += 1\n result, message = None, repr(e)\n self.logger.debug(\"SNMP GET %s %s returns error %s\", address, oid, e)\n except Exception as e:\n result, message = None, str(e)\n self.logger.debug(\"SNMP GET %s %s returns unknown error %s\", address, oid, e)\n if return_error:\n return result, message\n return result",
"def fusion_api_get_security_modeName(self, uri=None, api=None, headers=None, param=''):\n return self.security_standards.get(uri=uri, api=api, headers=headers, param=param)",
"def _get_send_community(self):\n return self.__send_community",
"def _find_community_resources(dataset_id):\n logging.debug(\"Searching community ressource in dataset %s\", dataset_id)\n url = f\"{DATAGOUV_API}/datasets/community_resources/\"\n params = {\"dataset\": dataset_id, \"organization\": TRANSPORT_ORGANIZATION_ID, \"page_size\": 50}\n ret = requests.get(url, params=params)\n ret.raise_for_status()\n\n data = ret.json()[\"data\"]\n\n if data is not None:\n return data\n raise Exception(\n f\"Searched community ressources of dataset {dataset_id}, could not understand response\"\n )",
"def get_device_value(ip, value, community_string=\"public\"):\n\n iterator = get_iterator(ip, value, community_string)\n\n error_indication, error_status, error_index, var_binds = next(iterator)\n\n if error_indication: # SNMP engine errors\n print(error_indication)\n else:\n if error_status: # SNMP agent errors\n print(\n '%s at %s' % (error_status.prettyPrint(), var_binds[int(error_index) - 1] if error_index else '?'))\n else:\n for varBind in var_binds: # SNMP response contents\n return str(varBind).split(\"=\")[1].replace(\" \", \"\")",
"def datasets() -> Community:\n\n global _COMMUNITY # noqa: PLW0603\n\n if _COMMUNITY is None:\n _COMMUNITY = Community.from_id(Configuration().community)\n\n return _COMMUNITY",
"def get_weak_snmp_community(devices):\n\n tables = nipper_xml.findall(\"./report/part/[@ref='SECURITYAUDIT']/section/[@ref='SNMP.WEAK.COMMUNITY']/\"\n \"section[@ref='FINDING']/table\")\n\n snmp = {}\n\n # Data Structure\n # <device ID> : {}\n # <Community String>: {}\n # <Weakness>: weakness\n # <Host>: host\n\n for table in tables:\n for device in devices:\n if device in table.get('title').split():\n headings = []\n data = []\n for heading in table.findall('./headings/heading'):\n headings.append(heading.text)\n for row in table.findall('./tablebody/tablerow'):\n for item in row.findall('.tablecell'):\n data.append(item.find('./item').text)\n if DEBUG:\n print info + \"SNMP Weak Community String:\"\n print \"\\t\" + info + \"Headers: %s\" % headings\n print \"\\t\" + info + \"Data: %s\" % data\n raw_input(warn + \"Press enter to continue...\")\n if device not in snmp:\n snmp[device] = {}\n c = headings.index('Community')\n w = headings.index('Weakness')\n snmp[device][data[c]] = {headings[w]: data[w]}\n if 'Host' in headings:\n h = headings.index('Host')\n snmp[device][data[c]].update({headings[h]: data[h]})\n\n if DEBUG:\n print \"SNMP Weak Community String Dict:\"\n print info, snmp\n raw_input(warn + \"Press enter to continue...\")\n\n return snmp",
"def getResponseString(retCode):\n return (_getResponseString(retCode))",
"def get_site_status_msg(course_id):\r\n try:\r\n if os.path.isfile(settings.STATUS_MESSAGE_PATH):\r\n with open(settings.STATUS_MESSAGE_PATH) as f:\r\n content = f.read()\r\n else:\r\n return None\r\n\r\n status_dict = json.loads(content)\r\n msg = status_dict.get('global', None)\r\n if course_id in status_dict:\r\n msg = msg + \"<br>\" if msg else ''\r\n msg += status_dict[course_id]\r\n\r\n return msg\r\n except:\r\n log.exception(\"Error while getting a status message.\")\r\n return None",
"def _get(self):\n path = self.router.central_core\n response = self.request(method=\"get\", path=path)\n return response",
"def get_course(dept, num):\n \n # semester: 10 = Fall, 20 = Spring, 30 = Summer\n host = \"https://selfservice.mypurdue.purdue.edu/prod/bwckctlg.p_disp_course_detail\"\n query = \"?cat_term_in={term}&subj_code_in={dept}&crse_numb_in={num}\".format(term=\"201620\", dept=dept, num=num)\n urlfetch.set_default_fetch_deadline(600)\n result = urlfetch.fetch(host+query)\n \n if result.status_code == 200:\n tree = html.fromstring(result.content)\n text = tree[1][4][2].text_content() # get just the relevant text of the webpage \n\n # remove unicode non-breaking spaces to allow regexing\n text = text.replace(u'\\xa0',u' ')\n return text",
"def get_device_value_oid(ip, oid, community_string=\"public\"):\n\n iterator = get_iterator(ip, oid, community_string, is_oid=True)\n\n error_indication, error_status, error_index, var_binds = next(iterator)\n\n if error_indication: # SNMP engine errors\n print(error_indication)\n else:\n if error_status: # SNMP agent errors\n print(\n '%s at %s' % (error_status.prettyPrint(), var_binds[int(error_index) - 1] if error_index else '?'))\n else:\n for varBind in var_binds: # SNMP response contents\n return str(varBind).split(\"=\")[1].replace(\" \", \"\")",
"def get_info():\n message = \"GET information about glancesync server\"\n\n logger_api.info(message)\n\n message = '''\n {\n \"id\": \"%s\",\n \"owner\": \"%s\",\n \"status\": \"%s\",\n \"version\": \"%s\",\n \"updated\": \"%s\",\n \"runningfrom\": \"%s\",\n \"href\": \"%s\"\n }\n ''' % (ID, OWNER, STATUS, VERSION, UPDATED, RUNNINGFROM, API_INFO_URL)\n\n resp = make_response(message, httplib.OK)\n resp.headers[SERVER_HEADER] = SERVER\n resp.headers[CONTENT_TYPE] = JSON_TYPE\n\n logger_api.info('Return result: %s', message)\n\n return resp",
"def community_request(current_user, user_id):\n community = Community(community_id=user_id, user_id=current_user.id)\n community.save()\n return response('success', 'Successfully joined community', 200)",
"def _get_org(self, context, org):\r\n try:\r\n rtn = {'context': context,\r\n 'org': org,\r\n 'space': self._bbreader.cache[context][org]['space'],\r\n 'org_config': self._bbreader.cache[context][org]['org'],\r\n }\r\n except KeyError:\r\n raise RequestError('No such context/org: {}/{}'.format(context, org))\r\n\r\n return rtn",
"def fusion_api_get_current_security_mode(self, uri=None, api=None, headers=None, param='/modes/current-mode'):\n return self.security_standards.get(uri=uri, api=api, headers=headers, param=param)",
"def _set_extended_community(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name=\"extended-community\", rest_name=\"extended-community\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-bgp-operational', defining_module='brocade-bgp-operational', yang_type='string', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"extended_community must be of a type compatible with string\"\"\",\n 'defined-type': \"string\",\n 'generated-type': \"\"\"YANGDynClass(base=unicode, is_leaf=True, yang_name=\"extended-community\", rest_name=\"extended-community\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-bgp-operational', defining_module='brocade-bgp-operational', yang_type='string', is_config=False)\"\"\",\n })\n\n self.__extended_community = t\n if hasattr(self, '_set'):\n self._set()",
"def _get_org(self, org_name):\n org = SpokeOrg()\n result = org.get(org_name)\n if result == []:\n msg = \"Can't find org %s\" % org_name\n self.log.error(msg)\n raise error.NotFound(msg) \n return result",
"def community_stats(request):\n stats = cache.get(STATS_CACHE_KEY, None)\n if not stats:\n\n stats = fetch(PEOPLE_STATS_URL)\n packages_data = fetch(PACKAGES_STATS_URL)\n if 'meta' in packages_data:\n stats.update({'packages': packages_data['meta']['total_count']})\n\n stats = {'community_stats': stats}\n\n cache.add(STATS_CACHE_KEY, stats, 60 * 60 * 12) # for half a day\n\n return stats",
"def get_organisation_description() -> pd.DataFrame:\n return GETTER.organisationdescription",
"def _get_community_platform_details(community_platform_name: str) -> Dict[str, Any]:\n try:\n importlib.import_module(name=\"scrapli_community\")\n except ModuleNotFoundError as exc:\n title = \"Module not found!\"\n message = (\n \"Scrapli Community package is not installed!\\n\"\n \"To resolve this issue, install the transport plugin. You can do this in one of \"\n \"the following ways:\\n\"\n \"1: 'pip install -r requirements-community.txt'\\n\"\n \"2: 'pip install scrapli[community]'\"\n )\n warning = format_user_warning(title=title, message=message)\n raise ScrapliModuleNotFound(warning) from exc\n\n try:\n # replace any underscores in platform name with \".\"; should support any future platforms\n # that dont have \"child\" os types -- i.e. just \"cisco\" instead of \"cisco_iosxe\"\n scrapli_community_platform = importlib.import_module(\n name=f\"scrapli_community.{community_platform_name.replace('_', '.')}\"\n )\n except ModuleNotFoundError as exc:\n title = \"Module not found!\"\n message = (\n f\"Scrapli Community platform '{community_platform_name}` not found!\\n\"\n \"To resolve this issue, ensure you have the correct platform name, and that a scrapli \"\n \" community platform of that name exists!\"\n )\n warning = format_user_warning(title=title, message=message)\n raise ScrapliModuleNotFound(warning) from exc\n\n platform_details_original = getattr(scrapli_community_platform, \"SCRAPLI_PLATFORM\", {})\n if not platform_details_original:\n msg = \"Community platform missing required attribute `SCRAPLI_PLATFORM`\"\n raise ScrapliException(msg)\n platform_details: Dict[str, Any] = deepcopy(platform_details_original)\n return platform_details"
] | [
"0.609412",
"0.60336554",
"0.5709522",
"0.5464909",
"0.5363651",
"0.5267703",
"0.52122504",
"0.51907164",
"0.5050455",
"0.5021374",
"0.49875236",
"0.49829078",
"0.49786323",
"0.49206883",
"0.48999393",
"0.48586422",
"0.47954577",
"0.47920048",
"0.47856095",
"0.47722608",
"0.47715706",
"0.47150564",
"0.47020358",
"0.46918243",
"0.4678866",
"0.46778765",
"0.4672875",
"0.4668225",
"0.4667071",
"0.466118"
] | 0.7556748 | 0 |
Retrieves the EULA status [Example] ${resp} = Fusion Api Eula Status | | | def fusion_api_eula_status(self, api=None, headers=None):
return self.eula.save(api, headers) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_status():\n data = {\n 'status': 'up',\n }\n jsn = json.dumps(data)\n\n resp = Response(jsn, status=200, mimetype='application/json')\n\n return resp",
"def get_patient_status():\n r = requests.get(\"http://vcm-7474.vm.duke.edu:5000/api/heart_rate/3\")\n print(r.text)",
"def status():\r\n\r\n url = '{}/status'.format(USGS_API_ENDPOINT)\r\n payload = {\r\n \"jsonRequest\": payloads.status()\r\n }\r\n logger.debug(\"API call URL: {}\".format(url))\r\n logger.debug(\"API call payload: {}\".format(payload))\r\n response = requests.post(url, payload).json()\r\n logger.debug(\"Received response:\\n{}\".format(json.dumps(response, indent=4)))\r\n _catch_usgs_error(response)\r\n\r\n return response",
"def axapi_status(self, result):\n try:\n status = result.json()['response']['status']\n if status == 'fail':\n error = '\\n ERROR: ' + result.json()['response']['err']['msg']\n return error, status\n else:\n return status\n except:\n good_status_codes = ['<Response [200]>', '<Response [204]>']\n status_code = str(result)\n if status_code in good_status_codes:\n return 'OK'\n else:\n return status_code",
"def status_check():\n return {\"status\": \"OK\"}",
"def status(self):\n r = requests.get('/'.join([self.base_url, self.ENDPOINT_STATUS]))\n return r.json()",
"def fusion_api_get_appliance_status(self, api=None, headers=None):\n return self.info.get_status(api=api, headers=headers)",
"def getIssStatus():\n issURL = 'http://api.open-notify.org/iss-now.json'\n r = requests.get(issURL)\n return r.json()",
"def show_status():\n return jsonify({\"status\": \"OK\"})",
"def get_status():\n return \"OK\" # defaults to a 200 HTML status return code",
"def getStatus():\n return json.dumps({'camera': Camera.status(), 'rover': rover.status()}), 200",
"def status():\n return jsonify({\"Status\": \"Ok\"})",
"def status():\n return jsonify({\"status\": \"OK\"})",
"def readresp(self, cmd):\n\t\tdata = self.read(22)\n\t\tresponse = data[0]\n\t\t#print \"laser response\", self.mylaser, response\n\t\tgstt.lstt_dacanswers[self.mylaser] = response\n\t\tcmdR = data[1]\n\t\tstatus = Status(data[2:])\n\t\tr.set('/lack/'+str(self.mylaser), response)\n\n\t\tif cmdR != cmd:\n\t\t\traise ProtocolError(\"expected resp for %r, got %r\"\n\t\t\t\t% (cmd, cmdR))\n\n\t\tif response != \"a\":\n\t\t\traise ProtocolError(\"expected ACK, got %r\"\n\t\t\t\t% (response, ))\n\n\t\tself.last_status = status\n\t\treturn status",
"def get_status(self):\n r = requests.get(self.base_url + '/status')\n return r.json()",
"def test_get_status(self):\n response = self.client.open(\n '/v1/status',\n method='GET')\n self.assert200(response,\n 'Response body is : ' + response.data.decode('utf-8'))",
"async def get_status():",
"def status():\n return 'OK'",
"def status():\n return jsonify(service='scwr-api-requirements', status='ok')",
"def _do_get_status(self):\n logging.info(__name__ + ' : Get status of the device.')\n result = self._execute('X')\n usage = {\n 0: \"Channel not in use\",\n 1: \"Channel used for Nitrogen level\",\n 2: \"Channel used for Helium Level (Normal pulsed operation)\",\n 3: \"Channel used for Helium Level (Continuous measurement)\",\n 9: \"Error on channel (Usually means probe unplugged)\"\n }\n # current_flowing = {\n # 0 : \"Curent not flowing in Helium Probe Wire\",\n # 1 : \"Curent not flowing in Helium Probe Wire\"\n # }\n # auto_fill_status = {\n # 00 : \"End Fill (Level > FULL)\",\n # 01 : \"Not Filling (Level < FULL, Level > FILL)\",\n # 10 : \"Filling (Level < FULL, Level > FILL)\",\n # 11 : \"Start Filling (Level < FILL)\"\n # }\n return usage.get(int(result[1]), \"Unknown\")",
"def getStatus():",
"def get_avalanche_status():\n avalanche = AvalancheWarningScraper(\"http://lawiny.topr.pl/\")\n avalanche_status = avalanche.navigate_and_extract_avalanche_data()\n return avalanche_status",
"def _read_status(self):\n results = self.status_table.query_items({'api_version': self.api_version})\n if not results:\n return None\n else:\n return results[0]",
"def _service_status(res, ctx):\n\n if _has_error_code(res):\n return print_errors(res, ctx)\n\n template = '''\\\nname: {name}\nconfig-location: {config_location}\nlog-location: {log_location}\nscript-location: {script_location}\nrunning: {running}\nenabled: {enabled}\ncontainer-running: {container_running}\ndeployment: {deployment}\nconfig: {config}'''\n\n result = template.format(name=res['name'],\n config_location=res['config_location'],\n log_location=res['log_location'],\n script_location=res['script_location'],\n running=res['running'],\n enabled=res['enabled'],\n container_running=res['container_status'].get('running', False),\n deployment=res['deployment'],\n config=res['config'])\n\n if 'callback_uri' in res:\n result += \"\\ncallback-uri: {callback_uri}\".format(callback_uri=res['callback_uri'])\n\n return result",
"def status(_):\n return {\"status\": \"ok\"}",
"def status():\n (code, message) = rest_api.status(request)\n if (code == 200):\n return 'Running'\n else:\n abort(code)",
"def test_flask_usage_status_code(self):\n response = self.client.get(url_for(\"main.faq\"))\n flaskUsageCode = FlaskUsage.query.filter_by(path=\"/faq\").order_by(\"datetime\").first()\n if flaskUsageCode is not None:\n self.assertEqual(flaskUsageCode.status, response.status_code)",
"def msgStatus():\n return jsonify({\"status\": \"OK\"})",
"def get_status() -> None:\n assert scraper.get_status() == True",
"def get_health_check(self):\n return util.create_response(output=\"OK\")"
] | [
"0.63770556",
"0.6142492",
"0.61247075",
"0.60678786",
"0.60083723",
"0.59488666",
"0.59464103",
"0.59203297",
"0.5910582",
"0.59019333",
"0.58966845",
"0.58875823",
"0.5884239",
"0.5875305",
"0.5871708",
"0.5823951",
"0.5812663",
"0.58030885",
"0.5777332",
"0.57773304",
"0.5776965",
"0.57505554",
"0.57416147",
"0.5728641",
"0.57161444",
"0.56211793",
"0.5620883",
"0.56190675",
"0.5615098",
"0.5594606"
] | 0.6223168 | 1 |
Gets the status of the upgrade task once after the upgrade completes [Arguments] [Example] ${resp} = Fusion Api Get Appliance Firmware Upgrade Status | | | def fusion_api_get_appliance_firmware_upgrade_status(self, api=None, headers=None):
param = '/notification'
return self.appfirmware.get(api=api, headers=headers, param=param) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"async def get_status():",
"def get_status():\n data = {\n 'status': 'up',\n }\n jsn = json.dumps(data)\n\n resp = Response(jsn, status=200, mimetype='application/json')\n\n return resp",
"def get_firmware_update_status(self):\n\n response = self.execute_command(CMD_GET_FIRMWARE_UPDATE_STATUS)[0]\n inprogress = (response & 0x80) == 0x80\n return {\n \"inprogress\": inprogress,\n \"error\": response & 0x7f,\n }",
"def status():\n (code, message) = rest_api.status(request)\n if (code == 200):\n return 'Running'\n else:\n abort(code)",
"def status(self,request):\n\t\t# ----------- DEBUG -----------------\n\t\tMODULE.info(\"updater/status invoked with:\")\n\t\tpp = pprint.PrettyPrinter(indent=4)\n\t\tst = pp.pformat(request.options).split(\"\\n\")\n\t\tfor s in st:\n\t\t\t\tMODULE.info(\" << %s\" % s)\n\t\t# -----------------------------------\n\n\t\tresponse = []\n\t\ttry:\n\t\t\tresult = {}\n\n\t\t\t# be as current as possible.\n\t\t\twhat = 'reinitializing UniventionUpdater'\n\t\t\tself.uu.ucr_reinit()\n\n\t\t\twhat = 'reloading registry'\n\t\t\tself.ucr.load()\n\n\t\t\twhat = 'getting UCS version'\n\t\t\tresult['ucs_version'] = self.uu.get_ucs_version()\n\n\t\t\t# if nothing is returned -> convert to empty string.\n\t\t\twhat = 'querying available release updates'\n\t\t\tresult['release_update_available'] = self.uu.release_update_available()\n\t\t\tif result['release_update_available'] is None:\n\t\t\t\tresult['release_update_available'] = ''\n\n\t\t\twhat = 'querying update-blocking components'\n\t\t\tblocking_component = self.uu.get_all_available_release_updates()[1]\n\t\t\tif not blocking_component:\n\t\t\t\tblocking_component = ''\n\t\t\tresult['release_update_blocking_component'] = blocking_component\n\n\t\t\twhat = 'querying appliance mode'\n\t\t\tresult['appliance_mode'] = self.ucr.is_true('server/appliance')\n\n\t\t\t# current errata patchlevel, converted to int, 0 if unset.\n\t\t\twhat = 'querying errata patchlevel'\n\t\t\tresult['erratalevel'] = 0\n\t\t\ttmp = self.ucr.get('version/erratalevel')\n\t\t\tif tmp:\n\t\t\t\tresult['erratalevel'] = int(tmp)\n\n\t\t\twhat = \"querying availability for easy mode\"\n\t\t\tresult['easy_mode'] = self.ucr.is_true('update/umc/updateprocess/easy', False)\n\n\t\t\tif result['easy_mode']:\n\t\t\t\t# updates/available should reflect the need for an update\n\t\t\t\teasy_update_available = self.ucr.is_true('update/available', False)\n\t\t\t\t# but dont rely on ucr! update/available is set during univention-upgrade --check\n\t\t\t\t# but when was the last time this was run?\n\n\t\t\t\t# release update\n\t\t\t\teasy_update_available = easy_update_available or result['release_update_available']\n\t\t\t\t# if no update seems necessary perform a real (expensive) check nonetheless\n\t\t\t\teasy_update_available = easy_update_available or self.uu.component_update_available()\n\t\t\t\tresult['easy_update_available'] = bool(easy_update_available)\n\t\t\telse:\n\t\t\t\tresult['easy_update_available'] = False\n\n\t\t\t# Component counts are now part of the general 'status' data.\n\t\t\twhat = \"counting components\"\n\t\t\tc_count = 0\n\t\t\te_count = 0\n\t\t\tfor comp in self.uu.get_all_components():\n\t\t\t\tc_count = c_count+1\n\t\t\t\tif self.ucr.is_true('%s/%s' % (COMPONENT_BASE,comp),False):\n\t\t\t\t\te_count = e_count+1\n\t\t\tresult['components'] = c_count\n\t\t\tresult['enabled'] = e_count\n\n\t\t\t# HACK: the 'Updates' form polls on the serial file\n\t\t\t#\t\tto refresh itself. 
Including the serial value\n\t\t\t#\t\tinto the form helps us to have a dependent field\n\t\t\t#\t\tthat can trigger the refresh of the \"Releases\"\n\t\t\t#\t\tcombobox and the 'package updates available' field.\n\t\t\tresult['serial'] = self._serial_file.timestamp()\n\n\t\t\t# HACK: together with the hack in 'WatchedFile' regarding\n\t\t\t#\t\tmtime changes without content changes, the above 'serial'\n\t\t\t#\t\tvalue might not change even if we need a refresh...\n\t\t\t#\t\tso we include a dummy field that returns the\n\t\t\t#\t\tcurrent time\n\t\t\tresult['timestamp'] = int(time())\n\n\t\t\t# Any real installer action can set the following variable\n\t\t\t# to indicate that the computer should be rebooted before\n\t\t\t# proceeding.\n\t\t\tresult['reboot_required'] = self.ucr.is_true('update/reboot/required',False)\n\n\t\texcept Exception, ex:\n\t\t\ttyp = str(type(ex)).strip('<>')\n\t\t\tmsg = '[while %s] [%s] %s' % (what,typ,str(ex))\n\t\t\tresult['message'] = msg\n\t\t\tresult['status'] = 1\n\t\t\tMODULE.error(msg)\n\n\t\tresponse.append(result)\n\t\t# ----------- DEBUG -----------------\n\t\tMODULE.info(\"updater/status returns:\")\n\t\tpp = pprint.PrettyPrinter(indent=4)\n\t\tst = pp.pformat(response).split(\"\\n\")\n\t\tfor s in st:\n\t\t\t\tMODULE.info(\" >> %s\" % s)\n\t\t# -----------------------------------\n\n\t\tself.finished(request.id,response)",
"def status():\n return jsonify(service='scwr-api-requirements', status='ok')",
"def getStatus():",
"def _do_get_status(self):\n logging.info(__name__ + ' : Get status of the device.')\n result = self._execute('X')\n usage = {\n 0: \"Channel not in use\",\n 1: \"Channel used for Nitrogen level\",\n 2: \"Channel used for Helium Level (Normal pulsed operation)\",\n 3: \"Channel used for Helium Level (Continuous measurement)\",\n 9: \"Error on channel (Usually means probe unplugged)\"\n }\n # current_flowing = {\n # 0 : \"Curent not flowing in Helium Probe Wire\",\n # 1 : \"Curent not flowing in Helium Probe Wire\"\n # }\n # auto_fill_status = {\n # 00 : \"End Fill (Level > FULL)\",\n # 01 : \"Not Filling (Level < FULL, Level > FILL)\",\n # 10 : \"Filling (Level < FULL, Level > FILL)\",\n # 11 : \"Start Filling (Level < FILL)\"\n # }\n return usage.get(int(result[1]), \"Unknown\")",
"def health_check():\n ret = {\"Status\": 200, \"Msg\": \"Service is Up\"}\n return jsonify(ret)",
"async def get_status(self) -> str:\n return await self.hw_device.status()",
"def status():\n return 'OK'",
"def _get_status(self):\n return self.__status",
"def getUpgrade(self):\n\t\tquery = ''\n\t\tconn = self.get_connection()\n\t\theaders = { 'Content-type' : 'application/json', 'Authorization' : 'A10 %s' %self.sessionid}\n\t\tconn.request('GET', self.get_path() + '/' + query, headers=headers)\n\t\tresponse = conn.getresponse()\n\t\texpected_status = 200\n\t\terrors = {500: 'An unexpected runtime exception', 404: 'Specified upgrade does not exist'}\n\t\tpayload = self.get_output(response, expected_status, errors)\n\t\tconn.close()\n\t\tif self.debug:\n\t\t\tprint 'payload:', payload\n\t\tif payload == '':\n\t\t\tpayload = None\n\t\tif payload is not None:\n\t\t\tdata = json.loads(payload)\n\t\t\tpayload= data.get('upgrade')\n\t\treturn deserialize_Upgrade_json(payload)",
"def status():\n _request('worklog/status/')",
"async def get_status(self):\n data = await self.gen_req(self.status_url)\n return data['data']['attributes']['releasedAt'], data['data']['attributes']['version']",
"def update_get():\n\n status, error = update.status.get()\n if error:\n return json_response.error(error), 200\n return json_response.success({'status': str(status)})",
"def remote_status():",
"def __get_status_api(self):\r\n try:\r\n return Call_shelly_api(url=self.__api_address + \"/status\")\r\n except ShellyException as err:\r\n _LOGGER.warning(err)",
"def status():\r\n\r\n url = '{}/status'.format(USGS_API_ENDPOINT)\r\n payload = {\r\n \"jsonRequest\": payloads.status()\r\n }\r\n logger.debug(\"API call URL: {}\".format(url))\r\n logger.debug(\"API call payload: {}\".format(payload))\r\n response = requests.post(url, payload).json()\r\n logger.debug(\"Received response:\\n{}\".format(json.dumps(response, indent=4)))\r\n _catch_usgs_error(response)\r\n\r\n return response",
"def handle_health():\n return flask.jsonify(status=\"up\")",
"def fusion_api_get_appliance_status(self, api=None, headers=None):\n return self.info.get_status(api=api, headers=headers)",
"def status_check():\n return {\"status\": \"OK\"}",
"def get_status(id):\n task = run_ctx_request.AsyncResult(id)\n if task.state == states.PENDING:\n abort(404)\n if task.state == states.RECEIVED or task.state == states.STARTED:\n return '', 202, {'Location': url_for('api.get_status', id=id)}\n return task.info",
"def status():\n response = \"NOT_OK\"\n if db_client.data_loaded:\n response = \"OK\"\n return flask.jsonify({'status': response})",
"def status(args):\n print('Yields HPC fleet \"{}\" status'.format(args))",
"async def racelandshop_status(_hass, connection, msg):\n racelandshop = get_racelandshop()\n content = {\n \"startup\": racelandshop.status.startup,\n \"background_task\": racelandshop.status.background_task,\n \"lovelace_mode\": racelandshop.system.lovelace_mode,\n \"reloading_data\": racelandshop.status.reloading_data,\n \"upgrading_all\": racelandshop.status.upgrading_all,\n \"disabled\": racelandshop.system.disabled,\n \"disabled_reason\": racelandshop.system.disabled_reason,\n \"has_pending_tasks\": racelandshop.queue.has_pending_tasks,\n \"stage\": racelandshop.stage,\n }\n connection.send_message(websocket_api.result_message(msg[\"id\"], content))",
"def get_status(self):\n url = \"data_request?id=jobstatus&job=%d&plugin=zwave\" % self.id\n return self.vera.get(url)",
"def updater_job_status(self,request):\n\t\t# ----------- DEBUG -----------------\n\t\tMODULE.info(\"updater/installer/status invoked with:\")\n\t\tpp = pprint.PrettyPrinter(indent=4)\n\t\tst = pp.pformat(request.options).split(\"\\n\")\n\t\tfor s in st:\n\t\t\t\tMODULE.info(\" << %s\" % s)\n\t\t# -----------------------------------\n\n\t\t# First check if a job is running. This will update the\n\t\t# internal field self._current_job, or if the job is finished,\n\t\t# it would return an empty string.\n\t\tinst = self.__which_job_is_running()\n\n\t\tjob = request.options.get('job','')\n\t\tresult = {}\n\t\tif job in INSTALLERS:\n\t\t\t# make a copy, not a reference!\n#\t\t\tresult = {}\n#\t\t\tfor arg in INSTALLERS[job]:\n#\t\t\t\tresult[arg] = INSTALLERS[job][arg]\n\t\t\tresult = deepcopy(INSTALLERS[job])\n\n\t\t\tif 'statusfile' in INSTALLERS[job]:\n\t\t\t\ttry:\n\t\t\t\t\tfor line in open(INSTALLERS[job]['statusfile']):\n\t\t\t\t\t\tfields = line.strip().split('=')\n\t\t\t\t\t\tif len(fields) == 2:\n\t\t\t\t\t\t\tresult['_%s_' % fields[0]] = fields[1]\n\t\t\t\texcept:\n\t\t\t\t\tpass\n\t\t\t# if we encounter that the frontend asks about the last job we\n\t\t\t# have executed -> include its properties too.\n\t\t\tif self._current_job:\n\t\t\t\tif self._current_job['job'] == job:\n\t\t\t\t\tfor f in self._current_job:\n\t\t\t\t\t\tresult[f] = self._current_job[f]\n\t\t\t\t\t\tif isinstance(result[f],str) and result[f].isdigit():\n\t\t\t\t\t\t\tresult[f] = int(result[f])\n\t\t\t\tif inst == '':\n\t\t\t\t\tresult['running'] = False\n\t\t\telse:\n\t\t\t\t# no job running but status for release was asked? \n\t\t\t\t# maybe the server restarted after job finished\n\t\t\t\t# and the frontend did not get that information\n\t\t\t\t# Bug #26318\n\t\t\t\tif job == 'release':\n\t\t\t\t\tresult['detail'] = '%s-%s' % (self.ucr.get('version/version'), self.ucr.get('version/patchlevel'))\n\t\t\t\telse:\n\t\t\t\t\tresult['detail'] = _('Unknown')\n\n\t\t\t# -------------- additional fields -----------------\n\n\t\t\t# elapsed time, ready to be displayed. (not seconds, but rather\n\t\t\t# the formatted string)\n\t\t\tif 'time' in result and 'started' in result:\n\t\t\t\telapsed = result['time'] - result['started']\n\t\t\t\tif elapsed < 60:\n\t\t\t\t\tresult['elapsed'] = '%ds' % elapsed\n\t\t\t\telse:\n\t\t\t\t\tmins = int(elapsed/60)\n\t\t\t\t\tsecs = elapsed - (60 * mins)\n\t\t\t\t\tif mins < 60:\n\t\t\t\t\t\tresult['elapsed'] = '%d:%02dm' % (mins,secs)\n\t\t\t\t\telse:\n\t\t\t\t\t\thrs = int(mins/60)\n\t\t\t\t\t\tmins = mins - (60*hrs)\n\t\t\t\t\t\tresult['elapsed'] = '%d:%02d:%02dh' % (hrs,mins,secs)\n\t\t\t# Purpose is now formatted in the language of the client (now that\n\t\t\t# this LANG is properly propagated to us)\n\t\t\tif 'purpose' in result:\n\t\t\t\tif result['purpose'].find('%') != -1:\n\t\t\t\t\t# make sure to not explode (Bug #26318), better show nothing\n\t\t\t\t\tif 'detail' in result:\n\t\t\t\t\t\tresult['label'] = result['purpose'] % result['detail']\n\t\t\t\telse:\n\t\t\t\t\tresult['label'] = result['purpose']\n\t\t\t# Affordance to reboot... 
hopefully this gets set before\n\t\t\t# we stop polling on this job status\n\t\t\tself.ucr.load()\t# make it as current as possible\n\t\t\tresult['reboot'] = self.ucr.is_true('update/reboot/required',False)\n\n\t\t# ----------- DEBUG -----------------\n\t\tMODULE.info(\"updater/installer/status returns:\")\n\t\tpp = pprint.PrettyPrinter(indent=4)\n\t\tst = pp.pformat(result).split(\"\\n\")\n\t\tfor s in st:\n\t\t\t\tMODULE.info(\" >> %s\" % s)\n\t\t# -----------------------------------\n\n\t\tself.finished(request.id,result)",
"async def check_health():\n return {\"healthy\": True}",
"def get_full_juju_status():\n\n status = model.get_status(lifecycle_utils.get_juju_model())\n return status"
] | [
"0.65604794",
"0.62352014",
"0.61701816",
"0.6165392",
"0.60088515",
"0.6008357",
"0.59645104",
"0.5938892",
"0.59130305",
"0.58647084",
"0.5851301",
"0.58230835",
"0.5815668",
"0.57895094",
"0.57872385",
"0.5784844",
"0.5748978",
"0.57383335",
"0.57311296",
"0.5721712",
"0.5719601",
"0.57174313",
"0.5711395",
"0.5688628",
"0.5683688",
"0.5682563",
"0.5680813",
"0.56618917",
"0.56596774",
"0.56559366"
] | 0.690662 | 0 |
Returns a valid request body dictionary to pass to fusion_api_configure_appliance_interfaces [Arguments] | def fusion_api_create_appliance_interfaces_payload(self, body=None, api=None):
return self.interfaces.make_body(body, api) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def fusion_api_configure_appliance_interfaces(self, body=None, api=None, headers=None):\n return self.interfaces.configure(body, api, headers)",
"def get_interface_initialization_kwargs(self, **kwargs) -> dict:\n return {\n key: value\n for key, value in kwargs.items()\n if not self.input_definitions.get(key=key).run_method_input\n }",
"def request_body(self):\n return json.dumps({\n \"schemaId\": self.schemaId,\n \"associationIds\": self.associationIds,\n \"name\": self.name,\n \"value\": self.value\n })",
"def request_dict(self):\r\n ap_map = self.get_access_points()\r\n\r\n if not ap_map:\r\n return None\r\n\r\n request = dict()\r\n\r\n request[\"version\"] = \"1.1.0\"\r\n request[\"host\"] = \"localhost\"\r\n request[\"request_address\"] = True\r\n request[\"address_language\"] = \"en_GB\"\r\n request[\"wifi_towers\"] = ap_map.values()\r\n\r\n if self.access_token:\r\n request[\"access_token\"] = self.access_token\r\n\r\n return request",
"def _get_request_args(self):\n str_args = False\n request_args = {}\n if request.method == \"POST\" or request.method == \"PUT\":\n # Use only body args and ignore any args from query string\n if request.headers.get(\"content-type\", \"\").startswith(CONT_TYPE_JSON):\n # JSON body request\n if request.data:\n request_args = json_loads(request.data)\n if GATEWAY_ARG_PARAMS not in request_args:\n # Magic fallback: Directly use JSON first level as args if params key not present\n request_args = {GATEWAY_ARG_PARAMS: request_args}\n elif request.form:\n # Form encoded payload\n if GATEWAY_ARG_JSON in request.form:\n payload = request.form[GATEWAY_ARG_JSON]\n request_args = json_loads(payload)\n if GATEWAY_ARG_PARAMS not in request_args:\n # Magic fallback: Directly use JSON first level as args if params key not present\n request_args = {GATEWAY_ARG_PARAMS: request_args}\n else:\n # Fallback: Directly use form values\n str_args = True\n request_args = {GATEWAY_ARG_PARAMS: request.form.to_dict(flat=True)}\n else:\n # No args found in body\n request_args = {GATEWAY_ARG_PARAMS: {}}\n\n # Extract file args\n for file_arg in request.files:\n try:\n file_handle = request.files[file_arg]\n arg_val = file_handle.read()\n request_args[GATEWAY_ARG_PARAMS][file_arg] = arg_val\n except Exception as ex:\n log.exception(\"Error reading request file argument %s\", file_arg)\n\n elif request.method == \"GET\":\n str_args = True\n REQ_ARGS_SPECIAL = {\"authtoken\", \"timeout\", \"headers\"}\n args_dict = request.args.to_dict(flat=True)\n request_args = {k: request.args[k] for k in args_dict if k in REQ_ARGS_SPECIAL}\n req_params = {k: request.args[k] for k in args_dict if k not in REQ_ARGS_SPECIAL}\n request_args[GATEWAY_ARG_PARAMS] = req_params\n\n request_args[\"str_args\"] = str_args # Indicate downstream that args are str (GET or form encoded)\n #log.info(\"Request args: %s\" % request_args)\n return request_args",
"def parse_request_body(self):\n try:\n request_arguments = self.request.arguments\n if request_arguments:\n new_request_arguments = {\n k: common.my_str(v[0].decode('utf8'))\n for k, v in request_arguments.items()\n }\n return new_request_arguments\n else:\n request_body = self.request.body\n request_data = request_body.decode('utf-8')\n request_data_dict = json.loads(request_data)\n self.request.arguments = {\n k: [str(v)]\n for k, v in request_data_dict.items()\n }\n new_request_arguments = {\n k: common.my_str(v)\n for k, v in request_data_dict.items()\n }\n return new_request_arguments\n except Exception as e:\n raise tornado.web.HTTPError(\n status_code=400, log_message='bad_request: {}'.format(str(e)))",
"def get_args(self):\n rqst = self.request\n args = rqst.arguments()\n resp = {}\n for arg in args:\n resp[arg] = repr(rqst.get_all(arg))\n return resp",
"def _GetRequestBody(\n self,\n body_params: Iterable[FieldDescriptor],\n ) -> Dict[str, Dict[str, Any]]:\n if not body_params:\n return {}\n\n properties: (Dict[str, Union[SchemaReference, ArraySchema,\n DescribedSchema]]) = dict()\n for field_d in body_params:\n field_name = casing.SnakeToCamel(field_d.name)\n properties[field_name] = self._GetDescribedSchema(field_d)\n\n return {\n \"content\": {\n \"application/json\": {\n \"schema\": {\n \"type\": \"object\",\n \"properties\": properties,\n },\n },\n },\n }",
"def getArguments(self):\n ApiCli.getArguments(self)\n\n if self.args.alarm_name is not None:\n self.alarm_name = self.args.alarm_name\n\n if self.args.metric_name is not None:\n self.metric_name = self.args.metric_name\n\n if self.args.aggregate is not None:\n self.aggregate = self.args.aggregate\n\n if self.args.operation is not None:\n self.operation = self.args.operation\n\n if self.args.threshold is not None:\n self.threshold = self.args.threshold\n\n if self.args.interval is not None:\n self.interval = self.args.interval\n\n if self.args.host_group_id is not None:\n self.host_group_id = self.args.host_group_id\n\n if self.args.actions is not None:\n self.actions = self.args.actions\n\n if self.args.note is not None:\n self.note = self.args.note\n\n if self.args.per_host_notify is not None:\n self.per_host_notify = self.args.per_host_notify\n\n if self.args.is_disabled is not None:\n self.is_disabled = self.args.is_disabled\n\n payload = {}\n\n # Create trigger predicate dictionary\n predicate = {}\n\n if self.aggregate is not None:\n predicate['agg'] = self.aggregate\n\n if self.operation is not None:\n predicate['op'] = self.operation\n\n if self.threshold is not None:\n predicate['val'] = self.threshold\n\n if 'agg' in predicate or 'op' in predicate or 'val' in predicate:\n payload['triggerPredicate'] = predicate\n\n # Create payload dictionary\n if self.alarm_name:\n payload['name'] = self.alarm_name\n\n if self.host_group_id is not None:\n payload['hostgroupId'] = self.host_group_id\n\n if self.interval is not None:\n payload['interval'] = self.intervals[self.interval]\n\n if self.metric_name is not None:\n payload['metricName'] = self.metric_name\n\n if self.note is not None:\n payload['note'] = self.note\n\n if self.actions is not None:\n payload['actions'] = self.actions\n\n if self.per_host_notify is not None:\n payload['perHostNotify'] = True if self.per_host_notify == 'yes' else False\n\n if self.is_disabled is not None:\n payload['isDisabled'] = True if self.is_disabled == 'yes' else False\n\n self.data = json.dumps(payload, sort_keys=True)\n self.headers = {'Content-Type': 'application/json'}",
"def build_minimum_request_body():\n return \\\n {\n \"intent\": \"AUTHORIZE\",\n \"application_context\": {\n \"return_url\": \"https://www.example.com\",\n \"cancel_url\": \"https://www.example.com\"\n },\n \"purchase_units\": [\n {\n \"amount\": {\n \"currency_code\": \"USD\",\n \"value\": \"220.00\"\n }\n }\n ]\n }",
"def interface(self, **kwargs):\n\n return self.api_request(self._get_method_fullname(\"interface\"), kwargs)",
"def request_json(self):\n try:\n return json.loads(self.request.body)\n except Exception:\n logging.info('No JSON payload in request body.')\n return {}",
"def post(self):\n context = request.environ.get('context')\n json = util.copy_project_id_into_json(context, g.json)\n obj = dbapi.net_interfaces_create(context, json)\n interface = jsonutils.to_primitive(obj)\n return interface, 200, None",
"def _format_request_body(self, method, **kwargs):\n # As Python can't accept dashes in kwargs keys, replace any\n # underscores with them here.\n fixed = {}\n for k, v in kwargs.items():\n fixed[k.replace('_', '-')] = v\n\n return {\"method\": method, \"tag\": self.tag, \"arguments\": fixed}",
"def _load_request_body(self, body_primitive) -> Dict[str, BaseModel]:\n if self._request_body_parameter is None or self._request_body_class is None:\n raise ValueError(\"The endpoint doesn't accept a request body\")\n\n try:\n body = self._request_body_class(**body_primitive)\n except ValidationError as ex:\n raise InvalidFieldsError.from_validation_error(ex) from ex\n\n return {self._request_body_parameter: body}",
"def make_request(self: BaseApi,) -> typing.Dict[str, int]:\n\n def serialize_item(item):\n if isinstance(item, pydantic.BaseModel):\n return item.dict()\n return item\n\n body = None\n\n m = ApiRequest(\n method=\"GET\",\n path=\"/api/v3/store/inventory\".format(),\n content_type=None,\n body=body,\n headers=self._only_provided({}),\n query_params=self._only_provided({}),\n cookies=self._only_provided({}),\n )\n return self.make_request({\"200\": {\"application/json\": typing.Dict[str, int],},}, m)",
"def _build_payload(self, body: Dict) -> Dict[str, Any]:\n return {'jsonrpc': '2.0',\n 'id': self._id_count,\n **body}",
"def toPostArgs(self):\n args = {}\n\n # Add namespace definitions to the output\n for ns_uri, alias in self.namespaces.items():\n if self.namespaces.isImplicit(ns_uri):\n continue\n if alias == NULL_NAMESPACE:\n ns_key = 'openid.ns'\n else:\n ns_key = 'openid.ns.' + alias\n args[ns_key] = oidutil.toUnicode(ns_uri)\n\n for (ns_uri, ns_key), value in self.args.items():\n key = self.getKey(ns_uri, ns_key)\n # Ensure the resulting value is an UTF-8 encoded *bytestring*.\n args[key] = oidutil.toUnicode(value)\n\n return args",
"def serialize(self) -> dict:\n return {\n \"input_idx\": self.input_idx,\n \"body_parameter_idx\": self.body_parameter_idx,\n }",
"def eseries_host_argument_spec():\n argument_spec = basic_auth_argument_spec()\n argument_spec.update(dict(\n api_username=dict(type='str', required=True),\n api_password=dict(type='str', required=True, no_log=True),\n api_url=dict(type='str', required=True),\n ssid=dict(type='str', required=True),\n validate_certs=dict(type='bool', required=False, default=True),\n ))\n return argument_spec",
"def get_argdict(cls, toolchain, args):\n return {} # Empty must be overloaded (if required)",
"def get_required_params():\n return {}",
"def to_dict(self):\n result = {\n 'name': self.__class__.__name__,\n 'params': {\n 'requirements': {f: req for f, req in self.requirements.items()}\n }\n }\n\n return result",
"def main():\n\n # endpdoint = \"restconf/data/ietf-interfaces:interfaces\"\n # endpoint = f\"restconf/data/ietf-interfaces:interfaces/interface={name}\"\n\n if len(argv) > 1:\n try:\n inventory = load_inventory(argv[1])\n except FileExistsError as err:\n print(\"FileExistsError: \", err)\n else:\n print(\"You must provide a path to your inventory file.\")\n sys.exit()\n\n r1 = inventory['dev-r1']\n loop = [interface for interface in r1[\"interface\"] if interface[\"name\"] == \"Loopback0\"][0]\n\n payload = render_payload(\n loop,\n \"interface.j2\"\n )\n\n session = create_session(r1[\"username\"], r1[\"password\"])\n endpoint = f\"restconf/data/ietf-interfaces:interfaces/interface=Loopback0\"\n results = put_request(r1[\"host\"],session, endpoint, payload)\n print(results)\n\n save_endpoint = \"restconf/operations/cisco-ia:save-config/\"\n saved = save_config(r1[\"host\"], session, save_endpoint)\n\n # target_routers = [\"dev-r1\"]\n\n # for host_key, attribs in inventory.items():\n\n # if host_key in target_routers:\n # print(f\"configuring interfaces on {host_key}\")\n\n # # create a session imported from restconf_api\n # session = create_session(attribs)\n\n # # get all interfaces\n # results = get_interface(attribs, session, \"Loopback0\")\n\n # interface = results[\"ietf-interfaces:interface\"]\n\n # print(json.dumps(interface))\n # # convert to yaml\n # # yaml_output = yaml.safe_dump(results)\n # # with open(\"vars/interfaces.yml\", \"w\") as file:\n # # file.write(yaml_output)\n\n # # results = update_interfaces(attribs, session)\n # # print(results.text, results.status_code)\n\n # # print(get_interfaces(attribs, session))",
"def parse_interfaces(interfaces):\n parsed_interfaces = collections.defaultdict(dict)\n\n for m, d in iteritems(interfaces):\n app, func = m.split('.', 1)\n\n method = parsed_interfaces[app][func] = {}\n\n # Make default assumptions since these aren't provided by Phab\n method['formats'] = ['json', 'human']\n method['method'] = 'POST'\n\n method['optional'] = {}\n method['required'] = {}\n\n for name, type_info in iteritems(dict(d['params'])):\n # Set the defaults\n optionality = 'required'\n param_type = 'string'\n\n # Usually in the format: <optionality> <param_type>\n type_info = TYPE_INFO_COMMENT_RE.sub('', type_info)\n info_pieces = TYPE_INFO_SPLITTER_RE.findall(type_info)\n for info_piece in info_pieces:\n if info_piece in ('optional', 'required'):\n optionality = info_piece\n elif info_piece == 'ignored':\n optionality = 'optional'\n param_type = 'string'\n elif info_piece == 'nonempty':\n optionality = 'required'\n elif info_piece == 'deprecated':\n optionality = 'optional'\n else:\n param_type = info_piece\n\n method[optionality][name] = map_param_type(param_type)\n\n return dict(parsed_interfaces)",
"def _to_request_dict(self):\n collection_config_request = {\"CollectionName\": self.name}\n\n if self.parameters is not None:\n collection_config_request[\"CollectionParameters\"] = self.parameters\n\n return collection_config_request",
"def toArgs(self):\n # FIXME - undocumented exception\n post_args = self.toPostArgs()\n kvargs = {}\n for k, v in post_args.items():\n if not k.startswith('openid.'):\n raise ValueError(\n 'This message can only be encoded as a POST, because it '\n 'contains arguments that are not prefixed with \"openid.\"')\n else:\n kvargs[k[7:]] = v\n\n return kvargs",
"def get_update_request_no_data() -> Dict[str, Any]:\n payload_dict = {}\n return payload_dict",
"def getParameters( self ):\n parameterDict = {}\n parameterDict['StorageName'] = self.name\n parameterDict['ProtocolName'] = self.protocolName\n parameterDict['Protocol'] = self.protocol\n parameterDict['Host'] = self.host\n parameterDict['Path'] = self.path\n parameterDict['Port'] = self.port\n parameterDict['SpaceToken'] = self.spaceToken\n parameterDict['WSUrl'] = self.wspath\n return S_OK( parameterDict )",
"def parameters(self) -> Dict[str, Any]:\n return self.data[\"args\"].get(\"parameters\", {})"
] | [
"0.5714076",
"0.57095265",
"0.560145",
"0.54681474",
"0.5430471",
"0.5424038",
"0.5419637",
"0.53896075",
"0.53837776",
"0.5308621",
"0.5246314",
"0.5194026",
"0.5180592",
"0.5168699",
"0.5140633",
"0.51296604",
"0.5125179",
"0.50711733",
"0.50526464",
"0.5038075",
"0.5023689",
"0.50147736",
"0.5010763",
"0.50086135",
"0.49966642",
"0.49712014",
"0.49670464",
"0.49590415",
"0.49521476",
"0.49451154"
] | 0.6355418 | 0 |
Returns appliance interfaces, time, and locale [Example] ${resp} = Fusion Api Get Appliance Interfaces | | | | def fusion_api_get_appliance_interfaces(self, api=None, headers=None):
return self.interfaces.get(api, headers) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def intGet(): \n macchanger, ip, iwconfig = pathGet()\n interfaces = []\n a = str(subprocess.check_output(\"{} link show\".format(ip), shell=True))\n ints = a.split(': ')\n for i in range(len(ints)):\n if len(ints[i].split()) == 1:\n if ints[i] not in [\"1\", \"lo\", \"b'1\"]:\n interface = {'name':str(ints[i])}\n interfaces.append(interface)\n # Get interface properties\n for interface in interfaces:\n name = interface['name']\n macs = subprocess.check_output(\"{} -s {}\".format(macchanger, name), shell=True).decode(\"utf-8\")\n interface['cMac'] = macs.split()[2]\n interface['cVend'] = macs.split(\"(\")[1].split(\")\")[0]\n interface['pMac'] = macs.split(\"\\n\")[1].split()[2]\n interface['pVend'] = macs.split(\"\\n\")[1].split(\"(\")[1].split(\")\")[0]\n try:\n mon = subprocess.check_output(\"{} {} 2> /dev/null\".format(iwconfig, name), shell=True).split()\n mon1 = mon[3].decode(\"utf-8\").split(':')[1]\n if mon1 == 'off/any':\n mon1 = mon[4].decode(\"utf-8\").split(':')[1]\n interface['mon'] = mon1\n except:\n interface['mon'] = 'Wired'\n return(interfaces)",
"def interface_status(system_ip):\n\n click.secho(\"\\nRetrieving the Interface Status\")\n\n url = base_url + \"/device/interface/synced?deviceId={0}\".format(system_ip)\n\n response = requests.get(url=url, headers=header,verify=False)\n if response.status_code == 200:\n items = response.json()['data']\n else:\n print(\"Failed to get list of interface \" + str(response.text))\n exit()\n\n print(\"\\nInterfaces status for Device = \",system_ip)\n\n headers = [\"Interface Name\", \"IP address\", \"VPN ID\", \"Operational status\"]\n table = list()\n\n for item in items:\n if item.get('ip-address') != \"-\":\n tr = [item.get('ifname'), item.get('ip-address'),item.get('vpn-id'), item.get('if-oper-status')]\n table.append(tr)\n\n try:\n click.echo(tabulate.tabulate(table, headers, tablefmt=\"fancy_grid\"))\n except UnicodeEncodeError:\n click.echo(tabulate.tabulate(table, headers, tablefmt=\"grid\"))",
"def get_interfaces(switch):\n\n log(\"Entering {0}.\".format(sys._getframe().f_code.co_name), level='DEBUG')\n conn_error = False\n commands = [\"show interfaces status\"]\n\n try:\n response = switch.runCmds(1, commands)\n except ProtocolError, err:\n (errno, msg) = err[0]\n # 1002: invalid command\n if errno == 1002:\n log(\"Invalid EOS interface name ({0})\".format(commands), error=True)\n else:\n conn_error = True\n log(\"ProtocolError while retrieving {0} ([{1}] {2})\".\n format(commands, errno, msg),\n error=True)\n except Exception, err:\n conn_error = True\n # 60: Operation timed out\n # 61: Connection refused (http vs https?)\n # 401: Unauthorized\n # 405: Method Not Allowed (bad URL)\n if hasattr(err, 'errno'):\n if err.errno == 60:\n log(\"Connection timed out: Incorrect hostname/IP or eAPI\"\n \" not configured on the switch.\", error=True)\n elif err.errno == 61:\n log(\"Connection refused: http instead of https selected or\"\n \" eAPI not configured on the switch.\", error=True)\n else:\n log(\"General Error retrieving {0} ({1})\".format(commands,\n err),\n error=True)\n else:\n # Parse the string manually\n msg = str(err)\n msg = msg.strip('<>')\n err = msg.split(': ')[-1]\n\n if \"401 Unauthorized\" in err:\n log(\"ERROR: Bad username or password\")\n elif \"405 Method\" in err:\n log(\"ERROR: Incorrect URL\")\n else:\n log(\"HTTP Error retrieving {0} ({1})\".format(commands,\n err),\n error=True)\n\n if conn_error:\n raise EapiException(\"Connection error with eAPI\")\n\n # Filter out non-Ethernet interfaces\n for interface in response[0][u'interfaceStatuses'].keys():\n if str(interface)[:8] != 'Ethernet':\n response[0][u'interfaceStatuses'].pop(interface, None)\n\n return response[0][u'interfaceStatuses']",
"def getInterface(self):\n\t\tquery = ''\n\t\tconn = self.get_connection()\n\t\theaders = { 'Content-type' : 'application/json', 'Authorization' : 'A10 %s' %self.sessionid}\n\t\tconn.request('GET', self.get_path() + '/' + query, headers=headers)\n\t\tresponse = conn.getresponse()\n\t\texpected_status = 200\n\t\terrors = {500: 'An unexpected runtime exception', 404: 'Specified interface does not exist'}\n\t\tpayload = self.get_output(response, expected_status, errors)\n\t\tconn.close()\n\t\tif self.debug:\n\t\t\tprint 'payload:', payload\n\t\tif payload == '':\n\t\t\tpayload = None\n\t\tif payload is not None:\n\t\t\tdata = json.loads(payload)\n\t\t\tpayload= data.get('interface')\n\t\treturn deserialize_Interface_json(payload)",
"def show_interfaces(self):\n txt = \"Show Interfaces of %s\\n%s has %d interfaces\\n\"%(self.hostname, self.hostname, len(self.interfaces))\n for inf in self.interfaces:\n txt += \"%s IP-Address: %s \\\"%s\\\"\\n\"%(inf, self.interfaces[inf]['ip'], self.interfaces[inf]['status'])\n return txt",
"def fusion_api_get_appliance_time_and_locale(self, api=None, headers=None):\n return self.timeandlocale.get(api, headers)",
"def list(self, req, resp):\n interfaces = []\n for e in EntryPoints('tachyonic.element.interfaces'):\n interfaces.append({'id': e, 'name': e})\n return raw_list(req, interfaces)",
"def show(ctx, appeui):\n if '.' in appeui:\n appeui = str(hexStringInt(str(appeui)))\n \n # Form the url and payload\n server = ctx.obj['server']\n payload = {'token': ctx.obj['token']}\n url = 'http://{}/api/v{}'.format(server, str(version))\n url += '/apps' if appeui == 'all' else '/app/{}'.format(appeui)\n \n # Make the request\n data = restRequest(server, url, 'get', payload, 200)\n if data is None:\n return\n \n # Single application\n if appeui != 'all':\n a = data\n indent = ' ' * 10\n if a['appinterface_id'] == 0:\n a['appinterface_id'] = '-'\n if a['domain'] is None:\n a['domain'] = '-'\n click.echo('Application EUI: ' + euiString(a['appeui']))\n click.echo('{}name: {}'.format(indent, a['name']))\n click.echo('{}domain: {}'.format(indent, a['domain']))\n click.echo('{}fport: {}'.format(indent, a['fport']))\n click.echo('{}interface: {}'.format(indent, a['appinterface_id']))\n if a['appinterface_id'] != '-':\n click.echo('{}Properties:'.format(indent))\n properties = sorted(a['properties'].values(), key=lambda k: k['port'])\n for p in properties:\n click.echo('{} {} {}:{}'.format(indent, p['port'], p['name'], p['type']))\n return\n \n # All applications\n click.echo('{:14}'.format('Application') + \\\n '{:24}'.format('AppEUI') + \\\n '{:15}'.format('Domain') + \\\n '{:6}'.format('Fport') + \\\n '{:10}'.format('Interface'))\n for i,a in data.iteritems():\n if a['appinterface_id'] == 0:\n a['appinterface_id'] = '-'\n if a['domain'] is None:\n a['domain'] = '-'\n click.echo('{:13.13}'.format(a['name']) + ' ' + \\\n '{:23}'.format(euiString(a['appeui'])) + ' ' + \\\n '{:14.14}'.format(a['domain']) + ' ' + \\\n '{:5.5}'.format(str(a['fport'])) + ' ' + \\\n '{:10}'.format(str(a['appinterface_id'])))",
"def get_interfaces_status(device):\n\n try:\n out = device.parse('show ip interface brief')\n except SchemaEmptyParserError as e:\n log.error('No interface information found')\n return None\n\n # {'interface': {'GigabitEthernet1': {'interface_is_ok': 'YES',\n # 'ip_address': '172.16.1.210',\n # 'method': 'DHCP',\n # 'protocol': 'up',\n # 'status': 'up'},\n\n return {key: val.get('status') for key, val in out.get('interface', {}).items()}",
"def fusion_api_get_appliance_status(self, api=None, headers=None):\n return self.info.get_status(api=api, headers=headers)",
"def fusion_api_create_appliance_interfaces_payload(self, body=None, api=None):\n return self.interfaces.make_body(body, api)",
"def write_interfaces(report, devices):\n\n report.add_heading(\"Device Active Interfaces\", 3)\n\n report.add_paragraph(\"The following device information shows each active interface configuration as well as the \"\n \"corresponding zone defined for that interface. During the configuration audit, the assessment\"\n \" team placed emphasis on the active interfaces identified in the following tables.\",\n style='Normal')\n\n for device in devices:\n if len(devices[device]['Interfaces']) > 0: # Don't create a table when there are no interfaces\n report.add_heading(\"\\n%s Active Interfaces\" % device, 4)\n table = report.add_table(rows=1, cols=3)\n hdr_cells = table.rows[0].cells\n hdr_cells[0].text = 'Interface'\n hdr_cells[1].text = 'Address'\n table.style = 'Medium Grid 1 Accent 1'\n for interface in devices[device]['Interfaces']:\n if 'Active' in devices[device]['Interfaces'][interface].keys():\n if devices[device]['Interfaces'][interface]['Active']:\n if devices[device]['Interfaces'][interface]['Active'].upper() == \"YES\":\n row_cells = table.add_row().cells\n row_cells[0].text = \"%s\" % devices[device]['Interfaces'][interface]['Interface']\n if 'Address' in devices[device]['Interfaces'][interface].keys():\n row_cells[1].text = \"%s\" % devices[device]['Interfaces'][interface]['Address']\n if \"ACL In\" in devices[device]['Interfaces'][interface].keys():\n hdr_cells[2].text = 'ACL IN'\n if devices[device]['Interfaces'][interface]['ACL In'] is not None:\n row_cells[2].text = \"%s\" % devices[device]['Interfaces'][interface]['ACL In']\n else:\n row_cells[2].text = \"---\"\n elif \"Zone\" in devices[device]['Interfaces'][interface].keys():\n hdr_cells[2].text = 'Zone'\n if devices[device]['Interfaces'][interface]['Zone'] is not None:\n row_cells[2].text = \"%s\" % devices[device]['Interfaces'][interface]['Zone']\n else:\n row_cells[2].text = \"---\"\n else:\n row_cells = table.add_row().cells\n row_cells[0].text = \"%s\" % devices[device]['Interfaces'][interface]['Interface']\n if 'Address' in devices[device]['Interfaces'][interface].keys():\n row_cells[1].text = \"%s\" % devices[device]['Interfaces'][interface]['Address']\n if \"ACL In\" in devices[device]['Interfaces'][interface].keys():\n hdr_cells[2].text = 'ACL IN'\n row_cells[2].text = \"%s\" % devices[device]['Interfaces'][interface]['ACL In']\n else:\n hdr_cells[2].text = 'DELETE ME'\n row_cells[2].text = \"---\"\n else:\n logging.info(\"%s has no interfaces. An interfaces table will not be generated for this device\" % device)\n if DEBUG:\n print warn + \"%s has no interfaces\" % device\n return report",
"def get_interfaces_description(device, interface=None):\r\n\r\n try:\r\n out = device.parse('show interfaces descriptions {interface}'.format(\r\n interface=interface\r\n ))\r\n except SchemaEmptyParserError as e:\r\n return None\r\n\r\n # Sample output\r\n # {\r\n # \"interface-information\": {\r\n # \"physical-interface\": [\r\n # {\r\n # \"admin-status\": \"up\",\r\n # \"description\": \"none/100G/in/hktGCS002_ge-0/0/0\", <------------ \r\n # \"name\": \"ge-0/0/0\", <---- Given interface\r\n # \"oper-status\": \"up\",\r\n # }, \r\n # ...\r\n \r\n physical_interface_list = out.q.get_values('physical-interface', None)\r\n\r\n for intf in physical_interface_list:\r\n if intf['name'] == interface:\r\n return intf['description']\r\n\r\n return None",
"def show_interface(dut, interface_name = None, cli_type=\"\"):\n cli_type = st.get_ui_type(dut, cli_type=cli_type)\n cli_type = \"klish\" if cli_type in [\"rest-put\", \"rest-patch\"] else cli_type\n output = list()\n if cli_type == \"klish\" or cli_type == \"click\":\n command = \"show sflow interface\"\n if interface_name:\n command = \"{} | grep {}\".format(command, interface_name)\n return st.show(dut, command, type=cli_type)\n elif cli_type == \"rest\":\n if not interface_name:\n url = REST_URI\n else:\n url = \"{}/SFLOW_SESSION/SFLOW_SESSION_TABLE\".format(REST_URI)\n result = st.rest_read(dut, url, SFLOW_SESSION_LIST=interface_name)\n if result and result.get(\"status\") == 200 and result.get(\"output\"):\n if YANG_MODULE in result[\"output\"]:\n data = result[\"output\"][YANG_MODULE]\n if data.get(\"SFLOW_SESSION_TABLE\").get(\"SFLOW_SESSION_LIST\"):\n for intf_list in data.get(\"SFLOW_SESSION_TABLE\").get(\"SFLOW_SESSION_LIST\"):\n response = dict()\n response[\"sampling_rate\"] = intf_list.get(\"sample_rate\")\n response[\"admin_status\"] = intf_list.get(\"admin_state\")\n response[\"interface\"] = intf_list.get(\"ifname\")\n if response:\n output.append(response)\n else:\n st.log(\"{} not observed in ouput\".format(YANG_MODULE))\n else:\n st.log(\"REST show INTERFACE GET CALL --- {}\".format(output))\n return output\n else:\n st.log(\"UNSUPPORTED CLI TYPE {}\".format(cli_type))\n return output",
"def apiai_response(query, session_id):\n\trequest = ai.text_request()\n\trequest.lang='en'\n\trequest.session_id=session_id\n\trequest.query = query\n\tresponse = request.getresponse()\n\treturn json.loads(response.read().decode('utf8'))",
"def fusion_api_get_appliance_interface_attribute(self, device='eth0', attribute='macAddress', api=None, headers=None):\n return self.interfaces.get_device_attribute(device, attribute, api, headers)",
"async def status(request: web.Request) -> web.Response:\n connectivity = {'status': 'none', 'interfaces': {}}\n try:\n connectivity['status'] = await nmcli.is_connected()\n connectivity['interfaces'] = {\n i.value: await nmcli.iface_info(i) for i in nmcli.NETWORK_IFACES\n }\n log.debug(\"Connectivity: {}\".format(connectivity['status']))\n log.debug(\"Interfaces: {}\".format(connectivity['interfaces']))\n status = 200\n except subprocess.CalledProcessError as e:\n log.error(\"CalledProcessError: {}\".format(e.stdout))\n status = 500\n except FileNotFoundError as e:\n log.error(\"FileNotFoundError: {}\".format(e))\n status = 500\n\n return web.json_response(connectivity, status=status)",
"def display_ethernet_interface(appliances=[],\n credentials=[],\n timeout=120,\n no_check_hostname=False,\n EthernetInterface=\"\",\n persisted=True,\n web=False):\n logger = make_logger(\"mast.network\")\n check_hostname = not no_check_hostname\n env = datapower.Environment(\n appliances,\n credentials,\n timeout=120,\n check_hostname=check_hostname)\n logger.info(\n \"Attempting to Retrieve EthernetInterface configuration for \"\n \"{} {}\".format(\n str(env.appliances), EthernetInterface))\n\n resp = env.perform_action(\n \"get_config\",\n _class=\"EthernetInterface\",\n name=EthernetInterface,\n persisted=persisted)\n logger.debug(\"Response received: {}\".format(str(resp)))\n if web:\n return (\n util.render_ethernet_interface_results_table(resp),\n util.render_history(env))\n for host, r in resp.items():\n print host, \"\\n\", \"=\" * len(host), \"\\n\"\n print r\n print",
"def interface(self, **kwargs):\n\n return self.api_request(self._get_method_fullname(\"interface\"), kwargs)",
"def get_response(self):\n res = IODControlRes()\n for field in [\"ARUUID\", \"SessionKey\", \"AlarmSequenceNumber\"]:\n res.setfieldval(field, self.getfieldval(field))\n\n res.block_type = self.block_type + 0x8000\n return res",
"def fusion_api_get_appliance_interface_mac(self, device, api=None, headers=None):\n return self.interfaces.get_device_mac(device, api, headers)",
"def list_interfaces(self, instance_name):\n return ['A_VIF']",
"def get():\n res = {\n 'interface': common.PQOS_API.current_iface(),\n 'interface_supported': common.PQOS_API.supported_iface()\n }\n return res, 200",
"def get_ing_from_all_ns():\n ing_list = get_all_ingress()\n ing_list_json = get_json_list(ing_list)\n return Response(response=ing_list_json, status=200, mimetype='application/json')",
"def get_listing_information():\n\n #getting the guidebox_id variable from show_page.html\n guidebox_id = request.args.get(\"guidebox_id\")\n\n #get the show from the database\n show = Show.find_show_with_guidebox_id(guidebox_id)\n\n #get show title from Guidebox so it can be used in the OnConnect title search url \n show_title = str(show.title)\n\n #get OnConnect seriesId\n series_id = onconnect_search_series_id(show_title)\n\n #obtaining listing information for a 24 hour period from the current time\n airings = onconnect_search_airings(series_id)\n\n return jsonify(airings)",
"def get_alarm_info(self):\n response = self.get(COMMAND_UIC, 'GetAlarmInfo')\n\n return response_list(response['alarmList']['alarm'])",
"def getAz(ip, response_ilb):\n for i in response_ilb['NetworkInterfaces']:\n logger.info('GetAz: Details about Internal Load Balancer')\n for k in i['PrivateIpAddresses']:\n logger.info('GetAz: IP Address of ILB is :' + k['PrivateIpAddress'])\n if k['PrivateIpAddress'] == ip:\n return i['AvailabilityZone']\n\n return None",
"def fusion_api_configure_appliance_interfaces(self, body=None, api=None, headers=None):\n return self.interfaces.configure(body, api, headers)",
"def get_network(isamAppliance, application_interface, statistics_duration, check_mode=False, force=False):\n return isamAppliance.invoke_get(\"Retrieving the Application Interface Statistics\",\n \"/analysis/interface_statistics.json{0}\".format(\n tools.create_query_string(prefix=application_interface,\n timespan=statistics_duration)),requires_model=requires_model)",
"def interface(self):\n\n data = ['[Interface]']\n for item in INTERFACE_KEYS:\n value = getattr(self, item, None)\n if value:\n data.append(value)\n\n return '''\n'''.join(data)"
] | [
"0.6129418",
"0.592415",
"0.586233",
"0.5833587",
"0.5555277",
"0.55209786",
"0.5518274",
"0.55138946",
"0.54967284",
"0.5416245",
"0.53760463",
"0.53188145",
"0.5274189",
"0.5272627",
"0.52077365",
"0.5183069",
"0.51734155",
"0.51588535",
"0.51240015",
"0.51147765",
"0.51101905",
"0.5101659",
"0.5077336",
"0.5075429",
"0.50442404",
"0.50329494",
"0.5023112",
"0.50225025",
"0.5021227",
"0.501249"
] | 0.6542954 | 0 |
Gets the appliance's status information [Example] ${resp} = Fusion Api Get Appliance Status | | | def fusion_api_get_appliance_status(self, api=None, headers=None):
return self.info.get_status(api=api, headers=headers) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_status():\n data = {\n 'status': 'up',\n }\n jsn = json.dumps(data)\n\n resp = Response(jsn, status=200, mimetype='application/json')\n\n return resp",
"def get_status(self):\n r = requests.get(self.base_url + '/status')\n return r.json()",
"def status(self):\n r = requests.get('/'.join([self.base_url, self.ENDPOINT_STATUS]))\n return r.json()",
"def getStatus():\n return json.dumps({'camera': Camera.status(), 'rover': rover.status()}), 200",
"def show_status():\n return jsonify({\"status\": \"OK\"})",
"def __get_status_api(self):\r\n try:\r\n return Call_shelly_api(url=self.__api_address + \"/status\")\r\n except ShellyException as err:\r\n _LOGGER.warning(err)",
"def get_status(request):\n if \"liveness\" in request.query_params:\n return Response({\"alive\": True})\n\n app_status = ApplicationStatus()\n celery_param = request.query_params.get(\"celery\", \"false\").lower()\n if celery_param == \"true\":\n return Response(app_status.celery_task_status)\n\n response = {\n \"api_version\": app_status.api_version,\n \"celery_status\": app_status.celery_status,\n \"commit\": app_status.commit,\n \"current_datetime\": app_status.current_datetime,\n \"database_status\": app_status.database_status,\n \"debug\": app_status.debug,\n \"modules\": app_status.modules,\n \"platform_info\": app_status.platform_info,\n \"python_version\": app_status.python_version,\n }\n return Response(response)",
"def service_status() -> Response:\n data, code, headers = controllers.service_status(request.args)\n response: Response = make_response(jsonify(data), code, headers)\n return response",
"def _service_status(res, ctx):\n\n if _has_error_code(res):\n return print_errors(res, ctx)\n\n template = '''\\\nname: {name}\nconfig-location: {config_location}\nlog-location: {log_location}\nscript-location: {script_location}\nrunning: {running}\nenabled: {enabled}\ncontainer-running: {container_running}\ndeployment: {deployment}\nconfig: {config}'''\n\n result = template.format(name=res['name'],\n config_location=res['config_location'],\n log_location=res['log_location'],\n script_location=res['script_location'],\n running=res['running'],\n enabled=res['enabled'],\n container_running=res['container_status'].get('running', False),\n deployment=res['deployment'],\n config=res['config'])\n\n if 'callback_uri' in res:\n result += \"\\ncallback-uri: {callback_uri}\".format(callback_uri=res['callback_uri'])\n\n return result",
"def axapi_status(self, result):\n try:\n status = result.json()['response']['status']\n if status == 'fail':\n error = '\\n ERROR: ' + result.json()['response']['err']['msg']\n return error, status\n else:\n return status\n except:\n good_status_codes = ['<Response [200]>', '<Response [204]>']\n status_code = str(result)\n if status_code in good_status_codes:\n return 'OK'\n else:\n return status_code",
"def status():\n return jsonify({\"status\": \"OK\"})",
"def status_check():\n return {\"status\": \"OK\"}",
"def status():\r\n\r\n url = '{}/status'.format(USGS_API_ENDPOINT)\r\n payload = {\r\n \"jsonRequest\": payloads.status()\r\n }\r\n logger.debug(\"API call URL: {}\".format(url))\r\n logger.debug(\"API call payload: {}\".format(payload))\r\n response = requests.post(url, payload).json()\r\n logger.debug(\"Received response:\\n{}\".format(json.dumps(response, indent=4)))\r\n _catch_usgs_error(response)\r\n\r\n return response",
"def status():\n (code, message) = rest_api.status(request)\n if (code == 200):\n return 'Running'\n else:\n abort(code)",
"def status():\n return jsonify({\"Status\": \"Ok\"})",
"def api_check_status():\n (success, status) = AppStatus.check_status()\n if success: return jsonify({\"success\": True})\n abort(503, status)",
"def health_check():\n return dict(api_status='OK')",
"def test_get_status(self):\n response = self.client.open(\n '/v1/status',\n method='GET')\n self.assert200(response,\n 'Response body is : ' + response.data.decode('utf-8'))",
"def get_status():\n return \"OK\" # defaults to a 200 HTML status return code",
"def status(_):\n return {\"status\": \"ok\"}",
"def status():\n return jsonify(service='scwr-api-requirements', status='ok')",
"def api_health(self):\n return messages.SUCCESS_JSON, 200",
"def getStatus():",
"def explain_status(response):\n verbose = STATUS_LIST[response['code']]\n response['verbose'] = verbose\n return response",
"def _read_status(self):\n results = self.status_table.query_items({'api_version': self.api_version})\n if not results:\n return None\n else:\n return results[0]",
"def status(self):\n return self._get(path='status')",
"def get_health_check(self):\n return util.create_response(output=\"OK\")",
"def status(code=200):\n\treturn jsonify(server.status_data()), code",
"def get_health(self):\n return {'status': 'ok'}",
"def getStatus(url, URL_JSON_KEY,pet_id):\n \n method = \"pet.get?\"\n petId = \"&id=\"+pet_id\n url+= method + URL_JSON_KEY + petId\n petJson = urlopen(url)\n petsInfo = load(reader(petJson))\n message = petsInfo['petfinder']['header']['status']['code']['$t']\n if message == '100':\n status = petsInfo['petfinder']['pet']['status']['$t']\n return status\n elif message =='201':\n status = \"Animal Removed\"\n return status\n else:\n print(\"Something went wrong. Sorry.\")"
] | [
"0.7071417",
"0.669771",
"0.6697411",
"0.66495866",
"0.6645041",
"0.6572344",
"0.6570378",
"0.6546204",
"0.654606",
"0.65379375",
"0.65231735",
"0.6503739",
"0.64886177",
"0.6475442",
"0.6458086",
"0.64383113",
"0.642176",
"0.6403919",
"0.63265085",
"0.62873745",
"0.6264494",
"0.6246175",
"0.6239436",
"0.62240297",
"0.6201595",
"0.61812013",
"0.6179526",
"0.61719394",
"0.6158973",
"0.6121894"
] | 0.7710749 | 0 |
Gets the appliance's version information [Example] ${resp} = Fusion Api Get Appliance Version | | | def fusion_api_get_appliance_version(self, api=None, headers=None):
return self.info.get_version(api=api, headers=headers) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def version():\n response = make_response('{\"version\" : %s }' % app.config.get('VERSION'), 200)\n response.content_type = \"application/json\"\n return response",
"def version_get():\n try:\n return json_response.success({'version': version.local_version()})\n except version.Error as e:\n return json_response.error(str(e)), 200",
"def get_version():\n ver = '0.0.0'\n req = restcall(0, 'config', 10.0)\n if req['text'] is not None:\n try: \n tree = ET.fromstring(req['text'])\n ver = tree.findall('app_version')[0].text\n if ver is None:\n ver = '0.0.0'\n _LOGGER.info(\"ISY: firmware version: %s\", ver)\n except ET.ParseError:\n _LOGGER.error(\"No version information found on ISY.\")\n return ver",
"def get_version(self):\n return self.http_call(\"get\", url=f\"{self.base_url}/version\").json()",
"def GetVersion(self):\n return self._SendRequest(HTTP_GET, \"/version\", None, None)",
"def get_version(self):\n url = '{}/v2/version'.format(self.url)\n try:\n r = requests.get(url)\n if r.status_code == 200:\n return r.json()['version']\n except Exception as e:\n pass\n return ''",
"def get_version(self):\n res = requests.get(self.base_url + '/version')\n\n return res",
"def get_version(self):\n return self.__make_api_call('get/version')",
"def get_version(self):\n url = '{}/version'.format(self.url)\n try:\n r = requests.get(url)\n if r.status_code == 200:\n return r.json()['version']\n except Exception as e:\n pass\n return ''",
"def test_get_version(mocker):\n client = wsgi.application.test_client(mocker)\n\n url = '/api/v0/version'\n\n response = client.get(url)\n\n output = {\n \"message\": f\"AIOPS Publisher Version {wsgi.VERSION}\",\n \"status\": \"OK\",\n \"version\": wsgi.VERSION\n }\n assert response.get_json() == output\n assert response.status_code == 200",
"def get_api_version(self):\n return self.connector.request('GET', '/app/webapiVersion')",
"def get_version(self):\n url = '{}/version'.format(self.url)\n try:\n r = requests.get(url)\n if r.status_code == 200:\n return r.json()['orionld version']\n except Exception as e:\n pass\n return ''",
"def get_version(ip):\n url='http://{}/ins'.format(ip)\n\n myheaders={'content-type':'application/json'}\n payload={\n \"ins_api\": {\n \"version\": \"1.0\",\n \"type\": \"cli_show\",\n \"chunk\": \"0\",\n \"sid\": \"1\",\n \"input\": \"show version\",\n \"output_format\": \"json\"\n }\n }\n response = requests.post(url,data=json.dumps(payload), headers=myheaders,auth=(nxos_username,nxos_password))\n resp = response.json()['ins_api']['outputs']['output']['body']['kickstart_ver_str']\n return resp",
"def get_versions():\n ret_obj = {'versions': picard_versions(current_app)}\n return make_response(jsonify(ret_obj), 200)",
"def query_api_version(self):\n version_resp = self._session.get('/api/version',\n logon_required=False)\n self._api_version = version_resp\n return self._api_version",
"def version(self):\n _, body = self.request('/', 'GET')\n return body.get('version', None)",
"def get_application_version(self):\n return self.connector.request('GET', '/app/version')",
"def test_server_details_ok(self):\n response = self.call_api('server_details', {}, 200).json\n self.assertEqual(utils.get_app_version(), response['server_version'])",
"def get_version(self):\n\t\treturn call_sdk_function('PrlApi_GetVersion')",
"def db_version():\n\n headers = {\n 'accept': 'text/plain',\n }\n\n try:\n response = requests.get('https://reactome.org/AnalysisService/database/version', headers=headers)\n except ConnectionError as e:\n print(e)\n\n if response.status_code == 200:\n return response.text\n else:\n print('Status code returned a value of %s' % response.status_code)",
"def version(self) -> 'outputs.VersionResponse':\n return pulumi.get(self, \"version\")",
"def get_version(self):\r\n if not self.endpoint_checker(self.endpointurl):\r\n raise Exception(\"Please use a valid ESRI REST url\")\r\n\r\n parsedurl = urlparse(self.endpointurl)\r\n print(f\"{parsedurl.scheme}://{parsedurl.netloc}/arcgis/rest/services/?f=pjson\")\r\n req = requests.get(\r\n f\"{parsedurl.scheme}://{parsedurl.netloc}/arcgis/rest/services/?f=pjson\"\r\n )\r\n\r\n if req.status_code == 200:\r\n try:\r\n return req.json()[\"currentVersion\"]\r\n except KeyError:\r\n try:\r\n req = requests.get(\r\n self.endpointurl.split(\"services/\")[0] + \"services/?f=pjson\"\r\n )\r\n return req.json()[\"currentVersion\"]\r\n except Exception as e:\r\n raise e\r\n raise Exception(\r\n f\"An Error occurred retrieving vital information, the response status {str(req.status_code)} associate with {req.json()['error']['message']}\"\r\n )",
"def do_get_version(self, arg):\n arg = arg\n print(self.phil.if_version)",
"def get_server_version(object = server_status_req):\n try:\n response = urllib2.urlopen(object).read()\n server_connect = json.loads(response)\n return server_connect['GetSeverStatus']\n except URLError, e:\n print 'Error: No Response From Server.'",
"async def version(self) -> str:\n response = await self._request(\"status\")\n return response[\"version\"]",
"def show(self, request):\n # data = VERSIONS[request.url_version]\n LOG.debug('request: %s ' % request)\n data = VERSIONS['1.0']\n LOG.debug('data: %s ' % data)\n LOG.debug(\"conf: sections:%s\" % conf.sections())\n v = Version(data[\"id\"], data[\"status\"],\n request.application_url, data[\"updated\"])\n return wsgi.Result(VersionDataView(v))",
"def get_api_version(session: \"Session\") -> str:\n component_versions = get_component_versions(session)\n return str(component_versions.get(CoordConsts.KEY_API_VERSION, \"2.0.0\"))",
"def _get_api_version(self):\n with self.nb_session.get(\n self.nb_api_url, timeout=10,\n verify=(not settings.NB_INSECURE_TLS)) as resp:\n result = float(resp.headers[\"API-Version\"])\n log.info(\"Detected NetBox API v%s.\", result)\n return result",
"def get_version(self) -> Dict[str, str]:\n return self.http.get(self.config.paths.version)",
"def version(self):\n r = requests.get(\"http://%s/api/version\" %(self.url), headers=self.headers)\n if r.status_code == 200:\n return True, r.content\n else:\n return False, {}"
] | [
"0.7194165",
"0.71895546",
"0.71703166",
"0.7068277",
"0.70262593",
"0.7015712",
"0.69781196",
"0.6933454",
"0.69299096",
"0.6899857",
"0.6874984",
"0.6837599",
"0.6784274",
"0.6759895",
"0.6743399",
"0.6726015",
"0.66781604",
"0.6660853",
"0.6617225",
"0.6587143",
"0.6578284",
"0.6577919",
"0.6574595",
"0.6565039",
"0.656217",
"0.6549324",
"0.6521558",
"0.6510697",
"0.6465069",
"0.6450255"
] | 0.7203293 | 0 |
Get appliance state [Example] ${resp} = Fusion Api Get Appliance State | | def fusion_api_get_appliance_state(self, appliance):
return self.appstate.get(appliance) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_resource_state():\n output = [f'{\"S. No.\":6}\\t{\"Resource\":50}\\t{\"Health State\":12}\\t{\"Reason\":100}\\n']\n\n for index, resource in enumerate(HEALTH_AGGREGATOR.resource_state):\n output.append(\n f'{index + 1:<6}\\t{resource:<50}\\t'\n f'{\"Healthy\" if HEALTH_AGGREGATOR.resource_state[resource][\"is_healthy\"] else \"Unhealthy\":<12}\\t'\n f'{HEALTH_AGGREGATOR.resource_state[resource][\"reason\"]:<100}\\n'\n )\n\n return Response('\\n'.join(output), 200, mimetype='text/plain')",
"def adc_api_state():\n return jsonify(adc.get_state())",
"def get_job_state(self, response) -> Text:\n return response['state']",
"def get_state(self) -> str:\n url = f\"{self.ha_url}/api/states/{self.entity_id}\"\n\n req = urllib.request.Request(url=url, headers=self.headers)\n with urllib.request.urlopen(req) as r:\n response = r.read().decode(\"utf\")\n return json.loads(response)[\"state\"]",
"def read_state():\n request = urllib2.Request('http://www.saltybet.com/state.json')\n request.add_header('User-Agent','Mozilla/5.0 (X11; U; Linux i686) Gecko/20071127 Firefox/2.0.0.11')\n opener = urllib2.build_opener()\n try:\n data = opener.open(request).read()\n except:\n\tprint \"WARNING! Unable to obtain state.json. Are you not logged in?\"\n return {'status' : 'unreachable'}\n return json.loads(data)",
"def status(self, action=None):\n if action:\n function = _status_functions[action]\n done, data = self._request(function)\n if done:\n if data:\n return states[int(data[0], 16)]\n else:\n raise EvseError\n done, data = self._request('GS')\n if done:\n return states[int(data[0])]\n\n raise EvseError",
"def get_ha_state():\n\n query = {\n \"type\": \"op\",\n \"cmd\": \"<show><high-availability><state></state></high-availability></show>\",\n }\n\n return __proxy__[\"panos.call\"](query)",
"def get_states():\n try:\n ''' Returns a list of states in list named result '''\n data = State.select()\n return ListStyle.list(data, request), 200\n except Exception as e:\n abort(500)",
"def fusion_api_get_appliance_status(self, api=None, headers=None):\n return self.info.get_status(api=api, headers=headers)",
"def query_state(state_id):\n\tthisQuery = Query('state', state_id)\n\tthisQuery.send_query()\n\tresponse = thisQuery.pull_result()\n\treturn jsonify(response)\n\t#return render_template('response.html', response=response)",
"def getStatus(self):\n return self.json_state.get(\"state\")",
"def _get_state(self):\n print(\"GET STATE\")\n res = self._send_command(\n \"RS;\",\n fb_required=True,\n res_pattern=\"STATE:\")\n # The received answer is supposed to be something like\n # STATE:0|1|-1\n state = int(res.split(':')[1])\n if state == PVDriver.IDLE:\n return \"IDLE\"\n elif state == PVDriver.MOVING:\n return \"MOVING\"\n else:\n return \"ERROR\"",
"def mock_api_stage_fail_get_state() -> Tuple[bytes, str]:\n return (\n unhexlify(DUMMY_LOGIN_RESPONSE.encode(ENCODING_CODEC)),\n DUMMY_GET_STATE_RESPONSE\n )",
"def status(name='default'):\n machine_states = dict(_status())\n return machine_states[name]",
"def get_state(self, state):\n status = [u'noState', u'poweredOn', u'blocked', u'suspended', \n u'poweredOff', u'poweredOff', u'crashed']\n return status[int(state)]",
"def status(_):\n return {\"status\": \"ok\"}",
"def get_status():\n data = {\n 'status': 'up',\n }\n jsn = json.dumps(data)\n\n resp = Response(jsn, status=200, mimetype='application/json')\n\n return resp",
"def access_spot_instance_state() -> Response:\n retry_session = requests_retry_session()\n response = retry_session.get(INSTANCE_ACTION_URL)\n return response",
"def get_job_state(self, response) -> JobState:\n return response.state",
"async def test_api_core_state(hass: HomeAssistant, mock_api_client: TestClient) -> None:\n resp = await mock_api_client.get(\"/api/core/state\")\n assert resp.status == HTTPStatus.OK\n json = await resp.json()\n assert json[\"state\"] == \"RUNNING\"",
"def _service_status(res, ctx):\n\n if _has_error_code(res):\n return print_errors(res, ctx)\n\n template = '''\\\nname: {name}\nconfig-location: {config_location}\nlog-location: {log_location}\nscript-location: {script_location}\nrunning: {running}\nenabled: {enabled}\ncontainer-running: {container_running}\ndeployment: {deployment}\nconfig: {config}'''\n\n result = template.format(name=res['name'],\n config_location=res['config_location'],\n log_location=res['log_location'],\n script_location=res['script_location'],\n running=res['running'],\n enabled=res['enabled'],\n container_running=res['container_status'].get('running', False),\n deployment=res['deployment'],\n config=res['config'])\n\n if 'callback_uri' in res:\n result += \"\\ncallback-uri: {callback_uri}\".format(callback_uri=res['callback_uri'])\n\n return result",
"def status_check():\n return {\"status\": \"OK\"}",
"def get_state_lga(lga_dict, google_api, action, api_key, entity,\n state_data=None, state=''):\n\n for lga in lga_dict[state]:\n print(lga)\n\n search = f'{entity}+{state}+{lga}'\n\n link = google_api + action + search + api_key\n\n\n requested_obj = requests.get(link)\n\n json_data = json.loads(requested_obj.text)\n\n results = json_data.get('results')\n\n state_data = get_queried_data(results)\n\n state_data = get_nextpage_data(google_api, api_key,\n json_data, state_data)\n\n return state_data",
"def state(self) -> 'outputs.DeviceStateResponse':\n return pulumi.get(self, \"state\")",
"def get_status(self, state):\n raise NotImplementedError",
"def health_check():\n return dict(api_status='OK')",
"def get_thing_state(thingName, thingProperty):\n response = client.get_thing_shadow(thingName=thingName)\n\n streamingBody = response[\"payload\"]\n jsonState = json.loads(streamingBody.read())\n print(jsonState)\n\n thingPropVal = jsonState[\"state\"][\"reported\"][thingProperty]\n print(thingPropVal)\n\n return thingPropVal",
"def get_device_state(self, charger):\n data = {\n \"device_id\": self.uuid,\n \"cmd\": \"get_state\",\n \"token\": charger.token(),\n \"account_token\": self.api_token\n }\n headers = {\n \"Content-Type\": \"application/json\"\n }\n\n response = requests.post(\"{}/box_api_secure\".format(self.BASE_URL),\n data=json.dumps(data),\n headers=headers)\n response_json = response.json()\n return response_json",
"def getStatus():",
"def check_action_status(payload):\n response = requests.post(url, data=payload)\n return response.json()"
] | [
"0.6575499",
"0.64693236",
"0.64353204",
"0.64068586",
"0.6088605",
"0.6075611",
"0.6041648",
"0.5974168",
"0.59656465",
"0.5870416",
"0.5819571",
"0.57874763",
"0.57726026",
"0.57429564",
"0.56989646",
"0.5692511",
"0.56874037",
"0.5601614",
"0.5588285",
"0.5570118",
"0.5563054",
"0.5558524",
"0.5547077",
"0.55453616",
"0.5542085",
"0.5527304",
"0.5486004",
"0.5480226",
"0.54789555",
"0.54374766"
] | 0.69704103 | 0 |
Retrieves the specified support dump and saves it locally. [Arguments] | def fusion_api_download_support_dump(self, uri, localfile, api=None, headers=None):
return self.dump.get(uri=uri, localfile=localfile, api=api, headers=headers) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def fusion_api_create_fabric_support_dump(self, uri, body, api=None, headers=None):\n params = '/support-dumps'\n return self.fabric.post(uri, body, api, headers, params)",
"def get_dump(lang, filename):\n from .utils import get_dump as download\n click.echo('Starting to download Wikipedia dump for lang {}.'.format(lang))\n download(lang, filename=filename)\n click.echo('Download finished')",
"def fusion_api_create_support_dump(self, body, api=None, headers=None):\n return self.dump.create(body=body, api=api, headers=headers)",
"def fusion_api_download_fabric_support_dump(self, uri, localfile, api=None, headers=None):\n return self.fabric.get_file(uri=uri, localfile=localfile, api=api, headers=headers)",
"def dumpme(self) :\n fileName = \"./data/oP4_ModelBuilder.dump\"\n with open(fileName,\"wb\") as dumpedFile:\n oPickler = pickle.Pickler(dumpedFile)\n oPickler.dump(self)",
"def includeInDump(self):\n pass",
"def dump_me(self, fileName=None) :\n if fileName is None :\n fileName = \"./data/oP4_ModelBuilder.dump\"\n else :\n pass\n with open(fileName,\"wb\") as dumpedFile:\n oPickler = pickle.Pickler(dumpedFile)\n oPickler.dump(self)",
"def load_dump(self):\n # Create uuid extension\n command = \"CREATE EXTENSION IF NOT EXISTS \\\"uuid-ossp\\\";\"\n try:\n self.cursor.execute(command)\n except:\n # uuid extension already exists\n pass\n print(\"uuid extension couldn't be created\")\n\n path = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'SQL', 'fa2909.sql')\n try:\n self.cursor.execute(open(path, \"r\").read())\n print('table was created successfully')\n return True\n except:\n # error\n print(\"table couldn't be created\")\n return False",
"def fetch_inspect_data(filename, output, db_url=None):\n r2dt.write_training_data(filename, db_url, output)",
"def import_project_dump(self, key):",
"def quick_dump(what, where, **args):\n # @todo - Add couple of checks etc, isDir, extension, isFile, exists? overwrite?\n\n pickle.dump(what,open(where,\"wb\"),**args)",
"def saveSnapshot(self, filename): \n\t\tpass",
"def save(self, target, withdP=False):\n from six.moves.cPickle import dump\n data = self.serialize(withdP=withdP)\n with open(target, 'wb') as f:\n dump(data, f)",
"def save(self, target, withdP=False):\n from six.moves.cPickle import dump\n data = self.serialize(withdP=withdP)\n with open(target, 'wb') as f:\n dump(data, f)",
"def dumpu(self, data, url, **kwargs):\n return self.dump(data, self.open(url, \"w\", **kwargs), **kwargs)",
"def dump(self, obj):\r\n return self.localpath.dump(obj)",
"def saved(args):\r\n\r\n stories = _get_saved_stories(args=args)\r\n\r\n if args.export == 'json':\r\n return stories\r\n elif args.export == 'xml':\r\n return pystache.render(\"\"\"<?xml version=\"1.0\" encoding=\"utf-8\"?>\r\n <feed xmlns=\"http://www.w3.org/2005/Atom\">\r\n <title>Saved stories on Hacker News</title>\r\n {{#stories}}\r\n <entry>\r\n <title>{{title}}</title>\r\n <link href=\"{{url}}\" />\r\n </entry>\r\n {{/stories}}\r\n </feed>\"\"\", {'stories': stories})",
"def pickleToDisk(population, fName):\n pickle.dump(population, open(fName + \".pkl\", 'wb'), protocol=2)",
"def save_battle(battle, battle_name):\n path = './data_reader/data/battles/' + battle_name\n\n with open(path, 'wb') as outfile:\n pickle.dump(battle, outfile, -1)",
"def straight_dump_to_file(name, the_url, the_file):\n the_response = urllib.urlopen(the_url)\n the_data = json.loads(the_response.read())\n if name: the_file.write(name + \"\\n\")\n the_file.write(the_url)\n the_file.write(\"\\n\")\n the_file.write(str(the_data))\n the_file.write(\"\\n\\n\")\n return the_data",
"def dump(self):\n # dump self.data\n pickle.dump(self.data, open(self.data_dir + DATA_PATH, 'wb+'))\n # dump self.code2desc\n pickle.dump(self.code2desc, open(self.data_dir + CODE2DESC_PATH, 'wb+'))\n # dump self.family2tf\n pickle.dump(self.family2tf, open(self.data_dir + FAMILY2TF_PATH, 'wb+'))\n # dump self.word2tf\n pickle.dump(self.word2tf, open(self.data_dir + WORD2TF_PATH, 'wb+'))\n # dump self.word2df\n pickle.dump(self.word2df, open(self.data_dir + WORD2DF_PATH, 'wb+'))\n return None",
"def dump_release(input_bytes):\n return dump_from_release(input_bytes, \"release\")",
"def save_dumps(module_name: str, dumps: Dict[str, str], dump_root: str = \".\"):\n\n for dump_format in dumps:\n dump_name = module_name + \".\" + dump_format\n with open(Path(dump_root, dump_name), \"w\") as f:\n f.write(dumps[dump_format])",
"def find_tool():\n return shutil.which('dump')",
"def _load_support(name):\n curr = P.dirname(P.abspath(__file__))\n with open(P.join(curr, \"data\", \"%s.yml\" % name)) as fin:\n return yaml.full_load(fin)",
"def dump(args):\n if args.dump_command == \"trace\":\n _dump_trace(args)\n elif args.dump_command == \"checkpoint\":\n _dump_checkpoint(args)\n elif args.dump_command == \"config\":\n _dump_config(args)\n else:\n raise ValueError()",
"def test_bcftools_cli_dump(self):\n runner = CliRunner()\n result = runner.invoke(cli.main, [\"dump\"])\n assert result.exit_code == 0\n assert os.path.isfile(os.path.join(BASE_DIR, \"hmtnote_dump.pkl\"))",
"def save(self, target):\n from six.moves.cPickle import dump\n data = self.serialize()\n with open(target, 'wb') as f:\n dump(data, f)",
"def save(self, target):\n from six.moves.cPickle import dump\n data = self.serialize()\n with open(target, 'wb') as f:\n dump(data, f)",
"def dump(self) -> None:\n ..."
] | [
"0.5763988",
"0.55533063",
"0.52658886",
"0.5232461",
"0.5137154",
"0.5132785",
"0.5127345",
"0.5075329",
"0.5033584",
"0.5018131",
"0.5017229",
"0.49247614",
"0.49243915",
"0.49243915",
"0.49054408",
"0.48881996",
"0.48875943",
"0.48782125",
"0.48453915",
"0.48338124",
"0.48259664",
"0.48253807",
"0.4812052",
"0.48102084",
"0.4787545",
"0.47658285",
"0.4751215",
"0.47387564",
"0.47387564",
"0.4737918"
] | 0.5809528 | 0 |
Configures appliance time and locale [Arguments] | def fusion_api_configure_appliance_time_and_locale(self, body=None, api=None, headers=None):
return self.timeandlocale.configure(body, api, headers) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def checkAndDefaultArgs(args):\n if not args.date:\n args.date = datetime.now().strftime(\"%d/%m/%Y\")\n\n if args.time_in:\n if \"am\" not in args.time_in and \"pm\" not in args.time_in:\n print(\"Invalid time_in string provided: Using default 1pm\")\n args.time_in = \"1pm\"\n else:\n args.time_in = \"1pm\"\n\n if args.time_out:\n if \"am\" not in args.time_out and \"pm\" not in args.time_out:\n print(\"Invalid time_out string provided: Using default 2pm\")\n args.time_out = \"2pm\"\n else:\n args.time_out = \"2pm\"",
"def __parse_init_time(args):\n if args.init_time is None:\n return\n try:\n if args.init_time.isdigit():\n args.init_time=int(args.init_time)\n else:\n args.init_time=datetime.strptime(args.init_time, __DATE_FORMAT)\n except Exception as ex:\n error_exit(str(ex))",
"def add_time_suffix_into_arguments(args: Dict[str, Any]):\n arg_keys = args.keys()\n if 'start_time' in arg_keys:\n start_time = args.get('start_time', '')\n date_time = dateparser.parse(start_time)\n if date_time:\n args['start_time'] = str(\n date_time.strftime(API_SUPPORT_DATE_FORMAT)\n )\n else:\n raise ValueError(\n MESSAGES['INVALID_TIME_VALIDATION'].format('start_time')\n )\n\n if 'end_time' in arg_keys:\n end_time = args.get('end_time', '')\n date_time = dateparser.parse(end_time)\n if date_time:\n args['end_time'] = str(date_time.strftime(API_SUPPORT_DATE_FORMAT))\n else:\n raise ValueError(\n MESSAGES['INVALID_TIME_VALIDATION'].format('end_time')\n )",
"def __init__(self, *args, **kwargs):\n _gdi_.Locale_swiginit(self,_gdi_.new_Locale(*args, **kwargs))",
"def run(self):\n if len(sys.argv[1:]) == 0:\n arg_parse.print_help()\n arg_parse.exit()\n\n try:\n if args.unix:\n self.from_unix_sec()\n print (\"Unix Seconds: \" + self.in_unix_sec + \" UTC\")\n elif args.umil:\n self.from_unix_milli()\n print (\"Unix Milliseconds: \" + self.in_unix_milli + \" UTC\")\n elif args.wh:\n self.from_win_64_hex()\n print (\"Windows 64 bit Hex BE: \" + self.in_windows_hex_64 + \" UTC\")\n elif args.whle:\n self.from_win_64_hexle()\n print (\"Windows 64 bit Hex LE: \" + self.in_windows_hex_le + \" UTC\")\n elif args.goog:\n self.from_chrome()\n print (\"Google Chrome Time: \" + self.in_chrome + \" UTC\")\n elif args.active:\n self.from_ad()\n print (\"Active Directory Timestamp: \" + self.in_ad + \" UTC\")\n elif args.uhbe:\n self.from_unix_hex_32be()\n print (\"Unix Hex 32 bit BE: \" + self.in_unix_hex_32 + \" UTC\")\n elif args.uhle:\n self.from_unix_hex_32le()\n print (\"Unix Hex 32 bit LE: \" + self.in_unix_hex_32le + \" UTC\")\n elif args.cookie:\n self.from_cookie()\n print (\"Windows Cookie Date: \" + self.in_cookie + \" UTC\")\n elif args.oleb:\n self.from_ole_be()\n print (\"Windows OLE 64 bit double BE: \" + self.in_ole_be + \" UTC\")\n elif args.olel:\n self.from_ole_le()\n print (\"Windows OLE 64 bit double LE: \" + self.in_ole_le + \" UTC\")\n elif args.mac:\n self.from_mac()\n print (\"Mac Absolute Time: \" + self.in_mac + \" UTC\")\n elif args.hfsdec:\n self.from_hfs_dec()\n print (\"Mac OS/HFS+ Decimal Date: \" + self.in_hfs_dec + \" UTC\")\n elif args.hfsbe:\n self.from_hfs_be()\n print (\"HFS/HFS+ 32 bit Hex BE: \" + self.in_hfs_be + \" HFS Local / HFS+ UTC\")\n elif args.hfsle:\n self.from_hfs_le()\n print (\"HFS/HFS+ 32 big Hex LE: \" + self.in_hfs_le + \" HFS Local / HFS+ UTC\")\n elif args.msdos:\n self.from_msdos()\n print (\"MS-DOS 32 bit Hex Value: \" + self.in_msdos + \" Local\")\n elif args.fat:\n self.from_fat()\n print (\"FAT Date + Time: \" + self.in_fat + \" Local\")\n elif args.sys:\n self.from_systime()\n print (\"Microsoft 128 bit SYSTEMTIME: \" + self.in_systemtime + \" UTC\")\n elif args.ft:\n self.from_filetime()\n print (\"Microsoft FILETIME/LDAP time: \" + self.in_filetime + \" UTC\")\n elif args.pr:\n self.from_prtime()\n print (\"Mozilla PRTime: \" + self.in_prtime + \" UTC\")\n elif args.auto:\n self.from_ole_auto()\n print (\"OLE Automation Date: \" + self.in_ole_auto + \" UTC\")\n elif args.ios:\n self.from_ios_time()\n print (\"iOS 11 Date: \" + self.in_iostime)\n elif args.sym:\n self.from_sym_time()\n print (\"Symantec AV Timestamp: \" + self.in_symtime)\n elif args.gps:\n self.from_gps_time()\n print (\"GPS Timestamp: \" + self.in_gpstime)\n elif args.timestamp:\n self.to_timestamps()\n elif args.guess:\n self.from_all()\n except Exception as e:\n logging.error(str(type(e)) + \",\" + str(e))",
"def fusion_api_get_appliance_time_and_locale(self, api=None, headers=None):\n return self.timeandlocale.get(api, headers)",
"def InitLocale(self):\n self.ResetLocale()\n if 'wxMSW' in wx.PlatformInfo:\n import locale\n try:\n lang, enc = locale.getdefaultlocale()\n self._initial_locale = wx.Locale(lang, lang[:2], lang)\n # locale.setlocale(locale.LC_ALL, lang)\n # locale.setlocale(locale.LC_ALL, 'C')\n with open('./launch.log', 'a') as fp:\n fp.write(f'wxApp_LocaleFix.InitLocale: lang = {lang}\\n')\n print(lang)\n except (ValueError, locale.Error) as ex:\n target = wx.LogStderr()\n orig = wx.Log.SetActiveTarget(target)\n with open('./launch.log', 'a') as fp:\n fp.write(f'wxApp_LocaleFix.InitLocale:except-0 Unable to set default locale: \\'{ex}\\'\\n')\n print(\"Unable to set default locale: '{}'\".format(ex))\n wx.LogError(\"Unable to set default locale: '{}'\".format(ex))\n wx.Log.SetActiveTarget(orig)\n try:\n locale.setlocale(locale.LC_ALL, lang.replace('_', '-'))\n except (ValueError, locale.Error) as ex:\n locale.setlocale(locale.LC_ALL, lang.replace('-', '_'))\n target = wx.LogStderr()\n orig = wx.Log.SetActiveTarget(target)\n with open('./launch.log', 'a') as fp:\n fp.write(f'wxApp_LocaleFix.InitLocale:except-1 Unable to set default locale: \\'{ex}\\'\\n')\n print(\"Unable to set default locale: '{}'\".format(ex))\n wx.LogError(\"Unable to set default locale: '{}'\".format(ex))\n wx.Log.SetActiveTarget(orig)",
"def test_long_not_configured(self):\n locale = {\n 'timeformat': '%H:%M',\n 'dateformat': '%Y-%m-%d',\n 'longdateformat': '',\n 'datetimeformat': '%Y-%m-%d %H:%M',\n 'longdatetimeformat': '',\n }\n assert (dt.datetime(2017, 1, 1), True) == guessdatetimefstr(\n '2017-1-1'.split(), locale=locale, default_day=dt.datetime.today())\n assert (dt.datetime(2017, 1, 1, 16, 30), False) == guessdatetimefstr(\n '2017-1-1 16:30'.split(), locale=locale, default_day=dt.datetime.today())",
"def __init__(self, *args, **kwargs):\n _gdi_.PyLocale_swiginit(self,_gdi_.new_PyLocale(*args, **kwargs))\n PyLocale._setCallbackInfo(self, self, PyLocale)",
"def _parse_args(self, *args, **kw):\n\n datefmt = kw.get('datefmt', getDefaultDateFormat())\n d = t = s = None\n ac = len(args)\n microsecs = None\n\n if ac == 10:\n # Internal format called only by DateTime\n yr, mo, dy, hr, mn, sc, tz, t, d, s = args\n elif ac == 11:\n # Internal format that includes milliseconds (from the epoch)\n yr, mo, dy, hr, mn, sc, tz, t, d, s, millisecs = args\n microsecs = millisecs * 1000\n\n elif ac == 12:\n # Internal format that includes microseconds (from the epoch) and a\n # flag indicating whether this was constructed in a timezone naive\n # manner\n yr, mo, dy, hr, mn, sc, tz, t, d, s, microsecs, tznaive = args\n if tznaive is not None: # preserve this information\n self._timezone_naive = tznaive\n\n elif not args or (ac and args[0] is None):\n # Current time, to be displayed in local timezone\n t = time()\n lt = safelocaltime(t)\n tz = self.localZone(lt)\n ms = (t - math.floor(t))\n s, d = _calcSD(t)\n yr, mo, dy, hr, mn, sc = lt[:6]\n sc = sc + ms\n self._timezone_naive = False\n\n elif ac == 1:\n arg = args[0]\n\n if arg == '':\n raise SyntaxError(arg)\n\n if isinstance(arg, DateTime):\n \"\"\"Construct a new DateTime instance from a given\n DateTime instance.\n \"\"\"\n t = arg.timeTime()\n s, d = _calcSD(t)\n yr, mo, dy, hr, mn, sc, tz = arg.parts()\n\n elif isinstance(arg, datetime):\n yr, mo, dy, hr, mn, sc, numerictz, tznaive = \\\n self._parse_iso8601_preserving_tznaive(arg.isoformat())\n if arg.tzinfo is None:\n self._timezone_naive = True\n tz = None\n else:\n self._timezone_naive = False\n # if we have a pytz tzinfo, use the `zone` attribute\n # as a key\n tz = getattr(arg.tzinfo, 'zone', numerictz)\n ms = sc - math.floor(sc)\n x = _calcDependentSecond2(yr, mo, dy, hr, mn, sc)\n\n if tz:\n try:\n zone = _TZINFO[tz]\n except DateTimeError:\n try:\n zone = _TZINFO[numerictz]\n except DateTimeError:\n raise DateTimeError(\n 'Unknown time zone in date: %s' % arg)\n tz = zone.tzinfo.zone\n else:\n tz = self._calcTimezoneName(x, ms)\n s, d, t, microsecs = _calcIndependentSecondEtc(tz, x, ms)\n\n elif (isinstance(arg, basestring) and\n arg.lower() in _TZINFO._zidx):\n # Current time, to be displayed in specified timezone\n t, tz = time(), _TZINFO._zmap[arg.lower()]\n ms = (t - math.floor(t))\n # Use integer arithmetic as much as possible.\n s, d = _calcSD(t)\n x = _calcDependentSecond(tz, t)\n yr, mo, dy, hr, mn, sc = _calcYMDHMS(x, ms)\n\n elif isinstance(arg, basestring):\n # Date/time string\n iso8601 = iso8601Match(arg.strip())\n fields_iso8601 = iso8601 and iso8601.groupdict() or {}\n if fields_iso8601 and not fields_iso8601.get('garbage'):\n yr, mo, dy, hr, mn, sc, tz, tznaive = \\\n self._parse_iso8601_preserving_tznaive(arg)\n self._timezone_naive = tznaive\n else:\n yr, mo, dy, hr, mn, sc, tz = self._parse(arg, datefmt)\n\n if not self._validDate(yr, mo, dy):\n raise DateError('Invalid date: %s' % arg)\n if not self._validTime(hr, mn, int(sc)):\n raise TimeError('Invalid time: %s' % arg)\n ms = sc - math.floor(sc)\n x = _calcDependentSecond2(yr, mo, dy, hr, mn, sc)\n\n if tz:\n try:\n tz = _TZINFO._zmap[tz.lower()]\n except KeyError:\n if numericTimeZoneMatch(tz) is None:\n raise DateTimeError(\n 'Unknown time zone in date: %s' % arg)\n else:\n tz = self._calcTimezoneName(x, ms)\n s, d, t, microsecs = _calcIndependentSecondEtc(tz, x, ms)\n\n else:\n # Seconds from epoch, gmt\n t = arg\n lt = safelocaltime(t)\n tz = self.localZone(lt)\n ms = (t - math.floor(t))\n s, d = _calcSD(t)\n yr, mo, dy, hr, mn, sc = lt[:6]\n sc = sc + ms\n\n elif ac 
== 2:\n if isinstance(args[1], basestring):\n # Seconds from epoch (gmt) and timezone\n t, tz = args\n ms = (t - math.floor(t))\n try:\n tz = _TZINFO._zmap[tz.lower()]\n except KeyError:\n if numericTimeZoneMatch(tz) is None:\n raise DateTimeError('Unknown time zone: %s' % tz)\n # Use integer arithmetic as much as possible.\n s, d = _calcSD(t)\n x = _calcDependentSecond(tz, t)\n yr, mo, dy, hr, mn, sc = _calcYMDHMS(x, ms)\n else:\n # Year, julian expressed in local zone\n t = time()\n lt = safelocaltime(t)\n tz = self.localZone(lt)\n yr, jul = args\n yr = _correctYear(yr)\n d = (_julianday(yr, 1, 0) - jd1901) + jul\n x_float = d * 86400.0\n x_floor = math.floor(x_float)\n ms = x_float - x_floor\n x = long(x_floor)\n yr, mo, dy, hr, mn, sc = _calcYMDHMS(x, ms)\n s, d, t, microsecs = _calcIndependentSecondEtc(tz, x, ms)\n else:\n # Explicit format\n yr, mo, dy = args[:3]\n hr, mn, sc, tz = 0, 0, 0, 0\n yr = _correctYear(yr)\n if not self._validDate(yr, mo, dy):\n raise DateError('Invalid date: {}'.format(args))\n args = args[3:]\n if args:\n hr, args = args[0], args[1:]\n if args:\n mn, args = args[0], args[1:]\n if args:\n sc, args = args[0], args[1:]\n if args:\n tz, args = args[0], args[1:]\n if args:\n raise DateTimeError('Too many arguments')\n if not self._validTime(hr, mn, sc):\n raise TimeError('Invalid time: %s' % repr(args))\n\n x = _calcDependentSecond2(yr, mo, dy, hr, mn, sc)\n ms = sc - math.floor(sc)\n if tz:\n try:\n tz = _TZINFO._zmap[tz.lower()]\n except KeyError:\n if numericTimeZoneMatch(tz) is None:\n raise DateTimeError('Unknown time zone: %s' % tz)\n else:\n # Get local time zone name\n tz = self._calcTimezoneName(x, ms)\n s, d, t, microsecs = _calcIndependentSecondEtc(tz, x, ms)\n\n self._dayoffset = int((_julianday(yr, mo, dy) + 2) % 7)\n # Round to nearest microsecond in platform-independent way. You\n # cannot rely on C sprintf (Python '%') formatting to round\n # consistently; doing it ourselves ensures that all but truly\n # horrid C sprintf implementations will yield the same result\n # cross-platform, provided the format asks for exactly 6 digits after\n # the decimal point.\n sc = round(sc, 6)\n if sc >= 60.0: # can happen if, e.g., orig sc was 59.9999999\n sc = 59.999999\n self._nearsec = math.floor(sc)\n self._year, self._month, self._day = yr, mo, dy\n self._hour, self._minute, self._second = hr, mn, sc\n self.time, self._d, self._tz = s, d, tz\n # self._micros is the time since the epoch\n # in long integer microseconds.\n if microsecs is None:\n microsecs = long(round(t * 1000000.0))\n self._micros = microsecs",
"def main():\n\n # check database for tracking options\n # if empty prompt to add subject\n\n # present tracking options\n\n # calculate timedelta\n\n # printing/updating the time",
"def Args(parser):\n parser.add_argument(\n 'instance',\n help='Cloud SQL instance ID.')\n parser.add_argument(\n '--due-time',\n '-d',\n required=True,\n help='The time when this run was due to start in RFC 3339 format, for '\n 'example 2012-11-15T16:19:00.094Z.')",
"def display_strptime_formatters():\n data = [\n [\"%a\", \"Weekday as locale's abbreviated name.\", \"Mon\"],\n [\"%A\", \"Weekday as locale's full name.\", \"Monday\"],\n [\"%w\", \"Weekday as a decimal number, where 0 is Sunday and 6 is Saturday.\", \"1\"],\n [\"%d\", \"Day of the month as a zero-padded decimal number.\", \"30\"],\n [\"%-d\", \"Day of the month as a decimal number. (Platform specific)\", \"30\"],\n [\"%b\", \"Month as locale's abbreviated name.\", \"Sep\"],\n [\"%B\", \"Month as locale's full name.\", \"September\"],\n [\"%m\", \"Month as a zero-padded decimal number.\", \"09\"],\n [\"%-m\", \"Month as a decimal number. (Platform specific)\", \"9\"],\n [\"%y\", \"Year without century as a zero-padded decimal number.\", \"13\"],\n [\"%Y\", \"Year with century as a decimal number.\", \"2013\"],\n [\"%H\", \"Hour (24-hour clock) as a zero-padded decimal number.\", \"07\"],\n [\"%-H\", \"Hour (24-hour clock) as a decimal number. (Platform specific)\", \"7\"],\n [\"%I\", \"Hour (12-hour clock) as a zero-padded decimal number.\", \"07\"],\n [\"%-I\", \"Hour (12-hour clock) as a decimal number. (Platform specific)\", \"7\"],\n [\"%p\", \"Locale's equivalent of either AM or PM.\", \"AM\"],\n [\"%M\", \"Minute as a zero-padded decimal number.\", \"06\"],\n [\"%-M\", \"Minute as a decimal number. (Platform specific)\", \"6\"],\n [\"%S\", \"Second as a zero-padded decimal number.\", \"05\"],\n [\"%-S\", \"Second as a decimal number. (Platform specific)\", \"5\"],\n [\"%f\", \"Microsecond as a decimal number, zero-padded on the left.\", \"000000\"],\n [\"%z\", \"UTC offset in the form +HHMM or -HHMM (empty string if the the object is naive).\", \"\"],\n [\"%Z\", \"Time zone name (empty string if the object is naive).\", \"\"],\n [\"%j\", \"Day of the year as a zero-padded decimal number.\", \"273\"],\n [\"%-j\", \"Day of the year as a decimal number. (Platform specific)\", \"273\"],\n [\"%U\", \"Week number of the year (Sunday as the first day of the week) as a zero padded decimal number. All days in a new year preceding the first Sunday are considered to be in week 0.\", \"39\"],\n [\"%W\", \"Week number of the year (Monday as the first day of the week) as a decimal number. All days in a new year preceding the first Monday are considered to be in week 0.\", \"39\"],\n [\"%c\", \"Locale's appropriate date and time representation.\", \"Mon Sep 30 07:06:05 2013\"],\n [\"%x\", \"Locale's appropriate date representation.\", \"09/30/13\"],\n [\"%X\", \"Locale's appropriate time representation.\", \"07:06:05\"],\n [\"%%\", \"A literal '%' character.\", \"%\"]\n ]\n\n display(HTML(\n '<table><tr>{}</tr></table>'.format(\n '</tr><tr>'.join(\n '<td>{}</td>'.format('</td><td>'.join(str(_) for _ in row)) for row in data)\n )\n ))",
"def svn_info_t_text_time_set(svn_info_t_self, apr_time_t_text_time): # real signature unknown; restored from __doc__\n pass",
"def do_rt(self, arg):\n self.do_timesheet('report today')",
"def do_locale(args):\r\n # Global can't be defined at module level. Processes are wierd. pylint: disable=W0601\r\n global ARGS\r\n signal.signal(signal.SIGINT, signal.SIG_IGN) # Set the workers to ignore KeyboardInterrupts.\r\n # Unpack arguments\r\n lang, langs, stem, cstem, modfilter, brname, browser, ARGS = args\r\n parseargs()\r\n # A Hack. CN has a different structure, so use a different url form.\r\n if lang == 'cn':\r\n stem = cstem\r\n # Reset the driver between rounds\r\n restart_driver(browser)\r\n # Log into the site, so you can access the modules.\r\n try:\r\n log_in(lang)\r\n except Exception:\r\n DRIVER.quit()\r\n return '\"Login to {0} failed. That breaks the whole locale, look into it:\\n{1}\"'.format(\r\n lang, tidy_error().replace('\"', '\"\"'))\r\n\r\n # Start recording results.\r\n result = '_'.join([lang.upper(), brname.upper()])\r\n for mod in modfilter:\r\n try:\r\n # Figure out the locale coding.\r\n url = stem.format(langs[lang][0].replace('-', '_'), MODULES[mod][lang])\r\n DRIVER.get(url)\r\n begin_module()\r\n # Try to do the module\r\n for elem in SCRIPTS[mod]:\r\n domo(elem)\r\n result += ',\"{0}: PASS\"'.format(get_time())\r\n # Something goes wrong, document it and go to the next module.\r\n except Exception:\r\n result += ',\"{0}: FAIL: {1}\"'.format(get_time(), tidy_error().replace('\"', '\"\"'))\r\n draw_failure(lang, mod)\r\n DRIVER.quit()\r\n return result",
"def __init__(self, args, config_file):\n super(Timesheet, self).__init__()\n self.configure_attr(args, config_file)",
"def on_action_set_time_format(self, content):\n self.set_time_format(content['time_format'])",
"def running_custom_hour(arg):\n pass",
"def Init1(*args, **kwargs):\n return _gdi_.Locale_Init1(*args, **kwargs)",
"def ConfigureDefaults(area_bounds=None, \n area_bounds_format=['x_min','y_min','x_max','y_max'], \n area_bounds_range=None, years_are_bounds=False,\n dates_are_bounds=False, init_date_str_format='%y%m%d',\n member_name='realization', period_name='time', \n initialistion_time_name='forecast_reference_time'): \n global default_area_bounds\n global default_area_bounds_format\n global default_area_bounds_range\n global default_years_are_bounds\n global default_dates_are_bounds\n global default_init_date_str_format\n global default_member_name\n global default_period_name\n global default_initialistion_time_name\n \n default_area_bounds = area_bounds\n default_area_bounds_format = area_bounds_format\n default_area_bounds_range = area_bounds_range\n default_years_are_bounds = years_are_bounds\n default_dates_are_bounds = dates_are_bounds\n default_init_date_str_format = init_date_str_format\n default_member_name = member_name\n default_period_name = period_name\n default_initialistion_time_name = initialistion_time_name",
"def set_command_time(self, *args, **kwargs):\n return _uhd_swig.usrp_source_set_command_time(self, *args, **kwargs)",
"def __init__(self, fromTime, toTime='', language=''):\n self.update_filters(fromTime, toTime, language)",
"def date(*args):\n current_date = datetime.now().isoformat(' ').split('.')[0]\n send.system_message(current_date)",
"def main(args=None):\n jd = get_juldate()\n print(jd)\n return",
"def includeme(config):\r\n config.add_translation_dirs('faapp:locale', )\r\n config.add_subscriber('faapp.locale.add_renderer_globals', 'pyramid.events.BeforeRender')\r\n config.add_subscriber('faapp.locale.add_localizer', 'pyramid.events.NewRequest')",
"def test_function():\n test_date_time = datetime.now()\n print(test_date_time.strftime('%a')) # abrivated Locale weekday name\n print(test_date_time.strftime('%A')) # full Locale weekday name\n print(test_date_time.strftime('%b')) # abrivated Locale month name\n print(test_date_time.strftime('%B')) # full Locale month name\n print(test_date_time.strftime('%c')) # Date and time representation\n print(test_date_time.strftime('%d')) # month day number\n print(test_date_time.strftime('%H')) # hour 24 hour format\n print(test_date_time.strftime('%I')) # hour 12 hour format\n print(test_date_time.strftime('%j')) # day no of the year\n print(test_date_time.strftime('%m')) # month number (01-12)\n print(test_date_time.strftime('%M')) # Minute (00-59)\n print(test_date_time.strftime('%p')) # AM or PM\n print(test_date_time.strftime('%S')) # seconds (00-59)\n print(test_date_time.strftime('%w')) # week day number (0-sunday, 1-monday,..)\n print(test_date_time.strftime('%W')) # week number of the year\n print(test_date_time.strftime('%x')) # date expression\n print(test_date_time.strftime('%X')) # time expression\n print(test_date_time.strftime('%y')) # last 2 digit year\n print(test_date_time.strftime('%Y')) # 4 digit year\n print(test_date_time.strftime('%Z')) # Time zone name (no characters if time zone not exist)\n print(test_date_time.strftime('%%')) # A percentage character",
"def timezone():\n \n pass",
"def _init_system(*args):\n __set_time_elements(args[0], args[1])\n __set_control_elements(args[0], args[2], args[3])\n __set_sensor_elements(args[0], args[4], args[5], args[6], args[7])",
"def do_rrt(self, arg):\n self.do_timesheet('report extend track today')"
] | [
"0.6263801",
"0.5611947",
"0.5575541",
"0.5507469",
"0.5434917",
"0.52419883",
"0.5239562",
"0.51864845",
"0.51664406",
"0.5156668",
"0.5128682",
"0.5097738",
"0.50857764",
"0.5076884",
"0.50143385",
"0.50018144",
"0.49850664",
"0.49705616",
"0.49607",
"0.49502623",
"0.49257427",
"0.48803166",
"0.48580524",
"0.48574045",
"0.48276672",
"0.48194188",
"0.48030245",
"0.47743803",
"0.47732732",
"0.47692975"
] | 0.68499887 | 0 |
Returns appliance time, and locale [Example] ${resp} = Fusion Api Get Appliance Interfaces | | | | def fusion_api_get_appliance_time_and_locale(self, api=None, headers=None):
return self.timeandlocale.get(api, headers) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def fusion_api_configure_appliance_time_and_locale(self, body=None, api=None, headers=None):\n return self.timeandlocale.configure(body, api, headers)",
"def get_system_time(self):\r\n method = self.public_endpoints['system_time']['method']\r\n url = self.base_url + self.public_endpoints['system_time']['url']\r\n req = requests.request(method, url)\r\n res = req.json()\r\n\r\n if res['success'] == True:\r\n return res[\"result\"]\r\n else:\r\n return res",
"def test_get_game_time_on_ice(self):\n msg = \"Response status is not 200\"\n response = self.api.get_game_time_on_ice(self.game_id)\n self.assertEqual(response.status_code, 200, msg)",
"async def server_time(self):\n uri = \"/fapi/v1/time\"\n success, error = await self.request(\"GET\", uri)\n return success, error",
"def _disp_times():\n fields = request.args.get('fields', type=str)\n format_type = request.args.get('format', type=str)\n top = request.args.get('top', type=int)\n token = request.args.get('token', type=str)\n results = {}\n\n result, length, code = retrieve(token, format_type, top, request_table[fields])\n return flask.jsonify(result=result, length=length, code=code)\n\n # elif code == 401: # Unauthorized\n # app.logger.debug(\"Token Expired! Let's log the user out.\")\n # return render_template('calc.html')",
"def get_patient_status():\n r = requests.get(\"http://vcm-7474.vm.duke.edu:5000/api/heart_rate/3\")\n print(r.text)",
"def get_uk_time(message):\n time_api = 'http://worldtimeapi.org/api/timezone/Europe/London.json'\n london_time = requests.get(time_api).json()\n\n return(\"The current time in London, England is {}\".format(\n london_time['datetime'][11:16]))",
"async def server_time(self):\n uri = \"/v3/time\"\n success, error = await self.request(\"GET\", uri)\n return success, error",
"def fusion_api_get_appliance_status(self, api=None, headers=None):\n return self.info.get_status(api=api, headers=headers)",
"def __repr__(self):\n return \"API Server Running at: \" + str(self.host) + \" on Port:\" + str(self.port) \\\n + \"/records: Return records\\n\" + \"/response_time: Return All response_times\\n\" \\\n + \"/response_time/max: Return max response_times\\n\" \\\n + \"/response_time/min: Return min response_times\\n\" \\\n + \"/response_time/average: Return average response_times\\n\" \\\n + \"/response_time/all_stats: Return tuple of(min, max, average) response_times\\n\"",
"def printing_weather_1(accu_response):\n\n min_temperature = f\"{accu_response['DailyForecasts'][0]['Temperature']['Minimum']['Value']} ºC\"\n max_temperature = f\"{accu_response['DailyForecasts'][0]['Temperature']['Maximum']['Value']} ºC\"\n weather_forecast = accu_response['Headline']['Text']\n for_what_time = accu_response['Headline']['EffectiveDate']\n source = ' -- AccuWeather API Service'\n\n #PRINTING WEATHER INFORMATION --\n title_printer(\" ---- WEATHER SOON ---- \")\n\n print(\"MINIMAL TEMPERATURE\", end=\"\")\n print(f\"{min_temperature:.>62}\")\n time.sleep(0.5)\n\n print(\"MAXIMUM TEMPERATURE\", end=\"\")\n print(f\"{max_temperature:.>62}\")\n time.sleep(0.5)\n\n print(\"\")\n print(\"MEASUREMENT:\")\n print(for_what_time)\n print(\"\")\n time.sleep(0.5)\n\n print(weather_forecast)\n print(source)\n print(\"= \" * 40)\n time.sleep(0.5)",
"def ALOHA():\r\n return (\r\n \r\n f\"<h1>ALOHA!!!</h1></br>\"\r\n f\"<h2>This API is for Climate Data in Hawaii</h2></br>\"\r\n f\"Available Routes:<br/>\"\r\n f\"/api/v1.0/precipitation<br/><br/>\"\r\n f\"/api/v1.0/stations<br/><br/>\"\r\n f\"/api/v1.0/tobs<br/><br/>\"\r\n f\"/api/v1.0/start_date</br>\"\r\n f\"/api/v1.0/start_date/end_date\"\r\n \r\n )",
"def _timeserie() -> Tuple[str, str, str]:\n return (\n \"OK\",\n \"text/html\",\n timeserie(app.host, os.environ.get(\"MAPBOX_ACCESS_TOKEN\", \"\")),\n )",
"def home():\n return (\n f\"Welcome to the Hawaii Weather API<br/>\"\n \"<br/>\"\n f\"Available Routes:<br/>\"\n f\"/api/v1.0/precipitation<br/>\"\n f\"/api/v1.0/stations<br/>\"\n f\"/api/v1.0/tobs<br/>\"\n f\"/api/v1.0/start_date<br/>\"\n f\"/api/v1.0/start_date/end_date<br/>\"\n \"<br/>\"\n f\"Date format: YYYY-MM-DD\"\n )",
"def print_response(response):\n print(f\"Response for {url}\")\n if response.status_code == 200:\n # Green text\n print(f\"\\033[1;32;40m {response.status_code} {response.reason}\\033[1;37;40m\")\n else:\n # Red text\n print(f\"\\033[1;31;40m {response.status_code} {response.reason}\\033[1;37;40m\")\n # print(response.json())\n print(f\" {response.elapsed.total_seconds()} seconds elapsed.\")",
"def _query_aprs_api(self):\n \n # Query APRS.fi for the balloon's location\n try:\n aprs_request = urllib2.Request(self._aprs_api_endpoint)\n aprs_opener = urllib2.build_opener()\n aprs_response = aprs_opener.open(aprs_request, None, self.aprs_update_timeout)\n except Exception as e:\n # Error downloading the file\n raise APRSAPIError('There was an error querying the APRS.fi API.')\n \n # Parse the APRS response\n try:\n parsed_response = json.load(aprs_response)\n except ValueError as e:\n # Error parsing the response\n raise APRSAPIError('There was an error parsing the JSON response from the APRS.fi API.')\n\n # Check for an API error\n if parsed_response['result'] == \"fail\":\n raise APRSAPIError('An error occured querying the APRS.fi API: \"'+parsed_response['description']+'\"')\n\n # Format the response into the expected format\n final_response = {\n 'timestamp': int(parsed_response['entries'][0]['time']),\n 'longitude': float(parsed_response['entries'][0]['lng']),\n 'latitude': float(parsed_response['entries'][0]['lat']),\n 'altitude': float(parsed_response['entries'][0]['altitude'])\n }\n\n return final_response",
"def print_response(response):\n #fyi this is not my code, i grabbed it from github\n #forgot to copy the url though\n for report in response.get('reports', []):\n columnHeader = report.get('columnHeader', {})\n dimensionHeaders = columnHeader.get('dimensions', [])\n metricHeaders = columnHeader.get('metricHeader', {}).get('metricHeaderEntries', [])\n\n for row in report.get('data', {}).get('rows', []):\n dimensions = row.get('dimensions', [])\n dateRangeValues = row.get('metrics', [])\n\n for header, dimension in zip(dimensionHeaders, dimensions):\n print header + ': ' + dimension\n\n for i, values in enumerate(dateRangeValues):\n print 'Date range: ' + str(i)\n for metricHeader, value in zip(metricHeaders, values.get('values')):\n print metricHeader.get('name') + ': ' + value",
"def get_alarm_info(self):\n response = self.get(COMMAND_UIC, 'GetAlarmInfo')\n\n return response_list(response['alarmList']['alarm'])",
"def DemoTime(session):\n message = \"Its Demo time? <p>I love Demo time. Dem Dem Demo Time.</p>\"\n return ResponseBuilder.create_response(message=message, message_is_ssml=True,\n end_session=True)",
"def alerts_info(): \n\n\n user_id = session['user_id']\n user = User.query.get(user_id)\n lat = str(user.location.lat)\n lng = str(user.location.lng)\n\n r = requests.get('https://api.forecast.io/forecast/45713f3bbbe3402dbe4aff89c61caccd/' + lat + \",\" + lng)\n\n data = r.json()\n\n alerts = {\n 'apparentTemperature': data['currently']['apparentTemperature'],\n 'humidity': data['currently']['humidity'],\n \"nearestStormDistance\": data[\"currently\"][\"nearestStormDistance\"],\n \"summary\": data['currently'][\"summary\"], \n }\n\n return jsonify(alerts)",
"def test_time_status(self):\n result = self.test_client.time_status\n\n assert result == \"12312\"",
"def negotiate_time(self, update, context):\n chat_id = update.effective_chat.id\n response_code = update.callback_query[\"data\"] # eta_later, eta_never, eta_20:45, etc.\n log.info(\"Offer @%s raw: @%s\", update.effective_chat.id, response_code)\n\n if response_code == \"eta_never\":\n # the user pressed the button to say they're cancelling their offer\n self.send_message(chat_id, c.MSG_THANKS_NOTHANKS)\n context.user_data[\"reviewed_request\"] = None\n context.user_data[\"state\"] = c.State.AVAILABLE\n\n elif response_code == \"eta_later\":\n # Show them more options in the interactive menu\n self.updater.bot.send_message(\n chat_id=chat_id,\n text=\"Alege timpul\",\n reply_markup=InlineKeyboardMarkup(k.build_dynamic_keyboard()),\n )\n else:\n # This is an actual offer, ot looks like `eta_20:40`, extract the actual timestamp in UTC\n offer = response_code.split(\"_\")[-1]\n log.info(\n \"Relaying offer @%s UTC (%s %s)\", offer, utc_short_to_user_short(offer), c.TIMEZONE\n )\n\n # tell the backend about it\n request_id = context.user_data[\"reviewed_request\"]\n self.backend.relay_offer(request_id, chat_id, offer)\n\n # tell the user that this is now processed by the server\n self.send_message(\n chat_id, (c.MSG_ACK_TIME % utc_short_to_user_short(offer)) + c.MSG_COORDINATING\n )",
"def info ():\n\n info = {\n 'name' : app.config['APPLICATION_NAME'],\n 'short_name' : app.config['APPLICATION_SHORT_NAME'],\n 'main_page_url' : app.config['APPLICATION_MAIN_URL'],\n # 'css_url' : app.config.get ('APPLICATION_CSS_URL', ''),\n 'css' : 'span.smalltext { font-size: smaller }',\n 'supported_langs_query' : [ LANG ],\n }\n return make_json_response (info)",
"def get_alarms(username, auth, url):\n f_url = url + \"/imcrs/fault/alarm?operatorName=\" + username + \\\n \"&recStatus=0&ackStatus=0&timeRange=0&size=50&desc=true\"\n response = requests.get(f_url, auth=auth, headers=HEADERS)\n try:\n if response.status_code == 200:\n alarm_list = (json.loads(response.text))\n return alarm_list['alarm']\n except requests.exceptions.RequestException as error:\n return \"Error:\\n\" + str(error) + ' get_alarms: An Error has occured'",
"def home():\n\n # Provide the date range (from the most distant to the recent date) for\n # filtering in the last two API routes\n session = Session(engine)\n start_limit = session.query(Measurement.date).filter(Measurement.date).\\\n order_by(Measurement.date).first()\n end_limit = session.query(Measurement.date).filter(Measurement.date).\\\n order_by(Measurement.date.desc()).first()\n\n return (\n f'Available Routes:<br/>'\n f'<br/>'\n f'/api/v1.0/precipitation<br/>'\n f'/api/v1.0/stations<br/>'\n f'/api/v1.0/tobs<br/>'\n f'<br/>'\n f'/api/v1.0/start<br/>'\n f'/api/v1.0/start/end<br/>'\n f'<br/>'\n f'*Please use \"yyyy-mm-dd\" as the date format to replace the \"start\" and/or \"end\" parameter(s) in the last two API routes in order to filter summarized temperature results based on desired date range:<br/>'\n f'The earliest date available in this dataset is {start_limit[0]}<br/>'\n f'The most recent date available in this dataset is {end_limit[0]}<br/>'\n )",
"def adc_api_help():\n _help_msg[\"headers\"] = str(request.headers)\n return jsonify(_help_msg)",
"def read_home():\n return {'message': 'API live!'}",
"def apiai_response(query, session_id):\n\trequest = ai.text_request()\n\trequest.lang='en'\n\trequest.session_id=session_id\n\trequest.query = query\n\tresponse = request.getresponse()\n\treturn json.loads(response.read().decode('utf8'))",
"def initialize_timer():\n try:\n print_debug(\"Initializing the timer by fetching it on the online API\")\n response = WEB_INSTANCE.open(config.API_LOCATION).read()\n response = response.rstrip()\n print_debug(\"Found \"+str(response)+\" on the online API\")\n save_time_left(response)\n return response\n except Exception, e:\n print(e)\n return 'WAITING'",
"def _get_ip_resp(api_url: str):\n return get(api_url, headers={'user-agent': USER_AGENT})"
] | [
"0.5711539",
"0.56033796",
"0.5578812",
"0.55610555",
"0.548458",
"0.5395302",
"0.5336408",
"0.5309246",
"0.5282827",
"0.5265166",
"0.51527244",
"0.5128419",
"0.511608",
"0.50852835",
"0.5059935",
"0.5021352",
"0.5005887",
"0.49950993",
"0.49843264",
"0.4951892",
"0.49455053",
"0.49429762",
"0.49286774",
"0.4900371",
"0.48917603",
"0.48896304",
"0.4885569",
"0.48732397",
"0.48686934",
"0.48676336"
] | 0.7146377 | 0 |
Returns appliance trap destinations [Example] ${resp} = Fusion Api Get Appliance Trap Destinations | | | | | def fusion_api_get_appliance_trap_destinations(self, id=None, param='', api=None, headers=None): # pylint: disable=W0622
return self.trap.get(id=id, param=param, api=api, headers=headers) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def fusion_api_get_appliance_snmpv3_trap_destinations(self, id=None, param='', api=None, headers=None): # pylint: disable=W0622\n return self.snmpv3trap.get(id=id, param=param, api=api, headers=headers)",
"def traceroute(self,dest):\n\t\tself.tn.write('traceroute %s\\n'%(dest))\n\t\tself.tn.write('exit\\n')\n\t\tresp = self.tn.read_all()\n\t\treturn resp",
"def get_log_forwarding_destinations(self) -> dict:\n uri = f\"{self.uri}/log-forwarding-destinations\"\n\n response = self.request(uri=uri)\n return response.json()",
"def fusion_api_validate_appliance_trap_destination(self, body=None, api=None, headers=None):\n return self.trap.validate(body=body, api=api, headers=headers)",
"def destinations(self) -> Optional[Sequence['outputs.AddressPrefixItemResponse']]:\n return pulumi.get(self, \"destinations\")",
"def fusion_api_delete_appliance_trap_destination(self, id=None, api=None, headers=None): # pylint: disable=W0622\n return self.trap.delete(id=id, api=api, headers=headers)",
"def getDestinations(self) -> dict:\n if self.loggingEnabled:\n self.logger.debug(f\"Starting getDestinations\")\n path = \"/config/destinations\"\n res = self.connector.getData(self.endpoint + path)\n return res",
"def _process_listroute_response(response):\n response[\"aircraft_id\"] = response.pop(config_param(\"query_aircraft_id\"))\n return response",
"def adapt_departures_by_stop_code(response):\n route_list = response['RTT']['AgencyList']['Agency']['RouteList']['Route']\n response_by_line = {}\n\n for route in route_list:\n formatted_response = {\n \"direction\": route['RouteDirectionList']['RouteDirection']['@Name'],\n \"line_code\": route['@Code'],\n \"line_name\": route['@Name'],\n \"stop\": route['RouteDirectionList']['RouteDirection']['StopList']['Stop']['@StopCode'],\n \"stop_name\": route['RouteDirectionList']['RouteDirection']['StopList']['Stop']['@name'],\n \"times\": [],\n }\n if route['RouteDirectionList']['RouteDirection']['StopList']['Stop']['DepartureTimeList']:\n formatted_response['times'] = route['RouteDirectionList']['RouteDirection']['StopList']['Stop']['DepartureTimeList']['DepartureTime']\n formatted_response['times'] = sorted([int(t) for t in formatted_response['times']])\n response_by_line[route['@Code']] = formatted_response\n\n return response_by_line",
"def get_traceroute_output(self):\n url = self.source['url']\n if 'post_data' in self.source:\n context = self.source['post_data']\n else:\n context = None\n status_code, content = self.urlopen(url, context=context)\n content = content.strip()\n regex = r'<pre.*?>(?P<traceroute>.*?)</pre>'\n pattern = re.compile(regex, re.DOTALL | re.IGNORECASE)\n try:\n traceroute = re.findall(pattern, content)[0].strip()\n except IndexError:\n # Manually append closing </pre> for partially downloaded page\n content = \"{0}</pre>\".format(content)\n traceroute = re.findall(pattern, content)[0].strip()\n return (status_code, traceroute)",
"def directions(origin, destination, mode, language, arrival_time, departure_time): \n now = datetime.now()\n res = gmaps.directions(origin,\n destination,\n mode=mode,\n departure_time=now,\n arrival_time=arrival_time,\n language=language)\n\n #return(res[0][\"legs\"][0])\n\n if not res:\n # empty\n return \"Not Found\"\n else:\n return res",
"def get_destination(event):\n if event['result']['parameters']['destination_station']:\n return event['result']['parameters']['destination_station']['destination']\n else:\n return \"\"",
"def getSDDCT0routes(proxy_url, session_token):\n t0_routes_json = get_sddc_t0_routes_json(proxy_url, session_token)\n t0_routes = {}\n if 'results' in t0_routes_json:\n pass\n else:\n print(\"No results. Something went wrong - please check your syntax and try again.\")\n sys.exit(1)\n\n if t0_routes_json == None:\n print(\"API Error\")\n sys.exit(1)\n elif len(t0_routes_json['results']) == 1:\n t0_routes = t0_routes_json['results'][0]['route_entries']\n elif len(t0_routes_json['results']) >1:\n t0_routes0 = t0_routes_json['results'][0]['route_entries']\n t0_routes1 = t0_routes_json['results'][1]['route_entries']\n t0_routes = t0_routes0 + t0_routes1\n\n df = pd.DataFrame(t0_routes)\n df.drop(['lr_component_id', 'lr_component_type'], axis=1, inplace=True)\n df.drop_duplicates(inplace = True)\n print('T0 Routes')\n print('Route Type Legend:')\n print('t0c - Tier-0 Connected\\nt0s - Tier-0 Static\\nb - BGP\\nt0n - Tier-0 NAT\\nt1s - Tier-1 Static\\nt1c - Tier-1 Connected\\nisr: Inter-SR')\n print()\n print(df.sort_values(by=[ 'route_type', 'network'], ascending=True).to_string())\n # route_table = PrettyTable(['Route Type', 'Network', 'Admin Distance', 'Next Hop'])\n # for routes in t0_routes:\n # route_table.add_row([routes['route_type'],routes['network'],routes['admin_distance'],routes['next_hop']])\n # print (route_table.get_string(sort_key = operator.itemgetter(1,0), sortby = \"Network\", reversesort=True))",
"def get_rogueap_location(self, conn, macaddr: str, offset=0, limit=100, units=\"FEET\"):\n path = urlJoin(urls.ROGUE_LOCATION[\"GET_AP_LOC\"], macaddr)\n params = {\n \"offset\": offset,\n \"limit\": limit,\n \"units\": units\n }\n resp = conn.command(apiMethod=\"GET\", apiPath=path, apiParams=params)\n return resp",
"async def test_wanted(aresponses):\n aresponses.add(\n MATCH_HOST,\n \"/api/wanted/missing?sortKey=airDateUtc&page=1&pageSize=10&sortDir=desc\",\n \"GET\",\n aresponses.Response(\n status=200,\n headers={\"Content-Type\": \"application/json\"},\n text=load_fixture(\"wanted-missing.json\"),\n ),\n match_querystring=True,\n )\n\n async with ClientSession() as session:\n client = Sonarr(HOST, API_KEY, session=session)\n response = await client.wanted()\n\n assert response\n assert isinstance(response, models.WantedResults)\n\n assert response.page == 1\n assert response.per_page == 10\n assert response.total == 2\n assert response.sort_key == \"airDateUtc\"\n assert response.sort_dir == \"descending\"\n\n assert response.episodes\n assert isinstance(response.episodes, List)\n assert len(response.episodes) == 2\n\n assert response.episodes[0]\n assert isinstance(response.episodes[0], models.Episode)",
"def respond_to_bart_intent(self, intent):\n try: \n if intent.destination is None: \n etd_dict = self.bart_api.first_leg_train_etd(origin_station_name=intent.origin)\n else:\n etd_dict = self.bart_api.first_leg_train_etd(origin_station_name=intent.origin,\n destination_station_name=intent.destination)\n\n if not etd_dict:\n response = NoDeparturesResponse()\n return response\n else: \n response = BARTQueryResponse()\n response.routes = [{ \n \"origin\": intent.origin, \n \"destination\": dest,\n \"departures\": departures\n } for dest, departures in etd_dict.items()]\n return response\n\n except ValueError as e: \n if e is not None:\n response = NamesNotFoundResponse()\n response.names.append({ \"name\": e.args[0], \"type\": \"route\" })\n return response",
"def fusion_api_add_appliance_snmpv3_trap_destination(self, body=None, api=None, headers=None):\n return self.snmpv3trap.create(body=body, api=api, headers=headers)",
"def getSDDCT0BGPRoutes(proxy, session_token):\n bgp_neighbors = get_sddc_t0_bgp_neighbors_json(proxy, session_token)\n if bgp_neighbors == None:\n print(\"API Error\")\n sys.exit(1)\n\n learnedRoutesTable = PrettyTable(['BGP Neighbor', 'Source Address', 'AS Path', 'Network', 'Next Hop'])\n advertisedRoutesTable = PrettyTable(['BGP Neighbor', 'Source Address', 'Network', 'Next Hop'])\n if 'results' in bgp_neighbors:\n neighbors = bgp_neighbors['results']\n else:\n print(\"No results. Something went wrong - please check your syntax and try again.\")\n sys.exit(1)\n for i in range(len(neighbors)):\n bgp_neighbor_id = neighbors[i]['id']\n route_learned_json = get_sddc_t0_learned_routes_json(proxy, session_token, bgp_neighbor_id)\n if route_learned_json == None:\n print(\"API Error\")\n sys.exit(1)\n\n route_advertised_json = get_sddc_t0_advertised_routes_json(proxy, session_token, bgp_neighbor_id)\n if route_advertised_json == None:\n print(\"API Error\")\n sys.exit(1)\n\n# Building the learned routes table\n edgeLearnedRoutes = route_learned_json['results'][0]['egde_node_routes']\n sourceAddrLearned = edgeLearnedRoutes[0]['source_address']\n bgpLearnedRoutes = edgeLearnedRoutes[1]['routes']\n for x in range(len(bgpLearnedRoutes)):\n learnedRoutesTable.add_row([bgp_neighbor_id,sourceAddrLearned,bgpLearnedRoutes[x]['as_path'],bgpLearnedRoutes[x]['network'],bgpLearnedRoutes[x]['next_hop']])\n# Building the advertised routes table\n edgeAdvertisedRoutes = route_advertised_json['results'][0]['egde_node_routes']\n sourceAddrAdvertised = edgeAdvertisedRoutes[0]['source_address']\n bgpAdvertisedRoutes = edgeAdvertisedRoutes[1]['routes']\n for y in range(len(bgpAdvertisedRoutes)):\n advertisedRoutesTable.add_row([bgp_neighbor_id,sourceAddrAdvertised,bgpAdvertisedRoutes[y]['network'],bgpAdvertisedRoutes[y]['next_hop']])\n print ('BGP Advertised Routes')\n print (advertisedRoutesTable.get_string(sortby=\"BGP Neighbor\"))\n print ('BGP Learned Routes')\n print (learnedRoutesTable.get_string(sortby=\"BGP Neighbor\"))",
"def fusion_api_add_or_update_appliance_trap_destination(self, body=None, id=None, api=None, headers=None): # pylint: disable=W0622\n return self.trap.create(body=body, id=id, api=api, headers=headers)",
"def getSDDCT0staticroutes(proxy_url,session_token):\n t0_static_routes_json = get_sddc_t0_static_routes_json(proxy_url, session_token)\n if t0_static_routes_json == None:\n print(\"API Error\")\n sys.exit(1)\n if 'results' in t0_static_routes_json:\n t0_static_routes = t0_static_routes_json['results']\n else:\n print(\"No results. Something went wrong - please check your syntax and try again.\")\n sys.exit(1)\n route_table = PrettyTable(['Display Name', 'Network', 'Admin Distance', 'Next Hop'])\n for routes in t0_static_routes:\n route_table.add_row([routes['display_name'],routes['network'],routes['next_hops'][0]['admin_distance'],routes['next_hops'][0]['ip_address']])\n print (route_table.get_string(sort_key = operator.itemgetter(1,0), sortby = \"Network\", reversesort=True))",
"def display_routing_table(appliances=[],\n credentials=[],\n timeout=120,\n no_check_hostname=False,\n web=False):\n logger = make_logger(\"mast.network\")\n check_hostname = not no_check_hostname\n env = datapower.Environment(\n appliances,\n credentials,\n timeout,\n check_hostname=check_hostname)\n logger.info(\n \"Attempting to retrieve routing table from {}\".format(\n str(env.appliances)))\n\n # try RoutingStatus3 first\n try:\n logger.debug(\"Attempting RoutingStatus3\")\n resp = env.perform_action(\n \"get_status\",\n domain=\"default\",\n provider=\"RoutingStatus3\")\n xpath = datapower.STATUS_XPATH + \"RoutingStatus3\"\n except urllib2.HTTPError:\n logger.warn(\n \"RoutingStatus3 unavailable, falling back to RoutingStatus2\")\n resp = env.perform_action(\n \"get_status\",\n domain=\"default\",\n provider=\"RoutingStatus2\")\n xpath = datapower.STATUS_XPATH + \"RoutingStatus2\"\n logger.debug(\"Response received: {}\".format(resp))\n\n header_row = []\n for host, l in resp.items():\n if not web:\n print host, \"\\n\", \"=\" * len(host), \"\\n\"\n fields = [child.tag for child in l.xml.find(xpath)]\n\n if web:\n if not header_row:\n header_row = list(fields)\n header_row.insert(0, \"Appliance\")\n rows = []\n\n width = len(max(fields, key=len))\n template = \"{:<{width}} \" * len(fields)\n header = template.format(*fields, width=width)\n if not web:\n print header\n\n for item in l.xml.findall(xpath):\n values = [child.text for child in item]\n line = template.format(*values, width=width)\n if web:\n _row = list(values)\n _row.insert(0, host)\n rows.append(_row)\n if not web:\n print line\n if web:\n return flask.render_template(\n \"results_table.html\",\n header_row=header_row,\n rows=rows), util.render_history(env)\n print",
"def get(self, request):\n source = request.GET.get(\"source\", \"BLR\")\n destination = request.GET.get(\"destination\", \"DEL\")\n dateofdeparture = request.GET.get(\"date_of_departure\", \"20191027\")\n resp = get_flights(source, destination, dateofdeparture)\n return Response(resp)",
"def getNextDest(self):\n\n if self.direction_forward:\n if len(self.destinations)-1 == self.current_loc: #if Autobuz reaches rightmost destination, it also takes a break and changes directions\n self.direction_forward = False #Autobuz changes direction\n self.updateOmLocation()\n return self.destinations[self.current_loc], (self.break_duration + self.trip_duration) #return destination reached and elapsed time\n \n else:\n self.current_loc += 1\n self.updateOmLocation()\n return self.destinations[self.current_loc], self.trip_duration\n \n else:\n if 0 == self.current_loc: #if Autobuz reaches leftmost destination, it also takes a break and changes directions\n self.direction_forward = True #Autobuz changes direction\n self.updateOmLocation()\n return self.destinations[self.current_loc], (self.break_duration + self.trip_duration)\n \n else:\n self.current_loc -= 1\n self.updateOmLocation()\n return self.destinations[self.current_loc], self.trip_duration",
"def list_all_destinations(self):\n\n return self.ioapi.get_destination_list()",
"def sendArpReply(logger, device, destination, count=3, quiet=False, blocking=True):\n\n args = [Arping.ARPING_COMMAND_NAME, \n Arping.INTERFACE_OPTION, device, \n Arping.COUNT_OPTION, str(count),\n Arping.ARP_REPLY_OPTION]\n\n if quiet is True:\n args.append(Arping.QUIET_OPTION)\n\n # must set destination as last arg\n args.append(destination) \n\n rc = Command.execute(logger, Arping.ARPING_COMMAND_NAME, args, blocking=blocking)\n\n return rc",
"def get_rp_traffic_detail(isamAppliance, instance, date, duration, aspect, aspect_identifier, check_mode=False,\n force=False):\n return isamAppliance.invoke_get(\n \"Retrieving detailed traffic records for a specific Junction or User-Agent on a Reverse Proxy instance\",\n \"/analysis/reverse_proxy_traffic/traffic/instance/{0}/{1}/{2}/{3}\".format(instance, aspect, aspect_identifier,\n tools.create_query_string(date=date,\n duration=duration,\n aspect=aspect)),requires_model=requires_model)",
"def _get_nitro_response(self, service, response) :\n\t\ttry :\n\t\t\tresult = service.payload_formatter.string_to_resource(traceroute6_response, response, self.__class__.__name__)\n\t\t\tif(result.errorcode != 0) :\n\t\t\t\tif (result.errorcode == 444) :\n\t\t\t\t\tservice.clear_session(self)\n\t\t\t\tif result.severity :\n\t\t\t\t\tif (result.severity == \"ERROR\") :\n\t\t\t\t\t\traise nitro_exception(result.errorcode, str(result.message), str(result.severity))\n\t\t\t\telse :\n\t\t\t\t\traise nitro_exception(result.errorcode, str(result.message), str(result.severity))\n\t\t\treturn result.traceroute6\n\t\texcept Exception as e :\n\t\t\traise e",
"def destination(self) -> pulumi.Output['outputs.DestinationResponse']:\n return pulumi.get(self, \"destination\")",
"def fulfillment():\n #Route based on action\n apiai_req = request.get_json(silent=True, force=True)\n\n print(\"Request:\")\n print(json.dumps(apiai_req, indent=4))\n action = apiai_req.get(\"result\").get(\"action\")\n if action == \"uber.type\":\n return uber_types_handler(apiai_req.get(\"result\"),SERVER_TOKEN) #Handles ride options between Point A and Point B\n if action ==\"uber.estimate\":\n return uber_estimate_handler(apiai_req, SERVER_TOKEN) #Handles Ride Price Estimations\n if action ==\"uber.confirm\":\n return uber_confirm_handler(apiai_req) #Handles Final Ride Confirmations",
"def get_arp_table(self, vrf=\"\"):\n\n arp_table = []\n output = self._send_command('/ip arp print terse')\n\n arps = parse_terse_output(output)\n\n for arp in arps:\n if arp.get('mac-address'):\n arp_table.append({\n 'interface': arp.get('interface'),\n 'mac': cast_mac(arp.get('mac-address')),\n 'ip': arp.get('address'),\n 'age': -1.0,\n })\n\n return arp_table"
] | [
"0.5970618",
"0.5425277",
"0.5258685",
"0.5169851",
"0.50962853",
"0.4999015",
"0.4969509",
"0.47679025",
"0.46750867",
"0.4669468",
"0.46454915",
"0.46285298",
"0.46087664",
"0.46033296",
"0.45980322",
"0.45966095",
"0.45963508",
"0.45782527",
"0.45575908",
"0.45300308",
"0.4523218",
"0.451939",
"0.4477558",
"0.44706893",
"0.4458562",
"0.44461018",
"0.44459686",
"0.44013026",
"0.43976524",
"0.43949142"
] | 0.70147145 | 0 |
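
A minimal usage sketch for the keyword in the row above (fusion_api_get_appliance_trap_destinations). The appliance address, the header names, and the /rest/appliance/trap-destinations path are assumptions based on HPE OneView conventions, not values taken from the dataset row:

    import requests

    ONEVIEW = "https://oneview.example.com"            # assumed appliance address
    HEADERS = {"auth": "<session token>",              # assumed OneView auth header
               "X-API-Version": "800"}                 # assumed API version

    def get_trap_destinations(dest_id=None, param=""):
        # Mirrors the wrapper's call shape: GET a single destination when an
        # id is given, otherwise the whole collection; `param` carries an
        # optional query string, as in the keyword's `param` argument.
        path = "/rest/appliance/trap-destinations"
        if dest_id is not None:
            path += "/" + str(dest_id)
        return requests.get(ONEVIEW + path + param, headers=HEADERS, verify=False)
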
Validates appliance trap destinations [Example] ${resp} = Fusion Api Get Appliance Trap Destinations | | | | def fusion_api_validate_appliance_trap_destination(self, body=None, api=None, headers=None):
return self.trap.validate(body=body, api=api, headers=headers) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def validate_response(response: json):\n if \"error\" in response:\n print(\"ERROR: Request returned error\")\n print_request_response(response)\n exit(1)",
"def fusion_api_get_appliance_trap_destinations(self, id=None, param='', api=None, headers=None): # pylint: disable=W0622\n return self.trap.get(id=id, param=param, api=api, headers=headers)",
"def validate_response(recipients, response):\n \n result_obj = json.loads(response)\n assert 'message' in result_obj\n assert 'routes' in result_obj\n \n total = 0\n for r in result_obj['routes']:\n assert 'ip' in r\n num_r = len(r['recipients'])\n\n #sum the number of recipients\n total += num_r\n\n relay_type = MessageRelays.get_relay_type_by_throughput(num_r)\n subnet_split = relay_type.subnet_prefix.split('.')\n ip_split = r['ip'].split('.')\n \n #make sure the subnet is correct\n assert ip_split[2] == subnet_split[2]\n \n assert total == len(recipients)",
"def test_generating(resp):\n errors = []\n if not check_int(resp[\"tightness\"]):\n errors.append(\"Invalid type for Itinerary response's 'tightness' field.\")\n\n if not isinstance(resp, bool):\n errors.append(\"Invalid type for Itinerary response's 'start_from_airport' field.\")",
"def lineup_post():\n scan = request.args.get('scan')\n if scan == 'start':\n station_scan = True\n stations = locast_service.get_stations()\n station_scan = False\n return ('', 204)\n\n return (f'{scan} is not a valid scan command', 400)",
"def validate_response(self, response):\n pass",
"def check_flights(booking_token):\n parameters = {'v': 2, # default\n 'pnum': 1, # passenger number\n 'bnum': 0, # number of bags\n 'booking_token': booking_token\n }\n response = requests.get(CHECK_FLIGHTS_ENGINE, params=parameters).json()\n print(response)\n checked = response['flights_checked']\n invalid = response['flights_invalid']\n return checked, invalid",
"def test_functional_good_ip(self, url):\n response = requests.get(\"http://localhost:80/ip2w/{url}\".format(url=url))\n if response.status_code != BAD_GATEWAY:\n print(\"\\nGATEWAY is OK\")\n self.assertEqual(response.status_code, OK)\n content = response.json()\n self.assertEqual(len(content), 3)\n self.assertTrue(content.get(\"temp\"))\n self.assertTrue(content.get(\"city\"))\n else:\n print(\"\\nGATEWAY is RESET BY PEER\")",
"def test_bad_airport(self):\n result = self.client.get(\"/search?origin=foo&destination=DFW%2C+Dallas+TX&date=2018-05-21\")\n self.assertNotIn('<meter', result.data)\n self.assertIn('enter a valid airport', result.data)",
"def response_validator(url_dict, host_name_ip, api_endpoint):\r\n for key, value in url_dict.items():\r\n url_framed = url_framer_or_formatter(value.strip(),host_name_ip) + api_endpoint\r\n logger.debug(\"{} Executing request for {}::{} {}\".format(\"#\" * 20, key,url_framed, \"#\" * 20))\r\n status_code, response_data, error_msg = common_http_validator(method='GET', url=url_framed)\r\n if status_code == 200:\r\n logger.debug(\"{} ok status obtained with response message as {}\".format(status_code,json.loads(response_data)['status']))\r\n else:\r\n logger.debug(\"{} status with response as {} and exception message as {}\".format(status_code,response_data,error_msg))\r\n\r\n logger.debug(\"{} Request execution completed for {}::{} {}\".format(\"#\" * 20, key,url_framed, \"#\" * 20))",
"def validate_outgoing_response(request, response, schema_map, resolver):\n body = prepare_body(response)\n Draft4Validator(\n schema_map.response_body_schema,\n resolver=resolver,\n types=EXTENDED_TYPES,\n ).validate(body)",
"def validate(self, response):\n return response[\"status_code\"] == 1",
"def check_response(response):\n status = response.get('status')\n ret = status and status == 'OK'\n if not ret:\n logging.error('Received unexpected failure response from polyswarmd: %s', response)\n return ret",
"def check_response_errors(self, resp):\n return True",
"def test_bad_awards_autocomplete_request(client):\n\n resp = client.post(\"/api/v1/federal_accounts/autocomplete/\", content_type=\"application/json\", data=json.dumps({}))\n assert resp.status_code == status.HTTP_400_BAD_REQUEST",
"def _check_response(self, res: requests.Response, token: str) -> None:\n return",
"def _process_unsuccessful_response(\n self,\n response: Response,\n case: Literal['validate_api_key', 'balances', 'trades', 'asset_movements'],\n ) -> Union[\n list,\n tuple[bool, str],\n ExchangeQueryBalances,\n ]:\n try:\n response_list = jsonloads_list(response.text)\n except JSONDecodeError as e:\n msg = f'{self.name} {case} returned an invalid JSON response: {response.text}.'\n log.error(msg)\n\n if case in ('validate_api_key', 'balances'):\n return False, msg\n if case in ('trades', 'asset_movements'):\n self.msg_aggregator.add_error(\n f'Got remote error while querying {self.name} {case}: {msg}',\n )\n return []\n\n raise AssertionError(f'Unexpected {self.name} response_case: {case}') from e\n\n error_data = self._get_error_response_data(response_list)\n if error_data.error_code == API_ERR_AUTH_NONCE_CODE:\n message = API_ERR_AUTH_NONCE_MESSAGE\n # Errors related with the API key return a human readable message\n elif case == 'validate_api_key' and error_data.error_code == API_KEY_ERROR_CODE:\n message = API_KEY_ERROR_MESSAGE\n else:\n # Below any other error not related with the system clock or the API key\n reason = error_data.reason or response.text\n message = (\n f'{self.name} query responded with error status code: {response.status_code} '\n f'and text: {reason}.'\n )\n log.error(message)\n\n if case in ('validate_api_key', 'balances'):\n return False, message\n if case in ('trades', 'asset_movements'):\n self.msg_aggregator.add_error(\n f'Got remote error while querying {self.name} {case}: {message}',\n )\n return []\n\n raise AssertionError(f'Unexpected {self.name} response_case: {case}')",
"def response_check(response):\n print(response)\n print(response.text)\n return response.status_code == 201",
"def assert_valid_responses(response) -> None:\n assert valid_resp_name in response.text\n assert valid_resp_addr in response.text\n assert response.status_code == 200",
"def test_trucks_api(self):\n resp = self.app.get('/trucks')\n self.assertEqual(resp.status_code, 200)\n\n # ensure proper JSON is returned\n data = json.loads(resp.data)\n assert 'resp' in data\n for item in data['resp']:\n # address is not actually required\n assert 'name' in item\n assert 'fooditems' in item\n assert 'latitude' in item\n assert 'longitude' in item\n assert 'schedule' in item",
"def _assemble_and_send_validation_request(self):\r\n # Fire off the query.\r\n response = self.client.service.validateShipment(WebAuthenticationDetail=self.WebAuthenticationDetail,\r\n ClientDetail=self.ClientDetail,\r\n TransactionDetail=self.TransactionDetail,\r\n Version=self.VersionId,\r\n RequestedShipment=self.RequestedShipment)\r\n return response",
"def verify_destinations(**kwargs):\n if \"mapd_db\" in kwargs[\"destinations\"]:\n valid_destination_set = True\n if kwargs[\"dest_db_server\"] is None:\n # If dest_server is not set for mapd_db, then exit\n logging.error(\n '\"dest_server\" is required when destination = \"mapd_db\"'\n )\n if \"file_json\" in kwargs[\"destinations\"]:\n valid_destination_set = True\n if kwargs[\"output_file_json\"] is None:\n # If output_file_json is not set for file_json, then exit\n logging.error(\n '\"output_file_json\" is required when destination = \"file_json\"'\n )\n if \"output\" in kwargs[\"destinations\"]:\n valid_destination_set = True\n if \"jenkins_bench\" in kwargs[\"destinations\"]:\n valid_destination_set = True\n if kwargs[\"output_file_jenkins\"] is None:\n # If output_file_jenkins is not set for jenkins_bench, then exit\n logging.error(\n '\"output_file_jenkins\" is required '\n + 'when destination = \"jenkins_bench\"'\n )\n if not valid_destination_set:\n return False\n else:\n return True",
"def test_404(self):\n response = self.make_call(origin='Milano Lambrate', destination='Milano Cadorna')\n self.assert400(response)",
"def checkResponseOK(response):\n assert response['result'] == 'OK'",
"def check(self):\n invalid = []\n\n if not self.route:\n invalid.append(('route', 'missing'))\n elif not self.route[1] in ['GET', 'POST', 'PUT']:\n invalid.append(('route', 'invalid method: %s' % self.route[1]))\n\n has_2xx = False\n for rcode in self.return_codes:\n code = rcode[0]\n if code >= 200 and code < 300:\n has_2xx = True\n break\n if not has_2xx:\n invalid.append(('return_codes', 'Missing succes return code doc'))\n\n if self.client_auth is None:\n invalid.append(\n ('client_auth', 'Please provide client auth requirement'))\n\n if self.user_auth is None:\n invalid.append(\n ('user_auth', 'Please provide user auth requirement'))\n\n if invalid:\n msgs = []\n for error in invalid:\n msgs.append(\"%s: %s\" % error)\n raise ValueError(\n \"APIFunc for %s is invalid: %s\"\n % (self.viewfunc.__name__,\n ', '.join(msgs)))",
"def _check_response(response: requests.Response) -> None:\n logger.debug('Received response:\\n%s', response.content)\n try:\n response.raise_for_status()\n if not response.json()['status']:\n _report_failure('your e-mail address appears to be invalid')\n except requests.exceptions.HTTPError:\n _report_failure()\n except (ValueError, KeyError):\n _report_failure('there was a problem with the server response')",
"def test_validity():\n\n data = request.json\n promo_code = Promo_code.query.filter_by(code=data['code']).first()\n if promo_code is not None:\n origin = Promo_code.query.filter_by(event=data['origin']).first()\n destination = Promo_code.query.filter_by(event=data['destination']).first()\n\n try:\n origin_distance = geolocator.geocode(data['origin'])\n origin_distance_codes = (origin_distance.latitude, origin_distance.longitude)\n\n destination_distance = geolocator.geocode(data['destination'])\n destination_distance_codes = (destination_distance.latitude, destination_distance.longitude)\n\n event = geolocator.geocode(promo_code.event)\n event_codes = (event.latitude, event.longitude)\n\n event_origin_distance = geopy.distance.vincenty(origin_distance_codes, event_codes).km\n event_destination_distance = geopy.distance.vincenty(destination_distance_codes, event_codes).km\n\n if origin or destination is not None or \\\n event_origin_distance < promo_code.radius or \\\n event_destination_distance < promo_code.radius:\n return jsonify({'promo_code details': dict(id=promo_code.id,\n code=promo_code.code,\n event=promo_code.event,\n expiry_data=promo_code.expiry_date,\n status=promo_code.status,\n price=promo_code.price),\n 'polyline':data['destination'] + data['origin']}), 200\n return jsonify({'status':'fail', 'message':'Promo code is not valid'}),400\n except:\n return jsonify({\"Error with the location entered\"})\n\n return jsonify({'status': 'fail',\n 'message': 'code doesnot exist'}), 404",
"def is_valid_response(self, response):\r\n if response.status_code in VALID_CODES:\r\n return True\r\n return False",
"def validate_response(response):\n assert response.ok\n rpcdict = response.json()\n assert rpcdict['jsonrpc'] == '2.0'\n assert rpcdict['id']\n assert 'error' in rpcdict.keys() or 'result' in rpcdict.keys()",
"def fusion_api_get_appliance_snmpv3_trap_destinations(self, id=None, param='', api=None, headers=None): # pylint: disable=W0622\n return self.snmpv3trap.get(id=id, param=param, api=api, headers=headers)"
] | [
"0.5611637",
"0.5527218",
"0.5477554",
"0.53854334",
"0.51200557",
"0.51143605",
"0.50652254",
"0.50519484",
"0.5023582",
"0.50039655",
"0.4974728",
"0.49745622",
"0.49464026",
"0.4939441",
"0.49148548",
"0.48938537",
"0.4878036",
"0.48708522",
"0.48633423",
"0.4861019",
"0.48329365",
"0.48281872",
"0.48117223",
"0.48022935",
"0.47834864",
"0.47788092",
"0.47779623",
"0.47768342",
"0.47597438",
"0.47578683"
] | 0.6713842 | 0 |
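
A sketch of what the validation keyword above might send. The /validation suffix and the body field names are assumptions; only port 162 (the standard SNMP trap port) is a known value:

    import requests

    ONEVIEW = "https://oneview.example.com"            # assumed appliance address
    HEADERS = {"auth": "<session token>",              # header names are assumptions
               "X-API-Version": "800"}

    def validate_trap_destination(address, community="public", port=162):
        # Mirrors fusion_api_validate_appliance_trap_destination: POST the
        # candidate destination so the appliance can check it before creation.
        body = {"destinationAddress": address,         # field names assumed
                "communityString": community,
                "port": port}                          # 162 = standard SNMP trap port
        return requests.post(ONEVIEW + "/rest/appliance/trap-destinations/validation",
                             json=body, headers=HEADERS, verify=False)
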
Adds or updates the specified trap forwarding destination. The trap destination associated with the given id will be updated if a trap destination with that id already exists. If the given id is not found, then a trap destination will be created with the given id. [Example] ${resp} = Fusion Api Add Or Update Appliance Trap Destination | | | | | def fusion_api_add_or_update_appliance_trap_destination(self, body=None, id=None, api=None, headers=None): # pylint: disable=W0622
return self.trap.create(body=body, id=id, api=api, headers=headers) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def fusion_api_update_appliance_trap_destination(self, body, id, api=None, headers=None): # pylint: disable=W0622\n return self.trap.put(body=body, id=id, api=api, headers=headers)",
"def fusion_api_edit_appliance_snmpv3_trap_destination(self, body=None, id=None, api=None, headers=None): # pylint: disable=W0622\n return self.snmpv3trap.put(body=body, id=id, api=api, headers=headers)",
"def fusion_api_add_appliance_snmpv3_trap_destination(self, body=None, api=None, headers=None):\n return self.snmpv3trap.create(body=body, api=api, headers=headers)",
"def fusion_api_delete_appliance_trap_destination(self, id=None, api=None, headers=None): # pylint: disable=W0622\n return self.trap.delete(id=id, api=api, headers=headers)",
"def fusion_api_get_appliance_trap_destinations(self, id=None, param='', api=None, headers=None): # pylint: disable=W0622\n return self.trap.get(id=id, param=param, api=api, headers=headers)",
"def fusion_api_validate_appliance_trap_destination(self, body=None, api=None, headers=None):\n return self.trap.validate(body=body, api=api, headers=headers)",
"def fusion_api_delete_appliance_snmpv3_trap_destination(self, id=None, api=None, headers=None): # pylint: disable=W0622\n return self.snmpv3trap.delete(id=id, api=api, headers=headers)",
"def post(self, destination_id, add_to_beginning=\"False\",clear_other_waypoints=\"False\",datasource=\"tranquility\",**kwargs):\n kwargs_dict ={\n\"destination_id\" : destination_id, \"add_to_beginning\" : add_to_beginning, \"clear_other_waypoints\" : clear_other_waypoints, \"datasource\" : datasource, \n }\n kwargs_dict.update(kwargs)\n return EsiRequestObject(self.base_url, self.post_responses) \\\n .post(**kwargs_dict)",
"def post(self, id):\n\n data = json.loads(request.get_data())\n response = add_location(data, id)\n return response",
"def fusion_api_get_appliance_snmpv3_trap_destinations(self, id=None, param='', api=None, headers=None): # pylint: disable=W0622\n return self.snmpv3trap.get(id=id, param=param, api=api, headers=headers)",
"def follow(source_id, destination_id):\n if source_id == destination_id:\n return \"You can't follow yourself!\"\n\n Forward.objects.get_or_create(source_id=source_id,\n destination_id=destination_id)\n Backward.objects.get_or_create(destination_id=destination_id,\n source_id=source_id)",
"def fusion_api_edit_appliance_snmpv3_trap_forwarding_user(self, body=None, id=None, api=None, headers=None): # pylint: disable=W0622\n return self.snmpv3user.put(body=body, id=id, api=api, headers=headers)",
"def update_log_forwarding_destinations(\n self,\n label: str,\n sources: list,\n consumer: str,\n credentials: dict,\n address: str,\n destination_uuid: str,\n ) -> Session:\n uri = f\"{self.uri}/log-forwarding-destinations/{destination_uuid}\"\n data = {\n \"label\": label,\n \"sources\": sources,\n \"consumer\": consumer,\n \"credentials\": credentials,\n \"address\": address,\n }\n response = self.request(uri=uri, method=\"PUT\", data=data)\n\n return response",
"def add( # pylint: disable=inconsistent-return-statements\n self,\n id, # type: str\n event_route=None, # type: Optional[\"_models.DigitalTwinsEventRoute\"]\n event_routes_add_options=None, # type: Optional[\"_models.EventRoutesAddOptions\"]\n **kwargs # type: Any\n ):\n # type: (...) -> None\n cls = kwargs.pop('cls', None) # type: ClsType[None]\n error_map = {\n 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError\n }\n error_map.update(kwargs.pop('error_map', {}))\n\n api_version = kwargs.pop('api_version', \"2022-05-31\") # type: str\n content_type = kwargs.pop('content_type', \"application/json\") # type: Optional[str]\n\n _traceparent = None\n _tracestate = None\n if event_routes_add_options is not None:\n _traceparent = event_routes_add_options.traceparent\n _tracestate = event_routes_add_options.tracestate\n if event_route is not None:\n _json = self._serialize.body(event_route, 'DigitalTwinsEventRoute')\n else:\n _json = None\n\n request = build_add_request(\n id=id,\n api_version=api_version,\n content_type=content_type,\n json=_json,\n traceparent=_traceparent,\n tracestate=_tracestate,\n template_url=self.add.metadata['url'],\n )\n request = _convert_request(request)\n request.url = self._client.format_url(request.url)\n\n pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access\n request,\n stream=False,\n **kwargs\n )\n response = pipeline_response.http_response\n\n if response.status_code not in [204]:\n map_error(status_code=response.status_code, response=response, error_map=error_map)\n error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)\n raise HttpResponseError(response=response, model=error)\n\n if cls:\n return cls(pipeline_response, None, {})",
"def put (id, travel_stop):\n travel_stop['source'] = \"otherDB\"\n travel_stop['id'] = id\n travel_stops[id] = travel_stop\n \n return travel_stop, 200",
"def add_destination(self):\n pass",
"def update_conditional_forwarder(DirectoryId=None, RemoteDomainName=None, DnsIpAddrs=None):\n pass",
"def add_route(g, origin, destination, distance, choice_dir):\n origin_code = g.convert[origin]\n destination_code = g.convert[destination]\n distance = int(distance)\n # Add route both ways\n if(choice_dir == \"y\"):\n g.city_dict[origin_code].add_flights_in((destination_code, distance))\n g.city_dict[origin_code].add_flights_out((destination_code, distance))\n \n g.city_dict[destination_code].add_flights_in((origin_code, distance))\n g.city_dict[destination_code].add_flights_out((origin_code, distance))\n # Add route one way \n if(choice_dir == \"n\"):\n g.city_dict[origin_code].add_flights_out((destination_code, distance))\n g.city_dict[destination_code].add_flights_in((origin_code, distance))\n \n \n \n return g",
"def fusion_api_add_appliance_snmpv3_trap_forwarding_user(self, body=None, api=None, headers=None):\n return self.snmpv3user.create(body=body, api=api, headers=headers)",
"def add_nat_gateway_route(route_table_id, destination, nat_gateway_id):\n response = EC2.create_route(\n DestinationCidrBlock=destination,\n RouteTableId=route_table_id,\n NatGatewayId=nat_gateway_id\n )\n return response",
"def _alter_route(self, ifname, action, destination, next_hop):\n version = destination.version\n ifname = self.generic_to_host(ifname)\n try:\n LOG.debug(self.sudo(\n '-%s' % version, 'route', action, str(destination), 'via',\n str(next_hop), 'dev', ifname\n ))\n return True\n except RuntimeError as e:\n # Since these are user-supplied custom routes, it's very possible\n # that adding/removing them will fail. A failure to apply one of\n # these custom rules, however, should *not* cause an overall router\n # failure.\n LOG.warn('Route could not be %sed: %s' % (action, unicode(e)))\n return False",
"def add(self, source, destination, port):\n logger.info('Adding path from %s to %s on port %s', source, destination, port)\n rules = [{\"IPProtocol\": \"tcp\", \"ports\": [int(port)]}]\n src_tags, dest_tags, src_ranges, _ = self._extract_service_info(\n source, destination)\n firewall_name = \"bu-%s-%s-%s\" % (destination.network.name, destination.name, port)\n try:\n firewall = self.driver.ex_get_firewall(firewall_name)\n if isinstance(source, CidrBlock):\n if not firewall.source_ranges:\n firewall.source_ranges = []\n firewall.source_ranges.append(str(source.cidr_block))\n logger.info(firewall.source_ranges)\n if isinstance(source, Service):\n if not firewall.source_tags:\n firewall.source_tags = []\n source_tag = \"%s-%s\" % (source.network.name, source.name)\n firewall.source_tags.append(source_tag)\n logger.info(firewall.source_tags)\n firewall = self.driver.ex_update_firewall(firewall)\n except ResourceNotFoundError:\n logger.info(\"Firewall %s not found, creating.\", firewall_name)\n firewall = self.driver.ex_create_firewall(firewall_name, allowed=rules,\n network=destination.network.name,\n source_ranges=src_ranges,\n source_tags=src_tags,\n target_tags=dest_tags)\n return Path(destination.network, source, destination, \"tcp\", port)",
"def edit_a_parcel(destination, id):\n query = \"\"\"UPDATE parcels SET destination = %s WHERE id = %s\"\"\"\n tuple =(destination , id)\n db.insert(query, tuple)",
"def put(self, id):\n return None, 204",
"def add(self, token, destination, overwrite=False, **kwargs):\n if not overwrite:\n try:\n record = self.get(token)\n except self.RedirectDoesNotExist:\n # There is no redirect associated with this token so we're not\n # attempting and overwrite\n pass\n else:\n # There _is_ a redirect associated with this token so we raise\n # the \"RedirectAlreadyExists\" exception here\n destination = record.get(\"destination\")\n raise self.RedirectAlreadyExists(\n f\"'{token}' is already associated with '{destination}'\"\n )\n\n if not urls.is_valid(destination):\n raise self.InvalidRedirectDestination(f\"{destination} is not a valid url\")\n\n record = kwargs\n\n dimensions = urls.extract_dimensions(destination)\n record.update({f\"dimensions_{k}\": v for k, v in dimensions.items()})\n\n record.update(\n {\n \"token\": token,\n \"destination\": destination,\n \"updated_at\": datetime.utcnow().isoformat(),\n }\n )\n\n self.table.put_item(Item=record)\n return record",
"def forward_to(id):\n\n db = init_connection_engine()\n\n if id == 'short_URL':\n return redirect(url_for('index'))\n else:\n # Looking up the URL by its ID in the DB.\n try:\n # Using a with statement ensures that the connection is always released\n # back into the pool at the end of statement (even if an error occurs).\n with db.connect() as conn:\n lookup_url = \"SELECT url_data FROM url_list WHERE url_id='\" + id + \"';\"\n target_url = conn.execute(lookup_url).fetchone()\n # If target URL is not found.\n if not target_url:\n flash('Not found')\n return redirect(url_for('index'))\n # If something goes wrong.\n except:\n flash('Something went wrong')\n return redirect(url_for('index'))\n\n return redirect(target_url[0])",
"def updateDestination(\n self, destinationId: str = None, destinationConfig: dict = None\n ) -> dict:\n if destinationId is None:\n raise ValueError(\"Require a destination ID\")\n if destinationConfig is None:\n raise ValueError(\"Require a dictionation for updating the destination\")\n if self.loggingEnabled:\n self.logger.debug(f\"Starting updateDestination\")\n privateHeader = deepcopy(self.header)\n privateHeader[\n \"Content-Type\"\n ] = \"application/vnd.adobe.platform.projectionDestination+json\"\n path = f\"/config/destinations/{destinationId}\"\n res = self.connector.putData(\n self.endpoint + path, data=destinationConfig, headers=privateHeader\n )\n return res",
"def insert_route(src_ip, gre_tunnel):\n import os\n try:\n os.system(\"\"\"\n /usr/bin/sudo /sbin/iptables -t mangle -A PREROUTING -s %s -j MARK --set-mark %s\n \"\"\" % (src_ip, gre_tunnel))\n except:\n raise iptExc(\"Could not insert route from src_ip %s to gre tunnel %s in iptables\" % (src_ip, gre_tunnel))\n return True",
"def handle_set_destination(self, data):\n #If the origin_id is 0, it has not been specified and we must find\n #the closest node to where we are now\n self.dest_node = data.dest_id\n if data.origin_id == 0:\n #Will set self.current_node\n self.get_nearest_node(data.dest_id)\n else:\n self.current_node = data.origin_id\n if self.current_node == data.dest_id:\n self.at_dest = True\n msg = (\"We're already there!\")\n return srvs.SetDestinationResponse(True, msg)\n rospy.wait_for_service('/get_trajectory')\n get_traj = rospy.ServiceProxy('/get_trajectory', srvs.GetTrajectory)\n trajectory = get_traj(False, self.current_node, data.dest_id).trajectory\n self.np_trajectory = to_numpy_trajectory(trajectory)\n self.loop = False\n self.at_dest = False\n msg = (\"Trajectory to destination of vehicle #%i \" % self.vehicle_id +\n \"successfully set.\")\n return srvs.SetDestinationResponse(True, msg)",
"def add_vehicle_for_the_route_successfully(self):\n route = self.get_route_object()\n response = self.client.patch(\n api_reverse('route:route', args=[route.id]),\n self.vehicle_id ,\n HTTP_AUTHORIZATION='token {}'.format(self.token_two))\n return response"
] | [
"0.7097082",
"0.64457333",
"0.6294257",
"0.5827654",
"0.57723075",
"0.5516807",
"0.5310152",
"0.5275602",
"0.51791847",
"0.5105681",
"0.50483966",
"0.50059736",
"0.4965412",
"0.49398524",
"0.49364766",
"0.48378426",
"0.47720003",
"0.4741462",
"0.46749225",
"0.4662247",
"0.4640799",
"0.46376747",
"0.46284014",
"0.46253616",
"0.4620312",
"0.46043625",
"0.45973822",
"0.45966864",
"0.45933113",
"0.45844793"
] | 0.7597969 | 0 |
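
The query string in the row above describes create-or-update semantics keyed on the caller-supplied id. A sketch of that upsert pattern, with the same assumed endpoint and headers as in the earlier sketches; the verbs chosen per branch are assumptions based on OneView conventions:

    import requests

    ONEVIEW = "https://oneview.example.com"
    HEADERS = {"auth": "<session token>", "X-API-Version": "800"}
    BASE = ONEVIEW + "/rest/appliance/trap-destinations"

    def add_or_update_trap_destination(dest_id, body):
        # If a destination with dest_id exists it is replaced (PUT);
        # otherwise one is created under that id (POST), matching the
        # behaviour the query string describes.
        url = "{}/{}".format(BASE, dest_id)
        if requests.get(url, headers=HEADERS, verify=False).status_code == 404:
            return requests.post(url, json=body, headers=HEADERS, verify=False)
        return requests.put(url, json=body, headers=HEADERS, verify=False)
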
Updates the specified trap forwarding destination. The trap destination associated with the given id will be updated if a trap destination with that id already exists. [Example] ${resp} = Fusion Api Update Appliance Trap Destination | | | | | def fusion_api_update_appliance_trap_destination(self, body, id, api=None, headers=None): # pylint: disable=W0622
return self.trap.put(body=body, id=id, api=api, headers=headers) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def fusion_api_add_or_update_appliance_trap_destination(self, body=None, id=None, api=None, headers=None): # pylint: disable=W0622\n return self.trap.create(body=body, id=id, api=api, headers=headers)",
"def fusion_api_edit_appliance_snmpv3_trap_destination(self, body=None, id=None, api=None, headers=None): # pylint: disable=W0622\n return self.snmpv3trap.put(body=body, id=id, api=api, headers=headers)",
"def fusion_api_delete_appliance_trap_destination(self, id=None, api=None, headers=None): # pylint: disable=W0622\n return self.trap.delete(id=id, api=api, headers=headers)",
"def fusion_api_add_appliance_snmpv3_trap_destination(self, body=None, api=None, headers=None):\n return self.snmpv3trap.create(body=body, api=api, headers=headers)",
"def updateDestination(\n self, destinationId: str = None, destinationConfig: dict = None\n ) -> dict:\n if destinationId is None:\n raise ValueError(\"Require a destination ID\")\n if destinationConfig is None:\n raise ValueError(\"Require a dictionation for updating the destination\")\n if self.loggingEnabled:\n self.logger.debug(f\"Starting updateDestination\")\n privateHeader = deepcopy(self.header)\n privateHeader[\n \"Content-Type\"\n ] = \"application/vnd.adobe.platform.projectionDestination+json\"\n path = f\"/config/destinations/{destinationId}\"\n res = self.connector.putData(\n self.endpoint + path, data=destinationConfig, headers=privateHeader\n )\n return res",
"def update_log_forwarding_destinations(\n self,\n label: str,\n sources: list,\n consumer: str,\n credentials: dict,\n address: str,\n destination_uuid: str,\n ) -> Session:\n uri = f\"{self.uri}/log-forwarding-destinations/{destination_uuid}\"\n data = {\n \"label\": label,\n \"sources\": sources,\n \"consumer\": consumer,\n \"credentials\": credentials,\n \"address\": address,\n }\n response = self.request(uri=uri, method=\"PUT\", data=data)\n\n return response",
"def fusion_api_delete_appliance_snmpv3_trap_destination(self, id=None, api=None, headers=None): # pylint: disable=W0622\n return self.snmpv3trap.delete(id=id, api=api, headers=headers)",
"def fusion_api_validate_appliance_trap_destination(self, body=None, api=None, headers=None):\n return self.trap.validate(body=body, api=api, headers=headers)",
"def fusion_api_get_appliance_trap_destinations(self, id=None, param='', api=None, headers=None): # pylint: disable=W0622\n return self.trap.get(id=id, param=param, api=api, headers=headers)",
"def alter_destination(self, destination):\n series = self.series\n if not series:\n logging.warning('Cannot alter destination to %s for orphan mission %s.' % (destination, self.id))\n return\n\n destination_point = series.point_for_station(destination)\n if not destination_point:\n logging.warning('Cannot alter destination to %s for mission %s. (no id found)' % (destination, self.id))\n return\n\n destination_id = destination_point.station_id\n passed = False\n for stop in self.stops:\n if passed:\n stop.status = StopStatuses.canceled\n else:\n if stop.station_id == destination_id:\n passed = True\n stop.status = StopStatuses.altDestination\n else:\n stop.alteredDestination = destination\n\n if passed:\n logging.info('Mission %s altered destination to %s.' % (self.id, destination))\n else:\n logging.warning('Mission %s could not find altered destination %s.' % (self.id, destination))\n url = '/agent/station/%s' % destination_id\n self.issue_time += timedelta(seconds=config.INTERVAL_BETWEEN_UPDATE_MSG)\n self.tasks.append(self.instruction_task(url, 'prio', self.issue_time))",
"def fusion_api_edit_appliance_snmpv3_trap_forwarding_user(self, body=None, id=None, api=None, headers=None): # pylint: disable=W0622\n return self.snmpv3user.put(body=body, id=id, api=api, headers=headers)",
"def update_conditional_forwarder(DirectoryId=None, RemoteDomainName=None, DnsIpAddrs=None):\n pass",
"def edit_a_parcel(destination, id):\n query = \"\"\"UPDATE parcels SET destination = %s WHERE id = %s\"\"\"\n tuple =(destination , id)\n db.insert(query, tuple)",
"def updateDest(self):\n\n\t\t# if end is reached stop calling\n\t\tif self.i == self.numSteps:\n\t\t\treturn False\n\n\t\t# controller\n\t\tpoint = self.control.nextUpPD(self.i)\n\t\tcommand_string = 'id1 mav.waypoint_actuator setdest [%s, %s, %s, %s, 0.2] \\n' % (\n\t\t\tpoint[0], point[1], point[2], point[3])\n\t\tcomm.write(bytes(command_string, 'utf8'))\n\n\t\tself.i = self.i + 1\n\t\treturn GLib.SOURCE_CONTINUE",
"def updateOne(id):\n print(inspect.stack()[1][3])\n # read data from the API call\n req_data = request.get_json()\n\n query = select([Followup]).where(Followup.columns.id == id)\n ResultProxy = connection.execute(query)\n ResultSet = ResultProxy.fetchone()\n if(not ResultSet):\n return {'error': 'Unable to Find the given client'}\n\n # Update the URL\n json_data = {}\n\n for req in req_data:\n if (req in Followup.c.keys()):\n json_data[req] = req_data[req]\n\n query = (\n update(Followup).\n where(Followup.columns.id == id).\n values(json_data)\n )\n ResultProxy = connection.execute(query)\n if(not ResultProxy):\n return {'error': 'Unable to Update the given client'}\n return {'status': \"Update Succesful\"}",
"def put(self, id):\n return None, 204",
"def put (id, travel_stop):\n travel_stop['source'] = \"otherDB\"\n travel_stop['id'] = id\n travel_stops[id] = travel_stop\n \n return travel_stop, 200",
"async def put(self):\r\n data = await self.request.json()\r\n agent_uuid = data[\"agent_uuid\"]\r\n ip_address = data[\"ip_address\"]\r\n agent_obj = Agent.filter(Agent.uuid == agent_uuid).first()\r\n if not agent_obj:\r\n response_obj = {\"status\": \"failed\"}\r\n logger.error(\"No agent found!!!\")\r\n return web.Response(text=str(response_obj), status=500)\r\n try:\r\n Agent.update(ip_address=ip_address).where(Agent.uuid == agent_uuid)\r\n logger.info(\"Agent updated!!!\")\r\n return web.Response(text=\"successful\", status=200)\r\n except Exception as ex:\r\n response_obj = {\"status\": \"failed\"}\r\n error_message = str(ex)\r\n logger.error(error_message)\r\n return web.Response(text=str(response_obj), status=500)",
"def put(self, id):\n context = request.environ.get('context')\n obj = dbapi.netdevice_data_update(context, id, request.json)\n resp = {\"data\": jsonutils.to_primitive(obj.variables)}\n return resp, 200, None",
"def _alter_route(self, ifname, action, destination, next_hop):\n version = destination.version\n ifname = self.generic_to_host(ifname)\n try:\n LOG.debug(self.sudo(\n '-%s' % version, 'route', action, str(destination), 'via',\n str(next_hop), 'dev', ifname\n ))\n return True\n except RuntimeError as e:\n # Since these are user-supplied custom routes, it's very possible\n # that adding/removing them will fail. A failure to apply one of\n # these custom rules, however, should *not* cause an overall router\n # failure.\n LOG.warn('Route could not be %sed: %s' % (action, unicode(e)))\n return False",
"def update(self,\n dns_forwarder_zone_id,\n policy_dns_forwarder_zone,\n ):\n return self._invoke('update',\n {\n 'dns_forwarder_zone_id': dns_forwarder_zone_id,\n 'policy_dns_forwarder_zone': policy_dns_forwarder_zone,\n })",
"def handleTransitUpdateRequest(self, request:CSERequest) -> Result:\n\t\tif (url := self._getForwardURL(request.id)) is None:\n\t\t\treturn Result(rsc=RC.notFound, dbg=f'forward URL not found for id: {request.id}')\n\t\tif len(request.originalArgs) > 0:\t# pass on other arguments, for discovery\n\t\t\turl += '?' + urllib.parse.urlencode(request.originalArgs)\n\t\tLogging.log(f'Forwarding Update request to: {url}')\n\t\treturn self.sendUpdateRequest(url, request.headers.originator, data=request.data)",
"def put(self, id):\n req = api.payload\n try:\n result = update_task(\n get_db(),\n id,\n req[\"task\"],\n date.fromisoformat(req[\"due_by\"]),\n Status[req[\"status\"]],\n )\n return task_to_dict(result), 201\n except ValueError:\n api.abort(422, \"Invalid Status\")",
"def fusion_api_get_appliance_snmpv3_trap_destinations(self, id=None, param='', api=None, headers=None): # pylint: disable=W0622\n return self.snmpv3trap.get(id=id, param=param, api=api, headers=headers)",
"def put(self, id=None):\n if id:\n slip = test4ValidEntity(id)\n if slip == None:\n self.response.set_status(404)\n else:\n slip_data = json.loads(self.request.body)\n if 'number' in slip_data:\n \"\"\" Test for requested Slip number already in use. \"\"\"\n query = Slip.query()\n results = query.fetch(limit = MAX_SLIPS)\n for match in results:\n if slip_data['number'] == match.number:\n slip.number = getSlipNum()\n else:\n slip.number = slip_data['number']\n if 'current_boat' in slip_data:\n if slip.current_boat == None:\n slip.current_boat = slip_data['current_boat']\n else:\n \"\"\" Query for the Boat and change at_sea to False. \"\"\"\n query = Boat.query(Boat.id == slip_data['current_boat'])\n result = query.fetch(limit = 1)\n if 'at_sea' in result:\n result.at_sea = False\n slip.current_boat = slip_data['current_boat']\n else:\n slip.current_boat = None\n if 'arrival_date' in slip_data:\n slip.arrival_date = slip_data['arrival_date']\n else:\n slip.arrival_date = None\n if 'departed_boat' in slip_data:\n slip.departed_boat = slip_data['departed_boat']\n else:\n slip.departed_boat = None\n if 'departure_date' in slip_data:\n slip.departure_date = slip_data['departure_date']\n else:\n slip.departure_date = None\n slip.put()\n slip_dict = slip.to_dict()\n del slip_dict['departure_history']\n self.response.headers['Content-Type'] = 'application/json'\n self.response.write(json.dumps(slip_dict))",
"def update_item(id: str, obj: endpoint_model):\n # should this error if exists?\n if obj.id:\n if obj.id != id:\n raise HTTPException(status_code=400, detail=\"id in body does not match id in path\")\n else:\n obj.id = id\n new_obj = db.save(obj)\n return new_obj",
"def update_gateway(self,\n id: str,\n *,\n global_: bool = None,\n loa_reject_reason: str = None,\n metered: bool = None,\n name: str = None,\n operational_status: str = None,\n speed_mbps: int = None,\n **kwargs\n ) -> DetailedResponse:\n\n if id is None:\n raise ValueError('id must be provided')\n headers = {}\n sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,\n service_version='V1',\n operation_id='update_gateway')\n headers.update(sdk_headers)\n\n params = {\n 'version': self.version\n }\n\n data = {\n 'global': global_,\n 'loa_reject_reason': loa_reject_reason,\n 'metered': metered,\n 'name': name,\n 'operational_status': operational_status,\n 'speed_mbps': speed_mbps\n }\n data = {k: v for (k, v) in data.items() if v is not None}\n data = json.dumps(data)\n headers['content-type'] = 'application/json'\n\n if 'headers' in kwargs:\n headers.update(kwargs.get('headers'))\n\n url = '/gateways/{0}'.format(\n *self.encode_path_vars(id))\n request = self.prepare_request(method='PATCH',\n url=url,\n headers=headers,\n params=params,\n data=data)\n\n response = self.send(request)\n return response",
"def handle_set_destination(self, data):\n #If the origin_id is 0, it has not been specified and we must find\n #the closest node to where we are now\n self.dest_node = data.dest_id\n if data.origin_id == 0:\n #Will set self.current_node\n self.get_nearest_node(data.dest_id)\n else:\n self.current_node = data.origin_id\n if self.current_node == data.dest_id:\n self.at_dest = True\n msg = (\"We're already there!\")\n return srvs.SetDestinationResponse(True, msg)\n rospy.wait_for_service('/get_trajectory')\n get_traj = rospy.ServiceProxy('/get_trajectory', srvs.GetTrajectory)\n trajectory = get_traj(False, self.current_node, data.dest_id).trajectory\n self.np_trajectory = to_numpy_trajectory(trajectory)\n self.loop = False\n self.at_dest = False\n msg = (\"Trajectory to destination of vehicle #%i \" % self.vehicle_id +\n \"successfully set.\")\n return srvs.SetDestinationResponse(True, msg)",
"def update_ship(id):\n data = request.get_json()\n print(data)\n for ship in db['ships']:\n if ship['id'] == id:\n if data['name']:\n ship['name'] == data['name']\n if data['age']:\n ship['age'] == data['age']\n return ship, status.HTTP_202_ACCEPTED\n return {}, status.HTTP_404_NOT_FOUND",
"def upvote_reply(name, title, reply_id):\n reply = reply_service.get_reply(reply_id)\n if reply:\n reply_service.upvote_reply(reply_id, current_user.id)\n return redirect(request.referrer)\n else:\n abort(404)"
] | [
"0.6613562",
"0.6590284",
"0.5745858",
"0.5331495",
"0.5328401",
"0.532052",
"0.5276687",
"0.5275363",
"0.51345956",
"0.5115327",
"0.50964355",
"0.5036326",
"0.50140005",
"0.49928546",
"0.4823668",
"0.47933242",
"0.47568846",
"0.47446564",
"0.47323117",
"0.46850047",
"0.4684243",
"0.46566126",
"0.46453106",
"0.46438286",
"0.4618636",
"0.46182257",
"0.46104124",
"0.45734608",
"0.45727882",
"0.45696628"
] | 0.74026597 | 0 |
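
Unlike the add-or-update keyword two rows above, this row's keyword maps to a plain PUT: per its query string, an existing destination is replaced and an unknown id is not created. A one-call sketch with the same assumed constants:

    import requests

    ONEVIEW = "https://oneview.example.com"
    HEADERS = {"auth": "<session token>", "X-API-Version": "800"}

    def update_trap_destination(dest_id, body):
        # PUT replaces the destination registered under dest_id; contrast
        # with add_or_update_trap_destination, which falls back to creating
        # the id. Endpoint and verb are assumptions, as before.
        url = "{}/rest/appliance/trap-destinations/{}".format(ONEVIEW, dest_id)
        return requests.put(url, json=body, headers=HEADERS, verify=False)
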
Deletes the trap destination associated with id [Example] ${resp} = Fusion Api Delete Appliance Trap Destination | | | | | def fusion_api_delete_appliance_trap_destination(self, id=None, api=None, headers=None): # pylint: disable=W0622
return self.trap.delete(id=id, api=api, headers=headers) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def fusion_api_delete_appliance_snmpv3_trap_destination(self, id=None, api=None, headers=None): # pylint: disable=W0622\n return self.snmpv3trap.delete(id=id, api=api, headers=headers)",
"def delete(id):\n r = requests.delete(API_ROUTE + '/' + str(id), headers={'Auth': _auth()})\n if r.status_code != requests.codes.no_content:\n return r.text, r.status_code\n return redirect(url_for('index'), code=278)",
"def delete(self, id):\n\n ns.abort(404, 'This API is not supported yet.')",
"def delete(self, id):\n return self.app.post('/delete/' + str(id), data=dict(id=id),\n follow_redirects=True)",
"def delete(self, id):\n return self._post(\n request=ApiActions.DELETE.value,\n uri=ApiUri.ACTIONS.value,\n params={'id': id}\n )",
"def delete(self, id):\n return self._post(\n request=ApiActions.DELETE.value,\n uri=ApiUri.ACTIONS.value,\n params={'id': id}\n )",
"def fusion_api_get_appliance_trap_destinations(self, id=None, param='', api=None, headers=None): # pylint: disable=W0622\n return self.trap.get(id=id, param=param, api=api, headers=headers)",
"def delete(short_id):\n try:\n url = Url.get(short_id)\n except:\n return jsonify({\"Error\", \"No Such ID\"})\n\n url.delete()\n return jsonify({\"statusCode\": 301,})",
"def delete(self, id):\n delete_entry(id)\n return None, 204",
"def do_DELETE(self):\n rest_params = common.get_restful_params(self.path)\n if rest_params is None:\n common.echo_json_response(self, 405, \"Not Implemented: Use /agents/ interface\")\n return\n\n if \"agents\" not in rest_params:\n common.echo_json_response(self, 400, \"uri not supported\")\n logger.warning('DELETE agent returning 400 response. uri not supported: ' + self.path)\n return\n\n agent_id = rest_params[\"agents\"]\n\n if agent_id is not None:\n if self.server.db.remove_agent(agent_id):\n #send response\n common.echo_json_response(self, 200, \"Success\")\n return\n else:\n #send response\n common.echo_json_response(self, 404)\n return\n else:\n common.echo_json_response(self, 404)\n return",
"def delete(self, id: str) -> Any:\n\n return self.client.delete(self._url(id))",
"def delete(id_=None):\n\n logger.debug('Catch DELETE request by URL /api/departments/%i.', id_)\n ds.delete(id_)\n return '', 204",
"def delete(self, id):\t\t\n\t\ttry:\n\t\t\tpost_service.delete(id)\n\t\texcept AssertionError as e:\n\t\t\tpost_space.abort(400, e.args[0], status = \"Could not delete post\", statusCode = \"400\")\n\t\texcept Exception as e:\n\t\t\tpost_space.abort(500, e.args[0], status = \"Could not delete post\", statusCode = \"500\")",
"def delete_item(id):\n return '', 201",
"def delete_data(request, result_id):\n result = TestResult.objects.get(id=result_id)\n result.delete()\n gun = result.bullet.gun\n return HttpResponseRedirect(reverse('gun', args=[gun.id]))",
"def post_route_target_delete(self, resource_id, resource_dict):\n pass",
"def delete(self, id: int):\n self._select_interface(self._rc_delete, self._http_delete, id)",
"def delete(self,\n dns_forwarder_zone_id,\n ):\n return self._invoke('delete',\n {\n 'dns_forwarder_zone_id': dns_forwarder_zone_id,\n })",
"def deleteOne(id):\n print(inspect.stack()[1][3])\n query = Followup.delete().where(Followup.columns.id == id)\n ResultProxy = connection.execute(query)\n if(not ResultProxy):\n return {'error': 'Unable to find the given client'}\n return {'status': \"Delete Succesful\"}",
"def delete_gateway(self,\n id: str,\n **kwargs\n ) -> DetailedResponse:\n\n if id is None:\n raise ValueError('id must be provided')\n headers = {}\n sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,\n service_version='V1',\n operation_id='delete_gateway')\n headers.update(sdk_headers)\n\n params = {\n 'version': self.version\n }\n\n if 'headers' in kwargs:\n headers.update(kwargs.get('headers'))\n\n url = '/gateways/{0}'.format(\n *self.encode_path_vars(id))\n request = self.prepare_request(method='DELETE',\n url=url,\n headers=headers,\n params=params)\n\n response = self.send(request)\n return response",
"def post(self, request, aiid, *args, **kwargs):\n form = ProxyDeleteAIForm(request.POST)\n\n if form.is_valid():\n status = form.save(\n token=self.request.session.get('token', False)\n )\n\n message = status['status']['info']\n\n if status['status']['code'] in [200, 201]:\n level = messages.SUCCESS\n else:\n level = messages.ERROR\n else:\n level = messages.ERROR\n message = 'Something went wrong'\n\n messages.add_message(self.request, level, message)\n return redirect('studio:summary')",
"def delete(self):\n rest_params = common.get_restful_params(self.request.uri)\n if rest_params is None:\n common.echo_json_response(self, 405, \"Not Implemented: Use /agents/ interface\")\n return\n\n if \"agents\" not in rest_params:\n common.echo_json_response(self, 400, \"uri not supported\")\n return\n\n agent_id = rest_params[\"agents\"]\n\n if agent_id is None:\n common.echo_json_response(self, 400, \"uri not supported\")\n logger.warning('DELETE returning 400 response. uri not supported: ' + self.request.path)\n\n agent = self.db.get_agent(agent_id)\n\n if agent is None:\n common.echo_json_response(self, 404, \"agent id not found\")\n logger.info('DELETE returning 404 response. agent id: ' + agent_id + ' not found.')\n return\n\n op_state = agent['operational_state']\n if op_state == cloud_verifier_common.CloudAgent_Operational_State.SAVED or \\\n op_state == cloud_verifier_common.CloudAgent_Operational_State.FAILED or \\\n op_state == cloud_verifier_common.CloudAgent_Operational_State.TERMINATED or \\\n op_state == cloud_verifier_common.CloudAgent_Operational_State.TENANT_FAILED or \\\n op_state == cloud_verifier_common.CloudAgent_Operational_State.INVALID_QUOTE:\n self.db.remove_agent(agent_id)\n common.echo_json_response(self, 200, \"Success\")\n logger.info('DELETE returning 200 response for agent id: ' + agent_id)\n else:\n self.db.update_agent(agent_id, 'operational_state',cloud_verifier_common.CloudAgent_Operational_State.TERMINATED)\n common.echo_json_response(self, 202, \"Accepted\")\n logger.info('DELETE returning 202 response for agent id: ' + agent_id)",
"def handle_delete(self, api, command):\n return self._make_request_from_command('DELETE', command)",
"async def delete_order(request: web.Request, order_id) -> web.Response:\n return web.Response(status=200)",
"def delete(self, _id):",
"async def delete(self):\r\n try:\r\n data = await self.request.json()\r\n agent_uuid = data.get(\"agent_uuid\")\r\n agent_to_delete = Agent.filter(Agent.uuid == agent_uuid).first()\r\n sys_id = (\r\n System.select().where(System.agent_uuid == agent_to_delete).execute()\r\n )\r\n if sys_id:\r\n logger.error(\"Agent not deleted\")\r\n return web.Response(text=\"Agent not deleted.\")\r\n else:\r\n agent_to_delete.delete_instance()\r\n logger.info(\"Agent deleted successfully\")\r\n return web.Response(text=\"Agent deleted successfully.\")\r\n except Exception as ex:\r\n error_message = str(ex)\r\n logger.error(error_message)\r\n return web.Response(text=error_message, status=500)",
"def delete(self, id):\n response = remove_location(id)\n return response",
"def __Delete(self, url, id = None):\n\n conn = self.__GetConnection()\n if (id != None):\n url += \"/\" + str(id)\n conn.request(\"DELETE\", url, \"\", self.__MakeHeaders(True))\n response = conn.getresponse()\n self.__CheckResponse(response)",
"def delete(self, id):\n\t\tassert isinstance(id, str), 'The ID must be a string'\n\t\tassert id, 'The ID must not be an empty string'\n\n\t\turl = f'{self.root.url}/api/v1.2/cases/{str(id)}'\n\t\treturn self.root.r('DELETE', url, body=None, headers=None, verify=self.root.verify)",
"async def delete(id: UUID):\n async with get_client() as client:\n try:\n await client.delete_flow_run(id)\n except ObjectNotFound as exc:\n exit_with_error(f\"Flow run '{id}' not found!\")\n\n exit_with_success(f\"Successfully deleted flow run '{id}'.\")"
] | [
"0.7169179",
"0.6183716",
"0.6171733",
"0.5891423",
"0.5883739",
"0.5883739",
"0.58711326",
"0.5850136",
"0.58202666",
"0.5785711",
"0.57626516",
"0.5716147",
"0.5701345",
"0.56500584",
"0.5644843",
"0.56413263",
"0.559493",
"0.55848175",
"0.55716556",
"0.5553562",
"0.55503094",
"0.55366427",
"0.5535961",
"0.5530785",
"0.5528686",
"0.5524733",
"0.5524425",
"0.55234694",
"0.5521062",
"0.5503743"
] | 0.77792716 | 0 |
Returns appliance SNMPv3 trap destinations [Example] ${resp} = Fusion Api Get Appliance SNMPv3 Trap Destinations | | | | | def fusion_api_get_appliance_snmpv3_trap_destinations(self, id=None, param='', api=None, headers=None): # pylint: disable=W0622
return self.snmpv3trap.get(id=id, param=param, api=api, headers=headers) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def fusion_api_get_appliance_trap_destinations(self, id=None, param='', api=None, headers=None): # pylint: disable=W0622\n return self.trap.get(id=id, param=param, api=api, headers=headers)",
"def fusion_api_add_appliance_snmpv3_trap_destination(self, body=None, api=None, headers=None):\n return self.snmpv3trap.create(body=body, api=api, headers=headers)",
"def traceroute(self,dest):\n\t\tself.tn.write('traceroute %s\\n'%(dest))\n\t\tself.tn.write('exit\\n')\n\t\tresp = self.tn.read_all()\n\t\treturn resp",
"def fusion_api_delete_appliance_snmpv3_trap_destination(self, id=None, api=None, headers=None): # pylint: disable=W0622\n return self.snmpv3trap.delete(id=id, api=api, headers=headers)",
"def get_log_forwarding_destinations(self) -> dict:\n uri = f\"{self.uri}/log-forwarding-destinations\"\n\n response = self.request(uri=uri)\n return response.json()",
"def destinations(self) -> Optional[Sequence['outputs.AddressPrefixItemResponse']]:\n return pulumi.get(self, \"destinations\")",
"def get_traceroute_output(self):\n url = self.source['url']\n if 'post_data' in self.source:\n context = self.source['post_data']\n else:\n context = None\n status_code, content = self.urlopen(url, context=context)\n content = content.strip()\n regex = r'<pre.*?>(?P<traceroute>.*?)</pre>'\n pattern = re.compile(regex, re.DOTALL | re.IGNORECASE)\n try:\n traceroute = re.findall(pattern, content)[0].strip()\n except IndexError:\n # Manually append closing </pre> for partially downloaded page\n content = \"{0}</pre>\".format(content)\n traceroute = re.findall(pattern, content)[0].strip()\n return (status_code, traceroute)",
"def _get_nitro_response(self, service, response) :\n\t\ttry :\n\t\t\tresult = service.payload_formatter.string_to_resource(traceroute6_response, response, self.__class__.__name__)\n\t\t\tif(result.errorcode != 0) :\n\t\t\t\tif (result.errorcode == 444) :\n\t\t\t\t\tservice.clear_session(self)\n\t\t\t\tif result.severity :\n\t\t\t\t\tif (result.severity == \"ERROR\") :\n\t\t\t\t\t\traise nitro_exception(result.errorcode, str(result.message), str(result.severity))\n\t\t\t\telse :\n\t\t\t\t\traise nitro_exception(result.errorcode, str(result.message), str(result.severity))\n\t\t\treturn result.traceroute6\n\t\texcept Exception as e :\n\t\t\traise e",
"def sendArpReply(logger, device, destination, count=3, quiet=False, blocking=True):\n\n args = [Arping.ARPING_COMMAND_NAME, \n Arping.INTERFACE_OPTION, device, \n Arping.COUNT_OPTION, str(count),\n Arping.ARP_REPLY_OPTION]\n\n if quiet is True:\n args.append(Arping.QUIET_OPTION)\n\n # must set destination as last arg\n args.append(destination) \n\n rc = Command.execute(logger, Arping.ARPING_COMMAND_NAME, args, blocking=blocking)\n\n return rc",
"def _process_listroute_response(response):\n response[\"aircraft_id\"] = response.pop(config_param(\"query_aircraft_id\"))\n return response",
"def traceroute(host, unique_id=None, index=None, sourcetype=\"traceroute\",\n source=\"traceroute_search_command\", logger=None, include_dest_info=True,\n include_raw_output=False):\n\n if system_name().lower() == \"windows\":\n cmd = [\"tracert\"]\n else:\n cmd = [\"traceroute\"]\n\n # Add the host argument\n cmd.append(host)\n\n # Run the traceroute command and get the output\n output = None\n return_code = None\n\n try:\n output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)\n return_code = 0\n except subprocess.CalledProcessError as exception:\n output = exception.output\n return_code = exception.returncode\n except OSError as exception:\n if exception.errno == errno.ENOENT:\n raise CommandNotFoundException(cmd[0])\n else:\n raise exception\n\n # Parse the output\n try:\n trp = Traceroute.parse(output)\n\n # This will contain the hops\n parsed = []\n\n hop_idx = 0\n\n # Make an entry for each hop\n for hop in trp.hops:\n\n if hop.probes is None or len(hop.probes) == 0:\n continue\n\n hop_idx = hop_idx + 1\n\n # This will track the probes\n rtts = []\n ips = []\n names = []\n\n hop_dict = collections.OrderedDict()\n hop_dict['hop'] = hop_idx\n\n for probe in hop.probes:\n\n if probe.rtt is not None:\n rtts.append(str(probe.rtt))\n\n if probe.dest_ip is not None:\n ips.append(probe.dest_ip)\n\n if probe.dest is not None:\n names.append(probe.dest)\n\n hop_dict['rtt'] = rtts\n hop_dict['ip'] = ips\n hop_dict['name'] = names\n\n if include_dest_info:\n hop_dict['dest_ip'] = trp.dest_ip\n hop_dict['dest_host'] = trp.dest\n\n if include_raw_output:\n hop_dict['output'] = output\n\n parsed.append(hop_dict)\n\n except Exception:\n\n if logger:\n logger.exception(\"Unable to parse traceroute output\")\n\n raise Exception(\"Unable to parse traceroute output\")\n\n # Write the event as a stash new file\n if index is not None:\n writer = StashNewWriter(index=index, source_name=source, sourcetype=sourcetype,\n file_extension=\".stash_output\")\n\n # Let's store the basic information for the traceroute that will be included with each hop\n proto = collections.OrderedDict()\n\n # Include the destination info if it was included already\n if not include_dest_info:\n proto['dest_ip'] = trp.dest_ip\n proto['dest_host'] = trp.dest\n\n if unique_id is None:\n unique_id = binascii.b2a_hex(os.urandom(4))\n\n proto['unique_id'] = unique_id\n\n for parsed_hop in parsed:\n\n result = collections.OrderedDict()\n result.update(parsed_hop)\n result.update(proto)\n\n # Log that we performed the traceroute\n if logger:\n logger.debug(\"Wrote stash file=%s\", writer.write_event(result))\n\n return output, return_code, parsed",
"def get_snmp_data():\n if request.method == 'GET':\n \n xticks = list(map(str, snmp_data['time']))\n \n res = {}\n \n for target in snmp_targets:\n \n res[target] = list(snmp_data[target])\n \n \n xticks = list(map(str, snmp_data['time']))\n res['xticks'] = xticks\n \n json_data = json.dumps(res)\n resp = Response(json_data, status=200, mimetype='application/json')\n resp.headers = {'Access-Control-Allow-Origin': '*'}\n \n return resp\n \n else:\n return 'only get request is allowed'",
"def __run_traceroute(self):\n self.print_debug(\"ip_address={0}\".format(self.ip_address))\n\n filename = \"{0}.{1}.txt\".format(self.ip_address, self.country)\n filepath = os.path.join(self.tmp_dir, filename)\n\n if not os.path.exists(filepath):\n if self.country == \"LO\":\n status_code, traceroute = self.execute_cmd(self.source['url'])\n else:\n status_code, traceroute = self.get_traceroute_output()\n if status_code != 0 and status_code != 200:\n return {'error': status_code}\n open(filepath, \"w\").write(traceroute)\n traceroute = open(filepath, \"r\").read()\n\n self.raw_string = traceroute \n self.__get_hops(traceroute)\n\n\n #if not self.no_geo:\n # self.__get_geocoded_hops()\n\n #self.hops = map(lambda h: {h.pop(\"hop_num\") : h}, self.hops)",
"def fusion_api_edit_appliance_snmpv3_trap_destination(self, body=None, id=None, api=None, headers=None): # pylint: disable=W0622\n return self.snmpv3trap.put(body=body, id=id, api=api, headers=headers)",
"def fusion_api_validate_appliance_trap_destination(self, body=None, api=None, headers=None):\n return self.trap.validate(body=body, api=api, headers=headers)",
"def fusion_api_delete_appliance_trap_destination(self, id=None, api=None, headers=None): # pylint: disable=W0622\n return self.trap.delete(id=id, api=api, headers=headers)",
"def get_hct_tableau_results(session):\n LOG.debug(\"Exporting HCT results for Tableau dashboard backing data\")\n\n hct_tableau_results = datastore.fetch_rows_from_table(session, (\"shipping\", \"uw_reopening_results_hct_data_pulls\"))\n\n return Response((row[0] + '\\n' for row in hct_tableau_results), mimetype=\"application/x-ndjson\")",
"def get_rogueap_location(self, conn, macaddr: str, offset=0, limit=100, units=\"FEET\"):\n path = urlJoin(urls.ROGUE_LOCATION[\"GET_AP_LOC\"], macaddr)\n params = {\n \"offset\": offset,\n \"limit\": limit,\n \"units\": units\n }\n resp = conn.command(apiMethod=\"GET\", apiPath=path, apiParams=params)\n return resp",
"def arping(iprange=\"10.0.1.0/24\"):\n\n conf.verb=0\n ans,unans=srp(Ether(dst=\"ff:ff:ff:ff:ff:ff\")/ARP(pdst=iprange),\n timeout=2)\n\n collection = []\n for snd, rcv in ans:\n result = rcv.sprintf(r\"%ARP.psrc% %Ether.src%\").split()\n collection.append(result)\n return collection",
"def getDestinations(self) -> dict:\n if self.loggingEnabled:\n self.logger.debug(f\"Starting getDestinations\")\n path = \"/config/destinations\"\n res = self.connector.getData(self.endpoint + path)\n return res",
"def getSDDCT0routes(proxy_url, session_token):\n t0_routes_json = get_sddc_t0_routes_json(proxy_url, session_token)\n t0_routes = {}\n if 'results' in t0_routes_json:\n pass\n else:\n print(\"No results. Something went wrong - please check your syntax and try again.\")\n sys.exit(1)\n\n if t0_routes_json == None:\n print(\"API Error\")\n sys.exit(1)\n elif len(t0_routes_json['results']) == 1:\n t0_routes = t0_routes_json['results'][0]['route_entries']\n elif len(t0_routes_json['results']) >1:\n t0_routes0 = t0_routes_json['results'][0]['route_entries']\n t0_routes1 = t0_routes_json['results'][1]['route_entries']\n t0_routes = t0_routes0 + t0_routes1\n\n df = pd.DataFrame(t0_routes)\n df.drop(['lr_component_id', 'lr_component_type'], axis=1, inplace=True)\n df.drop_duplicates(inplace = True)\n print('T0 Routes')\n print('Route Type Legend:')\n print('t0c - Tier-0 Connected\\nt0s - Tier-0 Static\\nb - BGP\\nt0n - Tier-0 NAT\\nt1s - Tier-1 Static\\nt1c - Tier-1 Connected\\nisr: Inter-SR')\n print()\n print(df.sort_values(by=[ 'route_type', 'network'], ascending=True).to_string())\n # route_table = PrettyTable(['Route Type', 'Network', 'Admin Distance', 'Next Hop'])\n # for routes in t0_routes:\n # route_table.add_row([routes['route_type'],routes['network'],routes['admin_distance'],routes['next_hop']])\n # print (route_table.get_string(sort_key = operator.itemgetter(1,0), sortby = \"Network\", reversesort=True))",
"def parseTraceroute(self, stdoutputdata):\n\t\titemlist = stdoutputdata.split(\"\\n\")\n\t\tres = defaultdict(list)\n\t\tfor item in itemlist:\n\t\t\tre_ip = re.search(r'\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}', item)\n\t\t\tif re_ip:\n\t\t\t\tip = re_ip.group(0)\n\t\t\t\tres[\"route\"].append(ip)\n\t\tres[\"route\"].append(self.task[\"destination\"])\n\t\tres[\"destination\"] = self.task[\"destination\"]\n\t\treturn res",
"def Traceroute6(cls, client, resource) :\n\t\ttry :\n\t\t\tif type(resource) is not list :\n\t\t\t\tTraceroute6resource = traceroute6()\n\t\t\t\tTraceroute6resource.n = resource.n\n\t\t\t\tTraceroute6resource.I = resource.I\n\t\t\t\tTraceroute6resource.r = resource.r\n\t\t\t\tTraceroute6resource.v = resource.v\n\t\t\t\tTraceroute6resource.m = resource.m\n\t\t\t\tTraceroute6resource.p = resource.p\n\t\t\t\tTraceroute6resource.q = resource.q\n\t\t\t\tTraceroute6resource.s = resource.s\n\t\t\t\tTraceroute6resource.T = resource.T\n\t\t\t\tTraceroute6resource.w = resource.w\n\t\t\t\tTraceroute6resource.host = resource.host\n\t\t\t\tTraceroute6resource.packetlen = resource.packetlen\n\t\t\t\treturn Traceroute6resource.perform_operationEx(client)\n\t\texcept Exception as e :\n\t\t\traise e",
"def get(self, *args, **kwargs):\n\n try:\n\n if len(args) > 2 or len(args) < 1:\n raise ValueError(\"Invalid URL\")\n\n tenant_id = uuid.UUID(args[0])\n tenant = RUNTIME.tenants[tenant_id]\n vaps = tenant.vaps\n\n if len(args) == 1:\n self.write_as_json(vaps.values())\n else:\n vap = EtherAddress(args[1])\n self.write_as_json(vaps[vap])\n\n except KeyError as ex:\n self.send_error(404, message=ex)\n except ValueError as ex:\n self.send_error(400, message=ex)\n self.set_status(200, None)",
"def display_routing_table(appliances=[],\n credentials=[],\n timeout=120,\n no_check_hostname=False,\n web=False):\n logger = make_logger(\"mast.network\")\n check_hostname = not no_check_hostname\n env = datapower.Environment(\n appliances,\n credentials,\n timeout,\n check_hostname=check_hostname)\n logger.info(\n \"Attempting to retrieve routing table from {}\".format(\n str(env.appliances)))\n\n # try RoutingStatus3 first\n try:\n logger.debug(\"Attempting RoutingStatus3\")\n resp = env.perform_action(\n \"get_status\",\n domain=\"default\",\n provider=\"RoutingStatus3\")\n xpath = datapower.STATUS_XPATH + \"RoutingStatus3\"\n except urllib2.HTTPError:\n logger.warn(\n \"RoutingStatus3 unavailable, falling back to RoutingStatus2\")\n resp = env.perform_action(\n \"get_status\",\n domain=\"default\",\n provider=\"RoutingStatus2\")\n xpath = datapower.STATUS_XPATH + \"RoutingStatus2\"\n logger.debug(\"Response received: {}\".format(resp))\n\n header_row = []\n for host, l in resp.items():\n if not web:\n print host, \"\\n\", \"=\" * len(host), \"\\n\"\n fields = [child.tag for child in l.xml.find(xpath)]\n\n if web:\n if not header_row:\n header_row = list(fields)\n header_row.insert(0, \"Appliance\")\n rows = []\n\n width = len(max(fields, key=len))\n template = \"{:<{width}} \" * len(fields)\n header = template.format(*fields, width=width)\n if not web:\n print header\n\n for item in l.xml.findall(xpath):\n values = [child.text for child in item]\n line = template.format(*values, width=width)\n if web:\n _row = list(values)\n _row.insert(0, host)\n rows.append(_row)\n if not web:\n print line\n if web:\n return flask.render_template(\n \"results_table.html\",\n header_row=header_row,\n rows=rows), util.render_history(env)\n print",
"def get_nat_rules(**kwargs):\n proxy = kwargs['proxy']\n sessiontoken = kwargs['sessiontoken']\n tier1_id = kwargs['tier1_id']\n json_response = get_sddc_nat_info_json(proxy, sessiontoken, tier1_id)\n if json_response is not None:\n sddc_NAT = json_response['results']\n table = PrettyTable(['ID', 'Name', 'Public IP', 'Ports', 'Internal IP', 'Enabled?'])\n for i in sddc_NAT:\n if 'destination_network' in i:\n table.add_row([i['id'], i['display_name'], i['destination_network'], i['translated_ports'], i['translated_network'], i['enabled']])\n else:\n table.add_row([i['id'], i['display_name'], i['translated_network'], \"any\", i['source_network'], i['enabled']])\n print(table)\n else:\n print(\"Something went wrong. Please check your syntax and try again.\")\n sys.exit(1)",
"def get_destination(event):\n if event['result']['parameters']['destination_station']:\n return event['result']['parameters']['destination_station']['destination']\n else:\n return \"\"",
"def getOneAddress(results):\n return getOnePayload(results).dottedQuad()",
"def getSDDCT0staticroutes(proxy_url,session_token):\n t0_static_routes_json = get_sddc_t0_static_routes_json(proxy_url, session_token)\n if t0_static_routes_json == None:\n print(\"API Error\")\n sys.exit(1)\n if 'results' in t0_static_routes_json:\n t0_static_routes = t0_static_routes_json['results']\n else:\n print(\"No results. Something went wrong - please check your syntax and try again.\")\n sys.exit(1)\n route_table = PrettyTable(['Display Name', 'Network', 'Admin Distance', 'Next Hop'])\n for routes in t0_static_routes:\n route_table.add_row([routes['display_name'],routes['network'],routes['next_hops'][0]['admin_distance'],routes['next_hops'][0]['ip_address']])\n print (route_table.get_string(sort_key = operator.itemgetter(1,0), sortby = \"Network\", reversesort=True))",
"def sendArpRequest(logger, device, destination, count=3, timeout=1, quiet=False, firstReply=False, blocking=True):\n\n args = [Arping.ARPING_COMMAND_NAME, \n Arping.INTERFACE_OPTION, device, \n Arping.COUNT_OPTION, str(count),\n Arping.TIMEOUT_OPTION, str(timeout)]\n\n if quiet is True:\n args.append(Arping.QUIET_OPTION)\n\n if firstReply is True:\n args.append(Arping.FIRST_REPLY_OPTION)\n\n # must set destination as last arg\n args.append(destination) \n\n rc = Command.execute(logger, Arping.ARPING_COMMAND_NAME, args, timeoutSec=(timeout+3), blocking=blocking)\n\n return rc"
] | [
"0.6452569",
"0.56578344",
"0.54804915",
"0.5134963",
"0.5104358",
"0.48025116",
"0.4775512",
"0.47501403",
"0.46201384",
"0.45593098",
"0.4524613",
"0.44882345",
"0.44875172",
"0.4471129",
"0.446934",
"0.44567436",
"0.4438928",
"0.44336593",
"0.44281107",
"0.44168553",
"0.44147",
"0.4398355",
"0.43976834",
"0.4383331",
"0.43585894",
"0.43569887",
"0.4344007",
"0.43412572",
"0.43325752",
"0.4317462"
] | 0.7171664 | 0 |
Adds SNMPv3 trap forwarding destination. [Example] ${resp} = Fusion Api Add Appliance SNMPv3 Trap Destination | | | | def fusion_api_add_appliance_snmpv3_trap_destination(self, body=None, api=None, headers=None):
return self.snmpv3trap.create(body=body, api=api, headers=headers) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def fusion_api_edit_appliance_snmpv3_trap_destination(self, body=None, id=None, api=None, headers=None): # pylint: disable=W0622\n return self.snmpv3trap.put(body=body, id=id, api=api, headers=headers)",
"def fusion_api_add_or_update_appliance_trap_destination(self, body=None, id=None, api=None, headers=None): # pylint: disable=W0622\n return self.trap.create(body=body, id=id, api=api, headers=headers)",
"def fusion_api_add_appliance_snmpv3_trap_forwarding_user(self, body=None, api=None, headers=None):\n return self.snmpv3user.create(body=body, api=api, headers=headers)",
"def fusion_api_get_appliance_snmpv3_trap_destinations(self, id=None, param='', api=None, headers=None): # pylint: disable=W0622\n return self.snmpv3trap.get(id=id, param=param, api=api, headers=headers)",
"def fusion_api_delete_appliance_snmpv3_trap_destination(self, id=None, api=None, headers=None): # pylint: disable=W0622\n return self.snmpv3trap.delete(id=id, api=api, headers=headers)",
"def addTunnel (self, sourcemachineguid, sourceport, destinationmachineguid, destinationport, jobguid = \"\", executionparams = {}):\n params =dict()\n params['sourceport'] = sourceport\n params['destinationmachineguid'] = destinationmachineguid\n params['sourcemachineguid'] = sourcemachineguid\n params['destinationport'] = destinationport\n return q.workflowengine.actionmanager.startActorAction('ras', 'addTunnel', params, jobguid=jobguid, executionparams=executionparams)",
"def fusion_api_edit_appliance_snmpv3_trap_forwarding_user(self, body=None, id=None, api=None, headers=None): # pylint: disable=W0622\n return self.snmpv3user.put(body=body, id=id, api=api, headers=headers)",
"def fusion_api_update_appliance_trap_destination(self, body, id, api=None, headers=None): # pylint: disable=W0622\n return self.trap.put(body=body, id=id, api=api, headers=headers)",
"def forward(self, srcip, packet): #gets entire packet and srcip of that packet\n # get route to send packet\n best_route = self.get_route(srcip, packet[DEST]) #is a socket\n\n sock = best_route\n\n\n jsonpack = json.dumps(packet)\n sock.sendall(jsonpack.encode())\n # TODO fix src and dest\n return True",
"def add_router_to_l3_agent(self, l3_agent, body):\r\n return self.post((self.agent_path + self.L3_ROUTERS) % l3_agent,\r\n body=body)",
"def insert_ret(self, space_no, field_types, *args):\n d = self.replyQueue.get()\n packet = RequestInsert(self.charset, self.errors, d._ipro_request_id,\n space_no, Request.TNT_FLAG_ADD | Request.TNT_FLAG_RETURN, *args)\n self.transport.write(bytes(packet))\n return d.addCallback(self.handle_reply, self.charset, self.errors, field_types)",
"def traceroute(self,dest):\n\t\tself.tn.write('traceroute %s\\n'%(dest))\n\t\tself.tn.write('exit\\n')\n\t\tresp = self.tn.read_all()\n\t\treturn resp",
"def insert_route(src_ip, gre_tunnel):\n import os\n try:\n os.system(\"\"\"\n /usr/bin/sudo /sbin/iptables -t mangle -A PREROUTING -s %s -j MARK --set-mark %s\n \"\"\" % (src_ip, gre_tunnel))\n except:\n raise iptExc(\"Could not insert route from src_ip %s to gre tunnel %s in iptables\" % (src_ip, gre_tunnel))\n return True",
"def fusion_api_validate_appliance_trap_destination(self, body=None, api=None, headers=None):\n return self.trap.validate(body=body, api=api, headers=headers)",
"def forward(self, srcif, packet):\n # packet is already decoded\n def send_no_route():\n send_src = srcif[:-1]\n send_src += '1'\n self.sockets[srcif].send(json.dumps({\n SRCE: send_src,\n DEST: packet[SRCE],\n TYPE: NRTE,\n MESG: {}\n }).encode())\n # GEt correct route.\n sock_addr = self.get_route(srcif, packet[DEST])\n\n # If no route available, send no route message back\n if sock_addr == None:\n send_no_route()\n else:\n sock = self.sockets[sock_addr]\n # If socket is available, send to proper neighbor.\n sock.send(json.dumps(packet).encode())\n return False",
"def add_returned_route_on_gw(self, context, router_id, port):\n LOG.debug('OVNL3RouterPlugin::')\n ovn_router_name = utils.ovn_gateway_name(router_id)\n for fixed_ip in port['fixed_ips']:\n subnet_id = fixed_ip['subnet_id']\n subnet = self._plugin.get_subnet(context, subnet_id)\n route = {'destination': subnet['cidr'], 'nexthop': ovn_const.OVN_LROUTER_TRANSIT_PORT_IP}\n with self._ovn.transaction(check_error=True) as txn:\n txn.add(self._ovn.add_static_route(ovn_router_name,\n ip_prefix=route['destination'],\n nexthop=route['nexthop']))",
"def insert(self, space_no, *args):\n d = self.replyQueue.get()\n packet = RequestInsert(self.charset, self.errors, d._ipro_request_id, space_no, Request.TNT_FLAG_ADD, *args)\n self.transport.write(bytes(packet))\n return d.addCallback(self.handle_reply, self.charset, self.errors, None)",
"def forward(self, srcif, packet): \n # TODO: will need to massively update this \n #print(\"PACKET FROM DATA: {0}\".format(packet))\n #print(\"ROUTING TABLE IS: {0}\".format(self.routes))\n dest = packet[\"dst\"]\n chosen_router = self.get_route(srcif, dest)\n if chosen_router is None:\n return False\n\n #TODO implement most specific route and business routes\n outroutes = []\n #print(\"CHOSEN ROUTER ISSSSSSSSSSSSSSSSSS\", chosen_router) \n #print(\"THIS IS FOR FORWARD:\", json.dumps(packet).encode(\"ascii\"))\n chosen_router.send(json.dumps(packet).encode(\"ascii\"))\n #return may need to be changed \n return True",
"def replace_ret(self, space_no, field_types, *args):\n d = self.replyQueue.get()\n packet = RequestInsert(self.charset, self.errors, d._ipro_request_id, space_no, Request.TNT_FLAG_RETURN, *args)\n self.transport.write(bytes(packet))\n return d.addCallback(self.handle_reply, self.charset, self.errors, field_types)",
"def install_splunk_forwarder(self, vm_ip):\n installer_agent_script_path = get_config(KEY_SPLUNK_CLIENT, \"INSTALLER_AGENT_SCRIPT_PATH\")\n forwarder_password = get_config(KEY_SPLUNK_CLIENT, \"SPLUNK_FORWARDER_ADMIN_PASSWORD\")\n deployserver_ip = get_config(KEY_SPLUNK_SERVER, \"SPLUNK_DEPLOYMENT_SERVER_IP\")\n deployserver_port = get_config(KEY_SPLUNK_SERVER, \"SPLUNK_DEPLOYMENT_SERVER_PORT\")\n salt_api = SaltNetAPI()\n forwarder_details = {\"pillar\": {\"deployment_server_ip\": deployserver_ip,\n \"deployment_server_port\": deployserver_port,\n \"forwarder_password\": forwarder_password}}\n # Check if client vm is up and running\n vm_minion_status_resp = salt_api.check_minion_status(vm_ip)\n if not vm_minion_status_resp:\n err_code = \"LOG_FWRDR012_CHECK_VM_STATUS\"\n err_message = LOG_FORWARDER_ERROR[err_code]\n err_trace = \"\"\n logger.error('{} err_code: {}, err_message: {}, err_trace: {}'\n .format(LOG_FORWARDER_ID, err_code, err_message, err_trace))\n raise TASException(err_code, err_message, err_trace)\n\n minion_name = salt_api.get_minion_name_from_ip(vm_ip)\n splunk_api_response = salt_api.execute_command(minion_name['minion_name'],\n args=installer_agent_script_path,\n pillar_details=forwarder_details)\n\n if not splunk_api_response:\n err_code = \"LOG_FWRDR009_UNABLE_INSTALL\"\n err_message = LOG_FORWARDER_ERROR[err_code]\n err_trace = \"\"\n logger.error('{} err_code: {}, err_message: {}, err_trace: {}'\n .format(LOG_FORWARDER_ID, err_code, err_message, err_trace))\n raise TASException(err_code, err_message, err_trace)\n\n if 'status' not in splunk_api_response or \\\n 'comment' not in splunk_api_response:\n err_code = \"LOG_FWRDR008_UNKNOWN_SALT_API_RESPONSE\"\n err_message = LOG_FORWARDER_ERROR[err_code]\n err_trace = \"\"\n logger.error('{} err_code: {}, err_message: {}, err_trace: {}'\n .format(LOG_FORWARDER_ID, err_code, err_message, err_trace))\n raise TASException(err_code, err_message, err_trace)\n\n if not splunk_api_response['status']:\n err_code = \"LOG_FWRDR000_SALT_SERVER_ERROR\"\n err_message = LOG_FORWARDER_ERROR[err_code]\n err_trace = \"\"\n logger.error('{} err_code: {}, err_message: {}, err_trace: {}'\n .format(LOG_FORWARDER_ID, err_code, err_message, err_trace))\n raise TASException(err_code, err_message, err_trace)\n\n logger.info(\"{} Response received after executing \"\n \"the Installation of Log Forwarder script\".format(LOG_FORWARDER_ID))\n logger.debug(\"{} Response for Installation of Log Forwarder{}\"\n .format(LOG_FORWARDER_ID, str(splunk_api_response['comment'])))\n os_kernel = salt_api.get_os_kernel_from_minion_id(minion_name['minion_name'])\n os_kernel_fold = os_kernel.casefold()\n self.response_parser.parse_salt_script_response(splunk_api_response['comment'], os_kernel_fold)\n return True",
"def new_nat_rule(**kwargs):\n\n proxy = kwargs['proxy']\n sessiontoken = kwargs['sessiontoken']\n nat_id = kwargs['objectname']\n tier1_id = kwargs['tier1_id']\n action = kwargs['action']\n logging = kwargs['logging']\n status = kwargs['disabled']\n public_ip = kwargs['public_ip']\n private_ip = kwargs['private_ip']\n\n if action == 'REFLEXIVE' and kwargs['service'] is not None:\n print('Reflexive rules may not be configured with a service / port. Please check your configuration and try again.')\n else:\n pass\n\n if kwargs['disabled'] == True:\n status = True\n elif kwargs['disabled'] == False:\n status = False\n if kwargs['logging'] == True:\n logging = True\n elif kwargs['logging'] == False:\n logging = False\n\n json_data = {}\n json_data[\"sequence_number\"] = 0\n json_data[\"logging\"] = logging\n json_data[\"enabled\"] = status\n json_data[\"id\"] = nat_id\n json_data[\"firewall_match\"] = \"MATCH_INTERNAL_ADDRESS\"\n json_data[\"scope\"] = []\n\n match action:\n case \"REFLEXIVE\":\n json_data[\"action\"] = f'REFLEXIVE'\n json_data[\"translated_network\"] = public_ip\n json_data[\"source_network\"] = private_ip\n\n case \"DNAT\":\n json_data['action'] = 'DNAT'\n json_data[\"destination_network\"] = public_ip\n json_data[\"translated_network\"] = private_ip\n if kwargs['translated_port'] is not None:\n json_data[\"translated_ports\"] = kwargs['translated_port']\n\n match tier1_id:\n case \"cgw\":\n json_data[\"scope\"] = [\"/infra/labels/cgw-public\"]\n\n if kwargs['service'] is not None:\n service = kwargs['service']\n json_data[\"service\"] = f'/infra/services/{service}'\n\n json_response_status_code = new_sddc_nat_json(proxy, sessiontoken, nat_id, tier1_id, json_data) \n if json_response_status_code is not None:\n print(f\"NAT {nat_id} created successfully\")\n else:\n print(\"Something went wrong. Please check your syntax and try again.\")",
"def add_windows_firewall_rule(attacker_ip, listening_ip):\n try:\n add_rule_result = subprocess.check_output(\n 'netsh advfirewall firewall add rule name=\"flytrap - \"'\n + attacker_ip + ' description=\"Rule automatically added by '\n 'flytrap.\" dir=in action=block '\n 'protocol=any localip=' + listening_ip +\n ' remoteip=' + attacker_ip)\n if \"Ok.\" in str(add_rule_result):\n print(attacker_ip + \" has been successfully blocked.\")\n else:\n print(\"Error adding firewall rule to block \" + attacker_ip)\n except subprocess.CalledProcessError:\n print(\"Unable to add firewall rule. Flytrap needs to be run as \"\n \"administrator.\")",
"def fusion_api_get_appliance_trap_destinations(self, id=None, param='', api=None, headers=None): # pylint: disable=W0622\n return self.trap.get(id=id, param=param, api=api, headers=headers)",
"def add_destination(self):\n pass",
"def writeresponse(self, rspstr):\n self.response += rspstr",
"def insert(self, v: Route) -> ADJ_RIB_in_response: # pylint: disable=undefined-variable\n self.check(v)\n # input filter\n filtered = False\n if self.filter(v):\n filtered = True\n # check implicit withdraw\n substitute = None\n if self._implicit_withdraw:\n i = hash(v.addr)\n if i in self._destinations:\n for net in self._destinations[i]:\n if net.nh == v.nh:\n substitute = net\n self.remove(net)\n if not filtered:\n return super().insert(v), substitute\n return None, substitute",
"def add_redirect(self, expr, node_host, node_port, openflow_host, openflow_port):\n pusher = self.StaticFlowEntryPusher(openflow_host, openflow_port)\n device = self.Device(openflow_host, openflow_port)\n try:\n (_, connected_dpid, node_mac, node_vlan) = device.get(node_host)\n except KeyError:\n raise\n request_hands_off = {\n \"switch\": connected_dpid,\n \"name\": \"request_hands_off-\" + node_host + \"-\" + node_port + \"-\" + expr,\n \"priority\": \"32767\",\n \"ether-type\": 0x0800,\n \"protocol\": 0x06,\n \"src-ip\": node_host,\n \"src-mac\": node_mac,\n \"dst-ip\": expr,\n \"dst-port\":\"80\",\n \"vlan-id\":node_vlan,\n \"active\":\"true\",\n \"actions\":\"output=normal\"\n }\n request_in = {\n \"switch\": connected_dpid,\n \"name\": \"request_in-\" + node_host + \"-\" + node_port + \"-\" + expr,\n \"priority\": \"32766\",\n \"ether-type\": 0x0800,\n \"protocol\": 0x06,\n \"dst-ip\": expr,\n \"dst-port\": \"80\",\n \"vlan-id\":node_vlan,\n \"active\": \"true\",\n \"actions\": \"set-dst-mac=\" + node_mac + \",set-dst-ip=\" + node_host +\n \",set-dst-port=\" + node_port +\",output=normal\"\n }\n request_out = {\n \"switch\": connected_dpid,\n \"name\": \"request_out-\" + node_host + \"-\" + node_port + \"-\" + expr,\n \"cookie\": \"0\",\n \"priority\": \"32766\",\n \"ether-type\": 0x0800,\n \"protocol\": 0x06,\n \"src-ip\": node_host,\n \"src-mac\": node_mac,\n \"src-port\": node_port,\n \"vlan-id\":node_vlan,\n \"active\": \"true\",\n \"actions\": \"set-src-port=80,set-src-ip=\" + expr + \",output=normal\"\n }\n pusher.remove({\"name\":\"request_hands_off-\" + node_host + \"-\" + node_port + \"-\" + expr})\n pusher.remove({\"name\":\"request_out-\" + node_host + \"-\" + node_port + \"-\" + expr})\n pusher.remove({\"name\":\"request_in-\" + node_host + \"-\" + node_port + \"-\" + expr})\n pusher.set(request_hands_off)\n pusher.set(request_out)\n pusher.set(request_in)",
"def sendArpReply(logger, device, destination, count=3, quiet=False, blocking=True):\n\n args = [Arping.ARPING_COMMAND_NAME, \n Arping.INTERFACE_OPTION, device, \n Arping.COUNT_OPTION, str(count),\n Arping.ARP_REPLY_OPTION]\n\n if quiet is True:\n args.append(Arping.QUIET_OPTION)\n\n # must set destination as last arg\n args.append(destination) \n\n rc = Command.execute(logger, Arping.ARPING_COMMAND_NAME, args, blocking=blocking)\n\n return rc",
"def add_udp(ctx, global_ip, global_port, local_ip, local_port, nat_type, twice_nat_id):\n\n # Verify the ip address format \n if is_valid_ipv4_address(local_ip) is False:\n ctx.fail(\"Given local ip address {} is invalid. Please enter a valid local ip address !!\".format(local_ip))\n\n if is_valid_ipv4_address(global_ip) is False:\n ctx.fail(\"Given global ip address {} is invalid. Please enter a valid global ip address !!\".format(global_ip))\n\n config_db = ConfigDBConnector()\n config_db.connect()\n\n entryFound = False\n table = \"STATIC_NAPT\"\n key = \"{}|UDP|{}\".format(global_ip, global_port)\n dataKey1 = 'local_ip'\n dataKey2 = 'local_port'\n dataKey3 = 'nat_type'\n dataKey4 = 'twice_nat_id'\n\n data = config_db.get_entry(table, key)\n if data:\n if data[dataKey1] == local_ip and data[dataKey2] == str(local_port):\n click.echo(\"Trying to add static napt entry, which is already present.\")\n entryFound = True\n\n if nat_type == 'snat':\n ipAddress = local_ip\n else:\n ipAddress = global_ip\n\n if isIpOverlappingWithAnyStaticEntry(ipAddress, 'STATIC_NAT') is True:\n ctx.fail(\"Given entry is overlapping with existing NAT entry !!\")\n\n if entryFound is False:\n counters_db = SonicV2Connector()\n counters_db.connect(counters_db.COUNTERS_DB)\n snat_entries = 0\n max_entries = 0\n exists = counters_db.exists(counters_db.COUNTERS_DB, 'COUNTERS_GLOBAL_NAT:Values')\n if exists:\n counter_entry = counters_db.get_all(counters_db.COUNTERS_DB, 'COUNTERS_GLOBAL_NAT:Values')\n if 'SNAT_ENTRIES' in counter_entry:\n snat_entries = counter_entry['SNAT_ENTRIES']\n if 'MAX_NAT_ENTRIES' in counter_entry:\n max_entries = counter_entry['MAX_NAT_ENTRIES']\n \n if int(snat_entries) >= int(max_entries):\n click.echo(\"Max limit is reached for NAT entries, skipping adding the entry.\")\n entryFound = True\n\n if entryFound is False:\n count = 0\n if twice_nat_id is not None:\n count = getTwiceNatIdCountWithStaticEntries(twice_nat_id, table, count)\n count = getTwiceNatIdCountWithDynamicBinding(twice_nat_id, count, None)\n if count > 1:\n ctx.fail(\"Same Twice nat id is not allowed for more than 2 entries!!\")\n\n if nat_type is not None and twice_nat_id is not None:\n config_db.set_entry(table, key, {dataKey1: local_ip, dataKey2: local_port, dataKey3: nat_type, dataKey4: twice_nat_id})\n elif nat_type is not None:\n config_db.set_entry(table, key, {dataKey1: local_ip, dataKey2: local_port, dataKey3: nat_type})\n elif twice_nat_id is not None:\n config_db.set_entry(table, key, {dataKey1: local_ip, dataKey2: local_port, dataKey4: twice_nat_id})\n else:\n config_db.set_entry(table, key, {dataKey1: local_ip, dataKey2: local_port})",
"def create_log_forwarding_destinations(\n self,\n label: str,\n sources: list,\n consumer: str,\n credentials: dict,\n address: str\n ) -> Session:\n uri = f\"{self.uri}/log-forwarding-destinations\"\n data = {\n \"label\": label,\n \"sources\": sources,\n \"consumer\": consumer,\n \"credentials\": credentials,\n \"address\": address\n }\n response = self.request(uri=uri, method=\"POST\", data=data)\n\n return response"
] | [
"0.5947996",
"0.5868191",
"0.5803545",
"0.555319",
"0.52671266",
"0.52026045",
"0.4991366",
"0.49749583",
"0.49404338",
"0.493424",
"0.4829489",
"0.47948322",
"0.47238278",
"0.47180727",
"0.47138092",
"0.47075",
"0.46764702",
"0.46099842",
"0.4608544",
"0.45971972",
"0.45748004",
"0.456917",
"0.4564866",
"0.45435753",
"0.45298958",
"0.45284447",
"0.45153856",
"0.44910046",
"0.4461844",
"0.44483545"
] | 0.72208995 | 0 |
Edits the SNMPv3 trap destination associated with id [Example] ${resp} = Fusion Api Edit Appliance SNMPv3 Trap Destination | | | | | def fusion_api_edit_appliance_snmpv3_trap_destination(self, body=None, id=None, api=None, headers=None): # pylint: disable=W0622
return self.snmpv3trap.put(body=body, id=id, api=api, headers=headers) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def fusion_api_update_appliance_trap_destination(self, body, id, api=None, headers=None): # pylint: disable=W0622\n return self.trap.put(body=body, id=id, api=api, headers=headers)",
"def fusion_api_add_appliance_snmpv3_trap_destination(self, body=None, api=None, headers=None):\n return self.snmpv3trap.create(body=body, api=api, headers=headers)",
"def fusion_api_add_or_update_appliance_trap_destination(self, body=None, id=None, api=None, headers=None): # pylint: disable=W0622\n return self.trap.create(body=body, id=id, api=api, headers=headers)",
"def fusion_api_delete_appliance_snmpv3_trap_destination(self, id=None, api=None, headers=None): # pylint: disable=W0622\n return self.snmpv3trap.delete(id=id, api=api, headers=headers)",
"def fusion_api_get_appliance_snmpv3_trap_destinations(self, id=None, param='', api=None, headers=None): # pylint: disable=W0622\n return self.snmpv3trap.get(id=id, param=param, api=api, headers=headers)",
"def fusion_api_edit_appliance_snmpv3_trap_forwarding_user(self, body=None, id=None, api=None, headers=None): # pylint: disable=W0622\n return self.snmpv3user.put(body=body, id=id, api=api, headers=headers)",
"async def put(self):\r\n data = await self.request.json()\r\n agent_uuid = data[\"agent_uuid\"]\r\n ip_address = data[\"ip_address\"]\r\n agent_obj = Agent.filter(Agent.uuid == agent_uuid).first()\r\n if not agent_obj:\r\n response_obj = {\"status\": \"failed\"}\r\n logger.error(\"No agent found!!!\")\r\n return web.Response(text=str(response_obj), status=500)\r\n try:\r\n Agent.update(ip_address=ip_address).where(Agent.uuid == agent_uuid)\r\n logger.info(\"Agent updated!!!\")\r\n return web.Response(text=\"successful\", status=200)\r\n except Exception as ex:\r\n response_obj = {\"status\": \"failed\"}\r\n error_message = str(ex)\r\n logger.error(error_message)\r\n return web.Response(text=str(response_obj), status=500)",
"def put(self, id):\n return None, 204",
"def put(self, id):\n context = request.environ.get('context')\n obj = dbapi.netdevice_data_update(context, id, request.json)\n resp = {\"data\": jsonutils.to_primitive(obj.variables)}\n return resp, 200, None",
"def put(self,id):\n adm = Administration()\n s = Suggestion.from_dict(api.payload)\n if s is not None:\n s.set_id(id)\n adm.save_suggestion(s)\n return s, 200\n\n else:\n return '', 500",
"def traceroute(self,dest):\n\t\tself.tn.write('traceroute %s\\n'%(dest))\n\t\tself.tn.write('exit\\n')\n\t\tresp = self.tn.read_all()\n\t\treturn resp",
"def post_route_target_update(self, resource_id, resource_dict):\n pass",
"def fusion_api_delete_appliance_trap_destination(self, id=None, api=None, headers=None): # pylint: disable=W0622\n return self.trap.delete(id=id, api=api, headers=headers)",
"def put(self, ip):\n data = request.json\n update_ue_sub(ip, data)\n return None, 204",
"def fusion_api_edit_switch(self, body, uri, api=None, headers=None):\n return self.switch.update(body, uri, api, headers)",
"def put(self):\n try:\n rest_params = common.get_restful_params(self.request.uri)\n if rest_params is None:\n common.echo_json_response(self, 405, \"Not Implemented: Use /agents/ interface\")\n return\n\n if \"agents\" not in rest_params:\n common.echo_json_response(self, 400, \"uri not supported\")\n logger.warning('PUT returning 400 response. uri not supported: ' + self.request.path)\n return\n\n agent_id = rest_params[\"agents\"]\n if agent_id is None:\n common.echo_json_response(self, 400, \"uri not supported\")\n logger.warning(\"PUT returning 400 response. uri not supported\")\n\n agent = self.db.get_agent(agent_id)\n\n if agent is not None:\n common.echo_json_response(self, 404, \"agent id not found\")\n logger.info('PUT returning 404 response. agent id: ' + agent_id + ' not found.')\n\n if \"reactivate\" in rest_params:\n agent['operational_state']=cloud_verifier_common.CloudAgent_Operational_State.START\n asyncio.ensure_future(self.process_agent(agent, cloud_verifier_common.CloudAgent_Operational_State.GET_QUOTE))\n common.echo_json_response(self, 200, \"Success\")\n logger.info('PUT returning 200 response for agent id: ' + agent_id)\n elif \"stop\" in rest_params:\n # do stuff for terminate\n logger.debug(\"Stopping polling on %s\"%agent_id)\n self.db.update_agent(agent_id,'operational_state',cloud_verifier_common.CloudAgent_Operational_State.TENANT_FAILED)\n\n common.echo_json_response(self, 200, \"Success\")\n logger.info('PUT returning 200 response for agent id: ' + agent_id)\n else:\n common.echo_json_response(self, 400, \"uri not supported\")\n logger.warning(\"PUT returning 400 response. uri not supported\")\n\n except Exception as e:\n common.echo_json_response(self, 400, \"Exception error: %s\"%e)\n logger.warning(\"PUT returning 400 response. Exception error: %s\"%e)\n logger.exception(e)\n self.finish()",
"def put(self, id=None):\n if id:\n slip = test4ValidEntity(id)\n if slip == None:\n self.response.set_status(404)\n else:\n slip_data = json.loads(self.request.body)\n if 'number' in slip_data:\n \"\"\" Test for requested Slip number already in use. \"\"\"\n query = Slip.query()\n results = query.fetch(limit = MAX_SLIPS)\n for match in results:\n if slip_data['number'] == match.number:\n slip.number = getSlipNum()\n else:\n slip.number = slip_data['number']\n if 'current_boat' in slip_data:\n if slip.current_boat == None:\n slip.current_boat = slip_data['current_boat']\n else:\n \"\"\" Query for the Boat and change at_sea to False. \"\"\"\n query = Boat.query(Boat.id == slip_data['current_boat'])\n result = query.fetch(limit = 1)\n if 'at_sea' in result:\n result.at_sea = False\n slip.current_boat = slip_data['current_boat']\n else:\n slip.current_boat = None\n if 'arrival_date' in slip_data:\n slip.arrival_date = slip_data['arrival_date']\n else:\n slip.arrival_date = None\n if 'departed_boat' in slip_data:\n slip.departed_boat = slip_data['departed_boat']\n else:\n slip.departed_boat = None\n if 'departure_date' in slip_data:\n slip.departure_date = slip_data['departure_date']\n else:\n slip.departure_date = None\n slip.put()\n slip_dict = slip.to_dict()\n del slip_dict['departure_history']\n self.response.headers['Content-Type'] = 'application/json'\n self.response.write(json.dumps(slip_dict))",
"def fusion_api_get_appliance_trap_destinations(self, id=None, param='', api=None, headers=None): # pylint: disable=W0622\n return self.trap.get(id=id, param=param, api=api, headers=headers)",
"def put(self, id):\n return update_msg(request.json, id)",
"def cmd_edit(loan_id):\n db = get_db()\n loan = Loan(None, None, None, loan_id = loan_id)\n loan.load(db)\n\n errors = []\n reply = {}\n\n # bad id, loan not found\n #\n if loan.payer_ssn is None:\n reply['status'] = 404\n reply['message'] = 'Command failed: edit ' + str(loan_id)\n errors.append({\"field\": \"loan_id\",\n \"message\": \"Could not find a loan with that id\"})\n\n reply['errors'] = errors\n resp = jsonify(reply)\n resp.status_code = reply['status']\n\n return resp\n\n # no status passed\n #\n if 'loan_status' not in request.args:\n reply['status'] = 404\n reply['message'] = 'Command failed: edit ' + str(loan_id)\n errors.append({\"field\": \"loan_status\",\n \"message\": \"Must supply loan_status in request\"})\n\n reply['errors'] = errors\n resp = jsonify(reply)\n resp.status_code = reply['status']\n\n return resp\n\n # cleanup/check passed status and apply\n #\n raw = request.args['loan_status']\n raw = raw.strip().title()\n loan.loan_status = raw\n\n try:\n loan.validate_loan_status()\n loan.save(db)\n except InvalidLoanStatusError as e:\n reply['status'] = 404\n reply['message'] = 'Command failed: edit ' + str(loan_id)\n\n errors.append({\"field\": \"loan_status\",\n \"message\": \"Legal values: New, Approved, Denied, Review\"})\n\n else:\n reply['status'] = 200\n reply['message'] = 'command succeeded: edit ' + str(loan_id)\n reply['loan'] = loan.to_jsonable()\n\n if len(errors):\n reply['errors'] = errors\n\n resp = jsonify(reply)\n resp.status_code = reply['status']\n\n return resp",
"def put(self, id):\n adm = Administration()\n print(api.payload)\n p = Person.from_dict(api.payload)\n if p is not None:\n p.set_id(id)\n adm.save_person(p)\n return p, 200\n\n else:\n return '', 500",
"def edit_a_parcel(destination, id):\n query = \"\"\"UPDATE parcels SET destination = %s WHERE id = %s\"\"\"\n tuple =(destination , id)\n db.insert(query, tuple)",
"def put(self, id):\n data = request.json\n update_entry(id, data)\n return None, 204",
"def put(self, id):\n data = request.json\n update_scenario(id, data)\n return None, 204",
"def put(self, order_id):\n\n ###############\n # json_input = self.get_input()\n # log.pp(json_input)\n # key = 'request_id'\n # order_id = json_input.get(key)\n # if order_id is None:\n # error = \"Order ID parameter '%s': missing\" % key\n # return self.send_errors(error, code=hcodes.HTTP_BAD_REQUEST)\n # else:\n # order_id = str(order_id)\n\n ###############\n log.info(\"Order id '%s' has to be restricted\", order_id)\n\n # Create the path\n log.info(\"Order request: %s\", order_id)\n imain = self.get_service_instance(service_name='irods')\n order_path = self.get_order_path(imain, order_id)\n log.debug(\"Order path: %s\", order_path)\n\n ###############\n error = \"Order '%s' not enabled or you have no permissions\" % order_id\n if not imain.is_collection(order_path):\n return self.send_errors(error, code=hcodes.HTTP_BAD_REQUEST)\n else:\n metadata, _ = imain.get_metadata(order_path)\n key = 'restricted'\n if key not in metadata:\n return self.send_errors(error, code=hcodes.HTTP_BAD_REQUEST)\n else:\n string = metadata.get(key)\n import json\n restricted_users = json.loads(string)\n # log.pp(restricted_users)\n if len(restricted_users) < 1:\n return self.send_errors(\n error, code=hcodes.HTTP_BAD_REQUEST)\n\n ###############\n obj = self.init_endpoint()\n if obj.username not in restricted_users:\n return self.send_errors(error, code=hcodes.HTTP_BAD_REQUEST)\n\n ###############\n # irods copy\n label = \"%s_%s.%s\" % (obj.username, '123', 'zip')\n ipath = self.complete_path(order_path, label)\n self.stream_to_irods(imain, ipath)\n log.verbose(\"Uploaded: %s\", ipath)\n\n ###############\n # define zip final path\n from utilities import path\n filename = 'order_%s' % order_id\n # zip_file_name = path.append_compress_extension(filename)\n zip_ipath = path.join(order_path, filename, return_str=True)\n\n ###############\n # launch container\n self.ingest_restricted_zip(imain, order_id, zip_ipath, ipath)\n\n ###############\n response = {\n 'order_id': order_id,\n 'status': 'filled',\n }\n return self.force_response(response)",
"def patch(self, id=None):\n if id:\n slip = test4ValidEntity(id)\n if slip == None:\n self.response.set_status(404)\n else:\n slip_data = json.loads(self.request.body)\n if 'number' in slip_data:\n \"\"\" Test for Slip number already taken. \"\"\"\n query = Slip.query()\n results = query.fetch(limit = MAX_SLIPS)\n if slip.number in results:\n slip.number = getSlipNum()\n else:\n slip.number = slip_data['number']\n if 'current_boat' in slip_data:\n if slip.current_boat == None:\n slip.current_boat = slip_data['current_boat']\n else:\n \"\"\" Query for the Boat and change at_sea to False. \"\"\"\n query = Boat.query(Boat.id == slip_data['current_boat'])\n result = query.fetch(limit = 1)\n if 'at_sea' in result:\n result.at_sea = False\n slip.current_boat = slip_data['current_boat']\n if 'arrival_date' in slip_data:\n slip.arrival_date = slip_data['arrival_date']\n if 'departed_boat' in slip_data:\n slip.departed_boat = slip_data['departed_boat']\n if 'departure_date' in slip_data:\n slip.departure_date = slip_data['departure_date']\n slip.put()\n slip_dict = slip.to_dict()\n del slip_dict['departure_history']\n self.response.headers['Content-Type'] = 'application/json'\n self.response.write(json.dumps(slip_dict))",
"def do_update(self, id, data):\n verrors = ValidationErrors()\n if not self.is_loaded():\n verrors.add('ipmi.update', f'{IPMIService.IPMI_DEV!r} could not be found')\n elif id not in self.channels():\n verrors.add('ipmi.update', f'IPMI channel number {id!r} not found')\n elif not data.get('dhcp'):\n for k in ['ipaddress', 'netmask', 'gateway']:\n if not data.get(k):\n verrors.add(f'ipmi_update.{k}', 'This field is required when dhcp is false.')\n verrors.check()\n\n def get_cmd(cmds):\n nonlocal id\n return ['ipmitool', 'lan', 'set', f'{id}'] + cmds\n\n rc = 0\n options = {'stdout': DEVNULL, 'stderr': DEVNULL}\n if data.get('dhcp'):\n rc |= run(get_cmd(id, ['dhcp']), **options).returncode\n else:\n rc |= run(get_cmd(['ipsrc', 'static']), **options).returncode\n rc |= run(get_cmd(['ipaddr', data['ipaddress']]), **options).returncode\n rc |= run(get_cmd(['netmask', data['netmask']]), **options).returncode\n rc |= run(get_cmd(['defgw', 'ipaddr', data['gateway']]), **options).returncode\n\n rc |= run(get_cmd(['vlan', 'id', f'{data.get(\"vlan\", \"off\")}']), **options).returncode\n\n rc |= run(get_cmd(['access', 'on']), **options).returncode\n rc |= run(get_cmd(['auth', 'USER', 'MD2,MD5']), **options).returncode\n rc |= run(get_cmd(['auth', 'OPERATOR', 'MD2,MD5']), **options).returncode\n rc |= run(get_cmd(['auth', 'ADMIN', 'MD2,MD5']), **options).returncode\n rc |= run(get_cmd(['auth', 'CALLBACK', 'MD2,MD5']), **options).returncode\n\n # Apparently tickling these ARP options can \"fail\" on certain hardware\n # which isn't fatal so we ignore returncode in this instance. See #15578.\n run(get_cmd(['arp', 'respond', 'on']), **options)\n run(get_cmd(['arp', 'generate', 'on']), **options)\n\n if passwd := data.get('password'):\n cp = run(get_cmd(['ipmitool', 'user', 'set', 'password', '2', passwd]), capture_output=True)\n if cp.returncode != 0:\n err = '\\n'.join(cp.stderr.decode().split('\\n'))\n raise CallError(f'Failed setting password: {err!r}')\n\n cp = run(['ipmitool', 'user', 'enable', '2'], capture_output=True)\n if cp.returncode != 0:\n err = '\\n'.join(cp.stderr.decode().split('\\n'))\n raise CallError(f'Failed enabling user: {err!r}')\n\n return rc",
"def put(self, id):\n req = api.payload\n try:\n result = update_task(\n get_db(),\n id,\n req[\"task\"],\n date.fromisoformat(req[\"due_by\"]),\n Status[req[\"status\"]],\n )\n return task_to_dict(result), 201\n except ValueError:\n api.abort(422, \"Invalid Status\")",
"def put(self,id):\r\n data = request.json\r\n return update(id=id,data=data)",
"def request_action(reqID, action):\n req = get_ride_request(reqID)\n req.status = action.lower().title()\n req.save();"
] | [
"0.624998",
"0.595249",
"0.5786376",
"0.5525899",
"0.54481405",
"0.5194094",
"0.51593745",
"0.51566964",
"0.5087404",
"0.50645834",
"0.4958187",
"0.4930071",
"0.4916984",
"0.49127147",
"0.4853215",
"0.4799228",
"0.47734588",
"0.4762119",
"0.47091353",
"0.46930063",
"0.4662609",
"0.46526968",
"0.46258226",
"0.4616318",
"0.4616082",
"0.4613461",
"0.45644167",
"0.45611393",
"0.4544934",
"0.45422754"
] | 0.7082064 | 0 |
Deletes the SNMPv3 trap destination associated with id [Example] ${resp} = Fusion Api Delete Appliance SNMPv3 Trap Destination | | | | def fusion_api_delete_appliance_snmpv3_trap_destination(self, id=None, api=None, headers=None): # pylint: disable=W0622
return self.snmpv3trap.delete(id=id, api=api, headers=headers) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def fusion_api_delete_appliance_trap_destination(self, id=None, api=None, headers=None): # pylint: disable=W0622\n return self.trap.delete(id=id, api=api, headers=headers)",
"def fusion_api_get_appliance_snmpv3_trap_destinations(self, id=None, param='', api=None, headers=None): # pylint: disable=W0622\n return self.snmpv3trap.get(id=id, param=param, api=api, headers=headers)",
"def fusion_api_delete_appliance_snmpv3_trap_forwarding_user(self, id=None, api=None, headers=None): # pylint: disable=W0622\n return self.snmpv3user.delete(id=id, api=api, headers=headers)",
"def do_DELETE(self):\n rest_params = common.get_restful_params(self.path)\n if rest_params is None:\n common.echo_json_response(self, 405, \"Not Implemented: Use /agents/ interface\")\n return\n\n if \"agents\" not in rest_params:\n common.echo_json_response(self, 400, \"uri not supported\")\n logger.warning('DELETE agent returning 400 response. uri not supported: ' + self.path)\n return\n\n agent_id = rest_params[\"agents\"]\n\n if agent_id is not None:\n if self.server.db.remove_agent(agent_id):\n #send response\n common.echo_json_response(self, 200, \"Success\")\n return\n else:\n #send response\n common.echo_json_response(self, 404)\n return\n else:\n common.echo_json_response(self, 404)\n return",
"def delete(self, id):\n\n ns.abort(404, 'This API is not supported yet.')",
"def delete(self):\n rest_params = common.get_restful_params(self.request.uri)\n if rest_params is None:\n common.echo_json_response(self, 405, \"Not Implemented: Use /agents/ interface\")\n return\n\n if \"agents\" not in rest_params:\n common.echo_json_response(self, 400, \"uri not supported\")\n return\n\n agent_id = rest_params[\"agents\"]\n\n if agent_id is None:\n common.echo_json_response(self, 400, \"uri not supported\")\n logger.warning('DELETE returning 400 response. uri not supported: ' + self.request.path)\n\n agent = self.db.get_agent(agent_id)\n\n if agent is None:\n common.echo_json_response(self, 404, \"agent id not found\")\n logger.info('DELETE returning 404 response. agent id: ' + agent_id + ' not found.')\n return\n\n op_state = agent['operational_state']\n if op_state == cloud_verifier_common.CloudAgent_Operational_State.SAVED or \\\n op_state == cloud_verifier_common.CloudAgent_Operational_State.FAILED or \\\n op_state == cloud_verifier_common.CloudAgent_Operational_State.TERMINATED or \\\n op_state == cloud_verifier_common.CloudAgent_Operational_State.TENANT_FAILED or \\\n op_state == cloud_verifier_common.CloudAgent_Operational_State.INVALID_QUOTE:\n self.db.remove_agent(agent_id)\n common.echo_json_response(self, 200, \"Success\")\n logger.info('DELETE returning 200 response for agent id: ' + agent_id)\n else:\n self.db.update_agent(agent_id, 'operational_state',cloud_verifier_common.CloudAgent_Operational_State.TERMINATED)\n common.echo_json_response(self, 202, \"Accepted\")\n logger.info('DELETE returning 202 response for agent id: ' + agent_id)",
"def delete(id):\n r = requests.delete(API_ROUTE + '/' + str(id), headers={'Auth': _auth()})\n if r.status_code != requests.codes.no_content:\n return r.text, r.status_code\n return redirect(url_for('index'), code=278)",
"def handle_delete(self, api, command):\n return self._make_request_from_command('DELETE', command)",
"def delete_item(id):\n return '', 201",
"async def delete(self):\r\n try:\r\n data = await self.request.json()\r\n agent_uuid = data.get(\"agent_uuid\")\r\n agent_to_delete = Agent.filter(Agent.uuid == agent_uuid).first()\r\n sys_id = (\r\n System.select().where(System.agent_uuid == agent_to_delete).execute()\r\n )\r\n if sys_id:\r\n logger.error(\"Agent not deleted\")\r\n return web.Response(text=\"Agent not deleted.\")\r\n else:\r\n agent_to_delete.delete_instance()\r\n logger.info(\"Agent deleted successfully\")\r\n return web.Response(text=\"Agent deleted successfully.\")\r\n except Exception as ex:\r\n error_message = str(ex)\r\n logger.error(error_message)\r\n return web.Response(text=error_message, status=500)",
"def delete_data(request, result_id):\n result = TestResult.objects.get(id=result_id)\n result.delete()\n gun = result.bullet.gun\n return HttpResponseRedirect(reverse('gun', args=[gun.id]))",
"def delete(short_id):\n try:\n url = Url.get(short_id)\n except:\n return jsonify({\"Error\", \"No Such ID\"})\n\n url.delete()\n return jsonify({\"statusCode\": 301,})",
"def post_route_target_delete(self, resource_id, resource_dict):\n pass",
"def delete(self, id):\n return self._post(\n request=ApiActions.DELETE.value,\n uri=ApiUri.ACTIONS.value,\n params={'id': id}\n )",
"def delete(self, id):\n return self._post(\n request=ApiActions.DELETE.value,\n uri=ApiUri.ACTIONS.value,\n params={'id': id}\n )",
"def delete(self, args):\n try:\n db = get_db('intents')\n intents = db.delete_intent(args['intent'])\n resp = jsonify(intents=intents)\n resp.status_code = 200\n return resp\n except DatabaseError as error:\n resp = jsonify(error=error)\n resp.status_code = 500\n return resp\n except DatabaseInputError as error:\n resp = jsonify(error=error)\n resp.status_code = 400\n return resp",
"def delete_port_forward_rule(self, port_forward_rule_id): \n params = {'command':'deletePortForwardingRule',\n 'id':port_forward_rule_id} \n\n try:\n response = self.send_request(params)\n res = json.loads(response)\n clsk_job_id = res['deleteportforwardingruleresponse']['jobid']\n self.logger.debug('Start job - deletePortForwardingRule: %s' % res)\n return clsk_job_id\n except KeyError as ex:\n raise ClskError('Error parsing json data: %s' % ex)\n except ApiError as ex:\n raise ClskError(ex)",
"def delete(damage_id):\n logged_in_user = g.user\n damage_id = str(damage_id)\n\n deleted_damage = libdamage.delete_damage(damage_id=damage_id, \n logged_in_user=logged_in_user)\n\n rci_id = deleted_damage['rci_id']\n\n return redirect(url_for('rci.edit', rci_id=rci_id))",
"def delete(self, id):\n return self.app.post('/delete/' + str(id), data=dict(id=id),\n follow_redirects=True)",
"def delete(session, vtrqid, vpid):\n # validate vtrqid is an int\n # validate vpid\n #\n url = '/'.join([session.base_url(),\n 'vtrq/vp',\n str(vtrqid),\n vpid])\n r = requests.delete(url)\n return fulfill202(session, r)",
"def cloudflare_waf_firewall_rule_delete_command(client: Client, args: Dict[str, Any]) -> CommandResults:\n rule_id = args['id']\n zone_id = args.get('zone_id', client.zone_id)\n\n response = client.cloudflare_waf_firewall_rule_delete_request(rule_id, zone_id)\n\n return CommandResults(\n readable_output=f'Firewall rule {rule_id} was successfully deleted.',\n raw_response=response\n )",
"def doRegistrarDelete(\n registrar_ip: str, registrar_port: str, agent_id: str, tls_context: Optional[ssl.SSLContext]\n) -> Dict[str, Any]:\n\n client = RequestsClient(f\"{registrar_ip}:{registrar_port}\", True, tls_context=tls_context)\n response = client.delete(f\"/v{api_version}/agents/{agent_id}\")\n response_body: Dict[str, Any] = response.json()\n\n if response.status_code == 200:\n logger.debug(\"Registrar deleted.\")\n else:\n logger.warning(\"Status command response: %s Unexpected response from registrar.\", response.status_code)\n keylime_logging.log_http_response(logger, logging.WARNING, response_body)\n\n return response_body",
"def delete_firewall_rule(self, firewall_rule_id): \n params = {'command':'deleteFirewallRule',\n 'id':firewall_rule_id} \n\n try:\n response = self.send_request(params)\n res = json.loads(response)\n clsk_job_id = res['deletefirewallruleresponse']['jobid']\n self.logger.debug('Start job - deleteFirewallRule: %s' % res)\n return clsk_job_id\n except KeyError as ex:\n raise ClskError('Error parsing json data: %s' % ex)\n except ApiError as ex:\n raise ClskError(ex)",
"def fusion_api_edit_appliance_snmpv3_trap_destination(self, body=None, id=None, api=None, headers=None): # pylint: disable=W0622\n return self.snmpv3trap.put(body=body, id=id, api=api, headers=headers)",
"def delete(self, id: int):\n self._select_interface(self._rc_delete, self._http_delete, id)",
"def handle_delete(self, request, user, *args, **kwargs):\n try:\n\n self.log.info('Delete rule from an environment')\n\n # User permission\n if not has_perm(user, AdminPermission.VIP_VALIDATION, AdminPermission.WRITE_OPERATION):\n self.log.error(\n u'User does not have permission to perform the operation.')\n raise UserNotAuthorizedError(None)\n\n id_rule = kwargs.get('id_rule')\n\n if not is_valid_int_greater_zero_param(id_rule):\n self.log.error(\n u'The id_rule parameter is not a valid value: %s.', id_rule)\n raise InvalidValueError(None, 'id_rule', id_rule)\n\n rule = Rule.objects.get(pk=id_rule)\n rule.delete()\n\n return self.response(dumps_networkapi({}))\n\n except InvalidValueError, e:\n return self.response_error(269, e.param, e.value)\n except Rule.DoesNotExist:\n return self.response_error(358)\n except UserNotAuthorizedError:\n return self.not_authorized()\n except Exception, e:\n return self.response_error(1)",
"def delete(self, id):\n delete_entry(id)\n return None, 204",
"def delete(self, doc_id):\n\n\t\t### DEBUGGING\n\t\tprint()\n\t\tprint(\"-+- \"*40)\n\t\tlog.debug( \"ROUTE class : %s\", self.__class__.__name__ )\n\n\t\t### DEBUG check\n\t\t# log.debug (\"payload : \\n{}\".format(pformat(ns.payload)))\n\n\t\t### check client identity and claims\n\t\tclaims \t\t\t\t= get_jwt_claims() \n\t\tlog.debug(\"claims : \\n %s\", pformat(claims) )\n\n\t\t### query db from generic function \t\t\n\t\tresults, response_code\t= Query_db_delete (\n\t\t\tns, \n\t\t\tmodels,\n\t\t\tdocument_type,\n\t\t\tdoc_id,\n\t\t\tclaims,\n\t\t\troles_for_delete \t= [\"admin\"],\n\t\t\tauth_can_delete \t= [\"owner\"],\n\t\t)\n\n\t\tlog.debug(\"results : \\n%s \", pformat(results) )\n\n\n\t\treturn results, response_code",
"def delete(self, id):\n context = request.environ.get('context')\n dbapi.netdevice_data_delete(context, id, request.json)\n return None, 204, None",
"def delete(self, unique_id):\n return request(\n API_LIST.DNS_DELETE.value,\n {\n 'email': self.email,\n 'token': self.token,\n 'id': unique_id\n }\n )"
] | [
"0.7257434",
"0.5992244",
"0.5957023",
"0.5828099",
"0.5784412",
"0.5744131",
"0.5703777",
"0.56876636",
"0.5585665",
"0.55755144",
"0.55749446",
"0.556508",
"0.55482614",
"0.5519527",
"0.5519527",
"0.55003023",
"0.5499752",
"0.5490947",
"0.54691553",
"0.54597276",
"0.5442648",
"0.5440005",
"0.5432868",
"0.5418923",
"0.539827",
"0.5396952",
"0.5380258",
"0.5355865",
"0.53512144",
"0.53457606"
] | 0.78651416 | 0 |
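Each row above pairs a natural-language query with a positive document and a list of hard negatives, and the metadata block declares a triplet objective over (query, document, negatives); the trailing floats are per-candidate similarity scores followed by a document score and rank. Below is a minimal sketch of how one such triplet could be re-scored with an off-the-shelf bi-encoder; the model choice and the already-parsed row fields are assumptions, not part of this dump.

# Sketch: cosine-scoring one (query, document, negatives) row, assuming the
# row has already been parsed into Python strings. Requires sentence-transformers.
import numpy as np
from sentence_transformers import SentenceTransformer

model = SentenceTransformer("all-MiniLM-L6-v2")  # example model, not the one used for this dump

def score_row(query, document, negatives):
    # Encode everything in one batch; with normalized embeddings a dot
    # product equals cosine similarity, the same scale as the score lists
    # stored alongside each row.
    emb = model.encode([query, document] + negatives, normalize_embeddings=True)
    q = emb[0]
    return {
        "document_score": float(np.dot(q, emb[1])),
        "negative_scores": [float(np.dot(q, e)) for e in emb[2:]],
    }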
Returns appliance SNMPv3 trap forwarding users [Example] ${resp} = Fusion Api Get Appliance SNMPv3 Trap Forwarding Users | | | | | def fusion_api_get_appliance_snmpv3_trap_forwarding_users(self, id=None, param='', api=None, headers=None): # pylint: disable=W0622
return self.snmpv3user.get(id=id, param=param, api=api, headers=headers) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def fusion_api_add_appliance_snmpv3_trap_forwarding_user(self, body=None, api=None, headers=None):\n return self.snmpv3user.create(body=body, api=api, headers=headers)",
"def fusion_api_delete_appliance_snmpv3_trap_forwarding_user(self, id=None, api=None, headers=None): # pylint: disable=W0622\n return self.snmpv3user.delete(id=id, api=api, headers=headers)",
"def fusion_api_edit_appliance_snmpv3_trap_forwarding_user(self, body=None, id=None, api=None, headers=None): # pylint: disable=W0622\n return self.snmpv3user.put(body=body, id=id, api=api, headers=headers)",
"def get_user():\n\treturn '1', 200",
"def list_user():\n\tbegin = 0\n\tlength = 25\n\ttry:\n\t\tif request.json != None:\n\t\t\tbegin = int(request.json.get('begin', 0))\n\t\t\tlength = int(request.json.get('length', 25))\n\texcept:\n\t\tabort(403)\n\tif length > 100 :\n\t\tlength = 100\n\tuserList = User.list(begin, length)\n\tif userList == None:\n\t\tabort(400)\n\treturn jsonify({'users': map(lambda(e): e.output(), userList), 'begin': begin, 'length': len(userList)})",
"def getUsers(client, req):\n client.sendTarget(req[\"id\"], key=\"get.users\", payload={\"payload\": magic.users})",
"def get_user(ranger_url, user, admin_username_password):\n url = format(\"{ranger_url}/service/xusers/users?name={user}\")\n\n base_64_string = base64.encodestring(admin_username_password).replace('\\n', '')\n\n request = urllib2.Request(url)\n request.add_header('Content-Type', 'application/json')\n request.add_header('Accept', 'application/json')\n request.add_header('Authorization', format('Basic {base_64_string}'))\n\n try:\n result = openurl(request, timeout=20)\n response_code = result.getcode()\n response = json.loads(result.read())\n if response_code == 200 and len(response['vXUsers']) >= 0:\n for vxuser in response['vXUsers']:\n if vxuser['name'] == user:\n Logger.info(format(\"User with username {user} exists in Ranger Admin\"))\n return vxuser\n Logger.info(format(\"User with username {user} doesn't exist in Ranger Admin\"))\n return None\n else:\n Logger.error(format(\"Unable to get {user_name} user in Ranger Admin\"))\n return None\n except urllib2.HTTPError as e:\n raise Fail(\"HTTPError while getting \" + str(user) + \" user. Reason = \" + str(e.code))\n except urllib2.URLError as e:\n raise Fail(\"URLError while getting \" + str(user) + \" user. Reason = \" + str(e.reason))\n except TimeoutError:\n raise Fail(\"Connection timeout error while getting \" + str(user) + \" user.\")\n except Exception as err:\n raise Fail(format(\"Error while getting {user} user. Reason = {err}\"))",
"def get_user_list(self, connection):\n http = get_web_service(connection)\n try:\n req = http.request('GET', connection[\"url\"] + '/users/?_format=json')\n data = json.loads(req.data.decode('utf-8'))\n # print(json.dumps(data, indent=4, sort_keys=True))\n return data\n except urllib3.exceptions.HTTPError as e:\n print(\"Connection error\")\n print(e)",
"def get_users():\n table_response = USER_FAVORITES_TABLE.scan()\n return table_response['Items']",
"def get_users():\n return Response(f\"{User.get_all_users()}\", 200, mimetype='text/plain')",
"def get_user_details(self, response):\n\n log.info(str(response) + \"-\" * 80)\n log.info(str(dir(self)) + \"-\" * 80)\n\n return response",
"def get_users(self):\n\t\tself.ise.headers.update({'Accept': 'application/vnd.com.cisco.ise.identity.internaluser.1.1+xml'})\n\n\t\tresp = self.ise.get('{0}/config/internaluser'.format(self.url_base))\n\n\t\tresult = {\n\t\t\t'success': False,\n\t\t\t'response': '',\n\t\t\t'error': '',\n\t\t}\n\n\t\tjson_res = ERS._to_json(resp.text)['ns3:searchResult']\n\n\t\tif resp.status_code == 200 and int(json_res['@total']) > 1:\n\t\t\tresult['success'] = True\n\t\t\tresult['response'] = [(i['@name'], i['@id'])\n\t\t\t\t\t\t\t\t for i in json_res['ns3:resources']['ns5:resource']]\n\t\t\treturn result\n\n\t\telif resp.status_code == 200 and int(json_res['@total']) == 1:\n\t\t\tresult['success'] = True\n\t\t\tresult['response'] = [(json_res['ns3:resources']['ns5:resource']['@name'],\n\t\t\t\t\t\t\t\t json_res['ns3:resources']['ns5:resource']['@id'])]\n\t\t\treturn result\n\n\t\telif resp.status_code == 200 and int(json_res['@total']) == 0:\n\t\t\tresult['success'] = True\n\t\t\tresult['response'] = []\n\t\t\treturn result\n\n\t\telse:\n\t\t\tresult['response'] = ERS._to_json(resp.text)['ns3:ersResponse']['messages']['message']['title']\n\t\t\tresult['error'] = resp.status_code\n\t\t\treturn result",
"def get(self):\n\n user = None\n if self.request.headers.get('X-Pp-User'):\n user = self.request.headers['X-Pp-User']\n\n result_json = {\n \"user\": user\n }\n\n self.success(result_json)",
"def request_user_info():\n session = requests.Session()\n session.headers = {\n 'Authorization': f'Bearer {current_access_token}',\n }\n retries = Retry(\n total=5, connect=3, read=3, status=3,\n status_forcelist=[408, 500, 502, 503, 504],\n backoff_factor=0.2,\n respect_retry_after_header=True,\n )\n base_url = current_app.config['AUTH0_BASE_URL']\n adapter = requests.adapters.HTTPAdapter(max_retries=retries)\n session.mount(base_url, adapter)\n\n info_request = session.get(base_url + '/userinfo', timeout=3.0)\n\n info_request.raise_for_status()\n user_info = info_request.json()\n return user_info",
"def fetch_users(self):\n data = self._make_request()\n return data['result']",
"def list_keystone_v3_users(self):\n LOG_OBJ.debug(\"List the users.\")\n\n _url = \"http://\" + self.host_ip + \":35357/v3/users\"\n _headers = {'x-auth-token': self.cloud_admin_info[\"token_domain\"],\n 'content-type': 'application/json'}\n _body = None\n\n response = self.request(\"GET\", _url, _headers, _body)\n\n if response is None:\n LOG_OBJ.error(\"No response from Server while creating user\")\n print (\"No response from Server while creating user\")\n return response\n\n if response.status not in [200, 201, 202, 203, 204]:\n LOG_OBJ.error(\" Creating user Failed with status %s \"\n \"and error : %s\" % (response.status, response.data))\n print (\" Creating user Failed with status %s \" %\n response.status)\n return response.status\n\n output = json.loads(response.data)\n LOG_OBJ.info(\"Users list : %s \" % output)\n print (\"Users list : %s \" % output)\n return output['users']",
"def list_most_attacked_users_command(client: Client, window: str, limit: str = None,\n page: str = None) -> CommandResults:\n\n raw_response = client.list_most_attacked_users(window, limit, page)\n outputs = raw_response\n threat_families = create_families_objects(dict_safe_get(outputs, [\"users\"]), \"threatStatistics\")\n\n most_attacked_users_output = tableToMarkdown('Most Attacked Users Information',\n outputs,\n headers=['totalVapUsers', 'interval', 'averageAttackIndex',\n 'vapAttackIndexThreshold'],\n headerTransform=pascalToSpace\n )\n\n threat_families_output = tableToMarkdown('Threat Families', threat_families,\n headers=['Mailbox', 'Threat Family Name', 'Threat Score'],\n headerTransform=pascalToSpace)\n\n readable_output = most_attacked_users_output + \"\\n\" + threat_families_output\n\n return CommandResults(\n readable_output=readable_output,\n outputs_prefix='Proofpoint.Vap',\n outputs=outputs,\n raw_response=raw_response,\n outputs_key_field='interval'\n )",
"def list_users(item):\n users = User.load_all_users(item)\n for user in users:\n print(user.username)",
"def list_users(self):\n _url = \"http://\" + self.host_ip + \":35357/v2.0/users\"\n _body = None\n _headers = {'Content-type': 'application/json',\n 'x-auth-token': self.cloud_admin_info['token_project']}\n\n response = self.request(\"GET\", _url, _headers, _body)\n if response is None:\n LOG_OBJ.error(\" no response from Server\")\n return response\n if response.status not in [200, 201, 202, 203, 204]:\n LOG_OBJ.error(\n \"get user list Failed with status %s \" %\n response.status)\n return response.status\n\n output = json.loads(response.data)\n LOG_OBJ.info(\"users List : %s\")\n return output[\"users\"]",
"def get_users(twitter, screen_names):\n request = robust_request(twitter, 'users/lookup', {'screen_name': screen_names}, max_tries=5)\n user_info = []\n for user in request:\n \tuser_info.append(user)\n return user_info",
"def user_list(request_dict):\n users = User.query.all()\n users_list = list()\n for user in users:\n users_list.append(user)\n\n return JSONTools.user_list_reply(users_list)",
"def retrieve_users(payload):\n selection = User.query.order_by(User.id).all()\n users = []\n for item in selection:\n formatted_user = item.format()\n users.append(formatted_user)\n\n return jsonify({\n 'success': True,\n 'total': len(users),\n 'users': users\n })",
"def get_wifi_users():\n response = requests.get(url=f\"http://{init_param['ryu_ip']}:8080/\\\nstats/flow/{init_param['br-int_dpid']}\")\n rules = response.json()[init_param['br-int_dpid']]\n wifi_users = []\n for rule in rules:\n try:\n vlc_ip = rule[\"match\"][\"nw_dst\"]\n except KeyError:\n continue\n else:\n wifi_users.append(vlc_ip)\n return wifi_users",
"def get_users(display_name: Optional[str] = None,\n ids: Optional[Sequence[str]] = None,\n instance_id: Optional[str] = None,\n mobile: Optional[str] = None,\n name_regex: Optional[str] = None,\n output_file: Optional[str] = None,\n source: Optional[str] = None,\n source_user_id: Optional[str] = None,\n status: Optional[str] = None,\n user_name: Optional[str] = None,\n opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetUsersResult:\n __args__ = dict()\n __args__['displayName'] = display_name\n __args__['ids'] = ids\n __args__['instanceId'] = instance_id\n __args__['mobile'] = mobile\n __args__['nameRegex'] = name_regex\n __args__['outputFile'] = output_file\n __args__['source'] = source\n __args__['sourceUserId'] = source_user_id\n __args__['status'] = status\n __args__['userName'] = user_name\n opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)\n __ret__ = pulumi.runtime.invoke('alicloud:bastionhost/getUsers:getUsers', __args__, opts=opts, typ=GetUsersResult).value\n\n return AwaitableGetUsersResult(\n display_name=pulumi.get(__ret__, 'display_name'),\n id=pulumi.get(__ret__, 'id'),\n ids=pulumi.get(__ret__, 'ids'),\n instance_id=pulumi.get(__ret__, 'instance_id'),\n mobile=pulumi.get(__ret__, 'mobile'),\n name_regex=pulumi.get(__ret__, 'name_regex'),\n names=pulumi.get(__ret__, 'names'),\n output_file=pulumi.get(__ret__, 'output_file'),\n source=pulumi.get(__ret__, 'source'),\n source_user_id=pulumi.get(__ret__, 'source_user_id'),\n status=pulumi.get(__ret__, 'status'),\n user_name=pulumi.get(__ret__, 'user_name'),\n users=pulumi.get(__ret__, 'users'))",
"def run(self, realm, users):\n\t\texisting_users = []\n\t\tfor user in users:\n\t\t\tlogging.debug('Probing user %s' % user)\n\t\t\treq = KerberosUserEnum.construct_tgt_req(realm, user)\n\t\t\trep = self.ksoc.sendrecv(req.dump(), throw = False)\n\t\t\t\t\t\n\t\t\tif rep.name != 'KRB_ERROR':\t\n\t\t\t\t# user doesnt need preauth, but it exists\n\t\t\t\texisting_users.append(user)\n\t\t\t\n\t\t\telif rep.native['error-code'] != KerberosErrorCode.KDC_ERR_PREAUTH_REQUIRED.value:\n\t\t\t\t# any other error means user doesnt exist\n\t\t\t\tcontinue\n\t\t\t\n\t\t\telse:\n\t\t\t\t# preauth needed, only if user exists\n\t\t\t\texisting_users.append(user)\n\n\t\treturn existing_users",
"def user_list(server_object, client, address, command_args):\n\n\tmsg = \"\"\n\n\t#: Create a formatted string of all the users.\n\tfor usr in server_object.usrs.values():\n\t\tmsg += usr + '\\n'\n\n\tclient.send(msg.encode())",
"def get(self):\n user_status,calling_user = has_admin_privileges()\n if user_status == \"no_auth_token\":\n return (bad_request,400,headers)\n\n if user_status == \"not_logged_in\":\n return (unauthorized,401,headers)\n\n # *Only allow directors, organizers to make GET on all users (I don't really see the need for this tbh!)maybe for accepting applications\n if user_status in [\"director\",\"organizer\",\"volunteer\"]:\n try:\n all_users = g.session.query(g.Base.classes.users).all()\n ret = User_Schema(many = True).dump(all_users).data\n return (ret,200,headers)\n except Exception as err:\n print(type(err))\n print(err)\n return (internal_server_error,500,headers)\n else:\n forbidden[\"error_list\"]={\"Authorization error\":\"You do not privileges to access this resource. Contact one of the organizers if you think require access.\"}\n return(forbidden,403,headers)",
"def users(message):\n message.reply(Strings['USERS_FOUND'].format(len(hf.get_users())))",
"def get_users():\n username = request.args.get('username')\n netAdminToolDB = app.config['DATABASE']\n if username != None:\n users = []\n users.append(netAdminToolDB.get_user_name(username))\n else:\n users = netAdminToolDB.get_user()\n\n userList = []\n for user in users:\n uri = url_for('get_user', user_id=user.id,_external=True)\n #role = netAdminToolDB.get_role(user.role_id)\n userList.append({\n 'id': user.id,\n 'uri': uri,\n 'username': user.username,\n 'display_name': user.display_name,\n 'role': user.role_name\n })\n if userList == []:\n return jsonify({'error': 'No users found'}), 404\n\n return jsonify({'users': userList })",
"def get_users_output(display_name: Optional[pulumi.Input[Optional[str]]] = None,\n ids: Optional[pulumi.Input[Optional[Sequence[str]]]] = None,\n instance_id: Optional[pulumi.Input[str]] = None,\n mobile: Optional[pulumi.Input[Optional[str]]] = None,\n name_regex: Optional[pulumi.Input[Optional[str]]] = None,\n output_file: Optional[pulumi.Input[Optional[str]]] = None,\n source: Optional[pulumi.Input[Optional[str]]] = None,\n source_user_id: Optional[pulumi.Input[Optional[str]]] = None,\n status: Optional[pulumi.Input[Optional[str]]] = None,\n user_name: Optional[pulumi.Input[Optional[str]]] = None,\n opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetUsersResult]:\n ..."
] | [
"0.6175718",
"0.5447409",
"0.53035015",
"0.5199775",
"0.5180488",
"0.5141749",
"0.5136664",
"0.5105145",
"0.5090195",
"0.5084039",
"0.5067903",
"0.49992546",
"0.49987787",
"0.4993332",
"0.49735498",
"0.49573854",
"0.49499086",
"0.49497983",
"0.4907812",
"0.48920804",
"0.4885891",
"0.48855194",
"0.48824742",
"0.4880734",
"0.4875337",
"0.48703802",
"0.48615497",
"0.48588383",
"0.48553428",
"0.4852343"
] | 0.7442269 | 0 |
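A minimal usage sketch for the GET wrapper in the row above; the client instance, resource id, and filter string are hypothetical stand-ins, since the row only shows that id/param/api/headers are forwarded to self.snmpv3user.get().

# Hypothetical keyword-library instance exposing the wrapper from the row above.
fusion = FusionAPIFixture()  # assumed fixture; not defined in this dump

# List every SNMPv3 trap forwarding user on the appliance.
resp = fusion.fusion_api_get_appliance_snmpv3_trap_forwarding_users()

# Fetch one user by id, with an optional query-string filter passed via param.
one = fusion.fusion_api_get_appliance_snmpv3_trap_forwarding_users(
    id="a1b2c3d4",           # hypothetical resource id
    param="?sort=name:asc",  # assumed filter syntax
)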
Adds SNMPv3 trap forwarding user. [Example] ${resp} = Fusion Api Add Appliance SNMPv3 Trap Forwarding User | | | | def fusion_api_add_appliance_snmpv3_trap_forwarding_user(self, body=None, api=None, headers=None):
return self.snmpv3user.create(body=body, api=api, headers=headers) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def fusion_api_edit_appliance_snmpv3_trap_forwarding_user(self, body=None, id=None, api=None, headers=None): # pylint: disable=W0622\n return self.snmpv3user.put(body=body, id=id, api=api, headers=headers)",
"def fusion_api_get_appliance_snmpv3_trap_forwarding_users(self, id=None, param='', api=None, headers=None): # pylint: disable=W0622\n return self.snmpv3user.get(id=id, param=param, api=api, headers=headers)",
"def fusion_api_delete_appliance_snmpv3_trap_forwarding_user(self, id=None, api=None, headers=None): # pylint: disable=W0622\n return self.snmpv3user.delete(id=id, api=api, headers=headers)",
"def fusion_api_add_appliance_snmpv3_trap_destination(self, body=None, api=None, headers=None):\n return self.snmpv3trap.create(body=body, api=api, headers=headers)",
"def remote_follow(request):\n remote_user = request.POST.get(\"remote_user\")\n try:\n if remote_user[0] == \"@\":\n remote_user = remote_user[1:]\n remote_domain = remote_user.split(\"@\")[1]\n except (TypeError, IndexError):\n remote_domain = None\n\n wf_response = subscribe_remote_webfinger(remote_user)\n user = get_object_or_404(models.User, id=request.POST.get(\"user\"))\n\n if wf_response is None:\n data = {\n \"account\": remote_user,\n \"user\": user,\n \"error\": \"not_supported\",\n \"remote_domain\": remote_domain,\n }\n return TemplateResponse(request, \"ostatus/subscribe.html\", data)\n\n if isinstance(wf_response, WebFingerError):\n data = {\n \"account\": remote_user,\n \"user\": user,\n \"error\": str(wf_response),\n \"remote_domain\": remote_domain,\n }\n return TemplateResponse(request, \"ostatus/subscribe.html\", data)\n\n url = wf_response.replace(\"{uri}\", urllib.parse.quote(user.remote_id))\n return redirect(url)",
"def add_user(self, userdict):\n return self.post('users', userdict)",
"def add_user_stkpnt(*args):\n return _ida_frame.add_user_stkpnt(*args)",
"def AddUser(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')",
"def add(self, user: U) -> None:\n ...",
"def add(isvgAppliance, name, trapAddress, trapCommunity, trapNotificationType=None, trapVersion='V1', trapPort=162,\n objType='snmp', username=None, authEnabled=None, authType=None, authPassPhrase=None, privEnabled=None,\n privType=None, privPassPhrase=None, informSnmpEngineID=None, informTimeout=None, comment='', check_mode=False,\n force=False):\n if force is True or _check(isvgAppliance, None, name, trapAddress, trapCommunity, trapNotificationType, trapVersion,\n trapPort, objType, username, authEnabled, authType, authPassPhrase, privEnabled,\n privType, privPassPhrase, informSnmpEngineID, informTimeout, comment) is False:\n if check_mode is True:\n return isvgAppliance.create_return_object(changed=True)\n else:\n return isvgAppliance.invoke_post(\n \"Add a snmp object\",\n \"/rsp_snmp_objs/\",\n {\n 'name': name,\n 'objType': objType,\n 'comment': comment,\n 'trapAddress': trapAddress,\n 'trapPort': trapPort,\n 'trapCommunity': trapCommunity,\n 'trapVersion': trapVersion,\n 'trapNotificationType': trapNotificationType,\n 'userName': username,\n 'authEnabled': authEnabled,\n 'authType': authType,\n 'authPassPhrase': authPassPhrase,\n 'privEnabled': privEnabled,\n 'privType': privType,\n 'privPassPhrase': privPassPhrase,\n 'informSnmpEngineID': informSnmpEngineID,\n 'informTimeout': informTimeout\n })\n\n return isvgAppliance.create_return_object()",
"def add_user(self, REQUEST):\n\n role_id = REQUEST.form['role_id']\n country_code = role_id.rsplit('-', 1)[-1]\n user_id = REQUEST.form['user_id']\n agent = self._get_ldap_agent()\n\n if not self._allowed(agent, REQUEST, country_code):\n return None\n if not nfp_can_change_user(self, user_id, no_org=False):\n # This means somebody is manipulating the DOM in order to\n # add a user that belongs to an organisation from another\n # country (the button doesn't normally appear)\n return None\n\n with agent.new_action():\n role_id_list = agent.add_to_role(role_id, 'user', user_id)\n\n role_msg = get_role_name(agent, role_id)\n msg = \"User %r added to role %s. \\n\" % (user_id, role_msg)\n\n # for Eionet Groups roles only, test if the added user is member of a\n # national organisation\n\n if self.is_eionet_group(role_id):\n if not get_national_org(agent, user_id, role_id):\n msg += (\n \"The user you want to add to an Eionet Group does not\"\n \" have a mandatory reference to an organisation for \"\n \"your country. Please corect!\")\n\n IStatusMessage(REQUEST).add(msg, type='info')\n\n log.info(\"%s ADDED USER %r TO ROLE %r\",\n logged_in_user(REQUEST), user_id, role_id_list)\n\n if '-awp-' in role_id:\n return REQUEST.RESPONSE.redirect(self.absolute_url() +\n '/awps?nfp=%s#role_%s' %\n (country_code, role_id))\n\n return REQUEST.RESPONSE.redirect(self.absolute_url() +\n '/nrcs?nfp=%s#role_%s' %\n (country_code, role_id))",
"def add_user(self):\n\n pin, code = self.get_auth_pin() \n print(\"Enter the PIN '{}' into the Add Application window and click Add Application\".format(pin))\n input(\"waiting press enter to continue...\")\n\n access_token, refresh_token = self.get_tokens(code)\n user_id = self.tokens.get_next_user_id()\n self.tokens.insert_user(user_id, access_token, refresh_token)\n tstat_ids = self.get_tstat_ids(access_token)\n for tstat_id in tstat_ids:\n logger.info(\"Adding Thermostat ID: {}\".format(tstat_id))\n self.tokens.insert_tstat(user_id, tstat_id)",
"def AddUser(self, usercount, user):\n for i in range(usercount):\n login = string.replace(user[i]['Login'], ' ', '')\n home = self.__homeprefix + login[0] + '/' + login\n action = 'userman -A ' + login + ' -p ' + user[i]['Passwd'] + ' -u ' + str(user[i]['UID']) + \\\n ' -g ' + str(user[i]['GID']) + ' -H ' + home + ' -s ' + user[i]['Shell'] \n output = commands.getstatusoutput(action)\n print output\n updatecount, update = self.__sqlData[\"UPDATE AccUser SET ToDo = 0 WHERE Login = '%s'\" % (login)]",
"def add_user_process():\n\n # extract form data, add, commit, then redirect to /users\n first_name = request.form[\"first-name\"]\n last_name = request.form[\"last-name\"]\n image_url = request.form[\"image-url\"]\n\n msg = db_add_user(first_name, last_name, image_url)\n\n flash(msg[\"text\"], msg[\"severity\"])\n\n return redirect(\"/users\")",
"def add_user():\n request_data = request.get_json()\n\n if 'username' in request_data and 'password' in request_data:\n try:\n User.add_user(request_data['username'], request_data['password'])\n response = Response({}, 201, mimetype=\"application/json\")\n response.headers['Location'] = f'/User/{request_data[\"username\"]}'\n return response\n except UserAlreadyExistsException:\n return Response(\n json.dumps({'error': 'A user already exists with the given username'}),\n 400,\n mimetype='application/json'\n )\n\n return Response(\n json.dumps({'error': 'Username / Password missing in the request body'}),\n 400,\n mimetype='application/json'\n )",
"def fusion_api_add_user(self, body, api=None, headers=None):\n return self.user.create(body, api, headers)",
"def add(self, user):\n int_id = user.get_int_id(self.rooms)\n self.rooms[user.room][\"users\"].append(user)\n\n # Games\n if self.rooms[user.room][\"isGame\"] == \"true\":\n user.send([\"jg\", int_id, user.room])\n # Rooms\n else:\n user.send([\"jr\", int_id, user.room, self.get_strings(user.room)])\n self.packet.send_room([\"ap\", int_id, user.get_string()], user.room)",
"def addUser(self, user):\r\n self.users.append(user)\r\n return len(self.users)-1",
"def add_user():\n input = request.get_json()\n\n if input == None:\n return jsonify({'error': 'Invalid POST request, no data'}), 400\n if not 'username' in input:\n return jsonify({'error': 'Invalid POST request, missing username'}), 400\n if not 'password' in input:\n return jsonify({'error': 'Invalid POST request, missing password'}), 400\n if not 'display_name' in input:\n return jsonify({'error': 'Invalid POST request, missing display_name'}), 400\n if not 'role' in input:\n return jsonify({'error': 'Invalid POST request, missing role'}), 400\n\n netAdminToolDB = app.config['DATABASE']\n id = netAdminToolDB.add_user(input['username'], input['password'],\n input['display_name'], input['role'])\n\n newUser = netAdminToolDB.get_user(id)\n newUserDict = dict(newUser)\n uri = url_for('get_user', user_id=newUser.id, _external=True)\n newUserDict['uri'] = uri\n\n return jsonify({'user': newUserDict}), 201",
"def post_user():\n\tuser = User.add(request.json)\n\tif user == None:\n\t\tabort(404)\n\treturn jsonify({'user': user.output()})",
"def add_member(self, request, pk):\n farm = self.get_object()\n user = request.data.get('user')\n farm.add_member(user)\n return Response({}, status=status.HTTP_202_ACCEPTED)",
"def add_user():\n load_jws_from_request(request)\n if not hasattr(request, 'jws_header') or request.jws_header is None:\n return \"Invalid Payload\", 401\n username = request.jws_payload['data'].get('username')\n address = request.jws_header['kid']\n user = SLM_User(username=username)\n ses.add(user)\n try:\n ses.commit()\n except Exception as ie:\n current_app.logger.exception(ie)\n ses.rollback()\n ses.flush()\n return 'username taken', 400\n userkey = UserKey(key=address, keytype='public', user_id=user.id,\n last_nonce=request.jws_payload['iat']*1000)\n ses.add(userkey)\n try:\n ses.commit()\n except Exception as ie:\n current_app.logger.exception(ie)\n ses.rollback()\n ses.flush()\n #ses.delete(user)\n #ses.commit()\n return 'username taken', 400\n jresult = jsonify2(userkey, 'UserKey')\n current_app.logger.info(\"registered user %s with key %s\" % (user.id, userkey.key))\n return current_app.bitjws.create_response(jresult)",
"def addOne():\n print(inspect.stack()[1][3])\n # read data from the API call\n req_data = request.get_json()\n json_data = {}\n\n for req in req_data:\n if (req in Followup.c.keys()):\n json_data[req] = req_data[req]\n\n query = (\n insert(Followup).\n values(json_data)\n )\n ResultProxy = connection.execute(query)\n if(not ResultProxy):\n return {'error': 'Unable to Add the given client'}\n return {'status': \"Adding Succesful\"}",
"def add_friend_to_trip(request, trip_id, user_id):\n try:\n trip = Trip.objects.get(pk=trip_id)\n if request.user not in trip.users.all():\n return Response(status=status.HTTP_401_UNAUTHORIZED)\n\n user = User.objects.get(pk=user_id)\n if user in trip.users.all():\n error_message = \"User already associated with trip\"\n return Response(error_message, status=status.HTTP_400_BAD_REQUEST)\n\n trip.users.add(user)\n except Trip.DoesNotExist:\n error_message = \"Trip does not exist\"\n return Response(error_message, status=status.HTTP_400_BAD_REQUEST)\n except User.DoesNotExist:\n error_message = \"User does not exist\"\n return Response(error_message, status=status.HTTP_400_BAD_REQUEST)\n except Exception as e:\n return Response(str(e), status=status.HTTP_400_BAD_REQUEST)\n\n return Response(status=status.HTTP_200_OK)",
"def addNewUser(SID, username, userpass, lastname, firstname, email, additionalname, street, zip, fax, states_id, town, language_id, baseURL):\n return call(\"addNewUser\", SID, username, userpass, lastname, firstname, email, additionalname, street, zip, fax, states_id, town, language_id, baseURL)",
"def _game_turn(trick_id: int, landed: bool, user_name: str, game_id: int,\n client: FlaskClient, server_app_context: AppContext) -> None:\n user_att = models.Attempt(trick_id=trick_id,\n game_id=game_id,\n user=user_name,\n landed=landed,\n time_of_attempt=datetime.datetime.utcnow())\n models.db.session.add(user_att)\n models.db.session.commit()",
"def add_user(self, user):\n\t\tself.users[user.username] = user",
"def vlan_user(self, vlan, user):\n self.vlans.append(vlan)\n self.vlan_users.append(vlan + \".\" + user)\n self.user_name.append(user)",
"def insert_ret(self, space_no, field_types, *args):\n d = self.replyQueue.get()\n packet = RequestInsert(self.charset, self.errors, d._ipro_request_id,\n space_no, Request.TNT_FLAG_ADD | Request.TNT_FLAG_RETURN, *args)\n self.transport.write(bytes(packet))\n return d.addCallback(self.handle_reply, self.charset, self.errors, field_types)",
"def AddRosterItem(self, fpb, username):\n pass"
] | [
"0.6939913",
"0.5976426",
"0.58757097",
"0.5855446",
"0.51126504",
"0.5057027",
"0.4986211",
"0.49599764",
"0.49044636",
"0.4904454",
"0.48323953",
"0.48296493",
"0.4788959",
"0.4776318",
"0.47702244",
"0.47659114",
"0.47518277",
"0.47437558",
"0.47392562",
"0.47388226",
"0.47384268",
"0.47208977",
"0.47150126",
"0.46900517",
"0.46806222",
"0.46709436",
"0.46626863",
"0.46271974",
"0.46155357",
"0.45975235"
] | 0.7938626 | 0 |
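A minimal creation sketch for the POST wrapper in the row above; every field in the body is an illustrative guess at an SNMPv3 user payload, not a schema taken from this dump.

# Sketch: add an SNMPv3 trap forwarding user. Field names are assumptions.
body = {
    "userName": "trapforwarder01",
    "securityLevel": "AuthAndPriv",
    "authenticationProtocol": "SHA",
    "authenticationPassphrase": "auth-secret",
    "privacyProtocol": "AES-128",
    "privacyPassphrase": "priv-secret",
}
resp = fusion.fusion_api_add_appliance_snmpv3_trap_forwarding_user(body=body)
# The wrapper simply forwards body/api/headers to self.snmpv3user.create().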
Edits SNMPv3 trap forwarding user associated with id [Example] ${resp} = Fusion Api Edit Appliance SNMPv3 Trap Forwarding User | | | def fusion_api_edit_appliance_snmpv3_trap_forwarding_user(self, body=None, id=None, api=None, headers=None): # pylint: disable=W0622
return self.snmpv3user.put(body=body, id=id, api=api, headers=headers) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def fusion_api_add_appliance_snmpv3_trap_forwarding_user(self, body=None, api=None, headers=None):\n return self.snmpv3user.create(body=body, api=api, headers=headers)",
"def update_user(id):\n pass",
"def fusion_api_delete_appliance_snmpv3_trap_forwarding_user(self, id=None, api=None, headers=None): # pylint: disable=W0622\n return self.snmpv3user.delete(id=id, api=api, headers=headers)",
"def fusion_api_edit_user(self, body, uri, api=None, headers=None):\n return self.user.update(body, uri, api, headers)",
"async def put_user_byid(request):\n user_id = request.match_info[\"user_id\"]\n try:\n user_id = int(user_id)\n except (ValueError, TypeError):\n return web.Response(text=\"Incorrect value for user_id\", status=400)\n\n user = request.cirrina.db_session.query(User).filter_by(id=user_id).first()\n if not user:\n return web.Response(status=404, text=\"User not found\")\n\n if user.username == \"admin\":\n return web.Response(status=400, text=\"Cannot change admin\")\n\n is_admin = request.GET.getone(\"is_admin\", None) # str \"true\" or \"flase\"\n if not is_admin: # if None\n return web.Response(text=\"Nothing to change\", status=204)\n\n if is_admin.lower() == \"true\":\n user.is_admin = True\n data = {\"result\": \"{u} is now admin \".format(u=user.username)}\n elif is_admin.lower() == \"false\":\n user.is_admin = False\n data = {\"result\": \"{u} is no longer admin \".format(u=user.username)}\n\n try:\n request.cirrina.db_session.commit() # pylint: disable=no-member\n except sqlalchemy.exc.DataError:\n request.cirrina.db_session.rollback() # pylint: disable=no-member\n return web.Response(status=500, text=\"Database error\")\n\n # TODO : change to a multicast group\n await app.websocket_broadcast(\n {\n \"event\": Event.changed.value,\n \"subject\": Subject.user.value,\n \"changed\": {\"id\": user_id, \"is_admin\": user.is_admin},\n }\n )\n\n return web.json_response(data)",
"def fusion_api_get_appliance_snmpv3_trap_forwarding_users(self, id=None, param='', api=None, headers=None): # pylint: disable=W0622\n return self.snmpv3user.get(id=id, param=param, api=api, headers=headers)",
"def update_user():",
"def update_user():\n user = request.json\n user[\"_id\"] = validate_id(user[\"_id\"])\n if not user_service.update_user(user):\n response = {\n \"status\": False,\n \"message\": f\"No se pudo actualizar el usuario: {str(user['_id'])}\",\n }\n resp = make_response(dumps(response), 404)\n else:\n response = {\n \"status\": True,\n \"message\": f\"Se actualizo corretamente el usuario: {str(user['_id'])}\",\n }\n resp = make_response(dumps(response), 200)\n resp.headers[\"Content-Type\"] = \"application/json\"\n return resp",
"def put(self, user_id):\r\n return update_user(request, user_id)",
"def edit_user_process(user_id):\n\n # extract form data, edit, commit, then redirect to /users\n first_name = request.form[\"first-name\"].strip()\n last_name = request.form[\"last-name\"].strip()\n image_url = request.form[\"image-url\"].strip()\n\n msg = db_edit_user(user_id, first_name, last_name, image_url)\n\n flash(msg[\"text\"], msg[\"severity\"])\n\n return redirect(f\"/users/{user_id}\")",
"def put(self, id):\n data = flask.request.json\n user_dao.update_user(id, data)\n return None, 204",
"def updateUser(self, payload):\n\t\turl = \"https://habitica.com/api/v3/user\"\n\t\treturn(putUrl(url, self.credentials, payload))",
"def update_user(user_id, data):\n logging.debug(\"Uptating user: user_id={}\".format(user_id))\n return ask('appusers/{0}'.format(user_id), data, 'put')",
"def update_user(user_id):\n netAdminToolDB = app.config['DATABASE']\n user = netAdminToolDB.get_user(user_id)\n if user == None:\n return jsonify({'error': 'User_id not found'}), 404\n\n input = request.get_json()\n\n if input == None:\n return jsonfiy({'error': 'Invalid PUT request'}), 400\n\n # Send input directly to update_user function, which checks each key\n netAdminToolDB.update_user(user_id, **input)\n user = netAdminToolDB.get_user(user_id)\n userDict = dict(user)\n uri = url_for('get_user', user_id=user.id, _external=True)\n userDict['uri'] = uri\n\n return jsonify({'user': userDict}), 200",
"def update(self, user: U) -> None:\n ...",
"def put(self, user_id):\n data = request.json\n return update_user(data, user_id)",
"def put(self, id):\n return userDao.update(id, api.payload)",
"def edit_user(user_id):\n \"\"\"Cannot update a user's role\"\"\"\n db = get_db()\n users = db.users\n data = request.json\n \n # Check if user_id is a string\n if not isinstance(user_id, str):\n raise APIException(status_code=400, message='user_id not a string')\n \n # Check if user_id is actually an entry in the users collection\n cursor = users.find({\"user_id\": user_id})\n if cursor.count() is 0:\n raise APIException(status_code=404, message='user_id does not exist yet')\n elif cursor.count() > 1:\n raise APIException(status_code=500, message='Error, multiple entries with same user_id found. user_id must be unique')\n \n # Validate that the data matches the required format\n # user_id = data['user_id']\n # del data['user_id']\n validate_user_data(data, is_adding_new_user=False)\n # data['user_id'] = user_id\n\n result = users.update_one(\n {\"user_id\": user_id},\n {\n \"$set\": {\n \"name\": data[\"name\"],\n \"phone\": data[\"phone\"],\n \"email\": data[\"email\"],\n \"VenmoUsername\": data[\"VenmoUsername\"],\n \"gender\": data[\"gender\"],\n \"height\": data[\"height\"],\n \"weight\": data[\"weight\"],\n \"age\": data[\"age\"],\n \"bio\": data[\"bio\"],\n \"tags\": data[\"tags\"],\n \"location\": data[\"location\"],\n \"pic_url\": data[\"pic_url\"]\n }\n }\n )\n \n if \"role\" not in data:\n return '', 200\n if data[\"role\"] == \"Mentor\":\n result = users.update_one(\n {\"user_id\": user_id},\n {\n \"$set\": {\n \"rates\": data[\"rates\"],\n \"accepting_clients\": data[\"accepting_clients\"]\n }\n }\n )\n return '', 200",
"def fusion_api_edit_appliance_snmpv3_trap_destination(self, body=None, id=None, api=None, headers=None): # pylint: disable=W0622\n return self.snmpv3trap.put(body=body, id=id, api=api, headers=headers)",
"def update_user():\n #TODO user update \n pass",
"def put(self, id):\n payload = marshal(api.payload, invite_user)\n taskroom_service.invite_user(id, payload['email'])\n return {'Message': \"User Added to the Task Room\"}",
"def idme(bot, update):\n update.message.reply_text(\"Your ID is: \" + str(update.message.from_user.id))",
"def edit_user(user_id):\n if request.method == 'GET':\n # init form with current user:\n form = ProfileForm(\n nickname = session[Session.PROFILE][\"nickname\"], \n location = session[Session.PROFILE][\"location\"],\n about_me = session[Session.PROFILE][\"about_me\"]\n )\n if request.method == 'POST': \n # init form with POSTed form:\n form = ProfileForm(request.form)\n\n if form.validate(): \n # update backend:\n response = service_user_management.patch(\n id = f'auth0|{user_id}', \n nickname = form.nickname.data, \n location = form.location.data,\n about_me = form.about_me.data\n )\n\n # success:\n if 'identities' in response: \n try:\n # update db:\n delegated_user = DelegatedUser.query.get_or_404(\n user_id, \n description='There is no user with id={}'.format(user_id)\n )\n delegated_user.nickname = form.nickname.data\n # update:\n db.session.add(delegated_user)\n # write\n db.session.commit()\n\n # update session:\n session[Session.PROFILE][\"nickname\"] = form.nickname.data\n session[Session.PROFILE][\"location\"] = form.location.data\n session[Session.PROFILE][\"about_me\"] = form.about_me.data\n \n # on successful profile update, flash success\n flash('Your profile was successfully updated.')\n\n return redirect(url_for('.show_user', user_id = user_id))\n except:\n db.session.rollback()\n # on unsuccessful registration, flash an error instead.\n flash('An error occurred. New account could not be created.')\n finally:\n db.session.close()\n # failure:\n else:\n flash(response['message']) \n else:\n # for debugging only:\n flash(form.errors)\n \n return render_template('users/forms/user.html', form=form, user_id=user_id)",
"def test_040_update_user(self):\n\n testflow.step(\"Updating user %s\", TEST_USER2)\n assert USER_CLI.run(\n 'edit',\n TEST_USER2,\n attribute='firstName=userX2',\n )[0]",
"def do_user_update():\n targetUsers = User.query.filter_by(id=request.form['id']).all()\n if not any(targetUsers):\n return user_list(\"Unknown user.\")\n\n targetUser = targetUsers[0]\n\n targetUser.first_name = request.form['first_name']\n targetUser.name = request.form['name']\n targetUser.nick = request.form['nick']\n targetUser.mail = request.form['mail']\n targetUser.role = request.form['role']\n targetUser.state = request.form['state']\n targetUser.gender = request.form['gender']\n targetUser.meter_id = request.form['meter_id']\n targetUser.group_id = request.form['group_id']\n\n db.session.commit()\n return user_list(\"Updated user \" + targetUser.name)",
"def update_user(self, user):\n query = TABELLE['id_users']['update']\n return self.execute(query,\n (user['admin'], user['tester'], user['loot_user'], user['loot_admin'], user['banned'],\n user['id']))",
"def update_user(user_id):\n update_usr = request.get_json()\n if not update_usr:\n abort(400, {'Not a JSON'})\n usr = storage.get(User, user_id)\n if not usr:\n abort(404)\n else:\n for key, value in update_usr.items():\n setattr(usr, key, value)\n storage.save()\n return jsonify(usr.to_dict())",
"def patch_user(user_id):\n success = True\n try:\n usr = db.session.query(User).get(user_id)\n for item in request.json:\n if item == 'username':\n usr.username = request.json['username']\n elif item == 'email':\n usr.username = request.json['email']\n db.session.commit()\n except:\n success = False\n return jsonify(success=success)",
"def internal_edit_user(\n payload: dict,\n raiseonfail: bool = False,\n override_authdb_path: str = None,\n config: SimpleNamespace = None,\n) -> dict:\n\n engine, meta, permjson, dbpath = get_procdb_permjson(\n override_authdb_path=override_authdb_path,\n override_permissions_json=None,\n raiseonfail=raiseonfail,\n )\n\n for key in (\"reqid\", \"pii_salt\"):\n if key not in payload:\n LOGGER.error(\n \"Missing %s in payload dict. Can't process this request.\" % key\n )\n return {\n \"failure_reason\": (\n \"invalid request: missing '%s' in request\" % key\n ),\n \"success\": False,\n \"session_token\": None,\n \"expires\": None,\n \"messages\": [\"Invalid edit-user request.\"],\n }\n\n for key in (\"target_userid\", \"update_dict\"):\n\n if key not in payload:\n\n LOGGER.error(\n \"[%s] Invalid session edit-user request, missing %s.\"\n % (payload[\"reqid\"], key)\n )\n\n return {\n \"success\": False,\n \"failure_reason\": (\n \"invalid request: missing '%s' in request\" % key\n ),\n \"messages\": [\n \"Invalid edit-user request: \"\n \"missing or invalid parameters.\"\n ],\n }\n\n target_userid = payload[\"target_userid\"]\n update_dict = payload[\"update_dict\"]\n if update_dict is None or len(update_dict) == 0:\n return {\n \"success\": False,\n \"failure_reason\": (\n \"invalid request: missing 'update_dict' in request\"\n ),\n \"messages\": [\n \"Invalid user-edit request: \" \"missing or invalid parameters.\"\n ],\n }\n\n update_dict_keys = set(update_dict.keys())\n disallowed_keys = {\n \"user_id\",\n \"system_id\",\n \"password\",\n \"emailverify_sent_datetime\",\n \"emailforgotpass_sent_datetime\",\n \"emailchangepass_sent_datetime\",\n \"last_login_success\",\n \"last_login_try\",\n \"failed_login_tries\",\n \"created_on\",\n \"last_updated\",\n }\n leftover_keys = update_dict_keys.intersection(disallowed_keys)\n\n if len(leftover_keys) > 0:\n LOGGER.error(\n \"[%s] Invalid edit-user request, \"\n \"found disallowed update keys in update_dict: %s.\"\n % (payload[\"reqid\"], leftover_keys)\n )\n return {\n \"success\": False,\n \"failure_reason\": (\n \"invalid request: disallowed keys in update_dict: %s\"\n % leftover_keys\n ),\n \"messages\": [\n \"Invalid edit-user request: \" \"invalid update parameters.\"\n ],\n }\n\n #\n # now, try to update\n #\n try:\n\n users = meta.tables[\"users\"]\n\n sel = (\n select(users.c.user_id, users.c.extra_info)\n .select_from(users)\n .where(users.c.user_id == target_userid)\n )\n\n with engine.begin() as conn:\n result = conn.execute(sel)\n userid_and_extrainfo = result.first()\n\n if not userid_and_extrainfo or len(userid_and_extrainfo) == 0:\n return {\n \"success\": False,\n \"failure_reason\": \"no such user\",\n \"messages\": [\"User info update failed.\"],\n }\n\n if (\n \"extra_info\" in update_dict\n and update_dict[\"extra_info\"] is not None\n ):\n\n user_extra_info = userid_and_extrainfo.extra_info\n if not user_extra_info:\n user_extra_info = {}\n\n for key, val in update_dict[\"extra_info\"].items():\n if val == \"__delete__\" and key in user_extra_info:\n del user_extra_info[key]\n else:\n user_extra_info[key] = val\n\n else:\n user_extra_info = userid_and_extrainfo.extra_info\n\n # do the update\n\n # replace the extra_info key in the update_dict since we update that\n # separately\n update_dict[\"extra_info\"] = user_extra_info\n\n with engine.begin() as conn:\n upd = (\n users.update()\n .where(\n users.c.user_id == target_userid,\n )\n .values(update_dict)\n )\n conn.execute(upd)\n\n s = (\n 
select(*user_info_columns(users))\n .select_from(users)\n .where(users.c.user_id == target_userid)\n )\n\n result = conn.execute(s)\n row = result.first()\n\n try:\n\n serialized_result = dict(row._mapping)\n LOGGER.info(\n \"[%s] User info updated for \"\n \"user_id: %s.\"\n % (\n payload[\"reqid\"],\n pii_hash(\n serialized_result[\"user_id\"], payload[\"pii_salt\"]\n ),\n )\n )\n\n return {\n \"success\": True,\n \"user_info\": serialized_result,\n \"messages\": [\"User-info update successful.\"],\n }\n\n except Exception as e:\n\n LOGGER.error(\n \"[%s] User info update failed for session token: %s. \"\n \"Exception was: %r.\"\n % (\n payload[\"reqid\"],\n pii_hash(payload[\"target_userid\"], payload[\"pii_salt\"]),\n e,\n )\n )\n\n return {\n \"success\": False,\n \"failure_reason\": \"user requested for update doesn't exist\",\n \"messages\": [\"User info update failed.\"],\n }\n\n except Exception as e:\n\n LOGGER.error(\n \"[%s] User info update failed for user_id: %s. \"\n \"Exception was: %r.\"\n % (\n payload[\"reqid\"],\n pii_hash(payload[\"target_userid\"], payload[\"pii_salt\"]),\n e,\n )\n )\n\n return {\n \"success\": False,\n \"failure_reason\": \"DB error when updating user info\",\n \"messages\": [\"User info update failed.\"],\n }",
"def edit_user():\n userid = request.form[\"userid\"]\n email = request.form[\"email\"]\n fname = request.form[\"fname\"]\n lname = request.form[\"lname\"]\n macaddress = request.form[\"macaddress\"]\n role = request.form[\"role\"]\n\n print(userid, \" | \",email,\" | \", fname,\" | \", lname, \" | \",macaddress,\" | \", role)\n\n user = User.query.get(userid)\n user.email = email\n user.fname = fname\n user.lname = lname\n user.macaddress = macaddress\n user.role = role\n\n # commit the new add.\n db.session.commit()\n\n return userSchema.jsonify(user)"
] | [
"0.64520043",
"0.60710645",
"0.6002895",
"0.57764155",
"0.5557158",
"0.55425996",
"0.5484981",
"0.54391134",
"0.54175264",
"0.5394457",
"0.5381147",
"0.53033",
"0.53024733",
"0.5267579",
"0.5251395",
"0.52505785",
"0.5223246",
"0.520376",
"0.5195357",
"0.5168125",
"0.5167796",
"0.5167018",
"0.51554",
"0.5148575",
"0.51390815",
"0.5137608",
"0.5127798",
"0.51082224",
"0.50948626",
"0.5031232"
] | 0.7626561 | 0 |
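A read-modify-write sketch for the PUT wrapper in the row above, reusing the hypothetical fusion client; the mutable field name is an assumption, and only body/id/api/headers are real parameters of the wrapper.

# Sketch: edit an existing SNMPv3 trap forwarding user in place.
user = fusion.fusion_api_get_appliance_snmpv3_trap_forwarding_users(id="a1b2c3d4")
user["securityLevel"] = "AuthOnly"  # assumed field; adjust to the real schema
resp = fusion.fusion_api_edit_appliance_snmpv3_trap_forwarding_user(
    body=user,       # full updated representation, PUT semantics
    id="a1b2c3d4",   # same hypothetical id as in the GET sketch
)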
Deletes the SNMPv3 trap forwarding user associated with id [Example] ${resp} = Fusion Api Delete Appliance SNMPv3 Trap Forwarding User | | | def fusion_api_delete_appliance_snmpv3_trap_forwarding_user(self, id=None, api=None, headers=None): # pylint: disable=W0622
return self.snmpv3user.delete(id=id, api=api, headers=headers) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def fusion_api_delete_appliance_snmpv3_trap_destination(self, id=None, api=None, headers=None): # pylint: disable=W0622\n return self.snmpv3trap.delete(id=id, api=api, headers=headers)",
"def delete_user(id):\n pass",
"def delete_user():\n user_id = validate_id(request.args.get(\"id\"))\n config = config_service.get_one({\"user\": str(user_id)})\n config_service.delete(str(config[\"_id\"]))\n if user_service.delete_user(user_id) != user_id:\n response = {\n \"status\": False,\n \"message\": f\"No se pudo eliminar el usuario: {str(user_id)}\",\n }\n resp = make_response(jsonify(response), 404)\n else:\n response = {\n \"status\": True,\n \"message\": f\"Se elimino corretamente el usuario: {str(user_id)}\",\n }\n resp = make_response(jsonify(response), 200)\n resp.headers[\"Content-Type\"] = \"application/json\"\n return resp",
"def delete_user_giphy(giphy_id: \"str\") -> \"Tuple[Response, int]\":\n response: \"List[Dict]\" = retriever.retrieve_giphy(giphy_id=giphy_id)\n status: \"int\" = 201 if response else 404\n return jsonify(response), status",
"def delete_user(self, _id):\n return self.make_request(\"DELETE\", \"users/\"+_id, {})",
"def delete(self, id):\n\t\ttry:\n\t\t\tuser_service.delete(id)\n\t\texcept AssertionError as e:\n\t\t\tuser_space.abort(400, e.args[0], status = \"Could not delete user\", statusCode = \"400\")\n\t\texcept Exception as e:\n\t\t\tuser_space.abort(500, e.args[0], status = \"Could not delete user\", statusCode = \"500\")",
"def delete_user_by_xng_id(self, user):\n # type: (dict) -> dict\n self.request_url = \"{0}/{1}/xngId/{2}\".format(self.API_URL, self.USER_ENDPOINT, user['xngId'])\n return self.__create_request(payload=user, request_type=self.REQUEST_DELETE, version=\"v1\")",
"def delete_user(id):\n user = Users.query.filter_by(id=id).first()\n user.delete()\n if not user:\n return send_msg(404, 'Not Found')\n return send_msg(204, \"No data\")",
"def delete_user(payload, user_id):\n user = User.query.get(user_id)\n # exception for non existing id\n if user is None:\n abort(404)\n # set error status\n error = False\n # delete the user\n try:\n user.delete()\n except Exception:\n user.rollback()\n error = True\n print(sys.exc_info())\n finally:\n user.close_session()\n\n if error:\n abort(422)\n\n return jsonify({\n 'success': True,\n 'deleted': user_id\n })",
"def delete_user(self) -> 'outputs.ActingUserResponse':\n return pulumi.get(self, \"delete_user\")",
"def delete_user():\n del globalopts.appdata[request.user]\n del globalopts.users[request.user]\n return \"\", 200",
"def delete(self, id):\n # Get the user from the auth header\n auth_username, auth_password = decode_basic_auth_info(request)\n auth_user = User.query.filter(User.username==auth_username).first()\n if not auth_user.admin:\n return Response(status=403)\n\n user = User.query.get(id)\n if user is None:\n return Response(status=400)\n db.session.delete(user)\n db.session.commit()\n return Response(status=202)",
"def delete_user():",
"def on_delete(self, req, resp, table, id):\n user = req.context['user']\n engine = user_db_engine(user)\n query = \"DELETE FROM {} WHERE id=:id\".format(table)\n\n with engine.new_session() as conn:\n result = conn.execute(query, { \"id\": id })\n\n resp.context['result'] = {'result': 'ok'}\n resp.status = falcon.HTTP_200",
"def user_id_delete(user_id):\n user = storage.get(\"User\", user_id)\n\n if user is None:\n abort(404)\n user.delete()\n del user\n return make_response(jsonify({}), 200)",
"def delete(self, user_id):\r\n return delete_user(request, user_id)",
"def delete_user(user_id):\n usr = storage.get(User, user_id)\n if usr:\n usr.delete(), storage.save()\n return {}\n else:\n abort(404)",
"def delete_user(user_id):\n netAdminToolDB = app.config['DATABASE']\n user = netAdminToolDB.get_user(user_id)\n\n if user == None:\n return jsonify({'error': 'User_id not found'}), 404\n\n netAdminToolDB.delete_user(user_id)\n return jsonify({'result': True})",
"def delete(khoros_object, user_id, return_json=False):\n # TODO: Allow other identifiers (e.g. login, email, etc.) to be provided instead of just the User ID\n query_url = f\"{khoros_object.core_settings['v2_base']}/users/{user_id}\"\n response = api.delete(query_url, return_json, auth_dict=khoros_object.auth)\n if response.status_code == 403 and 'Feature is not configured' in response.text:\n try:\n identifier = response.text.split('identifier: ')[1].split('\"')[0]\n raise errors.exceptions.FeatureNotConfiguredError(identifier=identifier)\n except IndexError:\n raise errors.exceptions.FeatureNotConfiguredError()\n if return_json:\n response = response.json()\n return response",
"def deleteUser(user):\n delete_user(user)\n return redirect(url_for('login'))",
"def delete_user():\n token = request.args.get('token')\n data = jwt.decode(token, app.config['SECRET_KEY'])\n\n permit = functions.delete_user(data)\n if permit:\n return make_response(jsonify({'Delete': 'User Deleted Successfully'}), 201)\n else:\n return make_response(jsonify({'Delete Failed': 'Credentials not match or the user not exist'}), 201)",
"def delete(self,user_id):\n user_status,calling_user = has_admin_privileges()\n if user_status == \"no_auth_token\":\n return (bad_request,400,headers)\n\n if user_status == \"not_logged_in\":\n return (unauthorized,401,headers)\n\n # getting the user. Assuming the user exists. Case of user not existing is checked below\n try:\n user = g.session.query(g.Base.classes.users).get(user_id)\n except Exception as err:\n print(type(err))\n print(err)\n return (internal_server_error,500,headers)\n\n # *Only Directors, Organizers and user calling the request\n if user:\n try:\n if user_status in [\"director\",\"organizer\"] or calling_user.id == user.id:\n if user.rsvps_collection:\n g.session.delete(g.session.query(g.Base.classes.rsvps).get(user.rsvps_collection[0].id))\n if user.applications_collection:\n g.session.delete(g.session.query(g.Base.classes.applications).get(user.applications_collection[0].id))\n g.session.delete(g.session.query(g.Base.classes.users).get(user_id))\n else:\n forbidden[\"error_list\"]={\"Authorization error\":\"You do not privileges to access this resource. Contact one of the organizers if you think require access.\"}\n return (forbidden,403,headers)\n except Exception as err:\n print(type(err))\n print(err)\n return (internal_server_error, 500, headers)\n else:\n return (not_found,404,headers)\n\n # error handling for mail send\n try:\n f = open(\"common/account_creation.html\",'r')\n body = Template(f.read())\n f.close()\n body = body.render(first_name = user.first_name)\n send_email(subject = \"Account creation confirmation!\",recipient = user.email, body = \"Account deleted!\")\n return (\"\",204,headers)\n except Exception as err:\n print(type(err))\n print(err)\n internal_server_error[\"error_list\"][\"error\"] = \"Account successfully created. Error in confirmation email sending.\"\n return (internal_server_error,500,headers)",
"async def delete_user(user_id):\n \n user = User.select().where(User.id == user_id).first()\n\n if not user:\n return HTTPException(404, 'User not found')\n else:\n user.delete_instance()\n\n return f\"User {user.username} deleted successfully\"",
"def del_user_id(user_id):\r\n obj = storage.get(User, user_id)\r\n if obj is None:\r\n abort(404)\r\n obj.delete()\r\n storage.save()\r\n return jsonify({}), 200",
"def deleteOne(id):\n print(inspect.stack()[1][3])\n query = Followup.delete().where(Followup.columns.id == id)\n ResultProxy = connection.execute(query)\n if(not ResultProxy):\n return {'error': 'Unable to find the given client'}\n return {'status': \"Delete Succesful\"}",
"def remove(id):\n q = User.delete().where(User.id == id)\n try:\n q.execute()\n except Exception as e:\n return e\n return redirect(url_for('db'))",
"def delete(user_id):\n # Get the user requested\n user = User.query.filter(User.user_id == user_id).one_or_none()\n\n if user is not None:\n db.session.delete(user)\n db.session.commit()\n return (\n \"User {user_id} deleted\".format(user_id=user_id), 200\n )\n\n else:\n abort(\n 404,\n \"Person not found for Id: {user_id}\".format(user_id=user_id),\n )",
"def delete_user_process(user_id):\n\n db_user = User.query.get_or_404(user_id)\n\n db.session.delete(db_user)\n db.session.commit()\n\n return redirect(\"/users\")",
"def fusion_api_delete_appliance_trap_destination(self, id=None, api=None, headers=None): # pylint: disable=W0622\n return self.trap.delete(id=id, api=api, headers=headers)",
"def delete(self, id):\n userDao.delete(id)\n return \"\", 204"
] | [
"0.65136087",
"0.64713705",
"0.63645256",
"0.6336111",
"0.6335194",
"0.63321716",
"0.63135046",
"0.62973535",
"0.6286232",
"0.62428766",
"0.62398744",
"0.6206671",
"0.617713",
"0.617306",
"0.61483526",
"0.6131463",
"0.6121467",
"0.6104523",
"0.6087752",
"0.60877126",
"0.6070226",
"0.6063813",
"0.6039095",
"0.60131955",
"0.5981085",
"0.5972905",
"0.5972861",
"0.59586847",
"0.59394383",
"0.5914215"
] | 0.84660083 | 0 |
Retrieves a list of categories and actions. [Arguments] [Example] ${resp} = Fusion Api Get Authorization Category Actions | | | | | def fusion_api_get_authorization_category_actions(self, api=None, headers=None, resource_uri='', sessionID=None):
param = '/category-actions%s' % resource_uri
return self.auth.get(api=api, param=param, headers=headers, sessionID=sessionID) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def fusion_api_get_authorization_role_category_actions(self, api=None, headers=None, sessionID=None):\n param = '/role-category-actions'\n return self.auth.get(api=api, param=param, headers=headers, sessionID=sessionID)",
"def fusion_api_get_authorization_permission_actions(self, api=None, headers=None, category_action='', sessionID=None):\n param = '/authorizing-permissions%s' % category_action\n return self.auth.get(api=api, param=param, headers=headers, sessionID=sessionID)",
"def get_integrations_actions_categories(self, **kwargs):\n\n all_params = ['page_size', 'page_number', 'next_page', 'previous_page', 'sort_by', 'sort_order', 'secure']\n all_params.append('callback')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method get_integrations_actions_categories\" % key\n )\n params[key] = val\n del params['kwargs']\n\n\n\n resource_path = '/api/v2/integrations/actions/categories'.replace('{format}', 'json')\n path_params = {}\n\n query_params = {}\n if 'page_size' in params:\n query_params['pageSize'] = params['page_size']\n if 'page_number' in params:\n query_params['pageNumber'] = params['page_number']\n if 'next_page' in params:\n query_params['nextPage'] = params['next_page']\n if 'previous_page' in params:\n query_params['previousPage'] = params['previous_page']\n if 'sort_by' in params:\n query_params['sortBy'] = params['sort_by']\n if 'sort_order' in params:\n query_params['sortOrder'] = params['sort_order']\n if 'secure' in params:\n query_params['secure'] = params['secure']\n\n header_params = {}\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.\\\n select_header_accept(['application/json'])\n if not header_params['Accept']:\n del header_params['Accept']\n\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.\\\n select_header_content_type(['application/json'])\n\n # Authentication setting\n auth_settings = ['PureCloud OAuth']\n\n response = self.api_client.call_api(resource_path, 'GET',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='CategoryEntityListing',\n auth_settings=auth_settings,\n callback=params.get('callback'))\n return response",
"def test_custom_query_response_descriptor_octopus_server_web_api_actions_list_event_categories_responder(self):\n pass",
"def test_custom_query_response_descriptor_octopus_server_web_api_actions_list_event_categories_responder_spaces(self):\n pass",
"def categories():\n\tcategories = [\n\t\t'News',\n\t\t'Technology',\n\t\t'Music',\n\t\t'Sports'\n\t]\n\tresponse = { 'response': categories }\n\treturn jsonify(response)",
"def list_actions() -> None:\n colorama_init()\n max_action_name_len = max(len(name) for name in KNOWN_ACTIONS.keys())\n wrapper = textwrap.TextWrapper(\n width=80 - max_action_name_len - 3,\n subsequent_indent=' ' * (max_action_name_len + 3),\n )\n print(\n '{bright}{name:<{max_action_name_len}} -{normal} {doc}'.format(\n bright=Style.BRIGHT,\n name='name',\n max_action_name_len=max_action_name_len,\n normal=Style.NORMAL,\n doc='description [(argument: type, ...)]',\n )\n )\n print('-' * 80)\n for name, action in KNOWN_ACTIONS.items():\n wrapped_doc = wrapper.fill(' '.join(str(action.__doc__).split()))\n print(\n '{bright}{name:<{max_action_name_len}} -{normal} {doc}'.format(\n bright=Style.BRIGHT,\n name=name,\n max_action_name_len=max_action_name_len,\n normal=Style.NORMAL,\n doc=wrapped_doc,\n )\n )\n return None",
"def cluster_actions():\n request_debug(r, logger)\n action = request_get(r, \"action\")\n logger.info(\"cluster_op with action={}\".format(action))\n if action == \"apply\":\n return cluster_apply(r)\n elif action == \"release\":\n return cluster_release(r)\n elif action == \"start\":\n return cluster_start(r)\n elif action == \"stop\":\n return cluster_stop(r)\n elif action == \"restart\":\n return cluster_restart(r)\n else:\n return make_fail_response(\"Unknown action type\")",
"def list_categorias_cmd():\n return ListCategoriaCommand()",
"def actions(self, request, action_list, group):\n return action_list",
"def get_categories(self):\n _url = urljoin(self.base_url, self.API_CATEGORIES)\n return requests.get(_url)",
"def get_actions(\n namespace: Optional[str] = None,\n x_additional_headers: Optional[Dict[str, str]] = None,\n **kwargs\n):\n if namespace is None:\n namespace, error = get_services_namespace()\n if error:\n return None, error\n request = GetActions.create(\n namespace=namespace,\n )\n return run_request(request, additional_headers=x_additional_headers, **kwargs)",
"def get_list(cm_response, **data):\n return cm_response",
"def list(self, request):\n item_categories = ItemCategory.objects.all()\n\n serializer = ItemCategorySerializer(\n item_categories, many=True, context={'request': request})\n return Response(serializer.data)",
"def list(self):\n\n return list(\n filter(\n lambda x: x.get('type') != 'tagit', # pragma: no cover\n self._post(\n request=ApiActions.LIST.value,\n uri=ApiUri.ACTIONS.value,\n ).get('actions')\n )\n )",
"def category():\n kwargs = {k: parse(v) for k, v in request.args.to_dict().items()}\n return jsonify(objects=get_categories(**kwargs))",
"def getcategory(self):\n\n response = requests.get(\"https://fr.openfoodfacts.org/categories.json\")\n\n data = response.json()\n\n self.rawcategorydata = data",
"def get_all_categories_from_collection():\n api_endpoint = URL\n response = requests.get(api_endpoint)\n return response",
"def test_custom_query_response_descriptor_octopus_server_web_api_actions_list_defects_responder(self):\n pass",
"def get_categories():\n try:\n result = {\n \"success\": True,\n \"categories\": get_all_categories()\n }\n return jsonify(result)\n\n except Exception as exp:\n abort(exp.code)",
"def get_actions(self: object, *args, parameters: dict = None, **kwargs) -> dict:\n # [GET] https://assets.falcon.crowdstrike.com/support/api/swagger.html#/recon/GetActionsV1\n return process_service_request(\n calling_object=self,\n endpoints=Endpoints,\n operation_id=\"GetActionsV1\",\n keywords=kwargs,\n params=handle_single_argument(args, parameters, \"ids\")\n )",
"def list(self):\n return list(\n filter(\n lambda x: x.get('type') == 'tagit', # pragma: no cover\n self._post(\n request=ApiActions.LIST.value,\n uri=ApiUri.ACTIONS.value,\n ).get('actions')\n )\n )",
"def test_get_all_categories(self):\n login = self.autheniticate()\n token = json.loads(login.data.decode()).get('token')\n self.app.post(category_url,\n data=json.dumps(self.data),\n headers=dict(Authorization=\"Bearer \" + token),\n content_type='application/json')\n res = self.app.get(category_url,\n headers=dict(Authorization=\"Bearer \" + token))\n res1 = json.loads(res.data.decode())\n self.assertEqual(res1['status'], 'Success!')\n self.assertEqual(res.status_code, 200)",
"def api_categories():\n categories = session.query(Category)\n return jsonify(json_list=[i.to_json() for i in categories.all()])",
"async def actions(\n self,\n *,\n query_params: Optional[Dict[str, any]] = None,\n headers: Optional[Dict[str, str]] = None,\n ) -> AuditLogsResponse:\n return await self.api_call(\n path=\"actions\",\n query_params=query_params,\n headers=headers,\n )",
"def test_get_categories(self, mocker):\n mock = mocker.patch(\"requests_html.HTMLSession\")\n mock.return_value.get.return_value.html.find.return_value = iter(\n [\n mocker.Mock(text=\"Ammo\", attrs={\"href\": \"catalogue?cat=1\"}),\n mocker.Mock(text=\"Food\", attrs={\"href\": \"catalogue?cat=2\"}),\n mocker.Mock(text=\"Armour\", attrs={\"href\": \"catalogue?cat=3\"}),\n mocker.Mock(text=\"Weapons\", attrs={\"href\": \"catalogue?cat=4\"}),\n ]\n )\n\n result = resources.get_categories()\n assert list(result) == [\n (1, \"Ammo\"),\n (2, \"Food\"),\n (3, \"Armour\"),\n (4, \"Weapons\"),\n ]",
"def list(self, request):\n a_viewset = [\n 'Uses actions (list, create, retrieve, update, partial_update)',\n 'Automatically maps to URLs using Routers',\n 'Provides more functionality with less code',\n ]\n\n return Response({'message': 'Hello!', 'a_viewset': a_viewset})",
"def cli(ctx, category_id):\n return ctx.ti.categories.show_category(category_id)",
"def test_retrieve_notification_categories_list(client):\n create_user_response = create_user(client, TEST_USER_NAME, TEST_USER_PASS)\n assert create_user_response.status_code == HttpStatus.created_201.value\n\n new_notification_category_name_one = 'Error'\n post_response_one = create_notification_category(\n client,\n new_notification_category_name_one)\n assert post_response_one.status_code == HttpStatus.created_201.value\n\n new_notification_category_name_two = 'Warning'\n post_response_two = create_notification_category(\n client,\n new_notification_category_name_two)\n assert post_response_two.status_code == HttpStatus.created_201.value\n\n url = url_for('service.notificationcategorylistresource', _external=True)\n get_response = client.get(\n url,\n headers=get_authentication_headers(TEST_USER_NAME, TEST_USER_PASS))\n assert get_response.status_code == HttpStatus.ok_200.value\n\n url = url_for('service.notificationcategorylistresource', _external=True)\n get_response = client.get(\n url,\n headers=get_authentication_headers(TEST_USER_NAME, TEST_USER_PASS))\n get_response_data = json.loads(get_response.get_data(as_text=True))\n assert len(get_response_data) == 2\n assert get_response_data[0]['name'] == new_notification_category_name_one\n assert get_response_data[1]['name'] == new_notification_category_name_two",
"async def get_categories_for_filter_menu(language: str):\n try:\n category_filter_query_result = get_db().AQLQuery(\n query=menu_queries.QUERY_CATEGORIES_FOR_LANGUAGE,\n batchSize=500,\n bindVars={\"language\": language},\n )\n return {\"categoryitems\": category_filter_query_result.result}\n\n except DocumentNotFoundError as error:\n print(error)\n raise HTTPException(status_code=404, detail=\"Item not found\") from error\n except AQLQueryError as error:\n print(\"AQLQueryError: \", error)\n raise HTTPException(status_code=400, detail=error.errors) from error\n except KeyError as error:\n print(\"KeyError: \", error)\n raise HTTPException(status_code=400) from error"
] | [
"0.72943723",
"0.70284075",
"0.6173326",
"0.6108947",
"0.5853997",
"0.5831378",
"0.58130175",
"0.5803924",
"0.5767478",
"0.5740511",
"0.56732655",
"0.565956",
"0.5625802",
"0.5619425",
"0.55742353",
"0.557309",
"0.5536828",
"0.55304563",
"0.546844",
"0.5437171",
"0.5403862",
"0.5382791",
"0.53812975",
"0.5374467",
"0.5354165",
"0.5349726",
"0.5344651",
"0.5343563",
"0.53430575",
"0.5324299"
] | 0.77807516 | 0 |
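The fusion_api_* authorization keywords in the records above all delegate to a shared auth client that appends a path suffix to an authorization endpoint and forwards the session token. A minimal sketch of that delegation, assuming details the snippets do not show (FusionAuthClient, the Auth/X-Api-Version header names, and the /rest/authz prefix are all hypothetical):

import requests

class FusionAuthClient:
    """Hypothetical stand-in for the self.auth helper used by the keywords."""

    def __init__(self, base_url, session_id):
        self.base_url = base_url
        self.session_id = session_id

    def get(self, api=None, param='', headers=None, sessionID=None):
        # Merge caller headers over assumed defaults; the header names and
        # default API version are guesses, not taken from the snippet.
        hdrs = {'X-Api-Version': str(api) if api else '1200',
                'Auth': sessionID or self.session_id}
        if headers:
            hdrs.update(headers)
        # '/rest/authz' is an assumed prefix; only suffixes such as
        # '/category-actions' appear in the snippets above.
        return requests.get(self.base_url + '/rest/authz' + param, headers=hdrs)

client = FusionAuthClient('https://appliance.example.com', '<session token>')
resp = client.get(param='/category-actions')  # what the keyword boils down to
print(resp.status_code)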
Retrieves a list of all roles and their associated categories and actions. [Arguments] [Example] ${resp} = Fusion Api Get Authorization Role Category Actions | | | | def fusion_api_get_authorization_role_category_actions(self, api=None, headers=None, sessionID=None):
param = '/role-category-actions'
return self.auth.get(api=api, param=param, headers=headers, sessionID=sessionID) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def fusion_api_get_authorization_category_actions(self, api=None, headers=None, resource_uri='', sessionID=None,):\n param = '/category-actions%s' % resource_uri\n return self.auth.get(api=api, param=param, headers=headers, sessionID=sessionID)",
"async def command_rolecall(self, context):\n print(self._fetch_category_roles(context))\n print(self._fetch_category_roles(context, COSMETIC_CATEGORY_NAME))",
"def fusion_api_get_authorization_permission_actions(self, api=None, headers=None, category_action='', sessionID=None):\n param = '/authorizing-permissions%s' % category_action\n return self.auth.get(api=api, param=param, headers=headers, sessionID=sessionID)",
"def get_roles(self) -> requests.models.Response:\n return self.get('v1/roles')",
"def get_roles(self):\n path = \"%s/services/impala/roles\" % self.__base_path\n response = self.__session.get(path)\n self.__check_status_code(response.status_code)\n return response.json()",
"def roles(self):\n params = {\n \"f\" : \"json\"\n }\n uURL = self._url + \"/roles\"\n return self._con.get(path=uURL, params=params)",
"def get_roles():\n\n # Get instance of RolesOperations Class\n roles_operations = RolesOperations()\n\n # Call get_roles method\n response = roles_operations.get_roles()\n\n if response is not None:\n\n # Get the status code from response\n print('Status Code: ' + str(response.get_status_code()))\n\n if response.get_status_code() in [204, 304]:\n print('No Content' if response.get_status_code() == 204 else 'Not Modified')\n return\n\n # Get object from response\n response_object = response.get_object()\n\n if response_object is not None:\n\n # Check if expected ResponseWrapper instance is received.\n if isinstance(response_object, ResponseWrapper):\n\n # Get the list of obtained Role instances\n roles_list = response_object.get_roles()\n\n for role in roles_list:\n # Get the DisplayLabel of each Role\n print(\"Role DisplayLabel: \" + str(role.get_display_label()))\n\n # Get the forecastManager User instance of each Role\n forecast_manager = role.get_forecast_manager()\n\n # Check if forecastManager is not None\n if forecast_manager is not None:\n\n # Get the ID of the forecast Manager\n print(\"Role Forecast Manager User-ID: \" + str(forecast_manager.get_id()))\n\n # Get the name of the forecast Manager\n print(\"Role Forecast Manager User-Name: \" + str(forecast_manager.get_name()))\n\n # Get the ShareWithPeers of each Role\n print(\"Role ShareWithPeers: \" + str(role.get_share_with_peers()))\n\n # Get the Name of each Role\n print(\"Role Name: \" + role.get_name())\n\n # Get the Description of each Role\n print(\"Role Description: \" + str(role.get_description()))\n\n # Get the Id of each Role\n print(\"Role ID: \" + str(role.get_id()))\n\n # Get the reporting_to User instance of each Role\n reporting_to = role.get_reporting_to()\n\n # Check if reporting_to is not None\n if reporting_to is not None:\n # Get the ID of the reporting_to User\n print(\"Role ReportingTo User-ID: \" + str(reporting_to.get_id()))\n\n # Get the name of the reporting_to User\n print(\"Role ReportingTo User-Name: \" + str(reporting_to.get_name()))\n\n # Get the AdminUser of each Role\n print(\"Role AdminUser: \" + str(role.get_admin_user()))\n\n # Check if the request returned an exception\n elif isinstance(response_object, APIException):\n # Get the Status\n print(\"Status: \" + response_object.get_status().get_value())\n\n # Get the Code\n print(\"Code: \" + response_object.get_code().get_value())\n\n print(\"Details\")\n\n # Get the details dict\n details = response_object.get_details()\n\n for key, value in details.items():\n print(key + ' : ' + str(value))\n\n # Get the Message\n print(\"Message: \" + response_object.get_message().get_value())",
"def list_roles():\n\tsession = get_session()\n\tresponse = session.get(\"{url}/api/roles\".format(url=get_registry_url()))\n\treturn response.json()[\"results\"]",
"def _fetch_category_roles(self, context, category_target=GROUP_CATEGORY_NAME):\n try:\n # ask for a specific category\n roles_list = context.guild.roles # preload roles list\n # find the target category's role\n category_role = get(roles_list, name=category_target)\n # preload the position of the category\n target_category_position = category_role.position\n\n category_role_list = []\n\n for i in range(target_category_position - 1, 0, -1):\n if roles_list[i].name.startswith('-') or roles_list[i].name is None:\n break\n else:\n category_role_list.append(roles_list[i])\n\n return category_role_list\n except Exception as error:\n print(f\"Errored when fetching roles in {category_target}\\n{error}\")",
"def list_role(self, **kwargs):\n\n all_params = ['pretty', 'label_selector', 'field_selector', 'watch', 'resource_version', 'timeout_seconds']\n all_params.append('callback')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method list_role\" % key\n )\n params[key] = val\n del params['kwargs']\n\n\n resource_path = '/oapi/v1/roles'.replace('{format}', 'json')\n path_params = {}\n\n query_params = {}\n if 'pretty' in params:\n query_params['pretty'] = params['pretty']\n if 'label_selector' in params:\n query_params['labelSelector'] = params['label_selector']\n if 'field_selector' in params:\n query_params['fieldSelector'] = params['field_selector']\n if 'watch' in params:\n query_params['watch'] = params['watch']\n if 'resource_version' in params:\n query_params['resourceVersion'] = params['resource_version']\n if 'timeout_seconds' in params:\n query_params['timeoutSeconds'] = params['timeout_seconds']\n\n header_params = {}\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.\\\n select_header_accept(['application/json', 'application/yaml'])\n if not header_params['Accept']:\n del header_params['Accept']\n\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.\\\n select_header_content_type(['*/*'])\n\n # Authentication setting\n auth_settings = []\n\n response = self.api_client.call_api(resource_path, 'GET',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='V1RoleList',\n auth_settings=auth_settings,\n callback=params.get('callback'))\n return response",
"def getRoles():\n return jsonify(listRoles(ROLES_DIR))",
"def main_role_list(\n client: CitusCloudMgmt,\n **opts: tp.Any\n) -> None:\n\n roles = client.list_roles(opts[\"formation\"])\n click.echo(\n tabulate.tabulate(\n [{\"Name\": i.name, \"Id\": i.id_} for i in roles],\n headers=\"keys\",\n ),\n )",
"def fusion_api_get_roles(self, uri=None, param='', api=None, headers=None):\n return self.roles.get(uri=uri, api=api, headers=headers, param=param)",
"def list(self, **kwargs):\n params = {}\n url = '/openstack/roles?%(params)s' % {\n 'params': parse.urlencode(params, True)\n }\n return self._list(url, 'roles')",
"def list_roles(self):\n resp, body = self.get(\"roles\")\n self.expected_success(200, resp.status)\n body = json.loads(body)\n return service_client.ResponseBodyList(resp, body['roles'])",
"def getAllRoles(self):\n\n # Learn URL of AllRoles service\n url = self.config.get(\"Authorization\",\"allroles\") # http://erra.ccss.cz/g4i-portlet/service/list/roles/en\n logging.debug(\"[LaymanAuthLiferay][getAllRoles] AllRoles url: %s\"% url)\n \n # Request all roles from LifeRay\n import httplib2\n h = httplib2.Http()\n header, content = h.request(url, \"GET\")\n logging.debug(\"[LaymanAuthLiferay][getAllRoles] response header: %s\"% header)\n logging.debug(\"[LaymanAuthLiferay][getAllRoles] response content: %s\"% content)\n\n # Parse the response\n try:\n allRolesJson = json.loads(content)\n logging.debug(\"[LaymanAuthLiferay][getAllRoles] AllRoles reply succesfully parsed\")\n except ValueError,e:\n logging.error(\"[LaymanAuthLiferay][getAllRoles] Cannot parse AllRoles reply: '%s'\"% content)\n raise AuthError(500, \"Cannot parse GET All Roles response [%s] as JSON:%s\"% (content,e)) \n \n roles = allRolesJson[\"roles\"]\n\n # lower() and spaces\n for rr in roles:\n rr[\"roleName\"] = rr[\"roleName\"].lower()\n rr[\"roleName\"] = \"_\".join(rr[\"roleName\"].split(' '))\n\n # Return roles\n logging.debug(\"[LaymanAuthLiferay][getAllRoles] Return roles: %s\"% str(roles))\n return roles",
"def _get_roles(self):\n return api.tuskar.OvercloudRole.list(self.request)",
"def get_roles(filter: Optional[str] = None,\n opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetRolesResult:\n __args__ = dict()\n __args__['filter'] = filter\n opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)\n __ret__ = pulumi.runtime.invoke('datadog:index/getRoles:getRoles', __args__, opts=opts, typ=GetRolesResult).value\n\n return AwaitableGetRolesResult(\n filter=pulumi.get(__ret__, 'filter'),\n id=pulumi.get(__ret__, 'id'),\n roles=pulumi.get(__ret__, 'roles'))",
"def test_custom_query_response_descriptor_octopus_server_web_api_actions_list_event_categories_responder(self):\n pass",
"def list_namespaced_role(self, namespace, **kwargs):\n\n all_params = ['namespace', 'pretty', 'label_selector', 'field_selector', 'watch', 'resource_version', 'timeout_seconds']\n all_params.append('callback')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method list_namespaced_role\" % key\n )\n params[key] = val\n del params['kwargs']\n\n # verify the required parameter 'namespace' is set\n if ('namespace' not in params) or (params['namespace'] is None):\n raise ValueError(\"Missing the required parameter `namespace` when calling `list_namespaced_role`\")\n\n resource_path = '/oapi/v1/namespaces/{namespace}/roles'.replace('{format}', 'json')\n path_params = {}\n if 'namespace' in params:\n path_params['namespace'] = params['namespace']\n\n query_params = {}\n if 'pretty' in params:\n query_params['pretty'] = params['pretty']\n if 'label_selector' in params:\n query_params['labelSelector'] = params['label_selector']\n if 'field_selector' in params:\n query_params['fieldSelector'] = params['field_selector']\n if 'watch' in params:\n query_params['watch'] = params['watch']\n if 'resource_version' in params:\n query_params['resourceVersion'] = params['resource_version']\n if 'timeout_seconds' in params:\n query_params['timeoutSeconds'] = params['timeout_seconds']\n\n header_params = {}\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.\\\n select_header_accept(['application/json', 'application/yaml'])\n if not header_params['Accept']:\n del header_params['Accept']\n\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.\\\n select_header_content_type(['*/*'])\n\n # Authentication setting\n auth_settings = []\n\n response = self.api_client.call_api(resource_path, 'GET',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='V1RoleList',\n auth_settings=auth_settings,\n callback=params.get('callback'))\n return response",
"def list(self):\n return self.client.find_all_roles()",
"def getRoles(self):",
"def getRoles(context):\n\n pmemb = getToolByName(getSite(), 'portal_membership')\n roles = [role for role in pmemb.getPortalRoles() if role != 'Owner']\n return SimpleVocabulary.fromValues(roles)",
"def get_roles_output(filter: Optional[pulumi.Input[Optional[str]]] = None,\n opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetRolesResult]:\n ...",
"def test_custom_query_response_descriptor_octopus_server_web_api_actions_list_event_categories_responder_spaces(self):\n pass",
"def list_namespaced_cluster_role(self, **kwargs):\n\n all_params = ['pretty', 'label_selector', 'field_selector', 'watch', 'resource_version', 'timeout_seconds']\n all_params.append('callback')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method list_namespaced_cluster_role\" % key\n )\n params[key] = val\n del params['kwargs']\n\n\n resource_path = '/oapi/v1/clusterroles'.replace('{format}', 'json')\n path_params = {}\n\n query_params = {}\n if 'pretty' in params:\n query_params['pretty'] = params['pretty']\n if 'label_selector' in params:\n query_params['labelSelector'] = params['label_selector']\n if 'field_selector' in params:\n query_params['fieldSelector'] = params['field_selector']\n if 'watch' in params:\n query_params['watch'] = params['watch']\n if 'resource_version' in params:\n query_params['resourceVersion'] = params['resource_version']\n if 'timeout_seconds' in params:\n query_params['timeoutSeconds'] = params['timeout_seconds']\n\n header_params = {}\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.\\\n select_header_accept(['application/json', 'application/yaml'])\n if not header_params['Accept']:\n del header_params['Accept']\n\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.\\\n select_header_content_type(['*/*'])\n\n # Authentication setting\n auth_settings = []\n\n response = self.api_client.call_api(resource_path, 'GET',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='V1ClusterRoleList',\n auth_settings=auth_settings,\n callback=params.get('callback'))\n return response",
"def collection_get(request):\n\n # Our account parameter\n account = request.matchdict['id_account']\n\n # Our admin object\n admin = _get_admin(request)\n\n # Check if the account exists\n if account not in admin.list_accounts():\n request.response.status_int = 404\n return\n\n # Get the roles\n list_roles = admin.list_roles(account)\n\n # Return appropriately\n request.response.status_int = 200\n return {\n 'roles':\n list_roles\n }",
"def get_integrations_actions_categories(self, **kwargs):\n\n all_params = ['page_size', 'page_number', 'next_page', 'previous_page', 'sort_by', 'sort_order', 'secure']\n all_params.append('callback')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method get_integrations_actions_categories\" % key\n )\n params[key] = val\n del params['kwargs']\n\n\n\n resource_path = '/api/v2/integrations/actions/categories'.replace('{format}', 'json')\n path_params = {}\n\n query_params = {}\n if 'page_size' in params:\n query_params['pageSize'] = params['page_size']\n if 'page_number' in params:\n query_params['pageNumber'] = params['page_number']\n if 'next_page' in params:\n query_params['nextPage'] = params['next_page']\n if 'previous_page' in params:\n query_params['previousPage'] = params['previous_page']\n if 'sort_by' in params:\n query_params['sortBy'] = params['sort_by']\n if 'sort_order' in params:\n query_params['sortOrder'] = params['sort_order']\n if 'secure' in params:\n query_params['secure'] = params['secure']\n\n header_params = {}\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.\\\n select_header_accept(['application/json'])\n if not header_params['Accept']:\n del header_params['Accept']\n\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.\\\n select_header_content_type(['application/json'])\n\n # Authentication setting\n auth_settings = ['PureCloud OAuth']\n\n response = self.api_client.call_api(resource_path, 'GET',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='CategoryEntityListing',\n auth_settings=auth_settings,\n callback=params.get('callback'))\n return response",
"def get_roles():\n check_admin()\n roles = Role.query.all()\n\n return render_template('admin/roles/roles.html', roles=roles, title=\"Roles\")",
"def list(self, filter, *args, timeout=None):\n req = RoleListRequest()\n req.meta.CopyFrom(ListRequestMetadata())\n page_size_option = self.parent._test_options.get('PageSize')\n if isinstance(page_size_option, int):\n req.meta.limit = page_size_option\n\n req.filter = plumbing.quote_filter_args(filter, *args)\n\n def generator(svc, req):\n tries = 0\n while True:\n try:\n plumbing_response = svc.stub.List(\n req,\n metadata=svc.parent.get_metadata('Roles.List', req),\n timeout=timeout)\n except Exception as e:\n if self.parent.shouldRetry(tries, e):\n tries += 1\n self.parent.jitterSleep(tries)\n continue\n raise plumbing.convert_error_to_porcelain(e) from e\n tries = 0\n for plumbing_item in plumbing_response.roles:\n yield plumbing.convert_role_to_porcelain(plumbing_item)\n if plumbing_response.meta.next_cursor == '':\n break\n req.meta.cursor = plumbing_response.meta.next_cursor\n\n return generator(self, req)"
] | [
"0.68973285",
"0.6569231",
"0.62580717",
"0.6199698",
"0.6108623",
"0.6103739",
"0.61035544",
"0.60652286",
"0.605099",
"0.598602",
"0.59292746",
"0.58605164",
"0.58121884",
"0.57386696",
"0.57254803",
"0.5724486",
"0.5723421",
"0.56763756",
"0.56353784",
"0.558605",
"0.556616",
"0.55502194",
"0.55212766",
"0.5497684",
"0.5486043",
"0.54157096",
"0.5410708",
"0.53687096",
"0.5336609",
"0.5318783"
] | 0.7601334 | 0 |
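Assuming the role-category-actions endpoint returns a standard collection payload with a members list (the field names below are assumptions for illustration; the snippet does not show the response shape), grouping the result by role might look like this:

from collections import defaultdict

def group_by_role(payload):
    """Group a role-category-actions payload by role name.

    The payload shape (a 'members' list carrying 'roleName', 'category'
    and 'action' fields) is assumed, not taken from the snippet above.
    """
    grouped = defaultdict(list)
    for member in payload.get('members', []):
        grouped[member.get('roleName', 'unknown')].append(
            (member.get('category'), member.get('action')))
    return dict(grouped)

sample = {'members': [
    {'roleName': 'Infrastructure administrator',
     'category': 'server-hardware', 'action': 'Read'},
]}
print(group_by_role(sample))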
Lists the user session permissions that would authorize a particular action and resource category. [Arguments] [Example] ${resp} = Fusion Api Get Authorization Permission Actions | | | | | def fusion_api_get_authorization_permission_actions(self, api=None, headers=None, category_action='', sessionID=None):
param = '/authorizing-permissions%s' % category_action
return self.auth.get(api=api, param=param, headers=headers, sessionID=sessionID) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def fusion_api_get_authorization_category_actions(self, api=None, headers=None, resource_uri='', sessionID=None,):\n param = '/category-actions%s' % resource_uri\n return self.auth.get(api=api, param=param, headers=headers, sessionID=sessionID)",
"def fusion_api_get_authorization_role_category_actions(self, api=None, headers=None, sessionID=None):\n param = '/role-category-actions'\n return self.auth.get(api=api, param=param, headers=headers, sessionID=sessionID)",
"def permission_list(**kwargs):\n print(AppPermissionSchema(many=True).dumps(\n get_protected_routes(ignored_methods=[\"HEAD\", \"OPTIONS\"]), indent=4))",
"def permissions(self) -> 'outputs.PermissionsResponse':\n return pulumi.get(self, \"permissions\")",
"def getPermission(self, session, category, action, path):\n path = path.decode('utf-8')\n\n try:\n operation = getOperation(category, action)\n except KeyError as error:\n session.log.exception(error)\n error = TBadRequest(\n 'Action %r not possible on category %r.' % (action, category))\n return defer.fail(error)\n\n def run():\n permissions = SecurePermissionAPI(session.auth.user)\n try:\n result = permissions.get([(path, operation)])\n except UnknownPathError as error:\n session.log.exception(error)\n unknownPath = error.paths[0]\n if operation in Operation.TAG_OPERATIONS:\n raise TNonexistentTag(unknownPath.encode('utf-8'))\n if operation in Operation.NAMESPACE_OPERATIONS:\n raise TNonexistentNamespace(unknownPath.encode('utf-8'))\n raise\n except PermissionDeniedError as error:\n session.log.exception(error)\n deniedPath, deniedOperation = error.pathsAndOperations[0]\n deniedCategory, deniedAction = getCategoryAndAction(\n deniedOperation)\n raise TPathPermissionDenied(deniedPath, deniedCategory,\n deniedAction)\n\n policy, exceptions = result[(path, operation)]\n policy = str(policy).lower()\n return TPolicyAndExceptions(policy=policy, exceptions=exceptions)\n\n return session.transact.run(run)",
"def get_permissions(self):\n if self.action == 'list':\n permission_classes = [IsAuthenticated]\n else:\n permission_classes = [IsAdminUser]\n return [permission() for permission in permission_classes]",
"def get_permissions(self):\n if self.action in ['list', 'retrieve']:\n permission_classes = [IsAuthenticated]\n else:\n permission_classes = [IsAdminUser]\n return [permission() for permission in permission_classes]",
"def get_permissions(self):\n if self.action in ['create', 'retrieve', 'react', 'reactions']:\n permissions = [IsAuthenticated, IsFriendPostOwner]\n elif self.action in ['update', 'partial_update']:\n permissions = [IsAuthenticated, IsCommentOwner]\n elif self.action in ['destroy']:\n permissions = [IsAuthenticated, IsCommentOrPostOwner]\n else:\n permissions = [IsAuthenticated]\n return[p() for p in permissions]",
"def get_all_permissions(self):\n\t\turl = f'{self.root.url}/api/v1/sessions/permissions'\n\t\treturn self.root.r('GET', url, body=None, headers=None, verify=self.root.verify)",
"def get_permissions(self):\n if self.action == 'list':\n permission_classes = [AdminPermission.__or__(ReviewerPermission)]\n elif self.action == 'retrieve':\n permission_classes = [\n AdminPermission.__or__(\n ReviewerPermission.__or__(UserPermission)\n )\n ]\n elif self.action in ['update', 'partial_update']:\n permission_classes = [AdminPermission.__or__(UserPermission)]\n else:\n permission_classes = [AdminPermission]\n return [permission() for permission in permission_classes]",
"def get_permissions(self):\n if self.action == 'list':\n permission_classes = [AdminPermission.__or__(ReviewerPermission)]\n elif self.action == 'retrieve':\n permission_classes = [\n AdminPermission.__or__(\n ReviewerPermission.__or__(UserPermission)\n )\n ]\n elif self.action in ['update', 'partial_update']:\n permission_classes = [AdminPermission.__or__(UserPermission)]\n else:\n permission_classes = [AdminPermission]\n return [permission() for permission in permission_classes]",
"def octopus_permissions_get(self, msg, args):\r\n return self.permissions.get_permissions()",
"def fusion_api_list_permission_scopes(self, api=None, headers=None, resource_uri='', sessionID=None):\n param = '/association-scopes%s' % resource_uri\n return self.auth.get(api=api, param=param, headers=headers, sessionID=sessionID)",
"async def fetch_permissions(self, condensed=False):\n\n logging.debug(\"Getting permissions (%scondensed)\" % (\n \"\" if condensed else \"not \"))\n\n if condensed:\n perms = await self.client.request.get(\n \"/auth/permissions\", params={\"condensed\": True})\n return perms[\"data\"]\n else:\n perms = await self.client.request.get(\"/auth/permissions\")\n return [BasePermission.build_permission(\n self.client, perm, self.loop) for perm in perms[\"data\"]]",
"def test_permission_list_ok(self):\n test_name = sys._getframe().f_code.co_name\n rv, output = self._execute('permission list')\n self.assertEqual(0, rv)\n self.assertEqual(self.expected_results[test_name], output)",
"async def permissions(self, ctx):\r\n perms = [p.replace(\"_\", \" \") for p in PERMS]\r\n embed = discord.Embed(title=\"Permissions that can be passed to Targeter\")\r\n embed.description = humanize_list(perms)\r\n await ctx.send(embed=embed)",
"async def _p_list(self, ctx):\n result = self.database.get_perm_rules(ctx.guild.id)\n if len(result) == 0:\n await ctx.send(\"No permissions set for this guild.\")\n return\n guild_perms = {}\n for perm in result:\n if guild_perms.get(perm.command, None) is None:\n guild_perms[perm.command] = {}\n if guild_perms.get(perm.command).get(perm.perm_type, None) is None:\n guild_perms[perm.command][perm.perm_type] = []\n guild_perms[perm.command][perm.perm_type].append([perm.target, perm.priority, perm.allow])\n\n out = \"```\"\n for command in guild_perms:\n out += f\"Command: {command}\\n\"\n for level in sorted(guild_perms[command], key=lambda a: self.LEVELS[a]):\n out += f\" Level: {level}\\n\"\n if level == \"guild\":\n out += f\" {guild_perms[command][level]}\\n\"\n else:\n for detail in guild_perms[command][level]:\n out += f\" {detail[1]}-{detail[0]}: {bool(detail[2])}\\n\"\n out += \"```\"\n await ctx.send(out)",
"def get_permissions(self):\n \n if self.action in ['signup', 'login', 'verify']:\n permissions =[AllowAny]\n # cualquiera que vaya a acceder a estas peticiones lo podra hacer\n # si la accion es de tipo retrieve se debe validar el permiso de acceso\n elif self.action in ['retrieve', 'update', 'partial_update']:\n permissions = [IsAuthenticated, IsAccountOwner]\n else:\n permissions = [IsAuthenticated]\n # si no hay ninguna opcion debe tener una sesion autenticada \n return [p() for p in permissions]",
"def get_permissions(self):\n if self.action == \"destroy\":\n permission_classes = [IsAuthenticated, IsAuthor]\n elif self.action in [\"list\", \"create\"]:\n permission_classes = [IsAuthenticated, IsContributorOrAuthor]\n else:\n permission_classes = [NotAllowed]\n\n return [permission() for permission in permission_classes]",
"def get_permissions(self):\n if self.action in ['signup', 'login', 'verify']:\n permissions = [AllowAny]\n elif self.action in ['retrieve', 'update', 'partial_update', 'destroy', 'u', 'p']:\n permissions = [IsAuthenticated, IsAccountOwner]\n else:\n permissions = [IsAuthenticated]\n return [p() for p in permissions]",
"def get_permissions(self):\n if self.action in ['signup', 'login']:\n permissions = [AllowAny]\n elif self.action in ['retrieve']:\n permissions = [IsAuthenticated, IsAccountOwner]\n else:\n permissions = [AllowAny]\n return [p() for p in permissions]",
"def get_permissions(self):\n if self.action in [\"list\"]:\n permission_classes = [permissions.UserOrPlaylistIsAuthenticated]\n elif self.action in [\"create\", \"set_display_name\", \"push_attendance\"]:\n permission_classes = [\n permissions.PlaylistIsAuthenticated\n | permissions.IsParamsVideoAdminThroughOrganization\n | permissions.BaseIsParamsVideoRoleThroughPlaylist\n ]\n elif self.action in [\n \"partial_update\",\n \"retrieve\",\n ]:\n permission_classes = [\n permissions.IsTokenPlaylistRouteObjectRelatedVideo\n | permissions.IsParamsVideoAdminThroughOrganization\n | permissions.BaseIsParamsVideoRoleThroughPlaylist\n ]\n elif self.action in [\"list_attendances\"]:\n permission_classes = [\n permissions.IsTokenInstructor\n | permissions.IsTokenAdmin\n # With standalone site, admin can access\n | permissions.IsParamsVideoAdminThroughOrganization\n | permissions.IsParamsVideoAdminOrInstructorThroughPlaylist\n ]\n elif self.action is None:\n if self.request.method not in self.allowed_methods:\n raise MethodNotAllowed(self.request.method)\n permission_classes = self.permission_classes\n else:\n # When here it means we forgot to define a permission for a new action\n # We enforce the permission definition in this method to have a clearer view\n raise NotImplementedError(f\"Action '{self.action}' is not implemented.\")\n return [permission() for permission in permission_classes]",
"def get_permissions(self):\n from rest_framework.permissions import IsAuthenticated, IsAdminUser\n if self.action =='retrieve' or self.action == 'update':\n permission_classes = [IsAuthenticated]\n else:\n permission_classes = [IsAdminUser]\n return [permission() for permission in permission_classes]",
"def get_permissions(self):\n if self.action in ['retrieve', 'list']:\n self.permission_classes = [permissions.ViewUserPermission,]\n elif self.action in ['update', 'partial_update']:\n self.permission_classes = [permissions.UpdateUserPermission]\n elif self.action in ['destroy']:\n self.permission_classes = [permissions.UpdateUserPermission]\n\n return [permission() for permission in self.permission_classes]",
"def getAllPerms(self,request):\n request.needAuthType(request.ADMIN)\n request.getAuthNameObj().canDo(\"CHANGE ADMIN PERMISSIONS\")\n all_perms_dic=perm_loader.getLoader().getAllPerms()\n if request.has_key(\"category\"):\n category=request[\"category\"]\n else:\n category=\"all\"\n all_perms_list=self.__getPermsListFromPerms(all_perms_dic,category)\n sorted=SortedList(all_perms_list)\n sorted.sortByPostText('[\"name\"]',0)\n return sorted.getList()",
"def get_permissions(self):\n if self.action == \"create\" or self.action == \"token\":\n permission_classes = [AllowAny]\n else:\n permission_classes = [IsAuthenticated] \n return [permission() for permission in permission_classes]",
"def get_permissions(self):\n if self.action == \"create\" or self.action == \"token\":\n permission_classes = [AllowAny]\n else:\n permission_classes = [IsAuthenticated] \n return [permission() for permission in permission_classes]",
"def get_permissions(self):\n if self.action == \"create\" or self.action == \"token\":\n permission_classes = [AllowAny]\n else:\n permission_classes = [IsAuthenticated] \n return [permission() for permission in permission_classes]",
"def list_permissions(self):\n # type: () -> List[Permission]\n headers = Headers({\"accept\": \"application/json\"})\n return self.connection.api_call(\n \"GET\", [\"resources\", self.id, \"permissions\"], model=Permission, headers=headers,\n )",
"def get_permissions(self):\n if self.action == 'list':\n permission_classes = [IsAuthenticatedOrReadOnly]\n if self.action == 'create':\n permission_classes = [AllowAny]\n else:\n permission_classes = [IsAdminUser | IsAuthenticated| IsAdminOrIsSelf]\n return [permission() for permission in permission_classes]"
] | [
"0.6482365",
"0.64557904",
"0.6069945",
"0.60467374",
"0.59383446",
"0.57589465",
"0.5753795",
"0.5751748",
"0.5732013",
"0.5685957",
"0.5685957",
"0.5683999",
"0.5656991",
"0.56460035",
"0.56306535",
"0.5603984",
"0.56025666",
"0.5601795",
"0.55961245",
"0.5596",
"0.55540574",
"0.55402535",
"0.55155087",
"0.5497693",
"0.549036",
"0.5478458",
"0.5478458",
"0.5478458",
"0.5478052",
"0.5469332"
] | 0.74333566 | 0 |
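The only logic inside the keyword itself is string concatenation onto the /authorizing-permissions path. A tiny mirror of that formatting, with checks; the query-string shape of category_action is an assumption, since the snippet does not document it:

def build_param(category_action=''):
    # Mirrors the formatting in fusion_api_get_authorization_permission_actions.
    return '/authorizing-permissions%s' % category_action

assert build_param() == '/authorizing-permissions'
# A query-string suffix is one plausible value (assumed, not shown above):
assert build_param('?category=server-hardware&action=Read') == (
    '/authorizing-permissions?category=server-hardware&action=Read')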
Lists the permission scopes. [Arguments] [Example] ${resp} = Fusion Api List Permission Scopes | | | | | def fusion_api_list_permission_scopes(self, api=None, headers=None, resource_uri='', sessionID=None):
param = '/association-scopes%s' % resource_uri
return self.auth.get(api=api, param=param, headers=headers, sessionID=sessionID) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def permissions(self) -> 'outputs.PermissionsResponse':\n return pulumi.get(self, \"permissions\")",
"def fusion_api_get_authorization_permission_actions(self, api=None, headers=None, category_action='', sessionID=None):\n param = '/authorizing-permissions%s' % category_action\n return self.auth.get(api=api, param=param, headers=headers, sessionID=sessionID)",
"def permission_list(**kwargs):\n print(AppPermissionSchema(many=True).dumps(\n get_protected_routes(ignored_methods=[\"HEAD\", \"OPTIONS\"]), indent=4))",
"async def permissions(self, ctx):\r\n perms = [p.replace(\"_\", \" \") for p in PERMS]\r\n embed = discord.Embed(title=\"Permissions that can be passed to Targeter\")\r\n embed.description = humanize_list(perms)\r\n await ctx.send(embed=embed)",
"def octopus_permissions_get(self, msg, args):\r\n return self.permissions.get_permissions()",
"async def _p_list(self, ctx):\n result = self.database.get_perm_rules(ctx.guild.id)\n if len(result) == 0:\n await ctx.send(\"No permissions set for this guild.\")\n return\n guild_perms = {}\n for perm in result:\n if guild_perms.get(perm.command, None) is None:\n guild_perms[perm.command] = {}\n if guild_perms.get(perm.command).get(perm.perm_type, None) is None:\n guild_perms[perm.command][perm.perm_type] = []\n guild_perms[perm.command][perm.perm_type].append([perm.target, perm.priority, perm.allow])\n\n out = \"```\"\n for command in guild_perms:\n out += f\"Command: {command}\\n\"\n for level in sorted(guild_perms[command], key=lambda a: self.LEVELS[a]):\n out += f\" Level: {level}\\n\"\n if level == \"guild\":\n out += f\" {guild_perms[command][level]}\\n\"\n else:\n for detail in guild_perms[command][level]:\n out += f\" {detail[1]}-{detail[0]}: {bool(detail[2])}\\n\"\n out += \"```\"\n await ctx.send(out)",
"def getPermissions(self, scope):\n\n return [permissions.api_enum_for_permission(p)\n for p in permissions.get_permissions(scope)]",
"def ListScopes(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')",
"def list_permissions(self):\n # type: () -> List[Permission]\n headers = Headers({\"accept\": \"application/json\"})\n return self.connection.api_call(\n \"GET\", [\"resources\", self.id, \"permissions\"], model=Permission, headers=headers,\n )",
"def test_permission_list_ok(self):\n test_name = sys._getframe().f_code.co_name\n rv, output = self._execute('permission list')\n self.assertEqual(0, rv)\n self.assertEqual(self.expected_results[test_name], output)",
"def permissions():\n pass",
"async def fetch_permissions(self, condensed=False):\n\n logging.debug(\"Getting permissions (%scondensed)\" % (\n \"\" if condensed else \"not \"))\n\n if condensed:\n perms = await self.client.request.get(\n \"/auth/permissions\", params={\"condensed\": True})\n return perms[\"data\"]\n else:\n perms = await self.client.request.get(\"/auth/permissions\")\n return [BasePermission.build_permission(\n self.client, perm, self.loop) for perm in perms[\"data\"]]",
"def scope_list(client, args):\n from ..util import print_query\n result = client.get_scope()\n if result.is_custom:\n print(\"Proxy is using a custom function to check scope\")\n return\n print_query(result.filter)",
"def oauth2_permission_scope_ids(self) -> pulumi.Output[Mapping[str, str]]:\n return pulumi.get(self, \"oauth2_permission_scope_ids\")",
"def permissions(self) -> pulumi.Output[Optional[Sequence[str]]]:\n return pulumi.get(self, \"permissions\")",
"def fusion_api_list_permission_scopes_auth_creation_resource(self, api=None, headers=None, resource_uri='', sessionID=None):\n param = '/associator-scopes%s' % resource_uri\n return self.auth.get(api=api, param=param, headers=headers, sessionID=sessionID)",
"async def permissions(self, ctx):\n await ctx.send_help(ctx.command)",
"def permissions(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"permissions\")",
"def get_permissions(self):\n\t\treturn call_sdk_function('PrlFsEntry_GetPermissions', self.handle)",
"def RequestedPermissions(self) -> _n_6_t_0:",
"def permissions(self) -> pulumi.Output[Optional[Sequence['outputs.DataSetResourcePermission']]]:\n return pulumi.get(self, \"permissions\")",
"def get_all_permissions(self, obj=None):",
"def get_permissions(self):\n if self.action in ['create', 'retrieve', 'react', 'reactions']:\n permissions = [IsAuthenticated, IsFriendPostOwner]\n elif self.action in ['update', 'partial_update']:\n permissions = [IsAuthenticated, IsCommentOwner]\n elif self.action in ['destroy']:\n permissions = [IsAuthenticated, IsCommentOrPostOwner]\n else:\n permissions = [IsAuthenticated]\n return[p() for p in permissions]",
"def PermissionSet(self) -> _n_6_t_0:",
"def permissions(self) -> str:\n return pulumi.get(self, \"permissions\")",
"def get_permissions(self):\n if self.action in ['signup', 'login']:\n permissions = [AllowAny]\n elif self.action in ['retrieve']:\n permissions = [IsAuthenticated, IsAccountOwner]\n else:\n permissions = [AllowAny]\n return [p() for p in permissions]",
"def get_permissions(self):\n if self.action in ['signup', 'login', 'verify']:\n permissions = [AllowAny]\n elif self.action in ['retrieve', 'update', 'partial_update', 'destroy', 'u', 'p']:\n permissions = [IsAuthenticated, IsAccountOwner]\n else:\n permissions = [IsAuthenticated]\n return [p() for p in permissions]",
"def get_permissions(self):\n if self.action in ['list', 'retrieve']:\n permission_classes = [IsAuthenticated]\n else:\n permission_classes = [IsAdminUser]\n return [permission() for permission in permission_classes]",
"def get_permissions(self, principal_id):",
"def GetResourceAclSample():\n client = CreateClient()\n for resource in client.GetResources(limit=5).entry:\n acl_feed = client.GetResourceAcl(resource)\n for acl in acl_feed.entry:\n print acl.role.value, acl.scope.type, acl.scope.value"
] | [
"0.65290165",
"0.6478964",
"0.637014",
"0.6203895",
"0.61429197",
"0.61306375",
"0.6116834",
"0.60866284",
"0.5969837",
"0.59249896",
"0.5917814",
"0.59140855",
"0.58482",
"0.58374697",
"0.5819706",
"0.5795554",
"0.57797724",
"0.57768506",
"0.5727272",
"0.5715865",
"0.5682023",
"0.5642739",
"0.56325185",
"0.5624997",
"0.5609554",
"0.5586324",
"0.55777335",
"0.553812",
"0.5520756",
"0.55090255"
] | 0.6607175 | 0 |
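Because the keyword simply forwards a computed param to self.auth.get, it can be unit-tested with a mock standing in for the real client. A pytest-style sketch; the stub class layout is assumed, and only the keyword body is copied from the snippet above:

from unittest.mock import MagicMock

class FusionApiStub:
    """Hypothetical container mimicking the class the keyword lives on."""

    def __init__(self):
        self.auth = MagicMock()

    def fusion_api_list_permission_scopes(self, api=None, headers=None,
                                          resource_uri='', sessionID=None):
        param = '/association-scopes%s' % resource_uri
        return self.auth.get(api=api, param=param, headers=headers,
                             sessionID=sessionID)

def test_param_forwarding():
    stub = FusionApiStub()
    stub.fusion_api_list_permission_scopes(resource_uri='/rest/scopes/123')
    # Verify the suffix was appended verbatim and all kwargs passed through.
    stub.auth.get.assert_called_once_with(
        api=None, param='/association-scopes/rest/scopes/123',
        headers=None, sessionID=None)

test_param_forwarding()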
Lists the permission scopes. [Arguments] [Example] ${resp} = Fusion Api List Permission Scopes Auth Creation Resource | | | | | def fusion_api_list_permission_scopes_auth_creation_resource(self, api=None, headers=None, resource_uri='', sessionID=None):
param = '/associator-scopes%s' % resource_uri
return self.auth.get(api=api, param=param, headers=headers, sessionID=sessionID) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def fusion_api_list_permission_scopes(self, api=None, headers=None, resource_uri='', sessionID=None):\n param = '/association-scopes%s' % resource_uri\n return self.auth.get(api=api, param=param, headers=headers, sessionID=sessionID)",
"def ListScopes(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')",
"def permissions(self) -> 'outputs.PermissionsResponse':\n return pulumi.get(self, \"permissions\")",
"def getPermissions(self, scope):\n\n return [permissions.api_enum_for_permission(p)\n for p in permissions.get_permissions(scope)]",
"def permission_list(**kwargs):\n print(AppPermissionSchema(many=True).dumps(\n get_protected_routes(ignored_methods=[\"HEAD\", \"OPTIONS\"]), indent=4))",
"def list_permissions(self):\n # type: () -> List[Permission]\n headers = Headers({\"accept\": \"application/json\"})\n return self.connection.api_call(\n \"GET\", [\"resources\", self.id, \"permissions\"], model=Permission, headers=headers,\n )",
"def scope_list(client, args):\n from ..util import print_query\n result = client.get_scope()\n if result.is_custom:\n print(\"Proxy is using a custom function to check scope\")\n return\n print_query(result.filter)",
"async def _p_list(self, ctx):\n result = self.database.get_perm_rules(ctx.guild.id)\n if len(result) == 0:\n await ctx.send(\"No permissions set for this guild.\")\n return\n guild_perms = {}\n for perm in result:\n if guild_perms.get(perm.command, None) is None:\n guild_perms[perm.command] = {}\n if guild_perms.get(perm.command).get(perm.perm_type, None) is None:\n guild_perms[perm.command][perm.perm_type] = []\n guild_perms[perm.command][perm.perm_type].append([perm.target, perm.priority, perm.allow])\n\n out = \"```\"\n for command in guild_perms:\n out += f\"Command: {command}\\n\"\n for level in sorted(guild_perms[command], key=lambda a: self.LEVELS[a]):\n out += f\" Level: {level}\\n\"\n if level == \"guild\":\n out += f\" {guild_perms[command][level]}\\n\"\n else:\n for detail in guild_perms[command][level]:\n out += f\" {detail[1]}-{detail[0]}: {bool(detail[2])}\\n\"\n out += \"```\"\n await ctx.send(out)",
"async def fetch_permissions(self, condensed=False):\n\n logging.debug(\"Getting permissions (%scondensed)\" % (\n \"\" if condensed else \"not \"))\n\n if condensed:\n perms = await self.client.request.get(\n \"/auth/permissions\", params={\"condensed\": True})\n return perms[\"data\"]\n else:\n perms = await self.client.request.get(\"/auth/permissions\")\n return [BasePermission.build_permission(\n self.client, perm, self.loop) for perm in perms[\"data\"]]",
"def oauth2_permission_scope_ids(self) -> pulumi.Output[Mapping[str, str]]:\n return pulumi.get(self, \"oauth2_permission_scope_ids\")",
"def get_permissions(self):\n\t\treturn call_sdk_function('PrlFsEntry_GetPermissions', self.handle)",
"def octopus_permissions_get(self, msg, args):\r\n return self.permissions.get_permissions()",
"async def permissions(self, ctx):\r\n perms = [p.replace(\"_\", \" \") for p in PERMS]\r\n embed = discord.Embed(title=\"Permissions that can be passed to Targeter\")\r\n embed.description = humanize_list(perms)\r\n await ctx.send(embed=embed)",
"def test_permission_list_ok(self):\n test_name = sys._getframe().f_code.co_name\n rv, output = self._execute('permission list')\n self.assertEqual(0, rv)\n self.assertEqual(self.expected_results[test_name], output)",
"def fusion_api_get_authorization_permission_actions(self, api=None, headers=None, category_action='', sessionID=None):\n param = '/authorizing-permissions%s' % category_action\n return self.auth.get(api=api, param=param, headers=headers, sessionID=sessionID)",
"def GetResourceAclSample():\n client = CreateClient()\n for resource in client.GetResources(limit=5).entry:\n acl_feed = client.GetResourceAcl(resource)\n for acl in acl_feed.entry:\n print acl.role.value, acl.scope.type, acl.scope.value",
"def permissions():\n pass",
"def permissions(self) -> pulumi.Output[Optional[Sequence[str]]]:\n return pulumi.get(self, \"permissions\")",
"def RequestedPermissions(self) -> _n_6_t_0:",
"def get_permissions(self, principal_id):",
"def permissions(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"permissions\")",
"def permissions(self) -> str:\n return pulumi.get(self, \"permissions\")",
"def get_all_permissions(self, obj=None):",
"def oauth2_permission_scope_ids(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:\n return pulumi.get(self, \"oauth2_permission_scope_ids\")",
"def get_permissions():\n return config.get_cfg_storage(ID_PERMISSION)",
"def get_all_permissions(self):\n\t\turl = f'{self.root.url}/api/v1/sessions/permissions'\n\t\treturn self.root.r('GET', url, body=None, headers=None, verify=self.root.verify)",
"def permissions(self) -> pulumi.Output[Optional[Sequence['outputs.DataSetResourcePermission']]]:\n return pulumi.get(self, \"permissions\")",
"def fusion_api_get_scope(self, uri=None, param='', api=None, headers=None):\n return self.scope.get(uri=uri, param=param, api=api, headers=headers)",
"def get_permissions(self):\n if self.action in ['create', 'retrieve', 'react', 'reactions']:\n permissions = [IsAuthenticated, IsFriendPostOwner]\n elif self.action in ['update', 'partial_update']:\n permissions = [IsAuthenticated, IsCommentOwner]\n elif self.action in ['destroy']:\n permissions = [IsAuthenticated, IsCommentOrPostOwner]\n else:\n permissions = [IsAuthenticated]\n return[p() for p in permissions]",
"def get_permissions(self):\n if self.action in ['list', 'retrieve']:\n permission_classes = [IsAuthenticated]\n else:\n permission_classes = [IsAdminUser]\n return [permission() for permission in permission_classes]"
] | [
"0.6913755",
"0.65023345",
"0.6389445",
"0.6303606",
"0.6164184",
"0.61104393",
"0.60109156",
"0.5992203",
"0.59574205",
"0.5911801",
"0.58344525",
"0.58288985",
"0.5827482",
"0.5806445",
"0.57354546",
"0.563093",
"0.56283593",
"0.55610836",
"0.55162734",
"0.5499117",
"0.54967636",
"0.54942983",
"0.5481318",
"0.54756224",
"0.5456424",
"0.5390617",
"0.53883827",
"0.53771824",
"0.53710544",
"0.5363243"
] | 0.6677038 | 1 |
Cancels an in-progress backup. The backup URI may be obtained from the task returned when starting a backup, or by listing the backups. [Arguments] | def fusion_api_cancel_backup(self, backup, api=None, headers=None):
return self.backup.cancel(backup=backup, api=api, headers=headers) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def delete_backup(BackupId=None):\n pass",
"def delete_backup(self, backup):\n aname = \"cinder_v%s.delete_backup\" % self.version\n with atomic.ActionTimer(self, aname):\n self._get_client().backups.delete(backup)\n bench_utils.wait_for_status(\n backup,\n ready_statuses=[\"deleted\"],\n check_deletion=True,\n update_resource=self._update_resource,\n timeout=CONF.openstack.cinder_volume_delete_timeout,\n check_interval=(CONF.openstack\n .cinder_volume_delete_poll_interval)\n )",
"def cancel_backup(self, group, name):\n\n shutil.rmtree(\n self.backup_path(group, name, temp = True),\n onerror = lambda func, path, excinfo:\n LOG.error(\"Failed to remove backup temporary data '%s': %s.\",\n path, psys.e(excinfo[1])))",
"def run_backup():\n\n from common.models import InvenTreeSetting\n\n if not InvenTreeSetting.get_setting('INVENTREE_BACKUP_ENABLE', False, cache=False):\n # Backups are not enabled - exit early\n return\n\n interval = int(InvenTreeSetting.get_setting('INVENTREE_BACKUP_DAYS', 1, cache=False))\n\n # Check if should run this task *today*\n if not check_daily_holdoff('run_backup', interval):\n return\n\n logger.info(\"Performing automated database backup task\")\n\n call_command(\"dbbackup\", noinput=True, clean=True, compress=True, interactive=False)\n call_command(\"mediabackup\", noinput=True, clean=True, compress=True, interactive=False)\n\n # Record that this task was successful\n record_task_success('run_backup')",
"def Abort(self, *args, **kwargs):\n # type: (*Any, **Any) -> None\n payload = {\"Arg1\": self}\n for i in range(len(args)):\n payload[\"Arg%s\" % (i + 2)] = args[i]\n for item in kwargs.items():\n payload[item[0]] = item[1]\n return self._execute(\"abort\", payload=payload, response_object=None)",
"def delete_backup(self, backup):\n self._impl.delete_backup(backup)",
"def Abort(self, *args, **kwargs):\n # type: (*Any, **Any) -> None\n payload = { \"Arg1\": self }\n for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i]\n for item in kwargs.items(): payload[item[0]] = item[1]\n return self._execute('abort', payload=payload, response_object=None)",
"def delete_backup(self, backup):\n return self._backup_manager.delete(backup)",
"def cli(ctx, job_id):\n return ctx.gi.jobs.cancel_job(job_id)",
"def RestoreFromBackup(self, request, global_params=None):\n config = self.GetMethodConfig('RestoreFromBackup')\n return self._RunMethod(\n config, request, global_params=global_params)",
"def Remove(ctx,\n backup_target_id):\n if ctx.element is None:\n ctx.logger.error(\"You must establish at least one connection and specify which you intend to use.\")\n exit()\n\n\n\n ctx.logger.info(\"\"\"backup_target_id = \"\"\"+str(backup_target_id)+\"\"\";\"\"\"+\"\")\n try:\n RemoveBackupTargetResult = ctx.element.remove_backup_target(backup_target_id=backup_target_id)\n except common.ApiServerError as e:\n ctx.logger.error(e.message)\n exit()\n except BaseException as e:\n ctx.logger.error(e.__str__())\n exit()\n\n cli_utils.print_result(RemoveBackupTargetResult, ctx.logger, as_json=ctx.json, depth=ctx.depth, filter_tree=ctx.filter_tree)",
"def cvmfsAbort(reponame = None):\n if reponame == None:\n reponame = _getRepoName()\n\n rc = subprocess.call([\"cvmfs_server\", \"abort\", \"-f\", reponame])\n if rc != 0:\n raise RuntimeError(\"Could not abort CVMFS transaction\")",
"def rollback_action(args, kwargs, was_interrupted, result=None):\n raise NotImplementedError()",
"def restore_backup(self, backup_id, volume_id=None):\n aname = \"cinder_v%s.restore_backup\" % self.version\n with atomic.ActionTimer(self, aname):\n restore = self._get_client().restores.restore(backup_id, volume_id)\n restored_volume = self._get_client().volumes.get(restore.volume_id)\n return self._wait_available_volume(restored_volume)",
"def restore(ctx, destination, filesystem, backup_time):\n config_path = ctx.obj['config_path']\n\n config = Config(config_path)\n job = config.jobs.get(filesystem)\n\n if job is None:\n print('Filesystem does not exist.')\n sys.exit(1)\n\n job.restore(backup_time, destination)\n\n print('Restore successful.')",
"def backup(context, user=get_local_user(), remote=False, instance=None, stack=None):\n command = \"run --rm postgres backup\"\n run_command(context, user, remote, instance, stack, command)",
"def Abort(self):\n handler = self.get_command_object(\"Abort\")\n handler()",
"def delete_backup(self, id):\n # don't execute if we are not on a live backup platform\n if not current_app.config.get('BACKUP_IS_LIVE', False):\n raise Ignore()\n\n try:\n success, msg = remove_backup(id)\n except SQLAlchemyError as e:\n db.session.rollback()\n current_app.logger.exception(\"SQLAlchemyError exception\", exc_info=e)\n raise self.retry()\n\n if not success:\n self.update_state(state='FAILED', meta={'msg': 'Delete failed: {msg}'.format(msg=msg)})\n else:\n self.update_state(state='SUCCESS', meta={'msg': 'Delete backup succeeded'})",
"def restore_backup(self):\n print \"Restoring backup for database: %s\" % self.database['NAME']\n # Fetch the latest backup if filepath not specified\n if not self.filepath:\n print \" Finding latest backup\"\n filepaths = self.storage.list_directory()\n filepaths = self.dbcommands.filter_filepaths(filepaths, self.servername)\n if not filepaths:\n raise CommandError(\"No backup files found in: %s\" % self.storage.backup_dir())\n self.filepath = filepaths[-1]\n # Restore the specified filepath backup\n print \" Restoring: %s\" % self.filepath\n backupfile = self.storage.read_file(self.filepath)\n print \" Restore tempfile created: %s\" % utils.handle_size(backupfile)\n self.dbcommands.run_restore_commands(backupfile)",
"def svn_fs_abort_txn(*args):\r\n return _fs.svn_fs_abort_txn(*args)",
"def restore(\n context, backup, user=get_local_user(), remote=False, instance=None, stack=None,\n):\n command = f\"exec postgres pkill -f {PROJECT}\"\n run_command(context, user, remote, instance, stack, command)\n\n command = f\"run --rm postgres restore {backup}\"\n run_command(context, user, remote, instance, stack, command)",
"def __removeBackup(self):\n pass #FIXME!!",
"def cancelDownload(self, _src):\n print(\"\\n\\nCancelling download of '%s'\"%(_src))\n\n #-------------------- \n # Pop from queue\n #--------------------\n self.removeFromDownloadQueue(_src) \n\n\n #-------------------- \n # Callbacks\n #-------------------- \n self.runEventCallbacks('downloadCancelled', _src) \n\n\n #-------------------- \n # Clear queue if there is nothing\n # left in it.\n #-------------------- \n if len(self.downloadQueue) == 0:\n self.clearDownloadQueue()",
"def scancel(self, arg):\n\n if isinstance(arg, (list, tuple)):\n for job_id in arg:\n self.scancel(job_id)\n\n elif str(arg).lower() == 'all':\n self._queue = None\n for job_id in self.queue_job_ids:\n self.scancel(job_id)\n\n elif isinstance(arg, (int, str)):\n cmd = ('scancel {}'.format(arg))\n cmd = shlex.split(cmd)\n subprocess.call(cmd)\n\n else:\n e = ('Could not cancel: {} with type {}'\n .format(arg, type(arg)))\n logger.error(e)\n raise ExecutionError(e)",
"def restore_backup(self, backup, name, flavor, volume):\n return self._manager.restore_backup(backup, name, flavor, volume)",
"def abort(self):\n\t\timport subprocess\n\t\treturn subprocess.Popen('%s %s' % (settings.QDEL_BIN, self.id), shell=True, stdout=subprocess.PIPE).stdout",
"def cancel(self):\n\n query = f\"scancel {self.jobid}\"\n if self.cluster:\n query = f\"scancel {self.jobid} --clusters={self.cluster}\"\n\n cmd = BuildTestCommand(query)\n cmd.execute()\n logger.debug(f\"Cancelling Job: {self.jobid} by running: {query}\")\n\n self.poll()\n self._state = \"CANCELLED\"",
"def cancel_bundle_task(self, bundle_id):\r\n\r\n params = {'BundleId' : bundle_id}\r\n return self.get_object('CancelBundleTask', params,\r\n BundleInstanceTask, verb='POST')",
"def _cancel(self):\n client = SBusClient(self.storlet_pipe_path)\n try:\n resp = client.cancel(self.task_id)\n if not resp.status:\n raise StorletRuntimeException('Failed to cancel task')\n except SBusClientException:\n raise StorletRuntimeException('Failed to cancel task')",
"def delete_archive_task(self, args=None):\r\n result = {\"Task\": \"DeleteArchiveTask\", \"Error\": \"NoError\", \"Status\": \"Deleted\", \"JobID\": args}\r\n\r\n with EndaceWebSession(app_url=self.applianceurl, username=self.username, password=self.password,\r\n cert_verify=self.cert_verify) as sess:\r\n api = EndaceVisionAPIAdapter(sess)\r\n path = \"files\"\r\n rd = api.get(path)\r\n if rd.status_code == 200:\r\n path = \"queries/\" + args\r\n dr = api.delete(path)\r\n if dr.status_code == 200:\r\n try:\r\n response = dr.json()\r\n except json.decoder.JSONDecodeError:\r\n raise Exception(f\"JsonDecodeError - path {path}\")\r\n else:\r\n meta = response.get('meta', {})\r\n if meta:\r\n meta_error = meta.get(\"error\")\r\n if meta_error is not None:\r\n if meta_error is not False:\r\n result['Status'] = \"complete\"\r\n result['Error'] = str(meta_error)\r\n else:\r\n result['Status'] = \"Failed\"\r\n result['Error'] = f\"ServerError - empty meta data from {path}\"\r\n else:\r\n result['Error'] = rd.status_code\r\n result['Error'] = f\"ServerError - HTTP {rd.status_code} to /{path}\"\r\n\r\n if result['Status'] == 'Failed':\r\n self.handle_error_notifications(result['Error'])\r\n return result"
] | [
"0.6074363",
"0.5915743",
"0.5699145",
"0.5403056",
"0.53551537",
"0.5338421",
"0.53317684",
"0.5293905",
"0.5208342",
"0.51806414",
"0.51683414",
"0.51478064",
"0.5142927",
"0.5110171",
"0.50827575",
"0.5061864",
"0.5055457",
"0.5054801",
"0.503485",
"0.5029968",
"0.5022354",
"0.50179607",
"0.49665925",
"0.49588588",
"0.4933396",
"0.4929987",
"0.4880356",
"0.48744214",
"0.48688626",
"0.48546338"
] | 0.6775087 | 0 |
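For readers without the Fusion client shown above, a minimal sketch of what cancelling an in-progress appliance backup might look like as a bare REST call, using `requests` in place of the library's internal `self.backup.cancel` helper. The appliance address, session headers, and the assumption that cancel maps to a DELETE on the backup URI are all illustrative, not taken from the code above.

import requests

APPLIANCE = "https://oneview.example.com"  # hypothetical appliance address
HEADERS = {"Auth": "<session-token>", "X-Api-Version": "1200"}  # assumed headers

def cancel_backup(backup_uri: str) -> int:
    # backup_uri comes from the task returned when the backup was started,
    # or from listing the backups; DELETE-as-cancel is an assumption here.
    resp = requests.delete(APPLIANCE + backup_uri, headers=HEADERS, verify=False)
    return resp.status_code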
Create a new appliance backup. Any existing backup on the appliance is removed. [Example] ${resp} = Fusion Api Create Backup | | | def fusion_api_create_backup(self, api=None, headers=None):
return self.backup.create(api=api, headers=headers) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def create_backup(\n self,\n request: dds_20151201_models.CreateBackupRequest,\n ) -> dds_20151201_models.CreateBackupResponse:\n runtime = util_models.RuntimeOptions()\n return self.create_backup_with_options(request, runtime)",
"def create_backup_with_options(\n self,\n request: dds_20151201_models.CreateBackupRequest,\n runtime: util_models.RuntimeOptions,\n ) -> dds_20151201_models.CreateBackupResponse:\n UtilClient.validate_model(request)\n query = {}\n if not UtilClient.is_unset(request.backup_method):\n query['BackupMethod'] = request.backup_method\n if not UtilClient.is_unset(request.dbinstance_id):\n query['DBInstanceId'] = request.dbinstance_id\n if not UtilClient.is_unset(request.owner_account):\n query['OwnerAccount'] = request.owner_account\n if not UtilClient.is_unset(request.owner_id):\n query['OwnerId'] = request.owner_id\n if not UtilClient.is_unset(request.resource_owner_account):\n query['ResourceOwnerAccount'] = request.resource_owner_account\n if not UtilClient.is_unset(request.resource_owner_id):\n query['ResourceOwnerId'] = request.resource_owner_id\n if not UtilClient.is_unset(request.security_token):\n query['SecurityToken'] = request.security_token\n req = open_api_models.OpenApiRequest(\n query=OpenApiUtilClient.query(query)\n )\n params = open_api_models.Params(\n action='CreateBackup',\n version='2015-12-01',\n protocol='HTTPS',\n pathname='/',\n method='POST',\n auth_type='AK',\n style='RPC',\n req_body_type='formData',\n body_type='json'\n )\n return TeaCore.from_map(\n dds_20151201_models.CreateBackupResponse(),\n self.call_api(params, req, runtime)\n )",
"async def create_backup_async(\n self,\n request: dds_20151201_models.CreateBackupRequest,\n ) -> dds_20151201_models.CreateBackupResponse:\n runtime = util_models.RuntimeOptions()\n return await self.create_backup_with_options_async(request, runtime)",
"def create_backup(ServerName=None, Description=None):\n pass",
"def create(self,\n label=None,\n allow_inconsistent=None,\n force=None,\n timeout=None):\n data = {'label': label}\n\n if allow_inconsistent is not None:\n data['allowInconsistent'] = allow_inconsistent\n if force is not None:\n data['force'] = force\n if timeout is not None:\n data['timeout'] = timeout\n\n request = Request(\n method='post',\n endpoint='/_admin/backup/create',\n data=data\n )\n\n def response_handler(resp):\n if resp.is_success:\n return format_backup(resp.body['result'])\n raise BackupCreateError(resp, request)\n\n return self._execute(request, response_handler)",
"async def create_backup_with_options_async(\n self,\n request: dds_20151201_models.CreateBackupRequest,\n runtime: util_models.RuntimeOptions,\n ) -> dds_20151201_models.CreateBackupResponse:\n UtilClient.validate_model(request)\n query = {}\n if not UtilClient.is_unset(request.backup_method):\n query['BackupMethod'] = request.backup_method\n if not UtilClient.is_unset(request.dbinstance_id):\n query['DBInstanceId'] = request.dbinstance_id\n if not UtilClient.is_unset(request.owner_account):\n query['OwnerAccount'] = request.owner_account\n if not UtilClient.is_unset(request.owner_id):\n query['OwnerId'] = request.owner_id\n if not UtilClient.is_unset(request.resource_owner_account):\n query['ResourceOwnerAccount'] = request.resource_owner_account\n if not UtilClient.is_unset(request.resource_owner_id):\n query['ResourceOwnerId'] = request.resource_owner_id\n if not UtilClient.is_unset(request.security_token):\n query['SecurityToken'] = request.security_token\n req = open_api_models.OpenApiRequest(\n query=OpenApiUtilClient.query(query)\n )\n params = open_api_models.Params(\n action='CreateBackup',\n version='2015-12-01',\n protocol='HTTPS',\n pathname='/',\n method='POST',\n auth_type='AK',\n style='RPC',\n req_body_type='formData',\n body_type='json'\n )\n return TeaCore.from_map(\n dds_20151201_models.CreateBackupResponse(),\n await self.call_api_async(params, req, runtime)\n )",
"def create_backup(self, instance, name, description=None):\n return instance.create_backup(name, description=description)",
"def _create_backup(cls, volume_id, backup_client=None, **kwargs):\n if backup_client is None:\n backup_client = cls.backups_client\n if 'name' not in kwargs:\n name = data_utils.rand_name(cls.__name__ + '-Backup')\n kwargs['name'] = name\n\n backup = backup_client.create_backup(\n volume_id=volume_id, **kwargs)['backup']\n cls.addClassResourceCleanup(\n test_utils.call_and_ignore_notfound_exc,\n backup_client.delete_backup, backup['id'])\n waiters.wait_for_volume_resource_status(backup_client, backup['id'],\n 'available')\n waiters.wait_for_volume_resource_status(cls.volumes_client, volume_id,\n 'available')\n return backup",
"def Create(ctx,\n name,\n attributes = None):\n if ctx.element is None:\n ctx.logger.error(\"You must establish at least one connection and specify which you intend to use.\")\n exit()\n\n\n if(attributes is not None):\n kwargsDict = simplejson.loads(attributes)\n attributes = dict(**kwargsDict)\n\n ctx.logger.info(\"\"\"name = \"\"\"+str(name)+\"\"\";\"\"\"+\"\"\"attributes = \"\"\"+str(attributes)+\"\"\";\"\"\"+\"\")\n try:\n CreateBackupTargetResult = ctx.element.create_backup_target(name=name, attributes=attributes)\n except common.ApiServerError as e:\n ctx.logger.error(e.message)\n exit()\n except BaseException as e:\n ctx.logger.error(e.__str__())\n exit()\n\n cli_utils.print_result(CreateBackupTargetResult, ctx.logger, as_json=ctx.json, depth=ctx.depth, filter_tree=ctx.filter_tree)",
"def backup(isamAppliance, check_mode=False, force=False):\n if check_mode is True:\n return isamAppliance.create_return_object(changed=True)\n else:\n return isamAppliance.invoke_put(\"Creating a backup of the active partition\",\n \"/firmware_settings/kickoff_backup\", {}, requires_model=requires_model)",
"def create_backup(self, name, description=None):\n return self.manager.create_backup(self, name, description=description)",
"def create_backup(self, async=True):\n\n async_result = __node__['bollard'].apply_async('api.postgresql.create-backup',\n soft_timeout=(1 * 24) * 3600,\n hard_timeout=(1 * 24 + 1) * 3600,\n callbacks={'task.pull': create_backup_callback})\n if async:\n return async_result.task_id\n else:\n return async_result.get()",
"def getBackup(self):\n\t\tquery = ''\n\t\tconn = self.get_connection()\n\t\theaders = { 'Content-type' : 'application/json', 'Authorization' : 'A10 %s' %self.sessionid}\n\t\tconn.request('GET', self.get_path() + '/' + query, headers=headers)\n\t\tresponse = conn.getresponse()\n\t\texpected_status = 200\n\t\terrors = {500: 'An unexpected runtime exception', 404: 'Specified backup does not exist'}\n\t\tpayload = self.get_output(response, expected_status, errors)\n\t\tconn.close()\n\t\tif self.debug:\n\t\t\tprint 'payload:', payload\n\t\tif payload == '':\n\t\t\tpayload = None\n\t\tif payload is not None:\n\t\t\tdata = json.loads(payload)\n\t\t\tpayload= data.get('backup')\n\t\treturn deserialize_Backup_json(payload)",
"def create_vm_backup(self, sVmUuid, sTargetHost, nTargetPort, sTargetSessionId, strDescription = '', backup_flags = consts.PBT_FULL, reserved_flags = 0, force_operation = True):\n\t\treturn Job(SDK.PrlSrv_CreateVmBackup(self.handle, sVmUuid, sTargetHost, nTargetPort, sTargetSessionId, strDescription, backup_flags, reserved_flags, force_operation)[0])",
"def create_backup(self, instance, name, description=None):\n body = {\"backup\": {\n \"instance\": utils.get_id(instance),\n \"name\": name,\n }}\n if description is not None:\n body[\"backup\"][\"description\"] = description\n uri = \"/backups\"\n resp, resp_body = self.api.method_post(uri, body=body)\n mgr = self.api._backup_manager\n return CloudDatabaseBackup(mgr, body.get(\"backup\"))",
"def create_backup(self, max_backups):\n\n name = time.strftime(_BACKUP_NAME_FORMAT, time.localtime())\n LOG.info(\"Creating a new backup '%s'.\", name)\n\n groups = self.__groups()\n\n if groups and len(self.backups(groups[-1], check = True)) < max_backups:\n group = groups[-1]\n LOG.info(\"Using backup group %s.\", group)\n else:\n group = self.__create_group()\n\n backup_path = self.backup_path(group, name, temp = True)\n\n try:\n os.mkdir(backup_path)\n except Exception as e:\n raise Error(\"Unable to create a backup directory '{}': {}.\",\n backup_path, psys.e(e))\n\n return group, name, backup_path",
"def backup_cloudformation_temlates(self, template, region='eu-west-2'):\n regions = {\n 'eu-west-1': 'euw1',\n 'eu-west-2': 'euw2'\n }\n backup_bucket = 'cft-bucket-{}'.format(regions[region])\n bucket_name = self.get_bucket_from_cloudformation_template(template)\n key = '{}/{}.json'.format(region, bucket_name)\n\n if template != {}:\n response = self.client.put_object(\n Body=json.dumps(template),\n Bucket=backup_bucket,\n Key=key\n )\n return response",
"def create_volume_backup(self, volume, name=None, description=None,\n container=None, check=True):\n cmd = 'cinder backup-create'\n if name:\n cmd += ' --name ' + name\n if description is not None:\n cmd += ' --description ' + moves.shlex_quote(description)\n if container:\n cmd += ' --container ' + container\n\n cmd += ' ' + volume.id\n\n exit_code, stdout, stderr = self.execute_command(\n cmd, timeout=config.BACKUP_AVAILABLE_TIMEOUT, check=check)\n\n backup_table = output_parser.table(stdout)\n backup = {key: value for key, value in backup_table['values']}\n\n return backup",
"def backup(account, save_folder=None, start_post = 0):\n\n\tprint \"Getting basic information ...\"\n\n\t# make sure there's a folder to save in\n\tif not os.path.exists(save_folder):\n\t\tos.mkdir(save_folder)\n\n\t# start by calling the API with just a single post\n\turl = \"http://\" + account + TUMBLR_URL + \"?num=1\"\n\tresponse = urllib2.urlopen(url)\n\tsoup = BeautifulSoup(response.read(), features=\"xml\")\n\n\t# collect all the meta information\n\ttumblelog = soup.find(\"tumblelog\")\n\ttitle = tumblelog[\"title\"]\n\tdescription = tumblelog.string\n\theader = title\n\n\t# then find the total number of posts\n\tposts_tag = soup.find(\"posts\")\n\ttotal_posts = int(posts_tag[\"total\"])\n\n\t# then get the XML files from the API, which we can only do with a max of 50 posts at once\n\tfor i in range(start_post, total_posts, 50):\n\t\t# find the upper bound\n\t\tj = i + 49\n\t\tif j > total_posts:\n\t\t\tj = total_posts\n\n\t\tprint \"Getting posts \" + str(i) + \" to \" + str(j) + \".\"\n\n\t\turl = \"http://\" + account + TUMBLR_URL + \"?num=50&start=\" + str(i)\n\t\tresponse = urllib2.urlopen(url)\n\t\tsoup = BeautifulSoup(response.read(), features=\"xml\")\n\n\t\tposts = soup.findAll(\"post\")\n\t\tfor post in posts:\n\t\t\tsavePost(post, save_folder, header=header)\n\n\tprint \"Backup Complete :)\"",
"def backup_server(self, server_id):\n status, data, errors, messages = self._make_post_request(MCAPIRoutes.FORCE_BACKUP, extra_params={'id': server_id})\n \n if status == 200:\n return True\n elif status == 500:\n self._check_errors(errors, messages)",
"def auto_swap_create(request, cli):\n # parse request\n message = request[\"m\"]\n refund_addr = request[\"a\"]\n blocksat_network = \"testnet\" if request[\"n\"] == \"t\" else \"mainnet\"\n submarine_network = \"testnet\" if request[\"n\"] == \"t\" else \"bitcoin\"\n uuid = request[\"u\"]\n\n # create blocksat order\n # TODO: Add some bid creation logic here or somewhere else...\n blocksat_order = create_blocksat_order(\n message=message, bid=\"10000\", network=blocksat_network, uuid=uuid\n )\n\n # lookup the invoice with the swap server to ensure it's valid & payable\n assert (\n get_invoice_details(\n invoice=blocksat_order[\"response\"][\"lightning_invoice\"][\"payreq\"],\n network=submarine_network,\n )\n is not None\n )\n\n # get a swap quote from the swap server\n swap = get_swap_quote(\n uuid=uuid,\n invoice=blocksat_order[\"response\"][\"lightning_invoice\"][\"payreq\"],\n network=submarine_network,\n refund_addr=refund_addr,\n )\n\n result = {\n \"sat_fill\": {\n \"u\": uuid,\n \"i\": blocksat_order[\"response\"][\"lightning_invoice\"][\"payreq\"],\n \"am\": swap[\"response\"][\"swap_amount\"],\n \"ad\": swap[\"response\"][\"swap_p2wsh_address\"],\n \"rs\": swap[\"response\"][\"redeem_script\"],\n }\n }\n\n log(f\"Auto_swap result: \\n{pformat(result)}\", cli)\n\n return result",
"def create_deployment(self, ApiId: str, Description: str = None, StageName: str = None) -> Dict:\n pass",
"async def database_create_backup(self, target: Union[str, Path] = None):\n bcfg = self.config[\"Database\"][\"Backup\"]\n backup_dir = Path(bcfg.get(\"BackupDir\", f\"{self._data_dir}/backup\")).expanduser()\n if not backup_dir.is_absolute():\n backup_dir = self._data_dir / backup_dir\n backup_dir.mkdir(parents=True, exist_ok=True)\n if target is None:\n fmt = bcfg.get(\"Format\", \"%FT%H%M%S_zerobot.sqlite\")\n now = datetime.datetime.now()\n target = backup_dir / now.strftime(fmt)\n else:\n if not isinstance(target, Path):\n target = Path(target)\n if not target.is_absolute():\n target = backup_dir / target\n # TODO: MaxBackups\n await zbdb.create_backup(self.database, target, self.eventloop)",
"def backup_details(self, db_name: str, backup_id: str) -> Session:\n uri = f\"{self.uri}/databases/{db_name}/backups/{backup_id}\"\n return self.request(uri=uri, method=\"GET\").json()",
"def create_and_run_deployment(\n project_id: int = Form(...),\n model_id: Text = Form(...),\n version: Text = Form(...),\n model_uri: Text = Form(...),\n type: Text = Form(...) # pylint: disable=redefined-builtin\n) -> JSONResponse:\n\n deploy_manager = DeployManager()\n deployment_id = deploy_manager.create_deployment(\n project_id, model_id, version, model_uri, type\n )\n return JSONResponse({'deployment_id': str(deployment_id)}, HTTPStatus.ACCEPTED)",
"async def post(self):\r\n data = await self.request.json()\r\n register_date = data[\"register_date\"]\r\n ip_address = data[\"ip_address\"]\r\n try:\r\n Agent.create(register_date=register_date, ip_address=ip_address)\r\n response_obj = {\"status\": \"success\"}\r\n return web.Response(text=str(response_obj), status=201)\r\n except Exception as exception:\r\n response_obj = {\"status\": \"failed\", \"reason\": exception}\r\n error_message = str(exception)\r\n logger.error(error_message)\r\n return web.Response(text=str(response_obj), status=500)",
"def test_create_download_delete_backup(\n rotkehlchen_api_server: APIServer,\n data_dir: Path,\n username: str,\n):\n start_ts = ts_now()\n response = requests.put(api_url_for(rotkehlchen_api_server, 'databasebackupsresource'))\n filepath = Path(assert_proper_response_with_result(response))\n assert filepath.exists()\n assert filepath.parent == Path(data_dir, username)\n\n response = requests.get(api_url_for(rotkehlchen_api_server, 'databaseinforesource'))\n result = assert_proper_response_with_result(response)\n backups = result['userdb']['backups']\n assert len(backups) == 1\n assert backups[0]['time'] >= start_ts\n assert backups[0]['version'] == ROTKEHLCHEN_DB_VERSION\n\n # now also try to download that backup and make sure it's the same file\n response = requests.get(\n api_url_for(rotkehlchen_api_server, 'databasebackupsresource'),\n json={'file': str(filepath)},\n )\n with tempfile.TemporaryDirectory() as tmpdirname:\n tempdbpath = Path(tmpdirname, 'temp.db')\n tempdbpath.write_bytes(response.content)\n assert filecmp.cmp(filepath, tempdbpath)\n\n # create an extra database to check that lists work correctly\n second_filepath = filepath.parent / 'back.db'\n second_filepath.touch()\n\n response = requests.delete(\n api_url_for(\n rotkehlchen_api_server,\n 'databasebackupsresource'),\n json={'files': [str(filepath), str(second_filepath)]},\n )\n assert_simple_ok_response(response)\n assert not filepath.exists()\n assert not second_filepath.exists()\n response = requests.get(api_url_for(rotkehlchen_api_server, 'databaseinforesource'))\n result = assert_proper_response_with_result(response)\n backups = result['userdb']['backups']\n assert len(backups) == 0",
"def create(backup_path):\n\n backup_name = os.path.basename(backup_path)\n group_path = os.path.dirname(backup_path)\n group_name = os.path.basename(group_path)\n backup_root = os.path.dirname(group_path)\n\n if group_path == \"/\":\n raise Error(\"Invalid backup group directory: {}.\", group_path)\n\n if _BACKUP_NAME_RE.search(backup_name) is None:\n raise Error(\"'{}' doesn't look like a backup directory.\", backup_path)\n\n if _GROUP_NAME_RE.search(group_name) is None:\n raise Error(\"'{}' doesn't look like a backup group directory.\", group_path)\n\n return backup_name, group_name, Storage(backup_root)",
"def backup(openbazaarInstallationPath,\n backupFolderPath,\n onSucessCallback=None,\n onErrorCallback=None):\n\n dateTime = time.strftime('%Y-%h-%d-%H-%M-%S')\n outputFilePath = os.path.join(\n backupFolderPath,\n \"openbazaar-%s.tar.gz\" % dateTime\n )\n\n # Create the folder for the backup, if it doesn't exist.\n try:\n os.makedirs(backupFolderPath)\n except os.error:\n pass\n\n db_folder = os.path.join(openbazaarInstallationPath, \"db\")\n try:\n with tarfile.open(outputFilePath, \"w:gz\") as tar:\n tar.add(db_folder, arcname=os.path.basename(db_folder))\n except tarfile.TarError as e:\n # TODO: Install proper error logging.\n print \"Error while backing up to:\", outputFilePath\n if onErrorCallback is not None:\n onErrorCallback(e)\n return\n\n if onSucessCallback is not None:\n onSucessCallback(outputFilePath)",
"def create_new_banks():\n\n\tcity = request.form.get('bankCity', '')\n\tname = request.form.get('bankName', '')\n\taddress = request.form.get('bankAddress', '')\n\tinfo = dict(city=city, name=name, address=address)\n\t# print(info)\n\tbank = Bank(city, name, address)\n\tres = bank.save()\n\t# print('res=%d' % res)\n\treturn send_result(info, res, status=\"True\")"
] | [
"0.71544784",
"0.663603",
"0.63791287",
"0.63514477",
"0.62523586",
"0.61538434",
"0.61149126",
"0.5964317",
"0.5938151",
"0.5883149",
"0.57419795",
"0.5724489",
"0.5643536",
"0.5501559",
"0.5416815",
"0.5396462",
"0.5314764",
"0.52601314",
"0.52175057",
"0.52039903",
"0.51830614",
"0.51167315",
"0.5083966",
"0.5063502",
"0.5062047",
"0.50582767",
"0.5043433",
"0.5017901",
"0.5008575",
"0.4986187"
] | 0.6859653 | 1 |
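A companion sketch of creating a backup and waiting on the returned task, again with plain `requests` rather than the client's `self.backup.create`. The `/rest/backups` path, the task's `uri` and `status` fields, and the header values are assumptions based on OneView conventions, not details from the code above.

import time
import requests

APPLIANCE = "https://oneview.example.com"  # hypothetical appliance address
HEADERS = {"Auth": "<session-token>", "X-Api-Version": "1200"}  # assumed headers

def create_backup_and_wait(timeout_s: int = 600) -> dict:
    # POST starts a new backup; per the doc string, any existing backup
    # on the appliance is replaced server-side.
    task = requests.post(APPLIANCE + "/rest/backups", json={},
                         headers=HEADERS, verify=False).json()
    deadline = time.time() + timeout_s
    while time.time() < deadline:
        state = requests.get(APPLIANCE + task["uri"],  # assumed task field
                             headers=HEADERS, verify=False).json()
        if state.get("status") in ("SUCCEEDED", "FAILED"):
            return state
        time.sleep(10)
    raise TimeoutError("backup did not complete within %ss" % timeout_s)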
Retrieves the CA certificate by alias name, or gets the list of CA certificates. [Example] ${resp} = Fusion Api Get Ca Certificate | | | | | def fusion_api_get_ca_certificate(self, uri=None, api=None, headers=None, param=''):
return self.ca.get(uri=uri, api=api, headers=headers, param=param) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def fusion_api_get_server_certificate(self, aliasname, api=None, headers=None):\n return self.server_certificate.get(aliasname, api, headers)",
"def fusion_api_get_certificate_info(self, uri=None, api=None, param='', headers=None):\n param = '/certificates/https/'\n return self.ic.get(uri=uri, api=api, headers=headers, param=param)",
"def fusion_api_remove_external_ca_certificates(self, aliasName, api=None, headers=None):\n return self.ca.delete(aliasName, api=api, headers=headers)",
"def ca_certificate(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"ca_certificate\")",
"def dcos_ca_bundle():\n resp = sdk_cmd.cluster_request('GET', '/ca/dcos-ca.crt')\n cert = resp.content.decode('ascii')\n assert cert is not None\n return cert",
"def get_ca_certificate_from_opaque_secret(secret_name, secret_ns):\n kube = kubernetes.KubeOperator()\n secret = kube.kube_get_secret(secret_name, secret_ns)\n\n if not hasattr(secret, 'data'):\n raise Exception('Invalid secret %s\\\\%s' % (secret_ns, secret_name))\n\n data = secret.data\n if 'ca.crt' not in data:\n raise Exception('Invalid CA certificate data from secret %s\\\\%s' %\n (secret_ns, secret_name))\n\n try:\n ca_crt = base64.decode_as_text(data['ca.crt'])\n except TypeError:\n raise Exception('CA certificate secret data is invalid %s\\\\%s' %\n (secret_ns, secret_name))\n\n return ca_crt",
"def ca_certificate(self) -> str:\n return pulumi.get(self, \"ca_certificate\")",
"def ca_certificate(self) -> str:\n return pulumi.get(self, \"ca_certificate\")",
"def ca():\n return trustme.CA()",
"def fusion_api_get_appliance_certificate(self, api=None, headers=None):\n return self.appliance_certificate.get(api, headers)",
"def get_certificate_authority_certificate(self):\n client = confidant.clients.get_boto_client('acm-pca')\n certificate = client.get_certificate_authority_certificate(\n CertificateAuthorityArn=self.settings['arn'],\n )\n # TODO: support pagination for this call\n tags = client.list_tags(\n CertificateAuthorityArn=self.settings['arn'],\n )\n _tags = {}\n for tag in tags['Tags']:\n _tags[tag['Key']] = tag['Value']\n return {\n 'ca': self.ca_name,\n 'certificate': certificate['Certificate'],\n 'certificate_chain': certificate['CertificateChain'],\n 'tags': _tags,\n }",
"def fusion_api_get_internal_ca_crl(self, api=None, headers=None):\n param = '/ca/crl'\n return self.ca.get(api=api, param=param, headers=headers)",
"def get_certificate(self, url):\n bearer = 'Authorization: Bearer '+str(self.exchanged_token).split('\\n', 1)[0]\n data = json.dumps({\"service_id\": \"x509\"})\n\n headers = StringIO()\n buffers = StringIO()\n\n c = pycurl.Curl()\n c.setopt(pycurl.URL, url)\n c.setopt(pycurl.HTTPHEADER, [bearer, 'Content-Type: application/json'])\n c.setopt(pycurl.POST, 1)\n c.setopt(pycurl.POSTFIELDS, data)\n c.setopt(c.WRITEFUNCTION, buffers.write)\n c.setopt(c.HEADERFUNCTION, headers.write)\n c.setopt(c.VERBOSE, True)\n\n try:\n c.perform()\n status = c.getinfo(c.RESPONSE_CODE)\n c.close()\n body = buffers.getvalue()\n\n if str(status) != \"303\" :\n self.log.error(\"On \\\"get redirect curl\\\": %s , http error: %s \" % (body, str(status)))\n return False \n except pycurl.error, error:\n errno, errstr = error\n self.log.info('An error occurred: %s' % errstr)\n return False\n \n redirect = self.tts\n for item in headers.getvalue().split(\"\\n\"):\n if \"location\" in item:\n redirect = redirect + item.strip().replace(\"location: \", \"\")\n\n headers = {'Authorization': 'Bearer ' + self.exchanged_token.strip()}\n response = requests.get(redirect, headers=headers)\n\n try:\n response.raise_for_status()\n except requests.exceptions.HTTPError as e:\n # Whoops it wasn't a 200\n self.log.error(\"get_certificate() Error: %s \" %str(e))\n return False\n\n with open('/tmp/output.json', 'w') as outf:\n outf.write(response.content)\n else:\n self.log.error(\"No location in redirect response\")\n\n return True",
"def _parse_certificate(cls, response):\n links = _parse_header_links(response)\n try:\n cert_chain_uri = links[u'up'][u'url']\n except KeyError:\n cert_chain_uri = None\n return (\n response.content()\n .addCallback(\n lambda body: messages.CertificateResource(\n uri=cls._maybe_location(response),\n cert_chain_uri=cert_chain_uri,\n body=body))\n )",
"def fusion_api_get_appliance_certificate(self, api=None, headers=None):\n return self.wsc.get(api=api, headers=headers)",
"def endpoint_tls_ca(self) -> Optional[bytes]:\n if self.is_ready and (data := self._data):\n if data.endpoint_tls_ca:\n return data.endpoint_tls_ca.encode()\n return None",
"def ca(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"ca\")",
"def ca(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"ca\")",
"def ca(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"ca\")",
"def get_certificates_by_pcc(conn: dict, id: str) -> dict:\n return get(conn, f\"{S3PCCS}/{id}/certificates\")",
"def cert_arn_lookup(session, domain_name):\n if session is None:\n return None\n\n client = session.client('acm')\n response = client.list_certificates()\n for certs in response['CertificateSummaryList']:\n if certs['DomainName'] == domain_name:\n return certs['CertificateArn']\n if certs['DomainName'].startswith('*'): # if it is a wildcard domain like \"*.thebossdev.io\"\n cert_name = certs['DomainName'][1:] + '$'\n if re.search(cert_name, domain_name) != None:\n return certs['CertificateArn']\n return None",
"def fusion_api_get_client_certificate(self, ip, api=None, headers=None):\n return self.client_certificate.get(ip, api, headers)",
"def catalog_alias_get(self, args):\n try:\n alias = self.server.connect_ermrest_alias(args.id)\n response = alias.retrieve()\n if not args.quiet:\n pp(response)\n except HTTPError as e:\n if e.response.status_code == requests.codes.not_found:\n raise ResourceException('Catalog alias not found', e)\n else:\n raise e",
"def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n certificate: Optional[pulumi.Input[str]] = None,\n csr: Optional[pulumi.Input[str]] = None,\n expires_on: Optional[pulumi.Input[str]] = None,\n hostnames: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n min_days_for_renewal: Optional[pulumi.Input[int]] = None,\n request_type: Optional[pulumi.Input[str]] = None,\n requested_validity: Optional[pulumi.Input[int]] = None) -> 'OriginCaCertificate':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _OriginCaCertificateState.__new__(_OriginCaCertificateState)\n\n __props__.__dict__[\"certificate\"] = certificate\n __props__.__dict__[\"csr\"] = csr\n __props__.__dict__[\"expires_on\"] = expires_on\n __props__.__dict__[\"hostnames\"] = hostnames\n __props__.__dict__[\"min_days_for_renewal\"] = min_days_for_renewal\n __props__.__dict__[\"request_type\"] = request_type\n __props__.__dict__[\"requested_validity\"] = requested_validity\n return OriginCaCertificate(resource_name, opts=opts, __props__=__props__)",
"def test_set_one_ca_list(self):\n cacert = load_certificate(FILETYPE_PEM, root_cert_pem)\n cadesc = cacert.get_subject()\n\n def single_ca(ctx):\n ctx.set_client_ca_list([cadesc])\n return [cadesc]\n\n self._check_client_ca_list(single_ca)",
"def get_ssl_certificate():",
"def get_ssl_certificate() :",
"def _get_ca_bundle():\n try:\n import certifi\n return certifi.where()\n except ImportError:\n pass",
"def caget(PV):\n return epics.caget(PV)",
"def fusion_api_get_rabbitmq_client_certificate(self, param='', api=None, headers=None):\n return self.rabmq.get(param=param, api=api, headers=headers)"
] | [
"0.6737102",
"0.6336662",
"0.6227532",
"0.6090018",
"0.60808045",
"0.6077929",
"0.59895927",
"0.59895927",
"0.5879556",
"0.58654875",
"0.58157027",
"0.57867277",
"0.5784408",
"0.5756572",
"0.575524",
"0.57094204",
"0.56620824",
"0.56620824",
"0.56620824",
"0.5600329",
"0.5577867",
"0.55692476",
"0.5552525",
"0.55430824",
"0.552701",
"0.5495198",
"0.5493991",
"0.54564136",
"0.5452111",
"0.5445409"
] | 0.7302024 | 0 |
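For contrast with the keyword above, a hedged sketch of fetching one CA certificate by alias, or the whole list when no alias is given; the `/rest/certificates/ca` path is an assumption for illustration only.

import requests

APPLIANCE = "https://oneview.example.com"  # hypothetical appliance address
HEADERS = {"Auth": "<session-token>", "X-Api-Version": "1200"}  # assumed headers

def get_ca_certificate(alias_name: str = "") -> dict:
    # With an alias, return a single certificate; without, return the list.
    path = "/rest/certificates/ca" + ("/" + alias_name if alias_name else "")
    return requests.get(APPLIANCE + path, headers=HEADERS, verify=False).json()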
Retrieves the contents of the CRL file maintained by the internal CA, as a Base64-encoded string. [Example] ${resp} = Fusion Api Get Internal Ca Crl | | | def fusion_api_get_internal_ca_crl(self, api=None, headers=None):
param = '/ca/crl'
return self.ca.get(api=api, param=param, headers=headers) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def as_text(self):\n buf=BIO.MemoryBuffer()\n m2.x509_crl_print(buf.bio_ptr(), self.crl)\n return buf.read_all()",
"def _get_cla_raw_data(cla_document_url):\n data = ''\n error = None\n try:\n r = requests.get(cla_document_url)\n except Exception as error:\n pass\n\n if error is None and r.status_code in [200]:\n data = r.text\n\n return data",
"def retrieve_ipac_file(url):\n \n request = urllib2.Request(url)\n \n # Encode the username and password to send for authorization\n base64string = base64.encodestring('%s:%s' % (IPAC_USER, IPAC_PASSWORD)).replace('\\n', '')\n request.add_header(\"Authorization\", \"Basic %s\" % base64string)\n \n # Retrieve the response\n try:\n response = urllib2.urlopen(request)\n except urllib2.HTTPError, e:\n print \"HTTPError: Authorization failed or request invalid.\\n\\t->HTTP Response returned error code {}\".format(e.code)\n raise\n except urllib2.URLError, e:\n print \"URLError: {}\".format(e.reason)\n raise\n \n file = StringIO.StringIO(response.read())\n return file",
"def make_request_txt(self):\n #print (self.url)\n try:\n with closing(get(self.url, stream=True)) as resp: #returns b`txt`\n if self.is_txt(resp):\n return resp.content.decode(\"utf-8\")\n else:\n return None\n except RequestException as e:\n print('Error during requests to {0} : {1}'.format(url, str(e)))\n return None",
"def httpretrieve_get_string(url, querydata=None, postdata=None, \\\r\n httpheaders=None, proxy=None, timeout=30):\r\n\r\n # Open a read-only file-like object for the HTTP request.\r\n httpobj = httpretrieve_open(url, querydata=querydata, postdata=postdata, \\\r\n httpheaders=httpheaders, proxy=proxy, timeout=timeout)\r\n\r\n # Read all of the response and return it.\r\n try:\r\n return httpobj.read()\r\n finally:\r\n httpobj.close()",
"def get():\n\n l2ca_info = caps.l2ca_info()\n\n res = {\n 'cache_size': l2ca_info['cache_size'],\n 'cw_size': l2ca_info['cache_way_size'],\n 'cw_num': l2ca_info['cache_ways_num'],\n 'clos_num': l2ca_info['clos_num'],\n 'cdp_supported': l2ca_info['cdp_supported'],\n 'cdp_enabled': l2ca_info['cdp_enabled']\n }\n return res, 200",
"def get_response():\n result = ''\n line = ''\n while line != '\\n':\n result += line\n line = FROMFILE.readline()\n #print(\" I read line:[\"+line+\"]\")\n return result",
"def fusion_api_get_ca_certificate(self, uri=None, api=None, headers=None, param=''):\n return self.ca.get(uri=uri, api=api, headers=headers, param=param)",
"def getResponseString(retCode):\n return (_getResponseString(retCode))",
"def read_raw(self) -> bytes:\n r = requests.get(self.raw_url)\n if r.status_code == 404:\n raise Exception(f\"Document {self.key} does not exist\")\n r.raise_for_status()\n return r.content",
"def fusion_api_get_certificate_info(self, uri=None, api=None, param='', headers=None):\n param = '/certificates/https/'\n return self.ic.get(uri=uri, api=api, headers=headers, param=param)",
"def request_content(self):\n try:\n length = int(self.headers.get('content-length'))\n\n except (TypeError, ValueError):\n return \"\"\n else:\n return self.rfile.read(length)",
"def get():\n\n l3ca_info = caps.l3ca_info()\n\n res = {\n 'cache_size': l3ca_info['cache_size'],\n 'cw_size': l3ca_info['cache_way_size'],\n 'cw_num': l3ca_info['cache_ways_num'],\n 'clos_num': l3ca_info['clos_num'],\n 'cdp_supported': l3ca_info['cdp_supported'],\n 'cdp_enabled': l3ca_info['cdp_enabled']\n }\n return res, 200",
"def request_content(self):\r\n try:\r\n length = int(self.headers.getheader('content-length'))\r\n\r\n except (TypeError, ValueError):\r\n return \"\"\r\n else:\r\n return self.rfile.read(length)",
"def _download(self) -> bytes:\n\n self.log.info(\"Downloading FCC facilities..\")\n # Disabling weak dh check. FCC should update their servers.\n ciphers = requests.packages.urllib3.util.ssl_.DEFAULT_CIPHERS\n requests.packages.urllib3.util.ssl_.DEFAULT_CIPHERS += ':HIGH:!DH:!aNULL'\n r = requests.get(FACILITIES_URL)\n requests.packages.urllib3.util.ssl_.DEFAULT_CIPHERS = ciphers\n r.raise_for_status()\n return r.content",
"def get_file_contents(filename):\n try:\n with open(filename, 'r') as f:\n # It's assumed our file contains a single line,\n # with our API key\n return f.read().strip()\n except FileNotFoundError:\n print(\"'%s' file not found\" % filename)",
"def http_get_contents(url) -> str:\n\n # Clean url\n url = str(url).strip('\\\\')\n url = str(url).strip('\\n')\n\n try:\n # Fixed SSL bug on MacOS: /Applications/Python\\ 3.8/Install\\ Certificates.command\n http = urllib3.PoolManager()\n http_response = http.request('GET', url, timeout=5)\n http_response_content = http_response.data\n\n if http_response.status == 200:\n return http_response_content.decode('utf-8')\n\n return ''\n\n # pylint: disable=W0703\n except Exception as error:\n # pylint: disable=W1202\n LOGGER.error('Error. Could not connect to: {0}. Error message: {1}'.format(url, error))\n\n return ''",
"def fetch_content(type,msgid,baseurl,user,password):\n payload = {'request': 'content', 'type': type, 'msgid': msgid}\n r = requests.get(baseurl, params=payload, auth=HTTPBasicAuth(user, password), verify=False)\n return r.text.encode('ascii', 'ignore')",
"def load_mock_response(file_path: str) -> str:\n with open(file_path, encoding='utf-8') as mock_file:\n return mock_file.read()",
"def get_info() -> str:\n req = Request(URL + '/info')\n context = ssl._create_unverified_context()\n with urlopen(req, context=context) as response:\n return response.read().decode('utf-8')",
"def get_own_cert_chain_as_string(self):\n# _log.debug(\"get_own_cert_chain_as_string: node_name={}\".format(self.node_name))\n cert_path = self.get_own_cert_path()\n try:\n cert_chain_str = open(cert_path, 'rt').read()\n return cert_chain_str\n except Exception as err:\n # Certificate not available\n _log.debug(\"No runtime certificate string can be found, err={}\".format(err))\n return None",
"def canned_ims2_response():\n return file_utils.response_file_to_json(test_data_dir + '/waveform_41177893.1')",
"def get(self, url):\n \n content = \"\"\n if hasattr(http.client, \"HTTPSConnection\"): \n url_options = urlparse(url)\n\n conn = http.client.HTTPSConnection(url_options.netloc)\n conn.request('GET', url_options.path + '?' + url_options.query)\n content = conn.getresponse().read().decode('utf-8')\n conn.close()\n else: \n p = os.popen('curl -k \"' + url + '\"')\n content = p.read()\n p.close() \n\n return content",
"def get_certificate(self, path: Union[bytes, str]) -> str:\n path = _to_bytes_or_null(path)\n certificate = ffi.new(\"char **\")\n ret = lib.Fapi_GetCertificate(self._ctx, path, certificate)\n _chkrc(ret)\n # certificate is guaranteed to be a null-terminated string\n return ffi.string(_get_dptr(certificate, lib.Fapi_Free)).decode()",
"def certificate_body(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"certificate_body\")",
"def getfilehttps(self, url):\n ctx = ssl.create_default_context()\n ctx.check_hostname = False\n ctx.verify_mode = ssl.CERT_NONE\n response = urllib.request.urlopen(url, context=ctx)\n result = response.read()\n return result",
"def get_content_from_ulr(self):\n response = urllib.request.urlopen(self.url)\n if response.getcode() != 200:\n self.logger.info(\"Cisco - get_content_from_url()\")\n raise ConnectionError('Unable to load ', self.url)\n content = response.read()\n response.close()\n return content",
"def __get_file_code(self, path):\n response = requests.get(path, auth=self.authentication).json()\n code = base64.b64decode(response['content']).decode('utf-8')\n return code",
"def get_response(self):\n res = IOCRBlockRes()\n for field in [\"IOCRType\", \"IOCRReference\", \"FrameID\"]:\n res.setfieldval(field, self.getfieldval(field))\n return res",
"async def getLegalInformation(self, body=\"\"):\n payload = {}\n \n # Parameter validation\n schema = ContentValidator.getLegalInformation()\n schema.dump(schema.load(payload))\n \n\n url_with_params = await create_url_with_params(api_url=self._urls[\"getLegalInformation\"], proccessed_params=\"\"\"{\"required\":[],\"optional\":[],\"query\":[],\"headers\":[],\"path\":[]}\"\"\", )\n query_string = await create_query_string()\n headers = {\n \"Authorization\": \"Bearer \" + base64.b64encode(\"{}:{}\".format(self._conf.applicationID, self._conf.applicationToken).encode()).decode()\n }\n if self._conf.locationDetails:\n headers[\"x-location-detail\"] = ujson.dumps(self._conf.locationDetails)\n for h in self._conf.extraHeaders:\n headers.update(h)\n exclude_headers = []\n for key, val in headers.items():\n if not key.startswith(\"x-fp-\"):\n exclude_headers.append(key)\n return await AiohttpHelper().aiohttp_request(\"GET\", url_with_params, headers=get_headers_with_signature(urlparse(self._urls[\"getLegalInformation\"]).netloc, \"get\", await create_url_without_domain(\"/service/application/content/v1.0/legal\", ), query_string, headers, body, exclude_headers=exclude_headers), data=body, cookies=self._conf.cookies)"
] | [
"0.6228554",
"0.6052844",
"0.58945525",
"0.56648254",
"0.5539024",
"0.54497933",
"0.53351825",
"0.5327286",
"0.52844656",
"0.52766275",
"0.5275653",
"0.5241717",
"0.52319336",
"0.5169041",
"0.51577955",
"0.5104549",
"0.5101868",
"0.5093677",
"0.5073579",
"0.5059927",
"0.5019072",
"0.5014374",
"0.4988616",
"0.4968411",
"0.4965379",
"0.49618423",
"0.49562523",
"0.4955164",
"0.49238196",
"0.49091074"
] | 0.66197985 | 0 |
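Since the CRL is documented as a Base64-encoded string, a sketch of fetching and parsing it locally with the `cryptography` package; the `/rest/certificates` base path that the keyword's '/ca/crl' suffix is appended to is an assumption.

import base64
import requests
from cryptography import x509

APPLIANCE = "https://oneview.example.com"  # hypothetical appliance address
HEADERS = {"Auth": "<session-token>", "X-Api-Version": "1200"}  # assumed headers

def fetch_internal_ca_crl() -> x509.CertificateRevocationList:
    raw = requests.get(APPLIANCE + "/rest/certificates/ca/crl",
                       headers=HEADERS, verify=False).text
    der = base64.b64decode(raw)  # Base64 body, per the keyword's doc string
    return x509.load_der_x509_crl(der)

If the appliance returns PEM rather than raw Base64 DER, `x509.load_pem_x509_crl` applies instead.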
Removes external CA certificates in OneView. [Arguments] | def fusion_api_remove_external_ca_certificates(self, aliasName, api=None, headers=None):
return self.ca.delete(aliasName, api=api, headers=headers) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def remove_ca_certs_from_systemwide_ca_store(self):\n\n raise NotImplementedError()",
"def fusion_api_revoke_certificate(self, name=None, api=None, headers=None):\n return self.ca.revoke(name=name, api=api, headers=headers)",
"def revoke_certificate(self):\n return self.__query(\"certificateRevoke\", kwargs)",
"def fusion_api_delete_client_certificate(self, aliasname, api=None, headers=None):\n return self.client_certificate.delete(aliasname, api, headers)",
"def fusion_api_delete_server_certificate(self, aliasname, api=None, headers=None):\n return self.server_certificate.delete(aliasname, api, headers)",
"def test_remove_trusted_project1(self):\n pass",
"def revoke_from_menu(self):\n\n csha1_vhlist = self._get_installed_locations()\n certs = self._populate_saved_certs(csha1_vhlist)\n\n while True:\n if certs:\n code, selection = revocation.display_certs(certs)\n\n if code == display_util.OK:\n revoked_certs = self._safe_revoke([certs[selection]])\n # Since we are currently only revoking one cert at a time...\n if revoked_certs:\n del certs[selection]\n elif code == display_util.HELP:\n revocation.more_info_cert(certs[selection])\n else:\n return\n else:\n logger.info(\n \"There are not any trusted Let's Encrypt \"\n \"certificates for this server.\")\n return",
"def test_reset_ca_list(self):\n cacert = load_certificate(FILETYPE_PEM, root_cert_pem)\n secert = load_certificate(FILETYPE_PEM, server_cert_pem)\n clcert = load_certificate(FILETYPE_PEM, server_cert_pem)\n\n cadesc = cacert.get_subject()\n sedesc = secert.get_subject()\n cldesc = clcert.get_subject()\n\n def changed_ca(ctx):\n ctx.set_client_ca_list([sedesc, cldesc])\n ctx.set_client_ca_list([cadesc])\n return [cadesc]\n\n self._check_client_ca_list(changed_ca)",
"def test_remove_trusted_project3(self):\n pass",
"def replace_certificate(self):\n return self.__query(\"certificateReplace\", data)",
"def test_remove_trusted_project4(self):\n pass",
"def test_remove_trusted_project2(self):\n pass",
"def test_remove_trusted_project5(self):\n pass",
"def test_remove_trusted_project7(self):\n pass",
"def test_remove():\n client = TestClient()\n client.run('config set proxies.https=myurl')\n client.run('config rm proxies.https')\n conf_file = load(client.cache.conan_conf_path)\n assert 'myurl' not in conf_file",
"def test_remove_trusted_project(self):\n pass",
"def ca():\n return trustme.CA()",
"def test_remove_trusted_project6(self):\n pass",
"def unassign(id, type, appid, specialid):\n try:\n client().certificates.unassign(id, type, appid, specialid)\n logger.info(\n 'ctl:cert:unassign', 'Unassigned {0} from {0}'.format(id, appid)\n )\n except Exception as e:\n raise CLIException(str(e))",
"def test_set_one_ca_list(self):\n cacert = load_certificate(FILETYPE_PEM, root_cert_pem)\n cadesc = cacert.get_subject()\n\n def single_ca(ctx):\n ctx.set_client_ca_list([cadesc])\n return [cadesc]\n\n self._check_client_ca_list(single_ca)",
"def sslCheckOriginal():\n print('[+] Populating SSL for later check')\n for url in ssl_strip_monitored_urls:\n try:\n cert = ssl.get_server_certificate((str(url), 443))\n x509 = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM, cert)\n p_k = x509.get_pubkey()\n s_n = x509.get_serial_number()\n\n print('[+] Acquired Certificate: %s' % url)\n print(' |_________> serial_number %s' % s_n)\n print(' |_________> public_key %s' % p_k)\n\n check_ssl_strip_results.append(SSL_Strip_Check(url, p_k, s_n))\n\n except Exception as err:\n print('[-] Error While Acquiring certificats on setup phase !')\n traceback.print_exc()\n return time.time()",
"def test_load_client_ca_unicode(self, context, ca_file):\n pytest.deprecated_call(context.load_client_ca, ca_file.decode(\"ascii\"))",
"def RevokeCertificates(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')",
"def fusion_api_import_external_ca_certificates(self, body, api=None, headers=None):\n return self.ca.add(body, api=api, headers=headers)",
"def _remove_certs_keys(self, cert_list): # pylint: disable=no-self-use\n # This must occur first, LIST is the official key\n self._remove_certs_from_list(cert_list)\n\n # Remove files\n for cert in cert_list:\n os.remove(cert.backup_path)\n os.remove(cert.backup_key_path)",
"def delete_dataset_without_original_url():\n logging.warning(\n \"*** deleting all netex files created by transport.data.gouv.fr ***\"\n )\n r = requests.get(\"https://transport.data.gouv.fr/api/datasets\")\n r.raise_for_status()\n datasets = r.json()\n\n print_resource = lambda r: f\"\\n\\t*[url = {r['url']} | extras = {r.get('extras')}]\"\n print_resources = lambda rs: [print_resource(r) for r in rs]\n\n for d in datasets:\n dataset_name = d[\"title\"]\n if d[\"type\"] != \"public-transit\":\n continue\n\n dataset_id = d[\"id\"]\n\n community_resources = _find_community_resources(dataset_id)\n logging.info(\"community ressources : %s\", print_resources(community_resources))\n old_community_resources = [\n r\n for r in community_resources\n if \"transport:original_resource_url\" not in r.get(\"extras\", {})\n ]\n if old_community_resources:\n logging.info(\n \"old community ressources : %s\",\n print_resources(old_community_resources),\n )\n _delete_community_resources(dataset_id, old_community_resources)\n logging.info(\"deleted community resource for the dataset %s\", dataset_id)",
"def revoke_certificate(\n project_id: str,\n location: str,\n ca_pool_name: str,\n certificate_name: str,\n) -> None:\n\n caServiceClient = privateca_v1.CertificateAuthorityServiceClient()\n\n # Create Certificate Path.\n certificate_path = caServiceClient.certificate_path(\n project_id, location, ca_pool_name, certificate_name\n )\n\n # Create Revoke Certificate Request and specify the appropriate revocation reason.\n request = privateca_v1.RevokeCertificateRequest(\n name=certificate_path, reason=privateca_v1.RevocationReason.PRIVILEGE_WITHDRAWN\n )\n result = caServiceClient.revoke_certificate(request=request)\n\n print(\"Certificate revoke result:\", result)",
"def create_selfsigned_certificates(name):\n pass",
"def get_ssl_certificate() :",
"async def cleanup_challenge(\n self,\n key: josepy.jwk.JWK,\n identifier: acme.messages.Identifier,\n challenge: acme.messages.ChallengeBody,\n ):\n pass"
] | [
"0.6013904",
"0.565803",
"0.5591083",
"0.55188787",
"0.54182845",
"0.5397607",
"0.523788",
"0.52177835",
"0.52039444",
"0.5153776",
"0.51488674",
"0.51408875",
"0.51112974",
"0.50869256",
"0.50488645",
"0.50306964",
"0.5020207",
"0.5018251",
"0.4996934",
"0.49696755",
"0.49619925",
"0.49409172",
"0.49086532",
"0.4872488",
"0.48707765",
"0.48673844",
"0.48381564",
"0.48363233",
"0.4830312",
"0.48245648"
] | 0.6606943 | 0 |
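A sketch mirroring the delete-by-alias call above (`self.ca.delete(aliasName)`) as a bare REST request; the concrete `/rest/certificates/ca/<alias>` path and the returned status code are assumptions.

import requests

APPLIANCE = "https://oneview.example.com"  # hypothetical appliance address
HEADERS = {"Auth": "<session-token>", "X-Api-Version": "1200"}  # assumed headers

def remove_external_ca_certificate(alias_name: str) -> int:
    resp = requests.delete(APPLIANCE + "/rest/certificates/ca/" + alias_name,
                           headers=HEADERS, verify=False)
    return resp.status_code  # e.g. 204 on successful removal (assumed)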
[Arguments] [Example] ${resp} = Fusion Api Get Certificate Validation Configuration | | | def fusion_api_get_certificate_validation_configuration(self, api=None, headers=None):
return self.cv.get(api=api, headers=headers) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_ssl_certificate() :",
"def getCampaignConfig(docName, url=reqmgr_url):\n headers = {\"Content-type\": \"application/json\", \"Accept\": \"application/json\"}\n conn = make_x509_conn(url)\n url = '/reqmgr2/data/campaignconfig/%s' % docName\n conn.request(\"GET\", url, headers=headers)\n r2 = conn.getresponse()\n data = json.loads(r2.read())\n return data['result']",
"def get_config_verify(self,\n raw_response: Any,\n *args,\n **kwargs) -> bool:\n pass",
"def get_certificate_issuance_config_output(certificate_issuance_config_id: Optional[pulumi.Input[str]] = None,\n location: Optional[pulumi.Input[str]] = None,\n project: Optional[pulumi.Input[Optional[str]]] = None,\n opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetCertificateIssuanceConfigResult]:\n ...",
"def certificate_auth():\r\n url = 'https://www.12306.cn'\r\n response = requests.get(url, verify=False)\r\n print(response.status_code)\r\n print(response.text)",
"def request_cert(session, domain_name, validation_domain):\n if session is None:\n return None\n\n client = session.client('acm')\n validation_options = [\n {\n 'DomainName': domain_name,\n 'ValidationDomain': validation_domain\n },\n ]\n response = client.request_certificate(DomainName=domain_name,\n DomainValidationOptions=validation_options)\n return response",
"def get_ssl_certificate():",
"def get_config(req):\n #try:\n # user_id = req.user\n #except KeyError as e:\n # msg = req.get_error_msg(e)\n # return send_error_response(msg)\n try:\n config = tools_config_get_config(req)\n except Exception:\n raise http_exc.HTTPClientError()\n else:\n return Response(json_body=json.dumps(config), content_type='application/json')",
"def api_specification():\n api_specification_question = [\n {\n 'type': 'input',\n 'message': 'Enter the IP address of your CMDB server (use format yyx.yyx.yyx.yyx where \\'y\\' is optional):',\n 'name': 'server',\n 'validate': AddressValidator\n },\n {\n 'type': 'input',\n 'message': 'Enter your CMDB username:',\n 'name': 'username',\n 'validate': NotEmpty\n },\n {\n 'type': 'password',\n 'message': 'Enter your CMDB password:',\n 'name': 'password'\n },\n {\n 'type': 'input',\n 'message': 'Enter your API key:',\n 'name': 'api_key',\n 'validate': NotEmpty\n }\n ]\n api_specification_answer = prompt(api_specification_question, style=style)\n return api_specification_answer",
"def fusion_api_get_certificate_info(self, uri=None, api=None, param='', headers=None):\n param = '/certificates/https/'\n return self.ic.get(uri=uri, api=api, headers=headers, param=param)",
"def _validate_GetCC_response(self, response, prompt):\n error = self._find_error(response)\n\n if error:\n log.error(\"GetCC command encountered error; type='%s' msg='%s'\", error[0], error[1])\n raise InstrumentProtocolException('GetCC command failure: type=\"%s\" msg=\"%s\"' % (error[0], error[1]))\n\n if not SBE19CalibrationParticle.resp_regex_compiled().search(response):\n log.error('_validate_GetCC_response: GetCC command not recognized: %s.' % response)\n raise InstrumentProtocolException('GetCC command not recognized: %s.' % response)\n\n return response",
"def main():\n ssl_date_fmt = r'%b %d %H:%M:%S %Y %Z'\n #cert_file_name = os.path.join(os.path.dirname(__file__), \"testcert.pem\")\n\n parser = argparse.ArgumentParser(description='Parse a certificate and show days left')\n parser.add_argument('-v', '--verbose', action='store_true', help='show full certificate')\n parser.add_argument('cert', nargs='+', help='certifcate file(s)')\n args = parser.parse_args()\n for cert_file_name in args.cert:\n try:\n cert_dict = ssl._ssl._test_decode_cert(cert_file_name)\n serial = cert_dict['serialNumber']\n subject = dict(x[0] for x in cert_dict['subject'])\n issued_to = subject['commonName']\n time_left = datetime.datetime.strptime(cert_dict['notAfter'], ssl_date_fmt) - datetime.datetime.utcnow()\n if args.verbose:\n pp(cert_dict)\n ssl_expires_in(issued_to, serial, time_left)\n\n except Exception as error:\n print(\"Error decoding certificate: {:}\".format(error))",
"def get_certificate(self, url):\n bearer = 'Authorization: Bearer '+str(self.exchanged_token).split('\\n', 1)[0]\n data = json.dumps({\"service_id\": \"x509\"})\n\n headers = StringIO()\n buffers = StringIO()\n\n c = pycurl.Curl()\n c.setopt(pycurl.URL, url)\n c.setopt(pycurl.HTTPHEADER, [bearer, 'Content-Type: application/json'])\n c.setopt(pycurl.POST, 1)\n c.setopt(pycurl.POSTFIELDS, data)\n c.setopt(c.WRITEFUNCTION, buffers.write)\n c.setopt(c.HEADERFUNCTION, headers.write)\n c.setopt(c.VERBOSE, True)\n\n try:\n c.perform()\n status = c.getinfo(c.RESPONSE_CODE)\n c.close()\n body = buffers.getvalue()\n\n if str(status) != \"303\" :\n self.log.error(\"On \\\"get redirect curl\\\": %s , http error: %s \" % (body, str(status)))\n return False \n except pycurl.error, error:\n errno, errstr = error\n self.log.info('An error occurred: %s' % errstr)\n return False\n \n redirect = self.tts\n for item in headers.getvalue().split(\"\\n\"):\n if \"location\" in item:\n redirect = redirect + item.strip().replace(\"location: \", \"\")\n\n headers = {'Authorization': 'Bearer ' + self.exchanged_token.strip()}\n response = requests.get(redirect, headers=headers)\n\n try:\n response.raise_for_status()\n except requests.exceptions.HTTPError as e:\n # Whoops it wasn't a 200\n self.log.error(\"get_certificate() Error: %s \" %str(e))\n return False\n\n with open('/tmp/output.json', 'w') as outf:\n outf.write(response.content)\n else:\n self.log.error(\"No location in redirect response\")\n\n return True",
"def request(domain):\n if not domain:\n logger.error(\n \"ctl:info:generate\", \"Choose a fully-qualified domain name of the \"\n \"certificate. Must match a domain present on the system\"\n )\n domain = click.prompt(\"Domain name\")\n try:\n client().certificates.request_acme_certificate(domain)\n except Exception as e:\n raise CLIException(str(e))",
"def status():\n return jsonify(service='scwr-api-requirements', status='ok')",
"def fusion_api_get_certificate_status(self, api=None, headers=None):\n return self.certificate_status.get(api, headers)",
"def create_ssl_cert_request ( ssl_hostnames ) :\n first_hostname = ssl_hostnames[ 0 ]\n csr_filename = get_ssl_csr_filename( first_hostname )\n key_filename = get_ssl_key_filename( first_hostname )\n openssl_cnf = \"\"\"\n[req]\ndistinguished_name = req_distinguished_name\nreq_extensions = san_ext\n\n[req_distinguished_name]\ncountryName_default = US\nstateOrProvinceName_default = New York\nlocalityName_default = New York\norganizationalUnitName_default = Home Box Office, Inc\ncommonName_default = \"\"\" + first_hostname + \"\"\"\n\n[san_ext]\nbasicConstraints = CA:FALSE\nkeyUsage = nonRepudiation, digitalSignature, keyEncipherment\nsubjectAltName = @sans\n\n[sans]\n\"\"\"\n counter = 0\n for hostname in ssl_hostnames :\n counter += 1\n openssl_cnf += 'DNS.' + str( counter ) + ' = ' + hostname + '\\n'\n\n with open( first_hostname, 'w' ) as f :\n f.write( openssl_cnf )\n cmd = 'openssl req -new -newkey rsa:2048 -nodes -out ' + csr_filename + ' -keyout ' + key_filename\n cmd += ' -config ' + first_hostname + ' -subj \"/C=US/ST=New York/L=New York/O=Home Box Office Inc/CN=' + first_hostname + '\"'\n keygen = subprocess.call( cmd, shell = True )\n os.remove( first_hostname )\n if keygen != 0 :\n print \"Generation of SSL request failed!\"\n return None\n\n return { 'csr-filename' : csr_filename, 'key-filename' : key_filename }",
"def http_get(config_args):\n #global config_args\n \n \n try:\n #r = requests.get(config_args.address,headers=headers,verify=config_args.cacert)\n if 'https' in config_args.address.lower() and config_args.host != None:\n headers = {'host': config_args.host}\n r = requests.get(config_args.address,headers=headers,verify=False)\n #print(config_args.address.lower())\n elif 'https' in config_args.address.lower() and config_args.host == None:\n r = requests.get(config_args.address,verify=False)\n print(config_args.address.lower())\n else:\n r = requests.get(config_args.address)\n\n if r.status_code != 200 and r.status_code != 301 and r.status_code != 302:\n print(\"Request failed...code: {}\".format(r.status_code))\n except KeyboardInterrupt:\n print ('Stopping now....')\n try:\n sys.exit(0)\n except SystemExit:\n os._exit(0) \n \n return r",
"def request_cert():\n\n api_request = shallow_copy(props)\n\n for key in ['ServiceToken', 'Region', 'Tags', 'Route53RoleArn']:\n api_request.pop(key, None)\n\n if 'ValidationMethod' in props:\n if props['ValidationMethod'] == 'DNS':\n\n # Check that we have all the hosted zone information we need to validate\n # before we create the certificate\n for name in set([props['DomainName']] + props.get('SubjectAlternativeNames', [])):\n get_zone_for(name)\n\n del api_request['DomainValidationOptions']\n\n e['PhysicalResourceId'] = acm.request_certificate(\n IdempotencyToken=i_token,\n **api_request\n )['CertificateArn']\n add_tags()",
"def print_ofpt_get_config_request(msg):\n pass",
"def certificate_status_check(cache: dict, session, awsAccountId: str, awsRegion: str, awsPartition: str) -> dict:\n acm = session.client(\"acm\")\n iso8601Time = datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat()\n acm_certs = list_certificates(cache, session)\n for carn in acm_certs:\n # Get ACM Cert Details\n cert = acm.describe_certificate(CertificateArn=carn)[\"Certificate\"]\n # B64 encode all of the details for the Asset\n assetJson = json.dumps(cert,default=str).encode(\"utf-8\")\n assetB64 = base64.b64encode(assetJson)\n cDomainName = str(cert['DomainName'])\n cIssuer = str(cert['Issuer'])\n cSerial = str(cert['Serial'])\n cStatus = str(cert['Status'])\n cKeyAlgo = str(cert['KeyAlgorithm'])\n # this is a passing check\n if cStatus == 'ISSUED':\n finding = {\n \"SchemaVersion\": \"2018-10-08\",\n \"Id\": carn + \"/acm-cert-status-check\",\n \"ProductArn\": f\"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default\",\n \"GeneratorId\": carn,\n \"AwsAccountId\": awsAccountId,\n \"Types\": [\"Software and Configuration Checks/AWS Security Best Practices\"],\n \"FirstObservedAt\": iso8601Time,\n \"CreatedAt\": iso8601Time,\n \"UpdatedAt\": iso8601Time,\n \"Severity\": {\"Label\": \"INFORMATIONAL\"},\n \"Confidence\": 99,\n \"Title\": \"[ACM.5] ACM Certificates should be correctly validated\",\n \"Description\": f\"ACM Certificate {carn} is successfully issued\",\n \"Remediation\": {\n \"Recommendation\": {\n \"Text\": \"For more information on certificate issuing, please refer to the Issuing Certificates section of the AWS Certificate Manager User Guide.\",\n \"Url\": \"https://docs.aws.amazon.com/acm/latest/userguide/gs.html\"\n }\n },\n \"ProductFields\": {\n \"ProductName\": \"ElectricEye\",\n \"Provider\": \"AWS\",\n \"ProviderType\": \"CSP\",\n \"ProviderAccountId\": awsAccountId,\n \"AssetRegion\": awsRegion,\n \"AssetDetails\": assetB64,\n \"AssetClass\": \"Security Services\",\n \"AssetService\": \"Amazon Certificate Manager\",\n \"AssetComponent\": \"Certificate\"\n },\n \"Resources\": [\n {\n \"Type\": \"AwsCertificateManagerCertificate\",\n \"Id\": carn,\n \"Partition\": awsPartition,\n \"Region\": awsRegion,\n \"Details\": {\n \"AwsCertificateManagerCertificate\": {\n \"DomainName\": cDomainName,\n \"Issuer\": cIssuer,\n \"Serial\": cSerial,\n \"KeyAlgorithm\": cKeyAlgo,\n \"Status\": cStatus\n }\n }\n }\n ],\n \"Compliance\": {\n \"Status\": \"PASSED\",\n \"RelatedRequirements\": [\n \"NIST CSF V1.1 PR.MA-1\",\n \"NIST SP 800-53 Rev. 4 MA-2\",\n \"NIST SP 800-53 Rev. 4 MA-3\",\n \"NIST SP 800-53 Rev. 4 MA-5\",\n \"NIST SP 800-53 Rev. 
4 MA-6\",\n \"AICPA TSC CC8.1\",\n \"ISO 27001:2013 A.11.1.2\",\n \"ISO 27001:2013 A.11.2.4\",\n \"ISO 27001:2013 A.11.2.5\",\n \"ISO 27001:2013 A.11.2.6\"\n ]\n },\n \"Workflow\": {\"Status\": \"RESOLVED\"},\n \"RecordState\": \"ARCHIVED\"\n }\n yield finding\n elif cStatus == 'EXPIRED' or \\\n cStatus == 'VALIDATION_TIMED_OUT' or \\\n cStatus == 'REVOKED' or \\\n cStatus == 'FAILED':\n finding = {\n \"SchemaVersion\": \"2018-10-08\",\n \"Id\": carn + \"/acm-cert-renewal-status-check\",\n \"ProductArn\": f\"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default\",\n \"GeneratorId\": carn,\n \"AwsAccountId\": awsAccountId,\n \"Types\": [\"Software and Configuration Checks/AWS Security Best Practices\"],\n \"FirstObservedAt\": iso8601Time,\n \"CreatedAt\": iso8601Time,\n \"UpdatedAt\": iso8601Time,\n \"Severity\": {\"Label\": \"HIGH\"},\n \"Confidence\": 99,\n \"Title\": \"[ACM.5] ACM Certificates should be correctly validated\",\n \"Description\": f\"ACM Certificate {carn} has not been successfully issued. State: {cStatus}\",\n \"Remediation\": {\n \"Recommendation\": {\n \"Text\": \"For more information on certificate issuing, please refer to the Issuing Certificates section of the AWS Certificate Manager User Guide.\",\n \"Url\": \"https://docs.aws.amazon.com/acm/latest/userguide/gs.html\"\n }\n },\n \"ProductFields\": {\n \"ProductName\": \"ElectricEye\",\n \"Provider\": \"AWS\",\n \"ProviderType\": \"CSP\",\n \"ProviderAccountId\": awsAccountId,\n \"AssetRegion\": awsRegion,\n \"AssetDetails\": assetB64,\n \"AssetClass\": \"Security Services\",\n \"AssetService\": \"Amazon Certificate Manager\",\n \"AssetComponent\": \"Certificate\"\n },\n \"Resources\": [\n {\n \"Type\": \"AwsCertificateManagerCertificate\",\n \"Id\": carn,\n \"Partition\": awsPartition,\n \"Region\": awsRegion,\n \"Details\": {\n \"AwsCertificateManagerCertificate\": {\n \"DomainName\": cDomainName,\n \"Issuer\": cIssuer,\n \"Serial\": cSerial,\n \"KeyAlgorithm\": cKeyAlgo,\n \"Status\": cStatus\n }\n }\n }\n ],\n \"Compliance\": {\n \"Status\": \"FAILED\",\n \"RelatedRequirements\": [\n \"NIST CSF V1.1 PR.MA-1\",\n \"NIST SP 800-53 Rev. 4 MA-2\",\n \"NIST SP 800-53 Rev. 4 MA-3\",\n \"NIST SP 800-53 Rev. 4 MA-5\",\n \"NIST SP 800-53 Rev. 4 MA-6\",\n \"AICPA TSC CC8.1\",\n \"ISO 27001:2013 A.11.1.2\",\n \"ISO 27001:2013 A.11.2.4\",\n \"ISO 27001:2013 A.11.2.5\",\n \"ISO 27001:2013 A.11.2.6\"\n ]\n },\n \"Workflow\": {\"Status\": \"NEW\"},\n \"RecordState\": \"ACTIVE\"\n }\n yield finding",
"def _service_status(res, ctx):\n\n if _has_error_code(res):\n return print_errors(res, ctx)\n\n template = '''\\\nname: {name}\nconfig-location: {config_location}\nlog-location: {log_location}\nscript-location: {script_location}\nrunning: {running}\nenabled: {enabled}\ncontainer-running: {container_running}\ndeployment: {deployment}\nconfig: {config}'''\n\n result = template.format(name=res['name'],\n config_location=res['config_location'],\n log_location=res['log_location'],\n script_location=res['script_location'],\n running=res['running'],\n enabled=res['enabled'],\n container_running=res['container_status'].get('running', False),\n deployment=res['deployment'],\n config=res['config'])\n\n if 'callback_uri' in res:\n result += \"\\ncallback-uri: {callback_uri}\".format(callback_uri=res['callback_uri'])\n\n return result",
"def health_check():\n return dict(api_status='OK')",
"def CheckRapi(options, args):\n if args: # rapi doesn't take any arguments\n print(\"Usage: %s [-f] [-d] [-p port] [-b ADDRESS]\" %\n sys.argv[0], file=sys.stderr)\n sys.exit(constants.EXIT_FAILURE)\n\n if options.max_clients < 1:\n print(\"%s --max-clients argument must be >= 1\" %\n sys.argv[0], file=sys.stderr)\n sys.exit(constants.EXIT_FAILURE)\n\n ssconf.CheckMaster(options.debug)\n\n # Read SSL certificate (this is a little hackish to read the cert as root)\n if options.ssl:\n options.ssl_params = http.HttpSslParams(ssl_key_path=options.ssl_key,\n ssl_cert_path=options.ssl_cert,\n ssl_chain_path=options.ssl_chain)\n else:\n options.ssl_params = None",
"def _parse_certificate(cls, response):\n links = _parse_header_links(response)\n try:\n cert_chain_uri = links[u'up'][u'url']\n except KeyError:\n cert_chain_uri = None\n return (\n response.content()\n .addCallback(\n lambda body: messages.CertificateResource(\n uri=cls._maybe_location(response),\n cert_chain_uri=cert_chain_uri,\n body=body))\n )",
"def main():\n\n parser = argparse.ArgumentParser()\n parser.add_argument('-H', '--host', required=True)\n parser.add_argument('-p', '--port', default=443)\n parser.add_argument('-u', '--url', default='/')\n parser.add_argument('-c', '--cert', required=True)\n parser.add_argument('-k', '--key', required=True)\n parser.add_argument('-P', '--perfdata', action='append')\n args = parser.parse_args()\n\n csv = get_csv(args)\n rows = get_rows(csv)\n\n frontend_errors = get_frontend_errors(rows)\n backend_errors = get_backend_errors(rows)\n member_errors = get_member_errors(rows)\n perfdata = get_perfdata(args.perfdata, rows)\n\n code = NAGIOS_OK\n if member_errors:\n code = NAGIOS_WARNING\n if frontend_errors or backend_errors:\n code = NAGIOS_CRITICAL\n\n status = ['OK', 'WARNING', 'CRITICAL'][code]\n print '{} frontend errors {}; backend errors {}; member errors {} | {}'.\\\n format(status,\n ', '.join(frontend_errors) if frontend_errors else 'none',\n ', '.join(backend_errors) if backend_errors else 'none',\n ', '.join(member_errors) if member_errors else 'none',\n ' '.join(perfdata))\n\n sys.exit(code)",
"def info(name):\n try:\n cert = client().certificates.get(name)\n if not cert:\n logger.info('ctl:cert:info', 'No certificates found')\n return\n click.echo(click.style(cert[\"id\"], fg=\"white\", bold=True))\n click.echo(\n click.style(\" * Domain: \", fg=\"yellow\") + cert[\"domain\"]\n )\n click.echo(\n click.style(\" * Type: \", fg=\"yellow\") +\n \"{0}-bit {1}\".format(cert[\"keylength\"], cert[\"keytype\"])\n )\n click.echo(\n click.style(\" * SHA1: \", fg=\"yellow\") + cert[\"sha1\"]\n )\n click.echo(\n click.style(\" * Expires: \", fg=\"yellow\") +\n cert[\"expiry\"].strftime(\"%c\")\n )\n if cert.assigns:\n imsg = \", \".join([y[\"name\"] for y in cert[\"assigns\"]])\n click.echo(click.style(\" * Assigned to: \", fg=\"yellow\") + imsg)\n except Exception as e:\n raise CLIException(str(e))",
"def test_failure_config(self):\n resource_conf = {\n \"enable_dns_support\": \"true\"\n }\n scan_result = check.scan_resource_conf(conf=resource_conf)\n self.assertEqual(CheckResult.FAILED, scan_result)",
"def generate(name, domain, country, state, locale, email,\n keytype, keylength):\n if not domain:\n logger.error(\n \"ctl:info:generate\", \"Choose a fully-qualified domain name of the \"\n \"certificate. Must match a domain present on the system\"\n )\n domain = click.prompt(\"Domain name\")\n if not country:\n logger.info(\n \"ctl:cert:generate\",\n \"Two-character country code (ex.: 'US' or 'CA')\"\n )\n country = click.prompt(\"Country code\")\n if not state:\n state = click.prompt(\"State/Province\")\n if not locale:\n locale = click.prompt(\"City/Town/Locale\")\n if not email:\n email = click.prompt(\"Contact email [optional]\")\n try:\n cmd = client().certificates.generate\n job, data = cmd(\n name, domain, country, state, locale, email, keytype, keylength)\n handle_job(job)\n except Exception as e:\n raise CLIException(str(e))",
"def cli(ctx, api_key, host, port, campaign_number):\n ### Plan\n\n #### Configuration\n # Check what variables are still needed\n ## Read config file for those specific needs\n ### Error if any of the above values are not provided in the config file or CLI\n\n if api_key is None or host is None or port is None:\n config = configparser.ConfigParser()\n config.read(ctx.config)\n\n if api_key is None:\n api_key = config['GOPHISH']['api_key']\n logger.info('API Key from config file : {}'.format(api_key))\n\n if host is None:\n host = config['GOPHISH']['host']\n logger.info('Host from config file : {}'.format(host))\n\n if port is None:\n port = config['GOPHISH']['port']\n logger.info('Port from config file : {}'.format(port))\n\n ctx.api_key = api_key\n ctx.host = host\n ctx.port = port\n ctx.campaign_number = campaign_number\n ctx.campaign_info = None"
] | [
"0.56574243",
"0.5644858",
"0.5579412",
"0.55728805",
"0.5509885",
"0.5457003",
"0.54091847",
"0.5372856",
"0.5328325",
"0.53195554",
"0.5303918",
"0.52742136",
"0.5230642",
"0.52138305",
"0.5192459",
"0.5187366",
"0.51676726",
"0.5148341",
"0.51295745",
"0.5124653",
"0.5091486",
"0.50788873",
"0.50488615",
"0.4998609",
"0.49974316",
"0.49968612",
"0.49696502",
"0.49694422",
"0.49669597",
"0.49655607"
] | 0.60469764 | 0 |
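The positive document in the row above is a thin HPE OneView ("Fusion") Robot Framework keyword that simply delegates a GET to an underlying REST client (self.cv.get). For orientation, here is a minimal sketch of an equivalent raw call with requests; the endpoint path and header names are illustrative assumptions, not taken from the record:

import requests

def get_certificate_validation_configuration(host: str, session_token: str) -> dict:
    # Hypothetical OneView-style session headers (names assumed).
    headers = {"Auth": session_token, "X-API-Version": "2000"}
    # Assumed endpoint path for the certificate validation configuration.
    resp = requests.get(
        f"https://{host}/rest/certificates/validation",
        headers=headers,
        verify=False,  # appliances often ship self-signed certs; verify properly in production
    )
    resp.raise_for_status()
    return resp.json()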
Generate certificate signing request used to get certificate [Arguments] | def fusion_api_generate_certificate_signing_request(self, body, api=None, headers=None):
return self.wsc.post(body, api=api, headers=headers) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def req_handler(args):\n key = _get_key(args)\n subject = get_subject_arguments()\n req = create_certificate_request(key, subject=subject, file_name=args.req_out)\n if not args.req_out:\n print(print_certificate_request(req))\n return req",
"def req_command(args):\n if args.files:\n die(\"Unexpected positional arguments\")\n\n subject_info = info_from_args(args)\n\n if subject_info.ca:\n msg('Request for CA cert')\n else:\n msg('Request for end-entity cert')\n subject_info.show(msg_show)\n\n # Load private key, create signing request\n key = load_key(args.key, load_password(args.password_file))\n req = create_x509_req(key, subject_info)\n do_output(req_to_pem(req), args, 'req')",
"def create_x509_req(privkey, subject_info):\n builder = x509.CertificateSigningRequestBuilder()\n builder = builder.subject_name(subject_info.get_name())\n builder = subject_info.install_extensions(builder)\n\n # create final request\n req = builder.sign(private_key=privkey, algorithm=SHA256(), backend=get_backend())\n return req",
"def CreateRequests(self, args):\n\n ssl_certificate_ref = self.SSL_CERTIFICATE_ARG.ResolveAsResource(\n args, self.resources)\n certificate = file_utils.ReadFile(args.certificate, 'certificate')\n private_key = file_utils.ReadFile(args.private_key, 'private key')\n\n request = self.messages.ComputeSslCertificatesInsertRequest(\n sslCertificate=self.messages.SslCertificate(\n name=ssl_certificate_ref.Name(),\n certificate=certificate,\n privateKey=private_key,\n description=args.description),\n project=self.project)\n\n return [request]",
"def create_ssl_cert_request ( ssl_hostnames ) :\n first_hostname = ssl_hostnames[ 0 ]\n csr_filename = get_ssl_csr_filename( first_hostname )\n key_filename = get_ssl_key_filename( first_hostname )\n openssl_cnf = \"\"\"\n[req]\ndistinguished_name = req_distinguished_name\nreq_extensions = san_ext\n\n[req_distinguished_name]\ncountryName_default = US\nstateOrProvinceName_default = New York\nlocalityName_default = New York\norganizationalUnitName_default = Home Box Office, Inc\ncommonName_default = \"\"\" + first_hostname + \"\"\"\n\n[san_ext]\nbasicConstraints = CA:FALSE\nkeyUsage = nonRepudiation, digitalSignature, keyEncipherment\nsubjectAltName = @sans\n\n[sans]\n\"\"\"\n counter = 0\n for hostname in ssl_hostnames :\n counter += 1\n openssl_cnf += 'DNS.' + str( counter ) + ' = ' + hostname + '\\n'\n\n with open( first_hostname, 'w' ) as f :\n f.write( openssl_cnf )\n cmd = 'openssl req -new -newkey rsa:2048 -nodes -out ' + csr_filename + ' -keyout ' + key_filename\n cmd += ' -config ' + first_hostname + ' -subj \"/C=US/ST=New York/L=New York/O=Home Box Office Inc/CN=' + first_hostname + '\"'\n keygen = subprocess.call( cmd, shell = True )\n os.remove( first_hostname )\n if keygen != 0 :\n print \"Generation of SSL request failed!\"\n return None\n\n return { 'csr-filename' : csr_filename, 'key-filename' : key_filename }",
"def request_cert():\n\n api_request = shallow_copy(props)\n\n for key in ['ServiceToken', 'Region', 'Tags', 'Route53RoleArn']:\n api_request.pop(key, None)\n\n if 'ValidationMethod' in props:\n if props['ValidationMethod'] == 'DNS':\n\n # Check that we have all the hosted zone information we need to validate\n # before we create the certificate\n for name in set([props['DomainName']] + props.get('SubjectAlternativeNames', [])):\n get_zone_for(name)\n\n del api_request['DomainValidationOptions']\n\n e['PhysicalResourceId'] = acm.request_certificate(\n IdempotencyToken=i_token,\n **api_request\n )['CertificateArn']\n add_tags()",
"def sign_command(args):\n if args.files:\n die(\"Unexpected positional arguments\")\n\n # Load certificate request\n if not args.request:\n die(\"Need --request\")\n subject_csr = load_req(args.request)\n\n reset_info = None\n if args.reset:\n reset_info = info_from_args(args)\n\n # Load CA info\n if not args.ca_info:\n die(\"Need --ca-info\")\n if args.ca_info.endswith('.csr'):\n issuer_obj = load_req(args.ca_info)\n else:\n issuer_obj = load_cert(args.ca_info)\n\n # Load CA private key\n issuer_key = load_key(args.ca_key, load_password(args.password_file))\n if not same_pubkey(issuer_key, issuer_obj):\n die(\"--ca-private-key does not match --ca-info data\")\n\n # Certificate generation\n cert = do_sign(subject_csr, issuer_obj, issuer_key, args.days, args.path_length, args.request, reset_info=reset_info)\n\n # Write certificate\n do_output(cert_to_pem(cert), args, 'x509')",
"def createCertRequest(pkey, digest=\"sha256\", **name):\n req = crypto.X509Req()\n subj = req.get_subject()\n\n for key, value in name.items():\n setattr(subj, key, value)\n\n req.set_pubkey(pkey)\n req.sign(pkey, digest)\n return req",
"def sign_handler(args):\n if not args.issuer_key and not args.issuer_cert:\n key = _get_key(args)\n subject = get_subject_arguments()\n\n cert = selfsigned_certificate_for_key(\n key,\n subject=subject,\n serial_number=int(args.serial_number),\n length=args.duration,\n file_name=args.cert_out\n )\n\n else:\n req = _get_request(args)\n issuer_cert = load_certificate(args.issuer_cert)\n issuer_key = load_key(args.issuer_key)\n cert = sign_request(\n req,\n issuer_cert=issuer_cert,\n issuer_key=issuer_key,\n length=args.duration,\n file_name=args.cert_out\n )\n\n if not args.cert_out:\n print(print_certificate(cert))",
"def get_ssl_certificate():",
"def get_ssl_certificate() :",
"def generate_key_and_cert():\n signing_key = rsa.generate_private_key(backend=crypto_default_backend(), public_exponent=65537, key_size=2048)\n subject = issuer = x509.Name(\n [\n x509.NameAttribute(NameOID.COUNTRY_NAME, 'NO'),\n x509.NameAttribute(NameOID.STATE_OR_PROVINCE_NAME, 'OSLO'),\n x509.NameAttribute(NameOID.LOCALITY_NAME, 'OSLO'),\n x509.NameAttribute(NameOID.ORGANIZATION_NAME, 'Intility AS'),\n x509.NameAttribute(NameOID.COMMON_NAME, 'intility.no'),\n ]\n )\n signing_cert = (\n x509.CertificateBuilder()\n .subject_name(subject)\n .issuer_name(issuer)\n .public_key(signing_key.public_key())\n .serial_number(x509.random_serial_number())\n .not_valid_before(datetime.utcnow())\n .not_valid_after(\n # Our certificate will be valid for 10 days\n datetime.utcnow()\n + timedelta(days=10)\n # Sign our certificate with our private key\n )\n .sign(signing_key, hashes.SHA256(), crypto_default_backend())\n .public_bytes(crypto_serialization.Encoding.DER)\n )\n return signing_key, signing_cert",
"def make_cert_for_spki_request(spki_req_b64, serial, ident):\n spki_obj = netscape_spki_from_b64(spki_req_b64)\n if spki_obj is None:\n raise ValueError('Invalid SPKI object')\n\n root_crt = _try_load_ca_cert(cfg.ca_cert_path())\n root_key = _try_load_ca_private_key(cfg.ca_private_key_path())\n crt = _make_base_cert(spki_obj.get_pubkey(), 365, ident, serial)\n crt.set_issuer(root_crt.get_subject())\n crt.sign(root_key, 'sha256')\n return crypto.dump_certificate(crypto.FILETYPE_ASN1, crt)",
"def selfsign_command(args):\n if args.files:\n die(\"Unexpected positional arguments\")\n\n subject_info = info_from_args(args)\n\n if subject_info.ca:\n msg('Request for CA cert')\n else:\n msg('Request for end-entity cert')\n subject_info.show(msg_show)\n\n # Load private key, create signing request\n key = load_key(args.key, load_password(args.password_file))\n subject_csr = create_x509_req(key, subject_info)\n\n # sign created request\n cert = do_sign(subject_csr, subject_csr, key, args.days, args.path_length, '<selfsign>')\n do_output(cert_to_pem(cert), args, 'x509')",
"def generate(name, domain, country, state, locale, email,\n keytype, keylength):\n if not domain:\n logger.error(\n \"ctl:info:generate\", \"Choose a fully-qualified domain name of the \"\n \"certificate. Must match a domain present on the system\"\n )\n domain = click.prompt(\"Domain name\")\n if not country:\n logger.info(\n \"ctl:cert:generate\",\n \"Two-character country code (ex.: 'US' or 'CA')\"\n )\n country = click.prompt(\"Country code\")\n if not state:\n state = click.prompt(\"State/Province\")\n if not locale:\n locale = click.prompt(\"City/Town/Locale\")\n if not email:\n email = click.prompt(\"Contact email [optional]\")\n try:\n cmd = client().certificates.generate\n job, data = cmd(\n name, domain, country, state, locale, email, keytype, keylength)\n handle_job(job)\n except Exception as e:\n raise CLIException(str(e))",
"def sign_certificate(csr):\n unique_filename = str(uuid.uuid4().hex)\n\n file = open(\"./csr_req/%s.csr\" % unique_filename, \"w\")\n file.write(csr.decode(\"utf-8\"))\n file.close()\n\n subprocess.run([\"../ca/scripts/sign.sh\", unique_filename], check=False)\n\n file = open(\"./csr_req/%s.p7b\" % unique_filename, \"r\")\n cert = file.read()\n\n os.remove(\"./csr_req/%s.csr\" % unique_filename)\n os.remove(\"./csr_req/%s.p7b\" % unique_filename)\n\n return cert",
"def fusion_api_create_certificate_request(self, body, uri=None, api=None, param='', headers=None):\n param = '/certificates/https/certificaterequest'\n return self.ic.post(uri=uri, body=body, api=api, headers=headers, param=param)",
"def opensslCmsCertCreate( ownerCertFile ):\n opensslCmdArgs = [ \"openssl\", \"crl2pkcs7\", \"-certfile\", ownerCertFile,\n \"-nocrl\", \"-outform\", \"der\" ]\n ownerCertCmsDerBase64 = runOpensslCmd( opensslCmdArgs, [ \"base64\" ] )\n return ownerCertCmsDerBase64",
"def generate_cybersource_sa_signature(payload):\n # This is documented in certain CyberSource sample applications:\n # http://apps.cybersource.com/library/documentation/dev_guides/Secure_Acceptance_SOP/html/wwhelp/wwhimpl/js/html/wwhelp.htm#href=creating_profile.05.6.html\n keys = payload[\"signed_field_names\"].split(\",\")\n message = \",\".join(f\"{key}={payload[key]}\" for key in keys)\n\n digest = hmac.new(\n settings.CYBERSOURCE_SECURITY_KEY.encode(\"utf-8\"),\n msg=message.encode(\"utf-8\"),\n digestmod=hashlib.sha256,\n ).digest()\n\n return b64encode(digest).decode(\"utf-8\")",
"def create_selfsigned_certificates(name):\n pass",
"def request(domain):\n if not domain:\n logger.error(\n \"ctl:info:generate\", \"Choose a fully-qualified domain name of the \"\n \"certificate. Must match a domain present on the system\"\n )\n domain = click.prompt(\"Domain name\")\n try:\n client().certificates.request_acme_certificate(domain)\n except Exception as e:\n raise CLIException(str(e))",
"def sign_certificate_request(csr, rootkey, rootcrt, client_key, domain_name, notBefore, notAfter):\n\n serial_number = int(str(uuid.uuid4().int)[:20])\n crt = x509.CertificateBuilder().subject_name(\n csr.subject\n ).issuer_name(\n rootcrt.subject\n ).public_key(\n csr.public_key()\n ).serial_number(\n serial_number # pylint: disable=no-member\n ).not_valid_before(\n notBefore\n ).not_valid_after(\n notAfter\n ).add_extension(\n extension=x509.KeyUsage(\n digital_signature=True, key_encipherment=True, content_commitment=True,\n data_encipherment=False, key_agreement=False, encipher_only=False, decipher_only=False, key_cert_sign=False, crl_sign=False\n ),\n critical=True\n ).add_extension(\n extension=x509.BasicConstraints(ca=False, path_length=None),\n critical=True\n ).add_extension(\n extension=x509.AuthorityKeyIdentifier.from_issuer_public_key(rootkey.public_key()),\n critical=False\n ).add_extension(\n csr.extensions.get_extension_for_oid(ExtensionOID.SUBJECT_ALTERNATIVE_NAME).value,\n critical=False,\n ).sign(\n private_key=rootkey,\n algorithm=hashes.SHA256(),\n backend=default_backend()\n )\n\n ##storing client's .crt\n with open(domain_name + \".crt\", 'wb') as f:\n f.write(crt.public_bytes(encoding=serialization.Encoding.PEM))",
"def Certificate(self) -> _n_8_t_0:",
"def Certificate(self) -> _n_8_t_0:",
"def CreateCertificate(self, request):\n try:\n params = request._serialize()\n headers = request.headers\n body = self.call(\"CreateCertificate\", params, headers=headers)\n response = json.loads(body)\n model = models.CreateCertificateResponse()\n model._deserialize(response[\"Response\"])\n return model\n except Exception as e:\n if isinstance(e, TencentCloudSDKException):\n raise\n else:\n raise TencentCloudSDKException(type(e).__name__, str(e))",
"def generate_verification_code(\n self, certificate_name, if_match, resource_group_name, provisioning_service_name, certificatename=None, certificateraw_bytes=None, certificateis_verified=None, certificatepurpose=None, certificatecreated=None, certificatelast_updated=None, certificatehas_private_key=None, certificatenonce=None, custom_headers=None, raw=False, **operation_config):\n # Construct URL\n url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/provisioningServices/{provisioningServiceName}/certificates/{certificateName}/generateVerificationCode'\n path_format_arguments = {\n 'certificateName': self._serialize.url(\"certificate_name\", certificate_name, 'str'),\n 'subscriptionId': self._serialize.url(\"self.config.subscription_id\", self.config.subscription_id, 'str'),\n 'resourceGroupName': self._serialize.url(\"resource_group_name\", resource_group_name, 'str'),\n 'provisioningServiceName': self._serialize.url(\"provisioning_service_name\", provisioning_service_name, 'str')\n }\n url = self._client.format_url(url, **path_format_arguments)\n\n # Construct parameters\n query_parameters = {}\n if certificatename is not None:\n query_parameters['certificate.name'] = self._serialize.query(\"certificatename\", certificatename, 'str')\n if certificateraw_bytes is not None:\n query_parameters['certificate.rawBytes'] = self._serialize.query(\"certificateraw_bytes\", certificateraw_bytes, 'bytearray')\n if certificateis_verified is not None:\n query_parameters['certificate.isVerified'] = self._serialize.query(\"certificateis_verified\", certificateis_verified, 'bool')\n if certificatepurpose is not None:\n query_parameters['certificate.purpose'] = self._serialize.query(\"certificatepurpose\", certificatepurpose, 'str')\n if certificatecreated is not None:\n query_parameters['certificate.created'] = self._serialize.query(\"certificatecreated\", certificatecreated, 'iso-8601')\n if certificatelast_updated is not None:\n query_parameters['certificate.lastUpdated'] = self._serialize.query(\"certificatelast_updated\", certificatelast_updated, 'iso-8601')\n if certificatehas_private_key is not None:\n query_parameters['certificate.hasPrivateKey'] = self._serialize.query(\"certificatehas_private_key\", certificatehas_private_key, 'bool')\n if certificatenonce is not None:\n query_parameters['certificate.nonce'] = self._serialize.query(\"certificatenonce\", certificatenonce, 'str')\n query_parameters['api-version'] = self._serialize.query(\"self.api_version\", self.api_version, 'str')\n\n # Construct headers\n header_parameters = {}\n header_parameters['Content-Type'] = 'application/json; charset=utf-8'\n if self.config.generate_client_request_id:\n header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())\n if custom_headers:\n header_parameters.update(custom_headers)\n header_parameters['If-Match'] = self._serialize.header(\"if_match\", if_match, 'str')\n if self.config.accept_language is not None:\n header_parameters['accept-language'] = self._serialize.header(\"self.config.accept_language\", self.config.accept_language, 'str')\n\n # Construct and send request\n request = self._client.post(url, query_parameters)\n response = self._client.send(request, header_parameters, stream=False, **operation_config)\n\n if response.status_code not in [200]:\n raise models.ErrorDetailsException(self._deserialize, response)\n\n deserialized = None\n\n if response.status_code == 200:\n deserialized = self._deserialize('VerificationCodeResponse', 
response)\n\n if raw:\n client_raw_response = ClientRawResponse(deserialized, response)\n return client_raw_response\n\n return deserialized",
"def Run(self, args):\n holder = base_classes.ComputeApiHolder(self.ReleaseTrack())\n client = holder.client\n\n ssl_certificate_ref = self.SSL_CERTIFICATE_ARG.ResolveAsResource(\n args, holder.resources, default_scope=compute_scope.ScopeEnum.GLOBAL)\n\n certificate = files.ReadFileContents(args.certificate)\n private_key = files.ReadFileContents(args.private_key)\n\n if ssl_certificates_utils.IsRegionalSslCertificatesRef(ssl_certificate_ref):\n request = client.messages.ComputeRegionSslCertificatesInsertRequest(\n sslCertificate=client.messages.SslCertificate(\n name=ssl_certificate_ref.Name(),\n certificate=certificate,\n privateKey=private_key,\n description=args.description),\n region=ssl_certificate_ref.region,\n project=ssl_certificate_ref.project)\n collection = client.apitools_client.regionSslCertificates\n else:\n request = client.messages.ComputeSslCertificatesInsertRequest(\n sslCertificate=client.messages.SslCertificate(\n name=ssl_certificate_ref.Name(),\n certificate=certificate,\n privateKey=private_key,\n description=args.description),\n project=ssl_certificate_ref.project)\n collection = client.apitools_client.sslCertificates\n\n return client.MakeRequests([(collection, 'Insert', request)])",
"def request_certificate(request):\r\n if request.method == \"POST\":\r\n if request.user.is_authenticated():\r\n xqci = XQueueCertInterface()\r\n username = request.user.username\r\n student = User.objects.get(username=username)\r\n course_key = SlashSeparatedCourseKey.from_deprecated_string(request.POST.get('course_id'))\r\n course = modulestore().get_course(course_key, depth=2)\r\n\r\n status = certificate_status_for_student(student, course_key)['status']\r\n if status in [CertificateStatuses.unavailable, CertificateStatuses.notpassing, CertificateStatuses.error]:\r\n logger.info('Grading and certification requested for user {} in course {} via /request_certificate call'.format(username, course_key))\r\n status = xqci.add_cert(student, course_key, course=course)\r\n return HttpResponse(json.dumps({'add_status': status}), mimetype='application/json')\r\n return HttpResponse(json.dumps({'add_status': 'ERRORANONYMOUSUSER'}), mimetype='application/json')",
"def _create_certificate_chain():\n caext = X509Extension(b\"basicConstraints\", False, b\"CA:true\")\n not_after_date = datetime.date.today() + datetime.timedelta(days=365)\n not_after = not_after_date.strftime(\"%Y%m%d%H%M%SZ\").encode(\"ascii\")\n\n # Step 1\n cakey = PKey()\n cakey.generate_key(TYPE_RSA, 2048)\n cacert = X509()\n cacert.set_version(2)\n cacert.get_subject().commonName = \"Authority Certificate\"\n cacert.set_issuer(cacert.get_subject())\n cacert.set_pubkey(cakey)\n cacert.set_notBefore(b\"20000101000000Z\")\n cacert.set_notAfter(not_after)\n cacert.add_extensions([caext])\n cacert.set_serial_number(0)\n cacert.sign(cakey, \"sha256\")\n\n # Step 2\n ikey = PKey()\n ikey.generate_key(TYPE_RSA, 2048)\n icert = X509()\n icert.set_version(2)\n icert.get_subject().commonName = \"Intermediate Certificate\"\n icert.set_issuer(cacert.get_subject())\n icert.set_pubkey(ikey)\n icert.set_notBefore(b\"20000101000000Z\")\n icert.set_notAfter(not_after)\n icert.add_extensions([caext])\n icert.set_serial_number(0)\n icert.sign(cakey, \"sha256\")\n\n # Step 3\n skey = PKey()\n skey.generate_key(TYPE_RSA, 2048)\n scert = X509()\n scert.set_version(2)\n scert.get_subject().commonName = \"Server Certificate\"\n scert.set_issuer(icert.get_subject())\n scert.set_pubkey(skey)\n scert.set_notBefore(b\"20000101000000Z\")\n scert.set_notAfter(not_after)\n scert.add_extensions(\n [X509Extension(b\"basicConstraints\", True, b\"CA:false\")]\n )\n scert.set_serial_number(0)\n scert.sign(ikey, \"sha256\")\n\n return [(cakey, cacert), (ikey, icert), (skey, scert)]",
"def certificate(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"certificate\")"
] | [
"0.7107218",
"0.69851446",
"0.6928943",
"0.68775123",
"0.6876872",
"0.68521",
"0.68254477",
"0.6718467",
"0.6630469",
"0.6468456",
"0.64300436",
"0.64171946",
"0.6409522",
"0.6374398",
"0.6368085",
"0.6367285",
"0.6330612",
"0.61382043",
"0.6105512",
"0.6057998",
"0.60406613",
"0.597868",
"0.5976932",
"0.5976932",
"0.5919605",
"0.58955413",
"0.5879299",
"0.577707",
"0.5767665",
"0.5744921"
] | 0.7087885 | 1 |
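This is the one rank-1 row in view: the hardest negative (req_handler, scored 0.7107218) edges out the positive document (0.7087885). Several of this row's negatives build a certificate signing request directly with the cryptography package (see create_x509_req above); a self-contained sketch of that pattern, with placeholder subject values:

from cryptography import x509
from cryptography.hazmat.primitives import hashes, serialization
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.x509.oid import NameOID

# Generate a throwaway RSA key and sign a CSR with it; the common name and
# SAN entry are placeholders.
key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
csr = (
    x509.CertificateSigningRequestBuilder()
    .subject_name(x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, "example.test")]))
    .add_extension(
        x509.SubjectAlternativeName([x509.DNSName("example.test")]),
        critical=False,
    )
    .sign(key, hashes.SHA256())
)
print(csr.public_bytes(serialization.Encoding.PEM).decode())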
Used to import appliance certificate into OneView [Arguments] | def fusion_api_import_appliance_certificate(self, body, api=None, headers=None, param=''):
return self.wsc.put(body, api=api, headers=headers, param=param) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def fusion_api_import_client_certificate(self, body, api=None, headers=None):\n return self.client_certificate.post(body, api, headers)",
"def show_command(args):\n for fn in args.files:\n ext = os.path.splitext(fn)[1].lower()\n if ext == '.csr':\n cmd = ['openssl', 'req', '-in', fn, '-text']\n elif ext == '.crt':\n cmd = ['openssl', 'x509', '-in', fn, '-text']\n else:\n die(\"Unsupported file: %s\", fn)\n subprocess.check_call(cmd)",
"def configure_app(self, app, parser):\n parser.add_argument(\n 'infile',\n nargs=argparse.OPTIONAL,\n default='-',\n help='JSON-encoded glucose data'\n )",
"def do_import(args):\n base64str = b''\n for infile_name in args.infile_names:\n if args.png:\n chunk = subprocess.check_output(['zbarimg', '--raw', infile_name])\n base64str += chunk\n elif args.base64:\n with open(infile_name, 'rb') as infile:\n chunk = infile.read()\n base64str += chunk\n\n raw = base64.b64decode(base64str)\n paperkey = subprocess.Popen(['paperkey', '--pubring', args.pubkey],\n stdin=subprocess.PIPE,\n stdout=subprocess.PIPE)\n (paperkey_stdout, _) = paperkey.communicate(raw)\n gpg = subprocess.Popen(['gpg', '--import'], stdin=subprocess.PIPE)\n gpg.communicate(paperkey_stdout)",
"def add_certificate_arguments(parser):\n group = parser.add_argument_group(\"Certificate management\")\n group.add_argument(\n \"-sn\", \"--serial_number\",\n help=\"Serial number for the certificate\",\n type=int,\n default=1\n )\n group.add_argument(\n \"-d\", \"--duration\",\n help=\"Period of validity for certificate (seconds)\",\n type=int,\n default=60*60*24*(365*100+25)\n )",
"def _ParseCertificateArguments(client, args):\n self_managed = None\n managed = None\n certificate_type = None\n if args.certificate:\n certificate_type = \\\n client.messages.SslCertificate.TypeValueValuesEnum.SELF_MANAGED\n certificate = files.ReadFileContents(args.certificate)\n private_key = files.ReadFileContents(args.private_key)\n self_managed = client.messages.SslCertificateSelfManagedSslCertificate(\n certificate=certificate, privateKey=private_key)\n if args.domains:\n certificate_type = \\\n client.messages.SslCertificate.TypeValueValuesEnum.MANAGED\n managed = client.messages.SslCertificateManagedSslCertificate(\n domains=args.domains)\n return certificate_type, self_managed, managed",
"def run(self, line):\r\n if os.name == 'nt':\r\n if not ctypes.windll.shell32.IsUserAnAdmin() != 0:\r\n self.app.typepath.adminpriv = False\r\n elif not os.getuid() == 0:\r\n self.app.typepath.adminpriv = False\r\n\r\n nargv = []\r\n curr = []\r\n argfound = False\r\n\r\n if \"--version\" in line or \"-V\" in line:\r\n sys.stdout.write(\"\"\"%(progname)s %(version)s\\n\"\"\" % \\\r\n {'progname': versioning.__longname__, 'version': \\\r\n versioning.__version__})\r\n sys.stdout.flush()\r\n sys.exit(self.retcode)\r\n\r\n else:\r\n for argument in enumerate(line):\r\n if not argfound and not argument[1].startswith('-'):\r\n nargv = line[argument[0]:]\r\n break\r\n else:\r\n argfound = False\r\n\r\n if argument[1] == \"-c\":\r\n argfound = True\r\n\r\n curr.append(argument[1])\r\n\r\n (self.opts, _) = self.parser.parse_args(curr)\r\n\r\n try:\r\n Encryption.encode_credentials('test')\r\n self.app.set_encode_funct(Encryption.encode_credentials)\r\n self.app.set_decode_funct(Encryption.decode_credentials)\r\n self.encoding = True\r\n except redfish.hpilo.risblobstore2.ChifDllMissingError:\r\n self.encoding = False\r\n\r\n if self.opts.config is not None and len(self.opts.config) > 0:\r\n if not os.path.isfile(self.opts.config):\r\n self.retcode = ReturnCodes.CONFIGURATION_FILE_ERROR\r\n sys.exit(self.retcode)\r\n\r\n self.app.config_file = self.opts.config\r\n\r\n self.app.config_from_file(self.app.config_file)\r\n if self.opts.logdir and self.opts.debug:\r\n logdir = self.opts.logdir\r\n else:\r\n logdir = self.app.config.get_logdir()\r\n\r\n if logdir and self.opts.debug:\r\n try:\r\n os.makedirs(logdir)\r\n except OSError as ex:\r\n if ex.errno == errno.EEXIST:\r\n pass\r\n else:\r\n raise\r\n\r\n if self.opts.debug:\r\n logfile = os.path.join(logdir, versioning.__shortname__+'.log')\r\n\r\n # Create a file logger since we got a logdir\r\n lfile = logging.FileHandler(filename=logfile)\r\n formatter = logging.Formatter(\"%(asctime)s %(levelname)s\\t: \" \\\r\n \"%(message)s\")\r\n\r\n lfile.setFormatter(formatter)\r\n lfile.setLevel(logging.DEBUG)\r\n LOGGER.addHandler(lfile)\r\n self.app.LOGGER = LOGGER\r\n\r\n cachedir = None\r\n if self.opts.nocache:\r\n self.app.config.set_cache(False)\r\n else:\r\n self.app.config.set_cachedir(os.path.join(self.opts.config_dir, \\\r\n 'cache'))\r\n cachedir = self.app.config.get_cachedir()\r\n\r\n if cachedir:\r\n try:\r\n os.makedirs(cachedir)\r\n except OSError as ex:\r\n if ex.errno == errno.EEXIST:\r\n pass\r\n else:\r\n raise\r\n\r\n if (\"login\" in line or any(x.startswith(\"--url\") for x in line) or not line)\\\r\n and not (any(x.startswith((\"-h\", \"--h\")) for x in nargv) or \"help\" in line):\r\n self.app.logout()\r\n else:\r\n self.app.restore()\r\n self.opts.is_redfish = self.app.updatedefinesflag(redfishflag=\\\r\n self.opts.is_redfish)\r\n\r\n if nargv:\r\n try:\r\n self.retcode = self._run_command(self.opts, nargv)\r\n if self.app.config.get_cache():\r\n if (\"logout\" not in line) and (\"--logout\" not in line):\r\n self.app.save()\r\n else:\r\n self.app.logout()\r\n except Exception as excp:\r\n self.handle_exceptions(excp)\r\n\r\n return self.retcode\r\n else:\r\n self.cmdloop(self.opts)\r\n\r\n if self.app.config.get_cache():\r\n self.app.save()\r\n else:\r\n self.app.logout()",
"def cli():\n prog_desc = 'Run an Answer Agreement Analysis on the command line.'\n parser = argparse.ArgumentParser(description=prog_desc)\n parser.add_argument('datafile', help='A file with the data for analysis')\n parser.add_argument('-x', '--xlsform', help=(\n 'The XlsForm used to create this dataset. This is optional. Not all '\n 'datasets come from ODK. If supplied, then the XlsForm information is '\n 'used to remove points of comparison, such as \"calculate\" types.'\n ))\n parser.add_argument('-g', '--group_column', help=(\n 'The column used to identify groups in the dataset. If not supplied, '\n 'then the entire dataset is treated as from one group.'\n ))\n parser.add_argument('-f', '--first', help=(\n 'The first column to start analyzing. If not supplied, then the first '\n 'column of the dataset is used.'\n ))\n parser.add_argument('-l', '--last', help=(\n 'The last column to analyze. If not supplied, then the last column of '\n 'the dataset is used.'\n ))\n parser.add_argument('-s', '--separator', action='store_true', help=(\n 'If option is supplied (with no argument!), then the separator is '\n 'switched to the hyphen \"-\". By default, the colon \":\" is used. This '\n 'is only used if an ODK file is passed in.'\n ))\n args = parser.parse_args()\n if args.xlsform:\n sep = '-' if args.separator else ':'\n agree = DatasetAgreement.from_file_and_odk(args.datafile, args.xlsform,\n args.group_column,\n args.first, args.last, sep)\n else:\n agree = DatasetAgreement.from_file(args.datafile, args.group_column,\n mask_first=args.first,\n mask_last=args.last)\n agree.print_summary()",
"def __init__(self, args):\n if len(args) < 5:\n raise ProgArgumentsErr(args[0])\n else:\n (config_file_name, clients_file_name, transactions_file_name, compnies_file_name) = args[1:5]\n \n Application.setConfigFileName(config_file_name)\n Application.setClientsFileName(clients_file_name)\n Application.setTransactiosFileName(transactions_file_name)\n Application.setCompaniesFileName(compnies_file_name)\n \n self.clientMgr = ClientManager.getInstance()\n self.tradesAppl = TradingApplication.getInstance()\n self.sec = SecurityManager.getInstance()",
"def main(argv):\n\n\n parser = argparse.ArgumentParser(description='convert der to raw')\n parser.add_argument('-s','--secretkey_file', help='Secret key', required=True)\n parser.add_argument('-p','--publickey_file', help='Public key', required=True)\n args = parser.parse_args()\n\n secretkey_file = args.secretkey_file\n publickey_file = args.publickey_file\n\n\n privkey = SigningKey.from_der(open(secretkey_file).read())\n pubkey = VerifyingKey.from_der(open(publickey_file).read())\n\n open(secretkey_file[0:-4] + \".bin\", \"wb\").write(privkey.to_string())\n open(publickey_file[0:-4] + \".bin\", \"wb\").write(pubkey.to_string())",
"def CreateRequests(self, args):\n\n ssl_certificate_ref = self.SSL_CERTIFICATE_ARG.ResolveAsResource(\n args, self.resources)\n certificate = file_utils.ReadFile(args.certificate, 'certificate')\n private_key = file_utils.ReadFile(args.private_key, 'private key')\n\n request = self.messages.ComputeSslCertificatesInsertRequest(\n sslCertificate=self.messages.SslCertificate(\n name=ssl_certificate_ref.Name(),\n certificate=certificate,\n privateKey=private_key,\n description=args.description),\n project=self.project)\n\n return [request]",
"def _Run(args, holder, ssl_certificate_ref):\n client = holder.client\n\n certificate_type, self_managed, managed = _ParseCertificateArguments(\n client, args)\n\n if ssl_certificates_utils.IsRegionalSslCertificatesRef(ssl_certificate_ref):\n request = client.messages.ComputeRegionSslCertificatesInsertRequest(\n sslCertificate=client.messages.SslCertificate(\n type=certificate_type,\n name=ssl_certificate_ref.Name(),\n selfManaged=self_managed,\n managed=managed,\n description=args.description),\n region=ssl_certificate_ref.region,\n project=ssl_certificate_ref.project)\n else:\n request = client.messages.ComputeSslCertificatesInsertRequest(\n sslCertificate=client.messages.SslCertificate(\n type=certificate_type,\n name=ssl_certificate_ref.Name(),\n selfManaged=self_managed,\n managed=managed,\n description=args.description),\n project=ssl_certificate_ref.project)\n\n if ssl_certificates_utils.IsRegionalSslCertificatesRef(ssl_certificate_ref):\n collection = client.apitools_client.regionSslCertificates\n else:\n collection = client.apitools_client.sslCertificates\n\n return client.MakeRequests([(collection, 'Insert', request)])",
"def main():\n # This is used to store the certificate filename\n cert = \"\"\n\n # Setup a signal handler to catch control-c and clean up the cert temp file\n # No way to catch sigkill so try not to do that.\n # noinspection PyUnusedLocal\n def sigint_handler(sig, frame): # pylint:disable=unused-argument\n \"\"\"Handle interrupt signals.\"\"\"\n if not args.cert:\n try:\n os.unlink(cert)\n except OSError: # pylint:disable=pointless-except\n pass\n print \"Exiting...\"\n sys.exit(0)\n\n parser = ArgumentParser('Remote APIC API Inspector and GUI Log Server')\n\n parser.add_argument('-a', '--apicip', required=False, default='8.8.8.8',\n help='If you have a multihomed system, where the ' +\n 'apic is on a private network, the server will ' +\n 'print the ip address your local system has a ' +\n 'route to 8.8.8.8. If you want the server to ' +\n 'print a more accurate ip address for the ' +\n 'server you can tell it the apicip address.')\n\n parser.add_argument('-c', '--cert', type=str, required=False,\n help='The server certificate file for ssl ' +\n 'connections, default=\"server.pem\"')\n\n parser.add_argument('-d', '--delete_imdata', action='store_true',\n default=False, required=False,\n help='Strip the imdata from the response and payload')\n\n parser.add_argument('-e', '--exclude', action='append', nargs='*',\n default=[], choices=['subscriptionRefresh',\n 'aaaRefresh',\n 'aaaLogout',\n 'HDfabricOverallHealth5min-0',\n 'topInfo', 'all'],\n help='Exclude certain types of common noise queries.')\n\n parser.add_argument('-i', '--indent', type=int, default=2, required=False,\n help='The number of spaces to indent when pretty ' +\n 'printing')\n\n parser.add_argument('-l', '--location', default='/apiinspector',\n required=False,\n help='Location that transaction logs are being ' +\n 'sent to, default=/apiinspector')\n\n parser.add_argument('-n', '--nice-output', action='store_true',\n default=False, required=False,\n help='Pretty print the response and payload')\n\n parser.add_argument('-p', '--port', type=int, required=False, default=8987,\n help='Local port to listen on, default=8987')\n\n parser.add_argument('-s', '--sslport', type=int, required=False,\n default=8443,\n help='Local port to listen on for ssl connections, ' +\n 'default=8443')\n\n parser.add_argument('-r', '--requests-log', action='store_true',\n default=False, required=False,\n help='Log server requests and response codes to ' +\n 'standard error')\n\n parser.add_argument('-t', '--title', default='SimpleAciUiLogServer',\n required=False,\n help='Change the name shown for this application ' +\n 'when accessed with a GET request')\n\n parser.add_argument('-ty', '--type', action='append', nargs='*',\n default=['all'], choices=['POST', 'GET', 'undefined',\n 'EventChannelMessage'],\n help='Limit logs to specific request types.')\n\n args = parser.parse_args()\n\n logging.basicConfig(level=logging.DEBUG,\n format='%(asctime)s %(levelname)s - \\n%(message)s')\n if args.exclude:\n # Flatten the list\n args.exclude = [val for sublist in args.exclude for val in sublist]\n\n if not args.location.startswith(\"/\"):\n args.location = \"/\" + str(args.location)\n\n if args.type:\n # Flatten the list\n args.type = [val for sublist in args.type for val in sublist]\n\n ThreadingSimpleAciUiLogServer.prettyprint = args.nice_output\n ThreadingSimpleAciUiLogServer.indent = args.indent\n ThreadingSimpleAciUiLogServer.strip_imdata = args.delete_imdata\n\n # Instantiate a http server\n http_server = ThreadingSimpleAciUiLogServer((\"\", 
args.port),\n log_requests=args.requests_log,\n location=args.location,\n excludes=args.exclude,\n app_name=args.title)\n\n if not args.cert:\n # Workaround ssl wrap socket not taking a file like object\n cert_file = tempfile.NamedTemporaryFile(delete=False)\n cert_file.write(SERVER_CERT)\n cert_file.close()\n cert = cert_file.name\n print(\"\\n+++WARNING+++ Using an embedded self-signed certificate for \" +\n \"HTTPS, this is not secure.\\n\")\n else:\n cert = args.cert\n\n # Instantiate a https server as well\n https_server = ThreadingSimpleAciUiLogServer((\"\", args.sslport),\n cert=cert,\n location=args.location,\n log_requests=args.requests_log,\n excludes=args.exclude,\n app_name=args.title)\n\n signal.signal(signal.SIGINT, sigint_handler) # Or whatever signal\n\n # Example of registering a function for a specific method. The funciton\n # needs to exist of course. Note: undefined seems to be the same as a\n # GET but the logging facility on the APIC seems to get in a state where\n # instead of setting the method properly it sets it to undefined.\n # These registered functions could then be used to take specific actions or\n # be silent for specific methods.\n # http_server.register_function(GET)\n # http_server.register_function(POST)\n # http_server.register_function(HEAD)\n # http_server.register_function(DELETE)\n # http_server.register_function(undefined)\n # http_server.register_function(EventChannelMessage)\n\n # This simply sets up a socket for UDP which has a small trick to it.\n # It won't send any packets out that socket, but this will allow us to\n # easily and quickly interogate the socket to get the source IP address\n # used to connect to this subnet which we can then print out to make for\n # and easy copy/paste in the APIC UI.\n ip_add = [(s.connect((args.apicip, 80)), s.getsockname()[0], s.close()) for\n s in [socket.socket(socket.AF_INET, socket.SOCK_DGRAM)]][0][1]\n\n print(\"Servers are running and reachable via:\\n\")\n print(\"http://\" + str(ip_add) + \":\" + str(args.port) + args.location)\n print(\"https://\" + str(ip_add) + \":\" + str(args.sslport) + args.location +\n \"\\n\")\n print(\"Make sure your APIC(s) are configured to send log messages: \" +\n \"welcome username -> Start Remote Logging\")\n print(\"Note: If you connect to your APIC via HTTPS, configure the \" +\n \"remote logging to use the https server.\")\n serve_forever([http_server, https_server])",
"def _set_arguments(self):\n cert_location = f\"dependencies{sep}certificates{sep}localuser.crt\"\n key_location = f\"dependencies{sep}certificates{sep}localuser.key\"\n assert Path(cert_location).exists(), (\n f\"The certificate isn't \"\n f\"present at location {Path(cert_location).absolute()}\"\n )\n assert Path(key_location).exists(), (\n f\"The certificate key isn't \"\n f\"present at location {Path(key_location).absolute()}\"\n )\n self._arguments = [\n (\n \"test-certificate-verify\",\n [\"-k\", key_location, \"-c\", cert_location],\n ),\n (\n \"test-sig-algs\",\n [],\n ),\n (\n \"test-clienthello-md5\",\n [],\n ),\n (\n \"test-tls13-pkcs-signature\",\n [],\n ),\n ]",
"def add_arguments(self, parser):\n parser.add_argument(\"json_file\", type=argparse.FileType(\"r\"))\n parser.add_argument(\"netbox_version\", type=version.parse)",
"def cli_arguments():\n\n parser = argparse.ArgumentParser(\n formatter_class=argparse.RawDescriptionHelpFormatter,\n usage=f\"\\n{Color.DETAIL}pdforce.py [-p <pdf>] [-w <wordlist>] [-e <encoding>] [-o <output>] [-c] [-h/--help]{Color.END}\",\n description=f\"{Color.EMPHASIS}{TITLE}\\nLightweight PDF password cracker. USE FOR LEGAL INTENTS ONLY.{Color.END}\",\n epilog=f\"{Color.EMPHASIS}Made by @poponealex - https://github.com/poponealex{Color.END}\",\n )\n\n parser.add_argument(\n \"-p\",\n \"--pdf\",\n type=str,\n help=f\"{Color.INFORMATION}Path to the pdf file.{Color.END}\",\n action=\"store\",\n default=\"\",\n )\n\n parser.add_argument(\n \"-w\",\n \"--wordlist\",\n type=str,\n help=f\"{Color.INFORMATION}Path to the wordlist.{Color.END}\",\n action=\"store\",\n default=\"\",\n )\n\n parser.add_argument(\n \"-e\",\n \"--encoding\",\n type=str,\n help=f\"{Color.INFORMATION}Specify an encoding for the wordlist (https://docs.python.org/3/library/codecs.html#standard-encodings). The default encoding is platform dependent. Use 'iso8859_1' for rockyou. {Color.END}\",\n action=\"store\",\n default=None,\n )\n\n parser.add_argument(\n \"-o\",\n \"--output\",\n help=f\"{Color.INFORMATION}Output the cracked password to a new file.{Color.END}\",\n action=\"store\",\n )\n\n parser.add_argument(\n \"-c\",\n \"--copy\",\n help=f\"{Color.INFORMATION}Copy the password to the clipboard.{Color.END}\",\n action=\"store_true\",\n )\n\n return parser.parse_args()",
"def main():\n parser = argparse.ArgumentParser()\n parser.add_argument(\n \"--download_path\",\n default=None,\n help=\"Free or auth\"\n )\n parser.add_argument(\n \"--download_type\", default=\"free\", help=\"Free or auth\"\n )\n parser.add_argument(\n \"--ipversion\",\n default=\"ipv4\", help=\"IP Version format \"\n )\n parser.add_argument(\n \"--format\",\n default=\"csv\",\n help=\"DB AVAILABLE FORMATS CSV or BIN\"\n )\n parser.add_argument(\"--product\", default=\"db1\", help=\"PRODUCT\")\n parser.add_argument(\n \"--token\",\n help=\"token used in order to authenticate\"\n \"in case of downloading the auth required DBs\"\n )\n parser.add_argument(\"--unzip\", default=True, help=\"\")\n parser.add_argument(\"--numbertoipv4\", default=True, help=\"\")\n\n args = parser.parse_args()\n\n cli_util = CliUtil(\n **{x: y for x, y in args._get_kwargs()}\n )\n cli_util.exec()\n return 0",
"def info_from_args(args):\n return CertInfo(\n subject=parse_dn(args.subject),\n usage=parse_list(args.usage),\n alt_names=parse_list(args.san),\n ocsp_nocheck=args.ocsp_nocheck,\n ocsp_must_staple=args.ocsp_must_staple,\n ocsp_must_staple_v2=args.ocsp_must_staple_v2,\n ocsp_urls=parse_list(args.ocsp_urls),\n crl_urls=parse_list(args.crl_urls),\n issuer_urls=parse_list(args.issuer_urls),\n permit_subtrees=parse_list(args.permit_subtrees),\n exclude_subtrees=parse_list(args.exclude_subtrees),\n ca=args.CA,\n path_length=args.path_length)",
"def Run(self, args):\n holder = base_classes.ComputeApiHolder(self.ReleaseTrack())\n client = holder.client\n\n ssl_certificate_ref = self.SSL_CERTIFICATE_ARG.ResolveAsResource(\n args, holder.resources, default_scope=compute_scope.ScopeEnum.GLOBAL)\n\n certificate = files.ReadFileContents(args.certificate)\n private_key = files.ReadFileContents(args.private_key)\n\n if ssl_certificates_utils.IsRegionalSslCertificatesRef(ssl_certificate_ref):\n request = client.messages.ComputeRegionSslCertificatesInsertRequest(\n sslCertificate=client.messages.SslCertificate(\n name=ssl_certificate_ref.Name(),\n certificate=certificate,\n privateKey=private_key,\n description=args.description),\n region=ssl_certificate_ref.region,\n project=ssl_certificate_ref.project)\n collection = client.apitools_client.regionSslCertificates\n else:\n request = client.messages.ComputeSslCertificatesInsertRequest(\n sslCertificate=client.messages.SslCertificate(\n name=ssl_certificate_ref.Name(),\n certificate=certificate,\n privateKey=private_key,\n description=args.description),\n project=ssl_certificate_ref.project)\n collection = client.apitools_client.sslCertificates\n\n return client.MakeRequests([(collection, 'Insert', request)])",
"def req_command(args):\n if args.files:\n die(\"Unexpected positional arguments\")\n\n subject_info = info_from_args(args)\n\n if subject_info.ca:\n msg('Request for CA cert')\n else:\n msg('Request for end-entity cert')\n subject_info.show(msg_show)\n\n # Load private key, create signing request\n key = load_key(args.key, load_password(args.password_file))\n req = create_x509_req(key, subject_info)\n do_output(req_to_pem(req), args, 'req')",
"def __check_opts(self):\n self.ca_cert_file = os.environ['HOME'] + '/.cat_installer/ca.pem'\n self.pfx_file = os.environ['HOME'] + '/.cat_installer/user.p12'\n if not os.path.isfile(self.ca_cert_file):\n print(Messages.cert_error)\n sys.exit(2)",
"def AddCertificateFlag(parser, required=False):\n help_text = \"\"\"\\\n x509 PEM-encoded certificate that will be used by the replica to\n authenticate against the database server.\n \"\"\"\n parser.add_argument('--certificate', help=help_text, required=required)",
"def fusion_api_import_server_certificate(self, body, api=None, headers=None):\n return self.server_certificate.post(body, api, headers)",
"def AddClientCertificateFlag(parser, required=False):\n help_text = \"\"\"\\\n x509 PEM-encoded certificate that will be used by the replica to\n authenticate against the database server. Database Migration Service\n encrypts the value when storing it.\n \"\"\"\n parser.add_argument('--client-certificate', help=help_text, required=required)",
"def main():\n\n parser = argparse.ArgumentParser()\n parser.add_argument('-H', '--host', required=True)\n parser.add_argument('-p', '--port', default=443)\n parser.add_argument('-u', '--url', default='/')\n parser.add_argument('-c', '--cert', required=True)\n parser.add_argument('-k', '--key', required=True)\n parser.add_argument('-P', '--perfdata', action='append')\n args = parser.parse_args()\n\n csv = get_csv(args)\n rows = get_rows(csv)\n\n frontend_errors = get_frontend_errors(rows)\n backend_errors = get_backend_errors(rows)\n member_errors = get_member_errors(rows)\n perfdata = get_perfdata(args.perfdata, rows)\n\n code = NAGIOS_OK\n if member_errors:\n code = NAGIOS_WARNING\n if frontend_errors or backend_errors:\n code = NAGIOS_CRITICAL\n\n status = ['OK', 'WARNING', 'CRITICAL'][code]\n print '{} frontend errors {}; backend errors {}; member errors {} | {}'.\\\n format(status,\n ', '.join(frontend_errors) if frontend_errors else 'none',\n ', '.join(backend_errors) if backend_errors else 'none',\n ', '.join(member_errors) if member_errors else 'none',\n ' '.join(perfdata))\n\n sys.exit(code)",
"def cmd_appe(args):",
"def main():\n ssl_date_fmt = r'%b %d %H:%M:%S %Y %Z'\n #cert_file_name = os.path.join(os.path.dirname(__file__), \"testcert.pem\")\n\n parser = argparse.ArgumentParser(description='Parse a certificate and show days left')\n parser.add_argument('-v', '--verbose', action='store_true', help='show full certificate')\n parser.add_argument('cert', nargs='+', help='certifcate file(s)')\n args = parser.parse_args()\n for cert_file_name in args.cert:\n try:\n cert_dict = ssl._ssl._test_decode_cert(cert_file_name)\n serial = cert_dict['serialNumber']\n subject = dict(x[0] for x in cert_dict['subject'])\n issued_to = subject['commonName']\n time_left = datetime.datetime.strptime(cert_dict['notAfter'], ssl_date_fmt) - datetime.datetime.utcnow()\n if args.verbose:\n pp(cert_dict)\n ssl_expires_in(issued_to, serial, time_left)\n\n except Exception as error:\n print(\"Error decoding certificate: {:}\".format(error))",
"def main(cli_args):\n store_obj = cert_human_py3.CertChainStore.from_socket(\n host=cli_args.host, port=cli_args.port\n )\n\n print(store_obj.dump_json)",
"def test_file_data_arguments():\n filename = 'wso_args.json'\n assert AUTH.check_file_exists(filename) is True\n\n assert AUTH.verify_config(filename, 'authorization',\n AUTH.encode(RANDOM_USERNAME,\n RANDOM_PASSWORD)) is True\n assert AUTH.verify_config(filename, 'url', RANDOM_URL) is True\n assert AUTH.verify_config(filename, 'aw-tenant-code',\n RANDOM_TENANTCODE) is True",
"def alert_to_v1_context(alert: Any, args: Dict[str, Any]) -> Dict[str, Any]:\n ec = {\n 'ID': alert.get('id'),\n 'Status': alert.get('status'),\n 'AlertTime': alert.get('alertTime'),\n 'Policy': {\n 'ID': demisto.get(alert, 'policy.policyId'),\n 'Name': demisto.get(alert, 'policy.name'),\n 'Type': demisto.get(alert, 'policy.policyType'),\n 'Severity': demisto.get(alert, 'policy.severity'),\n 'Remediable': demisto.get(alert, 'policy.remediable')\n },\n 'Resource': {\n 'ID': demisto.get(alert, 'resource.id'),\n 'Name': demisto.get(alert, 'resource.name'),\n 'Account': demisto.get(alert, 'resource.account'),\n 'AccountID': demisto.get(alert, 'resource.accountId')\n }\n }\n if 'resource_keys' in args:\n # if resource_keys argument was given, include those items from resource.data\n extra_keys = demisto.getArg('resource_keys')\n resource_data = {}\n keys = extra_keys.split(',')\n for key in keys:\n resource_data[key] = demisto.get(alert, f'resource.data.{key}')\n\n ec['Resource']['Data'] = resource_data\n\n if alert.get('alertRules'):\n ec['AlertRules'] = [alert_rule.get('name') for alert_rule in alert.get('alertRules')]\n\n return ec"
] | [
"0.5239947",
"0.5211276",
"0.5210403",
"0.51984346",
"0.5181014",
"0.51681864",
"0.50820124",
"0.5056354",
"0.5039777",
"0.50120467",
"0.49912164",
"0.49804226",
"0.4976095",
"0.4974017",
"0.49715108",
"0.49638125",
"0.4950888",
"0.49498275",
"0.494732",
"0.4929162",
"0.491923",
"0.49056086",
"0.48643604",
"0.4851059",
"0.4845478",
"0.4831911",
"0.48154396",
"0.48117858",
"0.4811691",
"0.47760993"
] | 0.5713314 | 0 |
Retrieve internal CA signed RabbitMQ client certificate. [Example] ${resp} = Fusion Api Get Rabbitmq Client Certificates | | | | def fusion_api_get_rabbitmq_client_certificate(self, param='', api=None, headers=None):
return self.rabmq.get(param=param, api=api, headers=headers) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_ssl_certificate():",
"def get_ssl_certificate() :",
"def fusion_api_get_ca_certificate(self, uri=None, api=None, headers=None, param=''):\n return self.ca.get(uri=uri, api=api, headers=headers, param=param)",
"def client_certificate(self) -> str:\n return pulumi.get(self, \"client_certificate\")",
"def client_certificate(self) -> str:\n return pulumi.get(self, \"client_certificate\")",
"def ca():\n return trustme.CA()",
"def fusion_api_get_client_certificate(self, ip, api=None, headers=None):\n return self.client_certificate.get(ip, api, headers)",
"def ca_certificate(self) -> str:\n return pulumi.get(self, \"ca_certificate\")",
"def ca_certificate(self) -> str:\n return pulumi.get(self, \"ca_certificate\")",
"def fusion_api_get_certificate_info(self, uri=None, api=None, param='', headers=None):\n param = '/certificates/https/'\n return self.ic.get(uri=uri, api=api, headers=headers, param=param)",
"def dcos_ca_bundle():\n resp = sdk_cmd.cluster_request('GET', '/ca/dcos-ca.crt')\n cert = resp.content.decode('ascii')\n assert cert is not None\n return cert",
"def test_get_certificate(self):\n chain = _create_certificate_chain()\n [(cakey, cacert), (ikey, icert), (skey, scert)] = chain\n\n context = Context(SSLv23_METHOD)\n context.use_certificate(scert)\n client = Connection(context, None)\n cert = client.get_certificate()\n assert cert is not None\n assert \"Server Certificate\" == cert.get_subject().CN",
"def certificate_auth():\r\n url = 'https://www.12306.cn'\r\n response = requests.get(url, verify=False)\r\n print(response.status_code)\r\n print(response.text)",
"def get_cacert(self):\n return self.creds.get('cacert')",
"def _parse_certificate(cls, response):\n links = _parse_header_links(response)\n try:\n cert_chain_uri = links[u'up'][u'url']\n except KeyError:\n cert_chain_uri = None\n return (\n response.content()\n .addCallback(\n lambda body: messages.CertificateResource(\n uri=cls._maybe_location(response),\n cert_chain_uri=cert_chain_uri,\n body=body))\n )",
"def xforwardedforclientcertclientverifyalias(self) -> str:\n return pulumi.get(self, \"xforwardedforclientcertclientverifyalias\")",
"def get_authentication_certificate(hostname:str) -> str:\r\n host = hostname.split(\":\")[0]\r\n port = int(hostname.split(\":\")[1] or 443)\r\n conn = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\r\n context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)\r\n sock = context.wrap_socket(conn, server_hostname=host)\r\n sock.connect((host, port))\r\n cert = ssl.DER_cert_to_PEM_cert(sock.getpeercert(True))\r\n return str.encode(cert)",
"def fusion_api_get_server_certificate(self, aliasname, api=None, headers=None):\n return self.server_certificate.get(aliasname, api, headers)",
"def org_apache_felix_https_clientcertificate(self) -> ConfigNodePropertyDropDown:\n return self._org_apache_felix_https_clientcertificate",
"def test_cert(self):\n\n try:\n client = SSLClient(host=FQDN, ip=APPLIANCE, usercert=CERT, sslverifyhost=True, cabundle=CABUNDLE)\n self.assertTrue(1==1, \"SSLClient connects with cabundle\")\n except Exception as exception:\n print(exception)\n self.fail(\"SSLClient did not connect\")\n \n response = client.send_command('LIST')\n self.assertEqual(response.ret, 100)\n\n client.disconnect()",
"def _get_ca_bundle():\n try:\n import certifi\n return certifi.where()\n except ImportError:\n pass",
"def tls_certificate(ca):\n interface, _host, _port = _get_conn_data(ANY_INTERFACE_IPV4)\n return ca.issue_cert(ntou(interface))",
"def endpoint_tls_ca(self) -> Optional[bytes]:\n if self.is_ready and (data := self._data):\n if data.endpoint_tls_ca:\n return data.endpoint_tls_ca.encode()\n return None",
"def certificate(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"certificate\")",
"def certificate(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"certificate\")",
"def certificate(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"certificate\")",
"def ca_certificate(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"ca_certificate\")",
"def client_certificate(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"client_certificate\")",
"def certificate(self) -> str:\n return pulumi.get(self, \"certificate\")",
"def cert_challenge_http(self) -> 'outputs.CertHttpChallengeResponse':\n return pulumi.get(self, \"cert_challenge_http\")"
] | [
"0.72408545",
"0.718397",
"0.66533136",
"0.65521437",
"0.65521437",
"0.6517693",
"0.6469372",
"0.64545226",
"0.64545226",
"0.6448066",
"0.64047617",
"0.6286404",
"0.62447995",
"0.61983234",
"0.61817825",
"0.60603267",
"0.6026877",
"0.60216177",
"0.6007357",
"0.60045433",
"0.59897804",
"0.5969606",
"0.59001625",
"0.58895856",
"0.58895856",
"0.58895856",
"0.58870214",
"0.5883106",
"0.5845906",
"0.5830165"
] | 0.75156265 | 0 |
Gets a default or paginated collection of connection templates [Arguments] | def fusion_api_get_connection_templates(self, uri=None, param='', api=None, headers=None):
return self.ct.get(uri=uri, api=api, headers=headers, param=param) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_templates(self):\n return self.http_call(\"get\", url=f\"{self.base_url}/templates\").json()",
"def load_network_templates(self) -> List:\n try:\n network_templates = self.api.get(host=self.host, endpoint=f\"/api/v1/orgs/{self.oid}/networktemplates\")\n except Exception as e:\n logger.error(f\"{TextColors.FAIL}Error getting network templates:{TextColors.ENDC} {e}\")\n raise e\n self.network_templates = network_templates",
"def get_queryset(self):\n return Template.objects.all()",
"def get_templates(self):\n\n data = self.request_from_server('templates')\n self.templates = data",
"def get_all_templates(cls):\n raise NotImplementedError()",
"def list_templates(self):\n raise NotImplementedError()",
"def fusion_api_get_default_connection_template(self, api=None, headers=None):\n return self.ct.get_default(api=api, headers=headers)",
"def templatelist(cls):\n return cls._templatelist",
"def list_templates(request):\n templates = models.Template.all().order('name')\n return utility.respond(request, 'admin/list_templates', {'templates': templates})",
"def template_list(self):\n return self.ezx.get_template_list()",
"def get_schemas(self):\n templates = [['Template GUID']]\n r = self.system_cursor.execute('{Call wtGetTemplateList(%s)}' % (self.dsn['ProfileGuid'],))\n for row in r.fetchall():\n templates.append([row.TEMPLATE_GUID])\n return templates",
"def T(request):\n\treturn all_templates[request.param]",
"def wpst_get_connection_templates(self, sessionId, uri=None, param='', api=None, headers=None):\n if api:\n headers = self.fusion_client._set_req_api_version(api=api)\n elif headers is None:\n headers = self.fusion_client._headers\n headers['auth'] = sessionId\n\n return (self.ct.get(uri=uri, api=api, headers=headers, param=param))",
"def get_available_templates_list():\n page = import_page.ImportPage()\n page.open()\n return page.open_download_template_modal().available_templates_list",
"def get_all_templates(self):\n url = self.base_url + \"v2/template/\"\n\n resp = requests.get(url=url, headers=self.headers)\n return resp.json(), resp.status_code",
"def template_list(call=None):\n templates = {}\n session = _get_session()\n vms = session.xenapi.VM.get_all()\n for vm in vms:\n record = session.xenapi.VM.get_record(vm)\n if record[\"is_a_template\"]:\n templates[record[\"name_label\"]] = record\n return templates",
"def get_templates(self):\n return [{\"id\": tmplt[\"template_id\"], \"name\": tmplt[\"name\"]}\n for tmplt in Template.objects(user_id=self.user_id, active=True)]",
"def get_project_templates(session=konfuzio_session()) -> List[dict]:\n url = get_project_url()\n r = session.get(url=url)\n r.raise_for_status()\n sorted_templates = sorted(r.json()['section_labels'], key=itemgetter('id'))\n return sorted_templates",
"def all_templates():\r\n # TODO use memcache to memoize w/ expiration\r\n templates = defaultdict(list)\r\n for category, descriptor in XBlock.load_classes():\r\n if not hasattr(descriptor, 'templates'):\r\n continue\r\n templates[category] = descriptor.templates()\r\n\r\n return templates",
"def test_get_subscription_templates(self):\n pass",
"def get_templates(self):\n\n return self._templates",
"def retrieve_templates(self, category):\n\t\ttemplate_list_pool = self.template_list_pool\n\t\tfrom_redis = False\n\t\tclass_id = category + '#*'\n\t\tfirst_type_code,second_type_code = category.split('.')\n\n\t\t# if class_id not in template_dic_pool.keys():\n\t\t# Get template from redis at first.\n\t\tif template_redis.get(class_id) is not None:\n\t\t\ttemplate_list_pool[class_id] = ujson.loads(template_redis.get(class_id))\n\t\t\tfrom_redis = True\n\t\telse:\n\t\t\ttemplate_list_pool[class_id] = None\n\n\t\t# Search template from database when template is not in redis.\n\t\tif template_list_pool[class_id] is None:\n\t\t\t# import pdb;pdb.set_trace()\n\t\t\ttemplates = AssemblyTemplate.retrieve_lv2assembly_template_list( category )\n\t\t\ttemplate_list_pool[class_id] = templates\n\n\t\t\t# Store template in redis.\n\t\t\ttemplate_redis.delete(class_id)\n\t\t\ttemplate_redis.set(class_id, ujson.dumps(template_list_pool[class_id]))\n\n\t\treturn template_list_pool[class_id],from_redis",
"def get_connections(\n self,\n host_groups=None, # type: List[models.ReferenceType]\n hosts=None, # type: List[models.ReferenceType]\n protocol_endpoints=None, # type: List[models.ReferenceType]\n volumes=None, # type: List[models.ReferenceType]\n authorization=None, # type: str\n x_request_id=None, # type: str\n filter=None, # type: str\n host_group_names=None, # type: List[str]\n host_names=None, # type: List[str]\n limit=None, # type: int\n offset=None, # type: int\n protocol_endpoint_names=None, # type: List[str]\n sort=None, # type: List[str]\n total_item_count=None, # type: bool\n volume_names=None, # type: List[str]\n async_req=False, # type: bool\n _return_http_data_only=False, # type: bool\n _preload_content=True, # type: bool\n _request_timeout=None, # type: Optional[int]\n ):\n # type: (...) -> models.ConnectionGetResponse\n kwargs = dict(\n authorization=authorization,\n x_request_id=x_request_id,\n filter=filter,\n host_group_names=host_group_names,\n host_names=host_names,\n limit=limit,\n offset=offset,\n protocol_endpoint_names=protocol_endpoint_names,\n sort=sort,\n total_item_count=total_item_count,\n volume_names=volume_names,\n async_req=async_req,\n _return_http_data_only=_return_http_data_only,\n _preload_content=_preload_content,\n _request_timeout=_request_timeout,\n )\n kwargs = {k: v for k, v in kwargs.items() if v is not None}\n endpoint = self._connections_api.api20_connections_get_with_http_info\n _process_references(host_groups, ['host_group_names'], kwargs)\n _process_references(hosts, ['host_names'], kwargs)\n _process_references(protocol_endpoints, ['protocol_endpoint_names'], kwargs)\n _process_references(volumes, ['volume_names'], kwargs)\n return self._call_api(endpoint, kwargs)",
"def ListTemplates(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')",
"def get_instance_templates(self):\n response = self.call_api('/global/instanceTemplates')\n return {\n template['name']: template for template in response.get('items', [])\n }",
"def get_wrapper_template():\n\n return wrapper_templates.template_collection",
"async def templates(self, token: Any) -> List[Template]:\n response = await self._invoke({\n \"jsonrpc\": \"2.0\",\n \"method\": \"ProjectAPI.Templates\",\n \"id\": self.__next_id(),\n \"params\": [token, ]\n })\n assert response.status // 100 == 2, str(response.status) + \" \" + str(response.reason)\n payload = await response.json()\n if 'error' in payload:\n raise ProjectAPIError.from_json('templates', payload['error'])\n return [Template.from_json(x) for x in (payload['result'] or [])]",
"def templateMappings(self):\n raise NotImplementedError",
"def templates(self):\n if self._templates is None:\n self._templates = self.get_all_templates()\n return self._templates",
"def getConnectionList(self):\n return []"
] | [
"0.6215655",
"0.6188987",
"0.61315364",
"0.6072817",
"0.60249364",
"0.59981847",
"0.5976192",
"0.58907515",
"0.5838264",
"0.5799378",
"0.5787603",
"0.57628065",
"0.56953496",
"0.559609",
"0.55930907",
"0.5544358",
"0.5491033",
"0.5429208",
"0.5394012",
"0.53638613",
"0.53619844",
"0.5359423",
"0.5332532",
"0.5331494",
"0.52836823",
"0.5257791",
"0.52540064",
"0.5244217",
"0.52418876",
"0.5221816"
] | 0.6771585 | 0 |
Gets the default network connection template. During network creation, the default connection value will be inherited from this template. [Example] ${resp} = Fusion Api Get Default Connection Template | | | def fusion_api_get_default_connection_template(self, api=None, headers=None):
return self.ct.get_default(api=api, headers=headers) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_default_network_policy(con):\n try:\n return con.network_policy_read(fq_name=conf.get('default_network_policy', 'UNEXPECTED_VALUE'))\n except NoIdError:\n log.debug('Unable to find default_network_policy')\n return None",
"def get_default(self, create=True):\n if self._default_network is None and create:\n log.debug(\"Creating default network...\")\n self._default_network = self.create('default', driver='bridge')\n\n return self._default_network",
"def get_default_resource(self, name):\n if not self._default_resource:\n self._default_resource = self.get(name=name)\n\n return self._default_resource",
"def get_default_pool(con):\n try:\n return con.floating_ip_pool_read(fq_name=conf.get('default_pool', 'UNEXPECTED_VALUE'))\n except NoIdError:\n log.debug('Unable to find pool.')\n return None",
"def get_default(self):\n\n\t\treturn self.__default",
"def get_default_gateway(self):\n\t\treturn call_sdk_function('PrlSrvCfgNet_GetDefaultGateway', self.handle)",
"def get_podman_default_hostname(): # type: () -> str\n hostname = None\n try:\n stdout = raw_command(['podman', 'system', 'connection', 'list', '--format=json'], capture=True)[0]\n except SubprocessError:\n stdout = '[]'\n\n try:\n connections = json.loads(stdout)\n except json.decoder.JSONDecodeError:\n return hostname\n\n for connection in connections:\n # A trailing indicates the default\n if connection['Name'][-1] == '*':\n hostname = connection['URI']\n break\n\n return hostname",
"def create_default_network(context):\n return [{\n 'type': 'templates/network.py',\n 'name': 'fc-network',\n 'properties': {\n 'resourceName': 'network',\n 'name': 'network',\n 'projectId': '$(ref.fc-project.projectId)',\n 'autoCreateSubnetworks': True,\n # We pass the dependsOn list into the network template as a\n # parameter. Deployment Manager doesn't support dependsOn for\n # template-call nodes, so we can't have this resource itself depend on\n # the project-wide resources.\n 'dependsOn': '$(ref.fc-project.resourceNames)',\n },\n }]",
"def default(self):\n # get my default value\n default = self._default\n # if it is still at its trivial value\n if default is schemata.component.default:\n # ask my protocol\n return self.protocol.pyre_default\n # otherwise, return it\n return default",
"def get_default_gateway(self):\n\t\treturn call_sdk_function('PrlSrvCfg_GetDefaultGateway', self.handle)",
"def get_template(self):\n if self.get_website:\n return self.get_website.get_template()\n else:\n return default_entity.get_website.get_template()",
"def default(self):\n return self.get(name='Unknown')",
"def get_network_default_gateway(self):\n return self.mycam.devicemgmt.GetNetworkDefaultGateway()",
"def get_defaultgw(self):\n return self.get_ipv4_defaultgw()",
"def getDefault(self):\n return DefaultController.getInstance()",
"def get_default(cls):\n raise NotImplementedError",
"def Default():\n return _DEFAULT",
"def get_net(con):\n try:\n return con.virtual_network_read(fq_name=conf.get('default_net', 'UNEXPECTED_VALUE'))\n except NoIdError:\n log.debug('Unable to find net.')\n return None",
"def get_default(self) -> T | None:\n return (\n self.default # TODO: deepcopy mutable defaults?\n if self.default_factory is None\n else self.default_factory()\n )",
"def test_get_default_network(self):\n pass",
"def getDefault():",
"def default():\n return DefaultSwh.default()",
"def GetDefaultWiredNetwork(self):\n config = ConfigParser.ConfigParser()\n config.read(self.wired_conf)\n profileList = config.sections()\n for profile in profileList:\n if config.has_option(profile, \"default\"):\n if misc.to_bool(config.get(profile, \"default\")):\n return profile\n return None",
"def get_default_resource_pool(self):\n try:\n return self.client.list_resource_pools()[0]['resource_pool']\n except VMwareError as e:\n raise VMwareBackendError(e)",
"def test_default_connection_updated_correctly(self):\n result = self.run_cli_command(\n \"--skip-consistency-check\",\n \"config\",\n \"get\",\n \"agent.default_connection\",\n cwd=self._get_cwd(),\n )\n assert result.stdout == \"fetchai/stub:0.21.3\\n\"",
"def get_default():\n backend, opts = parse_default()\n assert backend is not None\n return load_backend(backend, opts)",
"def default(self) -> object:\n return self._default",
"def get_default_gateway(self):\n\t\treturn call_sdk_function('PrlVmDevNet_GetDefaultGateway', self.handle)",
"def get_default_ip_address():\r\n gws = netifaces.gateways() # get all gateways\r\n default = gws['default'] # get the default gw\r\n adapter = default[2][1] # get the adapter identifier\r\n realadapter = netifaces.ifaddresses(adapter) # get the adapter\r\n addr_dict = realadapter[2][0] # get the first ipv4 address tuple\r\n return addr_dict['addr']",
"def default_endpoint(self) -> str:\n return self.settings[\"default_endpoint\"]"
] | [
"0.6309045",
"0.62490034",
"0.5907235",
"0.5905433",
"0.5886933",
"0.58698976",
"0.5831931",
"0.58188957",
"0.5802485",
"0.5784825",
"0.5722665",
"0.57059497",
"0.5699278",
"0.56755865",
"0.56710696",
"0.565251",
"0.5624445",
"0.562275",
"0.560671",
"0.5573889",
"0.5573223",
"0.5566893",
"0.5564317",
"0.5557853",
"0.55417883",
"0.5535507",
"0.5510574",
"0.55104566",
"0.550003",
"0.545098"
] | 0.78695226 | 0 |
Updates a specific connection template. [Arguments] | def fusion_api_update_connection_template(self, body, uri=None, api=None, headers=None):
return self.ct.update(body, uri=uri, api=api, headers=headers) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def fusion_api_update_default_connection_template(self, body, api=None, headers=None):\n return self.ct.update_default(body, api=api, headers=headers)",
"def update_question_template(xblock_id, question_template, image_url, resolver, updated_variables, answer_template):\n \n connection = mysql.connector.connect(**s.database)\n \n clean_up_variables_and_expressions(xblock_id, connection)\n \n update_question_template_content(xblock_id, connection, question_template, image_url, resolver, answer_template)\n \n create_variables(xblock_id, connection, updated_variables)\n \n connection.commit()\n connection.close()",
"def update_gateway_template(\n templates: Dict[str, Any], source_data: str,\n namespace: Optional[str], purpose: str,\n) -> None:\n gateway_templates = templates['gateway-templates']\n assert isinstance(gateway_templates, list)\n for gateway_template in gateway_templates:\n if (\n gateway_template.get('namespace') == namespace\n and gateway_template.get('purpose') == purpose\n ):\n gateway_template['template'] = source_data\n return\n gateway_templates.append({\n 'namespace': namespace,\n 'purpose': purpose,\n 'template': source_data,\n })",
"def edit(self, connection_id, arguments, template):\n context = self.context\n self.connection_id = str(connection_id)\n arguments = str(arguments)\n self.arguments_src = arguments\n self._arg = Aqueduct.parse(arguments)\n if not isinstance(template, (str, unicode)):\n template = str(template)\n self.src = template\n self.template = t = context.template_class(template)\n t.cook()\n context._v_query_cache={}, Bucket()",
"def template(self, template):\n self._template = template",
"def assign_template_to_host(self, host_id):\n\n return self.connection.do_request(\"template.update\", teamplateid=self.template_id, hosts=[host_id])",
"def update_template(template, trial):\n assert isinstance(template, dict) or isinstance(template, list)\n items = template.items() if isinstance(template, dict) else enumerate(template)\n\n for key, value in items:\n if isinstance(value, str):\n if value in trial:\n template[key] = trial[value]\n elif isinstance(value, dict) or isinstance(value, list):\n template[key] = ConfigGenerator.update_template(template[key], trial)\n\n return template",
"def setTemplate(self, template):\n self.template = template",
"def set_template(self, template, templateType, blogid=1):\n return self.execute(\"metaWeblog.setTemplate\", self.appkey, blogid, self.username, self.password, template, templateType)",
"def template(self, template):\n\n self._template = template",
"def template(self, template):\n\n self._template = template",
"def _update_template(template_path):\n template_definition = template_path\n\n # template output directory is output/templates, so need to create that location before pulling out the templates\n template_location = template_utilities.get_template_directory()\n\n # Install the template and get the path to the template directory for updating the configuration file.\n templates_path = template_utilities.install_template(template_location, template_definition)\n\n if templates_path:\n # Now need to find the templates definition of that zip file and locate it in the file system so that it can be\n settings = get_configuration()\n\n # Override the configuration details with the new template path. This should probably be handled by the\n # publishing plugin, but for now this will work\n settings.publishing.templates = str(templates_path.relative_to(get_configuration_root()))\n configuration_file_path = get_configuration_root() / 'config.yaml'\n\n dump_configuration(configuration_file_path, settings)",
"def test_update_template_subscription(self):\n pass",
"def update_with_template_args(args, list_args=None):\r\n if not args.get('--template'):\r\n return\r\n\r\n list_args = list_args or []\r\n\r\n template_path = args.pop('--template')\r\n if not os.path.exists(template_path):\r\n raise ArgumentError(\r\n 'File does not exist [-t | --template] = %s'\r\n % template_path)\r\n\r\n config = configparser.ConfigParser()\r\n ini_str = '[settings]\\n' + open(\r\n os.path.expanduser(template_path), 'r').read()\r\n ini_fp = StringIO(ini_str)\r\n config.readfp(ini_fp)\r\n\r\n # Merge template options with the options passed in\r\n for key, value in config.items('settings'):\r\n option_key = '--%s' % key\r\n if option_key in list_args:\r\n value = value.split(',')\r\n if not args.get(option_key):\r\n args[option_key] = value",
"def test_update_subscription_template(self):\n pass",
"def update_service_template(\n templates: Dict[str, Any], source_data: str,\n namespace: Optional[str], service: Optional[str], color: Optional[str], purpose: str,\n) -> None:\n service_templates = templates['service-templates']\n assert isinstance(service_templates, list)\n for service_template in service_templates:\n if (\n service_template.get('namespace') == namespace\n and service_template.get('service') == service\n and service_template.get('color') == color\n and service_template.get('purpose') == purpose\n ):\n service_template['template'] = source_data\n return\n service_templates.append({\n 'namespace': namespace,\n 'service': service,\n 'color': color,\n 'purpose': purpose,\n 'template': source_data,\n })",
"def update():\n if Project.use_templates:\n defaults = _project_defaults()\n\n template = Template()\n\n for template_dir in [os.path.abspath(os.path.join(herringlib, 'herringlib', 'templates'))\n for herringlib in HerringFile.herringlib_paths]:\n\n info(\"template directory: %s\" % template_dir)\n # noinspection PyArgumentEqualDefault\n template.generate(template_dir, defaults, overwrite=False)",
"def match_network_template_to_site(self):\n for site in self.sites:\n if 'networktemplate_id' in site and site['networktemplate_id']:\n matched_network_template = [nt for nt in self.network_templates if nt['id'] == site['networktemplate_id']]\n if len(matched_network_template) >= 1:\n site['network_template'] = matched_network_template[0]",
"def update_datasource_connection(connection, id, body, error_msg=None):\n url = f\"{connection.base_url}/api/datasources/connections/{id}\"\n for op_dict in body[\"operationList\"]:\n alter_patch_req_body(op_dict, \"/datasourceLogin\", \"/databaseLoginId\")\n response = connection.session.patch(url=url, json=body)\n if not response.ok:\n if error_msg is None:\n error_msg = f\"Error updating Datasource Connection with ID: {id}\"\n response_handler(response, error_msg)\n return response",
"def setUITemplate(*args, popTemplate: bool=True, pushTemplate: bool=True, q=True, query=True,\n **kwargs)->Union[AnyStr, Any]:\n pass",
"def update_template():\n\n # Open, and read, the template file\n with open(\"template.html\", \"r\") as f:\n soup = BeautifulSoup(f.read(), features=\"html5lib\")\n\n # Add the plots in the correct places\n for div in soup.find_all(\"div\", class_=\"plot\"):\n with open(div[\"src\"], \"r\") as f:\n plot = BeautifulSoup(f.read(), features=\"html5lib\")\n div.replace_with(plot.html.body.div)\n\n # Write the finished report to document.html\n with open(\"document.html\", \"w\") as f:\n f.write(soup.prettify())",
"def test_update_device_template(self):\n pass",
"def generate_config_from_tmpl(compose_path, args, sql_conn=''):\n raw_conf = open(compose_path, 'r').read()\n replace_map = {\n 'TEMPLATE_VAR_PROJECT': args.project,\n 'TEMPLATE_VAR_DOMAIN': args.host,\n 'TEMPLATE_VAR_SERVICE_ACCOUNT': args.service_account,\n 'TEMPLATE_VAR_OAUTH2_CLIENT_ID': args.oauth2_client_id,\n 'TEMPLATE_VAR_SQL_INSTANCE_CONN': sql_conn,\n 'TEMPLATE_VAR_DBPASS': args.cloud_sql_password,\n 'TEMPLATE_VAR_DOCKER_PROJECT': args.project.replace(':', '/'),\n 'TEMPLATE_VAR_DOCKER_TAG': args.branch,\n }\n for template_var, value in replace_map.items():\n raw_conf = raw_conf.replace(template_var, value)\n with tempfile.NamedTemporaryFile() as real_conf:\n real_conf.write(raw_conf)\n real_conf.flush()\n yield real_conf.name",
"def update_project_template(push: bool = False,\n project_template_path: Optional[Pathy] = None):\n cwd = pathlib.Path.cwd().resolve()\n\n # get ballet project info -- must be at project root directory with a\n # ballet.yml file.\n try:\n project = Project.from_path(cwd)\n except ConfigurationError:\n raise ConfigurationError('Must run command from project root.')\n\n repo = project.repo\n original_head = repo.head.commit.hexsha[:7]\n\n if repo.is_dirty():\n raise BalletError(\n 'Can\\'t update project template with uncommitted changes. '\n 'Please commit your changes and try again.')\n\n if repo.head.ref.name != DEFAULT_BRANCH:\n raise ConfigurationError(\n 'Must run command from branch {master}'\n .format(master=DEFAULT_BRANCH))\n\n if TEMPLATE_BRANCH not in repo.branches:\n raise ConfigurationError(\n 'Could not find \\'{}\\' branch.'.format(TEMPLATE_BRANCH))\n\n # check for upstream updates to ballet\n new_version = _check_for_updated_ballet()\n if new_version:\n _warn_of_updated_ballet(new_version)\n\n with tempfile.TemporaryDirectory() as _tempdir:\n tempdir = pathlib.Path(_tempdir)\n\n # cookiecutter returns path to the resulting project dir\n logger.debug('Re-rendering project template at {}'.format(tempdir))\n updated_template = _render_project_template(\n cwd, tempdir, project_template_path=project_template_path)\n updated_repo = git.Repo(updated_template)\n\n # tempdir is a randomly-named dir suitable for a random remote name\n # to avoid conflicts\n remote_name = tempdir.name\n\n remote = repo.create_remote(\n remote_name, updated_repo.working_tree_dir)\n remote.fetch()\n\n repo.heads[TEMPLATE_BRANCH].checkout()\n try:\n logger.debug('Merging re-rendered template to project-template '\n 'branch')\n repo.git.merge(\n remote_name + '/' + DEFAULT_BRANCH,\n allow_unrelated_histories=True,\n strategy_option='theirs',\n squash=True,\n )\n if not repo.is_dirty():\n logger.info('No updates to template -- done.')\n return\n commit_message = _make_template_branch_merge_commit_message()\n logger.debug('Committing updates: {}'.format(commit_message))\n repo.git.commit(m=commit_message)\n except GitCommandError:\n logger.critical(\n 'Could not merge changes into {template_branch} branch, '\n 'update failed'\n .format(template_branch=TEMPLATE_BRANCH))\n raise\n finally:\n _safe_delete_remote(repo, remote_name)\n logger.debug('Checking out master branch')\n repo.heads[DEFAULT_BRANCH].checkout()\n\n try:\n logger.debug('Merging project-template branch into master')\n repo.git.merge(TEMPLATE_BRANCH, no_ff=True)\n except GitCommandError as e:\n if 'merge conflict' in str(e).lower():\n logger.critical('\\n'.join([\n 'Update failed due to a merge conflict.',\n 'Fix conflicts, and then complete merge manually:',\n ' $ git add .',\n ' $ git commit --no-edit',\n 'Otherwise, abandon the update:',\n ' $ git reset --merge {original_head}'\n ]).format(original_head=original_head))\n raise\n\n if push:\n _push(project)\n\n _log_recommended_reinstall()",
"def push_template(config: Config) -> int:\n if not config.purpose:\n log.warning('When pushing a template, the `purpose` argument is required.')\n return 3\n if config.filename == '-':\n source_data = sys.stdin.read()\n elif not config.filename or not os.path.isfile(config.filename):\n log.warning('No such local file `{file}`', file=config.filename)\n return 4\n else:\n with open(config.filename, 'r') as f:\n source_data = f.read()\n log.debug(\"Pulling current templates\")\n templates = pull_document(config, TEMPLATES_DOCUMENT) or create_initial_templates_document()\n if config.category == 'gateway':\n update_gateway_template(templates, source_data, config.namespace, config.purpose)\n elif config.category == 'service':\n update_service_template(\n templates, source_data, config.namespace, config.service, config.color, config.purpose,\n )\n else:\n log.warning('Unknown or unset category `{cat}`', cat=config.category)\n return 5\n push_document(config, TEMPLATES_DOCUMENT, templates)\n return 0",
"def post_service_template_update(self, resource_id, resource_dict):\n pass",
"def edit_server_profile_template(profile_template_obj):\n FusionUIBase.navigate_to_section(SectionType.SERVER_PROFILE_TEMPLATES, time_for_loading=5)\n\n total = len(profile_template_obj)\n not_exists = 0\n edited = 0\n\n for n, profile_template in enumerate(profile_template_obj):\n logger.info(\"{2} No: {0} --- Total: {1} {2}\".format((n + 1), total, '-' * 14))\n logger.info(\"editing a server profile template with name '%s' ...\" % profile_template.name)\n # checking if the profile is not existing for editing\n if not VerifyServerProfileTemplate.verify_server_profile_template_exist(profile_template.name, fail_if_false=False):\n logger.warn(\"server profile template '%s' does not exist\" % profile_template.name)\n not_exists += 1\n continue\n\n # get new server hardware type for edit\n enclosure_group = profile_template.enclgroup if getattr(profile_template, 'enclgroup', None) is not None else None\n sht_new = None\n if getattr(profile_template, 'new_sht_ref_server', None) is not None:\n logger.info(\"getting server hardware type of server hardware '%s'\" % profile_template.new_sht_ref_server)\n from FusionLibrary.ui.servers.serverhardware import get_type_of_server_hardware\n sht_new = get_type_of_server_hardware(profile_template.new_sht_ref_server)\n FusionUIBase.navigate_to_section(SectionType.SERVER_PROFILE_TEMPLATES, time_for_loading=5)\n elif getattr(profile_template, 'hardwareType', None) is not None:\n sht_new = profile_template.hardwareType\n\n # open Edit SPT dialog and enter data ...\n CommonOperationServerProfileTemplate.click_server_profile_template(profile_template.name)\n\n EditServerProfileTemplate.select_action_edit()\n EditServerProfileTemplate.wait_edit_server_profile_template_dialog_shown()\n BuiltIn().sleep(2)\n EditServerProfileTemplate.input_name(profile_template.newName) if getattr(profile_template, 'newName', None) is not None else None\n EditServerProfileTemplate.input_description(profile_template.desc) if getattr(profile_template, 'desc', None) is not None else None\n\n sht_selected = EditServerProfileTemplate.get_selected_server_hardware_type()\n if sht_new is not None and sht_new not in sht_selected:\n logger.info(\"server hardware type '%s' is NOT consistent with current value '%s'\" % (sht_new, sht_selected))\n EditServerProfileTemplate.ChangeServerHardwareTypeAndEnclosureGroup.change_server_hardware_type(sht_new, enclosure_group, timeout=5, fail_if_false=False)\n\n eg_selected = EditServerProfileTemplate.get_selected_enclosure_group()\n if enclosure_group is not None and enclosure_group not in eg_selected:\n logger.warn(\"enclosure group '%s' is NOT consistent with test data '%s'\" % (eg_selected, enclosure_group))\n EditServerProfileTemplate.ChangeServerHardwareTypeAndEnclosureGroup.change_enclosure_group(enclosure_group, timeout=5, fail_if_false=False)\n\n if getattr(profile_template, 'Affinity', None) is not None:\n logger.info(\"test data for 'Affinity' is found: <%s>, start setting Affinity ...\" % profile_template.Affinity)\n EditServerProfileTemplate.select_affinity_by_text(profile_template.Affinity)\n\n if getattr(profile_template, 'Firmware', None) is not None:\n logger.info(\"test data for 'Firmware' is found: <%s>, start setting Firmware Baseline ...\" % profile_template.Firmware)\n logger.debug(\"test data for 'Firmware' is found: <%s>\" % profile_template.Firmware, also_console=False)\n # set Firmware Baseline and force-installation option\n CommonOperationServerProfileTemplate.Firmware.set(profile_template.Firmware)\n\n if 
getattr(profile_template, 'Connections', None) is not None:\n logger.debug(\"test data for 'Connections' is found: <%s>\" % profile_template.Connections, also_console=False)\n logger.info(\"test data for 'Connections' is found, start adding connections ...\")\n # add connections\n CommonOperationServerProfileTemplate.Connection().set(profile_template.Connections)\n\n if getattr(profile_template, 'LocalStorage', None) is not None:\n logger.debug(\"test data for 'Local Storage' is found: <%s>\" % profile_template.LocalStorage, also_console=False)\n logger.info(\"test data for 'Local Storage' is found, start setting local storage options ... \")\n CommonOperationServerProfileTemplate.LocalStorage.set(profile_template.LocalStorage)\n\n if getattr(profile_template, 'SANStorage', None) is not None:\n BuiltIn().sleep(3)\n logger.debug(\"test data for 'SAN Storage' is found:<%s>\" % profile_template.SANStorage, also_console=False)\n logger.info(\"test data for 'SAN Storage' is found, start setting SAN storage options and adding volumes ...\")\n # select \"Manage SAN Storage\" checkbox\n CommonOperationServerProfileTemplate.SANStorage.set(profile_template.SANStorage)\n\n sht_selected = EditServerProfileTemplate.get_selected_server_hardware_type()\n if getattr(profile_template, 'BootSettings', None) is not None:\n logger.debug(\"test data for 'Boot Settings' is found: <%s>\" % profile_template.BootSettings, also_console=False)\n logger.info(\"test data for 'Boot Settings' is found, start setting its options ...\")\n CommonOperationServerProfileTemplate.BootSettings.set(profile_template, server_hardware_type=sht_selected)\n\n # 'BIOSSettings' part is ignored since BIOS setting is complicated to verify the result, therefor\n # might be better to use a dedicated tool to do this part automation separately\n\n if getattr(profile_template, 'Advanced', None) is not None:\n BuiltIn().sleep(3)\n logger.debug(\"test data for 'Advanced' is found: <%s>\" % profile_template.Advanced, also_console=False)\n logger.info(\"test data for 'Advanced' is found, start setting its options ...\")\n # select \"MAC/WWN/Serial/Hide unused FlexNICs\" radio box\n EditServerProfileTemplate.Advanced.set(profile_template)\n\n EditServerProfileTemplate.click_ok_button()\n # logger.debug(\"sleeping for 8 seconds ...\")\n # BuiltIn().sleep(8)\n # if EditServerProfileTemplate.get_error_message_from_boot_mode() is not None:\n if CommonOperationServerProfileTemplate.BootSettings.get_error_message_from_boot_mode() is not None:\n logger.warn(\"test data may be wrongly defined for 'Boot mode', which caused an error that blocks profile being created. 
\"\n \"test will skip this profile '%s' and continue to create other server profiles\" % profile_template.name)\n continue\n\n BuiltIn().sleep(2)\n status, msg = FusionUIBase.get_error_message_from_dialog(timeout=10)\n if status is True:\n logger.warn(\"unexpected error occurred: %s\" % msg)\n ui_lib.fail_test(msg)\n\n EditServerProfileTemplate.wait_edit_server_profile_template_dialog_disappear(timeout=300)\n FusionUIBase.show_activity_sidebar()\n profile_name = profile_template.newName if getattr(profile_template, 'newName', None) is not None else profile_template.name\n FusionUIBase.wait_activity_action_ok(profile_name, 'Update', timeout=300, fail_if_false=True)\n FusionUIBase.show_activity_sidebar()\n CommonOperationServerProfileTemplate.wait_server_profile_template_status_ok(profile_name, timeout=300, fail_if_false=True)\n logger.info(\"edited server profile '%s' successfully\" % profile_name)\n edited += 1\n\n logger.info(\"{0} == Summary == {0}\".format('-' * 14))\n if total - not_exists == 0:\n logger.warn(\"no server profile template to edit! all %s server profile template(s) is NOT existing, test is considered FAILED\" % not_exists)\n return False\n else:\n if edited < total:\n logger.warn(\"not all of the server profile template(s) is successfully edited - %s out of %s edited \" % (edited, total))\n if edited + not_exists == total:\n logger.warn(\"%s not-existing server profile template(s) is skipped being edited, test is considered FAILED\" % not_exists)\n return False\n else:\n logger.warn(\"%s not-existing server profile template(s) is skipped being edited, %s profile template(s) left is failed being edited \" % (not_exists, total - edited - not_exists))\n return False\n\n logger.info(\"all of the server profile template(s) is successfully edited - %s out of %s \" % (edited, total))\n return True",
"def template_name(self, template_name):\n\n self._template_name = template_name",
"def set_remote_template(self, filename):\n if self.template_exists(filename):\n self.client.service.SetRemoteTemplate(filename=filename)\n else:\n raise LiveDocxError('Remote template \"%s\" not exists' % filename)",
"def __call__(self, template, obj=None):\n for engine in self.engines:\n filename = engine.find_template_filename(template)\n if filename:\n if obj:\n self.res.locals.update(obj)\n html = engine.render_source(filename, self.res.locals)\n self.res.send_html(html)\n break\n else:\n raise ValueError(\"Could not find a template with name '%s'\" % template)"
] | [
"0.6099811",
"0.59442204",
"0.5905751",
"0.57469434",
"0.5522224",
"0.5516845",
"0.5464372",
"0.5455976",
"0.54337114",
"0.5396981",
"0.5396981",
"0.53957087",
"0.5352702",
"0.5321659",
"0.5236404",
"0.51731855",
"0.51170504",
"0.511649",
"0.5103829",
"0.5086445",
"0.50662416",
"0.5020498",
"0.5013",
"0.4953942",
"0.49341264",
"0.48969656",
"0.48586997",
"0.47701764",
"0.47579348",
"0.4757472"
] | 0.6841243 | 0 |
Updates the default network connection template. During network creation, the default connection value will be inherited from this template. [Arguments] | def fusion_api_update_default_connection_template(self, body, api=None, headers=None):
return self.ct.update_default(body, api=api, headers=headers) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def create_default_network(context):\n return [{\n 'type': 'templates/network.py',\n 'name': 'fc-network',\n 'properties': {\n 'resourceName': 'network',\n 'name': 'network',\n 'projectId': '$(ref.fc-project.projectId)',\n 'autoCreateSubnetworks': True,\n # We pass the dependsOn list into the network template as a\n # parameter. Deployment Manager doesn't support dependsOn for\n # template-call nodes, so we can't have this resource itself depend on\n # the project-wide resources.\n 'dependsOn': '$(ref.fc-project.resourceNames)',\n },\n }]",
"def get_default(self, create=True):\n if self._default_network is None and create:\n log.debug(\"Creating default network...\")\n self._default_network = self.create('default', driver='bridge')\n\n return self._default_network",
"def fusion_api_get_default_connection_template(self, api=None, headers=None):\n return self.ct.get_default(api=api, headers=headers)",
"def test_default_connection_updated_correctly(self):\n result = self.run_cli_command(\n \"--skip-consistency-check\",\n \"config\",\n \"get\",\n \"agent.default_connection\",\n cwd=self._get_cwd(),\n )\n assert result.stdout == \"fetchai/stub:0.21.3\\n\"",
"def test_default_connection_updated(self):\n assert self.agent_config.default_connection == self.new_connection_id",
"def default_docker_pull_conn_name(self, default_docker_pull_conn_name: str):\n\n self._default_docker_pull_conn_name = default_docker_pull_conn_name",
"def _default(self):\n\n self.app.render(infoNetwork.all())",
"def test_default_routing_updated_correctly(self):\n result = self.run_cli_command(\n \"--skip-consistency-check\",\n \"config\",\n \"get\",\n \"agent.default_routing\",\n cwd=self._get_cwd(),\n )\n assert (\n result.stdout\n == f'{{\"{DefaultMessage.protocol_id}\": \"{StubConnection.connection_id}\"}}\\n'\n )",
"def createNet(self):\n\n sw = OVSKernelSwitch\n topo = G2Topo(self.config.topoData)\n ctrl = RemoteController('c', ip=REMOTE_CONTROLLER_IP, port=CONTROLLER_PORT)\n\n # Default link parameters.\n # HTB: Hierarchical Token Bucket rate limiter.\n spec = self.config.topoData['defaultLinkInfo']\n if spec:\n mybw = float(spec['bw'])\n mydelay = spec['delay']\n myloss = float(spec['loss'])\n link = partial(TCLink, delay=mydelay, bw=mybw, loss=myloss)\n if spec['max_queue_size'] != 'N/A' and spec['use_htb'] == 'N/A':\n myqueue = int(spec['max_queue_size'])\n link = partial(TCLink, delay=mydelay, bw=mybw, loss=myloss, max_queue_size=myqueue)\n if spec['max_queue_size'] == 'N/A' and spec['use_htb'] != 'N/A':\n myhtb = bool(spec['use_htb'])\n link = partial(TCLink, delay=mydelay, bw=mybw, loss=myloss, use_htb=myhtb)\n if spec['max_queue_size'] != 'N/A' and spec['use_htb'] != 'N/A':\n myqueue = int(spec['max_queue_size'])\n myhtb = bool(spec['use_htb'])\n link = partial(TCLink, delay=mydelay, bw=mybw, loss=myloss, max_queue_size=myqueue, use_htb=myhtb)\n else:\n # No spec for default parameters, using Mininet defaults.\n info(\"**** [G2]: using Mininet default parameters for links other than those configured in link_info \\n\")\n link = TCLink\n\n # Configure bw, delay, loss, etc. for some links that are specified in config file.\n for spec in self.config.topoData['linkInfos']:\n src = spec['src']\n dst = spec['dst']\n try:\n linkInfo = topo.linkInfo(src, dst)\n if spec['bw'] != 'N/A':\n linkInfo['bw'] = float(spec['bw']) # Mbit\n if spec['delay'] != 'N/A':\n linkInfo['delay'] = spec['delay'] # ms\n if spec['loss'] != 'N/A':\n linkInfo['loss'] = float(spec['loss']) # Percentage\n if spec['max_queue_size'] != 'N/A':\n linkInfo['max_queue_size'] = int(spec['max_queue_size'])\n if spec['use_htb'] != 'N/A':\n linkInfo['use_htb'] = bool(spec['use_htb'])\n\n topo.setlinkInfo(src,dst,linkInfo)\n except KeyError:\n info(\"**** [G2]: no link exists between switch pair (%s, %s) \\n\" %(src, dst))\n\n # Assign a fraction of overall CPU time to Mininet hosts.\n nHosts = float(len(self.config.topoData['hosts']))\n cpuHostFrac = 0.50/nHosts\n # 'cpu' is the fraction of CPU that each host would get.\n # Indirectly, it sets 'cpu.cfs_quota_us': the total available run-time within a period (in microseconds).\n # Mininet uses the following scheme: cfs_quota_us = (cpuHostFrac * nCPU * period_us) microseconds.\n # 'period_us' sets cpu.cfs_period_us.\n # Larger period would allow for increased burst capacity.\n host = custom(CPULimitedHost, cpu=cpuHostFrac, period_us=100000)\n\n net = Mininet(topo=topo,\n host=host,\n switch=sw,\n controller=ctrl,\n waitConnected=True,\n autoStaticArp=True,\n link=link)\n\n # Create a default route for each host.\n # Turn on tcpdump on each host if debug mode is on.\n for hs in topo.hosts():\n net.getNodeByName(hs).setDefaultRoute(intf='%s-eth0' %hs) # 1st interface on hosts is hi-eth0\n if self.config.isDebug:\n net.getNodeByName(hs).cmd('tcpdump -w %s.pcap -i %s-eth0 &' %(hs,hs))\n return net",
"def test_get_default_network(self):\n pass",
"def contrail_net_template(self):\n\n plugin.prepare_contrail_plugin(self, slaves=5)\n\n # enable plugin in contrail settings\n plugin.activate_plugin(self)\n\n # activate vSRX image\n vsrx_setup_result = plugin.activate_vsrx()\n\n self.fuel_web.update_nodes(\n self.cluster_id,\n {\n 'slave-01': ['controller'],\n 'slave-02': ['compute'],\n 'slave-03': ['contrail-config',\n 'contrail-control',\n 'contrail-db'],\n 'slave-04': ['contrail-config',\n 'contrail-control',\n 'contrail-db'],\n 'slave-05': ['contrail-config',\n 'contrail-control',\n 'contrail-db'],\n },\n )\n\n plugin.net_group_preparation(self)\n\n network_template = self.get_network_templ('network_template_1')\n self.fuel_web.client.upload_network_template(\n cluster_id=self.cluster_id, network_template=network_template)\n\n openstack.deploy_cluster(self)\n\n # run OSTF tests\n if vsrx_setup_result:\n self.fuel_web.run_ostf(cluster_id=self.cluster_id)",
"def layout_network(layout_name=None, network=None, base_url=DEFAULT_BASE_URL):\n suid = networks.get_network_suid(network, base_url=base_url)\n if layout_name is None:\n res = commands.commands_post('layout apply preferred networkSelected=\"SUID:' + str(suid) + '\"',\n base_url=base_url)\n return res\n else:\n res = commands.commands_post('layout ' + layout_name + ' network=\"SUID:' + str(suid) + '\"', base_url=base_url)\n return res",
"def create_netmiko_connection(self, opt):\n\n key = opt['ip']\n conn_list = ['None', 'None', 'None']\n net_connect_dict = self._netmiko_connection\n auth = (opt['username'], opt['password'])\n if key not in net_connect_dict:\n # case 1: No key create a connection\n try:\n net_connect = self._establish_netmiko_handler(opt, net_connect_dict)\n if net_connect:\n hashed_auth = self._hash_auth_string(auth)\n conn_list[0] = net_connect\n conn_list[1] = hashed_auth\n conn_list[2] = threading.Lock()\n net_connect_dict[key] = conn_list\n except ValueError as err:\n raise\n except Exception as err:\n raise\n\n else:\n existing_hash = net_connect_dict[key][1]\n conn_list = self._get_netmiko_connection(key)\n conn_obj = conn_list[0]\n if self._check_auth_string(existing_hash, auth):\n # case 2: check if connection object is alive\n if conn_obj.is_alive() is True:\n conn_obj.set_base_prompt()\n return\n # case 3: Assume user value is new so delete existing\n # and add new connection object for this\n else:\n #disconnect stale object\n conn_list[2].acquire()\n conn_obj.disconnect()\n conn_list[2].release()\n\n # Existing object is not valid so clear and create new\n # connection\n del net_connect_dict[key]\n try:\n net_connect = self._establish_netmiko_handler(opt, net_connect_dict)\n if net_connect:\n new_hash = self._hash_auth_string(auth)\n conn_list[0] = net_connect\n conn_list[1] = new_hash\n conn_list[2] = threading.Lock()\n net_connect_dict[key] = conn_list\n except ValueError as error:\n raise\n except Exception:\n raise Exception",
"def get_default(self, parent):\n\n # TODO fix this\n Reference = load('zbx.config.Reference')\n\n return Reference(self.model, parent, self.default, self.append_host)",
"def test_default_routing_updated(self):\n assert self.agent_config.default_routing == {\n self.new_protocol_id: self.new_connection_id\n }",
"def dvs_connect_default_net(self):\n self.show_step(1)\n self.env.revert_snapshot(\"dvs_vcenter_systest_setup\")\n cluster_id = self.fuel_web.get_last_created_cluster()\n\n os_ip = self.fuel_web.get_public_vip(cluster_id)\n os_conn = os_actions.OpenStackActions(\n os_ip, SERVTEST_USERNAME,\n SERVTEST_PASSWORD,\n SERVTEST_TENANT)\n\n # Create security group with rules for ssh and ping\n security_group = os_conn.create_sec_group_for_ssh()\n\n _s_groups = os_conn.neutron.list_security_groups()['security_groups']\n _srv_tenant = os_conn.get_tenant(SERVTEST_TENANT).id\n default_sg = [sg for sg in _s_groups\n if sg['tenant_id'] == _srv_tenant and\n sg['name'] == 'default'][0]\n\n network = os_conn.nova.networks.find(label=self.inter_net_name)\n\n # Create access point server\n access_point, access_point_ip = openstack.create_access_point(\n os_conn=os_conn,\n nics=[{'net-id': network.id}],\n security_groups=[security_group.name, default_sg['name']])\n\n self.show_step(2)\n self.show_step(3)\n openstack.create_instances(os_conn=os_conn,\n nics=[{'net-id': network.id}],\n vm_count=1,\n security_groups=[default_sg['name']])\n openstack.verify_instance_state(os_conn)\n\n # Get private ips of instances\n instances = [instance for instance in os_conn.get_servers()\n if instance.id != access_point.id]\n ips = [os_conn.get_nova_instance_ip(i, net_name=self.inter_net_name)\n for i in instances]\n\n self.show_step(4)\n ip_pair = dict.fromkeys(ips)\n for key in ip_pair:\n ip_pair[key] = [value for value in ips if key != value]\n openstack.check_connection_through_host(access_point_ip, ip_pair)",
"def fusion_api_update_connection_template(self, body, uri=None, api=None, headers=None):\n return self.ct.update(body, uri=uri, api=api, headers=headers)",
"def _update_connection_num(self, client_ip):\r\n\t\tcur_conn, max_conn = comm_server.get_current_connection_num()\r\n\t\tself.children[\"label_connections\"].config( \\\r\n\t\t\ttext = \"連接數: {0}/{1}\".format(cur_conn, max_conn))\r\n\r\n\t\t_logger.debug(\"Connection number is updated.\")",
"def dvs_connect_nodefault_net(self):\n self.show_step(1)\n self.env.revert_snapshot(\"dvs_vcenter_systest_setup\")\n cluster_id = self.fuel_web.get_last_created_cluster()\n\n os_ip = self.fuel_web.get_public_vip(cluster_id)\n os_conn = os_actions.OpenStackActions(\n os_ip, SERVTEST_USERNAME,\n SERVTEST_PASSWORD,\n SERVTEST_TENANT)\n\n tenant = os_conn.get_tenant(SERVTEST_TENANT)\n\n # Create security group with rules for ssh and ping\n security_group = os_conn.create_sec_group_for_ssh()\n\n _sg_groups = os_conn.neutron.list_security_groups()['security_groups']\n _srv_tenant = os_conn.get_tenant(SERVTEST_TENANT).id\n default_sg = [sg for sg in _sg_groups\n if sg['tenant_id'] == _srv_tenant and\n sg['name'] == 'default'][0]\n\n self.show_step(2)\n net_1 = os_conn.create_network(\n network_name=self.net_data[0].keys()[0],\n tenant_id=tenant.id\n )['network']\n\n subnet = os_conn.create_subnet(\n subnet_name=net_1['name'],\n network_id=net_1['id'],\n cidr=self.net_data[0][self.net_data[0].keys()[0]],\n ip_version=4)\n\n # Check that network are created\n assert_true(os_conn.get_network(net_1['name'])['id'] == net_1['id'])\n\n # Create Router_01, set gateway and add interface to external network\n router_1 = os_conn.create_router('router_1', tenant=tenant)\n\n # Add net_1 to router_1\n os_conn.add_router_interface(router_id=router_1[\"id\"],\n subnet_id=subnet[\"id\"])\n\n access_point, access_point_ip = openstack.create_access_point(\n os_conn=os_conn,\n nics=[{'net-id': net_1['id']}],\n security_groups=[security_group.name, default_sg['name']])\n\n self.show_step(3)\n self.show_step(4)\n openstack.create_instances(os_conn=os_conn,\n nics=[{'net-id': net_1['id']}],\n vm_count=1,\n security_groups=[default_sg['name']])\n openstack.verify_instance_state(os_conn)\n\n # Get private ips of instances\n instances = [instance for instance in os_conn.get_servers()\n if instance.id != access_point.id]\n ips = [os_conn.get_nova_instance_ip(i, net_name=net_1['name'])\n for i in instances]\n\n self.show_step(5)\n ip_pair = dict.fromkeys(ips)\n for key in ip_pair:\n ip_pair[key] = [value for value in ips if key != value]\n openstack.check_connection_through_host(access_point_ip, ip_pair)",
"def reset_network(self, instance):\n LOG.debug(\"reset_network\")\n return",
"def initialize_gateway(self, network_ref):\n raise NotImplementedError()",
"def initialise_network(self):\n raise NotImplementedError",
"def dvs_update_network(self):\n self.show_step(1)\n self.env.revert_snapshot(\"dvs_vcenter_systest_setup\")\n\n cluster_id = self.fuel_web.get_last_created_cluster()\n\n self.show_step(2)\n os_ip = self.fuel_web.get_public_vip(cluster_id)\n os_conn = os_actions.OpenStackActions(\n os_ip, SERVTEST_USERNAME,\n SERVTEST_PASSWORD,\n SERVTEST_TENANT)\n\n tenant = os_conn.get_tenant(SERVTEST_TENANT)\n\n net_1 = os_conn.create_network(\n network_name=self.net_data[0].keys()[0],\n tenant_id=tenant.id)['network']\n\n os_conn.create_subnet(\n subnet_name=net_1['name'],\n network_id=net_1['id'],\n cidr=self.net_data[0][self.net_data[0].keys()[0]],\n ip_version=4)\n\n assert_true(os_conn.get_network(net_1['name'])['id'] == net_1['id'])\n\n self.show_step(3)\n os_conn.neutron.update_network(net_1[\"id\"],\n {\"network\": {\"name\": 'net_2'}})\n\n assert_true(os_conn.get_network('net_2')['id'] == net_1['id'])\n\n self.show_step(4)\n default_net = os_conn.nova.networks.find(label=self.inter_net_name)\n os_conn.neutron.update_network(\n default_net.id, {\"network\": {\"name\": 'spring'}})\n\n assert_true(os_conn.get_network('spring')['id'] == default_net.id)",
"async def async_step_network_connection(self, user_input=None):\n if user_input:\n user_input[CONF_PROTOCOL] = NETWORK\n entry_result = self.async_create_entry(\n title=\"AMS Reader\", data=user_input,\n )\n if entry_result:\n return entry_result\n\n return self.async_show_form(\n step_id=\"network_connection\",\n data_schema=DATA_SCHEMA_NETWORK_DATA,\n errors={},\n )",
"def default (no_flow = False,\n network = \"192.168.0.0/24\", # Address range\n first = 100, last = 199, count = None, # Address range\n ip = \"192.168.0.254\",\n router = (), # Auto\n dns = ()): # Auto\n launch(no_flow, network, first, last, count, ip, router, dns)",
"def get_default_config(self):\n if not self.iface_type:\n return None\n\n defaults = {}\n defaults['description'] = self.interface_name + ' Interface'\n defaults['admin'] = 'up'\n if self.is_ethernet:\n defaults['speed'] = 'auto'\n defaults['duplex'] = 'auto'\n defaults['type'] = 'bridged'\n elif self.iface_type == 'Bridge-Aggregation':\n defaults['type'] = 'bridged'\n else:\n defaults['type'] = 'routed'\n\n return defaults",
"def init_network(session: \"Session\", new_network_name: str) -> None:\n url_tail = f\"/{CoordConstsV2.RSC_NETWORKS}\"\n _post(session, url_tail, None, params={CoordConstsV2.QP_NAME: new_network_name})",
"def default(self, default):\n self._default = default\n return self",
"def set_deafult_gw(self, args):\n\n gw_ip = ip_address(args.ip)\n gw_info = UplinkGatewayInfo()\n gw_info.update_ip(str(gw_ip))\n print(\"set Default gw IP to %s\" % gw_info.get_gw_ip())",
"def default_connector(action, connect_params: object = None):\n default_connection = Connection()\n if connect_params:\n params = connect_params\n else:\n params = {\n \"host_url\": \"http://host_url\",\n \"credentials\": {\"username\": \"user1\", \"password\": \"password\"},\n \"verify_ssl\": False,\n }\n default_connection.connect(params)\n action.connection = default_connection\n return action"
] | [
"0.6304816",
"0.5861226",
"0.5853684",
"0.5806563",
"0.55561864",
"0.5391252",
"0.5345937",
"0.52172774",
"0.5185026",
"0.514164",
"0.51319385",
"0.5113893",
"0.5094148",
"0.5078497",
"0.5038522",
"0.503765",
"0.4990501",
"0.49837723",
"0.49665818",
"0.4938572",
"0.4913054",
"0.4902203",
"0.49018446",
"0.48941332",
"0.4884679",
"0.48747113",
"0.4853366",
"0.4844689",
"0.48440698",
"0.48371273"
] | 0.653577 | 0 |
Updates a Datacenter. [Arguments] | def fusion_api_edit_datacenter(self, body, uri, api=None, headers=None):
return self.dc.update(body, uri, api, headers) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def update(self, code, *args, **kwargs):\n\n if not args and not kwargs:\n raise Exception('attributes for CostCenter are missing')\n\n attributes = args[0] if args else kwargs\n attributes = dict((k, v) for k, v in attributes.items())\n attributes.update({'service': self.SERVICE})\n _, _, cost_center = self.http_client.put(\"/costcenters/{code}\".format(code=code), body=attributes)\n return cost_center",
"def do_update(cs, args):\n opts = {}\n opts['memory'] = args.memory\n opts['cpu'] = args.cpu\n opts['name'] = args.name\n if 'auto_heal' in args and args.auto_heal:\n opts['auto_heal'] = True\n if 'no_auto_heal' in args and args.no_auto_heal:\n opts['auto_heal'] = False\n opts = zun_utils.remove_null_parms(**opts)\n if not opts:\n raise exc.CommandError(\"You must update at least one property\")\n container = cs.containers.update(args.container, **opts)\n _show_container(container)",
"async def do_update(self, data):\n old = await self.config()\n\n new = old.copy()\n new.update(data)\n\n verrors = ValidationErrors()\n\n for attr, minlen, maxlen in (\n ('access_key', 5, 20),\n ('secret_key', 8, 40),\n ):\n curlen = len(new.get(attr, ''))\n if curlen < minlen or curlen > maxlen:\n verrors.add(\n f's3_update.{attr}', f'Attribute should be {minlen} to {maxlen} in length'\n )\n\n if not new['storage_path']:\n verrors.add('s3_update.storage_path', 'Storage path is required')\n else:\n await check_path_resides_within_volume(\n verrors, self.middleware, 's3_update.storage_path', new['storage_path']\n )\n\n if not verrors:\n if new['storage_path'].rstrip('/').count('/') < 3:\n verrors.add(\n 's3_update.storage_path',\n 'Top level datasets are not allowed. i.e /mnt/tank/dataset is allowed'\n )\n else:\n # If the storage_path does not exist, let's create it\n if not os.path.exists(new['storage_path']):\n os.makedirs(new['storage_path'])\n\n if new['certificate']:\n verrors.extend((await self.middleware.call(\n 'certificate.cert_services_validation', new['certificate'], 's3_update.certificate', False\n )))\n\n if new['bindip'] not in await self.bindip_choices():\n verrors.add('s3_update.bindip', 'Please provide a valid ip address')\n\n if verrors:\n raise verrors\n\n new['disks'] = new.pop('storage_path')\n\n await self._update_service(old, new)\n\n if (await self.middleware.call('filesystem.stat', new['disks']))['user'] != 'minio':\n await self.middleware.call(\n 'filesystem.setperm',\n {\n 'path': new['disks'],\n 'mode': str(775),\n 'uid': (await self.middleware.call('dscache.get_uncached_user', 'minio'))['pw_uid'],\n 'gid': (await self.middleware.call('dscache.get_uncached_group', 'minio'))['gr_gid'],\n 'options': {'recursive': True, 'traverse': False}\n }\n )\n\n return await self.config()",
"def fedora_update(container_path, update_body={}):\n initial_data = fedora_get(container_path)\n updated_data = {**initial_data, **update_body}\n updated_data['@context'] = {**initial_data['@context'], **FEDORA_CONTEXT}\n updated_data['@type'] = initial_data['@type'] + ['http://purl.org/dc/dcmitype/Dataset']\n request = requests.put('{}{}/{}'.format(settings.FEDORA_URL,\n PUBLICATIONS_CONTAINER, container_path),\n auth=(settings.FEDORA_USERNAME,\n settings.FEDORA_PASSWORD),\n headers=FEDORA_HEADERS,\n data=json.dumps(updated_data))\n request.raise_for_status()\n return fedora_get(container_path)",
"def update(*args):",
"def update(self, ds, **kwargs):\n ds.set_status(self._db, self._es, self._queue, DatasetStatus.INDEXING)\n\n self._es.delete_ds(ds.id)\n for mol_db_dict in ds.config['databases']:\n mol_db = MolecularDB(name=mol_db_dict['name'],\n version=mol_db_dict.get('version', None),\n iso_gen_config=ds.config['isotope_generation'])\n self._es.index_ds(ds.id, mol_db)\n\n ds.set_status(self._db, self._es, self._queue, DatasetStatus.FINISHED)",
"def do_update(self, args):\n args = shlex.split(args)\n dicti = storage.all()\n if not args:\n print(\"** class name missing **\")\n elif not args[0] in name_of_class:\n print(\"** class doesn't exist **\")\n elif len(args) == 1:\n print(\"** instance id missing **\")\n elif not \"{}.{}\".format(args[0], args[1]) in dicti:\n print(\"** no instance found **\")\n elif len(args) == 2:\n print(\"** attribute name missing **\")\n elif len(args) == 3:\n print(\"** value missing **\")\n else:\n key = dicti[\"{}.{}\".format(args[0], args[1])]\n setattr(key, args[2], args[3])\n key.save()",
"def update(self, **kwargs):\n return self.client.api.update_container(self.id, **kwargs)",
"def update(self, args):\n pass",
"def update(self, *args, **kw):\n pass",
"def deploy_dc(self):\n print(\"==> Deploying Data Center\")\n # TODO: Replace Marvin\n mrv = marvin.marvinInit.MarvinInit(self.marvin_config)\n mrv.init()\n dc = marvin.deployDataCenter.DeployDataCenters(mrv.getTestClient(), mrv.getParsedConfig())\n dc.deploy()",
"def update_command(arguments: List[str]) -> None:\n if len(arguments) != 3:\n print('Required 2 argument for update command') # noqa: WPS421\n return\n token = token_load.load()\n logic.update(token, gist_id=arguments[1], filename=arguments[2])",
"def update(self, arguments):\n puts_err(colored.red(\"Not implemented!\"))",
"def update(self, *args, **kwargs):\n pass",
"def update(self, *args, **kwargs):\n pass",
"def update(self, *args, **kwargs):\n pass",
"def do_update(args):\n # if args.verbosity > 0:\n log.info(\"Verbosity: %d\" % args.verbosity)\n log.info(\"Data directory: %s\" % get_data_dir(args))\n log.info(\"Updating...\")\n csl = update_list(args, 'csl')\n # if args.verbosity > 0:\n log.info(\"Done.\")\n return True",
"def edit_centerinfo(request):\n center = request.session.get('center')\n center = DeliveryCenter.objects.get(pk=center.pk)\n form = UpdateCenterInfo(instance=center)\n if request.method == 'POST':\n form = UpdateCenterInfo(instance=center, data=request.POST)\n if form.is_valid():\n form.save()\n return HttpResponseRedirect(reverse('package.views.cm_dashboard'))\n return render_to_response('edit_dc_info.html', { 'form':form },\n context_instance=RequestContext(request))",
"def update(self, **kwargs):\n\n host = self.get()\n if not host:\n self.raiseNotFoundError()\n return host.update(**kwargs)",
"def update(self, **options):\n pass",
"def do_project_update(cs, args):\n raise NotImplementedError",
"def update( d, **kw):\n d.update( d, **kw )\n return d",
"def do_update(self, args):\n args = args.split()\n if len(args) == 0:\n print(\"** class name missing **\")\n return\n if len(args) == 1:\n print(\"** instance id missing **\")\n return\n if len(args) == 2:\n print(\"** attribute name missing **\")\n return\n if len(args) == 3:\n print(\"** value missing **\")\n return\n if args[0] not in HBNBCommand.valid_classes.keys():\n print(\"** class doesn't exist **\")\n return\n all_objs = storage.all(args[0])\n for k, v in all_objs.items():\n if k == args[1]:\n setattr(v, args[2], args[3])\n storage.save()\n return\n print(\"** no instance found **\")",
"def update(args, config):\n print('Updates an HPC fleet with name \"{}\"'.format(args.fleet_name))",
"def update_data(update_method):\n log.debug('Starting update')\n cmd = ['/usr/bin/python', wf.workflowfile('update.py')]\n if update_method == 'force':\n cmd.append('--update')\n cmd.append('force')\n\n # Update projects data\n log.debug('Run update command : {}'.format(cmd))\n run_in_background('update', cmd)\n\n return 0",
"def update(self, *args, **kwargs): # real signature unknown\n pass",
"def update(self, *args, **kwargs): # real signature unknown\n pass",
"def update(self, *args, **kwargs): # real signature unknown\n pass",
"def update(self, *args, **kwargs): # real signature unknown\n pass",
"def update(self, *args, **kwargs): # real signature unknown\n pass"
] | [
"0.64845985",
"0.6361359",
"0.57992005",
"0.5709713",
"0.5695884",
"0.5656915",
"0.56096",
"0.5583279",
"0.5574616",
"0.5565574",
"0.54715985",
"0.5468011",
"0.5464512",
"0.5455433",
"0.5455433",
"0.5455433",
"0.54455954",
"0.5437029",
"0.5397037",
"0.53785485",
"0.5373082",
"0.5367154",
"0.53660125",
"0.5355238",
"0.5350077",
"0.5322081",
"0.5322081",
"0.5322081",
"0.5322081",
"0.5322081"
] | 0.68879116 | 0 |
Removes Datacenters. If name or uri are not specified, all datacenters are removed. [Arguments] | def fusion_api_remove_datacenter(self, name=None, uri=None, api=None, headers=None):
return self.dc.delete(name, uri, api, headers) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def cleanUp(name):\n clovr = pymongo.Connection().clovr\n clovr.clusters.remove(dict(name=name))",
"def fusion_api_delete_network_set(self, name=None, uri=None, api=None, headers=None):\n return self.network_set.delete(name, uri, api, headers)",
"def delete_cluster(self):",
"def deletecollection_namespaced_cluster_network(self, **kwargs):\n\n all_params = ['pretty', 'label_selector', 'field_selector', 'watch', 'resource_version', 'timeout_seconds']\n all_params.append('callback')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method deletecollection_namespaced_cluster_network\" % key\n )\n params[key] = val\n del params['kwargs']\n\n\n resource_path = '/oapi/v1/clusternetworks'.replace('{format}', 'json')\n path_params = {}\n\n query_params = {}\n if 'pretty' in params:\n query_params['pretty'] = params['pretty']\n if 'label_selector' in params:\n query_params['labelSelector'] = params['label_selector']\n if 'field_selector' in params:\n query_params['fieldSelector'] = params['field_selector']\n if 'watch' in params:\n query_params['watch'] = params['watch']\n if 'resource_version' in params:\n query_params['resourceVersion'] = params['resource_version']\n if 'timeout_seconds' in params:\n query_params['timeoutSeconds'] = params['timeout_seconds']\n\n header_params = {}\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.\\\n select_header_accept(['application/json', 'application/yaml'])\n if not header_params['Accept']:\n del header_params['Accept']\n\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.\\\n select_header_content_type(['*/*'])\n\n # Authentication setting\n auth_settings = []\n\n response = self.api_client.call_api(resource_path, 'DELETE',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='UnversionedStatus',\n auth_settings=auth_settings,\n callback=params.get('callback'))\n return response",
"def cleanup_infrastructure_compute(config, context):\n log.info(\"### Cleaning up infrastructure ###\")\n admin = context.getAdministrationService()\n for datacenter in admin.listDatacenters():\n cleanup_infrastructure_storage(config, datacenter)\n cleanup_infrastructure_network(config, datacenter)\n # This will remove the datacenter and all hypervisors\n # (if they don't contain deplopyed VMs)\n log.info(\"Removing datacenter %s...\" % datacenter.getName())\n datacenter.delete()",
"def delete_namespaced_cluster_network(self, body, name, **kwargs):\n\n all_params = ['body', 'name', 'pretty']\n all_params.append('callback')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method delete_namespaced_cluster_network\" % key\n )\n params[key] = val\n del params['kwargs']\n\n # verify the required parameter 'body' is set\n if ('body' not in params) or (params['body'] is None):\n raise ValueError(\"Missing the required parameter `body` when calling `delete_namespaced_cluster_network`\")\n # verify the required parameter 'name' is set\n if ('name' not in params) or (params['name'] is None):\n raise ValueError(\"Missing the required parameter `name` when calling `delete_namespaced_cluster_network`\")\n\n resource_path = '/oapi/v1/clusternetworks/{name}'.replace('{format}', 'json')\n path_params = {}\n if 'name' in params:\n path_params['name'] = params['name']\n\n query_params = {}\n if 'pretty' in params:\n query_params['pretty'] = params['pretty']\n\n header_params = {}\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n if 'body' in params:\n body_params = params['body']\n\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.\\\n select_header_accept(['application/json', 'application/yaml'])\n if not header_params['Accept']:\n del header_params['Accept']\n\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.\\\n select_header_content_type(['*/*'])\n\n # Authentication setting\n auth_settings = []\n\n response = self.api_client.call_api(resource_path, 'DELETE',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='UnversionedStatus',\n auth_settings=auth_settings,\n callback=params.get('callback'))\n return response",
"def clean_up_daemon_sets_in_namespaces_with_cleanup_policy(self, namespaces, cleanup_policy):\n return self.delete_resource_with_cleanup_policy(namespaces, cleanup_policy,\n self.apps_api.delete_collection_namespaced_daemon_set, \"DS\")",
"def fusion_api_remove_san_manager(self, name=None, uri=None, api=None, headers=None):\n return self.dm.delete(name, uri, api, headers)",
"def delete_worker_groups(cls, args, config):\n if len(args) == 0:\n raise MOLNSException(\"USAGE: molns worker delete name\")\n return\n config.delete_object(name=args[0], kind='WorkerGroup')",
"def delete_provider(cls, args, config):\n # print \"MOLNSProvider.delete_provider(args={0}, config={1})\".format(args, config)\n if len(args) == 0:\n print \"USAGE: molns provider delete name\"\n return\n config.delete_object(name=args[0], kind='Provider')",
"def RemoveBucketsCommand(self, args, unused_sub_opts=None, headers=None,\n debug=0):\n # Expand bucket name wildcards, if any.\n for uri_str in args:\n for uri in self.CmdWildcardIterator(uri_str, headers=headers,\n debug=debug):\n if uri.object_name:\n raise CommandException('\"rb\" command requires a URI with no object '\n 'name')\n print 'Removing %s...' % uri\n uri.delete_bucket(headers)",
"def delete(configsetname):\n cnfset = configsetPath(configsetname)\n files = os.listdir(cnfset)\n for f in files: os.remove(os.path.join(cnfset, f))\n os.rmdir(cnfset)\n return None",
"def Run(self, args):\n cli = self.context['clusteradmin']\n msg = (self.context['clusteradmin-msgs'].\n BigtableclusteradminProjectsZonesClustersDeleteRequest(\n name=util.ClusterUrl(args)))\n result = cli.projects_zones_clusters.Delete(msg)\n log.DeletedResource(args.cluster, kind='cluster',\n details='in zone [{0}]'.format(args.zone))\n return result",
"def RemoveObjsCommand(self, args, sub_opts=None, headers=None,\n debug=0):\n continue_on_error = False\n if sub_opts:\n for o, unused_a in sub_opts:\n if o == '-f':\n continue_on_error = True\n # Expand object name wildcards, if any.\n for uri_str in args:\n for uri in self.CmdWildcardIterator(uri_str, headers=headers,\n debug=debug):\n if uri.names_container():\n if uri.is_cloud_uri():\n # Before offering advice about how to do rm + rb, ensure those\n # commands won't fail because of bucket naming problems.\n boto.s3.connection.check_lowercase_bucketname(uri.bucket_name)\n uri_str = uri_str.rstrip('/\\\\')\n raise CommandException('\"rm\" command will not remove buckets. To '\n 'delete this/these bucket(s) do:\\n\\tgsutil rm '\n '%s/*\\n\\tgsutil rb %s' % (uri_str, uri_str))\n print 'Removing %s...' % uri\n try:\n uri.delete_key(validate=False, headers=headers)\n except Exception, e:\n if not continue_on_error:\n raise",
"def remove_cluster(config, nova, neutron, cinder, conn):\n\n cluster_info = OSClusterInfo(nova, neutron, cinder, config, conn)\n masters = cluster_info.get_instances(\"node\")\n workers = cluster_info.get_instances(\"master\")\n\n tasks = [host.delete(neutron) for host in masters if host]\n tasks += [host.delete(neutron) for host in workers if host]\n if tasks:\n LOGGER.debug(\"Deleting Instances ...\")\n loop = asyncio.get_event_loop()\n loop.run_until_complete(asyncio.wait(tasks))\n loop.close()\n\n LoadBalancer(config, conn).delete()\n\n sg_name = '%s-sec-group' % config['cluster-name']\n secg = conn.list_security_groups({\"name\": sg_name})\n if secg:\n LOGGER.debug(\"Deleting SecurityGroup %s ...\", sg_name)\n for sg in secg:\n for rule in sg.security_group_rules:\n conn.delete_security_group_rule(rule['id'])\n\n for port in conn.list_ports():\n if sg.id in port.security_groups:\n conn.delete_port(port.id)\n conn.delete_security_group(sg_name)\n\n # This needs to be replaced with OpenStackAPI in the future\n for vol in cinder.volumes.list():\n try:\n if config['cluster-name'] in vol.name and vol.status != 'in-use':\n try:\n vol.delete()\n except (BadRequest, NotFound):\n pass\n\n except TypeError:\n continue\n\n # delete the cluster key pair\n conn.delete_keypair(config['cluster-name'])",
"def cleanup(self) -> None:\n try:\n self._cluster_client.delete_cluster(\n request={\n 'project_id': self.cluster_metadata.project_id,\n 'region': self.cluster_metadata.region,\n 'cluster_name': self.cluster_metadata.cluster_name,\n })\n except Exception as e:\n if e.code == 403:\n _LOGGER.error(\n 'Due to insufficient project permissions, '\n 'unable to clean up the default cluster: %s',\n self.cluster_metadata.cluster_name)\n raise ValueError(\n 'You cannot delete a cluster in project: {}'.format(\n self.cluster_metadata.project_id))\n elif e.code == 404:\n _LOGGER.error(\n 'Cluster does not exist: %s', self.cluster_metadata.cluster_name)\n raise ValueError(\n 'Cluster was not found: {}'.format(\n self.cluster_metadata.cluster_name))\n else:\n _LOGGER.error(\n 'Failed to delete cluster: %s', self.cluster_metadata.cluster_name)\n raise e",
"def cluster_destroy(extra_args=None):\n cmd = [\"pcs\", \"cluster\", \"destroy\"]\n\n if isinstance(extra_args, (list, tuple)):\n cmd += extra_args\n\n log.debug(\"Running cluster destroy: %s\", cmd)\n\n return __salt__[\"cmd.run_all\"](cmd, output_loglevel=\"trace\", python_shell=False)",
"def remove_compute(compute_targets):\n for name, ct in compute_targets.items():\n compute_targets[name].delete()",
"def delete(self, *names):\n if len(names) != 1:\n raise RedisClusterException(\"deleting multiple keys is not implemented in pipeline command\")\n\n return self.execute_command('DEL', names[0])",
"def DeleteContainers(self):\n for container in itertools.chain(*list(self.containers.values())):\n container.Delete()",
"def delete_cluster(self):\n cf_namespace_id = self.create_or_fetch_namespace()\n self.delete_action(cf_namespace_id)\n self.create_action(cf_namespace_id)\n self.invoke_action(cf_namespace_id)",
"def delete_suggester(DomainName=None, SuggesterName=None):\n pass",
"def remove(self):\n method = \"remove_cluster\"\n params = {\n \"cluster_id\": self.id\n }\n make_request = self._client.connection.make_request\n return make_request(method, params)",
"def remove_dataset(self, name):\n payload = {\"name\": name}\n r = self.request(\n \"delete\", url_path_join(USER_DATASET_RESOURCE_URL, self.owner), payload=payload\n )\n self.check_and_raise(r)",
"def Run(self, args):\n cluster_ref = args.CONCEPTS.cluster.Parse()\n items = [command_util.ClusterMessage(name=cluster_ref.vmwareClustersId)]\n\n if not args.validate_only:\n command_util.ConfirmationPrompt('cluster', items, 'deleted')\n\n client = apis.ClustersClient()\n operation = client.Delete(args)\n\n if args.validate_only:\n return None\n\n # when using --allow-missing without --async on a non-existing resource,\n # it would return an operation object with an empty name.\n # return early to avoid potential polling error.\n if operation.name is None:\n return None\n\n if args.async_:\n log.DeletedResource(cluster_ref, 'Anthos Cluster on VMware', args.async_)\n return operation\n else:\n operation_client = operations.OperationsClient()\n response = operation_client.Wait(operation)\n log.DeletedResource(cluster_ref, 'Anthos Cluster on VMware', args.async_)\n return response",
"def Datacenters(self):\n if not self._datacenters:\n dcs = self._get_objects(vim.Datacenter)\n for dc in dcs:\n self._datacenters[dc.name] = dc\n return self._datacenters",
"def delete(self, name, *args):\n\n if isinstance(name, string_types):\n name = dns.name.from_text(name, None)\n if len(args) == 0:\n self.find_rrset(self.authority, name, dns.rdataclass.ANY,\n dns.rdatatype.ANY, dns.rdatatype.NONE,\n dns.rdatatype.ANY, True, True)\n elif isinstance(args[0], dns.rdataset.Rdataset):\n for rds in args:\n for rd in rds:\n self._add_rr(name, 0, rd, dns.rdataclass.NONE)\n else:\n args = list(args)\n if isinstance(args[0], dns.rdata.Rdata):\n for rd in args:\n self._add_rr(name, 0, rd, dns.rdataclass.NONE)\n else:\n rdtype = args.pop(0)\n if isinstance(rdtype, string_types):\n rdtype = dns.rdatatype.from_text(rdtype)\n if len(args) == 0:\n self.find_rrset(self.authority, name,\n self.zone_rdclass, rdtype,\n dns.rdatatype.NONE,\n dns.rdataclass.ANY,\n True, True)\n else:\n for s in args:\n rd = dns.rdata.from_text(self.zone_rdclass, rdtype, s,\n self.origin)\n self._add_rr(name, 0, rd, dns.rdataclass.NONE)",
"def delete_controller(cls, args, config):\n # print \"MOLNSProvider.delete_provider(args={0}, config={1})\".format(args, config)\n if len(args) == 0:\n raise MOLNSException(\"USAGE: molns cluser delete name\")\n config.delete_object(name=args[0], kind='Controller')",
"def remove_cpds(self, *cpds):\n for cpd in cpds:\n if isinstance(cpd, str):\n cpd = self.get_cpds(cpd)\n self.cpds.remove(cpd)",
"def fusion_api_delete_fc_network(self, name=None, uri=None, api=None, headers=None):\n return self.fc_network.delete(name, uri, api, headers)"
] | [
"0.6081035",
"0.57693714",
"0.56088656",
"0.5605488",
"0.559625",
"0.55759764",
"0.55257326",
"0.5464747",
"0.54225147",
"0.541714",
"0.5382911",
"0.5354284",
"0.5338459",
"0.52975005",
"0.5288256",
"0.5257809",
"0.5253959",
"0.52303916",
"0.5221915",
"0.52050453",
"0.5193717",
"0.5193203",
"0.5160788",
"0.5160264",
"0.51595527",
"0.51399547",
"0.5130392",
"0.51203537",
"0.5115429",
"0.51146555"
] | 0.7384288 | 0 |
Adds a SAN Manager. [Arguments] | def fusion_api_add_san_manager(self, body, providerId=None, uri=None, api=None, headers=None):
return self.dm.create(body, providerId, uri, api, headers) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def add_manager(self, agent):\n with self.simulation_mutex:\n self.get(\"manager_agents\")[agent.name] = agent",
"def add_sap (self):\n raise NotImplementedError",
"def do_add(self, args):\n argument_list = args.split()\n if len(argument_list) < 1:\n self.__bad_arguments(\"add\")\n else:\n print \"Added \" + args + \".\"\n AssassinsManager.add_assassin(self.assassins_manager, args.split()[0])",
"def consul_register(self):\n self.log.debug(\"consul-register\")\n self.consul.agent.service.register(\n self.svc_name,\n address=self.this_host,\n check=consulate.models.agent.Check(\n name=\"qemu-process\",\n args=[\n \"/bin/sh\",\n \"-c\",\n \"test -e /proc/$(< /run/qemu.{}.pid )/mem || exit 2\".format(\n self.name\n ),\n ],\n interval=\"5s\",\n ),\n )",
"def add_sap (self, sap_obj=None, id=None, name=None, binding=None, sap=None,\n technology=None, delay=None, bandwidth=None, cost=None,\n controller=None, orchestrator=None, l2=None, l4=None,\n metadata=None):\n if sap_obj is None:\n sap_obj = NodeSAP(id=id, name=name, binding=binding, metadata=metadata)\n self.add_node(sap_obj)\n return sap_obj",
"def create_manager(self, username, tenancy):\n raise NotImplementedError",
"def create_manager(self, name, pos, dept):\n self.manager[dept.upper()].append(\n {\n 'name': name,\n 'pos': pos,\n 'dept': dept,\n 'senior': [],\n 'junior': [],\n 'trainee': []\n }\n )",
"def add(\n self,\n Enabled=None,\n InternalRootPathCost=None,\n Mac=None,\n PortPriority=None,\n Priority=None,\n UpdateRequired=None,\n VlanId=None,\n ):\n # type: (bool, int, str, int, str, bool, int) -> Vlan\n return self._create(self._map_locals(self._SDM_ATT_MAP, locals()))",
"def fusion_api_get_san_manager(self, uri=None, param='', api=None, headers=None):\n return self.dm.get(uri=uri, api=api, headers=headers, param=param)",
"def add_merchant(street, merchant):\r\n street.append(merchant)",
"def fusion_api_add_rack_manager(self, body, api=None, headers=None):\n return self.rackmanager.post(body, api, headers)",
"def add_AS(self, asn, prefixes):\n try:\n n = int(asn)\n except:\n error(\"Invalid AS number: \" + str(asn))\n self._check_prefix(prefixes)\n tmp = self._addRouter_v6('as'+str(n)+'r1', config=(RouterConfig, {\n 'daemons': [(BGP, {'address_families': (\n _bgp.AF_INET6(networks=prefixes),),\n 'advertisement_timer': 1,\n 'hold_time': 9})]}))\n new_as = AS(n, (tmp,))\n self.addOverlay(new_as)\n return new_as",
"def soundbox_addaliases(self, msg, args):\n sound = self.findSound(args[0])\n if sound == None:\n return \"Sound {} not found.\".format(args[0])\n sound.addAliases(args[:1])\n return \"Aliases were successfully added.\"",
"def add(self, auto_on=False):\n if self._addr in LogManager._managers_by_addr:\n writer.just_info(f'ERROR: Adding `{self._addr}` manager failed - address already exists')\n return False\n LogManager._managers_by_addr[self._addr] = self\n if auto_on:\n if address.should_be_on(self._addr) == '+':\n self.on()\n else:\n self.off()\n refresh(only_aunts=True)\n return True",
"def register_manager(self, update, context):\r\n new_manager_chat_id = update['message']['chat']['id']\r\n new_manager_name = update['message']['chat']['first_name']\r\n\r\n with open('managers.json') as obj:\r\n managers = json.load(obj)\r\n\r\n managers[new_manager_name] = new_manager_chat_id\r\n\r\n with open('managers.json', 'w') as obj:\r\n json.dump(managers, obj)\r\n\r\n context.bot.send_message(chat_id=update.message.chat_id, text=f'{new_manager_name} - {new_manager_chat_id}')",
"def __add_snmp_management(self, tag, address,\n port_num=\"161\", auth_string=\"community\"):\n manage = etree.SubElement(tag, \"management\", type=\"snmp\")\n addr = etree.SubElement(manage, \"address\")\n addr.text = address\n port = etree.SubElement(manage, \"port\")\n port.text = port_num\n auth = etree.SubElement(manage, \"auth\")\n auth.text = auth_string",
"def add(self, transport, address=None):\r\n\r\n if not address:\r\n address = str(uuid.uuid1())\r\n\r\n if address in self.recipients:\r\n self.recipients[address].add(transport)\r\n else:\r\n self.recipients[address] = RecipientManager(transport, address)\r\n\r\n return address",
"def SetManager(self, mgr):\r\n\r\n self.manager = mgr",
"def AddSystemSetup(self, ds):\n self.IsSystemSetup = True\n self.SystemSetup = ds",
"def fusion_api_edit_san_manager(self, body, uri, api=None, headers=None):\n return self.dm.update(body, uri, api, headers)",
"def manager():\n pass",
"def use_manager(manager_name, environment):\n assert manager_name in environment.managers, (\n 'Manager {selected} was not created by this test run. '\n 'Available managers are: {available}'.format(\n selected=manager_name,\n available=', '.join(environment.managers.keys()),\n )\n )\n\n manager = environment.managers[manager_name]\n\n environment.add_cleanup(\n environment.cfy.profiles.delete,\n kwargs={\n 'profile_name': manager['ip'],\n },\n )\n\n environment.cfy.profiles.use(\n ip=manager['ip'],\n username=manager['username'],\n password=manager['password'],\n rest_certificate=manager['certificate_path'],\n )",
"def add_manager(self, info):\n self.cursor.execute(\"\"\"SELECT COUNT(*) FROM managerpersonal WHERE phone=%s\"\"\", (int(info['phone']),))\n if not self.cursor.fetchone()[0]:\n self.cursor.execute(\"\"\"INSERT INTO managerpersonal VALUES (%s,%s)\"\"\", (int(info['phone']), info['address']))\n self.cursor.execute(\"\"\"INSERT INTO managercredentials (loginID, firstName, lastName, salt, pass_key, phone)\n VALUES (%s,%s,%s,%s,%s,%s)\"\"\", (info['loginID'], info['firstName'], info['lastName'], info['salt'],\n info['key'], int(info['phone'])))\n\n self.db.commit()\n self.cursor.execute(\"\"\"SELECT COUNT(*) FROM customercredentials WHERE loginID=%s\"\"\", (info['loginID'],))\n result = self.cursor.fetchone()\n if result[0]:\n self.cursor.execute(\"\"\"DELETE FROM customerCredentials WHERE loginID=%s\"\"\", (info['loginID'],))\n self.db.commit()\n self.cursor.execute(\"\"\"SELECT COUNT(*) FROM customerCredentials WHERE phone=%s\"\"\", (int(info['phone']),))\n phone_count = self.cursor.fetchone()\n if not phone_count[0]:\n self.cursor.execute(\"\"\"DELETE FROM customerPersonal WHERE phone=%s\"\"\", (int(info['phone']),))\n self.db.commit()\n self.update_book_scores()\n self.update_comment_usefulness()",
"def add_segm(*args):\n return _ida_segment.add_segm(*args)",
"def test_add_team_manager_to_team(self):\n pass",
"def manage_afterAdd(self, item, container) :\n item.manage_permission(Permissions.AddPortalContent,\n ['Manager'])\n item.manage_permission(Permissions.AccessContentsInformation,\n ['Member', 'Manager'])\n item.manage_permission(Permissions.View,\n ['Manager',])\n BaseTool.inheritedAttribute('manage_afterAdd')(self, item, container)",
"def assign_store_manager(user_name: str, new_store_manager_name: str, store_name: str):\n\n user_name = auth.get_username_from_hash(user_name)\n permission_handler.is_permmited_to(user_name, Action.ADD_MANAGER.value, store_name)\n permission_handler.assign_store_employee(action.MANAGER_INITIAL_PERMISSIONS,\n new_store_manager_name,\n store_name)\n user_handler.assign_store_employee(user_name, new_store_manager_name, store_name)",
"def add_member():\n client = RequestManager()\n client.set_method(\"POST\")\n client.set_endpoint(\"/accounts/{0}/memberships\".format(CONFIG_DATA['account_id']))\n body = {\"person_id\": CONFIG_DATA['member_id']}\n client.set_body(json.dumps(body))\n client.execute_request()",
"def register_souma(self):\n from nucleus.nucleus.models import Souma\n\n self.souma = Souma(id=uuid4().hex[:32])\n self.souma.generate_keys()\n payload = json.dumps({\"soumas\": [self.souma.export(include=[\"id\", \"crypt_public\", \"sign_public\"]), ]})\n path = \"/v0/soumas/\"\n return self.app.post(path, data=payload, content_type='application/json', base_url=base_url)",
"def create_alien(settings, screen, aliens):\n alien = Alien(settings, screen)\n aliens.add(alien)"
] | [
"0.5886489",
"0.54768324",
"0.54341",
"0.5215697",
"0.5150486",
"0.5121677",
"0.5074849",
"0.5028252",
"0.49931267",
"0.49460495",
"0.48038888",
"0.47869223",
"0.4780261",
"0.47727942",
"0.47576386",
"0.47391835",
"0.47355863",
"0.47277775",
"0.4703621",
"0.4696103",
"0.46918392",
"0.4669285",
"0.46618214",
"0.4656247",
"0.46547085",
"0.46307603",
"0.46077672",
"0.4599661",
"0.45848194",
"0.45796767"
] | 0.70052266 | 0 |
Updates a SAN Manager. [Arguments] | def fusion_api_edit_san_manager(self, body, uri, api=None, headers=None):
return self.dm.update(body, uri, api, headers) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def manager_update(self, manager, config):\n self.request('/v1.1/managers/configs/%s' % manager, 'POST', body=config)",
"def fusion_api_update_managed_san(self, body, uri, api=None, headers=None):\n return self.ms.update(body, uri, api, headers)",
"def update_podmanager(cls, podmanager_uuid, values):\n return cls.dbdriver.update_podmanager(podmanager_uuid, values)",
"def handle(self, *args, **opts):\n data = requests.get(\"http://pubmlst.org/data/profiles/saureus.txt\")\n if data.text is not None:\n column_names = []\n for line in data.text.split('\\n'):\n if line:\n cols = line.split('\\t')\n if cols[0] == 'ST':\n column_names = [c.lower() for c in cols]\n else:\n st, created = SequenceTypes.objects.update_or_create(\n **{\n column_names[0]: cols[0],\n column_names[1]: cols[1],\n column_names[2]: cols[2],\n column_names[3]: cols[3],\n column_names[4]: cols[4],\n column_names[5]: cols[5],\n column_names[6]: cols[6],\n column_names[7]: cols[7],\n }\n )\n print('Total STs: {0}'.format(SequenceTypes.objects.count()))\n\n # Email Admin with Update\n labrat = \"Staphopia's Friendly Robot <[email protected]>\"\n subject = '[Staphopia MLST Update] - MLST info has been updated.'\n message = 'Total STs: {0}'.format(\n SequenceTypes.objects.count()\n )\n recipients = ['[email protected]', '[email protected]']\n email = EmailMessage(subject, message, labrat, recipients)\n email.send(fail_silently=False)\n else:\n raise CommandError('Unable to retrieve updated STs, try again '\n 'later?')",
"async def stan(self, ctx, *args):\n if args:\n if args[0] == 'update':\n amount = len(self.artists)\n self.artists = []\n urls_to_scrape = ['https://kprofiles.com/k-pop-girl-groups/',\n 'https://kprofiles.com/k-pop-boy-groups/',\n 'https://kprofiles.com/co-ed-groups-profiles/',\n 'https://kprofiles.com/kpop-duets-profiles/',\n 'https://kprofiles.com/kpop-solo-singers/']\n for url in urls_to_scrape:\n self.artists += scrape_kprofiles(url)\n\n database.set_attr(\"data\", \"artists\", self.artists)\n\n await ctx.send(f\"Artist list succesfully updated, {len(self.artists) - amount} new entries, \"\n f\"{len(self.artists)} total entries\")\n self.logger.info(misolog.format_log(ctx, f\"artist list updated; {len(self.artists) - amount} new, \"\n f\"{len(self.artists)} total\"))\n return\n\n elif args[0] == 'clear':\n self.artists = []\n database.set_attr(\"data\", \"artists\", self.artists)\n await ctx.send(\"Artist list cleared\")\n self.logger.info(misolog.format_log(ctx, f\"artist list cleared\"))\n return\n\n if self.artists:\n artist = str(rd.choice(self.artists))\n await ctx.send('stan ' + artist)\n self.logger.info(misolog.format_log(ctx, f\"artist={artist}\"))\n else:\n await ctx.send(\"Error: artist list is empty, please use >stan update\")\n self.logger.warning(misolog.format_log(ctx, f\"artist list empty\"))",
"def edit_station(self):\n mac = request.params.get('mac', g.DEFAULT_MAC)\n log.debug('edit_station(%s)' % mac)\n\n # collect desired request params into dictionary\n # XXX need to do form validation here\n items = request.params\n\n stations = model.Session.query(model.Station)\n station = stations.filter(model.Station.mac == mac).first()\n if not station:\n station = model.Station(mac)\n model.Session.save(station)\n station.update(items)\n model.Session.update(station)\n model.Session.commit()\n redirect_to('/admin/dashboard')",
"def fusion_api_add_san_manager(self, body, providerId=None, uri=None, api=None, headers=None):\n return self.dm.create(body, providerId, uri, api, headers)",
"def do_update(cs, args):\n opts = {}\n opts['memory'] = args.memory\n opts['cpu'] = args.cpu\n opts['name'] = args.name\n if 'auto_heal' in args and args.auto_heal:\n opts['auto_heal'] = True\n if 'no_auto_heal' in args and args.no_auto_heal:\n opts['auto_heal'] = False\n opts = zun_utils.remove_null_parms(**opts)\n if not opts:\n raise exc.CommandError(\"You must update at least one property\")\n container = cs.containers.update(args.container, **opts)\n _show_container(container)",
"def test_update_nas_share(self):\n pass",
"def update(*args):",
"async def update(self) -> None:\n # pause logic\n if not self.running.is_set():\n self.add_to_output(\"Paused...\")\n await self.running.wait()\n\n # tell the user we are updating\n self.add_to_output(f\"Updating...\")\n # create ssh connection to miner\n try:\n conn = await self.get_connection(\"root\", \"admin\")\n # tell the user we are sending the update file\n self.add_to_output(\"Sending upgrade file...\")\n # send the update file\n await self.send_file(UPDATE_FILE_S9, \"/tmp/firmware.tar\")\n # install the update and collect the result\n result = await conn.run(f'sysupgrade /tmp/firmware.tar')\n self.add_to_output(result.stdout.strip())\n # tell the user the update completed\n self.add_to_output(f\"Update completed...\")\n except OSError:\n self.add_to_output(f\"Unknown error...\")",
"def update(\n self,\n Enabled=None,\n InternalRootPathCost=None,\n Mac=None,\n PortPriority=None,\n Priority=None,\n UpdateRequired=None,\n VlanId=None,\n ):\n # type: (bool, int, str, int, str, bool, int) -> Vlan\n return self._update(self._map_locals(self._SDM_ATT_MAP, locals()))",
"async def light_manager_update(request: Request, call_next):\n\n logger.debug(\"pre manager.update\")\n busylightapi.manager.update()\n logger.debug(\"post manager.update\")\n return await call_next(request)",
"def fusion_api_update_deployment_manager(self, body=None, uri=None, api=None, headers=None):\n return self.dep_mgr.update(body=body, uri=uri, api=api, headers=headers)",
"def update(args, config):\n print('Updates an HPC fleet with name \"{}\"'.format(args.fleet_name))",
"def update(self, args):\n pass",
"def test_update_nas_share_by_nas(self):\n pass",
"def main():\n parser = ArgumentParser(description=\"Update FCOE device udev persisted \"\n \"ordering.\")\n parser.add_argument(\"--prefix\", \"-p\", default=\"/target\",\n help=\"System files will be accessed under this \"\n \"prefix\")\n parser.add_argument(\"--sys-prefix\", \"-s\", default=\"/\",\n help=\"The /sys file system files will be accessed \"\n \"under this prefix\")\n args = parser.parse_args()\n NetworkDeviceManager(args.prefix, args.sys_prefix).process_system()",
"def Update(self, controller):\n pass",
"def update(self, system, environment_input):\n pass",
"def update(self):\n LOG.debug(\"Checking ADT Pulse cloud service for updates\")\n response = self.query(ADT_SUMMARY_URI, method='GET')\n if response.ok:\n self._update_sites(response.text)\n else:\n LOG.info(f\"Error returned from ADT Pulse service check: {response.status_code}\")",
"def update(self, code, *args, **kwargs):\n\n if not args and not kwargs:\n raise Exception('attributes for CostCenter are missing')\n\n attributes = args[0] if args else kwargs\n attributes = dict((k, v) for k, v in attributes.items())\n attributes.update({'service': self.SERVICE})\n _, _, cost_center = self.http_client.put(\"/costcenters/{code}\".format(code=code), body=attributes)\n return cost_center",
"def update_allocator(self, information, uri, timeout=-1):\n uri = uri + \"/allocator\"\n return self._helper.update(information, uri, timeout=timeout)",
"def update_system(self, system):\n try:\n rc, storage_system = self.request(\"storage-systems/%s\" % system[\"ssid\"], method=\"POST\", data=system[\"changes\"])\n except Exception as error:\n self.module.warn(\"Failed to update storage system. Array [%s]. Error [%s]\" % (system[\"ssid\"], to_native(error)))",
"async def put(self):\r\n data = await self.request.json()\r\n agent_uuid = data[\"agent_uuid\"]\r\n ip_address = data[\"ip_address\"]\r\n agent_obj = Agent.filter(Agent.uuid == agent_uuid).first()\r\n if not agent_obj:\r\n response_obj = {\"status\": \"failed\"}\r\n logger.error(\"No agent found!!!\")\r\n return web.Response(text=str(response_obj), status=500)\r\n try:\r\n Agent.update(ip_address=ip_address).where(Agent.uuid == agent_uuid)\r\n logger.info(\"Agent updated!!!\")\r\n return web.Response(text=\"successful\", status=200)\r\n except Exception as ex:\r\n response_obj = {\"status\": \"failed\"}\r\n error_message = str(ex)\r\n logger.error(error_message)\r\n return web.Response(text=str(response_obj), status=500)",
"def update(self, context, id_, update_data):\n run_playbook = update_data.get(\"run_playbook\", True)\n\n try:\n _validate_update(context, self.db_api, update_data, id_,\n eon_const.EON_RESOURCE_MANAGER)\n _resource_mgr_data = _make_response(\n self.db_api.get_resource_manager(context, id_))\n resource_mgr_type = _resource_mgr_data.get('type')\n resource_mgr_driver = driver.load_resource_mgr_driver(\n resource_mgr_type)\n\n if resource_mgr_type == eon_const.EON_RESOURCE_MGR_TYPE_VCENTER:\n name = update_data.get(\"name\")\n if name and name != _resource_mgr_data.get(\"name\"):\n msg = (_(\"vCenter name cannot be updated\"))\n raise exception.UpdateException(msg=msg)\n\n _resource_mgr_data_update = deepcopy(_resource_mgr_data)\n _resource_mgr_data_update.update(update_data)\n LOG.info(\"Updating resource manager : %s\",\n logging.mask_password(_resource_mgr_data_update))\n\n _is_creds_changed = self._is_creds_changed(\n _resource_mgr_data, _resource_mgr_data_update)\n if _is_creds_changed:\n LOG.debug(\"[%s] Validating the updated credentials/Ip \"\n \"address\" % id_)\n resource_mgr_driver.validate_update(_resource_mgr_data_update,\n _resource_mgr_data)\n # Gets the activated resources for the resource manager\n resources_data = self._get_resources(context,\n _resource_mgr_data_update,\n eon_const.EON_RESOURCE_STATE_ACTIVATED)\n\n resource_mgr_driver.update_vc_pass_through(\n context, _resource_mgr_data_update)\n if resources_data and run_playbook:\n self.db_api.update_resource_mgr_property(context,\n \"update_property\",\n id_, key=eon_const.RESOURCE_MGR_STATE_KEY,\n value=eon_const.EON_RESOURCE_MANAGER_STATE_UPDATING)\n eventlet.spawn_n(resource_mgr_driver.update,\n context, id_, resource_inventory=resources_data)\n\n self.db_api.update_resource_manager(context, id_,\n _resource_mgr_data_update)\n props = self.db_api.get_resource_mgr_properties(context,\n id_, key=eon_const.RESOURCE_MGR_STATE_KEY)\n return _make_response(_resource_mgr_data_update,\n property_list=props)\n\n except Exception as e:\n LOG.exception(e)\n msg = (_(\"Updating resource manager failed. Reason: '%s'\")\n % e.message)\n log_msg = ((\"Updating resource manager failed. Reason: '%s'\")\n % e.message)\n LOG.error(log_msg)\n raise exception.UpdateException(msg=msg)",
"def updateRecord(self, director):\n self._verify()\n keys = SIMCHAINS.keys()\n simtype = keys[int(self.simtype)]\n# servers = ServerList(director)\n sim = director.clerk.getQESimulations(id=self.id) # ?\n if sim:\n params = {\"sname\": self.sname,\n \"type\": simtype, # Type cannot be updated\n \"short_description\": self.description,\n \"label\": self.label,\n# \"serverid\": servers.id(self.server)\n }\n sim.setDirector(director)\n sim.updateRecord(params)\n return load(actor='material_simulations/espresso/sim-view', id=self.id) # Redirection to the page",
"def update_object(self, name: str) -> None:",
"def test_update_member(self):\r\n resource = 'member'\r\n cmd = member.UpdateMember(test_cli20.MyApp(sys.stdout), None)\r\n self._test_update_resource(resource, cmd, 'myid',\r\n ['myid', '--name', 'myname',\r\n '--tags', 'a', 'b'],\r\n {'name': 'myname', 'tags': ['a', 'b'], })",
"def fusion_api_update_hypervisor_manager(self, body=None, uri=None, api=None, headers=None):\n return self.hypervisor_mgr.update(body=body, uri=uri, api=api, headers=headers)"
] | [
"0.6040223",
"0.58976823",
"0.52411103",
"0.5226321",
"0.50675595",
"0.50528103",
"0.504731",
"0.5045723",
"0.5035978",
"0.5009271",
"0.4999926",
"0.4980094",
"0.49456164",
"0.4914688",
"0.48902246",
"0.4888154",
"0.4880528",
"0.48619416",
"0.48463622",
"0.48116904",
"0.47927755",
"0.4786432",
"0.4757484",
"0.4735724",
"0.4729163",
"0.4727779",
"0.47231397",
"0.46938658",
"0.4691348",
"0.46845496"
] | 0.6545071 | 0 |
Removes SAN Managers. [Arguments] | def fusion_api_remove_san_manager(self, name=None, uri=None, api=None, headers=None):
return self.dm.delete(name, uri, api, headers) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_004_delete(self):\n ret = svcmgr.main(argv=[\"delete\", \"-s\", SVCNAME, \"--local\"])\n assert ret == 0",
"def remove(self, packages):\n if packages:\n cmd = ['dnf', 'remove'] + list(packages)\n subprocess.Popen(cmd).wait()",
"def manager_remove(self, manager):\n self.request('/v1.1/managers/configs/%s' % manager, 'DELETE')",
"def stop_managers(self):\n self._inventory_mgr.stop()\n self._metric_mgr.stop()\n self._inventory_mgr.join(timeout=constants.DEFAULT_TIMEOUT)\n self._metric_mgr.join(timeout=constants.DEFAULT_TIMEOUT)",
"def remove_all(ctx):\n skale = ctx.obj['skale']\n cnt = 0\n for sname in get_all_schains_names(skale):\n skale.manager.delete_schain(sname)\n cnt += 1\n print(f'Success. {cnt} schains were removed')",
"def destroy_all(self):\n\t\twhile self.members:\n\t\t\tself.destroy(self.members[0], _no_min=True)",
"def cleanUp(self):\r\n for group in self._groups.values():\r\n group.destroy()\r\n\r\n assert len(self._groups) == 0\r\n\r\n for machine in self._machines.copy():\r\n self.destroyMachine(machine)\r\n\r\n assert len(self._machines) == 0\r\n\r\n self.unregisterIAASHook()",
"def cleanup():\n management.call_command('cleanup')",
"def clear_managers():\n\n for manager in proxies.values():\n manager.close()\n proxies.clear()",
"def remove():\n run('pew rm {0}'.format(package_name()))",
"def remove_store_manager(user_name: str, store_manager_name: str, store_name: str):\n\n user_name = auth.get_username_from_hash(user_name)\n permission_handler.is_permmited_to(user_name, Action.REMOVE_MANAGER.value, store_name)\n permission_handler.is_working_in_store(store_manager_name, store_name)\n to_remove: list = user_handler.remove_employee(user_name, store_manager_name, store_name)\n permission_handler.remove_employee(to_remove, store_name)\n for store_employee_name in to_remove:\n publisher.send_remove_employee_msg(\n f\"You are no longer an employee in {store_name} you have been removed by {user_name}\",\n store_employee_name)\n try:\n publisher.unsubscribe(store_employee_name, store_name)\n except:\n continue",
"def removeMySims(self):\n for sim in self.sims:\n try:\n sim.destroy()\n except:\n sim.removeNode()",
"def remove_socks():\n subprocess.run('sudo rm /tmp/*.sock -f', shell=True)",
"def remove_socks():\n subprocess.run('sudo rm /tmp/*.sock -f', shell=True)",
"def test_destroy_nas_share(self):\n pass",
"def delete_provider(cls, args, config):\n # print \"MOLNSProvider.delete_provider(args={0}, config={1})\".format(args, config)\n if len(args) == 0:\n print \"USAGE: molns provider delete name\"\n return\n config.delete_object(name=args[0], kind='Provider')",
"def test_destroy_nas_share_by_nas(self):\n pass",
"def test_destroy_nas_share_by_pool(self):\n pass",
"def remove(ctx, schain_name):\n skale = ctx.obj['skale']\n skale.manager.delete_schain(schain_name, wait_for=True,\n gas_price=4500000000)\n print(f'sChain {schain_name} removed!')",
"def fusion_api_delete_deployment_manager(self, name=None, uri=None, api=None, headers=None):\n return self.dep_mgr.delete(name=name, uri=uri, api=api, headers=headers)",
"def destroy_all(self):\n for name in self.app.config['SIMPLE_DOMAINS']:\n self.connection.delete_domain(name)",
"def cleanFiles(a_file_list):\n for entry in a_file_list:\n cmd = 'sudo rm ' + entry\n os.system(cmd)",
"def demote(name):\r\n try:\r\n if name in man:\r\n man.remove(name)\r\n off.append(name)\r\n off.sort()\r\n else:\r\n print(f\"{name} cannot be demoted from a manager as they are not in the personnel list\")\r\n\r\n except TypeError:\r\n print(\"Error: Call with strings only\")",
"def do_command(self, args):\n vendorops = dbops.Vendors()\n vendorops.delete(args)",
"def _removeOcean(self):\r\n\t\tnodesToClean = [CONST.OCEANDISPSHADER, CONST.OCEANANIMSHADER, CONST.OCEAN_ANIM_PREVIEWPLANENAME]\r\n\t\tfor eachNode in nodesToClean:\r\n\t\t\ttry:\r\n\t\t\t\tcmds.delete(each)\r\n\t\t\texcept:\r\n\t\t\t\tpass",
"def destroy(self):\n\n # destroys virtual machines\n for vm in self.vms:\n try:\n vm.name = \"%s_%s\" % (self.resource_pool, vm.name)\n vm.destroy_with_files(manager=self.manager, host_address=self.host_address,\n host_user=self.host_user,\n host_password=self.host_password)\n except Manager.ExistenceException:\n self.logger.info(\"Couldn't find '%s', probably it already removed\" % vm.name)\n except:\n self.logger.error(\"Error with destroying VM '%s'\" % vm.name)\n\n sw_name = None\n\n # destroys isolated networks with vSwitches\n for net in self.networks:\n try:\n if net.isolated:\n sw_name = \"%s_%s_%s\" % (self.config.SWITCH_PREFIX, self.resource_pool, net.name)\n switch = Switch(sw_name)\n switch.destroy(self.manager, self.host_name)\n except Manager.ExistenceException:\n pass\n except:\n self.logger.error(\"Error with destroying switch '%s'\" % sw_name)\n\n # destroys common vSwitch if exist\n try:\n shared_sw_name = '%s_%s' % (self.config.SWITCH_PREFIX, self.resource_pool)\n switch = Switch(shared_sw_name)\n switch.destroy(self.manager, self.host_name)\n except Manager.ExistenceException:\n pass\n\n # destroys resource pool\n try:\n ResourcePool(self.resource_pool).destroy(self.manager, with_vms=True)\n except Manager.ExistenceException:\n pass\n except Exception as e:\n self.logger.error(e.message)\n raise e",
"def test_remove_team_manager_from_team(self):\n pass",
"def __del__(self):\n for client in self.client_list:\n del client\n for server in self.server_list:\n del server",
"def main_remove(args):\n return remove_command(args.directory, args.name)",
"def rm_network(c):\n print('Stopping local test network and removing containers')\n with c.cd('images'):\n c.run('sudo docker-compose down -v', hide='stderr')\n\n c.run('sudo rm -rf volumes/stellar-core/opt/stellar-core/buckets')\n c.run('sudo rm -f volumes/stellar-core/opt/stellar-core/*.log')\n c.run('sudo rm -rf volumes/stellar-core/tmp')"
] | [
"0.57020456",
"0.56861764",
"0.5684826",
"0.56704986",
"0.5632607",
"0.5630097",
"0.5622593",
"0.5586767",
"0.5557375",
"0.55300087",
"0.5490273",
"0.5488082",
"0.5471423",
"0.5471423",
"0.5450497",
"0.54431635",
"0.5434717",
"0.5434458",
"0.53943706",
"0.5377902",
"0.5369805",
"0.535988",
"0.535756",
"0.5347011",
"0.5338528",
"0.5337644",
"0.5336029",
"0.53192973",
"0.5317763",
"0.5267082"
] | 0.68998647 | 0 |
Gets a default or paginated collection of SAN Managers. [Arguments] | def fusion_api_get_san_manager(self, uri=None, param='', api=None, headers=None):
return self.dm.get(uri=uri, api=api, headers=headers, param=param) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_managers():\n return {'managers': get_users('managers')}",
"def manager(model):\n return model.objects",
"def list_podmanager(cls):\n return cls.dbdriver.list_podmanager()",
"def manager_active_list(self):\n _, body = self.request('/v1.1/managers/active', 'GET')\n return body",
"def get_all(self, context, type_):\n types = None\n if type_ and isinstance(type_, basestring):\n types = type_.strip(\",\").split(\",\")\n\n try:\n db_resource_mgrs_data = self.db_api.get_all_resource_managers(\n context, types=types)\n\n _resource_mgrs_data = []\n for db_resource_mgr_data in db_resource_mgrs_data:\n _resource_mgrs_data.append(_make_response(\n db_resource_mgr_data))\n except Exception as e:\n msg = (\"Error retrieving the 'resource managers' reason : %s\"\n % e.message)\n LOG.exception(msg)\n raise exception.RetrieveException(e.message)\n return _resource_mgrs_data",
"def get_managers_list(self):\n try:\n role_id = [x[0] for x in self.db_handler.get_roles_list() if x[1] == 'Менеджер'][0]\n staff_by_role = self.db_handler.get_all_staff_by_role_id(role_id)\n\n self.logger.write_to_log('managers list got', 'model')\n\n return staff_by_role\n except Exception as err:\n method_name = sys._getframe().f_code.co_name\n\n self.logger.write_to_log('exception', 'model')\n self.logger.write_to_err_log(f'exception in method {method_name} - {err}', 'model')",
"def manager_configs_list(self):\n _, body = self.request('/v1.1/managers/configs', 'GET')\n return body",
"def normalize_manager(self, manager, container):\n if not manager:\n return None\n\n if not isinstance(manager, list):\n manager = [manager]\n\n try:\n container_dn = DN(container, api.env.basedn)\n for i, mgr in enumerate(manager):\n if isinstance(mgr, DN) and mgr.endswith(container_dn):\n continue\n entry_attrs = self.backend.find_entry_by_attr(\n self.primary_key.name, mgr, self.object_class, [''],\n container_dn\n )\n manager[i] = entry_attrs.dn\n except errors.NotFound:\n raise errors.NotFound(reason=_('manager %(manager)s not found') % dict(manager=mgr))\n\n return manager",
"def list(self, request):\n directors = Director.objects.all()\n\n # Note the additional `many=True` argument to the\n # serializer. It's needed when you are serializing\n # a list of objects instead of a single object.\n serializer = DirectorSerializer(\n directors, many=True, context={'request': request})\n return Response(serializer.data)",
"def as_manager(cls):\n manager = DefaultManager.from_queryset(cls)()\n manager._built_with_as_manager = True\n return manager",
"def get_for_admin(self, admin):\n if admin.is_superuser:\n return self.get_queryset()\n return self.get_queryset().filter(owners__user=admin)",
"def get_system_managers(only_name: bool = False) -> list[str]:\n\tHasRole = DocType(\"Has Role\")\n\tUser = DocType(\"User\")\n\n\tif only_name:\n\t\tfields = [User.name]\n\telse:\n\t\tfields = [User.full_name, User.name]\n\n\tsystem_managers = (\n\t\tfrappe.qb.from_(User)\n\t\t.join(HasRole)\n\t\t.on(HasRole.parent == User.name)\n\t\t.where(\n\t\t\t(HasRole.parenttype == \"User\")\n\t\t\t& (User.enabled == 1)\n\t\t\t& (HasRole.role == \"System Manager\")\n\t\t\t& (User.docstatus < 2)\n\t\t\t& (User.name.notin(frappe.STANDARD_USERS))\n\t\t)\n\t\t.select(*fields)\n\t\t.orderby(User.creation, order=Order.desc)\n\t\t.run(as_dict=True)\n\t)\n\n\tif only_name:\n\t\treturn [p.name for p in system_managers]\n\telse:\n\t\treturn [formataddr((p.full_name, p.name)) for p in system_managers]",
"def get_admins(name):\n obj = DataService.objects(name=name).first()\n if obj is None:\n return []\n return list(obj.admins)",
"def GetManager(self):\r\n\r\n return self.manager",
"def get_objects(self):\n has_more = True\n marker = None\n while has_more:\n servers = openstack_clients.get_novaclient().servers.list(\n limit=LIST_LIMIT,\n search_opts={'all_tenants': True},\n marker=marker\n )\n\n if not servers:\n # Definitely no more; break straight away\n break\n\n # servers.list always returns a list so we can grab the last id\n has_more = len(servers) == LIST_LIMIT\n marker = servers[-1].id\n\n for server in servers:\n yield server",
"def list_servers(self, request, paginate):\n raise NotImplementedError",
"def get_all(self):\n\n servers = self._scoped_servers()\n servers = [{u'id': x.id, u'name': x.name} for x in servers]\n return self.format_collection(servers)",
"def get_for_admin(self, admin):\n if admin.is_superuser:\n return self.get_query_set()\n return self.get_query_set().filter(owners__user=admin)",
"def get_manageable_volumes(self, cinder_volumes, marker, limit, offset,\n sort_keys, sort_dirs):\n LOG.debug(\"Listing manageable Datera volumes\")\n app_instances = self._issue_api_request(URL_TEMPLATES['ai']()).values()\n\n results = []\n\n cinder_volume_ids = [vol['id'] for vol in cinder_volumes]\n\n for ai in app_instances:\n ai_name = ai['name']\n reference = None\n size = None\n safe_to_manage = False\n reason_not_safe = None\n cinder_id = None\n extra_info = None\n if re.match(UUID4_RE, ai_name):\n cinder_id = ai_name.lstrip(OS_PREFIX)\n if (not cinder_id and\n ai_name.lstrip(OS_PREFIX) not in cinder_volume_ids):\n safe_to_manage = self._is_manageable(ai)\n if safe_to_manage:\n si = list(ai['storage_instances'].values())[0]\n si_name = si['name']\n vol = list(si['volumes'].values())[0]\n vol_name = vol['name']\n size = vol['size']\n reference = {\"source-name\": \"{}:{}:{}\".format(\n ai_name, si_name, vol_name)}\n\n results.append({\n 'reference': reference,\n 'size': size,\n 'safe_to_manage': safe_to_manage,\n 'reason_not_safe': reason_not_safe,\n 'cinder_id': cinder_id,\n 'extra_info': extra_info})\n\n page_results = volutils.paginate_entries_list(\n results, marker, limit, offset, sort_keys, sort_dirs)\n\n return page_results",
"def manager_agents(self):\n return self.get(\"manager_agents\")",
"def index_queryset(self, using=None):\n return self.get_model()._default_manager.all()",
"def add_managers(self, managers: Union[List[Any], Tuple[Any]]):\n for m in self._flatten(managers):\n self.apply_configuration_defaults(m)\n self._managers.add(m)",
"def get_instance_group_managers(self, zone):\n response = self.call_api('/zones/%s/instanceGroupManagers' % zone)\n return {manager['name']: manager for manager in response.get('items', [])}",
"def getAllFlightServiceManagers(self):\n staffObject_list = self.mainObject.getStaffIO()\n flightServiceManagerObject_list = []\n for staffMember in staffObject_list:\n if staffMember.getRank() == 'flight service manager':\n flightServiceManagerObject_list.append(staffMember)\n return flightServiceManagerObject_list",
"def get_collection_manager(self, *args, **kwargs):\n return CollectionManager(self, *args, **kwargs)",
"def top_sources_male(args: Dict[str, Any]) -> List[object]:\n query = [\n {\n \"$match\": {\n \"body\": {\"$ne\": \"\"},\n \"quotesUpdated\": {\"$exists\": True},\n \"outlet\": {\"$in\": args[\"outlets\"]},\n \"publishedAt\": {\n \"$gte\": args[\"begin_date\"],\n \"$lt\": args[\"end_date\"] + timedelta(days=1),\n },\n }\n },\n {\"$project\": {\"outlet\": 1.0, \"sourcesMale\": 1.0}},\n {\"$unwind\": {\"path\": \"$sourcesMale\", \"preserveNullAndEmptyArrays\": False}},\n {\"$group\": {\"_id\": \"$sourcesMale\", \"count\": {\"$sum\": 1.0}}},\n {\"$sort\": {\"count\": args[\"sort\"]}},\n {\"$limit\": args[\"limit\"]},\n ]\n return query",
"async def get(\n self, administrator: Optional[bool] = None, term: Optional[str] = None\n ) -> r200[ListAdministratorResponse]:\n\n url_query = self.request.query\n\n try:\n page = int(url_query[\"page\"])\n except (KeyError, ValueError):\n page = 1\n\n try:\n per_page = int(url_query[\"per_page\"])\n except (KeyError, ValueError):\n per_page = 25\n\n return json_response(\n await get_data_from_req(self.request).administrators.find(\n page,\n per_page,\n administrator,\n term,\n )\n )",
"def get_personnel():\r\n if len(man) == 0:\r\n print(\"There are no managers\")\r\n else:\r\n for i in man:\r\n print(str(i))",
"def ls():\n return dynamodb.ls(OrganizationModel)",
"def male_author_sources(args: Dict[str, Any]) -> List[object]:\n query = [\n {\n \"$match\": {\n \"body\": {\"$ne\": \"\"},\n \"quotesUpdated\": {\"$exists\": True},\n \"outlet\": {\"$in\": args[\"outlets\"]},\n \"publishedAt\": {\n \"$gte\": args[\"begin_date\"],\n \"$lt\": args[\"end_date\"] + timedelta(days=1),\n },\n \"authorsFemaleCount\": 0,\n \"authorsMaleCount\": {\"$gt\": 0},\n }\n },\n {\n \"$project\": {\n \"_id\": 1,\n \"outlet\": 1,\n \"authors\": 1,\n \"authorsMale\": 1,\n \"authorsFemale\": 1,\n \"authorsUnknown\": 1,\n \"sourcesMaleCount\": 1,\n \"sourcesFemaleCount\": 1,\n \"sourcesUnknownCount\": 1,\n }\n },\n {\n \"$group\": {\n \"_id\": \"$outlet\",\n \"totalArticles\": {\"$sum\": 1},\n \"totalMaleSources\": {\"$sum\": \"$sourcesMaleCount\"},\n \"totalFemaleSources\": {\"$sum\": \"$sourcesFemaleCount\"},\n \"totalUnknownSources\": {\"$sum\": \"$sourcesUnknownCount\"},\n }\n },\n ]\n return query"
] | [
"0.6058825",
"0.5749238",
"0.5650289",
"0.5557747",
"0.5509893",
"0.54246277",
"0.53360915",
"0.53125733",
"0.5299747",
"0.5293385",
"0.521855",
"0.51953477",
"0.5093463",
"0.50840014",
"0.50631934",
"0.5049018",
"0.50417084",
"0.50401676",
"0.5027836",
"0.49998373",
"0.49812573",
"0.4967385",
"0.49494222",
"0.49340528",
"0.49329048",
"0.49212846",
"0.4920206",
"0.49033797",
"0.48740497",
"0.48704275"
] | 0.6093169 | 0 |
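Usage sketch for the `fusion_api_get_san_manager` document in the record above. This is a minimal sketch, assuming a client instance (called `fusion_client` here) exposing that method and a OneView-style collection payload with a `members` key; both names are assumptions not shown in the record:

    # Hypothetical client object; the enclosing class is not part of this record.
    response = fusion_client.fusion_api_get_san_manager(param='?start=0&count=25')
    for manager in response.get('members', []):  # 'members' key is an assumption
        print(manager.get('name'), manager.get('uri'))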
Gets a default or paginated collection of Drive Enclosures. [Arguments] | def fusion_api_get_drive_enclosure(self, uri=None, param='', api=None, headers=None):
return self.drive_enclosure.get(uri=uri, api=api, headers=headers, param=param) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def fusion_api_get_sas_li_logical_drive_enclosures(self, uri=None, param='', api=None, headers=None):\n param = \"/logical-drive-enclosures%s\" % param\n return self.sasli.get(uri=uri, param=param, api=api, headers=headers)",
"def fusion_api_get_enclosures(self, uri=None, param='', api=None, headers=None):\n return self.enclosure.get(uri=uri, param=param, api=api, headers=headers)",
"def get_enclosures(self, controller='all'):\n output = self.run_json('/c{}/eall show all'.format(controller))\n try:\n controllers = output['Controllers']\n except KeyError:\n raise StorcliException('Output is missing Controllers segment')\n\n return [c['Response Data'] for c in controllers]",
"def getEnclosures(self):\n return self.enclosures",
"def cli(ctx):\n return ctx.gi.cannedkeys.get_keys()",
"def get_default_scopes(self, client_id, request, *args, **kwargs):\n request.client = request.client or self._clientgetter(client_id)\n scopes = request.client.default_scopes\n log.debug('Found default scopes %r', scopes)\n return scopes",
"def fusion_api_get_enclosure_groups(self, uri=None, param='', api=None, headers=None):\n return self.enclosure_group.get(uri=uri, api=api, headers=headers, param=param)",
"def get_default_scopes(self, application=None, request=None, *args, **kwargs):\n # at the moment we assume that the default scopes are all those availables\n return list(ProtectedCapability.objects.filter(default=True).values_list('slug', flat=True))",
"def drives(self):\n if \"drives\" in self._prop_dict:\n return DrivesCollectionPage(self._prop_dict[\"drives\"])\n else:\n return None",
"def Keys(self) -> NameObjectCollectionBase.KeysCollection:",
"def get_api_keys(owner):\n api.get_all(owner)",
"def drives(self):\n return self.properties.get('drives',\n DriveCollection(self.context, ResourcePath(\"drives\", self.resource_path)))",
"def getAcdcs(url, requests):\n acdcs = []\n for request in requests:\n name=request['id']\n #if a wrong or weird name\n if len(request['key'])<3:\n print request\n continue\n if 'ACDC' not in name:\n continue\n status=request['key']\n #only completed requests\n if status != 'completed':\n continue\n #requestType=request['key'][2]\n #only acdcs\n #if requestType != 'Resubmission':\n # continue\n acdcs.append(name) \n return acdcs",
"def get_all_disks():\n return DISKS_API.get(abs_link=False)",
"def get_drive_enclosure_information(self):\n\n for device in self._devices:\n if not device['Device Type'].startswith(\"disk\"):\n continue\n enc_device, device_slot, element_index = self.get_device_slot(device['SAS Address'])\n device['Enclosure Device'] = enc_device\n device['Enclosure Slot'] = device_slot\n device['Slot Description'] = self.get_array_desc_text(enc_device, element_index)",
"def get_bucketlist():\n pass",
"def bdev_ocf_get_bdevs(client, name=None):\n params = None\n if name:\n params = {'name': name}\n return client.call('bdev_ocf_get_bdevs', params)",
"async def get_keys(self, collection):\n raise NotImplementedError",
"def boxSearch(client):\n\tfiles = []\n\titems_iter = client.folder(folder_id=0).get_items(limit=100, offset=0)\n\tfor x in items_iter:\n\t\tfiles.append(x)\n\treturn files",
"def ListScopes(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')",
"def get_key_list(self, email=\"\"):\n\t\tif email:\n\t\t\twhere_clause = \" where email = '%s'\" % email\n\t\telse:\n\t\t\twhere_clause = \"\"\n\n\t\treturn self.app.db.query(\n\t\t\t\"\"\"\n\t\t\tselect\n\t\t\t\tapi_key,\n\t\t\t\towner,\n\t\t\t\tapp_name,\n\t\t\t\temail,\n\t\t\t\turl,\n\t\t\t\tcreated\n\t\t\tfrom\n\t\t\t\tapi_keys\n\t\t\t%s\n\t\t\t\"\"\" % where_clause)",
"def main():\n store = file.Storage('token.json')\n creds = store.get()\n if not creds or creds.invalid:\n flow = client.flow_from_clientsecrets('credentials.json', SCOPES)\n creds = tools.run_flow(flow, store)\n service = build('drive', 'v2', http=creds.authorize(Http()))\n files_list = print_files_in_folder(service,'1Xy6wJozhJwLcsKKfNYASxDxBBbEHZoNy')\n if len(files_list) > 0:\n download_files(service,files_list)\n else:\n print \"No files to download\"\n\n # Call the Drive v3 API\n # results = service.files().list(q=\"mimeType='application/vnd.google-apps.folder' and 'AviPics' in parents\",\n # pageSize=10, fields=\"nextPageToken, files(id, name)\").execute()\n # results = service.children.list(folderId='1Xy6wJozhJwLcsKKfNYASxDxBBbEHZoNy')\n # items = results.get('files', [])\n\n # if not items:\n # print('No files found.')\n # else:\n # print('Files:')\n # for item in items:\n # print('{0} ({1})'.format(item['name'], item['id']))",
"def get_feed_collection(client: Client):\n collections = client.get_services()\n command_results = CommandResults(\n outputs_prefix='CybleIntel.collection',\n outputs_key_field='names',\n outputs=collections\n )\n return command_results",
"def google_drive_authenticate(self):",
"def get_collections(self): # real signature unknown; restored from __doc__\n return []",
"def get_collections(client):\n\n try:\n collections = client.collections()\n for collection in collections:\n # print(f'Collection \"{collection.name}\" has ID {collection.id}')\n return collection.name, collection.id\n\n except Exception as e:\n print(f\"An error has occurred: {e}\")\n return None",
"def keys(self):\n return DeviceKeyCollection(client=self)",
"def list(self, path, filename=None, start=None, stop=None, recursive=False, directories=False):\n storageScheme, keys = self.getkeys(\n path, filename=filename, directories=directories, recursive=recursive)\n keys = [storageScheme + \":///\" + key.bucket.name + \"/\" + key.name for key in keys]\n keys.sort()\n keys = select(keys, start, stop)\n return keys",
"def get_acls():\n return config.get_cfg_storage(ID_ACL)",
"def enclosures(\n primary_barriers,\n limit=None,\n additional_barriers=None,\n enclosure_id=\"eID\",\n clip=False,\n):\n if limit is not None:\n if isinstance(limit, BaseGeometry):\n limit = gpd.GeoSeries([limit])\n if limit.geom_type.isin([\"Polygon\", \"MultiPolygon\"]).any():\n limit_b = limit.boundary\n else:\n limit_b = limit\n barriers = pd.concat([primary_barriers.geometry, limit_b.geometry])\n else:\n barriers = primary_barriers\n unioned = barriers.unary_union\n polygons = polygonize(unioned)\n enclosures = gpd.GeoSeries(list(polygons), crs=primary_barriers.crs)\n\n if additional_barriers is not None:\n if not isinstance(additional_barriers, list):\n raise TypeError(\n \"`additional_barriers` expects a list of GeoDataFrames or GeoSeries.\"\n f\"Got {type(additional_barriers)}.\"\n )\n additional = pd.concat([gdf.geometry for gdf in additional_barriers])\n\n inp, res = enclosures.sindex.query_bulk(\n additional.geometry, predicate=\"intersects\"\n )\n unique = np.unique(res)\n\n new = []\n\n for i in unique:\n poly = enclosures.values.data[i] # get enclosure polygon\n crossing = inp[res == i] # get relevant additional barriers\n buf = pygeos.buffer(poly, 0.01) # to avoid floating point errors\n crossing_ins = pygeos.intersection(\n buf, additional.values.data[crossing]\n ) # keeping only parts of additional barriers within polygon\n union = pygeos.union_all(\n np.append(crossing_ins, pygeos.boundary(poly))\n ) # union\n polygons = np.array(\n list(polygonize(_pygeos_to_shapely(union)))\n ) # polygonize\n within = pygeos.covered_by(\n pygeos.from_shapely(polygons), buf\n ) # keep only those within original polygon\n new += list(polygons[within])\n\n final_enclosures = (\n gpd.GeoSeries(enclosures)\n .drop(unique)\n .append(gpd.GeoSeries(new))\n .reset_index(drop=True)\n ).set_crs(primary_barriers.crs)\n\n final_enclosures = gpd.GeoDataFrame(\n {enclosure_id: range(len(final_enclosures))}, geometry=final_enclosures\n )\n\n else:\n final_enclosures = gpd.GeoDataFrame(\n {enclosure_id: range(len(enclosures))}, geometry=enclosures\n )\n\n if clip and limit is not None:\n if not limit.geom_type.isin([\"Polygon\", \"MultiPolygon\"]).all():\n raise TypeError(\n \"`limit` requires a GeoDataFrame or GeoSeries with Polygon or \"\n \"MultiPolygon geometry to be used with clip=True.\"\n )\n _, encl_index = final_enclosures.representative_point().sindex.query_bulk(\n limit.geometry, predicate=\"contains\"\n )\n keep = np.unique(encl_index)\n return final_enclosures.iloc[keep]\n\n return final_enclosures"
] | [
"0.64401037",
"0.575162",
"0.55596346",
"0.55056226",
"0.5100261",
"0.50478417",
"0.5040822",
"0.49720213",
"0.493435",
"0.49305773",
"0.49068353",
"0.4835804",
"0.47633725",
"0.47319296",
"0.47230875",
"0.47124046",
"0.46805602",
"0.46699175",
"0.4662321",
"0.4651771",
"0.46509987",
"0.4641518",
"0.4633453",
"0.46130332",
"0.45796302",
"0.45706475",
"0.45652434",
"0.45380175",
"0.45314324",
"0.45298874"
] | 0.6370821 | 1 |
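Usage sketch for the `fusion_api_get_drive_enclosure` document in the record above, under the same assumptions (hypothetical `fusion_client` instance; the URI format is illustrative):

    # Fetch the full collection, or a single enclosure when a URI is given.
    all_enclosures = fusion_client.fusion_api_get_drive_enclosure(param='?count=50')
    one_enclosure = fusion_client.fusion_api_get_drive_enclosure(
        uri='/rest/drive-enclosures/<id>')  # '<id>' is a placeholder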
Issues a PATCH request to a drive enclosure. See RESTAPI docs for valid request bodies [Arguments] | def fusion_api_patch_drive_enclosure(self, body, uri, api=None, headers=None):
return self.drive_enclosure.patch(body, uri, api, headers) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def patch(self, *args, **kwargs):\n self.request(\"patch\", *args, **kwargs)",
"def client_patch(self, path, data=None, content_type=client.MULTIPART_CONTENT, follow=False, **extra):\r\n\r\n data = data or {}\r\n response = super(client.Client, self).patch(path, data=data, content_type=content_type, **extra)\r\n if follow:\r\n response = self._handle_redirects(response, **extra)\r\n return response",
"def fusion_api_patch_enclosure(self, body, uri, api=None, headers=None, etag=None):\n return self.enclosure.patch(body, uri, api, headers, etag)",
"def patch(self, url, body=None, headers=None):\n return self._request('PATCH', url, body, headers)",
"def handle_patch(self, api, command):\n return self._make_request_from_command('PATCH', command)",
"def fusion_api_patch_fabric(self, uri, body, api=None, headers=None):\n return self.fabric.patch(uri, body, api, headers)",
"def patch(self, url_or_path, *args, **kwargs):\n return self.request.patch(url_or_path, *args, **kwargs).json()",
"def patch(self, endpoint, params=None, data=None):\n params = params or dict()\n data = data or dict()\n return self.request(verb=requests.patch, address=self.project_address + endpoint,\n params=params, data=data)",
"def httpPatch(self, url='', data='', params={}, headers={}):\n\n return self.httpRequest('PATCH', url, data, params, headers)",
"def patch(self, path, body):\n url = urljoin(self.api_endpoint, path)\n response = requests.patch(url, json=body, headers=self.headers)\n return self._check_response(response)",
"def patch(self, api_path, *args, **kwargs):\n\n\t\treturn self._do_operation(u'patch', api_path, *args, **kwargs)",
"def patch(self, url, params='', headers=None, extra_environ=None,\n status=None, upload_files=None, expect_errors=False,\n content_type=None):\n return self._gen_request(RequestMethods.PATCH,\n url, params=params, headers=headers,\n extra_environ=extra_environ, status=status,\n upload_files=upload_files,\n expect_errors=expect_errors,\n content_type=content_type)",
"def simulate_patch(app, path, **kwargs) -> _ResultBase:\n return simulate_request(app, 'PATCH', path, **kwargs)",
"def fusion_api_patch_interconnect(self, body, uri, param='', api=None, headers=None):\n return self.ic.patch(body=body, uri=uri, api=api, headers=headers, param=param)",
"def patch(self, endpoint, content=None, params=None):\n\t\treturn self._call(\"PATCH\", endpoint, content, params)",
"def simulate_patch(self, path='/', **kwargs):\n return self.simulate_request('PATCH', path, **kwargs)",
"def Patch(self, request, global_params=None):\n config = self.GetMethodConfig('Patch')\n return self._RunMethod(\n config, request, global_params=global_params)",
"def Patch(self, request, global_params=None):\n config = self.GetMethodConfig('Patch')\n return self._RunMethod(\n config, request, global_params=global_params)",
"def Patch(self, request, global_params=None):\n config = self.GetMethodConfig('Patch')\n return self._RunMethod(\n config, request, global_params=global_params)",
"def Patch(self, request, global_params=None):\n config = self.GetMethodConfig('Patch')\n return self._RunMethod(\n config, request, global_params=global_params)",
"def Patch(self, request, global_params=None):\n config = self.GetMethodConfig('Patch')\n return self._RunMethod(\n config, request, global_params=global_params)",
"def Patch(self, request, global_params=None):\n config = self.GetMethodConfig('Patch')\n return self._RunMethod(\n config, request, global_params=global_params)",
"def Patch(self, request, global_params=None):\n config = self.GetMethodConfig('Patch')\n return self._RunMethod(\n config, request, global_params=global_params)",
"def Patch(self, request, global_params=None):\n config = self.GetMethodConfig('Patch')\n return self._RunMethod(\n config, request, global_params=global_params)",
"def Patch(self, request, global_params=None):\n config = self.GetMethodConfig('Patch')\n return self._RunMethod(\n config, request, global_params=global_params)",
"def Patch(self, request, global_params=None):\n config = self.GetMethodConfig('Patch')\n return self._RunMethod(\n config, request, global_params=global_params)",
"def Patch(self, request, global_params=None):\n config = self.GetMethodConfig('Patch')\n return self._RunMethod(\n config, request, global_params=global_params)",
"def Patch(self, request, global_params=None):\n config = self.GetMethodConfig('Patch')\n return self._RunMethod(\n config, request, global_params=global_params)",
"def Patch(self, request, global_params=None):\n config = self.GetMethodConfig('Patch')\n return self._RunMethod(\n config, request, global_params=global_params)",
"def Patch(self, request, global_params=None):\n config = self.GetMethodConfig('Patch')\n return self._RunMethod(\n config, request, global_params=global_params)"
] | [
"0.69250757",
"0.6663205",
"0.6662519",
"0.664325",
"0.65036833",
"0.6449954",
"0.64494",
"0.6431868",
"0.6422519",
"0.6397085",
"0.639598",
"0.63724047",
"0.63440573",
"0.6308826",
"0.62631166",
"0.6232683",
"0.6213324",
"0.6213324",
"0.6213324",
"0.6213324",
"0.6213324",
"0.6213324",
"0.6213324",
"0.6213324",
"0.6213324",
"0.6213324",
"0.6213324",
"0.6213324",
"0.6213324",
"0.6213324"
] | 0.6808087 | 1 |
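Usage sketch for the `fusion_api_patch_drive_enclosure` document in the record above. The JSON-Patch-style body is illustrative only; the record itself defers valid request bodies to the REST API docs:

    # Field names below are assumptions, not taken from the API docs.
    patch_body = [{'op': 'replace', 'path': '/powerState', 'value': 'Off'}]
    response = fusion_client.fusion_api_patch_drive_enclosure(
        body=patch_body, uri='/rest/drive-enclosures/<id>')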
Issues a PUT request to a drive enclosure. See RESTAPI docs for valid request bodies [Arguments] | def fusion_api_put_drive_enclosure(self, body, uri, param='', api=None, headers=None):
return self.drive_enclosure.put(body=body, uri=uri, param=param, api=api, headers=headers) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def put(self, *args, **kwargs):\n self.request(\"put\", *args, **kwargs)",
"def _put(self, *args, **kwargs):\n return self._request('put', *args, **kwargs)",
"def do_PUT(self,):\n self.http_method = 'PUT'\n # Nothing to do for now.\n pass",
"def put(self, *args, **kw):\n kw['method'] = 'PUT'\n return self.open(*args, **kw)",
"def _put(self, path=\"\", **kwargs):\n uri = force_json(self.uri + path)\n return self.client.request(uri, method=\"PUT\", **kwargs)",
"def putrequest(self, full_path, data=None, headers=None, files=None):\n return self.request('PUT', full_path, data, headers, files)",
"def put(self, url, body, headers={}):\n return self.request(url, \"PUT\", body, headers)",
"def do_PUT(self):\n content_len = int(self.headers.get('content-length', 0))\n post_body = self.rfile.read(content_len)\n post_body = json.loads(post_body)\n (resource, id) = self.parse_url(self.path)\n\n success = False\n\n if resource == \"categories\":\n success = update_category(id, post_body)\n if resource == \"comments\":\n success = edit_comment(id, post_body)\n if resource == \"posts\":\n success = update_post(id, post_body)\n\n if success:\n self._set_headers(204)\n else:\n self._set_headers(404)\n\n self.wfile.write(\"\".encode())",
"def put(self, path, request):\n\n try:\n data = json_decode(request.body)\n self.interface_data.set(path, data)\n response = self.interface_data.get(path, False)\n status_code = 200\n except MetadataParameterError as e:\n response = {'error': str(e)}\n status_code = 400\n except (TypeError, ValueError) as e:\n response = {'error': 'Failed to decode PUT request body: {}'.format(str(e))}\n status_code = 400\n return ApiAdapterResponse(response, status_code=status_code)",
"def http_put(self, **kwargs):\n return self.rabjcallable.put(**kwargs)",
"def fusion_api_patch_drive_enclosure(self, body, uri, api=None, headers=None):\n return self.drive_enclosure.patch(body, uri, api, headers)",
"def _put(self, path, data=None):\n headers = {'content-type': 'application/json'}\n if data:\n data = json.dumps(data)\n r = requests.put(self._url(path), data=data, headers=headers)\n assert r.status_code == 200\n return r",
"def put(call,\n headers=None,\n data=None,\n base=cloudBase,\n no_headers=False,\n raw=False,\n **kwargs): # pragma: no cover\n return _call(method=requests.put,\n call='{0}{1}'.format(base, call),\n headers=headers,\n no_headers=no_headers,\n data=data,\n raw=raw,\n **kwargs)",
"def put(self, path, request):\n\n content_type = 'application/json'\n\n try:\n data = json_decode(request.body)\n self.fileInterface.set(path, data)\n response = self.fileInterface.get(path)\n status_code = 200\n except FileInterfaceError as e:\n response = {'error': str(e)}\n status_code = 400\n except (TypeError, ValueError) as e:\n response = {'error': 'Failed to decode PUT request body: {}'.format(str(e))}\n status_code = 400\n\n logging.debug(response)\n\n return ApiAdapterResponse(response, content_type=content_type,\n status_code=status_code)",
"def put(self, *args, **kwargs):\n return self.handle_put_request()",
"def aput(url, **kwargs):\n return requests.put(url, **kwargs)",
"def simulate_put(app, path, **kwargs) -> _ResultBase:\n return simulate_request(app, 'PUT', path, **kwargs)",
"def put(self, url, body):\n return self._query(url, 'PUT', json=body)",
"def PUT(self, req):\n xml = req.xml(MAX_PUT_BUCKET_REFERER_SIZE)\n if xml:\n # check referer\n try:\n elem = fromstring(xml, 'RefererConfiguration')\n allow_empyt_referer=elem.find('AllowEmptyReferer').text\n if allow_empyt_referer not in ['true','false']:\n raise InvalidArgument()\n referer_list=elem.find('RefererList')\n\t\tswift_referers=[]\n for referer in referer_list.findall('Referer'):\n\t swift_referers.append(referer.text)\n\t\tif len(swift_referers)==0 :\n\t\t req.headers['X-Container-Read']=' '\n\t\telse:\n req.headers['X-Container-Read'] = '.r:'+','.join(get_real_url(swift_referers))\n except (XMLSyntaxError, DocumentInvalid):\n raise MalformedXML()\n except Exception as e:\n exc_type, exc_value, exc_traceback = sys.exc_info()\n LOGGER.error(e)\n raise exc_type, exc_value, exc_traceback\n resp = req.get_response(self.app)\n resp.status = HTTP_OK\n return resp",
"def _put(self, url, **kwargs):\n return self._call('PUT', url, kwargs)",
"def put(url, data=None, **kwargs):\n\n return request('put', url, data=data, **kwargs)",
"def put(self, api_path, *args, **kwargs):\n\n\t\treturn self._do_operation(u'put', api_path, *args, **kwargs)",
"def simulate_put(self, path='/', **kwargs):\n return self.simulate_request('PUT', path, **kwargs)",
"def put(self, endpoint, params=None, data=None):\n params = params or dict()\n data = data or dict()\n return self.request(verb=requests.put, address=self.project_address + endpoint,\n params=params, data=data)",
"def put(self, api_path, *args, **kwargs):\n\n return self._do_operation(u'put', api_path, *args, **kwargs)",
"def put(path: str, data={}):\n token = get_token()\n headers = {\n \"Authorization\": f\"Bearer {token}\"\n }\n return requests.put(get_base_url() + path, headers=headers, json=data)",
"def _put(self, url, data, extra_headers=None):\n headers = {'X-Requested-By': 'Unit Tests'}\n headers.update(extra_headers)\n return self.client.put(\n url,\n content_type='application/json',\n data=utils.as_json(data),\n headers=headers,\n )",
"def handle_put(self, api, command):\n return self._make_request_from_command('PUT', command)",
"def put(self, request, pk=None):\n return Response({'method': 'PUT'})",
"def put(self, request, pk=None):\n return Response({'method': 'PUT'})"
] | [
"0.72172344",
"0.7166577",
"0.71286005",
"0.70112306",
"0.68267727",
"0.6770514",
"0.6760071",
"0.6683008",
"0.66027224",
"0.65128016",
"0.6479687",
"0.6458144",
"0.6457014",
"0.6411887",
"0.63934046",
"0.63861746",
"0.638544",
"0.63830477",
"0.63397056",
"0.63353956",
"0.6322445",
"0.62833035",
"0.6237844",
"0.62303424",
"0.62286335",
"0.62212205",
"0.62124735",
"0.61990356",
"0.6188255",
"0.6188255"
] | 0.7271593 | 0 |
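Usage sketch for the `fusion_api_put_drive_enclosure` document in the record above; as with PATCH, the body shown is illustrative and the valid schema lives in the REST API docs:

    put_body = {'refreshState': 'RefreshPending'}  # field name is an assumption
    response = fusion_client.fusion_api_put_drive_enclosure(
        body=put_body, uri='/rest/drive-enclosures/<id>')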
Creates independent sas logical jbods [Arguments] | def fusion_api_post_sas_logical_jbods(self, body, api=None, headers=None):
return self.sas_logical_jbods.post(body=body, api=api, headers=headers) | {
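Usage sketch for the `fusion_api_post_sas_logical_jbods` wrapper above. Every body field is illustrative; the wrapper itself simply forwards `body` to a POST:

    jbod_body = {
        'name': 'sas-jbod-1',       # illustrative
        'numPhysicalDrives': 2,     # illustrative
        'driveEnclosureUri': '/rest/drive-enclosures/<id>',  # placeholder
    }
    response = fusion_client.fusion_api_post_sas_logical_jbods(body=jbod_body)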
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def write_mesh_java(self):\n fout = open(self.javaBatch1File+\".java\",\"w\")\n fout.write(\"\"\"\\\n// STAR-CCM+ macro\npackage macro;\n\nimport java.util.*;\n\nimport star.common.*;\nimport star.base.neo.*;\nimport star.resurfacer.*;\nimport star.trimmer.*;\nimport star.prismmesher.*;\nimport star.meshing.*;\n\npublic class %s extends StarMacro {\n\n public void execute() {\n execute0();\n }\n \"\"\" % (self.javaBatch1File))\n\n fout.write(\"\"\"\\\n\n private void execute0() {\n\n // Directory for output files and final sim file (if saved)\n String myPath = \"$PWD\";\n\n String myInputSTLFilename = \"%s\"; // contains aircraft geometry\n\n String myOutputMeshFilename = \"%s\"; // output sim name with volume mesh\n\n double mySphereRadius_ft = %f; // radius of freestream outer boundary in feet\n double mySphereX_ft = %f; // Center of freestream sphere in feet\n double mySphereY_ft = %f;\n double mySphereZ_ft = %f;\n double mySphereTriangles_ft = %f; // size of sphere outer boundary facets\n\n double myPrismFieldRatio = %f; // thickness ratio of near field to outermost prism\n\n int myBLcells = %d; // number of cells in boundary layer normal direction\n double myBLthickness_in = %f; // thickness of boundary layer in inches\n\n double myBaseSize_ft = %f; // mesh base size in feet\n int myCurvature = %d; // number of points to divide a circle\n double mySurfaceGrowthRate = %f; // growth rate (max size ratio) of surface triangles\n double myFeatureAngle_deg = %f; // maximum angle for defining sharp edges on ATR model\n\n double myMinMesh_pct = %f; // smallest cell size in percent\n double myEdgeTarget_pct = %f; // target size for feature curve edges in percent\n\n boolean bln_makeSurfaceMesh = %s; // use true to make surface mesh, false to skip\n boolean bln_makeVolumeMesh = %s; // use true to make volume mesh, false to skip\n boolean bln_saveMeshFile = %s; // use true to save final mesh file, false to skip\n \"\"\" % (self.STLFile,self.simMeshFile,self.mySphereRadius,self.mySphereX,self.mySphereY,\n self.mySphereZ,self.mySphereTriangles,self.myPrismFieldRatio,self.myBLcells,\n self.myBLthickness,self.myBaseSize,self.myCurvature,self.mySurfaceGrowthRate,\n self.myFeatureAngle,self.myMinMesh,self.myEdgeTarget,\n str(self.makeSurfaceMesh).lower(),str(self.makeVolumeMesh).lower(),str(self.saveMeshFile).lower()))\n\n fout.write(\"\"\"\\\n\n if (!bln_makeSurfaceMesh) bln_makeVolumeMesh = false;\n\n // Start of STAR macro\n Simulation simulation_0 = getActiveSimulation();\n\n Units units_0 = simulation_0.getUnitsManager().getPreferredUnits(new IntVector(new int[] {0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}));\n Units units_1 = ((Units) simulation_0.getUnitsManager().getObject(\"ft\"));\n units_1.setPreferred(true);\n\n PartImportManager partImportManager_0 = simulation_0.get(PartImportManager.class);\n\n // Read concatenated STL parts\n //partImportManager_0.importStlPart(resolvePath(myPath+myInputSTLFilename), \"OneSurfacePerPatch\", units_1, true, 1.0E-5);\n partImportManager_0.importStlPart(resolvePath(myInputSTLFilename), \"OneSurfacePerPatch\", units_1, true, 1.0E-5);\n\n MeshPartFactory meshPartFactory_0 = simulation_0.get(MeshPartFactory.class);\n\n SimpleSpherePart simpleSpherePart_0 = meshPartFactory_0.createNewSpherePart(simulation_0.get(SimulationPartManager.class));\n\n simpleSpherePart_0.setDoNotRetessellate(true);\n\n LabCoordinateSystem labCoordinateSystem_0 = simulation_0.getCoordinateSystemManager().getLabCoordinateSystem();\n\n 
simpleSpherePart_0.setCoordinateSystem(labCoordinateSystem_0);\n\n Coordinate coordinate_0 = simpleSpherePart_0.getOrigin();\n\n coordinate_0.setCoordinateSystem(labCoordinateSystem_0);\n coordinate_0.setCoordinate(units_1, units_1, units_1, new DoubleVector(new double[] {0.0, 0.0, 0.0}));\n\n // Set location of freestream sphere center (x, y, z) in feet\n coordinate_0.setValue(new DoubleVector(new double[] {mySphereX_ft, mySphereY_ft, mySphereZ_ft}));\n\n simpleSpherePart_0.getRadius().setUnits(units_1);\n\n // Set freestream sphere radius in feet\n simpleSpherePart_0.getRadius().setValue(mySphereRadius_ft);\n simpleSpherePart_0.getTessellationDensityOption().setSelected(TessellationDensityOption.MEDIUM);\n simpleSpherePart_0.rebuildSimpleShapePart();\n simpleSpherePart_0.setDoNotRetessellate(false);\n\n Region region_0 = simulation_0.getRegionManager().createEmptyRegion();\n\n region_0.setPresentationName(\"Region\");\n Boundary boundary_0 = region_0.getBoundaryManager().getBoundary(\"Default\");\n\n region_0.getBoundaryManager().removeBoundaries(new NeoObjectVector(new Object[] {boundary_0}));\n FeatureCurve featureCurve_0 = ((FeatureCurve) region_0.getFeatureCurveManager().getObject(\"Default\"));\n\n region_0.getFeatureCurveManager().removeObjects(featureCurve_0);\n FeatureCurve featureCurve_1 = region_0.getFeatureCurveManager().createEmptyFeatureCurveWithName(\"Feature Curve\");\n\n MeshPart meshPart_0 = ((MeshPart) simulation_0.get(SimulationPartManager.class).getPart(\"combined\"));\n\n simulation_0.getRegionManager().newRegionsFromParts(new NeoObjectVector(new Object[] {meshPart_0, simpleSpherePart_0}), \"OneRegion\", region_0, \"OneBoundaryPerPartSurface\", null, \"OneFeatureCurve\", featureCurve_1, false);\n\n MeshContinuum meshContinuum_0 = simulation_0.getContinuumManager().createContinuum(MeshContinuum.class);\n\n PhysicsContinuum physicsContinuum_0 = simulation_0.getContinuumManager().createContinuum(PhysicsContinuum.class);\n\n meshContinuum_0.enable(ResurfacerMeshingModel.class);\n\n // Use trimmer (Cartesian hex) mesh\n meshContinuum_0.enable(TrimmerMeshingModel.class);\n\n meshContinuum_0.enable(PrismMesherModel.class);\n\n // Base size in feet - larger values makes coarser grids, smaller values makes finer grids\n meshContinuum_0.getReferenceValues().get(BaseSize.class).setValue(myBaseSize_ft);\n\n ResurfacerMeshingModel resurfacerMeshingModel_0 = meshContinuum_0.getModelManager().getModel(ResurfacerMeshingModel.class);\n resurfacerMeshingModel_0.setDoCompatibilityRefinement(true);\n resurfacerMeshingModel_0.setDoAutomaticSurfaceRepair(false);\n\n MaxTrimmerSizeToPrismThicknessRatio maxTrimmerSizeToPrismThicknessRatio_0 = meshContinuum_0.getReferenceValues().get(MaxTrimmerSizeToPrismThicknessRatio.class);\n maxTrimmerSizeToPrismThicknessRatio_0.setLimitCellSizeByPrismThickness(true);\n SizeThicknessRatio sizeThicknessRatio_0 = maxTrimmerSizeToPrismThicknessRatio_0.getSizeThicknessRatio();\n\n // Prism to field thickness ratio\n sizeThicknessRatio_0.setNeighboringThicknessMultiplier(myPrismFieldRatio);\n\n NumPrismLayers numPrismLayers_0 = meshContinuum_0.getReferenceValues().get(NumPrismLayers.class);\n\n // Number of boundary layer cells\n numPrismLayers_0.setNumLayers(myBLcells);\n\n PrismThickness prismThickness_0 = meshContinuum_0.getReferenceValues().get(PrismThickness.class);\n\n prismThickness_0.getRelativeOrAbsoluteOption().setSelected(RelativeOrAbsoluteOption.ABSOLUTE);\n\n GenericAbsoluteSize genericAbsoluteSize_0 = ((GenericAbsoluteSize) 
prismThickness_0.getAbsoluteSize());\n\n Units units_2 = ((Units) simulation_0.getUnitsManager().getObject(\"in\"));\n\n genericAbsoluteSize_0.getValue().setUnits(units_2);\n\n // Boundary layer thickness in inches\n genericAbsoluteSize_0.getValue().setValue(myBLthickness_in);\n\n SurfaceCurvature surfaceCurvature_0 = meshContinuum_0.getReferenceValues().get(SurfaceCurvature.class);\n\n SurfaceCurvatureNumPts surfaceCurvatureNumPts_0 = surfaceCurvature_0.getSurfaceCurvatureNumPts();\n\n // Curvature refinement specified as number of points around a circle\n surfaceCurvatureNumPts_0.setNumPointsAroundCircle(myCurvature);\n\n SurfaceGrowthRate surfaceGrowthRate_0 = meshContinuum_0.getReferenceValues().get(SurfaceGrowthRate.class);\n\n // Surface growth rate (ratio of triangle sizes)\n surfaceGrowthRate_0.setGrowthRate(mySurfaceGrowthRate);\n\n SurfaceSize surfaceSize_0 = meshContinuum_0.getReferenceValues().get(SurfaceSize.class);\n\n RelativeMinimumSize relativeMinimumSize_0 = surfaceSize_0.getRelativeMinimumSize();\n\n // Set triangle minimum size percentage\n relativeMinimumSize_0.setPercentage(myMinMesh_pct);\n\n SimpleTemplateGrowthRate simpleTemplateGrowthRate_0 = meshContinuum_0.getReferenceValues().get(SimpleTemplateGrowthRate.class);\n\n // Set volume mesh growth rate for field (FAST, MEDIUM, SLOW, VERYSLOW)\n simpleTemplateGrowthRate_0.getGrowthRateOption().setSelected(GrowthRateOption.SLOW);\n\n // Set nearfield mesh growth rate for field (FAST, MEDIUM, SLOW, VERYSLOW)\n simpleTemplateGrowthRate_0.getSurfaceGrowthRateOption().setSelected(SurfaceGrowthRateOption.VERYSLOW);\n\n // Remove existing feature curves (will remark feature curves below)\n region_0.getFeatureCurveManager().removeObjects(featureCurve_1);\n\n MeshPipelineController meshPipelineController_0 = simulation_0.get(MeshPipelineController.class);\n\n meshPipelineController_0.initializeMeshPipeline();\n\n SurfaceRep surfaceRep_0 = ((SurfaceRep) simulation_0.getRepresentationManager().getObject(\"Initial Surface\"));\n\n Boundary boundary_1 = region_0.getBoundaryManager().getBoundary(\"combined.fuselage\");\n Boundary boundary_2 = region_0.getBoundaryManager().getBoundary(\"combined.tail\");\n Boundary boundary_3 = region_0.getBoundaryManager().getBoundary(\"combined.wing\");\n Boundary boundary_4 = region_0.getBoundaryManager().getBoundary(\"Sphere.Sphere Surface\");\n boundary_4.setBoundaryType(FreeStreamBoundary.class);\n\n // Identify feature curves using angle criteria (currently set at 17 degrees for the ATR model)\n FeatureCurve featureCurve_2 = surfaceRep_0.createFeatureEdgesOnBoundaries(new NeoObjectVector(new Object[] {boundary_1, boundary_2, boundary_3, boundary_4}), true, true, true, true, true, true, myFeatureAngle_deg, false);\n\n SurfaceSizeOption surfaceSizeOption_0 = featureCurve_2.get(MeshConditionManager.class).get(SurfaceSizeOption.class);\n\n surfaceSizeOption_0.setSurfaceSizeOption(true);\n\n SurfaceSize surfaceSize_1 = featureCurve_2.get(MeshValueManager.class).get(SurfaceSize.class);\n\n RelativeMinimumSize relativeMinimumSize_1 = surfaceSize_1.getRelativeMinimumSize();\n\n // Set feature curve minimum size (usually the same as surface triangle minimum size)\n relativeMinimumSize_1.setPercentage(myMinMesh_pct);\n\n RelativeTargetSize relativeTargetSize_0 = surfaceSize_1.getRelativeTargetSize();\n\n // Set feature curve target size as a percentage\n relativeTargetSize_0.setPercentage(myEdgeTarget_pct);\n\n SurfaceSizeOption surfaceSizeOption_1 = 
boundary_4.get(MeshConditionManager.class).get(SurfaceSizeOption.class);\n\n surfaceSizeOption_1.setSurfaceSizeOption(true);\n\n SurfaceSize surfaceSize_2 = boundary_4.get(MeshValueManager.class).get(SurfaceSize.class);\n\n surfaceSize_2.getRelativeOrAbsoluteOption().setSelected(RelativeOrAbsoluteOption.ABSOLUTE);\n\n AbsoluteMinimumSize absoluteMinimumSize_0 = surfaceSize_2.getAbsoluteMinimumSize();\n\n // Set minimum triangle size for freestream boundary (in feet)\n absoluteMinimumSize_0.getValue().setValue(mySphereTriangles_ft);\n\n AbsoluteTargetSize absoluteTargetSize_0 = surfaceSize_2.getAbsoluteTargetSize();\n\n // Set target triangle size for freestream boundary in feet\n absoluteTargetSize_0.getValue().setValue(mySphereTriangles_ft);\n\n // Make surface mesh\n if ( bln_makeSurfaceMesh ) meshPipelineController_0.generateSurfaceMesh();\n\n // Make volume mesh\n if ( bln_makeVolumeMesh ) meshPipelineController_0.generateVolumeMesh();\n\n // Save .sim file\n if ( bln_saveMeshFile ) simulation_0.saveState(resolvePath(myOutputMeshFilename));\n\n\n }\n}\n\"\"\")\n fout.close()",
"def joindna(*dnas, topology=\"linear\", compatibility=None, homology_length=None, unique=True, supfeature=False, product=None, process_name=None, process_description=None, pn=None, pd=None, quinable=True, **kwargs):\n kwargs.setdefault(\"_sourcefile\", None) \n kwargs.setdefault(\"process_id\", None)\n kwargs.setdefault(\"original_ids\", []) \n _sourcefile = kwargs[\"_sourcefile\"] \n process_id = kwargs[\"process_id\"] \n original_ids = kwargs[\"original_ids\"]\n\n project = None\n project = project if product is None else product\n process_name = pn if process_name is None else process_name\n process_description = pd if process_description is None else process_description\n \n if compatibility is None:\n fcompatibility = None\n compatibility = \"partial\"\n else:\n fcompatibility = compatibility\n\n if homology_length is None:\n fhomology_length = None\n if compatibility == \"complete\":\n homology_length = 0 \n else:\n homology_length = 2\n else:\n fhomology_length = homology_length\n\n new_dnas = [] \n for i, dna in enumerate(dnas):\n if dna.topology == \"circular\":\n if i == 0:\n order = \"first\"\n elif i == 1:\n order = \"second\"\n elif i == 2:\n order = \"third\"\n else:\n order = str(i) + \"th\" \n raise ValueError(\"The {} QUEEN object topology is 'circular.' Circular QUEEN objects cannot be connected with others.\".format(order)) \n new_dnas.append(dna) \n \n dnas = new_dnas\n \n #Extract history information\n history_features = [] \n for dna in dnas:\n history_features.append(dna._history_feature)\n \n construct = copy.deepcopy(dnas[0])\n positions_list = [construct._positions] \n if len(dnas) > 1:\n for dna in dnas[1:]:\n annealing = False\n feats = dna.dnafeatures\n if dna._ssdna == False and construct._ssdna == False:\n if (dna._left_end_top * construct._right_end_bottom == 1 or dna._left_end_bottom * construct._right_end_top == 1) and ((dna._left_end_top == -1 or dna._left_end_bottom == -1) or (construct._right_end_top == -1 or construct._right_end_bottom == -1)):\n if dna._left_end_top == 1:\n sticky_end = dna._left_end \n else:\n sticky_end = construct._right_end\n \n if compatibility == \"partial\":\n if len(construct._right_end) < len(dna._left_end):\n ovresult = _detect_overlap(construct._right_end, dna._left_end, allow_outies=False)\n else:\n ovresult = _detect_overlap(dna._left_end[::-1], construct._right_end[::-1], allow_outies=False) \n \n if ovresult == False:\n raise ValueError(\"The QUEEN_objects cannot be joined due to the end structure incompatibility.\")\n return False\n else:\n pass \n ovhg_length = ovresult[1][0] \n \n else:\n if construct._right_end == dna._left_end:\n ovhg_length = len(construct._right_end)\n pass \n else:\n raise ValueError(\"The QUEEN_objects cannot be joined due to the end structure incompatibility.\")\n return False\n\n new_dna = cropdna(dna, ovhg_length, len(dna.seq), quinable=0) \n else:\n if (construct._right_end == \"\" and ((dna._left_end == \"\") or (dna._left_end == dna.seq))) or (construct._right_end_top >= 0 and construct._right_end_bottom >= 0 and dna._left_end_top >= 0 and dna._left_end_bottom >= 0):\n new_dna = dna\n ovhg_length = 0 \n ovhg = \"\"\n else:\n raise ValueError(\"The QUEEN_objects cannot be joined due to the end structure incompatibility.\")\n return False\n \n elif dna._ssdna == True and construct._ssdna == True:\n annealing = True\n if len(construct._right_end) < len(dna._left_end):\n ovresult = _detect_overlap(construct._right_end, dna._left_end.translate(str.maketrans(\"ATGC\",\"TACG\"))[::-1])[1]\n 
new_q = ovresult[1] \n ovhg = ovresult[2]\n else:\n ovresult = _detect_overlap(dna._left_end, construct._right_end.translate(str.maketrans(\"ATGC\",\"TACG\"))[::-1])[1] \n new_q = ovresult[1] \n ovhg = ovresult[2] \n new_q = new_q[::-1]\n \n if compatibility == \"complete\":\n if len(new_q) == len(ovhg):\n pass \n else:\n raise ValueError(\"The QUEEN_objects cannot be joined due to the end structure incompatibility.\")\n return False\n new_q = construct.__class__(seq=new_q, quinable=0) \n ovhg_length = len(ovhg)\n \n else:\n raise ValueError(\"ssDNA cannot be joined with dsDNA\") \n \n if ovhg_length < homology_length and ovhg_length > 0:\n raise ValueError(\"Compatible stickey end legnth should be larger than or equal to {} bp\".format(homology_length)) \n\n feats = _slide(feats, len(construct.seq) - ovhg_length)\n feats1 = [feat for feat in construct.dnafeatures if \"broken_feature\" in feat.qualifiers]\n feats2 = [feat for feat in feats if \"broken_feature\" in feat.qualifiers]\n feats2_seqs = set([str(feat._original) for feat in feats2]) \n feats1 = [feat for feat in feats1 if str(feat._original) in feats2_seqs]\n\n if annealing == True:\n construct._seq = new_q._seq\n construct._right_end = new_q._right_end\n construct._right_end_top = new_q._right_end_top\n construct._right_end_bottom = new_q._right_end_bottom\n construct._left_end = new_q._left_end\n construct._left_end_top = new_q._left_end_top\n construct._left_end_bottom = new_q._left_end_bottom\n construct._topology = \"linear\"\n construct._positions = new_q._positions \n construct._ssdna = False\n positions_list.append(construct._positions)\n else:\n construct._seq = construct.seq + new_dna.seq \n construct._right_end = dna._right_end\n construct._right_end_top = dna._right_end_top\n construct._right_end_bottom = dna._right_end_bottom\n construct._topology = \"linear\"\n positions_list.append(new_dna._positions) \n const_features = copy.copy(construct.dnafeatures) \n \n #Restore a original feature from fragmented features\n if len(feats1) > 0 and len(feats2) > 0:\n for feat1 in feats1:\n if feat1.location.strand == -1:\n s1, e1 = feat1.location.parts[-1].start.position, feat1.location.parts[0].end.position\n else:\n s1, e1 = feat1.location.parts[0].start.position, feat1.location.parts[-1].end.position\n\n for feat2 in feats2:\n if feat2.location.strand == -1:\n s2, e2 = feat2.location.parts[-1].start.position - (len(construct.seq) - ovhg_length), feat2.location.parts[0].end.position - (len(construct.seq) - ovhg_length)\n else:\n s2, e2 = feat2.location.parts[0].start.position - (len(construct.seq) - ovhg_length), feat2.location.parts[-1].end.position - (len(construct.seq) - ovhg_length)\n \n if feat1.type == feat2.type and feat1.original == feat2.original: \n flag = 0\n for key in feat1.qualifiers:\n if key == \"broken_feature\":\n pass \n elif key in feat2.qualifiers and feat1.qualifiers[key] == feat2.qualifiers[key]:\n flag = 1\n else:\n #flag = 0\n break \n \n if flag == 1:\n note1 = feat1.qualifiers[\"broken_feature\"][0]\n label1 = \":\".join(note1.split(\":\")[:-1])\n length1 = int(note1.split(\":\")[-4]) \n pos_s1 = int(note1.split(\":\")[-1].split(\"..\")[0].replace(\" \",\"\"))\n pos_e1 = int(note1.split(\":\")[-1].split(\"..\")[1].replace(\" \",\"\"))\n\n note2 = feat2.qualifiers[\"broken_feature\"][0]\n label2 = \":\".join(note2.split(\":\")[:-1])\n length2 = int(note2.split(\":\")[-4]) \n pos_s2 = int(note2.split(\":\")[-1].split(\"..\")[0].replace(\" \",\"\"))\n pos_e2 = 
int(note2.split(\":\")[-1].split(\"..\")[1].replace(\" \",\"\"))\n \n #Join fragmented features\n if length1 == length2 and \"_original\" in feat1.__dict__ and \"_original\" in feat2.__dict__ and feat1.location.strand == feat2.location.strand:\n note = \"{}:{}..{}\".format(label1, pos_s1, pos_e2)\n new_seq = construct.seq[s1:e1] + dna.seq[s2:e2]\n feat1_index = const_features.index(feat1)\n new_feat = copy.deepcopy(const_features[feat1_index]) \n strand = new_feat.location.strand\n if len(feat1.location.parts) == 1 and len(feat2.location.parts) == 1:\n new_feat.location = FeatureLocation(feat1.location.parts[0].start.position, feat2.location.parts[-1].end.position, feat1.strand)\n new_feat.location.strand = strand\n else:\n locations = feat1.location.parts[0:-1] + [FeatureLocation(feat1.location.parts[-1].start.position, feat2.location.parts[0].end.position, feat1.strand)] + feat2.location.parts[0:-1]\n if strand == -1:\n locations.reverse() \n new_feat.location = CompoundLocation(locations) \n new_feat.location.strand = strand \n \n new_feat = feat1.__class__(feature=new_feat, subject=construct)\n new_feat1 = feat1.__class__(feature=feat1, subject=construct)\n new_feat2 = feat1.__class__(feature=feat2, subject=construct) \n s = new_feat.start \n e = new_feat.end if new_feat.end <= len(construct.seq) else new_feat.end - len(construct.seq) \n \n if construct.printsequence(s, e, new_feat.location.strand if new_feat.location.strand !=0 else 1) in new_feat.original:\n new_feat._id = label1.split(\":\")[1]\n construct._dnafeatures[feat1_index] = feat1.__class__(feature=new_feat)\n construct._dnafeatures[feat1_index].qualifiers[\"broken_feature\"] = [note]\n if feat2 in feats:\n del feats[feats.index(feat2)] \n \n construct._dnafeatures = construct.dnafeatures + feats\n \n construct._dnafeatures.sort(key=lambda x:x.location.parts[0].start.position)\n for feat in construct.dnafeatures:\n if \"broken_feature\" in feat.qualifiers:\n note = feat.qualifiers[\"broken_feature\"][0]\n label = \":\".join(note.split(\":\")[:-1])\n length = int(note.split(\":\")[-4]) \n pos_s = int(note.split(\":\")[-1].split(\"..\")[0].replace(\" \",\"\"))\n pos_e = int(note.split(\":\")[-1].split(\"..\")[1].replace(\" \",\"\"))\n if (pos_s == 1 and pos_e == length) or (pos_s == length and pos_e == 1):\n del feat.qualifiers[\"broken_feature\"]\n if Alphabet:\n new_record = SeqRecord(Seq(str(construct.seq), Alphabet.DNAAlphabet()))\n else:\n new_record = SeqRecord(Seq(str(construct.seq)))\n\n new_record.features = construct.dnafeatures\n new_record.annotations[\"topology\"] = topology\n construct.record = new_record \n \n if topology == \"circular\":\n construct = _circularizedna(construct)\n \n if quinable == True: \n zero_positions = [] \n for d, positions in enumerate(positions_list):\n if 0 in positions:\n zero_positions.append((len(positions),d,positions.index(0)))\n if len(zero_positions) > 0:\n zero_positions.sort() \n zero_positions.reverse() \n zero_position = 0\n for dna in dnas[0:zero_positions[0][1]]:\n zero_position += len(dna.seq)\n zero_position += zero_positions[0][2]\n construct = cutdna(construct, zero_position, quinable=0)[0]\n construct = _circularizedna(construct) \n construct._positions = tuple(range(len(construct.seq))) \n else:\n construct._positions = tuple(range(len(construct.seq))) \n\n else:\n zero_positions = [] \n for d, positions in enumerate(positions_list):\n if 0 in positions:\n zero_positions.append((len(positions),d,positions.index(0)))\n \n if len(zero_positions) > 0:\n 
zero_positions.sort() \n zero_positions.reverse() \n zero_origin = zero_positions[0][1]\n new_positions = [] \n for d, positions in enumerate(positions_list): \n if d == zero_origin:\n new_positions.extend(positions)\n else:\n new_positions.extend([-1] * len(positions))\n construct._positions = tuple(new_positions) \n else:\n construct._positions = tuple(range(len(construct.seq)))\n \n construct._supfeatureids() #Update feature ID\n else:\n topology = \"circular\"\n construct = _circularizedna(dnas[0])\n construct._positions = construct._positions[0:len(construct.seq)]\n \n if project is None:\n construct._unique_id = dnas[0]._unique_id\n else:\n construct._unique_id = project\n\n new_features = [] \n remove_features = [] \n for feat in construct.dnafeatures:\n if \"broken_feature\" in feat.qualifiers:\n note = feat.qualifiers[\"broken_feature\"][0]\n label = \":\".join(note.split(\":\")[:-1])\n poss, pose = list(map(int,note.split(\":\")[-1].split(\"..\")))\n length = int(note.split(\":\")[-4]) \n if feat.location.strand != -1:\n sfeat = feat.start-(poss-1) \n sfeat = sfeat if sfeat >= 0 else len(construct.seq) + sfeat\n efeat = feat.end+(length-pose)\n else:\n sfeat = feat.start-(length-pose) \n sfeat = sfeat if sfeat >= 0 else len(construct.seq) + sfeat\n efeat = feat.end+(poss-1) \n \n if feat.subject is None:\n feat.subject = construct\n \n if note.split(\":\")[-3] == construct.printsequence(sfeat, efeat, strand=feat.location.strand):\n if sfeat < efeat:\n location = FeatureLocation(sfeat, efeat, feat.location.strand) \n else:\n location = CompoundLocation([FeatureLocation(sfeat, len(construct.seq)), FeatureLocation(0, efeat, feat.location.strand)]) \n newfeat = feat.__class__(location=location, subject=construct)\n newfeat.type = feat.type\n newfeat.qualifiers = feat.qualifiers\n del newfeat.qualifiers[\"broken_feature\"]\n newfeat._id = label.split(\":\")[1]\n new_features.append(newfeat)\n remove_features.append(feat)\n\n for feat in remove_features:\n del construct._dnafeatures[construct.dnafeatures.index(feat)]\n \n for feat in new_features:\n construct._dnafeatures.append(feat) \n \n if type(supfeature) in (tuple, list) and type(supfeature[0]) == dict:\n for feature_dict in supfeature: \n construct.setfeature(feature_dict) \n elif type(supfeature) == dict:\n construct.setfeature(supfeature)\n \n if unique == True:\n new_features = [] \n for feat in construct.dnafeatures:\n if feat in new_features:\n pass \n else:\n new_features.append(feat) \n construct._dnafeatures = new_features\n\n construct.record.feartures = construct.dnafeatures\n if quinable == True:\n fproject = \"\" \n fcompatibility = \"\" if fcompatibility is None else \", compatibility='{}'\".format(str(compatibility))\n fhomology_length = \"\" if fhomology_length is None else \", homology_length={}\".format(homology_length)\n funique = \"\" if unique == True else \", unique={}\".format(unique) \n fsupfeature = \"\" if supfeature == False else \", supfeature={}\".format(str(supfeature))\n fproduct = \"\" if product is None else \", product='\" + product + \"'\"\n process_name = \"\" if process_name is None else \", process_name='\" + process_name + \"'\"\n process_description = \"\" if process_description is None else \", process_description='\" + process_description + \"'\" \n \n construct._product_id = construct._unique_id if product is None else product \n construct.record.id = construct.project\n dna_elements = \"[\" + \", \".join([\"QUEEN.dna_dict['{}']\".format(dna._product_id) for dna in dnas]) + \"]\"\n 
building_history = \"QUEEN.dna_dict['{}'] = joindna(*{}, topology='{}'{}{}{}{}{}{})\".format(construct._product_id, dna_elements, topology, fcompatibility, fhomology_length, fproject, fproduct, process_name, process_description) \n history_feature = _combine_history(construct, history_features) \n construct._history_feature = history_feature \n process_id, original_ids = make_processid(construct, building_history, process_id, original_ids)\n add_history(construct, [building_history, \"topology: {}\".format(topology), \",\".join([process_id] + original_ids)], _sourcefile) \n construct._check_uniqueness()\n else:\n construct.__dict__[\"_product_id\"] = dnas[0]._product_id if \"_product_id\" in dnas[0].__dict__ else dnas[0]._unique_id\n\n for dnafeature in construct.dnafeatures:\n dnafeature.subject = construct\n \n if product is None:\n pass \n else:\n product = product.replace(\" \",\"\") \n match = re.fullmatch(\"(.+)\\[(.+)\\]\", product) \n if match:\n if match.group(2).isdecimal() == True:\n construct.__class__._namespace[match.group(1)][int(match.group(2))] = construct\n else:\n construct.__class__._namespace[match.group(1)][match.group(2)] = construct\n else: \n construct.__class__._namespace[product] = construct\n return construct",
"def fusion_api_get_sas_logical_jbods(self, uri=None, param='', api=None, headers=None):\n return self.sas_logical_jbods.get(uri=uri, api=api, headers=headers, param=param)",
"def asdf_create(asdf_name, wav_dirs, sta_dir):\n\n with pyasdf.ASDFDataSet(asdf_name) as ds:\n wav_files = []\n for wav_dir in wav_dirs:\n wav_files.extend([os.path.join(root, a_file)\n for root, dirs, files in os.walk(wav_dir)\n for a_file in files])\n for _i, filename in enumerate(wav_files):\n print(\"Adding mseed file %i of %i...\" % (_i+1, len(wav_files)))\n st = read(filename)\n #Add waveforms\n ds.add_waveforms(st, tag=\"raw_recording\")\n sta_files = glob('%s/*' % sta_dir)\n for filename in sta_files:\n ds.add_stationxml(filename)\n return",
"def create_cwas(name='cwas'):\n \n inputspec = pe.Node(util.IdentityInterface(fields=['roi',\n 'subjects',\n 'regressor', \n 'cols', \n 'f_samples', \n 'strata', \n 'parallel_nodes']),\n name='inputspec')\n outputspec = pe.Node(util.IdentityInterface(fields=['F_map',\n 'p_map']),\n name='outputspec')\n \n cwas = pe.Workflow(name=name)\n \n ccb = pe.Node(util.Function(input_names=['mask_file',\n 'batches'],\n output_names=['batch_list'],\n function=create_cwas_batches),\n name='cwas_batches')\n \n ncwas = pe.MapNode(util.Function(input_names=['subjects_file_list',\n 'mask_file',\n 'regressor', \n 'cols', \n 'f_samples',\n# 'compiled_func',\n 'voxel_range', \n 'strata'],\n output_names=['result_batch'],\n function=nifti_cwas),\n name='cwas_batch',\n iterfield=['voxel_range'])\n \n mcwasb = pe.Node(util.Function(input_names=['cwas_batches',\n 'mask_file'],\n output_names=['F_file',\n 'p_file'],\n function=merge_cwas_batches),\n name='cwas_volumes')\n\n# ctf = pe.Node(util.Function(input_names=[],\n# output_names=['compiled_dot_norm'],\n# function=compile_theano_functions),\n# name='theano_functions')\n \n jmask = pe.Node(util.Function(input_names=['subjects_file_list', \n 'mask_file'],\n output_names=['joint_mask'],\n function=joint_mask),\n name='joint_mask')\n \n #Compute the joint mask\n cwas.connect(inputspec, 'subjects',\n jmask, 'subjects_file_list')\n cwas.connect(inputspec, 'roi',\n jmask, 'mask_file')\n\n #Create batches based on the joint mask\n cwas.connect(jmask, 'joint_mask',\n ccb, 'mask_file')\n cwas.connect(inputspec, 'parallel_nodes',\n ccb, 'batches')\n \n #Compute CWAS over batches of voxels\n cwas.connect(jmask, 'joint_mask',\n ncwas, 'mask_file')\n cwas.connect(inputspec, 'subjects',\n ncwas, 'subjects_file_list')\n cwas.connect(inputspec, 'regressor',\n ncwas, 'regressor')\n cwas.connect(inputspec, 'f_samples',\n ncwas, 'f_samples')\n cwas.connect(inputspec, 'cols',\n ncwas, 'cols')\n# cwas.connect(ctf, 'compiled_dot_norm',\n# ncwas, 'compiled_func')\n cwas.connect(ccb, 'batch_list',\n ncwas, 'voxel_range')\n cwas.connect(inputspec, 'strata',\n ncwas, 'strata')\n \n #Merge the computed CWAS data\n cwas.connect(ncwas, 'result_batch',\n mcwasb, 'cwas_batches')\n cwas.connect(jmask, 'joint_mask',\n mcwasb, 'mask_file')\n \n cwas.connect(mcwasb, 'F_file',\n outputspec, 'F_map')\n cwas.connect(mcwasb, 'p_file',\n outputspec, 'p_map')\n \n return cwas",
"def createMfile(dHeader):\n\tif specParamsOK(dHeader):\n createMatlabScript(dHeader)\n else:\n raise 'spec params error'",
"def _createMaster(self, *args, **kwds):\n raise NotImplementedError",
"def make_Hamiltonian(skf_dir, atom_types, disp, kpts, write_band=False, use_omp=False):\n if disp == 'D3': #from dftb manual\n dispersion = '''DftD3{\n Damping = BeckeJohnson{\n a1 = 0.5719\n a2 = 3.6017\n }\n s6 = 1.0\n s8 = 0.5883\n }\n '''\n\n elif disp == 'D30': #zero dampling\n dispersion = '''DftD3{\n Damping = ZeroDamping{\n sr6 = 0.746\n alpah6 = 4.191\n }\n s6 = 1.0\n s8 = 3.209\n }\n '''\n\n elif disp == 'D4':\n dispersion = '''DftD4{\n s6 = 1\n s8 = 0.6635015\n s9 = 1\n a1 = 0.5523240\n a2 = 4.3537076\n }\n '''\n\n elif disp == 'MBD': #1.0 from J. Phys. Chem. Lett. 2018, 9, 399−405\n dispersion = 'MBD{\\n\\tKGrid = ' + str(kpts)[1:-1] + '\\n\\tBeta = 1.0}\\n'\n\n elif disp == 'TS': #1.05 from J. Phys. Chem. Lett. 2018, 9, 399−405\n dispersion = '''TS{\n Damping = 20.0\n RangeSeparation = 1.0\n }\n '''\n\n elif disp == 'LJ':\n dispersion = 'LennardJones{Parameters = UFFParameters{}}'\n else:\n dispersion = None\n\n\n kwargs = {'Hamiltonian_SCC': 'yes',\n 'Hamiltonian_SCCTolerance': 1e-06,\n 'Hamiltonian_MaxSCCIterations': 1000,\n #'Hamiltonian_Mixer': 'DIIS{}', #Default is Broyden\n #'Hamiltonian_Dispersion': dispersion,\n 'slako_dir': skf_dir,\n 'Analysis_': '',\n 'Analysis_WriteBandOut': 'No',\n 'Analysis_MullikenAnalysis': 'No',\n 'Analysis_CalculateForces': 'Yes',\n }\n if write_band: \n kwargs['Analysis_WriteBandOut'] = 'Yes'\n if use_omp:\n kwargs['Parallel_'] = ''\n kwargs['Parallel_UseOmpThreads'] = 'Yes'\n if dispersion is not None:\n kwargs['Hamiltonian_Dispersion'] = dispersion\n\n if skf_dir.find('3ob') > 0: \n calc_type = '3ob'\n elif skf_dir.find('mio') > 0: \n calc_type = 'mio'\n elif skf_dir.find('pbc') > 0:\n calc_type = 'pbc'\n elif skf_dir.find('matsci') > 0:\n calc_type = 'matsci'\n\n #https://dftb.org/parameters/download/3ob/3ob-3-1-cc\n if calc_type == '3ob':\n kwargs['Hamiltonian_ThirdOrderFull'] = 'Yes'\n kwargs['Hamiltonian_HCorrection'] = 'Damping {\\n\\tExponent = 4.00\\n\\t}'\n HD = {\"Br\": -0.0573,\n \"C\": -0.1492,\n \"N\": -0.1535,\n \"Ca\": -0.0340, \n \"Na\": -0.0454,\n \"Cl\": -0.0697, \n \"Zn\": -0.03,\n \"O\": -0.1575,\n \"F\": -0.1623,\n \"P\": -0.14,\n \"H\": -0.1857, \n \"S\": -0.11,\n \"I\": -0.0433, \n \"K\": -0.0339,\n }\n strs = '{'\n for ele in atom_types:\n if ele == 'H':\n kwargs['Hamiltonian_MaxAngularMomentum_H']='s'\n elif ele in ['Mg', 'C', 'N', 'Ca', 'Na', 'O', 'F', 'K']:\n kwargs['Hamiltonian_MaxAngularMomentum_'+ele]='p'\n elif ele in ['Br', 'Cl', 'P', 'S', 'I', 'Zn']:\n kwargs['Hamiltonian_MaxAngularMomentum_'+ele]='d'\n else:\n raise RuntimeError(\"3-ob-1 doesnot support\", ele)\n strs +='\\n\\t'+ele+' = '+str(HD[ele])\n strs += '\\n\\t}'\n kwargs['Hamiltonian_HubbardDerivs'] = strs\n elif calc_type == 'pbc':\n #https://dftb.org/parameters/download/pbc/pbc-0-3-cc\n for ele in atom_types:\n if ele == 'H':\n kwargs['Hamiltonian_MaxAngularMomentum_H']='s'\n elif ele in ['C', 'O', 'N', 'F']:\n kwargs['Hamiltonian_MaxAngularMomentum_'+ele]='p'\n elif ele in ['Si', 'Fe']:\n kwargs['Hamiltonian_MaxAngularMomentum_'+ele]='d'\n else:\n raise RuntimeError(\"pbc-0-3 doesnot support\", ele)\n elif calc_type in ['matsci', 'mio']:\n #https://dftb.org/parameters/download/pbc/pbc-0-3-cc\n for ele in atom_types:\n if ele == 'H':\n kwargs['Hamiltonian_MaxAngularMomentum_H']='s'\n elif ele in ['B', 'O', 'C', 'N']:\n kwargs['Hamiltonian_MaxAngularMomentum_'+ele]='p'\n elif ele in ['Si']:\n kwargs['Hamiltonian_MaxAngularMomentum_'+ele]='d'\n else:\n raise RuntimeError(calc_type, \"doesnot support\", ele)\n \n #DFTB2\n\n #pbc-0-3\n #matsci\n 
#ob2\n #pbc\n #print(calc_type, kwargs)\n return kwargs",
"def create_job(jobrun, vcf_filenames):\n if jobrun == \"cluster\":\n \"\"\"\n Supports only PBS clusters for now.\n \"\"\"\n for i in vcf_filenames:\n job_name = os.path.basename(i)\n job_print_string = \"#PBS -N %s\\n#PBS -M [email protected]\\n#PBS -m abe\\n#PBS -V\\n#PBS -l nodes=1:ppn=4,pmem=4000mb,walltime=72:00:00\\n#PBS -q fluxod\\n#PBS -A esnitkin_fluxod\\n#PBS -l qos=flux\\n\\n/home/apirani/anaconda/bin/python /nfs/esnitkin/bin_group/scripts/Scripts_v2.0/variants_position_analysis/reason_job.py -filter2_only_snp_vcf_dir %s -filter2_only_snp_vcf_file %s\\n\" % (job_name, args.filter2_only_snp_vcf_dir, i)\n job_file_name = \"%s.pbs\" % (i)\n f1=open(job_file_name, 'w+')\n f1.write(job_print_string)\n f1.close()\n #os.system(\"mv %s/*.pbs %s/temp\" % (args.filter2_only_snp_vcf_dir, args.filter2_only_snp_vcf_dir))\n pbs_dir = args.filter2_only_snp_vcf_dir + \"/*.pbs\"\n pbs_scripts = glob.glob(pbs_dir)\n for i in pbs_scripts:\n print \"Running: qsub %s\" % i\n #os.system(\"qsub %s\" % i)\n\n elif jobrun == \"parallel-local\":\n \"\"\"\n Generate a Command list of each job and run it in parallel on different cores available on local system\n \"\"\"\n command_array = []\n command_file = \"%s/commands_list.sh\" % args.filter2_only_snp_vcf_dir\n f3 = open(command_file, 'w+')\n\n\n for i in vcf_filenames:\n job_name = os.path.basename(i)\n job_print_string = \"#PBS -N %s\\n#PBS -M [email protected]\\n#PBS -m abe\\n#PBS -V\\n#PBS -l nodes=1:ppn=4,pmem=4000mb,walltime=72:00:00\\n#PBS -q fluxod\\n#PBS -A esnitkin_fluxod\\n#PBS -l qos=flux\\n\\n/home/apirani/anaconda/bin/python /nfs/esnitkin/bin_group/scripts/Scripts_v2.0/variants_position_analysis/reason_job.py -filter2_only_snp_vcf_dir %s -filter2_only_snp_vcf_file %s\\n\" % (job_name, args.filter2_only_snp_vcf_dir, i)\n job_file_name = \"%s.pbs\" % (i)\n f1=open(job_file_name, 'w+')\n f1.write(job_print_string)\n f1.close()\n #os.system(\"mv %s/*.pbs %s/temp\" % (args.filter2_only_snp_vcf_dir, args.filter2_only_snp_vcf_dir))\n pbs_dir = args.filter2_only_snp_vcf_dir + \"/*.pbs\"\n pbs_scripts = glob.glob(pbs_dir)\n\n\n for i in pbs_scripts:\n f3.write(\"bash %s\\n\" % i)\n f3.close()\n with open(command_file, 'r') as fpp:\n for lines in fpp:\n lines = lines.strip()\n command_array.append(lines)\n fpp.close()\n print len(command_array)\n if args.numcores:\n num_cores = int(num_cores)\n else:\n num_cores = multiprocessing.cpu_count()\n results = Parallel(n_jobs=num_cores)(delayed(run_command)(command) for command in command_array)\n\n elif jobrun == \"parallel-single-cluster\":\n print \" \"\n else:\n \"\"\"\n Generate a Command list of each job and run it on local system one at a time\n \"\"\"\n command_array = []\n command_file = \"%s/commands_list.sh\" % args.filter2_only_snp_vcf_dir\n os.system(\"bash %s\" % command_file)",
"def make_dataset(\n *,\n k_size: int = 5,\n j_size: int,\n i_size: int,\n time_size: int = 4,\n grid_type: Type[ShocGridGenerator] = DiagonalShocGrid,\n corner_size: int = 0,\n) -> xarray.Dataset:\n coordinate_centre_mask = numpy.full((j_size, i_size), True)\n # Cut a chunk out of the corner where the coordinates will not be defined.\n if corner_size > 1:\n coordinate_centre_mask[-(corner_size - 1):, :+(corner_size - 1)] = False\n\n # SHOC files have a 1-cell border around the outside where the cells have\n # coordinates, but no data.\n wet_centre_mask = numpy.full((j_size, i_size), True)\n if corner_size > 0:\n wet_centre_mask[-corner_size:, :+corner_size] = False\n wet_centre_mask[-corner_size:, -corner_size:] = False\n wet_centre_mask[:+1, :] = False\n wet_centre_mask[-1:, :] = False\n wet_centre_mask[:, :+1] = False\n wet_centre_mask[:, -1:] = False\n wet_mask = c_mask_from_centres(wet_centre_mask, {\n ArakawaCGridKind.face: ('j_centre', 'i_centre'),\n ArakawaCGridKind.back: ('j_back', 'i_back'),\n ArakawaCGridKind.left: ('j_left', 'i_left'),\n ArakawaCGridKind.node: ('j_node', 'i_node'),\n })\n\n # These DataArrays are the long/lats of the grid corners. The centres are\n # derived from these by averaging the surrounding four corners.\n grid = grid_type(j=j_size, i=i_size, face_mask=coordinate_centre_mask)\n layers = ShocLayerGenerator(k=k_size)\n\n t = xarray.DataArray(\n # Note: Using pandas.date_range() directly here will lead to strange\n # behaviours, where the `record` dimension becomes a data variable with\n # a datetime64 dtype. Using a list of datetimes instead seems to avoid\n # this, resulting in record simply being a dimension.\n data=list(pandas.date_range(\"2021-11-11\", periods=time_size)),\n dims=[\"record\"],\n attrs={\n \"long_name\": \"Time\",\n \"standard_name\": \"time\",\n \"coordinate_type\": \"time\",\n },\n )\n # Note: xarray will reformat this in to 1990-01-01T00:00:00+10:00, which\n # EMS fails to parse. 
There is no way around this using xarray natively,\n # you have to adjust it with nctool after saving it.\n t.encoding[\"units\"] = \"days since 1990-01-01 00:00:00 +10\"\n\n botz = xarray.DataArray(\n data=numpy.random.random((j_size, i_size)) * 10 + 50,\n dims=wet_mask[\"face_mask\"].dims,\n attrs={\n \"units\": \"metre\",\n \"long_name\": \"Z coordinate at sea-bed at cell centre\",\n \"standard_name\": \"depth\",\n \"positive\": \"down\",\n \"outside\": \"9999\",\n \"missing_value\": -99.,\n }\n ).where(wet_mask.data_vars[\"face_mask\"])\n botz.values[1, 1] = -99.\n\n eta = xarray.DataArray(\n data=numpy.random.normal(0, 0.2, (time_size, j_size, i_size)),\n dims=[\"record\", *wet_mask[\"face_mask\"].dims],\n attrs={\n \"units\": \"metre\",\n \"long_name\": \"Surface elevation\",\n \"standard_name\": \"sea_surface_height_above_geoid\",\n }\n ).where(wet_mask.data_vars[\"face_mask\"])\n temp = xarray.DataArray(\n data=numpy.random.normal(12, 0.5, (time_size, k_size, j_size, i_size)),\n dims=[\"record\", \"k_centre\", *wet_mask[\"face_mask\"].dims],\n attrs={\n \"units\": \"degrees C\",\n \"long_name\": \"Temperature\",\n },\n ).where(wet_mask.data_vars[\"face_mask\"])\n\n u1 = xarray.DataArray(\n data=numpy.random.normal(0, 2, (time_size, k_size, j_size, i_size + 1)),\n dims=[\"record\", \"k_centre\", *wet_mask.data_vars[\"left_mask\"].dims],\n attrs={\n \"units\": \"metre second-1\",\n \"long_name\": \"I component of current at left face\",\n }\n )\n u2 = xarray.DataArray(\n data=numpy.random.normal(0, 2, (time_size, k_size, j_size + 1, i_size)),\n dims=[\"record\", \"k_centre\", *wet_mask.data_vars[\"back_mask\"].dims],\n attrs={\n \"units\": \"metre per second\",\n \"long_name\": \"I component of current at back face\",\n }\n )\n flag = xarray.DataArray(\n data=numpy.random.randint(0, 256, (time_size, k_size, j_size + 1, i_size + 1)),\n dims=[\"record\", \"k_centre\", *wet_mask.data_vars[\"node_mask\"].dims],\n attrs={\"long_name\": \"SHOC masking flags\"},\n )\n\n dataset = xarray.Dataset(\n data_vars={\n **layers.standard_vars,\n **grid.standard_vars,\n \"botz\": botz,\n \"t\": t,\n \"eta\": eta,\n \"temp\": temp,\n \"u1\": u1,\n \"u2\": u2,\n \"flag\": flag,\n },\n attrs={\n \"title\": \"Example SHOC dataset\",\n \"ems_version\": \"v1.2.3 fake\",\n \"Conventions\": \"CMR/Timeseries/SHOC\",\n \"nce1\": j_size,\n \"nce2\": i_size,\n \"nfe1\": j_size + 1,\n \"nfe2\": i_size + 1,\n \"gridtype\": \"NUMERICAL\",\n },\n )\n dataset.encoding[\"unlimited_dims\"] = {\"record\"}\n return dataset",
"def __createNoSeparationCommand(self):\n\n submslist = ParallelTaskHelper.getReferencedMSs(self._arg['vis'])\n if len(submslist) == 0:\n raise ValueError, 'There are no subMSs in input vis'\n \n tbTool = tbtool()\n\n listOutputMS = []\n\n subMs_idx = 0\n for subMS in submslist:\n\n # make sure the SORTED_TABLE keywords are disabled\n tbTool.open(subMS, nomodify=False)\n if 'SORTED_TABLE' in tbTool.keywordnames():\n tobeDeleted = tbTool.getkeyword('SORTED_TABLE').split(' ')[1]\n tbTool.removekeyword('SORTED_TABLE')\n os.system('rm -rf '+tobeDeleted)\n \n tbTool.close() \n\n listOutputMS.append(self.dataDir+'/%s.%04d.ms' \\\n % (self.outputBase, subMs_idx))\n subMs_idx += 1\n\n # Override the original parameters\n self.override_arg('outputvis',listOutputMS)\n \n self._consolidateOutput = False\n \n # Add to the list of jobs to execute\n subMs_idx = 0\n for subMS in submslist:\n localArgs = copy.copy(self._arg)\n localArgs['vis'] = subMS\n for key in self._arguser:\n localArgs[key] = self._arguser[key][subMs_idx]\n \n if self._arg.has_key('createmms'):\n self._arg['createmms'] = False\n localArgs['createmms'] = False\n \n subMs_idx += 1\n if not self._mpi_cluster:\n self._executionList.append(JobData(self._taskName, localArgs))\n else:\n self._executionList.append([self._taskName + '()',localArgs])",
"def nemo_wrap(ds, rho=True, b=True, alpha=True, beta=True, sigma=True, T_name='thetao', S_name='so', z_name='gdept_0'):\n nameos = dict(\n rn_lambda1=ds.get(\"rn_lambda1\", default=rn_lambda1),\n rn_lambda2=ds.get(\"rn_lambda2\", default=rn_lambda2),\n rn_a0=ds.get(\"rn_a0\", default=rn_a0),\n rn_b0=ds.get(\"rn_b0\", default=rn_b0),\n rn_mu1=ds.get(\"rn_mu1\", default=rn_mu1),\n rn_mu2=ds.get(\"rn_mu2\", default=rn_mu2),\n rn_nu=ds.get(\"rn_nu\", default=rn_nu),\n )\n\n T = ds[T_name]\n S = ds[S_name]\n z = ds[z_name]\n\n out = xr.Dataset()\n\n if not \"p_ref\" in ds:\n reference_levels = np.arange(5) * 1000\n p_ref = xr.DataArray(\n reference_levels,\n coords=(reference_levels,),\n dims=\"p_ref\",\n name=\"reference pressure for the potential quantities\",\n attrs={\n \"standard_name\": \"reference_pressure\",\n \"long_name\": \"reference pressure\",\n \"units\": \"dbar\",\n },\n )\n out = out.expand_dims({\"p_ref\": p_ref})\n else:\n out['p_ref'] = ds['p_ref']\n\n if rho:\n out[\"rho\"] = compute_rho(T, S, z=out.p_ref, **nameos)\n out[\"rho\"].attrs.update(\n {\n \"standard_name\": \"potential_density\",\n \"long_name\": \"potential density\",\n \"units\": \"kg/m3\",\n }\n )\n if b:\n out[\"buoyancy\"] = compute_b(T, S, z=out.p_ref, **nameos)\n out[\"buoyancy\"].attrs.update(\n {\n \"standard_name\": \"potential_buoyancy\",\n \"long_name\": \"potential buoyancy\",\n \"units\": \"m/s2\",\n }\n )\n if alpha:\n out[\"alpha\"] = compute_alpha(T, S, z=out.p_ref, **nameos)\n out[\"alpha\"].attrs.update(\n {\n \"standard_name\": \"potential_thermal_expansion\",\n \"long_name\": \"potential thermal expansion\",\n \"units\": \"1/degC\",\n }\n )\n if beta:\n out[\"beta\"] = compute_beta(T, S, z=out.p_ref, **nameos)\n out[\"beta\"].attrs.update(\n {\n \"standard_name\": \"potential_haline_contraction\",\n \"long_name\": \"potential haline contraction\",\n \"units\": \"1/(g/kg)\",\n }\n )\n if sigma:\n out[\"sigma\"] = compute_sigma(T, S, z=out.p_ref, **nameos)\n out[\"sigma\"].attrs.update(\n {\n \"standard_name\": \"potential_density_anomaly\",\n \"long_name\": \"potential density anomaly\",\n \"units\": \"kg/m3\",\n }\n )\n return out",
"def LATCH_create(bytes=None, rotationInvariance=None, half_ssd_size=None, sigma=None): # real signature unknown; restored from __doc__\n pass",
"def __init__(__self__, *,\n chain_of_custody_sas_key: str,\n contact_details: 'outputs.ContactDetailsResponse',\n copy_log_details: Sequence[Any],\n copy_progress: Sequence['outputs.CopyProgressResponse'],\n delivery_package: 'outputs.PackageShippingDetailsResponse',\n destination_account_details: Sequence[Any],\n error_details: Sequence['outputs.JobErrorDetailsResponse'],\n job_details_type: str,\n job_stages: Sequence['outputs.JobStagesResponse'],\n return_package: 'outputs.PackageShippingDetailsResponse',\n reverse_shipment_label_sas_key: str,\n shipping_address: 'outputs.ShippingAddressResponse',\n device_password: Optional[str] = None,\n expected_data_size_in_tera_bytes: Optional[int] = None,\n preferences: Optional['outputs.PreferencesResponse'] = None):\n pulumi.set(__self__, \"chain_of_custody_sas_key\", chain_of_custody_sas_key)\n pulumi.set(__self__, \"contact_details\", contact_details)\n pulumi.set(__self__, \"copy_log_details\", copy_log_details)\n pulumi.set(__self__, \"copy_progress\", copy_progress)\n pulumi.set(__self__, \"delivery_package\", delivery_package)\n pulumi.set(__self__, \"destination_account_details\", destination_account_details)\n pulumi.set(__self__, \"error_details\", error_details)\n pulumi.set(__self__, \"job_details_type\", 'DataBoxHeavy')\n pulumi.set(__self__, \"job_stages\", job_stages)\n pulumi.set(__self__, \"return_package\", return_package)\n pulumi.set(__self__, \"reverse_shipment_label_sas_key\", reverse_shipment_label_sas_key)\n pulumi.set(__self__, \"shipping_address\", shipping_address)\n if device_password is not None:\n pulumi.set(__self__, \"device_password\", device_password)\n if expected_data_size_in_tera_bytes is not None:\n pulumi.set(__self__, \"expected_data_size_in_tera_bytes\", expected_data_size_in_tera_bytes)\n if preferences is not None:\n pulumi.set(__self__, \"preferences\", preferences)",
"def fusion_api_patch_sas_logical_jbods(self, body, uri, api=None, headers=None):\n return self.sas_logical_jbods.patch(body=body, uri=uri, api=api, headers=headers)",
"def skel_model(action, install_path_mp, install_path_zfs, jname):\n # init vars\n # mp - mount point, zfs - zfs point\n skel_path_mp = '%s-SKELETON' % install_path_mp\n skel_path_zfs = '%s-SKELETON' % install_path_zfs\n rw_path_mp = '%s-RW' % install_path_mp\n rw_path_zfs = '%s-RW' % install_path_zfs\n \n if action == 'init':\n# create SKELETON MODEL\n# http://www.freebsd.org/doc/en_US.ISO8859-1/books/handbook/jails-application.html\n log(\" INFO: Init BASE-SKELETON zfs START\")\n# Create a skeleton for the read-write portion of the system\n os.system('zfs create %s' % skel_path_zfs)\n os.system('zfs set mountpoint=%s %s' % (skel_path_mp, skel_path_zfs))\n os.system('zfs create %s' % rw_path_zfs)\n os.system('zfs set mountpoint=%s %s' % (rw_path_mp, rw_path_zfs))\n\n os.system('mkdir -p %s/home %s/usr-X11R6 %s/distfiles %s/usr-share-keys/pkg' % (skel_path_mp, skel_path_mp, skel_path_mp, skel_path_mp))\n os.system('mv %s/etc %s' % (install_path_mp, skel_path_mp ))\n os.system('mv %s/usr/local %s/usr-local' % (install_path_mp, skel_path_mp ))\n os.system('mv %s/tmp %s' % (install_path_mp, skel_path_mp ))\n os.system('mv %s/var %s' % (install_path_mp, skel_path_mp ))\n os.system('mv %s/root %s' % (install_path_mp, skel_path_mp ))\n# mergemaster to install missing configuration files. Then, remove the the extra directories that mergemaster creates:\n# os.system('mergemaster -t %s/var/tmp/temproot -D %s -i' % (skel_path, skel_path))\n# os.system('rm -R %(key)s/bin %(key)s/boot %(key)s/lib %(key)s/libexec %(key)s/mnt %(key)s/proc %(key)s/rescue %(key)s/sbin %(key)s/sys %(key)s/usr %(key)s/dev' % {'key': skel_path})\n# Now, symlink the read-write file system to the read-only file system. Ensure that the symlinks are created in the correct s/ locations as the creation of directories in the wrong locations will cause the installation to fail.\n os.chdir('%s' % install_path_mp)\n os.system('mkdir SROOT')\n os.system('ln -s SROOT/etc etc')\n os.system('ln -s SROOT/home home')\n os.system('ln -s SROOT/root root')\n os.system('ln -s /SROOT/usr-local usr/local')\n os.system('ln -s /SROOT/usr-share-keys usr/share/keys')\n os.system('ln -s /SROOT/usr-X11R6 usr/X11R6')\n os.system('ln -s /SROOT/distfiles usr/ports/distfiles')\n os.system('ln -s SROOT/tmp tmp')\n os.system('ln -s SROOT/var var')\n# Create a generic /home/j/skel/etc/make.conf containing this line\n os.system('echo \\\"WRKDIRPREFIX?= /SROOT/portbuild\\\" > %s/etc/make.conf' % skel_path_mp )\n# Create zfs BASE-SKELETON snapshot which will be used for installation \n os.system('zfs snapshot %s@install' % skel_path_zfs)\n log(\" INFO: Init BASE-SKELETON zfs FINISH\")\n \n# install SKELETON jail \n if action == 'install':\n# install RW fs for jail\n os.system('zfs send %s/BASE-SKELETON@install | zfs receive -F %s/BASE-RW/%s' % (jzfs, jzfs, jname))\n# remove receive snapshot \n os.system('zfs destroy %s/BASE-RW/%s@install' % (jzfs, jname))\n# create jail local config - mount skel model for jail hosme dir\n if jname == 'BASE-update':\n os.system('echo \\\"%sBASE %s%s nullfs rw 0 0\\\" > %sBASE-RW/%s/etc/fstab' % (jpath, jpath, jname, jpath, jname))\n else:\n os.system('echo \\\"%sBASE %s%s nullfs ro 0 0\\\" > %sBASE-RW/%s/etc/fstab' % (jpath, jpath, jname, jpath, jname))\n \n os.system('echo \\\"%sBASE-RW/%s %s%s/SROOT nullfs rw 0 0\\\" >> %sBASE-RW/%s/etc/fstab' % (jpath, jname, jpath, jname, jpath, jname))\n temp_add_cfg = ['### BASE mount settings ###', 'mount.fstab=\"%sBASE-RW/%s/etc/fstab\";' % (jpath, jname), 'mount.devfs;']\n return 
temp_add_cfg",
"def init_analysis(session, args):\r\n path = os.path.join(session.abs_path,'{}_{}_{}_{}'.format(args[12],args[13],args[14],args[15]))\r\n session.case = Case(path, session.method)\r\n case = session.case\r\n if args[0].split('#')[0]=='R':\r\n args[7]= -args[7]\r\n\r\n case.file_U.set_field('internalField', 'uniform ({} {} 0)'.format(args[6], args[7]))\r\n\r\n case.file_U.set_field('boundaryField',\r\n {'inlet': {'type': 'freestream',\r\n 'freestreamValue': 'uniform ({} {} 0)'.format(args[6], args[7])},\r\n 'outlet': {'type': 'zeroGradient'},\r\n 'intrados': {'type': 'fixedValue', 'value':'uniform (0 0 0)'},\r\n 'extrados': {'type': 'fixedValue', 'value':'uniform (0 0 0)'},\r\n 'top_down': {'type': 'empty'},\r\n 'cyclic_in_1': {'type': 'cyclic'},\r\n 'cyclic_in_2': {'type': 'cyclic'},\r\n 'cyclic_out_1': {'type': 'cyclic'},\r\n 'cyclic_out_2': {'type': 'cyclic'}})\r\n\r\n case.file_p.set_field('internalField', 'uniform {}'.format(args[10]))\r\n case.file_p.set_field('boundaryField',\r\n {'inlet': {'type': 'freestreamPressure'},\r\n 'outlet': {'type': 'zeroGradient'},\r\n 'intrados': {'type': 'zeroGradient'},\r\n 'extrados': {'type': 'zeroGradient'},\r\n 'top_down': {'type': 'empty'},\r\n 'cyclic_in_1': {'type': 'cyclic'},\r\n 'cyclic_in_2': {'type': 'cyclic'},\r\n 'cyclic_out_1': {'type': 'cyclic'},\r\n 'cyclic_out_2': {'type': 'cyclic'}})\r\n\r\n session.case.file_T.set_field('internalField', 'uniform {}'.format(args[11]))\r\n session.case.file_T.set_field('boundaryField',\r\n {'inlet': {'type': 'fixedValue', 'value': 'uniform {}'.format(args[11])},\r\n 'outlet': {'type': 'zeroGradient'},\r\n 'intrados': {'type': 'slip'},\r\n 'extrados': {'type': 'slip'},\r\n 'top_down': {'type': 'empty'},\r\n 'cyclic_in_1': {'type': 'cyclic'},\r\n 'cyclic_in_2': {'type': 'cyclic'},\r\n 'cyclic_out_1': {'type': 'cyclic'},\r\n 'cyclic_out_2': {'type': 'cyclic'}})\r\n \"\"\"\r\n\r\n session.case.file_U.set_field('internalField', 'uniform ({} {} 0)'.format(args[6], args[7]))\r\n session.case.file_U.set_field('boundaryField',\r\n {'inlet': {'type': 'fixedValue',\r\n 'value': 'uniform ({} {} 0)'.format(args[6], args[7])},\r\n 'outlet': {'type': 'inletOutlet','inletValue':'uniform ({} {} 0)'.format(args[6], args[7]),\r\n 'value':'uniform ({} {} 0)'.format(args[6], args[7])},\r\n 'intrados': {'type': 'noSlip'},\r\n 'extrados': {'type': 'noSlip'},\r\n 'top_down': {'type': 'empty'},\r\n 'cyclic_in_1': {'type': 'cyclic'},\r\n 'cyclic_in_2': {'type': 'cyclic'},\r\n 'cyclic_out_1': {'type': 'cyclic'},\r\n 'cyclic_out_2': {'type': 'cyclic'}})\r\n\r\n session.case.file_p.set_field('internalField', 'uniform {}'.format(args[10]))\r\n session.case.file_p.set_field('boundaryField',\r\n {'inlet': {'type': 'fixedValue', 'value': 'uniform {}'.format(args[10])},\r\n 'outlet': {'type': 'zeroGradient'},\r\n 'intrados': {'type': 'zeroGradient'},\r\n 'extrados': {'type': 'zeroGradient'},\r\n 'top_down': {'type': 'empty'},\r\n 'cyclic_in_1': {'type': 'cyclic'},\r\n 'cyclic_in_2': {'type': 'cyclic'},\r\n 'cyclic_out_1': {'type': 'cyclic'},\r\n 'cyclic_out_2': {'type': 'cyclic'}})\r\n\r\n session.case.file_T.set_field('internalField', 'uniform {}'.format(args[11]))\r\n session.case.file_T.set_field('boundaryField',\r\n {'inlet': {'type': 'fixedValue','value':'uniform {}'.format(args[11])},\r\n 'outlet': {'type': 'inletOutlet','inletValue':'uniform {}'.format(args[11]),'value':'uniform {}'.format(args[11])},\r\n 'intrados': {'type': 'zeroGradient'},\r\n 'extrados': {'type': 'zeroGradient'},\r\n 'top_down': {'type': 'empty'},\r\n 
'cyclic_in_1': {'type': 'cyclic'},\r\n 'cyclic_in_2': {'type': 'cyclic'},\r\n 'cyclic_out_1': {'type': 'cyclic'},\r\n 'cyclic_out_2': {'type': 'cyclic'}})\r\n \"\"\"\r\n \"\"\"\r\n session.case.file_U.set_field('internalField', 'uniform ({} {} 0)'.format(args[7], args[6]))\r\n session.case.file_U.set_field('boundaryField',\r\n {'inlet': {'type': 'fixedValue',\r\n 'value': 'uniform ({} {} 0)'.format(args[7], args[6])},\r\n 'outlet': {'type': 'zeroGradient'},\r\n 'intrados': {'type': 'noSlip'},\r\n 'extrados': {'type': 'noSlip'},\r\n 'top_down': {'type': 'empty'},\r\n 'cyclic_in_1': {'type': 'cyclic'},\r\n 'cyclic_in_2': {'type': 'cyclic'},\r\n 'cyclic_out_1': {'type': 'cyclic'},\r\n 'cyclic_out_2': {'type': 'cyclic'}})\r\n\r\n session.case.file_p.set_field('internalField', 'uniform {}'.format(args[10]))\r\n session.case.file_p.set_field('boundaryField',\r\n {'inlet': {'type': 'fixedValue', 'value': 'uniform {}'.format(args[10])},\r\n 'outlet': {'type': 'zeroGradient'},\r\n 'intrados': {'type': 'zeroGradient'},\r\n 'extrados': {'type': 'zeroGradient'},\r\n 'top_down': {'type': 'empty'},\r\n 'cyclic_in_1': {'type': 'cyclic'},\r\n 'cyclic_in_2': {'type': 'cyclic'},\r\n 'cyclic_out_1': {'type': 'cyclic'},\r\n 'cyclic_out_2': {'type': 'cyclic'}})\r\n\r\n session.case.file_T.set_field('internalField', 'uniform {}'.format(args[11]))\r\n session.case.file_T.set_field('boundaryField',\r\n {'inlet': {'type': 'zeroGradient'},\r\n 'outlet': {'type': 'zeroGradient'},\r\n 'intrados': {'type': 'zeroGradient'},\r\n 'extrados': {'type': 'zeroGradient'},\r\n 'top_down': {'type': 'empty'},\r\n 'cyclic_in_1': {'type': 'cyclic'},\r\n 'cyclic_in_2': {'type': 'cyclic'},\r\n 'cyclic_out_1': {'type': 'cyclic'},\r\n 'cyclic_out_2': {'type': 'cyclic'}})\r\n \"\"\"\r\n \"\"\"\r\n session.case.file_U.set_field('internalField', 'uniform ({} {} 0)'.format(args[5], args[6]))\r\n session.case.file_U.set_field('boundaryField',\r\n {'inlet': {'type': 'fixedValue', 'value': 'uniform ({} {} 0)'.format(args[5], args[6])},\r\n 'outlet': {'type': 'zeroGradient'},\r\n 'intrados': {'type': 'noSlip'},\r\n 'extrados': {'type': 'noSlip'},\r\n 'top_down': {'type': 'empty'},\r\n 'cyclic_in_1': {'type': 'cyclic'},\r\n 'cyclic_in_2': {'type': 'cyclic'},\r\n 'cyclic_out_1': {'type': 'cyclic'},\r\n 'cyclic_out_2': {'type': 'cyclic'}})\r\n\r\n session.case.file_p.set_field('boundaryField', {'inlet': {'type': 'fixedValue','value':'uniform {}'.format(args[9])},\r\n 'outlet': {'type': 'zeroGradient'},\r\n 'intrados': {'type': 'zeroGradient'},\r\n 'extrados': {'type': 'zeroGradient'},\r\n 'top_down': {'type': 'empty'},\r\n 'cyclic_in_1': {'type': 'cyclic'},\r\n 'cyclic_in_2': {'type': 'cyclic'},\r\n 'cyclic_out_1': {'type': 'cyclic'},\r\n 'cyclic_out_2': {'type': 'cyclic'}})\r\n session.case.file_p.set_field('internalField', 'uniform {}'.format(args[9]))\r\n\r\n session.case.file_T.set_field('boundaryField', {'inlet': {'type': 'fixedValue', 'value': 'uniform {}'.format(args[10])},\r\n 'outlet': {'type': 'zeroGradient'},\r\n 'intrados': {'type': 'zeroGradient'},\r\n 'extrados': {'type': 'zeroGradient'},\r\n 'top_down': {'type': 'empty'},\r\n 'cyclic_in_1': {'type': 'cyclic'},\r\n 'cyclic_in_2': {'type': 'cyclic'},\r\n 'cyclic_out_1': {'type': 'cyclic'},\r\n 'cyclic_out_2': {'type': 'cyclic'}})\r\n session.case.file_T.set_field('internalField','uniform 300')\r\n\r\n session.case.file_nut.set_field('boundaryField', {'inlet':{'type':'calculated', 'value':'uniform 0'},\r\n 'outlet':{'type':'calculated', 'value':'uniform 0'},\r\n 'intrados': {'type': 
'nutkWallFunction', 'Cmu':'0.09', 'kappa':'0.41', 'E':'9.8', 'value':'uniform 0'},\r\n 'extrados': {'type': 'nutkWallFunction', 'Cmu':'0.09', 'kappa':'0.41', 'E':'9.8', 'value':'uniform 0'},\r\n 'top_down': {'type': 'empty'},\r\n 'cyclic_in_1': {'type': 'cyclic'},\r\n 'cyclic_in_2': {'type': 'cyclic'},\r\n 'cyclic_out_1': {'type': 'cyclic'},\r\n 'cyclic_out_2': {'type': 'cyclic'}})\r\n\r\n session.case.file_k.set_field('internalField', 'uniform 1')\r\n session.case.file_k.set_field('boundaryField', {\r\n 'inlet': {'type': 'turbulentIntensityKineticEnergyInlet', 'intensity': '0.05', 'value': 'uniform 1'},\r\n 'outlet': {'type': 'inletOutlet', 'inletValue': 'uniform 1', 'value': 'uniform 1'},\r\n 'intrados': {'type': 'kqRWallFunction','value':'uniform 1'},\r\n 'extrados': {'type': 'kqRWallFunction','value':'uniform 1'},\r\n 'top_down': {'type': 'empty'},\r\n 'cyclic_in_1': {'type': 'cyclic'},\r\n 'cyclic_in_2': {'type': 'cyclic'},\r\n 'cyclic_out_1': {'type': 'cyclic'},\r\n 'cyclic_out_2': {'type': 'cyclic'}})\r\n\r\n session.case.file_epsilon.set_field('boundaryField', {'inlet': {'type': 'turbulentMixingLengthDissipationRateInlet', 'mixingLength': '0.005', 'value': 'uniform 200'},\r\n 'outlet': {'type': 'inletOutlet', 'inletValue': 'uniform 200', 'value': 'uniform 200'},\r\n 'intrados': {'type': 'epsilonWallFunction', 'Cmu':'0.09', 'kappa':'0.41', 'E':'9.8', 'value':'uniform 200'},\r\n 'extrados': {'type': 'epsilonWallFunction', 'Cmu':'0.09', 'kappa':'0.41', 'E':'9.8', 'value':'uniform 200'},\r\n 'top_down': {'type': 'empty'},\r\n 'cyclic_in_1': {'type': 'cyclic'},\r\n 'cyclic_in_2': {'type': 'cyclic'},\r\n 'cyclic_out_1': {'type': 'cyclic'},\r\n 'cyclic_out_2': {'type': 'cyclic'}})\r\n session.case.file_epsilon.set_field('internalField', 'uniform 200')\r\n\r\n session.case.file_alphat.set_field('boundaryField', {'inlet':{'type':'calculated', 'value':'uniform 0'},\r\n 'outlet':{'type':'calculated', 'value':'uniform 0'},\r\n 'intrados': {'type': 'compressible::alphatWallFunction', 'Prt':'0.85', 'value':'uniform 0'},\r\n 'extrados': {'type': 'compressible::alphatWallFunction', 'Prt':'0.85', 'value':'uniform 0'},\r\n 'top_down': {'type': 'empty'},\r\n 'cyclic_in_1': {'type': 'cyclic'},\r\n 'cyclic_in_2': {'type': 'cyclic'},\r\n 'cyclic_out_1': {'type': 'cyclic'},\r\n 'cyclic_out_2': {'type': 'cyclic'}})\r\n \"\"\"\r\n session.case.file_controlDict.set_field('endTime', '10000')\r\n session.case.file_controlDict.set_field('startFrom', 'latestTime')\r\n session.case.file_controlDict.set_field('functions', {\"#includeFunc\":\"MachNo\"})\r\n session.case.file_turbulenceProperties.set_field('simulationType', 'laminar')\r\n session.case.interacting(100)\r\n sim = session.case.simulation(\"open40\") # Build files\r\n sim.limit_write = 50\r\n sim.block_mesh(string=write_block_mesh(args[1], args[2], args[3], args[4], args[5], session.mesh))\r\n sim.check_mesh()\r\n\r\n result_dict={\"T\": 0, \"p\":0, \"Theta\":0, \"z\":0, \"profile\":args[14]}\r\n\r\n def _function(container, args):\r\n current_time = container['current_time']\r\n if float(current_time)>=0.000015:\r\n print('Parsing results')\r\n sim.foamToVTK()\r\n results = sim.get_last_results('outlet')\r\n result_U = results.GetCellData('U')\r\n result_p = results.GetCellData('p')\r\n result_T = results.GetCellData('T')\r\n theta = 0.0\r\n z = 0.0\r\n p=0.0\r\n t=0.0\r\n U_length = len(result_U)\r\n p_length = len(result_p)\r\n t_length = len(result_T)\r\n for i,j,k in zip(result_p, result_T, result_U):\r\n p+= float(i[0])/p_length\r\n t+= 
float(j[0])/t_length\r\n theta += float(k[1])/U_length\r\n z += float(k[0])/U_length\r\n\r\n args[\"T\"] = t\r\n args[\"p\"] = p\r\n args[\"Theta\"] = theta\r\n args[\"z\"] = z\r\n return True\r\n return False\r\n \r\n #sim.run(_function, result_dict)\r\n #result_dict = {'T': 195.38959999999997, 'z': 429.3120571428572, 'p': 74001.90285714286, 'Theta': -207.19442857142855, 'profile': 0}\r\n\r\n print('Sending results')\r\n if args[0].split('#')[0]=='R':\r\n result_dict['Theta']= -result_dict['Theta']\r\n\r\n session.socket_design.send({'new_data':result_dict})",
"def __init__(__self__, *,\n chain_of_custody_sas_key: str,\n contact_details: 'outputs.ContactDetailsResponse',\n copy_log_details: Sequence[Any],\n copy_progress: Sequence['outputs.DataBoxDiskCopyProgressResponse'],\n delivery_package: 'outputs.PackageShippingDetailsResponse',\n destination_account_details: Sequence[Any],\n disks_and_size_details: Mapping[str, int],\n error_details: Sequence['outputs.JobErrorDetailsResponse'],\n job_details_type: str,\n job_stages: Sequence['outputs.JobStagesResponse'],\n return_package: 'outputs.PackageShippingDetailsResponse',\n reverse_shipment_label_sas_key: str,\n shipping_address: 'outputs.ShippingAddressResponse',\n expected_data_size_in_tera_bytes: Optional[int] = None,\n passkey: Optional[str] = None,\n preferences: Optional['outputs.PreferencesResponse'] = None,\n preferred_disks: Optional[Mapping[str, int]] = None):\n pulumi.set(__self__, \"chain_of_custody_sas_key\", chain_of_custody_sas_key)\n pulumi.set(__self__, \"contact_details\", contact_details)\n pulumi.set(__self__, \"copy_log_details\", copy_log_details)\n pulumi.set(__self__, \"copy_progress\", copy_progress)\n pulumi.set(__self__, \"delivery_package\", delivery_package)\n pulumi.set(__self__, \"destination_account_details\", destination_account_details)\n pulumi.set(__self__, \"disks_and_size_details\", disks_and_size_details)\n pulumi.set(__self__, \"error_details\", error_details)\n pulumi.set(__self__, \"job_details_type\", 'DataBoxDisk')\n pulumi.set(__self__, \"job_stages\", job_stages)\n pulumi.set(__self__, \"return_package\", return_package)\n pulumi.set(__self__, \"reverse_shipment_label_sas_key\", reverse_shipment_label_sas_key)\n pulumi.set(__self__, \"shipping_address\", shipping_address)\n if expected_data_size_in_tera_bytes is not None:\n pulumi.set(__self__, \"expected_data_size_in_tera_bytes\", expected_data_size_in_tera_bytes)\n if passkey is not None:\n pulumi.set(__self__, \"passkey\", passkey)\n if preferences is not None:\n pulumi.set(__self__, \"preferences\", preferences)\n if preferred_disks is not None:\n pulumi.set(__self__, \"preferred_disks\", preferred_disks)",
"def main(args):\n\n\t##############################################################################\n\t######## Pass user command line arguments to setup.py which will #############\n\t############# initialise some parameters for the analysis ###################\n\t##############################################################################\n\tinit_ = setup.initialise_user_input(args)\n\n\t##############################################################################\n\t######## Define system_ which is the object, of class nanoCISC, ##############\n\t######## which contains all relevant information about your nanoparticle ####\n\t##############################################################################\n\tsystem_ = nano_cisc.nanoCISC(init_.nano_particle, init_.anchors, init_.beta, init_.calcrange, \n init_.curves, init_.targetinc, init_.density) \n\t# initialise system_ as nanoCISC class here ^^^\n\n\t# If density is being calculated, define grid from grid class\n\tif args['density']:\n\t\tgrid=grids.grid(system_)\n\n\n\t##############################################################################\n\t################ Process trajectory, frame by frame ##########################\n\t##############################################################################\n\n\tfor ts in init_.u.trajectory: # loop through trajectory frames here \n\t\tprint \"Processing snapshot %d \" % (ts.frame)\n\n\t\t# Array for calculating intrinsic density is initialised to {0}\n\t\tintrinsic_count=np.zeros( ( np.ceil( 3 * system_.calculation_range).astype(np.int) ,len(system_.density) ), dtype = np.float32) \n\n\t\t# Array that stores the instantaneous volume of each spatial interval is initialised to {0}\n\t\tvolume_at_dist=np.zeros( ( np.ceil( 3 * system_.calculation_range).astype(np.int) ,len(system_.density) ), dtype = np.float32) \n\n\t\t# Centre of mass position is updated\n\t\tsystem_.update_com()\n\n\t\t# Vectors describing the anchor points are updated \n\t\tsystem_.update_anchors() \n\n\t\t# Nanoparticle depth values are updated\n\t\tsystem_.update_surface() \t\n\n\t\tif args['XYZsurface']:\n\t\t\tsystem_.write_surface(init_.f_visualise_surface) # write micelle surface to xyz file\n \n \t\tif args['density']: \n \t\t\tgrid.update_volume_estimate(volume_at_dist, system_) # volume estimate is updated for snapshot\n\t\t\tsystem_.calculate_density(intrinsic_count, volume_at_dist) # calculate density here\n\n\t\tsystem_.frames_processed += 1\n\n\t##################################\n\t##### Print results to files #####\n\t##################################\n\tif args['density']:\n\t\tsystem_.print_intrinsic_density(init_.f_intrinsic_density_out)\n\t\tsystem_.print_radial_density()\n\n\n\tprint \"Program finished successfully!!!\\n\"",
"def get_sasa(topology, trajectory, dssp_loc=master_dssp_location,skip=None):\n\n\tdssp_loc = dssp_loc\n\tDSSP={'A':{}}\n\tuniverse = MDAnalysis.Universe(topology, trajectory)\n\n\t#set the chain name here. this will only work for MDAnalysis 0.16\n\tchain_name=universe.add_Segment(segid='A')\n\tuniverse.residues[...].segments=chain_name\n\n\tprotein=universe.select_atoms(\"protein\")\n\tdiff_res=[]\n\t#this attempt to identify chain breaks will only work if the resids\n\t#... in the chains are not numbered consecutively\n\tfor i in range(len(protein.resnums)):\n\t\tif protein.resnums[i]-protein.resnums[i-1]<0 and i!=0:\n\t\t\tdiff_res.append(i)\n\tif len(diff_res)>=1:\n\t\tchain_sep=diff_res.pop(0)\n\t\tchain_end=len(protein.resnums)\n\t\tbchain=protein[chain_sep:chain_end]\n\t\tbchain.set_segids('B')\n\t\tDSSP['B']={}\n\n\tfor ts in universe.trajectory:\n\t\tif skip:\n\t\t\tuniverse.trajectory.skip=skip\n\t\tsys.stdout.flush()\n\t\tsys.stdout.write('\\rsasa [step {0}] '.format(\n\t\t\tuniverse.trajectory.frame))\n\t\twriter=MDAnalysis.Writer(\"tmp.pdb\")\n\t\twriter.write(protein)\n\t\twriter.close()\n\t\tparser=bp.PDBParser()\n\t\tstructure=parser.get_structure('tmp','tmp.pdb')\n\t\tdssp=bp.DSSP(structure[0],'tmp.pdb',dssp_loc)\n\t\tfor key in dssp.keys():\n\t\t\tif 0:\n\t\t\t\tresobj=dssp[key][0]\n\t\t\t\tresname=dssp[key][0].resname\n\t\t\t\tresidx=resobj.id[1]\n\t\t\t\tchain=key[0]\n\t\t\t\tsecondary_structure=resobj.xtra['SS_DSSP']\n\t\t\t\trel_sasa=resobj.xtra['EXP_DSSP_RASA']\n\t\t\t\tabs_sasa=resobj.xtra['EXP_DSSP_ASA']\n\t\t\t\tphi=resobj.xtra['PHI_DSSP']\n\t\t\t\tpsi=resobj.xtra['PSI_DSSP']\n\t\t\tresobj=dssp[key]\n\t\t\tresname=residue_codes_reverse[resobj[1]]\n\t\t\tresidx=key[1][1]\n\t\t\tchain=key[0]\n\t\t\tsecondary_structure=resobj[2]\n\t\t\trel_sasa=resobj[3]\n\t\t\tabs_sasa=resobj[3]*dssp.residue_max_acc[resname]\n\t\t\tphi=resobj[4]\n\t\t\tpsi=resobj[5]\n\t\t\tif residx in DSSP[chain] and DSSP[chain][residx]['resname']==resname:\n\t\t\t\tDSSP[chain][residx]['dssp'].append(secondary_structure)\n\t\t\t\tDSSP[chain][residx]['rel_sasa'].append(rel_sasa)\n\t\t\t\tDSSP[chain][residx]['abs_sasa'].append(abs_sasa)\n\t\t\t\tDSSP[chain][residx]['phi'].append(phi)\n\t\t\t\tDSSP[chain][residx]['psi'].append(psi)\n\t\t\t\tDSSP[chain][residx]['time'].append(ts.time)\n\t\t\telse:\n\t\t\t\tDSSP[chain][residx]={'dssp':[secondary_structure],'phi':[phi],'time':[ts.time],\n\t\t\t\t\t\t\t\t\t 'psi':[psi],'rel_sasa':[rel_sasa],'chain':chain,\n\t\t\t\t\t\t\t\t\t 'abs_sasa':[abs_sasa],'resname':resname}\n\treturn DSSP",
"def ex1_create(alpha,beta,pace,delta):\n\t\n\tfilename = seed+\"/ex_sim_a\"+str(alpha)+\"_p\"+str(pace)+\"_d\"+str(delta)+\".tmp\"\n\t\n\t# generate the K random walks\n\tfor _ in range(K):\n\t\tavancement(_,K)\n\t\tcall(filename,alpha,beta,'all',pace,delta)",
"def runStudy(catName,energyStr,truePdfName,dataFileNames,sigMasses):\n\n dataTree = root.TChain()\n for i in dataFileNames:\n dataTree.Add(i+\"/outtree\"+catName)\n dataTree.SetCacheSize(10000000);\n dataTree.AddBranchToCache(\"*\");\n\n truePdfFunc = None\n if truePdfName == \"Bernstein\" or truePdfName == \"Chebychev\" or truePdfName == \"Polynomial\" or truePdfName == \"SumExp\" or truePdfName == \"SumPow\" or truePdfName == \"Laurent\" or truePdfName == \"ExpTimesBernstein\" or truePdfName == \"ExpTimesChebychev\" or truePdfName == \"ExpTimesPolynomial\":\n truePdfFunc = getattr(fitOrderChooser,\"makePDFBak\"+truePdfName)\n else:\n truePdfFunc = getattr(makeCards,\"makePDFBak\"+truePdfName)\n\n dimuonMass = root.RooRealVar(\"dimuonMass\",\"m [GeV/c^{2}]\",110.,160.)\n dimuonMass.setBins(50)\n dimuonMass.setRange(\"exprange\",120,160)\n dimuonMass.setRange(\"whole\",110,160)\n dimuonMass.setRange(\"low\",110,120) # Silly ranges for old fit functionality\n dimuonMass.setRange(\"high\",130,160)\n dimuonMass.setRange(\"signal\",120,130)\n dimuonMass.setRange(\"signalfit\",110,140)\n dimuonMass.setRange(\"annaRegion\",123.5,127.5)\n dimuonMassArgSet = root.RooArgSet(dimuonMass)\n wTrue = root.RooWorkspace(\"wTrue\")\n wTrueImport = getattr(wTrue,\"import\")\n\n canvas = root.TCanvas(\"canvas\"+catName+energyStr+truePdfName)\n tlatex = root.TLatex()\n tlatex.SetNDC()\n tlatex.SetTextFont(root.gStyle.GetLabelFont())\n tlatex.SetTextSize(0.04)\n\n # Hack to Make makePDFBakOld work\n minMassZ = 88.\n maxMassZ = 94.\n dimuonMassZ = root.RooRealVar(\"dimuonMass\",\"dimuonMass\",minMassZ,maxMassZ)\n\n ### Load data\n \n realData = root.RooDataSet(\"realData\"+catName+energyStr,\n \"realData\"+catName+energyStr,\n dataTree,root.RooArgSet(dimuonMass)\n )\n realDataHist = realData.binnedClone(\"realDataHist\"+catName+energyStr)\n nData = realData.sumEntries()\n realDataZ = root.RooDataSet(\"realDataZ\"+catName+energyStr,\n \"realDataZ\"+catName+energyStr,\n dataTree,root.RooArgSet(dimuonMassZ)\n )\n\n ### Make Bak Pdfs\n\n trashParamList, trashBakNormTup, trashDebug, trueOrder = truePdfFunc(truePdfName+catName+energyStr,realData,dimuonMass,110,160,wTrueImport,dimuonMassZ,realDataZ)\n truePdf = wTrue.pdf(\"bak\")\n truePdf.SetName(truePdfName)\n truePdf.SetTitle(\"True PDF \")\n\n nDataVar = root.RooFit.RooConst(nData)\n nBakVar = root.RooRealVar(\"nBak\",\"N_{B}\",nData/2.,nData*2)\n truePdfE = root.RooExtendPdf(truePdfName+\"E\",\"True PDF Extended\",truePdf,nBakVar)\n\n # Make sure Voigt params are set to True vals and constant\n if truePdfName == \"Old\":\n for xTrue in rooArgSet2List(truePdf.getParameters(realData)):\n if not (\"voit\" in xTrue.GetName()):\n continue\n for xToy in rooArgSet2List(trueToyPdf.getParameters(realData)):\n trueMatch = re.match(r\".*(_voit.*)\",xTrue.GetName()) \n toyMatch = re.match(r\".*(_voit.*)\",xToy.GetName()) \n assert(trueMatch)\n if not toyMatch:\n continue\n trueBaseName = trueMatch.group(1)\n toyBaseName = toyMatch.group(1)\n if not ( trueBaseName == toyBaseName ):\n continue\n xToy.setVal(xTrue.getVal())\n xTrue.setConstant(True)\n xToy.setConstant(True)\n\n ### Now load Signal PDFs\n nSigVarBounds = nData/2.\n nSigVar = root.RooRealVar(\"nSig\",\"N_{S}\",-nSigVarBounds,nSigVarBounds)\n sigPdfs = []\n sigPdfEs = []\n wSigs = []\n for hmass in sigMasses:\n wSig = root.RooWorkspace(\"signal\"+catName+energyStr+str(hmass))\n makeCards.makePDFSigNew(catName+energyStr,\"sig_ggH\",dimuonMass,float(hmass),\n getattr(wSig,\"import\")\n )\n sigPdf = 
wSig.pdf(\"ggH\")\n sigPdf.SetName(\"sigPDF_\"+str(hmass)+\"_\"+catName+energyStr)\n sigPdfs.append(sigPdf)\n wSigs.append(wSig)\n sigPdfE = root.RooExtendPdf(sigPdf.GetName()+\"E\",sigPdf.GetTitle()+\" Extended\",sigPdf,nSigVar)\n sigPdfEs.append(sigPdfE)\n\n ## Load the 1*SM N signal events\n nSigSMs = []\n for hmass in sigMasses:\n nSigSMs.append(getSMSigCounts(catName,hmass,energy=energyStr))\n\n result = {}\n\n ### Do S+B Fits\n for hmass,sigPdf,sigPdfE,nSigSM in zip(sigMasses,sigPdfs,sigPdfEs,nSigSMs):\n truePdfPlusSigPdf = root.RooAddPdf(\"truePdfPlusSigPdf\"+catName+energyStr,\"\",root.RooArgList(truePdfE,sigPdfE))\n fr = truePdfPlusSigPdf.fitTo(realData,\n PRINTLEVEL,\n root.RooFit.Save(1)\n )\n #frPars = fr.floatParsFinal()\n #for i in range(frPars.getSize()):\n # frPars[i].Print()\n #nSigVar.Print()\n\n result[hmass] = nSigVar.getError()\n #result[hmass] = nSigVar.getError()/nSigSM\n\n # Debug plot for fit to data\n frame = dimuonMass.frame()\n chi2RealDataVar = truePdfPlusSigPdf.createChi2(realDataHist)\n ndfRealData = dimuonMass.getBins() - 1 # b/c roofit normalizes\n ndfRealData -= rooPdfNFreeParams(truePdfPlusSigPdf,realDataHist)\n realData.plotOn(frame)\n errVisArg = root.RooFit.VisualizeError(fr,1,True)\n errFillArg = root.RooFit.FillStyle(3001)\n truePdfPlusSigPdf.plotOn(frame,root.RooFit.Range('low,signal,high'),root.RooFit.NormRange('low,signal,high'),errVisArg,errFillArg,root.RooFit.FillColor(root.kGreen-7))\n truePdfPlusSigPdf.plotOn(frame,root.RooFit.Range('low,signal,high'),root.RooFit.NormRange('low,signal,high'),root.RooFit.Components(truePdf.GetName()),root.RooFit.LineStyle(2),root.RooFit.LineColor(root.kRed+1))\n truePdfPlusSigPdf.plotOn(frame,root.RooFit.Range('low,signal,high'),root.RooFit.NormRange('low,signal,high'))\n #truePdfPlusSigPdf.plotOn(frame,root.RooFit.Range('low,signal,high'),root.RooFit.NormRange('low,signal,high'),root.RooFit.Components(sigPdf.GetName()),root.RooFit.LineColor(root.kRed+1))\n \n frame.Draw()\n frame.SetTitle(\"\")\n frame.GetYaxis().SetTitle(\"Events / 1 GeV/c^{2}\")\n tlatex.SetTextAlign(12)\n tlatex.DrawLatex(gStyle.GetPadLeftMargin(),0.96,\"CMS Internal\")\n tlatex.DrawLatex(0.02+gStyle.GetPadLeftMargin(),0.85,\"Ref PDF: \"+truePdfName)\n tlatex.SetTextAlign(32)\n tlatex.DrawLatex(0.99-gStyle.GetPadRightMargin(),0.96,catName+\" \"+energyStr)\n tlatex.DrawLatex(0.97-gStyle.GetPadRightMargin(),0.85,\"Ref. S+B Fit to Real Data\")\n tlatex.DrawLatex(0.97-gStyle.GetPadRightMargin(),0.80,\"Ref. GOF: {0:.2f}\".format(scipy.stats.chi2.sf(chi2RealDataVar.getVal(),ndfRealData)))\n tlatex.DrawLatex(0.97-gStyle.GetPadRightMargin(),0.75,\"Ref. #chi^{{2}}/NDF: {0:.2f}\".format(chi2RealDataVar.getVal()/ndfRealData))\n canvas.SaveAs(\"output/debug_oneSig_RealData_\"+truePdfName+\"_\"+catName+\"_\"+energyStr+\"_\"+str(hmass)+\".png\")\n\n return result",
"def simulation_stage(self, iteration, instance):\n\t\tk = Kernel(name=\"misc.mkfile\")\n\t\tk.arguments = [\"--size=1000\", \"--filename=simulation-{0}-{1}.dat\".format(iteration, instance)]\n\t\treturn k",
"def fusion_api_edit_sas_logical_jbods(self, body, uri, api=None, headers=None):\n return self.sas_logical_jbods.put(body=body, uri=uri, api=api, headers=headers)",
"def createMatlabScript(dHeader):\n\th = parse(dHeader['headerFile'])\n\tL = ['SampleRate','CutoffFreq','Gain','DataQualityMeasure','SensorID','TimeZero','ISSConfiguration']\n\tfor i in L:\n\t\tdHeader[i] = str(h.documentElement.getElementsByTagName(i)[0].childNodes[0].nodeValue)\n\tdHeader['DataType'] = str(h.documentElement.nodeName)\n\tdHeader['BiasCoeff'] = getSubfield(h,'BiasCoeff',['x','y','z'])\n\tdHeader['ScaleFactor'] = getSubfield(h,'ScaleFactor',['x','y','z'])\n\tLcoord = ['x','y','z','r','p','w','name','time','comment']\n\tdHeader['SensorCoordinateSystem'] = getSubfield(h,'SensorCoordinateSystem',Lcoord)\n\tdHeader['DataCoordinateSystem'] = getSubfield(h,'DataCoordinateSystem',Lcoord)\n\tdHeader['GData'] = getSubfield(h,'GData',['format','file'])\n\tdHeader['year'],dHeader['month'] = getYearMonthPath(dHeader['headerFile'])\n\tif not dHeader['Nfft'] and not dHeader['No']:\n\t\tdHeader['Nfft'],dHeader['No'] = getNfftNo(float(dHeader['SampleRate']))\n\telif dHeader['Nfft'] and not dHeader['No']:\n\t\tdHeader['Nfft'] = int(dHeader['Nfft'])\n\t\tdHeader['No'] = 0\n\telif dHeader['Nfft'] and dHeader['No']:\n\t\tdHeader['Nfft'] = int(dHeader['Nfft'])\n\t\tdHeader['No'] = int(dHeader['No'])\n\telse:\n\t raise '\\nweird case with No defined, but not Nfft\\n'\n\n # Create path/name for info m-file\n\tresPath = getoutput('echo ${RESULTSPATH}')\t\t\t\t\n if dHeader.has_key('actualStart'):\n actualStart = float(dHeader['actualStart'])\n else:\n actualStart = stringTimeToUnix(dHeader['dateStart'])\n st = split(unixTimeToString(actualStart),'_')[:3]\n resPath += '/year' + st[0] + '/month' + st[1] + '/day' + st[2] + '/' + dHeader['sensor'] + '/padspec/' + dHeader['tag']\n strFs = dHeader['SampleRate'].replace('.','p')\n\tabbr = dHeader['abbr']\n\tpm = dHeader['pm']\n\tsuff = getAxisSuffix(dHeader)\n\tinfoFilename = '%s/m%s%s%s%s%sinfo.m' % (resPath,dHeader['SensorID'], abbr, suff, dHeader['tag'], strFs)\n\n if not os.path.isdir(resPath):\n cmd = 'mkdir -p ' + resPath\n res = getoutput(cmd) # os.mkdir(resPath) would not work because [is minus p supported?]\n if os.path.isfile(infoFilename):\n print 'not overwriting info script: %s' % infoFilename\n return\n if ( (len(infoFilename)-len(resPath))>33 ):\n print 'MATLAB may not read long script name %s (must be 32 chars or less)?' % infoFilename\n else:\n print 'writing info script: %s ...' 
% infoFilename,\n \n\toutfile = open(infoFilename,'w')\n\tprint >> outfile, 'fs=%.3f;' % float(dHeader['SampleRate'])\n\tprint >> outfile, 'fc=%.3f;' % float(dHeader['CutoffFreq'])\n\tprint >> outfile, 'Nfft=%d;' % dHeader['Nfft']\n\tprint >> outfile, 'No=%d;' % dHeader['No']\n\tprint >> outfile, 'df=fs/Nfft;'\n\tprint >> outfile, 'dT=(Nfft-No)/fs;'\n\tprint >> outfile, 'f=0:fs/Nfft:fs/2;'\n\tprint >> outfile, \"sHeader.DataType='%s';\" % dHeader['DataType']\n\tprint >> outfile, \"sHeader.SensorID='%s';\" % dHeader['SensorID']\n\tprint >> outfile, \"sHeader.Gain='%.1f';\" % float(dHeader['Gain'])\n\tprint >> outfile, \"sHeader.SampleRate=%.3f;\" % float(dHeader['SampleRate'])\n\tprint >> outfile, \"sHeader.CutoffFreq=%.3f;\" % float(dHeader['CutoffFreq'])\n\tprint >> outfile, \"sHeader.GDataFormat='%s';\" % dHeader['GData']['format']\n\tprint >> outfile, \"sHeader.BiasCoeffX='%.2f';\" % float(dHeader['BiasCoeff']['x'])\n\tprint >> outfile, \"sHeader.BiasCoeffY='%.2f';\" % float(dHeader['BiasCoeff']['y'])\n\tprint >> outfile, \"sHeader.BiasCoeffZ='%.2f';\" % float(dHeader['BiasCoeff']['z'])\n\tprint >> outfile, \"sHeader.SensorCoordinateSystemName='%s';\" % dHeader['SensorCoordinateSystem']['name']\n\tprint >> outfile, \"sHeader.SensorCoordinateSystemRPY=[%.1f %.1f %.1f];\" % (float(dHeader['SensorCoordinateSystem']['r']),\n\t\t\t\t\t\t\t\t\t\t float(dHeader['SensorCoordinateSystem']['p']),\n\t\t\t\t\t\t\t\t\t\t float(dHeader['SensorCoordinateSystem']['w']) )\n\tprint >> outfile, \"sHeader.SensorCoordinateSystemXYZ=[%.1f %.1f %.1f];\" % (float(dHeader['SensorCoordinateSystem']['x']),\n\t\t\t\t\t\t\t\t\t\t float(dHeader['SensorCoordinateSystem']['y']),\n\t\t\t\t\t\t\t\t\t\t float(dHeader['SensorCoordinateSystem']['z']) )\n\tprint >> outfile, \"sHeader.SensorCoordinateSystemComment='%s';\" % dHeader['SensorCoordinateSystem']['comment']\n\tprint >> outfile, \"sHeader.SensorCoordinateSystemTime='%s';\" % dHeader['SensorCoordinateSystem']['time']\n\tprint >> outfile, \"sHeader.DataCoordinateSystemName='%s';\" % dHeader['DataCoordinateSystem']['name']\n\tprint >> outfile, \"sHeader.DataCoordinateSystemRPY=[%.1f %.1f %.1f];\" % (float(dHeader['DataCoordinateSystem']['r']),\n\t\t\t\t\t\t\t\t\t\t float(dHeader['DataCoordinateSystem']['p']),\n\t\t\t\t\t\t\t\t\t\t float(dHeader['DataCoordinateSystem']['w']) )\n\tprint >> outfile, \"sHeader.DataCoordinateSystemXYZ=[%.1f %.1f %.1f];\" % (float(dHeader['DataCoordinateSystem']['x']),\n\t\t\t\t\t\t\t\t\t\t float(dHeader['DataCoordinateSystem']['y']),\n\t\t\t\t\t\t\t\t\t\t float(dHeader['DataCoordinateSystem']['z']) )\n\tprint >> outfile, \"sHeader.DataCoordinateSystemComment='%s';\" % dHeader['DataCoordinateSystem']['comment']\n\tprint >> outfile, \"sHeader.DataCoordinateSystemTime='%s';\" % dHeader['DataCoordinateSystem']['time']\n\tprint >> outfile, \"sHeader.DataQualityMeasure='%s';\" % dHeader['DataQualityMeasure']\n\tprint >> outfile, \"sHeader.ISSConfiguration='%s';\" % dHeader['ISSConfiguration']\n\tprint >> outfile, \"sHeader.ScaleFactorX='%.2f';\" % float(dHeader['ScaleFactor']['x'])\n\tprint >> outfile, \"sHeader.ScaleFactorY='%.2f';\" % float(dHeader['ScaleFactor']['y'])\n\tprint >> outfile, \"sHeader.ScaleFactorZ='%.2f';\" % float(dHeader['ScaleFactor']['z'])\n\tprint >> outfile, \"sHeader.sdnDataStart=717673.5;\"\n\tprint >> outfile, \"sHeader.sdnDataStart=888-1606;\"\n\tprint >> outfile, \"sHeader.GUnits='g';\"\n\tprint >> outfile, \"sHeader.TUnits='seconds';\"\n\tprint >> outfile, \"sOutput.Type='imagefilebat';\"\n\tprint >> outfile, 
\"sOutput.ResultsPath='/tmp/dummy/';\"\n\tprint >> outfile, \"sPlot.WhichAx='%s';\" % dHeader['whichAx']\n\tprint >> outfile, \"sPlot.TUnits='hours';\"\n\tprint >> outfile, \"sPlot.TSpan=8;\"\n\tprint >> outfile, \"sPlot.TempRes=df;\"\n\tprint >> outfile, \"sPlot.FreqRes=dT;\"\n\tprint >> outfile, \"sPlot.FLim=[0 fc];\"\n\tprint >> outfile, \"sPlot.CLimMode='minmax';\"\n\tprint >> outfile, \"sPlot.CLim=[-12 -6];\"\n\tprint >> outfile, \"sPlot.Colormap='pimsmap';\"\n\tprint >> outfile, \"sPlot.Window='hanning';\"\n\tprint >> outfile, \"sPlot.TLimMode='auto';\"\n\tprint >> outfile, \"sPlot.TTickLabelMode='dateaxis';\"\n\tprint >> outfile, \"sPlot.TTickForm='hh:mm';\"\n\tprint >> outfile, \"sPlot.OverlapLim=[1 50];\"\n\tprint >> outfile, \"sPlot.AxWidth=NaN;\"\n\tprint >> outfile, \"sPlot.AxHeight=NaN;\"\n\tprint >> outfile, \"sPlot.Nfft=Nfft;\"\n\tprint >> outfile, \"sPlot.No=No;\"\n\tprint >> outfile, \"sPlot.P=0;\"\n\tprint >> outfile, \"sPlot.TimeSlices=floor((3600*fs*sPlot.TSpan-No)/(Nfft-No));\"\n\tprint >> outfile, \"sPlot.FrequencyBins=floor(fc/df);\"\n\tprint >> outfile, \"sSearch.PathQualifiers.strTimeFormat='dd-mmm-yyyy,HH:MM:SS.SSS';\"\n\tprint >> outfile, \"sSearch.PathQualifiers.strTimeBase='GMT';\"\n\tprint >> outfile, \"sSearch.HeaderQualifiers='dummy';\"\n\tprint >> outfile, \"sSearch.ModeDur='dummy';\"\n\ts = split(dHeader['DataType'],'_')\n\tprint >> outfile, \"sText.casUL{1}=['%s, %s at ' sHeader.SensorCoordinateSystemComment ':' sprintf('[%%g %%g %%g]',sHeader.SensorCoordinateSystemXYZ)];\" % (s[0], dHeader['SensorID'])\n\tprint >> outfile, \"sText.casUL{2}='%.2f sa/sec (%.2f Hz)';\" % ( float(dHeader['SampleRate']),\n\t\t\t\t\t\t\t\t\tfloat(dHeader['CutoffFreq']) )\n\tprint >> outfile, \"sText.casUL{3}=['\\Deltaf' sprintf(' = %.3f Hz, Nfft = %d',df,Nfft)];\"\n\tprint >> outfile, \"sText.casUL{4}=sprintf('Temp. Res. = %.3f sec, No = %d',dT,No);\"\n\tprint >> outfile, \"sText.strXType='Time';\"\n\tprint >> outfile, \"sText.casYStub={'\\Sigma'};\"\n\tprint >> outfile, \"sText.strXUnits='hours';\"\n\tprint >> outfile, \"sText.strComment='%s, %s';\" % (s[0], dHeader['SensorID'])\n\tprint >> outfile, \"sText.casUR{1}=sHeader.ISSConfiguration;\"\n\tprint >> outfile, \"sText.casUR{2}='sum';\"\n\tprint >> outfile, \"sText.casUR{3}=sPlot.Window;\"\n\tprint >> outfile, \"sText.casUR{4}=sprintf('%.2f hours',sPlot.TSpan);\"\n\tprint >> outfile, \"sText.strTitle='Start GMT 01-Month-0000, 000/00:00:00.000';\"\n\tprint >> outfile, \"sText.strVersion=' ';\"\n\t#print >> outfile, \"%%save('%s/%s','f','sHeader','sOutput','sPlot','sSearch','sText')\" % (pth, infoFile)\n\toutfile.close()\n print 'done'",
"def create_isosj_instance( self, isoform_id ):\n hash_pos = { 'chrom': self.snv_chrom, 'pos_oi': self.snv_start } #used to find the closest position for an isoform\n bool_simulant_sj = False\n group_sj = 0 #this means splicing events will NOT be grouped into 5' or 3' competitive splicing\n iso_sj = IsoformSJ( isoform_id, [], -10, hash_pos, bool_simulant_sj, group_sj )\n \n return iso_sj",
"def create(*args):",
"def create_dataset(subs_list, indexing=True):\n\n S = None\n print(f'\\nProcess - {current_process().name} has {len(subs_list)} files to work on.\\n')\n\n try:\n start = time()\n repo = (Subject(sub) for sub in subs_list)\n for sub in repo:\n S = sub\n for i in range(3):\n filePath = Path(f'{new_sensor_paths[i]}/{sub.subject_id[:-4]}.csv')\n if not os.path.exists(filePath):\n # Most expensive line of code in the module (Takes hours)\n col_names, df, _, _, _ = feature_extractor(sub, sensors[i].lower(), output_type='df')\n df.to_csv(filePath, sep=\"\\t\", index=indexing)\n print(f\"File generated - '{sub.subject_id[:-4]}.csv' by process : {current_process().name}\")\n else:\n print(f'File \"{sub.subject_id[:-4]}.csv\" already exists!')\n\n print(f'\\nTime taken by - {current_process().name} : {time() - start:.2f} secs')\n except Exception as e:\n print(f\"Exception occurred in {current_process().name}\\n\")\n print(f'While working on this portion of the subs_list:\\n'\n f'{subs_list}')\n print(f'Error occurred in FILE # {S.subject_id}\\n')\n raise e",
"def build_indices(genome_fasta, genome_gtf, rRNA_fasta, transcriptome_fasta):\n \n if not os.path.exists(\"data/indices\"):\n os.mkdir(\"data/indices\")\n\n \n # 1. Bowtie index\n print(\"Building Bowtie index\")\n if not os.path.exists(BOWTIE_DIR):\n os.mkdir(BOWTIE_DIR)\n cmd_bowtie = 'bowtie-build' + ' ' + genome_fasta + ' ' + BOWTIE_DIR+'/yeast'\n output = subprocess.run(cmd_bowtie, shell=True)\n\n cmd_rRNA = 'bowtie-build' + ' ' + rRNA_fasta + ' ' + BOWTIE_DIR+'/rRNA'\n output = subprocess.run(cmd_rRNA, shell=True)\n \n # 2. STAR index\n print(\"Building STAR index\")\n if not os.path.exists(STAR_DIR):\n os.mkdir(STAR_DIR)\n cmd_STAR = 'STAR' + ' ' + '--runThreadN' + ' ' + '4' + ' ' + '--runMode' + ' ' + 'genomeGenerate' + ' ' + '--genomeDir' + ' ' + STAR_DIR + ' ' + '--genomeFastaFiles' + ' ' + genome_fasta + ' ' + '--sjdbGTFfile' + ' ' + genome_gtf #+ ' ' + '--sjdbOverhang' + ' ' + 'max(ReadLength)-1'\n output = subprocess.run(cmd_STAR, shell=True)\n\n\n# run build transcriptome fasta. \n if not os.path.exists(STAR_TRANSCRIPTOME_DIR):\n os.mkdir(STAR_TRANSCRIPTOME_DIR)\n cmd_STAR = 'STAR' + ' ' + '--runThreadN' + ' ' + '4' + ' ' + '--runMode' + ' ' + 'genomeGenerate' + ' ' + '--genomeDir' + ' ' + STAR_TRANSCRIPTOME_DIR + ' ' + '--genomeFastaFiles' + ' ' + transcriptome_fasta # + ' ' + '--sjdbGTFfile' + ' ' + genome_gtf #+ ' ' + '--sjdbOverhang' + ' ' + 'max(ReadLength)-1'\n output = subprocess.run(cmd_STAR, shell=True)",
"def __init__(self, workdir, encut, struct_path, name=\"relax_bwmn\"): \n potcar_path = \"../pseudos/BWO_Mn_POTCAR\" \n kgrid = [2, 2, 2] \n input_param = DefaultOptimizationParameters(encut) \n relax_calc = SCFCalculation(workdir, pseudo_par=None, kgrid=kgrid, name=\"BWO_Mn_relax\", encut=encut, input_parameters=input_param) \n relax_calc.make_calculation(struct_path, potcar_path=potcar_path)"
] | [
"0.54142535",
"0.5274596",
"0.50874454",
"0.5075501",
"0.4990759",
"0.49766293",
"0.49568054",
"0.4930684",
"0.4930611",
"0.49111018",
"0.48671573",
"0.48432696",
"0.48261032",
"0.47949788",
"0.47848707",
"0.47655132",
"0.4760693",
"0.475955",
"0.47541136",
"0.47470814",
"0.47451714",
"0.47385567",
"0.47301677",
"0.47025356",
"0.47021163",
"0.46927518",
"0.46922705",
"0.4687969",
"0.4670927",
"0.46528518"
] | 0.55910516 | 0 |
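Editor's note: the header-writer snippet in the record above is Python 2 ("print >> outfile, ..."). For readers porting such entries, a minimal Python 3 sketch of the same pattern follows. The field names and dHeader mapping are taken from the snippet; the function name write_matlab_info and the example values are hypothetical.

```python
# Minimal Python 3 sketch of the Python 2 header-writer pattern above.
# `print >> outfile, ...` becomes print(..., file=outfile), and a context
# manager replaces the explicit outfile.close(). Field names mirror the
# snippet; the function name and sample values are illustrative only.
def write_matlab_info(info_filename, d_header):
    with open(info_filename, 'w') as outfile:  # closes the file even on error
        print('fs=%.3f;' % float(d_header['SampleRate']), file=outfile)
        print('fc=%.3f;' % float(d_header['CutoffFreq']), file=outfile)
        print('Nfft=%d;' % d_header['Nfft'], file=outfile)
        print('No=%d;' % d_header['No'], file=outfile)
        print("sHeader.SensorID='%s';" % d_header['SensorID'], file=outfile)

# Hypothetical header values, chosen only to make the sketch runnable.
write_matlab_info('/tmp/pad_header.m',
                  {'SampleRate': 500.0, 'CutoffFreq': 200.0,
                   'Nfft': 512, 'No': 256, 'SensorID': '121f03'})
```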
Deletes independent sas logical jbods from the appliance based on uri [Arguments] | def fusion_api_delete_sas_logical_jbods(self, uri, api=None, headers=None):
return self.sas_logical_jbods.delete(uri=uri, api=api, headers=headers) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def fusion_api_delete_sas_li(self, name=None, uri=None, api=None, headers=None):\n return self.sasli.delete(name=name, uri=uri, api=api, headers=headers)",
"def fusion_api_delete_sas_interconnect(self, name=None, uri=None, api=None, headers=None):\n return self.sasics.delete(name=name, uri=uri, api=api, headers=headers)",
"def fusion_api_delete_sas_lig(self, name=None, uri=None, api=None, headers=None):\n return self.saslig.delete(name=name, uri=uri, api=api, headers=headers)",
"def fusion_api_delete_os_deploymentserver(self, name=None, uri=None, param='', api=None, headers=None):\n return self.osds.delete(name=name, uri=uri, param=param, api=api, headers=headers)",
"def fusion_api_delete_rack_manager(self, uri, name=None, param='', api=None, headers=None):\n return self.rackmanager.delete(uri=uri, name=name, param=param, api=api, headers=headers)",
"def fusion_api_delete_lsg(self, name=None, uri=None, api=None, headers=None):\n return self.lsg.delete(name=name, uri=uri, api=api, headers=headers)",
"def delete(constraint,check=True):\n output = db.query(['jobid','fwid','storage_directory'],constraint,order='jobid')\n for jid,fwid,path in output: \n lpad.archive_wf(fwid) # archive firework\n db.updateDB('deleted','jobid',jid,1,tableName='completed') # note deletion in deleted column\n if not check or ask('Do you want to delete %s?'%path): # delete storage directory \n if 'scratch' in path: shutil.rmtree(path)\n elif 'nfs' in path: \n d = subprocess.Popen(['ssh','[email protected]', 'rm -r %s'%path], stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n dout, err = d.communicate()\n else: raise NotImplementedError\n print 'deleted!'",
"def test_destroy_nas_share_by_nas(self):\n pass",
"def delete():",
"def test_004_delete(self):\n ret = svcmgr.main(argv=[\"delete\", \"-s\", SVCNAME, \"--local\"])\n assert ret == 0",
"def fusion_api_delete_logical_enclosure(self, name=None, uri=None, param='', api=None, headers=None):\n return self.logical_enclosure.delete(name=name, uri=uri, param=param, api=api, headers=headers)",
"def test_destroy_nas_share(self):\n pass",
"def fusion_api_delete_ls(self, name=None, uri=None, api=None, headers=None):\n return self.ls.delete(name=name, uri=uri, api=api, headers=headers)",
"def fusion_api_remove_rack(self, name=None, uri=None, api=None, headers=None):\n return self.rack.delete(name, uri, api, headers)",
"def delete(isamAppliance, name, check_mode=False, force=False):\n ret_obj = search(isamAppliance, name, check_mode=check_mode, force=force)\n chain_id = ret_obj['data']\n\n if chain_id == {}:\n logger.info(\"STS Chain {0} not found, skipping delete.\".format(name))\n else:\n if check_mode is True:\n return isamAppliance.create_return_object(changed=True)\n else:\n return isamAppliance.invoke_delete(\n \"Delete a specific STS chain\",\n \"{0}/{1}\".format(uri, chain_id),\n requires_modules=requires_modules,\n requires_version=requires_version)\n\n return isamAppliance.create_return_object()",
"def fusion_api_delete_storage_system(self, uri=None, api=None, headers=None):\n return self.system.delete(uri=uri, api=api, headers=headers)",
"def fusion_api_delete_server_hardware(self, name=None, uri=None, api=None, headers=None):\n return self.sh.delete(name, uri, api, headers)",
"def delete(ribo, name, force):\n\n delete_rnaseq_wrapper(ribo_file = ribo, \n name = name,\n force = force)",
"def bdev_daos_delete(client, name):\n params = {'name': name}\n return client.call('bdev_daos_delete', params)",
"def delete(self, _uri):\n print(\"Deleting '%s'\"%(_uri))\n response = self.__httpsRequest('DELETE', _uri, '')",
"def delete(self, host, file):",
"def fusion_api_delete_repository(self, uri, api=None, headers=None):\n return self.repository.delete(uri=uri, api=api, headers=headers)",
"def test_remove_share(self):\n self.app.delete(url=\"/config/shares?share=80&destination=gsiftp://nowhere&vo=dteam\", status=400)\n self.app.delete(url=\"/config/shares?share=80&destination=gsiftp://nowhere&vo=dteam&source=gsiftp://source\", status=204)",
"def deleteShards():\n os.popen('rm *_shard')",
"def fusion_api_remove_datacenter(self, name=None, uri=None, api=None, headers=None):\n return self.dc.delete(name, uri, api, headers)",
"def fusion_api_delete_storage_pool(self, uri=None, api=None, headers=None):\n return self.pool.delete(uri=uri, api=api, headers=headers)",
"def deleteStudy(self, study_id, full_delete):\n con = self.getMetadataDatabaseConnection()\n con.cursor().callproc('qiime_assets.study_delete', [study_id, full_delete])",
"def fusion_api_remove_san_manager(self, name=None, uri=None, api=None, headers=None):\n return self.dm.delete(name, uri, api, headers)",
"def test_upload_area_cleanup(self):\n vis2_uvid='urn:mrn:stm:service:instance:furuno:vis2'\n p = Path('import')\n files = list(p.glob('**/urn:mrn:s124:*'))\n for item in files:\n print(item)\n os.remove(str(item))\n pass",
"def svn_fs_delete_fs(*args):\r\n return _fs.svn_fs_delete_fs(*args)"
] | [
"0.6884551",
"0.6660939",
"0.66185933",
"0.6468747",
"0.6230566",
"0.6153764",
"0.6016363",
"0.5933864",
"0.5921936",
"0.58827883",
"0.5882349",
"0.5878072",
"0.5875323",
"0.58577526",
"0.58542216",
"0.5836818",
"0.58298755",
"0.58044356",
"0.5769623",
"0.57540727",
"0.57200617",
"0.5692369",
"0.5668738",
"0.56607795",
"0.56044596",
"0.55892473",
"0.55728364",
"0.5562596",
"0.5544453",
"0.5519452"
] | 0.7457779 | 0 |
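Editor's note: this record's document (delete) and the next record's (put, for updates) are one-line keywords that forward to a per-resource REST client held on the library object. The sketch below illustrates only that delegation pattern; the stub client, its return value, and the example URI are assumptions, and just the wrapper's name and signature come from the entries.

```python
# Hedged sketch of the delegation pattern behind
# fusion_api_delete_sas_logical_jbods: the keyword forwards unchanged to a
# per-resource client. SasLogicalJbodsStub is a stand-in; a real client
# would issue an HTTP DELETE against `uri` with the given api/headers.
class SasLogicalJbodsStub:
    def delete(self, uri, api=None, headers=None):
        # Echo the would-be request instead of performing it.
        return {'method': 'DELETE', 'uri': uri, 'api': api, 'headers': headers}

class FusionLikeLibrary:
    def __init__(self):
        self.sas_logical_jbods = SasLogicalJbodsStub()

    def fusion_api_delete_sas_logical_jbods(self, uri, api=None, headers=None):
        # Signature taken from the record above; body just delegates.
        return self.sas_logical_jbods.delete(uri=uri, api=api, headers=headers)

print(FusionLikeLibrary().fusion_api_delete_sas_logical_jbods(
    '/rest/sas-logical-jbods/123', api=800))
```

The same structure explains the put/patch siblings among the negatives: each keyword differs only in the HTTP verb it forwards, which is why those distractors score close to the positive document.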
Updates independent sas logical jbods [Arguments] | def fusion_api_edit_sas_logical_jbods(self, body, uri, api=None, headers=None):
return self.sas_logical_jbods.put(body=body, uri=uri, api=api, headers=headers) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def fusion_api_patch_sas_logical_jbods(self, body, uri, api=None, headers=None):\n return self.sas_logical_jbods.patch(body=body, uri=uri, api=api, headers=headers)",
"def update(all=True, QDomni=False, omni=False, omni2=False, leapsecs=False, PSDdata=False):\n from spacepy.datamodel import SpaceData, dmarray, fromCDF, toHDF5\n from spacepy import DOT_FLN, config\n\n if sys.version_info[0]<3:\n import urllib as u\n else:\n import urllib.request as u\n\n if 'user_agent' in config and config['user_agent']:\n class AppURLopener(u.FancyURLopener):\n version = config['user_agent']\n u._urlopener = AppURLopener()\n\n datadir = os.path.join(DOT_FLN, 'data')\n if not os.path.exists(datadir):\n os.mkdir(datadir)\n os.chmod(datadir, 0o777)\n\n #leapsec_url ='ftp://maia.usno.navy.mil/ser7/tai-utc.dat'\n leapsec_fname = os.path.join(datadir, 'tai-utc.dat')\n\n # define location for getting omni\n #omni_url = 'ftp://virbo.org/QinDenton/hour/merged/latest/WGhour-latest.d.zip'\n omni_fname_zip = os.path.join(datadir, 'WGhour-latest.d.zip')\n omni2_fname_zip = os.path.join(datadir, 'omni2-latest.cdf.zip')\n omni_fname_pkl = os.path.join(datadir, 'omnidata.pkl')\n omni_fname_json = os.path.join(datadir, 'omnidata.txt')\n omni_fname_h5 = os.path.join(datadir, 'omnidata.h5')\n omni2_fname_h5 = os.path.join(datadir, 'omni2data.h5')\n\n PSDdata_fname = os.path.join('psd_dat.sqlite')\n\n if (omni or omni2 or QDomni or leapsecs or PSDdata):\n all = False #if an option is explicitly selected, turn 'all' off\n\n if all == True:\n omni = True\n omni2 = True\n leapsecs = True\n\n if QDomni == True:\n omni = True\n omni2 = True\n\n if omni == True:\n # retrieve omni, unzip and save as table\n print(\"Retrieving Qin_Denton file ...\")\n u.urlretrieve(config['qindenton_url'], omni_fname_zip, reporthook=progressbar)\n fh_zip = zipfile.ZipFile(omni_fname_zip)\n data = fh_zip.read(fh_zip.namelist()[0])\n fh_zip.close()\n if not str is bytes:\n data = data.decode('ascii')\n A = np.array(data.split('\\n'))\n print(\"Now processing (this may take a minute) ...\")\n\n # create a keylist\n keys = A[0].split()\n keys.remove('8')\n keys.remove('6')\n keys[keys.index('status')] = '8_status'\n keys[keys.index('stat')] = '6_status'\n keys[keys.index('dst')] = 'Dst'\n keys[keys.index('kp')] = 'Kp'\n #keys[keys.index('Hr')] = 'Hr'\n keys[keys.index('V_SW')] = 'velo'\n keys[keys.index('Den_P')] = 'dens'\n keys[keys.index('Day')] = 'DOY'\n keys[keys.index('Year')] = 'Year'\n\n # remove keyword lines and empty lines as well\n idx = np.where(A != '')[0]\n # put it into a 2D table\n tab = [val.split() for val in A[idx[1:]]]\n stat8 = [val[11] for val in tab]\n stat6 = [val[27] for val in tab]\n\n tab = np.array(tab, dtype='float32')\n # take out where Dst not available ( = 99999) or year == 0\n idx = np.where((tab[:,12] !=99.0) & (tab[:,0] != 0))[0]\n tab = tab[idx,:]\n stat8 = np.array(stat8)[idx]\n stat6 = np.array(stat6)[idx]\n\n omnidata = SpaceData()\n # sort through and make an omni dictionary\n # extract keys from line above\n for ikey, i in zip(keys,range(len(keys))):\n if ikey in ('Year', 'DOY', 'Hr', 'Dst'):\n omnidata[ikey] = dmarray(tab[:, i], dtype='int16')\n else:\n omnidata[ikey] = dmarray(tab[:,i])\n\n # add TAI to omnidata\n nTAI = len(omnidata['DOY'])\n\n # add interpolation quality flags\n omnidata['Qbits'] = SpaceData()\n arr = dmarray(stat8.view(stat8.dtype.kind + '1'),\n dtype=np.byte).reshape((8, nTAI))\n for ik, key in enumerate(['ByIMF', 'BzIMF', 'velo', 'dens', 'Pdyn', 'G1', 'G2', 'G3']):\n omnidata['Qbits'][key] = arr[ik,:]\n if stat6.dtype.str[1:] == 'U6':\n stat6 = np.require(stat6, dtype='|S6')\n arr = dmarray(stat6.view(stat6.dtype.kind + '1'),\n 
dtype=np.byte).reshape((6, nTAI))\n for ik, key in enumerate(['W1', 'W2', 'W3', 'W4', 'W5', 'W6']):\n omnidata['Qbits'][key] = arr[ik,:]\n\n #remove string status keys\n foo = omnidata.pop('6_status')\n foo = omnidata.pop('8_status')\n\n # add time information to omni pickle (long loop)\n omnidata['UTC'] = dmarray([datetime.datetime(int(omnidata['Year'][i]), 1, 1) +\n datetime.timedelta(days=int(omnidata['DOY'][i]) - 1,\n hours=int(omnidata['Hr'][i]))\n for i in range(nTAI)])\n\n omnidata['ticks'] = spt.Ticktock(omnidata['UTC'], 'UTC')\n omnidata['RDT'] = omnidata['ticks'].RDT\n del omnidata['ticks'] #Can be quickly regenerated on import\n del omnidata['Year']\n del omnidata['Hr']\n\n print(\"Now saving... \")\n ##for now, make one file -- think about whether monthly/annual files makes sense\n toHDF5(omni_fname_h5, omnidata)\n\n # delete left-overs\n os.remove(omni_fname_zip)\n\n\n if omni2 == True:\n # adding missing values from original omni2\n print(\"Retrieving OMNI2 file ...\")\n u.urlretrieve(config['omni2_url'], omni2_fname_zip, reporthook=progressbar)\n fh_zip = zipfile.ZipFile(omni2_fname_zip)\n fh_zip.extractall();\n fh_zip.close()\n omnicdf = fromCDF(fh_zip.namelist()[0])\n #add RDT\n omnicdf['RDT'] = spt.Ticktock(omnicdf['Epoch'],'UTC').RDT\n #remove keys that get in the way\n del omnicdf['Hour']\n del omnicdf['Year']\n del omnicdf['Decimal_Day']\n\n # save as HDF5\n toHDF5(omni2_fname_h5, omnicdf)\n\n # delete left-overs\n os.remove(omni2_fname_zip)\n\n if leapsecs == True:\n print(\"Retrieving leapseconds file ... \")\n u.urlretrieve(config['leapsec_url'], leapsec_fname)\n\n if PSDdata == True:\n print(\"Retrieving PSD sql database\")\n u.urlretrieve(config['psddata_url'], PSDdata_fname, reporthook=progressbar)\n return datadir",
"def fusion_api_post_sas_logical_jbods(self, body, api=None, headers=None):\n return self.sas_logical_jbods.post(body=body, api=api, headers=headers)",
"def update_iemaccess(obs):\n icursor = IEM.cursor()\n for sid in obs:\n ob = obs[sid]\n iemob = Observation(sid, \"IA_RWIS\", ob['valid'])\n for varname in ['tmpf', 'dwpf', 'drct', 'sknt', 'gust', 'vsby',\n 'pday', 'tsf0', 'tsf1', 'tsf2', 'tsf3', 'scond0',\n 'scond1', 'scond2', 'scond3', 'relh']:\n # Don't insert NaN values into iemaccess\n thisval = ob.get(varname)\n if thisval is None:\n continue\n # strings fail the isnan check\n if isinstance(thisval, str):\n iemob.data[varname] = ob.get(varname)\n elif not np.isnan(thisval):\n iemob.data[varname] = ob.get(varname)\n for varname in ['tsub0', 'tsub1', 'tsub2', 'tsub3']:\n if ob.get(varname) is not None:\n iemob.data['rwis_subf'] = ob.get(varname)\n break\n iemob.save(icursor)\n icursor.close()\n IEM.commit()",
"def UpdateS1SVs(s, Difference, WorkingSet):",
"def update(self, ds, **kwargs):\n ds.set_status(self._db, self._es, self._queue, DatasetStatus.INDEXING)\n\n self._es.delete_ds(ds.id)\n for mol_db_dict in ds.config['databases']:\n mol_db = MolecularDB(name=mol_db_dict['name'],\n version=mol_db_dict.get('version', None),\n iso_gen_config=ds.config['isotope_generation'])\n self._es.index_ds(ds.id, mol_db)\n\n ds.set_status(self._db, self._es, self._queue, DatasetStatus.FINISHED)",
"def test_openmdao_good_1(self):\n updates = [\n #['MAT1', 3, 10.0], # 3 is E -> set to 10.0\n #['MAT1', 4, 10.0], # 3 is G -> set to 10.0\n ['GRID', 1, 3, 10.0], # 3 is x1 -> set to 10.0\n ['GRID', 1, 4, 20.0], # 4 is x2 -> set to 20.0\n ['CPENTA', 9, 2, 10], # 2 is property_id -> set to 10\n ['CPENTA', 9, 3, 20], # 3 is node1 -> set to 20\n ['PSOLID', 4, 1, 2], # 1 is material_id\n ['PARAM', 'WTMASS', 1, 'WTMASs'], # key\n ['PARAM', 'WTMASS', 2, 0.0025], # value1\n ['PCOMP', 1, 2, 1.],\n ['PCOMP', 1, 3, 2.],\n ['CTETRA', 8, 3, 1], # nid[0]\n ['CTETRA', 8, 4, 2], # nid[1]\n ['CTETRA', 8, 5, 3], # nid[2]\n ['CTETRA', 8, 6, 4], # nid[3]\n ]\n #GRID 1 0 0. 0. 0. 0\n #GRID 2 0 1. 0. 0. 0\n #GRID 3 0 1. 1. 0. 0\n #GRID 4 0 0. 1. 0. 0\n #CPENTA 9 4 21 22 23 24 25 26\n #PSOLID 4 1 0\n #CTETRA 8 4 11 12 13 15\n\n bdf_filename = os.path.join(mesh_utils_path, 'test_mass.dat')\n\n model = BDF(debug=False)\n model.read_bdf(bdf_filename)\n pcomp_updates = [\n ['PCOMP', 1, 15, 'YES_A', 'souts_0'],\n ['PCOMP', 1, 19, 'YES_B', 'souts_1'],\n\n ['PCOMP', 1, 25, 'YES_C', 'souts_2'],\n #['PCOMP', 1, 29, 'YES_D', 'souts_3'],\n ]\n for iupdate in updates:\n card_type, itype, ifield, value = iupdate\n card = model.update_card(card_type, itype, ifield, value)\n\n for iupdate in pcomp_updates:\n card_type, itype, ifield, value, field_name = iupdate\n card = model.update_card(card_type, itype, ifield, value)\n if '_' in field_name:\n field_name2, index = field_name.split('_')\n index = int(index)\n actual = getattr(card, field_name2)[index]\n assert actual == value, 'field_name=%r ifield=%s value=%s actual=%s\\n%s' % (\n field_name, ifield, value, actual, card.print_raw_card())\n #if card_type == 'PCOMP':\n #print(card)",
"def update_soa(record):\n if record and record.domain and record.domain.soa:\n record.domain.soa.serial += 1\n record.domain.soa.dirty = True\n record.domain.soa.save()",
"def update(*args):",
"def joindna(*dnas, topology=\"linear\", compatibility=None, homology_length=None, unique=True, supfeature=False, product=None, process_name=None, process_description=None, pn=None, pd=None, quinable=True, **kwargs):\n kwargs.setdefault(\"_sourcefile\", None) \n kwargs.setdefault(\"process_id\", None)\n kwargs.setdefault(\"original_ids\", []) \n _sourcefile = kwargs[\"_sourcefile\"] \n process_id = kwargs[\"process_id\"] \n original_ids = kwargs[\"original_ids\"]\n\n project = None\n project = project if product is None else product\n process_name = pn if process_name is None else process_name\n process_description = pd if process_description is None else process_description\n \n if compatibility is None:\n fcompatibility = None\n compatibility = \"partial\"\n else:\n fcompatibility = compatibility\n\n if homology_length is None:\n fhomology_length = None\n if compatibility == \"complete\":\n homology_length = 0 \n else:\n homology_length = 2\n else:\n fhomology_length = homology_length\n\n new_dnas = [] \n for i, dna in enumerate(dnas):\n if dna.topology == \"circular\":\n if i == 0:\n order = \"first\"\n elif i == 1:\n order = \"second\"\n elif i == 2:\n order = \"third\"\n else:\n order = str(i) + \"th\" \n raise ValueError(\"The {} QUEEN object topology is 'circular.' Circular QUEEN objects cannot be connected with others.\".format(order)) \n new_dnas.append(dna) \n \n dnas = new_dnas\n \n #Extract history information\n history_features = [] \n for dna in dnas:\n history_features.append(dna._history_feature)\n \n construct = copy.deepcopy(dnas[0])\n positions_list = [construct._positions] \n if len(dnas) > 1:\n for dna in dnas[1:]:\n annealing = False\n feats = dna.dnafeatures\n if dna._ssdna == False and construct._ssdna == False:\n if (dna._left_end_top * construct._right_end_bottom == 1 or dna._left_end_bottom * construct._right_end_top == 1) and ((dna._left_end_top == -1 or dna._left_end_bottom == -1) or (construct._right_end_top == -1 or construct._right_end_bottom == -1)):\n if dna._left_end_top == 1:\n sticky_end = dna._left_end \n else:\n sticky_end = construct._right_end\n \n if compatibility == \"partial\":\n if len(construct._right_end) < len(dna._left_end):\n ovresult = _detect_overlap(construct._right_end, dna._left_end, allow_outies=False)\n else:\n ovresult = _detect_overlap(dna._left_end[::-1], construct._right_end[::-1], allow_outies=False) \n \n if ovresult == False:\n raise ValueError(\"The QUEEN_objects cannot be joined due to the end structure incompatibility.\")\n return False\n else:\n pass \n ovhg_length = ovresult[1][0] \n \n else:\n if construct._right_end == dna._left_end:\n ovhg_length = len(construct._right_end)\n pass \n else:\n raise ValueError(\"The QUEEN_objects cannot be joined due to the end structure incompatibility.\")\n return False\n\n new_dna = cropdna(dna, ovhg_length, len(dna.seq), quinable=0) \n else:\n if (construct._right_end == \"\" and ((dna._left_end == \"\") or (dna._left_end == dna.seq))) or (construct._right_end_top >= 0 and construct._right_end_bottom >= 0 and dna._left_end_top >= 0 and dna._left_end_bottom >= 0):\n new_dna = dna\n ovhg_length = 0 \n ovhg = \"\"\n else:\n raise ValueError(\"The QUEEN_objects cannot be joined due to the end structure incompatibility.\")\n return False\n \n elif dna._ssdna == True and construct._ssdna == True:\n annealing = True\n if len(construct._right_end) < len(dna._left_end):\n ovresult = _detect_overlap(construct._right_end, dna._left_end.translate(str.maketrans(\"ATGC\",\"TACG\"))[::-1])[1]\n 
new_q = ovresult[1] \n ovhg = ovresult[2]\n else:\n ovresult = _detect_overlap(dna._left_end, construct._right_end.translate(str.maketrans(\"ATGC\",\"TACG\"))[::-1])[1] \n new_q = ovresult[1] \n ovhg = ovresult[2] \n new_q = new_q[::-1]\n \n if compatibility == \"complete\":\n if len(new_q) == len(ovhg):\n pass \n else:\n raise ValueError(\"The QUEEN_objects cannot be joined due to the end structure incompatibility.\")\n return False\n new_q = construct.__class__(seq=new_q, quinable=0) \n ovhg_length = len(ovhg)\n \n else:\n raise ValueError(\"ssDNA cannot be joined with dsDNA\") \n \n if ovhg_length < homology_length and ovhg_length > 0:\n raise ValueError(\"Compatible stickey end legnth should be larger than or equal to {} bp\".format(homology_length)) \n\n feats = _slide(feats, len(construct.seq) - ovhg_length)\n feats1 = [feat for feat in construct.dnafeatures if \"broken_feature\" in feat.qualifiers]\n feats2 = [feat for feat in feats if \"broken_feature\" in feat.qualifiers]\n feats2_seqs = set([str(feat._original) for feat in feats2]) \n feats1 = [feat for feat in feats1 if str(feat._original) in feats2_seqs]\n\n if annealing == True:\n construct._seq = new_q._seq\n construct._right_end = new_q._right_end\n construct._right_end_top = new_q._right_end_top\n construct._right_end_bottom = new_q._right_end_bottom\n construct._left_end = new_q._left_end\n construct._left_end_top = new_q._left_end_top\n construct._left_end_bottom = new_q._left_end_bottom\n construct._topology = \"linear\"\n construct._positions = new_q._positions \n construct._ssdna = False\n positions_list.append(construct._positions)\n else:\n construct._seq = construct.seq + new_dna.seq \n construct._right_end = dna._right_end\n construct._right_end_top = dna._right_end_top\n construct._right_end_bottom = dna._right_end_bottom\n construct._topology = \"linear\"\n positions_list.append(new_dna._positions) \n const_features = copy.copy(construct.dnafeatures) \n \n #Restore a original feature from fragmented features\n if len(feats1) > 0 and len(feats2) > 0:\n for feat1 in feats1:\n if feat1.location.strand == -1:\n s1, e1 = feat1.location.parts[-1].start.position, feat1.location.parts[0].end.position\n else:\n s1, e1 = feat1.location.parts[0].start.position, feat1.location.parts[-1].end.position\n\n for feat2 in feats2:\n if feat2.location.strand == -1:\n s2, e2 = feat2.location.parts[-1].start.position - (len(construct.seq) - ovhg_length), feat2.location.parts[0].end.position - (len(construct.seq) - ovhg_length)\n else:\n s2, e2 = feat2.location.parts[0].start.position - (len(construct.seq) - ovhg_length), feat2.location.parts[-1].end.position - (len(construct.seq) - ovhg_length)\n \n if feat1.type == feat2.type and feat1.original == feat2.original: \n flag = 0\n for key in feat1.qualifiers:\n if key == \"broken_feature\":\n pass \n elif key in feat2.qualifiers and feat1.qualifiers[key] == feat2.qualifiers[key]:\n flag = 1\n else:\n #flag = 0\n break \n \n if flag == 1:\n note1 = feat1.qualifiers[\"broken_feature\"][0]\n label1 = \":\".join(note1.split(\":\")[:-1])\n length1 = int(note1.split(\":\")[-4]) \n pos_s1 = int(note1.split(\":\")[-1].split(\"..\")[0].replace(\" \",\"\"))\n pos_e1 = int(note1.split(\":\")[-1].split(\"..\")[1].replace(\" \",\"\"))\n\n note2 = feat2.qualifiers[\"broken_feature\"][0]\n label2 = \":\".join(note2.split(\":\")[:-1])\n length2 = int(note2.split(\":\")[-4]) \n pos_s2 = int(note2.split(\":\")[-1].split(\"..\")[0].replace(\" \",\"\"))\n pos_e2 = 
int(note2.split(\":\")[-1].split(\"..\")[1].replace(\" \",\"\"))\n \n #Join fragmented features\n if length1 == length2 and \"_original\" in feat1.__dict__ and \"_original\" in feat2.__dict__ and feat1.location.strand == feat2.location.strand:\n note = \"{}:{}..{}\".format(label1, pos_s1, pos_e2)\n new_seq = construct.seq[s1:e1] + dna.seq[s2:e2]\n feat1_index = const_features.index(feat1)\n new_feat = copy.deepcopy(const_features[feat1_index]) \n strand = new_feat.location.strand\n if len(feat1.location.parts) == 1 and len(feat2.location.parts) == 1:\n new_feat.location = FeatureLocation(feat1.location.parts[0].start.position, feat2.location.parts[-1].end.position, feat1.strand)\n new_feat.location.strand = strand\n else:\n locations = feat1.location.parts[0:-1] + [FeatureLocation(feat1.location.parts[-1].start.position, feat2.location.parts[0].end.position, feat1.strand)] + feat2.location.parts[0:-1]\n if strand == -1:\n locations.reverse() \n new_feat.location = CompoundLocation(locations) \n new_feat.location.strand = strand \n \n new_feat = feat1.__class__(feature=new_feat, subject=construct)\n new_feat1 = feat1.__class__(feature=feat1, subject=construct)\n new_feat2 = feat1.__class__(feature=feat2, subject=construct) \n s = new_feat.start \n e = new_feat.end if new_feat.end <= len(construct.seq) else new_feat.end - len(construct.seq) \n \n if construct.printsequence(s, e, new_feat.location.strand if new_feat.location.strand !=0 else 1) in new_feat.original:\n new_feat._id = label1.split(\":\")[1]\n construct._dnafeatures[feat1_index] = feat1.__class__(feature=new_feat)\n construct._dnafeatures[feat1_index].qualifiers[\"broken_feature\"] = [note]\n if feat2 in feats:\n del feats[feats.index(feat2)] \n \n construct._dnafeatures = construct.dnafeatures + feats\n \n construct._dnafeatures.sort(key=lambda x:x.location.parts[0].start.position)\n for feat in construct.dnafeatures:\n if \"broken_feature\" in feat.qualifiers:\n note = feat.qualifiers[\"broken_feature\"][0]\n label = \":\".join(note.split(\":\")[:-1])\n length = int(note.split(\":\")[-4]) \n pos_s = int(note.split(\":\")[-1].split(\"..\")[0].replace(\" \",\"\"))\n pos_e = int(note.split(\":\")[-1].split(\"..\")[1].replace(\" \",\"\"))\n if (pos_s == 1 and pos_e == length) or (pos_s == length and pos_e == 1):\n del feat.qualifiers[\"broken_feature\"]\n if Alphabet:\n new_record = SeqRecord(Seq(str(construct.seq), Alphabet.DNAAlphabet()))\n else:\n new_record = SeqRecord(Seq(str(construct.seq)))\n\n new_record.features = construct.dnafeatures\n new_record.annotations[\"topology\"] = topology\n construct.record = new_record \n \n if topology == \"circular\":\n construct = _circularizedna(construct)\n \n if quinable == True: \n zero_positions = [] \n for d, positions in enumerate(positions_list):\n if 0 in positions:\n zero_positions.append((len(positions),d,positions.index(0)))\n if len(zero_positions) > 0:\n zero_positions.sort() \n zero_positions.reverse() \n zero_position = 0\n for dna in dnas[0:zero_positions[0][1]]:\n zero_position += len(dna.seq)\n zero_position += zero_positions[0][2]\n construct = cutdna(construct, zero_position, quinable=0)[0]\n construct = _circularizedna(construct) \n construct._positions = tuple(range(len(construct.seq))) \n else:\n construct._positions = tuple(range(len(construct.seq))) \n\n else:\n zero_positions = [] \n for d, positions in enumerate(positions_list):\n if 0 in positions:\n zero_positions.append((len(positions),d,positions.index(0)))\n \n if len(zero_positions) > 0:\n 
zero_positions.sort() \n zero_positions.reverse() \n zero_origin = zero_positions[0][1]\n new_positions = [] \n for d, positions in enumerate(positions_list): \n if d == zero_origin:\n new_positions.extend(positions)\n else:\n new_positions.extend([-1] * len(positions))\n construct._positions = tuple(new_positions) \n else:\n construct._positions = tuple(range(len(construct.seq)))\n \n construct._supfeatureids() #Update feature ID\n else:\n topology = \"circular\"\n construct = _circularizedna(dnas[0])\n construct._positions = construct._positions[0:len(construct.seq)]\n \n if project is None:\n construct._unique_id = dnas[0]._unique_id\n else:\n construct._unique_id = project\n\n new_features = [] \n remove_features = [] \n for feat in construct.dnafeatures:\n if \"broken_feature\" in feat.qualifiers:\n note = feat.qualifiers[\"broken_feature\"][0]\n label = \":\".join(note.split(\":\")[:-1])\n poss, pose = list(map(int,note.split(\":\")[-1].split(\"..\")))\n length = int(note.split(\":\")[-4]) \n if feat.location.strand != -1:\n sfeat = feat.start-(poss-1) \n sfeat = sfeat if sfeat >= 0 else len(construct.seq) + sfeat\n efeat = feat.end+(length-pose)\n else:\n sfeat = feat.start-(length-pose) \n sfeat = sfeat if sfeat >= 0 else len(construct.seq) + sfeat\n efeat = feat.end+(poss-1) \n \n if feat.subject is None:\n feat.subject = construct\n \n if note.split(\":\")[-3] == construct.printsequence(sfeat, efeat, strand=feat.location.strand):\n if sfeat < efeat:\n location = FeatureLocation(sfeat, efeat, feat.location.strand) \n else:\n location = CompoundLocation([FeatureLocation(sfeat, len(construct.seq)), FeatureLocation(0, efeat, feat.location.strand)]) \n newfeat = feat.__class__(location=location, subject=construct)\n newfeat.type = feat.type\n newfeat.qualifiers = feat.qualifiers\n del newfeat.qualifiers[\"broken_feature\"]\n newfeat._id = label.split(\":\")[1]\n new_features.append(newfeat)\n remove_features.append(feat)\n\n for feat in remove_features:\n del construct._dnafeatures[construct.dnafeatures.index(feat)]\n \n for feat in new_features:\n construct._dnafeatures.append(feat) \n \n if type(supfeature) in (tuple, list) and type(supfeature[0]) == dict:\n for feature_dict in supfeature: \n construct.setfeature(feature_dict) \n elif type(supfeature) == dict:\n construct.setfeature(supfeature)\n \n if unique == True:\n new_features = [] \n for feat in construct.dnafeatures:\n if feat in new_features:\n pass \n else:\n new_features.append(feat) \n construct._dnafeatures = new_features\n\n construct.record.feartures = construct.dnafeatures\n if quinable == True:\n fproject = \"\" \n fcompatibility = \"\" if fcompatibility is None else \", compatibility='{}'\".format(str(compatibility))\n fhomology_length = \"\" if fhomology_length is None else \", homology_length={}\".format(homology_length)\n funique = \"\" if unique == True else \", unique={}\".format(unique) \n fsupfeature = \"\" if supfeature == False else \", supfeature={}\".format(str(supfeature))\n fproduct = \"\" if product is None else \", product='\" + product + \"'\"\n process_name = \"\" if process_name is None else \", process_name='\" + process_name + \"'\"\n process_description = \"\" if process_description is None else \", process_description='\" + process_description + \"'\" \n \n construct._product_id = construct._unique_id if product is None else product \n construct.record.id = construct.project\n dna_elements = \"[\" + \", \".join([\"QUEEN.dna_dict['{}']\".format(dna._product_id) for dna in dnas]) + \"]\"\n 
building_history = \"QUEEN.dna_dict['{}'] = joindna(*{}, topology='{}'{}{}{}{}{}{})\".format(construct._product_id, dna_elements, topology, fcompatibility, fhomology_length, fproject, fproduct, process_name, process_description) \n history_feature = _combine_history(construct, history_features) \n construct._history_feature = history_feature \n process_id, original_ids = make_processid(construct, building_history, process_id, original_ids)\n add_history(construct, [building_history, \"topology: {}\".format(topology), \",\".join([process_id] + original_ids)], _sourcefile) \n construct._check_uniqueness()\n else:\n construct.__dict__[\"_product_id\"] = dnas[0]._product_id if \"_product_id\" in dnas[0].__dict__ else dnas[0]._unique_id\n\n for dnafeature in construct.dnafeatures:\n dnafeature.subject = construct\n \n if product is None:\n pass \n else:\n product = product.replace(\" \",\"\") \n match = re.fullmatch(\"(.+)\\[(.+)\\]\", product) \n if match:\n if match.group(2).isdecimal() == True:\n construct.__class__._namespace[match.group(1)][int(match.group(2))] = construct\n else:\n construct.__class__._namespace[match.group(1)][match.group(2)] = construct\n else: \n construct.__class__._namespace[product] = construct\n return construct",
"def update(self, bsd):\n raise NotImplementedError()",
"def updateAnnoByMetadata(syn, synId, metaDf, refCol, cols2Add,fileExts):\n \n if type(synId) is list:\n print \"Input is a list of Synapse IDs \\n\"\n for synID in synId:\n print \"Getting File %s ...\" % synID\n temp = syn.get(synID,downloadFile = False)\n _helperUpdateAnnoByMetadata(syn,temp,metaDf,refCol,cols2Add,fileExts)\n else:\n print \"Input is a Synpase ID \\n\"\n starting = syn.get(synId,downloadFile = False)\n if not is_container(starting):\n print \"%s is a File \\n\" % synId\n _helperUpdateAnnoByMetadata(syn,starting,metaDf,refCol,cols2Add,fileExts)\n else:\n directory = synu.walk(syn,synId)\n for dirpath,dirname,filename in directory:\n for i in filename:\n temp = syn.get(i[1],downloadFile = False)\n print \"Getting File %s ...\" % i[1]\n _helperUpdateAnnoByMetadata(syn,temp,metaDf,refCol,cols2Add,fileExts)",
"def update_which_sde_data(\n current_sde_df,\n latest_esi_df,\n index_key\n):\n pass",
"def fusion_api_delete_sas_logical_jbods(self, uri, api=None, headers=None):\n return self.sas_logical_jbods.delete(uri=uri, api=api, headers=headers)",
"def main():\n\n # Script arguments... \n \"\"\" If running as standalone, hardcode theWorkspace and inFile \"\"\"\n theWorkspace = arcpy.GetParameterAsText(0)\n if not theWorkspace:\n theWorkspace = r\"d:\\_dataTest\"\n arcpy.env.workspace = theWorkspace\n arcpy.env.overwriteOutput = True\t\n\n inFile = arcpy.GetParameterAsText(1)\n if not inFile:\n inFile = \"updateMultipleSourcePaths.csv\"\n inFile = r\"\\\\dfg.alaska.local\\gis\\Anchorage\\GISStaff\\___gisStaffConnections\\RepairBrokenSrcAug242015.csv\"\n\n outWorkspace = arcpy.GetParameterAsText(2)\n if not outWorkspace:\n outWorkspace = os.path.join(theWorkspace, \"_repaired\")\n '''if not os.path.isdir(outWorkspace): \n os.makedirs(outWorkspace)\n myMsgs(\"created new directory {0} \\n\".format(outWorkspace))'''\n\n # Create .txt Report of what it thinks was fixed, tagged with YYYYMMDD_HHMM\n outFile = \"FixedReport\"\n fileDateTime = curFileDateTime()\n currentDate = curDate()\n outfileTXT = os.path.join(theWorkspace, outFile) + fileDateTime + \".txt\" \n myMsgs (outFile)\n reportFile = open(outfileTXT, 'w')\n myMsgs( \"File {0} is open? {1}\".format(outfileTXT, str(not reportFile.closed)))\n outText = \"Report for what it THINKS it repaired in {0}, on {1} \\n \".format(theWorkspace, currentDate)\n outText += \" Includes coverages (pts, poly, arc, anno), shapes, and FGDB data.\" + '\\n'\n outText += \"-----------------------------------------------------\" + '\\n' \n reportFile.write(outText)\t\n\n mxd = None\n outMXDName = \"none\"\n updatePath = []\n cvrList = [r\"\\arc\", r\"\\polygon\", r\"\\region\", r\"\\point\", r\"\\tic\" ]\n lstExtDatatype = [[\".shp\", \"SHAPEFILE_WORKSPACE\" ], [\".sde\",\"SDE_WORKSPACE\"], \n [\".mdb\", \"ACCESS_WORKSPACE\" ], [\".gdb\", \"FILEGDB_WORKSPACE\"], \n [\"cover\", \"ARCINFO_WORKSPACE\"]]\t\n cntMXD = 0\n cntFixed = 0\n cntTotalFixed = 0\n\n # makes sure the .csv file exists\n if arcpy.Exists(inFile):\n myMsgs (\"->Using {0} to repair paths.\\n==============================\".format(inFile))\n # walks thru the workspace to create list of files \n for root, dirs, files in os.walk(theWorkspace): \t\t\n for fileName in files:\n if root == outWorkspace: # don't process mxd's in the target directory\n pass\n else:\n fullPath = os.path.join(root, fileName)\n basename, extension = os.path.splitext(fileName)\n # Only process .mxd files\n if extension == \".mxd\":\n myMsgs(\"\\nReviewing MXD: {0}\".format(fullPath))\n reportFile.write(\"\\nReviewing MXD: {0}\".format(fullPath))\n mxd = arcpy.mapping.MapDocument(fullPath)\n dfs = arcpy.mapping.ListDataFrames(mxd)\n cntMXD += 1\n cntFixed = 0\n basename, extension = os.path.splitext(fileName)\n # New output mxd name....\n outMXDName = os.path.join(outWorkspace, (str(basename) + \".mxd\")) #\"_fix.mxd\"))\n # create list of the tables since they are handle differently\n theTables = arcpy.mapping.ListTableViews(mxd)\n # Loops thru dataframes so adding and deleting Services will work.\n for df in dfs:\n # Loops thru layers, checks for broken links and tries to repair\n lyrList = arcpy.mapping.ListLayers(mxd, \"\", df)\n for lyr in lyrList:\n if lyr.isBroken:\n if not lyr.supports(\"DATASOURCE\") and not lyr.isServiceLayer:\n myMsgs(\" ->Skipping {0} not a Service layer, and does not support DATASOURCE\".format(lyr.name))\n pass #continue\n elif not lyr.supports(\"DATASOURCE\") and lyr.isServiceLayer:\n myMsgs(\" -Broken Service: {0}\".format(lyr.name))\n else:\n myMsgs(\" -Broken: {0}\".format(lyr.dataSource))\n #myMsgs(\"layer is Group {0} or ServiceLayer 
{1}\".format(lyr.isGroupLayer, lyr.isServiceLayer))\n if (lyr.isGroupLayer or (\"Events\" in lyr.name)) and (not lyr.isServiceLayer): # Groups and Event FC skipped\n myMsgs(\" ...skipping group or event: {0}\".format(lyr.name))\n reportFile.write(\"\\n *skipping group or event: {0} \\n\".format(lyr.name))\n pass #break\n elif lyr.isServiceLayer: # services might have to be handle differently\n if lyr.supports(\"SERVICEPROPERTIES\"):\n for spType, spName in lyr.serviceProperties.iteritems():\n myMsgs(\" Service Properties: {0}: {1}\".format(spType, spName ))\n if spType == \"URL\": \n dataSource = str(spName)\n lyrType = (\"service_{}\".format(lyr.name))\n break\n myMsgs(\" ->this ia a service....using add and remove layer\")\n updatePath = findUpdatePath(inFile, dataSource, lyrType.strip().lower())\n newDSPath, newDSName = os.path.split(updatePath[0])\n if (\"service\" in updatePath[3]) and (\"service\" in updatePath[1]):\n insertLayer = arcpy.mapping.Layer(updatePath[0])\n print(\"dataframe: {0}\".format(df))\n arcpy.mapping.InsertLayer(df, lyr, insertLayer, \"AFTER\")\n arcpy.mapping.RemoveLayer(df, lyr)\n reportFile.write(\"\\n ->sees this as service....{0} \\n\".format(dataSource))\n # will still look at deleted version after insert, not the new version..\n # isBroken will give false info even if fixed, so \n # don't use myMsgs(\"Still broken? {0}\".format(lyr.isBroken)) \n else:\n myMsgs(\" --> a service layer but no SERVICE PROPERTIES\")\n elif lyr.supports(\"DATASOURCE\") and lyr.supports(\"DATASETNAME\"): \n # not a group, event or what it thinks is a service\n updatePath = findUpdatePath(inFile, lyr.dataSource, \"\")\n newDSPath, newDSName = os.path.split(updatePath[0])\n sameType = updatePath[2] \n for cvr in cvrList: #checks to see if the source layer is a coverage...must handle different\n if cvr in lyr.dataSource:\n sourceIsCoverage = True\n break\n else:\n sourceIsCoverage = False\n # updatePath[1] is False if there wasn't a match\n # so \"not update[1]\" means no match was found, and moves to next layer\t\t\t\t\t\t\t\t\n if not updatePath[1]: # if no match was found\n myMsgs(\" !! 
no match to: {0} \".format(lyr.dataSource))\n updateStatus = \"no match, not changed\" # used for message only\n pass\n elif updatePath[1].strip().lower() == \"drive\":\n myMsgs(\" skipping drive-letter matches for now: {0}\".format(lyr.dataSource))\n updateStatus = \"can only find drive match...look into it)\"\n pass\n elif updatePath[1].strip().lower() == \"_review\":\n myMsgs(\" no new source assigned yet for: {0}\".format(lyr.dataSource))\n updateStatus = (\"review and update {0}\".format(inFile))\n pass\n else: #if lyr.supports(\"DATASOURCE\") and lyr.supports(\"DATASETNAME\"):\n updateStatus = str(updatePath[0]) # used for message only\n if lyr in theTables:\n #myMsgs(\" thinks its a table....using findAndReplsWorkspacePath\")\n myMsgs(\" *Moving {0}: {1} to new: {2}\".format(updatePath[3], lyr.dataSource, updatePath[0]))\n reportFile.write(\"\\n Moving {0}: {1} to new: {2} \\n\".format(updatePath[3], lyr.dataSource, updatePath[0]))\n lyr.findAndReplaceWorkspacePath(lyr.dataSource, updatePath, False) \n elif lyr.isRasterLayer:\n #myMsgs(\" thinks its a raster....using findAndReplsWorkspacePath\")\n myMsgs(\" *Moving {0}: {1} to new: {2}\".format(updatePath[3], lyr.dataSource, updatePath[0]))\n reportFile.write(\"\\n Moving {0}: {1} to new: {2} \\n\".format(updatePath[3], lyr.dataSource, updatePath[0]))\n newType = \"RASTER_WORKSPACE\"\n for extType in lstExtDatatype:\n if extType[0] in updatePath[0]:\n newType = extType[1] \n if extType[0] == '.gdb':\n newDSPath = newDSPath.split('.gdb', 1)[0] + '.gdb'\n #newType = extType[1]\n elif extType[0] == '.sde':\n newDSPath = newDSPath.split('.sde', 1)[0] + '.sde'\n break \n lyr.replaceDataSource(newDSPath, newType, newDSName, False)\n if not sameType:\n testOldTOC = updatePath[4].strip('\\\\')\n if lyr.name == testOldTOC:\n lyr.name = lyr.datasetName\n else:\n newType = updatePath[1] \n if sourceIsCoverage and sameType:\n newDSPath = os.path.split(newDSPath)[0]\n newType = \"ARCINFO_WORKSPACE\"\n for extType in lstExtDatatype:\n if extType[0] in updatePath[0]:\n newType = extType[1]\n if extType[0] == '.gdb':\n newDSPath = newDSPath.split('.gdb', 1)[0] + '.gdb'\n #newType = extType[1]\n elif extType[0] == '.sde':\n newDSPath = newDSPath.split('.sde', 1)[0] + '.sde'\n\n break\n print(\"line ~281 newType is: {0}\".format(newType))\n myMsgs(\" *Moving {0}: {1} to new: {2}\".format(updatePath[3], lyr.dataSource, updatePath[0]))\n reportFile.write(\"\\n Moving {0}: {1} to new: {2}\".format(updatePath[3], lyr.dataSource, updatePath[0]))\n lyr.replaceDataSource(newDSPath, newType, newDSName, False)\n #myMsgs(\" new datasource: {0}\".format(lyr.dataSource))\n myMsgs(\" **the new data source: {0}\".format(updateStatus))\n cntFixed += 1\n myMsgs(\" Still broken? {0}\".format(lyr.isBroken))\n else:\n myMsgs(\"not sure what it is, but can't process {0}\".format(lyr.name))\n \n else:\n myMsgs(\" -Not Broken: {0}\".format(str(lyr)))\n\n myMsgs(\" Number of links fixed processed: {0}\".format(cntFixed))\n myMsgs(\" -{0} Review complete.\".format(fullPath))\n reportFile.write(\" -Number of links fixed processed: {0} \\n\".format(cntFixed))\t\t\t\t\t\t\n reportFile.write(\" -{0} Review complete. 
\\n\\n\".format(fullPath))\n\n if cntFixed > 0:\n mxd.save()\n myMsgs(\"saved to {0}\".format(fullPath))\n reportFile.write(\"saved to {0}\".format(fullPath))\n cntTotalFixed += cntFixed\n cntFixed = 0\n \"\"\"if cntFixed > 0:\n\t\t\t\t\t\t\tmxd.saveACopy(outMXDName, '10.1')\n\t\t\t\t\t\t\tmyMsgs(\"saved to {0}\".format(outMXDName))\n\t\t\t\t\t\t\tcntFixed = 0\"\"\"\n '''if arcpy.Exists(outMXDName):\n outMXDName.()\n myMsgs(\"saved 1\")\n else:\n mxd.saveACopy(outMXDName, '10.1')\n myMsgs(\"saved 2\")'''\n del mxd\n cntFixed = 0\n else:\n myMsgs (\"ERROR: Required repair source list: [0] does not exit. \\n\".format(inFile))\n outText = (\"\\n\\n ==========================================\")\n outText += (\"\\n Number of MXD's processed: {0} \\n\".format(cntMXD))\n outText += (\" Total Number of links it fixed, all mxds: {0} \\n\".format(cntTotalFixed) )\n\n myMsgs(\" {0}\".format(outText))\n\n reportFile.write(outText)\n # close the .txt file, \n reportFile.close()\n myMsgs( \"File {0} is closed? {1}\".format(outfileTXT, str(reportFile.closed)))\t\n\n myMsgs('!!! Success !!! ')",
"def write_mesh_java(self):\n fout = open(self.javaBatch1File+\".java\",\"w\")\n fout.write(\"\"\"\\\n// STAR-CCM+ macro\npackage macro;\n\nimport java.util.*;\n\nimport star.common.*;\nimport star.base.neo.*;\nimport star.resurfacer.*;\nimport star.trimmer.*;\nimport star.prismmesher.*;\nimport star.meshing.*;\n\npublic class %s extends StarMacro {\n\n public void execute() {\n execute0();\n }\n \"\"\" % (self.javaBatch1File))\n\n fout.write(\"\"\"\\\n\n private void execute0() {\n\n // Directory for output files and final sim file (if saved)\n String myPath = \"$PWD\";\n\n String myInputSTLFilename = \"%s\"; // contains aircraft geometry\n\n String myOutputMeshFilename = \"%s\"; // output sim name with volume mesh\n\n double mySphereRadius_ft = %f; // radius of freestream outer boundary in feet\n double mySphereX_ft = %f; // Center of freestream sphere in feet\n double mySphereY_ft = %f;\n double mySphereZ_ft = %f;\n double mySphereTriangles_ft = %f; // size of sphere outer boundary facets\n\n double myPrismFieldRatio = %f; // thickness ratio of near field to outermost prism\n\n int myBLcells = %d; // number of cells in boundary layer normal direction\n double myBLthickness_in = %f; // thickness of boundary layer in inches\n\n double myBaseSize_ft = %f; // mesh base size in feet\n int myCurvature = %d; // number of points to divide a circle\n double mySurfaceGrowthRate = %f; // growth rate (max size ratio) of surface triangles\n double myFeatureAngle_deg = %f; // maximum angle for defining sharp edges on ATR model\n\n double myMinMesh_pct = %f; // smallest cell size in percent\n double myEdgeTarget_pct = %f; // target size for feature curve edges in percent\n\n boolean bln_makeSurfaceMesh = %s; // use true to make surface mesh, false to skip\n boolean bln_makeVolumeMesh = %s; // use true to make volume mesh, false to skip\n boolean bln_saveMeshFile = %s; // use true to save final mesh file, false to skip\n \"\"\" % (self.STLFile,self.simMeshFile,self.mySphereRadius,self.mySphereX,self.mySphereY,\n self.mySphereZ,self.mySphereTriangles,self.myPrismFieldRatio,self.myBLcells,\n self.myBLthickness,self.myBaseSize,self.myCurvature,self.mySurfaceGrowthRate,\n self.myFeatureAngle,self.myMinMesh,self.myEdgeTarget,\n str(self.makeSurfaceMesh).lower(),str(self.makeVolumeMesh).lower(),str(self.saveMeshFile).lower()))\n\n fout.write(\"\"\"\\\n\n if (!bln_makeSurfaceMesh) bln_makeVolumeMesh = false;\n\n // Start of STAR macro\n Simulation simulation_0 = getActiveSimulation();\n\n Units units_0 = simulation_0.getUnitsManager().getPreferredUnits(new IntVector(new int[] {0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}));\n Units units_1 = ((Units) simulation_0.getUnitsManager().getObject(\"ft\"));\n units_1.setPreferred(true);\n\n PartImportManager partImportManager_0 = simulation_0.get(PartImportManager.class);\n\n // Read concatenated STL parts\n //partImportManager_0.importStlPart(resolvePath(myPath+myInputSTLFilename), \"OneSurfacePerPatch\", units_1, true, 1.0E-5);\n partImportManager_0.importStlPart(resolvePath(myInputSTLFilename), \"OneSurfacePerPatch\", units_1, true, 1.0E-5);\n\n MeshPartFactory meshPartFactory_0 = simulation_0.get(MeshPartFactory.class);\n\n SimpleSpherePart simpleSpherePart_0 = meshPartFactory_0.createNewSpherePart(simulation_0.get(SimulationPartManager.class));\n\n simpleSpherePart_0.setDoNotRetessellate(true);\n\n LabCoordinateSystem labCoordinateSystem_0 = simulation_0.getCoordinateSystemManager().getLabCoordinateSystem();\n\n 
simpleSpherePart_0.setCoordinateSystem(labCoordinateSystem_0);\n\n Coordinate coordinate_0 = simpleSpherePart_0.getOrigin();\n\n coordinate_0.setCoordinateSystem(labCoordinateSystem_0);\n coordinate_0.setCoordinate(units_1, units_1, units_1, new DoubleVector(new double[] {0.0, 0.0, 0.0}));\n\n // Set location of freestream sphere center (x, y, z) in feet\n coordinate_0.setValue(new DoubleVector(new double[] {mySphereX_ft, mySphereY_ft, mySphereZ_ft}));\n\n simpleSpherePart_0.getRadius().setUnits(units_1);\n\n // Set freestream sphere radius in feet\n simpleSpherePart_0.getRadius().setValue(mySphereRadius_ft);\n simpleSpherePart_0.getTessellationDensityOption().setSelected(TessellationDensityOption.MEDIUM);\n simpleSpherePart_0.rebuildSimpleShapePart();\n simpleSpherePart_0.setDoNotRetessellate(false);\n\n Region region_0 = simulation_0.getRegionManager().createEmptyRegion();\n\n region_0.setPresentationName(\"Region\");\n Boundary boundary_0 = region_0.getBoundaryManager().getBoundary(\"Default\");\n\n region_0.getBoundaryManager().removeBoundaries(new NeoObjectVector(new Object[] {boundary_0}));\n FeatureCurve featureCurve_0 = ((FeatureCurve) region_0.getFeatureCurveManager().getObject(\"Default\"));\n\n region_0.getFeatureCurveManager().removeObjects(featureCurve_0);\n FeatureCurve featureCurve_1 = region_0.getFeatureCurveManager().createEmptyFeatureCurveWithName(\"Feature Curve\");\n\n MeshPart meshPart_0 = ((MeshPart) simulation_0.get(SimulationPartManager.class).getPart(\"combined\"));\n\n simulation_0.getRegionManager().newRegionsFromParts(new NeoObjectVector(new Object[] {meshPart_0, simpleSpherePart_0}), \"OneRegion\", region_0, \"OneBoundaryPerPartSurface\", null, \"OneFeatureCurve\", featureCurve_1, false);\n\n MeshContinuum meshContinuum_0 = simulation_0.getContinuumManager().createContinuum(MeshContinuum.class);\n\n PhysicsContinuum physicsContinuum_0 = simulation_0.getContinuumManager().createContinuum(PhysicsContinuum.class);\n\n meshContinuum_0.enable(ResurfacerMeshingModel.class);\n\n // Use trimmer (Cartesian hex) mesh\n meshContinuum_0.enable(TrimmerMeshingModel.class);\n\n meshContinuum_0.enable(PrismMesherModel.class);\n\n // Base size in feet - larger values makes coarser grids, smaller values makes finer grids\n meshContinuum_0.getReferenceValues().get(BaseSize.class).setValue(myBaseSize_ft);\n\n ResurfacerMeshingModel resurfacerMeshingModel_0 = meshContinuum_0.getModelManager().getModel(ResurfacerMeshingModel.class);\n resurfacerMeshingModel_0.setDoCompatibilityRefinement(true);\n resurfacerMeshingModel_0.setDoAutomaticSurfaceRepair(false);\n\n MaxTrimmerSizeToPrismThicknessRatio maxTrimmerSizeToPrismThicknessRatio_0 = meshContinuum_0.getReferenceValues().get(MaxTrimmerSizeToPrismThicknessRatio.class);\n maxTrimmerSizeToPrismThicknessRatio_0.setLimitCellSizeByPrismThickness(true);\n SizeThicknessRatio sizeThicknessRatio_0 = maxTrimmerSizeToPrismThicknessRatio_0.getSizeThicknessRatio();\n\n // Prism to field thickness ratio\n sizeThicknessRatio_0.setNeighboringThicknessMultiplier(myPrismFieldRatio);\n\n NumPrismLayers numPrismLayers_0 = meshContinuum_0.getReferenceValues().get(NumPrismLayers.class);\n\n // Number of boundary layer cells\n numPrismLayers_0.setNumLayers(myBLcells);\n\n PrismThickness prismThickness_0 = meshContinuum_0.getReferenceValues().get(PrismThickness.class);\n\n prismThickness_0.getRelativeOrAbsoluteOption().setSelected(RelativeOrAbsoluteOption.ABSOLUTE);\n\n GenericAbsoluteSize genericAbsoluteSize_0 = ((GenericAbsoluteSize) 
prismThickness_0.getAbsoluteSize());\n\n Units units_2 = ((Units) simulation_0.getUnitsManager().getObject(\"in\"));\n\n genericAbsoluteSize_0.getValue().setUnits(units_2);\n\n // Boundary layer thickness in inches\n genericAbsoluteSize_0.getValue().setValue(myBLthickness_in);\n\n SurfaceCurvature surfaceCurvature_0 = meshContinuum_0.getReferenceValues().get(SurfaceCurvature.class);\n\n SurfaceCurvatureNumPts surfaceCurvatureNumPts_0 = surfaceCurvature_0.getSurfaceCurvatureNumPts();\n\n // Curvature refinement specified as number of points around a circle\n surfaceCurvatureNumPts_0.setNumPointsAroundCircle(myCurvature);\n\n SurfaceGrowthRate surfaceGrowthRate_0 = meshContinuum_0.getReferenceValues().get(SurfaceGrowthRate.class);\n\n // Surface growth rate (ratio of triangle sizes)\n surfaceGrowthRate_0.setGrowthRate(mySurfaceGrowthRate);\n\n SurfaceSize surfaceSize_0 = meshContinuum_0.getReferenceValues().get(SurfaceSize.class);\n\n RelativeMinimumSize relativeMinimumSize_0 = surfaceSize_0.getRelativeMinimumSize();\n\n // Set triangle minimum size percentage\n relativeMinimumSize_0.setPercentage(myMinMesh_pct);\n\n SimpleTemplateGrowthRate simpleTemplateGrowthRate_0 = meshContinuum_0.getReferenceValues().get(SimpleTemplateGrowthRate.class);\n\n // Set volume mesh growth rate for field (FAST, MEDIUM, SLOW, VERYSLOW)\n simpleTemplateGrowthRate_0.getGrowthRateOption().setSelected(GrowthRateOption.SLOW);\n\n // Set nearfield mesh growth rate for field (FAST, MEDIUM, SLOW, VERYSLOW)\n simpleTemplateGrowthRate_0.getSurfaceGrowthRateOption().setSelected(SurfaceGrowthRateOption.VERYSLOW);\n\n // Remove existing feature curves (will remark feature curves below)\n region_0.getFeatureCurveManager().removeObjects(featureCurve_1);\n\n MeshPipelineController meshPipelineController_0 = simulation_0.get(MeshPipelineController.class);\n\n meshPipelineController_0.initializeMeshPipeline();\n\n SurfaceRep surfaceRep_0 = ((SurfaceRep) simulation_0.getRepresentationManager().getObject(\"Initial Surface\"));\n\n Boundary boundary_1 = region_0.getBoundaryManager().getBoundary(\"combined.fuselage\");\n Boundary boundary_2 = region_0.getBoundaryManager().getBoundary(\"combined.tail\");\n Boundary boundary_3 = region_0.getBoundaryManager().getBoundary(\"combined.wing\");\n Boundary boundary_4 = region_0.getBoundaryManager().getBoundary(\"Sphere.Sphere Surface\");\n boundary_4.setBoundaryType(FreeStreamBoundary.class);\n\n // Identify feature curves using angle criteria (currently set at 17 degrees for the ATR model)\n FeatureCurve featureCurve_2 = surfaceRep_0.createFeatureEdgesOnBoundaries(new NeoObjectVector(new Object[] {boundary_1, boundary_2, boundary_3, boundary_4}), true, true, true, true, true, true, myFeatureAngle_deg, false);\n\n SurfaceSizeOption surfaceSizeOption_0 = featureCurve_2.get(MeshConditionManager.class).get(SurfaceSizeOption.class);\n\n surfaceSizeOption_0.setSurfaceSizeOption(true);\n\n SurfaceSize surfaceSize_1 = featureCurve_2.get(MeshValueManager.class).get(SurfaceSize.class);\n\n RelativeMinimumSize relativeMinimumSize_1 = surfaceSize_1.getRelativeMinimumSize();\n\n // Set feature curve minimum size (usually the same as surface triangle minimum size)\n relativeMinimumSize_1.setPercentage(myMinMesh_pct);\n\n RelativeTargetSize relativeTargetSize_0 = surfaceSize_1.getRelativeTargetSize();\n\n // Set feature curve target size as a percentage\n relativeTargetSize_0.setPercentage(myEdgeTarget_pct);\n\n SurfaceSizeOption surfaceSizeOption_1 = 
boundary_4.get(MeshConditionManager.class).get(SurfaceSizeOption.class);\n\n surfaceSizeOption_1.setSurfaceSizeOption(true);\n\n SurfaceSize surfaceSize_2 = boundary_4.get(MeshValueManager.class).get(SurfaceSize.class);\n\n surfaceSize_2.getRelativeOrAbsoluteOption().setSelected(RelativeOrAbsoluteOption.ABSOLUTE);\n\n AbsoluteMinimumSize absoluteMinimumSize_0 = surfaceSize_2.getAbsoluteMinimumSize();\n\n // Set minimum triangle size for freestream boundary (in feet)\n absoluteMinimumSize_0.getValue().setValue(mySphereTriangles_ft);\n\n AbsoluteTargetSize absoluteTargetSize_0 = surfaceSize_2.getAbsoluteTargetSize();\n\n // Set target triangle size for freestream boundary in feet\n absoluteTargetSize_0.getValue().setValue(mySphereTriangles_ft);\n\n // Make surface mesh\n if ( bln_makeSurfaceMesh ) meshPipelineController_0.generateSurfaceMesh();\n\n // Make volume mesh\n if ( bln_makeVolumeMesh ) meshPipelineController_0.generateVolumeMesh();\n\n // Save .sim file\n if ( bln_saveMeshFile ) simulation_0.saveState(resolvePath(myOutputMeshFilename));\n\n\n }\n}\n\"\"\")\n fout.close()",
"def updateEMPStudy(self, study_id, study_name, investigation_type, miens_compliant, submit_to_insdc, \n portal_type, study_title, study_alias, pmid, study_abstract, study_description,\n number_samples_collected, number_samples_promised , lab_person,\n lab_person_contact, emp_person, first_contact, most_recent_contact, sample_type, \n has_physical_specimen, has_extracted_data, timeseries, spatial_series,\n principal_investigator, principal_investigator_contact, default_emp_status, funding,\n includes_timeseries):\n con = self.getMetadataDatabaseConnection()\n results = con.cursor().callproc('qiime_assets.emp_study_update', \n [study_id, study_name, investigation_type, miens_compliant, submit_to_insdc, portal_type, \n study_title, study_alias, pmid, study_abstract, study_description,\n number_samples_collected, number_samples_promised , lab_person,\n lab_person_contact, emp_person, first_contact, most_recent_contact, sample_type, \n has_physical_specimen, has_extracted_data, timeseries, spatial_series,\n principal_investigator, principal_investigator_contact, default_emp_status, funding,\n includes_timeseries])",
"def fusion_api_get_sas_logical_jbods(self, uri=None, param='', api=None, headers=None):\n return self.sas_logical_jbods.get(uri=uri, api=api, headers=headers, param=param)",
"def main():\n\n\t# Script arguments... \n\t\"\"\" If running as standalone, hardcode theWorkspace and inFile \"\"\"\n\ttheWorkspace = arcpy.GetParameterAsText(0)\n\tif not theWorkspace:\n\t\ttheWorkspace = r\"d:\\_dataTest\"\n\ttheWorkspace = r\"d:\\_dataTest\"\n\tarcpy.env.workspace = theWorkspace\n\tarcpy.env.overwriteOutput = True\n\toutWorkspace = os.path.join(theWorkspace, \"_repair\")\n\n\tinFile = arcpy.GetParameterAsText(1)\n\tif not inFile:\n\t\tinFile = \"updateMultipleSourcePaths.csv\"\n\t#inFile = \"FixSource4.csv\"\n\t#inFile = os.path.join(theWorkspace, inFile) + \".csv\"\n\t# opens the infile.csv, read only; then creates tuple of inFile\n\t#f = open(inFile, \"r\") \n\t#update_list = [tuple(line.strip().split(\",\") for line in f)]\n\n\n\tmxd = None\n\toutMXDName = \"none\"\n\tnewPath = []\n\t# makes sure the .csv file exists\n\tif arcpy.Exists(inFile):\n\t\tmyMsgs (\"Repair source list: \" + inFile)\n\t\t# walks thru the workspace to create list of files \n\t\tfor root, dirs, files in os.walk(theWorkspace): \n\t\t\tif root == outWorkspace:\n\t\t\t\tprint(\"heh now\")\n\t\t\t\tpass\n\t\t\t# creates list of .mxd's and works thru them\n\t\t\tmxdList = arcpy.ListFiles(\"*.mxd\")\n\t\t\tfor fileName in mxdList:\n\t\t\t\tfullPath = os.path.join(root, fileName) \n\t\t\t\tmxd = arcpy.mapping.MapDocument(fullPath)\n\t\t\t\tmyMsgs (\"*** Processing mxd: \" + fullPath)\n\t\t\t\t#mxd.findAndReplaceWorkspacePaths(\"v:\\\\\", \"\\\\\\\\dfg.alaska.local\\\\gis\\\\Anchorage\\\\gisshare\\\\\", validate=False)\n\t\t\t\t#mxd.findAndReplaceWorkspacePaths(\"t:\\\\\", \"\\\\\\\\dfg.alaska.local\\\\gis\\\\Anchorage\\\\GISStaff\\\\\", validate=False)\n\t\t\t\t#mxd.findAndReplaceWorkspacePaths(\"u:\\\\\", \"\\\\\\\\dfg.alaska.local\\\\gis\\\\Anchorage\\\\GISStaff\\\\\", validate=False)\n\t\t\t\t# New output mxd....\n\t\t\t\tbasename, extension = os.path.splitext(fileName)\n\t\t\t\toutMXDName = os.path.join(outWorkspace, (str(basename) + \"_fix.mxd\"))\n\t\t\t\t# create list of the tables since they are handle differently\n\t\t\t\ttheTables = arcpy.mapping.ListTableViews(mxd)\n\t\t\t\t# Loops thru layers, checks for broken links and tries to repai\n\t\t\t\tlyrList = arcpy.mapping.ListLayers(mxd)\n\t\t\t\tfor lyr in lyrList:\n\t\t\t\t\tif lyr.isBroken:\n\t\t\t\t\t\tif lyr.isGroupLayer or (\"Events\" in lyr.name):\n\t\t\t\t\t\t\tprint(\"...skipping group or event\")\n\t\t\t\t\t\t\tbreak\n\t\t\t\t\t\t#print(lyr.isServiceLayer)\n\t\t\t\t\t\tif lyr.isServiceLayer:\n\t\t\t\t\t\t\tif lyr.supports(\"SERVICEPROPERTIES\"):\n\t\t\t\t\t\t\t\tcnt = 0\n\t\t\t\t\t\t\t\tfor i, j in lyr.serviceProperties.iteritems():\n\t\t\t\t\t\t\t\t\tif cnt == 2:\n\t\t\t\t\t\t\t\t\t\tdataSource = str(j)\n\t\t\t\t\t\t\t\t\t\tbreak\n\t\t\t\t\t\t\t\t\telse:\n\t\t\t\t\t\t\t\t\t\tcnt += 1 \n\t\t\t\t\t\t\t\tprint(\"sees this as service....using findAndReplsWorkspacePath\")\n\t\t\t\t\t\t\t\tnewPath = findUpdatePath(inFile, dataSource)\n\t\t\t\t\t\t\t\tlyr.findAndReplaceWorkspacePath(lyr.dataSource, newPath, False)\n\t\t\t\t\t\t\telse:\n\t\t\t\t\t\t\t\tprint(\"--> a service layer but no SERVICE PROPOERTIES\")\n\t\t\t\t\t\telse:\n\t\t\t\t\t\t\tprint(lyr.dataSource)\n\t\t\t\t\t\t\tnewPath = findUpdatePath(inFile, lyr.dataSource)\n\t\t\t\t\t\t\tnewDSPath, newDSName = os.path.split(newPath[0])\n\t\t\t\t\t\t\tprint(\"..newDSPAth \" + newDSPath)\n\t\t\t\t\t\t\tprint(\"..newDSName \" + newDSName)\n\t\t\t\t\t\t\tsameType = newPath[1]\n\t\t\t\t\t\t\tprint(\" same type? 
\" + str(sameType))\n\t\t\t\t\t\t\tcvrList = [r\"\\arc\", r\"\\polygon\", r\"\\region\", r\"\\point\", r\"\\tic\" ]\n\t\t\t\t\t\t\t#print newDSPath\n\t\t\t\t\t\t\tif newPath == \"no match\":\n\t\t\t\t\t\t\t\tprint(\"...no match to: \" + lyr.dataSource)\n\t\t\t\t\t\t\t\tnewPath[0] = \"not found\"\n\t\t\t\t\t\t\t\tbreak\n\t\t\t\t\t\t\telif lyr.supports(\"dataSource\") and lyr.supports(\"datasetName\"):\n\t\t\t\t\t\t\t\tif lyr in theTables:\n\t\t\t\t\t\t\t\t\tprint(\"thinks its a table....using findAndReplsWorkspacePath\")\n\t\t\t\t\t\t\t\t\tlyr.findAndReplaceWorkspacePath(lyr.dataSource, newPath, False) \n\t\t\t\t\t\t\t\telif lyr.isRasterLayer:\n\t\t\t\t\t\t\t\t\tprint(\"thinks its a raster....using findAndReplsWorkspacePath\")\n\t\t\t\t\t\t\t\t\t#lyr.replaceDataSource(newPath, \"RASTER_WORKSPACE\", lyr.datasetName, False)\n\t\t\t\t\t\t\t\t\tlyr.findAndReplaceWorkspacePath(lyr.dataSource, newPath, False)\n\t\t\t\t\t\t\t\telif lyr.supports(\"dataSource\") and lyr.supports(\"datasetName\"):\n\t\t\t\t\t\t\t\t\tif not sameType and newPath[1] == \"gdb\":\n\t\t\t\t\t\t\t\t\t\tprint(\"..................moving to fgdb\")\n\t\t\t\t\t\t\t\t\t\tlyr.replaceDataSource(newDSPath, \"FILEGDB_WORKSPACE\", newDSName, False) \n\t\t\t\t\t\t\t\t\telif r\".shp\" in lyr.dataSource:\n\t\t\t\t\t\t\t\t\t\tprint(\"thinks its a shape\")\n\t\t\t\t\t\t\t\t\t\tlyr.replaceDataSource(newDSPath, \"SHAPEFILE_WORKSPACE\", lyr.datasetName, False)\n\t\t\t\t\t\t\t\t\telif r\".sde\" in lyr.dataSource:\n\t\t\t\t\t\t\t\t\t\tprint(\"thinks its a sde\")\n\t\t\t\t\t\t\t\t\t\tlyr.replaceDataSource(newDSPath, \"SDE_Workspace\", lyr.datasetName, False)\n\t\t\t\t\t\t\t\t\telif r\".mdb\" in lyr.dataSource:\n\t\t\t\t\t\t\t\t\t\tprint(\"thinks its a pgdb\")\n\t\t\t\t\t\t\t\t\t\tlyr.replaceDataSource(newDSPath, \"ACCESS_WORKSPACE\", lyr.datasetName, False)\n\t\t\t\t\t\t\t\t\telif r\".gdb\" in lyr.dataSource:\n\t\t\t\t\t\t\t\t\t\tprint(\"thinks its a fgdb\")\n\n\t\t\t\t\t\t\t\t\t\tlyr.replaceDataSource(newDSPath, \"FILEGDB_WORKSPACE\", lyr.datasetName, False)\n\t\t\t\t\t\t\t\t\telif sameType:\n\t\t\t\t\t\t\t\t\t\tfor cvr in cvrList:\n\t\t\t\t\t\t\t\t\t\t\tif cvr in lyr.dataSource:\n\t\t\t\t\t\t\t\t\t\t\t\tprint(\"to WS sametype is True\")\n\t\t\t\t\t\t\t\t\t\t\t\tlyr.replaceDataSource(newDSPath, \"ARCINFO_WORKSPACE\", newDSName, False)\n\t\t\t\t\t\t\t\t\telif not sameType:\n\t\t\t\t\t\t\t\t\t\tfor cvr in cvrList:\n\n\t\t\t\t\t\t\t\t\t\t\tlyr.replaceDataSource(newDSPath, \"FILEGDB_WORKSPACE\", newDSName, False)\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\"\"\"else:\n newPath[0] = \"not found\" \"\"\"\n\t\t\t\t\t\t\tprint(\" **** the new data source: \" + newPath[0])\n\t\t\t\t\t\t\tprint(\"\")\n\n\t\t\t\tprint(outMXDName)\n\t\t\t\t#mxd.saveACopy(outMXDName, '10.1')\n\t\t\tif arcpy.Exists(outMXDName):\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\n\t\t\t\t\toutMXDName.save()\n\t\t\t\telse:\n mxd.saveACopy(outMXDName, '10.1')\n\t\t\t\tdel mxd\n\telse:\n\t\tmyMsgs (\"Repair source list: \" + inFile + \" does not exit.\")\n\n\tmyMsgs('!!! Success !!! ')",
"def run(file_name,verbose=False,clean_up=True):\n # Get file and file name\n file_name,pdb_id,file_to_clean = process_file(file_name)\n nbo_out = NBO_results()\n nbo_out.pdb_id = pdb_id\n #\n if os.path.isfile(file_name):\n # get pdb date and experiment_type\n pdb_inp = iotbx.pdb.input(file_name=file_name)\n nbo_out.year = pdb_inp.extract_header_year()\n nbo_out.experiment_type = pdb_inp.get_experiment_type()\n # get pdb overlap count and clashscore\n option = 'substitute_non_crystallographic_unit_cell_if_necessary=false'\n args = [file_name,'verbose={}'.format(verbose),option]\n try:\n r = nonbonded_overlaps.run(args,out=null_out())\n e = None\n except Sorry as e:\n r = None\n except: # catch *all* exceptions\n e = sys.exc_info()[0]\n r = None\n if e:\n # some error\n if hasattr(e,'message') and (e.message):\n unknown_type_pairs = 'unknown type pairs' in e.message\n multiple_models = 'provide only a single model' in e.message\n bad_cryst1 = 'None valid CRSYT1 records' in e.message\n if unknown_type_pairs:\n macro_molecule = sym = all = -2\n elif multiple_models:\n macro_molecule = sym = all = -3\n elif bad_cryst1:\n macro_molecule = sym = all = -4\n else:\n macro_molecule = sym = all = -1\n else:\n macro_molecule = sym = all = -1\n else:\n # All is good\n macro_molecule = r.result.nb_overlaps_macro_molecule\n sym = r.result.nb_overlaps_due_to_sym_op\n all = r.result.nb_overlaps_all\n #\n nbo_out.macro_molecule_clashscore = r.result.cctbx_clashscore_macro_molecule\n nbo_out.symmetry_clashscore = r.result.cctbx_clashscore_due_to_sym_op\n nbo_out.all_clashscore = r.result.cctbx_clashscore_all\n else:\n # file_name is not a valid pdb file\n macro_molecule = sym = all = -5\n #\n nbo_out.macro_molecule_overlaps = macro_molecule\n nbo_out.symmetry_overlaps = sym\n nbo_out.all_overlaps = all\n\n if verbose:\n outstr = nbo_out.__str__()\n '''\n -1: Other processing issue\n -2: model contains unknown_type_pairs\n -3: multiple models in pdb files\n -4: Bad CRYST1 records, bad crystal symmetry\n -5: File could not be fetched\n '''\n error_dict = {\n -1: 'Other processing issue',\n -2: 'Model contains unknown_type_pairs',\n -3: 'multiple models in pdb files',\n -4: 'Bad CRYST1 records, bad crystal symmetry',\n -5: 'File could not be fetched'}\n if macro_molecule < 0:\n # if we have an error, add the error message\n outstr = error_dict[macro_molecule] + '\\n' + outstr\n else:\n outstr = nbo_out.__repr__()\n\n if clean_up:\n # Cleanup files if they where not already in local folder\n if file_to_clean:\n for fn in file_to_clean:\n if os.path.isfile(fn): os.remove(fn)\n return outstr",
"def setSorConstant(*argv):",
"def main(server=None, input=None):\n if not input:\n input = {}\n\n try:\n # CUSTOM_SCRIPT00034\n #print \"TOGGLE CLOSED INPUT = %s\" % input\n sobject = input.get('sobject')\n update_data = input.get('update_data')\n #print \"CLOSED UPDATE_DATA = %s\" % update_data\n if 'closed' in update_data.keys():\n #print \"CLOSED IN IF\"\n new_val = update_data.get('closed')\n #print \"CLOSED NEW_VAL = %s\" % new_val\n stes = ['twog/order','twog/title','twog/proj','twog/work_order']\n fk = ['order_code','title_code','proj_code','work_order_code']\n st = input.get('search_type').split('?')[0]\n #print \"CLOSED ST = %s\" % st\n idx = int(stes.index(st))\n nidx = idx + 1\n #print \"CLOSED NIDX =%s\" % nidx\n if st != 'twog/work_order':\n my_kids_expr = \"@SOBJECT(%s['%s','%s'])\" % (stes[nidx], fk[idx], sobject.get('code'))\n #print \"CLOSED MY KIDS EXPR =%s\" % my_kids_expr\n my_kids = server.eval(my_kids_expr)\n #print \"CLOSED MY_KIDS LEN = %s\" % len(my_kids)\n for kid in my_kids:\n server.update(kid.get('__search_key__'), {'closed': new_val})\n \n if st in ['twog/proj','twog/work_order']:\n task_code = sobject.get('task_code')\n if task_code not in [None,'']:\n task = server.eval(\"@SOBJECT(sthpw/task['code','%s'])\" % task_code)\n if task:\n task = task[0]\n server.update(task.get('__search_key__'), {'closed': new_val})\n #print \"LEAVING TOGGLE CLOSED\"\n except AttributeError as e:\n traceback.print_exc()\n print str(e) + '\\nMost likely the server object does not exist.'\n raise e\n except KeyError as e:\n traceback.print_exc()\n print str(e) + '\\nMost likely the input dictionary does not exist.'\n raise e\n except Exception as e:\n traceback.print_exc()\n print str(e)\n raise e",
"def run(config, tim=None):\n import dr_lib\n import DST\n \n if tim is not None:\n tim.getTime(False)\n old_time = tim.getOldTime()\n\n if config.data is None:\n raise RuntimeError(\"Need to pass a data filename to the driver \"\\\n +\"script.\")\n\n # Read in geometry if one is provided\n if config.inst_geom is not None:\n if config.verbose:\n print \"Reading in instrument geometry file\"\n \n inst_geom_dst = DST.getInstance(\"application/x-NxsGeom\",\n config.inst_geom)\n else:\n inst_geom_dst = None\n\n # Perform Steps 1-11 on sample data\n d_som1 = dr_lib.process_sas_data(config.data, config, timer=tim,\n inst_geom_dst=inst_geom_dst,\n bkg_subtract=config.bkg_coeff,\n acc_down_time=config.data_acc_down_time.toValErrTuple(),\n bkg_scale=config.bkg_scale,\n trans_data=config.data_trans)\n\n # Perform Steps 1-11 on buffer/solvent only data\n if config.solv is not None:\n s_som1 = dr_lib.process_sas_data(config.solv, config, timer=tim,\n inst_geom_dst=inst_geom_dst,\n dataset_type=\"solvent\",\n bkg_subtract=config.bkg_coeff,\n acc_down_time=config.solv_acc_down_time.toValErrTuple(),\n bkg_scale=config.bkg_scale,\n trans_data=config.solv_trans)\n else:\n s_som1 = None\n\n # Step 12: Subtract buffer/solvent only spectrum from sample spectrum\n d_som2 = dr_lib.subtract_bkg_from_data(d_som1, s_som1,\n verbose=config.verbose,\n timer=tim,\n dataset1=\"data\",\n dataset2=\"solvent\")\n \n del s_som1, d_som1\n\n # Perform Steps 1-11 on empty-can data\n if config.ecan is not None:\n e_som1 = dr_lib.process_sas_data(config.ecan, config, timer=tim,\n inst_geom_dst=inst_geom_dst,\n dataset_type=\"empty_can\",\n bkg_subtract=config.bkg_coeff,\n acc_down_time=config.ecan_acc_down_time.toValErrTuple(),\n bkg_scale=config.bkg_scale,\n trans_data=config.ecan_trans)\n else:\n e_som1 = None\n\n # Step 13: Subtract empty-can spectrum from sample spectrum\n d_som3 = dr_lib.subtract_bkg_from_data(d_som2, e_som1,\n verbose=config.verbose,\n timer=tim,\n dataset1=\"data\",\n dataset2=\"empty_can\")\n \n del e_som1, d_som2\n\n # Perform Steps 1-11 on open beam data\n if config.open is not None:\n o_som1 = dr_lib.process_sas_data(config.open, config, timer=tim,\n inst_geom_dst=inst_geom_dst,\n dataset_type=\"open_beam\",\n bkg_subtract=config.bkg_coeff,\n acc_down_time=config.open_acc_down_time.toValErrTuple(),\n bkg_scale=config.bkg_scale)\n else:\n o_som1 = None\n \n # Step 14: Subtract open beam spectrum from sample spectrum\n d_som4 = dr_lib.subtract_bkg_from_data(d_som3, o_som1,\n verbose=config.verbose,\n timer=tim,\n dataset1=\"data\",\n dataset2=\"open_beam\")\n \n del o_som1, d_som3\n\n # Perform Steps 1-11 on dark current data\n if config.dkcur is not None:\n dc_som1 = dr_lib.process_sas_data(config.open, config, timer=tim,\n inst_geom_dst=inst_geom_dst,\n dataset_type=\"dark_current\",\n bkg_subtract=config.bkg_coeff)\n else:\n dc_som1 = None\n \n # Step 15: Subtract dark current spectrum from sample spectrum\n d_som5 = dr_lib.subtract_bkg_from_data(d_som4, dc_som1,\n verbose=config.verbose,\n timer=tim,\n dataset1=\"data\",\n dataset2=\"dark_current\")\n \n del dc_som1, d_som4 \n\n # Create 2D distributions is necessary\n if config.dump_Q_r:\n d_som5_1 = dr_lib.create_param_vs_Y(d_som5, \"radius\", \"param_array\",\n config.r_bins.toNessiList(),\n rebin_axis=config.Q_bins.toNessiList(),\n binnorm=True,\n y_label=\"S\",\n y_units=\"Counts / A^-1 m\",\n x_labels=[\"Radius\", \"Q\"],\n x_units=[\"m\", \"1/Angstroms\"])\n\n hlr_utils.write_file(config.output, \"text/Dave2d\", d_som5_1,\n 
output_ext=\"qvr\", verbose=config.verbose,\n data_ext=config.ext_replacement,\n path_replacement=config.path_replacement,\n message=\"S(r, Q) information\")\n\n del d_som5_1\n \n if config.dump_Q_theta:\n d_som5_1 = dr_lib.create_param_vs_Y(d_som5, \"polar\", \"param_array\",\n config.theta_bins.toNessiList(),\n rebin_axis=config.Q_bins.toNessiList(),\n binnorm=True,\n y_label=\"S\",\n y_units=\"Counts / A^-1 rads\",\n x_labels=[\"Polar Angle\", \"Q\"],\n x_units=[\"rads\", \"1/Angstroms\"])\n\n hlr_utils.write_file(config.output, \"text/Dave2d\", d_som5_1,\n output_ext=\"qvt\", verbose=config.verbose,\n data_ext=config.ext_replacement,\n path_replacement=config.path_replacement,\n message=\"S(theta, Q) information\")\n\n del d_som5_1\n \n # Steps 16 and 17: Rebin and sum all spectra\n if config.verbose:\n print \"Rebinning and summing for final spectrum\"\n \n if tim is not None:\n tim.getTime(False)\n\n if config.dump_frac_rebin:\n set_conf = config\n else:\n set_conf = None\n\n d_som6 = dr_lib.sum_by_rebin_frac(d_som5, config.Q_bins.toNessiList(),\n configure=set_conf)\n\n if tim is not None:\n tim.getTime(msg=\"After rebinning and summing for spectrum\") \n\n del d_som5\n\n if config.facility == \"LENS\":\n # Step 18: Scale final spectrum by Q bin centers\n if config.verbose:\n print \"Scaling final spectrum by Q centers\"\n \n if tim is not None:\n tim.getTime(False)\n\n d_som7 = dr_lib.fix_bin_contents(d_som6, scale=True, width=True,\n units=\"1/Angstroms\")\n\n if tim is not None:\n tim.getTime(msg=\"After scaling final spectrum\") \n else:\n d_som7 = d_som6\n\n del d_som6\n\n # If rescaling factor present, rescale the data\n if config.rescale_final is not None:\n import common_lib\n d_som8 = common_lib.mult_ncerr(d_som7, (config.rescale_final, 0.0))\n else:\n d_som8 = d_som7\n\n del d_som7\n \n hlr_utils.write_file(config.output, \"text/Spec\", d_som8,\n verbose=config.verbose,\n replace_path=False,\n replace_ext=False,\n message=\"combined S(Q) information\")\n\n # Create 1D canSAS file\n hlr_utils.write_file(config.output, \"text/canSAS\", d_som8,\n verbose=config.verbose,\n output_ext=\"xml\",\n data_ext=config.ext_replacement, \n path_replacement=config.path_replacement,\n message=\"combined S(Q) information\")\n \n d_som8.attr_list[\"config\"] = config\n\n hlr_utils.write_file(config.output, \"text/rmd\", d_som8,\n output_ext=\"rmd\",\n data_ext=config.ext_replacement, \n path_replacement=config.path_replacement,\n verbose=config.verbose,\n message=\"metadata\")\n\n if tim is not None:\n tim.setOldTime(old_time)\n tim.getTime(msg=\"Total Running Time\")",
"def cli(\n codes,\n # vasp_code,\n # phonopy_code,\n # thirdorder_sow_code,\n # thirdorder_reap_code,\n # shengbte_code,\n structure,\n protocol,\n parameters,\n supercell_matrix,\n neighbor,\n pseudo_family,\n max_num_machines,\n num_mpiprocs_per_machine,\n with_mpi,\n max_wallclock_seconds,\n daemon,\n):\n # set up calculation\n\n if not structure:\n structure = helpers.get_test_structure()\n\n (\n vasp_code,\n phonopy_code,\n thirdorder_reap_code,\n thirdorder_sow_code,\n shengbte_code,\n ) = codes\n protocol = helpers.get_protocol_parameters(name=protocol, type=parameters)\n potential_mapping = {}\n for kind in structure.get_kind_names():\n potential_mapping[kind] = kind\n\n base_incar_dict = protocol[\"incar\"]\n\n base_config = {\n \"code_string\": get_code_string(vasp_code),\n \"kpoints_density\": protocol[\"kpoints_density\"], # k-point density,\n \"potential_family\": pseudo_family,\n \"potential_mapping\": potential_mapping,\n \"options\": {\n \"resources\": {\n \"num_machines\": max_num_machines,\n \"num_mpiprocs_per_machine\": num_mpiprocs_per_machine,\n },\n \"max_wallclock_seconds\": max_wallclock_seconds,\n },\n }\n base_parser_settings = protocol[\"parser_settings\"]\n forces_config = base_config.copy()\n forces_config.update(\n {\n \"parser_settings\": base_parser_settings,\n \"parameters\": {\"incar\": base_incar_dict},\n }\n )\n nac_config = base_config.copy()\n nac_parser_settings = protocol[\"nac_parser_settings\"]\n nac_parser_settings.update(base_parser_settings)\n nac_incar_dict = protocol[\"nac_incar\"]\n nac_incar_dict.update(base_incar_dict)\n nac_config.update(\n {\n \"parser_settings\": nac_parser_settings,\n \"parameters\": {\"incar\": nac_incar_dict},\n }\n )\n\n inputs = {\n \"structure\": structure,\n \"phonopy\": {\n \"run_phonopy\": Bool(True),\n \"remote_phonopy\": Bool(True),\n \"code_string\": Str(get_code_string(phonopy_code)),\n \"phonon_settings\": Dict(\n dict={\n \"mesh\": protocol[\"mesh\"],\n \"supercell_matrix\": supercell_matrix,\n \"distance\": protocol[\"kpoints_distance\"],\n \"is_nac\": True,\n }\n ),\n \"symmetry_tolerance\": Float(protocol[\"symmetry_tolerance\"]),\n \"options\": Dict(dict=base_config[\"options\"]),\n \"metadata\": {\n \"label\": \"example\",\n \"description\": \"Test job submission with the phonopy\",\n },\n },\n \"thirdorder\": {\n \"thirdorder_sow\": {\n \"code\": thirdorder_sow_code,\n \"parameters\": Dict(\n dict={\n \"supercell_matrix\": supercell_matrix,\n \"option\": neighbor,\n }\n ),\n },\n \"thirdorder_reap\": {\n \"code\": thirdorder_reap_code,\n \"parameters\": Dict(\n dict={\n \"supercell_matrix\": supercell_matrix,\n \"option\": neighbor,\n }\n ),\n },\n },\n \"shengbte\": {\n \"control\": Dict(\n dict={\n \"allocations\": {\"ngrid\": [3, 3, 3], \"norientations\": 3},\n \"crystal\": {\n \"orientations\": [[1, 0, 0], [1, 1, 0], [1, 1, 1]]\n # 'masses': [],\n # 'gfactors': []\n },\n \"parameters\": {\n \"T\": 300,\n # 'T_min': 0,\n # 'T_max': 0,\n # 'T_step': 0,\n # 'omega_max': 0,\n # 'scalebroad': 0,\n # 'rmin': 0,\n # 'rmax': 0,\n # 'dr': 0,\n # 'maxiter': 0,\n # 'nticks': 0,\n # 'eps': 0\n },\n \"flags\": {\n # \"espresso\": False,\n \"nonanalytic\": True,\n \"nanowires\": True,\n },\n }\n ),\n \"calculation\": {\n \"code\": shengbte_code,\n \"metadata\": {\n \"options\": {\n \"resources\": {\n \"num_machines\": int(max_num_machines),\n \"num_mpiprocs_per_machine\": int(\n num_mpiprocs_per_machine\n ),\n },\n \"withmpi\": with_mpi,\n },\n },\n },\n },\n \"vasp_settings\": Dict(\n dict={\"forces\": 
forces_config, \"nac\": nac_config}\n ),\n # 'clean_workdir': Bool(True),\n # 'dry_run': Bool(True),\n \"metadata\": {\n \"description\": \"Test job submission with the aiida_shengbte thirdorder plugin\",\n },\n }\n from aiida_shengbte.cli.utils import launch\n\n launch.launch_process(WorkflowFactory(\"shengbte.vasp\"), daemon, **inputs)",
"def IRIS_update(input, address):\n \n t_update_1 = datetime.now()\n \n client_iris = Client_iris()\n \n events, address_events = quake_info(address, 'info')\n len_events = len(events)\n \n Stas_iris = []\n \n for i in range(0, len_events):\n \n target_path = address_events\n Sta_iris = IRIS_available(input, events[i], target_path[i], event_number = i)\n Stas_iris.append(Sta_iris)\n \n if input['iris_bulk'] != 'Y':\n print 'IRIS-Availability for event: ' + str(i+1) + str('/') + \\\n str(len_events) + ' ---> ' + 'DONE'\n else:\n print 'IRIS-bulkfile for event : ' + str(i+1) + str('/') + \\\n str(len_events) + ' ---> ' + 'DONE'\n \n if input['get_continuous'] == 'Y':\n for j in range(1, len_events):\n Stas_iris.append(Sta_iris)\n print 'IRIS-Availability for event: ' + str(j+1) + str('/') + \\\n str(len_events) + ' --->' + 'DONE'\n break\n \n \n Stas_req = []\n \n for k in range(0, len_events):\n Sta_all = Stas_iris[k]\n Stas_req.append(rm_duplicate(Sta_all, \\\n address = os.path.join(address_events[k])))\n \n return Stas_req",
"def mark_obsolete_in_dataset( dataset_name, engine, table ):\n s = table.select( table.c.dataset_name==dataset_name ) \n result = conn.execute(s) # all rows of replica.files with the specified dataset_name\n\n sr = []\n srf = {}\n for row in result:\n # Note that you can loop through result this way only once.\n sr.append(row)\n fn = filename(row)\n if fn in srf:\n srf[fn].append(row)\n else:\n srf[fn] = [row]\n\n #sr.sort( key=filename )\n\n for fn,rows in srf.items():\n if len(rows)<=1: continue\n rows.sort( key=rowversion )\n print \"jfp will keep abs_path=\",rows[-1]['abs_path'],\"status=\",rows[-1]['status'],\\\n \"dataset_name=\",rows[-1]['dataset_name']\n for row in rows[0:-1]:\n abs_path = row['abs_path']\n dataset_name = \"old_\"+row['dataset_name']\n print \"jfp will do update for abs_path=\",abs_path,\"status from\",row['status'],\"to 50\"\n s = table.update().where( table.c.abs_path==abs_path ).\\\n values( status=50 )\n #if dataset_name.find('old_old_')!=0:\n # s = table.update().where( table.c.abs_path==abs_path ).\\\n # values( dataset_name=dataset_name )\n # ... doesn't work, you first have to create a row in replica.datasets with this name.\n result = conn.execute(s)",
"def set_j(cmd, limb, joints, index, delta):\n joint = joints[index]\n cmd[joint] = delta + limb.joint_angle(joint)",
"def update_cds(self, line, cds):\n args = self.extract_cds_args(line)\n cds.add_indices(args['indices'])\n cds.add_phase(args['phase'])\n cds.add_identifier(args['identifier'])\n if 'score' in args:\n cds.add_score(args['score'])",
"def update(self, params):",
"def import_and_save(ADCthres=0, s=False):\n df = import_data(ADCthres, s)\n bus_vec = np.array(range(0,3))\n for bus in bus_vec:\n df_clu = cluster_data(df, bus) \n save_clusters(df_clu, bus)"
] | [
"0.59633976",
"0.55347496",
"0.5399242",
"0.538885",
"0.5283675",
"0.5071874",
"0.5064917",
"0.5011597",
"0.4963516",
"0.48926964",
"0.48888305",
"0.48228374",
"0.4788545",
"0.4787641",
"0.47791782",
"0.4741496",
"0.47407743",
"0.46761432",
"0.46562803",
"0.46489465",
"0.46433657",
"0.46386325",
"0.46300337",
"0.46247742",
"0.46191666",
"0.4618314",
"0.4609664",
"0.46010557",
"0.4582618",
"0.4578484"
] | 0.610272 | 0 |
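Several of the negatives above (for example `fusion_api_get_sas_logical_jbods`) are one-line keywords that simply delegate to a shared REST resource client. A minimal sketch of that delegation pattern, assuming a hypothetical appliance URL, a `/rest/sas-logical-jbods` path, and an `X-API-Version` header — none of which are confirmed by the dataset:

```python
import requests

BASE_URL = "https://appliance.example.com"  # hypothetical appliance address

def get_sas_logical_jbods(uri=None, param="", api=None, headers=None):
    """Forward a GET to the SAS logical JBODs resource, mirroring the
    thin-wrapper style of the fusion_api_* keywords in this dataset."""
    url = uri or BASE_URL + "/rest/sas-logical-jbods"
    hdrs = dict(headers or {})
    if api:  # API version commonly travels in a header on OneView-style clients
        hdrs["X-API-Version"] = str(api)
    resp = requests.get(url + param, headers=hdrs, verify=False)
    resp.raise_for_status()
    return resp.json()
```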
Issues a Patch request for independent sas logical jbods. [Arguments] | def fusion_api_patch_sas_logical_jbods(self, body, uri, api=None, headers=None):
return self.sas_logical_jbods.patch(body=body, uri=uri, api=api, headers=headers) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def fusion_api_edit_sas_logical_jbods(self, body, uri, api=None, headers=None):\n return self.sas_logical_jbods.put(body=body, uri=uri, api=api, headers=headers)",
"def fusion_api_post_sas_logical_jbods(self, body, api=None, headers=None):\n return self.sas_logical_jbods.post(body=body, api=api, headers=headers)",
"def fusion_api_patch_sas_interconnect(self, body=None, uri=None, api=None, headers=None):\n return self.sasics.patch(body=body, uri=uri, api=api, headers=headers)",
"def fusion_api_patch_sas_li(self, body=None, uri=None, api=None, headers=None):\n return self.sasli.patch(body, uri, api, headers)",
"def PatchConcepts(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')",
"def handle_patch(self, api, command):\n return self._make_request_from_command('PATCH', command)",
"def patch(self , request , pk = None ):\r\n return Response({'method':'patch'})",
"def test_patch_creation(self):\n host = synthetic_host(\"myserver\")\n self.create_simple_filesystem(host)\n\n spare_volume_1 = synthetic_volume_full(host)\n spare_volume_2 = synthetic_volume_full(host)\n\n response = self.api_client.patch(\n \"/api/target/\",\n data={\n \"objects\": [\n {\"kind\": \"OST\", \"filesystem_id\": self.fs.id, \"volume_id\": spare_volume_1.id},\n {\"kind\": \"MDT\", \"filesystem_id\": self.fs.id, \"volume_id\": spare_volume_2.id},\n ],\n \"deletions\": [],\n },\n )\n self.assertHttpAccepted(response)",
"def _patch(self, path=None, version=None, params=None,\n data=None, json=None, header=None):\n return self.client.patch(module='mam', path=path, version=version,\n params=params, data=data,\n json=json, header=header)",
"def patch(self, request , pk=None):\n return Response({'message':'PATCH'})",
"def Patch(self, request, global_params=None):\n config = self.GetMethodConfig('Patch')\n return self._RunMethod(\n config, request, global_params=global_params)",
"def Patch(self, request, global_params=None):\n config = self.GetMethodConfig('Patch')\n return self._RunMethod(\n config, request, global_params=global_params)",
"def Patch(self, request, global_params=None):\n config = self.GetMethodConfig('Patch')\n return self._RunMethod(\n config, request, global_params=global_params)",
"def Patch(self, request, global_params=None):\n config = self.GetMethodConfig('Patch')\n return self._RunMethod(\n config, request, global_params=global_params)",
"def Patch(self, request, global_params=None):\n config = self.GetMethodConfig('Patch')\n return self._RunMethod(\n config, request, global_params=global_params)",
"def Patch(self, request, global_params=None):\n config = self.GetMethodConfig('Patch')\n return self._RunMethod(\n config, request, global_params=global_params)",
"def Patch(self, request, global_params=None):\n config = self.GetMethodConfig('Patch')\n return self._RunMethod(\n config, request, global_params=global_params)",
"def Patch(self, request, global_params=None):\n config = self.GetMethodConfig('Patch')\n return self._RunMethod(\n config, request, global_params=global_params)",
"def Patch(self, request, global_params=None):\n config = self.GetMethodConfig('Patch')\n return self._RunMethod(\n config, request, global_params=global_params)",
"def Patch(self, request, global_params=None):\n config = self.GetMethodConfig('Patch')\n return self._RunMethod(\n config, request, global_params=global_params)",
"def Patch(self, request, global_params=None):\n config = self.GetMethodConfig('Patch')\n return self._RunMethod(\n config, request, global_params=global_params)",
"def Patch(self, request, global_params=None):\n config = self.GetMethodConfig('Patch')\n return self._RunMethod(\n config, request, global_params=global_params)",
"def Patch(self, request, global_params=None):\n config = self.GetMethodConfig('Patch')\n return self._RunMethod(\n config, request, global_params=global_params)",
"def Patch(self, request, global_params=None):\n config = self.GetMethodConfig('Patch')\n return self._RunMethod(\n config, request, global_params=global_params)",
"def Patch(self, request, global_params=None):\n config = self.GetMethodConfig('Patch')\n return self._RunMethod(\n config, request, global_params=global_params)",
"def Patch(self, request, global_params=None):\n config = self.GetMethodConfig('Patch')\n return self._RunMethod(\n config, request, global_params=global_params)",
"def Patch(self, request, global_params=None):\n config = self.GetMethodConfig('Patch')\n return self._RunMethod(\n config, request, global_params=global_params)",
"def Patch(self, request, global_params=None):\n config = self.GetMethodConfig('Patch')\n return self._RunMethod(\n config, request, global_params=global_params)",
"def Patch(self, request, global_params=None):\n config = self.GetMethodConfig('Patch')\n return self._RunMethod(\n config, request, global_params=global_params)",
"def Patch(self, request, global_params=None):\n config = self.GetMethodConfig('Patch')\n return self._RunMethod(\n config, request, global_params=global_params)"
] | [
"0.5980176",
"0.55514956",
"0.5444114",
"0.5345474",
"0.5314434",
"0.5190509",
"0.5173068",
"0.51346314",
"0.5098097",
"0.50588393",
"0.5053951",
"0.5053951",
"0.5053951",
"0.5053951",
"0.5053951",
"0.5053951",
"0.5053951",
"0.5053951",
"0.5053951",
"0.5053951",
"0.5053951",
"0.5053951",
"0.5053951",
"0.5053951",
"0.5053951",
"0.5053951",
"0.5053951",
"0.5053951",
"0.5053951",
"0.5053951"
] | 0.7037723 | 0 |
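The `fusion_api_patch_sas_logical_jbods` record above forwards a body and uri to a resource-level `patch`. A hedged sketch of what such a call could look like over plain HTTP, assuming an RFC 6902 JSON-Patch payload and an `X-API-Version` header (both assumptions, not documented behavior):

```python
import requests

def patch_sas_logical_jbod(uri, body, api=None, headers=None):
    # Content type for a JSON-Patch document; merged with any caller headers.
    hdrs = {"Content-Type": "application/json-patch+json", **(headers or {})}
    if api:
        hdrs["X-API-Version"] = str(api)
    resp = requests.patch(uri, json=body, headers=hdrs, verify=False)
    resp.raise_for_status()
    return resp.json()

# Hypothetical usage (resource id and attribute are illustrative only):
# patch_sas_logical_jbod(
#     "https://appliance.example.com/rest/sas-logical-jbods/123",
#     [{"op": "replace", "path": "/name", "value": "renamed-jbod"}],
#     api=800,
# )
```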
patch a scope [Arguments] | def fusion_api_patch_scope(self, uri, body=None, api=None, headers=None, etag=None):
return self.scope.patch(uri=uri, body=body, api=api, headers=headers, etag=etag) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def override(self,scope):",
"def scope_reset(client, args):\n result = client.get_scope()\n if result.is_custom:\n print(\"Proxy is using a custom function to check scope. Cannot set context to scope.\")\n return\n client.context.set_query(result.filter)",
"def updateScopes(markup):\n markupNew = markup.copy()\n if markupNew.getVerbose():\n print u\"updating scopes\"\n modifiers = markupNew.getConTextModeNodes(\"modifier\")\n for modifier in modifiers:\n if(markupNew.getVerbose()):\n print(u\"old scope for %s is %s\"%(modifier.__str__(),modifier.getScope()))\n modifier.setScope()\n if(markupNew.getVerbose()):\n print(u\"new scope for %s is %s\"%(modifier.__str__(),modifier.getScope()))\n\n\n # Now limit scope based on the domains of the spans of the other\n # modifier\n for i in range(len(modifiers)-1):\n modifier = modifiers[i]\n for j in range(i+1,len(modifiers)):\n modifier2 = modifiers[j]\n if( TO.limitScope(modifier,modifier2) and\n modifier2.ti.citem.getRule().lower() == 'terminate'):\n markupNew.add_edge(modifier2,modifier)\n if( TO.limitScope(modifier2,modifier) and\n modifier.ti.citem.getRule().lower() == 'terminate'):\n markupNew.add_edge(modifier,modifier2)\n markupNew.graph[\"__SCOPEUPDATED\"] = True\n return markupNew",
"def patch(*args, **kwargs):\n return update(*args, patch=True, **kwargs)",
"def enterScope(self, name):",
"def _replace(self, scope:MjoScope=..., type:MjoType=..., dimension:MjoDimension=..., modifier:MjoModifier=..., invert:MjoInvert=...) -> 'MjoFlags':\n if scope is Ellipsis: scope = self.scope\n if type is Ellipsis: type = self.type\n if dimension is Ellipsis: dimension = self.dimension\n if modifier is Ellipsis: modifier = self.modifier\n if invert is Ellipsis: invert = self.invert\n return self.fromflags(scope=scope, type=type, dimension=dimension, modifier=modifier, invert=invert)",
"def scope(self, name):\r\n raise NotImplementedError",
"def monkey_patch_global(ctx, param, value):\n # del ctx, param\n if value:\n # from golem.core.variables import P2P_PROTOCOL_ID, TASK_PROTOCOL_ID\n print(\"patching \\n\")\n global P2P_PROTOCOL_ID, TASK_PROTOCOL_ID\n P2P_PROTOCOL_ID = value\n TASK_PROTOCOL_ID = value\n mylist.add(value)\n x = value\n CLASS_PROTOCOL_ID.P2P_ID = value\n CLASS_PROTOCOL_ID.TASK_ID = value",
"def patch_hook():\n\n patch_apply(\"package_to_be_patched.foobar.target_function2\", patch_function)",
"def updateScopes(self):\n if self.getVerbose():\n print(\"updating scopes\")\n self.__SCOPEUPDATED = True\n # make sure each tag has its own self-limited scope\n modifiers = self.getConTextModeNodes(\"modifier\")\n for modifier in modifiers:\n if self.getVerbose():\n print(\"old scope for {0} is {1}\".format(modifier.__str__(), modifier.getScope()))\n modifier.setScope()\n if self.getVerbose():\n print(\"new scope for {0} is {1}\".format(modifier.__str__(), modifier.getScope()))\n\n\n # Now limit scope based on the domains of the spans of the other\n # modifier\n for i in range(len(modifiers)-1):\n modifier = modifiers[i]\n for j in range(i+1, len(modifiers)):\n modifier2 = modifiers[j]\n if modifier.limitScope(modifier2) and \\\n modifier2.getRule().lower() == 'terminate':\n self.add_edge(modifier2, modifier)\n if modifier2.limitScope(modifier) and \\\n modifier.getRule().lower() == 'terminate':\n self.add_edge(modifier, modifier2)",
"def fusion_api_edit_scope(self, uri, body=None, api=None, headers=None, eTag=None):\n\n return self.scope.put(uri=uri, body=body, api=api, headers=headers, eTag=eTag)",
"def monkeymodule():\n from _pytest.monkeypatch import MonkeyPatch\n\n mpatch = MonkeyPatch()\n yield mpatch\n mpatch.undo()",
"def scope(self): # noqa: ANN201",
"def patches(*args):\n with cros_build_lib.ContextManagerStack() as stack:\n for arg in args:\n stack.Add(lambda ret=arg: ret)\n yield",
"def update(self, env):\n del env\n return",
"def patch(self, *args, **kwargs):\n self.request(\"patch\", *args, **kwargs)",
"def patch_repos(self):",
"def _patch_implementation(self, original, *args, **kwargs):\n pass",
"def stub_out(self, old, new):\n self.useFixture(fixtures.MonkeyPatch(old, new))",
"def stub_out(self, old, new):\n self.useFixture(fixtures.MonkeyPatch(old, new))",
"def scope(self, scope):\n self._scope = scope",
"def SetDefaultScopeIfEmpty(unused_ref, args, request):\n request.scope = GetDefaultScopeIfEmpty(args)\n return request",
"def cli(ctx, dry_run, verbose):\n ctx.obj = copy(ctx.params)",
"def update(self, system, environment_input):\n pass",
"def patch_sdk():",
"def patch_sdk():",
"def patch_sdk():",
"def patch(self, method, uri, query_param, request_param, headers, **kwargs):\n raise NotImplementedError",
"def apply_telescope_mods(self):\n for key, val in self.telescope_mods.items():\n self.cn.telescope.__dict__[key] = val\n return",
"def __call__(self, env, target, source, *args, **kw):\n return ReplacingCaller._call(self, env, target, source, *args, **kw)"
] | [
"0.6150769",
"0.5922721",
"0.5863875",
"0.5782693",
"0.5780058",
"0.57643557",
"0.5706636",
"0.56096774",
"0.55088556",
"0.5479934",
"0.5457914",
"0.5456176",
"0.5436474",
"0.5418265",
"0.5416216",
"0.5395018",
"0.5330931",
"0.5314338",
"0.52900195",
"0.52900195",
"0.52524596",
"0.52501625",
"0.524463",
"0.5231057",
"0.5191906",
"0.5191906",
"0.5191906",
"0.51767075",
"0.51584125",
"0.51422095"
] | 0.65478545 | 0 |
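`fusion_api_patch_scope` in the record above additionally threads an `etag` through to the client. A sketch of how that etag would typically be used for optimistic concurrency, assuming it maps to an `If-Match` header — a common REST convention, not something this dataset confirms:

```python
import requests

def patch_scope(uri, body=None, api=None, headers=None, etag=None):
    hdrs = {"Content-Type": "application/json-patch+json", **(headers or {})}
    if api:
        hdrs["X-API-Version"] = str(api)
    if etag:
        hdrs["If-Match"] = etag  # server rejects the patch if the scope changed
    resp = requests.patch(uri, json=body or [], headers=hdrs, verify=False)
    resp.raise_for_status()
    return resp.json()
```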
Deletes a Scope from the appliance based on name OR uri [Arguments] | def fusion_api_delete_scope(self, uri=None, api=None, headers=None):
return self.scope.delete(uri, api, headers) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def scope_delete(client, args):\n client.set_scope([])",
"def delete_scope_cli(args: Namespace):\n\n # Get the base profile\n profile, base_config = extract_profile(args)\n\n # Get the workspace groups\n groups = get_groups(profile)\n\n # Get the existing scopes\n scopes = extract_scopes(profile)\n\n # Check scope name\n scope_name = args.scope_name\n scope_exists = scope_name in scopes\n\n # Construct the access groups\n accesses = ['read', 'write', 'manage']\n access_groups = {\n f'scope-{scope_name}-{access}': access.upper()\n for access in accesses\n }\n\n # Filter the existing groups\n existing_groups = [group for group in access_groups if group in groups]\n\n # Get the acls if the scope exists\n if scope_exists:\n # Get the acls for the scope\n acls = get_acls(scope_name, profile)\n else:\n acls = {}\n\n # Set deletions\n to_delete = {\n 'scope': scope_name,\n 'groups': existing_groups,\n 'acls': acls\n }\n\n # Verify deletion parameters\n if (not args.a and not args.s) or not scope_exists:\n to_delete.pop('scope')\n if (not args.a and not args.g) or not existing_groups:\n to_delete.pop('groups')\n if (not args.a and not args.c) or not acls:\n to_delete.pop('acls')\n\n # Set the deletion warning\n deletion_warning = ''\n if 'scope' in to_delete:\n deletion_warning += '\\nScope:'\n deletion_warning += f'\\n\\t{to_delete[\"scope\"]}'\n if 'groups' in to_delete:\n deletion_warning += '\\nGroups:'\n for group in to_delete['groups']:\n deletion_warning += f'\\n\\t{group}'\n if 'acls' in to_delete:\n deletion_warning += '\\nAcls:'\n for acl, permission in to_delete['acls'].items():\n deletion_warning += f'\\n\\t{(permission+\":\").ljust(8)}{acl}'\n\n deletion_warning = 'The following resources will be deleted:' + deletion_warning\n\n # Provide the debug output\n if args.d:\n print(deletion_warning)\n\n # Check for confirmation\n elif to_delete and (args.q or input(deletion_warning + '\\n(Y/N):').upper() == 'Y'):\n for principal in to_delete.get('acls', []):\n # Remove the existing acl\n delete_acl(principal, scope_name, profile)\n for group in to_delete.get('groups', []):\n # Remove the existing group\n delete_group(group, profile)\n if 'scope' in to_delete:\n # Delete the scope\n delete_scope(scope_name, profile)",
"def fusion_api_delete_logical_enclosure(self, name=None, uri=None, param='', api=None, headers=None):\n return self.logical_enclosure.delete(name=name, uri=uri, param=param, api=api, headers=headers)",
"def fusion_api_remove_enclosure(self, name=None, uri=None, param='', api=None, headers=None):\n return self.enclosure.delete(name=name, uri=uri, param=param, api=api, headers=headers)",
"def fusion_api_delete_vwwn_range(self, name=None, uri=None, api=None, headers=None):\n return self.vwwnrange.delete(name, uri, api, headers)",
"def fusion_api_delete_lig(self, name=None, uri=None, api=None, headers=None, etag=None):\n return self.lig.delete(name=name, uri=uri, api=api, headers=headers, etag=etag)",
"def delete(self, layer='', name='', uid='', params={}):\n return self.__common_client._post_with_layer('delete-access-rule', layer, name, uid, params)",
"def fusion_api_delete_vsn_range(self, name=None, uri=None, api=None, headers=None):\n return self.vsnrange.delete(name, uri, api, headers)",
"def leaveScope(self, name):",
"def fusion_api_delete_vmac_range(self, name=None, uri=None, api=None, headers=None):\n return self.vmacrange.delete(name, uri, api, headers)",
"def remove_scope(self, ):\n if self.AttributeNames.SCOPE in self.attrs:\n del self.attrs[self.AttributeNames.SCOPE]\n return self",
"def delete(self, args):\n try:\n db = get_db('intents')\n intents = db.delete_intent(args['intent'])\n resp = jsonify(intents=intents)\n resp.status_code = 200\n return resp\n except DatabaseError as error:\n resp = jsonify(error=error)\n resp.status_code = 500\n return resp\n except DatabaseInputError as error:\n resp = jsonify(error=error)\n resp.status_code = 400\n return resp",
"def delete(isamAppliance, name, check_mode=False, force=False):\n ret_obj = search(isamAppliance, name, check_mode=check_mode, force=force)\n chain_id = ret_obj['data']\n\n if chain_id == {}:\n logger.info(\"STS Chain {0} not found, skipping delete.\".format(name))\n else:\n if check_mode is True:\n return isamAppliance.create_return_object(changed=True)\n else:\n return isamAppliance.invoke_delete(\n \"Delete a specific STS chain\",\n \"{0}/{1}\".format(uri, chain_id),\n requires_modules=requires_modules,\n requires_version=requires_version)\n\n return isamAppliance.create_return_object()",
"def delete_overrides(self, app, name, namespace):\n return self._delete(self._path(app) +\n '?name=' + name +\n '&namespace=' + namespace)",
"def fusion_api_delete_lsg(self, name=None, uri=None, api=None, headers=None):\n return self.lsg.delete(name=name, uri=uri, api=api, headers=headers)",
"def deleteScope():\n global currScope\n scopeStack.pop()\n currScope = scopeStack[-1]",
"def delete(**args):\n\tglobal _objstore\n\t_objstore = _objstore or ObjStore()\n\n\t_objstore.delete(args['type'], args['name'])\n\treturn {'message':'ok'}",
"def delete(self, uri, where, selectionArgs):\n pass",
"def fusion_api_delete_rack_manager(self, uri, name=None, param='', api=None, headers=None):\n return self.rackmanager.delete(uri=uri, name=name, param=param, api=api, headers=headers)",
"def fusion_api_delete_sas_lig(self, name=None, uri=None, api=None, headers=None):\n return self.saslig.delete(name=name, uri=uri, api=api, headers=headers)",
"def delete(self, args, intent):\n if 'all' in args.keys() and args['all'] == True:\n try:\n db = get_db('expressions')\n db_results = db.delete_all_intent_expressions(intent)\n expressions = [x[1] for x in db_results]\n resp = jsonify(intent=intent, expressions=expressions)\n return resp\n except DatabaseError as error:\n resp = jsonify(error=error.value)\n resp.status_code = 500\n return resp\n except DatabaseInputError as error:\n resp = jsonify(error=error.value)\n resp.status_code = 400\n return resp \n elif args['expressions']:\n try:\n db = get_db('expressions')\n db_results = db.delete_expressions_from_intent(intent, args['expressions'])\n expressions = [x[1] for x in db_results]\n resp = jsonify(intent=intent, expressions=expressions, deleted_expressions=args['expressions'])\n return resp\n except DatabaseError as error:\n resp = jsonify(error=error.value)\n resp.status_code = 500\n return resp\n except DatabaseInputError as error:\n resp = jsonify(error=error.value)\n resp.status_code = 400\n return resp",
"def delete(self, name):\n\n pass",
"def fusion_api_delete_enclosure_group(self, name=None, uri=None, api=None, headers=None):\n return self.enclosure_group.delete(name, uri, api, headers)",
"def DELETE(self, uri):\n def body(conn, cur):\n self.enforce_right('owner', uri)\n if web.ctx.ermrest_history_snaptime is not None:\n raise exception.Forbidden('deletion of catalog at previous revision')\n if web.ctx.ermrest_history_snaprange is not None:\n # should not be possible bug check anyway...\n raise NotImplementedError('deletion of catalog with snapshot range')\n self.set_http_etag( web.ctx.ermrest_catalog_model.etag() )\n self.http_check_preconditions(method='DELETE')\n self.emit_headers()\n return True\n\n def post_commit(destroy):\n web.ctx.ermrest_registry.unregister(self.catalog_id)\n web.ctx.status = '204 No Content'\n return ''\n\n return self.perform(body, post_commit)",
"def app_delete(self, name):\n self.core.api.os.shell.cmd('{0} delete app /app.name:\"{1}\"'.format(self.APP_CMD, name))",
"def fusion_api_delete_fabric(self, name=None, uri=None, api=None, headers=None):\n return self.fabric.delete(name, uri, api, headers)",
"def del_amenity(a_id):\n the_amenity = storage.get(Amenity, a_id)\n if the_amenity is not None:\n storage.delete(the_amenity)\n storage.save()\n return jsonify({}), 200\n abort(404)",
"def fusion_api_delete_sas_li(self, name=None, uri=None, api=None, headers=None):\n return self.sasli.delete(name=name, uri=uri, api=api, headers=headers)",
"def delete_specific_amenity(amenity_id):\n amenity = storage.get('Amenity', amenity_id)\n if not amenity:\n abort(404)\n storage.delete(amenity)\n storage.save()\n return make_response(jsonify({}), 200)",
"def fusion_api_delete_repository(self, uri, api=None, headers=None):\n return self.repository.delete(uri=uri, api=api, headers=headers)"
] | [
"0.73025864",
"0.7263487",
"0.603533",
"0.59936666",
"0.5951854",
"0.58858526",
"0.5821072",
"0.5802672",
"0.57769704",
"0.56434864",
"0.56293416",
"0.5628568",
"0.55978626",
"0.5558368",
"0.5539387",
"0.55280393",
"0.5519028",
"0.5513393",
"0.55118746",
"0.5482532",
"0.5471457",
"0.5457231",
"0.54454136",
"0.5423409",
"0.541954",
"0.54176193",
"0.5403835",
"0.5376062",
"0.5372282",
"0.5356016"
] | 0.7602069 | 0 |
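The delete-scope record above accepts a name OR a uri. One plausible way to honor that contract is to resolve the name to a uri first and then issue the DELETE; the `filter` query syntax and the `members` response shape below are illustrative assumptions:

```python
import requests

def delete_scope(base, name=None, uri=None, headers=None):
    if uri is None:
        if name is None:
            raise ValueError("need a scope name or uri")
        # Hypothetical lookup: resolve the scope name to its resource uri.
        found = requests.get(base + "/rest/scopes",
                             params={"filter": 'name eq "%s"' % name},
                             headers=headers, verify=False).json()
        uri = base + found["members"][0]["uri"]  # assumes exactly one match
    resp = requests.delete(uri, headers=headers, verify=False)
    resp.raise_for_status()
    return resp.status_code
```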
Fusion API Get Appliance Certificate [Example] ${resp} = Fusion Api Get Appliance Certificate | | | def fusion_api_get_appliance_certificate(self, api=None, headers=None):
return self.appliance_certificate.get(api, headers) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def fusion_api_get_appliance_certificate(self, api=None, headers=None):\n return self.wsc.get(api=api, headers=headers)",
"def get_ssl_certificate() :",
"def certificate_auth():\r\n url = 'https://www.12306.cn'\r\n response = requests.get(url, verify=False)\r\n print(response.status_code)\r\n print(response.text)",
"def fusion_api_get_certificate_info(self, uri=None, api=None, param='', headers=None):\n param = '/certificates/https/'\n return self.ic.get(uri=uri, api=api, headers=headers, param=param)",
"def get_certificate(self, url):\n bearer = 'Authorization: Bearer '+str(self.exchanged_token).split('\\n', 1)[0]\n data = json.dumps({\"service_id\": \"x509\"})\n\n headers = StringIO()\n buffers = StringIO()\n\n c = pycurl.Curl()\n c.setopt(pycurl.URL, url)\n c.setopt(pycurl.HTTPHEADER, [bearer, 'Content-Type: application/json'])\n c.setopt(pycurl.POST, 1)\n c.setopt(pycurl.POSTFIELDS, data)\n c.setopt(c.WRITEFUNCTION, buffers.write)\n c.setopt(c.HEADERFUNCTION, headers.write)\n c.setopt(c.VERBOSE, True)\n\n try:\n c.perform()\n status = c.getinfo(c.RESPONSE_CODE)\n c.close()\n body = buffers.getvalue()\n\n if str(status) != \"303\" :\n self.log.error(\"On \\\"get redirect curl\\\": %s , http error: %s \" % (body, str(status)))\n return False \n except pycurl.error, error:\n errno, errstr = error\n self.log.info('An error occurred: %s' % errstr)\n return False\n \n redirect = self.tts\n for item in headers.getvalue().split(\"\\n\"):\n if \"location\" in item:\n redirect = redirect + item.strip().replace(\"location: \", \"\")\n\n headers = {'Authorization': 'Bearer ' + self.exchanged_token.strip()}\n response = requests.get(redirect, headers=headers)\n\n try:\n response.raise_for_status()\n except requests.exceptions.HTTPError as e:\n # Whoops it wasn't a 200\n self.log.error(\"get_certificate() Error: %s \" %str(e))\n return False\n\n with open('/tmp/output.json', 'w') as outf:\n outf.write(response.content)\n else:\n self.log.error(\"No location in redirect response\")\n\n return True",
"def get_ssl_certificate():",
"def fusion_api_get_ca_certificate(self, uri=None, api=None, headers=None, param=''):\n return self.ca.get(uri=uri, api=api, headers=headers, param=param)",
"def fusion_api_get_client_certificate(self, ip, api=None, headers=None):\n return self.client_certificate.get(ip, api, headers)",
"def Certificate(self) -> _n_8_t_0:",
"def Certificate(self) -> _n_8_t_0:",
"def _parse_certificate(cls, response):\n links = _parse_header_links(response)\n try:\n cert_chain_uri = links[u'up'][u'url']\n except KeyError:\n cert_chain_uri = None\n return (\n response.content()\n .addCallback(\n lambda body: messages.CertificateResource(\n uri=cls._maybe_location(response),\n cert_chain_uri=cert_chain_uri,\n body=body))\n )",
"def fusion_api_get_server_certificate(self, aliasname, api=None, headers=None):\n return self.server_certificate.get(aliasname, api, headers)",
"def fusion_api_get_certificate_status(self, api=None, headers=None):\n return self.certificate_status.get(api, headers)",
"def fusion_api_get_remote_certificate(self, ip, api=None, headers=None):\n return self.remote_certificate.get(ip, api, headers)",
"def credential_get(uniqueID: str):\n\n cert = safeisland.certificate(uniqueID)\n return {\"payload\": cert}",
"def get_certificate_command(client: KeyVaultClient, args: dict[str, Any]) -> CommandResults:\n vault_name = args.get('vault_name', '')\n certificate_name = args.get('certificate_name', '')\n certificate_version = args.get('certificate_version', '')\n response = client.get_certificate_request(\n vault_name, certificate_name, certificate_version)\n\n outputs = copy.deepcopy(response)\n outputs['attributes'] = convert_time_attributes_to_iso(outputs['attributes'])\n outputs['policy']['attributes'] = convert_time_attributes_to_iso(outputs['policy']['attributes'])\n\n readable_response = {'certificate_id': response.get(\n 'id'), **convert_attributes_to_readable(response.get('attributes', {}).copy())}\n outputs[VAULT_NAME_CONTEXT_FIELD] = vault_name\n\n readable_output = tableToMarkdown(f'{certificate_name} Information',\n readable_response,\n ['certificate_id', 'enabled', 'create_time', 'update_time', 'expiry_time'],\n removeNull=True,\n headerTransform=string_to_table_header)\n command_results = CommandResults(\n outputs_prefix='AzureKeyVault.Certificate',\n outputs_key_field='id',\n outputs=outputs,\n raw_response=response,\n readable_output=readable_output,\n ignore_auto_extract=True\n )\n\n return command_results",
"def certificate(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"certificate\")",
"def certificate(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"certificate\")",
"def certificate(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"certificate\")",
"def request_cert(session, domain_name, validation_domain):\n if session is None:\n return None\n\n client = session.client('acm')\n validation_options = [\n {\n 'DomainName': domain_name,\n 'ValidationDomain': validation_domain\n },\n ]\n response = client.request_certificate(DomainName=domain_name,\n DomainValidationOptions=validation_options)\n return response",
"def certificate(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"certificate\")",
"def request_cert():\n\n api_request = shallow_copy(props)\n\n for key in ['ServiceToken', 'Region', 'Tags', 'Route53RoleArn']:\n api_request.pop(key, None)\n\n if 'ValidationMethod' in props:\n if props['ValidationMethod'] == 'DNS':\n\n # Check that we have all the hosted zone information we need to validate\n # before we create the certificate\n for name in set([props['DomainName']] + props.get('SubjectAlternativeNames', [])):\n get_zone_for(name)\n\n del api_request['DomainValidationOptions']\n\n e['PhysicalResourceId'] = acm.request_certificate(\n IdempotencyToken=i_token,\n **api_request\n )['CertificateArn']\n add_tags()",
"def request(domain):\n if not domain:\n logger.error(\n \"ctl:info:generate\", \"Choose a fully-qualified domain name of the \"\n \"certificate. Must match a domain present on the system\"\n )\n domain = click.prompt(\"Domain name\")\n try:\n client().certificates.request_acme_certificate(domain)\n except Exception as e:\n raise CLIException(str(e))",
"def cert_challenge_http(self) -> 'outputs.CertHttpChallengeResponse':\n return pulumi.get(self, \"cert_challenge_http\")",
"def get_certificate(self, path: Union[bytes, str]) -> str:\n path = _to_bytes_or_null(path)\n certificate = ffi.new(\"char **\")\n ret = lib.Fapi_GetCertificate(self._ctx, path, certificate)\n _chkrc(ret)\n # certificate is guaranteed to be a null-terminated string\n return ffi.string(_get_dptr(certificate, lib.Fapi_Free)).decode()",
"def find_certificate(p): # find_certificate(props, /)\n\n for page in acm.get_paginator('list_certificates').paginate():\n for certificate in page['CertificateSummaryList']:\n log_info(certificate)\n\n if p['DomainName'].lower() == certificate['DomainName']:\n tags = {tag['Key']: tag['Value'] for tag in\n acm.list_tags_for_certificate(**{'CertificateArn': certificate['CertificateArn']})['Tags']}\n\n if (tags.get('cloudformation:' + 'logical-id') == e['LogicalResourceId'] and\n tags.get('cloudformation:' + 'stack-id') == e['StackId'] and\n tags.get('cloudformation:' + 'properties') == hash_func(p)\n ):\n return certificate['CertificateArn']",
"def get_certificate_request(self, vault_name: str,\n certificate_name: str,\n certificate_version: str) -> dict[str, Any]:\n url = f'https://{vault_name}{self.azure_cloud.suffixes.keyvault_dns}/certificates/{certificate_name}'\n if certificate_version:\n url = url + f'/{certificate_version}'\n response = self.http_request(\n 'GET', full_url=url,\n resource=self.get_vault_resource())\n\n return response",
"def credential_list():\n rows = safeisland.list_certificates()\n certs = []\n for row in rows:\n# certs.append(row[\"cert\"])\n certs.append({\"uuid\": row[\"uuid\"], \"cert\": row[\"cert\"]})\n\n return {\"payload\": certs}",
"def certificate_body(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"certificate_body\")",
"def get_certinfo(doc):\n\n #set a two second default timeout to recieve a cert\n socket.setdefaulttimeout(2)\n doc['ssl'] = {} \n\n try:\n cert = ssl.get_server_certificate((doc['hostname'], 443))\n #sometimes certs come back as unicode so cast to str() aka ascii\n cert = M2Crypto.X509.load_cert_string(str(cert))\n\n except:\n syslog.syslog('[*] Failed to get ssl certificate from %s' % doc['hostname'])\n print('[*] Failed to get ssl certificate from %s' % doc['hostname'])\n #lets remove the ssl key and return the doc untouched\n doc.pop('ssl')\n return doc\n\n\n #get creation date\n doc['ssl']['created'] = cert.get_not_before().get_datetime().isoformat()\n #get not valid after, aka expiration data\n doc['ssl']['expire'] = cert.get_not_after().get_datetime().isoformat()\n #get issuer information\n doc['ssl']['issuer'] = cert.get_issuer().as_text()\n #get subject information\n doc['ssl']['subject'] = cert.get_subject().as_text()\n #get keysize, size() returns in bytes, so we multiply * 8 to get the number of bits\n doc['ssl']['keysize'] = cert.get_pubkey().size() * 8\n #get cert fingerprint for comparison\n doc['ssl']['fingerprint'] = cert.get_fingerprint()\n\n return doc"
] | [
"0.6914495",
"0.64815027",
"0.64781034",
"0.64724874",
"0.644901",
"0.63908297",
"0.6243455",
"0.6158818",
"0.6155777",
"0.6155777",
"0.6079546",
"0.5998563",
"0.5967901",
"0.58697426",
"0.5802458",
"0.58006537",
"0.5792219",
"0.5792219",
"0.5792219",
"0.5684703",
"0.5648874",
"0.56340945",
"0.5596035",
"0.55212057",
"0.54479694",
"0.5424187",
"0.5418135",
"0.53756124",
"0.53711873",
"0.536387"
] | 0.70567644 | 0 |
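Several negatives in the two certificate records here (for example `get_certinfo` and `certificate_auth`) fetch a server's TLS certificate directly. A standard-library sketch of that step, independent of any Fusion client; host and port are placeholders, and verification is disabled only because appliance certificates are often self-signed:

```python
import socket
import ssl

def fetch_cert_pem(host, port=443):
    ctx = ssl.create_default_context()
    ctx.check_hostname = False
    ctx.verify_mode = ssl.CERT_NONE  # accept self-signed appliance certs
    with socket.create_connection((host, port), timeout=5) as sock:
        with ctx.wrap_socket(sock, server_hostname=host) as tls:
            der = tls.getpeercert(binary_form=True)  # raw DER bytes
    return ssl.DER_cert_to_PEM_cert(der)

# print(fetch_cert_pem("appliance.example.com"))  # hypothetical host
```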
Fusion API Create Appliance Selfsigned Certificate [Arguments] body [Example] ${resp} = Fusion Api Create Appliance Selfsigned Certificate | | | | def fusion_api_create_appliance_selfsigned_certificate(self, body, api=None, headers=None):
return self.appliance_certificate.put(body, api, headers) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def fusion_api_create_certificate_request(self, body, uri=None, api=None, param='', headers=None):\n param = '/certificates/https/certificaterequest'\n return self.ic.post(uri=uri, body=body, api=api, headers=headers, param=param)",
"def fusion_api_generate_certificate_signing_request(self, body, api=None, headers=None):\n return self.wsc.post(body, api=api, headers=headers)",
"def create_selfsigned_certificates(name):\n pass",
"def create_ssl_cert_request ( ssl_hostnames ) :\n first_hostname = ssl_hostnames[ 0 ]\n csr_filename = get_ssl_csr_filename( first_hostname )\n key_filename = get_ssl_key_filename( first_hostname )\n openssl_cnf = \"\"\"\n[req]\ndistinguished_name = req_distinguished_name\nreq_extensions = san_ext\n\n[req_distinguished_name]\ncountryName_default = US\nstateOrProvinceName_default = New York\nlocalityName_default = New York\norganizationalUnitName_default = Home Box Office, Inc\ncommonName_default = \"\"\" + first_hostname + \"\"\"\n\n[san_ext]\nbasicConstraints = CA:FALSE\nkeyUsage = nonRepudiation, digitalSignature, keyEncipherment\nsubjectAltName = @sans\n\n[sans]\n\"\"\"\n counter = 0\n for hostname in ssl_hostnames :\n counter += 1\n openssl_cnf += 'DNS.' + str( counter ) + ' = ' + hostname + '\\n'\n\n with open( first_hostname, 'w' ) as f :\n f.write( openssl_cnf )\n cmd = 'openssl req -new -newkey rsa:2048 -nodes -out ' + csr_filename + ' -keyout ' + key_filename\n cmd += ' -config ' + first_hostname + ' -subj \"/C=US/ST=New York/L=New York/O=Home Box Office Inc/CN=' + first_hostname + '\"'\n keygen = subprocess.call( cmd, shell = True )\n os.remove( first_hostname )\n if keygen != 0 :\n print \"Generation of SSL request failed!\"\n return None\n\n return { 'csr-filename' : csr_filename, 'key-filename' : key_filename }",
"def req_handler(args):\n key = _get_key(args)\n subject = get_subject_arguments()\n req = create_certificate_request(key, subject=subject, file_name=args.req_out)\n if not args.req_out:\n print(print_certificate_request(req))\n return req",
"def Certificate(self) -> _n_8_t_0:",
"def Certificate(self) -> _n_8_t_0:",
"def generate(name, domain, country, state, locale, email,\n keytype, keylength):\n if not domain:\n logger.error(\n \"ctl:info:generate\", \"Choose a fully-qualified domain name of the \"\n \"certificate. Must match a domain present on the system\"\n )\n domain = click.prompt(\"Domain name\")\n if not country:\n logger.info(\n \"ctl:cert:generate\",\n \"Two-character country code (ex.: 'US' or 'CA')\"\n )\n country = click.prompt(\"Country code\")\n if not state:\n state = click.prompt(\"State/Province\")\n if not locale:\n locale = click.prompt(\"City/Town/Locale\")\n if not email:\n email = click.prompt(\"Contact email [optional]\")\n try:\n cmd = client().certificates.generate\n job, data = cmd(\n name, domain, country, state, locale, email, keytype, keylength)\n handle_job(job)\n except Exception as e:\n raise CLIException(str(e))",
"def request_cert():\n\n api_request = shallow_copy(props)\n\n for key in ['ServiceToken', 'Region', 'Tags', 'Route53RoleArn']:\n api_request.pop(key, None)\n\n if 'ValidationMethod' in props:\n if props['ValidationMethod'] == 'DNS':\n\n # Check that we have all the hosted zone information we need to validate\n # before we create the certificate\n for name in set([props['DomainName']] + props.get('SubjectAlternativeNames', [])):\n get_zone_for(name)\n\n del api_request['DomainValidationOptions']\n\n e['PhysicalResourceId'] = acm.request_certificate(\n IdempotencyToken=i_token,\n **api_request\n )['CertificateArn']\n add_tags()",
"def __init__(__self__,\n resource_name: str,\n args: CertificateArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...",
"def get_ssl_certificate() :",
"def create_self_signed_cert():\n\n # create a key pair\n k = crypto.PKey()\n k.generate_key(crypto.TYPE_RSA, 1024)\n\n # create a self-signed cert\n cert = crypto.X509()\n cert.get_subject().C = \"GP\"\n cert.get_subject().ST = \"GRAD_PROJECT\"\n cert.get_subject().L = \"GRAD_PROJECT\"\n cert.get_subject().OU = \"GRAD_PROJECT\"\n cert.get_subject().CN = gethostname()\n cert.set_serial_number(1000)\n cert.gmtime_adj_notBefore(0)\n cert.gmtime_adj_notAfter(10*365*24*60*60)\n cert.set_issuer(cert.get_subject())\n cert.set_pubkey(k)\n cert.sign(k, 'sha1')\n\n cert_file = open(CERT_FILE, \"wb\")\n cert_file.write(crypto.dump_certificate(crypto.FILETYPE_PEM, cert))\n cert_file.close()\n\n key_file = open(KEY_FILE, \"wb\")\n key_file.write(crypto.dump_privatekey(crypto.FILETYPE_PEM, k))\n key_file.close()",
"def test_add_certificate(self):\n response = self.client.post(\n '/api/v1/certificates', data=json.dumps(new_certificate),\n content_type='application/json',\n headers=self.get_registrar_token())\n result = json.loads(response.data.decode())\n self.assertEqual(result['message'],\n 'Certificate added successfully')\n assert response.status_code == 201",
"def _Run(args, holder, ssl_certificate_ref):\n client = holder.client\n\n certificate_type, self_managed, managed = _ParseCertificateArguments(\n client, args)\n\n if ssl_certificates_utils.IsRegionalSslCertificatesRef(ssl_certificate_ref):\n request = client.messages.ComputeRegionSslCertificatesInsertRequest(\n sslCertificate=client.messages.SslCertificate(\n type=certificate_type,\n name=ssl_certificate_ref.Name(),\n selfManaged=self_managed,\n managed=managed,\n description=args.description),\n region=ssl_certificate_ref.region,\n project=ssl_certificate_ref.project)\n else:\n request = client.messages.ComputeSslCertificatesInsertRequest(\n sslCertificate=client.messages.SslCertificate(\n type=certificate_type,\n name=ssl_certificate_ref.Name(),\n selfManaged=self_managed,\n managed=managed,\n description=args.description),\n project=ssl_certificate_ref.project)\n\n if ssl_certificates_utils.IsRegionalSslCertificatesRef(ssl_certificate_ref):\n collection = client.apitools_client.regionSslCertificates\n else:\n collection = client.apitools_client.sslCertificates\n\n return client.MakeRequests([(collection, 'Insert', request)])",
"def __init__(__self__,\n resource_name: str,\n args: ServerCertificateArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...",
"def fusion_api_upload_certificate_info(self, body, uri=None, api=None, param='', headers=None):\n param = '/certificates/https/'\n return self.ic.put(body=body, uri=uri, api=api, headers=headers, param=param)",
"def CreateCertificate(self, request):\n try:\n params = request._serialize()\n headers = request.headers\n body = self.call(\"CreateCertificate\", params, headers=headers)\n response = json.loads(body)\n model = models.CreateCertificateResponse()\n model._deserialize(response[\"Response\"])\n return model\n except Exception as e:\n if isinstance(e, TencentCloudSDKException):\n raise\n else:\n raise TencentCloudSDKException(type(e).__name__, str(e))",
"def __init__(__self__,\n resource_name: str,\n args: SSLCertificateArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...",
"def selfsign_command(args):\n if args.files:\n die(\"Unexpected positional arguments\")\n\n subject_info = info_from_args(args)\n\n if subject_info.ca:\n msg('Request for CA cert')\n else:\n msg('Request for end-entity cert')\n subject_info.show(msg_show)\n\n # Load private key, create signing request\n key = load_key(args.key, load_password(args.password_file))\n subject_csr = create_x509_req(key, subject_info)\n\n # sign created request\n cert = do_sign(subject_csr, subject_csr, key, args.days, args.path_length, '<selfsign>')\n do_output(cert_to_pem(cert), args, 'x509')",
"def get_ssl_certificate():",
"def fusion_api_create_rabbitmq_client_certificate(self, body, uri=None, api=None, headers=None, param=''):\n return self.rabmq.post(body, uri=uri, api=api, headers=headers, param=param)",
"def sign_handler(args):\n if not args.issuer_key and not args.issuer_cert:\n key = _get_key(args)\n subject = get_subject_arguments()\n\n cert = selfsigned_certificate_for_key(\n key,\n subject=subject,\n serial_number=int(args.serial_number),\n length=args.duration,\n file_name=args.cert_out\n )\n\n else:\n req = _get_request(args)\n issuer_cert = load_certificate(args.issuer_cert)\n issuer_key = load_key(args.issuer_key)\n cert = sign_request(\n req,\n issuer_cert=issuer_cert,\n issuer_key=issuer_key,\n length=args.duration,\n file_name=args.cert_out\n )\n\n if not args.cert_out:\n print(print_certificate(cert))",
"def create_certificate(self, subscription_id, management_host, hackathon_name):\n\n # make sure certificate dir exists\n if not os.path.isdir(self.CERT_BASE):\n self.log.debug('certificate dir not exists')\n os.mkdir(self.CERT_BASE)\n\n base_url = '%s/%s' % (self.CERT_BASE, subscription_id)\n\n pem_url = base_url + '.pem'\n # avoid duplicate pem generation\n if not os.path.isfile(pem_url):\n pem_command = 'openssl req -x509 -nodes -days 365 -newkey rsa:1024 -keyout %s -out %s -batch' % \\\n (pem_url, pem_url)\n commands.getstatusoutput(pem_command)\n else:\n self.log.debug('%s exists' % pem_url)\n\n cert_url = base_url + '.cer'\n # avoid duplicate cert generation\n if not os.path.isfile(cert_url):\n cert_command = 'openssl x509 -inform pem -in %s -outform der -out %s' % (pem_url, cert_url)\n commands.getstatusoutput(cert_command)\n else:\n self.log.debug('%s exists' % cert_url)\n\n azure_key = self.db.find_first_object_by(AzureKey,\n cert_url=cert_url,\n pem_url=pem_url,\n subscription_id=subscription_id,\n management_host=management_host)\n # avoid duplicate azure key\n if azure_key is None:\n azure_key = self.db.add_object_kwargs(AzureKey,\n cert_url=cert_url,\n pem_url=pem_url,\n subscription_id=subscription_id,\n management_host=management_host)\n self.db.commit()\n else:\n self.log.debug('azure key exists')\n\n hackathon_id = self.db.find_first_object_by(Hackathon, name=hackathon_name).id\n hackathon_azure_key = self.db.find_first_object_by(HackathonAzureKey,\n hackathon_id=hackathon_id,\n azure_key_id=azure_key.id)\n # avoid duplicate hackathon azure key\n if hackathon_azure_key is None:\n self.db.add_object_kwargs(HackathonAzureKey,\n hackathon_id=hackathon_id,\n azure_key_id=azure_key.id)\n self.db.commit()\n else:\n self.log.debug('hackathon azure key exists')\n\n azure_cert_url = self.file_service.upload_file_to_azure_from_path(cert_url, self.CONTAINER_NAME,\n subscription_id + '.cer')\n azure_key.cert_url = azure_cert_url\n self.db.commit()\n return azure_cert_url",
"def generate_selfsigned_ca(clustername):\n\n from datetime import datetime, timedelta\n import ipaddress\n\n from cryptography import x509\n from cryptography.x509.oid import NameOID\n from cryptography.hazmat.primitives import hashes\n from cryptography.hazmat.backends import default_backend\n from cryptography.hazmat.primitives import serialization\n from cryptography.hazmat.primitives.asymmetric import rsa\n \n # Generate key\n key = rsa.generate_private_key(\n public_exponent=65537,\n key_size=2048,\n backend=default_backend(),\n )\n \n name = x509.Name([\n x509.NameAttribute(NameOID.COMMON_NAME, unicode(clustername))\n ])\n \n # path_len=1 means that this certificate can sign one level of sub-certs\n basic_contraints = x509.BasicConstraints(ca=True, path_length=1)\n now = datetime.utcnow()\n cert = (\n x509.CertificateBuilder()\n .subject_name(name)\n .issuer_name(name)\n .public_key(key.public_key())\n .serial_number(1)\n .not_valid_before(now)\n .not_valid_after(now + timedelta(days=10*365))\n .add_extension(basic_contraints, False)\n .sign(key, hashes.SHA256(), default_backend())\n )\n\n cert_pem = cert.public_bytes(encoding=serialization.Encoding.PEM)\n\n key_pem = key.private_bytes(\n encoding=serialization.Encoding.PEM,\n format=serialization.PrivateFormat.TraditionalOpenSSL,\n encryption_algorithm=serialization.NoEncryption(),\n )\n\n return cert_pem, key_pem",
"def fusion_api_validator_certificate(self, body, api=None, headers=None):\n return self.client_certificate.post_validator(body, api, headers)",
"def create_cert(commonname, ca_dir):\n sca = SimpleCA(ca_dir)\n sca.new_cert(commonname)",
"def sign_certificate(csr):\n unique_filename = str(uuid.uuid4().hex)\n\n file = open(\"./csr_req/%s.csr\" % unique_filename, \"w\")\n file.write(csr.decode(\"utf-8\"))\n file.close()\n\n subprocess.run([\"../ca/scripts/sign.sh\", unique_filename], check=False)\n\n file = open(\"./csr_req/%s.p7b\" % unique_filename, \"r\")\n cert = file.read()\n\n os.remove(\"./csr_req/%s.csr\" % unique_filename)\n os.remove(\"./csr_req/%s.p7b\" % unique_filename)\n\n return cert",
"def opensslCmsCertCreate( ownerCertFile ):\n opensslCmdArgs = [ \"openssl\", \"crl2pkcs7\", \"-certfile\", ownerCertFile,\n \"-nocrl\", \"-outform\", \"der\" ]\n ownerCertCmsDerBase64 = runOpensslCmd( opensslCmdArgs, [ \"base64\" ] )\n return ownerCertCmsDerBase64",
"def CreateRequests(self, args):\n\n ssl_certificate_ref = self.SSL_CERTIFICATE_ARG.ResolveAsResource(\n args, self.resources)\n certificate = file_utils.ReadFile(args.certificate, 'certificate')\n private_key = file_utils.ReadFile(args.private_key, 'private key')\n\n request = self.messages.ComputeSslCertificatesInsertRequest(\n sslCertificate=self.messages.SslCertificate(\n name=ssl_certificate_ref.Name(),\n certificate=certificate,\n privateKey=private_key,\n description=args.description),\n project=self.project)\n\n return [request]",
"def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n certificate: Optional[pulumi.Input[str]] = None,\n certificate_id: Optional[pulumi.Input[str]] = None,\n certificate_name: Optional[pulumi.Input[str]] = None,\n domain: Optional[pulumi.Input[str]] = None,\n instance_id: Optional[pulumi.Input[str]] = None,\n private_key: Optional[pulumi.Input[str]] = None,\n __props__=None):\n ..."
] | [
"0.6971027",
"0.69361377",
"0.68268514",
"0.63815004",
"0.62957174",
"0.6160668",
"0.6160668",
"0.6114752",
"0.6106183",
"0.60284036",
"0.6020332",
"0.59913826",
"0.5973368",
"0.5967798",
"0.5933598",
"0.59006125",
"0.5893792",
"0.58749163",
"0.5872739",
"0.5861752",
"0.58191895",
"0.58045995",
"0.58014166",
"0.5752848",
"0.57494247",
"0.5676803",
"0.5675528",
"0.56618756",
"0.56432956",
"0.5618998"
] | 0.7743001 | 0 |
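Several negatives in the record above build self-signed certificates with pyOpenSSL or by shelling out to openssl. For reference, a hedged sketch of the same operation with the `cryptography` package; the common name and one-year validity window are placeholder assumptions:

from datetime import datetime, timedelta
from cryptography import x509
from cryptography.x509.oid import NameOID
from cryptography.hazmat.primitives import hashes, serialization
from cryptography.hazmat.primitives.asymmetric import rsa

def make_self_signed(common_name, days=365):
    # Generate a fresh 2048-bit RSA key for the certificate.
    key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
    name = x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, common_name)])
    now = datetime.utcnow()
    cert = (
        x509.CertificateBuilder()
        .subject_name(name)
        .issuer_name(name)  # self-signed: issuer equals subject
        .public_key(key.public_key())
        .serial_number(x509.random_serial_number())
        .not_valid_before(now)
        .not_valid_after(now + timedelta(days=days))
        .sign(key, hashes.SHA256())
    )
    cert_pem = cert.public_bytes(serialization.Encoding.PEM)
    key_pem = key.private_bytes(
        serialization.Encoding.PEM,
        serialization.PrivateFormat.TraditionalOpenSSL,
        serialization.NoEncryption(),
    )
    return cert_pem, key_pem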
Get Remote Certificate [Arguments] | def fusion_api_get_remote_certificate(self, ip, api=None, headers=None):
return self.remote_certificate.get(ip, api, headers) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_ssl_certificate():",
"def get_ssl_certificate() :",
"def fusion_api_get_certificate_info(self, uri=None, api=None, param='', headers=None):\n param = '/certificates/https/'\n return self.ic.get(uri=uri, api=api, headers=headers, param=param)",
"def fusion_api_get_client_certificate(self, ip, api=None, headers=None):\n return self.client_certificate.get(ip, api, headers)",
"def get_certificate_command(client: KeyVaultClient, args: dict[str, Any]) -> CommandResults:\n vault_name = args.get('vault_name', '')\n certificate_name = args.get('certificate_name', '')\n certificate_version = args.get('certificate_version', '')\n response = client.get_certificate_request(\n vault_name, certificate_name, certificate_version)\n\n outputs = copy.deepcopy(response)\n outputs['attributes'] = convert_time_attributes_to_iso(outputs['attributes'])\n outputs['policy']['attributes'] = convert_time_attributes_to_iso(outputs['policy']['attributes'])\n\n readable_response = {'certificate_id': response.get(\n 'id'), **convert_attributes_to_readable(response.get('attributes', {}).copy())}\n outputs[VAULT_NAME_CONTEXT_FIELD] = vault_name\n\n readable_output = tableToMarkdown(f'{certificate_name} Information',\n readable_response,\n ['certificate_id', 'enabled', 'create_time', 'update_time', 'expiry_time'],\n removeNull=True,\n headerTransform=string_to_table_header)\n command_results = CommandResults(\n outputs_prefix='AzureKeyVault.Certificate',\n outputs_key_field='id',\n outputs=outputs,\n raw_response=response,\n readable_output=readable_output,\n ignore_auto_extract=True\n )\n\n return command_results",
"def fusion_api_get_rabbitmq_client_certificate(self, param='', api=None, headers=None):\n return self.rabmq.get(param=param, api=api, headers=headers)",
"def get_server_certificate(latest=None,name=None,name_prefix=None,path_prefix=None,opts=None):\n __args__ = dict()\n\n __args__['latest'] = latest\n __args__['name'] = name\n __args__['namePrefix'] = name_prefix\n __args__['pathPrefix'] = path_prefix\n if opts is None:\n opts = pulumi.InvokeOptions()\n if opts.version is None:\n opts.version = utilities.get_version()\n __ret__ = pulumi.runtime.invoke('aws:iam/getServerCertificate:getServerCertificate', __args__, opts=opts).value\n\n return AwaitableGetServerCertificateResult(\n arn=__ret__.get('arn'),\n certificate_body=__ret__.get('certificateBody'),\n certificate_chain=__ret__.get('certificateChain'),\n expiration_date=__ret__.get('expirationDate'),\n latest=__ret__.get('latest'),\n name=__ret__.get('name'),\n name_prefix=__ret__.get('namePrefix'),\n path=__ret__.get('path'),\n path_prefix=__ret__.get('pathPrefix'),\n upload_date=__ret__.get('uploadDate'),\n id=__ret__.get('id'))",
"def fusion_api_get_server_certificate(self, aliasname, api=None, headers=None):\n return self.server_certificate.get(aliasname, api, headers)",
"def credential_get(uniqueID: str):\n\n cert = safeisland.certificate(uniqueID)\n return {\"payload\": cert}",
"def fusion_api_get_ca_certificate(self, uri=None, api=None, headers=None, param=''):\n return self.ca.get(uri=uri, api=api, headers=headers, param=param)",
"def get_certificate_request(self, vault_name: str,\n certificate_name: str,\n certificate_version: str) -> dict[str, Any]:\n url = f'https://{vault_name}{self.azure_cloud.suffixes.keyvault_dns}/certificates/{certificate_name}'\n if certificate_version:\n url = url + f'/{certificate_version}'\n response = self.http_request(\n 'GET', full_url=url,\n resource=self.get_vault_resource())\n\n return response",
"def get_authentication_certificate(hostname:str) -> str:\r\n host = hostname.split(\":\")[0]\r\n port = int(hostname.split(\":\")[1] or 443)\r\n conn = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\r\n context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)\r\n sock = context.wrap_socket(conn, server_hostname=host)\r\n sock.connect((host, port))\r\n cert = ssl.DER_cert_to_PEM_cert(sock.getpeercert(True))\r\n return str.encode(cert)",
"def test_get_certificate(self):\n chain = _create_certificate_chain()\n [(cakey, cacert), (ikey, icert), (skey, scert)] = chain\n\n context = Context(SSLv23_METHOD)\n context.use_certificate(scert)\n client = Connection(context, None)\n cert = client.get_certificate()\n assert cert is not None\n assert \"Server Certificate\" == cert.get_subject().CN",
"def get(resource, **kwargs):\n\t#print(_endpoint(resource, 'GET'))\n\tresp = requests.get(\n\t\t_endpoint(resource, 'GET'),\n\t\tparams=_jsonify_dict_values(kwargs),\n\t\tverify=SERVER_CERT\n\t)\n\tresp.raise_for_status()\n\treturn resp.json()",
"def Run(self, args):\n holder = base_classes.ComputeApiHolder(self.ReleaseTrack())\n\n ssl_certificate_ref = self.SSL_CERTIFICATE_ARG.ResolveAsResource(\n args, holder.resources, default_scope=compute_scope.ScopeEnum.GLOBAL)\n return _Run(args, holder, ssl_certificate_ref)",
"def certificate(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"certificate\")",
"def certificate(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"certificate\")",
"def certificate(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"certificate\")",
"def Run(self, args):\n holder = base_classes.ComputeApiHolder(self.ReleaseTrack())\n\n ssl_certificate_ref = self.SSL_CERTIFICATE_ARG.ResolveAsResource(\n args, holder.resources, default_scope=compute_scope.ScopeEnum.GLOBAL)\n\n return _Run(args, holder, ssl_certificate_ref)",
"def certificate(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"certificate\")",
"def certificate(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"certificate\")",
"def get_certificate(self, cert_id):\r\n return self.ssl.getObject(id=cert_id)",
"def certificate(self) -> str:\n return pulumi.get(self, \"certificate\")",
"def fusion_api_get_appliance_certificate(self, api=None, headers=None):\n return self.appliance_certificate.get(api, headers)",
"def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n arn: Optional[pulumi.Input[str]] = None,\n certificate_body: Optional[pulumi.Input[str]] = None,\n certificate_chain: Optional[pulumi.Input[str]] = None,\n expiration: Optional[pulumi.Input[str]] = None,\n name: Optional[pulumi.Input[str]] = None,\n name_prefix: Optional[pulumi.Input[str]] = None,\n path: Optional[pulumi.Input[str]] = None,\n private_key: Optional[pulumi.Input[str]] = None,\n tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n tags_all: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n upload_date: Optional[pulumi.Input[str]] = None) -> 'ServerCertificate':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _ServerCertificateState.__new__(_ServerCertificateState)\n\n __props__.__dict__[\"arn\"] = arn\n __props__.__dict__[\"certificate_body\"] = certificate_body\n __props__.__dict__[\"certificate_chain\"] = certificate_chain\n __props__.__dict__[\"expiration\"] = expiration\n __props__.__dict__[\"name\"] = name\n __props__.__dict__[\"name_prefix\"] = name_prefix\n __props__.__dict__[\"path\"] = path\n __props__.__dict__[\"private_key\"] = private_key\n __props__.__dict__[\"tags\"] = tags\n __props__.__dict__[\"tags_all\"] = tags_all\n __props__.__dict__[\"upload_date\"] = upload_date\n return ServerCertificate(resource_name, opts=opts, __props__=__props__)",
"def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n certificate: Optional[pulumi.Input[str]] = None,\n certificate_id: Optional[pulumi.Input[int]] = None,\n creation_timestamp: Optional[pulumi.Input[str]] = None,\n description: Optional[pulumi.Input[str]] = None,\n expire_time: Optional[pulumi.Input[str]] = None,\n name: Optional[pulumi.Input[str]] = None,\n name_prefix: Optional[pulumi.Input[str]] = None,\n private_key: Optional[pulumi.Input[str]] = None,\n project: Optional[pulumi.Input[str]] = None,\n self_link: Optional[pulumi.Input[str]] = None) -> 'SSLCertificate':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _SSLCertificateState.__new__(_SSLCertificateState)\n\n __props__.__dict__[\"certificate\"] = certificate\n __props__.__dict__[\"certificate_id\"] = certificate_id\n __props__.__dict__[\"creation_timestamp\"] = creation_timestamp\n __props__.__dict__[\"description\"] = description\n __props__.__dict__[\"expire_time\"] = expire_time\n __props__.__dict__[\"name\"] = name\n __props__.__dict__[\"name_prefix\"] = name_prefix\n __props__.__dict__[\"private_key\"] = private_key\n __props__.__dict__[\"project\"] = project\n __props__.__dict__[\"self_link\"] = self_link\n return SSLCertificate(resource_name, opts=opts, __props__=__props__)",
"def get_certificate(self, url):\n bearer = 'Authorization: Bearer '+str(self.exchanged_token).split('\\n', 1)[0]\n data = json.dumps({\"service_id\": \"x509\"})\n\n headers = StringIO()\n buffers = StringIO()\n\n c = pycurl.Curl()\n c.setopt(pycurl.URL, url)\n c.setopt(pycurl.HTTPHEADER, [bearer, 'Content-Type: application/json'])\n c.setopt(pycurl.POST, 1)\n c.setopt(pycurl.POSTFIELDS, data)\n c.setopt(c.WRITEFUNCTION, buffers.write)\n c.setopt(c.HEADERFUNCTION, headers.write)\n c.setopt(c.VERBOSE, True)\n\n try:\n c.perform()\n status = c.getinfo(c.RESPONSE_CODE)\n c.close()\n body = buffers.getvalue()\n\n if str(status) != \"303\" :\n self.log.error(\"On \\\"get redirect curl\\\": %s , http error: %s \" % (body, str(status)))\n return False \n except pycurl.error, error:\n errno, errstr = error\n self.log.info('An error occurred: %s' % errstr)\n return False\n \n redirect = self.tts\n for item in headers.getvalue().split(\"\\n\"):\n if \"location\" in item:\n redirect = redirect + item.strip().replace(\"location: \", \"\")\n\n headers = {'Authorization': 'Bearer ' + self.exchanged_token.strip()}\n response = requests.get(redirect, headers=headers)\n\n try:\n response.raise_for_status()\n except requests.exceptions.HTTPError as e:\n # Whoops it wasn't a 200\n self.log.error(\"get_certificate() Error: %s \" %str(e))\n return False\n\n with open('/tmp/output.json', 'w') as outf:\n outf.write(response.content)\n else:\n self.log.error(\"No location in redirect response\")\n\n return True",
"def certificate(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"certificate\")",
"def cert(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"cert\")",
"def cert(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"cert\")"
] | [
"0.7108284",
"0.69801885",
"0.6434288",
"0.6255552",
"0.62373865",
"0.61934626",
"0.6150075",
"0.6104605",
"0.60285634",
"0.5978037",
"0.58507806",
"0.57769924",
"0.57670623",
"0.57441795",
"0.572235",
"0.57032377",
"0.57032377",
"0.57032377",
"0.56818783",
"0.56648785",
"0.56648785",
"0.5614528",
"0.5602363",
"0.5576014",
"0.55691874",
"0.5553844",
"0.554588",
"0.5531697",
"0.55162585",
"0.55162585"
] | 0.71719384 | 0 |
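The record above concerns fetching a certificate from a remote endpoint, which several of its negatives do with raw sockets. A minimal standard-library sketch, assuming only that the host answers TLS on the given port; verification is deliberately disabled because the goal is to retrieve the certificate bytes, not to validate them:

import socket
import ssl

def fetch_peer_cert_pem(host, port=443, timeout=5.0):
    ctx = ssl.create_default_context()
    ctx.check_hostname = False
    ctx.verify_mode = ssl.CERT_NONE  # we only want the cert, not a trust decision
    with socket.create_connection((host, port), timeout=timeout) as sock:
        with ctx.wrap_socket(sock, server_hostname=host) as tls:
            der = tls.getpeercert(binary_form=True)  # DER-encoded peer cert
    return ssl.DER_cert_to_PEM_cert(der)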
Import Client Certificate to Oneview [Arguments] | def fusion_api_import_client_certificate(self, body, api=None, headers=None):
return self.client_certificate.post(body, api, headers) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def AddClientCertificateFlag(parser, required=False):\n help_text = \"\"\"\\\n x509 PEM-encoded certificate that will be used by the replica to\n authenticate against the database server. Database Migration Service\n encrypts the value when storing it.\n \"\"\"\n parser.add_argument('--client-certificate', help=help_text, required=required)",
"def test_load_client_ca(self, context, ca_file):\n context.load_client_ca(ca_file)",
"def get_certificate_command(client: KeyVaultClient, args: dict[str, Any]) -> CommandResults:\n vault_name = args.get('vault_name', '')\n certificate_name = args.get('certificate_name', '')\n certificate_version = args.get('certificate_version', '')\n response = client.get_certificate_request(\n vault_name, certificate_name, certificate_version)\n\n outputs = copy.deepcopy(response)\n outputs['attributes'] = convert_time_attributes_to_iso(outputs['attributes'])\n outputs['policy']['attributes'] = convert_time_attributes_to_iso(outputs['policy']['attributes'])\n\n readable_response = {'certificate_id': response.get(\n 'id'), **convert_attributes_to_readable(response.get('attributes', {}).copy())}\n outputs[VAULT_NAME_CONTEXT_FIELD] = vault_name\n\n readable_output = tableToMarkdown(f'{certificate_name} Information',\n readable_response,\n ['certificate_id', 'enabled', 'create_time', 'update_time', 'expiry_time'],\n removeNull=True,\n headerTransform=string_to_table_header)\n command_results = CommandResults(\n outputs_prefix='AzureKeyVault.Certificate',\n outputs_key_field='id',\n outputs=outputs,\n raw_response=response,\n readable_output=readable_output,\n ignore_auto_extract=True\n )\n\n return command_results",
"def Certificate(self) -> _n_8_t_0:",
"def Certificate(self) -> _n_8_t_0:",
"def _ParseCertificateArguments(client, args):\n self_managed = None\n managed = None\n certificate_type = None\n if args.certificate:\n certificate_type = \\\n client.messages.SslCertificate.TypeValueValuesEnum.SELF_MANAGED\n certificate = files.ReadFileContents(args.certificate)\n private_key = files.ReadFileContents(args.private_key)\n self_managed = client.messages.SslCertificateSelfManagedSslCertificate(\n certificate=certificate, privateKey=private_key)\n if args.domains:\n certificate_type = \\\n client.messages.SslCertificate.TypeValueValuesEnum.MANAGED\n managed = client.messages.SslCertificateManagedSslCertificate(\n domains=args.domains)\n return certificate_type, self_managed, managed",
"def AddCertificateFlag(parser, required=False):\n help_text = \"\"\"\\\n x509 PEM-encoded certificate that will be used by the replica to\n authenticate against the database server.\n \"\"\"\n parser.add_argument('--certificate', help=help_text, required=required)",
"def test_load_client_ca_unicode(self, context, ca_file):\n pytest.deprecated_call(context.load_client_ca, ca_file.decode(\"ascii\"))",
"def _Run(args, holder, ssl_certificate_ref):\n client = holder.client\n\n certificate_type, self_managed, managed = _ParseCertificateArguments(\n client, args)\n\n if ssl_certificates_utils.IsRegionalSslCertificatesRef(ssl_certificate_ref):\n request = client.messages.ComputeRegionSslCertificatesInsertRequest(\n sslCertificate=client.messages.SslCertificate(\n type=certificate_type,\n name=ssl_certificate_ref.Name(),\n selfManaged=self_managed,\n managed=managed,\n description=args.description),\n region=ssl_certificate_ref.region,\n project=ssl_certificate_ref.project)\n else:\n request = client.messages.ComputeSslCertificatesInsertRequest(\n sslCertificate=client.messages.SslCertificate(\n type=certificate_type,\n name=ssl_certificate_ref.Name(),\n selfManaged=self_managed,\n managed=managed,\n description=args.description),\n project=ssl_certificate_ref.project)\n\n if ssl_certificates_utils.IsRegionalSslCertificatesRef(ssl_certificate_ref):\n collection = client.apitools_client.regionSslCertificates\n else:\n collection = client.apitools_client.sslCertificates\n\n return client.MakeRequests([(collection, 'Insert', request)])",
"def fusion_api_import_appliance_certificate(self, body, api=None, headers=None, param=''):\n return self.wsc.put(body, api=api, headers=headers, param=param)",
"def cmd(\n ctx,\n url,\n prompt,\n include_paths,\n include_urls,\n include_only_ca,\n export_file,\n update_env,\n):\n client = ctx.obj.create_client(url=url, key=None, secret=None)\n export_file = pathify_export_file(client=client, export_file=export_file)\n\n with ctx.obj.exc_wrap(wraperror=ctx.obj.wraperror):\n includes = []\n\n for url in include_urls:\n includes += from_url(url=url, split=False, ca_only=include_only_ca)\n\n for path in include_paths:\n includes += from_path(path=path, split=False, ca_only=include_only_ca)\n\n chain = client.HTTP.get_cert_chain()\n leaf_cert, intm_certs = split_leaf(chain=chain)\n prompt = confirm_cert(prompt=prompt, cert=leaf_cert)\n handle_export(\n data=chain + includes,\n export_file=export_file,\n export_backup=True,\n export_format=\"pem\",\n )\n handle_update_env(update_env=update_env, export_file=export_file)\n\n ctx.exit(0)",
"def req_command(args):\n if args.files:\n die(\"Unexpected positional arguments\")\n\n subject_info = info_from_args(args)\n\n if subject_info.ca:\n msg('Request for CA cert')\n else:\n msg('Request for end-entity cert')\n subject_info.show(msg_show)\n\n # Load private key, create signing request\n key = load_key(args.key, load_password(args.password_file))\n req = create_x509_req(key, subject_info)\n do_output(req_to_pem(req), args, 'req')",
"def fusion_api_import_server_certificate(self, body, api=None, headers=None):\n return self.server_certificate.post(body, api, headers)",
"def CreateRequests(self, args):\n\n ssl_certificate_ref = self.SSL_CERTIFICATE_ARG.ResolveAsResource(\n args, self.resources)\n certificate = file_utils.ReadFile(args.certificate, 'certificate')\n private_key = file_utils.ReadFile(args.private_key, 'private key')\n\n request = self.messages.ComputeSslCertificatesInsertRequest(\n sslCertificate=self.messages.SslCertificate(\n name=ssl_certificate_ref.Name(),\n certificate=certificate,\n privateKey=private_key,\n description=args.description),\n project=self.project)\n\n return [request]",
"def customization_data(client=None):\n\n yield ImportDefinition(u\"\"\"\neyJhY3Rpb25fb3JkZXIiOiBbXSwgImFjdGlvbnMiOiBbeyJhdXRvbWF0aW9ucyI6IFtdLCAiY29u\nZGl0aW9ucyI6IFt7ImV2YWx1YXRpb25faWQiOiBudWxsLCAiZmllbGRfbmFtZSI6ICJhcnRpZmFj\ndC50eXBlIiwgIm1ldGhvZCI6ICJpbiIsICJ0eXBlIjogbnVsbCwgInZhbHVlIjogWyJJUCBBZGRy\nZXNzIiwgIkROUyBOYW1lIiwgIlVSTCJdfV0sICJlbmFibGVkIjogdHJ1ZSwgImV4cG9ydF9rZXki\nOiAiUnVuIHJkYXAgcXVlcnkgYWdhaW5zdCBBcnRpZmFjdCIsICJpZCI6IDE2MiwgImxvZ2ljX3R5\ncGUiOiAiYWxsIiwgIm1lc3NhZ2VfZGVzdGluYXRpb25zIjogW10sICJuYW1lIjogIlJ1biByZGFw\nIHF1ZXJ5IGFnYWluc3QgQXJ0aWZhY3QiLCAib2JqZWN0X3R5cGUiOiAiYXJ0aWZhY3QiLCAidGFn\ncyI6IFtdLCAidGltZW91dF9zZWNvbmRzIjogODY0MDAsICJ0eXBlIjogMSwgInV1aWQiOiAiNTNj\nNjlmZGQtYTU3NS00YmE3LTg2ZjEtNmJkMjk5NzBlYjA4IiwgInZpZXdfaXRlbXMiOiBbXSwgIndv\ncmtmbG93cyI6IFsiZXhhbXBsZV9yZGFwX3F1ZXJ5Il19LCB7ImF1dG9tYXRpb25zIjogW10sICJj\nb25kaXRpb25zIjogW3siZXZhbHVhdGlvbl9pZCI6IG51bGwsICJmaWVsZF9uYW1lIjogImFydGlm\nYWN0LnR5cGUiLCAibWV0aG9kIjogImluIiwgInR5cGUiOiBudWxsLCAidmFsdWUiOiBbIklQIEFk\nZHJlc3MiLCAiRE5TIE5hbWUiLCAiVVJMIl19XSwgImVuYWJsZWQiOiB0cnVlLCAiZXhwb3J0X2tl\neSI6ICJSdW4gd2hvaXMgcXVlcnkgYWdhaW5zdCBBcnRpZmFjdCAoUkRBUCkiLCAiaWQiOiAxNjMs\nICJsb2dpY190eXBlIjogImFsbCIsICJtZXNzYWdlX2Rlc3RpbmF0aW9ucyI6IFtdLCAibmFtZSI6\nICJSdW4gd2hvaXMgcXVlcnkgYWdhaW5zdCBBcnRpZmFjdCAoUkRBUCkiLCAib2JqZWN0X3R5cGUi\nOiAiYXJ0aWZhY3QiLCAidGFncyI6IFtdLCAidGltZW91dF9zZWNvbmRzIjogODY0MDAsICJ0eXBl\nIjogMSwgInV1aWQiOiAiNThjYmM5OGMtNjZhYi00N2RjLTk1YTktNzRkZDlkZjM0MjdjIiwgInZp\nZXdfaXRlbXMiOiBbXSwgIndvcmtmbG93cyI6IFsiZXhhbXBsZV93aG9pc19xdWVyeSJdfV0sICJh\ndXRvbWF0aWNfdGFza3MiOiBbXSwgImV4cG9ydF9kYXRlIjogMTU5NzkzMjAyMDkzMSwgImV4cG9y\ndF9mb3JtYXRfdmVyc2lvbiI6IDIsICJmaWVsZHMiOiBbeyJhbGxvd19kZWZhdWx0X3ZhbHVlIjog\nZmFsc2UsICJibGFua19vcHRpb24iOiBmYWxzZSwgImNhbGN1bGF0ZWQiOiBmYWxzZSwgImNoYW5n\nZWFibGUiOiB0cnVlLCAiY2hvc2VuIjogZmFsc2UsICJkZWZhdWx0X2Nob3Nlbl9ieV9zZXJ2ZXIi\nOiBmYWxzZSwgImRlcHJlY2F0ZWQiOiBmYWxzZSwgImV4cG9ydF9rZXkiOiAiX19mdW5jdGlvbi9y\nZGFwX2RlcHRoIiwgImhpZGVfbm90aWZpY2F0aW9uIjogZmFsc2UsICJpZCI6IDEwNTAsICJpbnB1\ndF90eXBlIjogIm51bWJlciIsICJpbnRlcm5hbCI6IGZhbHNlLCAiaXNfdHJhY2tlZCI6IGZhbHNl\nLCAibmFtZSI6ICJyZGFwX2RlcHRoIiwgIm9wZXJhdGlvbl9wZXJtcyI6IHt9LCAib3BlcmF0aW9u\ncyI6IFtdLCAicGxhY2Vob2xkZXIiOiAiMCIsICJwcmVmaXgiOiBudWxsLCAicmVhZF9vbmx5Ijog\nZmFsc2UsICJyZXF1aXJlZCI6ICJhbHdheXMiLCAicmljaF90ZXh0IjogZmFsc2UsICJ0YWdzIjog\nW10sICJ0ZW1wbGF0ZXMiOiBbXSwgInRleHQiOiAicmRhcF9kZXB0aCIsICJ0b29sdGlwIjogIkNh\nbiBiZSAwLCAxIG9yIDIiLCAidHlwZV9pZCI6IDExLCAidXVpZCI6ICI4NWIyYjJiOC1hNjhiLTQz\nODMtYTAwNC1jYTA2MGZkN2M2YTYiLCAidmFsdWVzIjogW119LCB7ImFsbG93X2RlZmF1bHRfdmFs\ndWUiOiBmYWxzZSwgImJsYW5rX29wdGlvbiI6IGZhbHNlLCAiY2FsY3VsYXRlZCI6IGZhbHNlLCAi\nY2hhbmdlYWJsZSI6IHRydWUsICJjaG9zZW4iOiBmYWxzZSwgImRlZmF1bHRfY2hvc2VuX2J5X3Nl\ncnZlciI6IGZhbHNlLCAiZGVwcmVjYXRlZCI6IGZhbHNlLCAiZXhwb3J0X2tleSI6ICJfX2Z1bmN0\naW9uL3dob2lzX3F1ZXJ5IiwgImhpZGVfbm90aWZpY2F0aW9uIjogZmFsc2UsICJpZCI6IDEwNDks\nICJpbnB1dF90eXBlIjogInRleHQiLCAiaW50ZXJuYWwiOiBmYWxzZSwgImlzX3RyYWNrZWQiOiBm\nYWxzZSwgIm5hbWUiOiAid2hvaXNfcXVlcnkiLCAib3BlcmF0aW9uX3Blcm1zIjoge30sICJvcGVy\nYXRpb25zIjogW10sICJwbGFjZWhvbGRlciI6ICJpYm0uY29tIiwgInByZWZpeCI6IG51bGwsICJy\nZWFkX29ubHkiOiBmYWxzZSwgInJlcXVpcmVkIjogImFsd2F5cyIsICJyaWNoX3RleHQiOiBmYWxz\nZSwgInRhZ3MiOiBbXSwgInRlbXBsYXRlcyI6IFtdLCAidGV4dCI6ICJ3aG9pc19xdWVyeSIsICJ0\nb29sdGlwIjogIklQLCBVUkwgb3IgRE5TIEFydGlmYWN0IiwgInR5cGVfaWQiOiAxMSwgInV1aWQi\nOiAiOTRhYmJkZGItNmU0NC00MDQzLWFkZGYtYjY4MDU3NjM0OGYxIiwgInZhbHVlcyI6IFtdfSwg\neyJhbGxvd19kZWZhdWx0X3ZhbHVlIjogZmFsc2UsICJibGFua19vcHRpb24iOiBmYWxzZSwgImNh\nbGN1bGF0ZWQiOiBmYWxzZSwgImNoYW5nZWFibGUiOiB0cnVlL
CAiY2hvc2VuIjogZmFsc2UsICJk\nZWZhdWx0X2Nob3Nlbl9ieV9zZXJ2ZXIiOiBmYWxzZSwgImRlcHJlY2F0ZWQiOiBmYWxzZSwgImV4\ncG9ydF9rZXkiOiAiX19mdW5jdGlvbi9yZGFwX3F1ZXJ5IiwgImhpZGVfbm90aWZpY2F0aW9uIjog\nZmFsc2UsICJpZCI6IDEwNTEsICJpbnB1dF90eXBlIjogInRleHQiLCAiaW50ZXJuYWwiOiBmYWxz\nZSwgImlzX3RyYWNrZWQiOiBmYWxzZSwgIm5hbWUiOiAicmRhcF9xdWVyeSIsICJvcGVyYXRpb25f\ncGVybXMiOiB7fSwgIm9wZXJhdGlvbnMiOiBbXSwgInBsYWNlaG9sZGVyIjogImlibS5jb20iLCAi\ncHJlZml4IjogbnVsbCwgInJlYWRfb25seSI6IGZhbHNlLCAicmVxdWlyZWQiOiAiYWx3YXlzIiwg\nInJpY2hfdGV4dCI6IGZhbHNlLCAidGFncyI6IFtdLCAidGVtcGxhdGVzIjogW10sICJ0ZXh0Ijog\nInJkYXBfcXVlcnkiLCAidG9vbHRpcCI6ICJJUCwgVVJMIG9yIEROUyBBcnRpZmFjdCIsICJ0eXBl\nX2lkIjogMTEsICJ1dWlkIjogIjA0ZWRkMTlkLWQwMDgtNGFiYy1hMTliLTczMWE5OTk3MzIwZCIs\nICJ2YWx1ZXMiOiBbXX0sIHsiZXhwb3J0X2tleSI6ICJpbmNpZGVudC9pbnRlcm5hbF9jdXN0b21p\nemF0aW9uc19maWVsZCIsICJpZCI6IDAsICJpbnB1dF90eXBlIjogInRleHQiLCAiaW50ZXJuYWwi\nOiB0cnVlLCAibmFtZSI6ICJpbnRlcm5hbF9jdXN0b21pemF0aW9uc19maWVsZCIsICJyZWFkX29u\nbHkiOiB0cnVlLCAidGV4dCI6ICJDdXN0b21pemF0aW9ucyBGaWVsZCAoaW50ZXJuYWwpIiwgInR5\ncGVfaWQiOiAwLCAidXVpZCI6ICJiZmVlYzJkNC0zNzcwLTExZTgtYWQzOS00YTAwMDQwNDRhYTEi\nfV0sICJmdW5jdGlvbnMiOiBbeyJjcmVhdG9yIjogeyJkaXNwbGF5X25hbWUiOiAiUmVzaWxpZW50\nIFN5c2FkbWluIiwgImlkIjogMywgIm5hbWUiOiAiYUBleGFtcGxlLmNvbSIsICJ0eXBlIjogInVz\nZXIifSwgImRlc2NyaXB0aW9uIjogeyJmb3JtYXQiOiAidGV4dCIsICJjb250ZW50IjogIlVzaW5n\nIGlwd2hvaXMgbGlicmFyeSB0byBtYWtlIGdlbmVyYWwgcXVlcmllcyBpbiBSREFQIGZvcm1hdCJ9\nLCAiZGVzdGluYXRpb25faGFuZGxlIjogImZuX3dob2lzX3JkYXAiLCAiZGlzcGxheV9uYW1lIjog\nIlJEQVA6IFF1ZXJ5IiwgImV4cG9ydF9rZXkiOiAicmRhcF9xdWVyeSIsICJpZCI6IDY3LCAibGFz\ndF9tb2RpZmllZF9ieSI6IHsiZGlzcGxheV9uYW1lIjogIlJlc2lsaWVudCBTeXNhZG1pbiIsICJp\nZCI6IDMsICJuYW1lIjogImFAZXhhbXBsZS5jb20iLCAidHlwZSI6ICJ1c2VyIn0sICJsYXN0X21v\nZGlmaWVkX3RpbWUiOiAxNTk3OTI5NzQ2OTMxLCAibmFtZSI6ICJyZGFwX3F1ZXJ5IiwgInRhZ3Mi\nOiBbXSwgInV1aWQiOiAiMTliNWJiMzctOWE0ZS00OWVjLTgwMmEtZmJjMzAwNmRlMTE3IiwgInZl\ncnNpb24iOiAyLCAidmlld19pdGVtcyI6IFt7ImNvbnRlbnQiOiAiODViMmIyYjgtYTY4Yi00Mzgz\nLWEwMDQtY2EwNjBmZDdjNmE2IiwgImVsZW1lbnQiOiAiZmllbGRfdXVpZCIsICJmaWVsZF90eXBl\nIjogIl9fZnVuY3Rpb24iLCAic2hvd19pZiI6IG51bGwsICJzaG93X2xpbmtfaGVhZGVyIjogZmFs\nc2UsICJzdGVwX2xhYmVsIjogbnVsbH0sIHsiY29udGVudCI6ICIwNGVkZDE5ZC1kMDA4LTRhYmMt\nYTE5Yi03MzFhOTk5NzMyMGQiLCAiZWxlbWVudCI6ICJmaWVsZF91dWlkIiwgImZpZWxkX3R5cGUi\nOiAiX19mdW5jdGlvbiIsICJzaG93X2lmIjogbnVsbCwgInNob3dfbGlua19oZWFkZXIiOiBmYWxz\nZSwgInN0ZXBfbGFiZWwiOiBudWxsfV0sICJ3b3JrZmxvd3MiOiBbeyJhY3Rpb25zIjogW10sICJk\nZXNjcmlwdGlvbiI6IG51bGwsICJuYW1lIjogIkV4YW1wbGU6IFJEQVAgcXVlcnkiLCAib2JqZWN0\nX3R5cGUiOiAiYXJ0aWZhY3QiLCAicHJvZ3JhbW1hdGljX25hbWUiOiAiZXhhbXBsZV9yZGFwX3F1\nZXJ5IiwgInRhZ3MiOiBbXSwgInV1aWQiOiBudWxsLCAid29ya2Zsb3dfaWQiOiA4N31dfSwgeyJj\ncmVhdG9yIjogeyJkaXNwbGF5X25hbWUiOiAiUmVzaWxpZW50IFN5c2FkbWluIiwgImlkIjogMywg\nIm5hbWUiOiAiYUBleGFtcGxlLmNvbSIsICJ0eXBlIjogInVzZXIifSwgImRlc2NyaXB0aW9uIjog\neyJmb3JtYXQiOiAidGV4dCIsICJjb250ZW50IjogIlVzaW5nIGlwd2hvaXMgbGlicmFyeSB0byBt\nYWtlIGdlbmVyYWwgcXVlcmllcyBpbiB3aG9pcyBmb3JtYXQifSwgImRlc3RpbmF0aW9uX2hhbmRs\nZSI6ICJmbl93aG9pc19yZGFwIiwgImRpc3BsYXlfbmFtZSI6ICJXSE9JUzogcXVlcnkiLCAiZXhw\nb3J0X2tleSI6ICJ3aG9pc19yZGFwX3F1ZXJ5IiwgImlkIjogNjgsICJsYXN0X21vZGlmaWVkX2J5\nIjogeyJkaXNwbGF5X25hbWUiOiAiUmVzaWxpZW50IFN5c2FkbWluIiwgImlkIjogMywgIm5hbWUi\nOiAiYUBleGFtcGxlLmNvbSIsICJ0eXBlIjogInVzZXIifSwgImxhc3RfbW9kaWZpZWRfdGltZSI6\nIDE1OTc5Mjk3ODY1MzMsICJuYW1lIjogIndob2lzX3JkYXBfcXVlcnkiLCAidGFncyI6IFtdLCAi\ndXVpZCI6ICJhMDQwZDNjZC1kMTUxLTRjMTQtYTYyMi0yNjI2MmFiNmE5YjkiLCAidmVyc2lvbiI6\nIDUsICJ2aWV3X2l0ZW1zIjogW3siY29udGVudCI6ICI5NGFiYmRkYi02ZTQ0LTQwNDMtYWRkZi1i\nNjgwNTc2MzQ4ZjEi
LCAiZWxlbWVudCI6ICJmaWVsZF91dWlkIiwgImZpZWxkX3R5cGUiOiAiX19m\ndW5jdGlvbiIsICJzaG93X2lmIjogbnVsbCwgInNob3dfbGlua19oZWFkZXIiOiBmYWxzZSwgInN0\nZXBfbGFiZWwiOiBudWxsfV0sICJ3b3JrZmxvd3MiOiBbeyJhY3Rpb25zIjogW10sICJkZXNjcmlw\ndGlvbiI6IG51bGwsICJuYW1lIjogIkV4YW1wbGU6IFdob2lzIHF1ZXJ5IiwgIm9iamVjdF90eXBl\nIjogImFydGlmYWN0IiwgInByb2dyYW1tYXRpY19uYW1lIjogImV4YW1wbGVfd2hvaXNfcXVlcnki\nLCAidGFncyI6IFtdLCAidXVpZCI6IG51bGwsICJ3b3JrZmxvd19pZCI6IDg2fV19XSwgImdlb3Mi\nOiBudWxsLCAiZ3JvdXBzIjogbnVsbCwgImlkIjogMzcsICJpbmJvdW5kX21haWxib3hlcyI6IG51\nbGwsICJpbmNpZGVudF9hcnRpZmFjdF90eXBlcyI6IFtdLCAiaW5jaWRlbnRfdHlwZXMiOiBbeyJ1\ncGRhdGVfZGF0ZSI6IDE1OTc5MzIwMTkyMTQsICJjcmVhdGVfZGF0ZSI6IDE1OTc5MzIwMTkyMTQs\nICJ1dWlkIjogImJmZWVjMmQ0LTM3NzAtMTFlOC1hZDM5LTRhMDAwNDA0NGFhMCIsICJkZXNjcmlw\ndGlvbiI6ICJDdXN0b21pemF0aW9uIFBhY2thZ2VzIChpbnRlcm5hbCkiLCAiZXhwb3J0X2tleSI6\nICJDdXN0b21pemF0aW9uIFBhY2thZ2VzIChpbnRlcm5hbCkiLCAibmFtZSI6ICJDdXN0b21pemF0\naW9uIFBhY2thZ2VzIChpbnRlcm5hbCkiLCAiZW5hYmxlZCI6IGZhbHNlLCAic3lzdGVtIjogZmFs\nc2UsICJwYXJlbnRfaWQiOiBudWxsLCAiaGlkZGVuIjogZmFsc2UsICJpZCI6IDB9XSwgImluZHVz\ndHJpZXMiOiBudWxsLCAibGF5b3V0cyI6IFtdLCAibG9jYWxlIjogbnVsbCwgIm1lc3NhZ2VfZGVz\ndGluYXRpb25zIjogW3siYXBpX2tleXMiOiBbXSwgImRlc3RpbmF0aW9uX3R5cGUiOiAwLCAiZXhw\nZWN0X2FjayI6IHRydWUsICJleHBvcnRfa2V5IjogImZuX3dob2lzX3JkYXAiLCAibmFtZSI6ICJm\nbl93aG9pc19yZGFwIiwgInByb2dyYW1tYXRpY19uYW1lIjogImZuX3dob2lzX3JkYXAiLCAidGFn\ncyI6IFtdLCAidXNlcnMiOiBbImFAZXhhbXBsZS5jb20iXSwgInV1aWQiOiAiYTEzZmYyOGUtNWNk\nZS00YTUyLWEzNzItOGY5NDg0ODNkMzJlIn1dLCAibm90aWZpY2F0aW9ucyI6IG51bGwsICJvdmVy\ncmlkZXMiOiBbXSwgInBoYXNlcyI6IFtdLCAicmVndWxhdG9ycyI6IG51bGwsICJyb2xlcyI6IFtd\nLCAic2NyaXB0cyI6IFtdLCAic2VydmVyX3ZlcnNpb24iOiB7ImJ1aWxkX251bWJlciI6IDMyLCAi\nbWFqb3IiOiAzNSwgIm1pbm9yIjogMiwgInZlcnNpb24iOiAiMzUuMi4zMiJ9LCAidGFncyI6IFtd\nLCAidGFza19vcmRlciI6IFtdLCAidGltZWZyYW1lcyI6IG51bGwsICJ0eXBlcyI6IFtdLCAid29y\na2Zsb3dzIjogW3siYWN0aW9ucyI6IFtdLCAiY29udGVudCI6IHsidmVyc2lvbiI6IDIsICJ3b3Jr\nZmxvd19pZCI6ICJleGFtcGxlX3JkYXBfcXVlcnkiLCAieG1sIjogIjw/eG1sIHZlcnNpb249XCIx\nLjBcIiBlbmNvZGluZz1cIlVURi04XCI/PjxkZWZpbml0aW9ucyB4bWxucz1cImh0dHA6Ly93d3cu\nb21nLm9yZy9zcGVjL0JQTU4vMjAxMDA1MjQvTU9ERUxcIiB4bWxuczpicG1uZGk9XCJodHRwOi8v\nd3d3Lm9tZy5vcmcvc3BlYy9CUE1OLzIwMTAwNTI0L0RJXCIgeG1sbnM6b21nZGM9XCJodHRwOi8v\nd3d3Lm9tZy5vcmcvc3BlYy9ERC8yMDEwMDUyNC9EQ1wiIHhtbG5zOm9tZ2RpPVwiaHR0cDovL3d3\ndy5vbWcub3JnL3NwZWMvREQvMjAxMDA1MjQvRElcIiB4bWxuczpyZXNpbGllbnQ9XCJodHRwOi8v\ncmVzaWxpZW50LmlibS5jb20vYnBtblwiIHhtbG5zOnhzZD1cImh0dHA6Ly93d3cudzMub3JnLzIw\nMDEvWE1MU2NoZW1hXCIgeG1sbnM6eHNpPVwiaHR0cDovL3d3dy53My5vcmcvMjAwMS9YTUxTY2hl\nbWEtaW5zdGFuY2VcIiB0YXJnZXROYW1lc3BhY2U9XCJodHRwOi8vd3d3LmNhbXVuZGEub3JnL3Rl\nc3RcIj48cHJvY2VzcyBpZD1cImV4YW1wbGVfcmRhcF9xdWVyeVwiIGlzRXhlY3V0YWJsZT1cInRy\ndWVcIiBuYW1lPVwiRXhhbXBsZTogUkRBUCBxdWVyeVwiPjxkb2N1bWVudGF0aW9uPlRoaXMgd29y\na2Zsb3cgZ2VuZXJhdGVzIFJEQVAgZm9ybWF0dGVkIHJlc3VsdHMgZnJvbSBhbiBJUCwgVVJMIG9y\nIEROUyBBcnRpZmFjdDwvZG9jdW1lbnRhdGlvbj48c3RhcnRFdmVudCBpZD1cIlN0YXJ0RXZlbnRf\nMTU1YXN4bVwiPjxvdXRnb2luZz5TZXF1ZW5jZUZsb3dfMXB6Nzlnczwvb3V0Z29pbmc+PC9zdGFy\ndEV2ZW50PjxzZXJ2aWNlVGFzayBpZD1cIlNlcnZpY2VUYXNrXzA2Y2c0ZHRcIiBuYW1lPVwiUkRB\nUDogUXVlcnlcIiByZXNpbGllbnQ6dHlwZT1cImZ1bmN0aW9uXCI+PGV4dGVuc2lvbkVsZW1lbnRz\nPjxyZXNpbGllbnQ6ZnVuY3Rpb24gdXVpZD1cIjE5YjViYjM3LTlhNGUtNDllYy04MDJhLWZiYzMw\nMDZkZTExN1wiPntcImlucHV0c1wiOnt9LFwicG9zdF9wcm9jZXNzaW5nX3NjcmlwdFwiOlwiZGVm\nIGZvcm1hdF9saW5rKGl0ZW0pOlxcbiAgaWYgaXRlbSBhbmQgKFxcXCJodHRwczovL1xcXCIgaW4g\naXRlbSBvciBcXFwiaHR0cDovL1xcXCIgaW4gaXRlbSk6XFxuICAgIHJldHVybiBcXFwiJmx0O2Eg\ndGFyZ2V0PSdibGFuaycgaHJlZj0nezB9JyZndDt7MH0mbHQ7L2EmZ3Q7XFxcI
i5mb3JtYXQoaXRl\nbSlcXG4gIGVsc2U6XFxuICAgIHJldHVybiBpdGVtXFxuXFxuZGVmIGV4cGFuZF9saXN0KGxpc3Rf\ndmFsdWUsIHNlcGFyYXRvcj1cXFwiJmx0O2JyJmd0O1xcXCIpOlxcbiAgaWYgbm90IGlzaW5zdGFu\nY2UobGlzdF92YWx1ZSwgbGlzdCk6XFxuICAgIHJldHVybiBmb3JtYXRfbGluayhsaXN0X3ZhbHVl\nKVxcbiAgZWxzZTpcXG4gICAgdHJ5OlxcbiAgICAgIGl0ZW1zID0gW11cXG4gICAgICBmb3IgaXRl\nbSBpbiBsaXN0X3ZhbHVlOlxcbiAgICAgICAgaWYgaXNpbnN0YW5jZShpdGVtLCBkaWN0KTpcXG4g\nICAgICAgICAgaXRlbXMuYXBwZW5kKFxcXCImbHQ7ZGl2IHN0eWxlPSdwYWRkaW5nOjEwcHgnJmd0\nO3t9Jmx0Oy9kaXYmZ3Q7XFxcIi5mb3JtYXQod2Fsa19kaWN0KGl0ZW0pKSlcXG4gICAgICAgIGVs\nc2U6XFxuICAgICAgICAgIGl0ZW1zLmFwcGVuZChmb3JtYXRfbGluayhpdGVtKSlcXG4gICAgICBy\nZXR1cm4gc2VwYXJhdG9yLmpvaW4oaXRlbXMpXFxuICAgIGV4Y2VwdDpcXG4gICAgICAgIHBhc3Nc\nXG4gICAgXFxuZGVmIHdhbGtfZGljdChzdWJfZGljdCk6XFxuICBub3RlcyA9IFtdXFxuICBmb3Ig\na2V5LCB2YWx1ZSBpbiBzdWJfZGljdC5pdGVtcygpOlxcbiAgICBpZiBrZXkgbm90IGluIFsnZGlz\ncGxheV9jb250ZW50J106XFxuICAgICAgaWYgaXNpbnN0YW5jZSh2YWx1ZSwgZGljdCk6XFxuICAg\nICAgICBub3Rlcy5hcHBlbmQodVxcXCImbHQ7YiZndDt7fSZsdDsvYiZndDs6ICZsdDtkaXYgc3R5\nbGU9J3BhZGRpbmc6MTBweCcmZ3Q7e30mbHQ7L2RpdiZndDtcXFwiLmZvcm1hdChrZXksIHdhbGtf\nZGljdCh2YWx1ZSkpKVxcbiAgICAgIGVsc2U6XFxuICAgICAgICBub3Rlcy5hcHBlbmQodVxcXCIm\nbHQ7YiZndDt7fSZsdDsvYiZndDs6IHt9XFxcIi5mb3JtYXQoa2V5LCBleHBhbmRfbGlzdCh2YWx1\nZSkpKVxcbiAgICAgIFxcbiAgcmV0dXJuIHVcXFwiJmx0O2JyJmd0O1xcXCIuam9pbihub3Rlcylc\nXG4gICAgXFxuXFxubm90ZSA9IHVcXFwiUkRBUCBXaG9pcyBmb3IgYXJ0aWZhY3Q6IHt9Jmx0O2Jy\nJmd0OyZsdDticiZndDtcXFwiLmZvcm1hdChhcnRpZmFjdC52YWx1ZSlcXG5pZiByZXN1bHRzW1xc\nXCJzdWNjZXNzXFxcIl06XFxuICBub3RlID0gbm90ZSArIHdhbGtfZGljdChyZXN1bHRzW1xcXCJj\nb250ZW50XFxcIl0pXFxuZWxzZTpcXG4gIG5vdGUgPSBub3RlICsgdVxcXCJUaGlzIEFydGlmYWN0\nIGhhcyBubyBhbnMgYWNjZXNzaWJsZSByZWdpc3RyeSBpbmZvcm1hdGlvblxcXCJcXG5cXG5pbmNp\nZGVudC5hZGROb3RlKGhlbHBlci5jcmVhdGVSaWNoVGV4dChub3RlKSlcXG5cXG5cXG4nJydcXG4g\nIGZvciBrZXkudmFsdWUgaW4gcmVzdWx0c1tcXFwiY29udGVudFxcXCJdLml0ZW1zKCk6XFxuICAg\nIGlmIGlzaW5zdGFuY2VcXG4gICAgaWYga2V5IG5vdCBpbiBbJ25ldHdvcmsnXTpcXG4gICAgICBp\ndGVtLmFwcGVuZChcXFwiJmx0O2ImZ3Q7e30mbHQ7L2JyJmd0Ozoge31cXFwiLmZvcm1hdChrZXks\nIGV4cGFuZF9saXN0KVxcbiAgaWYgZGVzIGlzIE5vbmU6XFxuICAgIG5vdGUgPSBub3RlICsgdVxc\nXCJcXFwiXFxcIiZsdDtkaXYmZ3Q7Jmx0O3AmZ3Q7Jmx0O2JyJmd0OyZsdDtiJmd0O1JEQVAgdGhy\nZWF0IGludGVsbGlnZW5jZSBhdCB7Mn06Jmx0Oy9iJmd0OyZsdDsvYnImZ3Q7XFxcXG5cXFxcblxc\nbiAgICAmbHQ7YnImZ3Q7Jmx0O2ImZ3Q7ezB9Jmx0Oy9iJmd0OyZsdDsvYnImZ3Q7Jmx0Oy9kaXYm\nZ3Q7Jmx0Oy9wJmd0O1xcXFxuXFxcXG5cXG4gICAgJmx0O2RpdiZndDsmbHQ7cCZndDsmbHQ7YnIm\nZ3Q7Jmx0O2ImZ3Q7IFBvc3NpYmxlIGFjY2Vzc2libGUga2V5czombHQ7L2ImZ3Q7Jmx0Oy9iciZn\ndDtcXFxcblxcXFxuXFxuICAgICZsdDticiZndDsmbHQ7YiZndDt7MX0mbHQ7L2ImZ3Q7Jmx0Oy9i\nciZndDtcXFxcblxcXFxuXFxcIlxcXCJcXFwiLmZvcm1hdChyZXN1bHRzW1xcXCJjb250ZW50XFxc\nIl1bXFxcImRpc3BsYXlfY29udGVudFxcXCJdLHJlc3VsdHNbXFxcImNvbnRlbnRcXFwiXS5rZXlz\nKCkscmVzdWx0c1tcXFwibWV0cmljc1xcXCJdW1xcXCJ0aW1lc3RhbXBcXFwiXSlcXG4nJydcIixc\nInByZV9wcm9jZXNzaW5nX3NjcmlwdFwiOlwiaW5wdXRzLnJkYXBfcXVlcnkgPSBhcnRpZmFjdC52\nYWx1ZVxcbmlucHV0cy5yZGFwX2RlcHRoID0gMFwiLFwicmVzdWx0X25hbWVcIjpcIlwifTwvcmVz\naWxpZW50OmZ1bmN0aW9uPjwvZXh0ZW5zaW9uRWxlbWVudHM+PGluY29taW5nPlNlcXVlbmNlRmxv\nd18xcHo3OWdzPC9pbmNvbWluZz48b3V0Z29pbmc+U2VxdWVuY2VGbG93XzBwa2s4amY8L291dGdv\naW5nPjwvc2VydmljZVRhc2s+PGVuZEV2ZW50IGlkPVwiRW5kRXZlbnRfMGtlM3VxaFwiPjxpbmNv\nbWluZz5TZXF1ZW5jZUZsb3dfMHBrazhqZjwvaW5jb21pbmc+PC9lbmRFdmVudD48c2VxdWVuY2VG\nbG93IGlkPVwiU2VxdWVuY2VGbG93XzFwejc5Z3NcIiBzb3VyY2VSZWY9XCJTdGFydEV2ZW50XzE1\nNWFzeG1cIiB0YXJnZXRSZWY9XCJTZXJ2aWNlVGFza18wNmNnNGR0XCIvPjxzZXF1ZW5jZUZsb3cg\naWQ9XCJTZXF1ZW5jZUZsb3dfMHBrazhqZlwiIHNvdXJjZVJlZj1cIlNlcnZpY2VUYXNrXzA2Y2c0\nZHRcIiB0YXJnZXRSZWY9XCJFbmRF
dmVudF8wa2UzdXFoXCIvPjx0ZXh0QW5ub3RhdGlvbiBpZD1c\nIlRleHRBbm5vdGF0aW9uXzFreHhpeXRcIj48dGV4dD5TdGFydCB5b3VyIHdvcmtmbG93IGhlcmU8\nL3RleHQ+PC90ZXh0QW5ub3RhdGlvbj48YXNzb2NpYXRpb24gaWQ9XCJBc3NvY2lhdGlvbl8xc2V1\najQ4XCIgc291cmNlUmVmPVwiU3RhcnRFdmVudF8xNTVhc3htXCIgdGFyZ2V0UmVmPVwiVGV4dEFu\nbm90YXRpb25fMWt4eGl5dFwiLz48dGV4dEFubm90YXRpb24gaWQ9XCJUZXh0QW5ub3RhdGlvbl8w\ndWQ2Ym5yXCI+PHRleHQ+PCFbQ0RBVEFbUmVzdWx0cyByZXR1cm5lZCBpbiBhIE5vdGVcbl1dPjwv\ndGV4dD48L3RleHRBbm5vdGF0aW9uPjxhc3NvY2lhdGlvbiBpZD1cIkFzc29jaWF0aW9uXzExOTZ6\nZ2dcIiBzb3VyY2VSZWY9XCJTZXJ2aWNlVGFza18wNmNnNGR0XCIgdGFyZ2V0UmVmPVwiVGV4dEFu\nbm90YXRpb25fMHVkNmJuclwiLz48L3Byb2Nlc3M+PGJwbW5kaTpCUE1ORGlhZ3JhbSBpZD1cIkJQ\nTU5EaWFncmFtXzFcIj48YnBtbmRpOkJQTU5QbGFuZSBicG1uRWxlbWVudD1cInVuZGVmaW5lZFwi\nIGlkPVwiQlBNTlBsYW5lXzFcIj48YnBtbmRpOkJQTU5TaGFwZSBicG1uRWxlbWVudD1cIlN0YXJ0\nRXZlbnRfMTU1YXN4bVwiIGlkPVwiU3RhcnRFdmVudF8xNTVhc3htX2RpXCI+PG9tZ2RjOkJvdW5k\ncyBoZWlnaHQ9XCIzNlwiIHdpZHRoPVwiMzZcIiB4PVwiMjA1XCIgeT1cIjE5OVwiLz48YnBtbmRp\nOkJQTU5MYWJlbD48b21nZGM6Qm91bmRzIGhlaWdodD1cIjBcIiB3aWR0aD1cIjkwXCIgeD1cIjIw\nMFwiIHk9XCIyMzRcIi8+PC9icG1uZGk6QlBNTkxhYmVsPjwvYnBtbmRpOkJQTU5TaGFwZT48YnBt\nbmRpOkJQTU5TaGFwZSBicG1uRWxlbWVudD1cIlRleHRBbm5vdGF0aW9uXzFreHhpeXRcIiBpZD1c\nIlRleHRBbm5vdGF0aW9uXzFreHhpeXRfZGlcIj48b21nZGM6Qm91bmRzIGhlaWdodD1cIjMwXCIg\nd2lkdGg9XCIxMDBcIiB4PVwiOTlcIiB5PVwiMjU0XCIvPjwvYnBtbmRpOkJQTU5TaGFwZT48YnBt\nbmRpOkJQTU5FZGdlIGJwbW5FbGVtZW50PVwiQXNzb2NpYXRpb25fMXNldWo0OFwiIGlkPVwiQXNz\nb2NpYXRpb25fMXNldWo0OF9kaVwiPjxvbWdkaTp3YXlwb2ludCB4PVwiMjA3XCIgeHNpOnR5cGU9\nXCJvbWdkYzpQb2ludFwiIHk9XCIyMjVcIi8+PG9tZ2RpOndheXBvaW50IHg9XCIxNjdcIiB4c2k6\ndHlwZT1cIm9tZ2RjOlBvaW50XCIgeT1cIjI1NFwiLz48L2JwbW5kaTpCUE1ORWRnZT48YnBtbmRp\nOkJQTU5TaGFwZSBicG1uRWxlbWVudD1cIlNlcnZpY2VUYXNrXzA2Y2c0ZHRcIiBpZD1cIlNlcnZp\nY2VUYXNrXzA2Y2c0ZHRfZGlcIj48b21nZGM6Qm91bmRzIGhlaWdodD1cIjgwXCIgd2lkdGg9XCIx\nMDBcIiB4PVwiMzQ4XCIgeT1cIjE3N1wiLz48L2JwbW5kaTpCUE1OU2hhcGU+PGJwbW5kaTpCUE1O\nU2hhcGUgYnBtbkVsZW1lbnQ9XCJFbmRFdmVudF8wa2UzdXFoXCIgaWQ9XCJFbmRFdmVudF8wa2Uz\ndXFoX2RpXCI+PG9tZ2RjOkJvdW5kcyBoZWlnaHQ9XCIzNlwiIHdpZHRoPVwiMzZcIiB4PVwiNTU0\nXCIgeT1cIjE5OVwiLz48YnBtbmRpOkJQTU5MYWJlbD48b21nZGM6Qm91bmRzIGhlaWdodD1cIjEz\nXCIgd2lkdGg9XCI5MFwiIHg9XCI1MjdcIiB5PVwiMjM4XCIvPjwvYnBtbmRpOkJQTU5MYWJlbD48\nL2JwbW5kaTpCUE1OU2hhcGU+PGJwbW5kaTpCUE1ORWRnZSBicG1uRWxlbWVudD1cIlNlcXVlbmNl\nRmxvd18xcHo3OWdzXCIgaWQ9XCJTZXF1ZW5jZUZsb3dfMXB6Nzlnc19kaVwiPjxvbWdkaTp3YXlw\nb2ludCB4PVwiMjQxXCIgeHNpOnR5cGU9XCJvbWdkYzpQb2ludFwiIHk9XCIyMTdcIi8+PG9tZ2Rp\nOndheXBvaW50IHg9XCIzNDhcIiB4c2k6dHlwZT1cIm9tZ2RjOlBvaW50XCIgeT1cIjIxN1wiLz48\nYnBtbmRpOkJQTU5MYWJlbD48b21nZGM6Qm91bmRzIGhlaWdodD1cIjEzXCIgd2lkdGg9XCI5MFwi\nIHg9XCIyNDkuNVwiIHk9XCIxOTUuNVwiLz48L2JwbW5kaTpCUE1OTGFiZWw+PC9icG1uZGk6QlBN\nTkVkZ2U+PGJwbW5kaTpCUE1ORWRnZSBicG1uRWxlbWVudD1cIlNlcXVlbmNlRmxvd18wcGtrOGpm\nXCIgaWQ9XCJTZXF1ZW5jZUZsb3dfMHBrazhqZl9kaVwiPjxvbWdkaTp3YXlwb2ludCB4PVwiNDQ4\nXCIgeHNpOnR5cGU9XCJvbWdkYzpQb2ludFwiIHk9XCIyMTdcIi8+PG9tZ2RpOndheXBvaW50IHg9\nXCI1NTRcIiB4c2k6dHlwZT1cIm9tZ2RjOlBvaW50XCIgeT1cIjIxN1wiLz48YnBtbmRpOkJQTU5M\nYWJlbD48b21nZGM6Qm91bmRzIGhlaWdodD1cIjEzXCIgd2lkdGg9XCI5MFwiIHg9XCI0NTZcIiB5\nPVwiMTk1LjVcIi8+PC9icG1uZGk6QlBNTkxhYmVsPjwvYnBtbmRpOkJQTU5FZGdlPjxicG1uZGk6\nQlBNTlNoYXBlIGJwbW5FbGVtZW50PVwiVGV4dEFubm90YXRpb25fMHVkNmJuclwiIGlkPVwiVGV4\ndEFubm90YXRpb25fMHVkNmJucl9kaVwiPjxvbWdkYzpCb3VuZHMgaGVpZ2h0PVwiMzZcIiB3aWR0\naD1cIjEyMlwiIHg9XCI0NDhcIiB5PVwiODZcIi8+PC9icG1uZGk6QlBNTlNoYXBlPjxicG1uZGk6\nQlBNTkVkZ2UgYnBtbkVsZW1lbnQ9XCJBc3NvY2lhdGlvbl8xMTk2emdnXCIgaWQ9XCJBc3NvY2lh\ndGlvbl8xMTk2emdnX2RpXCI+PG9tZ2RpOndheXBvaW50IHg9XCI0MzdcIiB4c2k6dHlwZT1cI
m9t\nZ2RjOlBvaW50XCIgeT1cIjE3N1wiLz48b21nZGk6d2F5cG9pbnQgeD1cIjQ5MVwiIHhzaTp0eXBl\nPVwib21nZGM6UG9pbnRcIiB5PVwiMTIyXCIvPjwvYnBtbmRpOkJQTU5FZGdlPjwvYnBtbmRpOkJQ\nTU5QbGFuZT48L2JwbW5kaTpCUE1ORGlhZ3JhbT48L2RlZmluaXRpb25zPiJ9LCAiY29udGVudF92\nZXJzaW9uIjogMiwgImNyZWF0b3JfaWQiOiAiYUBleGFtcGxlLmNvbSIsICJkZXNjcmlwdGlvbiI6\nICJUaGlzIHdvcmtmbG93IGdlbmVyYXRlcyBSREFQIGZvcm1hdHRlZCByZXN1bHRzIGZyb20gYW4g\nSVAsIFVSTCBvciBETlMgQXJ0aWZhY3QiLCAiZXhwb3J0X2tleSI6ICJleGFtcGxlX3JkYXBfcXVl\ncnkiLCAibGFzdF9tb2RpZmllZF9ieSI6ICJhQGV4YW1wbGUuY29tIiwgImxhc3RfbW9kaWZpZWRf\ndGltZSI6IDE1OTc5Mjk3NDcyNTEsICJuYW1lIjogIkV4YW1wbGU6IFJEQVAgcXVlcnkiLCAib2Jq\nZWN0X3R5cGUiOiAiYXJ0aWZhY3QiLCAicHJvZ3JhbW1hdGljX25hbWUiOiAiZXhhbXBsZV9yZGFw\nX3F1ZXJ5IiwgInRhZ3MiOiBbXSwgInV1aWQiOiAiY2MxOTkzZjYtMWNjNC00ODlhLWFhOWMtMzQy\nMWNlYTM0MmM2IiwgIndvcmtmbG93X2lkIjogODd9LCB7ImFjdGlvbnMiOiBbXSwgImNvbnRlbnQi\nOiB7InZlcnNpb24iOiAyLCAid29ya2Zsb3dfaWQiOiAiZXhhbXBsZV93aG9pc19xdWVyeSIsICJ4\nbWwiOiAiPD94bWwgdmVyc2lvbj1cIjEuMFwiIGVuY29kaW5nPVwiVVRGLThcIj8+PGRlZmluaXRp\nb25zIHhtbG5zPVwiaHR0cDovL3d3dy5vbWcub3JnL3NwZWMvQlBNTi8yMDEwMDUyNC9NT0RFTFwi\nIHhtbG5zOmJwbW5kaT1cImh0dHA6Ly93d3cub21nLm9yZy9zcGVjL0JQTU4vMjAxMDA1MjQvRElc\nIiB4bWxuczpvbWdkYz1cImh0dHA6Ly93d3cub21nLm9yZy9zcGVjL0RELzIwMTAwNTI0L0RDXCIg\neG1sbnM6b21nZGk9XCJodHRwOi8vd3d3Lm9tZy5vcmcvc3BlYy9ERC8yMDEwMDUyNC9ESVwiIHht\nbG5zOnJlc2lsaWVudD1cImh0dHA6Ly9yZXNpbGllbnQuaWJtLmNvbS9icG1uXCIgeG1sbnM6eHNk\nPVwiaHR0cDovL3d3dy53My5vcmcvMjAwMS9YTUxTY2hlbWFcIiB4bWxuczp4c2k9XCJodHRwOi8v\nd3d3LnczLm9yZy8yMDAxL1hNTFNjaGVtYS1pbnN0YW5jZVwiIHRhcmdldE5hbWVzcGFjZT1cImh0\ndHA6Ly93d3cuY2FtdW5kYS5vcmcvdGVzdFwiPjxwcm9jZXNzIGlkPVwiZXhhbXBsZV93aG9pc19x\ndWVyeVwiIGlzRXhlY3V0YWJsZT1cInRydWVcIiBuYW1lPVwiRXhhbXBsZTogV2hvaXMgcXVlcnlc\nIj48ZG9jdW1lbnRhdGlvbj5UaGlzIHdvcmtmbG93IGdlbmVyYXRlcyBSREFQIGZvcm1hdHRlZCBy\nZXN1bHRzIGZyb20gYW4gSVAsIFVSTCBvciBETlMgQXJ0aWZhY3Q8L2RvY3VtZW50YXRpb24+PHN0\nYXJ0RXZlbnQgaWQ9XCJTdGFydEV2ZW50XzE1NWFzeG1cIj48b3V0Z29pbmc+U2VxdWVuY2VGbG93\nXzF0dHkwdG48L291dGdvaW5nPjwvc3RhcnRFdmVudD48ZW5kRXZlbnQgaWQ9XCJFbmRFdmVudF8w\nZjZ1dWw0XCI+PGluY29taW5nPlNlcXVlbmNlRmxvd18xZXY2bThtPC9pbmNvbWluZz48L2VuZEV2\nZW50PjxzZXF1ZW5jZUZsb3cgaWQ9XCJTZXF1ZW5jZUZsb3dfMXR0eTB0blwiIHNvdXJjZVJlZj1c\nIlN0YXJ0RXZlbnRfMTU1YXN4bVwiIHRhcmdldFJlZj1cIlNlcnZpY2VUYXNrXzE3NWh0N3lcIi8+\nPHNlcnZpY2VUYXNrIGlkPVwiU2VydmljZVRhc2tfMTc1aHQ3eVwiIG5hbWU9XCJXSE9JUzogcXVl\ncnlcIiByZXNpbGllbnQ6dHlwZT1cImZ1bmN0aW9uXCI+PGV4dGVuc2lvbkVsZW1lbnRzPjxyZXNp\nbGllbnQ6ZnVuY3Rpb24gdXVpZD1cImEwNDBkM2NkLWQxNTEtNGMxNC1hNjIyLTI2MjYyYWI2YTli\nOVwiPntcImlucHV0c1wiOnt9LFwicG9zdF9wcm9jZXNzaW5nX3NjcmlwdFwiOlwiZGVmIGZvcm1h\ndF9saW5rKGl0ZW0pOlxcbiAgaWYgaXRlbSBhbmQgKFxcXCJodHRwczovL1xcXCIgaW4gaXRlbSBv\nciBcXFwiaHR0cDovL1xcXCIgaW4gaXRlbSk6XFxuICAgIHJldHVybiBcXFwiJmx0O2EgdGFyZ2V0\nPSdibGFuaycgaHJlZj0nezB9JyZndDt7MH0mbHQ7L2EmZ3Q7XFxcIi5mb3JtYXQoaXRlbSlcXG4g\nIGVsc2U6XFxuICAgIHJldHVybiBpdGVtXFxuXFxuZGVmIGV4cGFuZF9saXN0KGxpc3RfdmFsdWUs\nIHNlcGFyYXRvcj1cXFwiJmx0O2JyJmd0O1xcXCIpOlxcbiAgaWYgbm90IGlzaW5zdGFuY2UobGlz\ndF92YWx1ZSwgbGlzdCk6XFxuICAgIHJldHVybiBmb3JtYXRfbGluayhsaXN0X3ZhbHVlKVxcbiAg\nZWxzZTpcXG4gICAgdHJ5OlxcbiAgICAgIGl0ZW1zID0gW11cXG4gICAgICBmb3IgaXRlbSBpbiBs\naXN0X3ZhbHVlOlxcbiAgICAgICAgaWYgaXNpbnN0YW5jZShpdGVtLCBkaWN0KTpcXG4gICAgICAg\nICAgaXRlbXMuYXBwZW5kKFxcXCImbHQ7ZGl2IHN0eWxlPSdwYWRkaW5nOjEwcHgnJmd0O3t9Jmx0\nOy9kaXYmZ3Q7XFxcIi5mb3JtYXQod2Fsa19kaWN0KGl0ZW0pKSlcXG4gICAgICAgIGVsc2U6XFxu\nICAgICAgICAgIGl0ZW1zLmFwcGVuZChmb3JtYXRfbGluayhpdGVtKSlcXG4gICAgICByZXR1cm4g\nc2VwYXJhdG9yLmpvaW4oaXRlbXMpXFxuICAgIGV4Y2VwdDpcXG4gICAgICAgIHBhc3NcXG4gICAg\nXFxuZGVmIHdhbGtfZGljdChzdWJfZGljdCk6XFxu
ICBub3RlcyA9IFtdXFxuICBmb3Iga2V5LCB2\nYWx1ZSBpbiBzdWJfZGljdC5pdGVtcygpOlxcbiAgICBpZiBrZXkgbm90IGluIFsnZGlzcGxheV9j\nb250ZW50J106XFxuICAgICAgaWYgaXNpbnN0YW5jZSh2YWx1ZSwgZGljdCk6XFxuICAgICAgICBu\nb3Rlcy5hcHBlbmQodVxcXCImbHQ7YiZndDt7fSZsdDsvYiZndDs6ICZsdDtkaXYgc3R5bGU9J3Bh\nZGRpbmc6MTBweCcmZ3Q7e30mbHQ7L2RpdiZndDtcXFwiLmZvcm1hdChrZXksIHdhbGtfZGljdCh2\nYWx1ZSkpKVxcbiAgICAgIGVsc2U6XFxuICAgICAgICBub3Rlcy5hcHBlbmQodVxcXCImbHQ7YiZn\ndDt7fSZsdDsvYiZndDs6IHt9XFxcIi5mb3JtYXQoa2V5LCBleHBhbmRfbGlzdCh2YWx1ZSkpKVxc\nbiAgICAgIFxcbiAgcmV0dXJuIHVcXFwiJmx0O2JyJmd0O1xcXCIuam9pbihub3RlcylcXG4gICAg\nXFxuXFxubm90ZSA9IHVcXFwiV2hvaXMgZm9yIGFydGlmYWN0OiB7fSZsdDticiZndDsmbHQ7YnIm\nZ3Q7XFxcIi5mb3JtYXQoYXJ0aWZhY3QudmFsdWUpXFxuaWYgcmVzdWx0c1tcXFwic3VjY2Vzc1xc\nXCJdOlxcbiAgbm90ZSA9IG5vdGUgKyB3YWxrX2RpY3QocmVzdWx0c1tcXFwiY29udGVudFxcXCJd\nKVxcbmVsc2U6XFxuICBub3RlID0gbm90ZSArIHVcXFwiVGhpcyBBcnRpZmFjdCBoYXMgbm8gd2hv\naXMgaW5mb3JtYXRpb25cXFwiXFxuXFxuaW5jaWRlbnQuYWRkTm90ZShoZWxwZXIuY3JlYXRlUmlj\naFRleHQobm90ZSkpXFxuXCIsXCJwcmVfcHJvY2Vzc2luZ19zY3JpcHRcIjpcImlucHV0cy53aG9p\nc19xdWVyeSA9IGFydGlmYWN0LnZhbHVlXCIsXCJyZXN1bHRfbmFtZVwiOlwiXCJ9PC9yZXNpbGll\nbnQ6ZnVuY3Rpb24+PC9leHRlbnNpb25FbGVtZW50cz48aW5jb21pbmc+U2VxdWVuY2VGbG93XzF0\ndHkwdG48L2luY29taW5nPjxvdXRnb2luZz5TZXF1ZW5jZUZsb3dfMWV2Nm04bTwvb3V0Z29pbmc+\nPC9zZXJ2aWNlVGFzaz48c2VxdWVuY2VGbG93IGlkPVwiU2VxdWVuY2VGbG93XzFldjZtOG1cIiBz\nb3VyY2VSZWY9XCJTZXJ2aWNlVGFza18xNzVodDd5XCIgdGFyZ2V0UmVmPVwiRW5kRXZlbnRfMGY2\ndXVsNFwiLz48dGV4dEFubm90YXRpb24gaWQ9XCJUZXh0QW5ub3RhdGlvbl8xa3h4aXl0XCI+PHRl\neHQ+U3RhcnQgeW91ciB3b3JrZmxvdyBoZXJlPC90ZXh0PjwvdGV4dEFubm90YXRpb24+PGFzc29j\naWF0aW9uIGlkPVwiQXNzb2NpYXRpb25fMXNldWo0OFwiIHNvdXJjZVJlZj1cIlN0YXJ0RXZlbnRf\nMTU1YXN4bVwiIHRhcmdldFJlZj1cIlRleHRBbm5vdGF0aW9uXzFreHhpeXRcIi8+PHRleHRBbm5v\ndGF0aW9uIGlkPVwiVGV4dEFubm90YXRpb25fMW16aWZlaVwiPjx0ZXh0PjwhW0NEQVRBW1Jlc3Vs\ndHMgcmV0dXJuZWQgaW4gYSBub3RlXG5dXT48L3RleHQ+PC90ZXh0QW5ub3RhdGlvbj48YXNzb2Np\nYXRpb24gaWQ9XCJBc3NvY2lhdGlvbl8wNzZtdmdlXCIgc291cmNlUmVmPVwiU2VydmljZVRhc2tf\nMTc1aHQ3eVwiIHRhcmdldFJlZj1cIlRleHRBbm5vdGF0aW9uXzFtemlmZWlcIi8+PC9wcm9jZXNz\nPjxicG1uZGk6QlBNTkRpYWdyYW0gaWQ9XCJCUE1ORGlhZ3JhbV8xXCI+PGJwbW5kaTpCUE1OUGxh\nbmUgYnBtbkVsZW1lbnQ9XCJ1bmRlZmluZWRcIiBpZD1cIkJQTU5QbGFuZV8xXCI+PGJwbW5kaTpC\nUE1OU2hhcGUgYnBtbkVsZW1lbnQ9XCJTdGFydEV2ZW50XzE1NWFzeG1cIiBpZD1cIlN0YXJ0RXZl\nbnRfMTU1YXN4bV9kaVwiPjxvbWdkYzpCb3VuZHMgaGVpZ2h0PVwiMzZcIiB3aWR0aD1cIjM2XCIg\neD1cIjE2MlwiIHk9XCIxODhcIi8+PGJwbW5kaTpCUE1OTGFiZWw+PG9tZ2RjOkJvdW5kcyBoZWln\naHQ9XCIwXCIgd2lkdGg9XCI5MFwiIHg9XCIxNTdcIiB5PVwiMjIzXCIvPjwvYnBtbmRpOkJQTU5M\nYWJlbD48L2JwbW5kaTpCUE1OU2hhcGU+PGJwbW5kaTpCUE1OU2hhcGUgYnBtbkVsZW1lbnQ9XCJU\nZXh0QW5ub3RhdGlvbl8xa3h4aXl0XCIgaWQ9XCJUZXh0QW5ub3RhdGlvbl8xa3h4aXl0X2RpXCI+\nPG9tZ2RjOkJvdW5kcyBoZWlnaHQ9XCIzMFwiIHdpZHRoPVwiMTAwXCIgeD1cIjk5XCIgeT1cIjI1\nNFwiLz48L2JwbW5kaTpCUE1OU2hhcGU+PGJwbW5kaTpCUE1ORWRnZSBicG1uRWxlbWVudD1cIkFz\nc29jaWF0aW9uXzFzZXVqNDhcIiBpZD1cIkFzc29jaWF0aW9uXzFzZXVqNDhfZGlcIj48b21nZGk6\nd2F5cG9pbnQgeD1cIjE2OVwiIHhzaTp0eXBlPVwib21nZGM6UG9pbnRcIiB5PVwiMjIwXCIvPjxv\nbWdkaTp3YXlwb2ludCB4PVwiMTUzXCIgeHNpOnR5cGU9XCJvbWdkYzpQb2ludFwiIHk9XCIyNTRc\nIi8+PC9icG1uZGk6QlBNTkVkZ2U+PGJwbW5kaTpCUE1OU2hhcGUgYnBtbkVsZW1lbnQ9XCJFbmRF\ndmVudF8wZjZ1dWw0XCIgaWQ9XCJFbmRFdmVudF8wZjZ1dWw0X2RpXCI+PG9tZ2RjOkJvdW5kcyBo\nZWlnaHQ9XCIzNlwiIHdpZHRoPVwiMzZcIiB4PVwiNTIyXCIgeT1cIjE4OFwiLz48YnBtbmRpOkJQ\nTU5MYWJlbD48b21nZGM6Qm91bmRzIGhlaWdodD1cIjEzXCIgd2lkdGg9XCIwXCIgeD1cIjU0MFwi\nIHk9XCIyMjdcIi8+PC9icG1uZGk6QlBNTkxhYmVsPjwvYnBtbmRpOkJQTU5TaGFwZT48YnBtbmRp\nOkJQTU5FZGdlIGJwbW5FbGVtZW50PVwiU2VxdWVuY2VGbG93XzF0dHkwdG5cIiBpZD1cIlNlcXVl\nbmNlRmx
vd18xdHR5MHRuX2RpXCI+PG9tZ2RpOndheXBvaW50IHg9XCIxOThcIiB4c2k6dHlwZT1c\nIm9tZ2RjOlBvaW50XCIgeT1cIjIwNlwiLz48b21nZGk6d2F5cG9pbnQgeD1cIjMwOFwiIHhzaTp0\neXBlPVwib21nZGM6UG9pbnRcIiB5PVwiMjA2XCIvPjxicG1uZGk6QlBNTkxhYmVsPjxvbWdkYzpC\nb3VuZHMgaGVpZ2h0PVwiMTNcIiB3aWR0aD1cIjBcIiB4PVwiMjUzXCIgeT1cIjE4NC41XCIvPjwv\nYnBtbmRpOkJQTU5MYWJlbD48L2JwbW5kaTpCUE1ORWRnZT48YnBtbmRpOkJQTU5TaGFwZSBicG1u\nRWxlbWVudD1cIlNlcnZpY2VUYXNrXzE3NWh0N3lcIiBpZD1cIlNlcnZpY2VUYXNrXzE3NWh0N3lf\nZGlcIj48b21nZGM6Qm91bmRzIGhlaWdodD1cIjgwXCIgd2lkdGg9XCIxMDBcIiB4PVwiMzA4XCIg\neT1cIjE2NlwiLz48L2JwbW5kaTpCUE1OU2hhcGU+PGJwbW5kaTpCUE1ORWRnZSBicG1uRWxlbWVu\ndD1cIlNlcXVlbmNlRmxvd18xZXY2bThtXCIgaWQ9XCJTZXF1ZW5jZUZsb3dfMWV2Nm04bV9kaVwi\nPjxvbWdkaTp3YXlwb2ludCB4PVwiNDA4XCIgeHNpOnR5cGU9XCJvbWdkYzpQb2ludFwiIHk9XCIy\nMDZcIi8+PG9tZ2RpOndheXBvaW50IHg9XCI1MjJcIiB4c2k6dHlwZT1cIm9tZ2RjOlBvaW50XCIg\neT1cIjIwNlwiLz48YnBtbmRpOkJQTU5MYWJlbD48b21nZGM6Qm91bmRzIGhlaWdodD1cIjEzXCIg\nd2lkdGg9XCIwXCIgeD1cIjQ2NVwiIHk9XCIxODRcIi8+PC9icG1uZGk6QlBNTkxhYmVsPjwvYnBt\nbmRpOkJQTU5FZGdlPjxicG1uZGk6QlBNTlNoYXBlIGJwbW5FbGVtZW50PVwiVGV4dEFubm90YXRp\nb25fMW16aWZlaVwiIGlkPVwiVGV4dEFubm90YXRpb25fMW16aWZlaV9kaVwiPjxvbWdkYzpCb3Vu\nZHMgaGVpZ2h0PVwiMzZcIiB3aWR0aD1cIjEyNlwiIHg9XCI0MDhcIiB5PVwiODZcIi8+PC9icG1u\nZGk6QlBNTlNoYXBlPjxicG1uZGk6QlBNTkVkZ2UgYnBtbkVsZW1lbnQ9XCJBc3NvY2lhdGlvbl8w\nNzZtdmdlXCIgaWQ9XCJBc3NvY2lhdGlvbl8wNzZtdmdlX2RpXCI+PG9tZ2RpOndheXBvaW50IHg9\nXCI0MDBcIiB4c2k6dHlwZT1cIm9tZ2RjOlBvaW50XCIgeT1cIjE2OFwiLz48b21nZGk6d2F5cG9p\nbnQgeD1cIjQ1MVwiIHhzaTp0eXBlPVwib21nZGM6UG9pbnRcIiB5PVwiMTIyXCIvPjwvYnBtbmRp\nOkJQTU5FZGdlPjwvYnBtbmRpOkJQTU5QbGFuZT48L2JwbW5kaTpCUE1ORGlhZ3JhbT48L2RlZmlu\naXRpb25zPiJ9LCAiY29udGVudF92ZXJzaW9uIjogMiwgImNyZWF0b3JfaWQiOiAiYUBleGFtcGxl\nLmNvbSIsICJkZXNjcmlwdGlvbiI6ICJUaGlzIHdvcmtmbG93IGdlbmVyYXRlcyBSREFQIGZvcm1h\ndHRlZCByZXN1bHRzIGZyb20gYW4gSVAsIFVSTCBvciBETlMgQXJ0aWZhY3QiLCAiZXhwb3J0X2tl\neSI6ICJleGFtcGxlX3dob2lzX3F1ZXJ5IiwgImxhc3RfbW9kaWZpZWRfYnkiOiAiYUBleGFtcGxl\nLmNvbSIsICJsYXN0X21vZGlmaWVkX3RpbWUiOiAxNTk3OTI5NzQ3MTYyLCAibmFtZSI6ICJFeGFt\ncGxlOiBXaG9pcyBxdWVyeSIsICJvYmplY3RfdHlwZSI6ICJhcnRpZmFjdCIsICJwcm9ncmFtbWF0\naWNfbmFtZSI6ICJleGFtcGxlX3dob2lzX3F1ZXJ5IiwgInRhZ3MiOiBbXSwgInV1aWQiOiAiZDY0\nZGYwYjMtMmEyOC00NTRhLTg3NTItYjFiMWE0YmRlNjJiIiwgIndvcmtmbG93X2lkIjogODZ9XSwg\nIndvcmtzcGFjZXMiOiBbXX0=\n\"\"\")",
"def install_esgf_client(certfile, keyfile):\n\n # Create HTTPSHandler \n ssl_context = M2Crypto.SSL.Context()\n ssl_context.load_cert_chain(certfile, keyfile)\n opener = M2Crypto.m2urllib2.build_opener(ssl_context, \n urllib2.HTTPCookieProcessor,\n urllib2.ProxyHandler)\n\n opener.addheaders = [('User-agent', pydap.lib.USER_AGENT)]\n urllib2.install_opener(opener)\n\n def new_request(url):\n log.debug('Opening URL %s' % url)\n\n r = urllib2.urlopen(url.rstrip('?&'))\n\n resp = r.headers.dict\n resp['status'] = str(r.code)\n data = r.read()\n\n # When an error is returned, we parse the error message from the\n # server and return it in a ``ClientError`` exception.\n if resp.get(\"content-description\") == \"dods_error\":\n m = re.search('code = (?P<code>\\d+);\\s*message = \"(?P<msg>.*)\"',\n data, re.DOTALL | re.MULTILINE)\n msg = 'Server error %(code)s: \"%(msg)s\"' % m.groupdict()\n raise ClientError(msg)\n\n return resp, data\n\n from pydap.util import http\n http.request = new_request",
"def _store_certificate(fullchain, key, domain=None, tag_prefix=None,\n region_name=None, acm_client=None, dry_run=False):\n #pylint:disable=unused-argument\n result = _check_certificate(fullchain, key, domain=domain)\n if not domain:\n domain = result['ssl_certificate']['common_name']\n cert, chain = _split_fullchain(fullchain)\n if not acm_client:\n acm_client = boto3.client('acm', region_name=region_name)\n kwargs = {}\n resp = acm_client.list_certificates()\n for acm_cert in resp['CertificateSummaryList']:\n if acm_cert['DomainName'] == domain:\n LOGGER.info(\"A certificate for domain %s has already been\"\\\n \" imported as %s - replacing\",\n domain, acm_cert['CertificateArn'])\n kwargs['CertificateArn'] = acm_cert['CertificateArn']\n break\n if not dry_run:\n resp = acm_client.import_certificate(\n Certificate=cert.encode('ascii'),\n PrivateKey=key.encode('ascii'),\n CertificateChain=chain.encode('ascii'),\n **kwargs)\n LOGGER.info(\"%s (re-)imported TLS certificate %s as %s\",\n tag_prefix, result['ssl_certificate'], resp['CertificateArn'])\n result.update({'CertificateArn': resp['CertificateArn']})\n return result",
"def Run(self, args):\n holder = base_classes.ComputeApiHolder(self.ReleaseTrack())\n client = holder.client\n\n ssl_certificate_ref = self.SSL_CERTIFICATE_ARG.ResolveAsResource(\n args, holder.resources, default_scope=compute_scope.ScopeEnum.GLOBAL)\n\n certificate = files.ReadFileContents(args.certificate)\n private_key = files.ReadFileContents(args.private_key)\n\n if ssl_certificates_utils.IsRegionalSslCertificatesRef(ssl_certificate_ref):\n request = client.messages.ComputeRegionSslCertificatesInsertRequest(\n sslCertificate=client.messages.SslCertificate(\n name=ssl_certificate_ref.Name(),\n certificate=certificate,\n privateKey=private_key,\n description=args.description),\n region=ssl_certificate_ref.region,\n project=ssl_certificate_ref.project)\n collection = client.apitools_client.regionSslCertificates\n else:\n request = client.messages.ComputeSslCertificatesInsertRequest(\n sslCertificate=client.messages.SslCertificate(\n name=ssl_certificate_ref.Name(),\n certificate=certificate,\n privateKey=private_key,\n description=args.description),\n project=ssl_certificate_ref.project)\n collection = client.apitools_client.sslCertificates\n\n return client.MakeRequests([(collection, 'Insert', request)])",
"def view_certificate(self, request, queryset):\n if len(queryset) > 1:\n self.message_user(\n request,\n 'You can only choose one certificate.',\n level=messages.ERROR)\n return None\n response = HttpResponse(content_type=\"text/plain\")\n cert = queryset.first()\n response.write(crypto.dump_certificate(\n crypto.FILETYPE_TEXT, cert.get_certificate()))\n return response",
"def main(cli_args):\n store_obj = cert_human_py3.CertChainStore.from_socket(\n host=cli_args.host, port=cli_args.port\n )\n\n print(store_obj.dump_json)",
"def client_x509_cert_url(self, client_x509_cert_url):\n\n self._client_x509_cert_url = client_x509_cert_url",
"def req_handler(args):\n key = _get_key(args)\n subject = get_subject_arguments()\n req = create_certificate_request(key, subject=subject, file_name=args.req_out)\n if not args.req_out:\n print(print_certificate_request(req))\n return req",
"def main() -> None:\n params = demisto.params()\n # if your Client class inherits from BaseClient, SSL verification is\n # handled out of the box by it, just pass ``verify_certificate`` to\n # the Client constructor\n verify_certificate = not params.get('insecure', False)\n\n # if your Client class inherits from BaseClient, system proxy is handled\n # out of the box by it, just pass ``proxy`` to the Client constructor\n proxy = params.get('proxy', False)\n app_id = params.get('creds_client_id', {}).get('password', '') or params.get('app_id') or params.get('_app_id')\n base_url = params.get('base_url')\n\n tenant_id = params.get('creds_tenant_id', {}).get('password', '') or params.get('tenant_id') or params.get('_tenant_id')\n client_credentials = params.get('client_credentials', False)\n enc_key = params.get('enc_key') or (params.get('credentials') or {}).get('password')\n certificate_thumbprint = params.get('creds_certificate', {}).get('identifier', '') or \\\n params.get('certificate_thumbprint', '')\n\n private_key = (replace_spaces_in_credential(params.get('creds_certificate', {}).get('password', ''))\n or params.get('private_key', ''))\n managed_identities_client_id = get_azure_managed_identities_client_id(params)\n\n first_fetch_time = params.get('first_fetch', '3 days').strip()\n fetch_limit = arg_to_number(params.get('max_fetch', 10))\n fetch_timeout = arg_to_number(params.get('fetch_timeout', TIMEOUT))\n demisto.debug(f'Command being called is {demisto.command()}')\n\n command = demisto.command()\n args = demisto.args()\n\n try:\n if not managed_identities_client_id and not app_id:\n raise Exception('Application ID must be provided.')\n\n client = Client(\n app_id=app_id,\n verify=verify_certificate,\n base_url=base_url,\n proxy=proxy,\n tenant_id=tenant_id,\n enc_key=enc_key,\n client_credentials=client_credentials,\n certificate_thumbprint=certificate_thumbprint,\n private_key=private_key,\n managed_identities_client_id=managed_identities_client_id\n )\n if demisto.command() == 'test-module':\n # This is the call made when pressing the integration Test button.\n return_results(test_module(client))\n\n elif command == 'microsoft-365-defender-auth-start':\n return_results(start_auth(client))\n\n elif command == 'microsoft-365-defender-auth-complete':\n return_results(complete_auth(client))\n\n elif command == 'microsoft-365-defender-auth-reset':\n return_results(reset_auth())\n\n elif command == 'microsoft-365-defender-auth-test':\n return_results(test_connection(client))\n\n elif command == 'microsoft-365-defender-incidents-list':\n test_context_for_token(client)\n return_results(microsoft_365_defender_incidents_list_command(client, args))\n\n elif command == 'microsoft-365-defender-incident-update':\n test_context_for_token(client)\n return_results(microsoft_365_defender_incident_update_command(client, args))\n\n elif command == 'microsoft-365-defender-advanced-hunting':\n test_context_for_token(client)\n return_results(microsoft_365_defender_advanced_hunting_command(client, args))\n\n elif command == 'microsoft-365-defender-incident-get':\n test_context_for_token(client)\n return_results(microsoft_365_defender_incident_get_command(client, args))\n\n elif command == 'fetch-incidents':\n fetch_limit = arg_to_number(fetch_limit)\n fetch_timeout = arg_to_number(fetch_timeout) if fetch_timeout else None\n incidents = fetch_incidents(client, first_fetch_time, fetch_limit, fetch_timeout)\n demisto.incidents(incidents)\n else:\n raise NotImplementedError\n # Log exceptions and return 
errors\n except Exception as e:\n return_error(f'Failed to execute {demisto.command()} command.\\nError:\\n{str(e)}')",
"def do_import(args):\n base64str = b''\n for infile_name in args.infile_names:\n if args.png:\n chunk = subprocess.check_output(['zbarimg', '--raw', infile_name])\n base64str += chunk\n elif args.base64:\n with open(infile_name, 'rb') as infile:\n chunk = infile.read()\n base64str += chunk\n\n raw = base64.b64decode(base64str)\n paperkey = subprocess.Popen(['paperkey', '--pubring', args.pubkey],\n stdin=subprocess.PIPE,\n stdout=subprocess.PIPE)\n (paperkey_stdout, _) = paperkey.communicate(raw)\n gpg = subprocess.Popen(['gpg', '--import'], stdin=subprocess.PIPE)\n gpg.communicate(paperkey_stdout)",
"def svn_client_import(svn_client_commit_info_t_commit_info_p, char_path, char_url, svn_boolean_t_nonrecursive, svn_client_ctx_t_ctx, apr_pool_t_pool): # real signature unknown; restored from __doc__\n pass",
"def load_cert_chain(self, certfile, keyfile: Optional[Any] = ...):\n ...",
"def add_certificate_arguments(parser):\n group = parser.add_argument_group(\"Certificate management\")\n group.add_argument(\n \"-sn\", \"--serial_number\",\n help=\"Serial number for the certificate\",\n type=int,\n default=1\n )\n group.add_argument(\n \"-d\", \"--duration\",\n help=\"Period of validity for certificate (seconds)\",\n type=int,\n default=60*60*24*(365*100+25)\n )",
"def worker(args):\n\n # Step 1. Create the NDSE view request object\n # Set the url where you want the recipient to go once they are done\n # with the NDSE. It is usually the case that the\n # user will never \"finish\" with the NDSE.\n # Assume that control will not be passed back to your app.\n view_request = ConsoleViewRequest(return_url=args[\"ds_return_url\"])\n if args[\"starting_view\"] == \"envelope\" and args[\"envelope_id\"]:\n view_request.envelope_id = args[\"envelope_id\"]\n\n # Step 2. Get the console view url\n # Exceptions will be caught by the calling function\n api_client = create_api_client(base_path=args[\"base_path\"], access_token=args[\"access_token\"])\n\n envelope_api = EnvelopesApi(api_client)\n results = envelope_api.create_console_view(account_id=args[\"account_id\"], console_view_request=view_request)\n url = results.url\n return {\"redirect_url\": url}",
"def __init__(__self__,\n resource_name: str,\n args: CertificateArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...",
"def enroll_certificate(self, kwargs):\n return self.__query(\"certificateEnroll\", kwargs)"
] | [
"0.5857431",
"0.51808554",
"0.51713127",
"0.51159406",
"0.51159406",
"0.51053613",
"0.5080026",
"0.5057473",
"0.5015352",
"0.497699",
"0.4970104",
"0.49373612",
"0.49193794",
"0.4890044",
"0.48616487",
"0.48438507",
"0.48412257",
"0.4810169",
"0.4780042",
"0.47724333",
"0.47528744",
"0.47512835",
"0.47378665",
"0.4734239",
"0.47212297",
"0.47091407",
"0.46598485",
"0.46526676",
"0.46202815",
"0.46136165"
] | 0.59895074 | 0 |
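Each complete row in this dump carries seven columns: a natural-language query, its positive document, a metadata dict naming the training objective, thirty mined negative documents, their scores, and the positive's own score and rank. A minimal loading sketch follows; the JSON-lines layout and the exact field names are assumptions inferred from the columns visible here, not a documented schema.

import json

def load_rows(path):
    # Assumed JSON-lines layout; field names inferred from this dump's
    # columns: query, document, metadata, negatives, negative_scores,
    # document_score, document_rank.
    with open(path, encoding="utf-8") as handle:
        for line in handle:
            row = json.loads(line)
            yield (
                row["query"],
                row["document"],
                row["negatives"],                      # 30 hard negatives per row
                [float(s) for s in row["negative_scores"]],
                float(row["document_score"]),
                int(row["document_rank"]),
            )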
Update Client Certificate in Oneview [Arguments] | def fusion_api_update_client_certificate(self, aliasname, body, api=None, headers=None):
return self.client_certificate.put(aliasname, body, api, headers) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def edit_certificate(self, certificate):\r\n return self.ssl.editObject(certificate, id=certificate['id'])",
"def renew_certificate(self, kwargs):\n return self.__query(\"certificateRenew\", kwargs)",
"def AddClientCertificateFlag(parser, required=False):\n help_text = \"\"\"\\\n x509 PEM-encoded certificate that will be used by the replica to\n authenticate against the database server. Database Migration Service\n encrypts the value when storing it.\n \"\"\"\n parser.add_argument('--client-certificate', help=help_text, required=required)",
"def test_update_certificate(self):\n self.client.post(\n '/api/v1/certificates', data=json.dumps(new_certificate),\n content_type='application/json',\n headers=self.get_registrar_token())\n response = self.client.put(\n '/api/v1/certificates/1', data=json.dumps(update_certificate),\n content_type='application/json',\n headers=self.get_registrar_token())\n result = json.loads(response.data.decode())\n self.assertEqual(result['message'],\n 'Certificate updated successfully')\n assert response.status_code == 200",
"def client_cert(self, client_cert):\n\n self._client_cert = client_cert",
"def Certificate(self) -> _n_8_t_0:",
"def Certificate(self) -> _n_8_t_0:",
"def fusion_api_update_server_certificate(self, aliasname, body, api=None, headers=None):\n return self.server_certificate.put(aliasname, body, api, headers)",
"def update_certificate(request):\r\n\r\n status = CertificateStatuses\r\n if request.method == \"POST\":\r\n\r\n xqueue_body = json.loads(request.POST.get('xqueue_body'))\r\n xqueue_header = json.loads(request.POST.get('xqueue_header'))\r\n\r\n try:\r\n course_key = SlashSeparatedCourseKey.from_deprecated_string(xqueue_body['course_id'])\r\n\r\n cert = GeneratedCertificate.objects.get(\r\n user__username=xqueue_body['username'],\r\n course_id=course_key,\r\n key=xqueue_header['lms_key'])\r\n\r\n except GeneratedCertificate.DoesNotExist:\r\n logger.critical('Unable to lookup certificate\\n'\r\n 'xqueue_body: {0}\\n'\r\n 'xqueue_header: {1}'.format(\r\n xqueue_body, xqueue_header))\r\n\r\n return HttpResponse(json.dumps({\r\n 'return_code': 1,\r\n 'content': 'unable to lookup key'}),\r\n mimetype='application/json')\r\n\r\n if 'error' in xqueue_body:\r\n cert.status = status.error\r\n if 'error_reason' in xqueue_body:\r\n\r\n # Hopefully we will record a meaningful error\r\n # here if something bad happened during the\r\n # certificate generation process\r\n #\r\n # example:\r\n # (aamorm BerkeleyX/CS169.1x/2012_Fall)\r\n # <class 'simples3.bucket.S3Error'>:\r\n # HTTP error (reason=error(32, 'Broken pipe'), filename=None) :\r\n # certificate_agent.py:175\r\n\r\n\r\n cert.error_reason = xqueue_body['error_reason']\r\n else:\r\n if cert.status in [status.generating, status.regenerating]:\r\n cert.download_uuid = xqueue_body['download_uuid']\r\n cert.verify_uuid = xqueue_body['verify_uuid']\r\n cert.download_url = xqueue_body['url']\r\n cert.status = status.downloadable\r\n elif cert.status in [status.deleting]:\r\n cert.status = status.deleted\r\n else:\r\n logger.critical('Invalid state for cert update: {0}'.format(\r\n cert.status))\r\n return HttpResponse(json.dumps({\r\n 'return_code': 1,\r\n 'content': 'invalid cert status'}),\r\n mimetype='application/json')\r\n\r\n dog_stats_api.increment(XQUEUE_METRIC_NAME, tags=[\r\n u'action:update_certificate',\r\n u'course_id:{}'.format(cert.course_id)\r\n ])\r\n\r\n cert.save()\r\n return HttpResponse(json.dumps({'return_code': 0}),\r\n mimetype='application/json')",
"def test_update_certificate_keys(self):\n self.client.post(\n '/api/v1/certificates', data=json.dumps(new_certificate),\n content_type='application/json',\n headers=self.get_registrar_token())\n response = self.client.put(\n '/api/v1/certificates/1', data=json.dumps(update_certificate_keys),\n content_type='application/json',\n headers=self.get_registrar_token())\n result = json.loads(response.data.decode())\n self.assertEqual(result['message'],\n 'Invalid certificate_name key')\n assert response.status_code == 400",
"def _update_certificate_context(context, course, course_overview, user_certificate, platform_name):\n # Populate dynamic output values using the course/certificate data loaded above\n certificate_type = context.get('certificate_type')\n\n # Override the defaults with any mode-specific static values\n context['certificate_id_number'] = user_certificate.verify_uuid\n context['certificate_verify_url'] = \"{prefix}{uuid}{suffix}\".format(\n prefix=context.get('certificate_verify_url_prefix'),\n uuid=user_certificate.verify_uuid,\n suffix=context.get('certificate_verify_url_suffix')\n )\n\n # We prefer a CourseOverview for this function because it validates and corrects certificate_available_date\n # and certificates_display_behavior values. However, not all certificates are guaranteed to have a CourseOverview\n # associated with them, so we fall back on the course in that case. This shouldn't cause a problem because courses\n # that are missing CourseOverviews are generally old courses, and thus their display values are no longer relevant\n if course_overview:\n date = display_date_for_certificate(course_overview, user_certificate)\n else:\n date = display_date_for_certificate(course, user_certificate)\n # Translators: The format of the date includes the full name of the month\n context['certificate_date_issued'] = strftime_localized(date, settings.CERTIFICATE_DATE_FORMAT)\n\n # Translators: This text represents the verification of the certificate\n context['document_meta_description'] = _('This is a valid {platform_name} certificate for {user_name}, '\n 'who participated in {partner_short_name} {course_number}').format(\n platform_name=platform_name,\n user_name=context['accomplishment_copy_name'],\n partner_short_name=context['organization_short_name'],\n course_number=context['course_number']\n )\n\n # Translators: This text is bound to the HTML 'title' element of the page and appears in the browser title bar\n context['document_title'] = _(\"{partner_short_name} {course_number} Certificate | {platform_name}\").format(\n partner_short_name=context['organization_short_name'],\n course_number=context['course_number'],\n platform_name=platform_name\n )\n\n # Translators: This text fragment appears after the student's name (displayed in a large font) on the certificate\n # screen. The text describes the accomplishment represented by the certificate information displayed to the user\n context['accomplishment_copy_description_full'] = _(\"successfully completed, received a passing grade, and was \"\n \"awarded this {platform_name} {certificate_type} \"\n \"Certificate of Completion in \").format(\n platform_name=platform_name,\n certificate_type=context.get(\"certificate_type\"))\n\n certificate_type_description = get_certificate_description(user_certificate.mode, certificate_type, platform_name)\n if certificate_type_description:\n context['certificate_type_description'] = certificate_type_description\n\n # Translators: This text describes the purpose (and therefore, value) of a course certificate\n context['certificate_info_description'] = _(\"{platform_name} acknowledges achievements through \"\n \"certificates, which are awarded for course activities \"\n \"that {platform_name} students complete.\").format(\n platform_name=platform_name,\n )",
"def org_apache_felix_https_clientcertificate(self, org_apache_felix_https_clientcertificate: ConfigNodePropertyDropDown):\n\n self._org_apache_felix_https_clientcertificate = org_apache_felix_https_clientcertificate",
"def update_signing_cert(self, cert_id, status, user_name=None):\r\n params = {'CertificateId' : cert_id,\r\n 'Status' : status}\r\n if user_name:\r\n params['UserName'] = user_name\r\n return self.get_response('UpdateSigningCertificate', params)",
"def get_certificate_command(client: KeyVaultClient, args: dict[str, Any]) -> CommandResults:\n vault_name = args.get('vault_name', '')\n certificate_name = args.get('certificate_name', '')\n certificate_version = args.get('certificate_version', '')\n response = client.get_certificate_request(\n vault_name, certificate_name, certificate_version)\n\n outputs = copy.deepcopy(response)\n outputs['attributes'] = convert_time_attributes_to_iso(outputs['attributes'])\n outputs['policy']['attributes'] = convert_time_attributes_to_iso(outputs['policy']['attributes'])\n\n readable_response = {'certificate_id': response.get(\n 'id'), **convert_attributes_to_readable(response.get('attributes', {}).copy())}\n outputs[VAULT_NAME_CONTEXT_FIELD] = vault_name\n\n readable_output = tableToMarkdown(f'{certificate_name} Information',\n readable_response,\n ['certificate_id', 'enabled', 'create_time', 'update_time', 'expiry_time'],\n removeNull=True,\n headerTransform=string_to_table_header)\n command_results = CommandResults(\n outputs_prefix='AzureKeyVault.Certificate',\n outputs_key_field='id',\n outputs=outputs,\n raw_response=response,\n readable_output=readable_output,\n ignore_auto_extract=True\n )\n\n return command_results",
"def client_certificate_id(self, client_certificate_id):\n\n self._client_certificate_id = client_certificate_id",
"def ModifyCertificate(self, request):\n try:\n params = request._serialize()\n headers = request.headers\n body = self.call(\"ModifyCertificate\", params, headers=headers)\n response = json.loads(body)\n model = models.ModifyCertificateResponse()\n model._deserialize(response[\"Response\"])\n return model\n except Exception as e:\n if isinstance(e, TencentCloudSDKException):\n raise\n else:\n raise TencentCloudSDKException(type(e).__name__, str(e))",
"def client_x509_cert_url(self, client_x509_cert_url):\n\n self._client_x509_cert_url = client_x509_cert_url",
"def UpdateOIDCClient(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')",
"def put_certificate(self, target, who, args, _files, _user_path):\n name = self.arg_get(args, 'name', str)\n if not commonl.verify_str_safe(name, do_raise = False):\n raise ValueError(\n f\"{name}: invalid certificate name, only [-_a-zA-Z0-9] allowed\")\n\n with target.target_owned_and_locked(who):\n target.timestamp()\n\n cert_path = os.path.join(target.state_dir, \"certificates\")\n cert_client_path = os.path.join(target.state_dir, \"certificates_client\")\n self._setup_maybe(target, cert_path, cert_client_path)\n\n client_key_path = os.path.join(cert_client_path, name + \".key\")\n client_req_path = os.path.join(cert_client_path, name + \".req\")\n client_cert_path = os.path.join(cert_client_path, name + \".cert\")\n\n if os.path.isfile(client_key_path) \\\n and os.path.isfile(client_cert_path):\t# already made?\n with open(client_key_path) as keyf, \\\n open(client_cert_path) as certf:\n return dict({\n \"name\": name,\n \"created\": False,\n \"key\": keyf.read(),\n \"cert\": certf.read(),\n })\n\n try:\n subprocess.run(\n f\"openssl genrsa -out {client_key_path} {self.key_size}\".split(),\n stdin = None, timeout = 5,\n capture_output = True, cwd = cert_path, check = True)\n allocid = target.fsdb.get(\"_alloc.id\", \"UNKNOWN\")\n subprocess.run(\n f\"openssl req -new -key {client_key_path} -out {client_req_path}\"\n f\" -subj /C=LC/ST=Local/L=Local/O=TCF-Signing-Authority-{target.id}-{allocid}/CN=TCF-{name}\".split(),\n check = True, cwd = cert_path,\n stdout = subprocess.PIPE, stderr = subprocess.STDOUT)\n target.log.debug(f\"{name}: created client's certificate\")\n\n # Issue the client certificate using the cert request and the CA cert/key.\n # note we run in the cert_path directory, so the ca.*\n # files are there\n subprocess.run(\n f\"openssl x509 -req -in {client_req_path} -CA ca.cert\"\n \" -CAkey ca.key -set_serial 101 -extensions client\"\n f\" -days 365 -outform PEM -out {client_cert_path}\".split(),\n stdin = None, timeout = 5,\n capture_output = True, cwd = cert_path, check = True)\n except subprocess.CalledProcessError as e:\n target.log.error(f\"command {' '.join(e.cmd)} failed: {e.output}\")\n self._client_wipe(name, cert_client_path)\t# don't leave things half there\n raise\n\n with open(client_key_path) as keyf, \\\n open(client_cert_path) as certf:\n return dict({\n \"name\": name,\n \"created\": True,\n \"key\": keyf.read(),\n \"cert\": certf.read(),\n })",
"def replace_certificate(self):\n return self.__query(\"certificateReplace\", data)",
"def fusion_api_import_client_certificate(self, body, api=None, headers=None):\n return self.client_certificate.post(body, api, headers)",
"def _Run(args, holder, ssl_certificate_ref):\n client = holder.client\n\n certificate_type, self_managed, managed = _ParseCertificateArguments(\n client, args)\n\n if ssl_certificates_utils.IsRegionalSslCertificatesRef(ssl_certificate_ref):\n request = client.messages.ComputeRegionSslCertificatesInsertRequest(\n sslCertificate=client.messages.SslCertificate(\n type=certificate_type,\n name=ssl_certificate_ref.Name(),\n selfManaged=self_managed,\n managed=managed,\n description=args.description),\n region=ssl_certificate_ref.region,\n project=ssl_certificate_ref.project)\n else:\n request = client.messages.ComputeSslCertificatesInsertRequest(\n sslCertificate=client.messages.SslCertificate(\n type=certificate_type,\n name=ssl_certificate_ref.Name(),\n selfManaged=self_managed,\n managed=managed,\n description=args.description),\n project=ssl_certificate_ref.project)\n\n if ssl_certificates_utils.IsRegionalSslCertificatesRef(ssl_certificate_ref):\n collection = client.apitools_client.regionSslCertificates\n else:\n collection = client.apitools_client.sslCertificates\n\n return client.MakeRequests([(collection, 'Insert', request)])",
"def update_servicech(self, conf, phone_num, body):\n\t\tpass",
"def enroll_certificate(self, kwargs):\n return self.__query(\"certificateEnroll\", kwargs)",
"def edit_cert(cert_id):\n\n if not g.user:\n flash(\"Please login to access\", \"danger\")\n return redirect(\"/\")\n \n if g.user.is_admin == False:\n flash (\"Unauthorized\", \"danger\")\n return redirect(\"/login\")\n\n cert = Certs.query.get_or_404(cert_id)\n form = Cert_Form(obj=cert)\n\n if form.validate_on_submit():\n cert.cert_name = form.cert_name.data\n cert.hours = form.hours.data\n cert.is_required = form.is_required.data\n cert.expire = form.expire.data\n cert.good_for_time = form.good_for_time.data\n cert.good_for_unit = form.good_for_unit.data\n \n\n db.session.commit()\n flash(f\"{cert.cert_name} has been updated\")\n return redirect(\"/administrator\")\n\n return render_template(\"/admin/edit_cert.html\", form=form, cert = cert)",
"def change_client_info(request: Request) -> Dict:\n ser = ChangeClientInfoSerializer(data=request.data)\n if ser.is_valid():\n if ser.validated_data.get('email') and request.user.client.email != ser.validated_data['email']:\n request.user.client.activated = False\n new_email = UserEmail(template_id=1, user=request.user)\n new_email.generate_code()\n is_send = send_email_to_user(1, [request.user.client.email], f'https://royal-lion.bet/activate/{new_email.code}')\n if is_send:\n new_email.save()\n request.user.client.save()\n ser.update(request.user.client, validated_data=ser.validated_data)\n return {'data': 'ok', 'success': True}\n else:\n return {'errors': ser.errors, 'success': False}",
"def fusion_api_upload_certificate_info(self, body, uri=None, api=None, param='', headers=None):\n param = '/certificates/https/'\n return self.ic.put(body=body, uri=uri, api=api, headers=headers, param=param)",
"def update_client(c, stack_name, subdomain, profile, cert_arn=None, create=False):\n action = 'create' if create else 'update'\n\n with chdir(WORKING_DIR):\n aws('cloudformation', f'{action}-stack',\n '--stack-name', f'{stack_name}-client',\n '--template-body', f'file://static-site.yaml',\n '--parameters',\n f'ParameterKey=Subdomain,ParameterValue={subdomain}',\n f'ParameterKey=CertificateArn,ParameterValue={cert_arn if cert_arn else \"\"}',\n f'--profile', f'{profile}')",
"def AddCertificateFlag(parser, required=False):\n help_text = \"\"\"\\\n x509 PEM-encoded certificate that will be used by the replica to\n authenticate against the database server.\n \"\"\"\n parser.add_argument('--certificate', help=help_text, required=required)",
"def view_certificate(self, request, queryset):\n if len(queryset) > 1:\n self.message_user(\n request,\n 'You can only choose one certificate.',\n level=messages.ERROR)\n return None\n response = HttpResponse(content_type=\"text/plain\")\n cert = queryset.first()\n response.write(crypto.dump_certificate(\n crypto.FILETYPE_TEXT, cert.get_certificate()))\n return response"
] | [
"0.58557314",
"0.56805027",
"0.566867",
"0.56002045",
"0.54227984",
"0.54112077",
"0.54112077",
"0.5403928",
"0.5353365",
"0.535133",
"0.52481216",
"0.5242505",
"0.5226197",
"0.5186496",
"0.5183812",
"0.5158893",
"0.51565576",
"0.5068806",
"0.5057645",
"0.50548595",
"0.5052115",
"0.504665",
"0.50401276",
"0.50213206",
"0.5018523",
"0.49734804",
"0.4951004",
"0.49461538",
"0.49224705",
"0.4909649"
] | 0.6258567 | 0 |
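The positive document in the row above is a one-line Robot Framework keyword that delegates to a REST client's put on the certificate alias. A hedged sketch of that call pattern, with the transport stubbed out so it runs standalone; the stub class, its return payload, and the body fields are illustrative assumptions, not OneView's documented API.

class _StubClientCertificateResource:
    # Stand-in for the library's self.client_certificate REST wrapper.
    def put(self, aliasname, body, api=None, headers=None):
        # A real client would PUT `body` to the alias-scoped certificate URI.
        return {"aliasName": aliasname, "updated": True, "apiVersion": api}

class CertificateKeywords:
    def __init__(self):
        self.client_certificate = _StubClientCertificateResource()

    def fusion_api_update_client_certificate(self, aliasname, body,
                                             api=None, headers=None):
        return self.client_certificate.put(aliasname, body, api, headers)

keywords = CertificateKeywords()
print(keywords.fusion_api_update_client_certificate(
    "example-alias", {"base64Data": "<PEM certificate here>"}))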
Delete Client Certificate in Oneview [Arguments] | def fusion_api_delete_client_certificate(self, aliasname, api=None, headers=None):
return self.client_certificate.delete(aliasname, api, headers) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def delete_key_command(client: KeyVaultClient, args: dict[str, Any]) -> CommandResults:\n vault_name = args['vault_name']\n key_name = args['key_name']\n response = client.delete_key_request(vault_name, key_name)\n\n outputs = copy.deepcopy(response)\n outputs['deletedDate'] = convert_timestamp_to_readable_date(\n outputs['deletedDate'])\n outputs['scheduledPurgeDate'] = convert_timestamp_to_readable_date(\n outputs['scheduledPurgeDate'])\n\n readable_response = copy.deepcopy(outputs)\n readable_response['keyId'] = readable_response['key']['kid']\n\n outputs['attributes'] = convert_time_attributes_to_iso(outputs['attributes'])\n outputs[VAULT_NAME_CONTEXT_FIELD] = vault_name\n\n readable_output = tableToMarkdown(f'Delete {key_name}',\n readable_response,\n ['keyId', 'recoveryId', 'deletedDate',\n 'scheduledPurgeDate'],\n removeNull=True,\n headerTransform=pascalToSpace)\n command_results = CommandResults(\n outputs_prefix='AzureKeyVault.Key',\n outputs_key_field='recoveryId',\n outputs=outputs,\n raw_response=response,\n readable_output=readable_output,\n ignore_auto_extract=True\n )\n\n return command_results",
"def delete_client():\n preserve_cache = request.args.get('preserve_cache', False)\n delete_client(g.client_id, preserve_cache)\n return jsonify({'Success': True})",
"def delete(self, **kwargs):\n self.dbdel('client', kwargs)",
"def DeleteCertificate(self, request):\n try:\n params = request._serialize()\n headers = request.headers\n body = self.call(\"DeleteCertificate\", params, headers=headers)\n response = json.loads(body)\n model = models.DeleteCertificateResponse()\n model._deserialize(response[\"Response\"])\n return model\n except Exception as e:\n if isinstance(e, TencentCloudSDKException):\n raise\n else:\n raise TencentCloudSDKException(type(e).__name__, str(e))",
"def test_delete_certificate(self):\n self.client.post(\n '/api/v1/certificates', data=json.dumps(new_certificate),\n content_type='application/json',\n headers=self.get_registrar_token())\n response = self.client.delete(\n '/api/v1/certificates/1', content_type='application/json',\n headers=self.get_registrar_token())\n result = json.loads(response.data.decode())\n self.assertEqual(result['message'],\n 'Certificate deleted successfully')\n assert response.status_code == 200",
"def run(self):\n certificate = self.admin_barbican.create_certificate()\n self.admin_barbican.orders_delete(certificate.order_ref)",
"def revoke_certificate(self):\n return self.__query(\"certificateRevoke\", kwargs)",
"def fusion_api_delete_server_certificate(self, aliasname, api=None, headers=None):\n return self.server_certificate.delete(aliasname, api, headers)",
"def DeleteOIDCClient(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')",
"def delete(device):\n delete_subject(device)\n return redirect_back('index')",
"def delete_key_command():\n incident = demisto.args().get('id', get_investigation_id())\n key = demisto.args().get('key')\n # Search Collection for incident_id and key\n search = incident + '.key'\n cursor = COLLECTION.find_one({search: key})\n if cursor is not None:\n object_id = cursor.get('_id')\n COLLECTION.delete_one({'_id': object_id})\n return f'Incident \"{incident}\" - key/value collection - 1 document deleted', {}, {}\n return f'Key \"{key}\" for incident_id \"{incident}\" does not exist', {}, {}",
"def delete_oneoff(name, client_id, date, numeric_id):\n date = datetime.strptime(date, \"%d-%b-%y\")\n numeric_id = int(numeric_id)\n OneOff.delete(name, client_id, date, numeric_id)\n\n return redirect(url_for('all_jobs_for_client', ClientID=client_id))",
"def delete_key_vault_command(client: KeyVaultClient, args: dict[str, Any], params: dict[str, Any]) -> CommandResults:\n\n vault_name = args['vault_name']\n # subscription_id and resource_group_name arguments can be passed as command arguments or as configuration parameters,\n # if both are passed as arguments, the command arguments will be used.\n subscription_id = get_from_args_or_params(params=params, args=args, key='subscription_id')\n resource_group_name = get_from_args_or_params(params=params,\n args=args, key='resource_group_name')\n\n response = client.delete_key_vault_request(subscription_id=subscription_id,\n resource_group_name=resource_group_name,\n vault_name=vault_name)\n message = \"\"\n if response.get('status_code') == 200:\n message = f'Deleted Key Vault {vault_name} successfully.'\n elif response.get('status_code') == 204:\n message = f'Key Vault {vault_name} does not exists.'\n\n return CommandResults(\n readable_output=message\n )",
"def delete(self, *args, **kwargs):\n self.request(\"delete\", *args, **kwargs)",
"def svn_client_delete(svn_client_commit_info_t_commit_info_p, apr_array_header_t_paths, svn_boolean_t_force, svn_client_ctx_t_ctx, apr_pool_t_pool): # real signature unknown; restored from __doc__\n pass",
"def delete(self, client):\n log(\"Deleting %s\" % self, self.opt)\n client.delete(self.path)",
"def delete_client(\n client_id: str, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs\n):\n request = DeleteClient.create(\n client_id=client_id,\n )\n return run_request(request, additional_headers=x_additional_headers, **kwargs)",
"def delete_client_permission(\n action: int,\n client_id: str,\n resource: str,\n x_additional_headers: Optional[Dict[str, str]] = None,\n **kwargs\n):\n request = DeleteClientPermission.create(\n action=action,\n client_id=client_id,\n resource=resource,\n )\n return run_request(request, additional_headers=x_additional_headers, **kwargs)",
"def cmd_conversation_delete(client, args):\n delete_conversation = client.delete_conversation(args.conversation_id)\n generate_output({'delete_conversation': delete_conversation})",
"def test_delete_client(self):\n pass",
"def test_client_verification_document_delete(self):\n pass",
"def delete():",
"def test_delete_o_auth_client_authorization(self):\n pass",
"def test_004_delete(self):\n ret = svcmgr.main(argv=[\"delete\", \"-s\", SVCNAME, \"--local\"])\n assert ret == 0",
"def DeleteKey(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')",
"def do_charge_purchase_delete(cs, args):\n cs.charge_purchases.delete(args.charge_purchase_id)",
"def delete(self, urns, client_cert, credentials, best_effort): ### FIX the response\n result = []\n slice_urn = urns[0]\n # try:\n for urn in urns:\n if self._verify_users:\n logger.debug(\"delete: authenticate the user for %s\" % (urn))\n client_urn, client_uuid, client_email =\\\n self.auth(client_cert, credentials, urn, (\"deletesliver\",))\n logger.info(\"Client urn=%s, uuid=%s, email=%s\" % (\n client_urn, client_uuid, client_email,))\n\n try:\n links_db, nodes, links = self.SESlices.get_link_db(urn)\n except Exception as e:\n raise geni_ex.GENIv3GeneralError(\"Slice does not exist.\")\n\n reservation_ports = self.SESlices._allocate_ports_in_slice(nodes)[\"ports\"]\n\n portsVlansPairs = getPortsVlansPairs(links_db)\n\n try:\n for portVlanItem in portsVlansPairs:\n (in_port, out_port, in_vlan, out_vlan) = portVlanItem\n se_provision.deleteSwitchingRule(in_port, out_port, in_vlan, out_vlan)\n logger.debug(\"unprovision SE-Slice-Urn=%s, in_port=%s , out_port=%s, in_vlan=%s, out_port=%s\" % (urn,in_port, out_port, in_vlan, out_vlan))\n except:\n logger.warning(\"Problem in communication with SE\")\n\n # expires_date = datetime.strptime(links_db['geni_expires'], RFC3339_FORMAT_STRING)\n expires_date = links_db['geni_expires']\n\n\n for sliver in links_db[\"geni_sliver_urn\"]:\n result.append( \n { \n \"geni_sliver_urn\": sliver,\n \"geni_expires\": expires_date,\n \"geni_allocation_status\": \"geni_unallocated\",\n \"geni_operational_status\" : \"geni_notready\"\n }\n )\n\n # Mark resources as free\n self.SEResources.free_resource_reservation(reservation_ports)\n\n # Remove reservation\n self.SESlices.remove_link_db(urn)\n \n logger.info(\"delete successfully completed: %s\", slice_urn)\n \n return result\n\n # except:\n\n # raise geni_ex.GENIv3GeneralError(\"Delete Failed. Requested resources are not available.\")",
"async def delete_client_permission_async(\n action: int,\n client_id: str,\n resource: str,\n x_additional_headers: Optional[Dict[str, str]] = None,\n **kwargs\n):\n request = DeleteClientPermission.create(\n action=action,\n client_id=client_id,\n resource=resource,\n )\n return await run_request_async(\n request, additional_headers=x_additional_headers, **kwargs\n )",
"def run(self):\n certificate = self.admin_barbican.create_asymmetric()\n self.admin_barbican.orders_delete(certificate.order_ref)",
"def delete(self, key):\n parser = reqparse.RequestParser()\n parser.add_argument('Content-Type', type=str, location='headers',\n choices='application/json', required=True)\n parser.add_argument('X-HP3PAR-WSAPI-SessionKey', type=str,\n location='headers', required=True)\n arg = parser.parse_args()\n\n if key != arg['X-HP3PAR-WSAPI-SessionKey']:\n return response(403)\n\n # Check session key\n if self.check_seskey(key):\n self.sessions.pop(key)\n return response(200)\n\n return response(403)"
] | [
"0.625013",
"0.6244312",
"0.62095284",
"0.6144197",
"0.60847956",
"0.6029214",
"0.598694",
"0.5954224",
"0.5932226",
"0.5910795",
"0.5881732",
"0.58764726",
"0.58708096",
"0.5808192",
"0.58025086",
"0.57821757",
"0.5762548",
"0.5756545",
"0.5754441",
"0.5750061",
"0.57493013",
"0.57189393",
"0.57143116",
"0.570898",
"0.56882036",
"0.567375",
"0.5673312",
"0.56591064",
"0.56468225",
"0.563049"
] | 0.685999 | 0 |
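The two trailing columns of each row can be checked directly against the scores: document_score is the positive's similarity, and document_rank is consistent with counting negatives that score at least as high. In this row 0.685999 beats the best negative, 0.625013, so the rank is 0; the same holds for every complete row in this section. A quick verification sketch using this row's numbers (score list truncated to its head for brevity):

negative_scores = [0.625013, 0.6244312, 0.62095284, 0.6144197]  # head of the list above
document_score = 0.685999

# Rank interpreted as: how many negatives score >= the positive document.
document_rank = sum(score >= document_score for score in negative_scores)
assert document_rank == 0  # matches the 0 recorded in this row
print(document_rank)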
Import Server Certificate to Oneview [Arguments] | def fusion_api_import_server_certificate(self, body, api=None, headers=None):
return self.server_certificate.post(body, api, headers) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def AddClientCertificateFlag(parser, required=False):\n help_text = \"\"\"\\\n x509 PEM-encoded certificate that will be used by the replica to\n authenticate against the database server. Database Migration Service\n encrypts the value when storing it.\n \"\"\"\n parser.add_argument('--client-certificate', help=help_text, required=required)",
"def fusion_api_import_client_certificate(self, body, api=None, headers=None):\n return self.client_certificate.post(body, api, headers)",
"def AddCertificateFlag(parser, required=False):\n help_text = \"\"\"\\\n x509 PEM-encoded certificate that will be used by the replica to\n authenticate against the database server.\n \"\"\"\n parser.add_argument('--certificate', help=help_text, required=required)",
"def _Run(args, holder, ssl_certificate_ref):\n client = holder.client\n\n certificate_type, self_managed, managed = _ParseCertificateArguments(\n client, args)\n\n if ssl_certificates_utils.IsRegionalSslCertificatesRef(ssl_certificate_ref):\n request = client.messages.ComputeRegionSslCertificatesInsertRequest(\n sslCertificate=client.messages.SslCertificate(\n type=certificate_type,\n name=ssl_certificate_ref.Name(),\n selfManaged=self_managed,\n managed=managed,\n description=args.description),\n region=ssl_certificate_ref.region,\n project=ssl_certificate_ref.project)\n else:\n request = client.messages.ComputeSslCertificatesInsertRequest(\n sslCertificate=client.messages.SslCertificate(\n type=certificate_type,\n name=ssl_certificate_ref.Name(),\n selfManaged=self_managed,\n managed=managed,\n description=args.description),\n project=ssl_certificate_ref.project)\n\n if ssl_certificates_utils.IsRegionalSslCertificatesRef(ssl_certificate_ref):\n collection = client.apitools_client.regionSslCertificates\n else:\n collection = client.apitools_client.sslCertificates\n\n return client.MakeRequests([(collection, 'Insert', request)])",
"def req_command(args):\n if args.files:\n die(\"Unexpected positional arguments\")\n\n subject_info = info_from_args(args)\n\n if subject_info.ca:\n msg('Request for CA cert')\n else:\n msg('Request for end-entity cert')\n subject_info.show(msg_show)\n\n # Load private key, create signing request\n key = load_key(args.key, load_password(args.password_file))\n req = create_x509_req(key, subject_info)\n do_output(req_to_pem(req), args, 'req')",
"def add_certificate_arguments(parser):\n group = parser.add_argument_group(\"Certificate management\")\n group.add_argument(\n \"-sn\", \"--serial_number\",\n help=\"Serial number for the certificate\",\n type=int,\n default=1\n )\n group.add_argument(\n \"-d\", \"--duration\",\n help=\"Period of validity for certificate (seconds)\",\n type=int,\n default=60*60*24*(365*100+25)\n )",
"def _ParseCertificateArguments(client, args):\n self_managed = None\n managed = None\n certificate_type = None\n if args.certificate:\n certificate_type = \\\n client.messages.SslCertificate.TypeValueValuesEnum.SELF_MANAGED\n certificate = files.ReadFileContents(args.certificate)\n private_key = files.ReadFileContents(args.private_key)\n self_managed = client.messages.SslCertificateSelfManagedSslCertificate(\n certificate=certificate, privateKey=private_key)\n if args.domains:\n certificate_type = \\\n client.messages.SslCertificate.TypeValueValuesEnum.MANAGED\n managed = client.messages.SslCertificateManagedSslCertificate(\n domains=args.domains)\n return certificate_type, self_managed, managed",
"def Certificate(self) -> _n_8_t_0:",
"def Certificate(self) -> _n_8_t_0:",
"def main(cli_args):\n store_obj = cert_human_py3.CertChainStore.from_socket(\n host=cli_args.host, port=cli_args.port\n )\n\n print(store_obj.dump_json)",
"def CreateRequests(self, args):\n\n ssl_certificate_ref = self.SSL_CERTIFICATE_ARG.ResolveAsResource(\n args, self.resources)\n certificate = file_utils.ReadFile(args.certificate, 'certificate')\n private_key = file_utils.ReadFile(args.private_key, 'private key')\n\n request = self.messages.ComputeSslCertificatesInsertRequest(\n sslCertificate=self.messages.SslCertificate(\n name=ssl_certificate_ref.Name(),\n certificate=certificate,\n privateKey=private_key,\n description=args.description),\n project=self.project)\n\n return [request]",
"def import_natpat_cmd(args):\n\n if len(args) != 1:\n error(\"Usage: -import_natpat NOM_FICHIER.json\")\n\n elif args[0]:\n filename = args[0]\n\n with open(filename, \"r\", encoding=\"utf-8\") as file:\n content = json.loads(file.read())\n\n for rule in content[\"status\"].values():\n r = requete('Firewall:setPortForwarding',\n {\"id\": rule[\"Id\"],\n \"description\": rule[\"Description\"],\n \"persistent\": True,\n \"enable\": rule[\"Enable\"],\n \"protocol\": rule[\"Protocol\"],\n \"destinationIPAddress\": rule[\"DestinationIPAddress\"],\n \"internalPort\": rule[\"InternalPort\"],\n \"externalPort\": rule[\"ExternalPort\"],\n \"origin\": rule[\"Origin\"],\n \"sourceInterface\": rule[\"SourceInterface\"],\n \"sourcePrefix\": rule[\"SourcePrefix\"]})",
"def get_certificate_command(client: KeyVaultClient, args: dict[str, Any]) -> CommandResults:\n vault_name = args.get('vault_name', '')\n certificate_name = args.get('certificate_name', '')\n certificate_version = args.get('certificate_version', '')\n response = client.get_certificate_request(\n vault_name, certificate_name, certificate_version)\n\n outputs = copy.deepcopy(response)\n outputs['attributes'] = convert_time_attributes_to_iso(outputs['attributes'])\n outputs['policy']['attributes'] = convert_time_attributes_to_iso(outputs['policy']['attributes'])\n\n readable_response = {'certificate_id': response.get(\n 'id'), **convert_attributes_to_readable(response.get('attributes', {}).copy())}\n outputs[VAULT_NAME_CONTEXT_FIELD] = vault_name\n\n readable_output = tableToMarkdown(f'{certificate_name} Information',\n readable_response,\n ['certificate_id', 'enabled', 'create_time', 'update_time', 'expiry_time'],\n removeNull=True,\n headerTransform=string_to_table_header)\n command_results = CommandResults(\n outputs_prefix='AzureKeyVault.Certificate',\n outputs_key_field='id',\n outputs=outputs,\n raw_response=response,\n readable_output=readable_output,\n ignore_auto_extract=True\n )\n\n return command_results",
"def Run(self, args):\n holder = base_classes.ComputeApiHolder(self.ReleaseTrack())\n client = holder.client\n\n ssl_certificate_ref = self.SSL_CERTIFICATE_ARG.ResolveAsResource(\n args, holder.resources, default_scope=compute_scope.ScopeEnum.GLOBAL)\n\n certificate = files.ReadFileContents(args.certificate)\n private_key = files.ReadFileContents(args.private_key)\n\n if ssl_certificates_utils.IsRegionalSslCertificatesRef(ssl_certificate_ref):\n request = client.messages.ComputeRegionSslCertificatesInsertRequest(\n sslCertificate=client.messages.SslCertificate(\n name=ssl_certificate_ref.Name(),\n certificate=certificate,\n privateKey=private_key,\n description=args.description),\n region=ssl_certificate_ref.region,\n project=ssl_certificate_ref.project)\n collection = client.apitools_client.regionSslCertificates\n else:\n request = client.messages.ComputeSslCertificatesInsertRequest(\n sslCertificate=client.messages.SslCertificate(\n name=ssl_certificate_ref.Name(),\n certificate=certificate,\n privateKey=private_key,\n description=args.description),\n project=ssl_certificate_ref.project)\n collection = client.apitools_client.sslCertificates\n\n return client.MakeRequests([(collection, 'Insert', request)])",
"def req_handler(args):\n key = _get_key(args)\n subject = get_subject_arguments()\n req = create_certificate_request(key, subject=subject, file_name=args.req_out)\n if not args.req_out:\n print(print_certificate_request(req))\n return req",
"def fusion_api_import_appliance_certificate(self, body, api=None, headers=None, param=''):\n return self.wsc.put(body, api=api, headers=headers, param=param)",
"def install_esgf_client(certfile, keyfile):\n\n # Create HTTPSHandler \n ssl_context = M2Crypto.SSL.Context()\n ssl_context.load_cert_chain(certfile, keyfile)\n opener = M2Crypto.m2urllib2.build_opener(ssl_context, \n urllib2.HTTPCookieProcessor,\n urllib2.ProxyHandler)\n\n opener.addheaders = [('User-agent', pydap.lib.USER_AGENT)]\n urllib2.install_opener(opener)\n\n def new_request(url):\n log.debug('Opening URL %s' % url)\n\n r = urllib2.urlopen(url.rstrip('?&'))\n\n resp = r.headers.dict\n resp['status'] = str(r.code)\n data = r.read()\n\n # When an error is returned, we parse the error message from the\n # server and return it in a ``ClientError`` exception.\n if resp.get(\"content-description\") == \"dods_error\":\n m = re.search('code = (?P<code>\\d+);\\s*message = \"(?P<msg>.*)\"',\n data, re.DOTALL | re.MULTILINE)\n msg = 'Server error %(code)s: \"%(msg)s\"' % m.groupdict()\n raise ClientError(msg)\n\n return resp, data\n\n from pydap.util import http\n http.request = new_request",
"def __init__(__self__,\n resource_name: str,\n args: ServerCertificateArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...",
"def worker(args):\n\n # Step 1. Create the NDSE view request object\n # Set the url where you want the recipient to go once they are done\n # with the NDSE. It is usually the case that the\n # user will never \"finish\" with the NDSE.\n # Assume that control will not be passed back to your app.\n view_request = ConsoleViewRequest(return_url=args[\"ds_return_url\"])\n if args[\"starting_view\"] == \"envelope\" and args[\"envelope_id\"]:\n view_request.envelope_id = args[\"envelope_id\"]\n\n # Step 2. Get the console view url\n # Exceptions will be caught by the calling function\n api_client = create_api_client(base_path=args[\"base_path\"], access_token=args[\"access_token\"])\n\n envelope_api = EnvelopesApi(api_client)\n results = envelope_api.create_console_view(account_id=args[\"account_id\"], console_view_request=view_request)\n url = results.url\n return {\"redirect_url\": url}",
"def cmd(\n ctx,\n url,\n prompt,\n include_paths,\n include_urls,\n include_only_ca,\n export_file,\n update_env,\n):\n client = ctx.obj.create_client(url=url, key=None, secret=None)\n export_file = pathify_export_file(client=client, export_file=export_file)\n\n with ctx.obj.exc_wrap(wraperror=ctx.obj.wraperror):\n includes = []\n\n for url in include_urls:\n includes += from_url(url=url, split=False, ca_only=include_only_ca)\n\n for path in include_paths:\n includes += from_path(path=path, split=False, ca_only=include_only_ca)\n\n chain = client.HTTP.get_cert_chain()\n leaf_cert, intm_certs = split_leaf(chain=chain)\n prompt = confirm_cert(prompt=prompt, cert=leaf_cert)\n handle_export(\n data=chain + includes,\n export_file=export_file,\n export_backup=True,\n export_format=\"pem\",\n )\n handle_update_env(update_env=update_env, export_file=export_file)\n\n ctx.exit(0)",
"def info_from_args(args):\n return CertInfo(\n subject=parse_dn(args.subject),\n usage=parse_list(args.usage),\n alt_names=parse_list(args.san),\n ocsp_nocheck=args.ocsp_nocheck,\n ocsp_must_staple=args.ocsp_must_staple,\n ocsp_must_staple_v2=args.ocsp_must_staple_v2,\n ocsp_urls=parse_list(args.ocsp_urls),\n crl_urls=parse_list(args.crl_urls),\n issuer_urls=parse_list(args.issuer_urls),\n permit_subtrees=parse_list(args.permit_subtrees),\n exclude_subtrees=parse_list(args.exclude_subtrees),\n ca=args.CA,\n path_length=args.path_length)",
"def view_certificate(self, request, queryset):\n if len(queryset) > 1:\n self.message_user(\n request,\n 'You can only choose one certificate.',\n level=messages.ERROR)\n return None\n response = HttpResponse(content_type=\"text/plain\")\n cert = queryset.first()\n response.write(crypto.dump_certificate(\n crypto.FILETYPE_TEXT, cert.get_certificate()))\n return response",
"def show_command(args):\n for fn in args.files:\n ext = os.path.splitext(fn)[1].lower()\n if ext == '.csr':\n cmd = ['openssl', 'req', '-in', fn, '-text']\n elif ext == '.crt':\n cmd = ['openssl', 'x509', '-in', fn, '-text']\n else:\n die(\"Unsupported file: %s\", fn)\n subprocess.check_call(cmd)",
"def _store_certificate(fullchain, key, domain=None, tag_prefix=None,\n region_name=None, acm_client=None, dry_run=False):\n #pylint:disable=unused-argument\n result = _check_certificate(fullchain, key, domain=domain)\n if not domain:\n domain = result['ssl_certificate']['common_name']\n cert, chain = _split_fullchain(fullchain)\n if not acm_client:\n acm_client = boto3.client('acm', region_name=region_name)\n kwargs = {}\n resp = acm_client.list_certificates()\n for acm_cert in resp['CertificateSummaryList']:\n if acm_cert['DomainName'] == domain:\n LOGGER.info(\"A certificate for domain %s has already been\"\\\n \" imported as %s - replacing\",\n domain, acm_cert['CertificateArn'])\n kwargs['CertificateArn'] = acm_cert['CertificateArn']\n break\n if not dry_run:\n resp = acm_client.import_certificate(\n Certificate=cert.encode('ascii'),\n PrivateKey=key.encode('ascii'),\n CertificateChain=chain.encode('ascii'),\n **kwargs)\n LOGGER.info(\"%s (re-)imported TLS certificate %s as %s\",\n tag_prefix, result['ssl_certificate'], resp['CertificateArn'])\n result.update({'CertificateArn': resp['CertificateArn']})\n return result",
"def test_load_client_ca(self, context, ca_file):\n context.load_client_ca(ca_file)",
"def load_cert_chain(self, certfile, keyfile: Optional[Any] = ...):\n ...",
"def create_server_certs_enc():\n global server_keystore, config\n\n same_enc_sign_cert = config[\"config\"][\"same_enc_sign_cert\"]\n if same_enc_sign_cert:\n dn = \"/CN=server certificate RSA\"\n else:\n dn = \"/CN=server certificate encryption RSA\"\n key_pair_rsa = create_csr(dn)\n server_keystore[\"key\"] = key_pair_rsa[\"key\"]\n san = [f'URI.1 = {uuid.uuid4().urn}']\n server_keystore[\"crt\"] = sign_csr(key_pair_rsa[\"pub\"], dn, san)",
"def sign_command(args):\n if args.files:\n die(\"Unexpected positional arguments\")\n\n # Load certificate request\n if not args.request:\n die(\"Need --request\")\n subject_csr = load_req(args.request)\n\n reset_info = None\n if args.reset:\n reset_info = info_from_args(args)\n\n # Load CA info\n if not args.ca_info:\n die(\"Need --ca-info\")\n if args.ca_info.endswith('.csr'):\n issuer_obj = load_req(args.ca_info)\n else:\n issuer_obj = load_cert(args.ca_info)\n\n # Load CA private key\n issuer_key = load_key(args.ca_key, load_password(args.password_file))\n if not same_pubkey(issuer_key, issuer_obj):\n die(\"--ca-private-key does not match --ca-info data\")\n\n # Certificate generation\n cert = do_sign(subject_csr, issuer_obj, issuer_key, args.days, args.path_length, args.request, reset_info=reset_info)\n\n # Write certificate\n do_output(cert_to_pem(cert), args, 'x509')",
"def create_ssl_cert_request ( ssl_hostnames ) :\n first_hostname = ssl_hostnames[ 0 ]\n csr_filename = get_ssl_csr_filename( first_hostname )\n key_filename = get_ssl_key_filename( first_hostname )\n openssl_cnf = \"\"\"\n[req]\ndistinguished_name = req_distinguished_name\nreq_extensions = san_ext\n\n[req_distinguished_name]\ncountryName_default = US\nstateOrProvinceName_default = New York\nlocalityName_default = New York\norganizationalUnitName_default = Home Box Office, Inc\ncommonName_default = \"\"\" + first_hostname + \"\"\"\n\n[san_ext]\nbasicConstraints = CA:FALSE\nkeyUsage = nonRepudiation, digitalSignature, keyEncipherment\nsubjectAltName = @sans\n\n[sans]\n\"\"\"\n counter = 0\n for hostname in ssl_hostnames :\n counter += 1\n openssl_cnf += 'DNS.' + str( counter ) + ' = ' + hostname + '\\n'\n\n with open( first_hostname, 'w' ) as f :\n f.write( openssl_cnf )\n cmd = 'openssl req -new -newkey rsa:2048 -nodes -out ' + csr_filename + ' -keyout ' + key_filename\n cmd += ' -config ' + first_hostname + ' -subj \"/C=US/ST=New York/L=New York/O=Home Box Office Inc/CN=' + first_hostname + '\"'\n keygen = subprocess.call( cmd, shell = True )\n os.remove( first_hostname )\n if keygen != 0 :\n print \"Generation of SSL request failed!\"\n return None\n\n return { 'csr-filename' : csr_filename, 'key-filename' : key_filename }",
"def cmd_cert_clone(hostname, port, keyfile, certfile, copy_extensions, expired, verbose):\n\n context = ssl.create_default_context()\n\n with socket.create_connection((hostname, port), timeout=3) as sock:\n with context.wrap_socket(sock, server_hostname=hostname) as ssock:\n original = ssock.getpeercert(binary_form=True)\n\n key, cert = certclone(original, copy_extensions=copy_extensions, expired=expired)\n\n keyfile.write(key)\n certfile.write(cert)"
] | [
"0.5506298",
"0.53837276",
"0.5316408",
"0.51833266",
"0.5115405",
"0.51070744",
"0.50870794",
"0.5043834",
"0.5043834",
"0.49950194",
"0.49598062",
"0.49210295",
"0.49089193",
"0.48697874",
"0.4866097",
"0.48488688",
"0.48398224",
"0.48291233",
"0.48273656",
"0.48074907",
"0.47773117",
"0.4765856",
"0.47259778",
"0.4720674",
"0.4720423",
"0.47145545",
"0.46895486",
"0.4688076",
"0.4618265",
"0.46159193"
] | 0.5582675 | 0 |
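Every metadata block in this section declares a single objective: a (query, document, negatives) triplet. A minimal sketch of such an objective with cosine similarity and a hinge margin; the encoder vectors, the margin value, and the exact loss form are illustrative assumptions, not recovered from the dump.

import math

def cosine(u, v):
    dot = sum(a * b for a, b in zip(u, v))
    norm = math.sqrt(sum(a * a for a in u)) * math.sqrt(sum(b * b for b in v))
    return dot / norm if norm else 0.0

def triplet_margin_loss(query_vec, doc_vec, negative_vecs, margin=0.2):
    # Hinge loss: push the positive's similarity above each negative's by `margin`.
    pos = cosine(query_vec, doc_vec)
    penalties = [max(0.0, margin - pos + cosine(query_vec, neg))
                 for neg in negative_vecs]
    return sum(penalties) / len(penalties)

# Toy vectors standing in for encoder outputs.
print(triplet_margin_loss([1.0, 0.0], [0.9, 0.1], [[0.1, 0.9], [0.0, 1.0]]))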
Update Server Certificate in Oneview [Arguments] | def fusion_api_update_server_certificate(self, aliasname, body, api=None, headers=None):
return self.server_certificate.put(aliasname, body, api, headers) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def edit_certificate(self, certificate):\r\n return self.ssl.editObject(certificate, id=certificate['id'])",
"def renew_certificate(self, kwargs):\n return self.__query(\"certificateRenew\", kwargs)",
"def fusion_api_update_client_certificate(self, aliasname, body, api=None, headers=None):\n return self.client_certificate.put(aliasname, body, api, headers)",
"def test_update_certificate(self):\n self.client.post(\n '/api/v1/certificates', data=json.dumps(new_certificate),\n content_type='application/json',\n headers=self.get_registrar_token())\n response = self.client.put(\n '/api/v1/certificates/1', data=json.dumps(update_certificate),\n content_type='application/json',\n headers=self.get_registrar_token())\n result = json.loads(response.data.decode())\n self.assertEqual(result['message'],\n 'Certificate updated successfully')\n assert response.status_code == 200",
"def update_certificate(request):\r\n\r\n status = CertificateStatuses\r\n if request.method == \"POST\":\r\n\r\n xqueue_body = json.loads(request.POST.get('xqueue_body'))\r\n xqueue_header = json.loads(request.POST.get('xqueue_header'))\r\n\r\n try:\r\n course_key = SlashSeparatedCourseKey.from_deprecated_string(xqueue_body['course_id'])\r\n\r\n cert = GeneratedCertificate.objects.get(\r\n user__username=xqueue_body['username'],\r\n course_id=course_key,\r\n key=xqueue_header['lms_key'])\r\n\r\n except GeneratedCertificate.DoesNotExist:\r\n logger.critical('Unable to lookup certificate\\n'\r\n 'xqueue_body: {0}\\n'\r\n 'xqueue_header: {1}'.format(\r\n xqueue_body, xqueue_header))\r\n\r\n return HttpResponse(json.dumps({\r\n 'return_code': 1,\r\n 'content': 'unable to lookup key'}),\r\n mimetype='application/json')\r\n\r\n if 'error' in xqueue_body:\r\n cert.status = status.error\r\n if 'error_reason' in xqueue_body:\r\n\r\n # Hopefully we will record a meaningful error\r\n # here if something bad happened during the\r\n # certificate generation process\r\n #\r\n # example:\r\n # (aamorm BerkeleyX/CS169.1x/2012_Fall)\r\n # <class 'simples3.bucket.S3Error'>:\r\n # HTTP error (reason=error(32, 'Broken pipe'), filename=None) :\r\n # certificate_agent.py:175\r\n\r\n\r\n cert.error_reason = xqueue_body['error_reason']\r\n else:\r\n if cert.status in [status.generating, status.regenerating]:\r\n cert.download_uuid = xqueue_body['download_uuid']\r\n cert.verify_uuid = xqueue_body['verify_uuid']\r\n cert.download_url = xqueue_body['url']\r\n cert.status = status.downloadable\r\n elif cert.status in [status.deleting]:\r\n cert.status = status.deleted\r\n else:\r\n logger.critical('Invalid state for cert update: {0}'.format(\r\n cert.status))\r\n return HttpResponse(json.dumps({\r\n 'return_code': 1,\r\n 'content': 'invalid cert status'}),\r\n mimetype='application/json')\r\n\r\n dog_stats_api.increment(XQUEUE_METRIC_NAME, tags=[\r\n u'action:update_certificate',\r\n u'course_id:{}'.format(cert.course_id)\r\n ])\r\n\r\n cert.save()\r\n return HttpResponse(json.dumps({'return_code': 0}),\r\n mimetype='application/json')",
"def update_server_cert(self, cert_name, new_cert_name=None,\r\n new_path=None):\r\n params = {'ServerCertificateName' : cert_name}\r\n if new_cert_name:\r\n params['NewServerCertificateName'] = new_cert_name\r\n if new_path:\r\n params['NewPath'] = new_path\r\n return self.get_response('UpdateServerCertificate', params)",
"def Certificate(self) -> _n_8_t_0:",
"def Certificate(self) -> _n_8_t_0:",
"def test_update_certificate_keys(self):\n self.client.post(\n '/api/v1/certificates', data=json.dumps(new_certificate),\n content_type='application/json',\n headers=self.get_registrar_token())\n response = self.client.put(\n '/api/v1/certificates/1', data=json.dumps(update_certificate_keys),\n content_type='application/json',\n headers=self.get_registrar_token())\n result = json.loads(response.data.decode())\n self.assertEqual(result['message'],\n 'Invalid certificate_name key')\n assert response.status_code == 400",
"def update_endpoint_in_sipserver(self, endpoint: str, password: str) -> None:",
"def update_signing_cert(self, cert_id, status, user_name=None):\r\n params = {'CertificateId' : cert_id,\r\n 'Status' : status}\r\n if user_name:\r\n params['UserName'] = user_name\r\n return self.get_response('UpdateSigningCertificate', params)",
"def AddClientCertificateFlag(parser, required=False):\n help_text = \"\"\"\\\n x509 PEM-encoded certificate that will be used by the replica to\n authenticate against the database server. Database Migration Service\n encrypts the value when storing it.\n \"\"\"\n parser.add_argument('--client-certificate', help=help_text, required=required)",
"def configureSSL(domainName,dry=False):\n \n #enable ssl mod\n execute(subprocess.call,[\"a2enmod\",\"ssl\"],dry=dry)\n restartApache(dry=dry)\n \n #create input string for openssl command\n inputStr='CA\\nNova Scotia\\nHalifax\\nCompute Canada\\nACENET\\n'+domainName+'\\[email protected]\\n'\n \n #create ssl cert\n #Note that dry is fixed to be False, creating the cert doesn't really cause a problem except \n #it might overwrite an existing cert, and if it isn't actually executed the following steps will not be able to execute\n p=execute(subprocess.Popen,[\"openssl\",\"req\",\"-x509\",\"-nodes\"\n ,\"-days\",\"3650\"\n ,\"-newkey\",\"rsa:2048\"\n ,\"-keyout\",\"/etc/ssl/private/server.key\"\n ,\"-out\",\"/etc/ssl/certs/server.crt\"]\n ,stdout=subprocess.PIPE,stdin=subprocess.PIPE,stderr=subprocess.STDOUT,dry=dry)\n \n #have to handle dry runs in a special way as this command (dry or not) \n #depends on p not being None\n if not dry:\n output=execute(p.communicate,input=inputStr.encode('utf-8'),dry=dry)[0]\n else:\n print(\"p.communicate(input=\"+inputStr+\")\")\n \n #Set correct ownership and permission of key\n execute(subprocess.call,[\"sudo\",\"chown\",\"root:ssl-cert\",\"/etc/ssl/private/server.key\"],dry=dry)\n execute(subprocess.call,[\"sudo\",\"chmod\",\"640\",\"/etc/ssl/private/server.key\"],dry=dry)\n \n #comment out any previous settings\n execute(commentOutLineMatching,\".*SSLCertificateFile.*\",\"/etc/apache2/sites-available/default-ssl.conf\",dry=dry)#not matching\n execute(commentOutLineMatching,\".*SSLCertificateKeyFile.*\",\"/etc/apache2/sites-available/default-ssl.conf\",dry=dry)#not matching\n execute(commentOutLineMatching,\".*SSLCertificateChainFile.*\",\"/etc/apache2/sites-available/default-ssl.conf\",dry=dry)#not matching\n \n #add settings before for improved security </VirtualHost>\n execute(replaceStrInFileRe,\"</VirtualHost>\"\n ,\"\\tSSLCertificateFile /etc/ssl/certs/server.crt\\n\"\n +\"\\t\\tSSLCertificateKeyFile /etc/ssl/private/server.key\\n\"\n +\"\\t\\tSSLCertificateChainFile /etc/ssl/certs/server.crt\\n\"\n +\"\\t\\tServerName \"+domainName+\"\\n\"\n +\"\\t\\tServerAlias www.\"+domainName+\"\\n\"\n +\"\\t\\tSSLProtocol all -SSLv2 -SSLv3\\n\"\n +\"\\t\\tSSLCipherSuite HIGH:MEDIUM:!aNULL:!MD5:!SEED:!IDEA:!RC4\\n\"\n +\"\\t\\tSSLHonorCipherOrder on\\n\"\n +\"\\t</VirtualHost>\",\"/etc/apache2/sites-available/default-ssl.conf\",dry=dry)\n \n #add redirect to https\n execute(replaceStrInFileRe,\"</VirtualHost>\"\n ,\"\\tRedirect permanent / https://\"+domainName+\"/\\n</VirtualHost>\\n\"\n ,\"/etc/apache2/sites-available/000-default.conf\",dry=dry)\n \n #enable ssl on our virtual host\n execute(subprocess.call,[\"a2ensite\",\"default-ssl.conf\"])\n execute(subprocess.call,[\"service\",\"apache2\",\"restart\"])",
"def _Run(args, holder, ssl_certificate_ref):\n client = holder.client\n\n certificate_type, self_managed, managed = _ParseCertificateArguments(\n client, args)\n\n if ssl_certificates_utils.IsRegionalSslCertificatesRef(ssl_certificate_ref):\n request = client.messages.ComputeRegionSslCertificatesInsertRequest(\n sslCertificate=client.messages.SslCertificate(\n type=certificate_type,\n name=ssl_certificate_ref.Name(),\n selfManaged=self_managed,\n managed=managed,\n description=args.description),\n region=ssl_certificate_ref.region,\n project=ssl_certificate_ref.project)\n else:\n request = client.messages.ComputeSslCertificatesInsertRequest(\n sslCertificate=client.messages.SslCertificate(\n type=certificate_type,\n name=ssl_certificate_ref.Name(),\n selfManaged=self_managed,\n managed=managed,\n description=args.description),\n project=ssl_certificate_ref.project)\n\n if ssl_certificates_utils.IsRegionalSslCertificatesRef(ssl_certificate_ref):\n collection = client.apitools_client.regionSslCertificates\n else:\n collection = client.apitools_client.sslCertificates\n\n return client.MakeRequests([(collection, 'Insert', request)])",
"def AddCertificateFlag(parser, required=False):\n help_text = \"\"\"\\\n x509 PEM-encoded certificate that will be used by the replica to\n authenticate against the database server.\n \"\"\"\n parser.add_argument('--certificate', help=help_text, required=required)",
"def update_cert(c, stack_name, domain_name, profile, create=False):\n action = 'create' if create else 'update'\n\n with chdir(WORKING_DIR):\n aws('cloudformation', f'{action}-stack',\n '--stack-name', f'{stack_name}-cert',\n '--template-body', f'file://cert.yaml',\n '--parameters',\n f'ParameterKey=DomainName,ParameterValue={domain_name}',\n f'--profile', f'{profile}')\n # Cert also needs adding to us-east-1 to be used by CloudFront\n aws('cloudformation', f'{action}-stack',\n '--stack-name', f'{stack_name}-cert',\n '--template-body', f'file://cert.yaml',\n '--parameters',\n f'ParameterKey=DomainName,ParameterValue={domain_name}',\n f'--profile', f'{profile}',\n '--region', 'us-east-1')",
"def replace_certificate(self):\n return self.__query(\"certificateReplace\", data)",
"def _update_certificate_context(context, course, course_overview, user_certificate, platform_name):\n # Populate dynamic output values using the course/certificate data loaded above\n certificate_type = context.get('certificate_type')\n\n # Override the defaults with any mode-specific static values\n context['certificate_id_number'] = user_certificate.verify_uuid\n context['certificate_verify_url'] = \"{prefix}{uuid}{suffix}\".format(\n prefix=context.get('certificate_verify_url_prefix'),\n uuid=user_certificate.verify_uuid,\n suffix=context.get('certificate_verify_url_suffix')\n )\n\n # We prefer a CourseOverview for this function because it validates and corrects certificate_available_date\n # and certificates_display_behavior values. However, not all certificates are guaranteed to have a CourseOverview\n # associated with them, so we fall back on the course in that case. This shouldn't cause a problem because courses\n # that are missing CourseOverviews are generally old courses, and thus their display values are no longer relevant\n if course_overview:\n date = display_date_for_certificate(course_overview, user_certificate)\n else:\n date = display_date_for_certificate(course, user_certificate)\n # Translators: The format of the date includes the full name of the month\n context['certificate_date_issued'] = strftime_localized(date, settings.CERTIFICATE_DATE_FORMAT)\n\n # Translators: This text represents the verification of the certificate\n context['document_meta_description'] = _('This is a valid {platform_name} certificate for {user_name}, '\n 'who participated in {partner_short_name} {course_number}').format(\n platform_name=platform_name,\n user_name=context['accomplishment_copy_name'],\n partner_short_name=context['organization_short_name'],\n course_number=context['course_number']\n )\n\n # Translators: This text is bound to the HTML 'title' element of the page and appears in the browser title bar\n context['document_title'] = _(\"{partner_short_name} {course_number} Certificate | {platform_name}\").format(\n partner_short_name=context['organization_short_name'],\n course_number=context['course_number'],\n platform_name=platform_name\n )\n\n # Translators: This text fragment appears after the student's name (displayed in a large font) on the certificate\n # screen. The text describes the accomplishment represented by the certificate information displayed to the user\n context['accomplishment_copy_description_full'] = _(\"successfully completed, received a passing grade, and was \"\n \"awarded this {platform_name} {certificate_type} \"\n \"Certificate of Completion in \").format(\n platform_name=platform_name,\n certificate_type=context.get(\"certificate_type\"))\n\n certificate_type_description = get_certificate_description(user_certificate.mode, certificate_type, platform_name)\n if certificate_type_description:\n context['certificate_type_description'] = certificate_type_description\n\n # Translators: This text describes the purpose (and therefore, value) of a course certificate\n context['certificate_info_description'] = _(\"{platform_name} acknowledges achievements through \"\n \"certificates, which are awarded for course activities \"\n \"that {platform_name} students complete.\").format(\n platform_name=platform_name,\n )",
"def fusion_api_upload_certificate_info(self, body, uri=None, api=None, param='', headers=None):\n param = '/certificates/https/'\n return self.ic.put(body=body, uri=uri, api=api, headers=headers, param=param)",
"def ModifyCertificate(self, request):\n try:\n params = request._serialize()\n headers = request.headers\n body = self.call(\"ModifyCertificate\", params, headers=headers)\n response = json.loads(body)\n model = models.ModifyCertificateResponse()\n model._deserialize(response[\"Response\"])\n return model\n except Exception as e:\n if isinstance(e, TencentCloudSDKException):\n raise\n else:\n raise TencentCloudSDKException(type(e).__name__, str(e))",
"def fusion_api_import_server_certificate(self, body, api=None, headers=None):\n return self.server_certificate.post(body, api, headers)",
"def add_certificate_arguments(parser):\n group = parser.add_argument_group(\"Certificate management\")\n group.add_argument(\n \"-sn\", \"--serial_number\",\n help=\"Serial number for the certificate\",\n type=int,\n default=1\n )\n group.add_argument(\n \"-d\", \"--duration\",\n help=\"Period of validity for certificate (seconds)\",\n type=int,\n default=60*60*24*(365*100+25)\n )",
"def set_pinserver(self, urlA=None, urlB=None, pubkey=None, cert=None):\n params = {}\n if urlA is not None or urlB is not None:\n params['urlA'] = urlA\n params['urlB'] = urlB\n if pubkey is not None:\n params['pubkey'] = pubkey\n if cert is not None:\n params['certificate'] = cert\n return self._jadeRpc('update_pinserver', params)",
"def __init__(__self__,\n resource_name: str,\n args: ServerCertificateArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...",
"async def update_certificate_properties(\n self, certificate_name: str, version: Optional[str] = None, **kwargs\n ) -> KeyVaultCertificate:\n\n enabled = kwargs.pop(\"enabled\", None)\n\n if enabled is not None:\n attributes = self._models.CertificateAttributes(enabled=enabled)\n else:\n attributes = None\n\n parameters = self._models.CertificateUpdateParameters(\n certificate_attributes=attributes, tags=kwargs.pop(\"tags\", None)\n )\n\n bundle = await self._client.update_certificate(\n vault_base_url=self.vault_url,\n certificate_name=certificate_name,\n certificate_version=version or \"\",\n parameters=parameters,\n **kwargs\n )\n return KeyVaultCertificate._from_certificate_bundle(certificate_bundle=bundle)",
"def assign(id, type, appid, specialid):\n try:\n client().certificates.assign(id, type, appid, specialid)\n logger.info(\n 'ctl:cert:assign', 'Assigned {0} to {0}'.format(id, appid)\n )\n except Exception as e:\n raise CLIException(str(e))",
"def create_server_certs_sign():\n global server_keystore\n\n dn_sign = \"/CN=server certificate sign RSA-PSS\"\n key_pair_rsa_sign = create_csr_pss(dn_sign)\n server_keystore[\"key-sign\"] = key_pair_rsa_sign[\"key\"]\n san = [f'URI.1 = {uuid.uuid4().urn}']\n server_keystore[\"crt-sign\"] = sign_csr(key_pair_rsa_sign[\"pub\"], dn_sign, san)",
"def org_apache_felix_https_clientcertificate(self, org_apache_felix_https_clientcertificate: ConfigNodePropertyDropDown):\n\n self._org_apache_felix_https_clientcertificate = org_apache_felix_https_clientcertificate",
"def get_certificate_command(client: KeyVaultClient, args: dict[str, Any]) -> CommandResults:\n vault_name = args.get('vault_name', '')\n certificate_name = args.get('certificate_name', '')\n certificate_version = args.get('certificate_version', '')\n response = client.get_certificate_request(\n vault_name, certificate_name, certificate_version)\n\n outputs = copy.deepcopy(response)\n outputs['attributes'] = convert_time_attributes_to_iso(outputs['attributes'])\n outputs['policy']['attributes'] = convert_time_attributes_to_iso(outputs['policy']['attributes'])\n\n readable_response = {'certificate_id': response.get(\n 'id'), **convert_attributes_to_readable(response.get('attributes', {}).copy())}\n outputs[VAULT_NAME_CONTEXT_FIELD] = vault_name\n\n readable_output = tableToMarkdown(f'{certificate_name} Information',\n readable_response,\n ['certificate_id', 'enabled', 'create_time', 'update_time', 'expiry_time'],\n removeNull=True,\n headerTransform=string_to_table_header)\n command_results = CommandResults(\n outputs_prefix='AzureKeyVault.Certificate',\n outputs_key_field='id',\n outputs=outputs,\n raw_response=response,\n readable_output=readable_output,\n ignore_auto_extract=True\n )\n\n return command_results",
"def create_server_certs_enc():\n global server_keystore, config\n\n same_enc_sign_cert = config[\"config\"][\"same_enc_sign_cert\"]\n if same_enc_sign_cert:\n dn = \"/CN=server certificate RSA\"\n else:\n dn = \"/CN=server certificate encryption RSA\"\n key_pair_rsa = create_csr(dn)\n server_keystore[\"key\"] = key_pair_rsa[\"key\"]\n san = [f'URI.1 = {uuid.uuid4().urn}']\n server_keystore[\"crt\"] = sign_csr(key_pair_rsa[\"pub\"], dn, san)"
] | [
"0.5565858",
"0.55483586",
"0.5518446",
"0.5513929",
"0.54373205",
"0.542909",
"0.5301142",
"0.5301142",
"0.52330923",
"0.5218268",
"0.5203179",
"0.5186676",
"0.51695204",
"0.5164349",
"0.5158582",
"0.5125818",
"0.5072391",
"0.5009334",
"0.49991253",
"0.49973583",
"0.49760723",
"0.49425238",
"0.4925484",
"0.49130732",
"0.49036616",
"0.48863846",
"0.4870106",
"0.4868062",
"0.48677984",
"0.48523957"
] | 0.6071537 | 0 |
Delete Server Certificate in Oneview [Arguments] | def fusion_api_delete_server_certificate(self, aliasname, api=None, headers=None):
return self.server_certificate.delete(aliasname, api, headers) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def delete_server_cert(self, cert_name):\r\n params = {'ServerCertificateName' : cert_name}\r\n return self.get_response('DeleteServerCertificate', params)",
"def delete_server(ServerName=None):\n pass",
"def fusion_api_delete_client_certificate(self, aliasname, api=None, headers=None):\n return self.client_certificate.delete(aliasname, api, headers)",
"def test_delete_certificate(self):\n self.client.post(\n '/api/v1/certificates', data=json.dumps(new_certificate),\n content_type='application/json',\n headers=self.get_registrar_token())\n response = self.client.delete(\n '/api/v1/certificates/1', content_type='application/json',\n headers=self.get_registrar_token())\n result = json.loads(response.data.decode())\n self.assertEqual(result['message'],\n 'Certificate deleted successfully')\n assert response.status_code == 200",
"def DeleteCertificate(self, request):\n try:\n params = request._serialize()\n headers = request.headers\n body = self.call(\"DeleteCertificate\", params, headers=headers)\n response = json.loads(body)\n model = models.DeleteCertificateResponse()\n model._deserialize(response[\"Response\"])\n return model\n except Exception as e:\n if isinstance(e, TencentCloudSDKException):\n raise\n else:\n raise TencentCloudSDKException(type(e).__name__, str(e))",
"def test_004_delete(self):\n ret = svcmgr.main(argv=[\"delete\", \"-s\", SVCNAME, \"--local\"])\n assert ret == 0",
"def delete(self, oid):\n path = '/servers/%s' % oid\n res = self.client.call(path, 'DELETE', data='', \n token=self.manager.identity.token)\n self.logger.debug('Delete openstack server: %s' % truncate(res))\n return res[0]",
"def revoke_certificate(self):\n return self.__query(\"certificateRevoke\", kwargs)",
"def run(self):\n certificate = self.admin_barbican.create_certificate()\n self.admin_barbican.orders_delete(certificate.order_ref)",
"def delete(device):\n delete_subject(device)\n return redirect_back('index')",
"def delete_key_command(client: KeyVaultClient, args: dict[str, Any]) -> CommandResults:\n vault_name = args['vault_name']\n key_name = args['key_name']\n response = client.delete_key_request(vault_name, key_name)\n\n outputs = copy.deepcopy(response)\n outputs['deletedDate'] = convert_timestamp_to_readable_date(\n outputs['deletedDate'])\n outputs['scheduledPurgeDate'] = convert_timestamp_to_readable_date(\n outputs['scheduledPurgeDate'])\n\n readable_response = copy.deepcopy(outputs)\n readable_response['keyId'] = readable_response['key']['kid']\n\n outputs['attributes'] = convert_time_attributes_to_iso(outputs['attributes'])\n outputs[VAULT_NAME_CONTEXT_FIELD] = vault_name\n\n readable_output = tableToMarkdown(f'Delete {key_name}',\n readable_response,\n ['keyId', 'recoveryId', 'deletedDate',\n 'scheduledPurgeDate'],\n removeNull=True,\n headerTransform=pascalToSpace)\n command_results = CommandResults(\n outputs_prefix='AzureKeyVault.Key',\n outputs_key_field='recoveryId',\n outputs=outputs,\n raw_response=response,\n readable_output=readable_output,\n ignore_auto_extract=True\n )\n\n return command_results",
"def do_command(self, args):\n subjectops = dbops.TestSubjects()\n subjectops.delete(args)",
"def delete(self, key):\n parser = reqparse.RequestParser()\n parser.add_argument('Content-Type', type=str, location='headers',\n choices='application/json', required=True)\n parser.add_argument('X-HP3PAR-WSAPI-SessionKey', type=str,\n location='headers', required=True)\n arg = parser.parse_args()\n\n if key != arg['X-HP3PAR-WSAPI-SessionKey']:\n return response(403)\n\n # Check session key\n if self.check_seskey(key):\n self.sessions.pop(key)\n return response(200)\n\n return response(403)",
"def delete_key_command():\n incident = demisto.args().get('id', get_investigation_id())\n key = demisto.args().get('key')\n # Search Collection for incident_id and key\n search = incident + '.key'\n cursor = COLLECTION.find_one({search: key})\n if cursor is not None:\n object_id = cursor.get('_id')\n COLLECTION.delete_one({'_id': object_id})\n return f'Incident \"{incident}\" - key/value collection - 1 document deleted', {}, {}\n return f'Key \"{key}\" for incident_id \"{incident}\" does not exist', {}, {}",
"def delete_command(arguments: List[str]) -> None:\n if len(arguments) != 2:\n print('Required 1 argument for create command') # noqa: WPS421\n return\n token = token_load.load()\n logic.delete(token, gist_id=arguments[1])",
"def do_command(self, args):\n hostops = dbops.Hosts()\n hostops.delete(args)",
"def remove_tier1_ipsec_vpn(**kwargs):\n proxy = kwargs['proxy']\n session_token = kwargs['sessiontoken']\n display_name = kwargs['display_name']\n tier1_gateway = kwargs['tier1_gateway']\n vpn_service = kwargs['vpn_service']\n\n json_response_status_code = delete_tier1_ipsec_vpn_json(proxy, session_token, display_name, tier1_gateway, vpn_service)\n if json_response_status_code == 200:\n sys.exit(f\"Tier-1 IPSec VPN Session {display_name} was deleted successfully\")\n else:\n print(f\"There was an error deleting Tier1 IPSec VPN Session {display_name}\")\n sys.exit(1)",
"def catalog_delete(self, args):\n headers = DEFAULT_HEADERS.copy()\n headers.update(args.headers)\n try:\n catalog = self.server.connect_ermrest(args.id)\n catalog.delete(args.path, headers)\n except HTTPError as e:\n if e.response.status_code == requests.codes.not_found:\n raise ResourceException('Catalog not found', e)\n else:\n raise e",
"def delete_client():\n preserve_cache = request.args.get('preserve_cache', False)\n delete_client(g.client_id, preserve_cache)\n return jsonify({'Success': True})",
"def delete_server(self, request, tenant_id, server_id):\n response_data = delete_server(server_id)\n request.setResponseCode(response_data[1])\n return json.dumps(response_data[0])",
"def delete_certificate(a): # delete_certificate(arn, /)\n\n while True:\n\n try:\n acm.delete_certificate(**{'CertificateArn': a})\n return\n except ClientError as exception:\n log_exception('')\n\n err_code = exception.response['Error']['Code']\n\n if err_code == 'ResourceInUseException':\n if get_remaining_time_in_millis() / 1000 < 30:\n raise\n\n sleep(5)\n continue\n\n if err_code in ['ResourceNotFoundException', 'ValidationException']:\n # If the arn is invalid, it didn't exist anyway.\n return\n\n raise\n\n except ParamValidationError:\n # invalid arn\n return",
"def DELETE(self, env, start_response):\n key_args = set(['cors','lifecycle','policy','tagging','website'])\n\n qs = env.get('QUERY_STRING', '')\n args = urlparse.parse_qs(qs, 1)\n\n if not key_args & set(args):\n # DELETE a Bucket\n version = args.get('versionId')\n if version:\n vid = version[0]\n if vid.lower() == 'lastest':\n pass\n else:\n env['PATH_INFO'] = '/v1/AUTH_%s/%s/%s' % (quote(self.account_name),\n quote(self.version_name(self.container_name)),\n vid)\n\n body_iter = self._app_call(env)\n status = self._get_status_int()\n\n if status != HTTP_NO_CONTENT:\n if status in (HTTP_UNAUTHORIZED, HTTP_FORBIDDEN):\n return self.get_err_response('AccessDenied')\n elif status == HTTP_NOT_FOUND:\n return self.get_err_response('NoSuchBucket')\n elif status == HTTP_CONFLICT:\n return self.get_err_response('BucketNotEmpty')\n else:\n return self.get_err_response('InvalidURI')\n\n resp = Response()\n resp.status = HTTP_NO_CONTENT\n return resp\n else:\n # DELETE specified data\n action = args.keys().pop()\n if action == 'cors':\n # delete cors\n env['HTTP_X_CONTAINER_META_ACCESS_CONTROL_ALLOW_ORIGIN'] = ''\n env['HTTP_X_CONTAINER_META_ACCESS_CONTROL_MAX_AGE'] = ''\n env['HTTP_X_CONTAINER_META_ACCESS_CONTROL_EXPOSE_HEADERS'] = ''\n env['HTTP_X_CONTAINER_META_ACCESS_CONTROL_ALLOW_METHOD'] = ''\n env['QUERY_STRING'] = ''\n env['REQUEST_METHOD'] = 'POST'\n\n body_iter = self._app_call(env)\n status = self._get_status_int()\n\n if is_success(status):\n resp = Response()\n resp.status = HTTP_NO_CONTENT\n return resp\n elif status in (HTTP_UNAUTHORIZED, HTTP_FORBIDDEN):\n return self.get_err_response('AccessDenied')\n else:\n return self.get_err_response('InvalidURI')\n elif action == 'lifecycle':\n # delete lifecycle\n env['HTTP_X_CONTAINER_META_TRANS_AT'] = ''\n env['HTTP_X_CONTAINER_META_TRANS_AFTER'] = ''\n env['HTTP_X_CONTAINER_META_TRANS_CLASS'] = ''\n\n env['HTTP_X_CONTAINER_META_EXPIRATION_AT'] = ''\n env['HTTP_X_CONTAINER_META_EXPIRATION_AFTER'] = ''\n env['HTTP_X_CONTAINER_META_EXPIRATION_PREFIX'] = ''\n env['HTTP_X_CONTAINER_META_EXPIRATION_STATUS'] = ''\n env['REQUEST_METHOD'] = 'POST'\n env['QUERY_STRING'] = ''\n body_iter = self._app_call(env)\n status = self._get_status_int()\n if is_success(status):\n resp = Response()\n resp.status = HTTP_NO_CONTENT\n return resp\n elif status in (HTTP_UNAUTHORIZED, HTTP_FORBIDDEN):\n return self.get_err_response('AccessDenied')\n else:\n return self.get_err_response('InvalidURI')\n elif action == 'policy':\n # delete policy\n env['REQUEST_METHOD'] = 'POST'\n env['QUERY_STRING'] = ''\n env['HTTP_X_CONTAINER_META_POLICY'] = ''\n body_iter = self._app_call(env)\n status = self._get_status_int()\n if is_success(status):\n resp = Response()\n resp.status = HTTP_NO_CONTENT\n return resp\n elif status in (HTTP_UNAUTHORIZED, HTTP_FORBIDDEN):\n return self.get_err_response('AccessDenied')\n else:\n return self.get_err_response('InvalidURI')\n elif action == 'tagging':\n # delete tagging\n env2 = copy(env)\n container_info = get_container_info(env2, self.app)\n meta_keys = container_info['meta'].keys()\n for key in meta_keys:\n env['HTTP_X_CONTAINER_META_' + key.replace('-', '_').upper()] = ''\n env['QUERY_STRING'] = ''\n env['REQUEST_METHOD'] = 'POST'\n\n body_iter = self._app_call(env)\n status = self._get_status_int()\n\n if is_success(status):\n resp = Response()\n resp.status = HTTP_NO_CONTENT\n return resp\n elif status in (HTTP_UNAUTHORIZED, HTTP_FORBIDDEN):\n return self.get_err_response('AccessDenied')\n else:\n return 
self.get_err_response('InvalidURI')\n elif action == 'website':\n # delete website\n body = env['wsgi.input'].read()\n env['REQUEST_METHOD'] = 'POST'\n env['QUERY_STRING'] = ''\n env['HTTP_X_CONTAINER_META_WEBSITE'] = quote(body)\n\n body_iter = self._app_call(env)\n status = self._get_status_int()\n\n if is_success(status):\n resp = Response()\n resp.status = HTTP_OK\n return resp\n elif status in (HTTP_UNAUTHORIZED, HTTP_FORBIDDEN):\n return self.get_err_response('AccessDenied')\n else:\n return self.get_err_response('InvalidURI')\n else:\n return self.get_err_response('InvalidURI')",
"def run(self):\n certificate = self.admin_barbican.create_asymmetric()\n self.admin_barbican.orders_delete(certificate.order_ref)",
"def delete(self, **kwargs):\n self.dbdel('client', kwargs)",
"def DeleteServer(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')",
"def delete(ctx):\n click.echo('deleting')\n ctx.delete()\n click.echo('done')",
"def delete():",
"def DELETE(self, req):\r\n req.headers['X-Remove-Container-Meta-Access-Control-Allow-Origin'] = 'x'\r\n req.headers['X-Remove-Container-Meta-Access-Control-Allow-Methods'] = 'x'\r\n req.headers['X-Remove-Container-Meta-Access-Control-Allow-Headers'] = 'x'\r\n req.headers['X-Remove-Container-Meta-Access-Control-Expose-Headers'] = 'x'\r\n req.headers['X-Remove-Container-Meta-Access-Control-Max-Age'] = 'x'\r\n\r\n resp = req.get_response(self.app, method='POST', headers=req.headers)\r\n\r\n return resp",
"def catalog_alias_delete(self, args):\n try:\n alias = self.server.connect_ermrest_alias(args.id)\n alias.delete_ermrest_alias(really=True)\n except HTTPError as e:\n if e.response.status_code == requests.codes.not_found:\n raise ResourceException('Catalog alias not found', e)\n else:\n raise e",
"def test_delete_non_existing_certificate(self):\n self.client.post(\n '/api/v1/certificates', data=json.dumps(new_certificate),\n content_type='application/json',\n headers=self.get_registrar_token())\n response = self.client.delete(\n '/api/v1/certificates/10', content_type='application/json',\n headers=self.get_registrar_token())\n result = json.loads(response.data.decode())\n self.assertEqual(result['message'],\n 'Certificate not found')\n assert response.status_code == 404"
] | [
"0.6339691",
"0.6308228",
"0.6220074",
"0.59610146",
"0.59122694",
"0.5907533",
"0.58121276",
"0.58015686",
"0.5714405",
"0.57131284",
"0.56532806",
"0.5625814",
"0.559648",
"0.5585206",
"0.5582829",
"0.5567907",
"0.55342",
"0.5526059",
"0.5492054",
"0.54811555",
"0.54777545",
"0.54612213",
"0.5440174",
"0.54387414",
"0.5421512",
"0.5420187",
"0.54013515",
"0.5396123",
"0.53792477",
"0.5373616"
] | 0.6733296 | 0 |
Get Certificate status in Oneview [Example] ${resp} = Fusion Api Get Server Certificate | | | def fusion_api_get_certificate_status(self, api=None, headers=None):
return self.certificate_status.get(api, headers) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def certificate_auth():\r\n url = 'https://www.12306.cn'\r\n response = requests.get(url, verify=False)\r\n print(response.status_code)\r\n print(response.text)",
"def cert_status(self) -> str:\n return pulumi.get(self, \"cert_status\")",
"def get_certificate(self, url):\n bearer = 'Authorization: Bearer '+str(self.exchanged_token).split('\\n', 1)[0]\n data = json.dumps({\"service_id\": \"x509\"})\n\n headers = StringIO()\n buffers = StringIO()\n\n c = pycurl.Curl()\n c.setopt(pycurl.URL, url)\n c.setopt(pycurl.HTTPHEADER, [bearer, 'Content-Type: application/json'])\n c.setopt(pycurl.POST, 1)\n c.setopt(pycurl.POSTFIELDS, data)\n c.setopt(c.WRITEFUNCTION, buffers.write)\n c.setopt(c.HEADERFUNCTION, headers.write)\n c.setopt(c.VERBOSE, True)\n\n try:\n c.perform()\n status = c.getinfo(c.RESPONSE_CODE)\n c.close()\n body = buffers.getvalue()\n\n if str(status) != \"303\" :\n self.log.error(\"On \\\"get redirect curl\\\": %s , http error: %s \" % (body, str(status)))\n return False \n except pycurl.error, error:\n errno, errstr = error\n self.log.info('An error occurred: %s' % errstr)\n return False\n \n redirect = self.tts\n for item in headers.getvalue().split(\"\\n\"):\n if \"location\" in item:\n redirect = redirect + item.strip().replace(\"location: \", \"\")\n\n headers = {'Authorization': 'Bearer ' + self.exchanged_token.strip()}\n response = requests.get(redirect, headers=headers)\n\n try:\n response.raise_for_status()\n except requests.exceptions.HTTPError as e:\n # Whoops it wasn't a 200\n self.log.error(\"get_certificate() Error: %s \" %str(e))\n return False\n\n with open('/tmp/output.json', 'w') as outf:\n outf.write(response.content)\n else:\n self.log.error(\"No location in redirect response\")\n\n return True",
"def fusion_api_get_certificate_info(self, uri=None, api=None, param='', headers=None):\n param = '/certificates/https/'\n return self.ic.get(uri=uri, api=api, headers=headers, param=param)",
"def getCertificateStatus(self, configFile, certSerial):\n cmd = '%s ca -status %s -config %s'%(self.getOpensslExecutable(), certSerial, configFile)\n exitcode, output, stderr = q.system.process.run(cmd, stopOnError=False)\n\n # For some reason, openssl commands return their interesting output via stderr.\n # In this case, we're interested in the second line (actual status), not the first line (config file used)\n return stderr.splitlines()[1]",
"def Certificate(self) -> _n_8_t_0:",
"def Certificate(self) -> _n_8_t_0:",
"def get_ssl_certificate() :",
"def certificate_status_check(cache: dict, session, awsAccountId: str, awsRegion: str, awsPartition: str) -> dict:\n acm = session.client(\"acm\")\n iso8601Time = datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat()\n acm_certs = list_certificates(cache, session)\n for carn in acm_certs:\n # Get ACM Cert Details\n cert = acm.describe_certificate(CertificateArn=carn)[\"Certificate\"]\n # B64 encode all of the details for the Asset\n assetJson = json.dumps(cert,default=str).encode(\"utf-8\")\n assetB64 = base64.b64encode(assetJson)\n cDomainName = str(cert['DomainName'])\n cIssuer = str(cert['Issuer'])\n cSerial = str(cert['Serial'])\n cStatus = str(cert['Status'])\n cKeyAlgo = str(cert['KeyAlgorithm'])\n # this is a passing check\n if cStatus == 'ISSUED':\n finding = {\n \"SchemaVersion\": \"2018-10-08\",\n \"Id\": carn + \"/acm-cert-status-check\",\n \"ProductArn\": f\"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default\",\n \"GeneratorId\": carn,\n \"AwsAccountId\": awsAccountId,\n \"Types\": [\"Software and Configuration Checks/AWS Security Best Practices\"],\n \"FirstObservedAt\": iso8601Time,\n \"CreatedAt\": iso8601Time,\n \"UpdatedAt\": iso8601Time,\n \"Severity\": {\"Label\": \"INFORMATIONAL\"},\n \"Confidence\": 99,\n \"Title\": \"[ACM.5] ACM Certificates should be correctly validated\",\n \"Description\": f\"ACM Certificate {carn} is successfully issued\",\n \"Remediation\": {\n \"Recommendation\": {\n \"Text\": \"For more information on certificate issuing, please refer to the Issuing Certificates section of the AWS Certificate Manager User Guide.\",\n \"Url\": \"https://docs.aws.amazon.com/acm/latest/userguide/gs.html\"\n }\n },\n \"ProductFields\": {\n \"ProductName\": \"ElectricEye\",\n \"Provider\": \"AWS\",\n \"ProviderType\": \"CSP\",\n \"ProviderAccountId\": awsAccountId,\n \"AssetRegion\": awsRegion,\n \"AssetDetails\": assetB64,\n \"AssetClass\": \"Security Services\",\n \"AssetService\": \"Amazon Certificate Manager\",\n \"AssetComponent\": \"Certificate\"\n },\n \"Resources\": [\n {\n \"Type\": \"AwsCertificateManagerCertificate\",\n \"Id\": carn,\n \"Partition\": awsPartition,\n \"Region\": awsRegion,\n \"Details\": {\n \"AwsCertificateManagerCertificate\": {\n \"DomainName\": cDomainName,\n \"Issuer\": cIssuer,\n \"Serial\": cSerial,\n \"KeyAlgorithm\": cKeyAlgo,\n \"Status\": cStatus\n }\n }\n }\n ],\n \"Compliance\": {\n \"Status\": \"PASSED\",\n \"RelatedRequirements\": [\n \"NIST CSF V1.1 PR.MA-1\",\n \"NIST SP 800-53 Rev. 4 MA-2\",\n \"NIST SP 800-53 Rev. 4 MA-3\",\n \"NIST SP 800-53 Rev. 4 MA-5\",\n \"NIST SP 800-53 Rev. 
4 MA-6\",\n \"AICPA TSC CC8.1\",\n \"ISO 27001:2013 A.11.1.2\",\n \"ISO 27001:2013 A.11.2.4\",\n \"ISO 27001:2013 A.11.2.5\",\n \"ISO 27001:2013 A.11.2.6\"\n ]\n },\n \"Workflow\": {\"Status\": \"RESOLVED\"},\n \"RecordState\": \"ARCHIVED\"\n }\n yield finding\n elif cStatus == 'EXPIRED' or \\\n cStatus == 'VALIDATION_TIMED_OUT' or \\\n cStatus == 'REVOKED' or \\\n cStatus == 'FAILED':\n finding = {\n \"SchemaVersion\": \"2018-10-08\",\n \"Id\": carn + \"/acm-cert-renewal-status-check\",\n \"ProductArn\": f\"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default\",\n \"GeneratorId\": carn,\n \"AwsAccountId\": awsAccountId,\n \"Types\": [\"Software and Configuration Checks/AWS Security Best Practices\"],\n \"FirstObservedAt\": iso8601Time,\n \"CreatedAt\": iso8601Time,\n \"UpdatedAt\": iso8601Time,\n \"Severity\": {\"Label\": \"HIGH\"},\n \"Confidence\": 99,\n \"Title\": \"[ACM.5] ACM Certificates should be correctly validated\",\n \"Description\": f\"ACM Certificate {carn} has not been successfully issued. State: {cStatus}\",\n \"Remediation\": {\n \"Recommendation\": {\n \"Text\": \"For more information on certificate issuing, please refer to the Issuing Certificates section of the AWS Certificate Manager User Guide.\",\n \"Url\": \"https://docs.aws.amazon.com/acm/latest/userguide/gs.html\"\n }\n },\n \"ProductFields\": {\n \"ProductName\": \"ElectricEye\",\n \"Provider\": \"AWS\",\n \"ProviderType\": \"CSP\",\n \"ProviderAccountId\": awsAccountId,\n \"AssetRegion\": awsRegion,\n \"AssetDetails\": assetB64,\n \"AssetClass\": \"Security Services\",\n \"AssetService\": \"Amazon Certificate Manager\",\n \"AssetComponent\": \"Certificate\"\n },\n \"Resources\": [\n {\n \"Type\": \"AwsCertificateManagerCertificate\",\n \"Id\": carn,\n \"Partition\": awsPartition,\n \"Region\": awsRegion,\n \"Details\": {\n \"AwsCertificateManagerCertificate\": {\n \"DomainName\": cDomainName,\n \"Issuer\": cIssuer,\n \"Serial\": cSerial,\n \"KeyAlgorithm\": cKeyAlgo,\n \"Status\": cStatus\n }\n }\n }\n ],\n \"Compliance\": {\n \"Status\": \"FAILED\",\n \"RelatedRequirements\": [\n \"NIST CSF V1.1 PR.MA-1\",\n \"NIST SP 800-53 Rev. 4 MA-2\",\n \"NIST SP 800-53 Rev. 4 MA-3\",\n \"NIST SP 800-53 Rev. 4 MA-5\",\n \"NIST SP 800-53 Rev. 4 MA-6\",\n \"AICPA TSC CC8.1\",\n \"ISO 27001:2013 A.11.1.2\",\n \"ISO 27001:2013 A.11.2.4\",\n \"ISO 27001:2013 A.11.2.5\",\n \"ISO 27001:2013 A.11.2.6\"\n ]\n },\n \"Workflow\": {\"Status\": \"NEW\"},\n \"RecordState\": \"ACTIVE\"\n }\n yield finding",
"def info(name):\n try:\n cert = client().certificates.get(name)\n if not cert:\n logger.info('ctl:cert:info', 'No certificates found')\n return\n click.echo(click.style(cert[\"id\"], fg=\"white\", bold=True))\n click.echo(\n click.style(\" * Domain: \", fg=\"yellow\") + cert[\"domain\"]\n )\n click.echo(\n click.style(\" * Type: \", fg=\"yellow\") +\n \"{0}-bit {1}\".format(cert[\"keylength\"], cert[\"keytype\"])\n )\n click.echo(\n click.style(\" * SHA1: \", fg=\"yellow\") + cert[\"sha1\"]\n )\n click.echo(\n click.style(\" * Expires: \", fg=\"yellow\") +\n cert[\"expiry\"].strftime(\"%c\")\n )\n if cert.assigns:\n imsg = \", \".join([y[\"name\"] for y in cert[\"assigns\"]])\n click.echo(click.style(\" * Assigned to: \", fg=\"yellow\") + imsg)\n except Exception as e:\n raise CLIException(str(e))",
"def _parse_certificate(cls, response):\n links = _parse_header_links(response)\n try:\n cert_chain_uri = links[u'up'][u'url']\n except KeyError:\n cert_chain_uri = None\n return (\n response.content()\n .addCallback(\n lambda body: messages.CertificateResource(\n uri=cls._maybe_location(response),\n cert_chain_uri=cert_chain_uri,\n body=body))\n )",
"def get_ssl_certificate():",
"def _cert_info(user, course, cert_status):\r\n # simplify the status for the template using this lookup table\r\n template_state = {\r\n CertificateStatuses.generating: 'generating',\r\n CertificateStatuses.regenerating: 'generating',\r\n CertificateStatuses.downloadable: 'ready',\r\n CertificateStatuses.notpassing: 'notpassing',\r\n CertificateStatuses.restricted: 'restricted',\r\n }\r\n\r\n default_status = 'processing'\r\n\r\n default_info = {'status': default_status,\r\n 'show_disabled_download_button': False,\r\n 'show_download_url': False,\r\n 'show_survey_button': False,\r\n }\r\n\r\n if cert_status is None:\r\n return default_info\r\n\r\n status = template_state.get(cert_status['status'], default_status)\r\n\r\n d = {'status': status,\r\n 'show_download_url': status == 'ready',\r\n 'show_disabled_download_button': status == 'generating',\r\n 'mode': cert_status.get('mode', None)}\r\n\r\n if (status in ('generating', 'ready', 'notpassing', 'restricted') and\r\n course.end_of_course_survey_url is not None):\r\n d.update({\r\n 'show_survey_button': True,\r\n 'survey_url': process_survey_link(course.end_of_course_survey_url, user)})\r\n else:\r\n d['show_survey_button'] = False\r\n\r\n if status == 'ready':\r\n if 'download_url' not in cert_status:\r\n log.warning(\"User %s has a downloadable cert for %s, but no download url\",\r\n user.username, course.id)\r\n return default_info\r\n else:\r\n d['download_url'] = cert_status['download_url']\r\n\r\n if status in ('generating', 'ready', 'notpassing', 'restricted'):\r\n if 'grade' not in cert_status:\r\n # Note: as of 11/20/2012, we know there are students in this state-- cs169.1x,\r\n # who need to be regraded (we weren't tracking 'notpassing' at first).\r\n # We can add a log.warning here once we think it shouldn't happen.\r\n return default_info\r\n else:\r\n d['grade'] = cert_status['grade']\r\n\r\n return d",
"def status_get():\n response = json_response.success()\n response.headers['Access-Control-Allow-Origin'] = '*'\n return response",
"def get_certinfo(doc):\n\n #set a two second default timeout to recieve a cert\n socket.setdefaulttimeout(2)\n doc['ssl'] = {} \n\n try:\n cert = ssl.get_server_certificate((doc['hostname'], 443))\n #sometimes certs come back as unicode so cast to str() aka ascii\n cert = M2Crypto.X509.load_cert_string(str(cert))\n\n except:\n syslog.syslog('[*] Failed to get ssl certificate from %s' % doc['hostname'])\n print('[*] Failed to get ssl certificate from %s' % doc['hostname'])\n #lets remove the ssl key and return the doc untouched\n doc.pop('ssl')\n return doc\n\n\n #get creation date\n doc['ssl']['created'] = cert.get_not_before().get_datetime().isoformat()\n #get not valid after, aka expiration data\n doc['ssl']['expire'] = cert.get_not_after().get_datetime().isoformat()\n #get issuer information\n doc['ssl']['issuer'] = cert.get_issuer().as_text()\n #get subject information\n doc['ssl']['subject'] = cert.get_subject().as_text()\n #get keysize, size() returns in bytes, so we multiply * 8 to get the number of bits\n doc['ssl']['keysize'] = cert.get_pubkey().size() * 8\n #get cert fingerprint for comparison\n doc['ssl']['fingerprint'] = cert.get_fingerprint()\n\n return doc",
"def request_certificate(request):\r\n if request.method == \"POST\":\r\n if request.user.is_authenticated():\r\n xqci = XQueueCertInterface()\r\n username = request.user.username\r\n student = User.objects.get(username=username)\r\n course_key = SlashSeparatedCourseKey.from_deprecated_string(request.POST.get('course_id'))\r\n course = modulestore().get_course(course_key, depth=2)\r\n\r\n status = certificate_status_for_student(student, course_key)['status']\r\n if status in [CertificateStatuses.unavailable, CertificateStatuses.notpassing, CertificateStatuses.error]:\r\n logger.info('Grading and certification requested for user {} in course {} via /request_certificate call'.format(username, course_key))\r\n status = xqci.add_cert(student, course_key, course=course)\r\n return HttpResponse(json.dumps({'add_status': status}), mimetype='application/json')\r\n return HttpResponse(json.dumps({'add_status': 'ERRORANONYMOUSUSER'}), mimetype='application/json')",
"def get_certificate_command(client: KeyVaultClient, args: dict[str, Any]) -> CommandResults:\n vault_name = args.get('vault_name', '')\n certificate_name = args.get('certificate_name', '')\n certificate_version = args.get('certificate_version', '')\n response = client.get_certificate_request(\n vault_name, certificate_name, certificate_version)\n\n outputs = copy.deepcopy(response)\n outputs['attributes'] = convert_time_attributes_to_iso(outputs['attributes'])\n outputs['policy']['attributes'] = convert_time_attributes_to_iso(outputs['policy']['attributes'])\n\n readable_response = {'certificate_id': response.get(\n 'id'), **convert_attributes_to_readable(response.get('attributes', {}).copy())}\n outputs[VAULT_NAME_CONTEXT_FIELD] = vault_name\n\n readable_output = tableToMarkdown(f'{certificate_name} Information',\n readable_response,\n ['certificate_id', 'enabled', 'create_time', 'update_time', 'expiry_time'],\n removeNull=True,\n headerTransform=string_to_table_header)\n command_results = CommandResults(\n outputs_prefix='AzureKeyVault.Certificate',\n outputs_key_field='id',\n outputs=outputs,\n raw_response=response,\n readable_output=readable_output,\n ignore_auto_extract=True\n )\n\n return command_results",
"def test_render_certificate(self):\n GeneratedCertificate.objects.create(\n user=self.student, course_id=self.course.id, status=u'downloadable')\n\n url = '{}?is_bigcourse=0'.format(reverse(\n 'completion_data_view', kwargs={\n 'course_id': self.course.id}))\n self.response = self.staff_client.get(url)\n data = json.loads(self.response.content.decode())\n self.assertEqual(data['data'],[[False]])\n\n self.response = self.staff_client.get(url)\n self.assertEqual(self.response.status_code, 200)\n data = json.loads(self.response.content.decode())\n self.assertEqual(len(data['data']), 12)\n self.assertEqual(\n data['data'][-1], ['[email protected]', 'student', '', '', '0/1', '0/1', 'Si'])",
"def extract_ocsp_result(ocsp_response):\n\n func_name: str = \"extract_ocsp_result\"\n\n try:\n ocsp_response = ocsp.load_der_ocsp_response(ocsp_response.content)\n # OCSP Response Status here:\n # https://cryptography.io/en/latest/_modules/cryptography/x509/ocsp/#OCSPResponseStatus\n # A status of 0 == OCSPResponseStatus.SUCCESSFUL\n if str(ocsp_response.response_status.value) != \"0\":\n # This will return one of five errors, which means connecting\n # to the OCSP Responder failed for one of the below reasons:\n # MALFORMED_REQUEST = 1\n # INTERNAL_ERROR = 2\n # TRY_LATER = 3\n # SIG_REQUIRED = 5\n # UNAUTHORIZED = 6\n ocsp_response = str(ocsp_response.response_status)\n ocsp_response = ocsp_response.split(\".\")\n raise Exception(f\"{func_name}: OCSP Request Error: {ocsp_response[1]}\")\n\n certificate_status = str(ocsp_response.certificate_status)\n certificate_status = certificate_status.split(\".\")\n return f\"OCSP Status: {certificate_status[1]}\"\n\n except ValueError as err:\n return f\"{func_name}: {str(err)}\"",
"def fusion_api_get_server_certificate(self, aliasname, api=None, headers=None):\n return self.server_certificate.get(aliasname, api, headers)",
"def cert_challenge_http(self) -> 'outputs.CertHttpChallengeResponse':\n return pulumi.get(self, \"cert_challenge_http\")",
"def _display_cert_details(self):\n if self.check_valid_result_data(\"cert_details\", silent=True):\n display(self._last_result.cert_details)\n else:\n nb_markdown(f\"No TLS certificate found for {self.url}.\")",
"def get_status():\n data = {\n 'status': 'up',\n }\n jsn = json.dumps(data)\n\n resp = Response(jsn, status=200, mimetype='application/json')\n\n return resp",
"def get():\n\n l2ca_info = caps.l2ca_info()\n\n res = {\n 'cache_size': l2ca_info['cache_size'],\n 'cw_size': l2ca_info['cache_way_size'],\n 'cw_num': l2ca_info['cache_ways_num'],\n 'clos_num': l2ca_info['clos_num'],\n 'cdp_supported': l2ca_info['cdp_supported'],\n 'cdp_enabled': l2ca_info['cdp_enabled']\n }\n return res, 200",
"def status():\n return jsonify(service='scwr-api-requirements', status='ok')",
"def certificate_renewal_status_check(cache: dict, session, awsAccountId: str, awsRegion: str, awsPartition: str) -> dict:\n acm = session.client(\"acm\")\n iso8601Time = datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat()\n try: \n acm_certs = list_certificates(cache, session)\n for carn in acm_certs:\n # Get ACM Cert Details\n cert = acm.describe_certificate(CertificateArn=carn)[\"Certificate\"]\n # B64 encode all of the details for the Asset\n assetJson = json.dumps(cert,default=str).encode(\"utf-8\")\n assetB64 = base64.b64encode(assetJson)\n cDomainName = str(cert['DomainName'])\n cIssuer = str(cert['Issuer'])\n cSerial = str(cert['Serial'])\n cStatus = str(cert['Status'])\n cKeyAlgo = str(cert['KeyAlgorithm'])\n \n #Will trigger key error if certificate type is not AMAZON_ISSUED\n renewal_status = cert['RenewalSummary'].get('RenewalStatus', '')\n if renewal_status == 'FAILED':\n finding = {\n \"SchemaVersion\": \"2018-10-08\",\n \"Id\": carn + \"/acm-cert-renewal-status-check\",\n \"ProductArn\": f\"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default\",\n \"GeneratorId\": carn,\n \"AwsAccountId\": awsAccountId,\n \"Types\": [\"Software and Configuration Checks/AWS Security Best Practices\"],\n \"FirstObservedAt\": iso8601Time,\n \"CreatedAt\": iso8601Time,\n \"UpdatedAt\": iso8601Time,\n \"Severity\": {\"Label\": \"HIGH\"},\n \"Confidence\": 99,\n \"Title\": \"[ACM.4] ACM Certificates should be renewed successfully\",\n \"Description\": f\"ACM Certificate {carn} renewal has failed\",\n \"Remediation\": {\n \"Recommendation\": {\n \"Text\": \"For more information on certificate renewals, please refer to the Managed Renewal section of the AWS Certificate Manager User Guide.\",\n \"Url\": \"https://docs.aws.amazon.com/acm/latest/userguide/check-certificate-renewal-status.html\"\n }\n },\n \"ProductFields\": {\n \"ProductName\": \"ElectricEye\",\n \"Provider\": \"AWS\",\n \"ProviderType\": \"CSP\",\n \"ProviderAccountId\": awsAccountId,\n \"AssetRegion\": awsRegion,\n \"AssetDetails\": assetB64,\n \"AssetClass\": \"Security Services\",\n \"AssetService\": \"Amazon Certificate Manager\",\n \"AssetComponent\": \"Certificate\"\n },\n \"Resources\": [\n {\n \"Type\": \"AwsCertificateManagerCertificate\",\n \"Id\": carn,\n \"Partition\": awsPartition,\n \"Region\": awsRegion,\n \"Details\": {\n \"AwsCertificateManagerCertificate\": {\n \"DomainName\": cDomainName,\n \"Issuer\": cIssuer,\n \"Serial\": cSerial,\n \"KeyAlgorithm\": cKeyAlgo,\n \"Status\": cStatus\n }\n }\n }\n ],\n \"Compliance\": {\n \"Status\": \"FAILED\",\n \"RelatedRequirements\": [\n \"NIST CSF V1.1 PR.MA-1\",\n \"NIST SP 800-53 Rev. 4 MA-2\",\n \"NIST SP 800-53 Rev. 4 MA-3\",\n \"NIST SP 800-53 Rev. 4 MA-5\",\n \"NIST SP 800-53 Rev. 
4 MA-6\",\n \"AICPA TSC CC8.1\",\n \"ISO 27001:2013 A.11.1.2\",\n \"ISO 27001:2013 A.11.2.4\",\n \"ISO 27001:2013 A.11.2.5\",\n \"ISO 27001:2013 A.11.2.6\"\n ]\n },\n \"Workflow\": {\"Status\": \"NEW\"},\n \"RecordState\": \"ACTIVE\"\n }\n yield finding\n elif renewal_status == 'PENDING_VALIDATION':\n finding = {\n \"SchemaVersion\": \"2018-10-08\",\n \"Id\": carn + \"/acm-cert-renewal-status-check\",\n \"ProductArn\": f\"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default\",\n \"GeneratorId\": carn,\n \"AwsAccountId\": awsAccountId,\n \"Types\": [\"Software and Configuration Checks/AWS Security Best Practices\"],\n \"FirstObservedAt\": iso8601Time,\n \"CreatedAt\": iso8601Time,\n \"UpdatedAt\": iso8601Time,\n \"Severity\": {\"Label\": \"LOW\"},\n \"Confidence\": 99,\n \"Title\": \"[ACM.4] ACM Certificates should be renewed successfully\",\n \"Description\": f\"ACM Certificate {carn} renewal is pending user validation\",\n \"Remediation\": {\n \"Recommendation\": {\n \"Text\": \"For more information on certificate renewals, please refer to the Managed Renewal section of the AWS Certificate Manager User Guide.\",\n \"Url\": \"https://docs.aws.amazon.com/acm/latest/userguide/check-certificate-renewal-status.html\"\n }\n },\n \"ProductFields\": {\n \"ProductName\": \"ElectricEye\",\n \"Provider\": \"AWS\",\n \"ProviderType\": \"CSP\",\n \"ProviderAccountId\": awsAccountId,\n \"AssetRegion\": awsRegion,\n \"AssetDetails\": assetB64,\n \"AssetClass\": \"Security Services\",\n \"AssetService\": \"Amazon Certificate Manager\",\n \"AssetComponent\": \"Certificate\"\n },\n \"Resources\": [\n {\n \"Type\": \"AwsCertificateManagerCertificate\",\n \"Id\": carn,\n \"Partition\": awsPartition,\n \"Region\": awsRegion,\n \"Details\": {\n \"AwsCertificateManagerCertificate\": {\n \"DomainName\": cDomainName,\n \"Issuer\": cIssuer,\n \"Serial\": cSerial,\n \"KeyAlgorithm\": cKeyAlgo,\n \"Status\": cStatus\n }\n }\n }\n ],\n \"Compliance\": {\n \"Status\": \"FAILED\",\n \"RelatedRequirements\": [\n \"NIST CSF V1.1 PR.MA-1\",\n \"NIST SP 800-53 Rev. 4 MA-2\",\n \"NIST SP 800-53 Rev. 4 MA-3\",\n \"NIST SP 800-53 Rev. 4 MA-5\",\n \"NIST SP 800-53 Rev. 
4 MA-6\",\n \"AICPA TSC CC8.1\",\n \"ISO 27001:2013 A.11.1.2\",\n \"ISO 27001:2013 A.11.2.4\",\n \"ISO 27001:2013 A.11.2.5\",\n \"ISO 27001:2013 A.11.2.6\"\n ]\n },\n \"Workflow\": {\"Status\": \"NEW\"},\n \"RecordState\": \"ACTIVE\"\n }\n yield finding\n elif renewal_status == 'PENDING_AUTO_RENEWAL' or renewal_status == 'SUCCESS':\n finding = {\n \"SchemaVersion\": \"2018-10-08\",\n \"Id\": carn + \"/acm-cert-renewal-status-check\",\n \"ProductArn\": f\"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default\",\n \"GeneratorId\": carn,\n \"AwsAccountId\": awsAccountId,\n \"Types\": [\"Software and Configuration Checks/AWS Security Best Practices\"],\n \"FirstObservedAt\": iso8601Time,\n \"CreatedAt\": iso8601Time,\n \"UpdatedAt\": iso8601Time,\n \"Severity\": {\"Label\": \"INFORMATIONAL\"},\n \"Confidence\": 99,\n \"Title\": \"[ACM.4] ACM Certificates should be renewed successfully\",\n \"Description\": f\"ACM Certificate {carn} renewal is in a {str(cert['RenewalSummary']['RenewalStatus'])} state\",\n \"Remediation\": {\n \"Recommendation\": {\n \"Text\": \"For more information on certificate renewals, please refer to the Managed Renewal section of the AWS Certificate Manager User Guide.\",\n \"Url\": \"https://docs.aws.amazon.com/acm/latest/userguide/check-certificate-renewal-status.html\"\n }\n },\n \"ProductFields\": {\n \"ProductName\": \"ElectricEye\",\n \"Provider\": \"AWS\",\n \"ProviderType\": \"CSP\",\n \"ProviderAccountId\": awsAccountId,\n \"AssetRegion\": awsRegion,\n \"AssetDetails\": assetB64,\n \"AssetClass\": \"Security Services\",\n \"AssetService\": \"Amazon Certificate Manager\",\n \"AssetComponent\": \"Certificate\"\n },\n \"Resources\": [\n {\n \"Type\": \"AwsCertificateManagerCertificate\",\n \"Id\": carn,\n \"Partition\": awsPartition,\n \"Region\": awsRegion,\n \"Details\": {\n \"AwsCertificateManagerCertificate\": {\n \"DomainName\": cDomainName,\n \"Issuer\": cIssuer,\n \"Serial\": cSerial,\n \"KeyAlgorithm\": cKeyAlgo,\n \"Status\": cStatus\n }\n }\n }\n ],\n \"Compliance\": {\n \"Status\": \"PASSED\",\n \"RelatedRequirements\": [\n \"NIST CSF V1.1 PR.MA-1\",\n \"NIST SP 800-53 Rev. 4 MA-2\",\n \"NIST SP 800-53 Rev. 4 MA-3\",\n \"NIST SP 800-53 Rev. 4 MA-5\",\n \"NIST SP 800-53 Rev. 4 MA-6\",\n \"AICPA TSC CC8.1\",\n \"ISO 27001:2013 A.11.1.2\",\n \"ISO 27001:2013 A.11.2.4\",\n \"ISO 27001:2013 A.11.2.5\",\n \"ISO 27001:2013 A.11.2.6\"\n ]\n },\n \"Workflow\": {\"Status\": \"RESOLVED\"},\n \"RecordState\": \"ARCHIVED\"\n }\n yield finding\n except KeyError as e:\n pass",
"def _service_status(res, ctx):\n\n if _has_error_code(res):\n return print_errors(res, ctx)\n\n template = '''\\\nname: {name}\nconfig-location: {config_location}\nlog-location: {log_location}\nscript-location: {script_location}\nrunning: {running}\nenabled: {enabled}\ncontainer-running: {container_running}\ndeployment: {deployment}\nconfig: {config}'''\n\n result = template.format(name=res['name'],\n config_location=res['config_location'],\n log_location=res['log_location'],\n script_location=res['script_location'],\n running=res['running'],\n enabled=res['enabled'],\n container_running=res['container_status'].get('running', False),\n deployment=res['deployment'],\n config=res['config'])\n\n if 'callback_uri' in res:\n result += \"\\ncallback-uri: {callback_uri}\".format(callback_uri=res['callback_uri'])\n\n return result",
"def test_get_certificate(self):\n chain = _create_certificate_chain()\n [(cakey, cacert), (ikey, icert), (skey, scert)] = chain\n\n context = Context(SSLv23_METHOD)\n context.use_certificate(scert)\n client = Connection(context, None)\n cert = client.get_certificate()\n assert cert is not None\n assert \"Server Certificate\" == cert.get_subject().CN",
"def ping_response():\n\n return Response(\"ok\", status=200)",
"def get_patient_status():\n r = requests.get(\"http://vcm-7474.vm.duke.edu:5000/api/heart_rate/3\")\n print(r.text)"
] | [
"0.6732982",
"0.636036",
"0.6296492",
"0.61848366",
"0.5782088",
"0.5774162",
"0.5774162",
"0.5769534",
"0.5746093",
"0.5686152",
"0.56757915",
"0.5651092",
"0.56043833",
"0.5604218",
"0.55934715",
"0.55675757",
"0.5563659",
"0.55380785",
"0.5512679",
"0.5492537",
"0.5462718",
"0.54537016",
"0.54419327",
"0.5428214",
"0.54139256",
"0.5402121",
"0.5390352",
"0.53897953",
"0.53646225",
"0.5363245"
] | 0.6891181 | 0 |
Delete a Repository from the appliance based on uri [Arguments] | def fusion_api_delete_repository(self, uri, api=None, headers=None):
return self.repository.delete(uri=uri, api=api, headers=headers) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def repository_delete(ctx: click.Context, repository_name):\n subcommand_repository.cmd_delete(ctx.obj, repository_name)",
"def delete(ctx: click.Context, repository_path):\n root_commands.cmd_delete(ctx.obj, repository_path)",
"def delete(repo):\n print('Repo: %s' % repo)\n print('Deleted')",
"def delete(connection, rid=None, repo=None):\n\n if repo is None:\n repo = Repository(connection, rid)\n\n return repo.delete()",
"def delete_code_repository(CodeRepositoryName=None):\n pass",
"def DELETE(self, uri):\n def body(conn, cur):\n self.enforce_right('owner', uri)\n if web.ctx.ermrest_history_snaptime is not None:\n raise exception.Forbidden('deletion of catalog at previous revision')\n if web.ctx.ermrest_history_snaprange is not None:\n # should not be possible bug check anyway...\n raise NotImplementedError('deletion of catalog with snapshot range')\n self.set_http_etag( web.ctx.ermrest_catalog_model.etag() )\n self.http_check_preconditions(method='DELETE')\n self.emit_headers()\n return True\n\n def post_commit(destroy):\n web.ctx.ermrest_registry.unregister(self.catalog_id)\n web.ctx.status = '204 No Content'\n return ''\n\n return self.perform(body, post_commit)",
"def delete_repository(repository_id):\n user = get_jwt_identity()\n repository = Repository.query.get_by_id(repository_id, user)\n if repository is None:\n raise ApiException(400, \"No module with this id was found.\")\n if str(repository.owner_id) != user['id']:\n raise ApiException(400, \"Not enough permissions for this action.\")\n repository.delete()\n app.db.session.commit()\n return jsonify()",
"def catalog_delete(self, args):\n headers = DEFAULT_HEADERS.copy()\n headers.update(args.headers)\n try:\n catalog = self.server.connect_ermrest(args.id)\n catalog.delete(args.path, headers)\n except HTTPError as e:\n if e.response.status_code == requests.codes.not_found:\n raise ResourceException('Catalog not found', e)\n else:\n raise e",
"def svn_fs_delete(*args):\r\n return _fs.svn_fs_delete(*args)",
"def delete(self, uri, where, selectionArgs):\n pass",
"def delete(self, namespace_name, repository_name, teamname):\n try:\n model.delete_repo_permission_for_team(teamname, namespace_name, repository_name)\n except DeleteException as ex:\n raise request_error(exception=ex)\n\n log_action(\n \"delete_repo_permission\",\n namespace_name,\n {\"team\": teamname, \"repo\": repository_name},\n repo_name=repository_name,\n )\n\n return \"\", 204",
"def fusion_api_delete_os_deploymentserver(self, name=None, uri=None, param='', api=None, headers=None):\n return self.osds.delete(name=name, uri=uri, param=param, api=api, headers=headers)",
"def do_command(self, args):\n imageops = dbops.Images()\n imageops.delete(args)",
"def delete(self, namespace_name, repository_name, username):\n try:\n model.delete_repo_permission_for_user(username, namespace_name, repository_name)\n except DeleteException as ex:\n raise request_error(exception=ex)\n\n log_action(\n \"delete_repo_permission\",\n namespace_name,\n {\"username\": username, \"repo\": repository_name, \"namespace\": namespace_name},\n repo_name=repository_name,\n )\n\n return \"\", 204",
"def delete_command(arguments: List[str]) -> None:\n if len(arguments) != 2:\n print('Required 1 argument for create command') # noqa: WPS421\n return\n token = token_load.load()\n logic.delete(token, gist_id=arguments[1])",
"def delete(self, _uri):\n print(\"Deleting '%s'\"%(_uri))\n response = self.__httpsRequest('DELETE', _uri, '')",
"def delete(self, uri, **kwargs):\n return self.session.delete(uri, **kwargs)",
"def test_collection_delete(repository_collection, faker):\n x_name = faker.word()\n\n repository_collection.delete(x_name)\n\n repository_collection.client.scripts.create_if_missing.assert_called_once()\n repository_collection.client.scripts.run.assert_called_with(\n 'nexus3-cli-repository-delete', data=x_name)",
"def delete(self, git_repo_id: int):\n self.datastore.delete(document_id=git_repo_id)\n return None, 204",
"def fusion_api_delete_rack_manager(self, uri, name=None, param='', api=None, headers=None):\n return self.rackmanager.delete(uri=uri, name=name, param=param, api=api, headers=headers)",
"def delete(self, uri, body=None, headers=None, auth=False):\n return self.send_request('DELETE', uri, body, headers, auth)",
"def delete_from_backend(uri, **kwargs):\n\n parsed_uri = urlparse.urlparse(uri)\n scheme = parsed_uri.scheme\n\n backend_class = get_backend_class(scheme)\n\n if hasattr(backend_class, 'delete'):\n return backend_class.delete(parsed_uri, **kwargs)",
"def delete(cls, uri):\n return cls._perform_request(uri, 'DELETE')",
"def catalog_drop(self, args):\n try:\n catalog = self.server.connect_ermrest(args.id)\n catalog.delete_ermrest_catalog(really=True)\n except HTTPError as e:\n if e.response.status_code == requests.codes.not_found:\n raise ResourceException('Catalog not found', e)\n else:\n raise e",
"def cmd_rm(self, args):\n log.info(\"removing '{}' from the repository...\".format(args.file))\n self.check_repo()\n # check if file is inside the repository and if original file is indeed a symlink\n filepath = os.path.realpath(args.file)\n if not filepath.startswith(self.files_path):\n log.error('not a repository file: {}'.format(args.file))\n orig_path = filepath.replace(self.files_path, self.homedir)\n if not os.path.islink(orig_path):\n log.error('original file path is not a symlink: {}'.format(orig_path))\n # move file to its original location\n log.debug('deleting symlink: {}'.format(orig_path))\n os.unlink(orig_path)\n log.debug('moving file to its original location')\n shutil.move(filepath, orig_path)\n # check for empty dirs to remove\n self.rm_empty_folders(os.path.split(filepath)[0])\n log.debug('removing file from Git')\n self.git_commit('remove {}'.format(args.file))\n log.info('done')",
"def fusion_api_delete_deployment_manager(self, name=None, uri=None, api=None, headers=None):\n return self.dep_mgr.delete(name=name, uri=uri, api=api, headers=headers)",
"def catalog_alias_delete(self, args):\n try:\n alias = self.server.connect_ermrest_alias(args.id)\n alias.delete_ermrest_alias(really=True)\n except HTTPError as e:\n if e.response.status_code == requests.codes.not_found:\n raise ResourceException('Catalog alias not found', e)\n else:\n raise e",
"def remove(self):\n print \"*** Removing repository '%s'\" % self.destination\n shutil.rmtree(self.destination)\n self.destination = None",
"def fusion_api_delete_directory(self, name=None, uri=None, api=None, headers=None):\n return self.logindomain.delete(name, uri, api, headers)",
"def svn_fs_delete_fs(*args):\r\n return _fs.svn_fs_delete_fs(*args)"
] | [
"0.7315825",
"0.71764034",
"0.7037648",
"0.69510937",
"0.6805352",
"0.6645103",
"0.64232045",
"0.637457",
"0.630849",
"0.63075745",
"0.6225126",
"0.62168294",
"0.6214386",
"0.6177097",
"0.6175985",
"0.61539304",
"0.6138644",
"0.61281085",
"0.60665095",
"0.6013469",
"0.5942364",
"0.5925907",
"0.5905183",
"0.58328897",
"0.58195895",
"0.58109665",
"0.58017623",
"0.5796526",
"0.57796484",
"0.57407004"
] | 0.78250545 | 0 |
Gets the ssh access from appliance [Arguments] | def fusion_api_get_ssh_access(self, uri=None, api=None, headers=None):
return self.sshaccess.get(uri=uri, api=api, headers=headers) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def ssh_access(self) -> Optional[pulumi.Input[Union[str, 'AgentPoolSSHAccess']]]:\n return pulumi.get(self, \"ssh_access\")",
"def client_ssh_access(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"client_ssh_access\")",
"def client_ssh_access(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"client_ssh_access\")",
"def web_ssh_access(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"web_ssh_access\")",
"def ssh(args, config):\n print('{}'.format(ssh.__doc__))",
"def get_server_access(server_name):\n qibuild_cfg = qibuild.config.QiBuildConfig()\n qibuild_cfg.read(create_if_missing=True)\n access = qibuild_cfg.get_server_access(server_name)\n return access",
"def option():\n parser = argparse.ArgumentParser(description='ssh with screen')\n parser.add_argument('host', type=str, default=sys.stdin)\n\n return parser.parse_args()",
"def _get_ilo_access(remote_console):\n url = remote_console.get('remoteConsoleUrl')\n url_parse = parse.urlparse(url)\n host_ip = parse.parse_qs(url_parse.netloc).get('addr')[0]\n token = parse.parse_qs(url_parse.netloc).get('sessionkey')[0]\n return host_ip, token",
"def ssh(ssh, app, command):\n if ssh is None:\n ssh = _DEFAULT_SSH\n\n if app.find('#') == -1:\n # Instance is not specified, list matching and exit.\n raise click.BadParameter('Specify full instance name: xxx#nnn')\n\n app_discovery = discovery.Discovery(context.GLOBAL.zk.conn, app, 'ssh')\n app_discovery.sync()\n\n # Restore default signal mask disabled by python spawning new thread\n # for Zk connection.\n #\n # TODO: should this be done as part of zkutils.connect?\n for sig in range(1, signal.NSIG):\n try:\n signal.signal(sig, signal.SIG_DFL)\n except OSError:\n pass\n\n # TODO: not sure how to handle mutliple instances.\n for (app, hostport) in app_discovery.items():\n _LOGGER.info('%s :: %s', app, hostport)\n if hostport:\n host, port = hostport.split(b':')\n run_ssh(host, port, ssh, list(command))",
"def ssh(self) -> pulumi.Input['ContainerServiceSshConfigurationArgs']:\n return pulumi.get(self, \"ssh\")",
"def _getSshCmdAndSecrets(hostname, user, sshId, reuseCon):\n\n sshCmdSecrets = []\n\n if sshId:\n sshCmd = f'ssh -i {sshId}'\n\n elif user.password:\n sshCmd = 'sshpass -v -p :0: ssh'\n sshCmdSecrets += [user.password]\n\n else:\n sshCmd = 'ssh'\n\n sshCmd += ' -o StrictHostKeyChecking=no'\n\n if reuseCon:\n sshCmd += ' -o ControlMaster=auto'\n sshCmd += f' -o ControlPath={CmdSsh._getSocketPath()}'\n sshCmd += ' -o ControlPersist=600'\n\n # Need to separate login part for use with 'rsync -e'\n\n sshLogin = f'{user.name}@{hostname}'\n\n return sshCmd, sshLogin, sshCmdSecrets",
"def ssh(self) -> Optional[pulumi.Input['LinuxProfilePropertiesSshArgs']]:\n return pulumi.get(self, \"ssh\")",
"def ssh_cmd(ctx):\n pass",
"def ssh(host_=None):\n run_command_on_selected_server(open_shell, host_=host_)",
"def main():\r\n parser = argparse.ArgumentParser(description=\"\"\"Starts SSH session with one\r\n of ARC\\'s Raspberrypis.\"\"\")\r\n\r\n parser.add_argument('usr', help='Username for the remote device.')\r\n parser.add_argument('pwd', help='Password for [email protected].')\r\n\r\n args = parser.parse_args()\r\n\r\n address = get_IP(IP_list(args.pwd), args.usr)\r\n os.system(\"ssh \" + \"pi\" + \"@\" + address)",
"def get_remote_access_session(arn=None):\n pass",
"def get_ssh():\n\n ip = str(sc.sticky[\"SSH\"]['ip'])\n port = str(sc.sticky[\"SSH\"]['port'])\n user = str(sc.sticky[\"SSH\"]['user'])\n pw = str(sc.sticky[\"SSH\"]['password'])\n\n ssh_dict = {'ip': ip, 'port': port, 'user': user, 'password': pw}\n\n return ssh_dict",
"def ssh_config(self, arguments):\n instance_name = arguments['<instance>']\n instance_name = self.activate(instance_name)\n\n print(utils.config_ssh_string(self.config_ssh))",
"def svn_fs_get_access(*args):\r\n return _fs.svn_fs_get_access(*args)",
"def get_conn(args):\n\n # connect this thing\n from pyVmomi import vim\n from pyVim.connect import SmartConnect, Disconnect\n import atexit\n try:\n si = SmartConnect(host=args.host, port=args.port, user=args.user, pwd=args.password)\n except Exception as exc:\n if isinstance(exc, vim.fault.HostConnectFault) and '[SSL: CERTIFICATE_VERIFY_FAILED]' in exc.msg:\n try:\n import ssl\n default_context = ssl._create_default_https_context\n ssl._create_default_https_context = ssl._create_unverified_context\n si = SmartConnect(\n host=args.host,\n port=args.port,\n user=args.user,\n pwd=args.password,\n )\n ssl._create_default_https_context = default_context\n except Exception as exc1:\n raise Exception(exc1)\n else:\n import ssl\n context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)\n context.verify_mode = ssl.CERT_NONE\n si = SmartConnect(\n host=args.host,\n port=args.port,\n user=args.user,\n pwd=args.password,\n sslContext=context)\n atexit.register(Disconnect, si)\n return si",
"def remote_accesses(self):\n response = self._request(\"GET\", [ROUTE_REMOTE_ACCESSES])\n return CBWParser().parse_response(CBWRemoteAccess, response)",
"def SSH(*args, **kwargs):\n method = import_class(settings.ORCHESTRATION_SSH_METHOD_BACKEND)\n return method(*args, **kwargs)",
"def ssh():\n env['remote_port'] = env['port_map']['22']\n\n sys.stdout.write('Connecting to SSH session on remote port %(remote_port)s\\n' % env)\n\n run('chmod 600 %(pair_private_key)s' % env)\n\n client = paramiko.SSHClient()\n client.load_system_host_keys()\n client.connect(\n hostname=env['relay_server'],\n port=int(env['remote_port']),\n username=env['pair_user'],\n key_filename=env['pair_private_key']\n )\n\n channel = client.invoke_shell()\n posix_shell(channel)",
"def getRemoteHost():",
"def open_ssh():\n print('Opening SSH...')",
"def executeOnAgent(self, cmd, ip):\n sshadd = \"ssh-add \" + self.config.get(\"SSH\", \"privatekey\")\n self.shell_execute(sshadd)\n \n sshAgentConnection = \"ssh -o StrictHostKeyChecking=no \" + self.config.get('ACS', 'username') + '@' + ip\n self.log.debug(\"SSH Connection to agent: \" + sshAgentConnection)\n \n self.log.debug(\"Command to run on agent: \" + cmd)\n \n sshCmd = sshAgentConnection + ' \\'' + cmd + '\\''\n self.shell_execute(\"exit\")\n result = self.executeOnMaster(sshCmd)\n\n return result",
"def editor_cloud9_ssh_command():\n docker_vars = _editor_cloud9_docker_vars()\n print \"ssh -p %s -i private/ssh/id_rsa_devbox root@%s\" % (docker_vars['public_ssh_port'], env.host)",
"def ssh_call ( server, identity, cmd ) :\n print \"Running SSH command on server \" + server + \": \" + cmd\n return subprocess.call( [ \"ssh\",\n ssh_opt,\n \"-tt\",\n \"-i\",\n identity,\n \"ec2-user@\" + server,\n cmd ] )",
"def test_get_host_access(self):\n pass",
"def lxc_give_access(client, ports, app):\n # send edge config to user\n response = pickle.dumps(ports)\n client.send(response)\n\t# send key to user \n f = open('/var/lib/lxc/%s/rootfs/key' % app,'rb')\n print 'Sending access key to user...'\n l = f.read(1024)\n while (l):\n client.send(l)\n l = f.read(1024)\n f.close()\n print \"Access given.\""
] | [
"0.65911853",
"0.65124744",
"0.65124744",
"0.6461358",
"0.5900902",
"0.57852006",
"0.56950563",
"0.567006",
"0.56571496",
"0.5605361",
"0.55856943",
"0.55844957",
"0.55641323",
"0.55065364",
"0.55023366",
"0.54846245",
"0.54835445",
"0.54749405",
"0.54409343",
"0.54106104",
"0.54070354",
"0.53986555",
"0.5362793",
"0.53619725",
"0.53459245",
"0.5331501",
"0.5319132",
"0.53109884",
"0.5308281",
"0.5299455"
] | 0.6911141 | 0 |
Edits the ssh access for appliance [Arguments] | def fusion_api_edit_ssh_access(self, body, uri=None, api=None, headers=None):
return self.sshaccess.put(body=body, uri=uri, api=api, headers=headers) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def appsec_update(config, **kwargs):\n logger.info('Start Akamai CLI onboard')\n _, wrapper_object = init_config(config)\n util = utility.utility()\n click_args = kwargs\n\n onboard_object = onboard_appsec_update.onboard(click_args)\n\n # Validate setup and akamai cli and cli pipeline are installed\n csv = click_args['csv']\n\n # Validate akamai cli and cli pipeline are installed\n cli_installed = util.installedCommandCheck('akamai')\n pipeline_installed = util.executeCommand(['akamai', 'pipeline'])\n\n if not (pipeline_installed and (cli_installed or pipeline_installed)):\n sys.exit()\n\n # validate setup steps when csv input provided\n util.csv_validator_appsec(onboard_object, csv)\n util.csv_2_appsec_array(onboard_object)\n util.validateAppsecSteps(onboard_object, wrapper_object, cli_mode='appsec-update')\n\n if util.valid is True:\n utility_waf_object = utility_waf.wafFunctions()\n # First create new WAF configuration version\n logger.debug(f'Trying to create new version for WAF configuration: {onboard_object.waf_config_name}')\n create_waf_version = utility_waf_object.createWafVersion(wrapper_object, onboard_object, notes=onboard_object.version_notes)\n wrapper_object.update_waf_config_version_note(onboard_object, notes=onboard_object.version_notes)\n if create_waf_version is False:\n sys.exit()\n\n # Created WAF config version, now can add selected hosts to it\n logger.debug(f'Trying to add property public_hostnames as selected hosts to WAF configuration: {onboard_object.waf_config_name}')\n hostnames_to_add = list(filter(lambda x: x not in onboard_object.skip_selected_hosts, onboard_object.hostname_list))\n add_hostnames = utility_waf_object.addHostnames(wrapper_object,\n hostnames_to_add,\n onboard_object.config_id,\n onboard_object.onboard_waf_config_version)\n if add_hostnames is True:\n logger.info(f'Selected hosts: Successfully added {hostnames_to_add}')\n else:\n logger.error('Unable to add selected hosts to WAF Configuration')\n exit(-1)\n\n # Update WAF match target\n for policy in onboard_object.appsec_json:\n policy_hostnames_to_add = list(filter(lambda x: x not in onboard_object.skip_selected_hosts, onboard_object.appsec_json[policy]['hostnames']))\n modify_matchtarget = utility_waf_object.updateMatchTarget(wrapper_object,\n policy_hostnames_to_add,\n onboard_object.config_id,\n onboard_object.onboard_waf_config_version,\n policy)\n if modify_matchtarget:\n logger.info(f'WAF Configuration Match Target {policy}: Successfully added {policy_hostnames_to_add}')\n else:\n logger.error(f'Failed to add {policy_hostnames_to_add} to match target {policy}')\n\n # Activate WAF configuration to staging\n if click_args['activate']:\n for network in click_args['activate']:\n waf_activation_status = utility_waf_object.updateActivateAndPoll(wrapper_object, onboard_object, network=network.upper())\n if waf_activation_status is False:\n sys.exit(logger.error(f'Unable to activate WAF configuration to {network.upper()} network'))\n else:\n print()\n logger.warning('Activate WAF Configuration Production: SKIPPING')\n\n util.log_cli_timing()",
"def grant_access(username: str, ssh_key: bytes, ip_address: str, remote_username: str):\n\n create_ssh_key_file(username=username, ssh_key=ssh_key, ip_address=ip_address)\n update_ansible_host_file(username=username, ip_address=ip_address)\n update_ansible_vars(\n remote_username=remote_username, username=username, ip_address=ip_address\n )\n AccessControlModel().grant_access(username=username, ip_addresses=[ip_address])",
"def svn_fs_set_access(*args):\r\n return _fs.svn_fs_set_access(*args)",
"def grant(config, hostname, username):\n\n response = make_api_request('PUT', config, '/machines/' + hostname +\n '/users/' + username)\n print 'Permission granted successfully.'",
"def grant_ssh_access ( ec2_conn, tgt_grps, nat_grp ) :\n for grp in tgt_grps :\n grant_grp_access( ec2_conn, [ nat_grp ], grp, 22 )",
"def lxc_give_access(client, ports, app):\n # send edge config to user\n response = pickle.dumps(ports)\n client.send(response)\n\t# send key to user \n f = open('/var/lib/lxc/%s/rootfs/key' % app,'rb')\n print 'Sending access key to user...'\n l = f.read(1024)\n while (l):\n client.send(l)\n l = f.read(1024)\n f.close()\n print \"Access given.\"",
"def allow_me(ctx, hostname, ssh, https, port):\n\n ports = make_port_list(ssh, https, port)\n from opstools.aws import allow_me as this_allow_me\n this_allow_me.main(hostname, ports)",
"def setprivileged(miner: Miner, login, allowsetting):\n commands = get_changeconfigcommands(getminerfilename(miner), 'api-allow', allowsetting)\n sendcommands_and_restart(miner, login, commands)",
"def install_ssh(app):\n os.system('lxc-attach -n %s -- apk update' % app)\n os.system('lxc-attach -n %s -- apk add openssh' % app)\n # Config sshd\n config = '/var/lib/lxc/%s/rootfs/etc/ssh/sshd_config' % app\n with open(config, \"a\") as myfile:\n myfile.write(\"RSAAuthentication yes\\nPubkeyAuthentication yes\\nPermitRootLogin yes\\nPermitEmptyPasswords yes\")\n os.system('lxc-attach -n %s -- /etc/init.d/sshd start' % app)",
"def update_ssh_shortcut(output_keyfile, quickname=None):\n if quickname:\n with settings(warn_only=True):\n local(\"touch $HOME/.ssh/config\")\n local(r\"echo '' >> $HOME/.ssh/config\")\n local(r\"echo 'Host %s' >> $HOME/.ssh/config\" % quickname)\n local(r\"echo '' >> $HOME/.ssh/config\")\n local(r\"echo 'Hostname %s' >> $HOME/.ssh/config\" % host_name)\n local(r\"echo 'User %s' >> $HOME/.ssh/config\" % user)\n local(r\"echo 'IdentityFile ~/.ssh/%s' >> $HOME/.ssh/config\" % output_keyfile)\n local(r\"echo 'ServerAliveCountMax 3' >> $HOME/.ssh/config\")\n local(r\"echo 'ServerAliveInterval 10' >> $HOME/.ssh/config\")",
"def main():\r\n parser = argparse.ArgumentParser(description=\"\"\"Starts SSH session with one\r\n of ARC\\'s Raspberrypis.\"\"\")\r\n\r\n parser.add_argument('usr', help='Username for the remote device.')\r\n parser.add_argument('pwd', help='Password for [email protected].')\r\n\r\n args = parser.parse_args()\r\n\r\n address = get_IP(IP_list(args.pwd), args.usr)\r\n os.system(\"ssh \" + \"pi\" + \"@\" + address)",
"def __gitEditUserConfig(self):\n self.vcs.gitEditUserConfig()",
"def edit_in_vim(self):\n self.host.run(\"vim '%s'\" % esc1(self.host.expand_path(self.remote_path)), use_sudo=self.use_sudo)",
"def cli(env, identifier, label, note):\n\n mgr = SoftLayer.SshKeyManager(env.client)\n\n key_id = helpers.resolve_id(mgr.resolve_ids, identifier, 'SshKey')\n\n if not mgr.edit_key(key_id, label=label, notes=note):\n raise exceptions.CLIAbort('Failed to edit SSH key')",
"def home_edituser():\n\tpass",
"def edit_deployment(request, deployment, **_kwargs):\n pass",
"def ssh(ssh, app, command):\n if ssh is None:\n ssh = _DEFAULT_SSH\n\n if app.find('#') == -1:\n # Instance is not specified, list matching and exit.\n raise click.BadParameter('Specify full instance name: xxx#nnn')\n\n app_discovery = discovery.Discovery(context.GLOBAL.zk.conn, app, 'ssh')\n app_discovery.sync()\n\n # Restore default signal mask disabled by python spawning new thread\n # for Zk connection.\n #\n # TODO: should this be done as part of zkutils.connect?\n for sig in range(1, signal.NSIG):\n try:\n signal.signal(sig, signal.SIG_DFL)\n except OSError:\n pass\n\n # TODO: not sure how to handle mutliple instances.\n for (app, hostport) in app_discovery.items():\n _LOGGER.info('%s :: %s', app, hostport)\n if hostport:\n host, port = hostport.split(b':')\n run_ssh(host, port, ssh, list(command))",
"def set_permission(StackId=None, IamUserArn=None, AllowSsh=None, AllowSudo=None, Level=None):\n pass",
"def ssh(args, config):\n print('{}'.format(ssh.__doc__))",
"def edit(argv):\n output = lib.output.CLIoutput(\"vadapter\")\n valid_list = ['assignment_type','component_mask' ,'vfabric_id', 'init_type', 'io_module_id', 'name',\n 'mac', 'promiscuous', 'protocol', 'silent_listener', 'vlan' ,'wwnn',\n 'wwpn','status']\n\n if (len(argv) < 2 ):\n output.completeOutputError(lib.errorhandler.InvalidArgumentCount(3, \"vadapter-name\", syntax=edit.__doc__,\n descape = \"Please specify the vadapter id\"))\n return output\n\n if ( argv[1] == '?' or argv[1] == 'help'):\n output.completeOutputError(lib.errorhandler.InvalidArgumentCount(syntax=edit.__doc__, descape = \"Help\"))\n return output\n\n if argv[2].lower() == 'online':\n if isEditName(argv[1]) == -1:\n print \"Error Not a valid Id\"\n return output \n else:\n dict = {}\n dict['id'] = int(argv[1])\n try:\n result = vfm.py_vfm_vadapter_online(dict)\n except StandardError, e:\n print \"Error!\" ,e\n return output \n else:\n print result\n return output\n\n _parse_edit_or_add_argv(output, argv, valid_list,syntax = edit.__doc__ , call_from = 'edit' ) \n\n return output",
"def ssh_cmd(ctx):\n pass",
"def test_update_virt_realm_remote_access_config(self):\n pass",
"def addAdmin(username, sshId, user, identity):\n if identity:\n env.key_filename = identity\n if user:\n env.user = user\n sudo('adduser --disabled-password --gecos \",,,\" %s' % username)\n sudo('usermod -p \"\" %s' % username)\n sudo('chage -d 0 %s' % username)\n sudo('gpasswd --add %s admin' % username)\n authorizeSshKey(username, sshId)",
"def alter(self,\r\n owner=None,\r\n version=None,\r\n description=None,\r\n permission=None):\r\n url = \"%s/alter\" % self._url\r\n params = {\r\n 'f' : 'json'\r\n }\r\n if owner or\\\r\n version or\\\r\n description or\\\r\n permission:\r\n if owner:\r\n params['ownerName'] = owner\r\n if version:\r\n params['versionName'] = version\r\n if description:\r\n params['description'] = description\r\n if permission:\r\n params['accessPermission'] = permission\r\n res = self._con.post(url, params)\r\n self._properties = None\r\n return res['success']\r\n return False",
"def sgup(sg=\"sg_external_ssh\"):\n ip = os.popen(\"/usr/bin/curl ifconfig.co 2>/dev/null\").readline().strip()\n print(\"My Public IP is : \"+ip)\n client = boto3.client(\"ec2\")\n ippermissions = client.describe_security_groups(GroupNames = [ sg ])[\"SecurityGroups\"][0][\"IpPermissions\"]\n print(\"Revoking old IP from group \"+sg)\n client.revoke_security_group_ingress(GroupName = sg, IpPermissions = ippermissions)\n printr(\"Adding new IP to group \"+sg)\n client.authorize_security_group_ingress(GroupName=sg, IpProtocol=\"-1\", FromPort=0, ToPort=0, CidrIp=ip+\"/32\")",
"def ssh(host_=None):\n run_command_on_selected_server(open_shell, host_=host_)",
"def edit_httpdConf():\n t2 = sp.Popen(\n [\n '/opt/OAM/oracle/product/11.1.1/as_1/webgate/ihs/tools/setup/InstallTools/EditHttpConf -f /opt/WebSphere/HTTPServer/conf/httpd.conf -w /opt/OAM/oracle/Middleware/Oracle_OAMWebGate1 -oh /opt/OAM/oracle/product/11.1.1/as_1/ -ws ihs'\n ],\n shell=True,\n stdout=sp.PIPE,\n stderr=sp.PIPE\n )\n stdout_value, stderr_value = t2.communicate()",
"def executeOnAgent(self, cmd, ip):\n sshadd = \"ssh-add \" + self.config.get(\"SSH\", \"privatekey\")\n self.shell_execute(sshadd)\n \n sshAgentConnection = \"ssh -o StrictHostKeyChecking=no \" + self.config.get('ACS', 'username') + '@' + ip\n self.log.debug(\"SSH Connection to agent: \" + sshAgentConnection)\n \n self.log.debug(\"Command to run on agent: \" + cmd)\n \n sshCmd = sshAgentConnection + ' \\'' + cmd + '\\''\n self.shell_execute(\"exit\")\n result = self.executeOnMaster(sshCmd)\n\n return result",
"def ssh_config(self, arguments):\n instance_name = arguments['<instance>']\n instance_name = self.activate(instance_name)\n\n print(utils.config_ssh_string(self.config_ssh))",
"def ssh():\n env['remote_port'] = env['port_map']['22']\n\n sys.stdout.write('Connecting to SSH session on remote port %(remote_port)s\\n' % env)\n\n run('chmod 600 %(pair_private_key)s' % env)\n\n client = paramiko.SSHClient()\n client.load_system_host_keys()\n client.connect(\n hostname=env['relay_server'],\n port=int(env['remote_port']),\n username=env['pair_user'],\n key_filename=env['pair_private_key']\n )\n\n channel = client.invoke_shell()\n posix_shell(channel)"
] | [
"0.59500366",
"0.5788264",
"0.5785551",
"0.5748906",
"0.5721119",
"0.5685838",
"0.56322545",
"0.5620057",
"0.5607988",
"0.5520972",
"0.5506001",
"0.54976606",
"0.54757434",
"0.5405314",
"0.53769195",
"0.53689694",
"0.5329173",
"0.5327672",
"0.5293718",
"0.5277495",
"0.5255005",
"0.5228237",
"0.522739",
"0.519979",
"0.51972085",
"0.51893586",
"0.5165038",
"0.51588684",
"0.5148347",
"0.51451725"
] | 0.687422 | 0 |