/OpenAISurveyWrapper-0.0.6.tar.gz/OpenAISurveyWrapper-0.0.6/CONTRIBUTING.md
# How to contribute

## How to get started

Before anything else, please install the git hooks that run automatic scripts during each commit and merge to strip the notebooks of superfluous metadata (and avoid merge conflicts). After cloning the repository, run the following command inside it:

```
nbdev_install_git_hooks
```

## Did you find a bug?

* Ensure the bug was not already reported by searching on GitHub under Issues.
* If you're unable to find an open issue addressing the problem, open a new one. Be sure to include a title and clear description, as much relevant information as possible, and a code sample or an executable test case demonstrating the expected behavior that is not occurring.
* Be sure to add the complete error messages.

#### Did you write a patch that fixes a bug?

* Open a new GitHub pull request with the patch.
* Ensure that your PR includes a test that fails without your patch and passes with it.
* Ensure the PR description clearly describes the problem and solution. Include the relevant issue number if applicable.

## PR submission guidelines

* Keep each PR focused. While it's more convenient, do not combine several unrelated fixes together. Create as many branches as needed to keep each PR focused.
* Do not mix style changes/fixes with "functional" changes. Such PRs are very difficult to review and will most likely be rejected.
* Do not add/remove vertical whitespace. Preserve the original style of the file you edit as much as you can.
* Do not turn an already submitted PR into your development playground. If, after you submitted a PR, you discover that more work is needed, close the PR, do the required work, and then submit a new PR. Otherwise each of your commits requires attention from the maintainers of the project.
* If, however, you submitted a PR and received a request for changes, you should proceed with commits inside that PR, so that the maintainer can see the incremental fixes and won't need to review the whole PR again. In the exceptional case where you realize it will take many commits to complete the requests, it's probably best to close the PR, do the work, and then submit it again. Use common sense when choosing one way over the other.

## Do you want to contribute to the documentation?

* Docs are automatically created from the notebooks in the nbs folder.
/Notification/notification.py
from entegywrapper.schemas.profile import ProfileIdentifier


def send_notification(
    self,
    title: str,
    message: str,
    *,
    profile_id: str | None = None,
    external_reference: str | None = None,
    internal_reference: str | None = None,
    badge_reference: str | None = None,
    target_page: dict[str, str | int] | None = None
) -> str:
    """
    Sends a notification to the specified profile.

    Parameters
    ----------
        `title` (`str`): the title of the notification

        `message` (`str`): the message of the notification

        `profile_id` (`str`, optional): the profileId of the profile to send the notification to; defaults to `None`

        `external_reference` (`str`, optional): the externalReference of the profile to send the notification to; defaults to `None`

        `internal_reference` (`str`, optional): the internalReference of the profile to send the notification to; defaults to `None`

        `badge_reference` (`str`, optional): the badgeReference of the profile to send the notification to; defaults to `None`

        `target_page` (`dict[str, str | int]`, optional): the page to view when the notification is clicked; defaults to `None`

    The format of `target_page` is as follows:
    ```python
        {
            "templateType": "Exhibitors",
            "moduleId": 1  # could be externalReference instead
        }
    ```

    Raises
    ------
        `ValueError`: if no identifier is specified

    Returns
    -------
        `str`: API response message
    """
    data = {
        "title": title,
        "message": message,
        "alertMessage": "This is an alert message",
    }

    if profile_id is not None:
        data["profileReferences"] = {"profileId": profile_id}
    elif external_reference is not None:
        data["profileReferences"] = {"externalReference": external_reference}
    elif internal_reference is not None:
        data["profileReferences"] = {"internalReference": internal_reference}
    elif badge_reference is not None:
        data["profileReferences"] = {"badgeReference": badge_reference}
    else:
        raise ValueError("Please specify an identifier")

    if target_page is not None:
        data["viewTargetPage"] = target_page

    response = self.post(self.api_endpoint + "/v2/Notification/SendBulk", data=data)

    return response["message"]


def send_bulk_notification(
    self,
    title: str,
    message: str,
    profile_references: list[dict[ProfileIdentifier, str]],
    *,
    target_page: dict[str, str | int] | None = None
) -> str:
    """
    Sends a notification to the specified profiles.

    Parameters
    ----------
        `title` (`str`): the title of the notification

        `message` (`str`): the message of the notification

        `profile_references` (`list[dict[str, str]]`): the profile references to send the notification to

        `target_page` (`dict[str, str | int]`, optional): the page to view when the notification is clicked; defaults to `None`

    The format of `profile_references` is as follows:
    ```python
        [
            { "profileId": "1234567890" },
            { "externalReference": "1234567890" },
            { "badgeReference": "1234567890" },
            { "internalReference": "1234567890" }
        ]
    ```

    The format of `target_page` is as follows:
    ```python
        {
            "templateType": "Exhibitors",
            "moduleId": 1  # could be externalReference instead
        }
    ```

    Returns
    -------
        `str`: API response message
    """
    data = {
        "profileReferences": profile_references,
        "title": title,
        "message": message,
        "alertMessage": "This is an alert message -- it is not shown anywhere"
        " or documented in the API docs, but it is required.",
    }

    if target_page is not None:
        data["viewTargetPage"] = target_page

    response = self.post(self.api_endpoint + "/v2/Notification/SendBulk", data=data)

    return response["message"]
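The two helpers above are written as methods of a wrapper client (they call `self.post` against `self.api_endpoint`). The sketch below shows how they would typically be invoked; `client` is a hypothetical instance of that wrapper and every identifier is a placeholder, not a real profile reference.

```python
# `client` is a hypothetical wrapper instance exposing these methods;
# all identifiers below are placeholders.
message = client.send_notification(
    "Session starting",
    "Your session begins in 10 minutes.",
    profile_id="profile-123",
    target_page={"templateType": "Exhibitors", "moduleId": 1},
)

message = client.send_bulk_notification(
    "Venue change",
    "The keynote has moved to Hall B.",
    profile_references=[
        {"externalReference": "ext-001"},
        {"badgeReference": "badge-042"},
    ],
)
```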
/suds2-0.7.1.zip/suds2-0.7.1/suds/options.py
from suds.cache import Cache, NoCache from suds.properties import * from suds.store import DocumentStore, defaultDocumentStore from suds.transport import Transport from suds.wsse import Security from suds.xsd.doctor import Doctor class TpLinker(AutoLinker): """ Transport (auto) linker used to manage linkage between transport objects Properties and those Properties that contain them. """ def updated(self, properties, prev, next): if isinstance(prev, Transport): tp = Unskin(prev.options) properties.unlink(tp) if isinstance(next, Transport): tp = Unskin(next.options) properties.link(tp) class Options(Skin): """ Options: - B{cache} - The XML document cache. May be set to None for no caching. - type: L{Cache} - default: L{NoCache()} - B{documentStore} - The XML document store used to access locally stored documents without having to download them from an external location. May be set to None for no internal suds library document store. - type: L{DocumentStore} - default: L{defaultDocumentStore} - B{extraArgumentErrors} - Raise exceptions when extra arguments are detected when invoking a web service operation, compared to the operation's WSDL schema definition. - type: I{bool} - default: True - B{faults} - Raise faults raised by server, else return tuple from service method invocation as (httpcode, object). - type: I{bool} - default: True - B{service} - The default service name. - type: I{str} - default: None - B{port} - The default service port name, not tcp port. - type: I{str} - default: None - B{location} - This overrides the service port address I{URL} defined in the WSDL. - type: I{str} - default: None - B{transport} - The message transport. - type: L{Transport} - default: None - B{soapheaders} - The soap headers to be included in the soap message. - type: I{any} - default: None - B{wsse} - The web services I{security} provider object. - type: L{Security} - default: None - B{doctor} - A schema I{doctor} object. - type: L{Doctor} - default: None - B{xstq} - The B{x}ml B{s}chema B{t}ype B{q}ualified flag indicates that the I{xsi:type} attribute values should be qualified by namespace. - type: I{bool} - default: True - B{prefixes} - Elements of the soap message should be qualified (when needed) using XML prefixes as opposed to xmlns="" syntax. - type: I{bool} - default: True - B{retxml} - Flag that causes the I{raw} soap envelope to be returned instead of the python object graph. - type: I{bool} - default: False - B{prettyxml} - Flag that causes I{pretty} xml to be rendered when generating the outbound soap envelope. - type: I{bool} - default: False - B{autoblend} - Flag that ensures that the schema(s) defined within the WSDL import each other. - type: I{bool} - default: False - B{cachingpolicy} - The caching policy. - type: I{int} - 0 = Cache XML documents. - 1 = Cache WSDL (pickled) object. - default: 0 - B{plugins} - A plugin container. - type: I{list} - default: I{list()} - B{nosend} - Create the soap envelope but do not send. When specified, method invocation returns a I{RequestContext} instead of sending it. - type: I{bool} - default: False - B{unwrap} - Enable automatic parameter unwrapping when possible. Enabled by default. If disabled, no input or output parameters are ever automatically unwrapped. 
- type: I{bool} - default: True """ def __init__(self, **kwargs): domain = __name__ definitions = [ Definition('cache', Cache, NoCache()), Definition('documentStore', DocumentStore, defaultDocumentStore), Definition('extraArgumentErrors', bool, True), Definition('faults', bool, True), Definition('transport', Transport, None, TpLinker()), Definition('service', (int, basestring), None), Definition('port', (int, basestring), None), Definition('location', basestring, None), Definition('soapheaders', (), ()), Definition('wsse', Security, None), Definition('doctor', Doctor, None), Definition('xstq', bool, True), Definition('prefixes', bool, True), Definition('retxml', bool, False), Definition('prettyxml', bool, False), Definition('autoblend', bool, False), Definition('cachingpolicy', int, 0), Definition('plugins', (list, tuple), []), Definition('nosend', bool, False), Definition('unwrap', bool, True)] Skin.__init__(self, domain, definitions, kwargs)
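For context, the options documented above are normally supplied as keyword arguments when constructing a suds client, which forwards them into this `Options` skin. The snippet below is a hedged sketch of that usage; the WSDL URL is a placeholder.

```python
# Hedged sketch: keyword arguments passed to Client() populate the Options skin.
from suds.client import Client
from suds.cache import NoCache

client = Client(
    "http://example.com/service?wsdl",  # placeholder URL
    faults=False,        # return (httpcode, object) tuples instead of raising faults
    cache=NoCache(),     # disable XML document caching
    prettyxml=True,      # render pretty XML in the outbound SOAP envelope
)
```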
/pythondata_misc_opentitan-0.0.post5512-py3-none-any.whl/pythondata_misc_opentitan/resources/sw/vendor/riscv_compliance/riscv-test-suite/README.md
# RISC-V Test Suites

Tests are grouped into different functional test suites targeting the different subsets of the full RISC-V specifications. There will be ISA and privilege suites.

For information on the test framework and other documentation on the compliance tests look at: [../doc/README.adoc](../doc/README.adoc)

Currently there are five solid test suites checked into this repository along with a few deprecated/WIP tests. If you are looking to check compliance of RV32I in user mode then run the suites: RV32I, RV32ICSR and RV32IFENCEI.

To see the coverage of the suites see the riscv-test-suite coverage directory for the summary/detailed reports. These are generated by Imperas by using the github.com/google/riscv-dv UVM coverage testbench and the Mentor Questa SystemVerilog simulator.

Test suites status:

Pretty Solid:

* RV32I (significant improvements (Nov 2019) by Imperas)
    * 48 focused tests, using the correct style/macros, excellent coverage of most instructions
    * Coverage 97.23%
* RV32IM (developed by Imperas)
    * 8 focused tests, using the correct style/macros, excellent coverage
    * Coverage 89.95%
* RV32IMC (developed by Imperas)
    * 25 focused tests, using the correct style/macros
    * Coverage 59.68%
* RV32ICSR
    * 6 focused tests
* RV32IFENCEI
    * 1 test

Work in progress (64-bit tests):

* RV64I (developed by Imperas)
    * 8 focused tests, using the correct style/macros
* RV64IM (developed by Imperas)
    * 3 focused tests, using the correct style/macros

To be worked on:

* RV64C
* RV32A
* RV64A
* RV64F
* RV64D
* RV32E
* RV32EC
* RV32EA
* RV32EF
* RV32ED
/easylearn-0.1.20a0.tar.gz/easylearn-0.1.20a0/eslearn/machine_learning/classfication/pca_relieff_svc_cv.py
import numpy as np from sklearn import svm from sklearn.model_selection import KFold from sklearn import preprocessing import nibabel as nib import os from eslearn.utils.lc_evaluation_model_performances import eval_performance from eslearn.utils.lc_niiProcessor import NiiProcessor import eslearn.utils.el_preprocessing as elprep class PcaReliffSvcCV(): """ This class is used to execute pca-svc-based classification training and testing. NOTE: Input data must be in the .nii or similar format. TODO: Muticlass classification. Parameters ---------- path_patients : Path of the image files of patients path_HC : Path of the image files of HC path_mask : Path of the mask path_out : Path to save results data_preprocess_method: str How to preprocess features 'StandardScaler' OR 'MinMaxScaler'. data_preprocess_level: str Which level to preprocess features. 'group' or 'subject' is_dim_reduction : bool If perfrome dimension reduction. is_feature_selection : bool if perfrome feature selection. n_features_to_select: int number of features to select n_components: float from 0 to 1 If is_dim_reduction, then how many components to remain. num_of_kfold: int Number of the k in k-fold cross-validation is_showfig_finally: bool If show figure after all iteration finished. is_showfig_in_each_fold: bool If show figure in each fold. Returns ------- Save all classification results and figures to local disk. """ def __init__(self, path_patients=None, path_HC=None, path_mask=None, path_out=None, data_preprocess_method='MinMaxScaler', data_preprocess_level='subject', num_of_kfold=5, is_dim_reduction=1, components=0.95, is_feature_selection=False, n_features_to_select=None, is_showfig_finally=True, is_showfig_in_each_fold=False): self.path_patients = path_patients self.path_HC = path_HC self.path_mask = path_mask self.path_out = path_out self.data_preprocess_method = data_preprocess_method self.data_preprocess_level = data_preprocess_level self.num_of_kfold = num_of_kfold self.is_dim_reduction = is_dim_reduction self.components = components self.is_feature_selection = is_feature_selection self.n_features_to_select = n_features_to_select self.is_showfig_finally = is_showfig_finally self.is_showfig_in_each_fold = is_showfig_in_each_fold def main_function(self): """ """ print('Training model and testing...\n') # load data and mask data_all, label_all, self.orig_shape, self.mask_obj, self.mask_all = self._load_nii_and_gen_label() # KFold Cross Validation self.label_test_all = np.array([], dtype=np.int16) train_index = np.array([], dtype=np.int16) test_index = np.array([], dtype=np.int16) self.decision = np.array([], dtype=np.int16) self.prediction = np.array([], dtype=np.int16) self.accuracy = np.array([], dtype=np.float16) self.sensitivity = np.array([], dtype=np.float16) self.specificity = np.array([], dtype=np.float16) self.AUC = np.array([], dtype=np.float16) self.coef = [] kf = KFold(n_splits=self.num_of_kfold, shuffle=True, random_state=0) for i, (tr_ind, te_ind) in enumerate(kf.split(data_all)): print(f'------{i+1}/{self.num_of_kfold}...------\n') train_index = np.int16(np.append(train_index, tr_ind)) test_index = np.int16(np.append(test_index, te_ind)) feature_train = data_all[tr_ind, :] label_train = label_all[tr_ind] feature_test = data_all[te_ind, :] label_test = label_all[te_ind] self.label_test_all = np.int16(np.append(self.label_test_all, label_test)) # Resampling training data feature_train, label_train = self.re_sampling(feature_train, label_train) # data_preprocess feature_train, feature_test = 
elprep.Preprocessing().data_preprocess(feature_train, feature_test, self.data_preprocess_method, self.data_preprocess_level) # Dimension reduction using PCA if self.is_dim_reduction: feature_train, feature_test, model_dim_reduction = self.dimReduction_PCA( feature_train, feature_test, self.components) print(f'After dimension reduction, the feature number is {feature_train.shape[1]}') else: print('No dimension reduction perfromed\n') print(f'The feature number is {feature_train.shape[1]}') # Feature selection if self.is_feature_selection: feature_train, feature_test, mask, n_features_origin = self.feature_selection_relief(feature_train, label_train, feature_test, self.n_features_to_select) # Train and test print('training and testing...\n') model = self.training(feature_train, label_train) # Get weight if self.is_feature_selection: coef = np.zeros([n_features_origin,]) coef[mask] = model.coef_ else: coef = model.coef_ if self.is_dim_reduction: self.coef.append(model_dim_reduction.inverse_transform(coef)) else: self.coef.append(coef) pred, dec = self.testing(model, feature_test) self.prediction = np.append(self.prediction, np.array(pred)) self.decision = np.append(self.decision, np.array(dec)) # Evaluating classification performances acc, sens, spec, auc = eval_performance(label_test, pred, dec, accuracy_kfold=None, sensitivity_kfold=None, specificity_kfold=None, AUC_kfold=None, verbose=1, is_showfig=self.is_showfig_in_each_fold) self.accuracy = np.append(self.accuracy, acc) self.sensitivity = np.append(self.sensitivity, sens) self.specificity = np.append(self.specificity, spec) self.AUC = np.append(self.AUC, auc) # Save results and fig to local path self.save_results() self._weight2nii(dimension_nii_data=(61, 73, 61)) self.save_fig() print("--" * 10 + "Done!" + "--" * 10 ) return self def _load_nii_and_gen_label(self): """ Load nii and generate label """ data1, _ = NiiProcessor().main(self.path_patients) data1 = np.squeeze( np.array([np.array(data1).reshape(1, -1) for data1 in data1])) data2, _ = NiiProcessor().main(self.path_HC) data2 = np.squeeze( np.array([np.array(data2).reshape(1, -1) for data2 in data2])) data = np.vstack([data1, data2]) # data in mask mask, mask_obj = NiiProcessor().read_sigle_nii(self.path_mask) orig_shape = mask.shape mask = mask >= 0.2 mask = np.array(mask).reshape(-1,) data_in_mask = data[:, mask] # label label = np.hstack( [np.ones([len(data1), ]), np.ones([len(data2), ])-1]) return data_in_mask, label, orig_shape, mask_obj, mask def re_sampling(self, feature, label): """ Used to over-sampling unbalanced data """ from imblearn.over_sampling import RandomOverSampler ros = RandomOverSampler(random_state=0) feature_resampled, label_resampled = ros.fit_resample(feature, label) from collections import Counter print(f"After re-sampling, the sample size are: {sorted(Counter(label_resampled).items())}") return feature_resampled, label_resampled def data_preprocess(self, feature_train, feature_test, data_preprocess_method, data_preprocess_level): ''' This function is used to preprocess features Method 1: preprocess data in group level, one feature by one feature. Method 2: preprocess data in subject level. ''' # Method 1: Group level preprocessing. if data_preprocess_level == 'group': feature_train, model = elscaler.scaler(feature_train, data_preprocess_method) feature_test = model.transform(feature_test) elif data_preprocess_level == 'subject': # Method 2: Subject level preprocessing. 
scaler = preprocessing.StandardScaler().fit(feature_train.T) feature_train = scaler.transform(feature_train.T) .T scaler = preprocessing.StandardScaler().fit(feature_test.T) feature_test = scaler.transform(feature_test.T) .T else: print('Please provide which level to preprocess features\n') return return feature_train, feature_test def feature_selection_relief(self, feature_train, label_train, feature_test, n_features_to_select=None): """ This functio is used to select the features using relief-based feature selection algorithms """ from skrebate import ReliefF [n_sub, n_features] = np.shape(feature_train) if n_features_to_select is None: n_features_to_select = np.int(np.round(n_features / 10)) if isinstance(n_features_to_select, np.float): n_features_to_select = np.int(np.round(n_features * n_features_to_select)) fs = ReliefF(n_features_to_select=n_features_to_select, n_neighbors=100, discrete_threshold=10, verbose=True, n_jobs=-1) fs.fit(feature_train, label_train) feature_train = fs.transform(feature_train) feature_test = fs.transform(feature_test) mask = fs.top_features_[:n_features_to_select] return feature_train, feature_test, mask, n_features def dimReduction_PCA(self, train_X, test_X, pca_n_component): from eslearn.utils.lc_dimreduction import pca_apply x_train, x_test, trained_pca = pca_apply( train_X, test_X, pca_n_component) return x_train, x_test, trained_pca def training(self, train_X, train_y): # Classfier is SVC svc = svm.SVC(kernel='linear', C=1, class_weight='balanced', max_iter=5000, random_state=0) svc.fit(train_X, train_y) return svc def testing(self, model, test_X): predict = model.predict(test_X) decision = model.decision_function(test_X) return predict, decision def save_results(self): # Save performances and others import pandas as pd performances_to_save = np.concatenate([[self.accuracy], [self.sensitivity], [self.specificity], [self.AUC]], axis=0).T de_pred_label_to_save = np.concatenate([[self.decision], [self.prediction], [self.label_test_all]], axis=0).T performances_to_save = pd.DataFrame(performances_to_save, columns=[['Accuracy','Sensitivity', 'Specificity', 'AUC']]) de_pred_label_to_save = pd.DataFrame(de_pred_label_to_save, columns=[['Decision','Prediction', 'Sorted_Real_Label']]) performances_to_save.to_csv(os.path.join(self.path_out, 'Performances.txt'), index=False, header=True) de_pred_label_to_save.to_csv(os.path.join(self.path_out, 'Decision_prediction_label.txt'), index=False, header=True) def _weight2nii(self, dimension_nii_data=(61, 73, 61)): """ Transfer weight matrix to nii file I used the mask file as reference to generate the nii file """ weight = np.squeeze(self.coef) weight_mean = np.mean(weight, axis=0) # to orignal space weight_mean_orig = np.zeros(np.size(self.mask_all)) weight_mean_orig[self.mask_all] = weight_mean weight_mean_orig = np.reshape(weight_mean_orig, dimension_nii_data) # save to nii weight_nii = nib.Nifti1Image(weight_mean_orig, affine=self.mask_obj.affine) weight_nii.to_filename(os.path.join(self.path_out, 'weight.nii')) def save_fig(self): # Save ROC and Classification 2D figure acc, sens, spec, auc = eval_performance(self.label_test_all, self.prediction, self.decision, self.accuracy, self.sensitivity, self.specificity, self.AUC, verbose=0, is_showfig=self.is_showfig_finally, is_savefig=1, out_name=os.path.join(self.path_out, 'Classification_performances.pdf')) # if __name__ == '__main__': # ============================================================================= # All inputs path_patients = 
r'D:\WorkStation_2018\Workstation_Old\WorkStation_2018-05_MVPA_insomnia_FCS\Degree\degree_gray_matter\Zdegree\Z_degree_patient\Weighted' path_HC = r'D:\WorkStation_2018\Workstation_Old\WorkStation_2018-05_MVPA_insomnia_FCS\Degree\degree_gray_matter\Zdegree\Z_degree_control\Weighted' path_mask = r'G:\Softer_DataProcessing\spm12\spm12\tpm\Reslice3_TPM_greaterThan0.2.nii' path_out = r'D:\workstation_b\haoge\FC' # ============================================================================= clf = PcaReliffSvcCV(path_patients=path_patients, path_HC=path_HC, path_mask=path_mask, path_out=path_out, is_feature_selection=True, n_features_to_select=0.99, components=0.75) clf.main_function() print(f"mean accuracy = {np.mean(clf.accuracy)}") print(f"std of accuracy = {np.std(clf.accuracy)}") print(f"mean sensitivity = {np.mean(clf.sensitivity)}") print(f"std of sensitivity = {np.std(clf.sensitivity)}") print(f"mean specificity = {np.mean(clf.specificity)}") print(f"std of specificity = {np.std(clf.specificity)}") print(f"mean AUC = {np.mean(clf.AUC)}") print(f"std of AUC = {np.std(clf.AUC)}")
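The `data_preprocess_level` distinction described in the class docstring (group level vs. subject level) can be illustrated with plain scikit-learn. This sketch is illustrative only and is not part of eslearn.

```python
# Illustrative only (not part of eslearn): the two preprocessing levels.
import numpy as np
from sklearn import preprocessing

X = np.random.rand(10, 5)  # 10 subjects x 5 features

# Group level: one scaler per feature, fitted across subjects (column-wise).
group_scaled = preprocessing.StandardScaler().fit_transform(X)

# Subject level: each subject's feature vector is scaled independently,
# achieved by transposing, as done in data_preprocess above.
subject_scaled = preprocessing.StandardScaler().fit_transform(X.T).T
```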
/teamrubber.pdberrorlog-1.1.tar.gz/teamrubber.pdberrorlog-1.1/README.txt
Introduction
============

Adds the "errorlog" command to pdb sessions. This is only useful in functional test cases in PloneTestCase.::

    (Pdb) errorlog
    (Pdb) admin.open("http://nohost/plone/createObject")
    *** HTTPError: HTTP Error 500: Internal Server Error
    (Pdb) admin.open("http://nohost/plone/createObject")
    *** HTTPError: HTTP Error 500: Internal Server Error
    (Pdb) admin.open("http://nohost/plone/createObject")
    *** HTTPError: HTTP Error 500: Internal Server Error
    (Pdb) errorlog
    Error: 1218794437.680.454937341407 ( Type name not specified )
    Error: 1218794437.10.341611383065 ( Type name not specified )
    Error: 1218794436.540.661922508086 ( Type name not specified )
    (Pdb) errorlog 1218794437.10.341611383065
    Traceback (innermost last):
      Module ZPublisher.Publish, line 115, in publish
      Module ZPublisher.mapply, line 88, in mapply
      Module ZPublisher.Publish, line 41, in call_object
      Module Products.CMFFormController.FSControllerPythonScript, line 104, in __call__
      Module Products.CMFFormController.Script, line 145, in __call__
      Module Products.CMFCore.FSPythonScript, line 108, in __call__
      Module Shared.DC.Scripts.Bindings, line 311, in __call__
      Module Shared.DC.Scripts.Bindings, line 348, in _bindAndExec
      Module Products.CMFCore.FSPythonScript, line 164, in _exec
      Module None, line 10, in createObject
       - <FSControllerPythonScript at /plone/createObject>
       - Line 10
    Exception: Type name not specified

Yay?

Pre-requisites
==============

A Plone functional test case. This can work with zope in theory, but it's written with plone assumptions in mind.
/oda_api-1.1.41-py3-none-any.whl/oda_api/token.py
import json import base64 import logging import os from typing import Union, Tuple from os import environ, getcwd, path, remove, chmod from enum import Enum from types import FunctionType import time import traceback from jwt.exceptions import ExpiredSignatureError # type: ignore default_algorithm = 'HS256' token_email_options_numeric = ['tem', 'intsub'] token_email_options_flags = ['mssub', 'msdone', 'mstout', 'msfail'] logger = logging.getLogger("oda_api.token") class TokenLocation(Enum): ODA_ENV_VAR = "environment variable ODA_TOKEN" FILE_CUR_DIR = "file in current directory" FILE_HOME = "file in home" try: import jwt except ImportError: jwt = None # type: ignore logger.debug("no pyjwt installed: some token operations will not be available") def format_token(decoded_oda_token: dict): return json.dumps(decoded_oda_token, indent=4, sort_keys=True) ## decoding def decode_oda_token(token: str, secret_key=None, allow_invalid=False) -> dict: if token is None: raise RuntimeError('provided token is None') if jwt is None: logger.info("decoding token without jwt") return json.loads(base64.b64decode(token.split(".")[1]+"=").decode()) if secret_key is None: secret_key = "" allow_invalid = True decode_options = {} if allow_invalid: decode_options['verify_signature'] = False decode_options['verify_exp'] = False try: return jwt.decode(token, secret_key, algorithms=[default_algorithm], options=decode_options) except ExpiredSignatureError as e: logger.warning("problem decoding token: %s", repr(e)) if allow_invalid: raise RuntimeError("expired token despite no verification?") except Exception as e: traceback.format_exc() logger.error(f'unexplained exception in decode token: %s\n%s\n%s', token, repr(e), traceback.format_exc()) raise raise RuntimeError() def decode_oauth2_token(token: str): # usually comes in cookies['_oauth2_proxy'] return json.loads(base64.b64decode(token.split(".")[0]+"=").decode()) def get_token_roles(decoded_token): # extract role(s) roles = None if 'roles' in decoded_token: if isinstance(decoded_token['roles'], str): roles = decoded_token['roles'].split(',') elif isinstance(decoded_token['roles'], list): roles = decoded_token['roles'] roles = [r.strip() for r in roles] return roles # TODO expand this with unchecked fields def compare_token(decoded_token1, decoded_token2): """ performs a comparison of the payloads entries of token1, with token2 returns a dict, which also contains, for each entry, the result of the relative comparison: - missing_keys: list of token1 missing keys (keys from token2 not found within token1) - extra_keys: list of token1 extra keys (keys from token1 not found within token2) - exp (expiration_time): 1 if token1 expires later than token2, -1 if token1 expires earlier than token2, 0 if they have the same expiration time - roles: 1 if token1 contains at least all the roles of token2, 0 if both tokens have the same roles, -1 if token1 misses some roles contained within token2 - email-related options: contains one entry per email option, depending on the type of option (can be a numeric or a not numeric value), a different type of result will be assigned: - True/False: if the values are matching or not (e.g. 
mssub options) - 1/0/-1: if the the numeric value from token1 is higher/the same/lower respectively, than the corresponding one in token2 """ result = {'missing_keys': [], 'extra_keys': []} if decoded_token1.keys() != decoded_token2.keys(): result['missing_keys'] = list(set(decoded_token2.keys()) - set(decoded_token1.keys())) result['extra_keys'] = list(set(decoded_token1.keys()) - set(decoded_token2.keys())) if 'sub' in decoded_token1 and 'sub' in decoded_token2: if decoded_token1['sub'] == decoded_token2['sub']: result['sub'] = True else: result['sub'] = False if 'email' in decoded_token1 and 'email' in decoded_token2: if decoded_token1['email'] == decoded_token2['email']: result['email'] = True else: result['email'] = False if 'name' in decoded_token1 and 'name' in decoded_token2: if decoded_token1['name'] == decoded_token2['name']: result['name'] = True else: result['name'] = False # check email options for opt in token_email_options_numeric: if opt in decoded_token1 and opt in decoded_token2: if decoded_token1[opt] > decoded_token2[opt]: result[opt] = 1 elif decoded_token1[opt] < decoded_token2[opt]: result[opt] = -1 else: result[opt] = 0 for opt in token_email_options_flags: if opt in decoded_token1 and opt in decoded_token2: if decoded_token1[opt] == decoded_token2[opt]: result[opt] = True else: result[opt] = False time_result_code = None if 'exp' in decoded_token1 and 'exp' in decoded_token2: current_time = time.time() decoded_token1_expires_in_s = decoded_token1['exp'] - current_time decoded_token2_expires_in_s = decoded_token2['exp'] - current_time if decoded_token1_expires_in_s > decoded_token2_expires_in_s: time_result_code = 1 elif decoded_token1_expires_in_s < decoded_token2_expires_in_s: time_result_code = -1 else: time_result_code = 0 result['exp'] = time_result_code decoded_token1_roles = get_token_roles(decoded_token1) decoded_token2_roles = get_token_roles(decoded_token2) roles_result_code = None if decoded_token1_roles is not None and decoded_token2_roles is not None: token1_roles_difference = set(decoded_token1_roles) - set(decoded_token2_roles) token2_roles_difference = set(decoded_token2_roles) - set(decoded_token1_roles) if token1_roles_difference != set() and token2_roles_difference == set(): roles_result_code = 1 elif len(token1_roles_difference) < len(token2_roles_difference) or \ (len(token1_roles_difference) >= len(token2_roles_difference) and token2_roles_difference != set()): roles_result_code = -1 elif len(token1_roles_difference) == len(token2_roles_difference) and \ token1_roles_difference == set() and token2_roles_difference == set(): roles_result_code = 0 result['roles'] = roles_result_code return result def rewrite_token(new_token, old_token=None, token_write_methods: Union[Tuple[TokenLocation, ...], TokenLocation] = None, discard_discovered_token=False, force_rewrite=False ): discover_method = None if old_token is None: old_token, discover_method = discover_token_and_method(allow_invalid=True) if old_token is not None: current_decoded_token = decode_oda_token(old_token, allow_invalid=True) current_decoded_token_roles = get_token_roles(current_decoded_token) new_decoded_token = decode_oda_token(new_token, allow_invalid=True) new_decoded_token_roles = get_token_roles(new_decoded_token) comparison_result = compare_token(new_decoded_token, current_decoded_token) new_token_missing_keys = comparison_result.get('missing_keys') if len(new_token_missing_keys) > 0: missing_keys_warning_msg = f"The following keys are missing within the new token: 
{new_token_missing_keys}" if force_rewrite: missing_keys_warning_msg += ", but it will be written" logger.warning(missing_keys_warning_msg) else: logger.warning(missing_keys_warning_msg) raise RuntimeError("The new token is missing some of the keys present instead on the discovered token") new_token_extra_keys = comparison_result.get('extra_keys') if len(new_token_extra_keys) > 0: extra_keys_warning_msg = f"The following keys are not contained within the discovered token: {new_token_extra_keys}" if force_rewrite: extra_keys_warning_msg += ", but it will be written" logger.warning(extra_keys_warning_msg) else: logger.warning(extra_keys_warning_msg) raise RuntimeError("The new token is missing some of the keys present instead on the discovered token") if comparison_result['exp'] == -1: warning_msg = "The new token will expire before the current one" if force_rewrite: warning_msg += ", but it will be written" logger.warning(warning_msg) else: raise RuntimeError("Expiration time of the refreshed token is lower than " "the currently available one, please pass force=True to overwrite") if comparison_result['roles'] == 1: logger.warning("The new token has more roles than the current one:\n" f"roles current token: {current_decoded_token_roles}\n" f"roles new token: {new_decoded_token_roles}") elif comparison_result['roles'] == -1: warning_msg = "The new token has less roles than the current one:\n" \ f"roles current token: {current_decoded_token_roles}\n" \ f"roles new token: {new_decoded_token_roles}\n" if force_rewrite: warning_msg += ", but it will be written" logger.warning(warning_msg) else: logger.warning(warning_msg) raise RuntimeError("The roles of the new token are less than those of the current one," " please pass force=True to overwrite") # check email options email_options_warning_msg = "The new token has a different value for the following email option-related options:\n" unmatching_email_options = [] for opt in token_email_options_numeric: if opt in comparison_result and comparison_result[opt] != 0: unmatching_email_options.append(opt) for opt in token_email_options_flags: if opt in comparison_result and not comparison_result[opt]: unmatching_email_options.append(opt) if len(unmatching_email_options) > 0: email_options_warning_msg += f"{unmatching_email_options}\n" if force_rewrite: email_options_warning_msg += "but it will be used.\n" logger.warning(email_options_warning_msg) else: logger.warning(email_options_warning_msg) raise RuntimeError(email_options_warning_msg) if token_write_methods is not None: if old_token is not None: with open("old-oda-token_" + str(time.time()), 'w') as ft: ft.write(old_token) if isinstance(token_write_methods, TokenLocation): token_write_methods = token_write_methods, if discover_method is not None and discard_discovered_token: if discover_method == TokenLocation.ODA_ENV_VAR: environ.pop("ODA_TOKEN", None) elif discover_method == TokenLocation.FILE_CUR_DIR: if not path.exists(path.join(getcwd(), ".oda-token")): raise RuntimeError("oda-token file not found within the current directory after was discovered") remove(path.join(getcwd(), ".oda-token")) elif discover_method == TokenLocation.FILE_HOME: if not path.exists(path.join(environ["HOME"], ".oda-token")): raise RuntimeError("oda-token file not found within the HOME directory after was discovered") remove(path.join(environ["HOME"], ".oda-token")) logger.info(f"Discovered token has been discarded. 
It was stored with the method: {discover_method.value}") for token_write_method in token_write_methods: if token_write_method == TokenLocation.ODA_ENV_VAR: environ["ODA_TOKEN"] = new_token elif token_write_method == TokenLocation.FILE_CUR_DIR: if path.exists(path.join(getcwd(), ".oda-token")): chmod(path.join(getcwd(), ".oda-token"), 0o600) with open(path.join(getcwd(), ".oda-token"), 'w') as ft: ft.write(new_token) chmod(path.join(getcwd(), ".oda-token"), 0o400) elif token_write_method == TokenLocation.FILE_HOME: if path.exists(path.join(environ["HOME"], ".oda-token")): chmod(path.join(environ["HOME"], ".oda-token"), 0o600) with open(path.join(environ["HOME"], ".oda-token"), 'w') as ft: ft.write(new_token) chmod(path.join(environ["HOME"], ".oda-token"), 0o400) logger.info(f"Refreshed token has been re-written with the method: {token_write_method.value}") # sanity check on the newly written token newly_discovered_token = discover_token() if newly_discovered_token != new_token: logger.warning("The discovered token does not match with the newly generated token, " "this is an expected behavior. In case you wish to discard any old token, " "please repeat the same request setting the argument discard_discovered_token to True") def discover_token_and_method( allow_invalid=False, token_discovery_methods=None): failed_methods = [] token = None if token_discovery_methods is None: token_discovery_methods = *(n for n in TokenLocation), else: token_discovery_methods = token_discovery_methods, for n in TokenLocation: if n in token_discovery_methods: try: if n == TokenLocation.ODA_ENV_VAR: token = environ['ODA_TOKEN'].strip() elif n == TokenLocation.FILE_CUR_DIR: with open(path.join(getcwd(), ".oda-token")) as ft: token = ft.read().strip() elif n == TokenLocation.FILE_HOME: with open(path.join(environ["HOME"], ".oda-token")) as ft: token = ft.read().strip() logger.debug("searching for token in %s", n) decoded_token = decode_oda_token(token, allow_invalid=allow_invalid) expires_in_s = decoded_token['exp'] - time.time() if expires_in_s < 0: logger.debug("token expired %.1f h ago!", -expires_in_s / 3600) if allow_invalid: break else: token = None else: logger.info("found token in %s your token payload: %s", n, format_token(decoded_token)) logger.info("token expires in %.1f h", expires_in_s / 3600) break except Exception as e: failed_methods.append(f"{n}: {e}") logger.debug("failed to find token with current method: %s", failed_methods[-1]) token = None if token is None: logger.debug("failed to discover token with any known method") else: logger.debug("discovered token method %s", n) return token, n #TODO: move to dynaconf def discover_token( allow_invalid=False, token_discovery_methods=None): token, discovery_method = discover_token_and_method( allow_invalid=allow_invalid, token_discovery_methods=token_discovery_methods ) return token ## updating def update_token(token, secret_key, payload_mutation: FunctionType, allow_invalid=False): if secret_key is None: raise RuntimeError("unable to update token without valid secret key") try: token_payload = jwt.decode(token, secret_key, algorithms=[default_algorithm]) except ExpiredSignatureError as e: logger.warning("provided token is invalid: %s", e) if allow_invalid: token_payload = jwt.decode(token, secret_key, algorithms=[default_algorithm], options=dict(verify_signature=False, verify_exp=False)) logger.warning("invalid token payload will be used as requested") else: raise RuntimeError("refusing to update invalid token") mutated_token_payload = 
payload_mutation(token_payload) out_token = jwt.encode(mutated_token_payload, secret_key, algorithm=default_algorithm) return out_token
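A short sketch of how the discovery and comparison helpers above fit together; it assumes a token is available through one of the `TokenLocation` methods, and the commented comparison line mirrors the `compare_token` contract documented above.

```python
from oda_api.token import compare_token, decode_oda_token, discover_token

# Look for a token via ODA_TOKEN or the .oda-token files and inspect its
# payload; allow_invalid=True also returns expired tokens.
current = discover_token(allow_invalid=True)
if current is not None:
    decoded = decode_oda_token(current, allow_invalid=True)
    print(decoded.get("roles"), decoded.get("exp"))

    # Comparing two decoded payloads yields the result dict documented in
    # compare_token, e.g. result["exp"] == -1 means the first expires earlier.
    # result = compare_token(decode_oda_token(new_token, allow_invalid=True), decoded)
```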
/pulumi_azure_nextgen-0.6.2a1613157620.tar.gz/pulumi_azure_nextgen-0.6.2a1613157620/pulumi_azure_nextgen/insights/getguest_diagnostics_setting.py
import warnings import pulumi import pulumi.runtime from typing import Any, Mapping, Optional, Sequence, Union from .. import _utilities, _tables from . import outputs __all__ = [ 'GetguestDiagnosticsSettingResult', 'AwaitableGetguestDiagnosticsSettingResult', 'getguest_diagnostics_setting', ] @pulumi.output_type class GetguestDiagnosticsSettingResult: """ Virtual machine guest diagnostics settings resource. """ def __init__(__self__, data_sources=None, id=None, location=None, name=None, os_type=None, proxy_setting=None, tags=None, type=None): if data_sources and not isinstance(data_sources, list): raise TypeError("Expected argument 'data_sources' to be a list") pulumi.set(__self__, "data_sources", data_sources) if id and not isinstance(id, str): raise TypeError("Expected argument 'id' to be a str") pulumi.set(__self__, "id", id) if location and not isinstance(location, str): raise TypeError("Expected argument 'location' to be a str") pulumi.set(__self__, "location", location) if name and not isinstance(name, str): raise TypeError("Expected argument 'name' to be a str") pulumi.set(__self__, "name", name) if os_type and not isinstance(os_type, str): raise TypeError("Expected argument 'os_type' to be a str") pulumi.set(__self__, "os_type", os_type) if proxy_setting and not isinstance(proxy_setting, str): raise TypeError("Expected argument 'proxy_setting' to be a str") pulumi.set(__self__, "proxy_setting", proxy_setting) if tags and not isinstance(tags, dict): raise TypeError("Expected argument 'tags' to be a dict") pulumi.set(__self__, "tags", tags) if type and not isinstance(type, str): raise TypeError("Expected argument 'type' to be a str") pulumi.set(__self__, "type", type) @property @pulumi.getter(name="dataSources") def data_sources(self) -> Optional[Sequence['outputs.DataSourceResponse']]: """ the array of data source object which are configured to collect and send data """ return pulumi.get(self, "data_sources") @property @pulumi.getter def id(self) -> str: """ Azure resource Id """ return pulumi.get(self, "id") @property @pulumi.getter def location(self) -> str: """ Resource location """ return pulumi.get(self, "location") @property @pulumi.getter def name(self) -> str: """ Azure resource name """ return pulumi.get(self, "name") @property @pulumi.getter(name="osType") def os_type(self) -> Optional[str]: """ Operating system type for the configuration """ return pulumi.get(self, "os_type") @property @pulumi.getter(name="proxySetting") def proxy_setting(self) -> Optional[str]: return pulumi.get(self, "proxy_setting") @property @pulumi.getter def tags(self) -> Optional[Mapping[str, str]]: """ Resource tags """ return pulumi.get(self, "tags") @property @pulumi.getter def type(self) -> str: """ Azure resource type """ return pulumi.get(self, "type") class AwaitableGetguestDiagnosticsSettingResult(GetguestDiagnosticsSettingResult): # pylint: disable=using-constant-test def __await__(self): if False: yield self return GetguestDiagnosticsSettingResult( data_sources=self.data_sources, id=self.id, location=self.location, name=self.name, os_type=self.os_type, proxy_setting=self.proxy_setting, tags=self.tags, type=self.type) def getguest_diagnostics_setting(diagnostic_settings_name: Optional[str] = None, resource_group_name: Optional[str] = None, opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetguestDiagnosticsSettingResult: """ Use this data source to access information about an existing resource. :param str diagnostic_settings_name: The name of the diagnostic setting. 
:param str resource_group_name: The name of the resource group. """ __args__ = dict() __args__['diagnosticSettingsName'] = diagnostic_settings_name __args__['resourceGroupName'] = resource_group_name if opts is None: opts = pulumi.InvokeOptions() if opts.version is None: opts.version = _utilities.get_version() __ret__ = pulumi.runtime.invoke('azure-nextgen:insights:getguestDiagnosticsSetting', __args__, opts=opts, typ=GetguestDiagnosticsSettingResult).value return AwaitableGetguestDiagnosticsSettingResult( data_sources=__ret__.data_sources, id=__ret__.id, location=__ret__.location, name=__ret__.name, os_type=__ret__.os_type, proxy_setting=__ret__.proxy_setting, tags=__ret__.tags, type=__ret__.type)
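A hedged usage sketch of the data source defined above, as it would appear inside a Pulumi program; the diagnostic setting and resource group names are placeholders, and the re-export from the `insights` package follows the usual Pulumi SDK layout.

```python
from pulumi_azure_nextgen.insights import getguest_diagnostics_setting

# Placeholder names for an existing guest diagnostics setting and resource group.
setting = getguest_diagnostics_setting(
    diagnostic_settings_name="my-guest-diagnostics",
    resource_group_name="my-resource-group",
)
# `setting` is an AwaitableGetguestDiagnosticsSettingResult exposing the
# properties defined above (data_sources, location, os_type, tags, ...).
print(setting.os_type, setting.location)
```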
/resoto_plugin_slack-3.6.5-py3-none-any.whl/resoto_plugin_slack/__init__.py
import resotolib.logger import threading import time import ssl import slack_sdk from typing import List, Any from retrying import retry from resotolib.utils import utc_str from .resources import ( SlackRegion, SlackTeam, SlackUser, SlackUsergroup, SlackConversation, ) from resotolib.baseplugin import BasePlugin, BaseCollectorPlugin from resotolib.baseresources import BaseCloud, BaseAccount, BaseRegion, BaseResource from resotolib.config import Config from resotolib.event import ( Event, EventType, add_event_listener, remove_event_listener, ) from resotolib.graph import Graph from .config import SlackConfig log = resotolib.logger.getLogger("resoto." + __name__) def retry_on_request_limit_exceeded(e): if isinstance(e, slack_sdk.errors.SlackApiError): if not e.response.data.get("ok", False) and e.response.data.get("error") == "ratelimited": retry_after = int(e.response.headers.get("Retry-After", 20)) log.debug(f"Slack API request limit exceeded, retrying after {retry_after} seconds") time.sleep(retry_after) return True return False class SlackCollectorPlugin(BaseCollectorPlugin): cloud = "slack" def __init__(self, *args: Any, **kwargs: Any): super().__init__(*args, **kwargs) self.client = None def collect(self) -> None: if not Config.slack.bot_token: log.info("Slack Collector Plugin: plugin loaded but no bot token provided") return log.info("Slack Collector Plugin: collecting Slack resources") slack_client_args = { "token": Config.slack.bot_token, } if Config.slack.do_not_verify_ssl: ssl_context = ssl.create_default_context() ssl_context.check_hostname = False ssl_context.verify_mode = ssl.CERT_NONE slack_client_args.update({"ssl": ssl_context}) self.client = slack_sdk.WebClient(**slack_client_args) response = self.client.team_info() if not response.data.get("ok", False): log.error("Failed to retrieve Slack Account information") return team = response.data.get("team", {}) team = SlackTeam.new(team) self.graph.add_resource(self.root, team) members = SlackRegion(id="members", tags={}) self.graph.add_resource(team, members) usergroups = SlackRegion(id="usergroups", tags={}) self.graph.add_resource(team, usergroups) conversations = SlackRegion(id="conversations", tags={}) self.graph.add_resource(team, conversations) for member in self.list_members(): u = SlackUser.new(member) log.debug(f"Found Slack User {u.name}: {u.real_name} ({u.email}) - {u.mtime}") self.graph.add_resource(members, u) for usergroup in self.list_usergroups(): ug = SlackUsergroup.new(usergroup) log.debug(f"Found Slack Usergroup {ug.name}") self.graph.add_resource(usergroups, ug) for user_id in ug._users: u = self.graph.search_first("id", user_id) if u: self.graph.add_edge(ug, u) for conversation in self.list_conversations(): c = SlackConversation.new(conversation) conversation_type = "Conversation " if c.is_channel: conversation_type = "Channel #" log.debug(f"Found Slack {conversation_type}{c.name}") self.graph.add_resource(conversations, c) members = self.list_conversation_members(c) for member_id in members: m = self.graph.search_first_all({"kind": "slack_user", "id": member_id}) self.graph.add_edge(c, m) @retry(stop_max_attempt_number=10, retry_on_exception=retry_on_request_limit_exceeded) def list_conversations(self) -> List: log.debug("Fetching list of Slack Conversations") channel_types = "public_channel,private_channel" channel_limit = 100 exclude_archived = not Config.slack.include_archived response = self.client.conversations_list( exclude_archived=exclude_archived, types=channel_types, limit=channel_limit ) 
conversations = response.data.get("channels", []) while response.data.get("response_metadata", {}).get("next_cursor", "") != "": response = self.client.conversations_list( cursor=response.data["response_metadata"]["next_cursor"], exclude_archived=exclude_archived, types=channel_types, limit=channel_limit, ) log.debug("Fetching more Slack conversations") conversations.extend(response.data.get("channels", [])) return conversations @retry(stop_max_attempt_number=10, retry_on_exception=retry_on_request_limit_exceeded) def list_conversation_members(self, conversation) -> List: log.debug(f"Fetching list of Slack Conversation members for {conversation.rtdname}") members = [] try: response = self.client.conversations_members(channel=conversation.id) members = response.data.get("members", []) while response.data.get("response_metadata", {}).get("next_cursor", "") != "": response = self.client.conversations_list( channel=conversation.id, cursor=response.data["response_metadata"]["next_cursor"], ) log.debug("Fetching more Slack conversation members") members.extend(response.data.get("members", [])) except slack_sdk.errors.SlackApiError as e: if not e.response.data.get("ok", False) and e.response.data.get("error") == "internal_error": log.error( ("Slack responded with an internal error - " f"skipping members list for {conversation.rtdname}") ) return [] else: raise return members @retry(stop_max_attempt_number=10, retry_on_exception=retry_on_request_limit_exceeded) def list_usergroups(self) -> List: log.debug("Fetching list of Slack Usergroups") response = self.client.usergroups_list(include_users="true", include_count="true", include_disabled="false") return response.data.get("usergroups", []) @retry(stop_max_attempt_number=10, retry_on_exception=retry_on_request_limit_exceeded) def list_members(self) -> List: log.debug("Fetching list of Slack Users") response = self.client.users_list() members = response.data.get("members", []) while response.data.get("response_metadata", {}).get("next_cursor", "") != "": response = self.client.users_list(cursor=response.data["response_metadata"]["next_cursor"]) log.debug("Fetching more Slack users") members.extend(response.data.get("members", [])) return members @staticmethod def add_config(config: Config) -> None: config.add_config(SlackConfig) class SlackBotPlugin(BasePlugin): def __init__(self): super().__init__() self.name = "slack_bot" if Config.slack.bot_token is None: return self.client = slack_sdk.WebClient(token=Config.slack.bot_token) self.exit = threading.Event() self.users2id = {} self.emails2id = {} self.usergroups2id = {} self.channels2id = {} add_event_listener(EventType.SHUTDOWN, self.shutdown) add_event_listener(EventType.CLEANUP_FINISH, self.process_resoto_events, blocking=False) def __del__(self): remove_event_listener(EventType.CLEANUP_FINISH, self.process_resoto_events) remove_event_listener(EventType.SHUTDOWN, self.shutdown) def go(self): if Config.slack.bot_token is None: return self.exit.wait() def process_resoto_events(self, event: Event): graph = event.data log.info("Checking for outstanding Slack notifications") self.update_users_groups_channels(graph) for node in graph.nodes: if isinstance(node, BaseResource) and len(node.event_log) > 0 and "resoto:owner" in node.tags: cloud = node.cloud(graph) account = node.account(graph) region = node.region(graph) owner_tag = str(node.tags["resoto:owner"]) if ( not isinstance(cloud, BaseCloud) or not isinstance(account, BaseAccount) or not isinstance(region, BaseRegion) ): continue destination = None 
if owner_tag.startswith("slack:"): owner = owner_tag[6:] destination = self.users2id.get(owner) elif owner_tag.startswith("email:"): owner = owner_tag[6:] destination = self.emails2id.get(owner) else: log.error( ( f"Unknown owner tag format {owner_tag} for node {node.dname} in cloud {cloud.name} " f"account {account.dname} region {region.name}" ) ) if not isinstance(destination, SlackUser): log.error(f"Unable to determine Slack destination based on resoto:owner tag value {owner_tag}") continue event_log_text = "" for event in node.event_log: event_log_text += f"{utc_str(event['timestamp'])} {event['msg']}" + "\n" slack_message = ( f"Hello {destination.first_name}, your cloud resource `{node.dname}` in " f"cloud `{cloud.name}` account `{account.dname}` region `{region.name}`" f" was modified during the current resoto run. Here is the " f"event log:\n```\n{event_log_text}```" ) self.send_slack_message(destination.id, slack_message) @retry(stop_max_attempt_number=10, retry_on_exception=retry_on_request_limit_exceeded) def send_slack_message(self, user_id, message): log.debug(f"Sending Slack message to ID {user_id}") response = self.client.conversations_open(users=[user_id]) if response.data.get("ok", False): channel = response.data.get("channel", {}).get("id") self.client.chat_postMessage(channel=channel, text=message) def update_users_groups_channels(self, graph: Graph): log.debug("Updating Users Groups and Channels") tmp_users = {} tmp_emails = {} tmp_usergroups = {} tmp_channels = {} for user in graph.search("kind", "slack_user"): tmp_users[user.name] = user if user.email: tmp_emails[user.email] = user for usergroup in graph.search("kind", "slack_usergroup"): if usergroup.is_usergroup: tmp_usergroups[usergroup.name] = usergroup for channel in graph.search("kind", "slack_conversation"): if channel.is_channel: tmp_channels[channel.name] = channel self.users2id = tmp_users self.emails2id = tmp_emails self.usergroups2id = tmp_usergroups self.channels2id = tmp_channels def shutdown(self, event: Event): log.debug(f"Received event {event.event_type} - shutting down Slack plugin") self.exit.set()
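For reference, the `resoto:owner` tag values the plugin resolves above follow a simple prefix convention; the identifiers below are placeholders.

```python
# Placeholder identifiers illustrating the two supported owner prefixes.
owner_by_name = {"resoto:owner": "slack:jdoe"}               # resolved via self.users2id
owner_by_email = {"resoto:owner": "email:jdoe@example.com"}  # resolved via self.emails2id
```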
/ovos-tts-plugin-polly-0.0.1.tar.gz/ovos-tts-plugin-polly-0.0.1/ovos_tts_plugin_polly/__init__.py
import logging import boto3 from ovos_plugin_manager.templates.tts import TTS, TTSValidator logging.getLogger('botocore').setLevel(logging.CRITICAL) logging.getLogger('boto3').setLevel(logging.CRITICAL) logging.getLogger('urllib3.util.retry').setLevel(logging.CRITICAL) class PollyTTS(TTS): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs, audio_ext="mp3", validator=PollyTTSValidator(self)) # Catch Chinese alt code if self.lang.lower() == "zh-zh": self.lang = "cmn-cn" self.ssml_tags = ["speak", "say-as", "voice", "prosody", "break", "emphasis", "sub", "lang", "phoneme", "w", "whisper", "amazon:auto-breaths", "p", "s", "amazon:effect", "mark"] self.voice = self.config.get("voice", "Matthew") self.key_id = self.config.get("key_id") or \ self.config.get("access_key_id") or "" self.key = self.config.get("secret_key") or \ self.config.get("secret_access_key") or "" self.region = self.config.get("region", 'us-east-1') self.polly = boto3.Session(aws_access_key_id=self.key_id, aws_secret_access_key=self.key, region_name=self.region).client('polly') def get_tts(self, sentence, wav_file): text_type = "text" if self.remove_ssml(sentence) != sentence: text_type = "ssml" sentence = sentence.replace("\whispered", "/amazon:effect") \ .replace("\\whispered", "/amazon:effect") \ .replace("whispered", "amazon:effect name=\"whispered\"") response = self.polly.synthesize_speech( OutputFormat=self.audio_ext, Text=sentence, TextType=text_type, VoiceId=self.voice.title()) with open(wav_file, 'wb') as f: f.write(response['AudioStream'].read()) return wav_file, None def describe_voices(self, language_code="en-US"): if language_code.islower(): a, b = language_code.split("-") b = b.upper() language_code = "-".join([a, b]) # example 'it-IT' useful to retrieve voices voices = self.polly.describe_voices(LanguageCode=language_code) return voices class PollyTTSValidator(TTSValidator): def __init__(self, tts): super(PollyTTSValidator, self).__init__(tts) def validate_lang(self): # TODO pass def validate_dependencies(self): try: from boto3 import Session except ImportError: raise Exception( 'PollyTTS dependencies not installed, please run pip install ' 'boto3 ') def validate_connection(self): try: if not self.tts.voice: raise Exception("Polly TTS Voice not configured") output = self.tts.describe_voices() except TypeError: raise Exception( 'PollyTTS server could not be verified. Please check your ' 'internet connection and credentials.') def get_tts_class(self): return PollyTTS if __name__ == "__main__": e = PollyTTS(config={"key_id": "", "secret_key": ""}) ssml = """<speak> This is my original voice, without any modifications. <amazon:effect vocal-tract-length="+15%"> Now, imagine that I am much bigger. </amazon:effect> <amazon:effect vocal-tract-length="-15%"> Or, perhaps you prefer my voice when I'm very small. </amazon:effect> You can also control the timbre of my voice by making minor adjustments. <amazon:effect vocal-tract-length="+10%"> For example, by making me sound just a little bigger. </amazon:effect><amazon:effect vocal-tract-length="-10%"> Or, making me sound only somewhat smaller. </amazon:effect> </speak>""" e.get_tts(ssml, "polly.mp3")
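Beyond the SSML demo in the `__main__` block above, basic usage looks like the following sketch; the AWS credentials are placeholders, and "Joanna" is simply one of Polly's standard voices.

```python
# Placeholder AWS credentials; any standard Polly voice works here.
tts = PollyTTS(config={"voice": "Joanna",
                       "key_id": "AKIA...",
                       "secret_key": "...",
                       "region": "us-east-1"})
tts.get_tts("Hello from Polly", "hello.mp3")   # writes an mp3 file
print(tts.describe_voices("en-US"))            # lists voices available for en-US
```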
/Sider-0.3.1.tar.gz/Sider-0.3.1/sider/set.py
from __future__ import absolute_import import collections from .session import Session from .types import Bulk, String from .transaction import manipulative, query from . import utils class Set(collections.MutableSet): """The Python-side representaion of Redis set value. It behaves alike built-in Python :class:`set` object. More exactly, it implements :class:`collections.MutableSet` protocol. .. table:: Mappings of Redis commands--:class:`Set` methods ==================== ========================================== Redis commands :class:`Set` methods ==================== ========================================== :redis:`DEL` :meth:`Set.clear()` :redis:`SADD` :meth:`Set.add()`, :meth:`Set.update()` :redis:`SCARD` :func:`len()` (:meth:`Set.__len__()`) :redis:`SDIFF` :meth:`Set.difference()`, :token:`-` (:meth:`Set.__sub__()`) :redis:`SDIFFSTORE` :meth:`Set.difference_update()`, :token:`-=` (:meth:`Set.__isub__()`) :redis:`SINTER` :meth:`Set.intersection()`, :token:`&` (:meth:`Set.__and__()`) :redis:`SINTERSTORE` :meth:`Set.intersection_update()`, :token:`&=` (:meth:`Set.__iand__()`) :redis:`SISMEMBER` :keyword:`in` (:meth:`Set.__contains__()`) :redis:`SMEMBERS` :func:`iter()` (:meth:`Set.__iter__()`) :redis:`SMOVE` N/A :redis:`SPOP` :meth:`Set.pop()` :redis:`SRANDMEMBER` N/A :redis:`SREM` :meth:`Set.discard()`, :meth:`Set.remove()` :redis:`SUNION` :meth:`Set.union()`, :token:`|` (:meth:`Set.__or__()`) :redis:`SUNIONSTORE` :meth:`Set.update()`, :token:`|=` (:meth:`Set.__ior__()`) N/A :meth:`Set.symmetric_difference()`, :token:`^` (:meth:`Set.__xor__()`) N/A :meth:`Set.symmetric_difference_update()`, :token:`^=` (:meth:`Set.__ixor__()`) ==================== ========================================== .. todo:: There currently are too many duplications implementations of its methods. These should be done refactoring. """ def __init__(self, session, key, value_type=String): if not isinstance(session, Session): raise TypeError('session must be a sider.session.Session ' 'instance, not ' + repr(session)) self.session = session self.key = key self.value_type = Bulk.ensure_value_type(value_type, parameter='value_type') @query def __iter__(self): decode = self.value_type.decode for member in self.session.client.smembers(self.key): yield decode(member) @query def __len__(self): """Gets the cardinality of the set. Use this with the built-in :func:`len()` function. :returns: the cardinality of the set :rtype: :class:`numbers.Integral` .. note:: This method is directly mapped to :redis:`SCARD` command. """ return self.session.client.scard(self.key) @query def __contains__(self, member): """:keyword:`in` operator. Tests whether the set contains the given operand ``member``. :param member: the value to test :returns: ``True`` if the set contains the given operand ``member`` :rtype: :class:`bool` .. note:: This method is directly mapped to :redis:`SISMEMBER` command. 
""" try: data = self.value_type.encode(member) except TypeError: return False return bool(self.session.client.sismember(self.key, data)) def __eq__(self, operand): if isinstance(operand, (set, frozenset)): return frozenset(self) == operand elif isinstance(operand, Set) and self.session is operand.session: length = len(self) if length == 0: return len(operand) == 0 elif self.value_type == operand.value_type: self.session.mark_query([self.key]) for _ in self.session.client.sdiff(self.key, operand.key): return False for _ in self.session.client.sdiff(operand.key, self.key): return False return True else: return False elif isinstance(operand, collections.Set): return frozenset(self) == frozenset(operand) return False def __ne__(self, operand): return not (self == operand) def __lt__(self, operand): """Less-than (:token:`<`) operator. Tests whether the set is a *proper* (or *strict*) subset of the given ``operand`` or not. To eleborate, the key difference between this less-than (:token:`<`) operator and less-than or equal-to (:token:`<=`) operator, which is equivalent to :meth:`issubset()` method, is that it returns ``False`` even if two sets are exactly the same. Let this show a simple example: .. sourcecode:: pycon >>> assert isinstance(s, sider.set.Set) # doctest: +SKIP >>> set(s) # doctest: +SKIP set([1, 2, 3]) >>> s < set([1, 2]), s <= set([1, 2]) # doctest: +SKIP (False, False) >>> s < set([1, 2, 3]), s <= set([1, 2, 3]) # doctest: +SKIP (False, True) >>> s < set([1, 2, 3, 4]), s <= set([1, 2, 3, 4]) # doctest: +SKIP (True, True) :param operand: another set to test :type operand: :class:`collections.Set` :returns: ``True`` if the set is a proper subset of ``operand`` :rtype: :class:`bool` """ if not isinstance(operand, collections.Set): raise TypeError('operand for < must be an instance of ' 'collections.Set, not ' + repr(operand)) if (isinstance(operand, Set) and self.session is operand.session and self.value_type == operand.value_type): client = self.session.client self.session.mark_query([self.key]) for _ in client.sdiff(self.key, operand.key): return False card = len(self) if card != len(client.sinter(self.key, operand.key)): return False return card < client.scard(operand.key) return frozenset(self) < frozenset(operand) def __le__(self, operand): """Less-than or equal to (:token:`<=`) operator. Tests whether the set is a subset of the given ``operand``. It's the same operation to :meth:`issubset()` method except it can take a set-like operand only. On the other hand :meth:`issubset()` can take an any iterable operand as well. :param operand: another set to test :type operand: :class:`collections.Set` :returns: ``True`` if the ``operand`` set contains the set :rtype: :class:`bool` """ if not isinstance(operand, collections.Set): raise TypeError('operand for <= must be an instance of ' 'collections.Set, not ' + repr(operand)) return self.issubset(operand) def __gt__(self, operand): """Greater-than (:token:`>`) operator. Tests whether the set is a *proper* (or *strict*) superset of the given ``operand``. To eleborate, the key difference between this greater-than (:token:`>`) operator and greater-than or equal-to (:token:`>=`) operator, which is equivalent to :meth:`issuperset()` method, is that it returns ``False`` even if two sets are exactly the same. Let this show a simple example: .. 
sourcecode:: pycon >>> assert isinstance(s, sider.set.Set) # doctest: +SKIP >>> set(s) # doctest: +SKIP set([1, 2, 3]) >>> s > set([1, 2]), s >= set([1, 2]) # doctest: +SKIP (True, True) >>> s > set([1, 2, 3]), s >= set([1, 2, 3]) # doctest: +SKIP (False, True) >>> s > set([1, 2, 3, 4]), s >= set([1, 2, 3, 4]) # doctest: +SKIP (False, False) :param operand: another set to test :type operand: :class:`collections.Set` :returns: ``True`` if the set is a proper superset of ``operand`` :rtype: :class:`bool` """ if not isinstance(operand, collections.Set): raise TypeError('operand for > must be an instance of ' 'collections.Set, not ' + repr(operand)) if isinstance(operand, Set): return operand < self return frozenset(self) > frozenset(operand) def __ge__(self, operand): """Greater-than or equal to (:token:`>=`) operator. Tests whether the set is a superset of the given ``operand``. It's the same operation to :meth:`issuperset()` method except it can take a set-like operand only. On the other hand :meth:`issuperset()` can take an any iterable operand as well. :param operand: another set to test :type operand: :class:`collections.Set` :returns: ``True`` if the set contains the ``operand`` :rtype: :class:`bool` """ if not isinstance(operand, collections.Set): raise TypeError('operand for >= must be an instance of ' 'collections.Set, not ' + repr(operand)) return self.issuperset(operand) def __sub__(self, operand): """Minus (:token:`-`) operator. Gets the relative complement of the ``operand`` in the set. Mostly equivalent to :meth:`difference()` method except it can take a set-like operand only. On the other hand :meth:`difference()` can take an any iterable operand as well. :param operand: another set to get the relative complement :type operand: :class:`collections.Set` :returns: the relative complement :rtype: :class:`set` """ if not isinstance(operand, collections.Set): raise TypeError('operand for - must be an instance of ' 'collections.Set, not ' + repr(operand)) return self.difference(operand) def __rsub__(self, operand): if not isinstance(operand, collections.Set): raise TypeError('operand for - must be an instance of ' 'collections.Set, not ' + repr(operand)) elif isinstance(operand, (Set, set, frozenset)): return operand.difference(self) operand = set(operand) operand.difference_update(self) return operand def __isub__(self, operand): """Minus augmented assignment (:token:`-=`). Removes all elements of the ``operand`` from this set. Mostly equivalent to :meth:`difference_update()` method except it can take only one set-like operand. On the other hand :meth:`difference_update()` can take zero or more iterable operands (not only set-like objects). :param operand: another set which has elements to remove from this set :type operand: :class:`collections.Set` :returns: the set itself :rtype: :class:`Set` """ if not isinstance(operand, collections.Set): raise TypeError('operand for -= must be an instance of ' 'collections.Set, not ' + repr(operand)) self.difference_update(operand) return self def __xor__(self, operand): """Bitwise exclusive or (:token:`^`) operator. Returns a new set with elements in either the set or the ``operand`` but not both. Mostly equivalent to :meth:`symmetric_difference()` method except it can take a set-like operand only. On the other hand :meth:`symmetric_difference()` can take an any iterable operand as well. 
:param operand: other set :type operand: :class:`collections.Set` :returns: a new set with elements in either the set or the ``operand`` but not both :rtype: :class:`set` """ if not isinstance(operand, collections.Set): raise TypeError('operand for ^ must be an instance of ' 'collections.Set, not ' + repr(operand)) return self.symmetric_difference(operand) def __rxor__(self, operand): if not isinstance(operand, collections.Set): raise TypeError('operand for ^ must be an instance of ' 'collections.Set, not ' + repr(operand)) return self.symmetric_difference(operand) def __ixor__(self, operand): """Bitwise exclusive argumented assignment (:token:`^=`). Updates the set with the symmetric difference of itself and ``operand``. Mostly equivalent to :meth:`symmetric_difference_update()` method except it can take a set-like operand only. On the other hand :meth:`symmetric_difference_update()` can take an any iterable operand as well. :param operand: another set :type operand: :class:`collections.Set` :returns: the set itself :rtype: :class:`Set` """ if not isinstance(operand, collections.Set): raise TypeError('operand for ^= must be an instance of ' 'collections.Set, not ' + repr(operand)) self.symmetric_difference_update(operand) return self def __or__(self, operand): """Bitwise or (:token:`|`) operator. Gets the union of operands. Mostly equivalent to :meth:`union()` method except it can take only one set-like operand. On the other hand :meth:`union()` can take zero or more iterable operands (not only set-like objects). :param operand: another set to union :type operand: :class:`collections.Set` :returns: the union set :rtype: :class:`set` """ if not isinstance(operand, collections.Set): raise TypeError('operand for | must be an instance of ' 'collections.Set, not ' + repr(operand)) return self.union(operand) def __ror__(self, operand): return self | operand def __ior__(self, operand): """Bitwise or (:token:`|=`) assignment. Updates the set with the union of itself and the ``operand``. Mostly equivalent to :meth:`update()` method except it can take only one set-like operand. On the other hand :meth:`update()` can take zero or more iterable operands (not only set-like objects). :param operand: another set to union :type operand: :class:`collections.Set` :returns: the set itself :rtype: :class:`Set` """ if not isinstance(operand, collections.Set): raise TypeError('operand for |= must be an instance of ' 'collections.Set, not ' + repr(operand)) self.update(operand) return self def __and__(self, operand): """Bitwise and (:token:`&`) operator. Gets the union of operands. Mostly equivalent to :meth:`intersection()` method except it can take only one set-like operand. On the other hand :meth:`intersection()` can take zero or more iterable operands (not only set-like objects). :param operand: another set to get intersection :type operand: :class:`collections.Set` :returns: the intersection :rtype: :class:`set` """ if not isinstance(operand, collections.Set): raise TypeError('operand for & must be an instance of ' 'collections.Set, not ' + repr(operand)) return self.intersection(operand) def __rand__(self, operand): return self & operand def __iand__(self, operand): """Bitwise and (:token:`&=`) assignment. Updates the set with the intersection of itself and the ``operand``. Mostly equivalent to :meth:`intersection_update()` method except it can take only one set-like operand. On the other hand :meth:`intersection_update()` can take zero or more iterable operands (not only set-like objects). 
:param operand: another set to intersection :type operand: :class:`collections.Set` :returns: the set itself :rtype: :class:`Set` """ if not isinstance(operand, collections.Set): raise TypeError('operand for &= must be an instance of ' 'collections.Set, not ' + repr(operand)) self.intersection_update(operand) return self def issubset(self, operand): """Tests whether the set is a subset of the given ``operand`` or not. To test proper (strict) subset, use :token:`<` operator instead. :param operand: another set to test :type operand: :class:`collections.Iterable` :returns: ``True`` if the ``operand`` set contains the set :rtype: :class:`bool` .. note:: This method consists of following Redis commands: 1. :redis:`SDIFF` for this set and ``operand`` 2. :redis:`SLEN` for this set 3. :redis:`SLEN` for ``operand`` If the first :redis:`SDIFF` returns anything, it sends no commands of the rest and simply returns ``False``. """ if (isinstance(operand, Set) and self.session is operand.session and self.value_type == operand.value_type): client = self.session.client self.session.mark_query([self.key]) for _ in client.sdiff(self.key, operand.key): return False return len(self) == len(client.sinter(self.key, operand.key)) return frozenset(self).issubset(operand) def issuperset(self, operand): """Tests whether the set is a superset of the given ``operand``. To test proper (strict) superset, use :token:`>` operator instead. :param operand: another set to test :type operand: :class:`collections.Iterable` :returns: ``True`` if the set contains ``operand`` :rtype: :class:`bool` """ if isinstance(operand, (Set, set, frozenset)): return operand.issubset(self) return frozenset(self).issuperset(operand) def isdisjoint(self, operand): """Tests whether two sets are disjoint or not. :param operand: another set to test :type operand: :class:`collections.Iterable` :returns: ``True`` if two sets have a null intersection :rtype: :class:`bool` .. note:: It internally uses :redis:`SINTER` command. """ if isinstance(operand, Set) and self.session is operand.session: if self.value_type != operand.value_type: return True self.session.mark_query([self.key]) for _ in self.session.client.sinter(self.key, operand.key): return False return True return super(Set, self).isdisjoint(operand) def difference(self, *sets): """Returns the difference of two or more ``sets`` as a new :class:`set` i.e. all elements that are in this set but not the others. :param sets: other iterables to get the difference :returns: the relative complement :rtype: :class:`set` .. note:: This method is mapped to :redis:`SDIFF` command. """ online_sets = [] offline_sets = [] for operand in sets: if isinstance(operand, Set) and self.session is operand.session: if self.value_type == operand.value_type: online_sets.append(operand) else: offline_sets.append(operand) keys = (operand.key for operand in online_sets) self.session.mark_query([self.key]) fetched = self.session.client.sdiff(self.key, *keys) decode = self.value_type.decode diff = set(decode(member) for member in fetched) diff.difference_update(*offline_sets) return diff def symmetric_difference(self, operand): """Returns a new set with elements in either the set or the ``operand`` but not both. :param operand: other set :type operand: :class:`collections.Iterable` :returns: a new set with elements in either the set or the ``operand`` but not both :rtype: :class:`set` .. note:: This method consists of following two commands: 1. :redis:`SUNION` of this set and the ``operand`` 2. 
:redis:`SINTER` of this set and the ``operand`` and then makes a new :class:`set` with elements in the first result are that are not in the second result. """ if (isinstance(operand, Set) and self.session is operand.session and self.value_type == operand.value_type): self.session.mark_query([self.key]) union = self.session.client.sunion(self.key, operand.key) inter = self.session.client.sinter(self.key, operand.key) symdiff = set(union) symdiff.difference_update(inter) decode = self.value_type.decode return set(decode(member) for member in symdiff) return set(self).symmetric_difference(operand) def union(self, *sets): """Gets the union of the given sets. :param \*sets: zero or more operand sets to union. all these must be iterable :returns: the union set :rtype: :class:`set` .. note:: It sends a :redis:`SUNION` command for other :class:`Set` objects. For other ordinary Python iterables, it unions all in the memory. """ online_sets = {self.value_type: [self]} offline_sets = [] for operand in sets: if (isinstance(operand, Set) and self.session is operand.session): group = online_sets.setdefault(operand.value_type, []) group.append(operand) else: offline_sets.append(operand) union = set() for value_type in online_sets: group = online_sets[value_type] keys = (s.key for s in group) self.session.mark_query([self.key]) subset = self.session.client.sunion(*keys) decode = value_type.decode union.update(decode(member) for member in subset) for operand in offline_sets: union.update(operand) return union def intersection(self, *sets): """Gets the intersection of the given sets. :param \*sets: zero or more operand sets to get intersection. all these must be iterable :returns: the intersection :rtype: :class:`set` """ online_sets = [] offline_sets = [] for operand in sets: if (isinstance(operand, Set) and self.session is operand.session): if self.value_type != operand.value_type: return set() online_sets.append(operand) else: offline_sets.append(operand) keys = frozenset(s.key for s in online_sets) if keys: self.session.mark_query([self.key]) inter = self.session.client.sinter(self.key, *keys) decode = self.value_type.decode online = set(decode(m) for m in inter) else: online = self if offline_sets: base = set(offline_sets.pop()) base.intersection_update(online, *offline_sets) return base return online if isinstance(online, set) else set(online) @manipulative def add(self, element): """Adds an ``element`` to the set. This has no effect if the ``element`` is already present. :param element: an element to add .. note:: This method is a direct mapping to :redis:`SADD` comamnd. """ member = self.value_type.encode(element) self.session.client.sadd(self.key, member) @manipulative def discard(self, element): """Removes an ``element`` from the set if it is a member. If the ``element`` is not a member, does nothing. :param element: an element to remove .. note:: This method is mapped to :redis:`SREM` command. """ try: member = self.value_type.encode(element) except TypeError: return self.session.client.srem(self.key, member) def pop(self): """Removes an arbitrary element from the set and returns it. Raises :exc:`~exceptions.KeyError` if the set is empty. :returns: a removed arbitrary element :raises exceptions.KeyError: if the set is empty .. note:: This method is directly mapped to :redis:`SPOP` command. 
""" if self.session.current_transaction is None: popped = self.session.client.spop(self.key) if popped is None: raise KeyError('pop from an empty set') return self.value_type.decode(popped) else: self.session.mark_query([self.key]) popped = self.session.client.srandmember(self.key) if popped is None: raise KeyError('pop from an empty set') value = self.value_type.decode(popped) self.session.mark_manipulative() self._raw_delete([value], self.session.client) return value @manipulative def clear(self): """Removes all elements from this set. .. note:: Under the hood it simply :redis:`DEL` the key. """ self.session.client.delete(self.key) def update(self, *sets): """Updates the set with union of itself and operands. :param \*sets: zero or more operand sets to union. all these must be iterable .. note:: It sends a :redis:`SUNIONSTORE` command for other :class:`Set` objects and a :redis:`SADD` command for other ordinary Python iterables. Multiple operands of :redis:`SADD` command has been supported since Redis 2.4.0, so it would send multiple :redis:`SADD` commands if the Redis version is less than 2.4.0. """ online_sets = [] offline_sets = [] for operand in sets: if isinstance(operand, Set) and self.session is operand.session: if self.value_type == operand.value_type: online_sets.append(operand) else: raise TypeError( 'value_type mismatch; tried union of {0!r} and ' '{1!r}'.format(self.value_type, operand.value_type) ) else: offline_sets.append(operand) def block(trial, transaction): pipe = self.session.client if online_sets: keys = [operand.key for operand in online_sets] self.session.mark_manipulative() pipe.sunionstore(self.key, self.key, *keys) update = self._raw_update for operand in offline_sets: update(operand, pipe) self.session.transaction(block, [self.key], ignore_double=True) def _raw_update(self, members, pipe): key = self.key encode = self.value_type.encode members = (encode(v) for v in members) self.session.mark_manipulative() if self.session.server_version_info < (2, 4, 0): for member in members: pipe.sadd(key, member) else: n = 100 # FIXME: it is an arbitarary magic number. for chunk in utils.chunk(members, n): pipe.sadd(key, *chunk) def intersection_update(self, *sets): """Updates the set with the intersection of itself and other ``sets``. :param \*sets: zero or more operand sets to intersection. all these must be iterable .. note:: It sends a :redis:`SINTERSTORE` command for other :class:`Set` objects and a :redis:`SREM` command for other ordinary Python iterables. Multiple operands of :redis:`SREM` command has been supported since Redis 2.4.0, so it would send multiple :redis:`SREM` commands if the Redis version is less than 2.4.0. Used commands: :redis:`SINTERSTORE`, :redis:`SMEMBERS` and :redis:`SREM`. 
""" online_sets = [] offline_sets = [] for operand in sets: if isinstance(operand, Set) and self.session is operand.session: if self.value_type == operand.value_type: online_sets.append(operand) else: self.session.mark_manipulative() self.session.client.delete(self.key) return else: offline_sets.append(operand) try: memory_set = offline_sets.pop() except IndexError: memory_set = frozenset() else: if offline_sets: memory_set = set(memory_set) memory_set.intersection_update(*offline_sets) keys = tuple(operand.key for operand in online_sets) def block(trial, transaction): pipe = self.session.client if memory_set: self.session.mark_query() diff = self.difference(memory_set) self.session.mark_manipulative() self._raw_delete(diff, pipe) if keys: self.session.mark_manipulative() pipe.sinterstore(self.key, self.key, *keys) self.session.transaction(block, (self.key,) + keys, ignore_double=True) def difference_update(self, *sets): """Removes all elements of other ``sets`` from this set. :param \*sets: other sets that have elements to remove from this set .. note:: For :class:`Set` objects of the same session it internally uses :redis:`SDIFFSTORE` command. For other ordinary Python iterables, it uses :redis:`SREM` commands. If the version of Redis is less than 2.4, sends :redis:`SREM` multiple times. Because multiple operands of :redis:`SREM` command has been supported since Redis 2.4. """ online_sets = [] offline_sets = [] for operand in sets: if isinstance(operand, Set) and self.session is operand.session: if self.value_type == operand.value_type: online_sets.append(operand) else: offline_sets.append(operand) def block(trial, transaction): pipe = self.session.client if online_sets: keys = tuple(operand.key for operand in online_sets) self.session.mark_manipulative(keys) pipe.sdiffstore(self.key, self.key, *keys) for elements in offline_sets: self.session.mark_manipulative() self._raw_delete(elements, pipe) self.session.transaction(block, [self.key], ignore_double=True) def symmetric_difference_update(self, operand): """Updates the set with the symmetric difference of itself and ``operand``. :param operand: another set to get symmetric difference :type operand: :class:`collections.Iterable` .. note:: This method consists of several Redis commands in a transaction: :redis:`SINTER`, :redis:`SUNIONSTORE` and :redis:`SREM`. 
""" if isinstance(operand, Set) and self.session == operand.session: if self.value_type == operand.value_type: def block(trial, transaction): pipe = self.session.client self.session.mark_query() inter = pipe.sinter(self.key, operand.key) self.session.mark_manipulative() pipe.sunionstore(self.key, self.key, operand.key) self._raw_delete(inter, pipe, encoded=True) self.session.transaction(block, [self.key], ignore_double=True) else: raise TypeError( 'value_type mismatch; tried update {0!r} with ' '{1!r}'.format(self.value_type, operand.value_type) ) else: operand = set(operand) inter = self & operand operand.difference_update(inter) pipe = self.session.client.pipeline() self._raw_update(operand, pipe) self._raw_delete(inter, pipe) pipe.execute() @manipulative def _raw_delete(self, elements, pipe, encoded=False): if not encoded: encode = self.value_type.encode def get_elements(): for el in elements: try: yield encode(el) except TypeError: pass enc_elements = get_elements() else: enc_elements = elements if self.session.server_version_info < (2, 4, 0): for el in enc_elements: pipe.srem(self.key, el) else: pipe.srem(self.key, *enc_elements) def __repr__(self): cls = type(self) values = list(self) values.sort() elements = ', '.join(repr(v) for v in values) return '<{0}.{1} ({2!r}) {{{3}}}>'.format( cls.__module__, cls.__name__, self.key, elements )
PypiClean
/odoo12_addon_server_environment-12.0.2.0.5-py3-none-any.whl/odoo/addons/server_environment/readme/DESCRIPTION.rst
This module provides a way to define an environment in the main Odoo configuration file and to read some configurations from files depending on the configured environment: you define the environment in the main configuration file, and the values for the various possible environments are stored in the ``server_environment_files`` companion module. The ``server_environment_files`` module is optional; the values can also be set through an environment variable, with a fallback on default values stored in the database. The configurations read from the files are visible under the Configuration menu. If you are not in the 'dev' environment, you will not be able to see the values of keys marked as secret (by default: '*passw*', '*key*', '*secret*' and '*token*').
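
For illustration only, a minimal setup could look like the following (the ``running_env`` option name and the layout of ``server_environment_files`` shown here are assumptions made for the example, not definitions taken from this description)::

    # odoo.conf (main Odoo configuration file)
    [options]
    running_env = dev

    # server_environment_files/dev/external_service.conf
    [external_service]
    url = https://test.example.com
    token = not-a-real-token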
PypiClean
/aerostat_launcher-0.0.9-py3-none-any.whl/aerostat/core/checks.py
import subprocess
import time

import typer
from rich import print
from rich.progress import track

from aerostat.core.installer import DEPENDENCIES
from aerostat.core.loginer import get_aws_profile_credentials
from aerostat.core.utils import OS


def installed_check(command: str = None):
    """Check if dependencies are installed.

    This cannot be used as a decorator because the inner function would need to be
    registered with Typer.
    """
    try:
        if not command:
            for command in track(
                [dependency["command"] for dependency in DEPENDENCIES],
                "[bold green]Checking dependencies...",
            ):
                subprocess.run(
                    f"{command} --version", shell=True, check=True, capture_output=True
                )
        else:
            subprocess.run(
                f"{command} --version", shell=True, check=True, capture_output=True
            )
    except subprocess.CalledProcessError:
        # subprocess.run(check=True) raises CalledProcessError on a non-zero exit
        print(
            f"[bold red]Dependencies not installed. Please run [bold blue]aerostat install[/bold blue] and try again.[/bold red]"
        )
        raise typer.Exit(1)


def loggedin_check():
    try:
        get_aws_profile_credentials("aerostat")
    except KeyError:
        print(
            "[bold red]You are not logged in. Please run [bold blue]aerostat login[/bold blue] and try again.[/bold red]"
        )
        raise typer.Exit(1)


def docker_running_check(verbose=False):
    """Check if the Docker daemon is running."""
    try:
        subprocess.run("docker ps", shell=True, check=True, capture_output=True)
    except Exception:
        if verbose:
            print(
                "[bold red]Docker is not running. Please start Docker and try again.[/bold red]"
            )
        raise typer.Exit(1)


def start_docker_desktop():
    try:
        docker_running_check()
    except typer.Exit:
        print("[bold green]Starting Docker Desktop...[/bold green]")
        if OS.is_windows():
            subprocess.run(
                r"""powershell "& 'C:\Program Files\Docker\Docker\Docker Desktop.exe'""",
            )
        if OS.is_mac():
            subprocess.run(
                [
                    "open",
                    "-a",
                    "Docker",
                ],
            )
        count = 0
        while True:
            time.sleep(5)
            if count > 10:
                print("[bold red]Starting Docker failed.[/bold red]")
                raise RuntimeError("Starting Docker failed.")
            try:
                docker_running_check()
                break
            except Exception:
                count += 1


if __name__ == "__main__":
    start_docker_desktop()
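

# Illustration only (not part of aerostat): a pre-flight sequence wiring the
# checks above together in the order a deploy command might call them. It will
# raise typer.Exit(1) if any dependency, login, or Docker check fails.
def run_preflight_checks():
    installed_check()       # every CLI listed in DEPENDENCIES must answer --version
    loggedin_check()        # the "aerostat" AWS profile must have credentials
    start_docker_desktop()  # start Docker Desktop if the daemon is not running
    print("[bold green]All pre-flight checks passed.[/bold green]")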
PypiClean
/airflow_provider_grafana_loki-0.0.2-py3-none-any.whl/grafana_loki_provider/log/loki_task_handler.py
import time from typing import Any, Dict, Optional, Tuple, List import typing import gzip if typing.TYPE_CHECKING: from airflow.models import TaskInstance from typing import Optional, Tuple import logging logging.raiseExceptions = True import time from typing import Optional import json from datetime import timedelta from airflow.utils.log.file_task_handler import FileTaskHandler from airflow.utils.log.logging_mixin import LoggingMixin from grafana_loki_provider.hooks.loki import LokiHook from airflow.compat.functools import cached_property from airflow.configuration import conf import os BasicAuth = Optional[Tuple[str, str]] DEFAULT_LOGGER_NAME = "airflow" import json import logging import typing class LokiTaskHandler(FileTaskHandler, LoggingMixin): def __init__( self, base_log_folder, name, filename_template: Optional[str] = None, enable_gzip=True, ): super().__init__(base_log_folder, filename_template) self.name: str = name self.handler: Optional[logging.FileHandler] = None self.log_relative_path = "" self.closed = False self.upload_on_close = True self.enable_gzip = enable_gzip self.labels: Dict[str, str] = {} self.extras: Dict[str, Any] = {} @cached_property def hook(self) -> LokiHook: """Returns LokiHook""" remote_conn_id = str(conf.get("logging", "REMOTE_LOG_CONN_ID")) from grafana_loki_provider.hooks.loki import LokiHook return LokiHook(loki_conn_id=remote_conn_id) def get_extras(self, ti, try_number=None) -> Dict[str, Any]: return dict( run_id=getattr(ti, "run_id", ""), try_number=try_number if try_number != None else ti.try_number, map_index=getattr(ti, "map_index", ""), ) def get_labels(self, ti) -> Dict[str, str]: return {"dag_id": ti.dag_id, "task_id": ti.task_id} def set_context(self, task_instance: "TaskInstance") -> None: super().set_context(task_instance) ti = task_instance self.log_relative_path = self._render_filename(ti, ti.try_number) self.upload_on_close = not ti.raw # Clear the file first so that duplicate data is not uploaded # when re-using the same path (e.g. 
with rescheduled sensors) if self.upload_on_close: if self.handler: with open(self.handler.baseFilename, "w"): pass self.labels = self.get_labels(ti) self.extras = self.get_extras(ti) def _get_task_query(self, ti, try_number, metadata) -> str: run_id = getattr(ti, "run_id", "") map_index = getattr(ti, "map_index", "") query = """ {{dag_id="{dag_id}",task_id="{task_id}"}} | json try_number="try_number",map_index="map_index",run_id="run_id" | try_number="{try_number}" and map_index="{map_index}" and run_id="{run_id}" | __error__!="JSONParserErr" """.format( try_number=try_number, map_index=map_index, run_id=run_id, dag_id=ti.dag_id, task_id=ti.task_id, ) return query def _read( self, ti, try_number: int, metadata: Optional[str] = None ) -> Tuple[str, Dict[str, bool]]: query = self._get_task_query(ti, try_number, metadata) start = ti.start_date - timedelta(days=15) #if the task is running or queued, the task will not have end_date, in that # case, we will use a resonable internal of 5 days end_date = ti.end_date or ti.start_date + timedelta(days=5) end = end_date + timedelta(hours=1) params = { "query": query, "start": start.isoformat(), "end": end.isoformat(), "limit":5000, "direction": "forward", } self.log.info(f"loki log query params {params}") data = self.hook.query_range(params) lines = [] if "data" in data and "result" in data["data"]: for i in data["data"]["result"]: for v in i["values"]: try: msg = v[1] line = json.loads(msg)["line"] lines.append(line) except Exception as e: self.log.exception(e) pass if lines: log_lines = "".join(lines) return log_lines, {"end_of_log": True} else: return super()._read(ti, try_number, metadata) def close(self): """Close and upload local log file to remote storage Loki.""" if self.closed: return super().close() if not self.upload_on_close: return local_loc = os.path.join(self.local_base, self.log_relative_path) if os.path.exists(local_loc): # read log and remove old logs to get just the latest additions with open(local_loc) as logfile: log = logfile.readlines() self.loki_write(log) # Mark closed so we don't double write if close is called twice self.closed = True def build_payload(self, log: List[str], labels, extras) -> dict: """Build JSON payload with a log entry.""" ns = 1e9 lines = [] for line in log: ts = str(int(time.time() * ns)) line = {**{"line": line}, ** extras } line = json.dumps(line) lines.append([ts, line]) stream = { "stream": labels, "values": lines, } return {"streams": [stream]} def loki_write(self, log): payload = self.build_payload(log, self.labels, self.extras) headers = {"Content-Type": "application/json"} if self.enable_gzip: payload = gzip.compress(json.dumps(payload).encode("utf-8")) headers["Content-Encoding"] = "gzip" self.hook.push_log(payload=payload, headers=headers)
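

# Illustration only: constructing the handler directly with the arguments its
# __init__ accepts above. In a real Airflow deployment the logging config builds
# this handler, and the connection named by the [logging] remote_log_conn_id
# setting (read in `hook` above) points at the Loki instance; the folder and
# logger name used here are placeholders.
def _example_handler() -> "LokiTaskHandler":
    return LokiTaskHandler(
        base_log_folder="/tmp/airflow/logs",
        name="airflow_task_log",
        enable_gzip=True,
    )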
PypiClean
/py-pure-client-1.38.0.tar.gz/py-pure-client-1.38.0/pypureclient/flasharray/FA_2_22/models/saml2_sso_get_response.py
import pprint import re import six import typing from ....properties import Property if typing.TYPE_CHECKING: from pypureclient.flasharray.FA_2_22 import models class Saml2SsoGetResponse(object): """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'more_items_remaining': 'bool', 'total_item_count': 'int', 'continuation_token': 'str', 'items': 'list[Saml2Sso]' } attribute_map = { 'more_items_remaining': 'more_items_remaining', 'total_item_count': 'total_item_count', 'continuation_token': 'continuation_token', 'items': 'items' } required_args = { } def __init__( self, more_items_remaining=None, # type: bool total_item_count=None, # type: int continuation_token=None, # type: str items=None, # type: List[models.Saml2Sso] ): """ Keyword args: more_items_remaining (bool): Returns a value of `true` if subsequent items can be retrieved. total_item_count (int): The total number of records after applying all filter query parameters. The `total_item_count` will be calculated if and only if the corresponding query parameter `total_item_count` is set to `true`. If this query parameter is not set or set to `false`, a value of `null` will be returned. continuation_token (str): Continuation token that can be provided in the `continuation_token` query param to get the next page of data. If you use the continuation token to page through data you are guaranteed to get all items exactly once regardless of how items are modified. If an item is added or deleted during the pagination then it may or may not be returned. The continuation token is generated if the limit is less than the remaining number of items, and the default sort is used (no sort is specified). 
items (list[Saml2Sso]) """ if more_items_remaining is not None: self.more_items_remaining = more_items_remaining if total_item_count is not None: self.total_item_count = total_item_count if continuation_token is not None: self.continuation_token = continuation_token if items is not None: self.items = items def __setattr__(self, key, value): if key not in self.attribute_map: raise KeyError("Invalid key `{}` for `Saml2SsoGetResponse`".format(key)) self.__dict__[key] = value def __getattribute__(self, item): value = object.__getattribute__(self, item) if isinstance(value, Property): raise AttributeError else: return value def __getitem__(self, key): if key not in self.attribute_map: raise KeyError("Invalid key `{}` for `Saml2SsoGetResponse`".format(key)) return object.__getattribute__(self, key) def __setitem__(self, key, value): if key not in self.attribute_map: raise KeyError("Invalid key `{}` for `Saml2SsoGetResponse`".format(key)) object.__setattr__(self, key, value) def __delitem__(self, key): if key not in self.attribute_map: raise KeyError("Invalid key `{}` for `Saml2SsoGetResponse`".format(key)) object.__delattr__(self, key) def keys(self): return self.attribute_map.keys() def to_dict(self): """Returns the model properties as a dict""" result = {} for attr, _ in six.iteritems(self.swagger_types): if hasattr(self, attr): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) else: result[attr] = value if issubclass(Saml2SsoGetResponse, dict): for key, value in self.items(): result[key] = value return result def to_str(self): """Returns the string representation of the model""" return pprint.pformat(self.to_dict()) def __repr__(self): """For `print` and `pprint`""" return self.to_str() def __eq__(self, other): """Returns true if both objects are equal""" if not isinstance(other, Saml2SsoGetResponse): return False return self.__dict__ == other.__dict__ def __ne__(self, other): """Returns true if both objects are not equal""" return not self == other
PypiClean
/git-machete-3.11.3.tar.gz/git-machete-3.11.3/git_machete/utils.py
import inspect from enum import auto, Enum from typing import Any, Callable, Dict, Iterable, List, Optional, Set, Tuple, TypeVar import os import sys import re import subprocess T = TypeVar('T') # To avoid displaying the same warning multiple times during a single run. displayed_warnings: Set[str] = set() # Let's keep the flag to avoid checking for current directory's existence # every time any command is being popened or run. current_directory_confirmed_to_exist: bool = False ascii_only: bool = False debug_mode: bool = False verbose_mode: bool = False def excluding(iterable: Iterable[T], s: Iterable[T]) -> List[T]: return list(filter(lambda x: x not in s, iterable)) def flat_map(func: Callable[[T], List[T]], iterable: Iterable[T]) -> List[T]: return sum(map(func, iterable), []) def find_or_none(func: Callable[[T], bool], iterable: Iterable[T]) -> T: return next(filter(func, iterable), None) def map_truthy_only(func: Callable[[T], Optional[T]], iterable: Iterable[T]) -> List[T]: return list(filter(None, map(func, iterable))) def get_non_empty_lines(s: str) -> List[str]: return list(filter(None, s.split("\n"))) # Converts a lambda accepting N arguments to a lambda accepting one argument, an N-element tuple. # Name matching Scala's `tupled` on `FunctionX`. def tupled(f: Callable[..., T]) -> Callable[[Any], T]: return lambda tple: f(*tple) def get_second(pair: Tuple[Any, T]) -> T: a, b = pair return b def does_directory_exist(path: str) -> bool: try: # Note that os.path.isdir itself (without os.path.abspath) isn't reliable # since it returns a false positive (True) for the current directory when if it doesn't exist return os.path.isdir(os.path.abspath(path)) except OSError: return False def get_current_directory_or_none() -> Optional[str]: try: return os.getcwd() except OSError: # This happens when current directory does not exist (typically: has been deleted) return None def is_executable(path: str) -> bool: return os.access(path, os.X_OK) def find_executable(executable: str) -> Optional[str]: base, ext = os.path.splitext(executable) if (sys.platform == 'win32' or os.name == 'os2') and (ext != '.exe'): executable = f"{executable}.exe" if os.path.isfile(executable): return executable path = os.environ.get('PATH', os.defpath) paths = path.split(os.pathsep) for p in paths: f = os.path.join(p, executable) if os.path.isfile(f) and is_executable(f): debug(f"found {executable} at {f}") return f return None def debug(msg: Optional[str] = None) -> None: if debug_mode: function_name = bold(inspect.stack()[1].function) args, _, _, values = inspect.getargvalues(inspect.stack()[1].frame) args_to_be_redacted = {'access_token', 'password', 'secret', 'token'} # https://github.blog/2021-04-05-behind-githubs-new-authentication-token-formats/ values_to_be_redacted = ['ghp_', 'gho_', 'ghu_', 'ghs_', 'ghr_'] for arg, value in values.items(): if arg in args_to_be_redacted or any(value_ in str(value) for value_ in values_to_be_redacted): values[arg] = '***' excluded_args = {'self'} allowed_args = excluding(args, excluded_args) args_and_values_list = [arg + '=' + str(values[arg]) for arg in allowed_args] args_and_values_str = ', '.join(args_and_values_list) args_and_values_bold_str = bold(f'({args_and_values_str})') if msg is None: print(f"{function_name}{args_and_values_bold_str}", file=sys.stderr) else: print(f"{function_name}{args_and_values_bold_str}: {dim(msg)}", file=sys.stderr) def run_cmd(cmd: str, *args: str, **kwargs: Any) -> int: chdir_upwards_until_current_directory_exists() flat_cmd: str = 
get_cmd_shell_repr(cmd, *args, env=kwargs.get('env')) if debug_mode: print(bold(f">>> {flat_cmd}"), file=sys.stderr) elif verbose_mode: print(flat_cmd, file=sys.stderr) exit_code: int = subprocess.call([cmd] + list(args), **kwargs) # Let's defensively assume that every command executed via run_cmd # (but not via popen_cmd) can make the current directory disappear. # In practice, it's mostly 'git checkout' that carries such risk. mark_current_directory_as_possibly_non_existent() if debug_mode and exit_code != 0: print(dim(f"<exit code: {exit_code}>\n"), file=sys.stderr) return exit_code def mark_current_directory_as_possibly_non_existent() -> None: global current_directory_confirmed_to_exist current_directory_confirmed_to_exist = False def chdir_upwards_until_current_directory_exists() -> None: global current_directory_confirmed_to_exist if not current_directory_confirmed_to_exist: current_directory: Optional[str] = get_current_directory_or_none() if not current_directory: while not current_directory: # Note: 'os.chdir' only affects the current process and its subprocesses; # it doesn't propagate to the parent process (which is typically a shell). os.chdir(os.path.pardir) current_directory = get_current_directory_or_none() debug(f"current directory did not exist, chdired up into {current_directory}") current_directory_confirmed_to_exist = True def popen_cmd(cmd: str, *args: str, **kwargs: Any) -> Tuple[int, str, str]: chdir_upwards_until_current_directory_exists() flat_cmd = get_cmd_shell_repr(cmd, *args, env=kwargs.get('env')) if debug_mode: print(bold(f">>> {flat_cmd}"), file=sys.stderr) elif verbose_mode: print(flat_cmd, file=sys.stderr) process = subprocess.Popen([cmd] + list(args), stdout=subprocess.PIPE, stderr=subprocess.PIPE, **kwargs) stdout_bytes, stderr_bytes = process.communicate() stdout: str = stdout_bytes.decode('utf-8') stderr: str = stderr_bytes.decode('utf-8') exit_code: int = process.returncode if debug_mode: if exit_code != 0: print(colored(f"<exit code: {exit_code}>\n", AnsiEscapeCodes.RED), file=sys.stderr) if stdout: print(f"{dim('<stdout>:')}\n{dim(stdout)}", file=sys.stderr) if stderr: print(f"{dim('<stderr>:')}\n{colored(stderr, AnsiEscapeCodes.RED)}", file=sys.stderr) return exit_code, stdout, stderr def get_cmd_shell_repr(cmd: str, *args: str, env: Optional[Dict[str, str]]) -> str: def shell_escape(arg: str) -> str: return arg.replace("(", "\\(") \ .replace(")", "\\)") \ .replace(" ", "\\ ") \ .replace("\t", "$'\\t'") \ .replace("\n", "$'\\n'") env = env if env is not None else {} # We don't want to include the env vars that are inherited from the environment of git-machete process env_repr = [k + "=" + shell_escape(v) for k, v in env.items() if k not in os.environ] return " ".join(env_repr + [cmd] + list(map(shell_escape, args))) def warn(msg: str, apply_fmt: bool = True) -> None: if msg not in displayed_warnings: print(colored("Warn: ", AnsiEscapeCodes.RED) + (fmt(msg) if apply_fmt else msg), file=sys.stderr) displayed_warnings.add(msg) def slurp_file_or_empty(path: str) -> str: try: with open(path, 'r') as file: return file.read() except IOError: return '' class AnsiEscapeCodes: try: stdout = popen_cmd('tput', 'colors')[1] __number_of_supported_colors = int(stdout) except Exception: # If we cannot retrieve the number of supported colors, let's defensively assume it's low. 
__number_of_supported_colors = 8 __is_full_fledged_terminal = __number_of_supported_colors >= 256 # `GIT_MACHETE_DIM_AS_GRAY` remains undocumented as for now, # is just needed for animated gifs to render correctly # (`[2m`-style dimmed text is invisible in asciicinema renders). __dim_as_gray = os.environ.get('GIT_MACHETE_DIM_AS_GRAY') == 'true' ENDC = '\033[0m' BOLD = '\033[1m' DIM = '\033[38;2;128;128;128m' if __dim_as_gray else '\033[2m' # Let's fall back to cyan on 8-color terminals UNDERLINE = '\033[4m' if __is_full_fledged_terminal else '\033[36m' GREEN = '\033[32m' YELLOW = '\033[33m' # Let's fall back to yellow on 8-color terminals ORANGE = '\033[00;38;5;208m' if __is_full_fledged_terminal else '\033[33m' # Let's fall back to dark red (which might be similar to yellow :/) on 8-color terminals RED = '\033[91m' if __is_full_fledged_terminal else '\033[31m' def bold(s: str) -> str: return s if ascii_only or not s else AnsiEscapeCodes.BOLD + s + AnsiEscapeCodes.ENDC def dim(s: str) -> str: return s if ascii_only or not s else AnsiEscapeCodes.DIM + s + AnsiEscapeCodes.ENDC def underline(s: str, star_if_ascii_only: bool = False) -> str: if s and not ascii_only: return AnsiEscapeCodes.UNDERLINE + s + AnsiEscapeCodes.ENDC elif s and star_if_ascii_only: return s + " *" else: return s def colored(s: str, color: str) -> str: return s if ascii_only or not s else color + s + AnsiEscapeCodes.ENDC fmt_transformations: List[Callable[[str], str]] = [ lambda x: re.sub('<b>(.*?)</b>', bold(r"\1"), x, flags=re.DOTALL), lambda x: re.sub('<u>(.*?)</u>', underline(r"\1"), x, flags=re.DOTALL), lambda x: re.sub('<dim>(.*?)</dim>', dim(r"\1"), x, flags=re.DOTALL), lambda x: re.sub('<red>(.*?)</red>', colored(r"\1", AnsiEscapeCodes.RED), x, flags=re.DOTALL), lambda x: re.sub('<yellow>(.*?)</yellow>', colored(r"\1", AnsiEscapeCodes.YELLOW), x, flags=re.DOTALL), lambda x: re.sub('<green>(.*?)</green>', colored(r"\1", AnsiEscapeCodes.GREEN), x, flags=re.DOTALL), lambda x: re.sub('`(.*?)`', r"`\1`" if ascii_only else AnsiEscapeCodes.UNDERLINE + r"\1" + AnsiEscapeCodes.ENDC, x), ] def fmt(*parts: str) -> str: result = ''.join(parts) for f in fmt_transformations: result = f(result) return result def get_vertical_bar() -> str: return "|" if ascii_only else u"│" def get_right_arrow() -> str: return "->" if ascii_only else u"➔" def get_pretty_choices(*choices: str) -> str: def format_choice(c: str) -> str: if not c: return '' elif c.lower() == 'y': return colored(c, AnsiEscapeCodes.GREEN) elif c.lower() == 'yq': return colored(c[0], AnsiEscapeCodes.GREEN) + colored(c[1], AnsiEscapeCodes.RED) elif c.lower() in ('n', 'q'): return colored(c, AnsiEscapeCodes.RED) else: return colored(c, AnsiEscapeCodes.ORANGE) return f" ({', '.join(map_truthy_only(format_choice, choices))}) " class SyncToParentStatus(Enum): InSync = auto() MergedToParent = auto() InSyncButForkPointOff = auto() OutOfSync = auto() sync_to_parent_status_to_edge_color_map: Dict[SyncToParentStatus, str] = { SyncToParentStatus.MergedToParent: AnsiEscapeCodes.DIM, SyncToParentStatus.InSync: AnsiEscapeCodes.GREEN, SyncToParentStatus.InSyncButForkPointOff: AnsiEscapeCodes.YELLOW, SyncToParentStatus.OutOfSync: AnsiEscapeCodes.RED } sync_to_parent_status_to_junction_ascii_only_map: Dict[SyncToParentStatus, str] = { SyncToParentStatus.MergedToParent: "m-", SyncToParentStatus.InSync: "o-", SyncToParentStatus.InSyncButForkPointOff: "?-", SyncToParentStatus.OutOfSync: "x-" }
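

if __name__ == "__main__":
    # Small illustration of the formatting helpers defined above; this block is
    # not part of git-machete's CLI, it only demonstrates fmt/colored/choices.
    print(fmt("<b>bold</b>, <u>underlined</u> and <green>in-sync</green> text"))
    print(colored("out of sync", AnsiEscapeCodes.RED))
    print("Retry?" + get_pretty_choices("y", "N", "q"))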
PypiClean
/msgraph_beta_sdk-1.0.0a9-py3-none-any.whl/msgraph/generated/groups/item/events/item/single_value_extended_properties/count/count_request_builder.py
from __future__ import annotations from dataclasses import dataclass from kiota_abstractions.get_path_parameters import get_path_parameters from kiota_abstractions.method import Method from kiota_abstractions.request_adapter import RequestAdapter from kiota_abstractions.request_information import RequestInformation from kiota_abstractions.request_option import RequestOption from kiota_abstractions.response_handler import ResponseHandler from kiota_abstractions.serialization import Parsable, ParsableFactory from typing import Any, Callable, Dict, List, Optional, TYPE_CHECKING, Union if TYPE_CHECKING: from .......models.o_data_errors import o_data_error class CountRequestBuilder(): """ Provides operations to count the resources in the collection. """ def __init__(self,request_adapter: RequestAdapter, path_parameters: Optional[Union[Dict[str, Any], str]] = None) -> None: """ Instantiates a new CountRequestBuilder and sets the default values. Args: pathParameters: The raw url or the Url template parameters for the request. requestAdapter: The request adapter to use to execute the requests. """ if path_parameters is None: raise Exception("path_parameters cannot be undefined") if request_adapter is None: raise Exception("request_adapter cannot be undefined") # Url template to use to build the URL for the current request builder self.url_template: str = "{+baseurl}/groups/{group%2Did}/events/{event%2Did}/singleValueExtendedProperties/$count{?%24search,%24filter}" url_tpl_params = get_path_parameters(path_parameters) self.path_parameters = url_tpl_params self.request_adapter = request_adapter async def get(self,request_configuration: Optional[CountRequestBuilderGetRequestConfiguration] = None) -> Optional[int]: """ Get the number of the resource Args: requestConfiguration: Configuration for the request such as headers, query parameters, and middleware options. Returns: Optional[int] """ request_info = self.to_get_request_information( request_configuration ) from .......models.o_data_errors import o_data_error error_mapping: Dict[str, ParsableFactory] = { "4XX": o_data_error.ODataError, "5XX": o_data_error.ODataError, } if not self.request_adapter: raise Exception("Http core is null") return await self.request_adapter.send_primitive_async(request_info, "int", error_mapping) def to_get_request_information(self,request_configuration: Optional[CountRequestBuilderGetRequestConfiguration] = None) -> RequestInformation: """ Get the number of the resource Args: requestConfiguration: Configuration for the request such as headers, query parameters, and middleware options. Returns: RequestInformation """ request_info = RequestInformation() request_info.url_template = self.url_template request_info.path_parameters = self.path_parameters request_info.http_method = Method.GET request_info.headers["Accept"] = ["text/plain"] if request_configuration: request_info.add_request_headers(request_configuration.headers) request_info.set_query_string_parameters_from_raw_object(request_configuration.query_parameters) request_info.add_request_options(request_configuration.options) return request_info @dataclass class CountRequestBuilderGetQueryParameters(): """ Get the number of the resource """ def get_query_parameter(self,original_name: Optional[str] = None) -> str: """ Maps the query parameters names to their encoded names for the URI template parsing. Args: originalName: The original query parameter name in the class. 
Returns: str """ if original_name is None: raise Exception("original_name cannot be undefined") if original_name == "filter": return "%24filter" if original_name == "search": return "%24search" return original_name # Filter items by property values filter: Optional[str] = None # Search items by search phrases search: Optional[str] = None @dataclass class CountRequestBuilderGetRequestConfiguration(): """ Configuration for the request such as headers, query parameters, and middleware options. """ # Request headers headers: Optional[Dict[str, Union[str, List[str]]]] = None # Request options options: Optional[List[RequestOption]] = None # Request query parameters query_parameters: Optional[CountRequestBuilder.CountRequestBuilderGetQueryParameters] = None
PypiClean
/gaeframework-2.0.10.tar.gz/gaeframework-2.0.10/google_appengine/lib/django_1_2/django/db/backends/sqlite3/introspection.py
import re from django.db.backends import BaseDatabaseIntrospection # This light wrapper "fakes" a dictionary interface, because some SQLite data # types include variables in them -- e.g. "varchar(30)" -- and can't be matched # as a simple dictionary lookup. class FlexibleFieldLookupDict: # Maps SQL types to Django Field types. Some of the SQL types have multiple # entries here because SQLite allows for anything and doesn't normalize the # field type; it uses whatever was given. base_data_types_reverse = { 'bool': 'BooleanField', 'boolean': 'BooleanField', 'smallint': 'SmallIntegerField', 'smallint unsigned': 'PositiveSmallIntegerField', 'smallinteger': 'SmallIntegerField', 'int': 'IntegerField', 'integer': 'IntegerField', 'bigint': 'BigIntegerField', 'integer unsigned': 'PositiveIntegerField', 'decimal': 'DecimalField', 'real': 'FloatField', 'text': 'TextField', 'char': 'CharField', 'date': 'DateField', 'datetime': 'DateTimeField', 'time': 'TimeField', } def __getitem__(self, key): key = key.lower() try: return self.base_data_types_reverse[key] except KeyError: import re m = re.search(r'^\s*(?:var)?char\s*\(\s*(\d+)\s*\)\s*$', key) if m: return ('CharField', {'max_length': int(m.group(1))}) raise KeyError class DatabaseIntrospection(BaseDatabaseIntrospection): data_types_reverse = FlexibleFieldLookupDict() def get_table_list(self, cursor): "Returns a list of table names in the current database." # Skip the sqlite_sequence system table used for autoincrement key # generation. cursor.execute(""" SELECT name FROM sqlite_master WHERE type='table' AND NOT name='sqlite_sequence' ORDER BY name""") return [row[0] for row in cursor.fetchall()] def get_table_description(self, cursor, table_name): "Returns a description of the table, with the DB-API cursor.description interface." return [(info['name'], info['type'], None, None, None, None, info['null_ok']) for info in self._table_info(cursor, table_name)] def get_relations(self, cursor, table_name): """ Returns a dictionary of {field_index: (field_index_other_table, other_table)} representing all relationships to the given table. Indexes are 0-based. """ # Dictionary of relations to return relations = {} # Schema for this table cursor.execute("SELECT sql FROM sqlite_master WHERE tbl_name = %s AND type = %s", [table_name, "table"]) results = cursor.fetchone()[0].strip() results = results[results.index('(')+1:results.rindex(')')] # Walk through and look for references to other tables. SQLite doesn't # really have enforced references, but since it echoes out the SQL used # to create the table we can look for REFERENCES statements used there. 
for field_index, field_desc in enumerate(results.split(',')): field_desc = field_desc.strip() if field_desc.startswith("UNIQUE"): continue m = re.search('references (.*) \(["|](.*)["|]\)', field_desc, re.I) if not m: continue table, column = [s.strip('"') for s in m.groups()] cursor.execute("SELECT sql FROM sqlite_master WHERE tbl_name = %s", [table]) result = cursor.fetchone() if not result: continue other_table_results = result[0].strip() li, ri = other_table_results.index('('), other_table_results.rindex(')') other_table_results = other_table_results[li+1:ri] for other_index, other_desc in enumerate(other_table_results.split(',')): other_desc = other_desc.strip() if other_desc.startswith('UNIQUE'): continue name = other_desc.split(' ', 1)[0].strip('"') if name == column: relations[field_index] = (other_index, table) break return relations def get_indexes(self, cursor, table_name): """ Returns a dictionary of fieldname -> infodict for the given table, where each infodict is in the format: {'primary_key': boolean representing whether it's the primary key, 'unique': boolean representing whether it's a unique index} """ indexes = {} for info in self._table_info(cursor, table_name): indexes[info['name']] = {'primary_key': info['pk'] != 0, 'unique': False} cursor.execute('PRAGMA index_list(%s)' % self.connection.ops.quote_name(table_name)) # seq, name, unique for index, unique in [(field[1], field[2]) for field in cursor.fetchall()]: if not unique: continue cursor.execute('PRAGMA index_info(%s)' % self.connection.ops.quote_name(index)) info = cursor.fetchall() # Skip indexes across multiple fields if len(info) != 1: continue name = info[0][2] # seqno, cid, name indexes[name]['unique'] = True return indexes def _table_info(self, cursor, name): cursor.execute('PRAGMA table_info(%s)' % self.connection.ops.quote_name(name)) # cid, name, type, notnull, dflt_value, pk return [{'name': field[1], 'type': field[2], 'null_ok': not field[3], 'pk': field[5] # undocumented } for field in cursor.fetchall()]
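

if __name__ == "__main__":
    # Quick illustration of the flexible type lookup defined above; these keys
    # exercise both the direct mapping and the varchar(n) regex fallback.
    lookup = FlexibleFieldLookupDict()
    print(lookup['integer'])      # -> 'IntegerField'
    print(lookup['varchar(30)'])  # -> ('CharField', {'max_length': 30})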
PypiClean
/dataclasses_avroschema-0.47.2.tar.gz/dataclasses_avroschema-0.47.2/dataclasses_avroschema/case.py
import typing

import casefy

from .fields.field_utils import ENUM

# casefy.camelcase('foo_bar_baz')      # => "fooBarBaz"
# casefy.capitalcase('foo_bar_baz')    # => "Foo_bar_baz"
# casefy.constcase('FooBarBaz')        # => "_FOO_BAR_BAZ"
# casefy.lowercase('FooBarBaz')        # => "foobarbaz"
# casefy.pascalcase('FooBarBaz')       # => "FooBarBaz"
# casefy.pathcase('foo_bar_baz')       # => "foo/bar/baz"
# casefy.snakecase('FooBarBaz')        # => "foo_bar_baz"
# casefy.kebabcase('FooBarBaz')        # => "-foo-bar-baz"
# casefy.upperkebabcase('FooBarBaz')   # => "FOO-BAR"
# casefy.trimcase('FooBarBaz')         # => "FooBarBaz"
# casefy.uppercase('FooBarBaz')        # => "FOOBARBAZ"
# casefy.alphanumcase('Foo_123 Bar!')  # => 'Foo123Bar'

CAMELCASE = "camelcase"
CAPITALCASE = "capitalcase"
CONSTCASE = "constcase"
LOWERCASE = "lowercase"
PASCALCASE = "pascalcase"
PATHCASE = "PATHCASE"
SNAKECASE = "snakecase"
SPINALCASE = "spinalcase"
UPPERSPINALCASE = "upperkebabcase"
TRIMCASE = "trimcase"
UPPERCASE = "uppercase"
ALPHANUMCASE = "alphanumcase"

CASE_TO_FUNC = {
    CAMELCASE: casefy.camelcase,
    CAPITALCASE: casefy.capitalcase,
    CONSTCASE: casefy.constcase,
    LOWERCASE: casefy.lowercase,
    PASCALCASE: casefy.pascalcase,
    PATHCASE: lambda value: casefy.separatorcase(value, separator="/"),
    SNAKECASE: casefy.snakecase,
    SPINALCASE: casefy.kebabcase,
    UPPERSPINALCASE: casefy.upperkebabcase,
    TRIMCASE: str.strip,
    UPPERCASE: casefy.uppercase,
    ALPHANUMCASE: casefy.alphanumcase,
}


def case_item(item: typing.Dict, case_type: str) -> typing.Dict:
    case_func = CASE_TO_FUNC[case_type]

    new_field = {}
    for key, value in item.items():
        if key == "name":
            case_name = case_func(value)  # type: ignore
            new_field[key] = case_name
        elif isinstance(value, dict) and value.get("name"):
            # means that it is a complex type with a record
            new_record = case_record(value, case_type=case_type)
            new_field[key] = new_record
        else:
            new_field[key] = value
    return new_field


def case_record(avro_schema_dict: typing.Dict, case_type: str) -> typing.Dict:
    fields = avro_schema_dict.get("fields")

    if fields is not None:
        new_fields = []
        for field in fields:
            new_field = case_item(field, case_type)
            new_fields.append(new_field)

        avro_schema_dict["fields"] = new_fields
        return avro_schema_dict
    elif avro_schema_dict["type"] == ENUM:
        # enums should not be case, like records
        return avro_schema_dict
    else:
        return case_item(avro_schema_dict, case_type)
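

def _example_case_record() -> typing.Dict:
    # Illustration of applying a case to an Avro record schema with the helpers
    # above; the schema here is a made-up example, not one shipped by the library.
    schema = {
        "type": "record",
        "name": "UserAdvance",
        "fields": [{"name": "first_name", "type": "string"}],
    }
    # Returns {'type': 'record', 'name': 'UserAdvance',
    #          'fields': [{'name': 'FirstName', 'type': 'string'}]}
    return case_record(schema, case_type=PASCALCASE)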
PypiClean
/check_ezproxy-0.2.1.tar.gz/check_ezproxy-0.2.1/README.md
##Check for EZProxy configurations

A simple command-line script to check that EZProxy configurations exist for library databases. Built for checking LibGuides A-Z databases and OCLC Knowledge Base, but can be easily extended.

Built/Tested with Python 3.4, but should work back to at least 2.7.

##Installation

Easiest way is:

``pip install check_ezproxy``

Or:

1. Clone the repository.
2. Optional: If you want to make the file executable, run ``chmod +x check_ezproxy_run.py`` (in a bash system; this makes it so you shouldn't have to prepend each call with python)
3. From the main directory, run ``python check_ezproxy_run.py -args`` (or ``./check_ezproxy_run.py -args`` if you did step 2 above)
4. Run ``python setup.py install`` to install (after this, you can just use ``check_ezproxy``; setup makes it available everywhere)

Or use Docker:

1. Clone the repository
2. Edit the Dockerfile to fill in your information for environment variables (takes the place of editing cfg or JSON config)
3. With your Docker daemon running and from the project root, run ``docker build -t check_ezproxy .``
4. Now you can run Docker run commands with flags for this tool at the end, i.e. ``docker run check_ezproxy --urlsource=libguides``

Docker support is still fairly limited, and with the current Dockerfile you won't be able to check KBART files or use a JSON config.

##Use

Installing via pip or setup.py will make the ``check_ezproxy`` command available globally. It is suggested you install it into a virtual environment to:

1. not permanently pollute the global namespace in your shell
2. not pollute your base Python packages, as this utility uses some popular libraries and even if this tool isn't necessarily reliant on a particular version, another tool might be. Best to keep em separated.

##Configuration

Some minimal configuration is needed for the checks to be able to run, and there are a couple of ways to supply this.

- Rename 'config\_template.py' to 'config.py' and supply the listed parameters. This is best for quick testing or if you want to extend the tool.
- Make a .json file containing the necessary config, then point the tool to it with the -c argument. You can optionally use a -s flag to save these configs so you don't need to point toward them again, but note that these will override any other configs you may try to give. You will need to use ``--flush-config`` to use another configuration.
- You can also use the package as a tool for making your own utility and provide config in your script. Details below.

Configurations to set are (an example JSON config is sketched after this list):

- **ezproxy_prefix**- the full prefix for your EZProxy server, protocol included
- **libguides_api_url**- The full URL to LibGuides API for database assets including your site id, API key, and asset\_type=10 as query parameters (v. 1.1 only at this time, v. 1.2 soon)
- **ezproxy_error_text**- Some text on your page for proxied links with no stanzas set that you can be relatively sure is unique. The tool will match the page text against this to determine which links are not properly configured.
- **kb_wskey**- If you want to check links directly from the OCLC knowledge base, you will need to apply for an API key. Only the WSKey is needed here, not the secret.
- **kb_collections**- The name of the collections in the knowledge base to check. If using Python for config these can be in a tuple or list (or any iterable), if using JSON they go in an array. Even if you only have 1 to check, it needs to be in an iterable wrapper.
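The exact key names should be checked against 'config\_template.py', but based on the parameters above a config.json would look roughly like this (all values are placeholders):

```json
{
    "ezproxy_prefix": "https://ezproxy.example.edu/login?url=",
    "libguides_api_url": "https://lgapi.libapps.com/1.1/assets?site_id=123&key=your-key&asset_type=10",
    "ezproxy_error_text": "your EZproxy administrator must first",
    "kb_wskey": "your-oclc-wskey",
    "kb_collections": ["Your Collection Name"]
}
```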
##Arguments

Note: none of these arguments are mandatory, though some rely on others to be set as well.

**-u, --urlsource**

The location to get the urls to run the check on. Current possible options are 'libguides', 'oclc', and 'kbart' (will have to use -k to provide the path to the KBART file to check. -k can be used on its own as well).

**-t, --type**

The type of check to run. Current supported checks are 'text' and 'link'.

``-t text`` will make a call to retrieve the page through the proxy server and compare the text returned to known error text (can't use status codes because those are 200 even for a wrongly configured database). At Westfield this text is 'your EZproxy administrator must first', but you will need to set comparison text for your institution as ERROR\_TEXT in constants.py.

``-t link`` will just make a head request and check the status code.

**-k, --kbart**

The path to a kbart file to check. If using this, it is not necessary to set a --urlsource, though it is fine to do so. The path can be relative, absolute, or relative to your home directory (~).

**-w, --write**

Write the output to a file rather than printing to the standard output.

**-f, --output-file**

The path and filename you wish to use in conjunction with the ``-w`` flag. It is necessary to also use ``-w``, but if you do not specify a file here it will default to 'check\_proxy.txt' in your home directory.

**-p, --proxy**

Force the presence or absence of a proxy prefix. Acceptable values are:

- ``force``
- ``no_proxy``

``force`` will cause every link to have a proxy prefix regardless of whether it 'should' (i.e. whether the LibGuides Database A-Z list has it set to not be proxied)

``no_proxy`` will do the opposite and force the link to have no proxy prefix, useful for checking for dead links which will still be dead when proxied (and which might give false negatives if a link is dead and proxied)

**-c, --config-file**

Use a JSON config file rather than one of the Python options for config files. Arg just takes the path (relative, absolute, or relative to home) of the .json file you want to use.

**-s, --save-config**

To be used in conjunction with ``-c``, will save the config file you used so that you don't have to provide the path to it every time. This saved config will trump everything.

**--flush-config**

Flush any saved JSON configuration and use either a Python or new JSON configuration. Can be used on the same call as ``-c new_json_config.json``.

##Example Calls

Basic call to check link status on the OCLC Knowledge Base collection defined in your configuration.

``check_ezproxy -u oclc -t link``

Call with a JSON config that we are saving.

``check_ezproxy --config=./config.json -s``

Check the set OCLC knowledge base collection with an updated config.json (you can combine flags, but any flag that takes an argument must be the last one, and you can only have 1 of these when combining)

``check_ezproxy --flush-config -sc ./config.json``

##Add New Url Sources or types of checks

Import the 'register' decorator from registration.py.

###For a new Url source

Write a function that returns an iterable (i.e. list or tuple) of classes or named tuples with a url and name element.
A named tuple can be used like so:

```python
from collections import namedtuple

Record = namedtuple('Record', 'name url')
new_record = Record('this_name', 'http://www.example.com')
print(new_record.name)  # will output 'this_name'
```

Decorate that function with the register decorator, which takes the name you want the argument to be called in the command line script and 'places':

```python
from registration import register

@register('get_links_here', 'places')
def get_links_here():
    return [list_of_record_named_tuples]
```

Save that file to the same directory as the rest of the scripts, import it in check proxy, and it should automatically be available in the command line script.

###For a new type of check

Do the same thing as for a url source above, except the second argument to the decorator should be 'checks' and your function should take a database argument and a config argument, run some kind of check, and return that object if everything is fine, or an object with name, url, and a new status attribute if something went wrong.

```python
from registration import register

@register('check_em_good', 'checks')
def check_em_good(db):
    check(db)
    return db
```

You can also just add the check to the checks.py or places.py files and (optionally) make a pull request to bring your checks into the main repository.
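If the description above is followed literally (the function receives the database record and the config, and failures are reported by returning an object that also carries a ``status``), a slightly fuller sketch might look like the following. The ``head_check`` name, the use of ``requests`` and the ``SimpleNamespace`` return value are all illustrative assumptions, not part of the package:

```python
from types import SimpleNamespace

import requests  # illustrative choice of HTTP library

from registration import register


@register('head_check', 'checks')  # 'head_check' would become a value usable with -t
def head_check(db, config):
    response = requests.head(db.url, allow_redirects=True)
    if response.status_code < 400:
        return db  # link looks fine, hand the record back unchanged
    # something went wrong: report name, url and a status describing the problem
    return SimpleNamespace(name=db.name, url=db.url, status=response.status_code)
```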
PypiClean
/django-mp-suppliers-0.4.3.tar.gz/django-mp-suppliers-0.4.3/suppliers/models.py
from django.db import models from django.utils.translation import ugettext_lazy as _ from ordered_model.models import OrderedModel from ckeditor.fields import RichTextField from exchange.models import CurrencyField class Supplier(models.Model): name = models.CharField(_('Supplier name'), max_length=255) short_name = models.CharField(_('Short name'), max_length=255) code = models.CharField(_('Code'), max_length=255, unique=True) country = models.CharField(_('Country'), max_length=255, blank=True) currency = CurrencyField() delivery_info = RichTextField(_('Delivery info'), blank=True) email = models.EmailField(_('Email'), max_length=255, blank=True) discount = models.IntegerField(_('Discount, %'), blank=True, null=True) markup = models.IntegerField(_('Mark-up, %'), blank=True, null=True) price_updated = models.DateTimeField( _('Price updated date'), blank=True, editable=False, null=True) is_visible_for_unregistered_users = models.BooleanField( _('Is visible for unregistered users'), default=True) @property def warehouse_count(self): return self.warehouses.count() warehouse_count.fget.short_description = _('Warehouse count') def __str__(self): return self.name def clean_products(self): return self.products.all().delete() class Meta: verbose_name = _('Supplier') verbose_name_plural = _('Suppliers') class SupplierWarehouse(OrderedModel): supplier = models.ForeignKey( Supplier, verbose_name=_('Supplier'), related_name='warehouses', on_delete=models.CASCADE) name = models.CharField(_('Warehouse name'), max_length=255) short_name = models.CharField(_('Short name'), max_length=255) price_updated = models.DateTimeField( _('Price updated date'), blank=True, editable=False, null=True) order_with_respect_to = 'supplier' def __str__(self): return '{} - {}'.format(str(self.supplier), self.name) class Meta(OrderedModel.Meta): verbose_name = _('Supplier warehouse') verbose_name_plural = _('Supplier warehouses') class SupplierField(models.ForeignKey): def __init__( self, to=Supplier, verbose_name=_('Supplier'), blank=True, null=True, on_delete=models.PROTECT, **kwargs): super().__init__( to=to, verbose_name=verbose_name, blank=blank, null=null, on_delete=on_delete, **kwargs)
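# Example usage (an illustrative sketch, not part of the original module): since
# SupplierField is just a ForeignKey with project defaults, a model in another
# app might declare it like this (related_name='products' matches what
# Supplier.clean_products expects):
#
#   class Product(models.Model):
#       supplier = SupplierField(related_name='products')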
PypiClean
/social-auth-steemconnect-0.0.3.tar.gz/social-auth-steemconnect-0.0.3/README.md
python-social-auth-steemconnect
===============================

Pluggable authentication backend for python-social-auth that allows authentication via SteemConnect (v2).

## Installation instructions

From pypi

    $ pip install social-auth-steemconnect

or clone from Github

    $ git clone git@github.com:wise-team/python-social-auth-steemconnect.git
    $ cd python-social-auth-steemconnect && sudo python setup.py install

## Pre-requisites

`python-social-auth` must be installed and configured first. Please visit the [python-social-auth documentation](http://python-social-auth-docs.readthedocs.io/) for instructions.

## Configuration instructions

1. Add the SteemConnect backend to AUTHENTICATION_BACKENDS:

        AUTHENTICATION_BACKENDS = (
            'steemconnect.backends.SteemConnectOAuth2',
            ...
            'django.contrib.auth.backends.ModelBackend',
        )

2. Add your SteemConnect settings to your django `settings.py` file.

        SOCIAL_AUTH_STEEMCONNECT_KEY = '<your-steemconnect-app-account>' # ex. 'myproject.app'
        SOCIAL_AUTH_STEEMCONNECT_DEFAULT_SCOPE = ['vote', 'comment']

## Examples

Ready-to-use examples of projects in Django, Flask and Tornado frameworks are prepared here: https://github.com/wise-team/python-social-auth-steemconnect-examples

## Changelog

### 0.0.3

* SteemConnect endpoint changed from `https://v2.steemconnect.com` to `https://steemconnect.com`

### 0.0.2

* package rename

### 0.0.1

* Initial release
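For quick reference, the configuration steps above typically combine as follows in a Django project. This is only a sketch (the linked examples repository is the authoritative reference): the URL include follows the standard `social_django` convention and the backend name `steemconnect` is inferred from the `SOCIAL_AUTH_STEEMCONNECT_*` settings prefix, so treat both as assumptions.

    # settings.py -- minimal sketch combining the settings shown above
    AUTHENTICATION_BACKENDS = (
        'steemconnect.backends.SteemConnectOAuth2',
        'django.contrib.auth.backends.ModelBackend',
    )
    SOCIAL_AUTH_STEEMCONNECT_KEY = 'myproject.app'                # your SteemConnect app account
    SOCIAL_AUTH_STEEMCONNECT_DEFAULT_SCOPE = ['vote', 'comment']  # permissions requested from the user

    # urls.py -- assumes the standard social_django URL include
    from django.urls import include, path

    urlpatterns = [
        path('', include('social_django.urls', namespace='social')),
    ]

    # With this in place, sending the user to /login/steemconnect/ should start
    # the OAuth2 flow (assuming the backend's name attribute is "steemconnect").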
PypiClean
/msgraph-sdk-1.0.0a3.tar.gz/msgraph-sdk-1.0.0a3/msgraph/generated/me/joined_teams/item/channels/item/messages/item/replies/delta/delta_request_builder.py
from __future__ import annotations from dataclasses import dataclass from kiota_abstractions.get_path_parameters import get_path_parameters from kiota_abstractions.method import Method from kiota_abstractions.request_adapter import RequestAdapter from kiota_abstractions.request_information import RequestInformation from kiota_abstractions.request_option import RequestOption from kiota_abstractions.response_handler import ResponseHandler from kiota_abstractions.serialization import Parsable, ParsableFactory from typing import Any, Callable, Dict, List, Optional, Union from . import delta_response from ..........models.o_data_errors import o_data_error class DeltaRequestBuilder(): """ Provides operations to call the delta method. """ def __init__(self,request_adapter: RequestAdapter, path_parameters: Optional[Union[Dict[str, Any], str]] = None) -> None: """ Instantiates a new DeltaRequestBuilder and sets the default values. Args: pathParameters: The raw url or the Url template parameters for the request. requestAdapter: The request adapter to use to execute the requests. """ if path_parameters is None: raise Exception("path_parameters cannot be undefined") if request_adapter is None: raise Exception("request_adapter cannot be undefined") # Url template to use to build the URL for the current request builder self.url_template: str = "{+baseurl}/me/joinedTeams/{team%2Did}/channels/{channel%2Did}/messages/{chatMessage%2Did}/replies/microsoft.graph.delta(){?%24top,%24skip,%24search,%24filter,%24count,%24select,%24orderby}" url_tpl_params = get_path_parameters(path_parameters) self.path_parameters = url_tpl_params self.request_adapter = request_adapter def create_get_request_information(self,request_configuration: Optional[DeltaRequestBuilderGetRequestConfiguration] = None) -> RequestInformation: """ Invoke function delta Args: requestConfiguration: Configuration for the request such as headers, query parameters, and middleware options. Returns: RequestInformation """ request_info = RequestInformation() request_info.url_template = self.url_template request_info.path_parameters = self.path_parameters request_info.http_method = Method.GET request_info.headers["Accept"] = "application/json" if request_configuration: request_info.add_request_headers(request_configuration.headers) request_info.set_query_string_parameters_from_raw_object(request_configuration.query_parameters) request_info.add_request_options(request_configuration.options) return request_info async def get(self,request_configuration: Optional[DeltaRequestBuilderGetRequestConfiguration] = None, response_handler: Optional[ResponseHandler] = None) -> Optional[delta_response.DeltaResponse]: """ Invoke function delta Args: requestConfiguration: Configuration for the request such as headers, query parameters, and middleware options. 
responseHandler: Response handler to use in place of the default response handling provided by the core service Returns: Optional[delta_response.DeltaResponse] """ request_info = self.create_get_request_information( request_configuration ) error_mapping: Dict[str, ParsableFactory] = { "4XX": o_data_error.ODataError, "5XX": o_data_error.ODataError, } if not self.request_adapter: raise Exception("Http core is null") return await self.request_adapter.send_async(request_info, delta_response.DeltaResponse, response_handler, error_mapping) @dataclass class DeltaRequestBuilderGetQueryParameters(): """ Invoke function delta """ # Include count of items count: Optional[bool] = None # Filter items by property values filter: Optional[str] = None # Order items by property values orderby: Optional[List[str]] = None # Search items by search phrases search: Optional[str] = None # Select properties to be returned select: Optional[List[str]] = None # Skip the first n items skip: Optional[int] = None # Show only the first n items top: Optional[int] = None def get_query_parameter(self,original_name: Optional[str] = None) -> str: """ Maps the query parameters names to their encoded names for the URI template parsing. Args: originalName: The original query parameter name in the class. Returns: str """ if original_name is None: raise Exception("original_name cannot be undefined") if original_name == "count": return "%24count" if original_name == "filter": return "%24filter" if original_name == "orderby": return "%24orderby" if original_name == "search": return "%24search" if original_name == "select": return "%24select" if original_name == "skip": return "%24skip" if original_name == "top": return "%24top" return original_name @dataclass class DeltaRequestBuilderGetRequestConfiguration(): """ Configuration for the request such as headers, query parameters, and middleware options. """ # Request headers headers: Optional[Dict[str, str]] = None # Request options options: Optional[List[RequestOption]] = None # Request query parameters query_parameters: Optional[DeltaRequestBuilder.DeltaRequestBuilderGetQueryParameters] = None
PypiClean
/akari_client-0.3.3-py3-none-any.whl/akari_client/grpc/m5stack.py
import json from typing import Dict, Iterator, Optional, cast import grpc from akari_client.color import Color from akari_client.position import Positions from akari_proto import m5stack_pb2 from akari_proto.grpc.error import deserialize_error from akari_proto.m5stack_pb2_grpc import M5StackServiceStub from google.protobuf.empty_pb2 import Empty from ..m5stack_client import M5ComDict, M5StackClient from ._error import serializer def _as_proto_color(color: Optional[Color]) -> m5stack_pb2.Color: if color is None: return m5stack_pb2.Color(red=-1, green=-1, blue=-1) return m5stack_pb2.Color( red=color.red, green=color.green, blue=color.blue, ) class GrpcM5StackClient(M5StackClient): def __init__(self, channel: grpc.Channel) -> None: self._stub = M5StackServiceStub(channel) @deserialize_error(serializer) def set_dout(self, pin_id: int, value: bool, sync: bool = True) -> None: binary_pins: Dict[str, bool] = {} if pin_id == 0: binary_pins["dout0"] = value elif pin_id == 1: binary_pins["dout1"] = value else: raise ValueError(f"Out of range pin_id: {pin_id}") request = m5stack_pb2.SetPinOutRequest( binary_pins=binary_pins, sync=sync, ) self._stub.SetPinOut(request) @deserialize_error(serializer) def set_pwmout(self, pin_id: int, value: int, sync: bool = True) -> None: int_pins: Dict[str, int] = {} if pin_id == 0: int_pins["pwmout0"] = value else: raise ValueError(f"Out of range pin_id: {pin_id}") request = m5stack_pb2.SetPinOutRequest( int_pins=int_pins, sync=sync, ) self._stub.SetPinOut(request) @deserialize_error(serializer) def set_allout( self, *, dout0: Optional[bool] = None, dout1: Optional[bool] = None, pwmout0: Optional[int] = None, sync: bool = True, ) -> None: binary_pins: Dict[str, bool] = {} int_pins: Dict[str, int] = {} if dout0 is not None: binary_pins["dout0"] = dout0 if dout1 is not None: binary_pins["dout1"] = dout1 if pwmout0 is not None: int_pins["pwmout0"] = pwmout0 request = m5stack_pb2.SetPinOutRequest( binary_pins=binary_pins, int_pins=int_pins, sync=sync, ) self._stub.SetPinOut(request) @deserialize_error(serializer) def reset_allout(self, sync: bool = True) -> None: self._stub.ResetPinOut(m5stack_pb2.ResetPinOutRequest(sync=sync)) @deserialize_error(serializer) def set_display_color(self, color: Color, sync: bool = True) -> None: request = m5stack_pb2.SetDisplayColorRequest( color=_as_proto_color(color), sync=sync, ) self._stub.SetDisplayColor(request) @deserialize_error(serializer) def set_display_text( self, text: str, pos_x: int = Positions.CENTER, pos_y: int = Positions.CENTER, size: int = 5, text_color: Optional[Color] = None, back_color: Optional[Color] = None, refresh: bool = True, sync: bool = True, ) -> None: request = m5stack_pb2.SetDisplayTextRequest( text=text, pos_x=pos_x, pos_y=pos_y, size=size, text_color=_as_proto_color(text_color), bg_color=_as_proto_color(back_color), refresh=refresh, sync=sync, ) self._stub.SetDisplayText(request) @deserialize_error(serializer) def set_display_image( self, filepath: str, pos_x: int = Positions.CENTER, pos_y: int = Positions.CENTER, scale: float = -1.0, sync: bool = True, ) -> None: request = m5stack_pb2.SetDisplayImageRequest( path=filepath, pos_x=pos_x, pos_y=pos_y, scale=scale, sync=sync, ) self._stub.SetDisplayImage(request) @deserialize_error(serializer) def reset_m5(self) -> None: self._stub.Reset(Empty()) @deserialize_error(serializer) def get(self) -> M5ComDict: data: m5stack_pb2.M5StackStatus = self._stub.Get(Empty()) return cast(M5ComDict, json.loads(data.status_json)) @deserialize_error(serializer) def 
get_stream(self) -> Iterator[M5ComDict]: stream = self._stub.GetStream(Empty()) for r in stream: data: m5stack_pb2.M5StackStatus = r yield cast(M5ComDict, json.loads(data.status_json))
PypiClean
/tensorflow_tflex-1.13.1rc1-cp27-cp27mu-manylinux1_x86_64.whl/tensorflow_tflex-1.13.1rc1.data/purelib/tensorflow/python/layers/core.py
from __future__ import absolute_import from __future__ import division from __future__ import print_function from tensorflow.python.keras import layers as keras_layers from tensorflow.python.layers import base from tensorflow.python.ops import init_ops from tensorflow.python.util import deprecation from tensorflow.python.util.tf_export import tf_export @tf_export(v1=['layers.Dense']) class Dense(keras_layers.Dense, base.Layer): """Densely-connected layer class. This layer implements the operation: `outputs = activation(inputs * kernel + bias)` Where `activation` is the activation function passed as the `activation` argument (if not `None`), `kernel` is a weights matrix created by the layer, and `bias` is a bias vector created by the layer (only if `use_bias` is `True`). Arguments: units: Integer or Long, dimensionality of the output space. activation: Activation function (callable). Set it to None to maintain a linear activation. use_bias: Boolean, whether the layer uses a bias. kernel_initializer: Initializer function for the weight matrix. If `None` (default), weights are initialized using the default initializer used by `tf.get_variable`. bias_initializer: Initializer function for the bias. kernel_regularizer: Regularizer function for the weight matrix. bias_regularizer: Regularizer function for the bias. activity_regularizer: Regularizer function for the output. kernel_constraint: An optional projection function to be applied to the kernel after being updated by an `Optimizer` (e.g. used to implement norm constraints or value constraints for layer weights). The function must take as input the unprojected variable and must return the projected variable (which must have the same shape). Constraints are not safe to use when doing asynchronous distributed training. bias_constraint: An optional projection function to be applied to the bias after being updated by an `Optimizer`. trainable: Boolean, if `True` also add variables to the graph collection `GraphKeys.TRAINABLE_VARIABLES` (see `tf.Variable`). name: String, the name of the layer. Layers with the same name will share weights, but to avoid mistakes we require reuse=True in such cases. reuse: Boolean, whether to reuse the weights of a previous layer by the same name. Properties: units: Python integer, dimensionality of the output space. activation: Activation function (callable). use_bias: Boolean, whether the layer uses a bias. kernel_initializer: Initializer instance (or name) for the kernel matrix. bias_initializer: Initializer instance (or name) for the bias. kernel_regularizer: Regularizer instance for the kernel matrix (callable) bias_regularizer: Regularizer instance for the bias (callable). activity_regularizer: Regularizer instance for the output (callable) kernel_constraint: Constraint function for the kernel matrix. bias_constraint: Constraint function for the bias. kernel: Weight matrix (TensorFlow variable or tensor). bias: Bias vector, if applicable (TensorFlow variable or tensor). 
""" def __init__(self, units, activation=None, use_bias=True, kernel_initializer=None, bias_initializer=init_ops.zeros_initializer(), kernel_regularizer=None, bias_regularizer=None, activity_regularizer=None, kernel_constraint=None, bias_constraint=None, trainable=True, name=None, **kwargs): super(Dense, self).__init__(units=units, activation=activation, use_bias=use_bias, kernel_initializer=kernel_initializer, bias_initializer=bias_initializer, kernel_regularizer=kernel_regularizer, bias_regularizer=bias_regularizer, activity_regularizer=activity_regularizer, kernel_constraint=kernel_constraint, bias_constraint=bias_constraint, trainable=trainable, name=name, **kwargs) @deprecation.deprecated( date=None, instructions='Use keras.layers.dense instead.') @tf_export(v1=['layers.dense']) def dense( inputs, units, activation=None, use_bias=True, kernel_initializer=None, bias_initializer=init_ops.zeros_initializer(), kernel_regularizer=None, bias_regularizer=None, activity_regularizer=None, kernel_constraint=None, bias_constraint=None, trainable=True, name=None, reuse=None): """Functional interface for the densely-connected layer. This layer implements the operation: `outputs = activation(inputs * kernel + bias)` where `activation` is the activation function passed as the `activation` argument (if not `None`), `kernel` is a weights matrix created by the layer, and `bias` is a bias vector created by the layer (only if `use_bias` is `True`). Arguments: inputs: Tensor input. units: Integer or Long, dimensionality of the output space. activation: Activation function (callable). Set it to None to maintain a linear activation. use_bias: Boolean, whether the layer uses a bias. kernel_initializer: Initializer function for the weight matrix. If `None` (default), weights are initialized using the default initializer used by `tf.get_variable`. bias_initializer: Initializer function for the bias. kernel_regularizer: Regularizer function for the weight matrix. bias_regularizer: Regularizer function for the bias. activity_regularizer: Regularizer function for the output. kernel_constraint: An optional projection function to be applied to the kernel after being updated by an `Optimizer` (e.g. used to implement norm constraints or value constraints for layer weights). The function must take as input the unprojected variable and must return the projected variable (which must have the same shape). Constraints are not safe to use when doing asynchronous distributed training. bias_constraint: An optional projection function to be applied to the bias after being updated by an `Optimizer`. trainable: Boolean, if `True` also add variables to the graph collection `GraphKeys.TRAINABLE_VARIABLES` (see `tf.Variable`). name: String, the name of the layer. reuse: Boolean, whether to reuse the weights of a previous layer by the same name. Returns: Output tensor the same shape as `inputs` except the last dimension is of size `units`. Raises: ValueError: if eager execution is enabled. """ layer = Dense(units, activation=activation, use_bias=use_bias, kernel_initializer=kernel_initializer, bias_initializer=bias_initializer, kernel_regularizer=kernel_regularizer, bias_regularizer=bias_regularizer, activity_regularizer=activity_regularizer, kernel_constraint=kernel_constraint, bias_constraint=bias_constraint, trainable=trainable, name=name, _scope=name, _reuse=reuse) return layer.apply(inputs) @tf_export(v1=['layers.Dropout']) class Dropout(keras_layers.Dropout, base.Layer): """Applies Dropout to the input. 
Dropout consists in randomly setting a fraction `rate` of input units to 0 at each update during training time, which helps prevent overfitting. The units that are kept are scaled by `1 / (1 - rate)`, so that their sum is unchanged at training time and inference time. Arguments: rate: The dropout rate, between 0 and 1. E.g. `rate=0.1` would drop out 10% of input units. noise_shape: 1D tensor of type `int32` representing the shape of the binary dropout mask that will be multiplied with the input. For instance, if your inputs have shape `(batch_size, timesteps, features)`, and you want the dropout mask to be the same for all timesteps, you can use `noise_shape=[batch_size, 1, features]`. seed: A Python integer. Used to create random seeds. See `tf.set_random_seed`. for behavior. name: The name of the layer (string). """ def __init__(self, rate=0.5, noise_shape=None, seed=None, name=None, **kwargs): super(Dropout, self).__init__(rate=rate, noise_shape=noise_shape, seed=seed, name=name, **kwargs) def call(self, inputs, training=False): return super(Dropout, self).call(inputs, training=training) @deprecation.deprecated( date=None, instructions='Use keras.layers.dropout instead.') @tf_export(v1=['layers.dropout']) def dropout(inputs, rate=0.5, noise_shape=None, seed=None, training=False, name=None): """Applies Dropout to the input. Dropout consists in randomly setting a fraction `rate` of input units to 0 at each update during training time, which helps prevent overfitting. The units that are kept are scaled by `1 / (1 - rate)`, so that their sum is unchanged at training time and inference time. Arguments: inputs: Tensor input. rate: The dropout rate, between 0 and 1. E.g. "rate=0.1" would drop out 10% of input units. noise_shape: 1D tensor of type `int32` representing the shape of the binary dropout mask that will be multiplied with the input. For instance, if your inputs have shape `(batch_size, timesteps, features)`, and you want the dropout mask to be the same for all timesteps, you can use `noise_shape=[batch_size, 1, features]`. seed: A Python integer. Used to create random seeds. See `tf.set_random_seed` for behavior. training: Either a Python boolean, or a TensorFlow boolean scalar tensor (e.g. a placeholder). Whether to return the output in training mode (apply dropout) or in inference mode (return the input untouched). name: The name of the layer (string). Returns: Output tensor. Raises: ValueError: if eager execution is enabled. """ layer = Dropout(rate, noise_shape=noise_shape, seed=seed, name=name) return layer.apply(inputs, training=training) @tf_export(v1=['layers.Flatten']) class Flatten(keras_layers.Flatten, base.Layer): """Flattens an input tensor while preserving the batch axis (axis 0). Arguments: data_format: A string, one of `channels_last` (default) or `channels_first`. The ordering of the dimensions in the inputs. `channels_last` corresponds to inputs with shape `(batch, ..., channels)` while `channels_first` corresponds to inputs with shape `(batch, channels, ...)`. Examples: ``` x = tf.placeholder(shape=(None, 4, 4), dtype='float32') y = Flatten()(x) # now `y` has shape `(None, 16)` x = tf.placeholder(shape=(None, 3, None), dtype='float32') y = Flatten()(x) # now `y` has shape `(None, None)` ``` """ pass @deprecation.deprecated( date=None, instructions='Use keras.layers.flatten instead.') @tf_export(v1=['layers.flatten']) def flatten(inputs, name=None, data_format='channels_last'): """Flattens an input tensor while preserving the batch axis (axis 0). 
Arguments: inputs: Tensor input. name: The name of the layer (string). data_format: A string, one of `channels_last` (default) or `channels_first`. The ordering of the dimensions in the inputs. `channels_last` corresponds to inputs with shape `(batch, height, width, channels)` while `channels_first` corresponds to inputs with shape `(batch, channels, height, width)`. Returns: Reshaped tensor. Examples: ``` x = tf.placeholder(shape=(None, 4, 4), dtype='float32') y = flatten(x) # now `y` has shape `(None, 16)` x = tf.placeholder(shape=(None, 3, None), dtype='float32') y = flatten(x) # now `y` has shape `(None, None)` ``` """ layer = Flatten(name=name, data_format=data_format) return layer.apply(inputs) # Aliases FullyConnected = Dense fully_connected = dense
PypiClean
/ariadne_codegen-0.8.0.tar.gz/ariadne_codegen-0.8.0/ariadne_codegen/schema.py
from pathlib import Path from typing import Dict, Generator, List, Optional, Tuple, cast import httpx from graphql import ( DefinitionNode, DirectiveLocation, FragmentDefinitionNode, GraphQLArgument, GraphQLDirective, GraphQLSchema, GraphQLString, GraphQLSyntaxError, IntrospectionQuery, NoUnusedFragmentsRule, OperationDefinitionNode, build_ast_schema, build_client_schema, get_introspection_query, parse, specified_rules, validate, ) from .client_generators.constants import MIXIN_FROM_NAME, MIXIN_IMPORT_NAME, MIXIN_NAME from .exceptions import ( IntrospectionError, InvalidGraphqlSyntax, InvalidOperationForSchema, ) def filter_operations_definitions( definitions: Tuple[DefinitionNode, ...] ) -> List[OperationDefinitionNode]: """Return list including only operations definitions.""" return [d for d in definitions if isinstance(d, OperationDefinitionNode)] def filter_fragments_definitions( definitions: Tuple[DefinitionNode, ...] ) -> List[FragmentDefinitionNode]: """Return list including only fragments definitions.""" return [d for d in definitions if isinstance(d, FragmentDefinitionNode)] def get_graphql_queries( queries_path: str, schema: GraphQLSchema ) -> Tuple[DefinitionNode, ...]: """Get graphql queries definitions build from provided path.""" queries_str = load_graphql_files_from_path(Path(queries_path)) queries_ast = parse(queries_str) validation_errors = validate( schema=schema, document_ast=queries_ast, rules=[r for r in specified_rules if r is not NoUnusedFragmentsRule], ) if validation_errors: raise InvalidOperationForSchema( "\n\n".join(error.message for error in validation_errors) ) return queries_ast.definitions def get_graphql_schema_from_url( url: str, headers: Optional[Dict[str, str]] = None, verify_ssl: bool = True ) -> GraphQLSchema: return build_client_schema( introspect_remote_schema(url=url, headers=headers, verify_ssl=verify_ssl), assume_valid=True, ) def introspect_remote_schema( url: str, headers: Optional[Dict[str, str]] = None, verify_ssl: bool = True ) -> IntrospectionQuery: try: response = httpx.post( url, json={"query": get_introspection_query(descriptions=False)}, headers=headers, verify=verify_ssl, ) except httpx.InvalidURL as exc: raise IntrospectionError(f"Invalid remote schema url: {url}") from exc if not response.is_success: raise IntrospectionError( "Failure of remote schema introspection. " f"HTTP status code: {response.status_code}" ) try: response_json = response.json() except ValueError as exc: raise IntrospectionError("Introspection result is not a valid json.") from exc if (not isinstance(response_json, dict)) or ("data" not in response_json): raise IntrospectionError("Invalid introspection result format.") errors = response_json.get("errors") if errors: raise IntrospectionError(f"Introspection errors: {errors}") data = response_json["data"] if not isinstance(data, dict): raise IntrospectionError("Invalid data key in introspection result.") return cast(IntrospectionQuery, data) def get_graphql_schema_from_path(schema_path: str) -> GraphQLSchema: """Get graphql schema build from provided path.""" schema_str = load_graphql_files_from_path(Path(schema_path)) graphql_ast = parse(schema_str) schema: GraphQLSchema = build_ast_schema(graphql_ast, assume_valid=True) return schema def load_graphql_files_from_path(path: Path) -> str: """ Get schema from given path. If path is a directory, collect schemas from multiple files. 
""" if path.is_dir(): schema_list = [read_graphql_file(f) for f in sorted(walk_graphql_files(path))] return "\n".join(schema_list) return read_graphql_file(path.resolve()) def walk_graphql_files(path: Path) -> Generator[Path, None, None]: """Find graphql files within given path.""" extensions = (".graphql", ".graphqls", ".gql") for file_ in path.glob("**/*"): if file_.suffix in extensions: yield file_ def read_graphql_file(path: Path) -> str: """Return content of file.""" with open(path, "r", encoding="utf-8") as graphql_file: schema = graphql_file.read() try: parse(schema) except GraphQLSyntaxError as exc: raise InvalidGraphqlSyntax(f"Invalid graphql syntax in file {path}") from exc return schema def add_mixin_directive_to_schema(schema: GraphQLSchema) -> GraphQLSchema: if MIXIN_NAME in {d.name for d in schema.directives}: return schema schema.directives += ( GraphQLDirective( name=MIXIN_NAME, locations=[DirectiveLocation.FIELD, DirectiveLocation.FRAGMENT_DEFINITION], args={ MIXIN_IMPORT_NAME: GraphQLArgument(type_=GraphQLString), MIXIN_FROM_NAME: GraphQLArgument(type_=GraphQLString), }, is_repeatable=True, ), ) return schema
PypiClean
/tb-rest-client-3.5.tar.gz/tb-rest-client-3.5/tb_rest_client/models/models_pe/notification_rule_recipients_config.py
# Copyright 2023. ThingsBoard # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import pprint import re # noqa: F401 import six class NotificationRuleRecipientsConfig(object): """NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually. """ """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'trigger_type': 'str' } attribute_map = { 'trigger_type': 'triggerType' } def __init__(self, trigger_type=None): # noqa: E501 """NotificationRuleRecipientsConfig - a model defined in Swagger""" # noqa: E501 self._trigger_type = None self.discriminator = None self.trigger_type = trigger_type @property def trigger_type(self): """Gets the trigger_type of this NotificationRuleRecipientsConfig. # noqa: E501 :return: The trigger_type of this NotificationRuleRecipientsConfig. # noqa: E501 :rtype: str """ return self._trigger_type @trigger_type.setter def trigger_type(self, trigger_type): """Sets the trigger_type of this NotificationRuleRecipientsConfig. :param trigger_type: The trigger_type of this NotificationRuleRecipientsConfig. # noqa: E501 :type: str """ if trigger_type is None: raise ValueError("Invalid value for `trigger_type`, must not be `None`") # noqa: E501 allowed_values = ["ALARM", "ALARM_ASSIGNMENT", "ALARM_COMMENT", "API_USAGE_LIMIT", "DEVICE_ACTIVITY", "ENTITIES_LIMIT", "ENTITY_ACTION", "INTEGRATION_LIFECYCLE_EVENT", "NEW_PLATFORM_VERSION", "RULE_ENGINE_COMPONENT_LIFECYCLE_EVENT"] # noqa: E501 if trigger_type not in allowed_values: raise ValueError( "Invalid value for `trigger_type` ({0}), must be one of {1}" # noqa: E501 .format(trigger_type, allowed_values) ) self._trigger_type = trigger_type def to_dict(self): """Returns the model properties as a dict""" result = {} for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) else: result[attr] = value if issubclass(NotificationRuleRecipientsConfig, dict): for key, value in self.items(): result[key] = value return result def to_str(self): """Returns the string representation of the model""" return pprint.pformat(self.to_dict()) def __repr__(self): """For `print` and `pprint`""" return self.to_str() def __eq__(self, other): """Returns true if both objects are equal""" if not isinstance(other, NotificationRuleRecipientsConfig): return False return self.__dict__ == other.__dict__ def __ne__(self, other): """Returns true if both objects are not equal""" return not self == other
PypiClean
/alipay_sdk_python-3.6.740-py3-none-any.whl/alipay/aop/api/request/AlipayFundWalletTokenCreateRequest.py
import json from alipay.aop.api.FileItem import FileItem from alipay.aop.api.constant.ParamConstants import * from alipay.aop.api.domain.AlipayFundWalletTokenCreateModel import AlipayFundWalletTokenCreateModel class AlipayFundWalletTokenCreateRequest(object): def __init__(self, biz_model=None): self._biz_model = biz_model self._biz_content = None self._version = "1.0" self._terminal_type = None self._terminal_info = None self._prod_code = None self._notify_url = None self._return_url = None self._udf_params = None self._need_encrypt = False @property def biz_model(self): return self._biz_model @biz_model.setter def biz_model(self, value): self._biz_model = value @property def biz_content(self): return self._biz_content @biz_content.setter def biz_content(self, value): if isinstance(value, AlipayFundWalletTokenCreateModel): self._biz_content = value else: self._biz_content = AlipayFundWalletTokenCreateModel.from_alipay_dict(value) @property def version(self): return self._version @version.setter def version(self, value): self._version = value @property def terminal_type(self): return self._terminal_type @terminal_type.setter def terminal_type(self, value): self._terminal_type = value @property def terminal_info(self): return self._terminal_info @terminal_info.setter def terminal_info(self, value): self._terminal_info = value @property def prod_code(self): return self._prod_code @prod_code.setter def prod_code(self, value): self._prod_code = value @property def notify_url(self): return self._notify_url @notify_url.setter def notify_url(self, value): self._notify_url = value @property def return_url(self): return self._return_url @return_url.setter def return_url(self, value): self._return_url = value @property def udf_params(self): return self._udf_params @udf_params.setter def udf_params(self, value): if not isinstance(value, dict): return self._udf_params = value @property def need_encrypt(self): return self._need_encrypt @need_encrypt.setter def need_encrypt(self, value): self._need_encrypt = value def add_other_text_param(self, key, value): if not self.udf_params: self.udf_params = dict() self.udf_params[key] = value def get_params(self): params = dict() params[P_METHOD] = 'alipay.fund.wallet.token.create' params[P_VERSION] = self.version if self.biz_model: params[P_BIZ_CONTENT] = json.dumps(obj=self.biz_model.to_alipay_dict(), ensure_ascii=False, sort_keys=True, separators=(',', ':')) if self.biz_content: if hasattr(self.biz_content, 'to_alipay_dict'): params['biz_content'] = json.dumps(obj=self.biz_content.to_alipay_dict(), ensure_ascii=False, sort_keys=True, separators=(',', ':')) else: params['biz_content'] = self.biz_content if self.terminal_type: params['terminal_type'] = self.terminal_type if self.terminal_info: params['terminal_info'] = self.terminal_info if self.prod_code: params['prod_code'] = self.prod_code if self.notify_url: params['notify_url'] = self.notify_url if self.return_url: params['return_url'] = self.return_url if self.udf_params: params.update(self.udf_params) return params def get_multipart_params(self): multipart_params = dict() return multipart_params
PypiClean
/digimat.blink1-0.1.5.tar.gz/digimat.blink1-0.1.5/src/digimat/blink1/blink1.py
import logging import time import sys from contextlib import contextmanager import usb import webcolors from kelvin import kelvin_to_rgb, COLOR_TEMPERATURES class BlinkConnectionFailed(RuntimeError): """Raised when we cannot connect to a Blink(1) """ class InvalidColor(ValueError): """Raised when the user requests an implausible colour """ log = logging.getLogger(__name__) DEFAULT_GAMMA = (2, 2, 2) DEFAULT_WHITE_POINT = (255, 255, 255) REPORT_ID = 0x01 VENDOR_ID = 0x27b8 PRODUCT_ID = 0x01ed class ColorCorrect(object): """Apply a gamma correction to any selected RGB color, see: http://en.wikipedia.org/wiki/Gamma_correction """ def __init__(self, gamma, white_point): """ :param gamma: Tuple of r,g,b gamma values :param white_point: White point expressed as (r,g,b), integer color temperature (in Kelvin) or a string value. All gamma values should be 0 > x >= 1 """ self.gamma = gamma if isinstance(white_point, str): kelvin = COLOR_TEMPERATURES[white_point] self.white_point = kelvin_to_rgb(kelvin) elif isinstance(white_point,(int,float)): self.white_point = kelvin_to_rgb(white_point) else: self.white_point = white_point @staticmethod def gamma_correct(gamma, white, luminance): return round(white * (luminance / 255) ** gamma) def __call__(self, r, g, b): color = [r,g,b] return tuple(self.gamma_correct(g, w, l) for (g, w, l) in zip(self.gamma, self.white_point, color) ) class Blink1: """Light controller class, sends messages to the blink(1) and blink(1) mk2 via USB HID. """ def __init__(self, gamma=None, white_point=None): """ :param gamma: Triple of gammas for each channel e.g. (2, 2, 2) """ self.cc = ColorCorrect( gamma=(gamma or DEFAULT_GAMMA), white_point=(white_point or DEFAULT_WHITE_POINT) ) self.dev = self.find() if not self.dev: raise BlinkConnectionFailed("Could not find an attached Blink(1)") def close(self): self.dev = None @staticmethod def find(): dev = usb.core.find(idVendor=VENDOR_ID, idProduct=PRODUCT_ID) if dev == None: return None # if not sys.platform.startswith('win'): # if ( dev.is_kernel_driver_active(0) ): try: dev.detach_kernel_driver(0) # except usb.core.USBError as e: # sys.exit("Could not detatch kernel driver: %s" % str(e)) except: pass return dev def notfound(self): return None # fixme what to do here def write(self, buf): """ Write command to blink(1) Send USB Feature Report 0x01 to blink(1) with 8-byte payload Note: arg 'buf' must be 8 bytes or bad things happen """ log.debug("blink1write:" + ",".join('0x%02x' % v for v in buf)) if ( self.dev == None ): return self.notfound() bmRequestTypeOut = usb.util.build_request_type(usb.util.CTRL_OUT, usb.util.CTRL_TYPE_CLASS, usb.util.CTRL_RECIPIENT_INTERFACE) # small patch as ctrl_transfer need str, not byte array strdata=''.join([chr(int(x)) for x in buf]) self.dev.ctrl_transfer(bmRequestTypeOut, 0x09, # == HID set_report (3 << 8) | REPORT_ID, # (3==HID feat.report) 0, strdata) def read(self): """ Read command result from blink(1) Receive USB Feature Report 0x01 from blink(1) with 8-byte payload Note: buf must be 8 bytes or bad things happen """ bmRequestTypeIn = usb.util.build_request_type(usb.util.CTRL_IN, usb.util.CTRL_TYPE_CLASS, usb.util.CTRL_RECIPIENT_INTERFACE) buf = self.dev.ctrl_transfer(bmRequestTypeIn, 0x01, # == HID get_report (3 << 8) | REPORT_ID, 0, 8) # == number of bytes to read log.debug("blink1read: " + ",".join('0x%02x' % v for v in buf)) return buf def fade_to_rgb_uncorrected(self, fade_milliseconds, red, green, blue, led_number=0): """ Command blink(1) to fade to RGB color, no color correction applied. 
""" action = ord('c') fade_time = int(fade_milliseconds / 10) th = (fade_time & 0xff00) >> 8 tl = fade_time & 0x00ff buf = [REPORT_ID, action, red, green, blue, th, tl, led_number] return self.write(buf) def fade_to_rgb(self,fade_milliseconds, red, green, blue, led_number=0): r, g, b = self.cc(red, green, blue) return self.fade_to_rgb_uncorrected(fade_milliseconds, r, g, b, led_number=0) @staticmethod def color_to_rgb(color): if isinstance(color, tuple): return color if color.startswith('#'): try: return webcolors.hex_to_rgb(color) except ValueError: raise InvalidColor(color) try: return webcolors.name_to_rgb(color) except ValueError: raise InvalidColor(color) def fade_to_color(self, fade_milliseconds, color): """ Fade the light to a known colour in a :param fade_milliseconds: Duration of the fade in milliseconds :param color: Named color to fade to :return: None """ red, green, blue = self.color_to_rgb(color) return self.fade_to_rgb(fade_milliseconds, red, green, blue) def off(self, fade_milliseconds=0): """Switch the blink(1) off instantly """ self.fade_to_color(fade_milliseconds, 'black') def get_version(self): """Get blink(1) firmware version """ if ( self.dev == None ): return '' buf = [0x01, ord('v'), 0, 0, 0, 0, 0, 0] self.write(buf) time.sleep(.05) version_raw = self.read() version = (version_raw[3] - ord('0')) * 100 + (version_raw[4] - ord('0')) return str(version) def get_serial_number(self): """Get blink(1) serial number """ return usb.util.get_string(self.dev, 256, 3) @contextmanager def blink1(switch_off=True, gamma=None, white_point=None): """Context manager which automatically shuts down the Blink(1) after use. """ b1 = Blink1(gamma=gamma, white_point=white_point) yield b1 if switch_off: b1.off() b1.close()
PypiClean
/JaqalPaq-1.2.0a1.tar.gz/JaqalPaq-1.2.0a1/src/jaqalpaq/emulator/pygsti/model.py
import numpy as np from pygsti.modelmembers.operations import ( OpFactory, StaticUnitaryOp, StaticArbitraryOp, ) from pygsti.modelmembers.povms import ComputationalBasisPOVM from pygsti.modelmembers.states import ComputationalBasisState from pygsti.models import LocalNoiseModel from pygsti.processors import QubitProcessorSpec from pygsti.baseobjs import UnitaryGateFunction from jaqalpaq.error import JaqalError from .._import import get_ideal_action def pygsti_gate_name(gate): """Returns the canonical pyGSTi gate name of a Jaqal gate.""" return f"GJ{gate.name}" class JaqalOpFactory(OpFactory): """Jaqal gate factory Takes a function describing a Jaqal gate (with identical call signature) and optional JaqalPaq gate definition, and creates a pyGSTi operator factory appropriate for describing that gate in a noise model. """ def __init__(self, fun, gate=None, pass_args=("classical", "quantum"), **kwargs): """Construct a Jaqal gate factory. :param fun: Function generating the specified unitary/process matrix :param gate: Optional Jaqal gate. If None, this probably specifies an idle gate. :param evotype: When True (default), pass quantum arguments to fun; if False, pass classical arguments only (typically the case for ideal unitaries) :return: a PyGSTi OpFactory describing the Jaqal gate """ if "evotype" not in kwargs: kwargs["evotype"] = "default" self.num_qubits = 1 if gate is None else len(gate.quantum_parameters) kwargs["state_space"] = self.num_qubits OpFactory.__init__(self, **kwargs) self.jaqal_gate = gate self.jaqal_fun = fun self.pass_args = pass_args def create_object(self, args=None, sslbls=None): if self.jaqal_gate is None: (duration,) = args # Idle gate mat = np.array(self.jaqal_fun(None, duration)) else: n_arg = 0 n_ssl = 0 argv = [] ssls = [] for param in self.jaqal_gate.parameters: if param.classical and "classical" in self.pass_args: argv.append(args[n_arg]) n_arg += 1 elif "quantum" in self.pass_args: # We do not allow qubit-specific models (yet) argv.append(None) n_ssl += 1 mat = np.array(self.jaqal_fun(*argv)) if mat.shape == (4**self.num_qubits, 4**self.num_qubits): return StaticArbitraryOp(mat, evotype=self._evotype) return StaticUnitaryOp(mat, evotype=self._evotype) class DummyUnitaryGate(UnitaryGateFunction): def __init__(self, num_qubits): self.num_qubits = num_qubits self.shape = (2**self.num_qubits, 2**self.num_qubits) def __call__(self, arg): return -1 * np.eye(2**self.num_qubits, dtype="complex") def pygsti_independent_noisy_gate(gate, fun): """Generates a pyGSTi-compatible wrapper for a noisy gate without crosstalk. This is a convenience wrapper, and currently does not support qubit-dependent errors. :param gate: The Jaqalpaq gate definition object describing the gate. :param fun: The Python function taking parameters in the order of the Jaqal gate and returning the process matrix in the Pauli basis. :return: The StaticDenseOp or OpFactory object """ fact = False quantum_args = 0 for param in gate.parameters: if param.classical: fact = True break quantum_args += 1 if fact: return JaqalOpFactory(fun, gate) return StaticArbitraryOp(fun(*[None for i in range(quantum_args)])) def pygsti_ideal_unitary(gate, evotype): """Ideal unitary action of the gate with pyGSTi special casing. :param gate: The Jaqalpaq gate definition object describing the gate. :param evotype: the pyGSTi evolution type to use for the model. The default is "statevec", which is sufficient for noiseless simulation. 
""" ideal_unitary = get_ideal_action(gate) # Skip gates without defined action if ideal_unitary is None: return if len(gate.quantum_parameters) == 0: raise JaqalError(f"{gate.name} not supported") # Cast to a PyGSTi StaticUnitaryOp to avoid autoconstruction logic if len(gate.classical_parameters) == 0: return StaticUnitaryOp(ideal_unitary(), evotype=evotype) def _gate(*parms): """ :param parms: A list of all classical arguments to the gate. :return: The ideal unitary action of the gate on its target qubits, or an identity gate on the target qubit. """ if parms: return ideal_unitary(*parms) else: return np.identity(2 ** len(gate.quantum_parameters), "complex") # Cast to a JaqalOpFactory to avoid autoconstruction logic return JaqalOpFactory(_gate, gate=gate, pass_args=("classical",), evotype=evotype) def build_processor_spec(n_qubits, gates, evotype="default"): """Build a ProcessorSpec of ideal unitaries suitable for pygsti model creation. Adds key names of the form GJ<gate name> for each Jaqal gate :param n_qubits: the number of qubits in the model :param gates: a dictionary of Jaqal gates :param evotype: What kind of object pyGSTi simulates (e.g., density matrix or state vector). See pyGSTi documentation for details. :return: PyGSTi ProcessorSpec to be used in model creation """ unitaries = {} dummy_unitaries = {} availability = {} for g in gates.values(): obj = pygsti_ideal_unitary(g, evotype) # Skip gates without defined action if obj is None: continue pygsti_name = pygsti_gate_name(g) unitaries[pygsti_name] = obj if len(g.quantum_parameters) > 1: availability[pygsti_name] = "all-permutations" else: availability[pygsti_name] = [(sslbl,) for sslbl in range(n_qubits)] dummy_unitary = DummyUnitaryGate(len(g.quantum_parameters)) dummy_unitaries[pygsti_name] = dummy_unitary(None) if "Gidle" not in unitaries: unitaries["Gidle"] = JaqalOpFactory( lambda *args: np.identity(2, "complex"), evotype=evotype ) availability["Gidle"] = [(sslbl,) for sslbl in range(n_qubits)] dummy_unitary = DummyUnitaryGate(1) dummy_unitaries["Gidle"] = dummy_unitary(None) pspec = QubitProcessorSpec( n_qubits, gate_names=list(unitaries.keys()), nonstd_gate_unitaries=dummy_unitaries, availability=availability, ) return pspec, unitaries def build_noiseless_native_model(n_qubits, gates, evotype="statevec"): """Build a (noiseless) noise model for each Jaqal gate :param n_qubits: the number of qubits in the model :param gates: a dictionary of Jaqal gates :param evotype: the pyGSTi evolution type to use for the model. The default is "statevec", which is sufficient for noiseless simulation. :return: a pyGSTi noise model object """ pspec, gatedict = build_processor_spec(n_qubits, gates, evotype=evotype) target_model = LocalNoiseModel( pspec, gatedict, prep_layers=[ComputationalBasisState([0] * pspec.num_qubits, evotype=evotype)], povm_layers=[ComputationalBasisPOVM(pspec.num_qubits, evotype=evotype)], evotype=evotype, ) if evotype == "statevec": import warnings warnings.warn('Setting sim="matrix". Emulation will be SLOW.') target_model.sim = "matrix" return target_model def build_noisy_native_model( jaqal_gates, gate_models, idle_model, n_qubits, stretched_gates=None, evotype="default", ): """ :param jaqal_gates: A dictionary of JaqalPaq gate objects (with their names as keys). This must be a superset of the gates to process in gate_models. :param gate_models: A dictionary of (gatemodel, gateduration) pairs (with Jaqal gate names as keys). 
gatemodel is a function that is passed to pygsti_independent_noisy_gate, which converts it to a pyGSTi-compatible expression of the noisy gate. gateduration is a function that (just like its sibling, gatemodel) takes the same arguments as the corresponding Jaqal gate, and returns the duration that gate will take. :param idle_model: A function that produces the behavior of the system when idling for a given duration. :param n_qubits: The number of qubits the quantum computer is running :param stretched_gates: Whether and how to add gate stretching. Gate stretching is a mechanism to create (or modify) Jaqal gates to provide access to an additional nonnegative real-valued parameter called the "stretch factor" that causes the duration of the gate to be multiplied by this factor. This argument is passed as the last parameter to the jaqal gate. Both gateduration, and gatemodel functions must also accept this parameter as a named, OPTIONAL last positional parameter, `stretch`. This is a convenience behavior to avoid the need to manually modify and/or duplicate all the Jaqal gates, and gatemodel and gateduration functions. If set to None (the default), do not add or modify the gates to provide stretched gates. If set to "add", each gate that ends in `_streched` will also use the (gatemodel, gateduration) pair without the `_stretched` suffix. There MUST BE GATES with the `_stretched` suffix already in jaqal_gates (see jaqalpaq.core.stretch for a mechanism to automate the creation of those Jaqal gate objects). If set to any other value, that value is passed as the keyword parameter "stretch", to both gate and gateduration, (i.e., uniformly all gates are given the same stretch factor, and the API exposed to the Jaqal code is not modified in any way, only the behavior). :param evotype: What kind of object pyGSTi simulates (e.g., density matrix or state vector). See pyGSTi documentation for details. :return tuple: of pyGSTi local noise model and dictionary (of duration functions) """ gates = {} durations = {} availability = {} dummy_unitaries = {} do_stretch = lambda x: x if stretched_gates is None: patterns = ("{}",) elif stretched_gates == "add": patterns = ("{}", "{}_stretched") else: if (float(stretched_gates) != stretched_gates) or (stretched_gates < 0): raise JaqalError("stretched_gates should be a nonnegative real number.") patterns = ("{}",) def do_stretch(unstretched): return lambda *args: unstretched(*args, stretch=stretched_gates) for name, (func, dur) in gate_models.items(): jaqal_gate = jaqal_gates[name] pygsti_name = pygsti_gate_name(jaqal_gate) gate_qubit_count = len(jaqal_gate.quantum_parameters) dummy_unitary = DummyUnitaryGate(gate_qubit_count) func = do_stretch(func) dur = do_stretch(dur) for pattern in patterns: pygsti_name_spec = pattern.format(pygsti_name) name_spec = pattern.format(name) jaqal_gate_spec = jaqal_gates[name] durations[name_spec] = dur # This calls the SAME FUNCTION for both ${NAME}_stretched and # ${NAME} . CONVENTION (and the definition of ${NAME}_stretched # in jaqal_gates) determines what the additional parameters are. 
gates[pygsti_name_spec] = pygsti_independent_noisy_gate( jaqal_gates[name_spec], func ) if gate_qubit_count > 1: availability[pygsti_name_spec] = "all-permutations" else: availability[pygsti_name_spec] = [(sslbl,) for sslbl in range(n_qubits)] dummy_unitaries[pygsti_name_spec] = dummy_unitary(None) gates["Gidle"] = JaqalOpFactory(idle_model) availability["Gidle"] = [(sslbl,) for sslbl in range(n_qubits)] dummy_unitary = DummyUnitaryGate(1) dummy_unitaries["Gidle"] = dummy_unitary(None) # Make pspec with dummy unitaries of correct size (regardless of unitary or process mx) pspec = QubitProcessorSpec( n_qubits, gate_names=list(gates.keys()), nonstd_gate_unitaries=dummy_unitaries, availability=availability, ) target_model = LocalNoiseModel( pspec, gatedict=gates, prep_layers=[ComputationalBasisState([0] * pspec.num_qubits, evotype=evotype)], povm_layers=[ComputationalBasisPOVM(pspec.num_qubits, evotype=evotype)], evotype=evotype, simulator="matrix", ) return target_model, durations
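
# ---------------------------------------------------------------------------
# Illustrative usage sketch (added; not part of the original module).
# `jaqal_gates` stands in for a dictionary of JaqalPaq gate objects keyed by
# gate name, as supplied by the caller's own gate definitions -- no particular
# gate set is implied.
def _example_noiseless_model(jaqal_gates, n_qubits=2):
    """Build an ideal (noiseless) pyGSTi model for the supplied Jaqal gates."""
    # Every gate with a defined ideal action becomes a pyGSTi operation, and an
    # explicit "Gidle" identity gate is always added for idle qubits.
    return build_noiseless_native_model(n_qubits, jaqal_gates)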
PypiClean
/hassmart_homeassistant-0.65.4.tar.gz/hassmart_homeassistant-0.65.4/homeassistant/components/binary_sensor/skybell.py
from datetime import timedelta import logging import voluptuous as vol from homeassistant.components.binary_sensor import ( BinarySensorDevice, PLATFORM_SCHEMA) from homeassistant.components.skybell import ( DEFAULT_ENTITY_NAMESPACE, DOMAIN as SKYBELL_DOMAIN, SkybellDevice) from homeassistant.const import ( CONF_ENTITY_NAMESPACE, CONF_MONITORED_CONDITIONS) import homeassistant.helpers.config_validation as cv DEPENDENCIES = ['skybell'] _LOGGER = logging.getLogger(__name__) SCAN_INTERVAL = timedelta(seconds=5) # Sensor types: Name, device_class, event SENSOR_TYPES = { 'button': ['Button', 'occupancy', 'device:sensor:button'], 'motion': ['Motion', 'motion', 'device:sensor:motion'], } PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Optional(CONF_ENTITY_NAMESPACE, default=DEFAULT_ENTITY_NAMESPACE): cv.string, vol.Required(CONF_MONITORED_CONDITIONS, default=[]): vol.All(cv.ensure_list, [vol.In(SENSOR_TYPES)]), }) def setup_platform(hass, config, add_devices, discovery_info=None): """Set up the platform for a Skybell device.""" skybell = hass.data.get(SKYBELL_DOMAIN) sensors = [] for sensor_type in config.get(CONF_MONITORED_CONDITIONS): for device in skybell.get_devices(): sensors.append(SkybellBinarySensor(device, sensor_type)) add_devices(sensors, True) class SkybellBinarySensor(SkybellDevice, BinarySensorDevice): """A binary sensor implementation for Skybell devices.""" def __init__(self, device, sensor_type): """Initialize a binary sensor for a Skybell device.""" super().__init__(device) self._sensor_type = sensor_type self._name = "{0} {1}".format(self._device.name, SENSOR_TYPES[self._sensor_type][0]) self._device_class = SENSOR_TYPES[self._sensor_type][1] self._event = {} self._state = None @property def name(self): """Return the name of the sensor.""" return self._name @property def is_on(self): """Return True if the binary sensor is on.""" return self._state @property def device_class(self): """Return the class of the binary sensor.""" return self._device_class @property def device_state_attributes(self): """Return the state attributes.""" attrs = super().device_state_attributes attrs['event_date'] = self._event.get('createdAt') return attrs def update(self): """Get the latest data and updates the state.""" super().update() event = self._device.latest(SENSOR_TYPES[self._sensor_type][2]) self._state = bool(event and event.get('id') != self._event.get('id')) self._event = event
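
# ---------------------------------------------------------------------------
# Illustrative configuration (added; assumes the `skybell` component itself is
# already set up with valid credentials).  A typical configuration.yaml entry
# enabling both conditions defined in SENSOR_TYPES would look like:
#
#   binary_sensor:
#     - platform: skybell
#       monitored_conditions:
#         - button
#         - motion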
PypiClean
/tensorflow_ascend-1.15.0-cp37-cp37m-manylinux2014_aarch64.whl/tensorflow_core/contrib/distributions/python/ops/bijectors/cholesky_outer_product.py
"""CholeskyOuterProduct bijector.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import numpy as np from tensorflow.python.ops import array_ops from tensorflow.python.ops import check_ops from tensorflow.python.ops import control_flow_ops from tensorflow.python.ops import linalg_ops from tensorflow.python.ops import math_ops from tensorflow.python.ops.distributions import bijector from tensorflow.python.ops.distributions import util as distribution_util from tensorflow.python.util import deprecation __all__ = [ "CholeskyOuterProduct", ] class CholeskyOuterProduct(bijector.Bijector): """Compute `g(X) = X @ X.T`; X is lower-triangular, positive-diagonal matrix. Note: the upper-triangular part of X is ignored (whether or not its zero). The surjectivity of g as a map from the set of n x n positive-diagonal lower-triangular matrices to the set of SPD matrices follows immediately from executing the Cholesky factorization algorithm on an SPD matrix A to produce a positive-diagonal lower-triangular matrix L such that `A = L @ L.T`. To prove the injectivity of g, suppose that L_1 and L_2 are lower-triangular with positive diagonals and satisfy `A = L_1 @ L_1.T = L_2 @ L_2.T`. Then `inv(L_1) @ A @ inv(L_1).T = [inv(L_1) @ L_2] @ [inv(L_1) @ L_2].T = I`. Setting `L_3 := inv(L_1) @ L_2`, that L_3 is a positive-diagonal lower-triangular matrix follows from `inv(L_1)` being positive-diagonal lower-triangular (which follows from the diagonal of a triangular matrix being its spectrum), and that the product of two positive-diagonal lower-triangular matrices is another positive-diagonal lower-triangular matrix. A simple inductive argument (proceeding one column of L_3 at a time) shows that, if `I = L_3 @ L_3.T`, with L_3 being lower-triangular with positive- diagonal, then `L_3 = I`. Thus, `L_1 = L_2`, proving injectivity of g. #### Examples ```python bijector.CholeskyOuterProduct().forward(x=[[1., 0], [2, 1]]) # Result: [[1., 2], [2, 5]], i.e., x @ x.T bijector.CholeskyOuterProduct().inverse(y=[[1., 2], [2, 5]]) # Result: [[1., 0], [2, 1]], i.e., cholesky(y). ``` """ @deprecation.deprecated( "2018-10-01", "The TensorFlow Distributions library has moved to " "TensorFlow Probability " "(https://github.com/tensorflow/probability). You " "should update all references to use `tfp.distributions` " "instead of `tf.contrib.distributions`.", warn_once=True) def __init__(self, validate_args=False, name="cholesky_outer_product"): """Instantiates the `CholeskyOuterProduct` bijector. Args: validate_args: Python `bool` indicating whether arguments should be checked for correctness. name: Python `str` name given to ops managed by this object. """ self._graph_parents = [] self._name = name super(CholeskyOuterProduct, self).__init__( forward_min_event_ndims=2, validate_args=validate_args, name=name) def _forward(self, x): if self.validate_args: is_matrix = check_ops.assert_rank_at_least(x, 2) shape = array_ops.shape(x) is_square = check_ops.assert_equal(shape[-2], shape[-1]) x = control_flow_ops.with_dependencies([is_matrix, is_square], x) # For safety, explicitly zero-out the upper triangular part. x = array_ops.matrix_band_part(x, -1, 0) return math_ops.matmul(x, x, adjoint_b=True) def _inverse(self, y): return linalg_ops.cholesky(y) def _forward_log_det_jacobian(self, x): # Let Y be a symmetric, positive definite matrix and write: # Y = X X.T # where X is lower-triangular. 
# # Observe that, # dY[i,j]/dX[a,b] # = d/dX[a,b] { X[i,:] X[j,:] } # = sum_{d=1}^p { I[i=a] I[d=b] X[j,d] + I[j=a] I[d=b] X[i,d] } # # To compute the Jacobian dX/dY we must represent X,Y as vectors. Since Y is # symmetric and X is lower-triangular, we need vectors of dimension: # d = p (p + 1) / 2 # where X, Y are p x p matrices, p > 0. We use a row-major mapping, i.e., # k = { i (i + 1) / 2 + j i>=j # { undef i<j # and assume zero-based indexes. When k is undef, the element is dropped. # Example: # j k # 0 1 2 3 / # 0 [ 0 . . . ] # i 1 [ 1 2 . . ] # 2 [ 3 4 5 . ] # 3 [ 6 7 8 9 ] # Write vec[.] to indicate transforming a matrix to vector via k(i,j). (With # slight abuse: k(i,j)=undef means the element is dropped.) # # We now show d vec[Y] / d vec[X] is lower triangular. Assuming both are # defined, observe that k(i,j) < k(a,b) iff (1) i<a or (2) i=a and j<b. # In both cases dvec[Y]/dvec[X]@[k(i,j),k(a,b)] = 0 since: # (1) j<=i<a thus i,j!=a. # (2) i=a>j thus i,j!=a. # # Since the Jacobian is lower-triangular, we need only compute the product # of diagonal elements: # d vec[Y] / d vec[X] @[k(i,j), k(i,j)] # = X[j,j] + I[i=j] X[i,j] # = 2 X[j,j]. # Since there is a 2 X[j,j] term for every lower-triangular element of X we # conclude: # |Jac(d vec[Y]/d vec[X])| = 2^p prod_{j=0}^{p-1} X[j,j]^{p-j}. diag = array_ops.matrix_diag_part(x) # We now ensure diag is columnar. Eg, if `diag = [1, 2, 3]` then the output # is `[[1], [2], [3]]` and if `diag = [[1, 2, 3], [4, 5, 6]]` then the # output is unchanged. diag = self._make_columnar(diag) if self.validate_args: is_matrix = check_ops.assert_rank_at_least( x, 2, message="Input must be a (batch of) matrix.") shape = array_ops.shape(x) is_square = check_ops.assert_equal( shape[-2], shape[-1], message="Input must be a (batch of) square matrix.") # Assuming lower-triangular means we only need check diag>0. is_positive_definite = check_ops.assert_positive( diag, message="Input must be positive definite.") x = control_flow_ops.with_dependencies( [is_matrix, is_square, is_positive_definite], x) # Create a vector equal to: [p, p-1, ..., 2, 1]. if x.get_shape().ndims is None or x.get_shape().dims[-1].value is None: p_int = array_ops.shape(x)[-1] p_float = math_ops.cast(p_int, dtype=x.dtype) else: p_int = x.get_shape().dims[-1].value p_float = np.array(p_int, dtype=x.dtype.as_numpy_dtype) exponents = math_ops.linspace(p_float, 1., p_int) sum_weighted_log_diag = array_ops.squeeze( math_ops.matmul(math_ops.log(diag), exponents[..., array_ops.newaxis]), axis=-1) fldj = p_float * np.log(2.) + sum_weighted_log_diag # We finally need to undo adding an extra column in non-scalar cases # where there is a single matrix as input. if x.get_shape().ndims is not None: if x.get_shape().ndims == 2: fldj = array_ops.squeeze(fldj, axis=-1) return fldj shape = array_ops.shape(fldj) maybe_squeeze_shape = array_ops.concat([ shape[:-1], distribution_util.pick_vector( math_ops.equal(array_ops.rank(x), 2), np.array([], dtype=np.int32), shape[-1:])], 0) return array_ops.reshape(fldj, maybe_squeeze_shape) def _make_columnar(self, x): """Ensures non-scalar input has at least one column. Example: If `x = [1, 2, 3]` then the output is `[[1], [2], [3]]`. If `x = [[1, 2, 3], [4, 5, 6]]` then the output is unchanged. If `x = 1` then the output is unchanged. Args: x: `Tensor`. Returns: columnar_x: `Tensor` with at least two dimensions. 
""" if x.get_shape().ndims is not None: if x.get_shape().ndims == 1: x = x[array_ops.newaxis, :] return x shape = array_ops.shape(x) maybe_expanded_shape = array_ops.concat([ shape[:-1], distribution_util.pick_vector( math_ops.equal(array_ops.rank(x), 1), [1], np.array([], dtype=np.int32)), shape[-1:], ], 0) return array_ops.reshape(x, maybe_expanded_shape)
PypiClean
/zope.deferredimport-5.0.tar.gz/zope.deferredimport-5.0/CHANGES.rst
========= Changes ========= 5.0 (2023-06-29) ================ - Drop support for Python 2.7, 3.5, 3.6. - Add support for Python 3.11. 4.4 (2021-12-10) ================ - Add support for Python 3.8, 3.9 and 3.10. - Drop support for Python 3.4. 4.3.1 (2019-08-05) ================== - Avoid race condition in ``deferredmodule.ModuleProxy.__getattr__`` `#8 <https://github.com/zopefoundation/zope.deferredimport/issues/8>`_. 4.3 (2018-10-05) ================ - Add support for Python 3.7. 4.2.1 (2017-10-24) ================== - Preserve the docstrings of proxied modules created with ``deprecatedFrom``, ``deferredFrom``, etc. See `issue 5 <https://github.com/zopefoundation/zope.deferredimport/issues/5>`_. 4.2.0 (2017-08-08) ================== - Add support for Python 3.5 and 3.6. - Drop support for Python 2.6 and 3.3. - Convert doctests to Sphinx documentation, including building docs and running doctest snippets under ``tox``. 4.1.0 (2014-12-26) ================== - Add support for PyPy. PyPy3 support is blocked on release of fix for: https://bitbucket.org/pypy/pypy/issue/1946 - Add support for Python 3.4. - Add support for testing on Travis. 4.0.0 (2013-02-28) ================== - Add support for Python 3.3. - Drop support for Python 2.4 and 2.5. 3.5.3 (2010-09-25) ================== - Add test extra to declare test dependency on ``zope.testing``. 3.5.2 (2010-05-24) ================== - Fix unit tests broken under Python 2.4 by the switch to the standard library ``doctest`` module. 3.5.1 (2010-04-30) ================== - Prefer the standard library's ``doctest`` module to the one from ``zope.testing``. 3.5.0 (2009-02-04) ================== - Add support to bootstrap on Jython. - Add reference documentation. 3.4.0 (2007-07-19) ================== - Finish release of ``zope.deferredimport``. 3.4.0b1 (2007-07-09) ==================== - Initial release as a separate project, corresponding to the ``zope.deferredimport`` from Zope 3.4.0b1.
PypiClean
/diffusersTangkhode-0.0.1.tar.gz/diffusersTangkhode-0.0.1/src/diffusers/commands/env.py
import platform from argparse import ArgumentParser import huggingface_hub from .. import __version__ as version from ..utils import is_accelerate_available, is_torch_available, is_transformers_available, is_xformers_available from . import BaseDiffusersCLICommand def info_command_factory(_): return EnvironmentCommand() class EnvironmentCommand(BaseDiffusersCLICommand): @staticmethod def register_subcommand(parser: ArgumentParser): download_parser = parser.add_parser("env") download_parser.set_defaults(func=info_command_factory) def run(self): hub_version = huggingface_hub.__version__ pt_version = "not installed" pt_cuda_available = "NA" if is_torch_available(): import torch pt_version = torch.__version__ pt_cuda_available = torch.cuda.is_available() transformers_version = "not installed" if is_transformers_available(): import transformers transformers_version = transformers.__version__ accelerate_version = "not installed" if is_accelerate_available(): import accelerate accelerate_version = accelerate.__version__ xformers_version = "not installed" if is_xformers_available(): import xformers xformers_version = xformers.__version__ info = { "`diffusers` version": version, "Platform": platform.platform(), "Python version": platform.python_version(), "PyTorch version (GPU?)": f"{pt_version} ({pt_cuda_available})", "Huggingface_hub version": hub_version, "Transformers version": transformers_version, "Accelerate version": accelerate_version, "xFormers version": xformers_version, "Using GPU in script?": "<fill in>", "Using distributed or parallel set-up in script?": "<fill in>", } print("\nCopy-and-paste the text below in your GitHub issue and FILL OUT the two last points.\n") print(self.format_dict(info)) return info @staticmethod def format_dict(d): return "\n".join([f"- {prop}: {val}" for prop, val in d.items()]) + "\n"
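
# ---------------------------------------------------------------------------
# Illustrative use of the static formatter (added): format_dict renders the
# collected environment info as the Markdown bullet list that users paste into
# GitHub issues.
def _example_format():
    # Produces "- `diffusers` version: 0.0.1\n"
    return EnvironmentCommand.format_dict({"`diffusers` version": "0.0.1"})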
PypiClean
/openpiv_python_lite-0.1-py3-none-any.whl/openpiv_python_lite/pyprocess.py
__licence_ = """ Copyright (C) 2011 www.openpiv.net This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. """ import numpy as np import numpy.lib.stride_tricks from numpy.fft import rfft2, irfft2 from numpy import ma from numpy import log import imageio def get_coordinates(image_size, window_size, overlap): """Compute the x, y coordinates of the centers of the interrogation windows. Parameters ---------- image_size: two elements tuple a two dimensional tuple for the pixel size of the image first element is number of rows, second element is the number of columns. window_size: int the size of the interrogation windows. overlap: int the number of pixel by which two adjacent interrogation windows overlap. Returns ------- x : 2d np.ndarray a two dimensional array containing the x coordinates of the interrogation window centers, in pixels. y : 2d np.ndarray a two dimensional array containing the y coordinates of the interrogation window centers, in pixels. """ # get shape of the resulting flow field field_shape = get_field_shape(image_size, window_size, overlap) # compute grid coordinates of the interrogation window centers # compute grid coordinates of the interrogation window centers x = np.arange( field_shape[1] )*(window_size-overlap) + window_size/2.0 y = np.arange( field_shape[0] )*(window_size-overlap) + window_size/2.0 return np.meshgrid(x, y) def get_field_shape(image_size, window_size, overlap): """Compute the shape of the resulting flow field. Given the image size, the interrogation window size and the overlap size, it is possible to calculate the number of rows and columns of the resulting flow field. Parameters ---------- image_size: two elements tuple a two dimensional tuple for the pixel size of the image first element is number of rows, second element is the number of columns. window_size: int the size of the interrogation window. overlap: int the number of pixel by which two adjacent interrogation windows overlap. Returns ------- field_shape : two elements tuple the shape of the resulting flow field """ return ((image_size[0] - window_size) // (window_size - overlap) + 1, (image_size[1] - window_size) // (window_size - overlap) + 1) def moving_window_array(array, window_size, overlap): """ This is a nice numpy trick. The concept of numpy strides should be clear to understand this code. Basically, we have a 2d array and we want to perform cross-correlation over the interrogation windows. An approach could be to loop over the array but loops are expensive in python. So we create from the array a new array with three dimension, of size (n_windows, window_size, window_size), in which each slice, (along the first axis) is an interrogation window. 
""" sz = array.itemsize shape = array.shape array = np.ascontiguousarray(array) strides = (sz * shape[1] * (window_size - overlap), sz * (window_size - overlap), sz * shape[1], sz) shape = (int((shape[0] - window_size) / (window_size - overlap)) + 1, int( (shape[1] - window_size) / (window_size - overlap)) + 1, window_size, window_size) return numpy.lib.stride_tricks.as_strided(array, strides=strides, shape=shape).reshape(-1, window_size, window_size) def find_first_peak(corr): """ Find row and column indices of the first correlation peak. Parameters ---------- corr : np.ndarray the correlation map Returns ------- i : int the row index of the correlation peak j : int the column index of the correlation peak corr_max1 : int the value of the correlation peak """ ind = corr.argmax() s = corr.shape[1] i = ind // s j = ind % s return i, j, corr.max() def find_second_peak(corr, i=None, j=None, width=2): """ Find the value of the second largest peak. The second largest peak is the height of the peak in the region outside a 3x3 submatrxi around the first correlation peak. Parameters ---------- corr: np.ndarray the correlation map. i,j : ints row and column location of the first peak. width : int the half size of the region around the first correlation peak to ignore for finding the second peak. Returns ------- i : int the row index of the second correlation peak. j : int the column index of the second correlation peak. corr_max2 : int the value of the second correlation peak. """ if i is None or j is None: i, j, tmp = find_first_peak(corr) # create a masked view of the corr tmp = corr.view(ma.MaskedArray) # set width x width square submatrix around the first correlation peak as masked. # Before check if we are not too close to the boundaries, otherwise we # have negative indices iini = max(0, i - width) ifin = min(i + width + 1, corr.shape[0]) jini = max(0, j - width) jfin = min(j + width + 1, corr.shape[1]) tmp[iini:ifin, jini:jfin] = ma.masked i, j, corr_max2 = find_first_peak(tmp) return i, j, corr_max2 def find_subpixel_peak_position(corr, subpixel_method='gaussian'): """ Find subpixel approximation of the correlation peak. This function returns a subpixels approximation of the correlation peak by using one of the several methods available. If requested, the function also returns the signal to noise ratio level evaluated from the correlation map. Parameters ---------- corr : np.ndarray the correlation map. subpixel_method : string one of the following methods to estimate subpixel location of the peak: 'centroid' [replaces default if correlation map is negative], 'gaussian' [default if correlation map is positive], 'parabolic'. Returns ------- subp_peak_position : two elements tuple the fractional row and column indices for the sub-pixel approximation of the correlation peak. 
""" # initialization # default_peak_position = (np.floor(corr.shape[0] / 2.), np.floor(corr.shape[1] / 2.)) default_peak_position = (0,0) # the peak locations peak1_i, peak1_j, dummy = find_first_peak(corr) try: # the peak and its neighbours: left, right, down, up c = corr[peak1_i, peak1_j] cl = corr[peak1_i - 1, peak1_j] cr = corr[peak1_i + 1, peak1_j] cd = corr[peak1_i, peak1_j - 1] cu = corr[peak1_i, peak1_j + 1] # gaussian fit if np.any(np.array([c, cl, cr, cd, cu]) < 0) and subpixel_method == 'gaussian': subpixel_method = 'centroid' try: if subpixel_method == 'centroid': subp_peak_position = (((peak1_i - 1) * cl + peak1_i * c + (peak1_i + 1) * cr) / (cl + c + cr), ((peak1_j - 1) * cd + peak1_j * c + (peak1_j + 1) * cu) / (cd + c + cu)) elif subpixel_method == 'gaussian': subp_peak_position = (peak1_i + ((log(cl) - log(cr)) / (2 * log(cl) - 4 * log(c) + 2 * log(cr))), peak1_j + ((log(cd) - log(cu)) / (2 * log(cd) - 4 * log(c) + 2 * log(cu)))) elif subpixel_method == 'parabolic': subp_peak_position = (peak1_i + (cl - cr) / (2 * cl - 4 * c + 2 * cr), peak1_j + (cd - cu) / (2 * cd - 4 * c + 2 * cu)) except: subp_peak_position = default_peak_position except IndexError: subp_peak_position = default_peak_position return subp_peak_position[0] - default_peak_position[0], subp_peak_position[1] - default_peak_position[1] def sig2noise_ratio(corr, sig2noise_method='peak2peak', width=2): """ Computes the signal to noise ratio from the correlation map. The signal to noise ratio is computed from the correlation map with one of two available method. It is a measure of the quality of the matching between to interrogation windows. Parameters ---------- corr : 2d np.ndarray the correlation map. sig2noise_method: string the method for evaluating the signal to noise ratio value from the correlation map. Can be `peak2peak`, `peak2mean` or None if no evaluation should be made. width : int, optional the half size of the region around the first correlation peak to ignore for finding the second peak. [default: 2]. Only used if ``sig2noise_method==peak2peak``. Returns ------- sig2noise : float the signal to noise ratio from the correlation map. """ # compute first peak position peak1_i, peak1_j, corr_max1 = find_first_peak(corr) # now compute signal to noise ratio if sig2noise_method == 'peak2peak': # find second peak height peak2_i, peak2_j, corr_max2 = find_second_peak( corr, peak1_i, peak1_j, width=width) # if it's an empty interrogation window # if the image is lacking particles, totally black it will correlate to very low value, but not zero # if the first peak is on the borders, the correlation map is also # wrong if corr_max1 < 1e-3 or (peak1_i == 0 or peak1_j == corr.shape[0] or peak1_j == 0 or peak1_j == corr.shape[1] or peak2_i == 0 or peak2_j == corr.shape[0] or peak2_j == 0 or peak2_j == corr.shape[1]): # return zero, since we have no signal. return 0.0 elif sig2noise_method == 'peak2mean': # find mean of the correlation map corr_max2 = corr.mean() else: raise ValueError('wrong sig2noise_method') # avoid dividing by zero try: sig2noise = corr_max1 / corr_max2 except ValueError: sig2noise = np.inf return sig2noise def correlate_windows(window_a, window_b, corr_method='fft', nfftx=0, nffty=0): """Compute correlation function between two interrogation windows. The correlation function can be computed by using the correlation theorem to speed up the computation. 
Parameters ---------- window_a : 2d np.ndarray a two dimensions array for the first interrogation window, window_b : 2d np.ndarray a two dimensions array for the second interrogation window. corr_method : string one of the two methods currently implemented: 'fft' or 'direct'. Default is 'fft', which is much faster. nfftx : int the size of the 2D FFT in x-direction, [default: 2 x windows_a.shape[0] is recommended]. nffty : int the size of the 2D FFT in y-direction, [default: 2 x windows_a.shape[1] is recommended]. Returns ------- corr : 2d np.ndarray a two dimensions array for the correlation function. Note that due to the wish to use 2^N windows for faster FFT we use a slightly different convention for the size of the correlation map. The theory says it is M+N-1, and the 'direct' method gets this size out the FFT-based method returns M+N size out, where M is the window_size and N is the search_area_size It leads to inconsistency of the output """ if corr_method == 'fft': window_b = np.conj(window_b[::-1, ::-1]) if nfftx == 0: nfftx = nextpower2(window_b.shape[0] + window_a.shape[0]) if nffty == 0: nffty = nextpower2(window_b.shape[1] + window_a.shape[1]) f2a = rfft2(normalize_intensity(window_a), s=(nfftx, nffty)) f2b = rfft2(normalize_intensity(window_b), s=(nfftx, nffty)) corr = irfft2(f2a * f2b).real corr = corr[:window_a.shape[0] + window_b.shape[0], :window_b.shape[1] + window_a.shape[1]] return corr else: raise ValueError('method is not implemented') def normalize_intensity(window): """Normalize interrogation window by removing the mean value. Parameters ---------- window : 2d np.ndarray the interrogation window array Returns ------- window : 2d np.ndarray the interrogation window array, with mean value equal to zero. """ return window - window.mean() def extended_search_area_piv( frame_a, frame_b, window_size, overlap=0, dt=1.0, search_area_size=0, corr_method='fft', subpixel_method='gaussian', sig2noise_method=None, width=2, nfftx=0, nffty=0): """Standard PIV cross-correlation algorithm, with an option for extended area search that increased dynamic range. The search region in the second frame is larger than the interrogation window size in the first frame. For Cython implementation see openpiv.process.extended_search_area_piv This is a pure python implementation of the standard PIV cross-correlation algorithm. It is a zero order displacement predictor, and no iterative process is performed. Parameters ---------- frame_a : 2d np.ndarray an two dimensions array of integers containing grey levels of the first frame. frame_b : 2d np.ndarray an two dimensions array of integers containing grey levels of the second frame. window_size : int the size of the (square) interrogation window, [default: 32 pix]. overlap : int the number of pixels by which two adjacent windows overlap [default: 16 pix]. dt : float the time delay separating the two frames [default: 1.0]. corr_method : string one of the two methods implemented: 'fft' or 'direct', [default: 'fft']. subpixel_method : string one of the following methods to estimate subpixel location of the peak: 'centroid' [replaces default if correlation map is negative], 'gaussian' [default if correlation map is positive], 'parabolic'. sig2noise_method : string defines the method of signal-to-noise-ratio measure, ('peak2peak' or 'peak2mean'. If None, no measure is performed.) 
nfftx : int the size of the 2D FFT in x-direction, [default: 2 x windows_a.shape[0] is recommended] nffty : int the size of the 2D FFT in y-direction, [default: 2 x windows_a.shape[1] is recommended] width : int the half size of the region around the first correlation peak to ignore for finding the second peak. [default: 2]. Only used if ``sig2noise_method==peak2peak``. search_area_size : int the size of the interrogation window in the second frame, default is the same interrogation window size and it is a fallback to the simplest FFT based PIV Returns ------- u : 2d np.ndarray a two dimensional array containing the u velocity component, in pixels/seconds. v : 2d np.ndarray a two dimensional array containing the v velocity component, in pixels/seconds. sig2noise : 2d np.ndarray, ( optional: only if sig2noise_method is not None ) a two dimensional array the signal to noise ratio for each window pair. """ # check the inputs for validity if search_area_size == 0: search_area_size = window_size if overlap >= window_size: raise ValueError('Overlap has to be smaller than the window_size') if search_area_size < window_size: raise ValueError('Search size cannot be smaller than the window_size') if (window_size > frame_a.shape[0]) or (window_size > frame_a.shape[1]): raise ValueError('window size cannot be larger than the image') # get field shape n_rows, n_cols = get_field_shape((frame_a.shape[0], frame_a.shape[1]), window_size, overlap ) u, v = np.zeros((n_rows, n_cols)), np.zeros((n_rows, n_cols)) # if we want sig2noise information, allocate memory if sig2noise_method is not None: sig2noise = np.zeros((n_rows, n_cols)) # loop over the interrogation windows # i, j are the row, column indices of the center of each interrogation # window for k in range(n_rows): # range(range(search_area_size/2, frame_a.shape[0] - search_area_size/2, window_size - overlap ): for m in range(n_cols): # range(search_area_size/2, frame_a.shape[1] - search_area_size/2 , window_size - overlap ): # Select first the largest window, work like usual from the top left corner # the left edge goes as: # e.g. 0, (search_area_size - overlap), 2*(search_area_size - overlap),.... il = k*(search_area_size - overlap) ir = il + search_area_size # same for top-bottom jt = m*(search_area_size - overlap) jb = jt + search_area_size # pick up the window in the second image window_b = frame_b[il:ir, jt:jb] # now shift the left corner of the smaller window inside the larger one il += (search_area_size - window_size)//2 # and it's right side is just a window_size apart ir = il + window_size # same same jt += (search_area_size - window_size)//2 jb = jt + window_size window_a = frame_a[il:ir, jt:jb] if np.any(window_a): corr = correlate_windows(window_a, window_b, corr_method=corr_method, nfftx=nfftx, nffty=nffty) # plt.figure() # plt.contourf(corr) # plt.show() # get subpixel approximation for peak position row and column index row, col = find_subpixel_peak_position(corr, subpixel_method=subpixel_method) row -= (search_area_size + window_size - 1)//2 col -= (search_area_size + window_size - 1)//2 # get displacements, apply coordinate system definition u[k,m],v[k,m] = -col, row # get signal to noise ratio if sig2noise_method is not None: sig2noise[k,m] = sig2noise_ratio( corr, sig2noise_method=sig2noise_method, width=width) # return output depending if user wanted sig2noise information if sig2noise_method is not None: return u/dt, v/dt, sig2noise else: return u/dt, v/dt def nextpower2(i): """ Find 2^n that is equal to or greater than. 
""" n = 1 while n < i: n *= 2 return n def random_noise(img): """ Add random noise to image """ gauss = np.random.normal(0, 1, img.size) gauss = gauss.reshape(img.shape[0], img.shape[1]).astype('uint8') return img + gauss
PypiClean
/pulumi_aws_native-0.75.1a1693503310.tar.gz/pulumi_aws_native-0.75.1a1693503310/pulumi_aws_native/ec2/get_local_gateway_route.py
import copy import warnings import pulumi import pulumi.runtime from typing import Any, Mapping, Optional, Sequence, Union, overload from .. import _utilities __all__ = [ 'GetLocalGatewayRouteResult', 'AwaitableGetLocalGatewayRouteResult', 'get_local_gateway_route', 'get_local_gateway_route_output', ] @pulumi.output_type class GetLocalGatewayRouteResult: def __init__(__self__, local_gateway_virtual_interface_group_id=None, network_interface_id=None, state=None, type=None): if local_gateway_virtual_interface_group_id and not isinstance(local_gateway_virtual_interface_group_id, str): raise TypeError("Expected argument 'local_gateway_virtual_interface_group_id' to be a str") pulumi.set(__self__, "local_gateway_virtual_interface_group_id", local_gateway_virtual_interface_group_id) if network_interface_id and not isinstance(network_interface_id, str): raise TypeError("Expected argument 'network_interface_id' to be a str") pulumi.set(__self__, "network_interface_id", network_interface_id) if state and not isinstance(state, str): raise TypeError("Expected argument 'state' to be a str") pulumi.set(__self__, "state", state) if type and not isinstance(type, str): raise TypeError("Expected argument 'type' to be a str") pulumi.set(__self__, "type", type) @property @pulumi.getter(name="localGatewayVirtualInterfaceGroupId") def local_gateway_virtual_interface_group_id(self) -> Optional[str]: """ The ID of the virtual interface group. """ return pulumi.get(self, "local_gateway_virtual_interface_group_id") @property @pulumi.getter(name="networkInterfaceId") def network_interface_id(self) -> Optional[str]: """ The ID of the network interface. """ return pulumi.get(self, "network_interface_id") @property @pulumi.getter def state(self) -> Optional[str]: """ The state of the route. """ return pulumi.get(self, "state") @property @pulumi.getter def type(self) -> Optional[str]: """ The route type. """ return pulumi.get(self, "type") class AwaitableGetLocalGatewayRouteResult(GetLocalGatewayRouteResult): # pylint: disable=using-constant-test def __await__(self): if False: yield self return GetLocalGatewayRouteResult( local_gateway_virtual_interface_group_id=self.local_gateway_virtual_interface_group_id, network_interface_id=self.network_interface_id, state=self.state, type=self.type) def get_local_gateway_route(destination_cidr_block: Optional[str] = None, local_gateway_route_table_id: Optional[str] = None, opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetLocalGatewayRouteResult: """ Describes a route for a local gateway route table. :param str destination_cidr_block: The CIDR block used for destination matches. :param str local_gateway_route_table_id: The ID of the local gateway route table. 
""" __args__ = dict() __args__['destinationCidrBlock'] = destination_cidr_block __args__['localGatewayRouteTableId'] = local_gateway_route_table_id opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts) __ret__ = pulumi.runtime.invoke('aws-native:ec2:getLocalGatewayRoute', __args__, opts=opts, typ=GetLocalGatewayRouteResult).value return AwaitableGetLocalGatewayRouteResult( local_gateway_virtual_interface_group_id=pulumi.get(__ret__, 'local_gateway_virtual_interface_group_id'), network_interface_id=pulumi.get(__ret__, 'network_interface_id'), state=pulumi.get(__ret__, 'state'), type=pulumi.get(__ret__, 'type')) @_utilities.lift_output_func(get_local_gateway_route) def get_local_gateway_route_output(destination_cidr_block: Optional[pulumi.Input[str]] = None, local_gateway_route_table_id: Optional[pulumi.Input[str]] = None, opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetLocalGatewayRouteResult]: """ Describes a route for a local gateway route table. :param str destination_cidr_block: The CIDR block used for destination matches. :param str local_gateway_route_table_id: The ID of the local gateway route table. """ ...
PypiClean
/alipay-python-3.3.17.tar.gz/alipay-python-3.3.17/alipay/aop/api/request/AlipayTradePayConsultRequest.py
import json from alipay.aop.api.FileItem import FileItem from alipay.aop.api.constant.ParamConstants import * from alipay.aop.api.domain.AlipayTradePayConsultModel import AlipayTradePayConsultModel class AlipayTradePayConsultRequest(object): def __init__(self, biz_model=None): self._biz_model = biz_model self._biz_content = None self._version = "1.0" self._terminal_type = None self._terminal_info = None self._prod_code = None self._notify_url = None self._return_url = None self._udf_params = None self._need_encrypt = False @property def biz_model(self): return self._biz_model @biz_model.setter def biz_model(self, value): self._biz_model = value @property def biz_content(self): return self._biz_content @biz_content.setter def biz_content(self, value): if isinstance(value, AlipayTradePayConsultModel): self._biz_content = value else: self._biz_content = AlipayTradePayConsultModel.from_alipay_dict(value) @property def version(self): return self._version @version.setter def version(self, value): self._version = value @property def terminal_type(self): return self._terminal_type @terminal_type.setter def terminal_type(self, value): self._terminal_type = value @property def terminal_info(self): return self._terminal_info @terminal_info.setter def terminal_info(self, value): self._terminal_info = value @property def prod_code(self): return self._prod_code @prod_code.setter def prod_code(self, value): self._prod_code = value @property def notify_url(self): return self._notify_url @notify_url.setter def notify_url(self, value): self._notify_url = value @property def return_url(self): return self._return_url @return_url.setter def return_url(self, value): self._return_url = value @property def udf_params(self): return self._udf_params @udf_params.setter def udf_params(self, value): if not isinstance(value, dict): return self._udf_params = value @property def need_encrypt(self): return self._need_encrypt @need_encrypt.setter def need_encrypt(self, value): self._need_encrypt = value def add_other_text_param(self, key, value): if not self.udf_params: self.udf_params = dict() self.udf_params[key] = value def get_params(self): params = dict() params[P_METHOD] = 'alipay.trade.pay.consult' params[P_VERSION] = self.version if self.biz_model: params[P_BIZ_CONTENT] = json.dumps(obj=self.biz_model.to_alipay_dict(), ensure_ascii=False, sort_keys=True, separators=(',', ':')) if self.biz_content: if hasattr(self.biz_content, 'to_alipay_dict'): params['biz_content'] = json.dumps(obj=self.biz_content.to_alipay_dict(), ensure_ascii=False, sort_keys=True, separators=(',', ':')) else: params['biz_content'] = self.biz_content if self.terminal_type: params['terminal_type'] = self.terminal_type if self.terminal_info: params['terminal_info'] = self.terminal_info if self.prod_code: params['prod_code'] = self.prod_code if self.notify_url: params['notify_url'] = self.notify_url if self.return_url: params['return_url'] = self.return_url if self.udf_params: params.update(self.udf_params) return params def get_multipart_params(self): multipart_params = dict() return multipart_params
PypiClean
/MergePythonSDK.ticketing-2.2.2-py3-none-any.whl/MergePythonSDK/hris/model/group_type_enum.py
import re # noqa: F401 import sys # noqa: F401 from typing import ( Optional, Union, List, Dict, ) from MergePythonSDK.shared.model_utils import ( # noqa: F401 ApiTypeError, ModelComposed, ModelNormal, ModelSimple, cached_property, OpenApiModel, change_keys_js_to_python, convert_js_args_to_python_args, date, datetime, file_type, none_type, validate_get_composed_info, ) from MergePythonSDK.shared.exceptions import ApiAttributeError from MergePythonSDK.shared.model_utils import import_model_by_name from MergePythonSDK.shared.model_utils import MergeEnumType class GroupTypeEnum(ModelNormal, MergeEnumType): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech Do not edit the class manually. Attributes: allowed_values (dict): The key is the tuple path to the attribute and the for var_name this is (var_name,). The value is a dict with a capitalized key describing the allowed value and an allowed value. These dicts store the allowed enum values. attribute_map (dict): The key is attribute name and the value is json key in definition. discriminator_value_class_map (dict): A dict to go from the discriminator variable value to the discriminator class name. validations (dict): The key is the tuple path to the attribute and the for var_name this is (var_name,). The value is a dict that stores validations for max_length, min_length, max_items, min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, inclusive_minimum, and regex. additional_properties_type (tuple): A tuple of classes accepted as additional properties values. """ allowed_values = { ('value',): { 'TEAM': "TEAM", 'DEPARTMENT': "DEPARTMENT", 'COST_CENTER': "COST_CENTER", 'BUSINESS_UNIT': "BUSINESS_UNIT", }, } validations = { } additional_properties_type = None _nullable = False @cached_property def openapi_types(): """ This must be a method because a model may have properties that are of type self, this must run after the class is loaded Returns openapi_types (dict): The key is attribute name and the value is attribute type. """ defined_types = { 'value': (str,), } return defined_types @cached_property def discriminator(): return None attribute_map = { } read_only_vars = { } _composed_schemas = {} @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, value, *args, **kwargs): # noqa: E501 """GroupTypeEnum - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types will be type checked and a TypeError will be raised if the wrong type is input. Defaults to True _path_to_item (tuple/list): This is a list of keys or values to drill down to the model in received_data when deserializing a response _spec_property_naming (bool): True if the variable names in the input data are serialized names, as specified in the OpenAPI document. False if the variable names in the input data are pythonic names, e.g. snake case (default) _configuration (Configuration): the instance to use when deserializing a file_type parameter. If passed, type conversion is attempted If omitted no type conversion is done. _visited_composed_classes (tuple): This stores a tuple of classes that we have traveled through so that if we see that class again we will not use its discriminator again. When traveling through a discriminator, the composed schema that is is traveled through is added to this set. 
For example if Animal has a discriminator petType and we pass in "Dog", and the class Dog allOf includes Animal, we move through Animal once using the discriminator, and pick Dog. Then in Dog, we will make an instance of the Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) """ _check_type = kwargs.pop('_check_type', True) _spec_property_naming = kwargs.pop('_spec_property_naming', True) _path_to_item = kwargs.pop('_path_to_item', ()) _configuration = kwargs.pop('_configuration', None) _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) self = super(OpenApiModel, cls).__new__(cls) if args: for arg in args: if isinstance(arg, dict): kwargs.update(arg) else: raise ApiTypeError( "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( args, self.__class__.__name__, ), path_to_item=_path_to_item, valid_classes=(self.__class__,), ) self._data_store = {} self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item self._configuration = _configuration self._visited_composed_classes = _visited_composed_classes + (self.__class__,) self.value = value return self required_properties = set([ '_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes', ]) @convert_js_args_to_python_args def __init__(self, value, *args, **kwargs): # noqa: E501 """GroupTypeEnum - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types will be type checked and a TypeError will be raised if the wrong type is input. Defaults to True _path_to_item (tuple/list): This is a list of keys or values to drill down to the model in received_data when deserializing a response _spec_property_naming (bool): True if the variable names in the input data are serialized names, as specified in the OpenAPI document. False if the variable names in the input data are pythonic names, e.g. snake case (default) _configuration (Configuration): the instance to use when deserializing a file_type parameter. If passed, type conversion is attempted If omitted no type conversion is done. _visited_composed_classes (tuple): This stores a tuple of classes that we have traveled through so that if we see that class again we will not use its discriminator again. When traveling through a discriminator, the composed schema that is is traveled through is added to this set. For example if Animal has a discriminator petType and we pass in "Dog", and the class Dog allOf includes Animal, we move through Animal once using the discriminator, and pick Dog. Then in Dog, we will make an instance of the Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) """ _check_type = kwargs.pop('_check_type', True) _spec_property_naming = kwargs.pop('_spec_property_naming', False) _path_to_item = kwargs.pop('_path_to_item', ()) _configuration = kwargs.pop('_configuration', None) _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) if args: for arg in args: if isinstance(arg, dict): kwargs.update(arg) else: raise ApiTypeError( "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( args, self.__class__.__name__, ), path_to_item=_path_to_item, valid_classes=(self.__class__,), ) self._data_store = {} self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item self._configuration = _configuration self._visited_composed_classes = _visited_composed_classes + (self.__class__,) self.value = value
PypiClean
/ddns-2.13.3.tar.gz/ddns-2.13.3/util/cache.py
from os import path, stat from pickle import dump, load from time import time from logging import info, debug, warning try: from collections.abc import MutableMapping except ImportError: # Python 2 imports from collections import MutableMapping class Cache(MutableMapping): """ using file to Cache data as dictionary """ def __init__(self, path, sync=False): self.__data = {} self.__filename = path self.__sync = sync self.__time = time() self.__changed = False self.load() @property def time(self): """ 缓存修改时间 """ return self.__time def load(self, file=None): """ load data from path """ if not file: file = self.__filename debug('load cache data from %s', file) if path.isfile(file): with open(self.__filename, 'rb') as data: try: self.__data = load(data) self.__time = stat(file).st_mtime return self except ValueError: pass except Exception as e: warning(e) else: info('cache file not exist') self.__data = {} self.__time = time() self.__changed = True return self def data(self, key=None, default=None): """ 获取当前字典或者制定得键值 """ if self.__sync: self.load() if key is None: return self.__data else: return self.__data.get(key, default) def sync(self): """Sync the write buffer with the cache files and clear the buffer. """ if self.__changed: with open(self.__filename, 'wb') as data: dump(self.__data, data) debug('save cache data to %s', self.__filename) self.__time = time() self.__changed = False return self def close(self): """Sync the write buffer, then close the cache. If a closed :class:`FileCache` object's methods are called, a :exc:`ValueError` will be raised. """ self.sync() del self.__data del self.__filename del self.__time self.__sync = False def __update(self): self.__changed = True if self.__sync: self.sync() else: self.__time = time() def clear(self): if self.data() is not None: self.__data = {} self.__update() def __setitem__(self, key, value): if self.data(key) != value: self.__data[key] = value self.__update() def __delitem__(self, key): if key in self.data(): del self.__data[key] self.__update() def __getitem__(self, key): return self.data(key) def __iter__(self): for key in self.data(): yield key def __len__(self): return len(self.data()) def __contains__(self, key): return key in self.data() def __str__(self): return self.data().__str__() def __del__(self): self.close()
PypiClean
/apache_tvm-0.12.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl/tvm/script/parser/tir/operation.py
"""The tir expression operation registration""" from typing import Type from tvm import tir from tvm.tir import IntImm from .._core import OpMethod, doc, register_op def _register_expr_op(ty: Type): # pylint: disable=invalid-name ty._dispatch_type = ty # pylint: disable=protected-access def _and(a, b): if isinstance(a, bool): a = IntImm("bool", a) if isinstance(b, bool): b = IntImm("bool", b) return tir.And(a, b) def _or(a, b): if isinstance(a, bool): a = IntImm("bool", a) if isinstance(b, bool): b = IntImm("bool", b) return tir.Or(a, b) def r(op: Type, i: int, m: OpMethod): # pylint: disable=invalid-name register_op(ty, op, i)(m) for i in [0, 1]: # Case 1. binop # doc.Add <-- is overloaded # doc.Sub <-- is overloaded # doc.Mult <-- is overloaded # doc.Div <-- is overloaded # doc.FloorDiv <-- is overloaded # doc.Mod <-- is overloaded # doc.LShift <-- is overloaded # doc.RShift <-- is overloaded # doc.BitOr <-- is overloaded # doc.BitXor <-- is overloaded # doc.BitAnd <-- is overloaded # doc.MatMult <-- not implemented # doc.Pow <-- not implemented # Case 2. cmpop r(doc.Eq, i, tir.EQ) r(doc.NotEq, i, tir.NE) r(doc.Lt, i, tir.LT) r(doc.LtE, i, tir.LE) r(doc.Gt, i, tir.GT) r(doc.GtE, i, tir.GE) # doc.Is <-- not implemented # doc.IsNot <-- not implemented # doc.In <-- not implemented # doc.NotIn <-- not implemented # Case 3. boolop r(doc.And, i, _and) r(doc.Or, i, _or) for i in [0]: # Case 4. unaryop # doc.Invert <-- is overloaded r(doc.Not, i, tir.Not) # doc.UAdd <-- is overloaded # doc.USub <-- is overloaded _register_expr_op(tir.PrimExpr) _register_expr_op(tir.IterVar)
PypiClean
/shootpandora_Cloud-20230725.1.4-py3-none-any.whl/shootpandora_cloud/flask/static/_next/static/chunks/238-882950710bdd3e1e.js
(self.webpackChunk_N_E = self.webpackChunk_N_E || []).push([[238], { 3530: function(e, t, n) { "use strict"; n.d(t, { iv: function() { return y }, tZ: function() { return b }, F4: function() { return O } }); var r, o = n(70079), i = n.t(o, 2), a = n(93865), u = n(66347), s = n(61404), l = !!i.useInsertionEffect && i.useInsertionEffect, c = l || function(e) { return e() } ; l || o.useLayoutEffect; var d = {}.hasOwnProperty , p = o.createContext("undefined" != typeof HTMLElement ? (0, a.Z)({ key: "css" }) : null); p.Provider; var f = o.createContext({}) , m = "__EMOTION_TYPE_PLEASE_DO_NOT_USE__" , h = function(e, t) { var n = {}; for (var r in t) d.call(t, r) && (n[r] = t[r]); return n[m] = e, n } , v = function(e) { var t = e.cache , n = e.serialized , r = e.isStringTag; return (0, u.hC)(t, n, r), c(function() { return (0, u.My)(t, n, r) }), null } , g = (r = function(e, t, n) { var r = e.css; "string" == typeof r && void 0 !== t.registered[r] && (r = t.registered[r]); var i = e[m] , a = [r] , l = ""; "string" == typeof e.className ? l = (0, u.fp)(t.registered, a, e.className) : null != e.className && (l = e.className + " "); var c = (0, s.O)(a, void 0, o.useContext(f)); l += t.key + "-" + c.name; var p = {}; for (var h in e) d.call(e, h) && "css" !== h && h !== m && (p[h] = e[h]); return p.ref = n, p.className = l, o.createElement(o.Fragment, null, o.createElement(v, { cache: t, serialized: c, isStringTag: "string" == typeof i }), o.createElement(i, p)) } , (0, o.forwardRef)(function(e, t) { return r(e, (0, o.useContext)(p), t) })); n(26095); var b = function(e, t) { var n = arguments; if (null == t || !d.call(t, "css")) return o.createElement.apply(void 0, n); var r = n.length , i = Array(r); i[0] = g, i[1] = h(e, t); for (var a = 2; a < r; a++) i[a] = n[a]; return o.createElement.apply(null, i) }; function y() { for (var e = arguments.length, t = Array(e), n = 0; n < e; n++) t[n] = arguments[n]; return (0, s.O)(t) } var O = function() { var e = y.apply(void 0, arguments) , t = "animation-" + e.name; return { name: t, styles: "@keyframes " + t + "{" + e.styles + "}", anim: 1, toString: function() { return "_EMO_" + this.name + "_" + this.styles + "_EMO_" } } } }, 76150: function(e, t, n) { "use strict"; n.d(t, { Z: function() { return i } }); var r = n(51516) , o = n(17224); function i(e, t) { (0, r.Z)(1, arguments); var n, i, m, h = (0, o.Z)(null !== (n = null == t ? void 0 : t.additionalDigits) && void 0 !== n ? n : 2); if (2 !== h && 1 !== h && 0 !== h) throw RangeError("additionalDigits must be 0, 1 or 2"); if (!("string" == typeof e || "[object String]" === Object.prototype.toString.call(e))) return new Date(NaN); var v = function(e) { var t, n = {}, r = e.split(a.dateTimeDelimiter); if (r.length > 2) return n; if (/:/.test(r[0]) ? t = r[0] : (n.date = r[0], t = r[1], a.timeZoneDelimiter.test(n.date) && (n.date = e.split(a.timeZoneDelimiter)[0], t = e.substr(n.date.length, e.length))), t) { var o = a.timezone.exec(t); o ? (n.time = t.replace(o[1], ""), n.timezone = o[1]) : n.time = t } return n }(e); if (v.date) { var g = function(e, t) { var n = RegExp("^(?:(\\d{4}|[+-]\\d{" + (4 + t) + "})|(\\d{2}|[+-]\\d{" + (2 + t) + "})$)") , r = e.match(n); if (!r) return { year: NaN, restDateString: "" }; var o = r[1] ? parseInt(r[1]) : null , i = r[2] ? parseInt(r[2]) : null; return { year: null === i ? 
o : 100 * i, restDateString: e.slice((r[1] || r[2]).length) } }(v.date, h); i = function(e, t) { if (null === t) return new Date(NaN); var n, r, o = e.match(u); if (!o) return new Date(NaN); var i = !!o[4] , a = c(o[1]) , s = c(o[2]) - 1 , l = c(o[3]) , d = c(o[4]) , m = c(o[5]) - 1; if (i) return d >= 1 && d <= 53 && m >= 0 && m <= 6 ? ((n = new Date(0)).setUTCFullYear(t, 0, 4), r = n.getUTCDay() || 7, n.setUTCDate(n.getUTCDate() + ((d - 1) * 7 + m + 1 - r)), n) : new Date(NaN); var h = new Date(0); return s >= 0 && s <= 11 && l >= 1 && l <= (p[s] || (f(t) ? 29 : 28)) && a >= 1 && a <= (f(t) ? 366 : 365) ? (h.setUTCFullYear(t, s, Math.max(a, l)), h) : new Date(NaN) }(g.restDateString, g.year) } if (!i || isNaN(i.getTime())) return new Date(NaN); var b = i.getTime() , y = 0; if (v.time && isNaN(y = function(e) { var t = e.match(s); if (!t) return NaN; var n = d(t[1]) , r = d(t[2]) , o = d(t[3]); return (24 === n ? 0 === r && 0 === o : o >= 0 && o < 60 && r >= 0 && r < 60 && n >= 0 && n < 25) ? 36e5 * n + 6e4 * r + 1e3 * o : NaN }(v.time))) return new Date(NaN); if (v.timezone) { if (isNaN(m = function(e) { if ("Z" === e) return 0; var t = e.match(l); if (!t) return 0; var n = "+" === t[1] ? -1 : 1 , r = parseInt(t[2]) , o = t[3] && parseInt(t[3]) || 0; return o >= 0 && o <= 59 ? n * (36e5 * r + 6e4 * o) : NaN }(v.timezone))) return new Date(NaN) } else { var O = new Date(b + y) , x = new Date(0); return x.setFullYear(O.getUTCFullYear(), O.getUTCMonth(), O.getUTCDate()), x.setHours(O.getUTCHours(), O.getUTCMinutes(), O.getUTCSeconds(), O.getUTCMilliseconds()), x } return new Date(b + y + m) } var a = { dateTimeDelimiter: /[T ]/, timeZoneDelimiter: /[Z ]/i, timezone: /([Z+-].*)$/ } , u = /^-?(?:(\d{3})|(\d{2})(?:-?(\d{2}))?|W(\d{2})(?:-?(\d{1}))?|)$/ , s = /^(\d{2}(?:[.,]\d*)?)(?::?(\d{2}(?:[.,]\d*)?))?(?::?(\d{2}(?:[.,]\d*)?))?$/ , l = /^([+-])(\d{2})(?::?(\d{2}))?$/; function c(e) { return e ? parseInt(e) : 1 } function d(e) { return e && parseFloat(e.replace(",", ".")) || 0 } var p = [31, null, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]; function f(e) { return e % 400 == 0 || e % 4 == 0 && e % 100 != 0 } }, 35113: function(e, t, n) { "use strict"; Object.defineProperty(t, "__esModule", { value: !0 }); var r = n(39324) , o = n(71209); Object.defineProperty(t, "__esModule", { value: !0 }), function(e, t) { for (var n in t) Object.defineProperty(e, n, { enumerable: !0, get: t[n] }) }(t, { noSSR: function() { return s }, default: function() { return l } }); var i = n(64838) , a = (n(70079), i._(n(12730))); function u(e) { return { default: (null == e ? void 0 : e.default) || e } } function s(e, t) { return delete t.webpack, delete t.modules, e(t) } function l(e, t) { var n = a.default , i = { loading: function(e) { return e.error, e.isLoading, e.pastDelay, null } }; e instanceof Promise ? i.loader = function() { return e } : "function" == typeof e ? i.loader = e : "object" == typeof e && (i = r._({}, i, e)); var l = (i = r._({}, i, t)).loader; return (i.loadableGenerated && (i = r._({}, i, i.loadableGenerated), delete i.loadableGenerated), "boolean" != typeof i.ssr || i.ssr) ? n(o._(r._({}, i), { loader: function() { return null != l ? 
l().then(u) : Promise.resolve(u(function() { return null })) } })) : (delete i.webpack, delete i.modules, s(n, i)) } ("function" == typeof t.default || "object" == typeof t.default && null !== t.default) && void 0 === t.default.__esModule && (Object.defineProperty(t.default, "__esModule", { value: !0 }), Object.assign(t.default, t), e.exports = t.default) }, 12659: function(e, t, n) { "use strict"; Object.defineProperty(t, "__esModule", { value: !0 }), Object.defineProperty(t, "LoadableContext", { enumerable: !0, get: function() { return r } }); var r = n(64838)._(n(70079)).default.createContext(null) }, 12730: function(e, t, n) { "use strict"; Object.defineProperty(t, "__esModule", { value: !0 }); var r = n(51217) , o = n(31819) , i = n(39324) , a = n(71209); Object.defineProperty(t, "__esModule", { value: !0 }), Object.defineProperty(t, "default", { enumerable: !0, get: function() { return v } }); var u = n(64838)._(n(70079)) , s = n(12659) , l = [] , c = [] , d = !1; function p(e) { var t = e() , n = { loading: !0, loaded: null, error: null }; return n.promise = t.then(function(e) { return n.loading = !1, n.loaded = e, e }).catch(function(e) { throw n.loading = !1, n.error = e, e }), n } var f = function() { function e(t, n) { r._(this, e), this._loadFn = t, this._opts = n, this._callbacks = new Set, this._delay = null, this._timeout = null, this.retry() } return o._(e, [{ key: "promise", value: function() { return this._res.promise } }, { key: "retry", value: function() { var e = this; this._clearTimeouts(), this._res = this._loadFn(this._opts.loader), this._state = { pastDelay: !1, timedOut: !1 }; var t = this._res , n = this._opts; t.loading && ("number" == typeof n.delay && (0 === n.delay ? this._state.pastDelay = !0 : this._delay = setTimeout(function() { e._update({ pastDelay: !0 }) }, n.delay)), "number" == typeof n.timeout && (this._timeout = setTimeout(function() { e._update({ timedOut: !0 }) }, n.timeout))), this._res.promise.then(function() { e._update({}), e._clearTimeouts() }).catch(function(t) { e._update({}), e._clearTimeouts() }), this._update({}) } }, { key: "_update", value: function(e) { this._state = i._(a._(i._({}, this._state), { error: this._res.error, loaded: this._res.loaded, loading: this._res.loading }), e), this._callbacks.forEach(function(e) { return e() }) } }, { key: "_clearTimeouts", value: function() { clearTimeout(this._delay), clearTimeout(this._timeout) } }, { key: "getCurrentValue", value: function() { return this._state } }, { key: "subscribe", value: function(e) { var t = this; return this._callbacks.add(e), function() { t._callbacks.delete(e) } } }]), e }(); function m(e) { return function(e, t) { var n = function() { if (!a) { var t = new f(e,i); a = { getCurrentValue: t.getCurrentValue.bind(t), subscribe: t.subscribe.bind(t), retry: t.retry.bind(t), promise: t.promise.bind(t) } } return a.promise() } , r = function() { n(); var e = u.default.useContext(s.LoadableContext); e && Array.isArray(i.modules) && i.modules.forEach(function(t) { e(t) }) } , o = function(e, t) { r(); var n = u.default.useSyncExternalStore(a.subscribe, a.getCurrentValue, a.getCurrentValue); return u.default.useImperativeHandle(t, function() { return { retry: a.retry } }, []), u.default.useMemo(function() { var t; return n.loading || n.error ? u.default.createElement(i.loading, { isLoading: n.loading, pastDelay: n.pastDelay, timedOut: n.timedOut, error: n.error, retry: a.retry }) : n.loaded ? u.default.createElement((t = n.loaded) && t.default ? 
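// NOTE (annotation, inferred): modules 35113, 12659, 12730 and 55344 appear to be the minified
// internals of Next.js next/dynamic: the dynamic()/noSSR entry point, the LoadableContext, and the
// Loadable wrapper with its preloadAll/preloadReady hooks (__NEXT_PRELOADREADY).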
t.default : t, e) : null }, [e, n]) } , i = Object.assign({ loader: null, loading: null, delay: 200, timeout: null, webpack: null, modules: null }, t) , a = null; if (!d) { var l = i.webpack ? i.webpack() : i.modules; l && c.push(function(e) { var t = !0 , r = !1 , o = void 0; try { for (var i, a = l[Symbol.iterator](); !(t = (i = a.next()).done); t = !0) { var u = i.value; if (-1 !== e.indexOf(u)) return n() } } catch (e) { r = !0, o = e } finally { try { t || null == a.return || a.return() } finally { if (r) throw o } } }) } return o.preload = function() { return n() } , o.displayName = "LoadableComponent", u.default.forwardRef(o) }(p, e) } function h(e, t) { for (var n = []; e.length; ) { var r = e.pop(); n.push(r(t)) } return Promise.all(n).then(function() { if (e.length) return h(e, t) }) } m.preloadAll = function() { return new Promise(function(e, t) { h(l).then(e, t) } ) } , m.preloadReady = function(e) { return void 0 === e && (e = []), new Promise(function(t) { var n = function() { return d = !0, t() }; h(c, e).then(n, n) } ) } , window.__NEXT_PRELOADREADY = m.preloadReady; var v = m }, 55344: function(e, t, n) { e.exports = n(35113) }, 37394: function(e, t, n) { "use strict"; n.d(t, { A: function() { return V }, B: function() { return _ }, C: function() { return F }, D: function() { return L }, E: function() { return y }, F: function() { return eE }, G: function() { return Z }, H: function() { return O }, I: function() { return M }, J: function() { return b }, K: function() { return T }, M: function() { return j }, a: function() { return ea }, b: function() { return K }, c: function() { return eM }, d: function() { return ep }, e: function() { return ei }, f: function() { return eh }, g: function() { return em }, h: function() { return eu }, i: function() { return J }, j: function() { return eg }, k: function() { return W }, l: function() { return el }, m: function() { return N }, n: function() { return z }, o: function() { return q }, p: function() { return eO }, q: function() { return ex }, r: function() { return A }, s: function() { return R }, t: function() { return eC }, u: function() { return Y }, v: function() { return ew }, w: function() { return eI }, x: function() { return eS }, y: function() { return $ }, z: function() { return E } }); var r, o, i, a = n(67666), u = n(45675), s = n(3530), l = n(38654), c = n(97395), d = n(24622), p = n(41470), f = n(70079), m = n(99581), h = n(88905), v = n(41455), g = ["className", "clearValue", "cx", "getStyles", "getClassNames", "getValue", "hasValue", "isMulti", "isRtl", "options", "selectOption", "selectProps", "setValue", "theme"], b = function() {}; function y(e, t) { for (var n, r = arguments.length, o = Array(r > 2 ? r - 2 : 0), i = 2; i < r; i++) o[i - 2] = arguments[i]; var a = [].concat(o); if (t && e) for (var u in t) t.hasOwnProperty(u) && t[u] && a.push("".concat((n = u) ? "-" === n[0] ? e + n : e + "__" + n : e)); return a.filter(function(e) { return e }).map(function(e) { return String(e).trim() }).join(" ") } var O = function(e) { return Array.isArray(e) ? e.filter(Boolean) : "object" === (0, d.Z)(e) && null !== e ? [e] : [] } , x = function(e) { e.className, e.clearValue, e.cx, e.getStyles, e.getClassNames, e.getValue, e.hasValue, e.isMulti, e.isRtl, e.options, e.selectOption, e.selectProps, e.setValue, e.theme; var t = (0, c.Z)(e, g); return (0, a.Z)({}, t) } , C = function(e, t, n) { var r = e.cx , o = e.getStyles , i = e.getClassNames , a = e.className; return { css: o(t, e), className: r(null != n ? 
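// NOTE (annotation, inferred): module 37394 appears to be minified react-select internals: the
// shared style functions (control, menu, option, multiValue, ...) plus the map of default
// replaceable components (ClearIndicator, Control, DropdownIndicator, Menu, Option, ...).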
n : {}, i(t, e), a) } }; function Z(e) { return [document.documentElement, document.body, window].indexOf(e) > -1 } function w(e) { return Z(e) ? window.pageYOffset : e.scrollTop } function I(e, t) { if (Z(e)) { window.scrollTo(0, t); return } e.scrollTop = t } function S(e, t) { var n = arguments.length > 2 && void 0 !== arguments[2] ? arguments[2] : 200 , r = arguments.length > 3 && void 0 !== arguments[3] ? arguments[3] : b , o = w(e) , i = t - o , a = 0; !function t() { var u; a += 10, I(e, i * ((u = (u = a) / n - 1) * u * u + 1) + o), a < n ? window.requestAnimationFrame(t) : r(e) }() } function M(e, t) { var n = e.getBoundingClientRect() , r = t.getBoundingClientRect() , o = t.offsetHeight / 3; r.bottom + o > n.bottom ? I(e, Math.min(t.offsetTop + t.clientHeight - e.offsetHeight + o, e.scrollHeight)) : r.top - o < n.top && I(e, Math.max(t.offsetTop - o, 0)) } function E() { try { return document.createEvent("TouchEvent"), !0 } catch (e) { return !1 } } function V() { try { return /Android|webOS|iPhone|iPad|iPod|BlackBerry|IEMobile|Opera Mini/i.test(navigator.userAgent) } catch (e) { return !1 } } var D = !1 , P = { get passive() { return D = !0 } } , k = "undefined" != typeof window ? window : {}; k.addEventListener && k.removeEventListener && (k.addEventListener("p", b, P), k.removeEventListener("p", b, !1)); var R = D; function T(e) { return null != e } function L(e, t, n) { return e ? t : n } function F(e) { return e } function _(e) { return e } var A = function(e) { for (var t = arguments.length, n = Array(t > 1 ? t - 1 : 0), r = 1; r < t; r++) n[r - 1] = arguments[r]; return Object.entries(e).filter(function(e) { var t = (0, l.Z)(e, 1)[0]; return !n.includes(t) }).reduce(function(e, t) { var n = (0, l.Z)(t, 2) , r = n[0] , o = n[1]; return e[r] = o, e }, {}) } , H = function(e) { return "auto" === e ? "bottom" : e } , N = function(e, t) { var n, r = e.placement, o = e.theme, i = o.borderRadius, u = o.spacing, s = o.colors; return (0, a.Z)((n = { label: "menu" }, (0, p.Z)(n, r ? ({ bottom: "top", top: "bottom" })[r] : "bottom", "100%"), (0, p.Z)(n, "position", "absolute"), (0, p.Z)(n, "width", "100%"), (0, p.Z)(n, "zIndex", 1), n), t ? 
{} : { backgroundColor: s.neutral0, borderRadius: i, boxShadow: "0 0 0 1px hsla(0, 0%, 0%, 0.1), 0 4px 11px hsla(0, 0%, 0%, 0.1)", marginBottom: u.menuGutter, marginTop: u.menuGutter }) } , U = (0, f.createContext)(null) , j = function(e) { var t = e.children , n = e.minMenuHeight , r = e.maxMenuHeight , o = e.menuPlacement , i = e.menuPosition , u = e.menuShouldScrollIntoView , s = e.theme , c = ((0, f.useContext)(U) || {}).setPortalPlacement , d = (0, f.useRef)(null) , p = (0, f.useState)(r) , m = (0, l.Z)(p, 2) , h = m[0] , g = m[1] , b = (0, f.useState)(null) , y = (0, l.Z)(b, 2) , O = y[0] , x = y[1] , C = s.spacing.controlHeight; return (0, v.Z)(function() { var e = d.current; if (e) { var t = "fixed" === i , a = function(e) { var t = e.maxHeight , n = e.menuEl , r = e.minHeight , o = e.placement , i = e.shouldScroll , a = e.isFixedPosition , u = e.controlHeight , s = function(e) { var t = getComputedStyle(e) , n = "absolute" === t.position , r = /(auto|scroll)/; if ("fixed" === t.position) return document.documentElement; for (var o = e; o = o.parentElement; ) if (t = getComputedStyle(o), (!n || "static" !== t.position) && r.test(t.overflow + t.overflowY + t.overflowX)) return o; return document.documentElement }(n) , l = { placement: "bottom", maxHeight: t }; if (!n || !n.offsetParent) return l; var c = s.getBoundingClientRect().height , d = n.getBoundingClientRect() , p = d.bottom , f = d.height , m = d.top , h = n.offsetParent.getBoundingClientRect().top , v = a ? window.innerHeight : Z(s) ? window.innerHeight : s.clientHeight , g = w(s) , b = parseInt(getComputedStyle(n).marginBottom, 10) , y = parseInt(getComputedStyle(n).marginTop, 10) , O = h - y , x = v - m , C = O + g , M = c - g - m , E = p - v + g + b , V = g + m - y; switch (o) { case "auto": case "bottom": if (x >= f) return { placement: "bottom", maxHeight: t }; if (M >= f && !a) return i && S(s, E, 160), { placement: "bottom", maxHeight: t }; if (!a && M >= r || a && x >= r) return i && S(s, E, 160), { placement: "bottom", maxHeight: a ? x - b : M - b }; if ("auto" === o || a) { var D = t , P = a ? O : C; return P >= r && (D = Math.min(P - b - u, t)), { placement: "top", maxHeight: D } } if ("bottom" === o) return i && I(s, E), { placement: "bottom", maxHeight: t }; break; case "top": if (O >= f) return { placement: "top", maxHeight: t }; if (C >= f && !a) return i && S(s, V, 160), { placement: "top", maxHeight: t }; if (!a && C >= r || a && O >= r) { var k = t; return (!a && C >= r || a && O >= r) && (k = a ? O - y : C - y), i && S(s, V, 160), { placement: "top", maxHeight: k } } return { placement: "bottom", maxHeight: t }; default: throw Error('Invalid placement provided "'.concat(o, '".')) } return l }({ maxHeight: r, menuEl: e, minHeight: n, placement: o, shouldScroll: u && !t, isFixedPosition: t, controlHeight: C }); g(a.maxHeight), x(a.placement), null == c || c(a.placement) } }, [r, o, i, u, n, c, C]), t({ ref: d, placerProps: (0, a.Z)((0, a.Z)({}, e), {}, { placement: O || H(o), maxHeight: h }) }) } , z = function(e, t) { var n = e.maxHeight , r = e.theme.spacing.baseUnit; return (0, a.Z)({ maxHeight: n, overflowY: "auto", position: "relative", WebkitOverflowScrolling: "touch" }, t ? {} : { paddingBottom: r, paddingTop: r }) } , B = function(e, t) { var n = e.theme , r = n.spacing.baseUnit , o = n.colors; return (0, a.Z)({ textAlign: "center" }, t ? 
{} : { color: o.neutral40, padding: "".concat(2 * r, "px ").concat(3 * r, "px") }) } , Y = B , W = B , G = function(e) { var t = e.children , n = e.innerProps; return (0, s.tZ)("div", (0, u.Z)({}, C(e, "noOptionsMessage", { "menu-notice": !0, "menu-notice--no-options": !0 }), n), t) }; G.defaultProps = { children: "No options" }; var X = function(e) { var t = e.children , n = e.innerProps; return (0, s.tZ)("div", (0, u.Z)({}, C(e, "loadingMessage", { "menu-notice": !0, "menu-notice--loading": !0 }), n), t) }; X.defaultProps = { children: "Loading..." }; var q = function(e) { var t = e.rect , n = e.offset , r = e.position; return { left: t.left, position: r, top: n, width: t.width, zIndex: 1 } } , K = function(e) { var t = e.isDisabled; return { label: "container", direction: e.isRtl ? "rtl" : void 0, pointerEvents: t ? "none" : void 0, position: "relative" } } , $ = function(e, t) { var n = e.theme.spacing , r = e.isMulti , o = e.hasValue , i = e.selectProps.controlShouldRenderValue; return (0, a.Z)({ alignItems: "center", display: r && o && i ? "flex" : "grid", flex: 1, flexWrap: "wrap", WebkitOverflowScrolling: "touch", position: "relative", overflow: "hidden" }, t ? {} : { padding: "".concat(n.baseUnit / 2, "px ").concat(2 * n.baseUnit, "px") }) } , J = function() { return { alignItems: "center", alignSelf: "stretch", display: "flex", flexShrink: 0 } } , Q = ["size"] , ee = { name: "8mmkcg", styles: "display:inline-block;fill:currentColor;line-height:1;stroke:currentColor;stroke-width:0" } , et = function(e) { var t = e.size , n = (0, c.Z)(e, Q); return (0, s.tZ)("svg", (0, u.Z)({ height: t, width: t, viewBox: "0 0 20 20", "aria-hidden": "true", focusable: "false", css: ee }, n)) } , en = function(e) { return (0, s.tZ)(et, (0, u.Z)({ size: 20 }, e), (0, s.tZ)("path", { d: "M14.348 14.849c-0.469 0.469-1.229 0.469-1.697 0l-2.651-3.030-2.651 3.029c-0.469 0.469-1.229 0.469-1.697 0-0.469-0.469-0.469-1.229 0-1.697l2.758-3.15-2.759-3.152c-0.469-0.469-0.469-1.228 0-1.697s1.228-0.469 1.697 0l2.652 3.031 2.651-3.031c0.469-0.469 1.228-0.469 1.697 0s0.469 1.229 0 1.697l-2.758 3.152 2.758 3.15c0.469 0.469 0.469 1.229 0 1.698z" })) } , er = function(e) { return (0, s.tZ)(et, (0, u.Z)({ size: 20 }, e), (0, s.tZ)("path", { d: "M4.516 7.548c0.436-0.446 1.043-0.481 1.576 0l3.908 3.747 3.908-3.747c0.533-0.481 1.141-0.446 1.574 0 0.436 0.445 0.408 1.197 0 1.615-0.406 0.418-4.695 4.502-4.695 4.502-0.217 0.223-0.502 0.335-0.787 0.335s-0.57-0.112-0.789-0.335c0 0-4.287-4.084-4.695-4.502s-0.436-1.17 0-1.615z" })) } , eo = function(e, t) { var n = e.isFocused , r = e.theme , o = r.spacing.baseUnit , i = r.colors; return (0, a.Z)({ label: "indicatorContainer", display: "flex", transition: "color 150ms" }, t ? {} : { color: n ? i.neutral60 : i.neutral20, padding: 2 * o, ":hover": { color: n ? i.neutral80 : i.neutral40 } }) } , ei = eo , ea = eo , eu = function(e, t) { var n = e.isDisabled , r = e.theme , o = r.spacing.baseUnit , i = r.colors; return (0, a.Z)({ label: "indicatorSeparator", alignSelf: "stretch", width: 1 }, t ? {} : { backgroundColor: n ? 
i.neutral10 : i.neutral20, marginBottom: 2 * o, marginTop: 2 * o }) } , es = (0, s.F4)(i || (r = ["\n 0%, 80%, 100% { opacity: 0; }\n 40% { opacity: 1; }\n"], o || (o = r.slice(0)), i = Object.freeze(Object.defineProperties(r, { raw: { value: Object.freeze(o) } })))) , el = function(e, t) { var n = e.isFocused , r = e.size , o = e.theme , i = o.colors , u = o.spacing.baseUnit; return (0, a.Z)({ label: "loadingIndicator", display: "flex", transition: "color 150ms", alignSelf: "center", fontSize: r, lineHeight: 1, marginRight: r, textAlign: "center", verticalAlign: "middle" }, t ? {} : { color: n ? i.neutral60 : i.neutral20, padding: 2 * u }) } , ec = function(e) { var t = e.delay , n = e.offset; return (0, s.tZ)("span", { css: (0, s.iv)({ animation: "".concat(es, " 1s ease-in-out ").concat(t, "ms infinite;"), backgroundColor: "currentColor", borderRadius: "1em", display: "inline-block", marginLeft: n ? "1em" : void 0, height: "1em", verticalAlign: "top", width: "1em" }, "", "") }) } , ed = function(e) { var t = e.innerProps , n = e.isRtl; return (0, s.tZ)("div", (0, u.Z)({}, C(e, "loadingIndicator", { indicator: !0, "loading-indicator": !0 }), t), (0, s.tZ)(ec, { delay: 0, offset: n }), (0, s.tZ)(ec, { delay: 160, offset: !0 }), (0, s.tZ)(ec, { delay: 320, offset: !n })) }; ed.defaultProps = { size: 4 }; var ep = function(e, t) { var n = e.isDisabled , r = e.isFocused , o = e.theme , i = o.colors , u = o.borderRadius , s = o.spacing; return (0, a.Z)({ label: "control", alignItems: "center", cursor: "default", display: "flex", flexWrap: "wrap", justifyContent: "space-between", minHeight: s.controlHeight, outline: "0 !important", position: "relative", transition: "all 100ms" }, t ? {} : { backgroundColor: n ? i.neutral5 : i.neutral0, borderColor: n ? i.neutral10 : r ? i.primary : i.neutral20, borderRadius: u, borderStyle: "solid", borderWidth: 1, boxShadow: r ? "0 0 0 1px ".concat(i.primary) : void 0, "&:hover": { borderColor: r ? i.primary : i.neutral30 } }) } , ef = ["data"] , em = function(e, t) { var n = e.theme.spacing; return t ? {} : { paddingBottom: 2 * n.baseUnit, paddingTop: 2 * n.baseUnit } } , eh = function(e, t) { var n = e.theme , r = n.colors , o = n.spacing; return (0, a.Z)({ label: "group", cursor: "default", display: "block" }, t ? {} : { color: r.neutral40, fontSize: "75%", fontWeight: 500, marginBottom: "0.25em", paddingLeft: 3 * o.baseUnit, paddingRight: 3 * o.baseUnit, textTransform: "uppercase" }) } , ev = ["innerRef", "isDisabled", "isHidden", "inputClassName"] , eg = function(e, t) { var n = e.isDisabled , r = e.value , o = e.theme , i = o.spacing , u = o.colors; return (0, a.Z)((0, a.Z)({ visibility: n ? "hidden" : "visible", transform: r ? "translateZ(0)" : "" }, ey), t ? {} : { margin: i.baseUnit / 2, paddingBottom: i.baseUnit / 2, paddingTop: i.baseUnit / 2, color: u.neutral80 }) } , eb = { gridArea: "1 / 2", font: "inherit", minWidth: "2px", border: 0, margin: 0, outline: 0, padding: 0 } , ey = { flex: "1 1 auto", display: "inline-grid", gridArea: "1 / 1 / 2 / 3", gridTemplateColumns: "0 min-content", "&:after": (0, a.Z)({ content: 'attr(data-value) " "', visibility: "hidden", whiteSpace: "pre" }, eb) } , eO = function(e, t) { var n = e.theme , r = n.spacing , o = n.borderRadius , i = n.colors; return (0, a.Z)({ label: "multiValue", display: "flex", minWidth: 0 }, t ? 
{} : { backgroundColor: i.neutral10, borderRadius: o / 2, margin: r.baseUnit / 2 }) } , ex = function(e, t) { var n = e.theme , r = n.borderRadius , o = n.colors , i = e.cropWithEllipsis; return (0, a.Z)({ overflow: "hidden", textOverflow: i || void 0 === i ? "ellipsis" : void 0, whiteSpace: "nowrap" }, t ? {} : { borderRadius: r / 2, color: o.neutral80, fontSize: "85%", padding: 3, paddingLeft: 6 }) } , eC = function(e, t) { var n = e.theme , r = n.spacing , o = n.borderRadius , i = n.colors , u = e.isFocused; return (0, a.Z)({ alignItems: "center", display: "flex" }, t ? {} : { borderRadius: o / 2, backgroundColor: u ? i.dangerLight : void 0, paddingLeft: r.baseUnit, paddingRight: r.baseUnit, ":hover": { backgroundColor: i.dangerLight, color: i.danger } }) } , eZ = function(e) { var t = e.children , n = e.innerProps; return (0, s.tZ)("div", n, t) } , ew = function(e, t) { var n = e.isDisabled , r = e.isFocused , o = e.isSelected , i = e.theme , u = i.spacing , s = i.colors; return (0, a.Z)({ label: "option", cursor: "default", display: "block", fontSize: "inherit", width: "100%", userSelect: "none", WebkitTapHighlightColor: "rgba(0, 0, 0, 0)" }, t ? {} : { backgroundColor: o ? s.primary : r ? s.primary25 : "transparent", color: n ? s.neutral20 : o ? s.neutral0 : "inherit", padding: "".concat(2 * u.baseUnit, "px ").concat(3 * u.baseUnit, "px"), ":active": { backgroundColor: n ? void 0 : o ? s.primary : s.primary50 } }) } , eI = function(e, t) { var n = e.theme , r = n.spacing , o = n.colors; return (0, a.Z)({ label: "placeholder", gridArea: "1 / 1 / 2 / 3" }, t ? {} : { color: o.neutral50, marginLeft: r.baseUnit / 2, marginRight: r.baseUnit / 2 }) } , eS = function(e, t) { var n = e.isDisabled , r = e.theme , o = r.spacing , i = r.colors; return (0, a.Z)({ label: "singleValue", gridArea: "1 / 1 / 2 / 3", maxWidth: "100%", overflow: "hidden", textOverflow: "ellipsis", whiteSpace: "nowrap" }, t ? {} : { color: n ? 
i.neutral40 : i.neutral80, marginLeft: o.baseUnit / 2, marginRight: o.baseUnit / 2 }) } , eM = { ClearIndicator: function(e) { var t = e.children , n = e.innerProps; return (0, s.tZ)("div", (0, u.Z)({}, C(e, "clearIndicator", { indicator: !0, "clear-indicator": !0 }), n), t || (0, s.tZ)(en, null)) }, Control: function(e) { var t = e.children , n = e.isDisabled , r = e.isFocused , o = e.innerRef , i = e.innerProps , a = e.menuIsOpen; return (0, s.tZ)("div", (0, u.Z)({ ref: o }, C(e, "control", { control: !0, "control--is-disabled": n, "control--is-focused": r, "control--menu-is-open": a }), i), t) }, DropdownIndicator: function(e) { var t = e.children , n = e.innerProps; return (0, s.tZ)("div", (0, u.Z)({}, C(e, "dropdownIndicator", { indicator: !0, "dropdown-indicator": !0 }), n), t || (0, s.tZ)(er, null)) }, DownChevron: er, CrossIcon: en, Group: function(e) { var t = e.children , n = e.cx , r = e.getStyles , o = e.getClassNames , i = e.Heading , a = e.headingProps , l = e.innerProps , c = e.label , d = e.theme , p = e.selectProps; return (0, s.tZ)("div", (0, u.Z)({}, C(e, "group", { group: !0 }), l), (0, s.tZ)(i, (0, u.Z)({}, a, { selectProps: p, theme: d, getStyles: r, getClassNames: o, cx: n }), c), (0, s.tZ)("div", null, t)) }, GroupHeading: function(e) { var t = x(e); t.data; var n = (0, c.Z)(t, ef); return (0, s.tZ)("div", (0, u.Z)({}, C(e, "groupHeading", { "group-heading": !0 }), n)) }, IndicatorsContainer: function(e) { var t = e.children , n = e.innerProps; return (0, s.tZ)("div", (0, u.Z)({}, C(e, "indicatorsContainer", { indicators: !0 }), n), t) }, IndicatorSeparator: function(e) { var t = e.innerProps; return (0, s.tZ)("span", (0, u.Z)({}, t, C(e, "indicatorSeparator", { "indicator-separator": !0 }))) }, Input: function(e) { var t = e.cx , n = e.value , r = x(e) , o = r.innerRef , i = r.isDisabled , l = r.isHidden , d = r.inputClassName , p = (0, c.Z)(r, ev); return (0, s.tZ)("div", (0, u.Z)({}, C(e, "input", { "input-container": !0 }), { "data-value": n || "" }), (0, s.tZ)("input", (0, u.Z)({ className: t({ input: !0 }, d), ref: o, style: (0, a.Z)({ label: "input", color: "inherit", background: 0, opacity: l ? 0 : 1, width: "100%" }, eb), disabled: i }, p))) }, LoadingIndicator: ed, Menu: function(e) { var t = e.children , n = e.innerRef , r = e.innerProps; return (0, s.tZ)("div", (0, u.Z)({}, C(e, "menu", { menu: !0 }), { ref: n }, r), t) }, MenuList: function(e) { var t = e.children , n = e.innerProps , r = e.innerRef , o = e.isMulti; return (0, s.tZ)("div", (0, u.Z)({}, C(e, "menuList", { "menu-list": !0, "menu-list--is-multi": o }), { ref: r }, n), t) }, MenuPortal: function(e) { var t = e.appendTo , n = e.children , r = e.controlElement , o = e.innerProps , i = e.menuPlacement , c = e.menuPosition , d = (0, f.useRef)(null) , p = (0, f.useRef)(null) , g = (0, f.useState)(H(i)) , b = (0, l.Z)(g, 2) , y = b[0] , O = b[1] , x = (0, f.useMemo)(function() { return { setPortalPlacement: O } }, []) , Z = (0, f.useState)(null) , w = (0, l.Z)(Z, 2) , I = w[0] , S = w[1] , M = (0, f.useCallback)(function() { if (r) { var e, t = { bottom: (e = r.getBoundingClientRect()).bottom, height: e.height, left: e.left, right: e.right, top: e.top, width: e.width }, n = "fixed" === c ? 0 : window.pageYOffset, o = t[y] + n; (o !== (null == I ? void 0 : I.offset) || t.left !== (null == I ? void 0 : I.rect.left) || t.width !== (null == I ? void 0 : I.rect.width)) && S({ offset: o, rect: t }) } }, [r, c, y, null == I ? void 0 : I.offset, null == I ? void 0 : I.rect.left, null == I ? 
void 0 : I.rect.width]); (0, v.Z)(function() { M() }, [M]); var E = (0, f.useCallback)(function() { "function" == typeof p.current && (p.current(), p.current = null), r && d.current && (p.current = (0, h.Me)(r, d.current, M, { elementResize: "ResizeObserver"in window })) }, [r, M]); (0, v.Z)(function() { E() }, [E]); var V = (0, f.useCallback)(function(e) { d.current = e, E() }, [E]); if (!t && "fixed" !== c || !I) return null; var D = (0, s.tZ)("div", (0, u.Z)({ ref: V }, C((0, a.Z)((0, a.Z)({}, e), {}, { offset: I.offset, position: c, rect: I.rect }), "menuPortal", { "menu-portal": !0 }), o), n); return (0, s.tZ)(U.Provider, { value: x }, t ? (0, m.createPortal)(D, t) : D) }, LoadingMessage: X, NoOptionsMessage: G, MultiValue: function(e) { var t = e.children , n = e.components , r = e.data , o = e.innerProps , i = e.isDisabled , u = e.removeProps , l = e.selectProps , c = n.Container , d = n.Label , p = n.Remove; return (0, s.tZ)(c, { data: r, innerProps: (0, a.Z)((0, a.Z)({}, C(e, "multiValue", { "multi-value": !0, "multi-value--is-disabled": i })), o), selectProps: l }, (0, s.tZ)(d, { data: r, innerProps: (0, a.Z)({}, C(e, "multiValueLabel", { "multi-value__label": !0 })), selectProps: l }, t), (0, s.tZ)(p, { data: r, innerProps: (0, a.Z)((0, a.Z)({}, C(e, "multiValueRemove", { "multi-value__remove": !0 })), {}, { "aria-label": "Remove ".concat(t || "option") }, u), selectProps: l })) }, MultiValueContainer: eZ, MultiValueLabel: eZ, MultiValueRemove: function(e) { var t = e.children , n = e.innerProps; return (0, s.tZ)("div", (0, u.Z)({ role: "button" }, n), t || (0, s.tZ)(en, { size: 14 })) }, Option: function(e) { var t = e.children , n = e.isDisabled , r = e.isFocused , o = e.isSelected , i = e.innerRef , a = e.innerProps; return (0, s.tZ)("div", (0, u.Z)({}, C(e, "option", { option: !0, "option--is-disabled": n, "option--is-focused": r, "option--is-selected": o }), { ref: i, "aria-disabled": n }, a), t) }, Placeholder: function(e) { var t = e.children , n = e.innerProps; return (0, s.tZ)("div", (0, u.Z)({}, C(e, "placeholder", { placeholder: !0 }), n), t) }, SelectContainer: function(e) { var t = e.children , n = e.innerProps , r = e.isDisabled , o = e.isRtl; return (0, s.tZ)("div", (0, u.Z)({}, C(e, "container", { "--is-disabled": r, "--is-rtl": o }), n), t) }, SingleValue: function(e) { var t = e.children , n = e.isDisabled , r = e.innerProps; return (0, s.tZ)("div", (0, u.Z)({}, C(e, "singleValue", { "single-value": !0, "single-value--is-disabled": n }), r), t) }, ValueContainer: function(e) { var t = e.children , n = e.innerProps , r = e.isMulti , o = e.hasValue; return (0, s.tZ)("div", (0, u.Z)({}, C(e, "valueContainer", { "value-container": !0, "value-container--is-multi": r, "value-container--has-value": o }), n), t) } } , eE = function(e) { return (0, a.Z)((0, a.Z)({}, eM), e.components) } }, 15420: function(e, t, n) { "use strict"; n.d(t, { ZP: function() { return ef } }); var r = n(67666) , o = n(38654) , i = n(97395) , a = n(70079) , u = ["defaultInputValue", "defaultMenuIsOpen", "defaultValue", "inputValue", "menuIsOpen", "onChange", "onInputChange", "onMenuClose", "onMenuOpen", "value"] , s = n(45675) , l = n(72843); function c(e, t) { for (var n = 0; n < t.length; n++) { var r = t[n]; r.enumerable = r.enumerable || !1, r.configurable = !0, "value"in r && (r.writable = !0), Object.defineProperty(e, (0, l.Z)(r.key), r) } } var d = n(26890); function p(e) { return (p = Object.setPrototypeOf ? 
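// NOTE (annotation, inferred): module 15420 appears to be the minified react-select Select
// implementation: aria live messages, diacritics stripping for option filtering, scroll
// locking/capturing, and the stateful Select class with its keyboard and touch handling.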
Object.getPrototypeOf.bind() : function(e) { return e.__proto__ || Object.getPrototypeOf(e) } )(e) } var f = n(24622) , m = n(19919) , h = n(59428); function v(e) { return function(e) { if (Array.isArray(e)) return (0, m.Z)(e) }(e) || function(e) { if ("undefined" != typeof Symbol && null != e[Symbol.iterator] || null != e["@@iterator"]) return Array.from(e) }(e) || (0, h.Z)(e) || function() { throw TypeError("Invalid attempt to spread non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.") }() } var g = n(37394) , b = n(3530) , y = Number.isNaN || function(e) { return "number" == typeof e && e != e } ; function O(e, t) { if (e.length !== t.length) return !1; for (var n, r, o = 0; o < e.length; o++) if (!((n = e[o]) === (r = t[o]) || y(n) && y(r))) return !1; return !0 } for (var x = { name: "7pg0cj-a11yText", styles: "label:a11yText;z-index:9999;border:0;clip:rect(1px, 1px, 1px, 1px);height:1px;width:1px;position:absolute;overflow:hidden;padding:0;white-space:nowrap" }, C = function(e) { return (0, b.tZ)("span", (0, s.Z)({ css: x }, e)) }, Z = { guidance: function(e) { var t = e.isSearchable , n = e.isMulti , r = e.isDisabled , o = e.tabSelectsValue; switch (e.context) { case "menu": return "Use Up and Down to choose options".concat(r ? "" : ", press Enter to select the currently focused option", ", press Escape to exit the menu").concat(o ? ", press Tab to select the option and exit the menu" : "", "."); case "input": return "".concat(e["aria-label"] || "Select", " is focused ").concat(t ? ",type to refine list" : "", ", press Down to open the menu, ").concat(n ? " press left to focus selected values" : ""); case "value": return "Use left and right to toggle between focused values, press Backspace to remove the currently focused value"; default: return "" } }, onChange: function(e) { var t = e.action , n = e.label , r = void 0 === n ? "" : n , o = e.labels , i = e.isDisabled; switch (t) { case "deselect-option": case "pop-value": case "remove-value": return "option ".concat(r, ", deselected."); case "clear": return "All selected options have been cleared."; case "initial-input-focus": return "option".concat(o.length > 1 ? "s" : "", " ").concat(o.join(","), ", selected."); case "select-option": return i ? "option ".concat(r, " is disabled. Select another option.") : "option ".concat(r, ", selected."); default: return "" } }, onFocus: function(e) { var t = e.context , n = e.focused , r = e.options , o = e.label , i = void 0 === o ? "" : o , a = e.selectValue , u = e.isDisabled , s = e.isSelected , l = function(e, t) { return e && e.length ? "".concat(e.indexOf(t) + 1, " of ").concat(e.length) : "" }; if ("value" === t && a) return "value ".concat(i, " focused, ").concat(l(a, n), "."); if ("menu" === t) { var c = "".concat(s ? "selected" : "focused").concat(u ? " disabled" : ""); return "option ".concat(i, " ").concat(c, ", ").concat(l(r, n), ".") } return "" }, onFilter: function(e) { var t = e.inputValue , n = e.resultsMessage; return "".concat(n).concat(t ? 
" for search term " + t : "", ".") } }, w = function(e) { var t = e.ariaSelection , n = e.focusedOption , o = e.focusedValue , i = e.focusableOptions , u = e.isFocused , s = e.selectValue , l = e.selectProps , c = e.id , d = l.ariaLiveMessages , p = l.getOptionLabel , f = l.inputValue , m = l.isMulti , h = l.isOptionDisabled , v = l.isSearchable , g = l.menuIsOpen , y = l.options , O = l.screenReaderStatus , x = l.tabSelectsValue , w = l["aria-label"] , I = l["aria-live"] , S = (0, a.useMemo)(function() { return (0, r.Z)((0, r.Z)({}, Z), d || {}) }, [d]) , M = (0, a.useMemo)(function() { var e = ""; if (t && S.onChange) { var n = t.option , o = t.options , i = t.removedValue , a = t.removedValues , u = t.value , l = i || n || (Array.isArray(u) ? null : u) , c = l ? p(l) : "" , d = o || a || void 0 , f = d ? d.map(p) : [] , m = (0, r.Z)({ isDisabled: l && h(l, s), label: c, labels: f }, t); e = S.onChange(m) } return e }, [t, S, h, s, p]) , E = (0, a.useMemo)(function() { var e = "" , t = n || o , r = !!(n && s && s.includes(n)); if (t && S.onFocus) { var a = { focused: t, label: p(t), isDisabled: h(t, s), isSelected: r, options: i, context: t === n ? "menu" : "value", selectValue: s }; e = S.onFocus(a) } return e }, [n, o, p, h, S, i, s]) , V = (0, a.useMemo)(function() { var e = ""; if (g && y.length && S.onFilter) { var t = O({ count: i.length }); e = S.onFilter({ inputValue: f, resultsMessage: t }) } return e }, [i, f, g, S, y, O]) , D = (0, a.useMemo)(function() { var e = ""; return S.guidance && (e = S.guidance({ "aria-label": w, context: o ? "value" : g ? "menu" : "input", isDisabled: n && h(n, s), isMulti: m, isSearchable: v, tabSelectsValue: x })), e }, [w, n, o, m, h, v, g, S, s, x]) , P = "".concat(E, " ").concat(V, " ").concat(D) , k = (0, b.tZ)(a.Fragment, null, (0, b.tZ)("span", { id: "aria-selection" }, M), (0, b.tZ)("span", { id: "aria-context" }, P)) , R = (null == t ? 
void 0 : t.action) === "initial-input-focus"; return (0, b.tZ)(a.Fragment, null, (0, b.tZ)(C, { id: c }, R && k), (0, b.tZ)(C, { "aria-live": I, "aria-atomic": "false", "aria-relevant": "additions text" }, u && !R && k)) }, I = [{ base: "A", letters: "AⒶA\xc0\xc1\xc2ẦẤẪẨ\xc3ĀĂẰẮẴẲȦǠ\xc4ǞẢ\xc5ǺǍȀȂẠẬẶḀĄȺⱯ" }, { base: "AA", letters: "Ꜳ" }, { base: "AE", letters: "\xc6ǼǢ" }, { base: "AO", letters: "Ꜵ" }, { base: "AU", letters: "Ꜷ" }, { base: "AV", letters: "ꜸꜺ" }, { base: "AY", letters: "Ꜽ" }, { base: "B", letters: "BⒷBḂḄḆɃƂƁ" }, { base: "C", letters: "CⒸCĆĈĊČ\xc7ḈƇȻꜾ" }, { base: "D", letters: "DⒹDḊĎḌḐḒḎĐƋƊƉꝹ" }, { base: "DZ", letters: "DZDŽ" }, { base: "Dz", letters: "DzDž" }, { base: "E", letters: "EⒺE\xc8\xc9\xcaỀẾỄỂẼĒḔḖĔĖ\xcbẺĚȄȆẸỆȨḜĘḘḚƐƎ" }, { base: "F", letters: "FⒻFḞƑꝻ" }, { base: "G", letters: "GⒼGǴĜḠĞĠǦĢǤƓꞠꝽꝾ" }, { base: "H", letters: "HⒽHĤḢḦȞḤḨḪĦⱧⱵꞍ" }, { base: "I", letters: "IⒾI\xcc\xcd\xceĨĪĬİ\xcfḮỈǏȈȊỊĮḬƗ" }, { base: "J", letters: "JⒿJĴɈ" }, { base: "K", letters: "KⓀKḰǨḲĶḴƘⱩꝀꝂꝄꞢ" }, { base: "L", letters: "LⓁLĿĹĽḶḸĻḼḺŁȽⱢⱠꝈꝆꞀ" }, { base: "LJ", letters: "LJ" }, { base: "Lj", letters: "Lj" }, { base: "M", letters: "MⓂMḾṀṂⱮƜ" }, { base: "N", letters: "NⓃNǸŃ\xd1ṄŇṆŅṊṈȠƝꞐꞤ" }, { base: "NJ", letters: "NJ" }, { base: "Nj", letters: "Nj" }, { base: "O", letters: "OⓄO\xd2\xd3\xd4ỒỐỖỔ\xd5ṌȬṎŌṐṒŎȮȰ\xd6ȪỎŐǑȌȎƠỜỚỠỞỢỌỘǪǬ\xd8ǾƆƟꝊꝌ" }, { base: "OI", letters: "Ƣ" }, { base: "OO", letters: "Ꝏ" }, { base: "OU", letters: "Ȣ" }, { base: "P", letters: "PⓅPṔṖƤⱣꝐꝒꝔ" }, { base: "Q", letters: "QⓆQꝖꝘɊ" }, { base: "R", letters: "RⓇRŔṘŘȐȒṚṜŖṞɌⱤꝚꞦꞂ" }, { base: "S", letters: "SⓈSẞŚṤŜṠŠṦṢṨȘŞⱾꞨꞄ" }, { base: "T", letters: "TⓉTṪŤṬȚŢṰṮŦƬƮȾꞆ" }, { base: "TZ", letters: "Ꜩ" }, { base: "U", letters: "UⓊU\xd9\xda\xdbŨṸŪṺŬ\xdcǛǗǕǙỦŮŰǓȔȖƯỪỨỮỬỰỤṲŲṶṴɄ" }, { base: "V", letters: "VⓋVṼṾƲꝞɅ" }, { base: "VY", letters: "Ꝡ" }, { base: "W", letters: "WⓌWẀẂŴẆẄẈⱲ" }, { base: "X", letters: "XⓍXẊẌ" }, { base: "Y", letters: "YⓎYỲ\xddŶỸȲẎŸỶỴƳɎỾ" }, { base: "Z", letters: "ZⓏZŹẐŻŽẒẔƵȤⱿⱫꝢ" }, { base: "a", letters: "aⓐaẚ\xe0\xe1\xe2ầấẫẩ\xe3āăằắẵẳȧǡ\xe4ǟả\xe5ǻǎȁȃạậặḁąⱥɐ" }, { base: "aa", letters: "ꜳ" }, { base: "ae", letters: "\xe6ǽǣ" }, { base: "ao", letters: "ꜵ" }, { base: "au", letters: "ꜷ" }, { base: "av", letters: "ꜹꜻ" }, { base: "ay", letters: "ꜽ" }, { base: "b", letters: "bⓑbḃḅḇƀƃɓ" }, { base: "c", letters: "cⓒcćĉċč\xe7ḉƈȼꜿↄ" }, { base: "d", letters: "dⓓdḋďḍḑḓḏđƌɖɗꝺ" }, { base: "dz", letters: "dzdž" }, { base: "e", letters: "eⓔe\xe8\xe9\xeaềếễểẽēḕḗĕė\xebẻěȅȇẹệȩḝęḙḛɇɛǝ" }, { base: "f", letters: "fⓕfḟƒꝼ" }, { base: "g", letters: "gⓖgǵĝḡğġǧģǥɠꞡᵹꝿ" }, { base: "h", letters: "hⓗhĥḣḧȟḥḩḫẖħⱨⱶɥ" }, { base: "hv", letters: "ƕ" }, { base: "i", letters: "iⓘi\xec\xed\xeeĩīĭ\xefḯỉǐȉȋịįḭɨı" }, { base: "j", letters: "jⓙjĵǰɉ" }, { base: "k", letters: "kⓚkḱǩḳķḵƙⱪꝁꝃꝅꞣ" }, { base: "l", letters: "lⓛlŀĺľḷḹļḽḻſłƚɫⱡꝉꞁꝇ" }, { base: "lj", letters: "lj" }, { base: "m", letters: "mⓜmḿṁṃɱɯ" }, { base: "n", letters: "nⓝnǹń\xf1ṅňṇņṋṉƞɲʼnꞑꞥ" }, { base: "nj", letters: "nj" }, { base: "o", letters: "oⓞo\xf2\xf3\xf4ồốỗổ\xf5ṍȭṏōṑṓŏȯȱ\xf6ȫỏőǒȍȏơờớỡởợọộǫǭ\xf8ǿɔꝋꝍɵ" }, { base: "oi", letters: "ƣ" }, { base: "ou", letters: "ȣ" }, { base: "oo", letters: "ꝏ" }, { base: "p", letters: "pⓟpṕṗƥᵽꝑꝓꝕ" }, { base: "q", letters: "qⓠqɋꝗꝙ" }, { base: "r", letters: "rⓡrŕṙřȑȓṛṝŗṟɍɽꝛꞧꞃ" }, { base: "s", letters: "sⓢs\xdfśṥŝṡšṧṣṩșşȿꞩꞅẛ" }, { base: "t", letters: "tⓣtṫẗťṭțţṱṯŧƭʈⱦꞇ" }, { base: "tz", letters: "ꜩ" }, { base: "u", letters: "uⓤu\xf9\xfa\xfbũṹūṻŭ\xfcǜǘǖǚủůűǔȕȗưừứữửựụṳųṷṵʉ" }, { base: "v", letters: "vⓥvṽṿʋꝟʌ" }, { base: "vy", letters: "ꝡ" }, { base: "w", letters: "wⓦwẁẃŵẇẅẘẉⱳ" }, { base: 
"x", letters: "xⓧxẋẍ" }, { base: "y", letters: "yⓨyỳ\xfdŷỹȳẏ\xffỷẙỵƴɏỿ" }, { base: "z", letters: "zⓩzźẑżžẓẕƶȥɀⱬꝣ" }], S = RegExp("[" + I.map(function(e) { return e.letters }).join("") + "]", "g"), M = {}, E = 0; E < I.length; E++) for (var V = I[E], D = 0; D < V.letters.length; D++) M[V.letters[D]] = V.base; var P = function(e) { return e.replace(S, function(e) { return M[e] }) } , k = function(e, t) { void 0 === t && (t = O); var n = null; function r() { for (var r = [], o = 0; o < arguments.length; o++) r[o] = arguments[o]; if (n && n.lastThis === this && t(r, n.lastArgs)) return n.lastResult; var i = e.apply(this, r); return n = { lastResult: i, lastArgs: r, lastThis: this }, i } return r.clear = function() { n = null } , r }(P) , R = function(e) { return e.replace(/^\s+|\s+$/g, "") } , T = function(e) { return "".concat(e.label, " ").concat(e.value) } , L = ["innerRef"]; function F(e) { var t = e.innerRef , n = (0, i.Z)(e, L) , r = (0, g.r)(n, "onExited", "in", "enter", "exit", "appear"); return (0, b.tZ)("input", (0, s.Z)({ ref: t }, r, { css: (0, b.iv)({ label: "dummyInput", background: 0, border: 0, caretColor: "transparent", fontSize: "inherit", gridArea: "1 / 1 / 2 / 3", outline: 0, padding: 0, width: 1, color: "transparent", left: -100, opacity: 0, position: "relative", transform: "scale(.01)" }, "", "") })) } var _ = function(e) { e.preventDefault(), e.stopPropagation() } , A = ["boxSizing", "height", "overflow", "paddingRight", "position"] , H = { boxSizing: "border-box", overflow: "hidden", position: "relative", height: "100%" }; function N(e) { e.preventDefault() } function U(e) { e.stopPropagation() } function j() { var e = this.scrollTop , t = this.scrollHeight , n = e + this.offsetHeight; 0 === e ? this.scrollTop = 1 : n === t && (this.scrollTop = e - 1) } function z() { return "ontouchstart"in window || navigator.maxTouchPoints } var B = !!("undefined" != typeof window && window.document && window.document.createElement) , Y = 0 , W = { capture: !1, passive: !1 } , G = function() { return document.activeElement && document.activeElement.blur() } , X = { name: "1kfdb0e", styles: "position:fixed;left:0;bottom:0;right:0;top:0" }; function q(e) { var t, n, r, o, i, u, s, l, c, d, p, f, m, h, v, y, O, x, C, Z, w, I, S, M, E = e.children, V = e.lockEnabled, D = e.captureEnabled, P = (n = (t = { isEnabled: void 0 === D || D, onBottomArrive: e.onBottomArrive, onBottomLeave: e.onBottomLeave, onTopArrive: e.onTopArrive, onTopLeave: e.onTopLeave }).isEnabled, r = t.onBottomArrive, o = t.onBottomLeave, i = t.onTopArrive, u = t.onTopLeave, s = (0, a.useRef)(!1), l = (0, a.useRef)(!1), c = (0, a.useRef)(0), d = (0, a.useRef)(null), p = (0, a.useCallback)(function(e, t) { if (null !== d.current) { var n = d.current , a = n.scrollTop , c = n.scrollHeight , p = n.clientHeight , f = d.current , m = t > 0 , h = c - p - a , v = !1; h > t && s.current && (o && o(e), s.current = !1), m && l.current && (u && u(e), l.current = !1), m && t > h ? 
(r && !s.current && r(e), f.scrollTop = c, v = !0, s.current = !0) : !m && -t > a && (i && !l.current && i(e), f.scrollTop = 0, v = !0, l.current = !0), v && _(e) } }, [r, o, i, u]), f = (0, a.useCallback)(function(e) { p(e, e.deltaY) }, [p]), m = (0, a.useCallback)(function(e) { c.current = e.changedTouches[0].clientY }, []), h = (0, a.useCallback)(function(e) { var t = c.current - e.changedTouches[0].clientY; p(e, t) }, [p]), v = (0, a.useCallback)(function(e) { if (e) { var t = !!g.s && { passive: !1 }; e.addEventListener("wheel", f, t), e.addEventListener("touchstart", m, t), e.addEventListener("touchmove", h, t) } }, [h, m, f]), y = (0, a.useCallback)(function(e) { e && (e.removeEventListener("wheel", f, !1), e.removeEventListener("touchstart", m, !1), e.removeEventListener("touchmove", h, !1)) }, [h, m, f]), (0, a.useEffect)(function() { if (n) { var e = d.current; return v(e), function() { y(e) } } }, [n, v, y]), function(e) { d.current = e } ), k = (x = (O = { isEnabled: V }).isEnabled, Z = void 0 === (C = O.accountForScrollbars) || C, w = (0, a.useRef)({}), I = (0, a.useRef)(null), S = (0, a.useCallback)(function(e) { if (B) { var t = document.body , n = t && t.style; if (Z && A.forEach(function(e) { var t = n && n[e]; w.current[e] = t }), Z && Y < 1) { var r = parseInt(w.current.paddingRight, 10) || 0 , o = document.body ? document.body.clientWidth : 0 , i = window.innerWidth - o + r || 0; Object.keys(H).forEach(function(e) { var t = H[e]; n && (n[e] = t) }), n && (n.paddingRight = "".concat(i, "px")) } t && z() && (t.addEventListener("touchmove", N, W), e && (e.addEventListener("touchstart", j, W), e.addEventListener("touchmove", U, W))), Y += 1 } }, [Z]), M = (0, a.useCallback)(function(e) { if (B) { var t = document.body , n = t && t.style; Y = Math.max(Y - 1, 0), Z && Y < 1 && A.forEach(function(e) { var t = w.current[e]; n && (n[e] = t) }), t && z() && (t.removeEventListener("touchmove", N, W), e && (e.removeEventListener("touchstart", j, W), e.removeEventListener("touchmove", U, W))) } }, [Z]), (0, a.useEffect)(function() { if (x) { var e = I.current; return S(e), function() { M(e) } } }, [x, S, M]), function(e) { I.current = e } ); return (0, b.tZ)(a.Fragment, null, V && (0, b.tZ)("div", { onClick: G, css: X }), E(function(e) { P(e), k(e) })) } var K = { name: "1a0ro4n-requiredInput", styles: "label:requiredInput;opacity:0;pointer-events:none;position:absolute;bottom:0;left:0;right:0;width:100%" } , $ = function(e) { var t = e.name , n = e.onFocus; return (0, b.tZ)("input", { required: !0, name: t, tabIndex: -1, "aria-hidden": "true", onFocus: n, css: K, value: "", onChange: function() {} }) } , J = { clearIndicator: g.a, container: g.b, control: g.d, dropdownIndicator: g.e, group: g.g, groupHeading: g.f, indicatorsContainer: g.i, indicatorSeparator: g.h, input: g.j, loadingIndicator: g.l, loadingMessage: g.k, menu: g.m, menuList: g.n, menuPortal: g.o, multiValue: g.p, multiValueLabel: g.q, multiValueRemove: g.t, noOptionsMessage: g.u, option: g.v, placeholder: g.w, singleValue: g.x, valueContainer: g.y } , Q = { borderRadius: 4, colors: { primary: "#2684FF", primary75: "#4C9AFF", primary50: "#B2D4FF", primary25: "#DEEBFF", danger: "#DE350B", dangerLight: "#FFBDAD", neutral0: "hsl(0, 0%, 100%)", neutral5: "hsl(0, 0%, 95%)", neutral10: "hsl(0, 0%, 90%)", neutral20: "hsl(0, 0%, 80%)", neutral30: "hsl(0, 0%, 70%)", neutral40: "hsl(0, 0%, 60%)", neutral50: "hsl(0, 0%, 50%)", neutral60: "hsl(0, 0%, 40%)", neutral70: "hsl(0, 0%, 30%)", neutral80: "hsl(0, 0%, 20%)", neutral90: 
"hsl(0, 0%, 10%)" }, spacing: { baseUnit: 4, controlHeight: 38, menuGutter: 8 } } , ee = { "aria-live": "polite", backspaceRemovesValue: !0, blurInputOnSelect: (0, g.z)(), captureMenuScroll: !(0, g.z)(), classNames: {}, closeMenuOnSelect: !0, closeMenuOnScroll: !1, components: {}, controlShouldRenderValue: !0, escapeClearsValue: !1, filterOption: function(e, t) { if (e.data.__isNew__) return !0; var n = (0, r.Z)({ ignoreCase: !0, ignoreAccents: !0, stringify: T, trim: !0, matchFrom: "any" }, void 0) , o = n.ignoreCase , i = n.ignoreAccents , a = n.stringify , u = n.trim , s = n.matchFrom , l = u ? R(t) : t , c = u ? R(a(e)) : a(e); return o && (l = l.toLowerCase(), c = c.toLowerCase()), i && (l = k(l), c = P(c)), "start" === s ? c.substr(0, l.length) === l : c.indexOf(l) > -1 }, formatGroupLabel: function(e) { return e.label }, getOptionLabel: function(e) { return e.label }, getOptionValue: function(e) { return e.value }, isDisabled: !1, isLoading: !1, isMulti: !1, isRtl: !1, isSearchable: !0, isOptionDisabled: function(e) { return !!e.isDisabled }, loadingMessage: function() { return "Loading..." }, maxMenuHeight: 300, minMenuHeight: 140, menuIsOpen: !1, menuPlacement: "bottom", menuPosition: "absolute", menuShouldBlockScroll: !1, menuShouldScrollIntoView: !(0, g.A)(), noOptionsMessage: function() { return "No options" }, openMenuOnFocus: !1, openMenuOnClick: !0, options: [], pageSize: 5, placeholder: "Select...", screenReaderStatus: function(e) { var t = e.count; return "".concat(t, " result").concat(1 !== t ? "s" : "", " available") }, styles: {}, tabIndex: 0, tabSelectsValue: !0, unstyled: !1 }; function et(e, t, n, r) { var o = eu(e, t, n) , i = es(e, t, n) , a = ei(e, t) , u = ea(e, t); return { type: "option", data: t, isDisabled: o, isSelected: i, label: a, value: u, index: r } } function en(e, t) { return e.options.map(function(n, r) { if ("options"in n) { var o = n.options.map(function(n, r) { return et(e, n, t, r) }).filter(function(t) { return eo(e, t) }); return o.length > 0 ? { type: "group", data: n, options: o, index: r } : void 0 } var i = et(e, n, t, r); return eo(e, i) ? i : void 0 }).filter(g.K) } function er(e) { return e.reduce(function(e, t) { return "group" === t.type ? e.push.apply(e, v(t.options.map(function(e) { return e.data }))) : e.push(t.data), e }, []) } function eo(e, t) { var n = e.inputValue , r = t.data , o = t.isSelected , i = t.label , a = t.value; return (!ec(e) || !o) && el(e, { label: i, value: a, data: r }, void 0 === n ? "" : n) } var ei = function(e, t) { return e.getOptionLabel(t) } , ea = function(e, t) { return e.getOptionValue(t) }; function eu(e, t, n) { return "function" == typeof e.isOptionDisabled && e.isOptionDisabled(t, n) } function es(e, t, n) { if (n.indexOf(t) > -1) return !0; if ("function" == typeof e.isOptionSelected) return e.isOptionSelected(t, n); var r = ea(e, t); return n.some(function(t) { return ea(e, t) === r }) } function el(e, t, n) { return !e.filterOption || e.filterOption(t, n) } var ec = function(e) { var t = e.hideSelectedOptions , n = e.isMulti; return void 0 === t ? 
n : t } , ed = 1 , ep = function(e) { !function(e, t) { if ("function" != typeof t && null !== t) throw TypeError("Super expression must either be null or a function"); e.prototype = Object.create(t && t.prototype, { constructor: { value: e, writable: !0, configurable: !0 } }), Object.defineProperty(e, "prototype", { writable: !1 }), t && (0, d.Z)(e, t) }(u, e); var t, n, o, i = (t = function() { if ("undefined" == typeof Reflect || !Reflect.construct || Reflect.construct.sham) return !1; if ("function" == typeof Proxy) return !0; try { return Boolean.prototype.valueOf.call(Reflect.construct(Boolean, [], function() {})), !0 } catch (e) { return !1 } }(), function() { var e, n = p(u); if (t) { var r = p(this).constructor; e = Reflect.construct(n, arguments, r) } else e = n.apply(this, arguments); return function(e, t) { if (t && ("object" === (0, f.Z)(t) || "function" == typeof t)) return t; if (void 0 !== t) throw TypeError("Derived constructors may only return object or undefined"); return function(e) { if (void 0 === e) throw ReferenceError("this hasn't been initialised - super() hasn't been called"); return e }(e) }(this, e) } ); function u(e) { var t; if (!function(e, t) { if (!(e instanceof t)) throw TypeError("Cannot call a class as a function") }(this, u), (t = i.call(this, e)).state = { ariaSelection: null, focusedOption: null, focusedValue: null, inputIsHidden: !1, isFocused: !1, selectValue: [], clearFocusValueOnUpdate: !1, prevWasFocused: !1, inputIsHiddenAfterUpdate: void 0, prevProps: void 0 }, t.blockOptionHover = !1, t.isComposing = !1, t.commonProps = void 0, t.initialTouchX = 0, t.initialTouchY = 0, t.instancePrefix = "", t.openAfterFocus = !1, t.scrollToFocusedOptionOnUpdate = !1, t.userIsDragging = void 0, t.controlRef = null, t.getControlRef = function(e) { t.controlRef = e } , t.focusedOptionRef = null, t.getFocusedOptionRef = function(e) { t.focusedOptionRef = e } , t.menuListRef = null, t.getMenuListRef = function(e) { t.menuListRef = e } , t.inputRef = null, t.getInputRef = function(e) { t.inputRef = e } , t.focus = t.focusInput, t.blur = t.blurInput, t.onChange = function(e, n) { var r = t.props , o = r.onChange , i = r.name; n.name = i, t.ariaOnChange(e, n), o(e, n) } , t.setValue = function(e, n, r) { var o = t.props , i = o.closeMenuOnSelect , a = o.isMulti , u = o.inputValue; t.onInputChange("", { action: "set-value", prevInputValue: u }), i && (t.setState({ inputIsHiddenAfterUpdate: !a }), t.onMenuClose()), t.setState({ clearFocusValueOnUpdate: !0 }), t.onChange(e, { action: n, option: r }) } , t.selectOption = function(e) { var n = t.props , r = n.blurInputOnSelect , o = n.isMulti , i = n.name , a = t.state.selectValue , u = o && t.isOptionSelected(e, a) , s = t.isOptionDisabled(e, a); if (u) { var l = t.getOptionValue(e); t.setValue((0, g.B)(a.filter(function(e) { return t.getOptionValue(e) !== l })), "deselect-option", e) } else if (s) { t.ariaOnChange((0, g.C)(e), { action: "select-option", option: e, name: i }); return } else o ? 
t.setValue((0, g.B)([].concat(v(a), [e])), "select-option", e) : t.setValue((0, g.C)(e), "select-option"); r && t.blurInput() } , t.removeValue = function(e) { var n = t.props.isMulti , r = t.state.selectValue , o = t.getOptionValue(e) , i = r.filter(function(e) { return t.getOptionValue(e) !== o }) , a = (0, g.D)(n, i, i[0] || null); t.onChange(a, { action: "remove-value", removedValue: e }), t.focusInput() } , t.clearValue = function() { var e = t.state.selectValue; t.onChange((0, g.D)(t.props.isMulti, [], null), { action: "clear", removedValues: e }) } , t.popValue = function() { var e = t.props.isMulti , n = t.state.selectValue , r = n[n.length - 1] , o = n.slice(0, n.length - 1) , i = (0, g.D)(e, o, o[0] || null); t.onChange(i, { action: "pop-value", removedValue: r }) } , t.getValue = function() { return t.state.selectValue } , t.cx = function() { for (var e = arguments.length, n = Array(e), r = 0; r < e; r++) n[r] = arguments[r]; return g.E.apply(void 0, [t.props.classNamePrefix].concat(n)) } , t.getOptionLabel = function(e) { return ei(t.props, e) } , t.getOptionValue = function(e) { return ea(t.props, e) } , t.getStyles = function(e, n) { var r = t.props.unstyled , o = J[e](n, r); o.boxSizing = "border-box"; var i = t.props.styles[e]; return i ? i(o, n) : o } , t.getClassNames = function(e, n) { var r, o; return null === (r = (o = t.props.classNames)[e]) || void 0 === r ? void 0 : r.call(o, n) } , t.getElementId = function(e) { return "".concat(t.instancePrefix, "-").concat(e) } , t.getComponents = function() { return (0, g.F)(t.props) } , t.buildCategorizedOptions = function() { return en(t.props, t.state.selectValue) } , t.getCategorizedOptions = function() { return t.props.menuIsOpen ? t.buildCategorizedOptions() : [] } , t.buildFocusableOptions = function() { return er(t.buildCategorizedOptions()) } , t.getFocusableOptions = function() { return t.props.menuIsOpen ? t.buildFocusableOptions() : [] } , t.ariaOnChange = function(e, n) { t.setState({ ariaSelection: (0, r.Z)({ value: e }, n) }) } , t.onMenuMouseDown = function(e) { 0 === e.button && (e.stopPropagation(), e.preventDefault(), t.focusInput()) } , t.onMenuMouseMove = function(e) { t.blockOptionHover = !1 } , t.onControlMouseDown = function(e) { if (!e.defaultPrevented) { var n = t.props.openMenuOnClick; t.state.isFocused ? t.props.menuIsOpen ? "INPUT" !== e.target.tagName && "TEXTAREA" !== e.target.tagName && t.onMenuClose() : n && t.openMenu("first") : (n && (t.openAfterFocus = !0), t.focusInput()), "INPUT" !== e.target.tagName && "TEXTAREA" !== e.target.tagName && e.preventDefault() } } , t.onDropdownIndicatorMouseDown = function(e) { if ((!e || "mousedown" !== e.type || 0 === e.button) && !t.props.isDisabled) { var n = t.props , r = n.isMulti , o = n.menuIsOpen; t.focusInput(), o ? (t.setState({ inputIsHiddenAfterUpdate: !r }), t.onMenuClose()) : t.openMenu("first"), e.preventDefault() } } , t.onClearIndicatorMouseDown = function(e) { e && "mousedown" === e.type && 0 !== e.button || (t.clearValue(), e.preventDefault(), t.openAfterFocus = !1, "touchend" === e.type ? t.focusInput() : setTimeout(function() { return t.focusInput() })) } , t.onScroll = function(e) { "boolean" == typeof t.props.closeMenuOnScroll ? 
e.target instanceof HTMLElement && (0, g.G)(e.target) && t.props.onMenuClose() : "function" == typeof t.props.closeMenuOnScroll && t.props.closeMenuOnScroll(e) && t.props.onMenuClose() } , t.onCompositionStart = function() { t.isComposing = !0 } , t.onCompositionEnd = function() { t.isComposing = !1 } , t.onTouchStart = function(e) { var n = e.touches , r = n && n.item(0); r && (t.initialTouchX = r.clientX, t.initialTouchY = r.clientY, t.userIsDragging = !1) } , t.onTouchMove = function(e) { var n = e.touches , r = n && n.item(0); if (r) { var o = Math.abs(r.clientX - t.initialTouchX) , i = Math.abs(r.clientY - t.initialTouchY); t.userIsDragging = o > 5 || i > 5 } } , t.onTouchEnd = function(e) { t.userIsDragging || (t.controlRef && !t.controlRef.contains(e.target) && t.menuListRef && !t.menuListRef.contains(e.target) && t.blurInput(), t.initialTouchX = 0, t.initialTouchY = 0) } , t.onControlTouchEnd = function(e) { t.userIsDragging || t.onControlMouseDown(e) } , t.onClearIndicatorTouchEnd = function(e) { t.userIsDragging || t.onClearIndicatorMouseDown(e) } , t.onDropdownIndicatorTouchEnd = function(e) { t.userIsDragging || t.onDropdownIndicatorMouseDown(e) } , t.handleInputChange = function(e) { var n = t.props.inputValue , r = e.currentTarget.value; t.setState({ inputIsHiddenAfterUpdate: !1 }), t.onInputChange(r, { action: "input-change", prevInputValue: n }), t.props.menuIsOpen || t.onMenuOpen() } , t.onInputFocus = function(e) { t.props.onFocus && t.props.onFocus(e), t.setState({ inputIsHiddenAfterUpdate: !1, isFocused: !0 }), (t.openAfterFocus || t.props.openMenuOnFocus) && t.openMenu("first"), t.openAfterFocus = !1 } , t.onInputBlur = function(e) { var n = t.props.inputValue; if (t.menuListRef && t.menuListRef.contains(document.activeElement)) { t.inputRef.focus(); return } t.props.onBlur && t.props.onBlur(e), t.onInputChange("", { action: "input-blur", prevInputValue: n }), t.onMenuClose(), t.setState({ focusedValue: null, isFocused: !1 }) } , t.onOptionHover = function(e) { t.blockOptionHover || t.state.focusedOption === e || t.setState({ focusedOption: e }) } , t.shouldHideSelectedOptions = function() { return ec(t.props) } , t.onValueInputFocus = function(e) { e.preventDefault(), e.stopPropagation(), t.focus() } , t.onKeyDown = function(e) { var n = t.props , r = n.isMulti , o = n.backspaceRemovesValue , i = n.escapeClearsValue , a = n.inputValue , u = n.isClearable , s = n.isDisabled , l = n.menuIsOpen , c = n.onKeyDown , d = n.tabSelectsValue , p = n.openMenuOnFocus , f = t.state , m = f.focusedOption , h = f.focusedValue , v = f.selectValue; if (!s) { if ("function" == typeof c && (c(e), e.defaultPrevented)) return; switch (t.blockOptionHover = !0, e.key) { case "ArrowLeft": if (!r || a) return; t.focusValue("previous"); break; case "ArrowRight": if (!r || a) return; t.focusValue("next"); break; case "Delete": case "Backspace": if (a) return; if (h) t.removeValue(h); else { if (!o) return; r ? t.popValue() : u && t.clearValue() } break; case "Tab": if (t.isComposing || e.shiftKey || !l || !d || !m || p && t.isOptionSelected(m, v)) return; t.selectOption(m); break; case "Enter": if (229 === e.keyCode) break; if (l) { if (!m || t.isComposing) return; t.selectOption(m); break } return; case "Escape": l ? 
(t.setState({ inputIsHiddenAfterUpdate: !1 }), t.onInputChange("", { action: "menu-close", prevInputValue: a }), t.onMenuClose()) : u && i && t.clearValue(); break; case " ": if (a) return; if (!l) { t.openMenu("first"); break } if (!m) return; t.selectOption(m); break; case "ArrowUp": l ? t.focusOption("up") : t.openMenu("last"); break; case "ArrowDown": l ? t.focusOption("down") : t.openMenu("first"); break; case "PageUp": if (!l) return; t.focusOption("pageup"); break; case "PageDown": if (!l) return; t.focusOption("pagedown"); break; case "Home": if (!l) return; t.focusOption("first"); break; case "End": if (!l) return; t.focusOption("last"); break; default: return } e.preventDefault() } } , t.instancePrefix = "react-select-" + (t.props.instanceId || ++ed), t.state.selectValue = (0, g.H)(e.value), e.menuIsOpen && t.state.selectValue.length) { var n = t.buildFocusableOptions() , o = n.indexOf(t.state.selectValue[0]); t.state.focusedOption = n[o] } return t } return n = [{ key: "componentDidMount", value: function() { this.startListeningComposition(), this.startListeningToTouch(), this.props.closeMenuOnScroll && document && document.addEventListener && document.addEventListener("scroll", this.onScroll, !0), this.props.autoFocus && this.focusInput(), this.props.menuIsOpen && this.state.focusedOption && this.menuListRef && this.focusedOptionRef && (0, g.I)(this.menuListRef, this.focusedOptionRef) } }, { key: "componentDidUpdate", value: function(e) { var t = this.props , n = t.isDisabled , r = t.menuIsOpen , o = this.state.isFocused; (o && !n && e.isDisabled || o && r && !e.menuIsOpen) && this.focusInput(), o && n && !e.isDisabled ? this.setState({ isFocused: !1 }, this.onMenuClose) : o || n || !e.isDisabled || this.inputRef !== document.activeElement || this.setState({ isFocused: !0 }), this.menuListRef && this.focusedOptionRef && this.scrollToFocusedOptionOnUpdate && ((0, g.I)(this.menuListRef, this.focusedOptionRef), this.scrollToFocusedOptionOnUpdate = !1) } }, { key: "componentWillUnmount", value: function() { this.stopListeningComposition(), this.stopListeningToTouch(), document.removeEventListener("scroll", this.onScroll, !0) } }, { key: "onMenuOpen", value: function() { this.props.onMenuOpen() } }, { key: "onMenuClose", value: function() { this.onInputChange("", { action: "menu-close", prevInputValue: this.props.inputValue }), this.props.onMenuClose() } }, { key: "onInputChange", value: function(e, t) { this.props.onInputChange(e, t) } }, { key: "focusInput", value: function() { this.inputRef && this.inputRef.focus() } }, { key: "blurInput", value: function() { this.inputRef && this.inputRef.blur() } }, { key: "openMenu", value: function(e) { var t = this , n = this.state , r = n.selectValue , o = n.isFocused , i = this.buildFocusableOptions() , a = "first" === e ? 0 : i.length - 1; if (!this.props.isMulti) { var u = i.indexOf(r[0]); u > -1 && (a = u) } this.scrollToFocusedOptionOnUpdate = !(o && this.menuListRef), this.setState({ inputIsHiddenAfterUpdate: !1, focusedValue: null, focusedOption: i[a] }, function() { return t.onMenuOpen() }) } }, { key: "focusValue", value: function(e) { var t = this.state , n = t.selectValue , r = t.focusedValue; if (this.props.isMulti) { this.setState({ focusedOption: null }); var o = n.indexOf(r); r || (o = -1); var i = n.length - 1 , a = -1; if (n.length) { switch (e) { case "previous": a = 0 === o ? 0 : -1 === o ? 
i : o - 1; break; case "next": o > -1 && o < i && (a = o + 1) } this.setState({ inputIsHidden: -1 !== a, focusedValue: n[a] }) } } } }, { key: "focusOption", value: function() { var e = arguments.length > 0 && void 0 !== arguments[0] ? arguments[0] : "first" , t = this.props.pageSize , n = this.state.focusedOption , r = this.getFocusableOptions(); if (r.length) { var o = 0 , i = r.indexOf(n); n || (i = -1), "up" === e ? o = i > 0 ? i - 1 : r.length - 1 : "down" === e ? o = (i + 1) % r.length : "pageup" === e ? (o = i - t) < 0 && (o = 0) : "pagedown" === e ? (o = i + t) > r.length - 1 && (o = r.length - 1) : "last" === e && (o = r.length - 1), this.scrollToFocusedOptionOnUpdate = !0, this.setState({ focusedOption: r[o], focusedValue: null }) } } }, { key: "getTheme", value: function() { return this.props.theme ? "function" == typeof this.props.theme ? this.props.theme(Q) : (0, r.Z)((0, r.Z)({}, Q), this.props.theme) : Q } }, { key: "getCommonProps", value: function() { var e = this.clearValue , t = this.cx , n = this.getStyles , r = this.getClassNames , o = this.getValue , i = this.selectOption , a = this.setValue , u = this.props , s = u.isMulti , l = u.isRtl , c = u.options; return { clearValue: e, cx: t, getStyles: n, getClassNames: r, getValue: o, hasValue: this.hasValue(), isMulti: s, isRtl: l, options: c, selectOption: i, selectProps: u, setValue: a, theme: this.getTheme() } } }, { key: "hasValue", value: function() { return this.state.selectValue.length > 0 } }, { key: "hasOptions", value: function() { return !!this.getFocusableOptions().length } }, { key: "isClearable", value: function() { var e = this.props , t = e.isClearable , n = e.isMulti; return void 0 === t ? n : t } }, { key: "isOptionDisabled", value: function(e, t) { return eu(this.props, e, t) } }, { key: "isOptionSelected", value: function(e, t) { return es(this.props, e, t) } }, { key: "filterOption", value: function(e, t) { return el(this.props, e, t) } }, { key: "formatOptionLabel", value: function(e, t) { if ("function" != typeof this.props.formatOptionLabel) return this.getOptionLabel(e); var n = this.props.inputValue , r = this.state.selectValue; return this.props.formatOptionLabel(e, { context: t, inputValue: n, selectValue: r }) } }, { key: "formatGroupLabel", value: function(e) { return this.props.formatGroupLabel(e) } }, { key: "startListeningComposition", value: function() { document && document.addEventListener && (document.addEventListener("compositionstart", this.onCompositionStart, !1), document.addEventListener("compositionend", this.onCompositionEnd, !1)) } }, { key: "stopListeningComposition", value: function() { document && document.removeEventListener && (document.removeEventListener("compositionstart", this.onCompositionStart), document.removeEventListener("compositionend", this.onCompositionEnd)) } }, { key: "startListeningToTouch", value: function() { document && document.addEventListener && (document.addEventListener("touchstart", this.onTouchStart, !1), document.addEventListener("touchmove", this.onTouchMove, !1), document.addEventListener("touchend", this.onTouchEnd, !1)) } }, { key: "stopListeningToTouch", value: function() { document && document.removeEventListener && (document.removeEventListener("touchstart", this.onTouchStart), document.removeEventListener("touchmove", this.onTouchMove), document.removeEventListener("touchend", this.onTouchEnd)) } }, { key: "renderInput", value: function() { var e = this.props , t = e.isDisabled , n = e.isSearchable , o = e.inputId , i = e.inputValue , u = 
e.tabIndex , l = e.form , c = e.menuIsOpen , d = e.required , p = this.getComponents().Input , f = this.state , m = f.inputIsHidden , h = f.ariaSelection , v = this.commonProps , b = o || this.getElementId("input") , y = (0, r.Z)((0, r.Z)((0, r.Z)({ "aria-autocomplete": "list", "aria-expanded": c, "aria-haspopup": !0, "aria-errormessage": this.props["aria-errormessage"], "aria-invalid": this.props["aria-invalid"], "aria-label": this.props["aria-label"], "aria-labelledby": this.props["aria-labelledby"], "aria-required": d, role: "combobox" }, c && { "aria-controls": this.getElementId("listbox"), "aria-owns": this.getElementId("listbox") }), !n && { "aria-readonly": !0 }), this.hasValue() ? (null == h ? void 0 : h.action) === "initial-input-focus" && { "aria-describedby": this.getElementId("live-region") } : { "aria-describedby": this.getElementId("placeholder") }); return n ? a.createElement(p, (0, s.Z)({}, v, { autoCapitalize: "none", autoComplete: "off", autoCorrect: "off", id: b, innerRef: this.getInputRef, isDisabled: t, isHidden: m, onBlur: this.onInputBlur, onChange: this.handleInputChange, onFocus: this.onInputFocus, spellCheck: "false", tabIndex: u, form: l, type: "text", value: i }, y)) : a.createElement(F, (0, s.Z)({ id: b, innerRef: this.getInputRef, onBlur: this.onInputBlur, onChange: g.J, onFocus: this.onInputFocus, disabled: t, tabIndex: u, inputMode: "none", form: l, value: "" }, y)) } }, { key: "renderPlaceholderOrValue", value: function() { var e = this , t = this.getComponents() , n = t.MultiValue , r = t.MultiValueContainer , o = t.MultiValueLabel , i = t.MultiValueRemove , u = t.SingleValue , l = t.Placeholder , c = this.commonProps , d = this.props , p = d.controlShouldRenderValue , f = d.isDisabled , m = d.isMulti , h = d.inputValue , v = d.placeholder , g = this.state , b = g.selectValue , y = g.focusedValue , O = g.isFocused; if (!this.hasValue() || !p) return h ? null : a.createElement(l, (0, s.Z)({}, c, { key: "placeholder", isDisabled: f, isFocused: O, innerProps: { id: this.getElementId("placeholder") } }), v); if (m) return b.map(function(t, u) { var l = "".concat(e.getOptionLabel(t), "-").concat(e.getOptionValue(t)); return a.createElement(n, (0, s.Z)({}, c, { components: { Container: r, Label: o, Remove: i }, isFocused: t === y, isDisabled: f, key: l, index: u, removeProps: { onClick: function() { return e.removeValue(t) }, onTouchEnd: function() { return e.removeValue(t) }, onMouseDown: function(e) { e.preventDefault() } }, data: t }), e.formatOptionLabel(t, "value")) }); if (h) return null; var x = b[0]; return a.createElement(u, (0, s.Z)({}, c, { data: x, isDisabled: f }), this.formatOptionLabel(x, "value")) } }, { key: "renderClearIndicator", value: function() { var e = this.getComponents().ClearIndicator , t = this.commonProps , n = this.props , r = n.isDisabled , o = n.isLoading , i = this.state.isFocused; if (!this.isClearable() || !e || r || !this.hasValue() || o) return null; var u = { onMouseDown: this.onClearIndicatorMouseDown, onTouchEnd: this.onClearIndicatorTouchEnd, "aria-hidden": "true" }; return a.createElement(e, (0, s.Z)({}, t, { innerProps: u, isFocused: i })) } }, { key: "renderLoadingIndicator", value: function() { var e = this.getComponents().LoadingIndicator , t = this.commonProps , n = this.props , r = n.isDisabled , o = n.isLoading , i = this.state.isFocused; return e && o ? 
a.createElement(e, (0, s.Z)({}, t, { innerProps: { "aria-hidden": "true" }, isDisabled: r, isFocused: i })) : null } }, { key: "renderIndicatorSeparator", value: function() { var e = this.getComponents() , t = e.DropdownIndicator , n = e.IndicatorSeparator; if (!t || !n) return null; var r = this.commonProps , o = this.props.isDisabled , i = this.state.isFocused; return a.createElement(n, (0, s.Z)({}, r, { isDisabled: o, isFocused: i })) } }, { key: "renderDropdownIndicator", value: function() { var e = this.getComponents().DropdownIndicator; if (!e) return null; var t = this.commonProps , n = this.props.isDisabled , r = this.state.isFocused , o = { onMouseDown: this.onDropdownIndicatorMouseDown, onTouchEnd: this.onDropdownIndicatorTouchEnd, "aria-hidden": "true" }; return a.createElement(e, (0, s.Z)({}, t, { innerProps: o, isDisabled: n, isFocused: r })) } }, { key: "renderMenu", value: function() { var e, t = this, n = this.getComponents(), r = n.Group, o = n.GroupHeading, i = n.Menu, u = n.MenuList, l = n.MenuPortal, c = n.LoadingMessage, d = n.NoOptionsMessage, p = n.Option, f = this.commonProps, m = this.state.focusedOption, h = this.props, v = h.captureMenuScroll, b = h.inputValue, y = h.isLoading, O = h.loadingMessage, x = h.minMenuHeight, C = h.maxMenuHeight, Z = h.menuIsOpen, w = h.menuPlacement, I = h.menuPosition, S = h.menuPortalTarget, M = h.menuShouldBlockScroll, E = h.menuShouldScrollIntoView, V = h.noOptionsMessage, D = h.onMenuScrollToTop, P = h.onMenuScrollToBottom; if (!Z) return null; var k = function(e, n) { var r = e.type , o = e.data , i = e.isDisabled , u = e.isSelected , l = e.label , c = e.value , d = m === o , h = i ? void 0 : function() { return t.onOptionHover(o) } , v = "".concat(t.getElementId("option"), "-").concat(n); return a.createElement(p, (0, s.Z)({}, f, { innerProps: { id: v, onClick: i ? void 0 : function() { return t.selectOption(o) } , onMouseMove: h, onMouseOver: h, tabIndex: -1 }, data: o, isDisabled: i, isSelected: u, key: v, label: l, type: r, value: c, isFocused: d, innerRef: d ? 
t.getFocusedOptionRef : void 0 }), t.formatOptionLabel(e.data, "menu")) }; if (this.hasOptions()) e = this.getCategorizedOptions().map(function(e) { if ("group" === e.type) { var n = e.data , i = e.options , u = e.index , l = "".concat(t.getElementId("group"), "-").concat(u); return a.createElement(r, (0, s.Z)({}, f, { key: l, data: n, options: i, Heading: o, headingProps: { id: "".concat(l, "-heading"), data: e.data }, label: t.formatGroupLabel(e.data) }), e.options.map(function(e) { return k(e, "".concat(u, "-").concat(e.index)) })) } if ("option" === e.type) return k(e, "".concat(e.index)) }); else if (y) { var R = O({ inputValue: b }); if (null === R) return null; e = a.createElement(c, f, R) } else { var T = V({ inputValue: b }); if (null === T) return null; e = a.createElement(d, f, T) } var L = { minMenuHeight: x, maxMenuHeight: C, menuPlacement: w, menuPosition: I, menuShouldScrollIntoView: E } , F = a.createElement(g.M, (0, s.Z)({}, f, L), function(n) { var r = n.ref , o = n.placerProps , l = o.placement , c = o.maxHeight; return a.createElement(i, (0, s.Z)({}, f, L, { innerRef: r, innerProps: { onMouseDown: t.onMenuMouseDown, onMouseMove: t.onMenuMouseMove, id: t.getElementId("listbox") }, isLoading: y, placement: l }), a.createElement(q, { captureEnabled: v, onTopArrive: D, onBottomArrive: P, lockEnabled: M }, function(n) { return a.createElement(u, (0, s.Z)({}, f, { innerRef: function(e) { t.getMenuListRef(e), n(e) }, isLoading: y, maxHeight: c, focusedOption: m }), e) })) }); return S || "fixed" === I ? a.createElement(l, (0, s.Z)({}, f, { appendTo: S, controlElement: this.controlRef, menuPlacement: w, menuPosition: I }), F) : F } }, { key: "renderFormField", value: function() { var e = this , t = this.props , n = t.delimiter , r = t.isDisabled , o = t.isMulti , i = t.name , u = t.required , s = this.state.selectValue; if (u && !this.hasValue() && !r) return a.createElement($, { name: i, onFocus: this.onValueInputFocus }); if (i && !r) { if (o) { if (n) { var l = s.map(function(t) { return e.getOptionValue(t) }).join(n); return a.createElement("input", { name: i, type: "hidden", value: l }) } var c = s.length > 0 ? s.map(function(t, n) { return a.createElement("input", { key: "i-".concat(n), name: i, type: "hidden", value: e.getOptionValue(t) }) }) : a.createElement("input", { name: i, type: "hidden", value: "" }); return a.createElement("div", null, c) } var d = s[0] ? 
this.getOptionValue(s[0]) : ""; return a.createElement("input", { name: i, type: "hidden", value: d }) } } }, { key: "renderLiveRegion", value: function() { var e = this.commonProps , t = this.state , n = t.ariaSelection , r = t.focusedOption , o = t.focusedValue , i = t.isFocused , u = t.selectValue , l = this.getFocusableOptions(); return a.createElement(w, (0, s.Z)({}, e, { id: this.getElementId("live-region"), ariaSelection: n, focusedOption: r, focusedValue: o, isFocused: i, selectValue: u, focusableOptions: l })) } }, { key: "render", value: function() { var e = this.getComponents() , t = e.Control , n = e.IndicatorsContainer , r = e.SelectContainer , o = e.ValueContainer , i = this.props , u = i.className , l = i.id , c = i.isDisabled , d = i.menuIsOpen , p = this.state.isFocused , f = this.commonProps = this.getCommonProps(); return a.createElement(r, (0, s.Z)({}, f, { className: u, innerProps: { id: l, onKeyDown: this.onKeyDown }, isDisabled: c, isFocused: p }), this.renderLiveRegion(), a.createElement(t, (0, s.Z)({}, f, { innerRef: this.getControlRef, innerProps: { onMouseDown: this.onControlMouseDown, onTouchEnd: this.onControlTouchEnd }, isDisabled: c, isFocused: p, menuIsOpen: d }), a.createElement(o, (0, s.Z)({}, f, { isDisabled: c }), this.renderPlaceholderOrValue(), this.renderInput()), a.createElement(n, (0, s.Z)({}, f, { isDisabled: c }), this.renderClearIndicator(), this.renderLoadingIndicator(), this.renderIndicatorSeparator(), this.renderDropdownIndicator())), this.renderMenu(), this.renderFormField()) } }], o = [{ key: "getDerivedStateFromProps", value: function(e, t) { var n = t.prevProps , o = t.clearFocusValueOnUpdate , i = t.inputIsHiddenAfterUpdate , a = t.ariaSelection , u = t.isFocused , s = t.prevWasFocused , l = e.options , c = e.value , d = e.menuIsOpen , p = e.inputValue , f = e.isMulti , m = (0, g.H)(c) , h = {}; if (n && (c !== n.value || l !== n.options || d !== n.menuIsOpen || p !== n.inputValue)) { var v, b = d ? er(en(e, m)) : [], y = o ? function(e, t) { var n = e.focusedValue , r = e.selectValue.indexOf(n); if (r > -1) { if (t.indexOf(n) > -1) return n; if (r < t.length) return t[r] } return null }(t, m) : null, O = (v = t.focusedOption) && b.indexOf(v) > -1 ? v : b[0]; h = { selectValue: m, focusedOption: O, focusedValue: y, clearFocusValueOnUpdate: !1 } } var x = a , C = u && s; return u && !C && (x = { value: (0, g.D)(f, m, m[0] || null), options: m, action: "initial-input-focus" }, C = !s), (null == a ? void 0 : a.action) === "initial-input-focus" && (x = null), (0, r.Z)((0, r.Z)((0, r.Z)({}, h), null != i && e !== n ? { inputIsHidden: i, inputIsHiddenAfterUpdate: void 0 } : {}), {}, { prevProps: e, ariaSelection: x, prevWasFocused: C }) } }], n && c(u.prototype, n), o && c(u, o), Object.defineProperty(u, "prototype", { writable: !1 }), u }(a.Component); ep.defaultProps = ee, n(93865), n(99581), n(41455); var ef = (0, a.forwardRef)(function(e, t) { var n, l, c, d, p, f, m, h, v, g, b, y, O, x, C, Z, w, I, S, M, E, V, D, P, k, R, T, L = (n = e.defaultInputValue, l = e.defaultMenuIsOpen, c = e.defaultValue, d = e.inputValue, p = e.menuIsOpen, f = e.onChange, m = e.onInputChange, h = e.onMenuClose, v = e.onMenuOpen, g = e.value, b = (0, i.Z)(e, u), y = (0, a.useState)(void 0 !== d ? d : void 0 === n ? "" : n), x = (O = (0, o.Z)(y, 2))[0], C = O[1], Z = (0, a.useState)(void 0 !== p ? p : void 0 !== l && l), I = (w = (0, o.Z)(Z, 2))[0], S = w[1], M = (0, a.useState)(void 0 !== g ? g : void 0 === c ? 
null : c), V = (E = (0, o.Z)(M, 2))[0], D = E[1], P = (0, a.useCallback)(function(e, t) { "function" == typeof f && f(e, t), D(e) }, [f]), k = (0, a.useCallback)(function(e, t) { var n; "function" == typeof m && (n = m(e, t)), C(void 0 !== n ? n : e) }, [m]), R = (0, a.useCallback)(function() { "function" == typeof v && v(), S(!0) }, [v]), T = (0, a.useCallback)(function() { "function" == typeof h && h(), S(!1) }, [h]), (0, r.Z)((0, r.Z)({}, b), {}, { inputValue: void 0 !== d ? d : x, menuIsOpen: void 0 !== p ? p : I, onChange: P, onInputChange: k, onMenuClose: T, onMenuOpen: R, value: void 0 !== g ? g : V })); return a.createElement(ep, (0, s.Z)({ ref: t }, L)) }) }, 41455: function(e, t, n) { "use strict"; var r = n(70079).useLayoutEffect; t.Z = r }, 19919: function(e, t, n) { "use strict"; function r(e, t) { (null == t || t > e.length) && (t = e.length); for (var n = 0, r = Array(t); n < t; n++) r[n] = e[n]; return r } n.d(t, { Z: function() { return r } }) }, 41470: function(e, t, n) { "use strict"; n.d(t, { Z: function() { return o } }); var r = n(72843); function o(e, t, n) { return (t = (0, r.Z)(t))in e ? Object.defineProperty(e, t, { value: n, enumerable: !0, configurable: !0, writable: !0 }) : e[t] = n, e } }, 67666: function(e, t, n) { "use strict"; n.d(t, { Z: function() { return i } }); var r = n(41470); function o(e, t) { var n = Object.keys(e); if (Object.getOwnPropertySymbols) { var r = Object.getOwnPropertySymbols(e); t && (r = r.filter(function(t) { return Object.getOwnPropertyDescriptor(e, t).enumerable })), n.push.apply(n, r) } return n } function i(e) { for (var t = 1; t < arguments.length; t++) { var n = null != arguments[t] ? arguments[t] : {}; t % 2 ? o(Object(n), !0).forEach(function(t) { (0, r.Z)(e, t, n[t]) }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(e, Object.getOwnPropertyDescriptors(n)) : o(Object(n)).forEach(function(t) { Object.defineProperty(e, t, Object.getOwnPropertyDescriptor(n, t)) }) } return e } }, 97395: function(e, t, n) { "use strict"; n.d(t, { Z: function() { return o } }); var r = n(92897); function o(e, t) { if (null == e) return {}; var n, o, i = (0, r.Z)(e, t); if (Object.getOwnPropertySymbols) { var a = Object.getOwnPropertySymbols(e); for (o = 0; o < a.length; o++) n = a[o], !(t.indexOf(n) >= 0) && Object.prototype.propertyIsEnumerable.call(e, n) && (i[n] = e[n]) } return i } }, 38654: function(e, t, n) { "use strict"; n.d(t, { Z: function() { return o } }); var r = n(59428); function o(e, t) { return function(e) { if (Array.isArray(e)) return e }(e) || function(e, t) { var n = null == e ? 
null : "undefined" != typeof Symbol && e[Symbol.iterator] || e["@@iterator"]; if (null != n) { var r, o, i, a, u = [], s = !0, l = !1; try { if (i = (n = n.call(e)).next, 0 === t) { if (Object(n) !== n) return; s = !1 } else for (; !(s = (r = i.call(n)).done) && (u.push(r.value), u.length !== t); s = !0) ; } catch (e) { l = !0, o = e } finally { try { if (!s && null != n.return && (a = n.return(), Object(a) !== a)) return } finally { if (l) throw o } } return u } }(e, t) || (0, r.Z)(e, t) || function() { throw TypeError("Invalid attempt to destructure non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.") }() } }, 72843: function(e, t, n) { "use strict"; n.d(t, { Z: function() { return o } }); var r = n(24622); function o(e) { var t = function(e, t) { if ("object" !== (0, r.Z)(e) || null === e) return e; var n = e[Symbol.toPrimitive]; if (void 0 !== n) { var o = n.call(e, t || "default"); if ("object" !== (0, r.Z)(o)) return o; throw TypeError("@@toPrimitive must return a primitive value.") } return ("string" === t ? String : Number)(e) }(e, "string"); return "symbol" === (0, r.Z)(t) ? t : String(t) } }, 24622: function(e, t, n) { "use strict"; function r(e) { return (r = "function" == typeof Symbol && "symbol" == typeof Symbol.iterator ? function(e) { return typeof e } : function(e) { return e && "function" == typeof Symbol && e.constructor === Symbol && e !== Symbol.prototype ? "symbol" : typeof e } )(e) } n.d(t, { Z: function() { return r } }) }, 59428: function(e, t, n) { "use strict"; n.d(t, { Z: function() { return o } }); var r = n(19919); function o(e, t) { if (e) { if ("string" == typeof e) return (0, r.Z)(e, t); var n = Object.prototype.toString.call(e).slice(8, -1); if ("Object" === n && e.constructor && (n = e.constructor.name), "Map" === n || "Set" === n) return Array.from(e); if ("Arguments" === n || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return (0, r.Z)(e, t) } } }, 16266: function(e, t, n) { "use strict"; var r = n(70079); let o = r.forwardRef(function({title: e, titleId: t, ...n}, o) { return r.createElement("svg", Object.assign({ xmlns: "http://www.w3.org/2000/svg", viewBox: "0 0 24 24", fill: "currentColor", "aria-hidden": "true", ref: o, "aria-labelledby": t }, n), e ? r.createElement("title", { id: t }, e) : null, r.createElement("path", { fillRule: "evenodd", d: "M18.685 19.097A9.723 9.723 0 0021.75 12c0-5.385-4.365-9.75-9.75-9.75S2.25 6.615 2.25 12a9.723 9.723 0 003.065 7.097A9.716 9.716 0 0012 21.75a9.716 9.716 0 006.685-2.653zm-12.54-1.285A7.486 7.486 0 0112 15a7.486 7.486 0 015.855 2.812A8.224 8.224 0 0112 20.25a8.224 8.224 0 01-5.855-2.438zM15.75 9a3.75 3.75 0 11-7.5 0 3.75 3.75 0 017.5 0z", clipRule: "evenodd" })) }); t.Z = o } }]);
PypiClean
/Vespa_Suite-1.1.0-py37-none-any.whl/vespa/common/wx_gravy/image_panel_roi.py
import math # 3rd party modules import matplotlib import matplotlib.cm as cm import wx import numpy as np # If we set the backend unconditionally, we sometimes get an undesirable # message. if matplotlib.get_backend() != "WXAgg": matplotlib.use('WXAgg') import matplotlib.patches from matplotlib.transforms import blended_transform_factory from matplotlib.patches import Rectangle, Circle, Ellipse, Polygon from matplotlib.lines import Line2D # Our modules class ImagePanel(wx.Panel): """ The ImagePanel has a Figure and a Canvas and 'n' Axes. The user defines the number of axes on Init and this number cannot be changed thereafter. However, the user can change the number of axes displayed in the Figure. Axes are specified on Init because the zoom and widlev actions need an axes to attach to initialize properly. on_size events simply set a flag, and the actual resizing of the figure is triggered by an Idle event. """ # Set _EVENT_DEBUG to True to activate printing of messages to stdout # during events. _EVENT_DEBUG = False def __init__(self, parent, naxes=1, color=None, bgcolor="#ffffff", dpi=None, zoom='none', widlev=False, roitool=False, unlink=False, do_zoom_select_event=False, do_zoom_motion_event=False, do_widlev_select_event=False, do_widlev_motion_event=False, do_roi_select_event=False, do_roi_motion_event=False, do_scroll_event=False, xscale_bump=0.0, yscale_bump=0.0, widlev_rate=3.0, # multiplier for mouse motion props_zoom=None, props_roi=None, data=None, colormap=cm.gray, # or gist_gray to reverse ceiling=None, floor=None, **kwargs): from matplotlib.backends.backend_wxagg import FigureCanvasWxAgg from matplotlib.figure import Figure # initialize Panel if 'id' not in list(kwargs.keys()): kwargs['id'] = wx.ID_ANY if 'style' not in list(kwargs.keys()): kwargs['style'] = wx.NO_FULL_REPAINT_ON_RESIZE wx.Panel.__init__( self, parent, **kwargs ) self.parent = parent self.unlink = unlink self.xscale_bump = xscale_bump self.yscale_bump = yscale_bump self.widlev_rate = widlev_rate self.ceiling = [ceiling for i in range(naxes)] self.floor = [floor for i in range(naxes)] self.width = [256.0 for i in range(naxes)] self.level = [128.0 for i in range(naxes)] self.width_base = [256.0 for i in range(naxes)] self.level_base = [128.0 for i in range(naxes)] self.norm = matplotlib.colors.Normalize(vmin=0.0, vmax=255.0) self.cmap = [colormap for i in range(naxes)] # AxesImage id returned from imshow() command self.imageid = [None for i in range(naxes)] # store image data with ceil/floor applied, but not normalized 0-255 # - used to display values from a given voxel not in grayscale range self.img_hard_limits = [None for i in range(naxes)] # store image data with ceil/floor applied AND normalize to 0-255 # using max() and min() image value. # - Start point for applying interactive width/level via widlev plugin self.img_norm = [None for i in range(naxes)] # Under GTK we need to track self's size to avoid a continuous flow # of size events. 
self._platform_is_gtk = ("__WXGTK__" in wx.PlatformInfo) self._current_size = (-1, -1) # initialize matplotlib stuff self.figure = Figure( None, dpi, frameon=True ) self.canvas = FigureCanvasWxAgg( self, -1, self.figure ) # here we create the required naxes, add them to the figure, but we # also keep a permanent reference to each axes so they can be added # or removed from the figure as the user requests 1-N axes be displayed self.axes = [] for i in range(naxes): self.axes.append(self.figure.add_subplot(naxes,1,i+1)) self.naxes = naxes self.all_axes = list(self.axes) # internal data setup if not data or len(data) != naxes: data = self._default_data() self.set_data(data) self.figure.subplots_adjust(left=0.01,right=0.99, bottom=0.01,top=0.99, wspace=0.0,hspace=0.0) # for images we don't show x or y axis values for axis in self.all_axes: axis.set_facecolor(bgcolor) axis.xaxis.set_visible(False) axis.yaxis.set_visible(False) self.zoom = [] self.widlev = [] self.roitool = [] self.set_color( color ) self._set_size() self._resizeflag = False self.Bind(wx.EVT_IDLE, self._on_idle) self.Bind(wx.EVT_SIZE, self._on_size) # ensure that properties for zoom and reference regions exist if not props_zoom: props_zoom = dict(alpha=0.2, facecolor='gold') # ensure that properties for zoom and reference regions exist if not props_roi: props_roi = dict(alpha=0.2, facecolor='red') #---------------------------------------------------------------------- # enable Zoom, Reference, Middle and Scroll functionality as required if zoom == 'box': if not unlink: self.zoom = ZoomBox( self, self.axes, drawtype='box', useblit=True, button=1, do_zoom_select_event=do_zoom_select_event, do_zoom_motion_event=do_zoom_motion_event, spancoords='data', rectprops=props_zoom) else: for axes in self.axes: self.zoom.append(ZoomBox( self, [axes], drawtype='box', useblit=True, button=1, do_zoom_select_event=do_zoom_select_event, do_zoom_motion_event=do_zoom_motion_event, spancoords='data', rectprops=props_zoom)) if widlev: if not unlink: self.widlev = WidLevEvents(self, self.axes, button=2, do_widlev_select_event=do_widlev_select_event, do_widlev_motion_event=do_widlev_motion_event) else: for axes in self.axes: self.widlev.append( WidLevEvents(self, [axes], button=2, do_widlev_select_event=do_widlev_select_event, do_widlev_motion_event=do_widlev_motion_event)) if roitool: for axes in self.axes: self.roitool.append(RoiTool(axes, button=3, roi_shape='circle', props_draw=None, props_roi=props_roi, useblit=True)) # if not unlink: # self.roitool = RoiTool( self, self.axes, # drawtype='box', # useblit=True, # button=3, # do_roi_select_event=do_roi_select_event, # do_roi_motion_event=do_roi_motion_event, # rectprops=props_roi) # else: # for axes in self.axes: # self.roitool.append( RoiTool( self, [axes], # drawtype='box', # useblit=True, # button=3, # do_roi_select_event=do_roi_select_event, # do_roi_motion_event=do_roi_motion_event, # rectprops=props_roi)) self.do_motion_event = True self.motion_id = self.canvas.mpl_connect('motion_notify_event', self._on_move) self.do_scroll_event = do_scroll_event if self.do_scroll_event: self.scroll_id = self.canvas.mpl_connect('scroll_event', self._on_scroll) # initialize plots with initial data and format axes self.set_data(self.data) self.update() #======================================================= # # Internal Helper Functions # #======================================================= def _dprint(self, a_string): if self._EVENT_DEBUG: print(a_string) def _on_size( self, event ): if 
self._platform_is_gtk: # This is a workaround current_x, current_y = self._current_size new_x, new_y = tuple(event.GetSize()) if (abs(current_x - new_x) > 1) or (abs(current_y - new_y) > 1): self._resizeflag = True else: # Size has only changed by one pixel or less. I ignore it. event.Skip(False) else: self._resizeflag = True def _on_idle( self, evt ): if self._resizeflag: self._resizeflag = False self._set_size() def _set_size( self ): pixels = tuple( self.parent.GetClientSize() ) self.SetSize( pixels ) self.canvas.SetSize( pixels ) self.figure.set_size_inches( float( pixels[0] )/self.figure.get_dpi(), float( pixels[1] )/self.figure.get_dpi() ) self._current_size = pixels def _on_move(self, event): """ This is the internal method that organizes the data that is sent to the external user defined event handler for motion events. In here we gather data values from line plots, determine which axis we are in, then call the (hopefully) overloaded on_motion() method """ if event.inaxes == None or not self.do_motion_event: return x0, y0, x1, y1 = bounds = event.inaxes.dataLim.bounds values, raw = self._get_values(event) iaxis = self._get_current_axis_index(event) self.on_motion(event.xdata, event.ydata, raw, bounds, iaxis) def _on_scroll(self, event): """ This is the internal method that organizes the data that is sent to the external user defined event handler for scroll events. In here we determine which axis we are in, then call the (hopefully) overloaded on_scroll() method """ if event.inaxes == None: return iaxis = self._get_current_axis_index(event) self.on_scroll(event.button, event.step, iaxis) def _get_current_axis_index(self, event): iaxis = None for i,axis in enumerate(self.axes): if axis == event.inaxes: iaxis = i return iaxis def _default_data(self): data = [] for i in range(self.naxes): data.append([self._dist(128),]) return data def _get_values(self, event): """ Generic utility function that polls the axes that the mouse is within to return a list of data values at the x location of the cursor. Note. value list here contains the uint8 scaled values in the axes.images array while the rvalue list contains values from the data array after floor/ceil values are applied, but before the window/level parameters are applied. Since there may be one or more data arrays in the canvas (hopefully with alpha values < 1.0) we return value and rvalue as a list. """ iaxis = self._get_current_axis_index(event) if iaxis is None: iaxis = 0 x0, y0, x1, y1 = event.inaxes.dataLim.bounds xindx, yindx = self._get_data_index(event) value = [] for image in event.inaxes.get_images(): dat = image.get_array() if xindx < dat.shape[0] and yindx < dat.shape[1]: value.append(dat[xindx,yindx]) rvalue = [] image = self.img_hard_limits[iaxis] if xindx < image.shape[0] and yindx < image.shape[1]: rvalue.append(image[xindx,yindx]) return value, rvalue def _get_data_index(self, event, xvalue=None, yvalue=None): """ Generic utility function that polls the axes that the mouse is within to return an index within the data array for the x location of the cursor. 
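Illustrative worked example (values assumed, not taken from a real event): with dataLim bounds giving x0=0 and x1=256 and an image xpts=256 columns wide, a cursor at xvalue=64.0 maps to xindx = int(round((256-1)*(64.0-0)/256)) = 64, which is then clamped to the valid 0..xpts-1 range.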
""" indx = 0 x0, y0, x1, y1 = event.inaxes.dataLim.bounds xpts,ypts = event.inaxes.get_images()[0].get_size() if not xvalue: xvalue = event.xdata if not yvalue: yvalue = event.ydata if xpts>=0: xindx = int(round((xpts-1) * (xvalue-x0)/x1)) if ypts>=0: yindx = int(round((ypts-1) * (yvalue-y0)/y1)) if xindx > (xpts-1): xindx = xpts-1 if xindx < 0: xindx = 0 if yindx > (ypts-1): yindx = ypts-1 if yindx < 0: yindx = 0 return xindx, yindx def _dist(self, n, m=None): """ Return a rectangular array in which each pixel = euclidian distance from the origin. """ n1 = n if not m: m1 = n else: m1 = m x = np.arange(n1) x = np.array([val**2 if val < (n1-val) else (n1-val)**2 for val in x ]) a = np.ndarray((n1,m1),float) #Make array for i in range(int((m1/2)+1)): #Row loop y = np.sqrt(x + i**2.0) #Euclidian distance a[i,:] = y #Insert the row if i != 0: a[m1-i,:] = y #Symmetrical return a #======================================================= # # User Accessible Data Functions # #======================================================= def set_data(self, data, index=None): """ User can set data into one or all axes using this method. If index is supplied, we assume that only one ndarray is being passed in via the data parameter. If no index is supplied then we assume that a list of ndarrays the size of self.naxes is being passed in to replace all data in all axes. Example 1 - Data is a list of dicts raw = {'data' : raw_data, # 2D numpy array 'alpha' : 1.0 } # value 0.0-1.0 fit = {'data' : fit_data, # 2D numpy array 'alpha' : 1.0 } # value 0.0-1.0 data = [raw, fit] self.view.set_data(data) self.view.update(set_scale=not self._scale_intialized, no_draw=True) self.view.canvas.draw() Example 2 - Data is a single numpy array, the colors dict will use default values set in set_data() method data = [raw_data,] # 2D numpy array data = [[data]] self.view.set_data(data) # alpha defaults to 1.0 self.view.update(set_scale=not self._scale_intialized, no_draw=True) self.view.canvas.draw() """ for i, item in enumerate(data): for j, dat in enumerate(item): if isinstance(dat, dict): # Dict in this item, but ensure all keys are present if 'alpha' not in list(dat.keys()): dat['alpha'] = 1.0 if 'cmap' not in list(dat.keys()): dat['cmap'] = self.cmap[i] else: # Only data in this item, so add all default values dat = { 'data' : dat, 'alpha' : 1.0, 'cmap' : self.cmap[i] } item[j] = dat if index: if index < 0 or index >= self.naxes: raise ValueError("index must be within that number of axes in the plot_panel") if data[0][0]['data'].shape != self.data[0][0]['data'].shape: raise ValueError("new data must be a same number of spectral points as existing data") # even though we are inserting into an index, I want to force users # to submit a dict in a list of lists format so it is consistent # with submitting a whole new set of data (below). 
We just take the # first list of dicts from the submitted data and put it in the # index position self.data[index] = data[0] else: if len(data) != self.naxes: raise ValueError("data must be a list with naxes number of ndarrays") for item in data: for dat in item: d = dat['data'] padding = 2 - len(d.shape) if padding > 0: d.shape = ([1] * padding) + list(d.shape) elif padding == 0: # Nothing to do; data already has correct number of dims pass else: # padding < 0 ==> data has too many dims raise ValueError("Data with shape %s has too many dimensions" % str(item.shape)) if d.shape != data[0][0]['data'].shape: raise ValueError("all ndarrays must have same dimensions") self.data = data def update(self, index=None, keep_norm=False, no_draw=False): """ Convenience function that runs through all the typical steps needed to refresh the screen after a set_data(). The set_scale option is typically used only once to start set the bounding box to reasonable bounds for when a zoom box zooms back out. """ self.apply_hard_limits(index=index) self.apply_norm_widlev(index=index, keep_norm=keep_norm) self.update_images(index=index) if not no_draw: self.canvas.draw() def update_images(self, index=None, force_bounds=False): """ Sets the data from the normalized image numpy arrays into the axes. We also set the axes dataLim and viewLim ranges here just in case the dimensions on the image being displayed has changed. This reset allows the zoom reset to work properly. """ indices = self.parse_indices(index) for i in indices: axes = self.all_axes[i] if axes.images: yold, xold = axes.images[0].get_array().shape else: yold, xold = -1,-1 axes.images = [] ddict = self.data[i][0] img_norm = self.img_norm[i] alpha = ddict['alpha'] cmap = ddict['cmap'] img_norm = self.calc_lut_value(img_norm, self.width[i], self.level[i]) xmin, xwid, ymin, ywid = 0, img_norm.shape[1], 0, img_norm.shape[0] self.imageid[i] = axes.imshow(img_norm, norm=self.norm, cmap=cmap, alpha=alpha) if xold != xwid or yold!=ywid or force_bounds: # Set new bounds for dataLims to x,y extent of data in the # new image. On reset zoom this is how far we reset the limits. axes.ignore_existing_data_limits = True axes.update_datalim([[xmin,ymin],[xmin+xwid,ymin+ywid]]) # Matches viewLims view limits to the new data. By # default, new data and updating causes display to show the # entire image. Any zoom setting is lost. 
axes.set_xlim((xmin, xmin+xwid), auto=None) axes.set_ylim((ymin, ymin+ywid), auto=None) # may need this line in future if we do overlay # self.figure.hold(True) #self.figure.hold(False) def calc_lut_value(self, data, width, level): """Apply Look-Up Table values to data for given width/level values.""" conditions = [data <= (level-0.5-(width-1)/2), data > (level-0.5+(width-1)/2)] functions = [0, 255, lambda data: ((data - (level - 0.5))/(width-1) + 0.5)*(255-0)] # 3rd function is default lutvalue = np.piecewise(data, conditions, functions) # Convert the resultant array to an unsigned 8-bit array to create # an 8-bit grayscale LUT since the range is only from 0 to 255 return np.array(lutvalue, dtype=np.uint8) def parse_indices(self, index=None): """ Ensure we know what data axes to act upon - index can be a list or scalar - if list, must be naxes items or less - list/scalar values need to be in range of 0 to naxes-1 """ if index is None: indices = list(range(self.naxes)) else: if isinstance(index, list): if len(index) <= self.naxes: if all(index < self.naxes): indices = index else: raise ValueError("index in list outside naxes range") else: raise ValueError("too many index entries") else: if index < self.naxes: indices = [index] else: raise ValueError("scalar index outside naxes range") return indices def set_floor_ceiling(self, index, floor, ceiling): """ Intent is to allow users to set floor and ceiling values for each image This can be abused in a variety of ways. Typically I would expect them to use one of the scenarios below: 1) provide one scalar index and a scalar floor and/or scalar ceiling value to be inserted 2) set index to None and have floor and ceiling lists of naxes length which map one to one to the existing arrays 3) set index to None and have scalar floor and ceiling values that map to all existing arrays. """ indices = self.parse_indices(index) for i in indices: if ceiling is not None: if isinstance(ceiling, list): if len(ceiling) == len(self.ceiling): # apply one to one map of values to axes self.ceiling[i] = ceiling[i] else: raise ValueError("Ceiling list wrong size. Set either all values or only one.") else: # apply scalar value to all axes self.ceiling[i] = ceiling if floor is not None: if isinstance(floor, list): if len(floor) == len(self.floor): # apply one to one map of values to axes self.floor[i] = floor[i] else: raise ValueError("Floor list wrong size. Set either all values or only one.") else: # apply scalar value to all axes in indices self.floor[i] = floor def apply_hard_limits(self, index=None): """ Apply the user provided ceiling/floor values to limit the range of data values in the original image data. We save these floor to ceiling normalized images to serve as a starting point from which we calculate normalized images. We also use the saved limited images to snag values out of to report in various events. 
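Worked example (illustrative values): with floor=10.0 and ceiling=200.0 for an axes, the absolute value of a copy of the data is taken, a data value of 250.0 is stored as 200.0 and a value of 5.0 as 10.0 in img_hard_limits, while self.data itself is left unchanged.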
""" indices = self.parse_indices(index) # loop through the data, apply ceiling/floor value and save new image for i in indices: data = self.data[i][0]['data'].copy() orig_shape = data.shape data = np.abs(data.flatten()) if self.ceiling[i]: data[data>self.ceiling[i]] = self.ceiling[i] if self.floor[i]: data[data<self.floor[i]] = self.floor[i] data.shape = orig_shape self.img_hard_limits[i] = data # ceil/floor applied def calc_norm_widlev(self, index=None): """ Calculate the width and level values to that describe the original data in images in terms of their data.min and data.max values """ indices = self.parse_indices(index) for i in indices: data = self.img_hard_limits[i].copy() self.width_base[i] = np.int(np.abs(data.max()) + np.abs(data.min())) self.level_base[i] = np.int(self.width_base[i]*0.5 - np.abs(data.min())) def apply_norm_widlev(self, index=None, keep_norm=False): """ Apply the calculated width and level values that describe the original data in images in terms of their data.min and data.max values We save these 0-255 normalized images to serve as a starting point from which we apply the user selected width and level values to window the image on the screen. """ indices = self.parse_indices(index) for i in indices: data = self.img_hard_limits[i].copy() if not keep_norm: self.calc_norm_widlev(i) self.img_norm[i] = self.calc_lut_value(data, self.width_base[i], self.level_base[i]) #======================================================= # # User Accessible Plotting Functions # #======================================================= def set_color( self, rgbtuple=None ): """Set figure and canvas colours to be the same.""" if rgbtuple is None: rgbtuple = wx.SystemSettings.GetColour( wx.SYS_COLOUR_BTNFACE ).Get() clr = [c/255. for c in rgbtuple] self.figure.set_facecolor( clr ) self.figure.set_edgecolor( clr ) self.canvas.SetBackgroundColour( wx.Colour( *rgbtuple ) ) def change_naxes(self, n): """ Allows user to determine serially which of the N axes are included in the figure. Using this method the user supplies only the number of axes to include and the first 1:n axes in the long term storage list are added to the figure. This method also updates the axes lists in the zoom, widlev and middle functor methods. """ ncurrent = len(self.figure.axes) if n > self.naxes: return elif n < 0: return elif n == ncurrent: return self.axes = self.all_axes[0:n] # remove old axes, but don't destroy figure_axes = list(self.figure.axes) for axes in figure_axes: self.figure.delaxes(axes) # add back however many were requested for axes in self.axes: ax = self.figure.add_axes(axes) if not self.unlink: if self.zoom: self.zoom.axes = self.axes if self.widlev: self.widlev.axes = self.axes if self.roitool: self.roitool.axes = self.axes # this resets figure to have 1 or 2 or N axes shown naxes = len(self.axes) for i in range(naxes): self.figure.axes[i].change_geometry(naxes,1,i+1) self.canvas.draw() def display_naxes(self, flags): """ Allows user to specify exactly which of the N axes defined in the Init() method are included in the figure. The user has to supply a boolean list of flags of the same length as the list of all_axes. The axes that correspond to flags set to True are included in the figure. This method also updates the axes lists in the zoom, widlev and middle functor methods. 
""" ncurrent = len(self.all_axes) nflags = len(flags) if nflags != ncurrent: return faxes = list(self.figure.axes) for axes in faxes: self.figure.delaxes(axes) for i, axes in enumerate(self.axes): if flags[i] != False: ax = self.figure.add_axes(axes) if not self.unlink: if self.zoom: self.zoom.axes = self.axes if self.widlev: self.widlev.axes = self.axes if self.roitool: self.roitool.axes = self.axes self.canvas.draw() def new_axes(self, axes): if isinstance(axes, list): self.axes = axes elif isinstance(axes, matplotlib.axes.Axes): self.axes = [axes] else: return if self.zoom is not None: self.zoom.new_axes(self.axes) if self.widlev is not None: self.widlev.new_axes(self.axes) if self.roitool is not None: self.roitool.new_axes(self.axes) if self.canvas is not self.axes[0].figure.canvas: self.canvas.mpl_disconnect(self.motion_id) self.canvas = self.axes[0].figure.canvas self.motion_id = self.canvas.mpl_connect('motion_notify_event', self._on_move) if self.figure is not self.axes[0].figure: self.figure = self.axes[0].figure #======================================================= # # Default Event Handlers # #======================================================= def on_motion(self, xdata, ydata, value, bounds, iaxis): """ placeholder, overload for user defined event handling """ self._dprint('on_move, xdata='+str(xdata)+' ydata='+str(ydata)+' val='+str(value)+' bounds = '+str(bounds)+' iaxis='+str(iaxis)) def on_scroll(self, button, step, iaxis): """ placeholder, overload for user defined event handling """ self._dprint('on_move, button='+str(button)+' step='+str(step)+' iaxis='+str(iaxis)) def on_zoom_select(self, xmin, xmax, val, ymin, ymax, reset=False, iplot=None, xdata=None, ydata=None ): """ placeholder, overload for user defined event handling """ self._dprint('on_zoom_select, xmin='+str(xmin)+' xmax='+str(xmax)+' val='+str(val)+' ymin='+str(ymin)+' ymax='+str(ymax)) def on_zoom_motion(self, xmin, xmax, val, ymin, ymax, iplot=None): """ placeholder, overload for user defined event handling """ self._dprint('on_zoom_move, xmin='+str(xmin)+' xmax='+str(xmax)+' val='+str(val)+' ymin='+str(ymin)+' ymax='+str(ymax)) def on_widlev_select(self, xstr, ystr, xend, yend, indx, reset=False): """ placeholder, overload for user defined event handling """ self._dprint('ext on_widlev_select, X(str,end)='+str(xstr)+','+str(xend)+' Y(str,end)='+str(ystr)+','+str(yend)+' Index = '+str(indx)) def on_widlev_motion(self, xcur, ycur, xprev, yprev, indx): """ placeholder, overload for user defined event handling """ self._dprint('on_widlev_move, X(cur,prev)='+str(xcur)+','+str(xprev)+' Y(cur,prev)='+str(ycur)+','+str(yprev)+' Index = '+str(indx)) def on_widlev_press(self, xloc, yloc, indx): """ placeholder, overload for user defined event handling """ self._dprint('on_widlev_press, Xloc='+str(xloc)+' Yloc='+str(yloc)+' Index = '+str(indx)) def on_roi_select(self, xstr, ystr, xend, yend, indx, reset=False): """ placeholder, overload for user defined event handling """ self._dprint('ext on_roi_select, X(str,end)='+str(xstr)+','+str(xend)+' Y(str,end)='+str(ystr)+','+str(yend)+' Index = '+str(indx)) def on_roi_motion(self, xcur, ycur, xprev, yprev, indx): """ placeholder, overload for user defined event handling """ self._dprint('on_roi_move, X(cur,prev)='+str(xcur)+','+str(xprev)+' Y(cur,prev)='+str(ycur)+','+str(yprev)+' Index = '+str(indx)) def on_roi_press(self, xloc, yloc, indx): """ placeholder, overload for user defined event handling """ self._dprint('on_roi_press, Xloc='+str(xloc)+' 
Yloc='+str(yloc)+' Index = '+str(indx)) class ZoomBox: """ Select a min/max range of the x axes for a matplotlib Axes Example usage:: from matplotlib.widgets import RectangleSelector from pylab import * def onselect(xmin, xmax, value, ymin, ymax): 'eclick and erelease are matplotlib events at press and release' print ' x,y min position : (%f, %f)' % (xmin, ymin) print ' x,y max position : (%f, %f)' % (xmax, ymax) print ' used button : ', eclick.button def toggle_selector(event): print ' Key pressed.' if event.key in ['Q', 'q'] and toggle_selector.RS.active: print ' RectangleSelector deactivated.' toggle_selector.RS.set_active(False) if event.key in ['A', 'a'] and not toggle_selector.RS.active: print ' RectangleSelector activated.' toggle_selector.RS.set_active(True) x = arange(100)/(99.0) y = sin(x) fig = figure axes = subplot(111) axes.plot(x,y) toggle_selector.RS = ZoomBox(axes, onselect, drawtype='line') connect('key_press_event', toggle_selector) show() """ def __init__(self, parent, axes, drawtype='box', minspanx=None, minspany=None, useblit=False, lineprops=None, rectprops=None, do_zoom_select_event=False, do_zoom_motion_event=False, spancoords='data', button=None): """ Create a selector in axes. When a selection is made, clear the span and call onselect with onselect(pos_1, pos_2) and clear the drawn box/line. There pos_i are arrays of length 2 containing the x- and y-coordinate. If minspanx is not None then events smaller than minspanx in x direction are ignored(it's the same for y). The rect is drawn with rectprops; default rectprops = dict(facecolor='red', edgecolor = 'black', alpha=0.5, fill=False) The line is drawn with lineprops; default lineprops = dict(color='black', linestyle='-', linewidth = 2, alpha=0.5) Use type if you want the mouse to draw a line, a box or nothing between click and actual position ny setting drawtype = 'line', drawtype='box' or drawtype = 'none'. spancoords is one of 'data' or 'pixels'. If 'data', minspanx and minspanx will be interpreted in the same coordinates as the x and y axis, if 'pixels', they are in pixels button is a list of integers indicating which mouse buttons should be used for rectangle selection. You can also specify a single integer if only a single button is desired. Default is None, which does not limit which button can be used. Note, typically: 1 = left mouse button 2 = center mouse button (scroll wheel) 3 = right mouse button """ self.parent = parent self.axes = None self.canvas = None self.visible = True self.cids = [] self.active = True # for activation / deactivation self.to_draw = [] self.background = None self.axes_index = None self.do_zoom_select_event = do_zoom_select_event self.do_zoom_motion_event = do_zoom_motion_event self.useblit = useblit self.minspanx = minspanx self.minspany = minspany if button is None or isinstance(button, list): self.validButtons = button elif isinstance(button, int): self.validButtons = [button] assert(spancoords in ('data', 'pixels')) self.spancoords = spancoords self.eventpress = None # will save the data (position at mouseclick) self.eventrelease = None # will save the data (pos. 
at mouserelease) self.new_axes(axes, rectprops) def new_axes(self,axes, rectprops=None): self.axes = axes if self.canvas is not axes[0].figure.canvas: for cid in self.cids: self.canvas.mpl_disconnect(cid) self.canvas = axes[0].figure.canvas self.cids.append(self.canvas.mpl_connect('motion_notify_event', self.onmove)) self.cids.append(self.canvas.mpl_connect('button_press_event', self.press)) self.cids.append(self.canvas.mpl_connect('button_release_event', self.release)) self.cids.append(self.canvas.mpl_connect('draw_event', self.update_background)) self.cids.append(self.canvas.mpl_connect('motion_notify_event', self.onmove)) if rectprops is None: rectprops = dict(facecolor='white', edgecolor= 'black', alpha=0.5, fill=False) self.rectprops = rectprops for axes in self.axes: self.to_draw.append(Rectangle((0,0), 0, 1,visible=False,**self.rectprops)) for axes,to_draw in zip(self.axes, self.to_draw): axes.add_patch(to_draw) def update_background(self, event): 'force an update of the background' if self.useblit: self.background = self.canvas.copy_from_bbox(self.canvas.figure.bbox) def ignore(self, event): 'return True if event should be ignored' # is ZoomBox active : if not self.active: return True # is canvas locked if not self.canvas.widgetlock.available(self): return True # Only do selection if event was triggered with a desired button if self.validButtons is not None: if not event.button in self.validButtons: return True # If no button pressed yet or if it was out of the axes, ignore if self.eventpress == None: return event.inaxes not in self.axes # If a button pressed, check if the release-button is the same return (event.inaxes not in self.axes or event.button != self.eventpress.button) def press(self, event): 'on button press event' # Is the correct button pressed within the correct axes? if self.ignore(event): return # only send one motion event while selecting if self.do_zoom_motion_event: self.parent.do_motion_event = False for i in range(len(self.axes)): if event.inaxes == self.axes[i]: self.axes_index = i # make the drawn box/line visible get the click-coordinates, # button, ... for to_draw in self.to_draw: to_draw.set_visible(self.visible) self.eventpress = event return False def release(self, event): 'on button release event' if self.eventpress is None or self.ignore(event): return self.parent.SetFocus() # sets focus into Plot_Panel widget canvas # only send one motion event while selecting if self.do_zoom_motion_event: self.parent.do_motion_event = True # make the box/line invisible again for to_draw in self.to_draw: to_draw.set_visible(False) # left-click in place resets the x-axis or y-axis if self.eventpress.xdata == event.xdata and self.eventpress.ydata == event.ydata: x0, y0, x1, y1 = event.inaxes.dataLim.bounds xdel = self.parent.xscale_bump*(x1-x0) ydel = self.parent.yscale_bump*(y1-y0) for axes in self.axes: axes.set_xlim(x0-xdel,x0+x1+xdel) axes.set_ylim(y0-ydel,y0+y1+ydel) self.canvas.draw() if self.do_zoom_select_event: self.parent.on_zoom_select(x0-xdel, x0+x1+xdel, [0.0], y0-ydel, y0+y1+ydel, reset=True, iplot=self.axes_index, xdata=event.xdata, ydata=event.ydata ) return self.canvas.draw() # release coordinates, button, ... 
self.eventrelease = event if self.spancoords=='data': xmin, ymin = self.eventpress.xdata, self.eventpress.ydata xmax, ymax = self.eventrelease.xdata, self.eventrelease.ydata # calculate dimensions of box or line get values in the right # order elif self.spancoords=='pixels': xmin, ymin = self.eventpress.x, self.eventpress.y xmax, ymax = self.eventrelease.x, self.eventrelease.y else: raise ValueError('spancoords must be "data" or "pixels"') # assure that min<max values if xmin>xmax: xmin, xmax = xmax, xmin if ymin>ymax: ymin, ymax = ymax, ymin # assure that x and y values are not equal if xmin == xmax: xmax = xmin*1.0001 if ymin == ymax: ymax = ymin*1.0001 spanx = xmax - xmin spany = ymax - ymin xproblems = self.minspanx is not None and spanx<self.minspanx yproblems = self.minspany is not None and spany<self.minspany if (xproblems or yproblems): """Box too small""" # check if drawed distance (if it exists) is return # not to small in neither x nor y-direction for axes in self.axes: axes.set_xlim((xmin,xmax)) axes.set_ylim((ymin,ymax)) self.canvas.draw() data_test = event.inaxes.images!=[] if self.do_zoom_select_event and data_test: # gather the values to report in a selection event value, raw = self.parent._get_values(event) self.parent.on_zoom_select(xmin, xmax, raw, ymin, ymax, iplot=self.axes_index) # zeros are for consistency with box zoom self.axes_index = None self.eventpress = None # reset the variables to their self.eventrelease = None # inital values return False def update(self): 'draw using newfangled blit or oldfangled draw depending on useblit' if self.useblit: if self.background is not None: self.canvas.restore_region(self.background) for axes, to_draw in zip(self.axes, self.to_draw): axes.draw_artist(to_draw) self.canvas.blit(self.canvas.figure.bbox) else: self.canvas.draw_idle() return False def onmove(self, event): 'on motion notify event if box/line is wanted' if self.eventpress is None or self.ignore(event): return x,y = event.xdata, event.ydata # actual position (with # (button still pressed) minx, maxx = self.eventpress.xdata, x # click-x and actual mouse-x miny, maxy = self.eventpress.ydata, y # click-y and actual mouse-y if minx>maxx: minx, maxx = maxx, minx # get them in the right order if miny>maxy: miny, maxy = maxy, miny for to_draw in self.to_draw: to_draw.set_x(minx) # set lower left of box to_draw.set_y(miny) to_draw.set_width(maxx-minx) # set width and height of box to_draw.set_height(maxy-miny) data_test = event.inaxes.images!=[] if self.do_zoom_motion_event and data_test: # gather the values to report in a selection event value, raw = self.parent._get_values(event) self.parent.on_zoom_motion(minx, maxx, raw, miny, maxy, iplot=self.axes_index) # zeros are for consistency with box zoom self.update() return False def set_active(self, active): """ Use this to activate / deactivate the RectangleSelector from your program with an boolean variable 'active'. 
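For example (illustrative, where zoom is an existing ZoomBox instance): zoom.set_active(False) temporarily disables box selection and zoom.set_active(True) re-enables it, as in the key-press handler sketched in the class docstring above.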
""" self.active = active def get_active(self): """ to get status of active mode (boolean variable)""" return self.active class WidLevEvents: """ Act on events having to do with image scaling width and level changes see below for example usage """ def __init__(self, parent, axes, do_widlev_select_event=False, do_widlev_motion_event=False, do_widlev_press_event=False, button=None): """ Attach this to a button and up/down motions while the button is pressed will change the level value in the ImagePanel,and left/right motions will change the width value in the ImagePanel button is a list of integers indicating which mouse buttons should be used for width and level changes. You can also specify a single integer if only a single button is desired. Default is None, which does not limit which button can be used. Note, typically: 1 = left mouse button 2 = center mouse button (scroll wheel) 3 = right mouse button """ self.parent = parent self.axes = None self.canvas = None self.cids = [] self.background = None self.pressxy = None self.axes_index = None self.do_widlev_select_event = do_widlev_select_event self.do_widlev_motion_event = do_widlev_motion_event self.do_widlev_press_event = do_widlev_press_event # Needed when dragging out of axes self.buttonDown = False self.prevxy = (0,0) if button is None or isinstance(button, list): self.validButtons = button elif isinstance(button, int): self.validButtons = [button] self.eventpress = None # will save the data (position at mouseclick) self.new_axes(axes) def new_axes(self,axes): self.axes = axes if self.canvas is not axes[0].figure.canvas: for cid in self.cids: self.canvas.mpl_disconnect(cid) self.canvas = axes[0].figure.canvas self.cids.append(self.canvas.mpl_connect('motion_notify_event', self.onmove)) self.cids.append(self.canvas.mpl_connect('button_press_event', self.press)) self.cids.append(self.canvas.mpl_connect('button_release_event', self.release)) def ignore(self, event): 'return True if event should be ignored' # Only do selection if event was triggered with a desired button if self.validButtons is not None: if not event.button in self.validButtons: return True # If no button pressed yet or if it was out of the axes, ignore if self.eventpress == None: return event.inaxes not in self.axes # If a button pressed, check if the release-button is the same return (event.inaxes not in self.axes or event.button != self.eventpress.button) def press(self, event): 'on button press event' if self.ignore(event): return self.buttonDown = True # only send one motion event while selecting if self.do_widlev_motion_event: self.parent.do_widlev_event = False for i in range(len(self.axes)): if event.inaxes == self.axes[i]: self.axes_index = i self.pressxy = event.x, event.y self.prevxy = event.x, event.y if self.do_widlev_press_event: self.parent.on_widlev_press(event.x, event.y, self.axes_index) self.eventpress = event return False def release(self, event): 'on button release event' if self.pressxy is None or (self.ignore(event) and not self.buttonDown): return self.parent.SetFocus() # sets focus into Plot_Panel widget canvas self.buttonDown = False # only send one motion event while selecting if self.do_widlev_motion_event: self.parent.do_widlev_event = True xstr, ystr = self.pressxy xend = event.x yend = event.y # left-click in place resets autoscale width/level values if self.eventpress.xdata == event.xdata and self.eventpress.ydata == event.ydata: indx = self.axes_index self.parent.width[indx] = 255.0 self.parent.level[indx] = 128.0 
self.parent.update_images(index=indx) self.parent.canvas.draw() if self.do_widlev_select_event: self.parent.on_widlev_select(xstr, ystr, xend, yend, self.axes_index, True) return if self.do_widlev_select_event: self.parent.on_widlev_select(xstr, ystr, xend, yend, self.axes_index, False) self.axes_index = None self.pressxy = None self.eventpress = None return False def onmove(self, event): 'on motion notify event' if self.pressxy is None or self.ignore(event): return rate = self.parent.widlev_rate xcurr, ycurr = event.x, event.y xprev, yprev = self.prevxy self.prevxy = event.x, event.y indx = self.axes_index xdelt = np.int((xprev-xcurr)) # user set divide to slow down change ydelt = np.int((yprev-ycurr)) # due to mouse motion. if abs(ydelt) >= abs(xdelt): self.parent.level[indx] += ydelt else: self.parent.width[indx] += xdelt self.parent.update_images(index=indx) # for image in self.parent.img_hard_limits[indx]: # data_lut = self.parent.calc_lut_value(image, self.parent.width[indx], self.parent.level[indx]) # self.parent.imageid[indx].set_data(data_lut) # self.parent.figure.hold(True) # self.parent.figure.hold(False) self.parent.canvas.draw() if self.do_widlev_motion_event: self.parent.on_widlev_motion(xcurr, ycurr, xprev, yprev, self.axes_index) return False ALLOWED_SHAPES = ['circle','rectangle','ellipse','lasso'] class DraggableResizeablePatch: """ Draggable and resizeable patches with the animation blit techniques. Based on example code at http://matplotlib.sourceforge.net/users/event_handling.html This class works for Circle, Rectangle, Ellipse, and Polygon type patches. Resize and drag work for Circle, Rectangle, Ellipse types. Only drag works for Polygon since the concept of resize is indeterminate for a randomly drawn ROI. Keywords *allow_resize* - bool, if *True* a patch can be resized by dragging near its lines. *border_tol* - float, specifies how close the pointer has to be to a line for the drag to be considered a resize operation. Dragging is still possible by clicking the interior of the patch. *fixed_aspect_ratio* - bool, determines if the patch keeps its aspect ratio during resize operations. This keyword is only relevant to Rectangle and Ellipse patches *button* - list, integers (1,2 or 3) indicating which buttons are relevant to trigger patch events. 1-left, 2-middle, 3-right. 
""" lock = None # only one can be animated at a time def __init__(self, patch, border_tol=0.15, allow_resize=True, fixed_aspect_ratio=True, button=None): self.active = True self.patch = patch self.border_tol = border_tol self.allow_resize = allow_resize self.fixed_aspect_ratio = fixed_aspect_ratio self.press = None self.background = None self.cidpress = None self.cidrelease = None self.cidmotion = None if button is None or isinstance(button, list): self.validButtons = button elif isinstance(button, int): self.validButtons = [button] def ignore(self, event): 'return True if event should be ignored' # is this object set to 'active' if not self.active: return True if event.inaxes != self.patch.axes: return # was event was triggered with appropriate button if self.validButtons is not None: if not event.button in self.validButtons: return True def connect(self): 'connect to all the events we need' self.cidpress = self.patch.figure.canvas.mpl_connect( 'button_press_event', self.on_press) self.cidrelease = self.patch.figure.canvas.mpl_connect( 'button_release_event', self.on_release) self.cidmotion = self.patch.figure.canvas.mpl_connect( 'motion_notify_event', self.on_motion) def on_press(self, event): 'on button press we will see if the mouse is over us and store some data' # check all 'ignore' conditions if self.ignore(event): return # only one patch can be acted on at a time if DraggableResizeablePatch.lock is not None: return # was mouse clicked while over this object contains, attrd = self.patch.contains(event) if not contains: return xd, yd = event.xdata, event.ydata if isinstance(self.patch, matplotlib.patches.Rectangle): patch_type = 'rect' x0,y0 = self.patch.xy w0,h0 = self.patch.get_width(), self.patch.get_height() aspect_ratio = np.true_divide(w0, h0) self.press = x0, y0, w0, h0, aspect_ratio, xd, yd, patch_type elif isinstance(self.patch, matplotlib.patches.Circle): patch_type = 'circ' x0, y0 = self.patch.center press_radius = np.sqrt((x0-xd)*(x0-xd)+(y0-yd)*(y0-yd)) self.press = x0, y0, self.patch.radius, press_radius, patch_type elif isinstance(self.patch, matplotlib.patches.Ellipse): patch_type = 'ellip' x0, y0 = self.patch.center w0, h0 = self.patch.width, self.patch.height aspect_ratio = np.true_divide(w0, h0) self.press = x0, y0, w0, h0, aspect_ratio, xd, yd, patch_type elif isinstance(self.patch, matplotlib.patches.Polygon): self.press = ['poly',] else: return DraggableResizeablePatch.lock = self self.last = xd, yd canvas = self.patch.figure.canvas axes = self.patch.axes # draw everything but the selected rectangle and store the pixel buffer self.patch.set_animated(True) canvas.draw() self.background = canvas.copy_from_bbox(self.patch.axes.bbox) # now redraw just the rectangle axes.draw_artist(self.patch) # and blit just the redrawn area canvas.blit(axes.bbox) def on_motion(self, event): 'on motion we will move/resize the patch if the mouse is over us' # Is the correct button pressed within the correct axes? if self.ignore(event): return # are we the object that the press event selected? 
if DraggableResizeablePatch.lock is not self: return # we might be outside the canvas if event.xdata is None or event.ydata is None: return canvas = self.patch.figure.canvas axes = self.patch.axes self.now = event.xdata, event.ydata if self.press[-1] == 'rect': self.update_rect() elif self.press[-1] == 'circ': self.update_circ() elif self.press[-1] == 'ellip': self.update_ellip() elif self.press[-1] == 'poly': self.update_poly() self.last = event.xdata, event.ydata # restore the background region canvas.restore_region(self.background) # redraw just the current rectangle axes.draw_artist(self.patch) # blit just the redrawn area canvas.blit(axes.bbox) def on_release(self, event): 'on release we reset the press data' # are we the object that the press event selected? if DraggableResizeablePatch.lock is not self: return # reset 'press' event data self.press = None DraggableResizeablePatch.lock = None # turn off the rect animation property and reset the background self.patch.set_animated(False) self.background = None if self.patch.figure is not None: # redraw the full figure self.patch.figure.canvas.draw() def disconnect(self): 'disconnect all the stored connection ids' self.patch.figure.canvas.mpl_disconnect(self.cidpress) self.patch.figure.canvas.mpl_disconnect(self.cidrelease) self.patch.figure.canvas.mpl_disconnect(self.cidmotion) def reconnect(self): # this is a hack to enable a disconnect/connect in one step, we use # it to reorder event handling to allow last used ROI to 'float to # the top of the event callback queue' if self.patch is not None: canvas = self.patch.figure.canvas canvas.mpl_disconnect(self.cidpress) canvas.mpl_disconnect(self.cidrelease) canvas.mpl_disconnect(self.cidmotion) self.cidpress = canvas.mpl_connect( 'button_press_event', self.on_press) self.cidrelease = canvas.mpl_connect( 'button_release_event', self.on_release) self.cidmotion = canvas.mpl_connect( 'motion_notify_event', self.on_motion) def update_rect(self): """ Because we test for left/right/top/bottom, there are 8 corners, left side top/bottom, right side top/bottom, top side left/right, and bottom side left/right. I need to test for all 12 of these conditions for proper operation of resizing. 
""" xdata, ydata = self.now x0, y0, w0, h0, aspect_ratio, xpress, ypress, patch_type = self.press dx, dy = xdata-xpress, ydata-ypress bt = self.border_tol fixed_ar = self.fixed_aspect_ratio if (not self.allow_resize or (abs(x0+np.true_divide(w0,2)-xpress)<np.true_divide(w0,2)-bt*w0 and abs(y0+np.true_divide(h0,2)-ypress)<np.true_divide(h0,2)-bt*h0)): # drag event self.patch.set_x(x0+dx) self.patch.set_y(y0+dy) elif abs(x0-xpress)<bt*w0: # resize from left of rect self.patch.set_x(x0+dx) self.patch.set_width(w0-dx) if abs(y0-ypress)<bt*h0: # corner grab - covers both left-bottom and bottom-left dy = np.true_divide(dx, aspect_ratio) self.patch.set_y(y0+dy) self.patch.set_height(h0-dy) elif abs(y0+h0-ypress)<bt*h0: # corner grab - covers both left-top and top-left dy = np.true_divide(dx, aspect_ratio) self.patch.set_height(h0-dy) elif fixed_ar: # left side grab with fixed_aspect dy = np.true_divide(dx, aspect_ratio) dy2 = np.true_divide(dy, 2.0) self.patch.set_y(y0+dy2) self.patch.set_height(h0-dy) elif abs(x0+w0-xpress)<bt*w0: # resize from right of rect self.patch.set_width(w0+dx) if abs(y0-ypress)<bt*h0: # corner grab - covers both right-bottom and bottom-right dy = np.true_divide(dx, aspect_ratio) self.patch.set_y(y0-dy) self.patch.set_height(h0+dy) elif abs(y0+h0-ypress)<bt*h0: # corner grab - covers both right-top and top-right dy = np.true_divide(dx, aspect_ratio) self.patch.set_height(h0+dy) elif fixed_ar: # right side grab with fixed_aspect dy = np.true_divide(dx, aspect_ratio) dy2 = np.true_divide(dy, 2.0) self.patch.set_y(y0-dy2) self.patch.set_height(h0+dy) elif abs(y0-ypress)<bt*h0: # resize from bottom of rect self.patch.set_y(y0+dy) self.patch.set_height(h0-dy) if fixed_ar: # bottom side grab with fixed_aspect dx = dy*aspect_ratio dx2 = np.true_divide(dx, 2.0) self.patch.set_x(x0+dx2) self.patch.set_width(w0-dx) elif abs(y0+h0-ypress)<bt*h0: # resize from top of rect self.patch.set_height(h0+dy) if fixed_ar: # top grab with fixed aspect dx = dy*aspect_ratio dx2 = np.true_divide(dx, 2.0) self.patch.set_x(x0-dx2) self.patch.set_width(w0+dx) def update_circ(self): x0, y0, orig_radius, press_radius, patch_type = self.press xd, yd = self.now lx, ly = self.last bt = self.border_tol new_radius = np.sqrt((x0-xd)*(x0-xd)+(y0-yd)*(y0-yd)) delta = new_radius - press_radius if (not self.allow_resize or orig_radius-press_radius > bt*orig_radius*2): # update for drag self.patch.center = [self.patch.center[0]+(xd-lx),self.patch.center[1]+(yd-ly)] elif abs(orig_radius-press_radius)<bt*orig_radius*2: # update for resize self.patch.set_radius(orig_radius+delta) def update_ellip(self): xdata, ydata = self.now x0, y0, w0, h0, aspect_ratio, xpress, ypress, patch_type = self.press dx, dy = xdata-xpress, ydata-ypress w2 = np.true_divide(w0, 2.0) h2 = np.true_divide(h0, 2.0) bt = self.border_tol fixed_ar = self.fixed_aspect_ratio if (not self.allow_resize or (abs(x0-xpress)<w2-bt*w0 and abs(y0-ypress)<h2-bt*h0)): # drag event self.patch.center = [x0+dx, y0+dy] elif abs(x0-w2-xpress)<bt*w0: # resize from left of ellip self.patch.width = (w0-dx) if fixed_ar: # left side grab with fixed_aspect dy = np.true_divide(dx, aspect_ratio) self.patch.height = (h0-dy) elif abs(x0+w2-xpress)<bt*w0: # resize from right of ellip self.patch.width = (w0+dx) if fixed_ar: # right side grab with fixed_aspect dy = np.true_divide(dx, aspect_ratio) self.patch.height = (h0+dy) elif abs(y0-h2-ypress)<bt*h0: # resize from bottom of ellip self.patch.height = h0-dy if fixed_ar: # bottom side grab with fixed_aspect dx = 
dy*aspect_ratio self.patch.width = (w0-dx) elif abs(y0+h2-ypress)<bt*h0: # resize from top of ellip self.patch.height = (h0+dy) if fixed_ar: # top grab with fixed aspect dx = dy*aspect_ratio self.patch.width = (w0+dx) def update_poly(self): patch_type = self.press xl, yl = self.last xn, yn = self.now delta = xl-xn, yl-yn # update for drag xys = self.patch.get_xy() - delta self.patch.set_xy(xys) class RoiTool: """ Select none, one or more Regions of Interest Setup: axes - object, the Axes to contain ROIs, only one allowed! parent - deprecated button - list, integers indicating which mouse buttons should be used for ROI selection., 1-left, 2-middle, 3-right mouse button props_draw - dict, contains the properties of the ROI patch shown when an ROI is being drawn, default is; dict(facecolor='yellow', edgecolor='black', alpha=0.2, linewidth=2.0) props_roi - dict, contains the properties of the ROI patch shown after it is drawn, default is; dict(facecolor='magenta', edgecolor='black', alpha=0.2) Controls: Basic actions can be controlled with a single button, selected using the 'button' property on initialization. - Create ROI - click and drag on canvas background then release - Resize ROI - click on existing ROI near edge and drag - Move ROI - click in middle of existing ROI away from edge and drag - Delete ROI - double click within an existing ROI Other Actions: Assume myroi = RoiTool( [tmp.axes,], button=3 ) and draw a few ROIs To change the shape of the ROI use the set_shape() method, shape values have to be one of those in the ALLOWED_SHAPES constant: myroi.set_shape('circle') To get a list of ROI masks use the get_roi_masks() method. This returns a list of lists, one for each ROI in the axes. Each ROI list is a 1D set of booleans that indicate if a voxel was in the ROI. The 1D list is basically a 'ravel' of the image, so it can be reconstituted via: rois, dims = myroi.get_roi_masks() # dims are the 2D image dimensions masks = [] for roi in rois: roi = numpy.array(roi) # convert boolean list to array roi.shape = dims[0], dims[1] # reshape array to 2D masks.append(roi) # add to masks list Example: import matplotlib.pyplot as plt a = dist(128) # this returns a 128x128 array with an example image fig = plt.figure() # returns Figure object tmp = plt.imshow(a) # returns Axes object myroi = RoiTool( [tmp.axes,], button=3 ) myroi.set_shape('circle') # sets ROI shape to a circle """ def __init__(self, axes, button=None, roi_shape = 'circle', props_draw=None, props_roi=None, useblit=False): self.axes = None self.canvas = None self.cids = [] self.rois = [] self.xys = [] self.roi_draw = False self.last_roi = None # last ROI patch selected by a press event, used to update event ordering self.poly_xys = None # only used when roi_shape is 'lasso' self.active = True # for activation / deactivation self.to_draw = None self.background = None self.useblit = useblit # set initial shape of ROI if roi_shape not in ALLOWED_SHAPES: raise ValueError("ROI shape='%s' not allowed, returning." 
% (roi_shape)) self.roi_shape = roi_shape # ensure that properties for roi regions exist if props_roi is None: props_roi = dict(facecolor='magenta', edgecolor='black', alpha=0.2) self.props_roi = props_roi if button is None or isinstance(button, list): self.validButtons = button elif isinstance(button, int): self.validButtons = [button] self.eventpress = None # will save the data (x,y at mouseclick) self.eventrelease = None # will save the data (x,y at mouserelease) self.new_axes(axes, props_draw) def new_axes(self, axes, props_draw=None): self.axes = axes if self.canvas is not self.axes.figure.canvas: for cid in self.cids: self.canvas.mpl_disconnect(cid) self.canvas = self.axes.figure.canvas self.cids.append(self.canvas.mpl_connect('motion_notify_event', self.onmove)) self.cids.append(self.canvas.mpl_connect('button_press_event', self.press)) self.cids.append(self.canvas.mpl_connect('button_release_event', self.release)) self.cids.append(self.canvas.mpl_connect('draw_event', self.update_background)) self.cids.append(self.canvas.mpl_connect('motion_notify_event', self.onmove)) if props_draw is None: props_draw = dict(facecolor='yellow', edgecolor= 'black', alpha=0.2, linewidth=2.0) self.props_draw = props_draw self.set_to_draw() def set_to_draw(self): """ The to_draw attribute stores the patch that is used when interactively drawing an ROI. We get rid of existing patches in to_draw list and from the axes. Then repopulate to_draw list with new patch of whatever shape. """ # remove previous ROI patches, if needed if self.to_draw is not None: self.axes.patches.remove(self.to_draw) self.to_draw = None # create ROI template of selected shape if self.roi_shape == 'circle': self.to_draw = Circle((0,0), radius=1, visible=False, **self.props_draw) elif self.roi_shape == 'ellipse': self.to_draw = Ellipse((0,0), 1, 1, visible=False, **self.props_draw) elif self.roi_shape == 'rectangle': self.to_draw = Rectangle((0,0), 0, 1, visible=False, **self.props_draw) elif self.roi_shape == 'lasso': start_path = [[0,0], [0,1], [1,1], [1,0]] self.to_draw = Polygon(start_path, visible=False, **self.props_draw) # add ROI template patch to each axis self.axes.add_patch(self.to_draw) def set_shape(self, shape): 'change shape of the roi drawn to one in the allowed shapes constant' if shape in ALLOWED_SHAPES: self.roi_shape = shape self.set_to_draw() def update_background(self, event): 'force an update of the background' if self.useblit: self.background = self.canvas.copy_from_bbox(self.canvas.figure.bbox) def ignore(self, event): ''' return True if event should be ignored Note. The order of these tests is pretty important for proper behavior. Add or reorganize at your own risk. ''' # is RoiTool active? 
if not self.active: return True # was event triggered with appropriate button if self.validButtons is not None: if not event.button in self.validButtons: return True # has an ROI drag/resize event already started if DraggableResizeablePatch.lock is not None: return True # is canvas locked if not self.canvas.widgetlock.available(self): return True for roi in self.rois: contains, attrd = roi.patch.contains(event) if contains and not self.roi_draw: # roi_draw flag indicates that we are currently drawing an roi # so we don't need to ignore a motion event just because the # cursor has moved over an existing ROI patch return True # If no button pressed yet or if it was out of the axes, ignore if self.eventpress == None: return event.inaxes != self.axes # If a button pressed, check if the release-button is the same return (event.inaxes != self.axes or event.button != self.eventpress.button) def in_roi(self, event): for roi in self.rois: contains, attrd = roi.patch.contains(event) if contains: return roi return None def press(self, event): 'on button press event' # on double click over an ROI, delete it if event.dblclick: # was event triggered with appropriate button if self.validButtons is not None: if event.button not in self.validButtons: return True roi = self.in_roi(event) if roi is not None: # remove from the two locations roi was saved in release() if roi.patch in self.axes.patches: self.axes.patches.remove(roi.patch) self.rois.remove(roi) self.canvas.draw() return # check global 'ignore' conditions if self.ignore(event): return self.roi_draw = True if self.roi_shape == 'lasso': self.poly_xys = [[event.xdata,event.ydata],] self.to_draw.set_xy(self.poly_xys) self.to_draw.set_visible(True) # make the drawn patch visible ... self.to_draw.set_visible(True) self.eventpress = event return False def release(self, event): """on button release event Note. This release event occurs prior to the DragableResizablePatch on_release event happening. We saved reference to the last ROI interacted with in onmove() method. We'd like last ROI to 'rise to the top' of the hierarchy. E.g. if it overlaps other ROIs and we click on the overlap region, the last one is the one selected for the event interaction. We do this by disconnect/reconnecting the ROI patch events handlers. It seems that first in -> first served when events served. So we move the self.last_roi reference to the 0 index of the rois list and reconnect() all the rois. Clunky but it works. """ # reorder ROI events handling to favor last_roi if self.last_roi is not None: if self.last_roi in self.rois: self.rois.remove(self.last_roi) self.rois.insert(0,self.last_roi) for roi in self.rois: roi.reconnect() self.last_roi = None # indicate we are not in an ROI event anymore if self.eventpress is None or self.ignore(event): return # make the box/line invisible again self.to_draw.set_visible(False) self.canvas.draw() # release coordinates, button, ... 
self.eventrelease = event x0, y0 = self.eventpress.xdata, self.eventpress.ydata xmin, ymin = self.eventpress.xdata, self.eventpress.ydata xmax, ymax = self.eventrelease.xdata, self.eventrelease.ydata if xmin>xmax: xmin, xmax = xmax, xmin if ymin>ymax: ymin, ymax = ymax, ymin # assure that x and y values are not equal if xmin == xmax: xmax = xmin*1.0001 if ymin == ymax: ymax = ymin*1.0001 spanx = xmax - xmin spany = ymax - ymin if self.roi_draw: if isinstance(self.to_draw, matplotlib.patches.Rectangle): patch = Rectangle([xmin,ymin], spanx, spany, **self.props_roi) elif isinstance(self.to_draw, matplotlib.patches.Circle): radius = np.sqrt((xmax-xmin)*(xmax-xmin)+(ymax-ymin)*(ymax-ymin)) patch = Circle([x0, y0], radius=radius, **self.props_roi) elif isinstance(self.to_draw, matplotlib.patches.Ellipse): patch = Ellipse([x0, y0], spanx*2, spany*2, **self.props_roi) elif isinstance(self.to_draw, matplotlib.patches.Polygon): xys = self.to_draw.get_xy() patch = Polygon(xys, **self.props_roi) self.axes.add_patch(patch) dr = DraggableResizeablePatch(patch, fixed_aspect_ratio=False, button=self.validButtons) dr.connect() self.rois.insert(0,dr) # ensure last drawn ROI is top of event tree for roi in self.rois: roi.reconnect() self.roi_draw = False self.canvas.draw() self.eventpress = None # reset the variables to their self.eventrelease = None # inital values return False def update(self): 'draw using newfangled blit or oldfangled draw depending on useblit' if self.useblit: if self.background is not None: self.canvas.restore_region(self.background) self.axes.draw_artist(self.to_draw) self.canvas.blit(self.canvas.figure.bbox) else: self.canvas.draw_idle() return False def onmove(self, event): """ Note. The self.press event occurs prior to the DragableResizablePatch on_press event happening. So, we have to put the code to save the self.last_roi reference into this self.onmove method because we get the value from the DragableResizablePatch.lock attribute. """ # save ref to last ROI interacted with, used in release() if DraggableResizeablePatch.lock is not None: self.last_roi = DraggableResizeablePatch.lock if self.eventpress is None or self.ignore(event): return x,y = event.xdata, event.ydata # actual position (with # (button still pressed) if isinstance(self.to_draw, matplotlib.patches.Rectangle): minx, maxx = self.eventpress.xdata, x # click-x and actual mouse-x miny, maxy = self.eventpress.ydata, y # click-y and actual mouse-y if minx>maxx: minx, maxx = maxx, minx # get them in the right order if miny>maxy: miny, maxy = maxy, miny self.to_draw.set_x(minx) # set lower left of box self.to_draw.set_y(miny) self.to_draw.set_width(maxx-minx) # set width and height of box self.to_draw.set_height(maxy-miny) elif isinstance(self.to_draw, matplotlib.patches.Circle): x0, y0 = self.eventpress.xdata, self.eventpress.ydata radius = np.sqrt((x0-x)*(x0-x)+(y0-y)*(y0-y)) self.to_draw.center = [x0,y0] self.to_draw.set_radius(radius) elif isinstance(self.to_draw, matplotlib.patches.Ellipse): x0, y0 = self.eventpress.xdata, self.eventpress.ydata width = 2.0 * np.abs(x-x0) height = 2.0 * np.abs(y-y0) self.to_draw.center = x0,y0 self.to_draw.width = (width) self.to_draw.height = (height) elif isinstance(self.to_draw, matplotlib.patches.Polygon): self.poly_xys.append([x,y]) self.to_draw.set_xy(self.poly_xys) self.update() return False def set_active(self, active): """ Use this to activate / deactivate the RectangleSelector from your program with an boolean variable 'active'. 
""" self.active = active def get_active(self): """ to get status of active mode (boolean variable)""" return self.active def set_xys(self): self.xys = [] nx, ny = self.axes.images[0].get_array().shape points = [(val % nx, int(val/nx)) for val in range(nx * ny)] self.xys.append(points) def get_roi_masks(self): """ Returns a list of lists, and a tuple of image x,y dims. Outer list is for each ROI in the rois list, and inner list is a boolean for each point in the plot. True if in the current ROI or False if outside. """ if not self.rois: return None, None self.set_xys() masks = [] for roi in self.rois: # Get the path and the affine transformation path = roi.patch.get_path() transform = roi.patch.get_patch_transform() # Now apply the transform to the path p = transform.transform_path(path) indx = p.contains_points(self.xys[0]) masks.append(indx) dims = self.axes.images[0].get_array().shape return masks, dims # ----------------------------------------------------------------------------- # Test Code class util_CreateMenuBar: """ Example of the menuData function that needs to be in the program in which you are creating a Menu def menuData(self): return [("&File", ( ("&New", "New Sketch file", self.OnNew), ("&Open", "Open sketch file", self.OnOpen), ("&Save", "Save sketch file", self.OnSave), ("", "", ""), ("&Color", ( ("&Black", "", self.OnColor, wx.ITEM_RADIO), ("&Red", "", self.OnColor, wx.ITEM_RADIO), ("&Green", "", self.OnColor, wx.ITEM_RADIO), ("&Blue", "", self.OnColor, wx.ITEM_RADIO), ("&Other...", "", self.OnOtherColor, wx.ITEM_RADIO))), ("", "", ""), ("About...", "Show about window", self.OnAbout), ("&Quit", "Quit the program", self.OnCloseWindow)))] """ def __init__(self, self2): menuBar = wx.MenuBar() for eachMenuData in self2.menuData(): menuLabel = eachMenuData[0] menuItems = eachMenuData[1] menuBar.Append(self.createMenu(self2, menuItems), menuLabel) self2.SetMenuBar(menuBar) def createMenu(self, self2, menuData): menu = wx.Menu() for eachItem in menuData: if len(eachItem) == 2: label = eachItem[0] subMenu = self.createMenu(self2, eachItem[1]) menu.Append(wx.ID_ANY, label, subMenu) else: self.createMenuItem(self2, menu, *eachItem) return menu def createMenuItem(self, self2, menu, label, status, handler, kind=wx.ITEM_NORMAL): if not label: menu.AppendSeparator() return menuItem = menu.Append(-1, label, status, kind) self2.Bind(wx.EVT_MENU, handler, menuItem) class DemoImagePanel(ImagePanel): """Plots several lines in distinct colors.""" # Activate event messages _EVENT_DEBUG = True def __init__( self, parent, tab, **kwargs ): # initiate plotter ImagePanel.__init__( self, parent, **kwargs ) self.tab = tab self.top = wx.GetApp().GetTopWindow() self.parent = parent self.count = 0 def on_motion(self, xdata, ydata, value, bounds, iaxis): self.top.statusbar.SetStatusText( " Value = %s" % (str(value), ), 0) self.top.statusbar.SetStatusText( " X,Y = %i,%i" % (xdata,ydata) , 1) def on_scroll(self, button, step, iaxis): pass #self.set_vertical_scale(step) def on_zoom_motion(self, xmin, xmax, val, ymin, ymax, iplot=None): delta = xmax - xmin self.top.statusbar.SetStatusText(( " Point Range = %.2f to %.2f" % (xmin, xmax)), 0) self.top.statusbar.SetStatusText(( " dPoints = %i " % (delta, )), 2) def on_widlev_motion(self, xcur, ycur, xprev, yprev, indx): pass def on_widlev_select(self, xstr, ystr, xend, yend, indx, reset=False): pass class MyFrame(wx.Frame): def __init__(self, title="New Title Please", size=(350,200)): wx.Frame.__init__(self, None, title=title, pos=(150,150), size=size) 
self.Bind(wx.EVT_CLOSE, self.on_close) util_CreateMenuBar(self) self.statusbar = self.CreateStatusBar(4, 0) self.size_small = 64 self.size_medium = 128 self.size_large = 256 data1 = { 'data' : self.dist(self.size_medium), 'alpha' : 1.0 } data2 = { 'data' : 100-self.dist(self.size_medium), 'alpha' : 0.5, 'cmap' : cm.hsv, } data = [[data1], [data2]] #data = [[data1], ] self.nb = wx.Notebook(self, -1, style=wx.BK_BOTTOM) panel1 = wx.Panel(self.nb, -1) self.view = DemoImagePanel( panel1, self, naxes=2, zoom='box', widlev=True, roitool='box', unlink=True, do_zoom_select_event=True, do_zoom_motion_event=True, do_widlev_select_event=True, do_widlev_motion_event=True, do_roi_select_event=False, do_roi_motion_event=False, do_scroll_event=True, xscale_bump=0.0, yscale_bump=0.0, data = data, colormap=cm.gray, ) sizer = wx.BoxSizer(wx.VERTICAL) sizer.Add(self.view, 1, wx.LEFT | wx.TOP | wx.EXPAND) panel1.SetSizer(sizer) self.view.Fit() self.nb.AddPage(panel1, "One") self.view.set_color( (255,255,255) ) def menuData(self): return [("&File", ( ("", "", ""), ("&Quit", "Quit the program", self.on_close))), ("ROI Shape", ( ("Circle", "Set ROI shape to circle", self.on_roi_circle), ("Rectangle", "Set ROI shape to rectangle", self.on_roi_rectangle), ("Lasso", "Set ROI shape to lasso", self.on_roi_lasso), ("Ellipse", "Set ROI shape to ellipse", self.on_roi_ellipse))), ("Tests", ( ("Show all Three", "", self.on_show_three, wx.ITEM_RADIO), ("Show only One", "", self.on_show_one, wx.ITEM_RADIO), ("", "", ""), ("Set Small Images - keep norm", "", self.on_small_images_keep_norm), ("Set Medium Images - keep norm", "", self.on_medium_images_keep_norm), ("Set Large Images - keep norm", "", self.on_large_images_keep_norm), ("", "", ""), ("Set Small Images", "", self.on_small_images), ("Set Medium Images", "", self.on_medium_images), ("Set Large Images", "", self.on_large_images), ("", "", ""), ("Recalc Norm", "", self.on_recalc_norm), ("", "", ""), ("Placeholder", "non-event", self.on_placeholder)))] def on_close(self, event): dlg = wx.MessageDialog(self, "Do you really want to close this application?", "Confirm Exit", wx.OK|wx.CANCEL|wx.ICON_QUESTION) result = dlg.ShowModal() dlg.Destroy() if result == wx.ID_OK: self.Destroy() def on_roi_circle(self, event): for roitool in self.view.roitool: roitool.set_shape('circle') def on_roi_rectangle(self, event): for roitool in self.view.roitool: roitool.set_shape('rectangle') def on_roi_lasso(self, event): for roitool in self.view.roitool: roitool.set_shape('lasso') def on_roi_ellipse(self, event): for roitool in self.view.roitool: roitool.set_shape('ellipse') def on_placeholder(self, event): """ bob = [True, False, False, True] """ print("Event handler for on_placeholder - not implemented") #FIXME - bjs, there's a bug where initial image and set of rois in top axis, get # flipped the first time this is called. Need to see if this happens due # to set_data() or update() calls, or something with ROIs. 
masks, dims = self.view.roitool[0].get_roi_masks() if masks is None: return bmasks = [] for mask in masks: bmask = [1.0 if item else 0.0 for item in mask] bmask = np.array(bmask, dtype=np.float) bmask.shape = dims np.flipud(bmask) bmasks.append(bmask) data1 = { 'data' : self.dist(dims[0]), 'alpha' : 1.0 } d2 = 100-self.dist(dims[0]) for bmask in bmasks: d2 += bmask*30 data2 = { 'data' : d2, 'alpha' : 0.5, 'cmap' : cm.hsv, } data = [[data1], [data2]] self.view.set_data(data) self.view.update(no_draw=True, keep_norm=True) self.view.canvas.draw() def on_show_one(self, event): self.view.change_naxes(1) def on_show_three(self, event): self.view.change_naxes(3) def on_small_images_keep_norm(self, event): self.on_small_images(event, keep_norm=True) def on_small_images(self, event, keep_norm=False): data1 = { 'data' : self.dist(self.size_small), 'alpha' : 1.0 } data2 = { 'data' : 100-self.dist(self.size_small), 'alpha' : 0.5, 'cmap' : cm.hsv, } data = [[data1], [data2]] self.view.set_data(data) self.view.update(no_draw=True, keep_norm=keep_norm) self.view.canvas.draw() def on_medium_images_keep_norm(self, event): self.on_medium_images(event, keep_norm=True) def on_medium_images(self, event, keep_norm=False): data1 = { 'data' : self.dist(self.size_medium), 'alpha' : 1.0 } data2 = { 'data' : 100-self.dist(self.size_medium), 'alpha' : 0.5, 'cmap' : cm.hsv, } data = [[data1], [data2]] self.view.set_data(data) self.view.update(no_draw=True, keep_norm=keep_norm) self.view.canvas.draw() def on_large_images_keep_norm(self, event): self.on_large_images(event, keep_norm=True) def on_large_images(self, event, keep_norm=False): data1 = { 'data' : self.dist(self.size_large), 'alpha' : 1.0 } data2 = { 'data' : 100-self.dist(self.size_large), 'alpha' : 0.5, 'cmap' : cm.hsv, } data = [[data1], [data2]] self.view.set_data(data) self.view.update(no_draw=True, keep_norm=keep_norm) self.view.canvas.draw() def on_recalc_norm(self, event): self.view.apply_norm_widlev(keep_norm=False) self.view.update_images() self.view.canvas.draw() def dist(self, n, m=None): """ Return a rectangular array in which each pixel = euclidian distance from the origin. """ n1 = n m1 = m if m else n x = np.arange(n1) x = np.array([val**2 if val < (n1-val) else (n1-val)**2 for val in x ]) a = np.ndarray((n1,m1),float) # Make array for i in range(int((m1/2)+1)): # Row loop y = np.sqrt(x + i**2.0) # Euclidian distance a[i,:] = y # Insert the row if i != 0: a[m1-i,:] = y # Symmetrical return a #------------------------------------------------------------------------------ if __name__ == '__main__': # app = wx.App( 0 ) app = wx.App( False ) frame = MyFrame( title='Image Panel with RoiTool Example', size=(600,600) ) frame.Show() app.MainLoop()
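Outside of `RoiTool`, `DraggableResizeablePatch` can also be attached directly to any supported matplotlib patch. The following is a minimal standalone sketch; the class is assumed to be importable from this module and the variable names are illustrative, not part of the file above.

```python
# Minimal sketch of using DraggableResizeablePatch on its own, without RoiTool.
# Assumes the class defined above is importable; all names below are illustrative.
import matplotlib.pyplot as plt
from matplotlib.patches import Rectangle

fig, ax = plt.subplots()
ax.set_xlim(0, 10)
ax.set_ylim(0, 10)

# A rectangle that can be dragged from its interior and resized near its edges.
rect = Rectangle((2, 2), 3, 2, facecolor='magenta', edgecolor='black', alpha=0.2)
ax.add_patch(rect)

dr = DraggableResizeablePatch(rect, border_tol=0.15, allow_resize=True,
                              fixed_aspect_ratio=False, button=[1])
dr.connect()  # hook up the press/motion/release handlers

plt.show()
```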
PypiClean
/django-backtalk-1.0.tar.gz/django-backtalk-1.0/backtalk/templatetags/comments.py
from django import template from django.template.loader import render_to_string from django.conf import settings from django.contrib.contenttypes.models import ContentType from django.utils import six from django.utils.deprecation import RenameMethodsBase from django.utils.encoding import smart_text import backtalk register = template.Library() class RenameBaseCommentNodeMethods(RenameMethodsBase): renamed_methods = ( ('get_query_set', 'get_queryset', PendingDeprecationWarning), ) class BaseCommentNode(six.with_metaclass(RenameBaseCommentNodeMethods, template.Node)): """ Base helper class (abstract) for handling the get_comment_* template tags. Looks a bit strange, but the subclasses below should make this a bit more obvious. """ @classmethod def handle_token(cls, parser, token): """Class method to parse get_comment_list/count/form and return a Node.""" tokens = token.split_contents() if tokens[1] != 'for': raise template.TemplateSyntaxError("Second argument in %r tag must be 'for'" % tokens[0]) # {% get_whatever for obj as varname %} if len(tokens) == 5: if tokens[3] != 'as': raise template.TemplateSyntaxError("Third argument in %r must be 'as'" % tokens[0]) return cls( object_expr = parser.compile_filter(tokens[2]), as_varname = tokens[4], ) # {% get_whatever for app.model pk as varname %} elif len(tokens) == 6: if tokens[4] != 'as': raise template.TemplateSyntaxError("Fourth argument in %r must be 'as'" % tokens[0]) return cls( ctype = BaseCommentNode.lookup_content_type(tokens[2], tokens[0]), object_pk_expr = parser.compile_filter(tokens[3]), as_varname = tokens[5] ) else: raise template.TemplateSyntaxError("%r tag requires 4 or 5 arguments" % tokens[0]) @staticmethod def lookup_content_type(token, tagname): try: app, model = token.split('.') return ContentType.objects.get_by_natural_key(app, model) except ValueError: raise template.TemplateSyntaxError("Third argument in %r must be in the format 'app.model'" % tagname) except ContentType.DoesNotExist: raise template.TemplateSyntaxError("%r tag has non-existant content-type: '%s.%s'" % (tagname, app, model)) def __init__(self, ctype=None, object_pk_expr=None, object_expr=None, as_varname=None, comment=None): if ctype is None and object_expr is None: raise template.TemplateSyntaxError("Comment nodes must be given either a literal object or a ctype and object pk.") self.comment_model = backtalk.get_model() self.as_varname = as_varname self.ctype = ctype self.object_pk_expr = object_pk_expr self.object_expr = object_expr self.comment = comment def render(self, context): qs = self.get_queryset(context) context[self.as_varname] = self.get_context_value_from_queryset(context, qs) return '' def get_queryset(self, context): ctype, object_pk = self.get_target_ctype_pk(context) if not object_pk: return self.comment_model.objects.none() qs = self.comment_model.objects.filter( content_type = ctype, object_pk = smart_text(object_pk), site__pk = settings.SITE_ID, ) # The is_public and is_removed fields are implementation details of the # built-in comment model's spam filtering system, so they might not # be present on a custom comment model subclass. If they exist, we # should filter on them. 
field_names = [f.name for f in self.comment_model._meta.fields] if 'is_public' in field_names: qs = qs.filter(is_public=True) if getattr(settings, 'COMMENTS_HIDE_REMOVED', True) and 'is_removed' in field_names: qs = qs.filter(is_removed=False) return qs def get_target_ctype_pk(self, context): if self.object_expr: try: obj = self.object_expr.resolve(context) except template.VariableDoesNotExist: return None, None return ContentType.objects.get_for_model(obj), obj.pk else: return self.ctype, self.object_pk_expr.resolve(context, ignore_failures=True) def get_context_value_from_queryset(self, context, qs): """Subclasses should override this.""" raise NotImplementedError class CommentListNode(BaseCommentNode): """Insert a list of comments into the context.""" def get_context_value_from_queryset(self, context, qs): return list(qs) class CommentCountNode(BaseCommentNode): """Insert a count of comments into the context.""" def get_context_value_from_queryset(self, context, qs): return qs.count() class CommentFormNode(BaseCommentNode): """Insert a form for the comment model into the context.""" def get_form(self, context): obj = self.get_object(context) if obj: return backtalk.get_form()(obj) else: return None def get_object(self, context): if self.object_expr: try: return self.object_expr.resolve(context) except template.VariableDoesNotExist: return None else: object_pk = self.object_pk_expr.resolve(context, ignore_failures=True) return self.ctype.get_object_for_this_type(pk=object_pk) def render(self, context): context[self.as_varname] = self.get_form(context) return '' class RenderCommentFormNode(CommentFormNode): """Render the comment form directly""" @classmethod def handle_token(cls, parser, token): """Class method to parse render_comment_form and return a Node.""" tokens = token.split_contents() if tokens[1] != 'for': raise template.TemplateSyntaxError("Second argument in %r tag must be 'for'" % tokens[0]) # {% render_comment_form for obj %} if len(tokens) == 3: return cls(object_expr=parser.compile_filter(tokens[2])) # {% render_comment_form for app.models pk %} elif len(tokens) == 4: return cls( ctype = BaseCommentNode.lookup_content_type(tokens[2], tokens[0]), object_pk_expr = parser.compile_filter(tokens[3]) ) def render(self, context): ctype, object_pk = self.get_target_ctype_pk(context) if object_pk: template_search_list = [ "backtalk/%s/%s/form.html" % (ctype.app_label, ctype.model), "backtalk/%s/form.html" % ctype.app_label, "backtalk/form.html" ] context.push() formstr = render_to_string(template_search_list, {"form" : self.get_form(context)}, context) context.pop() return formstr else: return '' class RenderCommentListNode(CommentListNode): """Render the comment list directly""" @classmethod def handle_token(cls, parser, token): """Class method to parse render_comment_list and return a Node.""" tokens = token.split_contents() if tokens[1] != 'for': raise template.TemplateSyntaxError("Second argument in %r tag must be 'for'" % tokens[0]) # {% render_comment_list for obj %} if len(tokens) == 3: return cls(object_expr=parser.compile_filter(tokens[2])) # {% render_comment_list for app.models pk %} elif len(tokens) == 4: return cls( ctype = BaseCommentNode.lookup_content_type(tokens[2], tokens[0]), object_pk_expr = parser.compile_filter(tokens[3]) ) def render(self, context): ctype, object_pk = self.get_target_ctype_pk(context) if object_pk: template_search_list = [ "backtalk/%s/%s/list.html" % (ctype.app_label, ctype.model), "backtalk/%s/list.html" % ctype.app_label, 
"backtalk/list.html" ] qs = self.get_queryset(context) context.push() liststr = render_to_string(template_search_list, { "comment_list" : self.get_context_value_from_queryset(context, qs) }, context) context.pop() return liststr else: return '' # We could just register each classmethod directly, but then we'd lose out on # the automagic docstrings-into-admin-docs tricks. So each node gets a cute # wrapper function that just exists to hold the docstring. @register.tag def get_comment_count(parser, token): """ Gets the comment count for the given params and populates the template context with a variable containing that value, whose name is defined by the 'as' clause. Syntax:: {% get_comment_count for [object] as [varname] %} {% get_comment_count for [app].[model] [object_id] as [varname] %} Example usage:: {% get_comment_count for event as comment_count %} {% get_comment_count for calendar.event event.id as comment_count %} {% get_comment_count for calendar.event 17 as comment_count %} """ return CommentCountNode.handle_token(parser, token) @register.tag def get_comment_list(parser, token): """ Gets the list of comments for the given params and populates the template context with a variable containing that value, whose name is defined by the 'as' clause. Syntax:: {% get_comment_list for [object] as [varname] %} {% get_comment_list for [app].[model] [object_id] as [varname] %} Example usage:: {% get_comment_list for event as comment_list %} {% for comment in comment_list %} ... {% endfor %} """ return CommentListNode.handle_token(parser, token) @register.tag def render_comment_list(parser, token): """ Render the comment list (as returned by ``{% get_comment_list %}``) through the ``backtalk/list.html`` template Syntax:: {% render_comment_list for [object] %} {% render_comment_list for [app].[model] [object_id] %} Example usage:: {% render_comment_list for event %} """ return RenderCommentListNode.handle_token(parser, token) @register.tag def get_comment_form(parser, token): """ Get a (new) form object to post a new comment. Syntax:: {% get_comment_form for [object] as [varname] %} {% get_comment_form for [app].[model] [object_id] as [varname] %} """ return CommentFormNode.handle_token(parser, token) @register.tag def render_comment_form(parser, token): """ Render the comment form (as returned by ``{% render_comment_form %}``) through the ``backtalk/form.html`` template. Syntax:: {% render_comment_form for [object] %} {% render_comment_form for [app].[model] [object_id] %} """ return RenderCommentFormNode.handle_token(parser, token) @register.simple_tag def comment_form_target(): """ Get the target URL for the comment form. Example:: <form action="{% comment_form_target %}" method="post"> """ return backtalk.get_form_target() @register.simple_tag def get_comment_permalink(comment, anchor_pattern=None): """ Get the permalink for a comment, optionally specifying the format of the named anchor to be appended to the end of the URL. Example:: {% get_comment_permalink comment "#c%(id)s-by-%(user_name)s" %} """ if anchor_pattern: return comment.get_absolute_url(anchor_pattern) return comment.get_absolute_url()
PypiClean
/pyreess-1.0.tar.gz/pyreess-1.0/README.md
# py-reess

CLI application for deterministic password generation and recall

Failing to **REE**call your pa**SS**word? Try **py-REESS**!

[![Python Version](https://img.shields.io/pypi/pyversions/pyreess.svg?color=yellow&style=flat-square)](https://www.python.org/downloads/)
[![GitHub Licence](https://img.shields.io/github/license/BananaLoaf/pyreess.svg?color=blue&style=flat-square)](https://github.com/BananaLoaf/pyreess/blob/master/LICENSE)
[![Package Version](https://img.shields.io/pypi/v/pyreess.svg?color=green&style=flat-square)](https://pypi.org/project/pyreess/)

### Install

```bash
pip install pyreess
```

### Usage

```bash
pyreess --help
```

The core principle is simple - **for the same input you get the same output**. The input consists of an input string, a salt, a length, and an alphabet (digits, lowercase, uppercase, symbols). The output is the generated password, which can always be restored from the same input.
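The README stops at the principle and does not describe the derivation itself, so the following is only an illustrative sketch of deterministic generation (hash the inputs, then map digest bytes onto an alphabet). It is not py-reess's actual algorithm; the function name and defaults are invented for the example.

```python
# Illustrative sketch only -- NOT py-reess's implementation.
# Demonstrates the "same input -> same output" idea.
import hashlib
import string


def derive_password(secret: str, salt: str, length: int = 16,
                    alphabet: str = string.ascii_letters + string.digits) -> str:
    digest = hashlib.sha256(f"{secret}:{salt}".encode()).digest()
    while len(digest) < length:                  # stretch if the password is long
        digest += hashlib.sha256(digest).digest()
    return "".join(alphabet[b % len(alphabet)] for b in digest[:length])


# Deterministic: identical inputs always reproduce the identical password.
assert derive_password("my secret", "example.com") == derive_password("my secret", "example.com")
```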
PypiClean
/django-data-gateways-0.1.tar.gz/django-data-gateways-0.1/data_gateways/sending/senders/telegram_sender.py
from typing import Any

from telebot import TeleBot

from .abstract_sender import AbstractSender
from ...reporters.sending_report import SendingReport
from ...message_dao.base_message_dao import BaseMessageDAO
from ...message_builder.abstract_message_builder import AbstractMessageBuilder


class TelegramSender(AbstractSender):
    """Sender that delivers data to Telegram."""

    def __init__(
        self,
        bot_token: str,
        group_id: str,
        message_builder: AbstractMessageBuilder | None = None,
        subject_template: str | None = None,
        body_template: str | None = None,
    ) -> None:
        """
        Class initializer.

        :param bot_token: Telegram bot token.
        :param group_id: Telegram group ID.
        :param message_builder: Builder object that assembles a message from the data.
            If None, the default message builder is used.
        :param subject_template: Path to the message subject template. Required if
            message_builder is not passed; ignored if message_builder is passed.
        :param body_template: Path to the message body template. Required if
            message_builder is not passed; ignored if message_builder is passed.
        """
        super().__init__(message_builder, subject_template, body_template)
        self.__bot = TeleBot(bot_token)
        self.__bot.config['api_key'] = bot_token
        self.__group_id = group_id

    def send(self, message_dao: BaseMessageDAO) -> SendingReport:
        """
        Send the data to the Telegram group.

        :param message_dao: Data access object.
        :return: Sending report object.
        """
        # Build the message.
        message_subject, message_body = self.compile_message(message_dao)
        result_message = message_subject + '\n\n' + message_body

        # Try to send the message to Telegram.
        response: dict[str, Any] = self.__bot.send_message(
            chat_id=self.__group_id,
            text=result_message,
        )

        # Raise an exception if the API reports an error.
        if not response['ok']:
            raise Exception(response['error'])

        return SendingReport(status=SendingReport.Status.OK, sender=self)
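The module itself contains no usage example. A minimal sketch might look like the following; the token, group id, template paths, and the `message_dao` instance are hypothetical placeholders, since `BaseMessageDAO`'s interface is defined elsewhere in the package.

```python
# Hypothetical usage sketch; credentials, template paths and `message_dao` are
# placeholders, not values defined by the module above.
sender = TelegramSender(
    bot_token="123456:ABC-DEF",               # token issued by @BotFather (placeholder)
    group_id="-1001234567890",                # target Telegram group/chat id (placeholder)
    subject_template="messages/subject.txt",  # used by the default message builder
    body_template="messages/body.txt",
)

report = sender.send(message_dao)             # message_dao: some BaseMessageDAO instance
print(report.status)                          # SendingReport.Status.OK on success
```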
PypiClean
/servoarray-0.5.tar.gz/servoarray-0.5/src/pybind11/docs/changelog.rst
.. _changelog: Changelog ######### Starting with version 1.8.0, pybind11 releases use a `semantic versioning <http://semver.org>`_ policy. v2.3.0 (Not yet released) ----------------------------------------------------- * Significantly reduced module binary size (10-20%) when compiled in C++11 mode with GCC/Clang, or in any mode with MSVC. Function signatures are now always precomputed at compile time (this was previously only available in C++14 mode for non-MSVC compilers). `#934 <https://github.com/pybind/pybind11/pull/934>`_. * Add basic support for tag-based static polymorphism, where classes provide a method to returns the desired type of an instance. `#1326 <https://github.com/pybind/pybind11/pull/1326>`_. * Added support for write only properties. `#1144 <https://github.com/pybind/pybind11/pull/1144>`_. * Python type wrappers (``py::handle``, ``py::object``, etc.) now support map Python's number protocol onto C++ arithmetic operators such as ``operator+``, ``operator/=``, etc. `#1511 <https://github.com/pybind/pybind11/pull/1511>`_. * A number of improvements related to enumerations: 1. The ``enum_`` implementation was rewritten from scratch to reduce code bloat. Rather than instantiating a full implementation for each enumeration, most code is now contained in a generic base class. `#1511 <https://github.com/pybind/pybind11/pull/1511>`_. 2. The ``value()`` method of ``py::enum_`` now accepts an optional docstring that will be shown in the documentation of the associated enumeration. `#1160 <https://github.com/pybind/pybind11/pull/1160>`_. 3. check for already existing enum value and throw an error if present. `#1453 <https://github.com/pybind/pybind11/pull/1453>`_. * added ``py::ellipsis()`` method for slicing of multidimensional NumPy arrays `#1502 <https://github.com/pybind/pybind11/pull/1502>`_. * ``pybind11_add_module()``: allow including Python as a ``SYSTEM`` include path. `#1416 <https://github.com/pybind/pybind11/pull/1416>`_. * ``pybind11/stl.h`` does not convert strings to ``vector<string>`` anymore. `#1258 <https://github.com/pybind/pybind11/issues/1258>`_. v2.2.4 (September 11, 2018) ----------------------------------------------------- * Use new Python 3.7 Thread Specific Storage (TSS) implementation if available. `#1454 <https://github.com/pybind/pybind11/pull/1454>`_, `#1517 <https://github.com/pybind/pybind11/pull/1517>`_. * Fixes for newer MSVC versions and C++17 mode. `#1347 <https://github.com/pybind/pybind11/pull/1347>`_, `#1462 <https://github.com/pybind/pybind11/pull/1462>`_. * Propagate return value policies to type-specific casters when casting STL containers. `#1455 <https://github.com/pybind/pybind11/pull/1455>`_. * Allow ostream-redirection of more than 1024 characters. `#1479 <https://github.com/pybind/pybind11/pull/1479>`_. * Set ``Py_DEBUG`` define when compiling against a debug Python build. `#1438 <https://github.com/pybind/pybind11/pull/1438>`_. * Untangle integer logic in number type caster to work for custom types that may only be castable to a restricted set of builtin types. `#1442 <https://github.com/pybind/pybind11/pull/1442>`_. * CMake build system: Remember Python version in cache file. `#1434 <https://github.com/pybind/pybind11/pull/1434>`_. * Fix for custom smart pointers: use ``std::addressof`` to obtain holder address instead of ``operator&``. `#1435 <https://github.com/pybind/pybind11/pull/1435>`_. * Properly report exceptions thrown during module initialization. `#1362 <https://github.com/pybind/pybind11/pull/1362>`_. 
* Fixed a segmentation fault when creating empty-shaped NumPy array. `#1371 <https://github.com/pybind/pybind11/pull/1371>`_. * The version of Intel C++ compiler must be >= 2017, and this is now checked by the header files. `#1363 <https://github.com/pybind/pybind11/pull/1363>`_. * A few minor typo fixes and improvements to the test suite, and patches that silence compiler warnings. v2.2.3 (April 29, 2018) ----------------------------------------------------- * The pybind11 header location detection was replaced by a new implementation that no longer depends on ``pip`` internals (the recently released ``pip`` 10 has restricted access to this API). `#1190 <https://github.com/pybind/pybind11/pull/1190>`_. * Small adjustment to an implementation detail to work around a compiler segmentation fault in Clang 3.3/3.4. `#1350 <https://github.com/pybind/pybind11/pull/1350>`_. * The minimal supported version of the Intel compiler was >= 17.0 since pybind11 v2.1. This check is now explicit, and a compile-time error is raised if the compiler meet the requirement. `#1363 <https://github.com/pybind/pybind11/pull/1363>`_. * Fixed an endianness-related fault in the test suite. `#1287 <https://github.com/pybind/pybind11/pull/1287>`_. v2.2.2 (February 7, 2018) ----------------------------------------------------- * Fixed a segfault when combining embedded interpreter shutdown/reinitialization with external loaded pybind11 modules. `#1092 <https://github.com/pybind/pybind11/pull/1092>`_. * Eigen support: fixed a bug where Nx1/1xN numpy inputs couldn't be passed as arguments to Eigen vectors (which for Eigen are simply compile-time fixed Nx1/1xN matrices). `#1106 <https://github.com/pybind/pybind11/pull/1106>`_. * Clarified to license by moving the licensing of contributions from ``LICENSE`` into ``CONTRIBUTING.md``: the licensing of contributions is not actually part of the software license as distributed. This isn't meant to be a substantial change in the licensing of the project, but addresses concerns that the clause made the license non-standard. `#1109 <https://github.com/pybind/pybind11/issues/1109>`_. * Fixed a regression introduced in 2.1 that broke binding functions with lvalue character literal arguments. `#1128 <https://github.com/pybind/pybind11/pull/1128>`_. * MSVC: fix for compilation failures under /permissive-, and added the flag to the appveyor test suite. `#1155 <https://github.com/pybind/pybind11/pull/1155>`_. * Fixed ``__qualname__`` generation, and in turn, fixes how class names (especially nested class names) are shown in generated docstrings. `#1171 <https://github.com/pybind/pybind11/pull/1171>`_. * Updated the FAQ with a suggested project citation reference. `#1189 <https://github.com/pybind/pybind11/pull/1189>`_. * Added fixes for deprecation warnings when compiled under C++17 with ``-Wdeprecated`` turned on, and add ``-Wdeprecated`` to the test suite compilation flags. `#1191 <https://github.com/pybind/pybind11/pull/1191>`_. * Fixed outdated PyPI URLs in ``setup.py``. `#1213 <https://github.com/pybind/pybind11/pull/1213>`_. * Fixed a refcount leak for arguments that end up in a ``py::args`` argument for functions with both fixed positional and ``py::args`` arguments. `#1216 <https://github.com/pybind/pybind11/pull/1216>`_. * Fixed a potential segfault resulting from possible premature destruction of ``py::args``/``py::kwargs`` arguments with overloaded functions. `#1223 <https://github.com/pybind/pybind11/pull/1223>`_. * Fixed ``del map[item]`` for a ``stl_bind.h`` bound stl map. 
`#1229 <https://github.com/pybind/pybind11/pull/1229>`_. * Fixed a regression from v2.1.x where the aggregate initialization could unintentionally end up at a constructor taking a templated ``std::initializer_list<T>`` argument. `#1249 <https://github.com/pybind/pybind11/pull/1249>`_. * Fixed an issue where calling a function with a keep_alive policy on the same nurse/patient pair would cause the internal patient storage to needlessly grow (unboundedly, if the nurse is long-lived). `#1251 <https://github.com/pybind/pybind11/issues/1251>`_. * Various other minor fixes. v2.2.1 (September 14, 2017) ----------------------------------------------------- * Added ``py::module::reload()`` member function for reloading a module. `#1040 <https://github.com/pybind/pybind11/pull/1040>`_. * Fixed a reference leak in the number converter. `#1078 <https://github.com/pybind/pybind11/pull/1078>`_. * Fixed compilation with Clang on host GCC < 5 (old libstdc++ which isn't fully C++11 compliant). `#1062 <https://github.com/pybind/pybind11/pull/1062>`_. * Fixed a regression where the automatic ``std::vector<bool>`` caster would fail to compile. The same fix also applies to any container which returns element proxies instead of references. `#1053 <https://github.com/pybind/pybind11/pull/1053>`_. * Fixed a regression where the ``py::keep_alive`` policy could not be applied to constructors. `#1065 <https://github.com/pybind/pybind11/pull/1065>`_. * Fixed a nullptr dereference when loading a ``py::module_local`` type that's only registered in an external module. `#1058 <https://github.com/pybind/pybind11/pull/1058>`_. * Fixed implicit conversion of accessors to types derived from ``py::object``. `#1076 <https://github.com/pybind/pybind11/pull/1076>`_. * The ``name`` in ``PYBIND11_MODULE(name, variable)`` can now be a macro. `#1082 <https://github.com/pybind/pybind11/pull/1082>`_. * Relaxed overly strict ``py::pickle()`` check for matching get and set types. `#1064 <https://github.com/pybind/pybind11/pull/1064>`_. * Conversion errors now try to be more informative when it's likely that a missing header is the cause (e.g. forgetting ``<pybind11/stl.h>``). `#1077 <https://github.com/pybind/pybind11/pull/1077>`_. v2.2.0 (August 31, 2017) ----------------------------------------------------- * Support for embedding the Python interpreter. See the :doc:`documentation page </advanced/embedding>` for a full overview of the new features. `#774 <https://github.com/pybind/pybind11/pull/774>`_, `#889 <https://github.com/pybind/pybind11/pull/889>`_, `#892 <https://github.com/pybind/pybind11/pull/892>`_, `#920 <https://github.com/pybind/pybind11/pull/920>`_. .. code-block:: cpp #include <pybind11/embed.h> namespace py = pybind11; int main() { py::scoped_interpreter guard{}; // start the interpreter and keep it alive py::print("Hello, World!"); // use the Python API } * Support for inheriting from multiple C++ bases in Python. `#693 <https://github.com/pybind/pybind11/pull/693>`_. .. code-block:: python from cpp_module import CppBase1, CppBase2 class PyDerived(CppBase1, CppBase2): def __init__(self): CppBase1.__init__(self) # C++ bases must be initialized explicitly CppBase2.__init__(self) * ``PYBIND11_MODULE`` is now the preferred way to create module entry points. ``PYBIND11_PLUGIN`` is deprecated. See :ref:`macros` for details. `#879 <https://github.com/pybind/pybind11/pull/879>`_. .. 
code-block:: cpp // new PYBIND11_MODULE(example, m) { m.def("add", [](int a, int b) { return a + b; }); } // old PYBIND11_PLUGIN(example) { py::module m("example"); m.def("add", [](int a, int b) { return a + b; }); return m.ptr(); } * pybind11's headers and build system now more strictly enforce hidden symbol visibility for extension modules. This should be seamless for most users, but see the :doc:`upgrade` if you use a custom build system. `#995 <https://github.com/pybind/pybind11/pull/995>`_. * Support for ``py::module_local`` types which allow multiple modules to export the same C++ types without conflicts. This is useful for opaque types like ``std::vector<int>``. ``py::bind_vector`` and ``py::bind_map`` now default to ``py::module_local`` if their elements are builtins or local types. See :ref:`module_local` for details. `#949 <https://github.com/pybind/pybind11/pull/949>`_, `#981 <https://github.com/pybind/pybind11/pull/981>`_, `#995 <https://github.com/pybind/pybind11/pull/995>`_, `#997 <https://github.com/pybind/pybind11/pull/997>`_. * Custom constructors can now be added very easily using lambdas or factory functions which return a class instance by value, pointer or holder. This supersedes the old placement-new ``__init__`` technique. See :ref:`custom_constructors` for details. `#805 <https://github.com/pybind/pybind11/pull/805>`_, `#1014 <https://github.com/pybind/pybind11/pull/1014>`_. .. code-block:: cpp struct Example { Example(std::string); }; py::class_<Example>(m, "Example") .def(py::init<std::string>()) // existing constructor .def(py::init([](int n) { // custom constructor return std::make_unique<Example>(std::to_string(n)); })); * Similarly to custom constructors, pickling support functions are now bound using the ``py::pickle()`` adaptor which improves type safety. See the :doc:`upgrade` and :ref:`pickling` for details. `#1038 <https://github.com/pybind/pybind11/pull/1038>`_. * Builtin support for converting C++17 standard library types and general conversion improvements: 1. C++17 ``std::variant`` is supported right out of the box. C++11/14 equivalents (e.g. ``boost::variant``) can also be added with a simple user-defined specialization. See :ref:`cpp17_container_casters` for details. `#811 <https://github.com/pybind/pybind11/pull/811>`_, `#845 <https://github.com/pybind/pybind11/pull/845>`_, `#989 <https://github.com/pybind/pybind11/pull/989>`_. 2. Out-of-the-box support for C++17 ``std::string_view``. `#906 <https://github.com/pybind/pybind11/pull/906>`_. 3. Improved compatibility of the builtin ``optional`` converter. `#874 <https://github.com/pybind/pybind11/pull/874>`_. 4. The ``bool`` converter now accepts ``numpy.bool_`` and types which define ``__bool__`` (Python 3.x) or ``__nonzero__`` (Python 2.7). `#925 <https://github.com/pybind/pybind11/pull/925>`_. 5. C++-to-Python casters are now more efficient and move elements out of rvalue containers whenever possible. `#851 <https://github.com/pybind/pybind11/pull/851>`_, `#936 <https://github.com/pybind/pybind11/pull/936>`_, `#938 <https://github.com/pybind/pybind11/pull/938>`_. 6. Fixed ``bytes`` to ``std::string/char*`` conversion on Python 3. `#817 <https://github.com/pybind/pybind11/pull/817>`_. 7. Fixed lifetime of temporary C++ objects created in Python-to-C++ conversions. `#924 <https://github.com/pybind/pybind11/pull/924>`_. * Scope guard call policy for RAII types, e.g. ``py::call_guard<py::gil_scoped_release>()``, ``py::call_guard<py::scoped_ostream_redirect>()``. See :ref:`call_policies` for details. 
`#740 <https://github.com/pybind/pybind11/pull/740>`_. * Utility for redirecting C++ streams to Python (e.g. ``std::cout`` -> ``sys.stdout``). Scope guard ``py::scoped_ostream_redirect`` in C++ and a context manager in Python. See :ref:`ostream_redirect`. `#1009 <https://github.com/pybind/pybind11/pull/1009>`_. * Improved handling of types and exceptions across module boundaries. `#915 <https://github.com/pybind/pybind11/pull/915>`_, `#951 <https://github.com/pybind/pybind11/pull/951>`_, `#995 <https://github.com/pybind/pybind11/pull/995>`_. * Fixed destruction order of ``py::keep_alive`` nurse/patient objects in reference cycles. `#856 <https://github.com/pybind/pybind11/pull/856>`_. * Numpy and buffer protocol related improvements: 1. Support for negative strides in Python buffer objects/numpy arrays. This required changing integers from unsigned to signed for the related C++ APIs. Note: If you have compiler warnings enabled, you may notice some new conversion warnings after upgrading. These can be resolved with ``static_cast``. `#782 <https://github.com/pybind/pybind11/pull/782>`_. 2. Support ``std::complex`` and arrays inside ``PYBIND11_NUMPY_DTYPE``. `#831 <https://github.com/pybind/pybind11/pull/831>`_, `#832 <https://github.com/pybind/pybind11/pull/832>`_. 3. Support for constructing ``py::buffer_info`` and ``py::arrays`` using arbitrary containers or iterators instead of requiring a ``std::vector``. `#788 <https://github.com/pybind/pybind11/pull/788>`_, `#822 <https://github.com/pybind/pybind11/pull/822>`_, `#860 <https://github.com/pybind/pybind11/pull/860>`_. 4. Explicitly check numpy version and require >= 1.7.0. `#819 <https://github.com/pybind/pybind11/pull/819>`_. * Support for allowing/prohibiting ``None`` for specific arguments and improved ``None`` overload resolution order. See :ref:`none_arguments` for details. `#843 <https://github.com/pybind/pybind11/pull/843>`_. `#859 <https://github.com/pybind/pybind11/pull/859>`_. * Added ``py::exec()`` as a shortcut for ``py::eval<py::eval_statements>()`` and support for C++11 raw string literals as input. See :ref:`eval`. `#766 <https://github.com/pybind/pybind11/pull/766>`_, `#827 <https://github.com/pybind/pybind11/pull/827>`_. * ``py::vectorize()`` ignores non-vectorizable arguments and supports member functions. `#762 <https://github.com/pybind/pybind11/pull/762>`_. * Support for bound methods as callbacks (``pybind11/functional.h``). `#815 <https://github.com/pybind/pybind11/pull/815>`_. * Allow aliasing pybind11 methods: ``cls.attr("foo") = cls.attr("bar")``. `#802 <https://github.com/pybind/pybind11/pull/802>`_. * Don't allow mixed static/non-static overloads. `#804 <https://github.com/pybind/pybind11/pull/804>`_. * Fixed overriding static properties in derived classes. `#784 <https://github.com/pybind/pybind11/pull/784>`_. * Improved deduction of member functions of a derived class when its bases aren't registered with pybind11. `#855 <https://github.com/pybind/pybind11/pull/855>`_. .. code-block:: cpp struct Base { int foo() { return 42; } } struct Derived : Base {} // Now works, but previously required also binding `Base` py::class_<Derived>(m, "Derived") .def("foo", &Derived::foo); // function is actually from `Base` * The implementation of ``py::init<>`` now uses C++11 brace initialization syntax to construct instances, which permits binding implicit constructors of aggregate types. `#1015 <https://github.com/pybind/pybind11/pull/1015>`_. .. 
code-block:: cpp struct Aggregate { int a; std::string b; }; py::class_<Aggregate>(m, "Aggregate") .def(py::init<int, const std::string &>()); * Fixed issues with multiple inheritance with offset base/derived pointers. `#812 <https://github.com/pybind/pybind11/pull/812>`_, `#866 <https://github.com/pybind/pybind11/pull/866>`_, `#960 <https://github.com/pybind/pybind11/pull/960>`_. * Fixed reference leak of type objects. `#1030 <https://github.com/pybind/pybind11/pull/1030>`_. * Improved support for the ``/std:c++14`` and ``/std:c++latest`` modes on MSVC 2017. `#841 <https://github.com/pybind/pybind11/pull/841>`_, `#999 <https://github.com/pybind/pybind11/pull/999>`_. * Fixed detection of private operator new on MSVC. `#893 <https://github.com/pybind/pybind11/pull/893>`_, `#918 <https://github.com/pybind/pybind11/pull/918>`_. * Intel C++ compiler compatibility fixes. `#937 <https://github.com/pybind/pybind11/pull/937>`_. * Fixed implicit conversion of `py::enum_` to integer types on Python 2.7. `#821 <https://github.com/pybind/pybind11/pull/821>`_. * Added ``py::hash`` to fetch the hash value of Python objects, and ``.def(hash(py::self))`` to provide the C++ ``std::hash`` as the Python ``__hash__`` method. `#1034 <https://github.com/pybind/pybind11/pull/1034>`_. * Fixed ``__truediv__`` on Python 2 and ``__itruediv__`` on Python 3. `#867 <https://github.com/pybind/pybind11/pull/867>`_. * ``py::capsule`` objects now support the ``name`` attribute. This is useful for interfacing with ``scipy.LowLevelCallable``. `#902 <https://github.com/pybind/pybind11/pull/902>`_. * Fixed ``py::make_iterator``'s ``__next__()`` for past-the-end calls. `#897 <https://github.com/pybind/pybind11/pull/897>`_. * Added ``error_already_set::matches()`` for checking Python exceptions. `#772 <https://github.com/pybind/pybind11/pull/772>`_. * Deprecated ``py::error_already_set::clear()``. It's no longer needed following a simplification of the ``py::error_already_set`` class. `#954 <https://github.com/pybind/pybind11/pull/954>`_. * Deprecated ``py::handle::operator==()`` in favor of ``py::handle::is()`` `#825 <https://github.com/pybind/pybind11/pull/825>`_. * Deprecated ``py::object::borrowed``/``py::object::stolen``. Use ``py::object::borrowed_t{}``/``py::object::stolen_t{}`` instead. `#771 <https://github.com/pybind/pybind11/pull/771>`_. * Changed internal data structure versioning to avoid conflicts between modules compiled with different revisions of pybind11. `#1012 <https://github.com/pybind/pybind11/pull/1012>`_. * Additional compile-time and run-time error checking and more informative messages. `#786 <https://github.com/pybind/pybind11/pull/786>`_, `#794 <https://github.com/pybind/pybind11/pull/794>`_, `#803 <https://github.com/pybind/pybind11/pull/803>`_. * Various minor improvements and fixes. 
`#764 <https://github.com/pybind/pybind11/pull/764>`_, `#791 <https://github.com/pybind/pybind11/pull/791>`_, `#795 <https://github.com/pybind/pybind11/pull/795>`_, `#840 <https://github.com/pybind/pybind11/pull/840>`_, `#844 <https://github.com/pybind/pybind11/pull/844>`_, `#846 <https://github.com/pybind/pybind11/pull/846>`_, `#849 <https://github.com/pybind/pybind11/pull/849>`_, `#858 <https://github.com/pybind/pybind11/pull/858>`_, `#862 <https://github.com/pybind/pybind11/pull/862>`_, `#871 <https://github.com/pybind/pybind11/pull/871>`_, `#872 <https://github.com/pybind/pybind11/pull/872>`_, `#881 <https://github.com/pybind/pybind11/pull/881>`_, `#888 <https://github.com/pybind/pybind11/pull/888>`_, `#899 <https://github.com/pybind/pybind11/pull/899>`_, `#928 <https://github.com/pybind/pybind11/pull/928>`_, `#931 <https://github.com/pybind/pybind11/pull/931>`_, `#944 <https://github.com/pybind/pybind11/pull/944>`_, `#950 <https://github.com/pybind/pybind11/pull/950>`_, `#952 <https://github.com/pybind/pybind11/pull/952>`_, `#962 <https://github.com/pybind/pybind11/pull/962>`_, `#965 <https://github.com/pybind/pybind11/pull/965>`_, `#970 <https://github.com/pybind/pybind11/pull/970>`_, `#978 <https://github.com/pybind/pybind11/pull/978>`_, `#979 <https://github.com/pybind/pybind11/pull/979>`_, `#986 <https://github.com/pybind/pybind11/pull/986>`_, `#1020 <https://github.com/pybind/pybind11/pull/1020>`_, `#1027 <https://github.com/pybind/pybind11/pull/1027>`_, `#1037 <https://github.com/pybind/pybind11/pull/1037>`_. * Testing improvements. `#798 <https://github.com/pybind/pybind11/pull/798>`_, `#882 <https://github.com/pybind/pybind11/pull/882>`_, `#898 <https://github.com/pybind/pybind11/pull/898>`_, `#900 <https://github.com/pybind/pybind11/pull/900>`_, `#921 <https://github.com/pybind/pybind11/pull/921>`_, `#923 <https://github.com/pybind/pybind11/pull/923>`_, `#963 <https://github.com/pybind/pybind11/pull/963>`_. v2.1.1 (April 7, 2017) ----------------------------------------------------- * Fixed minimum version requirement for MSVC 2015u3 `#773 <https://github.com/pybind/pybind11/pull/773>`_. v2.1.0 (March 22, 2017) ----------------------------------------------------- * pybind11 now performs function overload resolution in two phases. The first phase only considers exact type matches, while the second allows for implicit conversions to take place. A special ``noconvert()`` syntax can be used to completely disable implicit conversions for specific arguments. `#643 <https://github.com/pybind/pybind11/pull/643>`_, `#634 <https://github.com/pybind/pybind11/pull/634>`_, `#650 <https://github.com/pybind/pybind11/pull/650>`_. * Fixed a regression where static properties no longer worked with classes using multiple inheritance. The ``py::metaclass`` attribute is no longer necessary (and deprecated as of this release) when binding classes with static properties. `#679 <https://github.com/pybind/pybind11/pull/679>`_, * Classes bound using ``pybind11`` can now use custom metaclasses. `#679 <https://github.com/pybind/pybind11/pull/679>`_, * ``py::args`` and ``py::kwargs`` can now be mixed with other positional arguments when binding functions using pybind11. `#611 <https://github.com/pybind/pybind11/pull/611>`_. * Improved support for C++11 unicode string and character types; added extensive documentation regarding pybind11's string conversion behavior. 
`#624 <https://github.com/pybind/pybind11/pull/624>`_, `#636 <https://github.com/pybind/pybind11/pull/636>`_, `#715 <https://github.com/pybind/pybind11/pull/715>`_. * pybind11 can now avoid expensive copies when converting Eigen arrays to NumPy arrays (and vice versa). `#610 <https://github.com/pybind/pybind11/pull/610>`_. * The "fast path" in ``py::vectorize`` now works for any full-size group of C or F-contiguous arrays. The non-fast path is also faster since it no longer performs copies of the input arguments (except when type conversions are necessary). `#610 <https://github.com/pybind/pybind11/pull/610>`_. * Added fast, unchecked access to NumPy arrays via a proxy object. `#746 <https://github.com/pybind/pybind11/pull/746>`_. * Transparent support for class-specific ``operator new`` and ``operator delete`` implementations. `#755 <https://github.com/pybind/pybind11/pull/755>`_. * Slimmer and more efficient STL-compatible iterator interface for sequence types. `#662 <https://github.com/pybind/pybind11/pull/662>`_. * Improved custom holder type support. `#607 <https://github.com/pybind/pybind11/pull/607>`_. * ``nullptr`` to ``None`` conversion fixed in various builtin type casters. `#732 <https://github.com/pybind/pybind11/pull/732>`_. * ``enum_`` now exposes its members via a special ``__members__`` attribute. `#666 <https://github.com/pybind/pybind11/pull/666>`_. * ``std::vector`` bindings created using ``stl_bind.h`` can now optionally implement the buffer protocol. `#488 <https://github.com/pybind/pybind11/pull/488>`_. * Automated C++ reference documentation using doxygen and breathe. `#598 <https://github.com/pybind/pybind11/pull/598>`_. * Added minimum compiler version assertions. `#727 <https://github.com/pybind/pybind11/pull/727>`_. * Improved compatibility with C++1z. `#677 <https://github.com/pybind/pybind11/pull/677>`_. * Improved ``py::capsule`` API. Can be used to implement cleanup callbacks that are invoked at module destruction time. `#752 <https://github.com/pybind/pybind11/pull/752>`_. * Various minor improvements and fixes. `#595 <https://github.com/pybind/pybind11/pull/595>`_, `#588 <https://github.com/pybind/pybind11/pull/588>`_, `#589 <https://github.com/pybind/pybind11/pull/589>`_, `#603 <https://github.com/pybind/pybind11/pull/603>`_, `#619 <https://github.com/pybind/pybind11/pull/619>`_, `#648 <https://github.com/pybind/pybind11/pull/648>`_, `#695 <https://github.com/pybind/pybind11/pull/695>`_, `#720 <https://github.com/pybind/pybind11/pull/720>`_, `#723 <https://github.com/pybind/pybind11/pull/723>`_, `#729 <https://github.com/pybind/pybind11/pull/729>`_, `#724 <https://github.com/pybind/pybind11/pull/724>`_, `#742 <https://github.com/pybind/pybind11/pull/742>`_, `#753 <https://github.com/pybind/pybind11/pull/753>`_. v2.0.1 (Jan 4, 2017) ----------------------------------------------------- * Fixed a pointer-to-reference error in ``type_caster`` on MSVC `#583 <https://github.com/pybind/pybind11/pull/583>`_. * Fixed a segmentation fault in the test suite caused by a typo `cd7eac <https://github.com/pybind/pybind11/commit/cd7eac>`_. v2.0.0 (Jan 1, 2017) ----------------------------------------------------- * Fixed a reference counting regression affecting types with custom metaclasses (introduced in v2.0.0-rc1). `#571 <https://github.com/pybind/pybind11/pull/571>`_. * Quenched a CMake policy warning. `#570 <https://github.com/pybind/pybind11/pull/570>`_. 
v2.0.0-rc1 (Dec 23, 2016) ----------------------------------------------------- The pybind11 developers are excited to issue a release candidate of pybind11 with a subsequent v2.0.0 release planned in early January next year. An incredible amount of effort went into pybind11 over the last ~5 months, leading to a release that is jam-packed with exciting new features and numerous usability improvements. The following list links PRs or individual commits whenever applicable. Happy Christmas! * Support for binding C++ class hierarchies that make use of multiple inheritance. `#410 <https://github.com/pybind/pybind11/pull/410>`_. * PyPy support: pybind11 now supports nightly builds of PyPy and will interoperate with the future 5.7 release. No code changes are necessary, everything "just" works as usual. Note that we only target the Python 2.7 branch for now; support for 3.x will be added once its ``cpyext`` extension support catches up. A few minor features remain unsupported for the time being (notably dynamic attributes in custom types). `#527 <https://github.com/pybind/pybind11/pull/527>`_. * Significant work on the documentation -- in particular, the monolithic ``advanced.rst`` file was restructured into an easier-to-read hierarchical organization. `#448 <https://github.com/pybind/pybind11/pull/448>`_. * Many NumPy-related improvements: 1. Object-oriented API to access and modify NumPy ``ndarray`` instances, replicating much of the corresponding NumPy C API functionality. `#402 <https://github.com/pybind/pybind11/pull/402>`_. 2. NumPy array ``dtype`` array descriptors are now first-class citizens and are exposed via a new class ``py::dtype``. 3. Structured dtypes can be registered using the ``PYBIND11_NUMPY_DTYPE()`` macro. Special ``array`` constructors accepting dtype objects were also added. One potential caveat involving this change: format descriptor strings should now be accessed via ``format_descriptor::format()`` (however, for compatibility purposes, the old syntax ``format_descriptor::value`` will still work for non-structured data types). `#308 <https://github.com/pybind/pybind11/pull/308>`_. 4. Further improvements to support structured dtypes throughout the system. `#472 <https://github.com/pybind/pybind11/pull/472>`_, `#474 <https://github.com/pybind/pybind11/pull/474>`_, `#459 <https://github.com/pybind/pybind11/pull/459>`_, `#453 <https://github.com/pybind/pybind11/pull/453>`_, `#452 <https://github.com/pybind/pybind11/pull/452>`_, and `#505 <https://github.com/pybind/pybind11/pull/505>`_. 5. Fast access operators. `#497 <https://github.com/pybind/pybind11/pull/497>`_. 6. Constructors for arrays whose storage is owned by another object. `#440 <https://github.com/pybind/pybind11/pull/440>`_. 7. Added constructors for ``array`` and ``array_t`` explicitly accepting shape and strides; if strides are not provided, they are deduced assuming C-contiguity. Also added simplified constructors for the 1-dimensional case. 8. Added buffer/NumPy support for ``char[N]`` and ``std::array<char, N>`` types. 9. Added ``memoryview`` wrapper type which is constructible from ``buffer_info``. * Eigen: many additional conversions and support for non-contiguous arrays/slices. `#427 <https://github.com/pybind/pybind11/pull/427>`_, `#315 <https://github.com/pybind/pybind11/pull/315>`_, `#316 <https://github.com/pybind/pybind11/pull/316>`_, `#312 <https://github.com/pybind/pybind11/pull/312>`_, and `#267 <https://github.com/pybind/pybind11/pull/267>`_. * Incompatible changes in ``class_<...>::class_()``: 1. 
Declarations of types that provide access via the buffer protocol must now include the ``py::buffer_protocol()`` annotation as an argument to the ``class_`` constructor. 2. Declarations of types that require a custom metaclass (i.e. all classes which include static properties via commands such as ``def_readwrite_static()``) must now include the ``py::metaclass()`` annotation as an argument to the ``class_`` constructor. These two changes were necessary to make type definitions in pybind11 future-proof, and to support PyPy via its cpyext mechanism. `#527 <https://github.com/pybind/pybind11/pull/527>`_. 3. This version of pybind11 uses a redesigned mechanism for instantiating trampoline classes that are used to override virtual methods from within Python. This led to the following user-visible syntax change: instead of .. code-block:: cpp py::class_<TrampolineClass>("MyClass") .alias<MyClass>() .... write .. code-block:: cpp py::class_<MyClass, TrampolineClass>("MyClass") .... Importantly, both the original and the trampoline class are now specified as arguments (in arbitrary order) to the ``py::class_`` template, and the ``alias<..>()`` call is gone. The new scheme has zero overhead in cases when Python doesn't override any functions of the underlying C++ class. `rev. 86d825 <https://github.com/pybind/pybind11/commit/86d825>`_. * Added ``eval`` and ``eval_file`` functions for evaluating expressions and statements from a string or file. `rev. 0d3fc3 <https://github.com/pybind/pybind11/commit/0d3fc3>`_. * pybind11 can now create types with a modifiable dictionary. `#437 <https://github.com/pybind/pybind11/pull/437>`_ and `#444 <https://github.com/pybind/pybind11/pull/444>`_. * Support for translation of arbitrary C++ exceptions to Python counterparts. `#296 <https://github.com/pybind/pybind11/pull/296>`_ and `#273 <https://github.com/pybind/pybind11/pull/273>`_. * Report full backtraces through mixed C++/Python code, better reporting for import errors, fixed GIL management in exception processing. `#537 <https://github.com/pybind/pybind11/pull/537>`_, `#494 <https://github.com/pybind/pybind11/pull/494>`_, `rev. e72d95 <https://github.com/pybind/pybind11/commit/e72d95>`_, and `rev. 099d6e <https://github.com/pybind/pybind11/commit/099d6e>`_. * Support for bit-level operations, comparisons, and serialization of C++ enumerations. `#503 <https://github.com/pybind/pybind11/pull/503>`_, `#508 <https://github.com/pybind/pybind11/pull/508>`_, `#380 <https://github.com/pybind/pybind11/pull/380>`_, `#309 <https://github.com/pybind/pybind11/pull/309>`_, `#311 <https://github.com/pybind/pybind11/pull/311>`_. * The ``class_`` constructor now accepts its template arguments in any order. `#385 <https://github.com/pybind/pybind11/pull/385>`_. * Attribute and item accessors now have a more complete interface which makes it possible to chain attributes as in ``obj.attr("a")[key].attr("b").attr("method")(1, 2, 3)``. `#425 <https://github.com/pybind/pybind11/pull/425>`_. * Major redesign of the default and conversion constructors in ``pytypes.h``. `#464 <https://github.com/pybind/pybind11/pull/464>`_. * Added built-in support for the ``std::shared_ptr`` holder type. It is no longer necessary to include a declaration of the form ``PYBIND11_DECLARE_HOLDER_TYPE(T, std::shared_ptr<T>)`` (though continuing to do so won't cause an error). `#454 <https://github.com/pybind/pybind11/pull/454>`_. * New ``py::overload_cast`` casting operator to select among multiple possible overloads of a function. An example: .. 
code-block:: cpp py::class_<Pet>(m, "Pet") .def("set", py::overload_cast<int>(&Pet::set), "Set the pet's age") .def("set", py::overload_cast<const std::string &>(&Pet::set), "Set the pet's name"); This feature only works on C++14-capable compilers. `#541 <https://github.com/pybind/pybind11/pull/541>`_. * C++ types are automatically cast to Python types, e.g. when assigning them as an attribute. For instance, the following is now legal: .. code-block:: cpp py::module m = /* ... */ m.attr("constant") = 123; (Previously, a ``py::cast`` call was necessary to avoid a compilation error.) `#551 <https://github.com/pybind/pybind11/pull/551>`_. * Redesigned ``pytest``-based test suite. `#321 <https://github.com/pybind/pybind11/pull/321>`_. * Instance tracking to detect reference leaks in test suite. `#324 <https://github.com/pybind/pybind11/pull/324>`_ * pybind11 can now distinguish between multiple different instances that are located at the same memory address, but which have different types. `#329 <https://github.com/pybind/pybind11/pull/329>`_. * Improved logic in ``move`` return value policy. `#510 <https://github.com/pybind/pybind11/pull/510>`_, `#297 <https://github.com/pybind/pybind11/pull/297>`_. * Generalized unpacking API to permit calling Python functions from C++ using notation such as ``foo(a1, a2, *args, "ka"_a=1, "kb"_a=2, **kwargs)``. `#372 <https://github.com/pybind/pybind11/pull/372>`_. * ``py::print()`` function whose behavior matches that of the native Python ``print()`` function. `#372 <https://github.com/pybind/pybind11/pull/372>`_. * Added ``py::dict`` keyword constructor:``auto d = dict("number"_a=42, "name"_a="World");``. `#372 <https://github.com/pybind/pybind11/pull/372>`_. * Added ``py::str::format()`` method and ``_s`` literal: ``py::str s = "1 + 2 = {}"_s.format(3);``. `#372 <https://github.com/pybind/pybind11/pull/372>`_. * Added ``py::repr()`` function which is equivalent to Python's builtin ``repr()``. `#333 <https://github.com/pybind/pybind11/pull/333>`_. * Improved construction and destruction logic for holder types. It is now possible to reference instances with smart pointer holder types without constructing the holder if desired. The ``PYBIND11_DECLARE_HOLDER_TYPE`` macro now accepts an optional second parameter to indicate whether the holder type uses intrusive reference counting. `#533 <https://github.com/pybind/pybind11/pull/533>`_ and `#561 <https://github.com/pybind/pybind11/pull/561>`_. * Mapping a stateless C++ function to Python and back is now "for free" (i.e. no extra indirections or argument conversion overheads). `rev. 954b79 <https://github.com/pybind/pybind11/commit/954b79>`_. * Bindings for ``std::valarray<T>``. `#545 <https://github.com/pybind/pybind11/pull/545>`_. * Improved support for C++17 capable compilers. `#562 <https://github.com/pybind/pybind11/pull/562>`_. * Bindings for ``std::optional<t>``. `#475 <https://github.com/pybind/pybind11/pull/475>`_, `#476 <https://github.com/pybind/pybind11/pull/476>`_, `#479 <https://github.com/pybind/pybind11/pull/479>`_, `#499 <https://github.com/pybind/pybind11/pull/499>`_, and `#501 <https://github.com/pybind/pybind11/pull/501>`_. * ``stl_bind.h``: general improvements and support for ``std::map`` and ``std::unordered_map``. `#490 <https://github.com/pybind/pybind11/pull/490>`_, `#282 <https://github.com/pybind/pybind11/pull/282>`_, `#235 <https://github.com/pybind/pybind11/pull/235>`_. 
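  As a rough illustration of the Python side of such a binding (the module name ``example`` and the class name ``MapStringDouble`` below are hypothetical placeholders, assuming a C++ extension that called ``py::bind_map<std::map<std::string, double>>(m, "MapStringDouble")``):

  .. code-block:: python

      from example import MapStringDouble  # hypothetical extension module

      m = MapStringDouble()
      m["tau"] = 6.28          # dict-like access backed by the C++ std::map
      print(m["tau"], len(m))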
* The ``std::tuple``, ``std::pair``, ``std::list``, and ``std::vector`` type casters now accept any Python sequence type as input. `rev. 107285 <https://github.com/pybind/pybind11/commit/107285>`_. * Improved CMake Python detection on multi-architecture Linux. `#532 <https://github.com/pybind/pybind11/pull/532>`_. * Infrastructure to selectively disable or enable parts of the automatically generated docstrings. `#486 <https://github.com/pybind/pybind11/pull/486>`_. * ``reference`` and ``reference_internal`` are now the default return value properties for static and non-static properties, respectively. `#473 <https://github.com/pybind/pybind11/pull/473>`_. (the previous defaults were ``automatic``). `#473 <https://github.com/pybind/pybind11/pull/473>`_. * Support for ``std::unique_ptr`` with non-default deleters or no deleter at all (``py::nodelete``). `#384 <https://github.com/pybind/pybind11/pull/384>`_. * Deprecated ``handle::call()`` method. The new syntax to call Python functions is simply ``handle()``. It can also be invoked explicitly via ``handle::operator<X>()``, where ``X`` is an optional return value policy. * Print more informative error messages when ``make_tuple()`` or ``cast()`` fail. `#262 <https://github.com/pybind/pybind11/pull/262>`_. * Creation of holder types for classes deriving from ``std::enable_shared_from_this<>`` now also works for ``const`` values. `#260 <https://github.com/pybind/pybind11/pull/260>`_. * ``make_iterator()`` improvements for better compatibility with various types (now uses prefix increment operator); it now also accepts iterators with different begin/end types as long as they are equality comparable. `#247 <https://github.com/pybind/pybind11/pull/247>`_. * ``arg()`` now accepts a wider range of argument types for default values. `#244 <https://github.com/pybind/pybind11/pull/244>`_. * Support ``keep_alive`` where the nurse object may be ``None``. `#341 <https://github.com/pybind/pybind11/pull/341>`_. * Added constructors for ``str`` and ``bytes`` from zero-terminated char pointers, and from char pointers and length. Added constructors for ``str`` from ``bytes`` and for ``bytes`` from ``str``, which will perform UTF-8 decoding/encoding as required. * Many other improvements of library internals without user-visible changes 1.8.1 (July 12, 2016) ---------------------- * Fixed a rare but potentially very severe issue when the garbage collector ran during pybind11 type creation. 1.8.0 (June 14, 2016) ---------------------- * Redesigned CMake build system which exports a convenient ``pybind11_add_module`` function to parent projects. * ``std::vector<>`` type bindings analogous to Boost.Python's ``indexing_suite`` * Transparent conversion of sparse and dense Eigen matrices and vectors (``eigen.h``) * Added an ``ExtraFlags`` template argument to the NumPy ``array_t<>`` wrapper to disable an enforced cast that may lose precision, e.g. to create overloads for different precisions and complex vs real-valued matrices. 
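  A sketch of what the flag means from the calling side (the module ``example`` and the functions ``mean_any``/``mean_exact`` are made-up names, assuming one overload bound with the default ``py::array::forcecast`` behaviour and one bound as ``py::array_t<float, py::array::c_style>`` with the enforced cast disabled):

  .. code-block:: python

      import numpy as np
      import example  # hypothetical compiled extension

      x64 = np.arange(4, dtype=np.float64)
      example.mean_any(x64)    # default flags: input silently downcast to float32
      example.mean_exact(x64)  # forcecast disabled: no lossy cast is made, the call raises TypeError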
* Prevent implicit conversion of floating point values to integral types in function arguments * Fixed incorrect default return value policy for functions returning a shared pointer * Don't allow registering a type via ``class_`` twice * Don't allow casting a ``None`` value into a C++ lvalue reference * Fixed a crash in ``enum_::operator==`` that was triggered by the ``help()`` command * Improved detection of whether or not custom C++ types can be copy/move-constructed * Extended ``str`` type to also work with ``bytes`` instances * Added a ``"name"_a`` user defined string literal that is equivalent to ``py::arg("name")``. * When specifying function arguments via ``py::arg``, the test that verifies the number of arguments now runs at compile time. * Added ``[[noreturn]]`` attribute to ``pybind11_fail()`` to quench some compiler warnings * List function arguments in exception text when the dispatch code cannot find a matching overload * Added ``PYBIND11_OVERLOAD_NAME`` and ``PYBIND11_OVERLOAD_PURE_NAME`` macros which can be used to override virtual methods whose name differs in C++ and Python (e.g. ``__call__`` and ``operator()``) * Various minor ``iterator`` and ``make_iterator()`` improvements * Transparently support ``__bool__`` on Python 2.x and Python 3.x * Fixed issue with destructor of unpickled object not being called * Minor CMake build system improvements on Windows * New ``pybind11::args`` and ``pybind11::kwargs`` types to create functions which take an arbitrary number of arguments and keyword arguments * New syntax to call a Python function from C++ using ``*args`` and ``*kwargs`` * The functions ``def_property_*`` now correctly process docstring arguments (these formerly caused a segmentation fault) * Many ``mkdoc.py`` improvements (enumerations, template arguments, ``DOC()`` macro accepts more arguments) * Cygwin support * Documentation improvements (pickling support, ``keep_alive``, macro usage) 1.7 (April 30, 2016) ---------------------- * Added a new ``move`` return value policy that triggers C++11 move semantics. 
The automatic return value policy falls back to this case whenever an rvalue reference is encountered * Significantly more general GIL state routines that are used instead of Python's troublesome ``PyGILState_Ensure`` and ``PyGILState_Release`` API * Redesign of opaque types that drastically simplifies their usage * Extended ability to pass values of type ``[const] void *`` * ``keep_alive`` fix: don't fail when there is no patient * ``functional.h``: acquire the GIL before calling a Python function * Added Python RAII type wrappers ``none`` and ``iterable`` * Added ``*args`` and ``*kwargs`` pass-through parameters to ``pybind11.get_include()`` function * Iterator improvements and fixes * Documentation on return value policies and opaque types improved 1.6 (April 30, 2016) ---------------------- * Skipped due to upload to PyPI gone wrong and inability to recover (https://github.com/pypa/packaging-problems/issues/74) 1.5 (April 21, 2016) ---------------------- * For polymorphic types, use RTTI to try to return the closest type registered with pybind11 * Pickling support for serializing and unserializing C++ instances to a byte stream in Python * Added a convenience routine ``make_iterator()`` which turns a range indicated by a pair of C++ iterators into an iterable Python object * Added ``len()`` and a variadic ``make_tuple()`` function * Addressed a rare issue that could confuse the current virtual function dispatcher and another that could lead to crashes in multi-threaded applications * Added a ``get_include()`` function to the Python module that returns the path of the directory containing the installed pybind11 header files * Documentation improvements: import issues, symbol visibility, pickling, limitations * Added casting support for ``std::reference_wrapper<>`` 1.4 (April 7, 2016) -------------------------- * Transparent type conversion for ``std::wstring`` and ``wchar_t`` * Allow passing ``nullptr``-valued strings * Transparent passing of ``void *`` pointers using capsules * Transparent support for returning values wrapped in ``std::unique_ptr<>`` * Improved docstring generation for compatibility with Sphinx * Nicer debug error message when default parameter construction fails * Support for "opaque" types that bypass the transparent conversion layer for STL containers * Redesigned type casting interface to avoid ambiguities that could occasionally cause compiler errors * Redesigned property implementation; fixes crashes due to an unfortunate default return value policy * Anaconda package generation support 1.3 (March 8, 2016) -------------------------- * Added support for the Intel C++ compiler (v15+) * Added support for the STL unordered set/map data structures * Added support for the STL linked list data structure * NumPy-style broadcasting support in ``pybind11::vectorize`` * pybind11 now displays more verbose error messages when ``arg::operator=()`` fails * pybind11 internal data structures now live in a version-dependent namespace to avoid ABI issues * Many, many bugfixes involving corner cases and advanced usage 1.2 (February 7, 2016) -------------------------- * Optional: efficient generation of function signatures at compile time using C++14 * Switched to a simpler and more general way of dealing with function default arguments. 
Unused keyword arguments in function calls are now detected and cause errors as expected * New ``keep_alive`` call policy analogous to Boost.Python's ``with_custodian_and_ward`` * New ``pybind11::base<>`` attribute to indicate a subclass relationship * Improved interface for RAII type wrappers in ``pytypes.h`` * Use RAII type wrappers consistently within pybind11 itself. This fixes various potential refcount leaks when exceptions occur * Added new ``bytes`` RAII type wrapper (maps to ``string`` in Python 2.7) * Made handle and related RAII classes const correct, using them more consistently everywhere now * Got rid of the ugly ``__pybind11__`` attributes on the Python side---they are now stored in a C++ hash table that is not visible in Python * Fixed refcount leaks involving NumPy arrays and bound functions * Vastly improved handling of shared/smart pointers * Removed an unnecessary copy operation in ``pybind11::vectorize`` * Fixed naming clashes when both pybind11 and NumPy headers are included * Added conversions for additional exception types * Documentation improvements (using multiple extension modules, smart pointers, other minor clarifications) * unified infrastructure for parsing variadic arguments in ``class_`` and cpp_function * Fixed license text (was: ZLIB, should have been: 3-clause BSD) * Python 3.2 compatibility * Fixed remaining issues when accessing types in another plugin module * Added enum comparison and casting methods * Improved SFINAE-based detection of whether types are copy-constructible * Eliminated many warnings about unused variables and the use of ``offsetof()`` * Support for ``std::array<>`` conversions 1.1 (December 7, 2015) -------------------------- * Documentation improvements (GIL, wrapping functions, casting, fixed many typos) * Generalized conversion of integer types * Improved support for casting function objects * Improved support for ``std::shared_ptr<>`` conversions * Initial support for ``std::set<>`` conversions * Fixed type resolution issue for types defined in a separate plugin module * Cmake build system improvements * Factored out generic functionality to non-templated code (smaller code size) * Added a code size / compile time benchmark vs Boost.Python * Added an appveyor CI script 1.0 (October 15, 2015) ------------------------ * Initial release
PypiClean
/polygon_finance-1.0.0-py3-none-any.whl/sdks/webull_sdk/derivative_query.py
class QueryDerivatives: def __init__(self, option_data): self.open = option_data.get('open') self.high = option_data.get('high') self.low = option_data.get('low') self.strikePrice = option_data.get('strikePrice') self.isStdSettle = option_data.get('isStdSettle') self.preClose = option_data.get('preClose') self.openInterest = option_data.get('openInterest') self.volume = option_data.get('volume') self.latestPriceVol = option_data.get('latestPriceVol') self.delta = option_data.get('delta') self.vega = option_data.get('vega') self.impVol = option_data.get('impVol') self.gamma = option_data.get('gamma') self.theta = option_data.get('theta') self.rho = option_data.get('rho') self.close = option_data.get('close') self.change = option_data.get('change') self.changeRatio = option_data.get('changeRatio') self.expireDate = option_data.get('expireDate') self.tickerId = option_data.get('tickerId') self.belongTickerId = option_data.get('belongTickerId') self.openIntChange = option_data.get('openIntChange') self.activeLevel = option_data.get('activeLevel') self.cycle = option_data.get('cycle') self.weekly = option_data.get('weekly') self.executionType = option_data.get('executionType') self.direction = option_data.get('direction') self.derivativeStatus = option_data.get('derivativeStatus') self.currencyId = option_data.get('currencyId') self.regionId = option_data.get('regionId') self.exchangeId = option_data.get('exchangeId') self.symbol = option_data.get('symbol') self.unSymbol = option_data.get('unSymbol') self.askList = option_data.get('askList') self.bidList = option_data.get('bidList') self.quoteMultiplier = option_data.get('quoteMultiplier') self.quoteLotSize = option_data.get('quoteLotSize') self.tradeTime = option_data.get('tradeTime') self.timeZone = option_data.get('timeZone') self.tzName = option_data.get('tzName') self.tradeStatus = option_data.get('tradeStatus') self.tradeStamp = option_data.get('tradeStamp')
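
# --- Illustrative usage sketch (not part of the original module) ---
# QueryDerivatives simply mirrors the keys of the raw option payload as
# attributes; the payload below is a made-up, truncated example of such data.
if __name__ == "__main__":
    sample_payload = {
        "symbol": "AAPL230616C00150000",
        "strikePrice": "150.0",
        "openInterest": 1200,
        "delta": "0.42",
    }
    quote = QueryDerivatives(sample_payload)
    print(quote.symbol, quote.strikePrice, quote.delta)
    print(quote.vega)  # keys absent from the payload come back as None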
PypiClean
/stock_core-0.4.6.tar.gz/stock_core-0.4.6/src/stock_core/nfscrapy/nfs/spiders/c104.py
import time from scrapy.selector import Selector from nfs import items from nfs.spiders import common # cmd usage : scrapy crawl c104_aq -a code=005930 # 총 8페이지를 스크랩하는 각각의 클래스 존재함. class C104Base(common.C103C104Base): # 개별 C104클래스가 상속하는 기반클래스 basename = 'c104' def parse_c104(self, response, code): # html에서 table을 추출하여 dataframe생성 self.driver.get(response.url) time.sleep(self.WAIT) html = Selector(text=self.driver.page_source) table_xpath = '//table[@class="gHead01 all-width data-list"]' # 테이블명을 _을 기준으로 나눠 리스트를 만든다. title_list = self.title.split('_') self.logger.debug(title_list) # dataframe 리스트를 만든다. df_list = [] for i in range(2): # 상위테이블 0, 하위테이블 1 df_list.append(common.C103C104Base.get_df_from_html(html, table_xpath, i)) self.logger.debug(df_list) # 테이블명리스트와 df리스트를 매치하여 데이터베이스에 저장하기 위해 yield시킴 for title, df in list(zip(title_list, df_list)): # df를 log로 출력한다. self.logger.info(title) self.logger.debug(df) # make item to yield item = items.C104items() item['코드'] = code item['title'] = title item['df'] = df yield item ''' # XPATH 상수 수익성 = '//*[ @id="val_tab1"]' 성장성 = '//*[ @id="val_tab2"]' 안정성 = '//*[ @id="val_tab3"]' 활동성 = '//*[ @id="val_tab4"]' 연간 = '//*[@id="frqTyp0"]' 분기 = '//*[@id="frqTyp1"]' 검색 = '//*[@id="hfinGubun"]' 가치분석연간 = '//*[@id="frqTyp0_2"]' 가치분석분기 = '//*[@id="frqTyp1_2"]' 가치분석검색 = '//*[@id="hfinGubun2"]' ''' class C104AQ(C104Base): name = 'c104_aq' def __init__(self, code): super().__init__(code, title='수익성q_가치분석q') def click_buttons(self): buttons = [ ('수익성', '//*[ @id="val_tab1"]'), ('분기', '//*[@id="frqTyp1"]'), ('검색', '//*[@id="hfinGubun"]'), ('가치분석분기', '//*[@id="frqTyp1_2"]'), ('가치분석검색', '//*[@id="hfinGubun2"]'), ] super().click_buttons(buttons) class C104BQ(C104Base): name = 'c104_bq' def __init__(self, code): super().__init__(code, title='성장성q') def click_buttons(self): buttons = [ ('성장성', '//*[ @id="val_tab2"]'), ('분기', '//*[@id="frqTyp1"]'), ('검색', '//*[@id="hfinGubun"]'), ] super().click_buttons(buttons) class C104CQ(C104Base): name = 'c104_cq' def __init__(self, code): super().__init__(code, title='안정성q') def click_buttons(self): buttons = [ ('안정성', '//*[ @id="val_tab3"]'), ('분기', '//*[@id="frqTyp1"]'), ('검색', '//*[@id="hfinGubun"]'), ] super().click_buttons(buttons) class C104DQ(C104Base): name = 'c104_dq' def __init__(self, code): super().__init__(code, title='활동성q') def click_buttons(self): buttons = [ ('활동성', '//*[ @id="val_tab4"]'), ('분기', '//*[@id="frqTyp1"]'), ('검색', '//*[@id="hfinGubun"]'), ] super().click_buttons(buttons) class C104AY(C104Base): name = 'c104_ay' def __init__(self, code): super().__init__(code, title='수익성y_가치분석y') def click_buttons(self): buttons = [ ('수익성', '//*[ @id="val_tab1"]'), ('연간', '//*[@id="frqTyp0"]'), ('검색', '//*[@id="hfinGubun"]'), ('가치분석연간', '//*[@id="frqTyp0_2"]'), ('가치분석검색', '//*[@id="hfinGubun2"]'), ] super().click_buttons(buttons) class C104BY(C104Base): name = 'c104_by' def __init__(self, code): super().__init__(code, title='성장성y') def click_buttons(self): buttons = [ ('성장성', '//*[ @id="val_tab2"]'), ('연간', '//*[@id="frqTyp0"]'), ('검색', '//*[@id="hfinGubun"]'), ] super().click_buttons(buttons) class C104CY(C104Base): name = 'c104_cy' def __init__(self, code): super().__init__(code, title='안정성y') def click_buttons(self): buttons = [ ('안정성', '//*[ @id="val_tab3"]'), ('연간', '//*[@id="frqTyp0"]'), ('검색', '//*[@id="hfinGubun"]'), ] super().click_buttons(buttons) class C104DY(C104Base): name = 'c104_dy' def __init__(self, code): super().__init__(code, title='활동성y') def click_buttons(self): buttons = [ ('활동성', '//*[ 
@id="val_tab4"]'), ('연간', '//*[@id="frqTyp0"]'), ('검색', '//*[@id="hfinGubun"]'), ] super().click_buttons(buttons)
PypiClean
/cacheless-airflow-1.9.0.tar.gz/cacheless-airflow-1.9.0/airflow/contrib/auth/backends/ldap_auth.py
from future.utils import native import flask_login from flask_login import login_required, current_user, logout_user from flask import flash from wtforms import ( Form, PasswordField, StringField) from wtforms.validators import InputRequired from ldap3 import Server, Connection, Tls, LEVEL, SUBTREE, BASE import ssl from flask import url_for, redirect from airflow import settings from airflow import models from airflow import configuration from airflow.configuration import AirflowConfigException import traceback import re from airflow.utils.log.logging_mixin import LoggingMixin login_manager = flask_login.LoginManager() login_manager.login_view = 'airflow.login' # Calls login() below login_manager.login_message = None log = LoggingMixin().log class AuthenticationError(Exception): pass class LdapException(Exception): pass def get_ldap_connection(dn=None, password=None): tls_configuration = None use_ssl = False try: cacert = configuration.get("ldap", "cacert") tls_configuration = Tls(validate=ssl.CERT_REQUIRED, ca_certs_file=cacert) use_ssl = True except: pass server = Server(configuration.get("ldap", "uri"), use_ssl, tls_configuration) conn = Connection(server, native(dn), native(password)) if not conn.bind(): log.error("Cannot bind to ldap server: %s ", conn.last_error) raise AuthenticationError("Cannot bind to ldap server") return conn def group_contains_user(conn, search_base, group_filter, user_name_attr, username): search_filter = '(&({0}))'.format(group_filter) if not conn.search(native(search_base), native(search_filter), attributes=[native(user_name_attr)]): log.warning("Unable to find group for %s %s", search_base, search_filter) else: for entry in conn.entries: if username in getattr(entry, user_name_attr).values: return True return False def groups_user(conn, search_base, user_filter, user_name_att, username): search_filter = "(&({0})({1}={2}))".format(user_filter, user_name_att, username) try: memberof_attr = configuration.get("ldap", "group_member_attr") except: memberof_attr = "memberOf" res = conn.search(native(search_base), native(search_filter), attributes=[native(memberof_attr)]) if not res: log.info("Cannot find user %s", username) raise AuthenticationError("Invalid username or password") if conn.response and memberof_attr not in conn.response[0]["attributes"]: log.warning("""Missing attribute "%s" when looked-up in Ldap database. The user does not seem to be a member of a group and therefore won't see any dag if the option filter_by_owner=True and owner_mode=ldapgroup are set""", memberof_attr) return [] user_groups = conn.response[0]["attributes"][memberof_attr] regex = re.compile("cn=([^,]*).*", re.IGNORECASE) groups_list = [] try: groups_list = [regex.search(i).group(1) for i in user_groups] except IndexError: log.warning("Parsing error when retrieving the user's group(s)." " Check if the user belongs to at least one group" " or if the user's groups name do not contain special characters") return groups_list class LdapUser(models.User): def __init__(self, user): self.user = user self.ldap_groups = [] # Load and cache superuser and data_profiler settings. conn = get_ldap_connection(configuration.get("ldap", "bind_user"), configuration.get("ldap", "bind_password")) superuser_filter = None data_profiler_filter = None try: superuser_filter = configuration.get("ldap", "superuser_filter") except AirflowConfigException: pass if not superuser_filter: self.superuser = True log.debug("Missing configuration for superuser settings or empty. 
Skipping.") else: self.superuser = group_contains_user(conn, configuration.get("ldap", "basedn"), superuser_filter, configuration.get("ldap", "user_name_attr"), user.username) try: data_profiler_filter = configuration.get("ldap", "data_profiler_filter") except AirflowConfigException: pass if not data_profiler_filter: self.data_profiler = True log.debug("Missing configuration for data profiler settings or empty. " "Skipping.") else: self.data_profiler = group_contains_user(conn, configuration.get("ldap", "basedn"), data_profiler_filter, configuration.get("ldap", "user_name_attr"), user.username) # Load the ldap group(s) a user belongs to try: self.ldap_groups = groups_user(conn, configuration.get("ldap", "basedn"), configuration.get("ldap", "user_filter"), configuration.get("ldap", "user_name_attr"), user.username) except AirflowConfigException: log.debug("Missing configuration for ldap settings. Skipping") @staticmethod def try_login(username, password): conn = get_ldap_connection(configuration.get("ldap", "bind_user"), configuration.get("ldap", "bind_password")) search_filter = "(&({0})({1}={2}))".format( configuration.get("ldap", "user_filter"), configuration.get("ldap", "user_name_attr"), username ) search_scopes = { "LEVEL": LEVEL, "SUBTREE": SUBTREE, "BASE": BASE } search_scope = LEVEL if configuration.has_option("ldap", "search_scope"): search_scope = SUBTREE if configuration.get("ldap", "search_scope") == "SUBTREE" else LEVEL # todo: BASE or ONELEVEL? res = conn.search(native(configuration.get("ldap", "basedn")), native(search_filter), search_scope=native(search_scope)) # todo: use list or result? if not res: log.info("Cannot find user %s", username) raise AuthenticationError("Invalid username or password") entry = conn.response[0] conn.unbind() if 'dn' not in entry: # The search filter for the user did not return any values, so an # invalid user was used for credentials. raise AuthenticationError("Invalid username or password") try: conn = get_ldap_connection(entry['dn'], password) except KeyError as e: log.error(""" Unable to parse LDAP structure. If you're using Active Directory and not specifying an OU, you must set search_scope=SUBTREE in airflow.cfg. %s """ % traceback.format_exc()) raise LdapException("Could not parse LDAP structure. 
Try setting search_scope in airflow.cfg, or check logs") if not conn: log.info("Password incorrect for user %s", username) raise AuthenticationError("Invalid username or password") def is_active(self): '''Required by flask_login''' return True def is_authenticated(self): '''Required by flask_login''' return True def is_anonymous(self): '''Required by flask_login''' return False def get_id(self): '''Returns the current user id as required by flask_login''' return self.user.get_id() def data_profiling(self): '''Provides access to data profiling tools''' return self.data_profiler def is_superuser(self): '''Access all the things''' return self.superuser @login_manager.user_loader def load_user(userid): log.debug("Loading user %s", userid) if not userid or userid == 'None': return None session = settings.Session() user = session.query(models.User).filter(models.User.id == int(userid)).first() session.expunge_all() session.commit() session.close() return LdapUser(user) def login(self, request): if current_user.is_authenticated(): flash("You are already logged in") return redirect(url_for('admin.index')) username = None password = None form = LoginForm(request.form) if request.method == 'POST' and form.validate(): username = request.form.get("username") password = request.form.get("password") if not username or not password: return self.render('airflow/login.html', title="Airflow - Login", form=form) try: LdapUser.try_login(username, password) log.info("User %s successfully authenticated", username) session = settings.Session() user = session.query(models.User).filter( models.User.username == username).first() if not user: user = models.User( username=username, is_superuser=False) session.merge(user) session.commit() flask_login.login_user(LdapUser(user)) session.commit() session.close() return redirect(request.args.get("next") or url_for("admin.index")) except (LdapException, AuthenticationError) as e: if type(e) == LdapException: flash(e, "error") else: flash("Incorrect login details") return self.render('airflow/login.html', title="Airflow - Login", form=form) class LoginForm(Form): username = StringField('Username', [InputRequired()]) password = PasswordField('Password', [InputRequired()])
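
# --- Illustrative usage sketch (not part of the original module) ---
# LdapUser.try_login() raises on failure and returns nothing on success, so a
# thin wrapper like this can be used to probe credentials. It assumes the
# [ldap] section of airflow.cfg (uri, basedn, bind_user, bind_password,
# user_filter, user_name_attr) is configured.
def ldap_credentials_valid(username, password):
    try:
        LdapUser.try_login(username, password)
        return True
    except (AuthenticationError, LdapException):
        return False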
PypiClean
/bolton-eris-0.2.3.tar.gz/bolton-eris-0.2.3/CHANGELOG.md
# Changelog for `eris` All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog], and this project adheres to [Semantic Versioning]. [Keep a Changelog]: https://keepachangelog.com/en/1.0.0/ [Semantic Versioning]: https://semver.org/ ## [Unreleased](https://github.com/python-boltons/eris/compare/0.2.3...HEAD) No notable changes have been made. ## [0.2.3](https://github.com/python-boltons/eris/compare/0.2.2...0.2.3) - 2022-01-13 ### Added * Add the `eris.ErisResult` type. ## [0.2.2](https://github.com/python-boltons/eris/compare/0.2.1...0.2.2) - 2021-12-24 ### Changed * Represent all `ExcInfo` objects as dictionaries. * Make `AbstractResult` part of the public API. ## [0.2.1](https://github.com/python-boltons/eris/compare/0.2.0...0.2.1) - 2021-12-23 ### Changed * The `Err` `dataclass` now accepts an optional `up` kwarg. ## [0.2.0](https://github.com/python-boltons/eris/compare/0.1.2...0.2.0) - 2021-12-23 ### Changed * Allow custom sub-types of `ErisError` to be used with the `Result` type. * *BREAKING CHANGE*: The `Err` type is now a generic with two type variables. ## [0.1.2](https://github.com/python-boltons/eris/compare/0.1.1...0.1.2) - 2021-12-23 ### Changed * Add `eris.ErisErrorChain` and `eris.ErisErrorDict` to the public API. ### Removed * Remove the `eris.chain_errors()` public API function. ## [0.1.1](https://github.com/python-boltons/eris/compare/0.1.0...0.1.1) - 2021-12-22 ### Changed * Rename `Error` to `ErisError`. ## [0.1.0](https://github.com/python-boltons/eris/releases/tag/0.1.0) - 2021-12-22 ### Miscellaneous * Port the `eris` library from the (original) `bugyi-lib` library. * First release.
PypiClean
/arcor2_arserver-1.1.0.tar.gz/arcor2_arserver-1.1.0/arcor2_arserver/rpc/objects.py
import asyncio from dataclasses import dataclass, field from typing import NamedTuple from websockets.server import WebSocketServerProtocol as WsClient from arcor2 import helpers as hlp from arcor2.cached import CachedProject, CachedScene from arcor2.clients import aio_scene_service as scene_srv from arcor2.data import events, rpc from arcor2.data.common import Parameter, Pose, Position, SceneObject from arcor2.data.object_type import Model3dType from arcor2.data.scene import MeshFocusAction from arcor2.exceptions import Arcor2Exception from arcor2.object_types.abstract import CollisionObject, GenericWithPose, Robot from arcor2.source.utils import tree_to_str from arcor2_arserver import globals as glob from arcor2_arserver import logger from arcor2_arserver import notifications as notif from arcor2_arserver import settings from arcor2_arserver.clients import project_service as storage from arcor2_arserver.helpers import ctx_write_lock, ensure_write_locked from arcor2_arserver.object_types.data import ObjectTypeData from arcor2_arserver.object_types.source import new_object_type from arcor2_arserver.object_types.utils import add_ancestor_actions, object_actions, remove_object_type from arcor2_arserver.objects_actions import update_object_model from arcor2_arserver.robot import check_eef_arm, get_end_effector_pose from arcor2_arserver.scene import ( can_modify_scene, ensure_scene_started, get_instance, get_robot_instance, scenes, update_scene_object_pose, ) from arcor2_arserver_data import events as sevts from arcor2_arserver_data import rpc as srpc @dataclass class AimedObject: obj_id: str robot: rpc.common.RobotArg poses: dict[int, Pose] = field(default_factory=dict) _objects_being_aimed: dict[str, AimedObject] = {} # key == user_name async def object_aiming_start_cb(req: srpc.o.ObjectAimingStart.Request, ui: WsClient) -> None: """Starts the aiming process for a selected object (with mesh) and robot. Only possible when the scene is started/online. UI have to acquire write locks for object and robot in advance. :param req: :param ui: :return: """ scene = glob.LOCK.scene_or_exception() if glob.LOCK.project: raise Arcor2Exception("Project has to be closed first.") ensure_scene_started() user_name = glob.USERS.user_name(ui) if user_name in _objects_being_aimed: raise Arcor2Exception("Aiming already started.") obj_id = req.args.object_id scene_obj = scene.object(obj_id) obj_type = glob.OBJECT_TYPES[scene_obj.type].meta if not obj_type.has_pose: raise Arcor2Exception("Only available for objects with pose.") if not obj_type.object_model or obj_type.object_model.type != Model3dType.MESH: raise Arcor2Exception("Only available for objects with mesh model.") assert obj_type.object_model.mesh focus_points = obj_type.object_model.mesh.focus_points if not focus_points: raise Arcor2Exception("focusPoints not defined for the mesh.") await ensure_write_locked(req.args.object_id, user_name) await ensure_write_locked(req.args.robot.robot_id, user_name) await check_eef_arm(get_robot_instance(req.args.robot.robot_id), req.args.robot.arm_id, req.args.robot.end_effector) if req.dry_run: return _objects_being_aimed[user_name] = AimedObject(req.args.object_id, req.args.robot) logger.info( f"{user_name} just started aiming of {scene_obj.name} using {scene.object(req.args.robot.robot_id).name}." ) class AimingTuple(NamedTuple): obj: AimedObject user_name: str async def object_aiming_prune() -> None: """Deletes records for users that already lost their locks. 
:return: """ to_delete: list[str] = [] # users in db but not holding a lock for the object should be deleted for un, fo in _objects_being_aimed.items(): if not await glob.LOCK.is_write_locked(fo.obj_id, un): logger.info(f"Object aiming cancelled for {un}.") to_delete.append(un) for td in to_delete: _objects_being_aimed.pop(td, None) async def object_aiming_check(ui: WsClient) -> AimingTuple: """Gets object that is being aimed by the user or exception. :param ui: :return: """ user_name = glob.USERS.user_name(ui) try: fo = _objects_being_aimed[user_name] except KeyError: raise Arcor2Exception("Aiming has to be started first.") await ensure_write_locked(fo.obj_id, user_name) await ensure_write_locked(fo.robot.robot_id, user_name) return AimingTuple(fo, user_name) async def object_aiming_cancel_cb(req: srpc.o.ObjectAimingCancel.Request, ui: WsClient) -> None: """Cancel aiming of the object. :param req: :param ui: :return: """ fo, user_name = await object_aiming_check(ui) if req.dry_run: return _objects_being_aimed.pop(user_name, None) if glob.LOCK.scene: logger.info(f"Aiming for {glob.LOCK.scene.object(fo.obj_id).name} cancelled by {user_name}.") async def object_aiming_add_point_cb( req: srpc.o.ObjectAimingAddPoint.Request, ui: WsClient ) -> srpc.o.ObjectAimingAddPoint.Response: scene = glob.LOCK.scene_or_exception() fo, user_name = await object_aiming_check(ui) pt_idx = req.args.point_idx scene_obj = scene.object(fo.obj_id) obj_type = glob.OBJECT_TYPES[scene_obj.type].meta assert obj_type.has_pose assert obj_type.object_model assert obj_type.object_model.mesh focus_points = obj_type.object_model.mesh.focus_points assert focus_points if pt_idx < 0 or pt_idx > len(focus_points) - 1: raise Arcor2Exception("Index out of range.") robot_id, end_effector, arm_id = fo.robot.as_tuple() robot_inst = get_robot_instance(robot_id) r = srpc.o.ObjectAimingAddPoint.Response() r.data = r.Data(finished_indexes=list(fo.poses.keys())) if not req.dry_run: fo.poses[pt_idx] = await get_end_effector_pose(robot_inst, end_effector, arm_id) r.data = r.Data(finished_indexes=list(fo.poses.keys())) logger.info( f"{user_name} just aimed index {pt_idx} for {scene_obj.name}. Done indexes: {r.data.finished_indexes}." ) return r async def object_aiming_done_cb(req: srpc.o.ObjectAimingDone.Request, ui: WsClient) -> None: """Calls scene service to get a new pose for the object. In case of success, robot and object are kept locked, unlocking is responsibility of ui. On failure, UI may do another attempt or call ObjectAimingCancel. :param req: :param ui: :return: """ scene = glob.LOCK.scene_or_exception() fo, user_name = await object_aiming_check(ui) obj_type = glob.OBJECT_TYPES[scene.object(fo.obj_id).type].meta assert obj_type.object_model assert obj_type.object_model.mesh focus_points = obj_type.object_model.mesh.focus_points assert focus_points if len(fo.poses) < len(focus_points): raise Arcor2Exception(f"Only {len(fo.poses)} points were done out of {len(focus_points)}.") obj = scene.object(fo.obj_id) assert obj.pose obj_inst = get_instance(fo.obj_id, CollisionObject) if req.dry_run: return fp: list[Position] = [] rp: list[Position] = [] for idx, pose in fo.poses.items(): fp.append(focus_points[idx].position) rp.append(pose.position) mfa = MeshFocusAction(fp, rp) logger.debug(f"Attempt to aim object {obj_inst.name}, data: {mfa}") try: new_pose = await scene_srv.focus(mfa) # TODO how long does it take? 
except scene_srv.SceneServiceException as e: logger.error(f"Aiming failed with: {e}, mfa: {mfa}.") raise Arcor2Exception(f"Aiming failed. {str(e)}") from e logger.info(f"Done aiming for {obj_inst.name}.") _objects_being_aimed.pop(user_name, None) asyncio.create_task(update_scene_object_pose(scene, obj, new_pose, obj_inst)) return None async def new_object_type_cb(req: srpc.o.NewObjectType.Request, ui: WsClient) -> None: async with ctx_write_lock(glob.LOCK.SpecialValues.ADDING_OBJECT, glob.USERS.user_name(ui)): meta = req.args if meta.type in glob.OBJECT_TYPES: raise Arcor2Exception("Object type already exists.") hlp.is_valid_type(meta.type) if meta.base not in glob.OBJECT_TYPES: raise Arcor2Exception( f"Unknown base object type '{meta.base}', " f"known types are: {', '.join(glob.OBJECT_TYPES.keys())}." ) base = glob.OBJECT_TYPES[meta.base] if base.meta.disabled: raise Arcor2Exception("Base object is disabled.") assert base.type_def is not None if issubclass(base.type_def, Robot): raise Arcor2Exception("Can't subclass Robot.") meta.has_pose = issubclass(base.type_def, GenericWithPose) if issubclass(base.type_def, CollisionObject): if not meta.object_model: raise Arcor2Exception("Objects based on CollisionObject must have collision model.") else: if meta.object_model: raise Arcor2Exception("Only objects based on CollisionObject can have collision model.") if req.dry_run: return None obj = meta.to_object_type() ast = new_object_type(glob.OBJECT_TYPES[meta.base].meta, meta) obj.source = tree_to_str(ast) if meta.object_model: await update_object_model(meta, meta.object_model) type_def = await hlp.run_in_executor( hlp.save_and_import_type_def, obj.source, obj.id, base.type_def, settings.OBJECT_TYPE_PATH, settings.OBJECT_TYPE_MODULE, ) assert issubclass(type_def, base.type_def) actions = object_actions(type_def, ast) meta.modified = await storage.update_object_type(obj) glob.OBJECT_TYPES[meta.type] = ObjectTypeData(meta, type_def, actions, ast) add_ancestor_actions(meta.type, glob.OBJECT_TYPES) evt = sevts.o.ChangedObjectTypes([meta]) evt.change_type = events.Event.Type.ADD asyncio.ensure_future(notif.broadcast_event(evt)) return None async def update_object_model_cb(req: srpc.o.UpdateObjectModel.Request, ui: WsClient) -> None: can_modify_scene() glob.LOCK.scene_or_exception(True) # only allow while editing scene obj_data = glob.OBJECT_TYPES[req.args.object_type_id] if not obj_data.type_def: raise Arcor2Exception("ObjectType disabled.") if not issubclass(obj_data.type_def, CollisionObject): raise Arcor2Exception("Not a CollisionObject.") assert obj_data.meta.object_model assert obj_data.ast if req.args.object_model == obj_data.meta.object_model: raise Arcor2Exception("No change requested.") await ensure_write_locked(req.args.object_type_id, glob.USERS.user_name(ui)) if req.dry_run: return await update_object_model(obj_data.meta, req.args.object_model) obj_data.meta.object_model = req.args.object_model ot = obj_data.meta.to_object_type() ot.source = tree_to_str(obj_data.ast) obj_data.meta.modified = await storage.update_object_type(ot) evt = sevts.o.ChangedObjectTypes([obj_data.meta]) evt.change_type = events.Event.Type.UPDATE asyncio.ensure_future(notif.broadcast_event(evt)) async def get_object_actions_cb(req: srpc.o.GetActions.Request, ui: WsClient) -> srpc.o.GetActions.Response: return srpc.o.GetActions.Response(data=list(glob.OBJECT_TYPES[req.args.type].actions.values())) async def get_object_types_cb(req: srpc.o.GetObjectTypes.Request, ui: WsClient) -> srpc.o.GetObjectTypes.Response: return 
srpc.o.GetObjectTypes.Response(data=[obj.meta for obj in glob.OBJECT_TYPES.values()]) def check_scene_for_object_type(scene: CachedScene, object_type: str) -> None: if object_type in scene.object_types: raise Arcor2Exception(f"Used in scene {scene.name}.") async def delete_object_type_cb( req: srpc.o.DeleteObjectTypes.Request, ui: WsClient ) -> srpc.o.DeleteObjectTypes.Response: async def _delete_model(obj_type: ObjectTypeData) -> None: # do not care so much if delete_model fails if not obj_type.meta.object_model: return try: await storage.delete_model(obj_type.meta.object_model.model().id) except storage.ProjectServiceException as e: logger.error(str(e)) async def _delete_ot(ot: str) -> None: obj_type = glob.OBJECT_TYPES[ot] if obj_type.meta.built_in: raise Arcor2Exception("Can't delete built-in type.") for obj in glob.OBJECT_TYPES.values(): if obj.meta.base == ot: raise Arcor2Exception(f"Object type is base of '{obj.meta.type}'.") async for scene in scenes(): check_scene_for_object_type(scene, ot) if glob.LOCK.scene: check_scene_for_object_type(glob.LOCK.scene, ot) if req.dry_run: return await asyncio.gather(storage.delete_object_type(ot), _delete_model(obj_type), remove_object_type(ot)) await glob.LOCK.write_unlock(ot, user_name) # need to be unlocked while it exists in glob.OBJECT_TYPES del glob.OBJECT_TYPES[ot] evt = sevts.o.ChangedObjectTypes([obj_type.meta]) evt.change_type = events.Event.Type.REMOVE asyncio.create_task(notif.broadcast_event(evt)) user_name = glob.USERS.user_name(ui) obj_types_to_delete: list[str] = ( list(req.args) if req.args is not None else [obj.meta.type for obj in glob.OBJECT_TYPES.values() if not obj.meta.built_in] ) response = srpc.o.DeleteObjectTypes.Response() response.data = [] async with ctx_write_lock(obj_types_to_delete, user_name, auto_unlock=False, dry_run=req.dry_run): res = await asyncio.gather(*[_delete_ot(ot) for ot in obj_types_to_delete], return_exceptions=True) for idx, r in enumerate(res): if isinstance(r, Arcor2Exception): response.data.append(srpc.o.DeleteObjectTypes.Response.Data(obj_types_to_delete[idx], str(r))) else: assert r is None if not response.data: response.data = None if response.data: response.result = False response.messages = [] response.messages.append("Failed to delete one or more ObjectTypes.") return response def check_override( scene: CachedScene, project: CachedProject, obj_id: str, override: Parameter, add_new_one: bool = False ) -> SceneObject: obj = scene.object(obj_id) for par in glob.OBJECT_TYPES[obj.type].meta.settings: if par.name == override.name: if par.type != override.type: raise Arcor2Exception("Override can't change parameter type.") break else: raise Arcor2Exception("Unknown parameter name.") if add_new_one: try: for existing_override in project.overrides[obj.id]: if override.name == existing_override.name: raise Arcor2Exception("Override already exists.") except KeyError: pass else: if obj.id not in project.overrides: raise Arcor2Exception("There are no overrides for the object.") for override in project.overrides[obj.id]: if override.name == override.name: break else: raise Arcor2Exception("Override not found.") return obj async def add_override_cb(req: srpc.o.AddOverride.Request, ui: WsClient) -> None: scene = glob.LOCK.scene_or_exception() project = glob.LOCK.project_or_exception() obj = check_override(scene, project, req.args.id, req.args.override, add_new_one=True) await ensure_write_locked(req.args.id, glob.USERS.user_name(ui)) if req.dry_run: return if obj.id not in project.overrides: 
project.overrides[obj.id] = [] project.overrides[obj.id].append(req.args.override) project.update_modified() evt = sevts.o.OverrideUpdated(req.args.override) evt.change_type = events.Event.Type.ADD evt.parent_id = req.args.id asyncio.ensure_future(notif.broadcast_event(evt)) async def update_override_cb(req: srpc.o.UpdateOverride.Request, ui: WsClient) -> None: scene = glob.LOCK.scene_or_exception() project = glob.LOCK.project_or_exception() obj = check_override(scene, project, req.args.id, req.args.override) await ensure_write_locked(req.args.id, glob.USERS.user_name(ui)) if req.dry_run: return for override in project.overrides[obj.id]: if override.name == override.name: override.value = req.args.override.value project.update_modified() evt = sevts.o.OverrideUpdated(req.args.override) evt.change_type = events.Event.Type.UPDATE evt.parent_id = req.args.id asyncio.ensure_future(notif.broadcast_event(evt)) async def delete_override_cb(req: srpc.o.DeleteOverride.Request, ui: WsClient) -> None: scene = glob.LOCK.scene_or_exception() project = glob.LOCK.project_or_exception() obj = check_override(scene, project, req.args.id, req.args.override) await ensure_write_locked(req.args.id, glob.USERS.user_name(ui)) if req.dry_run: return project.overrides[obj.id] = [ov for ov in project.overrides[obj.id] if ov.name != req.args.override.name] if not project.overrides[obj.id]: del project.overrides[obj.id] project.update_modified() evt = sevts.o.OverrideUpdated(req.args.override) evt.change_type = events.Event.Type.REMOVE evt.parent_id = req.args.id asyncio.ensure_future(notif.broadcast_event(evt)) async def object_type_usage_cb(req: srpc.o.ObjectTypeUsage.Request, ui: WsClient) -> srpc.o.ObjectTypeUsage.Response: resp = srpc.o.ObjectTypeUsage.Response() resp.data = set() # mypy does not recognize it correctly with Response(data=set()) async for scene in scenes(): if req.args.id in scene.object_types: resp.data.add(scene.id) return resp
PypiClean
/tableau_tools-6.0.0.tar.gz/tableau_tools-6.0.0/tableau_tools/examples/modifying_users_immediately_before_sso.py
from typing import List

from tableau_tools import *
import datetime

# This script shows how to add or modify a user prior to SSO.
# The use case is passing properties into a user session in a secure way.
# When using Restricted (standard) Trusted Tickets, a user cannot see or modify their Full Name property,
# which allows it to be repurposed as a store of secure values set programmatically.
#
# If you have Core-based licensing, and no need to track users beyond just that momentary session, you can also
# create usernames at will and include details on the username. This could also be useful if you use SAML or
# Unrestricted trusted tickets (where the user can change the Full Name property), although you do lose out on
# schedules and customization and such due to the transient nature of those sessions.

server_url = ""
username = ""
password = ""
site_content_url = ""

t_server = TableauServerRest35(server=server_url, username=username, password=password,
                               site_content_url=site_content_url)
t_server.signin()


def update_user_with_fullname_properties(tableau_server: TableauServerRest, username: str,
                                         properties_on_fullname: List[str], delimiter: str):
    t = tableau_server
    final_fullname = delimiter.join(properties_on_fullname)
    t.users.update_user(username_or_luid=username, full_name=final_fullname)


def create_a_temporary_user(tableau_server: TableauServerRest, username: str,
                            other_properties_on_username: List[str],
                            properties_on_fullname: List[str], delimiter: str) -> str:
    t = tableau_server
    if len(other_properties_on_username) > 0:
        full_list = [username, ]
        full_list.extend(other_properties_on_username)
        final_username = delimiter.join(full_list)
    else:
        final_username = username
    final_fullname = delimiter.join(properties_on_fullname)
    new_user_luid = t.users.add_user(username=final_username, fullname=final_fullname, site_role='Viewer')

    # Because we might be storing properties on the username, return it back to whatever is then going to SSO the user
    return final_username


# Typically we don't delete users from Tableau Server, but just set them to Unlicensed.
# See delete_old_unlicensed_users for a pattern for actually deleting them based on some filter criteria
def unlicense_a_temporary_user(tableau_server: TableauServerRest, username: str):
    t = tableau_server
    t.users.unlicense_users(username_or_luid_s=username)


# This logic actually removes any unlicensed user who didn't log in today.
# Could be lengthened out for a long-term type cleanup script
def delete_old_unlicensed_users(tableau_server: TableauServerRest):
    t = tableau_server
    today = datetime.datetime.now()
    offset_time = datetime.timedelta(days=1)
    time_to_filter_by = today - offset_time

    last_login_filter = t.url_filters.get_last_login_filter('lte', time_to_filter_by)
    site_role_f = t.url_filters.get_site_role_filter('Unlicensed')
    unlicensed_users_from_before_today = t.users.query_users(last_login_filter=last_login_filter,
                                                             site_role_filter=site_role_f)
    users_dict = t.xml_list_to_dict(unlicensed_users_from_before_today)

    # users_dict is username : luid, so you just need the values but must cast to a list
    t.users.remove_users_from_site(list(users_dict.values()))
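
# To make the intended flow concrete, below is a minimal sketch of how the helpers above
# might be wired together just before handing a user off to SSO. The property values,
# delimiter and user names are placeholders, and the trusted-ticket request itself is
# outside the scope of this script, so it is only indicated by a comment.

# Hypothetical wiring of the helpers above -- all values are placeholders.
session_properties = ["region=EMEA", "role=analyst", "valid_until=2021-01-01"]
existing_user = "some_user@example.com"

# Stash the per-session properties on the Full Name field, which a Restricted
# Trusted Tickets session cannot see or modify.
update_user_with_fullname_properties(t_server, existing_user, session_properties, delimiter="|")
# ... request the trusted ticket / redirect existing_user to SSO here ...

# Alternatively, create a throwaway user for the session and clean it up afterwards.
temp_username = create_a_temporary_user(t_server, "temp_user", ["session123"], session_properties, delimiter="|")
# ... SSO temp_username, and once the session is over:
unlicense_a_temporary_user(t_server, temp_username)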
PypiClean
/cds-sorenson-0.1.8.tar.gz/cds-sorenson-0.1.8/docs/usage.rst
..
    This file is part of CERN Document Server.
    Copyright (C) 2016 CERN.

    Invenio is free software; you can redistribute it and/or modify it under
    the terms of the GNU General Public License as published by the Free
    Software Foundation; either version 2 of the License, or (at your option)
    any later version.

    Invenio is distributed in the hope that it will be useful, but WITHOUT ANY
    WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
    FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
    details.

    You should have received a copy of the GNU General Public License along
    with Invenio; if not, write to the Free Software Foundation, Inc., 59
    Temple Place, Suite 330, Boston, MA 02111-1307, USA.

    In applying this license, CERN does not waive the privileges and
    immunities granted to it by virtue of its status as an Intergovernmental
    Organization or submit itself to any jurisdiction.

Usage
=====

.. automodule:: cds_sorenson
PypiClean
/kivy-django-1.9.1.tar.gz/kivy-django-1.9.1/django/contrib/admin/static/admin/js/admin/DateTimeShortcuts.js
(function() { 'use strict'; var DateTimeShortcuts = { calendars: [], calendarInputs: [], clockInputs: [], dismissClockFunc: [], dismissCalendarFunc: [], calendarDivName1: 'calendarbox', // name of calendar <div> that gets toggled calendarDivName2: 'calendarin', // name of <div> that contains calendar calendarLinkName: 'calendarlink',// name of the link that is used to toggle clockDivName: 'clockbox', // name of clock <div> that gets toggled clockLinkName: 'clocklink', // name of the link that is used to toggle shortCutsClass: 'datetimeshortcuts', // class of the clock and cal shortcuts timezoneWarningClass: 'timezonewarning', // class of the warning for timezone mismatch timezoneOffset: 0, init: function() { var body = document.getElementsByTagName('body')[0]; var serverOffset = body.getAttribute('data-admin-utc-offset'); if (serverOffset) { var localOffset = new Date().getTimezoneOffset() * -60; DateTimeShortcuts.timezoneOffset = localOffset - serverOffset; } var inputs = document.getElementsByTagName('input'); for (var i = 0; i < inputs.length; i++) { var inp = inputs[i]; if (inp.getAttribute('type') === 'text' && inp.className.match(/vTimeField/)) { DateTimeShortcuts.addClock(inp); DateTimeShortcuts.addTimezoneWarning(inp); } else if (inp.getAttribute('type') === 'text' && inp.className.match(/vDateField/)) { DateTimeShortcuts.addCalendar(inp); DateTimeShortcuts.addTimezoneWarning(inp); } } }, // Return the current time while accounting for the server timezone. now: function() { var body = document.getElementsByTagName('body')[0]; var serverOffset = body.getAttribute('data-admin-utc-offset'); if (serverOffset) { var localNow = new Date(); var localOffset = localNow.getTimezoneOffset() * -60; localNow.setTime(localNow.getTime() + 1000 * (serverOffset - localOffset)); return localNow; } else { return new Date(); } }, // Add a warning when the time zone in the browser and backend do not match. addTimezoneWarning: function(inp) { var $ = django.jQuery; var warningClass = DateTimeShortcuts.timezoneWarningClass; var timezoneOffset = DateTimeShortcuts.timezoneOffset / 3600; // Only warn if there is a time zone mismatch. if (!timezoneOffset) { return; } // Check if warning is already there. if ($(inp).siblings('.' 
+ warningClass).length) { return; } var message; if (timezoneOffset > 0) { message = ngettext( 'Note: You are %s hour ahead of server time.', 'Note: You are %s hours ahead of server time.', timezoneOffset ); } else { timezoneOffset *= -1; message = ngettext( 'Note: You are %s hour behind server time.', 'Note: You are %s hours behind server time.', timezoneOffset ); } message = interpolate(message, [timezoneOffset]); var $warning = $('<span>'); $warning.attr('class', warningClass); $warning.text(message); $(inp).parent() .append($('<br>')) .append($warning); }, // Add clock widget to a given field addClock: function(inp) { var num = DateTimeShortcuts.clockInputs.length; DateTimeShortcuts.clockInputs[num] = inp; DateTimeShortcuts.dismissClockFunc[num] = function() { DateTimeShortcuts.dismissClock(num); return true; }; // Shortcut links (clock icon and "Now" link) var shortcuts_span = document.createElement('span'); shortcuts_span.className = DateTimeShortcuts.shortCutsClass; inp.parentNode.insertBefore(shortcuts_span, inp.nextSibling); var now_link = document.createElement('a'); now_link.setAttribute('href', "javascript:DateTimeShortcuts.handleClockQuicklink(" + num + ", -1);"); now_link.appendChild(document.createTextNode(gettext('Now'))); var clock_link = document.createElement('a'); clock_link.setAttribute('href', 'javascript:DateTimeShortcuts.openClock(' + num + ');'); clock_link.id = DateTimeShortcuts.clockLinkName + num; quickElement( 'span', clock_link, '', 'class', 'clock-icon', 'title', gettext('Choose a Time') ); shortcuts_span.appendChild(document.createTextNode('\u00A0')); shortcuts_span.appendChild(now_link); shortcuts_span.appendChild(document.createTextNode('\u00A0|\u00A0')); shortcuts_span.appendChild(clock_link); // Create clock link div // // Markup looks like: // <div id="clockbox1" class="clockbox module"> // <h2>Choose a time</h2> // <ul class="timelist"> // <li><a href="#">Now</a></li> // <li><a href="#">Midnight</a></li> // <li><a href="#">6 a.m.</a></li> // <li><a href="#">Noon</a></li> // <li><a href="#">6 p.m.</a></li> // </ul> // <p class="calendar-cancel"><a href="#">Cancel</a></p> // </div> var clock_box = document.createElement('div'); clock_box.style.display = 'none'; clock_box.style.position = 'absolute'; clock_box.className = 'clockbox module'; clock_box.setAttribute('id', DateTimeShortcuts.clockDivName + num); document.body.appendChild(clock_box); addEvent(clock_box, 'click', cancelEventPropagation); quickElement('h2', clock_box, gettext('Choose a time')); var time_list = quickElement('ul', clock_box); time_list.className = 'timelist'; quickElement("a", quickElement("li", time_list), gettext("Now"), "href", "javascript:DateTimeShortcuts.handleClockQuicklink(" + num + ", -1);"); quickElement("a", quickElement("li", time_list), gettext("Midnight"), "href", "javascript:DateTimeShortcuts.handleClockQuicklink(" + num + ", 0);"); quickElement("a", quickElement("li", time_list), gettext("6 a.m."), "href", "javascript:DateTimeShortcuts.handleClockQuicklink(" + num + ", 6);"); quickElement("a", quickElement("li", time_list), gettext("Noon"), "href", "javascript:DateTimeShortcuts.handleClockQuicklink(" + num + ", 12);"); quickElement("a", quickElement("li", time_list), gettext("6 p.m."), "href", "javascript:DateTimeShortcuts.handleClockQuicklink(" + num + ", 18);"); var cancel_p = quickElement('p', clock_box); cancel_p.className = 'calendar-cancel'; quickElement('a', cancel_p, gettext('Cancel'), 'href', 'javascript:DateTimeShortcuts.dismissClock(' + num + ');'); 
django.jQuery(document).bind('keyup', function(event) { if (event.which === 27) { // ESC key closes popup DateTimeShortcuts.dismissClock(num); event.preventDefault(); } }); }, openClock: function(num) { var clock_box = document.getElementById(DateTimeShortcuts.clockDivName + num); var clock_link = document.getElementById(DateTimeShortcuts.clockLinkName + num); // Recalculate the clockbox position // is it left-to-right or right-to-left layout ? if (getStyle(document.body, 'direction') !== 'rtl') { clock_box.style.left = findPosX(clock_link) + 17 + 'px'; } else { // since style's width is in em, it'd be tough to calculate // px value of it. let's use an estimated px for now // TODO: IE returns wrong value for findPosX when in rtl mode // (it returns as it was left aligned), needs to be fixed. clock_box.style.left = findPosX(clock_link) - 110 + 'px'; } clock_box.style.top = Math.max(0, findPosY(clock_link) - 30) + 'px'; // Show the clock box clock_box.style.display = 'block'; addEvent(document, 'click', DateTimeShortcuts.dismissClockFunc[num]); }, dismissClock: function(num) { document.getElementById(DateTimeShortcuts.clockDivName + num).style.display = 'none'; removeEvent(document, 'click', DateTimeShortcuts.dismissClockFunc[num]); }, handleClockQuicklink: function(num, val) { var d; if (val === -1) { d = DateTimeShortcuts.now(); } else { d = new Date(1970, 1, 1, val, 0, 0, 0); } DateTimeShortcuts.clockInputs[num].value = d.strftime(get_format('TIME_INPUT_FORMATS')[0]); DateTimeShortcuts.clockInputs[num].focus(); DateTimeShortcuts.dismissClock(num); }, // Add calendar widget to a given field. addCalendar: function(inp) { var num = DateTimeShortcuts.calendars.length; DateTimeShortcuts.calendarInputs[num] = inp; DateTimeShortcuts.dismissCalendarFunc[num] = function() { DateTimeShortcuts.dismissCalendar(num); return true; }; // Shortcut links (calendar icon and "Today" link) var shortcuts_span = document.createElement('span'); shortcuts_span.className = DateTimeShortcuts.shortCutsClass; inp.parentNode.insertBefore(shortcuts_span, inp.nextSibling); var today_link = document.createElement('a'); today_link.setAttribute('href', 'javascript:DateTimeShortcuts.handleCalendarQuickLink(' + num + ', 0);'); today_link.appendChild(document.createTextNode(gettext('Today'))); var cal_link = document.createElement('a'); cal_link.setAttribute('href', 'javascript:DateTimeShortcuts.openCalendar(' + num + ');'); cal_link.id = DateTimeShortcuts.calendarLinkName + num; quickElement( 'span', cal_link, '', 'class', 'date-icon', 'title', gettext('Choose a Date') ); shortcuts_span.appendChild(document.createTextNode('\u00A0')); shortcuts_span.appendChild(today_link); shortcuts_span.appendChild(document.createTextNode('\u00A0|\u00A0')); shortcuts_span.appendChild(cal_link); // Create calendarbox div. 
// // Markup looks like: // // <div id="calendarbox3" class="calendarbox module"> // <h2> // <a href="#" class="link-previous">&lsaquo;</a> // <a href="#" class="link-next">&rsaquo;</a> February 2003 // </h2> // <div class="calendar" id="calendarin3"> // <!-- (cal) --> // </div> // <div class="calendar-shortcuts"> // <a href="#">Yesterday</a> | <a href="#">Today</a> | <a href="#">Tomorrow</a> // </div> // <p class="calendar-cancel"><a href="#">Cancel</a></p> // </div> var cal_box = document.createElement('div'); cal_box.style.display = 'none'; cal_box.style.position = 'absolute'; cal_box.className = 'calendarbox module'; cal_box.setAttribute('id', DateTimeShortcuts.calendarDivName1 + num); document.body.appendChild(cal_box); addEvent(cal_box, 'click', cancelEventPropagation); // next-prev links var cal_nav = quickElement('div', cal_box); var cal_nav_prev = quickElement('a', cal_nav, '<', 'href', 'javascript:DateTimeShortcuts.drawPrev(' + num + ');'); cal_nav_prev.className = 'calendarnav-previous'; var cal_nav_next = quickElement('a', cal_nav, '>', 'href', 'javascript:DateTimeShortcuts.drawNext(' + num + ');'); cal_nav_next.className = 'calendarnav-next'; // main box var cal_main = quickElement('div', cal_box, '', 'id', DateTimeShortcuts.calendarDivName2 + num); cal_main.className = 'calendar'; DateTimeShortcuts.calendars[num] = new Calendar(DateTimeShortcuts.calendarDivName2 + num, DateTimeShortcuts.handleCalendarCallback(num)); DateTimeShortcuts.calendars[num].drawCurrent(); // calendar shortcuts var shortcuts = quickElement('div', cal_box); shortcuts.className = 'calendar-shortcuts'; quickElement('a', shortcuts, gettext('Yesterday'), 'href', 'javascript:DateTimeShortcuts.handleCalendarQuickLink(' + num + ', -1);'); shortcuts.appendChild(document.createTextNode('\u00A0|\u00A0')); quickElement('a', shortcuts, gettext('Today'), 'href', 'javascript:DateTimeShortcuts.handleCalendarQuickLink(' + num + ', 0);'); shortcuts.appendChild(document.createTextNode('\u00A0|\u00A0')); quickElement('a', shortcuts, gettext('Tomorrow'), 'href', 'javascript:DateTimeShortcuts.handleCalendarQuickLink(' + num + ', +1);'); // cancel bar var cancel_p = quickElement('p', cal_box); cancel_p.className = 'calendar-cancel'; quickElement('a', cancel_p, gettext('Cancel'), 'href', 'javascript:DateTimeShortcuts.dismissCalendar(' + num + ');'); django.jQuery(document).bind('keyup', function(event) { if (event.which === 27) { // ESC key closes popup DateTimeShortcuts.dismissCalendar(num); event.preventDefault(); } }); }, openCalendar: function(num) { var cal_box = document.getElementById(DateTimeShortcuts.calendarDivName1 + num); var cal_link = document.getElementById(DateTimeShortcuts.calendarLinkName + num); var inp = DateTimeShortcuts.calendarInputs[num]; // Determine if the current value in the input has a valid date. // If so, draw the calendar with that date's year and month. if (inp.value) { var format = get_format('DATE_INPUT_FORMATS')[0]; var selected = inp.value.strptime(format); var year = selected.getFullYear(); var month = selected.getMonth() + 1; var re = /\d{4}/; if (re.test(year.toString()) && month >= 1 && month <= 12) { DateTimeShortcuts.calendars[num].drawDate(month, year, selected); } } // Recalculate the clockbox position // is it left-to-right or right-to-left layout ? if (getStyle(document.body, 'direction') !== 'rtl') { cal_box.style.left = findPosX(cal_link) + 17 + 'px'; } else { // since style's width is in em, it'd be tough to calculate // px value of it. 
let's use an estimated px for now // TODO: IE returns wrong value for findPosX when in rtl mode // (it returns as it was left aligned), needs to be fixed. cal_box.style.left = findPosX(cal_link) - 180 + 'px'; } cal_box.style.top = Math.max(0, findPosY(cal_link) - 75) + 'px'; cal_box.style.display = 'block'; addEvent(document, 'click', DateTimeShortcuts.dismissCalendarFunc[num]); }, dismissCalendar: function(num) { document.getElementById(DateTimeShortcuts.calendarDivName1 + num).style.display = 'none'; removeEvent(document, 'click', DateTimeShortcuts.dismissCalendarFunc[num]); }, drawPrev: function(num) { DateTimeShortcuts.calendars[num].drawPreviousMonth(); }, drawNext: function(num) { DateTimeShortcuts.calendars[num].drawNextMonth(); }, handleCalendarCallback: function(num) { var format = get_format('DATE_INPUT_FORMATS')[0]; // the format needs to be escaped a little format = format.replace('\\', '\\\\'); format = format.replace('\r', '\\r'); format = format.replace('\n', '\\n'); format = format.replace('\t', '\\t'); format = format.replace("'", "\\'"); return ["function(y, m, d) { DateTimeShortcuts.calendarInputs[", num, "].value = new Date(y, m-1, d).strftime('", format, "');DateTimeShortcuts.calendarInputs[", num, "].focus();document.getElementById(DateTimeShortcuts.calendarDivName1+", num, ").style.display='none';}"].join(''); }, handleCalendarQuickLink: function(num, offset) { var d = DateTimeShortcuts.now(); d.setDate(d.getDate() + offset); DateTimeShortcuts.calendarInputs[num].value = d.strftime(get_format('DATE_INPUT_FORMATS')[0]); DateTimeShortcuts.calendarInputs[num].focus(); DateTimeShortcuts.dismissCalendar(num); } }; addEvent(window, 'load', DateTimeShortcuts.init); window.DateTimeShortcuts = DateTimeShortcuts; })();
PypiClean
/dsin100daysv33-6.0.1.tar.gz/dsin100daysv33-6.0.1/notebook/static/components/codemirror/src/display/update_line.js
import { buildLineContent } from "../line/line_data.js" import { lineNumberFor } from "../line/utils_line.js" import { ie, ie_version } from "../util/browser.js" import { elt } from "../util/dom.js" import { signalLater } from "../util/operation_group.js" // When an aspect of a line changes, a string is added to // lineView.changes. This updates the relevant part of the line's // DOM structure. export function updateLineForChanges(cm, lineView, lineN, dims) { for (let j = 0; j < lineView.changes.length; j++) { let type = lineView.changes[j] if (type == "text") updateLineText(cm, lineView) else if (type == "gutter") updateLineGutter(cm, lineView, lineN, dims) else if (type == "class") updateLineClasses(cm, lineView) else if (type == "widget") updateLineWidgets(cm, lineView, dims) } lineView.changes = null } // Lines with gutter elements, widgets or a background class need to // be wrapped, and have the extra elements added to the wrapper div function ensureLineWrapped(lineView) { if (lineView.node == lineView.text) { lineView.node = elt("div", null, null, "position: relative") if (lineView.text.parentNode) lineView.text.parentNode.replaceChild(lineView.node, lineView.text) lineView.node.appendChild(lineView.text) if (ie && ie_version < 8) lineView.node.style.zIndex = 2 } return lineView.node } function updateLineBackground(cm, lineView) { let cls = lineView.bgClass ? lineView.bgClass + " " + (lineView.line.bgClass || "") : lineView.line.bgClass if (cls) cls += " CodeMirror-linebackground" if (lineView.background) { if (cls) lineView.background.className = cls else { lineView.background.parentNode.removeChild(lineView.background); lineView.background = null } } else if (cls) { let wrap = ensureLineWrapped(lineView) lineView.background = wrap.insertBefore(elt("div", null, cls), wrap.firstChild) cm.display.input.setUneditable(lineView.background) } } // Wrapper around buildLineContent which will reuse the structure // in display.externalMeasured when possible. function getLineContent(cm, lineView) { let ext = cm.display.externalMeasured if (ext && ext.line == lineView.line) { cm.display.externalMeasured = null lineView.measure = ext.measure return ext.built } return buildLineContent(cm, lineView) } // Redraw the line's text. Interacts with the background and text // classes because the mode may output tokens that influence these // classes. function updateLineText(cm, lineView) { let cls = lineView.text.className let built = getLineContent(cm, lineView) if (lineView.text == lineView.node) lineView.node = built.pre lineView.text.parentNode.replaceChild(built.pre, lineView.text) lineView.text = built.pre if (built.bgClass != lineView.bgClass || built.textClass != lineView.textClass) { lineView.bgClass = built.bgClass lineView.textClass = built.textClass updateLineClasses(cm, lineView) } else if (cls) { lineView.text.className = cls } } function updateLineClasses(cm, lineView) { updateLineBackground(cm, lineView) if (lineView.line.wrapClass) ensureLineWrapped(lineView).className = lineView.line.wrapClass else if (lineView.node != lineView.text) lineView.node.className = "" let textClass = lineView.textClass ? 
lineView.textClass + " " + (lineView.line.textClass || "") : lineView.line.textClass lineView.text.className = textClass || "" } function updateLineGutter(cm, lineView, lineN, dims) { if (lineView.gutter) { lineView.node.removeChild(lineView.gutter) lineView.gutter = null } if (lineView.gutterBackground) { lineView.node.removeChild(lineView.gutterBackground) lineView.gutterBackground = null } if (lineView.line.gutterClass) { let wrap = ensureLineWrapped(lineView) lineView.gutterBackground = elt("div", null, "CodeMirror-gutter-background " + lineView.line.gutterClass, `left: ${cm.options.fixedGutter ? dims.fixedPos : -dims.gutterTotalWidth}px; width: ${dims.gutterTotalWidth}px`) cm.display.input.setUneditable(lineView.gutterBackground) wrap.insertBefore(lineView.gutterBackground, lineView.text) } let markers = lineView.line.gutterMarkers if (cm.options.lineNumbers || markers) { let wrap = ensureLineWrapped(lineView) let gutterWrap = lineView.gutter = elt("div", null, "CodeMirror-gutter-wrapper", `left: ${cm.options.fixedGutter ? dims.fixedPos : -dims.gutterTotalWidth}px`) cm.display.input.setUneditable(gutterWrap) wrap.insertBefore(gutterWrap, lineView.text) if (lineView.line.gutterClass) gutterWrap.className += " " + lineView.line.gutterClass if (cm.options.lineNumbers && (!markers || !markers["CodeMirror-linenumbers"])) lineView.lineNumber = gutterWrap.appendChild( elt("div", lineNumberFor(cm.options, lineN), "CodeMirror-linenumber CodeMirror-gutter-elt", `left: ${dims.gutterLeft["CodeMirror-linenumbers"]}px; width: ${cm.display.lineNumInnerWidth}px`)) if (markers) for (let k = 0; k < cm.options.gutters.length; ++k) { let id = cm.options.gutters[k], found = markers.hasOwnProperty(id) && markers[id] if (found) gutterWrap.appendChild(elt("div", [found], "CodeMirror-gutter-elt", `left: ${dims.gutterLeft[id]}px; width: ${dims.gutterWidth[id]}px`)) } } } function updateLineWidgets(cm, lineView, dims) { if (lineView.alignable) lineView.alignable = null for (let node = lineView.node.firstChild, next; node; node = next) { next = node.nextSibling if (node.className == "CodeMirror-linewidget") lineView.node.removeChild(node) } insertLineWidgets(cm, lineView, dims) } // Build a line's DOM representation from scratch export function buildLineElement(cm, lineView, lineN, dims) { let built = getLineContent(cm, lineView) lineView.text = lineView.node = built.pre if (built.bgClass) lineView.bgClass = built.bgClass if (built.textClass) lineView.textClass = built.textClass updateLineClasses(cm, lineView) updateLineGutter(cm, lineView, lineN, dims) insertLineWidgets(cm, lineView, dims) return lineView.node } // A lineView may contain multiple logical lines (when merged by // collapsed spans). The widgets for all of them need to be drawn. 
function insertLineWidgets(cm, lineView, dims) { insertLineWidgetsFor(cm, lineView.line, lineView, dims, true) if (lineView.rest) for (let i = 0; i < lineView.rest.length; i++) insertLineWidgetsFor(cm, lineView.rest[i], lineView, dims, false) } function insertLineWidgetsFor(cm, line, lineView, dims, allowAbove) { if (!line.widgets) return let wrap = ensureLineWrapped(lineView) for (let i = 0, ws = line.widgets; i < ws.length; ++i) { let widget = ws[i], node = elt("div", [widget.node], "CodeMirror-linewidget") if (!widget.handleMouseEvents) node.setAttribute("cm-ignore-events", "true") positionLineWidget(widget, node, lineView, dims) cm.display.input.setUneditable(node) if (allowAbove && widget.above) wrap.insertBefore(node, lineView.gutter || lineView.text) else wrap.appendChild(node) signalLater(widget, "redraw") } } function positionLineWidget(widget, node, lineView, dims) { if (widget.noHScroll) { ;(lineView.alignable || (lineView.alignable = [])).push(node) let width = dims.wrapperWidth node.style.left = dims.fixedPos + "px" if (!widget.coverGutter) { width -= dims.gutterTotalWidth node.style.paddingLeft = dims.gutterTotalWidth + "px" } node.style.width = width + "px" } if (widget.coverGutter) { node.style.zIndex = 5 node.style.position = "relative" if (!widget.noHScroll) node.style.marginLeft = -dims.gutterTotalWidth + "px" } }
PypiClean
/napari-blossom-0.0.4.tar.gz/napari-blossom-0.0.4/.napari/DESCRIPTION.md
<!-- This file is designed to provide you with a starting template for documenting
the functionality of your plugin.

Its content will be rendered on your plugin's napari hub page.

The sections below are given as a guide for the flow of information only, and
are in no way prescriptive. You should feel free to merge, remove, add and rename
sections at will to make this document work best for your plugin.

## Description

This should be a detailed description of the context of your plugin and its
intended purpose.

If you have videos or screenshots of your plugin in action, you should include them
here as well, to make them front and center for new users.

You should use absolute links to these assets, so that we can easily display them
on the hub. The easiest way to include a video is to use a GIF, for example hosted
on imgur. You can then reference this GIF as an image.

![Example GIF hosted on Imgur](https://i.imgur.com/A5phCX4.gif)

Note that GIFs larger than 5MB won't be rendered by GitHub - we will however,
render them on the napari hub.

The other alternative, if you prefer to keep a video, is to use GitHub's video
embedding feature.

1. Push your `DESCRIPTION.md` to GitHub on your repository (this can also be done as part of a Pull Request)
2. Edit `.napari/DESCRIPTION.md` **on GitHub**.
3. Drag and drop your video into its desired location. It will be uploaded and hosted on GitHub for you, but will not be placed in your repository.
4. We will take the resolved link to the video and render it on the hub.

Here is an example of an mp4 video embedded this way.

https://user-images.githubusercontent.com/17995243/120088305-6c093380-c132-11eb-822d-620e81eb5f0e.mp4

## Intended Audience & Supported Data

This section should describe the target audience for this plugin (any knowledge,
skills and experience required), as well as a description of the types of data
supported by this plugin.

Try to make the data description as explicit as possible, so that users know the
format your plugin expects. This applies both to reader plugins reading file formats
and to function/dock widget plugins accepting layers and/or layer data.
For example, if you know your plugin only works with 3D integer data in "tyx" order,
make sure to mention this.

If you know of researchers, groups or labs using your plugin, or if it has been cited
anywhere, feel free to also include this information here.

## Quickstart

This section should go through step-by-step examples of how your plugin should be used.
Where your plugin provides multiple dock widgets or functions, you should split these
out into separate subsections for easy browsing. Include screenshots and videos
wherever possible to elucidate your descriptions.

Ideally, this section should start with minimal examples for those who just want a
quick overview of the plugin's functionality, but you should definitely link out to
more complex and in-depth tutorials highlighting any intricacies of your plugin, and
more detailed documentation if you have it.

## Additional Install Steps (uncommon)

We will be providing installation instructions on the hub, which will be sufficient
for the majority of plugins. They will include instructions to pip install, and
to install via napari itself.

Most plugins can be installed out-of-the-box by just specifying the package requirements
over in `setup.cfg`. However, if your plugin has any more complex dependencies, or
requires any additional preparation before (or after) installation, you should add
this information here.

## Getting Help

This section should point users to your preferred support tools, whether this be raising
an issue on GitHub, asking a question on image.sc, or using some other method of contact.

If you distinguish between usage support and bug/feature support, you should state that here.

## How to Cite

Many plugins may be used in the course of published (or publishable) research, as well as
during conference talks and other public facing events. If you'd like to be cited in a
particular format, or have a DOI you'd like used, you should provide that information here. -->

The developer has not yet provided a napari-hub specific description.
PypiClean
/open_aea-1.38.0-py3-none-win32.whl/aea/cli/publish.py
"""Implementation of the 'aea publish' subcommand.""" import os import shutil from abc import ABC, abstractmethod from contextlib import suppress from pathlib import Path from shutil import copyfile from tempfile import TemporaryDirectory from typing import cast import click from aea.cli.push import _save_item_locally as _push_item_locally from aea.cli.registry.publish import publish_agent from aea.cli.registry.push import push_item as _push_item_remote from aea.cli.registry.settings import REGISTRY_REMOTE, REMOTE_IPFS from aea.cli.registry.utils import get_package_meta from aea.cli.utils.click_utils import registry_flag from aea.cli.utils.config import ( get_default_remote_registry, get_ipfs_node_multiaddr, validate_item_config, ) from aea.cli.utils.context import Context from aea.cli.utils.decorators import check_aea_project from aea.cli.utils.exceptions import AEAConfigException from aea.cli.utils.package_utils import ( try_get_item_source_path, try_get_item_target_path, ) from aea.configurations.base import AgentConfig, CRUDCollection, PublicId from aea.configurations.constants import ( AGENT, AGENTS, CONNECTIONS, CONTRACTS, DEFAULT_AEA_CONFIG_FILE, DEFAULT_README_FILE, ITEM_TYPE_PLURAL_TO_TYPE, PROTOCOLS, SKILLS, ) from aea.helpers.cid import to_v0, to_v1 try: from aea_cli_ipfs.ipfs_utils import IPFSTool # type: ignore IS_IPFS_PLUGIN_INSTALLED = True except ImportError: IS_IPFS_PLUGIN_INSTALLED = False PUSH_ITEMS_FLAG = "--push-missing" @click.command(name="publish") @registry_flag() @click.option( "--push-missing", is_flag=True, help="Push missing components to registry." ) @click.pass_context def publish( click_context: click.Context, registry: str, push_missing: bool ) -> None: # pylint: disable=unused-argument """Publish the agent to the registry.""" publish_agent_package(click_context, registry, push_missing) @check_aea_project def publish_agent_package( click_context: click.Context, registry: str, push_missing: bool ) -> None: """Publish an agent package to a registry.""" ctx = cast(Context, click_context.obj) _validate_pkp(ctx.agent_config.private_key_paths) _validate_config(ctx) if registry == REGISTRY_REMOTE: if get_default_remote_registry() == REMOTE_IPFS: _publish_agent_ipfs(ctx, push_missing) else: _publish_agent_remote(ctx, push_missing=push_missing) else: _save_agent_locally(ctx, is_mixed=False, push_missing=push_missing) def _validate_config(ctx: Context) -> None: """ Validate agent config. :param ctx: Context object. :raises ClickException: if validation is failed. """ try: validate_item_config(AGENT, Path(ctx.cwd)) except AEAConfigException as e: # pragma: no cover raise click.ClickException("Failed to validate agent config. {}".format(str(e))) def _validate_pkp(private_key_paths: CRUDCollection) -> None: """ Prevent to publish agents with non-empty private_key_paths. :param private_key_paths: private_key_paths from agent config. :raises ClickException: if private_key_paths is not empty. """ if private_key_paths.read_all() != []: raise click.ClickException( "You are not allowed to publish agents with non-empty private_key_paths. Use the `aea remove-key` command to remove key paths from `private_key_paths: {}` in `aea-config.yaml`." ) class BaseRegistry(ABC): """Base registry class.""" @abstractmethod def check_item_present(self, item_type_plural: str, public_id: PublicId) -> None: """ Check item present in registry. Raise ClickException if not found. :param item_type_plural: str, item type. :param public_id: PublicId of the item to check. 
:return: None """ @abstractmethod def push_item(self, item_type_plural: str, public_id: PublicId) -> None: """ Push item to registry. :param item_type_plural: str, item type. :param public_id: PublicId of the item to check. :return: None """ def check_item_present_and_push( self, item_type_plural: str, public_id: PublicId ) -> None: """ Check item present in registry and push if needed.Raise ClickException if not found. :param item_type_plural: str, item type. :param public_id: PublicId of the item to check. :return: None """ with suppress(click.ClickException): return self.check_item_present(item_type_plural, public_id) try: self.push_item(item_type_plural, public_id) except Exception as e: raise click.ClickException( f"Failed to push missing item: {item_type_plural} {public_id}: {e}" ) from e try: self.check_item_present(item_type_plural, public_id) except Exception as e: raise click.ClickException( f"Failed to find item after push: {item_type_plural} {public_id}: {e}" ) from e class IPFSRegistry(BaseRegistry): """IPFS registry.""" def __init__(self, ctx: Context) -> None: """Initialize object.""" super().__init__() self.ctx = ctx if not IS_IPFS_PLUGIN_INSTALLED: raise click.ClickException( "Please install ipfs plugin using `pip3 install open-aea-cli-ipfs`" ) self.ipfs_tool = IPFSTool(addr=get_ipfs_node_multiaddr()) def check_item_present(self, item_type_plural: str, public_id: PublicId) -> None: """Check if item is pinned on the node.""" if not self.ipfs_tool.is_a_package(to_v0(public_id.hash)): raise click.ClickException( f"Dependency {public_id} is missing from registry.\nPlease push it first and then retry or use {PUSH_ITEMS_FLAG} flag to push automatically." ) def push_item(self, item_type_plural: str, public_id: PublicId) -> None: """Push item to an ipfs registry.""" try: component_path = try_get_item_source_path( self.ctx.cwd, None, item_type_plural, public_id.name ) except click.ClickException: component_path = try_get_item_source_path( os.path.join(self.ctx.cwd, "vendor"), public_id.author, item_type_plural, public_id.name, ) _, package_hash, _ = self.ipfs_tool.add(component_path) package_hash = to_v1(package_hash) click.echo("Pushed missing package with:") click.echo(f"\tPublicId: {public_id}") click.echo(f"\tPackage hash: {package_hash}") class LocalRegistry(BaseRegistry): """Local directory registry.""" def __init__(self, ctx: Context): """Init registry.""" self.ctx = ctx try: self.registry_path = ctx.registry_path except ValueError as e: # pragma: nocover raise click.ClickException(str(e)) def check_item_present(self, item_type_plural: str, public_id: PublicId) -> None: """ Check item present in registry. Raise ClickException if not found. :param item_type_plural: str, item type. :param public_id: PublicId of the item to check. """ try: try_get_item_source_path( self.registry_path, public_id.author, item_type_plural, public_id.name ) except click.ClickException as e: raise click.ClickException( f"Dependency is missing. {str(e)}\nPlease push it first and then retry or use {PUSH_ITEMS_FLAG} flag to push automatically." ) def push_item(self, item_type_plural: str, public_id: PublicId) -> None: """ Push item to registry. :param item_type_plural: str, item type. :param public_id: PublicId of the item to check. 
""" item_type = ITEM_TYPE_PLURAL_TO_TYPE[item_type_plural] _push_item_locally(self.ctx, item_type, public_id) class MixedRegistry(LocalRegistry): """Mixed remote and local component registry.""" def check_item_present(self, item_type_plural: str, public_id: PublicId) -> None: """ Check item present in registry. Raise ClickException if not found. :param item_type_plural: str, item type. :param public_id: PublicId of the item to check. """ item_type = ITEM_TYPE_PLURAL_TO_TYPE[item_type_plural] try: LocalRegistry.check_item_present(self, item_type_plural, public_id) except click.ClickException: click.echo( f"Can not find dependency locally: {item_type} {public_id}. Trying remote registry..." ) try: RemoteRegistry(self.ctx).check_item_present(item_type_plural, public_id) except click.ClickException: raise click.ClickException( f"Can not find dependency locally or remotely: {item_type} {public_id}. Try to add flag `{PUSH_ITEMS_FLAG}` to push dependency package to the registry." ) class RemoteRegistry(BaseRegistry): """Remote components registry.""" def __init__(self, ctx: Context) -> None: """Init registry.""" self.ctx = ctx def check_item_present(self, item_type_plural: str, public_id: PublicId) -> None: """ Check item present in registry.Raise ClickException if not found. :param item_type_plural: str, item type. :param public_id: PublicId of the item to check. """ item_type = ITEM_TYPE_PLURAL_TO_TYPE[item_type_plural] try: get_package_meta(item_type, public_id) except click.ClickException as e: raise click.ClickException( f"Package not found in remote registry: {str(e)}. You can try to add {PUSH_ITEMS_FLAG} flag." ) def push_item(self, item_type_plural: str, public_id: PublicId) -> None: """ Push item to registry. :param item_type_plural: str, item type. :param public_id: PublicId of the item to check. """ item_type = ITEM_TYPE_PLURAL_TO_TYPE[item_type_plural] _push_item_remote(self.ctx, item_type, public_id) def _check_dependencies_in_registry( registry: BaseRegistry, agent_config: AgentConfig, push_missing: bool ) -> None: """Check all agent dependencies present in registry.""" for item_type_plural in (PROTOCOLS, CONTRACTS, CONNECTIONS, SKILLS): dependencies = getattr(agent_config, item_type_plural) for public_id in dependencies: if push_missing: registry.check_item_present_and_push(item_type_plural, public_id) else: registry.check_item_present(item_type_plural, public_id) def _save_agent_locally( ctx: Context, is_mixed: bool = False, push_missing: bool = False ) -> None: """ Save agent to local packages. :param ctx: the context :param is_mixed: whether or not to fetch in mixed mode :param push_missing: bool. flag to push missing items """ try: registry_path = ctx.registry_path except ValueError as e: # pragma: nocover raise click.ClickException(str(e)) registry = MixedRegistry(ctx) if is_mixed else LocalRegistry(ctx) _check_dependencies_in_registry(registry, ctx.agent_config, push_missing) item_type_plural = AGENTS target_dir = try_get_item_target_path( registry_path, ctx.agent_config.author, item_type_plural, ctx.agent_config.name, ) if not os.path.exists(target_dir): os.makedirs(target_dir, exist_ok=True) source_path = os.path.join(ctx.cwd, DEFAULT_AEA_CONFIG_FILE) target_path = os.path.join(target_dir, DEFAULT_AEA_CONFIG_FILE) copyfile(source_path, target_path) click.echo( f'Agent "{ctx.agent_config.name}" successfully saved in packages folder.' ) def _publish_agent_remote(ctx: Context, push_missing: bool) -> None: """ Push agent to remote registry. 
:param ctx: the context :param push_missing: bool. flag to push missing items """ registry = RemoteRegistry(ctx) _check_dependencies_in_registry(registry, ctx.agent_config, push_missing) publish_agent(ctx) def _publish_agent_ipfs(ctx: Context, push_missing: bool) -> None: """ Push agent to remote registry. :param ctx: the context :param push_missing: bool. flag to push missing items """ if not IS_IPFS_PLUGIN_INSTALLED: raise click.ClickException("Please install IPFS cli plugin.") registry = IPFSRegistry(ctx) _check_dependencies_in_registry(registry, ctx.agent_config, push_missing) name = ctx.agent_config.agent_name config_file_source_path = os.path.join(ctx.cwd, DEFAULT_AEA_CONFIG_FILE) readme_source_path = os.path.join(ctx.cwd, DEFAULT_README_FILE) with TemporaryDirectory() as temp_dir: package_dir = os.path.join(temp_dir, name) os.makedirs(package_dir) config_file_target_path = os.path.join(package_dir, DEFAULT_AEA_CONFIG_FILE) shutil.copy(config_file_source_path, config_file_target_path) if os.path.exists(readme_source_path): readme_file_target_path = os.path.join(package_dir, DEFAULT_README_FILE) shutil.copy(readme_source_path, readme_file_target_path) _, package_hash, _ = registry.ipfs_tool.add(package_dir) package_hash = to_v1(package_hash) click.echo( f"Successfully published agent {name} to the Registry with.\n\tPublic ID: {ctx.agent_config.public_id}\n\tPackage hash: {package_hash}" )
PypiClean
/threemystic_cloud_data_client-0.1.89-py3-none-any.whl/threemystic_cloud_data_client/cloud_providers/azure/client/actions/secrets.py
from threemystic_cloud_data_client.cloud_providers.azure.client.actions.base_class.base import cloud_data_client_azure_client_action_base as base import asyncio from azure.mgmt.keyvault import KeyVaultManagementClient from azure.keyvault.secrets import SecretClient from azure.keyvault.keys import KeyClient from azure.keyvault.administration import KeyVaultAccessControlClient class cloud_data_client_azure_client_action(base): def __init__(self, *args, **kwargs): super().__init__( data_action="secrets", logger_name= "cloud_data_client_azure_client_action_secrets", *args, **kwargs) async def __process_get_resources_key_vault_access_key(self, account, key_vault, *args, **kwargs): try: client = KeyVaultAccessControlClient(vault_url= f'https://{key_vault}.vault.azure.net/', credential= self.get_cloud_client().get_tenant_credential(tenant= self.get_cloud_client().get_tenant_id(tenant= account, is_account= True)), subscription_id= self.get_cloud_client().get_account_id(account= account)) # oid = print(self.get_cloud_client().get_tenant_credential_full(tenant= self.get_cloud_client().get_tenant_id(tenant= account, is_account= True)).get("jwt").get("oid")) return [ key for key in self.get_cloud_client().sdk_request( tenant= self.get_cloud_client().get_tenant_id(tenant= account, is_account= True), lambda_sdk_command=lambda: client.list_properties_of_keys() ) ] except Exception as err: return [] async def __process_get_resources_key_vault_keys(self, account, key_vault, *args, **kwargs): try: client = KeyClient(vault_url= f'https://{key_vault}.vault.azure.net/', credential= self.get_cloud_client().get_tenant_credential(tenant= self.get_cloud_client().get_tenant_id(tenant= account, is_account= True)), subscription_id= self.get_cloud_client().get_account_id(account= account)) return [ key for key in self.get_cloud_client().sdk_request( tenant= self.get_cloud_client().get_tenant_id(tenant= account, is_account= True), lambda_sdk_command=lambda: client.list_properties_of_keys() ) ] except Exception as err: return [] async def __process_get_resources_key_vault_secrets(self, account, key_vault, *args, **kwargs): try: client = SecretClient(vault_url= f'https://{key_vault}.vault.azure.net/', credential= self.get_cloud_client().get_tenant_credential(tenant= self.get_cloud_client().get_tenant_id(tenant= account, is_account= True)), subscription_id= self.get_cloud_client().get_account_id(account= account)) return [ secret for secret in self.get_cloud_client().sdk_request( tenant= self.get_cloud_client().get_tenant_id(tenant= account, is_account= True), lambda_sdk_command=lambda: client.list_properties_of_secrets() ) ] except Exception as err: return [] async def __process_get_resources_key_vaults(self, account, *args, **kwargs): try: client = KeyVaultManagementClient(credential= self.get_cloud_client().get_tenant_credential(tenant= self.get_cloud_client().get_tenant_id(tenant= account, is_account= True)), subscription_id= self.get_cloud_client().get_account_id(account= account)) return { self.get_cloud_client().get_resource_id(resource= kv):kv for kv in self.get_cloud_client().sdk_request( tenant= self.get_cloud_client().get_tenant_id(tenant= account, is_account= True), lambda_sdk_command=lambda: client.vaults.list() ) } except Exception as err: return {} async def _process_account_data(self, account, loop, *args, **kwargs): # print(account) # keyvaults = await self.__process_get_resources_key_vaults(account= account) # for kv in keyvaults.values(): # print(self.get_cloud_client().serialize_resource(resource= kv)) 
# break return { "account": account, "data": [ # self.get_common().helper_type().dictionary().merge_dictionary([ # {}, # await self.get_base_return_data( # account= self.get_cloud_client().serialize_resource(resource= account), # resource_id= self.get_cloud_client().get_resource_id(resource= item), # resource= item, # region= self.get_cloud_client().get_resource_location(resource= item), # resource_groups= [self.get_cloud_client().get_resource_group_from_resource(resource= item)], # ), # { # "extra_resource": self.get_cloud_client().serialize_resource(tasks["resource"].result().get(self.get_cloud_client().get_resource_id(resource= item))), # "extra_availability_set": tasks["availability_sets"].result().get(self.get_cloud_client().get_resource_id(resource= item)), # "extra_nics": tasks["nics"].result().get(self.get_cloud_client().get_resource_id(resource= item)), # "extra_load_balancers": await self._process_account_data_get_vm_load_balancers( # vm_nics= tasks["nics"].result().get(self.get_cloud_client().get_resource_id(resource= item)), # load_balancers_by_nics = tasks["load_balancers"].result() # ), # }, # ]) for item in self.get_cloud_client().sdk_request( # tenant= self.get_cloud_client().get_tenant_id(tenant= account, is_account= True), # lambda_sdk_command=lambda: client.virtual_machines.list_all() # ) ] }
PypiClean
/dsconfig-1.6.7.tar.gz/dsconfig-1.6.7/README.md
# Dsconfig

This is a command line tool for managing configuration of Tango device
servers. It runs on Python 2.7 as well as 3.6 and up.

The goal of this project is to provide tools for configuring a Tango database
in a convenient way. Right now the focus is on supporting Excel files as input
("xls2json"), but support for other formats should follow.

The main idea is that the input files are parsed and turned into an
intermediate JSON format, specified by a schema. This file can then be given
to the "json2tango" tool, which then tries to make the database contents
match, by adding, modifying or removing servers, devices and properties.

The JSON format is easy to create and supported by many tools and languages,
so generating such files from various sources should be straightforward. Once
you have such a file, it should be a simple thing to configure the Tango
database.

## Caveats

There are a few things to be aware of before using this tool.

- The basic idea of dsconfig is *idempotence*. This means that applying the
  same dsconfig file a second time should result in no changes at all. The
  intention is that it should be useful not only to update the configuration
  but also to be able to check if anything has changed since the last
  application. Therefore, the tool tries to figure out the smallest set of
  database operations needed to get to the intended state.

- TANGO is *case insensitive* for names, for example of devices and
  properties. But there are some cases where this causes confusing results.
  For example, TANGO keeps the case that was used when last written, which
  means that the same name may exist in different places with different
  cases. dsconfig tries to handle this gracefully, but it is complex (for
  example, all relevant string comparisons need to be done in a case
  insensitive way) and there are bound to be corner cases where the behavior
  is unexpected. Please report such cases if you run into them.

## JSON format

This is an example of the format, with comments (comments are not actually
supported by JSON so don't copy-paste this!):

```json
{
    // these lines are meta information and are ignored so far
    "_version": 1,
    "_source": "ConfigInjectorDiag.xls",
    "_title": "MAX-IV Tango JSON intermediate format",
    "_date": "2014-11-03 17:45:04.258926",

    // here comes the actual Tango data
    // First, server instances and devices...
    "servers": {
        "some-server/instance": {
            "SomeDeviceClass": {
                "some/device/1": {
                    "properties": {
                        "someImportantProperty": [
                            "foo",
                            "bar"
                        ],
                        "otherProperty": ["7"]
                    },
                    "attribute_properties": {
                        "anAttribute": {
                            "min_value": ["-5"],
                            "unit": ["mV"]
                        }
                    }
                }
            }
        }
    },

    // Here you can list your class properties
    "classes": {
        "SomeDeviceClass": {
            "properties": {
                "aClassProperty": ["67.4"]
            }
        }
    }
}
```

Note that all properties are given as lists of strings. This is how the Tango
DB represents them, so it gets a lot easier to compare things if we do it too.

Note: the format is now more loosely defined; it is allowed to split the
server and instance names into separate levels, like so:

```json
"servers": {
    "some-server": {
        "instance": {
            "SomeDeviceClass": ...
```

Note: Leaving out "properties" in a device will mean that the tool just
ignores any existing properties when applying the configuration. An empty
properties object ("properties": {}) means that existing properties will get
removed. Same goes for "attribute_properties".
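
Because the annotated example above is not valid JSON as written (JSON has no
comments), here is a stripped-down, comment-free sketch of the same structure
that can be used as a copy-paste starting point. The server, class, device and
property names are of course placeholders, and the meta keys are omitted since
they are ignored anyway:

```json
{
    "servers": {
        "some-server/instance": {
            "SomeDeviceClass": {
                "some/device/1": {
                    "properties": {
                        "someImportantProperty": ["foo", "bar"]
                    }
                }
            }
        }
    }
}
```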
The XLS format supported is almost identical to the dsgenerator format, with a few changes:

- It is now possible to spread server definitions over any number of pages, and to selectively use only a subset of these by giving their names to the xls2json tool.
- The column names (the first line of each column) are now significant, so that their order can be relaxed. There are a few differences to the "standard" sheet though; "ServerName" should be "Server", "Devices" should be "Device" and, in the "ParamConfig" tab, "Parameter" should now be "Attribute". These changes were made for consistency.
- A few features have been added for flexibility; see the example Excel file in "test/files/test.xls".

Converting an Excel file is done like this:

```bash
xls2json config.xls
```

This will output the resulting JSON data to stdout. If there are errors or warnings, they will be printed on stderr. To save the JSON to a file, just redirect the output.

By default, all sheets are processed. If you want to only include some of them, include the sheet names as further arguments to the command:

```bash
xls2json config.xls sheet1 sheet2
```

The "Dynamics" and "ParamConfig" sheets are treated specially as they follow a different style. Some syntax checking is done on dynamic formulas, to make sure they compile. Failures are printed to stderr and the corresponding properties skipped, so be careful (see the -f flag to override this).

The command is quite verbose and it will by default happily skip lines that contain incomplete information. Make sure to check the stderr output for hints about this. At the end the command prints a line of statistics, listing the number of servers, etc., that it has found. This is intended as a useful sanity check. Also look over the JSON result to see if it makes sense.

Useful flags (see --help):

- `--fatal (-f)` means that the command will treat any parsing failure as a fatal error and exit, instead of skipping the line as normal. Use it if you don't like the lenient default behavior.

## json2tango

This tool reads a JSON file (or from stdin if no filename is given), validates it and, optionally, configures a Tango database accordingly.

By default, it will only check the current DB state, compare, and print out what actions would be performed, without changing anything. This should always be the first step, in order to catch errors before they are permanently written to the DB.

```bash
json2tango config.json
```

Inspect the output of this command carefully. Things in red mean removals, green additions, and yellow changes. Note that properties are stored as lists of strings in the DB, so don't be confused by the fact that your numeric properties turn up as strings.

[Pro-tip: if you're unsure of what's going on, it's a good idea to inspect the output of the `-d` argument (see below) before doing any non-trivial changes. It's usually less readable than the normal diff output, but guaranteed to be accurate.]

A summary of the number of database operations of each kind is printed at the end. This should be useful as a double check; usually you have a good idea of e.g. how many devices should be added.

Once you're convinced that the actions are correct, add the "-w" flag to the command line (this can be at the end or anywhere). Now the command will actually perform the actions in the Tango DB.

For safety and convenience, the program also writes the previous DB state that was changed into a temp JSON file (this is the same as the output of the -d flag). 
It should, in principle, be possible to undo the changes made by swapping your input JSON file with the temp file. This is a new feature that is not tested for many cases so don't rely on it. Note that the tool in principle only concerns itself with the server instances defined in your JSON file. All other servers in the DB are left untouched. The exception is if your JSON contains devices that already exist in the DB, but in different servers. The devices will be moved to the new servers, and if any of the original servers become empty of devices, they will be removed. There is currently no other way to remove a server with dsconfig. Some useful flags (see --help for a complete list): - `--write (-w)` is needed in order to actually do anything to the database. This means that the command will perform the actions needed to bring the DB into the described state. If the state is already correct, nothing is done. - `--update (-u)` means that "nothing" (be careful, see caveats below) will be removed, only changed or added. Again the exception is any existing duplicates of your devices. Also, this only applies to whole properties, not individual lines. So if your JSON has lines removed from a property, the lines will be removed from the DB as the whole property is overwritten, regardless of the --update flag. - `--include (-i)` [Experimental] lets you filter the configuration before applying it. You give a filter consisting of a "term" (server/class/device/property) and a regular expression, separated by colon. E.g. "--include=device:VAC/IP.*01". This will cause the command to only apply configuration that concerns those devices matching the regex. It is possible to add several includes, just tack more "--include=..." statements on. - `--exclude (-x)` [Experimental] works like --include except it removes the matching parts from the config instead. Some less useful flags: - `--no-validation (-v)` skips the JSON validation step. If you know what you're doing, this may be useful as the validation is very strict, while the tool itself is more forgiving. Watch out for unexpected behavior though; you're on your own! It's probably a better idea to fix your JSON. - `--dbcalls (-d)` prints out all the Tango database API calls that were, or would have been, made to perform the changes. This is mostly handy for debugging problems. Since this is the real list of commands that are performed, it is guaranteed to correspond to reality. - `--sleep (-s)` tweaks the time to wait between db calls. The default is 0.01 s. This is intended to lighten the load on the Tango DB service a bit, but it can be set to 0 if you just want the config to be done as fast as possible. - `--input (-p)` tells the command to simply print the configuration file, but after any filters have been applied. It can be useful in order to check the result of filtering. If no filters are used, it will just (pretty) print whatever file you gave as input. This flag skips all database operations so it can be used "offline". ## Other features ### Dumping the database It's often useful to be able to make a "snapshot" of the current state of the configuration, e.g. for safe keeping, or for usage in scripts. There is a module in dsconfig that allows this called `dump`. ```bash python -m dsconfig.dump ``` It outputs the contents of the current TANGO database to `stdout`. There are some filtering functionality to allow only dumping selected parts (e.g. 
servers), like so:

```bash
python -m dsconfig.dump server:LimaCCDs/*
```

For more help, try the `--help` flag.

### Viewing JSON files

Reading a large, nested JSON file can be painful, but dsconfig has a solution: a hierarchical, terminal-based JSON viewer! If you install the Python packages `urwid` and `urwidtrees`, you can interactively view any JSON file by running

```bash
python -m dsconfig.viewer something.json
```

From the start, everything is "folded", but you can navigate the structure by using the arrow keys and return to fold/unfold nodes.
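## Putting it together

A typical end-to-end session with the two commands described above could look like this (the file names here are just placeholders):

```bash
xls2json config.xls > config.json   # convert the spreadsheet; check stderr for warnings
json2tango config.json              # dry run; inspect the colored diff and the summary
json2tango -w config.json           # apply the changes to the Tango DB
```

If something goes wrong after writing, the temp JSON file mentioned above can, in principle, be fed back to json2tango to restore the previous state.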
PypiClean
/py-pure-client-1.38.0.tar.gz/py-pure-client-1.38.0/pypureclient/flasharray/FA_2_11/models/protection_group.py
import pprint import re import six import typing from ....properties import Property if typing.TYPE_CHECKING: from pypureclient.flasharray.FA_2_11 import models class ProtectionGroup(object): """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'name': 'str', 'destroyed': 'bool', 'host_count': 'int', 'host_group_count': 'int', 'is_local': 'bool', 'pod': 'FixedReference', 'replication_schedule': 'ReplicationSchedule', 'snapshot_schedule': 'SnapshotSchedule', 'source': 'FixedReference', 'source_retention': 'RetentionPolicy', 'space': 'Space', 'target_count': 'int', 'target_retention': 'RetentionPolicy', 'time_remaining': 'int', 'volume_count': 'int' } attribute_map = { 'name': 'name', 'destroyed': 'destroyed', 'host_count': 'host_count', 'host_group_count': 'host_group_count', 'is_local': 'is_local', 'pod': 'pod', 'replication_schedule': 'replication_schedule', 'snapshot_schedule': 'snapshot_schedule', 'source': 'source', 'source_retention': 'source_retention', 'space': 'space', 'target_count': 'target_count', 'target_retention': 'target_retention', 'time_remaining': 'time_remaining', 'volume_count': 'volume_count' } required_args = { } def __init__( self, name=None, # type: str destroyed=None, # type: bool host_count=None, # type: int host_group_count=None, # type: int is_local=None, # type: bool pod=None, # type: models.FixedReference replication_schedule=None, # type: models.ReplicationSchedule snapshot_schedule=None, # type: models.SnapshotSchedule source=None, # type: models.FixedReference source_retention=None, # type: models.RetentionPolicy space=None, # type: models.Space target_count=None, # type: int target_retention=None, # type: models.RetentionPolicy time_remaining=None, # type: int volume_count=None, # type: int ): """ Keyword args: name (str): A user-specified name. The name must be locally unique and can be changed. destroyed (bool): Has this protection group been destroyed? To destroy a protection group, patch to `true`. To recover a destroyed protection group, patch to `false`. If not specified, defaults to `false`. host_count (int): Number of hosts in this protection group. host_group_count (int): Number of host groups in this protection group. is_local (bool): If set to `true`, the protection group belongs to the local array. If set to `false`, the protection group belongs to the remote array. pod (FixedReference): The pod in which the protection group resides. replication_schedule (ReplicationSchedule): The schedule settings for asynchronous replication. snapshot_schedule (SnapshotSchedule): The schedule settings for protection group snapshots. source (FixedReference): The array on which the protection group was created. source_retention (RetentionPolicy): The retention policy for the source array of the protection group. space (Space): Returns provisioned size and physical storage consumption data for each protection group. target_count (int): The number of targets to where this protection group replicates. target_retention (RetentionPolicy): The retention policy for the target(s) of the protection group. time_remaining (int): The amount of time left until the destroyed protection group is permanently eradicated. Measured in milliseconds. Before the `time_remaining` period has elapsed, the destroyed protection group can be recovered by setting `destroyed=false`. 
volume_count (int): The number of volumes in the protection group. """ if name is not None: self.name = name if destroyed is not None: self.destroyed = destroyed if host_count is not None: self.host_count = host_count if host_group_count is not None: self.host_group_count = host_group_count if is_local is not None: self.is_local = is_local if pod is not None: self.pod = pod if replication_schedule is not None: self.replication_schedule = replication_schedule if snapshot_schedule is not None: self.snapshot_schedule = snapshot_schedule if source is not None: self.source = source if source_retention is not None: self.source_retention = source_retention if space is not None: self.space = space if target_count is not None: self.target_count = target_count if target_retention is not None: self.target_retention = target_retention if time_remaining is not None: self.time_remaining = time_remaining if volume_count is not None: self.volume_count = volume_count def __setattr__(self, key, value): if key not in self.attribute_map: raise KeyError("Invalid key `{}` for `ProtectionGroup`".format(key)) self.__dict__[key] = value def __getattribute__(self, item): value = object.__getattribute__(self, item) if isinstance(value, Property): raise AttributeError else: return value def __getitem__(self, key): if key not in self.attribute_map: raise KeyError("Invalid key `{}` for `ProtectionGroup`".format(key)) return object.__getattribute__(self, key) def __setitem__(self, key, value): if key not in self.attribute_map: raise KeyError("Invalid key `{}` for `ProtectionGroup`".format(key)) object.__setattr__(self, key, value) def __delitem__(self, key): if key not in self.attribute_map: raise KeyError("Invalid key `{}` for `ProtectionGroup`".format(key)) object.__delattr__(self, key) def keys(self): return self.attribute_map.keys() def to_dict(self): """Returns the model properties as a dict""" result = {} for attr, _ in six.iteritems(self.swagger_types): if hasattr(self, attr): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) else: result[attr] = value if issubclass(ProtectionGroup, dict): for key, value in self.items(): result[key] = value return result def to_str(self): """Returns the string representation of the model""" return pprint.pformat(self.to_dict()) def __repr__(self): """For `print` and `pprint`""" return self.to_str() def __eq__(self, other): """Returns true if both objects are equal""" if not isinstance(other, ProtectionGroup): return False return self.__dict__ == other.__dict__ def __ne__(self, other): """Returns true if both objects are not equal""" return not self == other
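# Illustrative example (not part of the generated model): attributes are set via
# keyword arguments and serialized with to_dict(); the values below are made up.
#
#   pg = ProtectionGroup(name="pgroup01", destroyed=False, volume_count=3)
#   pg.to_dict()  # -> {'name': 'pgroup01', 'destroyed': False, 'volume_count': 3}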
PypiClean
/formification-1.2.0-py3-none-any.whl/formulaic/static/admin/formulaic/ember-formulaic/node_modules/eslint/lib/rules/accessor-pairs.js
"use strict"; //------------------------------------------------------------------------------ // Helpers //------------------------------------------------------------------------------ /** * Checks whether or not a given node is an `Identifier` node which was named a given name. * @param {ASTNode} node - A node to check. * @param {string} name - An expected name of the node. * @returns {boolean} `true` if the node is an `Identifier` node which was named as expected. */ function isIdentifier(node, name) { return node.type === "Identifier" && node.name === name; } /** * Checks whether or not a given node is an argument of a specified method call. * @param {ASTNode} node - A node to check. * @param {number} index - An expected index of the node in arguments. * @param {string} object - An expected name of the object of the method. * @param {string} property - An expected name of the method. * @returns {boolean} `true` if the node is an argument of the specified method call. */ function isArgumentOfMethodCall(node, index, object, property) { const parent = node.parent; return ( parent.type === "CallExpression" && parent.callee.type === "MemberExpression" && parent.callee.computed === false && isIdentifier(parent.callee.object, object) && isIdentifier(parent.callee.property, property) && parent.arguments[index] === node ); } /** * Checks whether or not a given node is a property descriptor. * @param {ASTNode} node - A node to check. * @returns {boolean} `true` if the node is a property descriptor. */ function isPropertyDescriptor(node) { // Object.defineProperty(obj, "foo", {set: ...}) if (isArgumentOfMethodCall(node, 2, "Object", "defineProperty") || isArgumentOfMethodCall(node, 2, "Reflect", "defineProperty") ) { return true; } /* * Object.defineProperties(obj, {foo: {set: ...}}) * Object.create(proto, {foo: {set: ...}}) */ node = node.parent.parent; return node.type === "ObjectExpression" && ( isArgumentOfMethodCall(node, 1, "Object", "create") || isArgumentOfMethodCall(node, 1, "Object", "defineProperties") ); } //------------------------------------------------------------------------------ // Rule Definition //------------------------------------------------------------------------------ module.exports = { meta: { docs: { description: "enforce getter and setter pairs in objects", category: "Best Practices", recommended: false }, schema: [{ type: "object", properties: { getWithoutSet: { type: "boolean" }, setWithoutGet: { type: "boolean" } }, additionalProperties: false }] }, create(context) { const config = context.options[0] || {}; const checkGetWithoutSet = config.getWithoutSet === true; const checkSetWithoutGet = config.setWithoutGet !== false; /** * Checks a object expression to see if it has setter and getter both present or none. * @param {ASTNode} node The node to check. 
* @returns {void} * @private */ function checkLonelySetGet(node) { let isSetPresent = false; let isGetPresent = false; const isDescriptor = isPropertyDescriptor(node); for (let i = 0, end = node.properties.length; i < end; i++) { const property = node.properties[i]; let propToCheck = ""; if (property.kind === "init") { if (isDescriptor && !property.computed) { propToCheck = property.key.name; } } else { propToCheck = property.kind; } switch (propToCheck) { case "set": isSetPresent = true; break; case "get": isGetPresent = true; break; default: // Do nothing } if (isSetPresent && isGetPresent) { break; } } if (checkSetWithoutGet && isSetPresent && !isGetPresent) { context.report({ node, message: "Getter is not present." }); } else if (checkGetWithoutSet && isGetPresent && !isSetPresent) { context.report({ node, message: "Setter is not present." }); } } return { ObjectExpression(node) { if (checkSetWithoutGet || checkGetWithoutSet) { checkLonelySetGet(node); } } }; } };
PypiClean
/accelbyte_py_sdk-0.48.0.tar.gz/accelbyte_py_sdk-0.48.0/accelbyte_py_sdk/api/platform/operations/service_plugin_config/update_section_plugin_config.py
# template file: ags_py_codegen # pylint: disable=duplicate-code # pylint: disable=line-too-long # pylint: disable=missing-function-docstring # pylint: disable=missing-module-docstring # pylint: disable=too-many-arguments # pylint: disable=too-many-branches # pylint: disable=too-many-instance-attributes # pylint: disable=too-many-lines # pylint: disable=too-many-locals # pylint: disable=too-many-public-methods # pylint: disable=too-many-return-statements # pylint: disable=too-many-statements # pylint: disable=unused-import # AccelByte Gaming Services Platform Service (4.34.0) from __future__ import annotations from typing import Any, Dict, List, Optional, Tuple, Union from .....core import Operation from .....core import HeaderStr from .....core import HttpResponse from ...models import SectionPluginConfigInfo from ...models import SectionPluginConfigUpdate from ...models import ValidationErrorEntity class UpdateSectionPluginConfig(Operation): """Update section plugin config (updateSectionPluginConfig) Update section config. Other detail info: * Required permission : resource=ADMIN:NAMESPACE:{namespace}:PLUGIN:CATALOG, action=4 (UPDATE) * Returns : updated service plugin config Properties: url: /platform/admin/namespaces/{namespace}/catalog/plugins/section method: PUT tags: ["ServicePluginConfig"] consumes: ["application/json"] produces: ["application/json"] securities: [BEARER_AUTH] body: (body) OPTIONAL SectionPluginConfigUpdate in body namespace: (namespace) REQUIRED str in path Responses: 200: OK - SectionPluginConfigInfo (successful operation) 422: Unprocessable Entity - ValidationErrorEntity (20002: validation error) """ # region fields _url: str = "/platform/admin/namespaces/{namespace}/catalog/plugins/section" _method: str = "PUT" _consumes: List[str] = ["application/json"] _produces: List[str] = ["application/json"] _securities: List[List[str]] = [["BEARER_AUTH"]] _location_query: str = None body: SectionPluginConfigUpdate # OPTIONAL in [body] namespace: str # REQUIRED in [path] # endregion fields # region properties @property def url(self) -> str: return self._url @property def method(self) -> str: return self._method @property def consumes(self) -> List[str]: return self._consumes @property def produces(self) -> List[str]: return self._produces @property def securities(self) -> List[List[str]]: return self._securities @property def location_query(self) -> str: return self._location_query # endregion properties # region get methods # endregion get methods # region get_x_params methods def get_all_params(self) -> dict: return { "body": self.get_body_params(), "path": self.get_path_params(), } def get_body_params(self) -> Any: if not hasattr(self, "body") or self.body is None: return None return self.body.to_dict() def get_path_params(self) -> dict: result = {} if hasattr(self, "namespace"): result["namespace"] = self.namespace return result # endregion get_x_params methods # region is/has methods # endregion is/has methods # region with_x methods def with_body(self, value: SectionPluginConfigUpdate) -> UpdateSectionPluginConfig: self.body = value return self def with_namespace(self, value: str) -> UpdateSectionPluginConfig: self.namespace = value return self # endregion with_x methods # region to methods def to_dict(self, include_empty: bool = False) -> dict: result: dict = {} if hasattr(self, "body") and self.body: result["body"] = self.body.to_dict(include_empty=include_empty) elif include_empty: result["body"] = SectionPluginConfigUpdate() if hasattr(self, "namespace") and 
self.namespace: result["namespace"] = str(self.namespace) elif include_empty: result["namespace"] = "" return result # endregion to methods # region response methods # noinspection PyMethodMayBeStatic def parse_response( self, code: int, content_type: str, content: Any ) -> Tuple[ Union[None, SectionPluginConfigInfo], Union[None, HttpResponse, ValidationErrorEntity], ]: """Parse the given response. 200: OK - SectionPluginConfigInfo (successful operation) 422: Unprocessable Entity - ValidationErrorEntity (20002: validation error) ---: HttpResponse (Undocumented Response) ---: HttpResponse (Unexpected Content-Type Error) ---: HttpResponse (Unhandled Error) """ pre_processed_response, error = self.pre_process_response( code=code, content_type=content_type, content=content ) if error is not None: return None, None if error.is_no_content() else error code, content_type, content = pre_processed_response if code == 200: return SectionPluginConfigInfo.create_from_dict(content), None if code == 422: return None, ValidationErrorEntity.create_from_dict(content) return self.handle_undocumented_response( code=code, content_type=content_type, content=content ) # endregion response methods # region static methods @classmethod def create( cls, namespace: str, body: Optional[SectionPluginConfigUpdate] = None, **kwargs ) -> UpdateSectionPluginConfig: instance = cls() instance.namespace = namespace if body is not None: instance.body = body return instance @classmethod def create_from_dict( cls, dict_: dict, include_empty: bool = False ) -> UpdateSectionPluginConfig: instance = cls() if "body" in dict_ and dict_["body"] is not None: instance.body = SectionPluginConfigUpdate.create_from_dict( dict_["body"], include_empty=include_empty ) elif include_empty: instance.body = SectionPluginConfigUpdate() if "namespace" in dict_ and dict_["namespace"] is not None: instance.namespace = str(dict_["namespace"]) elif include_empty: instance.namespace = "" return instance @staticmethod def get_field_info() -> Dict[str, str]: return { "body": "body", "namespace": "namespace", } @staticmethod def get_required_map() -> Dict[str, bool]: return { "body": False, "namespace": True, } # endregion static methods
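# Illustrative usage sketch (not part of the generated module): the operation is
# normally built with the create() classmethod above and then executed by the
# SDK's request runner. The namespace below is a made-up example value.
#
#   op = UpdateSectionPluginConfig.create(
#       namespace="my-namespace",
#       body=SectionPluginConfigUpdate(),  # populate with the desired section config
#   )
#
# parse_response() then maps a 200 payload to SectionPluginConfigInfo and a 422
# payload to ValidationErrorEntity, as documented in the class docstring.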
PypiClean
/prismacloud_api-5.2.7-py3-none-any.whl/prismacloud/api/pccs/pccs.py
import json import time import requests class PrismaCloudAPIPCCSMixin(): """ Requests and Output """ # pylint: disable=too-many-arguments,too-many-branches,too-many-locals,too-many-statements def execute_code_security(self, action, endpoint, query_params=None, body_params=None, request_headers=None, force=False, paginated=False): self.suppress_warnings_when_verify_false() if not self.token: self.login() if int(time.time() - self.token_timer) > self.token_limit: self.extend_login() if not request_headers: request_headers = {'Content-Type': 'application/json'} if body_params: body_params_json = json.dumps(body_params) else: body_params_json = None # Endpoints that return large numbers of results use a 'hasNext' key. # Pagination is via query parameters for both GET and POST, and appears to be specific to "List File Errors - POST". offset = 0 limit = 50 more = False results = [] while offset == 0 or more is True: if int(time.time() - self.token_timer) > self.token_limit: self.extend_login() if paginated: url = 'https://%s/%s?limit=%s&offset=%s' % (self.api, endpoint, limit, offset) else: url = 'https://%s/%s' % (self.api, endpoint) if self.token: request_headers['authorization'] = self.token self.debug_print('API URL: %s' % url) self.debug_print('API Headers: %s' % request_headers) self.debug_print('API Query Params: %s' % query_params) self.debug_print('API Body Params: %s' % body_params_json) # Add User-Agent to the headers request_headers['User-Agent'] = self.user_agent api_response = requests.request(action, url, headers=request_headers, params=query_params, data=body_params_json, verify=self.verify, timeout=self.timeout) self.debug_print('API Response Status Code: %s' % api_response.status_code) self.debug_print('API Response Headers: (%s)' % api_response.headers) if api_response.status_code in self.retry_status_codes: for exponential_wait in self.retry_waits: time.sleep(exponential_wait) api_response = requests.request(action, url, headers=request_headers, params=query_params, data=body_params_json, verify=self.verify, timeout=self.timeout) if api_response.ok: break # retry loop if api_response.ok: if not api_response.content: return None if api_response.headers.get('Content-Type') == 'application/x-gzip': return api_response.content if api_response.headers.get('Content-Type') == 'text/csv': return api_response.content.decode('utf-8') try: result = json.loads(api_response.content) #if result is None: # self.logger.error('JSON returned None, API: (%s) with query params: (%s) and body params: (%s) parsing response: (%s)' % (url, query_params, body_params, api_response.content)) # if force: # return results # or continue # self.error_and_exit(api_response.status_code, 'JSON returned None, API: (%s) with query params: (%s) and body params: (%s) parsing response: (%s)' % (url, query_params, body_params, api_response.content)) except ValueError: self.logger.error('JSON raised ValueError, API: (%s) with query params: (%s) and body params: (%s) parsing response: (%s)' % (url, query_params, body_params, api_response.content)) if force: return results # or continue self.error_and_exit(api_response.status_code, 'JSON raised ValueError, API: (%s) with query params: (%s) and body params: (%s) parsing response: (%s)' % (url, query_params, body_params, api_response.content)) if paginated: results.extend(result['data']) if 'hasNext' in result: self.debug_print('Retrieving Next Page of Results') offset += limit more = result['hasNext'] else: return results else: return result else: 
self.logger.error('API: (%s) responded with a status of: (%s), with query: (%s) and body params: (%s)' % (url, api_response.status_code, query_params, body_params)) if force: return results self.error_and_exit(api_response.status_code, 'API: (%s) with query params: (%s) and body params: (%s) responded with an error and this response:\n%s' % (url, query_params, body_params, api_response.text)) return results # Exit handler (Error). @classmethod def error_and_exit(cls, error_code, error_message='', system_message=''): raise SystemExit('\n\nStatus Code: %s\n%s\n%s\n' % (error_code, error_message, system_message))
PypiClean
/django-geoprisma-0.0.1.tar.gz/django-geoprisma-0.0.1/geoprisma/static/geoprisma/lib/OpenLayers/tools/jsmin.py
# This code is original from jsmin by Douglas Crockford, it was translated to # Python by Baruch Even. The original code had the following copyright and # license. # # /* jsmin.c # 2007-01-08 # # Copyright (c) 2002 Douglas Crockford (www.crockford.com) # # Permission is hereby granted, free of charge, to any person obtaining a copy of # this software and associated documentation files (the "Software"), to deal in # the Software without restriction, including without limitation the rights to # use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies # of the Software, and to permit persons to whom the Software is furnished to do # so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # The Software shall be used for Good, not Evil. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. # */ from StringIO import StringIO def jsmin(js): ins = StringIO(js) outs = StringIO() JavascriptMinify().minify(ins, outs) str = outs.getvalue() if len(str) > 0 and str[0] == '\n': str = str[1:] return str def isAlphanum(c): """return true if the character is a letter, digit, underscore, dollar sign, or non-ASCII character. """ return ((c >= 'a' and c <= 'z') or (c >= '0' and c <= '9') or (c >= 'A' and c <= 'Z') or c == '_' or c == '$' or c == '\\' or (c is not None and ord(c) > 126)); class UnterminatedComment(Exception): pass class UnterminatedStringLiteral(Exception): pass class UnterminatedRegularExpression(Exception): pass class JavascriptMinify(object): def _outA(self): self.outstream.write(self.theA) def _outB(self): self.outstream.write(self.theB) def _get(self): """return the next character from stdin. Watch out for lookahead. If the character is a control character, translate it to a space or linefeed. """ c = self.theLookahead self.theLookahead = None if c == None: c = self.instream.read(1) if c >= ' ' or c == '\n': return c if c == '': # EOF return '\000' if c == '\r': return '\n' return ' ' def _peek(self): self.theLookahead = self._get() return self.theLookahead def _next(self): """get the next character, excluding comments. peek() is used to see if a '/' is followed by a '/' or '*'. """ c = self._get() if c == '/': p = self._peek() if p == '/': c = self._get() while c > '\n': c = self._get() return c if p == '*': c = self._get() while 1: c = self._get() if c == '*': if self._peek() == '/': self._get() return ' ' if c == '\000': raise UnterminatedComment() return c def _action(self, action): """do something! What you do is determined by the argument: 1 Output A. Copy B to A. Get the next B. 2 Copy B to A. Get the next B. (Delete A). 3 Get the next B. (Delete B). action treats a string as a single character. Wow! action recognizes a regular expression if it is preceded by ( or , or =. 
""" if action <= 1: self._outA() if action <= 2: self.theA = self.theB if self.theA == "'" or self.theA == '"': while 1: self._outA() self.theA = self._get() if self.theA == self.theB: break if self.theA <= '\n': raise UnterminatedStringLiteral() if self.theA == '\\': self._outA() self.theA = self._get() if action <= 3: self.theB = self._next() if self.theB == '/' and (self.theA == '(' or self.theA == ',' or self.theA == '=' or self.theA == ':' or self.theA == '[' or self.theA == '?' or self.theA == '!' or self.theA == '&' or self.theA == '|'): self._outA() self._outB() while 1: self.theA = self._get() if self.theA == '/': break elif self.theA == '\\': self._outA() self.theA = self._get() elif self.theA <= '\n': raise UnterminatedRegularExpression() self._outA() self.theB = self._next() def _jsmin(self): """Copy the input to the output, deleting the characters which are insignificant to JavaScript. Comments will be removed. Tabs will be replaced with spaces. Carriage returns will be replaced with linefeeds. Most spaces and linefeeds will be removed. """ self.theA = '\n' self._action(3) while self.theA != '\000': if self.theA == ' ': if isAlphanum(self.theB): self._action(1) else: self._action(2) elif self.theA == '\n': if self.theB in ['{', '[', '(', '+', '-']: self._action(1) elif self.theB == ' ': self._action(3) else: if isAlphanum(self.theB): self._action(1) else: self._action(2) else: if self.theB == ' ': if isAlphanum(self.theA): self._action(1) else: self._action(3) elif self.theB == '\n': if self.theA in ['}', ']', ')', '+', '-', '"', '\'']: self._action(1) else: if isAlphanum(self.theA): self._action(1) else: self._action(3) else: self._action(1) def minify(self, instream, outstream): self.instream = instream self.outstream = outstream self.theA = None self.thaB = None self.theLookahead = None self._jsmin() self.instream.close() if __name__ == '__main__': import sys jsm = JavascriptMinify() jsm.minify(sys.stdin, sys.stdout)
PypiClean
/Ardy-0.0.6.tar.gz/Ardy-0.0.6/ardy/core/cmd/main.py
from __future__ import unicode_literals, print_function import argparse import sys import traceback from ardy.config import GlobalConfig from ardy.core.build import Build from ardy.core.deploy import Deploy from ardy.utils.log import logger class Command(object): config = None parser = None args = [] def __init__(self, *args, **kwargs): arguments = kwargs.get("arguments", False) self.exit_at_finish = kwargs.get("exit_at_finish", True) if not arguments: arguments = sys.argv[1:] self.parser = self.init_config(arguments) commands = self.parser.add_subparsers(title="Commands", description='Available commands', dest='command_name') # Add deploy commands parser_deploy = commands.add_parser('deploy', help='Upload functions to AWS Lambda') parser_deploy.add_argument("lambdafunctions", default="_ALL_", nargs='*', type=str, help='Lambda(s) to deploy') parser_deploy.add_argument("-z", "--zipfile", help="Path and filename of artefact to deploy") environments = self.config["deploy"].get("deploy_environments", []) if environments: parser_deploy.add_argument("environment", choices=environments, type=str, help='Environment where deploy: {}'.format(environments)) # Add invoke commands parser_invoke = commands.add_parser('invoke', help='Invoke a functions from AWS Lambda') parser_invoke.add_argument("-l", "--lambda-function", help="lambda") # Add build commands parser_build = commands.add_parser('build', help='Create an artefact and Upload to S3 if S3 is configured (See config)') parser_build.add_argument("-r", "--requirements", help="Path and filename of the python project") self.args = self.parser.parse_args(arguments) try: result = self.parse_commandline() if result: self.exit_ok("OK") except Exception as e: # traceback = sys.exc_info()[2] logger.error(traceback.format_exc()) self.exit_with_error("ERROR") @property def parser_base(self): parser = argparse.ArgumentParser(description='Ardy. AWS Lambda Toolkit') parser.add_argument("-f", "--conffile", help="Name to the project config file") parser.add_argument("-p", "--project", help="Project path") return parser def init_config(self, arguments): # TODO: refactor this method... 
sooo ugly :S parser = self.parser_base parser.add_argument('args', nargs=argparse.REMAINDER) base_parser = parser.parse_args(arguments) params = {} if getattr(base_parser, "project", False) and base_parser.project is not None: params["path"] = base_parser.project if getattr(base_parser, "conffile", False) and base_parser.conffile is not None: params["filename"] = base_parser.conffile self.config = GlobalConfig(**params) return self.parser_base def parse_commandline(self): params = {} run_params = {} result = False if self.args.command_name == "deploy": if self.args.lambdafunctions and self.args.lambdafunctions is not "_ALL_": params["lambdas_to_deploy"] = self.args.lambdafunctions if getattr(self.args, "environment", False): params["environment"] = self.args.environment if getattr(self.args, "zipfile", False): run_params["path_to_zip_file"] = self.args.zipfile deploy = Deploy(config=self.config, **params) result = deploy.run(**run_params) elif self.args.command_name == "invoke": pass elif self.args.command_name == "build": if getattr(self.args, "requirements", False): run_params["requirements"] = self.args.requirements build = Build(config=self.config) result = build.run(**params) else: self.parser.print_help() return result def exit_with_error(self, msg=""): self.print_error(msg) if self.exit_at_finish: sys.exit(2) def exit_ok(self, msg=""): self.print_ok(msg) if self.exit_at_finish: sys.exit(0) @staticmethod def print_ok(msg=""): print('\033[92m\033[1m ' + msg + ' \033[0m\033[0m') @staticmethod def print_error(msg=""): print('\033[91m\033[1m ' + msg + ' \033[0m\033[0m') if __name__ == '__main__': cmd = Command(arguments=sys.argv[1:])
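# Illustrative command-line examples (not part of the original source); "ardy"
# here stands for however this entry point is exposed by the package:
#
#   ardy build -r requirements.txt
#   ardy deploy myfunction -z dist/artefact.zip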
PypiClean
/tensorflow_tflex-1.13.1rc1-cp27-cp27mu-manylinux1_x86_64.whl/tensorflow_tflex-1.13.1rc1.data/purelib/tensorflow/python/layers/layers.py
# pylint: disable=line-too-long """This library provides a set of high-level neural networks layers.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function # pylint: disable=g-bad-import-order,unused-import # Base objects. from tensorflow.python.layers.base import Layer from tensorflow.python.keras.engine.input_spec import InputSpec # Core layers. from tensorflow.python.layers.core import Dense from tensorflow.python.layers.core import Dropout from tensorflow.python.layers.core import Flatten from tensorflow.python.layers.core import dense from tensorflow.python.layers.core import dropout from tensorflow.python.layers.core import flatten # Convolutional layers. from tensorflow.python.layers.convolutional import SeparableConv1D from tensorflow.python.layers.convolutional import SeparableConv2D from tensorflow.python.layers.convolutional import SeparableConvolution2D from tensorflow.python.layers.convolutional import Conv2DTranspose from tensorflow.python.layers.convolutional import Convolution2DTranspose from tensorflow.python.layers.convolutional import Conv3DTranspose from tensorflow.python.layers.convolutional import Convolution3DTranspose from tensorflow.python.layers.convolutional import Conv1D from tensorflow.python.layers.convolutional import Convolution1D from tensorflow.python.layers.convolutional import Conv2D from tensorflow.python.layers.convolutional import Convolution2D from tensorflow.python.layers.convolutional import Conv3D from tensorflow.python.layers.convolutional import Convolution3D from tensorflow.python.layers.convolutional import separable_conv1d from tensorflow.python.layers.convolutional import separable_conv2d from tensorflow.python.layers.convolutional import conv2d_transpose from tensorflow.python.layers.convolutional import conv3d_transpose from tensorflow.python.layers.convolutional import conv1d from tensorflow.python.layers.convolutional import conv2d from tensorflow.python.layers.convolutional import conv3d # Pooling layers. from tensorflow.python.layers.pooling import AveragePooling1D from tensorflow.python.layers.pooling import MaxPooling1D from tensorflow.python.layers.pooling import AveragePooling2D from tensorflow.python.layers.pooling import MaxPooling2D from tensorflow.python.layers.pooling import AveragePooling3D from tensorflow.python.layers.pooling import MaxPooling3D from tensorflow.python.layers.pooling import average_pooling1d from tensorflow.python.layers.pooling import max_pooling1d from tensorflow.python.layers.pooling import average_pooling2d from tensorflow.python.layers.pooling import max_pooling2d from tensorflow.python.layers.pooling import average_pooling3d from tensorflow.python.layers.pooling import max_pooling3d # Normalization layers. from tensorflow.python.layers.normalization import BatchNormalization from tensorflow.python.layers.normalization import batch_normalization # pylint: enable=g-bad-import-order,unused-import
PypiClean
/scribes.file.quick-open-fix-0.2.3.tar.gz/scribes.file.quick-open-fix-0.2.3/PluginFixedQuickOpen.py
import time import os from gio import File import gobject name = "Fixed Quick Open plugin" authors = ["Anton Bobrov <[email protected]>"] version = 0.2 autoload = True class_name = "FixedQuickOpenPlugin" short_description = "Keep start directory from changing" long_description = "Patches original QuickOpen for emmiting only start directory" try: import quick_open_settings as settings except ImportError: open(os.path.join(os.path.dirname(__file__), 'quick_open_settings.py'), 'w').write("recent_pathes = []\n") import quick_open_settings as settings class FixedQuickOpenPlugin(object): def __init__(self, editor): editor.response() self.editor = editor self.last_root = None def do_patch(self): from QuickOpen.FolderPathUpdater import Updater def new_updater(this, parent=False): editor_uri = this._Updater__editor.pwd_uri root = None for p in settings.recent_pathes: if editor_uri.startswith(p): root = p break if not root: root = self.find_project_root(editor_uri) settings.recent_pathes.append(root) if parent: i = settings.recent_pathes.index(self.last_root or root) root = settings.recent_pathes[(i + 1) % len(settings.recent_pathes)] self.last_root = root this._Updater__manager.emit("current-path", root) return False Updater._Updater__update = new_updater def check_core_plugins(self, *args): try: from QuickOpen.FolderPathUpdater import Updater self.do_patch() except ImportError: print "Can't find core plugins. Waiting ant try again" gobject.timeout_add(300, self.check_core_plugins) return False def load(self): self.check_core_plugins() def unload(self): pass def find_project_root(self, path): f = File(path) project_files = ('.git', '.ropeproject', '.bzr', '.hg', '.scribes_project') while True: if any(f.get_child(r).query_exists() for r in project_files): return f.get_uri() p = f.get_parent() if p: f = p else: return path
PypiClean
/py-pure-client-1.38.0.tar.gz/py-pure-client-1.38.0/pypureclient/flashblade/FB_2_7/models/api_clients_response.py
import pprint import re import six import typing from ....properties import Property if typing.TYPE_CHECKING: from pypureclient.flashblade.FB_2_7 import models class ApiClientsResponse(object): """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'items': 'list[ApiClient]' } attribute_map = { 'items': 'items' } required_args = { } def __init__( self, items=None, # type: List[models.ApiClient] ): """ Keyword args: items (list[ApiClient]) """ if items is not None: self.items = items def __setattr__(self, key, value): if key not in self.attribute_map: raise KeyError("Invalid key `{}` for `ApiClientsResponse`".format(key)) self.__dict__[key] = value def __getattribute__(self, item): value = object.__getattribute__(self, item) if isinstance(value, Property): return None else: return value def to_dict(self): """Returns the model properties as a dict""" result = {} for attr, _ in six.iteritems(self.swagger_types): if hasattr(self, attr): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) else: result[attr] = value if issubclass(ApiClientsResponse, dict): for key, value in self.items(): result[key] = value return result def to_str(self): """Returns the string representation of the model""" return pprint.pformat(self.to_dict()) def __repr__(self): """For `print` and `pprint`""" return self.to_str() def __eq__(self, other): """Returns true if both objects are equal""" if not isinstance(other, ApiClientsResponse): return False return self.__dict__ == other.__dict__ def __ne__(self, other): """Returns true if both objects are not equal""" return not self == other
PypiClean
/winevtrc-20220106.tar.gz/winevtrc-20220106/docs/sources/eventlog-providers/Provider-SceSrv.md
## SceSrv Seen on: * Windows 10 (1511, 1607, 1703, 1709, 1803, 1809, 1903, 1909, 2004, 20H2) * Windows 11 (21H2) * Windows 2000 * Windows 2003 * Windows 2008 * Windows 2012 * Windows 7 * Windows 8.0 * Windows 8.1 * Windows Vista * Windows XP 32-bit * Windows XP 64-bit <table border="1" class="docutils"> <tbody> <tr> <td><b>Log source(s):</b></td> <td>SceSrv</td> </tr> <tr> <td><b>Log type:</b></td> <td>Application</td> </tr> <tr> <td><b>Event message file(s):</b></td> <td>%systemroot%\system32\scesrv.dll</td> </tr> </tbody> </table> &nbsp;
PypiClean
/pyrallel.lib-0.0.10.tar.gz/pyrallel.lib-0.0.10/pyrallel/parallel_processor.py
import multiprocess as mp import threading import queue import inspect import sys import typing from typing import Callable, Iterable from pyrallel import Paralleller if sys.version_info >= (3, 8): from pyrallel import ShmQueue class Mapper(object): """ Mapper class. This defines how mapper works. The methods will be called in following order:: enter (one time) -> process (many times) -> exit (one time) """ def __init__(self, idx): self._idx = idx self._progress_info = ProgressThread.init_mapper_progress_info() def __enter__(self): self.enter() return self def __exit__(self, exc_type, exc_val, exc_tb): self.exit(exc_type, exc_val, exc_tb) def enter(self): """ Invoked when subprocess is created and listening the queue. """ pass def exit(self, *args, **kwargs): """ Invoked when subprocess is going to exit. Arguments will be set if exception occurred. """ pass def process(self, *args, **kwargs): """ Same as mapper function, but `self` argument can provide additional context (e.g., `self._idx`). """ raise NotImplementedError class CollectorThread(threading.Thread): """ Handle collector in main process. Create a thread and call ParallelProcessor.collect(). """ def __init__(self, instance, collector): super(CollectorThread, self).__init__() self.collector = collector self.instance = instance def run(self): for batched_collector in self.instance.collect(): for o in batched_collector: self.collector(*o) class ProgressThread(threading.Thread): """ Progress information in main process. """ P_ADDED = 0 P_LOADED = 1 P_PROCESSED = 2 P_TOTAL = 3 def __init__(self, instance, progress, progress_total, num_of_processor): super(ProgressThread, self).__init__() self.progress_info = { ProgressThread.P_ADDED: 0, ProgressThread.P_LOADED: 0, ProgressThread.P_PROCESSED: 0, ProgressThread.P_TOTAL: progress_total } self.mapper_progress_info = [ProgressThread.init_mapper_progress_info() for _ in range(num_of_processor)] self.instance = instance self.progress = progress @staticmethod def init_mapper_progress_info(): return {ProgressThread.P_LOADED: 0, ProgressThread.P_PROCESSED: 0} def refresh_progress_info(self): self.progress_info[ProgressThread.P_LOADED] \ = sum([p[ProgressThread.P_LOADED] for p in self.mapper_progress_info]) self.progress_info[ProgressThread.P_PROCESSED] \ = sum([p[ProgressThread.P_PROCESSED] for p in self.mapper_progress_info]) def run(self): for idx, mapper_progress_info in self.instance.get_progress(): self.mapper_progress_info[idx] = mapper_progress_info self.refresh_progress_info() progress_info = { 'added': self.progress_info[ProgressThread.P_ADDED], 'loaded': self.progress_info[ProgressThread.P_LOADED], 'processed': self.progress_info[ProgressThread.P_PROCESSED], 'total': self.progress_info[ProgressThread.P_TOTAL], } self.progress(progress_info) class ParallelProcessor(Paralleller): """ Args: num_of_processor (int): Number of processes to use. mapper (Callable / Mapper): Function or subclass of `Mapper` class. max_size_per_mapper_queue (int, optional): Maximum size of mapper queue for one process. If it's full, the corresponding process will be blocked. 0 by default means unlimited. collector (Callable, optional): If the collector data needs to be get in main process (another thread), set this handler, the arguments are same to the return from mapper. The return result is one by one, order is arbitrary. max_size_per_collector_queue (int, optional): Maximum size of collector queue for one process. If it's full, the corresponding process will be blocked. 0 by default means unlimited. 
enable_process_id (bool, optional): If it's true, an additional argument `_idx` (process id) will be passed to `mapper` function. This has no effect for `Mapper` class. It defaults to False. batch_size (int, optional): Batch size, defaults to 1. progress (Callable, optional): Progress function, which takes a dictionary as input. The dictionary contains following keys: `total` can be set by `progress_total`, `added` indicates the number of tasks has been added to the queue, `loaded` indicates the number of tasks has been loaded to worker processes, `processed` indicates the number of tasks has been processed by worker processes. Defaults to None. progress_total (int, optional): Total number of tasks. Defaults to None. use_shm (bool, optional): When True, and when running on Python version 3.8 or later, use ShmQueue for higher performance. Defaults to False. enable_collector_queues (bool, optional): When True, create a collector queue for each processor. When False, do not allocate collector queues, saving resources. Defaults to True. single_mapper_queue (bool, optional): When True, allocate a single mapper queue that will be shared between the worker processes. Sending processes can go to sleep when the mapper queue is full. When False, each process gets its own mapper queue, and CPU-intensive polling may be needed to find a mapper queue which can accept a new request. Note: - Do NOT implement heavy compute-intensive operations in collector, they should be in mapper. - Tune the value for queue size and batch size will optimize performance a lot. - `collector` only collects returns from `mapper` or `Mapper.process`. - The frequency of executing `progress` function depends on CPU. """ # Command format in queue. Represent in tuple. # The first element of tuple will be command, the rests are arguments or data. # (CMD_XXX, args...) 
CMD_DATA = 0 CMD_STOP = 1 QSTATS_ON = 0 QSTATS_OFF = 1 def __init__(self, num_of_processor: int, mapper: Callable, max_size_per_mapper_queue: int = 0, collector: Callable = None, max_size_per_collector_queue: int = 0, enable_process_id: bool = False, batch_size: int = 1, progress=None, progress_total = None, use_shm=False, enable_collector_queues=True, single_mapper_queue: bool = False): self.num_of_processor = num_of_processor self.single_mapper_queue = single_mapper_queue if sys.version_info >= (3, 8): self.collector_queues: typing.Optional[typing.Union[ShmQueue, mp.Queue]] else: self.collector_queues: typing.Optional[mp.Queue] if use_shm: if sys.version_info >= (3, 8): if single_mapper_queue: self.mapper_queues = [ShmQueue(maxsize=max_size_per_mapper_queue * num_of_processor)] else: self.mapper_queues = [ShmQueue(maxsize=max_size_per_mapper_queue) for _ in range(num_of_processor)] if enable_collector_queues: self.collector_queues = [ShmQueue(maxsize=max_size_per_collector_queue) for _ in range(num_of_processor)] else: self.collector_queues = None else: raise ValueError("shm not available in this version of Python.") else: if single_mapper_queue: self.mapper_queues = [mp.Queue(maxsize=max_size_per_mapper_queue * num_of_processor)] else: self.mapper_queues = [mp.Queue(maxsize=max_size_per_mapper_queue) for _ in range(num_of_processor)] if enable_collector_queues: self.collector_queues = [mp.Queue(maxsize=max_size_per_collector_queue) for _ in range(num_of_processor)] self.collector_qstats = [self.QSTATS_ON for _ in range(num_of_processor)] else: self.collector_queues = None if self.collector_queues is not None: if single_mapper_queue: self.processes = [mp.Process(target=self._run, args=(i, self.mapper_queues[0], self.collector_queues[i])) for i in range(num_of_processor)] else: self.processes = [mp.Process(target=self._run, args=(i, self.mapper_queues[i], self.collector_queues[i])) for i in range(num_of_processor)] else: if single_mapper_queue: self.processes = [mp.Process(target=self._run, args=(i, self.mapper_queues[0], None)) for i in range(num_of_processor)] else: self.processes = [mp.Process(target=self._run, args=(i, self.mapper_queues[i], None)) for i in range(num_of_processor)] if progress is not None: if sys.version_info >= (3, 8): self.progress_queues: typing.Optional[typing.Union[ShmQueue, mp.Queue]] else: self.progress_queues: typing.Optional[mp.Queue] if use_shm: if sys.version_info >= (3, 8): self.progress_queues = [ShmQueue(maxsize=1) for _ in range(num_of_processor)] else: raise ValueError("shm not available in this version of Python.") else: self.progress_queues = [mp.Queue(maxsize=1) for _ in range(num_of_processor)] self.progress_qstats = [self.QSTATS_ON for _ in range(num_of_processor)] else: self.progress_queues = None self.progress = progress ctx = self if not inspect.isclass(mapper) or not issubclass(mapper, Mapper): class DefaultMapper(Mapper): def process(self, *args, **kwargs): if ctx.enable_process_id: kwargs['_idx'] = self._idx return mapper(*args, **kwargs) self.mapper = DefaultMapper else: self.mapper = mapper self.collector = collector self.mapper_queue_index = 0 self.enable_process_id = enable_process_id self.batch_size = batch_size self.batch_data = [] # collector can be handled in each process or in main process after merging (collector needs to be set) # if collector is set, it needs to be handled in main process; # otherwise, it assumes there's no collector. 
if collector: self.collector_thread = CollectorThread(self, collector) if progress: self.progress_thread = ProgressThread(self, progress, progress_total, num_of_processor) def start(self): """ Start processes and threads. """ if self.collector: self.collector_thread.start() if self.progress: self.progress_thread.start() for p in self.processes: p.start() def join(self): """ Block until processes and threads return. """ if self.collector: self.collector_thread.join() if self.progress: self.progress_thread.join() for p in self.processes: p.join() for q in self.mapper_queues: q.close() q.join_thread() if self.collector_queues is not None: for q in self.collector_queues: q.close() q.join_thread() if self.progress_queues is not None: for q in self.progress_queues: q.close() q.join_thread() pass def task_done(self): """ Indicate that all resources which need to add_task are added to processes. (main process, blocked) """ if len(self.batch_data) > 0: self._add_task(self.batch_data) self.batch_data = [] for i in range(self.num_of_processor): if self.single_mapper_queue: self.mapper_queues[0].put((ParallelProcessor.CMD_STOP,)) else: self.mapper_queues[i].put((ParallelProcessor.CMD_STOP,)) def add_task(self, *args, **kwargs): """ Add data to one a mapper queue. When a single mapper queue is in use, put the process to sleep if the queue is full. When multiple mapper queues are in use (one per process), use CPU-intensive polling (round-robin processing) to find the next available queue. (main process, blocked or unblocked depending upon single_mapper_queue) """ self.batch_data.append((args, kwargs)) if self.progress: self.progress_thread.progress_info[ProgressThread.P_ADDED] += 1 if len(self.batch_data) == self.batch_size: self._add_task(self.batch_data) self.batch_data = [] # reset buffer def _add_task(self, batched_args): if self.single_mapper_queue: self.mapper_queues[0].put((ParallelProcessor.CMD_DATA, batched_args)) else: while True: q = self.mapper_queues[self.mapper_queue_index] self.mapper_queue_index = (self.mapper_queue_index + 1) % self.num_of_processor try: q.put_nowait((ParallelProcessor.CMD_DATA, batched_args)) return # put in except queue.Full: continue # find next available def _run(self, idx: int, mapper_queue: mp.Queue, collector_queue: typing.Optional[mp.Queue]): """ Process's activity. It handles queue IO and invokes user's mapper handler. 
(subprocess, blocked, only two queues can be used to communicate with main process) """ with self.mapper(idx) as mapper: while True: data = mapper_queue.get() if data[0] == ParallelProcessor.CMD_STOP: # print(idx, 'stop') self._update_progress(mapper, finish=True) if self.collector and collector_queue is not None: collector_queue.put((ParallelProcessor.CMD_STOP,)) return elif data[0] == ParallelProcessor.CMD_DATA: batch_result = [] for d in data[1]: args, kwargs = d[0], d[1] # print(idx, 'data') self._update_progress(mapper, type_=ProgressThread.P_LOADED) result = mapper.process(*args, **kwargs) self._update_progress(mapper, type_=ProgressThread.P_PROCESSED) if collector_queue is not None: if self.collector: if not isinstance(result, tuple): # collector must represent as tuple result = (result,) batch_result.append(result) if collector_queue is not None and len(batch_result) > 0: collector_queue.put((ParallelProcessor.CMD_DATA, batch_result)) batch_result = [] # reset buffer def _update_progress(self, mapper, type_=None, finish=False): if self.progress: try: if not finish: # No need to ensure the status will be pulled from main process # so if queue is full just skip this update mapper._progress_info[type_] += 1 self.progress_queues[mapper._idx].put_nowait( (ParallelProcessor.CMD_DATA, mapper._progress_info) ) else: # update the last progress of each mapper self.progress_queues[mapper._idx].put( (ParallelProcessor.CMD_STOP, mapper._progress_info) ) except queue.Full: pass def collect(self): """ Get data from collector queue sequentially. (main process, unblocked, using round robin to find next available queue) """ if not self.collector: return idx = 0 while True: # all queues finished if sum([int(s == self.QSTATS_OFF) for s in self.collector_qstats]) == self.num_of_processor: return # get next unfinished queue while self.collector_qstats[idx] == self.QSTATS_OFF: idx = (idx + 1) % self.num_of_processor q = self.collector_queues[idx] try: data = q.get_nowait() # get out if data[0] == ParallelProcessor.CMD_STOP: self.collector_qstats[idx] = self.QSTATS_OFF elif data[0] == ParallelProcessor.CMD_DATA: yield data[1] except queue.Empty: continue # find next available finally: idx = (idx + 1) % self.num_of_processor def get_progress(self): """ Get progress information from each mapper. (main process) """ if not self.progress: return idx = 0 while True: # all queues finished if sum([int(s == self.QSTATS_OFF) for s in self.progress_qstats]) == self.num_of_processor: return # get next unfinished queue while self.progress_qstats[idx] == self.QSTATS_OFF: idx = (idx + 1) % self.num_of_processor q = self.progress_queues[idx] try: data = q.get_nowait() if data[0] == ParallelProcessor.CMD_STOP: self.progress_qstats[idx] = self.QSTATS_OFF elif data[0] == ParallelProcessor.CMD_DATA: pass yield idx, data[1] except queue.Empty: continue # find next available finally: idx = (idx + 1) % self.num_of_processor
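# Illustrative usage sketch (not part of the original module): a minimal driver
# following the call order described in the ParallelProcessor docstring. The
# mapper/collector functions and the input range are made-up examples.
if __name__ == '__main__':
    def square(x):
        # compute-intensive work belongs in the mapper (runs in worker processes)
        return x * x

    collected = []

    def keep(value):
        # the collector runs in a thread of the main process; keep it lightweight
        collected.append(value)

    pp = ParallelProcessor(num_of_processor=4, mapper=square,
                           collector=keep, batch_size=8)
    pp.start()          # spawn worker processes and helper threads
    for i in range(100):
        pp.add_task(i)  # buffered into batches and pushed to the mapper queue(s)
    pp.task_done()      # signal that no more tasks will be added
    pp.join()           # block until workers and the collector thread finish
    print(sorted(collected))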
PypiClean
/utils/library_functions.py
from scipy.signal import butter, lfilter, filtfilt
from scipy import interpolate as interp
import numpy as np
from numpy import pi, cos, sin, arctan2, sqrt, dot
import matplotlib.dates as mdates
import datetime
import math
import pandas as pd
from loguru import logger as logging
import urllib3
from urllib3 import PoolManager
import glob
#import numba
#from numba import jit


def normD(a):
    # Euclidean norm of a 3-component vector
    norm = 0
    for i in range(3):
        norm += a[i] * a[i]
    return np.sqrt(norm)


def crossD(a, b):
    # cross product of two 3-component vectors
    cross = [0] * 3
    cross[0] = a[1] * b[2] - a[2] * b[1]
    cross[1] = a[2] * b[0] - a[0] * b[2]
    cross[2] = a[0] * b[1] - a[1] * b[0]
    return cross


def replace_at_index1(tup, ix, val):
    lst = list(tup)
    for i in range(0, len(ix)):
        lst[ix[i]] = val[i]
    return tuple(lst)


# Butterworth filter coefficients
def butter_bandpass(lowcut, highcut, fs, order):
    nyq = 0.5 * fs
    low = lowcut / nyq
    high = highcut / nyq
    b, a = butter(order, [low, high], btype='band')
    return b, a


# Band-pass Butterworth filter
def butter_bandpass_filter(data, lowcut, highcut, fs, order):
    b, a = butter_bandpass(lowcut, highcut, fs, order=order)
    y = lfilter(b, a, data)
    return y


def butter_highpass(cutoff, fs, order=5):
    nyq = 0.5 * fs
    normal_cutoff = cutoff / nyq
    b, a = butter(order, normal_cutoff, btype='high', analog=False)
    return b, a


def butter_highpass_filter(data, cutoff, fs, order=5):
    b, a = butter_highpass(cutoff, fs, order=order)
    y = filtfilt(b, a, data)
    return y


# fill the gaps left by NaNs
def fill_nan(A):
    '''
    interpolate to fill nan values
    '''
    if np.isnan(A[0]):
        A[0] = np.nanmin(A)
    if np.isnan(A[-1]):
        A[-1] = np.nanmin(A)
    inds = np.arange(A.shape[0])
    good = np.where(np.isfinite(A))
    f = interp.interp1d(inds[good], A[good], bounds_error=False)
    B = np.where(np.isfinite(A), A, f(inds))
    return B


def format_func(value, tick_number):
    hora = pd.to_datetime(mdates.num2date(value)).replace(tzinfo=None)
    return ('{:02d}:{:02d} UTC \n {:04d}/{:02d}/{:02d}'.format(hora.hour, hora.minute, hora.year, hora.month, hora.day))


#@numba.jit(nopython=True, nogil=True)
def calcExEFW(efield, bfield, filter=True):
    # reconstruct Ex from Ey, Ez and B assuming E . B = 0; samples where B
    # lies within ~6 degrees of the spin plane are set to NaN to avoid
    # dividing by a small Bx
    ey = efield[:,1]
    ez = efield[:,2]
    bx = bfield[:,0]
    by = bfield[:,1]
    bz = bfield[:,2]
    ex = np.zeros((len(bx)))
    for i in range(len(bx)):
        if filter:
            angle = math.degrees(math.atan(bx[i]/(math.sqrt(by[i]**2 + bz[i]**2))))
            if angle < 6.0:
                ex[i] = np.nan
            else:
                ex[i] = -((ey[i]*by[i]) + (ez[i]*bz[i]))/(bx[i])
        else:
            ex[i] = -((ey[i]*by[i]) + (ez[i]*bz[i]))/(bx[i])
    return ex

#%%
def smooth(y, box_pts):
    box = np.ones(box_pts)/box_pts
    y_smooth = np.convolve(y, box, mode='same')
    return y_smooth


# convert the coordinate system from GSE to the field-aligned system
def rotate_field_fac(x, y, z, bx, by, bz, ex, ey, ez):
    '''
    rotate the fields into the Field Aligned Coordinate system.
    x, y, z: spacecraft position components
    bx, by, bz: magnetic field components
    ex, ey, ez: electric field components
    '''
    bxs = smooth(bx,11)
    bys = smooth(by,11)
    bzs = smooth(bz,11)
    exs = smooth(ex,11)
    eys = smooth(ey,11)
    ezs = smooth(ez,11)
    tempB = np.zeros((len(x), 3))
    tempE = np.zeros((len(x), 3))
    for i in range(0, len(x)):
        JacB = np.zeros((3, 3))
        JacE = np.zeros((3, 3))
        r = [x[i], y[i], z[i]] / np.sqrt(x[i] **2 + y[i] **2 + z[i] **2)
        JacB[0, :] = [bxs[i], bys[i], bzs[i]] / np.sqrt(bxs[i] **2 + bys[i] ** 2 + bzs[i] **2 )
        JacB[1, :] = crossD(JacB[0, :], r) / normD(crossD(JacB[0, :], r))
        JacB[2, :] = crossD(JacB[1, :], JacB[0, :]) / normD(crossD(JacB[1, :], JacB[0, :]))
        JacE[0, :] = [exs[i], eys[i], ezs[i]] / np.sqrt(exs[i] **2 + eys[i] ** 2 + ezs[i] **2 )
        JacE[1, :] =
crossD(JacE[0, :], r) / normD(crossD(JacE[0, :], r)) JacE[2, :] = crossD(JacE[1, :], JacE[0, :]) / normD(crossD(JacE[1, :], JacE[0, :])) # apply rotation for B tempB[i, :] = np.dot(JacB, ([bx[i], by[i], bz[i]])) # Apply the rotation for E tempE[i, :] = np.dot(JacE, ([ex[i], ey[i], ez[i]])) temp_data = [tempB[:, 0], tempB[:, 1], tempB[:, 2], tempE[:, 0], tempE[:, 1], tempE[:, 2], x, y, z] return (pd.DataFrame(np.transpose(temp_data), columns=['bp', 'ba', 'br', 'ep', 'ea', 'er','x', 'y', 'z'])) def l_dipole(cgm_lat): return 1. / (np.cos(np.deg2rad(cgm_lat)) ** 2.) def geo2mag(incoord): """geographic coordinate to magnetic coordinate: incoord is numpy array of shape (2,*) array([[glat0,glat1,glat2,...], [glon0,glon1,glon2,...]) where glat, glon are geographic latitude and longitude (or if you have only one point it is [[glat,glon]]) returns array([mlat0,mlat1,...], [mlon0,mlon1,...]]) """ # SOME 'constants'... lon = 360 - 72.6 # or 71.41W lat = 80.4 r = 1.0 # convert first to radians lon, lat = [x * pi / 180 for x in (lon, lat)] glat = incoord[0] * pi / 180.0 glon = incoord[1] * pi / 180.0 galt = glat * 0. + r coord = np.vstack([glat, glon, galt]) # convert to rectangular coordinates x = coord[2] * cos(coord[0]) * cos(coord[1]) y = coord[2] * cos(coord[0]) * sin(coord[1]) z = coord[2] * sin(coord[0]) xyz = np.vstack((x, y, z)) # computer 1st rotation matrix: geo2maglon = np.zeros((3, 3), dtype='float64') geo2maglon[0, 0] = cos(lon) geo2maglon[0, 1] = sin(lon) geo2maglon[1, 0] = -sin(lon) geo2maglon[1, 1] = cos(lon) geo2maglon[2, 2] = 1. out = dot(geo2maglon, xyz) tomaglat = np.zeros((3, 3), dtype='float64') tomaglat[0, 0] = cos(.5 * pi - lat) tomaglat[0, 2] = -sin(.5 * pi - lat) tomaglat[2, 0] = sin(.5 * pi - lat) tomaglat[2, 2] = cos(.5 * pi - lat) tomaglat[1, 1] = 1. 
out = dot(tomaglat, out) mlat = arctan2(out[2], sqrt(out[0] * out[0] + out[1] * out[1])) mlat = mlat * 180 / pi mlon = arctan2(out[1], out[0]) mlon = mlon * 180 / pi # outcoord = np.vstack((mlat, mlon)) return [mlat, (360.0 + mlon)] def cutFlux_lshell(enSignal,lValue, EnChanel, lArray, timeArray): l = float(lValue) cutF = list() cut_date = list() for i, ll in enumerate(lArray): if ll > l-0.01 and ll < l+0.01: cutF.append(enSignal[i, EnChanel]) cut_date.append(timeArray[i]) return cut_date, cutF def calc_fce(bMagnitude): q_e = 1.60217653e-19 m_e = 9.1094e-31 #electron's rest mass (kg) wce = q_e*(bMagnitude*1e-9)/m_e #Electron cyclotron frequency (rad/s) fce_calc = wce/(2*np.pi) #Electron cyclotron frequency (Hz) return fce_calc def cutFlux_lshell2(enSignal, lvalue): cutF = enSignal.copy() l = float(lvalue) mask = (cutF['L'] < l-0.01) cutF[mask] = np.nan mask = (cutF['L'] > l + 0.01) cutF[mask] = np.nan cutF[cutF < 1e-10] = np.nan return cutF.interpolate('linear') def testFiles(local_path, remote_names): non_exiting_names = [] for rn in remote_names: temp_find = glob.glob(f"{local_path}/{rn}") if len(temp_find)==0: non_exiting_names.append(rn) else: logging.info(f"File is current: {temp_find[0]}") return non_exiting_names def testRemoteDir(config_file, satellite, prb, instrument, level, datatype): pool = PoolManager() logging.info("Testing Connection") try: remote_path = config_file[satellite]['remote_data_dir'] responseSubpath = pool.request("GET", remote_path, preload_content=False, timeout=1) responseSubpath.close() logging.warning(f"Using {config_file[satellite]['remote_data_dir']}...") changeRemoteDir = False except (Exception) as e: # logger.error(e.args) logging.error(f"Directory {config_file[satellite]['remote_data_dir']} is not available...") remote_path = config_file[satellite]['remote_subpath'][str(prb)][instrument]["secondRemoteDir"] logging.info(f"testing {remote_path}...") responseSubpath = pool.request("GET", remote_path, preload_content=False, timeout=1) responseSubpath.close() logging.warning(f"Using {remote_path}...") changeRemoteDir = True except (Exception) as e: logging.error(e) logging.warning(f"There are no repository available") changeRemoteDir = False raise if changeRemoteDir: subpathKey = "altern_subpath" filenameKey = "altern_filename" if instrument in ['rept', 'mageis'] and level == '3': datatype = 'pitchangle' else: subpathKey = 'subpath' filenameKey = 'filename' return remote_path, subpathKey, filenameKey, datatype
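# ---------------------------------------------------------------------------
# Illustrative, self-contained example of the filtering helpers above.  The
# sampling rate, band edges, gap location and field magnitude used here are
# arbitrary sample values, not values taken from any mission data.
# ---------------------------------------------------------------------------
if __name__ == '__main__':
    fs = 64.0                      # sampling frequency [Hz] (assumed)
    t = np.arange(0, 10, 1.0 / fs)
    # synthetic signal: 0.5 Hz + 8 Hz components plus a short gap of NaNs
    sig = np.sin(2 * np.pi * 0.5 * t) + 0.2 * np.sin(2 * np.pi * 8.0 * t)
    sig[100:110] = np.nan

    filled = fill_nan(sig)                                            # interpolate over the gap
    band = butter_bandpass_filter(filled, 5.0, 12.0, fs, order=4)     # keep roughly 5-12 Hz
    high = butter_highpass_filter(filled, 1.0, fs, order=4)           # remove content below 1 Hz

    print('band-pass rms:', np.sqrt(np.nanmean(band ** 2)))
    print('high-pass rms:', np.sqrt(np.nanmean(high ** 2)))

    # electron cyclotron frequency for a 200 nT field (sample value)
    print('fce [Hz]:', calc_fce(200.0))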
PypiClean
/dxtbx-3.0.4.1.tar.gz/dxtbx-3.0.4.1/format/FormatHDF5PAL.py
from __future__ import absolute_import, division, print_function

import sys

import h5py
import numpy as np
from scitbx.array_family import flex

from dxtbx.format.FormatHDF5 import FormatHDF5


class FormatHDF5PAL(FormatHDF5):
    @staticmethod
    def understand(image_file):
        with h5py.File(image_file, "r") as h5_handle:
            if len(h5_handle) != 1:
                return False
            for key in h5_handle:
                if not key.startswith("R"):
                    return False
                try:
                    int(key.lstrip("R"))
                except ValueError:
                    return False
                for subkey in h5_handle[key]:
                    if subkey not in ("header", "scan_dat"):
                        return False
                    if subkey == "scan_dat":
                        if "raymx_header" not in h5_handle[key][subkey]:
                            return False
        return True

    def _start(self):
        self._h5_handle = h5py.File(self.get_image_file(), "r")
        self._run = list(self._h5_handle.keys())[0]

        # currently hardcoded to Rayonix MX225HS
        self._detector_size = 225  # mm
        self._max_pixels = 5760
        frame_1 = self.get_raw_data(0)
        assert frame_1.focus()[0] == frame_1.focus()[1]
        self._binning = self._max_pixels // frame_1.focus()[0]

    def get_raw_data(self, index=None):
        if index is None:
            index = 0
        data = self._h5_handle[self._run]["scan_dat/raymx_data"][index]
        # return flex.int(data)  # this crashes!
        # return flex.int(data.astype(np.int))  # this doesn't work! (data is read incorrectly)
        return flex.double(data.astype(np.float64))

    def get_num_images(self):
        return len(self._h5_handle[self._run]["scan_dat/N"][()])

    def _detector(self):
        distance = self._h5_handle[self._run]["header/detector_0_distance"][()]
        image_size = self._max_pixels // self._binning
        pixel_size = self._detector_size / image_size
        beam_x = 0.5 * self._detector_size
        beam_y = 0.5 * self._detector_size
        trusted_range = (
            -1,
            65534,
        )  # note one less than normal due to how bad pixels are encoded for this detector
        return self._detector_factory.simple(
            "CCD",
            distance,
            (beam_y, beam_x),
            "+x",
            "-y",
            (pixel_size, pixel_size),
            (image_size, image_size),
            trusted_range,
            [],
        )

    def _beam(self, index=None):
        if index is None:
            index = 0
        return self._beam_factory.simple(
            self._h5_handle[self._run]["scan_dat/photon_wavelength"][index]
        )


if __name__ == "__main__":
    for arg in sys.argv[1:]:
        print(FormatHDF5PAL.understand(arg))
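# ---------------------------------------------------------------------------
# Sketch of the minimal HDF5 layout that understand() above accepts: a single
# top-level group named "R<number>" containing only "header" and "scan_dat",
# with "raymx_header" present under "scan_dat".  The dataset shapes and
# values below are placeholders, not a real PAL/Rayonix file.
# ---------------------------------------------------------------------------
#
# import h5py, numpy as np
# with h5py.File("minimal_pal.h5", "w") as f:
#     run = f.create_group("R0001")
#     run.create_group("header")
#     scan = run.create_group("scan_dat")
#     scan.create_dataset("raymx_header", data=np.zeros(1))
#     scan.create_dataset("raymx_data", data=np.zeros((1, 1440, 1440), dtype="u2"))
#     scan.create_dataset("N", data=np.array([1]))
# # FormatHDF5PAL.understand("minimal_pal.h5") would then return True.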
PypiClean
/casai-home-frontend-20220503.0.tar.gz/casai-home-frontend-20220503.0/hass_frontend/frontend_latest/5edb7400.js
"use strict";(self.webpackChunkhome_assistant_frontend=self.webpackChunkhome_assistant_frontend||[]).push([[65040],{3555:(e,t,r)=>{var i=r(86251),n=r(31338),o=r(37500),a=r(33310);function s(){s=function(){return e};var e={elementsDefinitionOrder:[["method"],["field"]],initializeInstanceElements:function(e,t){["method","field"].forEach((function(r){t.forEach((function(t){t.kind===r&&"own"===t.placement&&this.defineClassElement(e,t)}),this)}),this)},initializeClassElements:function(e,t){var r=e.prototype;["method","field"].forEach((function(i){t.forEach((function(t){var n=t.placement;if(t.kind===i&&("static"===n||"prototype"===n)){var o="static"===n?e:r;this.defineClassElement(o,t)}}),this)}),this)},defineClassElement:function(e,t){var r=t.descriptor;if("field"===t.kind){var i=t.initializer;r={enumerable:r.enumerable,writable:r.writable,configurable:r.configurable,value:void 0===i?void 0:i.call(e)}}Object.defineProperty(e,t.key,r)},decorateClass:function(e,t){var r=[],i=[],n={static:[],prototype:[],own:[]};if(e.forEach((function(e){this.addElementPlacement(e,n)}),this),e.forEach((function(e){if(!d(e))return r.push(e);var t=this.decorateElement(e,n);r.push(t.element),r.push.apply(r,t.extras),i.push.apply(i,t.finishers)}),this),!t)return{elements:r,finishers:i};var o=this.decorateConstructor(r,t);return i.push.apply(i,o.finishers),o.finishers=i,o},addElementPlacement:function(e,t,r){var i=t[e.placement];if(!r&&-1!==i.indexOf(e.key))throw new TypeError("Duplicated element ("+e.key+")");i.push(e.key)},decorateElement:function(e,t){for(var r=[],i=[],n=e.decorators,o=n.length-1;o>=0;o--){var a=t[e.placement];a.splice(a.indexOf(e.key),1);var s=this.fromElementDescriptor(e),l=this.toElementFinisherExtras((0,n[o])(s)||s);e=l.element,this.addElementPlacement(e,t),l.finisher&&i.push(l.finisher);var c=l.extras;if(c){for(var d=0;d<c.length;d++)this.addElementPlacement(c[d],t);r.push.apply(r,c)}}return{element:e,finishers:i,extras:r}},decorateConstructor:function(e,t){for(var r=[],i=t.length-1;i>=0;i--){var n=this.fromClassDescriptor(e),o=this.toClassDescriptor((0,t[i])(n)||n);if(void 0!==o.finisher&&r.push(o.finisher),void 0!==o.elements){e=o.elements;for(var a=0;a<e.length-1;a++)for(var s=a+1;s<e.length;s++)if(e[a].key===e[s].key&&e[a].placement===e[s].placement)throw new TypeError("Duplicated element ("+e[a].key+")")}}return{elements:e,finishers:r}},fromElementDescriptor:function(e){var t={kind:e.kind,key:e.key,placement:e.placement,descriptor:e.descriptor};return Object.defineProperty(t,Symbol.toStringTag,{value:"Descriptor",configurable:!0}),"field"===e.kind&&(t.initializer=e.initializer),t},toElementDescriptors:function(e){var t;if(void 0!==e)return(t=e,function(e){if(Array.isArray(e))return e}(t)||function(e){if("undefined"!=typeof Symbol&&null!=e[Symbol.iterator]||null!=e["@@iterator"])return Array.from(e)}(t)||function(e,t){if(e){if("string"==typeof e)return h(e,t);var r=Object.prototype.toString.call(e).slice(8,-1);return"Object"===r&&e.constructor&&(r=e.constructor.name),"Map"===r||"Set"===r?Array.from(e):"Arguments"===r||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(r)?h(e,t):void 0}}(t)||function(){throw new TypeError("Invalid attempt to destructure non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}()).map((function(e){var t=this.toElementDescriptor(e);return this.disallowProperty(e,"finisher","An element descriptor"),this.disallowProperty(e,"extras","An element descriptor"),t}),this)},toElementDescriptor:function(e){var 
t=String(e.kind);if("method"!==t&&"field"!==t)throw new TypeError('An element descriptor\'s .kind property must be either "method" or "field", but a decorator created an element descriptor with .kind "'+t+'"');var r=u(e.key),i=String(e.placement);if("static"!==i&&"prototype"!==i&&"own"!==i)throw new TypeError('An element descriptor\'s .placement property must be one of "static", "prototype" or "own", but a decorator created an element descriptor with .placement "'+i+'"');var n=e.descriptor;this.disallowProperty(e,"elements","An element descriptor");var o={kind:t,key:r,placement:i,descriptor:Object.assign({},n)};return"field"!==t?this.disallowProperty(e,"initializer","A method descriptor"):(this.disallowProperty(n,"get","The property descriptor of a field descriptor"),this.disallowProperty(n,"set","The property descriptor of a field descriptor"),this.disallowProperty(n,"value","The property descriptor of a field descriptor"),o.initializer=e.initializer),o},toElementFinisherExtras:function(e){return{element:this.toElementDescriptor(e),finisher:p(e,"finisher"),extras:this.toElementDescriptors(e.extras)}},fromClassDescriptor:function(e){var t={kind:"class",elements:e.map(this.fromElementDescriptor,this)};return Object.defineProperty(t,Symbol.toStringTag,{value:"Descriptor",configurable:!0}),t},toClassDescriptor:function(e){var t=String(e.kind);if("class"!==t)throw new TypeError('A class descriptor\'s .kind property must be "class", but a decorator created a class descriptor with .kind "'+t+'"');this.disallowProperty(e,"key","A class descriptor"),this.disallowProperty(e,"placement","A class descriptor"),this.disallowProperty(e,"descriptor","A class descriptor"),this.disallowProperty(e,"initializer","A class descriptor"),this.disallowProperty(e,"extras","A class descriptor");var r=p(e,"finisher");return{elements:this.toElementDescriptors(e.elements),finisher:r}},runClassFinishers:function(e,t){for(var r=0;r<t.length;r++){var i=(0,t[r])(e);if(void 0!==i){if("function"!=typeof i)throw new TypeError("Finishers must return a constructor.");e=i}}return e},disallowProperty:function(e,t,r){if(void 0!==e[t])throw new TypeError(r+" can't have a ."+t+" property.")}};return e}function l(e){var t,r=u(e.key);"method"===e.kind?t={value:e.value,writable:!0,configurable:!0,enumerable:!1}:"get"===e.kind?t={get:e.value,configurable:!0,enumerable:!1}:"set"===e.kind?t={set:e.value,configurable:!0,enumerable:!1}:"field"===e.kind&&(t={configurable:!0,writable:!0,enumerable:!0});var i={kind:"field"===e.kind?"field":"method",key:r,placement:e.static?"static":"field"===e.kind?"own":"prototype",descriptor:t};return e.decorators&&(i.decorators=e.decorators),"field"===e.kind&&(i.initializer=e.value),i}function c(e,t){void 0!==e.descriptor.get?t.descriptor.get=e.descriptor.get:t.descriptor.set=e.descriptor.set}function d(e){return e.decorators&&e.decorators.length}function f(e){return void 0!==e&&!(void 0===e.value&&void 0===e.writable)}function p(e,t){var r=e[t];if(void 0!==r&&"function"!=typeof r)throw new TypeError("Expected '"+t+"' to be a function");return r}function u(e){var t=function(e,t){if("object"!=typeof e||null===e)return e;var r=e[Symbol.toPrimitive];if(void 0!==r){var i=r.call(e,t||"default");if("object"!=typeof i)return i;throw new TypeError("@@toPrimitive must return a primitive value.")}return("string"===t?String:Number)(e)}(e,"string");return"symbol"==typeof t?t:String(t)}function h(e,t){(null==t||t>e.length)&&(t=e.length);for(var r=0,i=new Array(t);r<t;r++)i[r]=e[r];return i}function m(e,t,r){return 
m="undefined"!=typeof Reflect&&Reflect.get?Reflect.get:function(e,t,r){var i=function(e,t){for(;!Object.prototype.hasOwnProperty.call(e,t)&&null!==(e=y(e)););return e}(e,t);if(i){var n=Object.getOwnPropertyDescriptor(i,t);return n.get?n.get.call(r):n.value}},m(e,t,r||e)}function y(e){return y=Object.setPrototypeOf?Object.getPrototypeOf:function(e){return e.__proto__||Object.getPrototypeOf(e)},y(e)}!function(e,t,r,i){var n=s();if(i)for(var o=0;o<i.length;o++)n=i[o](n);var a=t((function(e){n.initializeInstanceElements(e,p.elements)}),r),p=n.decorateClass(function(e){for(var t=[],r=function(e){return"method"===e.kind&&e.key===o.key&&e.placement===o.placement},i=0;i<e.length;i++){var n,o=e[i];if("method"===o.kind&&(n=t.find(r)))if(f(o.descriptor)||f(n.descriptor)){if(d(o)||d(n))throw new ReferenceError("Duplicated methods ("+o.key+") can't be decorated.");n.descriptor=o.descriptor}else{if(d(o)){if(d(n))throw new ReferenceError("Decorators can't be placed on different accessors with for the same property ("+o.key+").");n.decorators=o.decorators}c(o,n)}else t.push(o)}return t}(a.d.map(l)),e);n.initializeClassElements(a.F,p.elements),n.runClassFinishers(a.F,p.finishers)}([(0,a.Mo)("ha-textfield")],(function(e,t){class r extends t{constructor(...t){super(...t),e(this)}}return{F:r,d:[{kind:"field",decorators:[(0,a.Cb)({type:Boolean})],key:"invalid",value:void 0},{kind:"field",decorators:[(0,a.Cb)({attribute:"error-message"})],key:"errorMessage",value:void 0},{kind:"field",decorators:[(0,a.Cb)({type:Boolean})],key:"icon",value:void 0},{kind:"field",decorators:[(0,a.Cb)({type:Boolean})],key:"iconTrailing",value:void 0},{kind:"method",key:"updated",value:function(e){m(y(r.prototype),"updated",this).call(this,e),(e.has("invalid")&&(this.invalid||void 0!==e.get("invalid"))||e.has("errorMessage"))&&(this.setCustomValidity(this.invalid?this.errorMessage||"Invalid":""),this.reportValidity())}},{kind:"method",key:"renderIcon",value:function(e,t=!1){const r=t?"trailing":"leading";return o.dy` <span class="mdc-text-field__icon mdc-text-field__icon--${r}" tabindex=${t?1:-1} > <slot name="${r}Icon"></slot> </span> `}},{kind:"field",static:!0,key:"styles",value:()=>[n.W,o.iv` .mdc-text-field__input { width: var(--ha-textfield-input-width, 100%); } .mdc-text-field:not(.mdc-text-field--with-leading-icon) { padding: var(--text-field-padding, 0px 16px); } .mdc-text-field__affix--suffix { padding-left: var(--text-field-suffix-padding-left, 12px); padding-right: var(--text-field-suffix-padding-right, 0px); } .mdc-text-field:not(.mdc-text-field--disabled) .mdc-text-field__affix--suffix { color: var(--secondary-text-color); } .mdc-text-field__icon { color: var(--secondary-text-color); } input { text-align: var(--text-field-text-align); } /* Chrome, Safari, Edge, Opera */ :host([no-spinner]) input::-webkit-outer-spin-button, :host([no-spinner]) input::-webkit-inner-spin-button { -webkit-appearance: none; margin: 0; } /* Firefox */ :host([no-spinner]) input[type="number"] { -moz-appearance: textfield; } .mdc-text-field__ripple { overflow: hidden; } .mdc-text-field { overflow: var(--text-field-overflow); } :host-context([style*="direction: rtl;"]) .mdc-floating-label { right: 10px !important; left: initial !important; } :host-context([style*="direction: rtl;"]) .mdc-text-field--with-leading-icon.mdc-text-field--filled .mdc-floating-label { max-width: calc(100% - 48px); right: 48px !important; left: initial !important; } `]}]}}),i.P)},65040:(e,t,r)=>{var i=r(37500),n=r(33310),o=(r(10983),r(52039),r(3555),r(47181));function 
a(){a=function(){return e};var e={elementsDefinitionOrder:[["method"],["field"]],initializeInstanceElements:function(e,t){["method","field"].forEach((function(r){t.forEach((function(t){t.kind===r&&"own"===t.placement&&this.defineClassElement(e,t)}),this)}),this)},initializeClassElements:function(e,t){var r=e.prototype;["method","field"].forEach((function(i){t.forEach((function(t){var n=t.placement;if(t.kind===i&&("static"===n||"prototype"===n)){var o="static"===n?e:r;this.defineClassElement(o,t)}}),this)}),this)},defineClassElement:function(e,t){var r=t.descriptor;if("field"===t.kind){var i=t.initializer;r={enumerable:r.enumerable,writable:r.writable,configurable:r.configurable,value:void 0===i?void 0:i.call(e)}}Object.defineProperty(e,t.key,r)},decorateClass:function(e,t){var r=[],i=[],n={static:[],prototype:[],own:[]};if(e.forEach((function(e){this.addElementPlacement(e,n)}),this),e.forEach((function(e){if(!c(e))return r.push(e);var t=this.decorateElement(e,n);r.push(t.element),r.push.apply(r,t.extras),i.push.apply(i,t.finishers)}),this),!t)return{elements:r,finishers:i};var o=this.decorateConstructor(r,t);return i.push.apply(i,o.finishers),o.finishers=i,o},addElementPlacement:function(e,t,r){var i=t[e.placement];if(!r&&-1!==i.indexOf(e.key))throw new TypeError("Duplicated element ("+e.key+")");i.push(e.key)},decorateElement:function(e,t){for(var r=[],i=[],n=e.decorators,o=n.length-1;o>=0;o--){var a=t[e.placement];a.splice(a.indexOf(e.key),1);var s=this.fromElementDescriptor(e),l=this.toElementFinisherExtras((0,n[o])(s)||s);e=l.element,this.addElementPlacement(e,t),l.finisher&&i.push(l.finisher);var c=l.extras;if(c){for(var d=0;d<c.length;d++)this.addElementPlacement(c[d],t);r.push.apply(r,c)}}return{element:e,finishers:i,extras:r}},decorateConstructor:function(e,t){for(var r=[],i=t.length-1;i>=0;i--){var n=this.fromClassDescriptor(e),o=this.toClassDescriptor((0,t[i])(n)||n);if(void 0!==o.finisher&&r.push(o.finisher),void 0!==o.elements){e=o.elements;for(var a=0;a<e.length-1;a++)for(var s=a+1;s<e.length;s++)if(e[a].key===e[s].key&&e[a].placement===e[s].placement)throw new TypeError("Duplicated element ("+e[a].key+")")}}return{elements:e,finishers:r}},fromElementDescriptor:function(e){var t={kind:e.kind,key:e.key,placement:e.placement,descriptor:e.descriptor};return Object.defineProperty(t,Symbol.toStringTag,{value:"Descriptor",configurable:!0}),"field"===e.kind&&(t.initializer=e.initializer),t},toElementDescriptors:function(e){var t;if(void 0!==e)return(t=e,function(e){if(Array.isArray(e))return e}(t)||function(e){if("undefined"!=typeof Symbol&&null!=e[Symbol.iterator]||null!=e["@@iterator"])return Array.from(e)}(t)||function(e,t){if(e){if("string"==typeof e)return u(e,t);var r=Object.prototype.toString.call(e).slice(8,-1);return"Object"===r&&e.constructor&&(r=e.constructor.name),"Map"===r||"Set"===r?Array.from(e):"Arguments"===r||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(r)?u(e,t):void 0}}(t)||function(){throw new TypeError("Invalid attempt to destructure non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}()).map((function(e){var t=this.toElementDescriptor(e);return this.disallowProperty(e,"finisher","An element descriptor"),this.disallowProperty(e,"extras","An element descriptor"),t}),this)},toElementDescriptor:function(e){var t=String(e.kind);if("method"!==t&&"field"!==t)throw new TypeError('An element descriptor\'s .kind property must be either "method" or "field", but a decorator created an element descriptor with .kind 
"'+t+'"');var r=p(e.key),i=String(e.placement);if("static"!==i&&"prototype"!==i&&"own"!==i)throw new TypeError('An element descriptor\'s .placement property must be one of "static", "prototype" or "own", but a decorator created an element descriptor with .placement "'+i+'"');var n=e.descriptor;this.disallowProperty(e,"elements","An element descriptor");var o={kind:t,key:r,placement:i,descriptor:Object.assign({},n)};return"field"!==t?this.disallowProperty(e,"initializer","A method descriptor"):(this.disallowProperty(n,"get","The property descriptor of a field descriptor"),this.disallowProperty(n,"set","The property descriptor of a field descriptor"),this.disallowProperty(n,"value","The property descriptor of a field descriptor"),o.initializer=e.initializer),o},toElementFinisherExtras:function(e){return{element:this.toElementDescriptor(e),finisher:f(e,"finisher"),extras:this.toElementDescriptors(e.extras)}},fromClassDescriptor:function(e){var t={kind:"class",elements:e.map(this.fromElementDescriptor,this)};return Object.defineProperty(t,Symbol.toStringTag,{value:"Descriptor",configurable:!0}),t},toClassDescriptor:function(e){var t=String(e.kind);if("class"!==t)throw new TypeError('A class descriptor\'s .kind property must be "class", but a decorator created a class descriptor with .kind "'+t+'"');this.disallowProperty(e,"key","A class descriptor"),this.disallowProperty(e,"placement","A class descriptor"),this.disallowProperty(e,"descriptor","A class descriptor"),this.disallowProperty(e,"initializer","A class descriptor"),this.disallowProperty(e,"extras","A class descriptor");var r=f(e,"finisher");return{elements:this.toElementDescriptors(e.elements),finisher:r}},runClassFinishers:function(e,t){for(var r=0;r<t.length;r++){var i=(0,t[r])(e);if(void 0!==i){if("function"!=typeof i)throw new TypeError("Finishers must return a constructor.");e=i}}return e},disallowProperty:function(e,t,r){if(void 0!==e[t])throw new TypeError(r+" can't have a ."+t+" property.")}};return e}function s(e){var t,r=p(e.key);"method"===e.kind?t={value:e.value,writable:!0,configurable:!0,enumerable:!1}:"get"===e.kind?t={get:e.value,configurable:!0,enumerable:!1}:"set"===e.kind?t={set:e.value,configurable:!0,enumerable:!1}:"field"===e.kind&&(t={configurable:!0,writable:!0,enumerable:!0});var i={kind:"field"===e.kind?"field":"method",key:r,placement:e.static?"static":"field"===e.kind?"own":"prototype",descriptor:t};return e.decorators&&(i.decorators=e.decorators),"field"===e.kind&&(i.initializer=e.value),i}function l(e,t){void 0!==e.descriptor.get?t.descriptor.get=e.descriptor.get:t.descriptor.set=e.descriptor.set}function c(e){return e.decorators&&e.decorators.length}function d(e){return void 0!==e&&!(void 0===e.value&&void 0===e.writable)}function f(e,t){var r=e[t];if(void 0!==r&&"function"!=typeof r)throw new TypeError("Expected '"+t+"' to be a function");return r}function p(e){var t=function(e,t){if("object"!=typeof e||null===e)return e;var r=e[Symbol.toPrimitive];if(void 0!==r){var i=r.call(e,t||"default");if("object"!=typeof i)return i;throw new TypeError("@@toPrimitive must return a primitive value.")}return("string"===t?String:Number)(e)}(e,"string");return"symbol"==typeof t?t:String(t)}function u(e,t){(null==t||t>e.length)&&(t=e.length);for(var r=0,i=new Array(t);r<t;r++)i[r]=e[r];return i}!function(e,t,r,i){var n=a();if(i)for(var o=0;o<i.length;o++)n=i[o](n);var f=t((function(e){n.initializeInstanceElements(e,p.elements)}),r),p=n.decorateClass(function(e){for(var 
t=[],r=function(e){return"method"===e.kind&&e.key===o.key&&e.placement===o.placement},i=0;i<e.length;i++){var n,o=e[i];if("method"===o.kind&&(n=t.find(r)))if(d(o.descriptor)||d(n.descriptor)){if(c(o)||c(n))throw new ReferenceError("Duplicated methods ("+o.key+") can't be decorated.");n.descriptor=o.descriptor}else{if(c(o)){if(c(n))throw new ReferenceError("Decorators can't be placed on different accessors with for the same property ("+o.key+").");n.decorators=o.decorators}l(o,n)}else t.push(o)}return t}(f.d.map(s)),e);n.initializeClassElements(f.F,p.elements),n.runClassFinishers(f.F,p.finishers)}([(0,n.Mo)("search-input")],(function(e,t){return{F:class extends t{constructor(...t){super(...t),e(this)}},d:[{kind:"field",decorators:[(0,n.Cb)({attribute:!1})],key:"hass",value:void 0},{kind:"field",decorators:[(0,n.Cb)()],key:"filter",value:void 0},{kind:"field",decorators:[(0,n.Cb)({type:Boolean})],key:"suffix",value:()=>!1},{kind:"field",decorators:[(0,n.Cb)({type:Boolean})],key:"autofocus",value:()=>!1},{kind:"field",decorators:[(0,n.Cb)({type:String})],key:"label",value:void 0},{kind:"method",key:"focus",value:function(){var e;null===(e=this._input)||void 0===e||e.focus()}},{kind:"field",decorators:[(0,n.IO)("ha-textfield",!0)],key:"_input",value:void 0},{kind:"method",key:"render",value:function(){return i.dy` <ha-textfield .autofocus=${this.autofocus} .label=${this.label||"Search"} .value=${this.filter||""} icon .iconTrailing=${this.filter||this.suffix} @input=${this._filterInputChanged} > <slot name="prefix" slot="leadingIcon"> <ha-svg-icon tabindex="-1" class="prefix" .path=${"M9.5,3A6.5,6.5 0 0,1 16,9.5C16,11.11 15.41,12.59 14.44,13.73L14.71,14H15.5L20.5,19L19,20.5L14,15.5V14.71L13.73,14.44C12.59,15.41 11.11,16 9.5,16A6.5,6.5 0 0,1 3,9.5A6.5,6.5 0 0,1 9.5,3M9.5,5C7,5 5,7 5,9.5C5,12 7,14 9.5,14C12,14 14,12 14,9.5C14,7 12,5 9.5,5Z"} ></ha-svg-icon> </slot> <div class="trailing" slot="trailingIcon"> ${this.filter&&i.dy` <ha-icon-button @click=${this._clearSearch} .label=${this.hass.localize("ui.common.clear")} .path=${"M19,6.41L17.59,5L12,10.59L6.41,5L5,6.41L10.59,12L5,17.59L6.41,19L12,13.41L17.59,19L19,17.59L13.41,12L19,6.41Z"} class="clear-button" ></ha-icon-button> `} <slot name="suffix"></slot> </div> </ha-textfield> `}},{kind:"method",key:"_filterChanged",value:async function(e){(0,o.B)(this,"value-changed",{value:String(e)})}},{kind:"method",key:"_filterInputChanged",value:async function(e){this._filterChanged(e.target.value)}},{kind:"method",key:"_clearSearch",value:async function(){this._filterChanged("")}},{kind:"get",static:!0,key:"styles",value:function(){return i.iv` :host { display: inline-flex; } ha-svg-icon, ha-icon-button { color: var(--primary-text-color); } ha-svg-icon { outline: none; } .clear-button { --mdc-icon-size: 20px; } ha-textfield { display: inherit; } .trailing { display: flex; align-items: center; } `}}]}}),i.oi)}}]); //# sourceMappingURL=5edb7400.js.map
PypiClean
/online_judge_template_generator-4.8.1-py3-none-any.whl/onlinejudge_template/analyzer/output_types.py
from typing import * from onlinejudge_template.analyzer.simplify import simplify from onlinejudge_template.types import * # TODO: remove this def _get_variable_on_code(*, decl: VarDecl, indices: List[Expr], decls: Dict[VarName, VarDecl]) -> str: var = str(decl.name) for index, base in zip(indices, decl.bases): i = simplify(Expr(f"""{index} - ({base})""")) var = f"""{var}[{i}]""" return var def match_indices(*, indices: List[Expr], names: List[str]) -> bool: if len(indices) != len(names): return False for index, name in zip(indices, names): if index not in (name + ' - 1', name, name + ' + 1'): return False return True def analyze_output_type(*, output_format: FormatNode, output_variables: Dict[VarName, VarDecl], constants: Dict[VarName, ConstantDecl]) -> Optional[OutputType]: node = output_format decls = output_variables # pattern: # ans if isinstance(node, SequenceNode) and len(node.items) == 2: item0 = node.items[0] item1 = node.items[1] if isinstance(item0, ItemNode) and isinstance(item1, NewlineNode): type = decls[item0.name].type if type is not None: if 'YES' in constants and 'NO' in constants and type == VarType.String: return YesNoOutputType(name=Expr('ans'), yes='YES', no='NO') if 'FIRST' in constants and 'SECOND' in constants and type == VarType.String: return YesNoOutputType(name=Expr('ans'), yes='FIRST', no='SECOND') return OneOutputType(name=Expr('ans'), type=type) # pattern: # x y if isinstance(node, SequenceNode) and len(node.items) == 3: item0 = node.items[0] item1 = node.items[1] item2 = node.items[2] if isinstance(item0, ItemNode) and isinstance(item1, ItemNode) and isinstance(item2, NewlineNode): name1 = item0.name name2 = item1.name type1 = decls[name1].type type2 = decls[name2].type return TwoOutputType(name1=Expr(name1), type1=type1, name2=Expr(name2), type2=type2, print_newline_after_item=False) # pattern: # x # y if isinstance(node, SequenceNode) and len(node.items) == 4: item0 = node.items[0] item1 = node.items[1] item2 = node.items[2] item3 = node.items[3] if isinstance(item0, ItemNode) and isinstance(item1, NewlineNode) and isinstance(item2, ItemNode) and isinstance(item3, NewlineNode): name1 = item0.name name2 = item2.name type1 = decls[name1].type type2 = decls[name2].type return TwoOutputType(name1=Expr(name1), type1=type1, name2=Expr(name2), type2=type2, print_newline_after_item=False) # pattern: # a_1 ... a_n if isinstance(node, SequenceNode) and len(node.items) == 2: item0 = node.items[0] item1 = node.items[1] if isinstance(item1, NewlineNode): if isinstance(item0, LoopNode) and isinstance(item0.body, ItemNode) and match_indices(indices=item0.body.indices, names=[item0.name]): type = decls[item0.body.name].type subscripted_name = _get_variable_on_code(decl=decls[item0.body.name], indices=item0.body.indices, decls=decls) counter_name = item0.name return VectorOutputType(name=VarName('ans'), type=type, subscripted_name=subscripted_name, counter_name=counter_name, print_size=False, print_newline_after_size=False, print_newline_after_item=False) # pattern: # n # a_1 ... 
a_n if isinstance(node, SequenceNode) and len(node.items) == 4: item0 = node.items[0] item1 = node.items[1] item2 = node.items[2] item3 = node.items[3] if isinstance(item0, ItemNode) and isinstance(item1, NewlineNode) and isinstance(item3, NewlineNode): if isinstance(item2, LoopNode) and isinstance(item2.body, ItemNode) and item2.size == item0.name and match_indices(indices=item2.body.indices, names=[item2.name]): type = decls[item2.body.name].type subscripted_name = _get_variable_on_code(decl=decls[item2.body.name], indices=item2.body.indices, decls=decls) counter_name = item2.name return VectorOutputType(name=VarName('ans'), type=type, subscripted_name=subscripted_name, counter_name=counter_name, print_size=True, print_newline_after_size=True, print_newline_after_item=False) # pattern: # n a_1 ... a_n if isinstance(node, SequenceNode) and len(node.items) == 3: item0 = node.items[0] item1 = node.items[1] item2 = node.items[2] if isinstance(item0, ItemNode) and isinstance(item2, NewlineNode): if isinstance(item1, LoopNode) and isinstance(item1.body, ItemNode) and item1.size == item0.name and match_indices(indices=item1.body.indices, names=[item1.name]): type = decls[item1.body.name].type subscripted_name = _get_variable_on_code(decl=decls[item1.body.name], indices=item1.body.indices, decls=decls) counter_name = item1.name return VectorOutputType(name=VarName('ans'), type=type, subscripted_name=subscripted_name, counter_name=counter_name, print_size=True, print_newline_after_size=False, print_newline_after_item=False) # pattern: # a_1 # ... # a_n if isinstance(node, SequenceNode) and len(node.items) == 3: item0 = node.items[0] if isinstance(item0, LoopNode) and isinstance(item0.body, SequenceNode) and len(item0.body.items) == 2: item1 = node.items[0] item2 = node.items[1] if isinstance(item1, ItemNode) and isinstance(item2, NewlineNode) and match_indices(indices=item1.indices, names=[item0.name]): type = decls[item1.name].type subscripted_name = _get_variable_on_code(decl=decls[item1.name], indices=item1.indices, decls=decls) counter_name = item0.name return VectorOutputType(name=VarName('ans'), type=type, subscripted_name=subscripted_name, counter_name=counter_name, print_size=False, print_newline_after_size=False, print_newline_after_item=True) # pattern: # n # a_1 # ... # a_n if isinstance(node, SequenceNode) and len(node.items) == 3: item0 = node.items[0] item1 = node.items[1] item2 = node.items[2] if isinstance(item0, ItemNode) and isinstance(item1, NewlineNode): if isinstance(item2, LoopNode) and isinstance(item2.body, SequenceNode) and len(item2.body.items) == 2: item3 = node.items[0] item4 = node.items[1] if isinstance(item3, ItemNode) and isinstance(item4, NewlineNode) and item2.size == item0.name and match_indices(indices=item3.indices, names=[item2.name]): type = decls[item3.name].type subscripted_name = _get_variable_on_code(decl=decls[item3.name], indices=item3.indices, decls=decls) counter_name = item2.name return VectorOutputType(name=VarName('ans'), type=type, subscripted_name=subscripted_name, counter_name=counter_name, print_size=True, print_newline_after_size=True, print_newline_after_item=True) return None
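# ---------------------------------------------------------------------------
# Worked example of the pattern matching above (comments only; the FormatNode
# constructors live elsewhere in this package, so no instances are built
# here).  For an output section of the form
#
#     n
#     a_1 ... a_n
#
# the format tree is SequenceNode([ItemNode(n), NewlineNode, LoopNode(size=n,
# body=ItemNode(a, indices=[i])), NewlineNode]); it matches the
# "n / a_1 ... a_n" branch and yields
#
#     VectorOutputType(print_size=True,
#                      print_newline_after_size=True,
#                      print_newline_after_item=False)
#
# whereas the same values printed one per line ("a_1\n...\na_n" without a
# leading n) fall into the branch that returns print_size=False and
# print_newline_after_item=True.
# ---------------------------------------------------------------------------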
PypiClean
/simple-mailer-1.1.1.tar.gz/simple-mailer-1.1.1/src/simple_mailer/web.py
import json

from bottle import request, response, default_app, AppStack, get, post, run

from simple_mailer import checks
from simple_mailer import exceptions
from simple_mailer.config import settings
from simple_mailer.dispatcher import Dispatcher
from simple_mailer.utils import cloak, get_logger

log = get_logger(__name__)


@post(settings.MAILER_PATH)
def mail() -> str:
    """A resource that can send mail."""
    log.info(f"Got a new submission from client with IP {request.remote_addr}")
    try:
        Dispatcher().parse_request(request).dispatch()
    except exceptions.ContentTypeUnsupported as exc:
        response.status = 400
        return str(exc)
    except exceptions.SubmittedDataInvalid as exc:
        response.status = 400
        return str(exc)
    except exceptions.BaseSimpleMailerException as exc:
        response.status = 503
        return str(exc)
    else:
        redirect_url = settings.REDIRECT_URL
        if redirect_url:
            response.status = 302
            response.headers["Location"] = redirect_url
        else:
            response.status = 200
        return "OK"


@get(settings.DEBUG_PATH)
def debug() -> str:
    """The debug resource."""
    ns = {}
    if settings.ENABLE_DEBUG:
        ns.update(
            {
                "version": checks.get_version(),
                "environment_variables": cloak(checks.get_env_variables()),
                "smtp_connection": checks.get_smtp_connection(),
            }
        )
    else:
        response.status = 404
        ns["errors"] = "Debug not enabled"
    return json.dumps(ns)


@get("/")
def root() -> str:
    """The root resource."""
    ns = {"mailer": settings.MAILER_PATH}
    if settings.ENABLE_DEBUG:
        ns["debug"] = settings.DEBUG_PATH
    return json.dumps(ns)


def get_application() -> AppStack:
    """Get the default Bottle application."""
    return default_app()


def run_application() -> None:
    """Run the default Bottle application."""
    run(host="localhost", port=8080)


def main() -> None:
    """Parse command-line arguments and run the development server."""
    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--host", default="localhost", required=False, dest="host"
    )
    parser.add_argument(
        "--port", default=8080, type=int, required=False, dest="port"
    )
    parsed = parser.parse_args()
    run(host=parsed.host, port=parsed.port)


if __name__ == "__main__":
    main()
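# ---------------------------------------------------------------------------
# Example of talking to this service once it is running (illustrative; the
# actual MAILER_PATH and DEBUG_PATH values come from the runtime settings, so
# the "/mail" path below is only a placeholder):
#
#   $ python web.py --host 0.0.0.0 --port 8080
#   $ curl http://localhost:8080/                 # lists the configured paths
#   $ curl -X POST -d "email=user@example.com" http://localhost:8080/mail
#
# A 302 response with a Location header is returned when REDIRECT_URL is set;
# otherwise the mailer endpoint answers 200 "OK", with 400 on invalid
# submissions and 503 on dispatch errors, as implemented above.
# ---------------------------------------------------------------------------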
PypiClean
/squirrel-0.1.0.tar.gz/squirrel-0.1.0/share/BabelRessource.py
# Resource object code # # Created by: The Resource Compiler for PyQt5 (Qt v5.9.1) # # WARNING! All changes made in this file will be lost! from PyQt5 import QtCore qt_resource_data = b"\ \x00\x00\x00\x80\ \x89\ \x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ \x00\x00\x24\x00\x00\x00\x24\x08\x04\x00\x00\x00\x4b\x09\x50\x13\ \x00\x00\x00\x47\x49\x44\x41\x54\x78\x01\xed\xcc\xdd\x0d\x40\x00\ \x0c\x85\xd1\x2f\x21\x61\xec\xb2\x93\x9f\xed\xea\x01\x13\xb8\x0f\ \xad\xdc\x33\xc0\xc1\xac\x8f\x85\x40\x60\x25\xc9\xef\xd5\xc0\x46\ \x6a\xaa\x89\xfd\xef\xd5\x51\xb0\x9a\x39\xdf\x4a\x16\x95\x6e\xdc\ \x8c\x82\xe6\x11\x9a\xe6\xae\x82\x8e\xcc\x2e\x0b\x20\x43\x3d\xd0\ \xa1\x15\x83\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\ \x00\x00\x00\x8a\ \x89\ \x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ \x00\x00\x24\x00\x00\x00\x24\x08\x04\x00\x00\x00\x4b\x09\x50\x13\ \x00\x00\x00\x51\x49\x44\x41\x54\x78\x01\xed\xd3\xb1\x0d\x80\x30\ \x14\x03\xd1\x5b\x10\x45\x6c\xc4\xb8\x24\x4b\x98\x1e\x28\xc0\xfa\ \x42\x44\xf2\xb9\x7f\x9d\x49\x53\xb7\xd2\xd1\xcd\x36\x5e\x76\x62\ \x7c\x4a\xa8\x86\x52\x15\x25\x63\x83\x56\x03\x89\x5e\x05\xe9\x03\ \xe8\x69\x81\x02\x05\xf2\xa1\x7f\xbd\x7f\x58\xcc\xce\xa5\x46\x37\ \x98\x85\x89\x4b\x07\x80\x91\x73\x2a\xf9\xfc\x24\x8f\x00\x00\x00\ \x00\x49\x45\x4e\x44\xae\x42\x60\x82\ \x00\x00\x01\x00\ \x89\ \x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ \x00\x00\x24\x00\x00\x00\x24\x08\x04\x00\x00\x00\x4b\x09\x50\x13\ \x00\x00\x00\xc7\x49\x44\x41\x54\x78\x01\x63\x20\x09\x8c\x02\x46\ \x06\x3f\x86\x5a\x86\x06\x22\x61\x1d\x50\x35\x23\x76\x63\xd6\x33\ \xfc\x27\x11\xae\xc7\x66\x94\x1f\x50\x82\x74\xe8\x8b\x69\x50\x2d\ \x59\x06\xd5\x62\x1a\xd4\x40\x96\x41\x0d\x43\xde\xa0\x51\x83\x46\ \x0d\x4a\x63\xb8\x4a\x0d\x83\xa6\x00\xd5\x68\x31\x7c\x25\xdd\xa0\ \x85\x0c\xaf\x90\x78\xc7\x19\xd8\xc0\xaa\x12\x48\x35\xe8\x10\x03\ \x0b\x83\x0d\xc3\x4f\x08\x0f\x68\xa4\x0c\x5c\xdd\x02\x52\x0c\x7a\ \xc6\x20\x81\x64\xff\x1f\x06\x27\x24\x75\x5c\x0c\x57\x89\x35\xe8\ \x37\xd0\x2d\x30\xd0\x0d\xe4\x57\x20\x2b\x43\x84\x14\x61\x83\x0a\ \x91\x44\x99\x18\x0a\xb0\x14\xa7\x09\xc4\x18\xb4\x8a\x81\x18\xb0\ \x80\x90\x41\xd7\x18\x78\x70\xeb\x46\x0d\x29\x9a\xa7\xec\x3a\x4a\ \x0a\x7f\xca\xab\x23\x1f\xea\x54\x90\xab\x71\xd5\xb5\xbe\xc4\x57\ \xd9\x40\x95\x3e\x40\x1d\x24\x80\x51\x00\x00\x53\xfc\x70\x70\xaf\ \x89\x10\xab\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\ \x00\x00\x00\x78\ \x89\ \x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ \x00\x00\x24\x00\x00\x00\x24\x08\x04\x00\x00\x00\x4b\x09\x50\x13\ \x00\x00\x00\x3f\x49\x44\x41\x54\x78\x01\x63\x18\x06\x60\x14\x34\ \x30\xfc\x27\x12\x36\x8c\x58\x83\x1c\x18\x1a\x50\xe0\x02\xb8\xc6\ \x05\x68\x32\x0e\xa4\x05\xbe\x03\xcc\x20\x4c\x8d\x43\xd3\xa0\x51\ \x83\x46\x0d\x1a\x35\x68\xd4\x20\x05\x78\x89\xa8\x80\x53\xcd\x28\ \x18\x05\x00\x3f\x24\xae\xe9\xd4\x6e\x0d\x0f\x00\x00\x00\x00\x49\ \x45\x4e\x44\xae\x42\x60\x82\ \x00\x00\x01\x71\ \x89\ \x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ \x00\x00\x24\x00\x00\x00\x24\x08\x04\x00\x00\x00\x4b\x09\x50\x13\ \x00\x00\x01\x38\x49\x44\x41\x54\x78\x01\xed\xd4\x41\x8b\x4d\x61\ \x1c\xc7\xf1\xcf\x9c\x19\x2c\xc8\x2c\xcc\x1d\xd7\x02\x5b\x8b\x91\ \x32\xa2\x34\x6f\xe1\xae\x48\xdd\xb2\x35\x16\xca\x0b\x50\xde\x83\ \x8d\x52\x46\x8a\x77\x21\x65\x61\x45\x14\x61\x67\x23\x32\x35\xd3\ \xc9\xe6\x92\xbb\x70\xff\x6e\x3a\x8b\xa7\xcc\x39\xe7\x99\x7b\x66\ \xc7\xe7\xbb\xff\x2d\x7f\xfe\x6d\xe7\xa7\xed\x89\xc7\x1e\xe9\xce\ \x51\x63\x3f\xf5\xb4\x28\xb4\x59\xb7\xdf\x01\xd7\x74\xb4\xe0\x8b\ 
\x10\x3e\x9b\xd7\xc9\x15\x51\x75\x49\x27\xcf\x45\xd5\x33\x1d\x9c\ \x11\x49\x2b\x66\xb6\x21\x92\xee\x99\x51\xdf\x0f\x91\x34\xd2\x53\ \x6b\x0e\xb0\xe2\x98\x65\x69\x3d\x07\xfd\xed\xbb\xad\x3f\x6d\xdb\ \xaa\xda\xf4\x5e\x62\x60\x24\x66\x68\x64\x00\xa9\x55\x9b\x62\x97\ \x7d\x75\xd6\x0e\x4e\xfa\x20\xe4\xf7\xce\x09\x35\x16\x3d\x15\x99\ \x3d\x71\x58\x83\x7d\x1e\x8a\x8c\x36\x2c\x68\x75\x5b\x34\x36\x71\ \x4b\xa6\xab\xc6\xa2\xa6\xb1\xa1\x6c\x73\xbe\x89\x9a\xb6\xc9\x77\ \x5a\x34\x74\x2a\xff\xd8\xd6\x34\x59\xdb\xf3\xa1\x76\x9f\x44\xd2\ \xaf\x69\x91\xf4\x51\xa6\xe3\x22\xe9\x85\x55\xe7\xbc\x14\x49\x7d\ \x59\x86\xa2\xaa\xb4\xae\x00\x85\xeb\x4a\x51\x75\x59\x96\xbb\x42\ \x98\xb8\xef\x88\xd4\x92\x07\x26\x42\xb8\x23\xcb\x5b\xe1\xb5\x0b\ \x76\x72\xd1\x1b\xe1\x95\x0c\x8b\x4a\x37\x14\xea\xcc\xbb\xa9\x74\ \x48\xab\xe5\x69\x6d\xfa\x96\xec\xda\x7f\xbf\x01\x6a\xb3\x21\x37\ \xc3\xb3\x4d\x5b\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\ \ \x00\x00\x00\xad\ \x89\ \x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ \x00\x00\x24\x00\x00\x00\x24\x08\x04\x00\x00\x00\x4b\x09\x50\x13\ \x00\x00\x00\x74\x49\x44\x41\x54\x78\x01\xed\xd1\xb1\x09\x02\x30\ \x10\x46\xe1\x57\x99\xd5\x1c\x2b\x13\x04\x92\x89\xb2\x51\xb4\xb3\ \xd3\x52\x2c\x4e\xde\x55\x82\xc9\xbb\xfe\xe3\x87\x63\x97\x0a\x9d\ \xc5\x33\xb8\x8a\xae\x07\x44\x9a\x8a\xd7\x4c\x1e\x78\xea\xeb\x96\ \xab\xa2\x04\x84\xa4\x04\x64\x29\x01\x59\x4a\x40\x92\x32\x90\xa2\ \x1c\xf4\xa6\xc8\x43\x93\xfa\x71\x53\x43\xe2\x0e\x14\xf5\x07\xd0\ \xf9\xda\xf9\xda\xcf\xa1\x7b\x8a\x59\x84\x8d\x14\xd4\x08\x2b\x0c\ \x6e\x72\x4d\xe3\xc2\x1e\xbd\x00\x1b\x2c\x35\x93\xc1\xc0\xa4\x27\ \x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\ \x00\x00\x00\xfa\ \x89\ \x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ \x00\x00\x24\x00\x00\x00\x24\x08\x04\x00\x00\x00\x4b\x09\x50\x13\ \x00\x00\x00\xc1\x49\x44\x41\x54\x78\x01\xed\xd5\xb1\xad\xc2\x30\ \x18\x45\xe1\x63\x2f\x82\x98\x80\x15\x18\x20\x0a\x13\x19\x19\xc6\ \xa0\x80\x47\xfd\x9a\x8c\x90\x81\x2c\x56\x08\x92\x0b\x8a\xc4\x57\ \xe2\xa6\x44\x39\xb7\xfc\xd1\x27\x91\x22\x61\xcb\x2a\xd2\x93\xc8\ \x8d\xd5\x9a\x97\x63\x8b\x19\x98\xc4\x6a\xcd\x4b\x66\x51\x2f\x10\ \x1b\x4a\xf5\x30\x7e\xf1\xd7\xae\xbc\xd0\x10\x59\x1d\x66\x45\xee\ \x4c\x3e\xa4\x99\x62\x42\x82\xb9\x71\x59\x0f\x45\x1e\x1f\x26\x90\ \x4d\x48\x30\xb8\x90\x60\xd6\x42\x91\xbf\x19\x63\x42\x9a\x91\x69\ \x48\x33\x16\xa4\x19\x0b\x8a\x3c\x25\x63\x3c\x23\xcd\x58\x90\x66\ \x2c\x48\x33\x26\xb4\xa7\x68\xc6\x81\xe0\x40\xd1\x8c\x03\xc1\x4e\ \x33\x1e\x04\xfc\x06\x74\x16\x2f\x7f\xb5\xb1\xfe\x3e\xb1\xe8\xc4\ \x84\xbf\x8e\x45\x81\xc1\x66\xfe\x09\x34\x0a\xf2\x93\xdd\x5a\xa2\ \x23\xb0\x65\xf4\x06\x4a\x05\x47\xf9\x4c\x28\x37\x0e\x00\x00\x00\ \x00\x49\x45\x4e\x44\xae\x42\x60\x82\ \x00\x00\x00\x83\ \x89\ \x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ \x00\x00\x24\x00\x00\x00\x24\x08\x04\x00\x00\x00\x4b\x09\x50\x13\ \x00\x00\x00\x4a\x49\x44\x41\x54\x78\x01\xed\xcc\xc1\x0d\x80\x50\ \x0c\xc3\x50\x2f\x88\xbe\xd8\x88\x71\x69\x97\x08\x03\xc0\x85\x2a\ \x97\x2f\xc5\xbe\x3f\xd2\xd6\x9d\x14\xfa\xf8\xe2\x67\x85\x90\x83\ \x12\xf2\x50\x72\x51\x1a\xdc\x2c\x0f\x24\xca\x05\x29\x50\xa0\x40\ \x81\xec\x50\xa3\xc1\x37\xaf\x16\x35\x60\x0e\xd2\xc6\x3d\x01\xf6\ \x62\x49\x14\x20\xe7\x42\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\ \x60\x82\ \x00\x00\x00\xcf\ \x89\ \x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ \x00\x00\x24\x00\x00\x00\x24\x08\x04\x00\x00\x00\x4b\x09\x50\x13\ \x00\x00\x00\x96\x49\x44\x41\x54\x78\x01\xed\xd4\x4b\x0a\x83\x40\ 
\x10\x00\xd1\xea\xdc\x2b\x73\x98\x7c\x2e\xe5\x27\x47\x6d\x03\xe2\ \x66\x40\x0a\x7b\x11\xb2\x99\x72\x29\xf3\x44\x86\x6e\xfa\x46\xa3\ \x46\x60\xdd\x68\x5c\xe8\x4d\x32\x11\xc2\x7c\x48\x9e\xe0\x35\x92\ \x8d\xad\xa7\x7a\x66\x7f\x9f\x34\xb4\x60\x66\xdb\x9f\x99\x38\x67\ \xf4\x43\x4e\x39\x63\xd4\x72\x1c\x58\x88\x3a\xe3\x94\x33\x4a\xad\ \xc7\xc1\x95\x28\x32\x42\xd5\x18\xb9\xa3\x3a\x23\x94\x33\x75\xe8\ \x6f\xbf\xd6\x5f\xf8\x24\x54\x81\x09\x1b\x9c\x02\xa3\x83\x53\x60\ \x7c\x70\x2a\x8c\x50\x05\xc6\x06\xa7\xb8\xd8\xce\xa9\xe4\x8e\xc7\ \xeb\xd2\xaa\x7d\xfc\x62\xf9\x8f\x46\x5f\x9d\x07\x96\x57\x2e\xfa\ \x81\x8a\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\ \x00\x00\x00\xf7\ \x89\ \x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ \x00\x00\x24\x00\x00\x00\x24\x08\x04\x00\x00\x00\x4b\x09\x50\x13\ \x00\x00\x00\xbe\x49\x44\x41\x54\x78\x01\xed\x95\xc1\x09\x83\x30\ \x14\x40\x9f\x97\xec\x90\x51\xdc\x41\x9d\xa8\x91\xac\xa1\x2b\xf4\ \xe0\x06\xe2\xa5\x19\xc5\x35\x7e\x25\x54\xea\x25\x25\xe4\x0b\xa5\ \xd4\xf7\x4e\x41\x7c\xa0\xe8\xff\x5c\x64\x52\xd1\xe1\xf0\x05\xf6\ \x74\x54\xef\xcc\x84\x28\x9c\xf6\x54\x87\x28\x6d\x01\xc0\xc5\xc3\ \xcc\xad\xc0\x19\xd9\x74\x00\xe0\xe3\xc1\x93\xa2\xde\x4c\xb0\xdf\ \x9b\x13\xb2\xac\x9b\x56\x1b\x32\x84\x78\x35\x60\x74\xa1\x11\x79\ \x39\xa8\x42\x47\x7e\x3d\x64\x18\xc9\x61\xc4\x7c\x0a\x59\x02\x42\ \x0e\x42\xc0\xa6\x42\x35\x2b\x92\x1d\x12\x56\xea\xf3\x43\xfa\x47\ \x4b\xbf\xec\x81\x1c\x06\xcc\x3f\x7c\x90\x8a\x9f\x56\x31\x46\x1e\ \x98\xaf\x0d\x36\xc5\xa8\xed\xe3\x61\xc1\x17\xb8\x1c\x87\xbf\x7e\ \x1d\x35\xe7\x2c\xc8\xfb\x71\xd7\xb6\x85\x2b\xdb\xd1\x50\x91\xc5\ \xc5\x13\xa8\xdd\x67\xb7\x89\x24\x5e\xd4\x00\x00\x00\x00\x49\x45\ \x4e\x44\xae\x42\x60\x82\ \x00\x00\x00\x9b\ \x89\ \x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ \x00\x00\x24\x00\x00\x00\x24\x08\x04\x00\x00\x00\x4b\x09\x50\x13\ \x00\x00\x00\x62\x49\x44\x41\x54\x78\x01\xed\xcc\xb1\x09\x85\x30\ \x14\x46\xe1\x53\x66\x07\x9d\x44\xdc\x29\x6d\xca\x3b\x56\x86\x10\ \x87\x91\xd7\x6a\x6c\x45\xd0\xfc\xe4\xd9\x04\xee\x39\xfd\xc7\x5f\ \x79\x81\x84\x5d\x4e\x84\x36\x26\x53\x6e\x67\x9d\x32\x8a\xb8\xf5\ \x06\x7d\x9a\x09\x4b\x15\x61\x87\x1c\x72\xc8\xa1\xae\xa0\xa3\xca\ \xec\x48\xad\x55\x68\x41\x6a\xe6\xf7\xca\x6c\x4c\x88\x8d\x44\xec\ \xe1\xc8\x80\xd7\xd4\x09\x1d\x20\xc0\xf2\xee\xcf\x53\x53\x00\x00\ \x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\ \x00\x00\x00\x7d\ \x89\ \x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ \x00\x00\x24\x00\x00\x00\x24\x08\x04\x00\x00\x00\x4b\x09\x50\x13\ \x00\x00\x00\x44\x49\x44\x41\x54\x78\x01\xed\xcc\xdb\x09\x80\x50\ \x0c\x44\xc1\x03\x17\x62\xd3\x82\xdb\x92\x8f\xfe\x62\x13\xfb\x61\ \x64\xa7\x80\x21\x62\x26\x21\x4f\xd3\x34\xf2\x34\xcd\xc9\x72\x34\ \x17\xf5\xa1\xe6\xfe\x63\xf3\x50\xe0\x89\x36\x18\x5d\xa5\x5a\x9e\ \x6a\xc7\x40\x1c\x4c\x11\xf1\x02\x82\x62\x42\xf0\xb2\x90\x60\x5e\ \x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\ \x00\x00\x00\xc5\ \x89\ \x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ \x00\x00\x24\x00\x00\x00\x24\x08\x04\x00\x00\x00\x4b\x09\x50\x13\ \x00\x00\x00\x8c\x49\x44\x41\x54\x78\x01\xed\xd3\xbb\x0a\x02\x30\ \x0c\x85\xe1\x03\x4a\xeb\x4b\x8b\x99\xf4\x79\xbc\xe0\xed\xc1\x74\ \x54\x29\x9c\x21\x04\xac\x29\x01\xa1\xe4\x2f\xb4\xdb\xb7\xa4\xc1\ \xa4\x65\x02\x89\x61\x5e\x9f\x23\x21\x0c\xa9\x00\x86\x54\x04\x33\ \x4c\x91\x79\xb4\xfb\x39\x4a\x91\x39\x61\xdb\xde\x1d\xce\x23\x14\ \x99\x23\x0a\xa7\x86\xea\xa1\x0c\xa3\xc6\x5f\x71\xf1\x50\x64\x0e\ \x28\xe6\x1f\xad\x70\x25\xe5\x65\x14\xd4\xa8\x1b\x29\x1f\xa3\xa0\ 
\x0e\x65\xa0\x3d\x19\x0b\x29\x6a\x83\x2f\x89\x66\x0c\x44\xea\x8e\ \x35\x3a\x2d\x7e\x5a\xda\xe5\xdf\xb6\x3f\x21\x96\x50\x36\x5b\x6f\ \x6e\xa5\x95\x48\xda\x3b\x74\x2d\x00\x00\x00\x00\x49\x45\x4e\x44\ \xae\x42\x60\x82\ \x00\x00\x00\x7c\ \x89\ \x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ \x00\x00\x24\x00\x00\x00\x24\x08\x04\x00\x00\x00\x4b\x09\x50\x13\ \x00\x00\x00\x43\x49\x44\x41\x54\x78\x01\x63\xc0\x07\x46\xc1\x28\ \x70\x60\x68\x00\x42\x07\xca\x0d\x6a\x60\xf8\x0f\x84\x0d\xa3\x06\ \x8d\x1a\x44\x5a\xf2\x43\x83\x07\xc0\x06\x1d\xc0\x10\x77\x20\x68\ \x3f\x91\xb0\x81\xe6\x06\x51\xee\xb5\xd1\x74\x44\x10\x8c\x1a\x44\ \x8f\xc2\x7f\x14\x8c\x02\x00\xf6\xdc\x74\xb5\x7d\x32\x89\xda\x00\ \x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\ \x00\x00\x01\x64\ \x89\ \x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ \x00\x00\x24\x00\x00\x00\x24\x08\x06\x00\x00\x00\xe1\x00\x98\x98\ \x00\x00\x00\x04\x73\x42\x49\x54\x08\x08\x08\x08\x7c\x08\x64\x88\ \x00\x00\x00\x09\x70\x48\x59\x73\x00\x00\x0e\xc4\x00\x00\x0e\xc4\ \x01\x95\x2b\x0e\x1b\x00\x00\x01\x06\x49\x44\x41\x54\x58\x85\xed\ \xd7\x3f\x4a\x03\x41\x14\x80\xf1\x9f\x66\xf1\x0e\x39\x45\x04\x2b\ \xbb\xd5\x3a\xf1\x16\xc1\x6b\x58\x78\x8a\x5c\xc1\x22\x45\xb0\x55\ \xb1\xb1\x0a\x7a\x0b\xef\x20\x24\x5a\x38\x0b\xe3\xea\xee\xfa\x67\ \xd8\x35\x30\x1f\x0c\xf3\x98\x81\xf7\x3e\x86\x37\xc5\x23\x93\xf9\ \x1b\x7b\xb5\x78\x86\x09\x46\x3d\xd5\xdf\xe2\x09\x2b\xbc\xd6\xc5\ \x96\xe1\x70\x88\xb5\xf4\xf1\x71\x9c\x0d\x28\x53\xad\x19\x14\x41\ \x68\x12\xc9\xdd\xe2\xa6\xe9\x8d\x13\x73\x8a\x93\x10\x1f\x62\x55\ \x09\xc5\x3d\x73\x8f\xcb\x9e\x84\x8a\x48\x68\x04\xfb\x3d\x15\xfe\ \x36\x59\xa8\x8b\x2c\xd4\xc5\xce\x09\x1d\x60\x91\xb0\xde\x22\xe4\ \x6c\xa4\x4d\x68\x8c\x3b\xcc\x13\x0a\xcd\x43\xce\xf1\x4f\x85\x8e\ \xb1\x0e\x7b\x6a\x5a\x73\xef\x4c\x0f\x3d\xe0\x28\xec\xa9\x69\xcd\ \xdd\xf6\x42\xcf\x28\xa5\x6f\xea\x32\xe4\xfe\x92\xa2\xe9\x22\xf0\ \x82\xf3\x84\x42\x9d\xb9\x76\xa6\x87\x06\x23\x0b\x75\x91\x85\xba\ \xa8\xbe\xfd\x36\x3a\x2b\x71\xd1\x53\xfd\x32\x8a\x37\xf1\xc5\x7f\ \x18\x83\xa6\xb1\xd0\xd0\x83\xe2\x55\x70\xf8\x34\x4a\x4f\xbd\xcf\ \x47\x7d\x8d\xd2\x1b\x3c\xe2\x5a\x6d\x94\xce\x64\x7e\xcb\x1b\xdd\ \xbd\x9e\x50\xf7\x04\xa6\xcd\x00\x00\x00\x00\x49\x45\x4e\x44\xae\ \x42\x60\x82\ \x00\x00\x00\xb0\ \x89\ \x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ \x00\x00\x24\x00\x00\x00\x24\x08\x04\x00\x00\x00\x4b\x09\x50\x13\ \x00\x00\x00\x77\x49\x44\x41\x54\x78\x01\x63\x18\x29\x80\x9d\x61\ \x22\xc3\x7b\x86\xff\x44\xc0\x8f\x40\x95\x6c\x38\xcd\x01\x4a\xfe\ \x27\x01\x4e\xc4\x6d\xd0\x7b\x92\x0c\x7a\x8f\xd3\x1c\xa8\x02\x54\ \xd0\x80\xdb\x28\xfa\x19\x34\x6a\xd0\xa8\x41\x12\x0c\x4c\x94\x1b\ \xc4\xc1\xd0\xc8\xf0\x9d\xe1\x14\x83\x29\x65\x06\xf9\x32\xdc\x83\ \xb2\xfe\x32\xcc\x62\x98\x4c\xa6\x41\x98\x90\xea\x06\x8d\x1a\x34\ \x6a\xd0\xa8\x41\x1f\xc9\xa9\x20\x29\xaf\xb2\xfb\x70\x1b\xc4\x46\ \x6c\x23\x02\xa8\xaa\x0f\xa8\x7a\x64\x00\x00\x1b\xa7\x74\x45\x36\ \x41\x3f\x1c\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\ \x00\x00\x00\x6a\ \x89\ \x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ \x00\x00\x24\x00\x00\x00\x24\x08\x04\x00\x00\x00\x4b\x09\x50\x13\ \x00\x00\x00\x31\x49\x44\x41\x54\x78\x01\x63\x18\x8c\x60\x14\x8c\ \x82\xff\x84\xe1\xd0\x34\x68\x34\xd6\x10\x2a\x07\xa1\x41\x94\x80\ \x21\x6a\x10\xa1\x70\xc1\x07\x07\x9f\x41\x43\x26\xd6\x46\x0d\x1a\ \x16\x60\x14\x00\x00\x91\x61\xe0\x20\xa5\x35\x90\x06\x00\x00\x00\ \x00\x49\x45\x4e\x44\xae\x42\x60\x82\ \x00\x00\x01\xf4\ \x89\ \x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ 
\x00\x00\x24\x00\x00\x00\x24\x08\x04\x00\x00\x00\x4b\x09\x50\x13\ \x00\x00\x01\xbb\x49\x44\x41\x54\x48\x0d\xed\xc1\x49\x88\xcd\x01\ \x00\x07\xe0\x6f\x06\x07\xa2\x91\x65\x0c\x85\xa3\x75\x22\x23\xca\ \x45\x0a\x29\xcd\x0d\x65\xc9\x49\x24\x4b\xc8\x4d\x73\x45\x6e\x92\ \x64\x17\x39\x4b\x28\x69\x0e\x96\x52\x63\x29\x62\xe2\x64\x89\xb8\ \x78\xd4\x34\x96\x29\xf3\x7e\x7a\x07\xc5\x6b\xc6\xfc\x79\x6e\x7c\ \x1f\xff\xb4\x79\xe6\xf9\x2b\xce\x39\xeb\x2f\x18\xa7\xc7\x17\x63\ \xd5\xac\x4d\xc4\x1e\x35\x1a\xec\xb5\x88\x57\x06\xab\xc9\x2a\x11\ \x11\x2b\xd4\xe4\x96\x78\xe2\xa9\xb8\xae\x06\xb3\x44\x6c\xb5\x4d\ \x44\xb3\x3f\x76\x42\x74\x19\x61\x84\x2e\x71\xd4\x1f\x6a\xf2\x49\ \x1c\x52\x71\x58\x7c\x34\xde\x80\x66\x5a\x62\xad\x9d\xf6\x39\xe9\ \x92\x0e\xcf\x75\x8b\x28\x9b\xa2\x62\x9a\x88\xf8\xec\xa5\xbb\xae\ \x38\x6d\xbf\x5d\xd6\x59\x6a\xa6\x9f\xb4\xea\x16\x11\x11\x11\x11\ \x71\xcd\x77\xed\x22\x22\x22\x22\x22\xba\xb5\xaa\xd2\xe2\xad\x88\ \xb2\xcb\x4e\xd8\x6b\x87\x35\x16\x6b\xf2\xdd\x04\xcb\xac\xb7\xdb\ \x01\x67\x5c\x55\x16\xf1\xc6\x1c\x7d\x98\xac\x53\xc4\x45\xc3\xfc\ \xca\x50\x17\x44\x3c\x36\x49\x3f\x1a\xb4\x8b\xb8\xab\x49\x7f\x1a\ \x75\x88\x68\xd7\xe0\x17\x86\x38\x2d\xe2\x85\xe9\xfa\x32\xd5\x33\ \x11\xa7\x0c\x31\xa0\x36\x11\x1f\x2c\x52\x6d\xa1\xf7\x22\xda\x14\ \xb4\x4e\x8f\xb8\xa1\xda\x4d\xd1\x63\xad\xc2\xea\x7c\x10\x7b\x55\ \xdb\x2f\xde\xab\x53\x58\xb3\x88\xe5\xaa\xb5\x8a\x98\xa1\xb0\xcd\ \xa2\x6c\xa4\x8a\xd1\x8e\x3b\x66\x94\x8a\x51\xca\x62\x93\xc2\xce\ \x8b\x47\xa8\xb7\x51\x49\xc4\x3b\x1b\xd4\xa1\x53\x9c\x55\xd8\x4b\ \x71\x44\x8b\x0e\x11\xbd\x7a\x45\xdc\x36\xdb\x51\xf1\x4c\x41\x13\ \x45\x74\xea\x15\xd1\xa1\xc5\x5c\x77\x44\x7c\xf5\x48\xc4\x04\x85\ \xac\x16\x11\x51\xb2\x51\xbd\x8a\x7a\x9b\x94\x44\x44\xac\x54\xc8\ \x61\x11\x65\xc7\x8d\xf6\xa3\x31\x4e\x2a\x8b\x38\xa8\x90\x87\xe2\ \xbe\xf9\xfa\xb2\xc0\x03\x71\x4f\x01\x0d\x4a\xb6\xa8\xd7\x9f\x41\ \xb6\x2b\x19\x6e\x40\x8d\x1a\x0d\xa4\xc9\x18\xff\xfd\xb6\x6f\x0b\ \x8b\xd0\x4c\xb8\x0a\x19\x9e\x00\x00\x00\x00\x49\x45\x4e\x44\xae\ \x42\x60\x82\ \x00\x00\x01\x86\ \x89\ \x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ \x00\x00\x24\x00\x00\x00\x24\x08\x04\x00\x00\x00\x4b\x09\x50\x13\ \x00\x00\x01\x4d\x49\x44\x41\x54\x78\x01\xed\xd0\xaf\xaa\x2a\x51\ \x14\xc7\xf1\xaf\x32\x30\x26\xef\xc9\x22\x06\x8b\xc5\x27\x10\x99\ \x72\x9b\x5d\x5f\x41\x0c\x82\xc9\x72\x27\x0e\x9c\x6e\x10\x4c\xc6\ \xf3\x02\xd7\x3f\xe5\x32\xbe\x83\x60\x99\x26\x08\x62\x32\x18\x44\ \xc4\x39\xb0\x58\xe9\x38\xc3\xde\x3a\xc2\x2d\x7e\x76\x19\xf6\x6f\ \xcd\x8f\xcd\xe2\xed\x69\x25\xda\x0c\xf1\xe9\xd1\xc0\xe1\x49\xbf\ \x09\xb9\x11\xa3\x87\x03\x01\x45\x1e\x54\x60\x4a\x9c\x70\x76\x78\ \x8f\xd5\xfc\xd3\x1f\xaf\xac\x18\x11\x30\x65\xab\x37\x67\x5a\x58\ \xd3\xd7\xf0\x45\x05\x54\x8e\x0e\x7b\xb9\x3d\x51\xb3\xdc\x8d\xd6\ \xfc\xe1\xa7\x32\x91\x24\x21\x56\x42\x7d\x4d\x92\x3a\x17\x49\x9b\ \x18\x95\xb8\xc9\x6e\x2a\x24\x9b\x48\xd1\x18\xa3\xb6\x0c\xae\x48\ \xe3\x49\xbe\xc6\x68\x28\x83\x23\xd2\x14\x75\xe1\x46\xbe\x0c\x06\ \xa4\x71\x24\xbf\x62\xd4\x93\xc1\x29\x69\xaa\x92\xef\x31\x6a\xc8\ \xe0\x96\x1c\xc9\xba\x92\x2f\x31\x72\x38\xc8\x68\x27\x25\xdd\x48\ \xda\xc7\x42\xa0\x8f\x2f\x73\xef\x53\xb2\x23\x1f\x58\xf8\xc5\x4e\ \xc6\x23\xea\x08\xe5\x68\x4d\xcc\x00\x4b\x1e\x67\xf9\xe1\xc2\x04\ \x8f\x22\x0e\x55\xba\x6c\xb4\x26\x22\x8f\xb5\x16\x27\xe2\xc4\x13\ \xe9\xb2\x5d\x2c\xd5\x08\xef\x4a\x8e\x0c\xc8\xeb\xf7\xc2\xbe\x0a\ \x9a\x8c\x59\x73\xe2\xca\x9e\x25\x7d\x5d\xf1\x52\xab\xe6\xb8\x64\ \x52\x78\x5d\x95\xcb\x42\xab\x66\xd9\xab\xe6\xaf\xaf\xfa\x9b\xbd\ \x6a\xa6\x55\x3e\x64\xaf\xd2\x17\x65\xaf\xf2\x71\xf9\x3f\xde\xbe\ 
\x01\x02\xf2\xbf\x16\x2b\x95\xf4\xa1\x00\x00\x00\x00\x49\x45\x4e\ \x44\xae\x42\x60\x82\ " qt_resource_name = b"\ \x00\x05\ \x00\x6f\xa6\x53\ \x00\x69\ \x00\x63\x00\x6f\x00\x6e\x00\x73\ \x00\x05\ \x00\x36\xdb\x66\ \x00\x33\ \x00\x36\x00\x78\x00\x33\x00\x36\ \x00\x17\ \x0a\xba\x60\x27\ \x00\x63\ \x00\x68\x00\x65\x00\x76\x00\x72\x00\x6f\x00\x6e\x00\x2d\x00\x72\x00\x69\x00\x67\x00\x68\x00\x74\x00\x2d\x00\x62\x00\x6c\x00\x61\ \x00\x63\x00\x6b\x00\x2e\x00\x70\x00\x6e\x00\x67\ \x00\x15\ \x0f\xae\x0f\x87\ \x00\x66\ \x00\x6f\x00\x6c\x00\x64\x00\x65\x00\x72\x00\x2d\x00\x6f\x00\x70\x00\x65\x00\x6e\x00\x2d\x00\x62\x00\x6c\x00\x61\x00\x63\x00\x6b\ \x00\x2e\x00\x70\x00\x6e\x00\x67\ \x00\x0f\ \x08\x2f\xe5\xc7\ \x00\x69\ \x00\x6d\x00\x61\x00\x67\x00\x65\x00\x2d\x00\x62\x00\x6c\x00\x61\x00\x63\x00\x6b\x00\x2e\x00\x70\x00\x6e\x00\x67\ \x00\x0f\ \x01\xcf\xc9\x27\ \x00\x74\ \x00\x69\x00\x74\x00\x6c\x00\x65\x00\x2d\x00\x62\x00\x6c\x00\x61\x00\x63\x00\x6b\x00\x2e\x00\x70\x00\x6e\x00\x67\ \x00\x0e\ \x02\x98\x3b\xa7\ \x00\x73\ \x00\x74\x00\x61\x00\x72\x00\x2d\x00\x62\x00\x6c\x00\x61\x00\x63\x00\x6b\x00\x2e\x00\x70\x00\x6e\x00\x67\ \x00\x15\ \x09\xfb\x6d\xe7\ \x00\x64\ \x00\x65\x00\x73\x00\x63\x00\x72\x00\x69\x00\x70\x00\x74\x00\x69\x00\x6f\x00\x6e\x00\x2d\x00\x62\x00\x6c\x00\x61\x00\x63\x00\x6b\ \x00\x2e\x00\x70\x00\x6e\x00\x67\ \x00\x15\ \x0e\x7e\x9d\x67\ \x00\x6f\ \x00\x70\x00\x65\x00\x6e\x00\x2d\x00\x69\x00\x6e\x00\x2d\x00\x6e\x00\x65\x00\x77\x00\x2d\x00\x62\x00\x6c\x00\x61\x00\x63\x00\x6b\ \x00\x2e\x00\x70\x00\x6e\x00\x67\ \x00\x10\ \x02\x0f\x7d\xa7\ \x00\x66\ \x00\x6f\x00\x6c\x00\x64\x00\x65\x00\x72\x00\x2d\x00\x62\x00\x6c\x00\x61\x00\x63\x00\x6b\x00\x2e\x00\x70\x00\x6e\x00\x67\ \x00\x0f\ \x0a\xaf\x8d\xc7\ \x00\x63\ \x00\x6c\x00\x6f\x00\x73\x00\x65\x00\x2d\x00\x62\x00\x6c\x00\x61\x00\x63\x00\x6b\x00\x2e\x00\x70\x00\x6e\x00\x67\ \x00\x1b\ \x08\xaa\xec\xc7\ \x00\x73\ \x00\x65\x00\x74\x00\x74\x00\x69\x00\x6e\x00\x67\x00\x73\x00\x2d\x00\x6f\x00\x76\x00\x65\x00\x72\x00\x73\x00\x63\x00\x61\x00\x6e\ \x00\x2d\x00\x62\x00\x6c\x00\x61\x00\x63\x00\x6b\x00\x2e\x00\x70\x00\x6e\x00\x67\ \x00\x10\ \x0e\xc8\x95\xa7\ \x00\x64\ \x00\x65\x00\x6c\x00\x65\x00\x74\x00\x65\x00\x2d\x00\x62\x00\x6c\x00\x61\x00\x63\x00\x6b\x00\x2e\x00\x70\x00\x6e\x00\x67\ \x00\x16\ \x06\x60\xe3\xc7\ \x00\x63\ \x00\x68\x00\x65\x00\x76\x00\x72\x00\x6f\x00\x6e\x00\x2d\x00\x6c\x00\x65\x00\x66\x00\x74\x00\x2d\x00\x62\x00\x6c\x00\x61\x00\x63\ \x00\x6b\x00\x2e\x00\x70\x00\x6e\x00\x67\ \x00\x16\ \x0c\x37\x76\xc7\ \x00\x61\ \x00\x72\x00\x72\x00\x6f\x00\x77\x00\x2d\x00\x75\x00\x70\x00\x77\x00\x61\x00\x72\x00\x64\x00\x2d\x00\x62\x00\x6c\x00\x61\x00\x63\ \x00\x6b\x00\x2e\x00\x70\x00\x6e\x00\x67\ \x00\x0d\ \x08\xd4\x24\x87\ \x00\x61\ \x00\x64\x00\x64\x00\x2d\x00\x62\x00\x6c\x00\x61\x00\x63\x00\x6b\x00\x2e\x00\x70\x00\x6e\x00\x67\ \x00\x12\ \x01\xfc\x4c\x07\ \x00\x7a\ \x00\x6f\x00\x6f\x00\x6d\x00\x2d\x00\x66\x00\x69\x00\x74\x00\x2d\x00\x77\x00\x69\x00\x64\x00\x74\x00\x68\x00\x2e\x00\x70\x00\x6e\ \x00\x67\ \x00\x0e\ \x0b\x22\x3f\x47\ \x00\x62\ \x00\x6f\x00\x6f\x00\x6b\x00\x2d\x00\x62\x00\x6c\x00\x61\x00\x63\x00\x6b\x00\x2e\x00\x70\x00\x6e\x00\x67\ \x00\x16\ \x08\x2d\x2d\x47\ \x00\x70\ \x00\x6c\x00\x61\x00\x79\x00\x6c\x00\x69\x00\x73\x00\x74\x00\x2d\x00\x61\x00\x64\x00\x64\x00\x2d\x00\x62\x00\x6c\x00\x61\x00\x63\ \x00\x6b\x00\x2e\x00\x70\x00\x6e\x00\x67\ \x00\x15\ \x03\x5d\xaa\x67\ \x00\x73\ \x00\x74\x00\x61\x00\x72\x00\x2d\x00\x62\x00\x6f\x00\x72\x00\x64\x00\x65\x00\x72\x00\x2d\x00\x62\x00\x6c\x00\x61\x00\x63\x00\x6b\ \x00\x2e\x00\x70\x00\x6e\x00\x67\ \x00\x10\ 
\x06\xb6\x4b\x27\ \x00\x73\ \x00\x65\x00\x61\x00\x72\x00\x63\x00\x68\x00\x2d\x00\x62\x00\x6c\x00\x61\x00\x63\x00\x6b\x00\x2e\x00\x70\x00\x6e\x00\x67\ " qt_resource_struct_v1 = b"\ \x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\ \x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x02\ \x00\x00\x00\x10\x00\x02\x00\x00\x00\x13\x00\x00\x00\x03\ \x00\x00\x00\xa8\x00\x00\x00\x00\x00\x01\x00\x00\x02\x16\ \x00\x00\x02\x7e\x00\x00\x00\x00\x00\x01\x00\x00\x0a\x74\ \x00\x00\x01\x4e\x00\x00\x00\x00\x00\x01\x00\x00\x05\xb6\ \x00\x00\x00\xcc\x00\x00\x00\x00\x00\x01\x00\x00\x02\x92\ \x00\x00\x02\xfc\x00\x00\x00\x00\x00\x01\x00\x00\x0c\xfe\ \x00\x00\x01\xfa\x00\x00\x00\x00\x00\x01\x00\x00\x08\xaa\ \x00\x00\x03\x2c\x00\x00\x00\x00\x00\x01\x00\x00\x0e\xf6\ \x00\x00\x02\xca\x00\x00\x00\x00\x00\x01\x00\x00\x0c\x90\ \x00\x00\x00\x84\x00\x00\x00\x00\x00\x01\x00\x00\x01\x12\ \x00\x00\x01\x98\x00\x00\x00\x00\x00\x01\x00\x00\x07\x10\ \x00\x00\x02\x5e\x00\x00\x00\x00\x00\x01\x00\x00\x09\xf4\ \x00\x00\x00\xee\x00\x00\x00\x00\x00\x01\x00\x00\x04\x07\ \x00\x00\x01\x74\x00\x00\x00\x00\x00\x01\x00\x00\x06\x3d\ \x00\x00\x00\x20\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\ \x00\x00\x02\xa8\x00\x00\x00\x00\x00\x01\x00\x00\x0b\xdc\ \x00\x00\x02\x2c\x00\x00\x00\x00\x00\x01\x00\x00\x09\x2b\ \x00\x00\x01\x1e\x00\x00\x00\x00\x00\x01\x00\x00\x04\xb8\ \x00\x00\x01\xd4\x00\x00\x00\x00\x00\x01\x00\x00\x08\x0b\ \x00\x00\x00\x54\x00\x00\x00\x00\x00\x01\x00\x00\x00\x84\ " qt_resource_struct_v2 = b"\ \x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\ \x00\x00\x00\x00\x00\x00\x00\x00\ \x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x02\ \x00\x00\x00\x00\x00\x00\x00\x00\ \x00\x00\x00\x10\x00\x02\x00\x00\x00\x13\x00\x00\x00\x03\ \x00\x00\x00\x00\x00\x00\x00\x00\ \x00\x00\x00\xa8\x00\x00\x00\x00\x00\x01\x00\x00\x02\x16\ \x00\x00\x01\x61\xf9\x01\xe5\xe4\ \x00\x00\x02\x7e\x00\x00\x00\x00\x00\x01\x00\x00\x0a\x74\ \x00\x00\x01\x61\xf9\x05\x46\x97\ \x00\x00\x01\x4e\x00\x00\x00\x00\x00\x01\x00\x00\x05\xb6\ \x00\x00\x01\x61\xf9\x01\xe5\xfe\ \x00\x00\x00\xcc\x00\x00\x00\x00\x00\x01\x00\x00\x02\x92\ \x00\x00\x01\x61\xf9\x01\xe6\x54\ \x00\x00\x02\xfc\x00\x00\x00\x00\x00\x01\x00\x00\x0c\xfe\ \x00\x00\x01\x61\xf9\x01\xe6\x59\ \x00\x00\x01\xfa\x00\x00\x00\x00\x00\x01\x00\x00\x08\xaa\ \x00\x00\x01\x61\xf9\x01\xe6\x2f\ \x00\x00\x03\x2c\x00\x00\x00\x00\x00\x01\x00\x00\x0e\xf6\ \x00\x00\x01\x61\xf9\x01\xe5\x4e\ \x00\x00\x02\xca\x00\x00\x00\x00\x00\x01\x00\x00\x0c\x90\ \x00\x00\x01\x61\xf9\x01\xe5\xa3\ \x00\x00\x00\x84\x00\x00\x00\x00\x00\x01\x00\x00\x01\x12\ \x00\x00\x01\x61\xf9\x01\xe5\xf6\ \x00\x00\x01\x98\x00\x00\x00\x00\x00\x01\x00\x00\x07\x10\ \x00\x00\x01\x61\xf9\x01\xe5\x61\ \x00\x00\x02\x5e\x00\x00\x00\x00\x00\x01\x00\x00\x09\xf4\ \x00\x00\x01\x61\xf9\x01\xe5\xa8\ \x00\x00\x00\xee\x00\x00\x00\x00\x00\x01\x00\x00\x04\x07\ \x00\x00\x01\x61\xf9\x01\xe5\x28\ \x00\x00\x01\x74\x00\x00\x00\x00\x00\x01\x00\x00\x06\x3d\ \x00\x00\x01\x61\xf9\x01\xe6\x3d\ \x00\x00\x00\x20\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\ \x00\x00\x01\x61\xf9\x01\xe6\x36\ \x00\x00\x02\xa8\x00\x00\x00\x00\x00\x01\x00\x00\x0b\xdc\ \x00\x00\x01\x61\xf9\x01\xe5\x1c\ \x00\x00\x02\x2c\x00\x00\x00\x00\x00\x01\x00\x00\x09\x2b\ \x00\x00\x01\x61\xf9\x01\xe6\x21\ \x00\x00\x01\x1e\x00\x00\x00\x00\x00\x01\x00\x00\x04\xb8\ \x00\x00\x01\x61\xf9\x01\xe5\x32\ \x00\x00\x01\xd4\x00\x00\x00\x00\x00\x01\x00\x00\x08\x0b\ \x00\x00\x01\x61\xf9\x01\xe5\x23\ \x00\x00\x00\x54\x00\x00\x00\x00\x00\x01\x00\x00\x00\x84\ \x00\x00\x01\x61\xf9\x01\xe6\x06\ " qt_version = QtCore.qVersion().split('.') 
if qt_version < ['5', '8', '0']: rcc_version = 1 qt_resource_struct = qt_resource_struct_v1 else: rcc_version = 2 qt_resource_struct = qt_resource_struct_v2 def qInitResources(): QtCore.qRegisterResourceData(rcc_version, qt_resource_struct, qt_resource_name, qt_resource_data) def qCleanupResources(): QtCore.qUnregisterResourceData(rcc_version, qt_resource_struct, qt_resource_name, qt_resource_data) qInitResources()
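Aside: the block above is a machine-generated Qt resource module. If it is saved as, say, `icons_rc.py` (name assumed), simply importing it registers the embedded icons through the `qInitResources()` call at the bottom, after which they can be loaded by resource path. A hypothetical usage sketch — the PyQt5 binding and the `:/icons/36x36/...` prefix are guesses based on `qt_resource_name` above, not something stated in the file:

```python
import icons_rc                      # assumed module name; importing it runs qInitResources()
from PyQt5 import QtGui, QtWidgets   # assumed binding; only QtCore is referenced in the file

app = QtWidgets.QApplication([])     # QIcon needs a Qt application instance
icon = QtGui.QIcon(":/icons/36x36/folder-black.png")   # path inferred from qt_resource_name
print(icon.isNull())                 # False if the embedded resource was found
```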
PypiClean
/series_de_tiempo_pabloatarama-2021.12.1-py3-none-any.whl/SeriesDeTiempo/mediaMovilDoble.py
import SeriesDeTiempo.serie
import copy
import numpy as np


class MediaMovilDoble(SeriesDeTiempo.serie.Modelo):
    """Double moving average model (media móvil doble).

    Computes a first moving average ``Mt`` of the observed series ``yt`` and a
    second moving average ``ft`` of ``Mt``. When ``desfasada`` is True, each
    average is taken on the series shifted by one period.
    """

    def __init__(self, data, longitud, desfasada):
        self.modelo = "MEDIA MÓVIL DOBLE"
        self.data = data
        self.comprobado = True
        self.desfasada = desfasada
        self.long = longitud
        if desfasada:
            # Lagged ("desfasada") variant: shift before each rolling mean.
            self.data["Mt"] = self.data["yt"].shift().rolling(window=longitud).mean()
            self.data["ft"] = self.data["Mt"].shift().rolling(window=longitud).mean()
        else:
            self.data["Mt"] = self.data["yt"].rolling(window=longitud).mean()
            self.data["ft"] = self.data["Mt"].rolling(window=longitud).mean()
        self.calcularErrores()

    def __repr__(self):
        return "MODELO " + self.modelo + "\n" + str(self.data)

    def pronosticarMetodo(self, n, t0):
        """Forecast ``n`` periods starting at ``t0`` (or at the end of the series)."""
        nuevo = copy.deepcopy(self)
        if t0 is not None:
            t = t0
        else:
            t = t0 = len(nuevo.data)
        if self.desfasada:
            # First pass: extend Mt period by period, falling back to yt for
            # observations that precede the forecast origin.
            t = t0
            while t < t0 + n:
                if (nuevo.data.index != t).all():
                    nuevo.data.loc[t] = np.nan
                suma = 0
                ti = 1
                while ti <= self.long:
                    if t - ti < t0:
                        suma = suma + nuevo.data.loc[t - ti, "yt"] / self.long
                    else:
                        suma = suma + nuevo.data.loc[t - ti, "Mt"] / self.long
                    ti = ti + 1
                nuevo.data.loc[t, "Mt"] = suma
                t = t + 1
            # Second pass: the forecast ft is the moving average of Mt.
            t = t0
            while t < t0 + n:
                suma = 0
                ti = 1
                while ti <= self.long:
                    suma = suma + nuevo.data.loc[t - ti, "Mt"] / self.long
                    ti = ti + 1
                nuevo.data.loc[t, "ft"] = suma
                t = t + 1
        else:
            # Non-lagged variant: same two passes, but the averaging window
            # starts at the current period instead of the previous one.
            t = t0
            while t < t0 + n:
                if (nuevo.data.index != t).all():
                    nuevo.data.loc[t] = np.nan
                suma = 0
                ti = 0
                while ti < self.long:
                    if t - ti < t0:
                        suma = suma + nuevo.data.loc[t - ti, "yt"] / self.long
                    else:
                        suma = suma + nuevo.data.loc[t - ti - 1, "Mt"] / self.long
                    ti = ti + 1
                nuevo.data.loc[t, "Mt"] = suma
                t = t + 1
            t = t0
            while t < t0 + n:
                suma = 0
                ti = 0
                while ti < self.long:
                    suma = suma + nuevo.data.loc[t - ti, "Mt"] / self.long
                    ti = ti + 1
                nuevo.data.loc[t, "ft"] = suma
                t = t + 1
        nuevo.calcularErrores()
        return nuevo
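For reference, the two chained rolling means that `__init__` computes in the non-desfasada case can be reproduced directly with pandas. The data below is purely illustrative (the package expects the observations in a `yt` column); this sketch only shows the arithmetic and is not part of the package:

```python
import pandas as pd

# Illustrative series only; the package keeps the observed values in a "yt" column.
df = pd.DataFrame({"yt": [12.0, 15.0, 14.0, 18.0, 20.0, 22.0, 21.0, 25.0]})
longitud = 3
df["Mt"] = df["yt"].rolling(window=longitud).mean()   # first moving average
df["ft"] = df["Mt"].rolling(window=longitud).mean()   # moving average of the moving average
print(df)
```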
PypiClean
/openerp-sale-7.0.406.tar.gz/openerp-sale-7.0.406/edi/sale_order.py
from openerp.osv import osv, fields from openerp.addons.edi import EDIMixin from openerp.tools.translate import _ from urllib import urlencode SALE_ORDER_LINE_EDI_STRUCT = { 'sequence': True, 'name': True, #custom: 'date_planned' 'product_id': True, 'product_uom': True, 'price_unit': True, #custom: 'product_qty' 'discount': True, # fields used for web preview only - discarded on import 'price_subtotal': True, } SALE_ORDER_EDI_STRUCT = { 'name': True, 'origin': True, 'company_id': True, # -> to be changed into partner #custom: 'partner_ref' 'date_order': True, 'partner_id': True, #custom: 'partner_address' #custom: 'notes' 'order_line': SALE_ORDER_LINE_EDI_STRUCT, # fields used for web preview only - discarded on import 'amount_total': True, 'amount_untaxed': True, 'amount_tax': True, 'payment_term': True, 'order_policy': True, 'user_id': True, 'state': True, } class sale_order(osv.osv, EDIMixin): _inherit = 'sale.order' def edi_export(self, cr, uid, records, edi_struct=None, context=None): """Exports a Sale order""" edi_struct = dict(edi_struct or SALE_ORDER_EDI_STRUCT) res_company = self.pool.get('res.company') res_partner_obj = self.pool.get('res.partner') edi_doc_list = [] for order in records: # generate the main report self._edi_generate_report_attachment(cr, uid, order, context=context) # Get EDI doc based on struct. The result will also contain all metadata fields and attachments. edi_doc = super(sale_order,self).edi_export(cr, uid, [order], edi_struct, context)[0] edi_doc.update({ # force trans-typing to purchase.order upon import '__import_model': 'purchase.order', '__import_module': 'purchase', 'company_address': res_company.edi_export_address(cr, uid, order.company_id, context=context), 'partner_address': res_partner_obj.edi_export(cr, uid, [order.partner_id], context=context)[0], 'currency': self.pool.get('res.currency').edi_export(cr, uid, [order.pricelist_id.currency_id], context=context)[0], 'partner_ref': order.client_order_ref or False, 'notes': order.note or False, }) edi_doc_list.append(edi_doc) return edi_doc_list def _edi_import_company(self, cr, uid, edi_document, context=None): # TODO: for multi-company setups, we currently import the document in the # user's current company, but we should perhaps foresee a way to select # the desired company among the user's allowed companies self._edi_requires_attributes(('company_id','company_address'), edi_document) res_partner = self.pool.get('res.partner') xid, company_name = edi_document.pop('company_id') # Retrofit address info into a unified partner info (changed in v7 - used to keep them separate) company_address_edi = edi_document.pop('company_address') company_address_edi['name'] = company_name company_address_edi['is_company'] = True company_address_edi['__import_model'] = 'res.partner' company_address_edi['__id'] = xid # override address ID, as of v7 they should be the same anyway if company_address_edi.get('logo'): company_address_edi['image'] = company_address_edi.pop('logo') company_address_edi['customer'] = True partner_id = res_partner.edi_import(cr, uid, company_address_edi, context=context) # modify edi_document to refer to new partner partner = res_partner.browse(cr, uid, partner_id, context=context) partner_edi_m2o = self.edi_m2o(cr, uid, partner, context=context) edi_document['partner_id'] = partner_edi_m2o edi_document['partner_invoice_id'] = partner_edi_m2o edi_document['partner_shipping_id'] = partner_edi_m2o edi_document.pop('partner_address', None) # ignored, that's supposed to be our own address! 
return partner_id def _edi_get_pricelist(self, cr, uid, partner_id, currency, context=None): # TODO: refactor into common place for purchase/sale, e.g. into product module partner_model = self.pool.get('res.partner') partner = partner_model.browse(cr, uid, partner_id, context=context) pricelist = partner.property_product_pricelist if not pricelist: pricelist = self.pool.get('ir.model.data').get_object(cr, uid, 'product', 'list0', context=context) if not pricelist.currency_id == currency: # look for a pricelist with the right type and currency, or make a new one pricelist_type = 'sale' product_pricelist = self.pool.get('product.pricelist') match_pricelist_ids = product_pricelist.search(cr, uid,[('type','=',pricelist_type), ('currency_id','=',currency.id)]) if match_pricelist_ids: pricelist_id = match_pricelist_ids[0] else: pricelist_name = _('EDI Pricelist (%s)') % (currency.name,) pricelist_id = product_pricelist.create(cr, uid, {'name': pricelist_name, 'type': pricelist_type, 'currency_id': currency.id, }) self.pool.get('product.pricelist.version').create(cr, uid, {'name': pricelist_name, 'pricelist_id': pricelist_id}) pricelist = product_pricelist.browse(cr, uid, pricelist_id) return self.edi_m2o(cr, uid, pricelist, context=context) def edi_import(self, cr, uid, edi_document, context=None): self._edi_requires_attributes(('company_id','company_address','order_line','date_order','currency'), edi_document) #import company as a new partner partner_id = self._edi_import_company(cr, uid, edi_document, context=context) # currency for rounding the discount calculations and for the pricelist res_currency = self.pool.get('res.currency') currency_info = edi_document.pop('currency') currency_id = res_currency.edi_import(cr, uid, currency_info, context=context) order_currency = res_currency.browse(cr, uid, currency_id) partner_ref = edi_document.pop('partner_ref', False) edi_document['client_order_ref'] = edi_document['name'] edi_document['name'] = partner_ref or edi_document['name'] edi_document['note'] = edi_document.pop('notes', False) edi_document['pricelist_id'] = self._edi_get_pricelist(cr, uid, partner_id, order_currency, context=context) # discard web preview fields, if present edi_document.pop('amount_total', None) edi_document.pop('amount_tax', None) edi_document.pop('amount_untaxed', None) order_lines = edi_document['order_line'] for order_line in order_lines: self._edi_requires_attributes(('product_id', 'product_uom', 'product_qty', 'price_unit'), order_line) order_line['product_uom_qty'] = order_line['product_qty'] del order_line['product_qty'] # discard web preview fields, if present order_line.pop('price_subtotal', None) return super(sale_order,self).edi_import(cr, uid, edi_document, context=context) def _edi_paypal_url(self, cr, uid, ids, field, arg, context=None): res = dict.fromkeys(ids, False) for order in self.browse(cr, uid, ids, context=context): if order.order_policy in ('prepaid', 'manual') and \ order.company_id.paypal_account and order.state != 'draft': params = { "cmd": "_xclick", "business": order.company_id.paypal_account, "item_name": order.company_id.name + " Order " + order.name, "invoice": order.name, "amount": order.amount_total, "currency_code": order.pricelist_id.currency_id.name, "button_subtype": "services", "no_note": "1", "bn": "OpenERP_Order_PayNow_" + order.pricelist_id.currency_id.name, } res[order.id] = "https://www.paypal.com/cgi-bin/webscr?" 
+ urlencode(params) return res _columns = { 'paypal_url': fields.function(_edi_paypal_url, type='char', string='Paypal Url'), } class sale_order_line(osv.osv, EDIMixin): _inherit='sale.order.line' def edi_export(self, cr, uid, records, edi_struct=None, context=None): """Overridden to provide sale order line fields with the expected names (sale and purchase orders have different column names)""" edi_struct = dict(edi_struct or SALE_ORDER_LINE_EDI_STRUCT) edi_doc_list = [] for line in records: edi_doc = super(sale_order_line,self).edi_export(cr, uid, [line], edi_struct, context)[0] edi_doc['__import_model'] = 'purchase.order.line' edi_doc['product_qty'] = line.product_uom_qty if line.product_uos: edi_doc.update(product_uom=line.product_uos, product_qty=line.product_uos_qty) edi_doc_list.append(edi_doc) return edi_doc_list # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
PypiClean
/tw.dojo-0.9.181.tar.gz/tw.dojo-0.9.181/tw/dojo/static/1.8.1/debug/dojox/drawing/ui/dom/Zoom.js.uncompressed.js
define("dojox/drawing/ui/dom/Zoom", ["dojo", "../../util/oo", "../../plugins/_Plugin"], function(dojo, oo, Plugin){ var Zoom = oo.declare( // NOTE: // dojox.drawing.ui.dom.Zoom is DEPRECATED. // This was a temporary DOM solution. Use the non-dom // tools for Toolbar and Plugins. // summary: // A plugin that allows for zooming the canvas in and out. An // action-tool is added to the toolbar with plus, minus and 100% // buttons. // example: // | <div dojoType="dojox.drawing.Toolbar" drawingId="drawingNode" class="drawingToolbar vertical"> // | <div tool="dojox.drawing.tools.Line" selected="true">Line</div> // | <div plugin="dojox.drawing.ui.dom.Zoom" options="{zoomInc:.1,minZoom:.5,maxZoom:2}">Zoom</div> // | </div> Plugin, function(options){ var cls = options.node.className; var txt = options.node.innerHTML; this.domNode = dojo.create("div", {id:"btnZoom", "class":"toolCombo"}, options.node, "replace"); this.makeButton("ZoomIn", this.topClass); this.makeButton("Zoom100", this.midClass); this.makeButton("ZoomOut", this.botClass); }, { type:"dojox.drawing.ui.dom.Zoom", // zoomInc: Float // The amount of zoom that will occur upon each click. zoomInc:.1, // maxZoom: Number // The maximum the canvas can be zoomed in. 10 = 1000% maxZoom:10, // minZoom: Float // The most the canvas can be zoomed out. .1 = 10% minZoom:.1, // zoomFactor: [readonly] Float // The current zoom amount zoomFactor:1, // baseClass: String // The CSS class added to the Toolbar buttons baseClass:"drawingButton", // topClass: String // The CSS class added to the top (or left) Toolbar button topClass:"toolComboTop", // midClass: String // The CSS class added to the middle Toolbar button midClass:"toolComboMid", // botClass: String // The CSS class added to the bottom (or right) Toolbar button botClass:"toolComboBot", makeButton: function(name, cls){ // summary: // Internal. Creates one of the buttons in the zoom-button set. var node = dojo.create("div", {id:"btn"+name, "class":this.baseClass+" "+cls, innerHTML:'<div title="Zoom In" class="icon icon'+name+'"></div>'}, this.domNode); dojo.connect(document, "mouseup", function(evt){ dojo.stopEvent(evt); dojo.removeClass(node, "active"); }); dojo.connect(node, "mouseup", this, function(evt){ dojo.stopEvent(evt); dojo.removeClass(node, "active"); this["on"+name](); // this is what calls the methods below }); dojo.connect(node, "mouseover", function(evt){ dojo.stopEvent(evt); dojo.addClass(node, "hover"); }); dojo.connect(node, "mousedown", this, function(evt){ dojo.stopEvent(evt); dojo.addClass(node, "active"); }); dojo.connect(node, "mouseout", this, function(evt){ dojo.stopEvent(evt); dojo.removeClass(node, "hover"); }); }, onZoomIn: function(/*Mouse Event*/evt){ // summary: // Handles zoom in. this.zoomFactor += this.zoomInc; this.zoomFactor = Math.min(this.zoomFactor, this.maxZoom); this.canvas.setZoom(this.zoomFactor); this.mouse.setZoom(this.zoomFactor); }, onZoom100: function(/*Mouse Event*/evt){ // summary: // Zooms to 100% this.zoomFactor = 1; this.canvas.setZoom(this.zoomFactor); this.mouse.setZoom(this.zoomFactor); }, onZoomOut: function(/*Mouse Event*/evt){ // summary: // Handles zoom out. this.zoomFactor -= this.zoomInc; this.zoomFactor = Math.max(this.zoomFactor, this.minZoom); this.canvas.setZoom(this.zoomFactor); this.mouse.setZoom(this.zoomFactor); } } ); dojo.setObject("dojox.drawing.ui.dom.Zoom", Zoom); //dojox.drawing.register(dojox.drawing.plugins.tools.Pan, "plugin"); return Zoom; });
PypiClean
/trojanzoo-2.0.2.tar.gz/trojanzoo-2.0.2/trojanvision/datasets/folder/cub200.py
from trojanvision.datasets.imagefolder import ImageFolder from trojanzoo.utils.output import ansi, prints from torchvision.datasets.utils import download_file_from_google_drive, extract_archive, check_integrity import os import shutil import pandas as pd from trojanvision import __file__ as root_file root_dir = os.path.dirname(root_file) class CUB200(ImageFolder): r"""CUB200 dataset introduced by Peter Welinder in 2010. It inherits :class:`trojanvision.datasets.ImageFolder`. See Also: * paper: `Caltech-UCSD Birds 200`_ * website: http://www.vision.caltech.edu/visipedia/CUB-200.html Attributes: name (str): ``'cub200'`` num_classes (int): ``200`` data_shape (list[int]): ``[3, 224, 224]`` valid_set (bool): ``False`` .. _Caltech-UCSD Birds 200: http://www.vision.caltech.edu/visipedia/papers/WelinderEtal10_CUB-200.pdf """ name = 'cub200' num_classes = 200 valid_set = False # http://www.vision.caltech.edu/visipedia-data/CUB-200/images.tgz url = {'train': '1GDr1OkoXdhaXWGA8S3MAq3a522Tak-nx'} ext = {'train': '.tgz'} md5 = {'train': '2bbe304ef1aa3ddb6094aa8f53487cf2'} org_folder_name = {'train': 'images'} def download_and_extract_archive(self, mode: str): file_name = f'{self.name}_{mode}{self.ext[mode]}' file_path = os.path.normpath(os.path.join(self.folder_path, file_name)) md5 = self.md5.get(mode) if not check_integrity(file_path, md5=md5): prints('{yellow}Downloading Dataset{reset} '.format(**ansi), f'{self.name} {mode:5s}: {file_path}', indent=10) download_file_from_google_drive(file_id=self.url[mode], root=self.folder_path, filename=file_name, md5=md5) print('{upline}{clear_line}'.format(**ansi)) else: prints('{yellow}File Already Exists{reset}: '.format(**ansi), file_path, indent=10) extract_archive(from_path=file_path, to_path=self.folder_path) def initialize_folder(self, **kwargs): super().initialize_folder(**kwargs) self.split() def split(self): # Remove useless files os.remove(os.path.join(self.folder_path, '._images')) dirpath = os.path.join(self.folder_path, 'train') for fpath in os.listdir(dirpath): path = os.path.join(dirpath, fpath) if os.path.isfile(path): os.remove(path) # Split Train and Valid Set txt_path = os.path.normpath(os.path.join(root_dir, 'data', 'cub200', 'test.txt')) file_list: list[str] = [] with open(txt_path, 'r') as fp: file_list = fp.read().split('\n')[:-1] src_dir = os.path.join(self.folder_path, 'train') dst_dir = os.path.join(self.folder_path, 'valid') if not os.path.exists(dst_dir): os.makedirs(dst_dir) for fpath in file_list: src_path = os.path.join(src_dir, fpath) dst_path = os.path.join(dst_dir, fpath) dir_name = os.path.dirname(dst_path) if not os.path.exists(dir_name): os.makedirs(dir_name) shutil.move(src_path, dst_path) class CUB200_2011(CUB200): r"""CUB200_2011 dataset. It inherits :class:`trojanvision.datasets.ImageFolder`. See Also: * paper: `The Caltech-UCSD Birds-200-2011 Dataset`_ * website: http://www.vision.caltech.edu/visipedia/CUB-200-2011.html Attributes: name (str): ``'cub200_2011'`` num_classes (int): ``200`` data_shape (list[int]): ``[3, 224, 224]`` valid_set (bool): ``False`` .. 
_The Caltech-UCSD Birds-200-2011 Dataset: http://www.vision.caltech.edu/visipedia/papers/CUB_200_2011.pdf """ name = 'cub200_2011' # http://www.vision.caltech.edu/visipedia-data/CUB-200-2011/CUB_200_2011.tgz url = {'train': '1hbzc_P1FuxMkcabkgn9ZKinBwW683j45'} ext = {'train': '.tgz'} md5 = {'train': '97eceeb196236b17998738112f37df78'} org_folder_name = {'train': 'CUB_200_2011/images'} def split(self): # Split Train and Valid Set src_dir = os.path.join(self.folder_path, 'total') dst_dir = {'train': os.path.join(self.folder_path, 'train'), 'valid': os.path.join(self.folder_path, 'valid')} os.rename(dst_dir['train'], src_dir) os.remove(os.path.join(self.folder_path, 'attributes.txt')) images = pd.read_csv(os.path.join(root_dir, 'data', 'cub200_2011', 'images.txt'), sep=' ', names=['img_id', 'filepath']) train_test_split = pd.read_csv(os.path.join(root_dir, 'data', 'cub200_2011', 'train_test_split.txt'), sep=' ', names=['img_id', 'is_training_img']) data = images.merge(train_test_split, on='img_id') file_dict: dict[str, list[str]] = { 'train': data[data.is_training_img == 1]['filepath'].tolist(), 'valid': data[data.is_training_img == 0]['filepath'].tolist(), } for mode in ['train', 'valid']: for fpath in file_dict[mode]: src_path = os.path.join(src_dir, fpath) dst_path = os.path.join(dst_dir[mode], fpath) dir_name = os.path.dirname(dst_path) if not os.path.exists(dir_name): os.makedirs(dir_name) shutil.move(src_path, dst_path) shutil.rmtree(src_dir)
PypiClean
/heresuperset-0.27.6.tar.gz/heresuperset-0.27.6/superset/security.py
"""A set of constants and methods to manage permissions and security""" from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals import logging from flask import g from flask_appbuilder.security.sqla import models as ab_models from flask_appbuilder.security.sqla.manager import SecurityManager from sqlalchemy import or_ from superset import sql_parse from superset.connectors.connector_registry import ConnectorRegistry READ_ONLY_MODEL_VIEWS = { 'DatabaseAsync', 'DatabaseView', 'DruidClusterModelView', } GAMMA_READ_ONLY_MODEL_VIEWS = { 'SqlMetricInlineView', 'TableColumnInlineView', 'TableModelView', 'DruidColumnInlineView', 'DruidDatasourceModelView', 'DruidMetricInlineView', } | READ_ONLY_MODEL_VIEWS ADMIN_ONLY_VIEW_MENUS = { 'AccessRequestsModelView', 'Manage', 'SQL Lab', 'Queries', 'Refresh Druid Metadata', 'ResetPasswordView', 'RoleModelView', 'Security', 'UserDBModelView', 'UserLDAPModelView', 'UserOAuthModelView', 'UserOIDModelView', 'UserRemoteUserModelView', } ALPHA_ONLY_VIEW_MENUS = { 'Upload a CSV', } ADMIN_ONLY_PERMISSIONS = { 'all_database_access', 'can_sql_json', # TODO: move can_sql_json to sql_lab role 'can_override_role_permissions', 'can_sync_druid_source', 'can_override_role_permissions', 'can_approve', 'can_update_role', } READ_ONLY_PERMISSION = { 'can_show', 'can_list', } ALPHA_ONLY_PERMISSIONS = set([ 'muldelete', 'all_datasource_access', ]) OBJECT_SPEC_PERMISSIONS = set([ 'database_access', 'schema_access', 'datasource_access', 'metric_access', ]) class SupersetSecurityManager(SecurityManager): def get_schema_perm(self, database, schema): if schema: return '[{}].[{}]'.format(database, schema) def can_access(self, permission_name, view_name, user=None): """Protecting from has_access failing from missing perms/view""" if not user: user = g.user if user.is_anonymous(): return self.is_item_public(permission_name, view_name) return self._has_view_access(user, permission_name, view_name) def all_datasource_access(self, user=None): return self.can_access( 'all_datasource_access', 'all_datasource_access', user=user) def database_access(self, database, user=None): return ( self.can_access( 'all_database_access', 'all_database_access', user=user) or self.can_access('database_access', database.perm, user=user) ) def schema_access(self, datasource, user=None): return ( self.database_access(datasource.database, user=user) or self.all_datasource_access(user=user) or self.can_access('schema_access', datasource.schema_perm, user=user) ) def datasource_access(self, datasource, user=None): return ( self.schema_access(datasource, user=user) or self.can_access('datasource_access', datasource.perm, user=user) ) def get_datasource_access_error_msg(self, datasource): return """This endpoint requires the datasource {}, database or `all_datasource_access` permission""".format(datasource.name) def get_datasource_access_link(self, datasource): from superset import conf return conf.get('PERMISSION_INSTRUCTIONS_LINK') def get_table_access_error_msg(self, table_name): return """You need access to the following tables: {}, all database access or `all_datasource_access` permission""".format(table_name) def get_table_access_link(self, tables): from superset import conf return conf.get('PERMISSION_INSTRUCTIONS_LINK') def datasource_access_by_name( self, database, datasource_name, schema=None): from superset import db if self.database_access(database) or self.all_datasource_access(): return True schema_perm = 
self.get_schema_perm(database, schema) if schema and self.can_access('schema_access', schema_perm): return True datasources = ConnectorRegistry.query_datasources_by_name( db.session, database, datasource_name, schema=schema) for datasource in datasources: if self.can_access('datasource_access', datasource.perm): return True return False def get_schema_and_table(self, table_in_query, schema): table_name_pieces = table_in_query.split('.') if len(table_name_pieces) == 2: table_schema = table_name_pieces[0] table_name = table_name_pieces[1] else: table_schema = schema table_name = table_name_pieces[0] return (table_schema, table_name) def datasource_access_by_fullname( self, database, table_in_query, schema): table_schema, table_name = self.get_schema_and_table(table_in_query, schema) return self.datasource_access_by_name( database, table_name, schema=table_schema) def rejected_datasources(self, sql, database, schema): superset_query = sql_parse.SupersetQuery(sql) return [ t for t in superset_query.tables if not self.datasource_access_by_fullname(database, t, schema)] def user_datasource_perms(self): datasource_perms = set() for r in g.user.roles: for perm in r.permissions: if ( perm.permission and 'datasource_access' == perm.permission.name): datasource_perms.add(perm.view_menu.name) return datasource_perms def schemas_accessible_by_user(self, database, schemas): from superset import db from superset.connectors.sqla.models import SqlaTable if self.database_access(database) or self.all_datasource_access(): return schemas subset = set() for schema in schemas: schema_perm = self.get_schema_perm(database, schema) if self.can_access('schema_access', schema_perm): subset.add(schema) perms = self.user_datasource_perms() if perms: tables = ( db.session.query(SqlaTable) .filter( SqlaTable.perm.in_(perms), SqlaTable.database_id == database.id, ) .all() ) for t in tables: if t.schema: subset.add(t.schema) return sorted(list(subset)) def accessible_by_user(self, database, datasource_names, schema=None): from superset import db if self.database_access(database) or self.all_datasource_access(): return datasource_names if schema: schema_perm = self.get_schema_perm(database, schema) if self.can_access('schema_access', schema_perm): return datasource_names user_perms = self.user_datasource_perms() user_datasources = ConnectorRegistry.query_datasources_by_permissions( db.session, database, user_perms) if schema: names = { d.table_name for d in user_datasources if d.schema == schema} return [d for d in datasource_names if d in names] else: full_names = {d.full_name for d in user_datasources} return [d for d in datasource_names if d in full_names] def merge_perm(self, permission_name, view_menu_name): # Implementation copied from sm.find_permission_view_menu. # TODO: use sm.find_permission_view_menu once issue # https://github.com/airbnb/superset/issues/1944 is resolved. 
permission = self.find_permission(permission_name) view_menu = self.find_view_menu(view_menu_name) pv = None if permission and view_menu: pv = self.get_session.query(self.permissionview_model).filter_by( permission=permission, view_menu=view_menu).first() if not pv and permission_name and view_menu_name: self.add_permission_view_menu(permission_name, view_menu_name) def is_user_defined_permission(self, perm): return perm.permission.name in OBJECT_SPEC_PERMISSIONS def create_custom_permissions(self): # Global perms self.merge_perm('all_datasource_access', 'all_datasource_access') self.merge_perm('all_database_access', 'all_database_access') def create_missing_perms(self): """Creates missing perms for datasources, schemas and metrics""" from superset import db from superset.models import core as models logging.info( 'Fetching a set of all perms to lookup which ones are missing') all_pvs = set() for pv in self.get_session.query(self.permissionview_model).all(): if pv.permission and pv.view_menu: all_pvs.add((pv.permission.name, pv.view_menu.name)) def merge_pv(view_menu, perm): """Create permission view menu only if it doesn't exist""" if view_menu and perm and (view_menu, perm) not in all_pvs: self.merge_perm(view_menu, perm) logging.info('Creating missing datasource permissions.') datasources = ConnectorRegistry.get_all_datasources(db.session) for datasource in datasources: merge_pv('datasource_access', datasource.get_perm()) merge_pv('schema_access', datasource.schema_perm) logging.info('Creating missing database permissions.') databases = db.session.query(models.Database).all() for database in databases: merge_pv('database_access', database.perm) logging.info('Creating missing metrics permissions') metrics = [] for datasource_class in ConnectorRegistry.sources.values(): metrics += list(db.session.query(datasource_class.metric_class).all()) for metric in metrics: if metric.is_restricted: merge_pv('metric_access', metric.perm) def clean_perms(self): """FAB leaves faulty permissions that need to be cleaned up""" logging.info('Cleaning faulty perms') sesh = self.get_session pvms = ( sesh.query(ab_models.PermissionView) .filter(or_( ab_models.PermissionView.permission == None, # NOQA ab_models.PermissionView.view_menu == None, # NOQA )) ) deleted_count = pvms.delete() sesh.commit() if deleted_count: logging.info('Deleted {} faulty permissions'.format(deleted_count)) def sync_role_definitions(self): """Inits the Superset application with security roles and such""" from superset import conf logging.info('Syncing role definition') self.create_custom_permissions() # Creating default roles self.set_role('Admin', self.is_admin_pvm) self.set_role('Alpha', self.is_alpha_pvm) self.set_role('Gamma', self.is_gamma_pvm) self.set_role('granter', self.is_granter_pvm) self.set_role('sql_lab', self.is_sql_lab_pvm) if conf.get('PUBLIC_ROLE_LIKE_GAMMA', False): self.set_role('Public', self.is_gamma_pvm) self.create_missing_perms() # commit role and view menu updates self.get_session.commit() self.clean_perms() def set_role(self, role_name, pvm_check): logging.info('Syncing {} perms'.format(role_name)) sesh = self.get_session pvms = sesh.query(ab_models.PermissionView).all() pvms = [p for p in pvms if p.permission and p.view_menu] role = self.add_role(role_name) role_pvms = [p for p in pvms if pvm_check(p)] role.permissions = role_pvms sesh.merge(role) sesh.commit() def is_admin_only(self, pvm): # not readonly operations on read only model views allowed only for admins if (pvm.view_menu.name in 
READ_ONLY_MODEL_VIEWS and pvm.permission.name not in READ_ONLY_PERMISSION): return True return ( pvm.view_menu.name in ADMIN_ONLY_VIEW_MENUS or pvm.permission.name in ADMIN_ONLY_PERMISSIONS ) def is_alpha_only(self, pvm): if (pvm.view_menu.name in GAMMA_READ_ONLY_MODEL_VIEWS and pvm.permission.name not in READ_ONLY_PERMISSION): return True return ( pvm.view_menu.name in ALPHA_ONLY_VIEW_MENUS or pvm.permission.name in ALPHA_ONLY_PERMISSIONS ) def is_admin_pvm(self, pvm): return not self.is_user_defined_permission(pvm) def is_alpha_pvm(self, pvm): return not (self.is_user_defined_permission(pvm) or self.is_admin_only(pvm)) def is_gamma_pvm(self, pvm): return not (self.is_user_defined_permission(pvm) or self.is_admin_only(pvm) or self.is_alpha_only(pvm)) def is_sql_lab_pvm(self, pvm): return ( pvm.view_menu.name in { 'SQL Lab', 'SQL Editor', 'Query Search', 'Saved Queries', } or pvm.permission.name in { 'can_sql_json', 'can_csv', 'can_search_queries', 'can_sqllab_viz', 'can_sqllab', }) def is_granter_pvm(self, pvm): return pvm.permission.name in { 'can_override_role_permissions', 'can_approve', }
PypiClean
/ImageStega-0.0.1.tar.gz/ImageStega-0.0.1/README.md
# Image Steganography
***
## Table of Contents
1. [General Info](#general-info)
2. [Technologies](#technologies)
3. [Example](#example)
4. [FAQs](#faqs)

## General Info
***
This project deals with hiding a text message inside an image, a technique known as image steganography. It is an extremely useful technique for concealing text messages. The project provides two functions:
* `encode`, which embeds the entered text in an image supplied by the user.
* `decode`, which recovers the text from an image previously encoded with the `encode` function.

## Technologies
***
A list of libraries used within the project:
* [matplotlib](https://matplotlib.org): Version 3.3.4
* [math](https://docs.python.org/3/library/math.html)
* [OpenCV](https://opencv.org): Version 4.2.0
* [numpy](https://numpy.org): Version 1.21.4

## Example
***
Example of encoding a text message into an image
```python
from ImageStega import ImageStega
image = ImageStega().encode(path, encode_text)
# path(str) : path of the image
# encode_text(str) : text to be encoded in the image
```
Example of decoding the text from an image
```python
from ImageStega import ImageStega
decoded_text = ImageStega().decode(path)
# path(str) : path of the image which was previously encoded with the message.
```

## FAQs
***
A list of frequently asked questions
1. **What is image steganography?**<br>
Steganography is the technique of hiding secret data within an ordinary, non-secret file or message in order to avoid detection. More information can be found on this [Wikipedia](https://en.wikipedia.org/wiki/Steganography) page.
2. **Where can I use this package?**<br>
This tool hides data in images in a way that is hard to notice, so the package can be used to keep text messages secret by encoding them into ordinary-looking images.
3. **How does this algorithm work?**<br>
Consider a 3 × 3 RGB image. The image has 3 channels and height and width equal to 3, giving 3 × 3 × 3 = 27 values, each a pixel value ranging from 0 to 255. Suppose we want to store the character 'H' in this image. The encode function converts 'H' to its binary representation, '01001000'; every ASCII character is represented by an 8-bit binary number, and those bits are saved in the image (a short illustrative sketch of the scheme is shown after this list).<br>
Consider 3 pixel values p1, p2 and p3 as (10,200,35), (26,65,98), (100,139,35) respectively.<br>
If the bit to be saved is 0, the pixel value is made even; if the bit is 1, it is made odd. In our example the pixel values p1, p2 and p3 change to (10,201,36), (26,65,98), (100,140,36). If a pixel value is odd but the bit to be stored is 0, the pixel value is increased by one, and the same applies when a 1 must be stored in an even pixel. In this way the text message is stored in the image as a string of 0s and 1s.<br>
The decode function reads the pixel values one by one: an odd value is read as 1 and an even value as 0. This continues until the algorithm encounters the STOP character, at which point it terminates and returns the decoded text message.<br>
The STOP value is ~ (tilde) in our case, so if your message contains a ~ character the decoder will only read up to that point. This is the only drawback of this code.
4. **How many characters can I save in an image?**<br>
With an RGB image of 1024 × 1024 resolution you can encode up to (1024 × 1024 × 3 − 8) / 8 = 393215 characters. The 8 subtracted accounts for the bits required by the STOP character.
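The parity scheme described in FAQ 3 can be illustrated with a short, self-contained numpy sketch. This is not the package's actual implementation (which is not shown here), only a minimal illustration of the idea; in particular, stepping a 255 pixel down instead of up is this sketch's own choice:

```python
import numpy as np

STOP = "~"  # sentinel character described above; the real package may differ in details

def encode_parity(img: np.ndarray, text: str) -> np.ndarray:
    """Embed `text` in a copy of `img` by forcing pixel parity: even = bit 0, odd = bit 1."""
    bits = "".join(format(ord(c), "08b") for c in text + STOP)
    flat = img.astype(np.int32).reshape(-1)
    if len(bits) > flat.size:
        raise ValueError("message does not fit in this image")
    for i, bit in enumerate(bits):
        if flat[i] % 2 != int(bit):
            # The README adjusts by adding one; stepping down at 255 keeps the
            # value inside the valid 0-255 range (this sketch's own choice).
            flat[i] += 1 if flat[i] < 255 else -1
    return flat.reshape(img.shape).astype(np.uint8)

def decode_parity(img: np.ndarray) -> str:
    """Read parities back eight at a time until the STOP sentinel appears."""
    flat = img.reshape(-1)
    chars = []
    for i in range(0, flat.size - 7, 8):
        ch = chr(int("".join(str(int(v) % 2) for v in flat[i:i + 8]), 2))
        if ch == STOP:
            break
        chars.append(ch)
    return "".join(chars)

demo = np.random.randint(0, 256, size=(3, 3, 3), dtype=np.uint8)
print(decode_parity(encode_parity(demo, "H")))   # prints: H
```

Running the last two lines embeds 'H' plus the ~ sentinel into a random 3 × 3 image and recovers it again.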
PypiClean
/rockset-v2-alpha-0.1.13.tar.gz/rockset-v2-alpha-0.1.13/rockset/model/azure_blob_storage_integration_creation_request.py
import re # noqa: F401 import sys # noqa: F401 from rockset.model_utils import ( # noqa: F401 ApiTypeError, ModelComposed, ModelNormal, ModelSimple, cached_property, change_keys_js_to_python, convert_js_args_to_python_args, date, datetime, file_type, none_type, validate_get_composed_info, OpenApiModel ) from rockset.exceptions import ApiAttributeError def lazy_import(): from rockset.model.azure_blob_storage_integration import AzureBlobStorageIntegration globals()['AzureBlobStorageIntegration'] = AzureBlobStorageIntegration class AzureBlobStorageIntegrationCreationRequest(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech Do not edit the class manually. Attributes: allowed_values (dict): The key is the tuple path to the attribute and the for var_name this is (var_name,). The value is a dict with a capitalized key describing the allowed value and an allowed value. These dicts store the allowed enum values. attribute_map (dict): The key is attribute name and the value is json key in definition. discriminator_value_class_map (dict): A dict to go from the discriminator variable value to the discriminator class name. validations (dict): The key is the tuple path to the attribute and the for var_name this is (var_name,). The value is a dict that stores validations for max_length, min_length, max_items, min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, inclusive_minimum, and regex. additional_properties_type (tuple): A tuple of classes accepted as additional properties values. """ allowed_values = { } validations = { } additional_properties_type = None _nullable = False @cached_property def openapi_types(): """ This must be a method because a model may have properties that are of type self, this must run after the class is loaded Returns openapi_types (dict): The key is attribute name and the value is attribute type. """ lazy_import() return { 'name': (str,), # noqa: E501 'azure_blob_storage': (AzureBlobStorageIntegration, none_type), # noqa: E501 'description': (str, none_type), # noqa: E501 } @cached_property def discriminator(): return None attribute_map = { 'name': 'name', # noqa: E501 'azure_blob_storage': 'azure_blob_storage', # noqa: E501 'description': 'description', # noqa: E501 } read_only_vars = { } _composed_schemas = {} @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, name, *args, **kwargs): # noqa: E501 """AzureBlobStorageIntegrationCreationRequest - a model defined in OpenAPI Args: name (str): descriptive label Keyword Args: _check_type (bool): if True, values for parameters in openapi_types will be type checked and a TypeError will be raised if the wrong type is input. Defaults to True _path_to_item (tuple/list): This is a list of keys or values to drill down to the model in received_data when deserializing a response _spec_property_naming (bool): True if the variable names in the input data are serialized names, as specified in the OpenAPI document. False if the variable names in the input data are pythonic names, e.g. snake case (default) _configuration (Configuration): the instance to use when deserializing a file_type parameter. If passed, type conversion is attempted If omitted no type conversion is done. _visited_composed_classes (tuple): This stores a tuple of classes that we have traveled through so that if we see that class again we will not use its discriminator again. 
When traveling through a discriminator, the composed schema that is is traveled through is added to this set. For example if Animal has a discriminator petType and we pass in "Dog", and the class Dog allOf includes Animal, we move through Animal once using the discriminator, and pick Dog. Then in Dog, we will make an instance of the Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) azure_blob_storage (AzureBlobStorageIntegration): [optional] # noqa: E501 description (str): longer explanation for the integration. [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) _spec_property_naming = kwargs.pop('_spec_property_naming', False) _path_to_item = kwargs.pop('_path_to_item', ()) _configuration = kwargs.pop('_configuration', None) _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) self = super(OpenApiModel, cls).__new__(cls) if args: raise ApiTypeError( "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( args, self.__class__.__name__, ), path_to_item=_path_to_item, valid_classes=(self.__class__,), ) self._data_store = {} self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item self._configuration = _configuration self._visited_composed_classes = _visited_composed_classes + (self.__class__,) self.name = name for var_name, var_value in kwargs.items(): if var_name not in self.attribute_map and \ self._configuration is not None and \ self._configuration.discard_unknown_keys and \ self.additional_properties_type is None: # discard variable. continue setattr(self, var_name, var_value) return self required_properties = set([ '_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes', ]) @convert_js_args_to_python_args def __init__(self, *, name, **kwargs): # noqa: E501 """AzureBlobStorageIntegrationCreationRequest - a model defined in OpenAPI Keyword Args: name (str): descriptive label azure_blob_storage (AzureBlobStorageIntegration): [optional] # noqa: E501 description (str): longer explanation for the integration. [optional] # noqa: E501 _check_type (bool): if True, values for parameters in openapi_types will be type checked and a TypeError will be raised if the wrong type is input. Defaults to True _path_to_item (tuple/list): This is a list of keys or values to drill down to the model in received_data when deserializing a response _spec_property_naming (bool): True if the variable names in the input data are serialized names, as specified in the OpenAPI document. False if the variable names in the input data are pythonic names, e.g. snake case (default) _configuration (Configuration): the instance to use when deserializing a file_type parameter. If passed, type conversion is attempted If omitted no type conversion is done. _visited_composed_classes (tuple): This stores a tuple of classes that we have traveled through so that if we see that class again we will not use its discriminator again. When traveling through a discriminator, the composed schema that is is traveled through is added to this set. For example if Animal has a discriminator petType and we pass in "Dog", and the class Dog allOf includes Animal, we move through Animal once using the discriminator, and pick Dog. 
Then in Dog, we will make an instance of the Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) """ args = [] _check_type = kwargs.pop('_check_type', True) _spec_property_naming = kwargs.pop('_spec_property_naming', False) _path_to_item = kwargs.pop('_path_to_item', ()) _configuration = kwargs.pop('_configuration', None) _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) if args: raise ApiTypeError( "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( args, self.__class__.__name__, ), path_to_item=_path_to_item, valid_classes=(self.__class__,), ) self._data_store = {} self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item self._configuration = _configuration self._visited_composed_classes = _visited_composed_classes + (self.__class__,) self.name = name for var_name, var_value in kwargs.items(): if var_name not in self.attribute_map and \ self._configuration is not None and \ self._configuration.discard_unknown_keys and \ self.additional_properties_type is None: # discard variable. continue setattr(self, var_name, var_value) # todo: remove these comments - this stops the user from setting read only vars but we need this now to address a bug # if var_name in self.read_only_vars: # raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " # f"class with read only attributes.")
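For illustration only, constructing this generated request model might look like the following; the field values are invented, and in real use the object would be passed to the generated Rockset API client rather than printed:

```python
# Hypothetical values; `name` is the only required keyword argument per __init__ above.
request = AzureBlobStorageIntegrationCreationRequest(
    name="blob-demo-integration",
    description="integration created purely for illustration",
)
print(request.name, request.description)
```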
PypiClean
/azure-multiapi-storage-1.2.0.tar.gz/azure-multiapi-storage-1.2.0/azure/multiapi/storage/v2017_04_17/file/fileservice.py
import sys from os import path from azure.common import AzureHttpError from ..common._auth import ( _StorageSharedKeyAuthentication, _StorageSASAuthentication, ) from ..common._common_conversion import ( _int_to_str, _to_str, _get_content_md5, ) from ..common._connection import _ServiceParameters from ..common._constants import ( SERVICE_HOST_BASE, DEFAULT_PROTOCOL, DEV_ACCOUNT_NAME, ) from ..common._deserialization import ( _convert_xml_to_service_properties, _convert_xml_to_signed_identifiers, _parse_metadata, _parse_properties, _parse_length_from_content_range, ) from ..common._error import ( _dont_fail_not_exist, _dont_fail_on_exist, _validate_not_none, _validate_type_bytes, _ERROR_VALUE_NEGATIVE, _ERROR_STORAGE_MISSING_INFO, _ERROR_EMULATOR_DOES_NOT_SUPPORT_FILES, _ERROR_PARALLEL_NOT_SEEKABLE, _validate_access_policies, ) from ..common._http import HTTPRequest from ..common._serialization import ( _get_request_body, _get_data_bytes_only, _convert_signed_identifiers_to_xml, _convert_service_properties_to_xml, _add_metadata_headers, ) from ..common.models import ( Services, ListGenerator, _OperationContext, ) from .sharedaccesssignature import ( FileSharedAccessSignature, ) from ..common.storageclient import StorageClient from ._deserialization import ( _convert_xml_to_shares, _convert_xml_to_directories_and_files, _convert_xml_to_ranges, _convert_xml_to_share_stats, _parse_file, _parse_share, _parse_snapshot_share, _parse_directory, ) from ._download_chunking import _download_file_chunks from ._serialization import ( _get_path, _validate_and_format_range_headers, ) from ._upload_chunking import _upload_file_chunks from .models import ( FileProperties, ) from ._constants import ( X_MS_VERSION, __version__ as package_version, ) if sys.version_info >= (3,): from io import BytesIO else: from cStringIO import StringIO as BytesIO class FileService(StorageClient): ''' The Server Message Block (SMB) protocol is the preferred file share protocol used on premise today. The Microsoft Azure File service enables customers to leverage the availability and scalability of Azure's Cloud Infrastructure as a Service (IaaS) SMB without having to rewrite SMB client applications. The Azure File service also offers a compelling alternative to traditional Direct Attached Storage (DAS) and Storage Area Network (SAN) solutions, which are often complex and expensive to install, configure, and operate. :ivar int MAX_SINGLE_GET_SIZE: The size of the first range get performed by get_file_to_* methods if max_connections is greater than 1. Less data will be returned if the file is smaller than this. :ivar int MAX_CHUNK_GET_SIZE: The size of subsequent range gets performed by get_file_to_* methods if max_connections is greater than 1 and the file is larger than MAX_SINGLE_GET_SIZE. Less data will be returned if the remainder of the file is smaller than this. If this is set to larger than 4MB, content_validation will throw an error if enabled. However, if content_validation is not desired a size greater than 4MB may be optimal. Setting this below 4MB is not recommended. :ivar int MAX_RANGE_SIZE: The size of the ranges put by create_file_from_* methods. Smaller ranges may be put if there is less data provided. The maximum range size the service supports is 4MB. 
''' MAX_SINGLE_GET_SIZE = 32 * 1024 * 1024 MAX_CHUNK_GET_SIZE = 8 * 1024 * 1024 MAX_RANGE_SIZE = 4 * 1024 * 1024 def __init__(self, account_name=None, account_key=None, sas_token=None, protocol=DEFAULT_PROTOCOL, endpoint_suffix=SERVICE_HOST_BASE, request_session=None, connection_string=None, socket_timeout=None): ''' :param str account_name: The storage account name. This is used to authenticate requests signed with an account key and to construct the storage endpoint. It is required unless a connection string is given. :param str account_key: The storage account key. This is used for shared key authentication. :param str sas_token: A shared access signature token to use to authenticate requests instead of the account key. If account key and sas token are both specified, account key will be used to sign. :param str protocol: The protocol to use for requests. Defaults to https. :param str endpoint_suffix: The host base component of the url, minus the account name. Defaults to Azure (core.windows.net). Override this to use the China cloud (core.chinacloudapi.cn). :param requests.Session request_session: The session object to use for http requests. :param str connection_string: If specified, this will override all other parameters besides request session. See http://azure.microsoft.com/en-us/documentation/articles/storage-configure-connection-string/ for the connection string format. :param int socket_timeout: If specified, this will override the default socket timeout. The timeout specified is in seconds. See DEFAULT_SOCKET_TIMEOUT in _constants.py for the default value. ''' service_params = _ServiceParameters.get_service_parameters( 'file', account_name=account_name, account_key=account_key, sas_token=sas_token, protocol=protocol, endpoint_suffix=endpoint_suffix, request_session=request_session, connection_string=connection_string, socket_timeout=socket_timeout) super(FileService, self).__init__(service_params) if self.account_name == DEV_ACCOUNT_NAME: raise ValueError(_ERROR_EMULATOR_DOES_NOT_SUPPORT_FILES) if self.account_key: self.authentication = _StorageSharedKeyAuthentication( self.account_name, self.account_key, ) elif self.sas_token: self.authentication = _StorageSASAuthentication(self.sas_token) else: raise ValueError(_ERROR_STORAGE_MISSING_INFO) self._X_MS_VERSION = X_MS_VERSION self._update_user_agent_string(package_version) def make_file_url(self, share_name, directory_name, file_name, protocol=None, sas_token=None): ''' Creates the url to access a file. :param str share_name: Name of share. :param str directory_name: The path to the directory. :param str file_name: Name of file. :param str protocol: Protocol to use: 'http' or 'https'. If not specified, uses the protocol specified when FileService was initialized. :param str sas_token: Shared access signature token created with generate_shared_access_signature. :return: file access URL. :rtype: str ''' if directory_name is None: url = '{}://{}/{}/{}'.format( protocol or self.protocol, self.primary_endpoint, share_name, file_name, ) else: url = '{}://{}/{}/{}/{}'.format( protocol or self.protocol, self.primary_endpoint, share_name, directory_name, file_name, ) if sas_token: url += '?' + sas_token return url def generate_account_shared_access_signature(self, resource_types, permission, expiry, start=None, ip=None, protocol=None): ''' Generates a shared access signature for the file service. Use the returned signature with the sas_token parameter of the FileService. 
:param ResourceTypes resource_types: Specifies the resource types that are accessible with the account SAS. :param AccountPermissions permission: The permissions associated with the shared access signature. The user is restricted to operations allowed by the permissions. Required unless an id is given referencing a stored access policy which contains this field. This field must be omitted if it has been specified in an associated stored access policy. :param expiry: The time at which the shared access signature becomes invalid. Required unless an id is given referencing a stored access policy which contains this field. This field must be omitted if it has been specified in an associated stored access policy. Azure will always convert values to UTC. If a date is passed in without timezone info, it is assumed to be UTC. :type expiry: datetime or str :param start: The time at which the shared access signature becomes valid. If omitted, start time for this call is assumed to be the time when the storage service receives the request. Azure will always convert values to UTC. If a date is passed in without timezone info, it is assumed to be UTC. :type start: datetime or str :param str ip: Specifies an IP address or a range of IP addresses from which to accept requests. If the IP address from which the request originates does not match the IP address or address range specified on the SAS token, the request is not authenticated. For example, specifying sip=168.1.5.65 or sip=168.1.5.60-168.1.5.70 on the SAS restricts the request to those IP addresses. :param str protocol: Specifies the protocol permitted for a request made. Possible values are both HTTPS and HTTP (https,http) or HTTPS only (https). The default value is https,http. Note that HTTP only is not a permitted value. :return: A Shared Access Signature (sas) token. :rtype: str ''' _validate_not_none('self.account_name', self.account_name) _validate_not_none('self.account_key', self.account_key) sas = FileSharedAccessSignature(self.account_name, self.account_key) return sas.generate_account(Services.FILE, resource_types, permission, expiry, start=start, ip=ip, protocol=protocol) def generate_share_shared_access_signature(self, share_name, permission=None, expiry=None, start=None, id=None, ip=None, protocol=None, cache_control=None, content_disposition=None, content_encoding=None, content_language=None, content_type=None): ''' Generates a shared access signature for the share. Use the returned signature with the sas_token parameter of FileService. :param str share_name: Name of share. :param SharePermissions permission: The permissions associated with the shared access signature. The user is restricted to operations allowed by the permissions. Permissions must be ordered read, create, write, delete, list. Required unless an id is given referencing a stored access policy which contains this field. This field must be omitted if it has been specified in an associated stored access policy. :param expiry: The time at which the shared access signature becomes invalid. Required unless an id is given referencing a stored access policy which contains this field. This field must be omitted if it has been specified in an associated stored access policy. Azure will always convert values to UTC. If a date is passed in without timezone info, it is assumed to be UTC. :type expiry: datetime or str :param start: The time at which the shared access signature becomes valid. 
If omitted, start time for this call is assumed to be the time when the storage service receives the request. Azure will always convert values to UTC. If a date is passed in without timezone info, it is assumed to be UTC. :type start: datetime or str :param str id: A unique value up to 64 characters in length that correlates to a stored access policy. To create a stored access policy, use :func:`~set_share_acl`. :param str ip: Specifies an IP address or a range of IP addresses from which to accept requests. If the IP address from which the request originates does not match the IP address or address range specified on the SAS token, the request is not authenticated. For example, specifying sip=168.1.5.65 or sip=168.1.5.60-168.1.5.70 on the SAS restricts the request to those IP addresses. :param str protocol: Specifies the protocol permitted for a request made. Possible values are both HTTPS and HTTP (https,http) or HTTPS only (https). The default value is https,http. Note that HTTP only is not a permitted value. :param str cache_control: Response header value for Cache-Control when resource is accessed using this shared access signature. :param str content_disposition: Response header value for Content-Disposition when resource is accessed using this shared access signature. :param str content_encoding: Response header value for Content-Encoding when resource is accessed using this shared access signature. :param str content_language: Response header value for Content-Language when resource is accessed using this shared access signature. :param str content_type: Response header value for Content-Type when resource is accessed using this shared access signature. :return: A Shared Access Signature (sas) token. :rtype: str ''' _validate_not_none('share_name', share_name) _validate_not_none('self.account_name', self.account_name) _validate_not_none('self.account_key', self.account_key) sas = FileSharedAccessSignature(self.account_name, self.account_key) return sas.generate_share( share_name, permission, expiry, start=start, id=id, ip=ip, protocol=protocol, cache_control=cache_control, content_disposition=content_disposition, content_encoding=content_encoding, content_language=content_language, content_type=content_type, ) def generate_file_shared_access_signature(self, share_name, directory_name=None, file_name=None, permission=None, expiry=None, start=None, id=None, ip=None, protocol=None, cache_control=None, content_disposition=None, content_encoding=None, content_language=None, content_type=None): ''' Generates a shared access signature for the file. Use the returned signature with the sas_token parameter of FileService. :param str share_name: Name of share. :param str directory_name: Name of directory. SAS tokens cannot be created for directories, so this parameter should only be present if file_name is provided. :param str file_name: Name of file. :param FilePermissions permission: The permissions associated with the shared access signature. The user is restricted to operations allowed by the permissions. Permissions must be ordered read, create, write, delete, list. Required unless an id is given referencing a stored access policy which contains this field. This field must be omitted if it has been specified in an associated stored access policy. :param expiry: The time at which the shared access signature becomes invalid. Required unless an id is given referencing a stored access policy which contains this field. 
This field must be omitted if it has been specified in an associated stored access policy. Azure will always convert values to UTC. If a date is passed in without timezone info, it is assumed to be UTC. :type expiry: datetime or str :param start: The time at which the shared access signature becomes valid. If omitted, start time for this call is assumed to be the time when the storage service receives the request. Azure will always convert values to UTC. If a date is passed in without timezone info, it is assumed to be UTC. :type start: datetime or str :param str id: A unique value up to 64 characters in length that correlates to a stored access policy. To create a stored access policy, use set_file_service_properties. :param str ip: Specifies an IP address or a range of IP addresses from which to accept requests. If the IP address from which the request originates does not match the IP address or address range specified on the SAS token, the request is not authenticated. For example, specifying sip=168.1.5.65 or sip=168.1.5.60-168.1.5.70 on the SAS restricts the request to those IP addresses. :param str protocol: Specifies the protocol permitted for a request made. Possible values are both HTTPS and HTTP (https,http) or HTTPS only (https). The default value is https,http. Note that HTTP only is not a permitted value. :param str cache_control: Response header value for Cache-Control when resource is accessed using this shared access signature. :param str content_disposition: Response header value for Content-Disposition when resource is accessed using this shared access signature. :param str content_encoding: Response header value for Content-Encoding when resource is accessed using this shared access signature. :param str content_language: Response header value for Content-Language when resource is accessed using this shared access signature. :param str content_type: Response header value for Content-Type when resource is accessed using this shared access signature. :return: A Shared Access Signature (sas) token. :rtype: str ''' _validate_not_none('share_name', share_name) _validate_not_none('file_name', file_name) _validate_not_none('self.account_name', self.account_name) _validate_not_none('self.account_key', self.account_key) sas = FileSharedAccessSignature(self.account_name, self.account_key) return sas.generate_file( share_name, directory_name, file_name, permission, expiry, start=start, id=id, ip=ip, protocol=protocol, cache_control=cache_control, content_disposition=content_disposition, content_encoding=content_encoding, content_language=content_language, content_type=content_type, ) def set_file_service_properties(self, hour_metrics=None, minute_metrics=None, cors=None, timeout=None): ''' Sets the properties of a storage account's File service, including Azure Storage Analytics. If an element (ex HourMetrics) is left as None, the existing settings on the service for that functionality are preserved. :param Metrics hour_metrics: The hour metrics settings provide a summary of request statistics grouped by API in hourly aggregates for files. :param Metrics minute_metrics: The minute metrics settings provide request statistics for each minute for files. :param cors: You can include up to five CorsRule elements in the list. If an empty list is specified, all CORS rules will be deleted, and CORS will be disabled for the service. :type cors: list(:class:`~azure.storage.common.models.CorsRule`) :param int timeout: The timeout parameter is expressed in seconds. 
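        A hedged sketch of enabling hourly metrics with a 7-day retention
        policy (it assumes ``Metrics`` and ``RetentionPolicy`` can be imported
        from ``azure.storage.common.models``; adjust the import to your
        package layout)::

            from azure.storage.common.models import Metrics, RetentionPolicy

            # Only hour_metrics is supplied, so existing minute metrics and
            # CORS settings on the service are preserved.
            hour_metrics = Metrics(enabled=True, include_apis=True,
                                   retention_policy=RetentionPolicy(enabled=True,
                                                                    days=7))
            file_service.set_file_service_properties(hour_metrics=hour_metrics)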
''' request = HTTPRequest() request.method = 'PUT' request.host_locations = self._get_host_locations() request.path = _get_path() request.query = { 'restype': 'service', 'comp': 'properties', 'timeout': _int_to_str(timeout), } request.body = _get_request_body( _convert_service_properties_to_xml(None, hour_metrics, minute_metrics, cors)) self._perform_request(request) def get_file_service_properties(self, timeout=None): ''' Gets the properties of a storage account's File service, including Azure Storage Analytics. :param int timeout: The timeout parameter is expressed in seconds. :return: The file service properties. :rtype: :class:`~azure.storage.common.models.ServiceProperties` ''' request = HTTPRequest() request.method = 'GET' request.host_locations = self._get_host_locations() request.path = _get_path() request.query = { 'restype': 'service', 'comp': 'properties', 'timeout': _int_to_str(timeout), } return self._perform_request(request, _convert_xml_to_service_properties) def list_shares(self, prefix=None, marker=None, num_results=None, include_metadata=False, timeout=None, include_snapshots=False): ''' Returns a generator to list the shares under the specified account. The generator will lazily follow the continuation tokens returned by the service and stop when all shares have been returned or num_results is reached. If num_results is specified and the account has more than that number of shares, the generator will have a populated next_marker field once it finishes. This marker can be used to create a new generator if more results are desired. :param str prefix: Filters the results to return only shares whose names begin with the specified prefix. :param int num_results: Specifies the maximum number of shares to return. :param bool include_metadata: Specifies that share metadata be returned in the response. :param str marker: An opaque continuation token. This value can be retrieved from the next_marker field of a previous generator object if num_results was specified and that generator has finished enumerating results. If specified, this generator will begin returning results from the point where the previous generator stopped. :param int timeout: The timeout parameter is expressed in seconds. :param bool include_snapshots: Specifies that share snapshots be returned in the response. ''' include = 'snapshots' if include_snapshots else None if include_metadata: if include is not None: include = include + ',metadata' else: include = 'metadata' operation_context = _OperationContext(location_lock=True) kwargs = {'prefix': prefix, 'marker': marker, 'max_results': num_results, 'include': include, 'timeout': timeout, '_context': operation_context} resp = self._list_shares(**kwargs) return ListGenerator(resp, self._list_shares, (), kwargs) def _list_shares(self, prefix=None, marker=None, max_results=None, include=None, timeout=None, _context=None): ''' Returns a list of the shares under the specified account. :param str prefix: Filters the results to return only shares whose names begin with the specified prefix. :param str marker: A string value that identifies the portion of the list to be returned with the next list operation. The operation returns a next_marker value within the response body if the list returned was not complete. The marker value may then be used in a subsequent call to request the next set of list items. The marker value is opaque to the client. :param int max_results: Specifies the maximum number of shares to return. 
A single list request may return up to 1000 shares and potentially a continuation token which should be followed to get additional resutls. :param string include: Include this parameter to specify that either the share's metadata, snapshots or both be returned as part of the response body. set this parameter to string 'metadata' to get share's metadata. set this parameter to 'snapshots' to get all the share snapshots. for both use 'snapshots,metadata'. :param int timeout: The timeout parameter is expressed in seconds. ''' request = HTTPRequest() request.method = 'GET' request.host_locations = self._get_host_locations() request.path = _get_path() request.query = { 'comp': 'list', 'prefix': _to_str(prefix), 'marker': _to_str(marker), 'maxresults': _int_to_str(max_results), 'include': _to_str(include), 'timeout': _int_to_str(timeout), } return self._perform_request(request, _convert_xml_to_shares, operation_context=_context) def create_share(self, share_name, metadata=None, quota=None, fail_on_exist=False, timeout=None): ''' Creates a new share under the specified account. If the share with the same name already exists, the operation fails on the service. By default, the exception is swallowed by the client. To expose the exception, specify True for fail_on_exists. :param str share_name: Name of share to create. :param metadata: A dict with name_value pairs to associate with the share as metadata. Example:{'Category':'test'} :type metadata: dict(str, str) :param int quota: Specifies the maximum size of the share, in gigabytes. Must be greater than 0, and less than or equal to 5TB (5120). :param bool fail_on_exist: Specify whether to throw an exception when the share exists. False by default. :param int timeout: The timeout parameter is expressed in seconds. :return: True if share is created, False if share already exists. :rtype: bool ''' _validate_not_none('share_name', share_name) request = HTTPRequest() request.method = 'PUT' request.host_locations = self._get_host_locations() request.path = _get_path(share_name) request.query = { 'restype': 'share', 'timeout': _int_to_str(timeout), } request.headers = { 'x-ms-share-quota': _int_to_str(quota) } _add_metadata_headers(metadata, request) if not fail_on_exist: try: self._perform_request(request) return True except AzureHttpError as ex: _dont_fail_on_exist(ex) return False else: self._perform_request(request) return True def snapshot_share(self, share_name, metadata=None, quota=None, timeout=None): ''' Creates a snapshot of an existing share under the specified account. :param str share_name: The name of the share to create a snapshot of. :param metadata: A dict with name_value pairs to associate with the share as metadata. Example:{'Category':'test'} :type metadata: a dict of str to str: :param int quota: Specifies the maximum size of the share, in gigabytes. Must be greater than 0, and less than or equal to 5TB (5120). :param int timeout: The timeout parameter is expressed in seconds. 
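        A hedged usage sketch (it assumes the returned ``Share`` object exposes
        the snapshot timestamp through a ``snapshot`` attribute; share name and
        metadata are illustrative only)::

            snap = file_service.snapshot_share('myshare',
                                               metadata={'purpose': 'backup'})
            # The identifier can later be passed as the `snapshot` argument of
            # get_share_properties, list_directories_and_files, and similar calls.
            print(snap.snapshot)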
:return: snapshot properties :rtype: azure.storage.file.models.Share ''' _validate_not_none('share_name', share_name) request = HTTPRequest() request.method = 'PUT' request.host_locations = self._get_host_locations() request.path = _get_path(share_name) request.query = { 'restype': 'share', 'comp': 'snapshot', 'timeout': _int_to_str(timeout), } request.headers = { 'x-ms-share-quota': _int_to_str(quota) } _add_metadata_headers(metadata, request) return self._perform_request(request, _parse_snapshot_share, [share_name]) def get_share_properties(self, share_name, timeout=None, snapshot=None): ''' Returns all user-defined metadata and system properties for the specified share. The data returned does not include the shares's list of files or directories. :param str share_name: Name of existing share. :param int timeout: The timeout parameter is expressed in seconds. :param str snapshot: A string that represents the snapshot version, if applicable. :return: A Share that exposes properties and metadata. :rtype: :class:`~azure.storage.file.models.Share` ''' _validate_not_none('share_name', share_name) request = HTTPRequest() request.method = 'GET' request.host_locations = self._get_host_locations() request.path = _get_path(share_name) request.query = { 'restype': 'share', 'timeout': _int_to_str(timeout), 'sharesnapshot': _to_str(snapshot) } return self._perform_request(request, _parse_share, [share_name]) def set_share_properties(self, share_name, quota, timeout=None): ''' Sets service-defined properties for the specified share. :param str share_name: Name of existing share. :param int quota: Specifies the maximum size of the share, in gigabytes. Must be greater than 0, and less than or equal to 5 TB (5120 GB). :param int timeout: The timeout parameter is expressed in seconds. ''' _validate_not_none('share_name', share_name) _validate_not_none('quota', quota) request = HTTPRequest() request.method = 'PUT' request.host_locations = self._get_host_locations() request.path = _get_path(share_name) request.query = { 'restype': 'share', 'comp': 'properties', 'timeout': _int_to_str(timeout), } request.headers = { 'x-ms-share-quota': _int_to_str(quota) } self._perform_request(request) def get_share_metadata(self, share_name, timeout=None, snapshot=None): ''' Returns all user-defined metadata for the specified share. :param str share_name: Name of existing share. :param int timeout: The timeout parameter is expressed in seconds. :param str snapshot: A string that represents the snapshot version, if applicable. :return: A dictionary representing the share metadata name, value pairs. :rtype: dict(str, str) ''' _validate_not_none('share_name', share_name) request = HTTPRequest() request.method = 'GET' request.host_locations = self._get_host_locations() request.path = _get_path(share_name) request.query = { 'restype': 'share', 'comp': 'metadata', 'timeout': _int_to_str(timeout), 'sharesnapshot': _to_str(snapshot), } return self._perform_request(request, _parse_metadata) def set_share_metadata(self, share_name, metadata=None, timeout=None): ''' Sets one or more user-defined name-value pairs for the specified share. Each call to this operation replaces all existing metadata attached to the share. To remove all metadata from the share, call this operation with no metadata dict. :param str share_name: Name of existing share. :param metadata: A dict containing name-value pairs to associate with the share as metadata. 
Example: {'category':'test'} :type metadata: dict(str, str) :param int timeout: The timeout parameter is expressed in seconds. ''' _validate_not_none('share_name', share_name) request = HTTPRequest() request.method = 'PUT' request.host_locations = self._get_host_locations() request.path = _get_path(share_name) request.query = { 'restype': 'share', 'comp': 'metadata', 'timeout': _int_to_str(timeout), } _add_metadata_headers(metadata, request) self._perform_request(request) def get_share_acl(self, share_name, timeout=None): ''' Gets the permissions for the specified share. :param str share_name: Name of existing share. :param int timeout: The timeout parameter is expressed in seconds. :return: A dictionary of access policies associated with the share. :rtype: dict(str, :class:`~azure.storage.common.models.AccessPolicy`) ''' _validate_not_none('share_name', share_name) request = HTTPRequest() request.method = 'GET' request.host_locations = self._get_host_locations() request.path = _get_path(share_name) request.query = { 'restype': 'share', 'comp': 'acl', 'timeout': _int_to_str(timeout), } return self._perform_request(request, _convert_xml_to_signed_identifiers) def set_share_acl(self, share_name, signed_identifiers=None, timeout=None): ''' Sets the permissions for the specified share or stored access policies that may be used with Shared Access Signatures. :param str share_name: Name of existing share. :param signed_identifiers: A dictionary of access policies to associate with the share. The dictionary may contain up to 5 elements. An empty dictionary will clear the access policies set on the service. :type signed_identifiers: dict(str, :class:`~azure.storage.common.models.AccessPolicy`) :param int timeout: The timeout parameter is expressed in seconds. ''' _validate_not_none('share_name', share_name) _validate_access_policies(signed_identifiers) request = HTTPRequest() request.method = 'PUT' request.host_locations = self._get_host_locations() request.path = _get_path(share_name) request.query = { 'restype': 'share', 'comp': 'acl', 'timeout': _int_to_str(timeout), } request.body = _get_request_body( _convert_signed_identifiers_to_xml(signed_identifiers)) self._perform_request(request) def get_share_stats(self, share_name, timeout=None): ''' Gets the approximate size of the data stored on the share, rounded up to the nearest gigabyte. Note that this value may not include all recently created or recently resized files. :param str share_name: Name of existing share. :param int timeout: The timeout parameter is expressed in seconds. :return: the approximate size of the data stored on the share. :rtype: int ''' _validate_not_none('share_name', share_name) request = HTTPRequest() request.method = 'GET' request.host_locations = self._get_host_locations() request.path = _get_path(share_name) request.query = { 'restype': 'share', 'comp': 'stats', 'timeout': _int_to_str(timeout), } return self._perform_request(request, _convert_xml_to_share_stats) def delete_share(self, share_name, fail_not_exist=False, timeout=None, snapshot=None, delete_snapshots=None): ''' Marks the specified share for deletion. If the share does not exist, the operation fails on the service. By default, the exception is swallowed by the client. To expose the exception, specify True for fail_not_exist. :param str share_name: Name of share to delete. :param bool fail_not_exist: Specify whether to throw an exception when the share doesn't exist. False by default. :param int timeout: The timeout parameter is expressed in seconds. 
:param str snapshot: A string that represents the snapshot version, if applicable. Specify this argument to delete a specific snapshot only. delete_snapshots must be None if this is specified. :param ~azure.storage.file.models.DeleteSnapshot delete_snapshots: To delete a share that has snapshots, this must be specified as DeleteSnapshot.Include. :return: True if share is deleted, False share doesn't exist. :rtype: bool ''' _validate_not_none('share_name', share_name) request = HTTPRequest() request.method = 'DELETE' request.host_locations = self._get_host_locations() request.path = _get_path(share_name) request.headers = { 'x-ms-delete-snapshots': _to_str(delete_snapshots) } request.query = { 'restype': 'share', 'timeout': _int_to_str(timeout), 'sharesnapshot': _to_str(snapshot), } if not fail_not_exist: try: self._perform_request(request) return True except AzureHttpError as ex: _dont_fail_not_exist(ex) return False else: self._perform_request(request) return True def create_directory(self, share_name, directory_name, metadata=None, fail_on_exist=False, timeout=None): ''' Creates a new directory under the specified share or parent directory. If the directory with the same name already exists, the operation fails on the service. By default, the exception is swallowed by the client. To expose the exception, specify True for fail_on_exists. :param str share_name: Name of existing share. :param str directory_name: Name of directory to create, including the path to the parent directory. :param metadata: A dict with name_value pairs to associate with the share as metadata. Example:{'Category':'test'} :type metadata: dict(str, str): :param bool fail_on_exist: specify whether to throw an exception when the directory exists. False by default. :param int timeout: The timeout parameter is expressed in seconds. :return: True if directory is created, False if directory already exists. :rtype: bool ''' _validate_not_none('share_name', share_name) _validate_not_none('directory_name', directory_name) request = HTTPRequest() request.method = 'PUT' request.host_locations = self._get_host_locations() request.path = _get_path(share_name, directory_name) request.query = { 'restype': 'directory', 'timeout': _int_to_str(timeout), } _add_metadata_headers(metadata, request) if not fail_on_exist: try: self._perform_request(request) return True except AzureHttpError as ex: _dont_fail_on_exist(ex) return False else: self._perform_request(request) return True def delete_directory(self, share_name, directory_name, fail_not_exist=False, timeout=None): ''' Deletes the specified empty directory. Note that the directory must be empty before it can be deleted. Attempting to delete directories that are not empty will fail. If the directory does not exist, the operation fails on the service. By default, the exception is swallowed by the client. To expose the exception, specify True for fail_not_exist. :param str share_name: Name of existing share. :param str directory_name: Name of directory to delete, including the path to the parent directory. :param bool fail_not_exist: Specify whether to throw an exception when the directory doesn't exist. :param int timeout: The timeout parameter is expressed in seconds. :return: True if directory is deleted, False otherwise. 
:rtype: bool ''' _validate_not_none('share_name', share_name) _validate_not_none('directory_name', directory_name) request = HTTPRequest() request.method = 'DELETE' request.host_locations = self._get_host_locations() request.path = _get_path(share_name, directory_name) request.query = { 'restype': 'directory', 'timeout': _int_to_str(timeout), } if not fail_not_exist: try: self._perform_request(request) return True except AzureHttpError as ex: _dont_fail_not_exist(ex) return False else: self._perform_request(request) return True def get_directory_properties(self, share_name, directory_name, timeout=None, snapshot=None): ''' Returns all user-defined metadata and system properties for the specified directory. The data returned does not include the directory's list of files. :param str share_name: Name of existing share. :param str directory_name: The path to an existing directory. :param int timeout: The timeout parameter is expressed in seconds. :return: properties for the specified directory within a directory object. :param str snapshot: A string that represents the snapshot version, if applicable. :rtype: :class:`~azure.storage.file.models.Directory` ''' _validate_not_none('share_name', share_name) _validate_not_none('directory_name', directory_name) request = HTTPRequest() request.method = 'GET' request.host_locations = self._get_host_locations() request.path = _get_path(share_name, directory_name) request.query = { 'restype': 'directory', 'timeout': _int_to_str(timeout), 'sharesnapshot': _to_str(snapshot) } return self._perform_request(request, _parse_directory, [directory_name]) def get_directory_metadata(self, share_name, directory_name, timeout=None, snapshot=None): ''' Returns all user-defined metadata for the specified directory. :param str share_name: Name of existing share. :param str directory_name: The path to the directory. :param int timeout: The timeout parameter is expressed in seconds. :param str snapshot: A string that represents the snapshot version, if applicable. :return: A dictionary representing the directory metadata name, value pairs. :rtype: dict(str, str) ''' _validate_not_none('share_name', share_name) _validate_not_none('directory_name', directory_name) request = HTTPRequest() request.method = 'GET' request.host_locations = self._get_host_locations() request.path = _get_path(share_name, directory_name) request.query = { 'restype': 'directory', 'comp': 'metadata', 'timeout': _int_to_str(timeout), 'sharesnapshot': _to_str(snapshot) } return self._perform_request(request, _parse_metadata) def set_directory_metadata(self, share_name, directory_name, metadata=None, timeout=None): ''' Sets one or more user-defined name-value pairs for the specified directory. Each call to this operation replaces all existing metadata attached to the directory. To remove all metadata from the directory, call this operation with no metadata dict. :param str share_name: Name of existing share. :param str directory_name: The path to the directory. :param metadata: A dict containing name-value pairs to associate with the directory as metadata. Example: {'category':'test'} :type metadata: dict(str, str). :param int timeout: The timeout parameter is expressed in seconds. 
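        A short usage sketch (share and directory names are illustrative only);
        each call replaces whatever metadata is already on the directory::

            file_service.set_directory_metadata('myshare', 'mydir',
                                                metadata={'category': 'test'})
            # Calling without a metadata dict clears all metadata:
            file_service.set_directory_metadata('myshare', 'mydir')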
''' _validate_not_none('share_name', share_name) _validate_not_none('directory_name', directory_name) request = HTTPRequest() request.method = 'PUT' request.host_locations = self._get_host_locations() request.path = _get_path(share_name, directory_name) request.query = { 'restype': 'directory', 'comp': 'metadata', 'timeout': _int_to_str(timeout), } _add_metadata_headers(metadata, request) self._perform_request(request) def list_directories_and_files(self, share_name, directory_name=None, num_results=None, marker=None, timeout=None, prefix=None, snapshot=None): ''' Returns a generator to list the directories and files under the specified share. The generator will lazily follow the continuation tokens returned by the service and stop when all directories and files have been returned or num_results is reached. If num_results is specified and the share has more than that number of files and directories, the generator will have a populated next_marker field once it finishes. This marker can be used to create a new generator if more results are desired. :param str share_name: Name of existing share. :param str directory_name: The path to the directory. :param int num_results: Specifies the maximum number of files to return, including all directory elements. If the request does not specify num_results or specifies a value greater than 5,000, the server will return up to 5,000 items. Setting num_results to a value less than or equal to zero results in error response code 400 (Bad Request). :param str marker: An opaque continuation token. This value can be retrieved from the next_marker field of a previous generator object if num_results was specified and that generator has finished enumerating results. If specified, this generator will begin returning results from the point where the previous generator stopped. :param int timeout: The timeout parameter is expressed in seconds. :param str prefix: List only the files and/or directories with the given prefix. :param str snapshot: A string that represents the snapshot version, if applicable. ''' operation_context = _OperationContext(location_lock=True) args = (share_name, directory_name) kwargs = {'marker': marker, 'max_results': num_results, 'timeout': timeout, '_context': operation_context, 'prefix': prefix, 'snapshot': snapshot} resp = self._list_directories_and_files(*args, **kwargs) return ListGenerator(resp, self._list_directories_and_files, args, kwargs) def _list_directories_and_files(self, share_name, directory_name=None, marker=None, max_results=None, timeout=None, prefix=None, _context=None, snapshot=None): ''' Returns a list of the directories and files under the specified share. :param str share_name: Name of existing share. :param str directory_name: The path to the directory. :param str marker: A string value that identifies the portion of the list to be returned with the next list operation. The operation returns a next_marker value within the response body if the list returned was not complete. The marker value may then be used in a subsequent call to request the next set of list items. The marker value is opaque to the client. :param int max_results: Specifies the maximum number of files to return, including all directory elements. If the request does not specify max_results or specifies a value greater than 5,000, the server will return up to 5,000 items. Setting max_results to a value less than or equal to zero results in error response code 400 (Bad Request). :param int timeout: The timeout parameter is expressed in seconds. 
:param str prefix: List only the files and/or directories with the given prefix. :param str snapshot: A string that represents the snapshot version, if applicable. ''' _validate_not_none('share_name', share_name) request = HTTPRequest() request.method = 'GET' request.host_locations = self._get_host_locations() request.path = _get_path(share_name, directory_name) request.query = { 'restype': 'directory', 'comp': 'list', 'prefix': _to_str(prefix), 'marker': _to_str(marker), 'maxresults': _int_to_str(max_results), 'timeout': _int_to_str(timeout), 'sharesnapshot': _to_str(snapshot) } return self._perform_request(request, _convert_xml_to_directories_and_files, operation_context=_context) def get_file_properties(self, share_name, directory_name, file_name, timeout=None, snapshot=None): ''' Returns all user-defined metadata, standard HTTP properties, and system properties for the file. Returns an instance of :class:`~azure.storage.file.models.File` with :class:`~azure.storage.file.models.FileProperties` and a metadata dict. :param str share_name: Name of existing share. :param str directory_name: The path to the directory. :param str file_name: Name of existing file. :param int timeout: The timeout parameter is expressed in seconds. :param str snapshot: A string that represents the snapshot version, if applicable. :return: a file object including properties and metadata. :rtype: :class:`~azure.storage.file.models.File` ''' _validate_not_none('share_name', share_name) _validate_not_none('file_name', file_name) request = HTTPRequest() request.method = 'HEAD' request.host_locations = self._get_host_locations() request.path = _get_path(share_name, directory_name, file_name) request.query = { 'timeout': _int_to_str(timeout), 'sharesnapshot': _to_str(snapshot)} return self._perform_request(request, _parse_file, [file_name]) def exists(self, share_name, directory_name=None, file_name=None, timeout=None, snapshot=None): ''' Returns a boolean indicating whether the share exists if only share name is given. If directory_name is specificed a boolean will be returned indicating if the directory exists. If file_name is specified as well, a boolean will be returned indicating if the file exists. :param str share_name: Name of a share. :param str directory_name: The path to a directory. :param str file_name: Name of a file. :param int timeout: The timeout parameter is expressed in seconds. :param str snapshot: A string that represents the snapshot version, if applicable. :return: A boolean indicating whether the resource exists. :rtype: bool ''' _validate_not_none('share_name', share_name) try: if file_name is not None: self.get_file_properties(share_name, directory_name, file_name, timeout=timeout, snapshot=snapshot) elif directory_name is not None: self.get_directory_properties(share_name, directory_name, timeout=timeout, snapshot=snapshot) else: self.get_share_properties(share_name, timeout=timeout, snapshot=snapshot) return True except AzureHttpError as ex: _dont_fail_not_exist(ex) return False def resize_file(self, share_name, directory_name, file_name, content_length, timeout=None): ''' Resizes a file to the specified size. If the specified byte value is less than the current size of the file, then all ranges above the specified byte value are cleared. :param str share_name: Name of existing share. :param str directory_name: The path to the directory. :param str file_name: Name of existing file. :param int content_length: The length to resize the file to. 
:param int timeout: The timeout parameter is expressed in seconds. ''' _validate_not_none('share_name', share_name) _validate_not_none('file_name', file_name) _validate_not_none('content_length', content_length) request = HTTPRequest() request.method = 'PUT' request.host_locations = self._get_host_locations() request.path = _get_path(share_name, directory_name, file_name) request.query = { 'comp': 'properties', 'timeout': _int_to_str(timeout), } request.headers = { 'x-ms-content-length': _to_str(content_length) } self._perform_request(request) def set_file_properties(self, share_name, directory_name, file_name, content_settings, timeout=None): ''' Sets system properties on the file. If one property is set for the content_settings, all properties will be overriden. :param str share_name: Name of existing share. :param str directory_name: The path to the directory. :param str file_name: Name of existing file. :param ~azure.storage.file.models.ContentSettings content_settings: ContentSettings object used to set the file properties. :param int timeout: The timeout parameter is expressed in seconds. ''' _validate_not_none('share_name', share_name) _validate_not_none('file_name', file_name) _validate_not_none('content_settings', content_settings) request = HTTPRequest() request.method = 'PUT' request.host_locations = self._get_host_locations() request.path = _get_path(share_name, directory_name, file_name) request.query = { 'comp': 'properties', 'timeout': _int_to_str(timeout), } request.headers = content_settings._to_headers() self._perform_request(request) def get_file_metadata(self, share_name, directory_name, file_name, timeout=None, snapshot=None): ''' Returns all user-defined metadata for the specified file. :param str share_name: Name of existing share. :param str directory_name: The path to the directory. :param str file_name: Name of existing file. :param int timeout: The timeout parameter is expressed in seconds. :param str snapshot: A string that represents the snapshot version, if applicable. :return: A dictionary representing the file metadata name, value pairs. :rtype: dict(str, str) ''' _validate_not_none('share_name', share_name) _validate_not_none('file_name', file_name) request = HTTPRequest() request.method = 'GET' request.host_locations = self._get_host_locations() request.path = _get_path(share_name, directory_name, file_name) request.query = { 'comp': 'metadata', 'timeout': _int_to_str(timeout), 'sharesnapshot': _to_str(snapshot), } return self._perform_request(request, _parse_metadata) def set_file_metadata(self, share_name, directory_name, file_name, metadata=None, timeout=None): ''' Sets user-defined metadata for the specified file as one or more name-value pairs. :param str share_name: Name of existing share. :param str directory_name: The path to the directory. :param str file_name: Name of existing file. :param metadata: Dict containing name and value pairs. Each call to this operation replaces all existing metadata attached to the file. To remove all metadata from the file, call this operation with no metadata headers. :type metadata: dict(str, str) :param int timeout: The timeout parameter is expressed in seconds. 
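        A short usage sketch (share, directory, and file names are illustrative
        only); each call replaces whatever metadata is already on the file::

            file_service.set_file_metadata('myshare', 'mydir', 'myfile.txt',
                                           metadata={'category': 'test'})
            # Calling without a metadata dict removes all metadata from the file:
            file_service.set_file_metadata('myshare', 'mydir', 'myfile.txt')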
''' _validate_not_none('share_name', share_name) _validate_not_none('file_name', file_name) request = HTTPRequest() request.method = 'PUT' request.host_locations = self._get_host_locations() request.path = _get_path(share_name, directory_name, file_name) request.query = { 'comp': 'metadata', 'timeout': _int_to_str(timeout), } _add_metadata_headers(metadata, request) self._perform_request(request) def copy_file(self, share_name, directory_name, file_name, copy_source, metadata=None, timeout=None): ''' Copies a file asynchronously. This operation returns a copy operation properties object, including a copy ID you can use to check or abort the copy operation. The File service copies files on a best-effort basis. If the destination file exists, it will be overwritten. The destination file cannot be modified while the copy operation is in progress. :param str share_name: Name of the destination share. The share must exist. :param str directory_name: Name of the destination directory. The directory must exist. :param str file_name: Name of the destination file. If the destination file exists, it will be overwritten. Otherwise, it will be created. :param str copy_source: A URL of up to 2 KB in length that specifies an Azure file or blob. The value should be URL-encoded as it would appear in a request URI. If the source is in another account, the source must either be public or must be authenticated via a shared access signature. If the source is public, no authentication is required. Examples: https://myaccount.file.core.windows.net/myshare/mydir/myfile https://otheraccount.file.core.windows.net/myshare/mydir/myfile?sastoken :param metadata: Name-value pairs associated with the file as metadata. If no name-value pairs are specified, the operation will copy the metadata from the source blob or file to the destination file. If one or more name-value pairs are specified, the destination file is created with the specified metadata, and the metadata is not copied from the source blob or file. :type metadata: dict(str, str). :param int timeout: The timeout parameter is expressed in seconds. :return: Copy operation properties such as status, source, and ID. :rtype: :class:`~azure.storage.file.models.CopyProperties` ''' _validate_not_none('share_name', share_name) _validate_not_none('file_name', file_name) _validate_not_none('copy_source', copy_source) request = HTTPRequest() request.method = 'PUT' request.host_locations = self._get_host_locations() request.path = _get_path(share_name, directory_name, file_name) request.query = {'timeout': _int_to_str(timeout)} request.headers = { 'x-ms-copy-source': _to_str(copy_source), } _add_metadata_headers(metadata, request) return self._perform_request(request, _parse_properties, [FileProperties]).copy def abort_copy_file(self, share_name, directory_name, file_name, copy_id, timeout=None): ''' Aborts a pending copy_file operation, and leaves a destination file with zero length and full metadata. :param str share_name: Name of destination share. :param str directory_name: The path to the directory. :param str file_name: Name of destination file. :param str copy_id: Copy identifier provided in the copy.id of the original copy_file operation. :param int timeout: The timeout parameter is expressed in seconds. 
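        A hedged sketch of starting a copy and aborting it while it is still
        pending (it assumes the ``CopyProperties`` object returned by
        ``copy_file`` exposes ``id`` and ``status`` attributes; names are
        illustrative only)::

            source = file_service.make_file_url('srcshare', 'srcdir', 'src.txt')
            copy = file_service.copy_file('myshare', 'mydir', 'dest.txt', source)
            if copy.status == 'pending':
                file_service.abort_copy_file('myshare', 'mydir', 'dest.txt',
                                             copy.id)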
''' _validate_not_none('share_name', share_name) _validate_not_none('file_name', file_name) _validate_not_none('copy_id', copy_id) request = HTTPRequest() request.method = 'PUT' request.host_locations = self._get_host_locations() request.path = _get_path(share_name, directory_name, file_name) request.query = { 'comp': 'copy', 'copyid': _to_str(copy_id), 'timeout': _int_to_str(timeout), } request.headers = { 'x-ms-copy-action': 'abort', } self._perform_request(request) def delete_file(self, share_name, directory_name, file_name, timeout=None): ''' Marks the specified file for deletion. The file is later deleted during garbage collection. :param str share_name: Name of existing share. :param str directory_name: The path to the directory. :param str file_name: Name of existing file. :param int timeout: The timeout parameter is expressed in seconds. ''' _validate_not_none('share_name', share_name) _validate_not_none('file_name', file_name) request = HTTPRequest() request.method = 'DELETE' request.host_locations = self._get_host_locations() request.path = _get_path(share_name, directory_name, file_name) request.query = {'timeout': _int_to_str(timeout)} self._perform_request(request) def create_file(self, share_name, directory_name, file_name, content_length, content_settings=None, metadata=None, timeout=None): ''' Creates a new file. See create_file_from_* for high level functions that handle the creation and upload of large files with automatic chunking and progress notifications. :param str share_name: Name of existing share. :param str directory_name: The path to the directory. :param str file_name: Name of file to create or update. :param int content_length: Length of the file in bytes. :param ~azure.storage.file.models.ContentSettings content_settings: ContentSettings object used to set file properties. :param metadata: Name-value pairs associated with the file as metadata. :type metadata: dict(str, str) :param int timeout: The timeout parameter is expressed in seconds. ''' _validate_not_none('share_name', share_name) _validate_not_none('file_name', file_name) _validate_not_none('content_length', content_length) request = HTTPRequest() request.method = 'PUT' request.host_locations = self._get_host_locations() request.path = _get_path(share_name, directory_name, file_name) request.query = {'timeout': _int_to_str(timeout)} request.headers = { 'x-ms-content-length': _to_str(content_length), 'x-ms-type': 'file' } _add_metadata_headers(metadata, request) if content_settings is not None: request.headers.update(content_settings._to_headers()) self._perform_request(request) def create_file_from_path(self, share_name, directory_name, file_name, local_file_path, content_settings=None, metadata=None, validate_content=False, progress_callback=None, max_connections=2, timeout=None): ''' Creates a new azure file from a local file path, or updates the content of an existing file, with automatic chunking and progress notifications. :param str share_name: Name of existing share. :param str directory_name: The path to the directory. :param str file_name: Name of file to create or update. :param str local_file_path: Path of the local file to upload as the file content. :param ~azure.storage.file.models.ContentSettings content_settings: ContentSettings object used for setting file properties. :param metadata: Name-value pairs associated with the file as metadata. :type metadata: dict(str, str) :param bool validate_content: If true, calculates an MD5 hash for each range of the file. 
The storage service checks the hash of the content that has arrived with the hash that was sent. This is primarily valuable for detecting bitflips on the wire if using http instead of https as https (the default) will already validate. Note that this MD5 hash is not stored with the file. :param progress_callback: Callback for progress with signature function(current, total) where current is the number of bytes transfered so far and total is the size of the file, or None if the total size is unknown. :type progress_callback: func(current, total) :param int max_connections: Maximum number of parallel connections to use. :param int timeout: The timeout parameter is expressed in seconds. This method may make multiple calls to the Azure service and the timeout will apply to each call individually. ''' _validate_not_none('share_name', share_name) _validate_not_none('file_name', file_name) _validate_not_none('local_file_path', local_file_path) count = path.getsize(local_file_path) with open(local_file_path, 'rb') as stream: self.create_file_from_stream( share_name, directory_name, file_name, stream, count, content_settings, metadata, validate_content, progress_callback, max_connections, timeout) def create_file_from_text(self, share_name, directory_name, file_name, text, encoding='utf-8', content_settings=None, metadata=None, validate_content=False, timeout=None): ''' Creates a new file from str/unicode, or updates the content of an existing file, with automatic chunking and progress notifications. :param str share_name: Name of existing share. :param str directory_name: The path to the directory. :param str file_name: Name of file to create or update. :param str text: Text to upload to the file. :param str encoding: Python encoding to use to convert the text to bytes. :param ~azure.storage.file.models.ContentSettings content_settings: ContentSettings object used to set file properties. :param metadata: Name-value pairs associated with the file as metadata. :type metadata: dict(str, str) :param bool validate_content: If true, calculates an MD5 hash for each range of the file. The storage service checks the hash of the content that has arrived with the hash that was sent. This is primarily valuable for detecting bitflips on the wire if using http instead of https as https (the default) will already validate. Note that this MD5 hash is not stored with the file. :param int timeout: The timeout parameter is expressed in seconds. This method may make multiple calls to the Azure service and the timeout will apply to each call individually. ''' _validate_not_none('share_name', share_name) _validate_not_none('file_name', file_name) _validate_not_none('text', text) if not isinstance(text, bytes): _validate_not_none('encoding', encoding) text = text.encode(encoding) self.create_file_from_bytes( share_name, directory_name, file_name, text, count=len(text), content_settings=content_settings, metadata=metadata, validate_content=validate_content, timeout=timeout) def create_file_from_bytes( self, share_name, directory_name, file_name, file, index=0, count=None, content_settings=None, metadata=None, validate_content=False, progress_callback=None, max_connections=2, timeout=None): ''' Creates a new file from an array of bytes, or updates the content of an existing file, with automatic chunking and progress notifications. :param str share_name: Name of existing share. :param str directory_name: The path to the directory. :param str file_name: Name of file to create or update. 
:param str file: Content of file as an array of bytes. :param int index: Start index in the array of bytes. :param int count: Number of bytes to upload. Set to None or negative value to upload all bytes starting from index. :param ~azure.storage.file.models.ContentSettings content_settings: ContentSettings object used to set file properties. :param metadata: Name-value pairs associated with the file as metadata. :type metadata: dict(str, str) :param bool validate_content: If true, calculates an MD5 hash for each range of the file. The storage service checks the hash of the content that has arrived with the hash that was sent. This is primarily valuable for detecting bitflips on the wire if using http instead of https as https (the default) will already validate. Note that this MD5 hash is not stored with the file. :param progress_callback: Callback for progress with signature function(current, total) where current is the number of bytes transfered so far and total is the size of the file, or None if the total size is unknown. :type progress_callback: func(current, total) :param int max_connections: Maximum number of parallel connections to use. :param int timeout: The timeout parameter is expressed in seconds. This method may make multiple calls to the Azure service and the timeout will apply to each call individually. ''' _validate_not_none('share_name', share_name) _validate_not_none('file_name', file_name) _validate_not_none('file', file) _validate_type_bytes('file', file) if index < 0: raise TypeError(_ERROR_VALUE_NEGATIVE.format('index')) if count is None or count < 0: count = len(file) - index stream = BytesIO(file) stream.seek(index) self.create_file_from_stream( share_name, directory_name, file_name, stream, count, content_settings, metadata, validate_content, progress_callback, max_connections, timeout) def create_file_from_stream( self, share_name, directory_name, file_name, stream, count, content_settings=None, metadata=None, validate_content=False, progress_callback=None, max_connections=2, timeout=None): ''' Creates a new file from a file/stream, or updates the content of an existing file, with automatic chunking and progress notifications. :param str share_name: Name of existing share. :param str directory_name: The path to the directory. :param str file_name: Name of file to create or update. :param io.IOBase stream: Opened file/stream to upload as the file content. :param int count: Number of bytes to read from the stream. This is required, a file cannot be created if the count is unknown. :param ~azure.storage.file.models.ContentSettings content_settings: ContentSettings object used to set file properties. :param metadata: Name-value pairs associated with the file as metadata. :type metadata: dict(str, str) :param bool validate_content: If true, calculates an MD5 hash for each range of the file. The storage service checks the hash of the content that has arrived with the hash that was sent. This is primarily valuable for detecting bitflips on the wire if using http instead of https as https (the default) will already validate. Note that this MD5 hash is not stored with the file. :param progress_callback: Callback for progress with signature function(current, total) where current is the number of bytes transfered so far and total is the size of the file, or None if the total size is unknown. :type progress_callback: func(current, total) :param int max_connections: Maximum number of parallel connections to use. Note that parallel upload requires the stream to be seekable. 
:param int timeout: The timeout parameter is expressed in seconds. This method may make multiple calls to the Azure service and the timeout will apply to each call individually. ''' _validate_not_none('share_name', share_name) _validate_not_none('file_name', file_name) _validate_not_none('stream', stream) _validate_not_none('count', count) if count < 0: raise TypeError(_ERROR_VALUE_NEGATIVE.format('count')) self.create_file( share_name, directory_name, file_name, count, content_settings, metadata, timeout ) _upload_file_chunks( self, share_name, directory_name, file_name, count, self.MAX_RANGE_SIZE, stream, max_connections, progress_callback, validate_content, timeout ) def _get_file(self, share_name, directory_name, file_name, start_range=None, end_range=None, validate_content=False, timeout=None, _context=None, snapshot=None): ''' Downloads a file's content, metadata, and properties. You can specify a range if you don't need to download the file in its entirety. If no range is specified, the full file will be downloaded. See get_file_to_* for high level functions that handle the download of large files with automatic chunking and progress notifications. :param str share_name: Name of existing share. :param str directory_name: The path to the directory. :param str file_name: Name of existing file. :param int start_range: Start of byte range to use for downloading a section of the file. If no end_range is given, all bytes after the start_range will be downloaded. The start_range and end_range params are inclusive. Ex: start_range=0, end_range=511 will download first 512 bytes of file. :param int end_range: End of byte range to use for downloading a section of the file. If end_range is given, start_range must be provided. The start_range and end_range params are inclusive. Ex: start_range=0, end_range=511 will download first 512 bytes of file. :param bool validate_content: When this is set to True and specified together with the Range header, the service returns the MD5 hash for the range, as long as the range is less than or equal to 4 MB in size. :param int timeout: The timeout parameter is expressed in seconds. :param str snapshot: A string that represents the snapshot version, if applicable. :return: A File with content, properties, and metadata. :rtype: :class:`~azure.storage.file.models.File` ''' _validate_not_none('share_name', share_name) _validate_not_none('file_name', file_name) request = HTTPRequest() request.method = 'GET' request.host_locations = self._get_host_locations() request.path = _get_path(share_name, directory_name, file_name) request.query = { 'timeout': _int_to_str(timeout), 'sharesnapshot': _to_str(snapshot)} _validate_and_format_range_headers( request, start_range, end_range, start_range_required=False, end_range_required=False, check_content_md5=validate_content) return self._perform_request(request, _parse_file, [file_name, validate_content], operation_context=_context) def get_file_to_path(self, share_name, directory_name, file_name, file_path, open_mode='wb', start_range=None, end_range=None, validate_content=False, progress_callback=None, max_connections=2, timeout=None, snapshot=None): ''' Downloads a file to a file path, with automatic chunking and progress notifications. Returns an instance of File with properties and metadata. :param str share_name: Name of existing share. :param str directory_name: The path to the directory. :param str file_name: Name of existing file. :param str file_path: Path of file to write to. 
        :param str open_mode:
            Mode to use when opening the file. Note that specifying an
            append-only open_mode prevents parallel download. So, max_connections
            must be set to 1 if this open_mode is used.
        :param int start_range:
            Start of byte range to use for downloading a section of the file.
            If no end_range is given, all bytes after the start_range will be
            downloaded. The start_range and end_range params are inclusive.
            Ex: start_range=0, end_range=511 will download the first 512 bytes
            of the file.
        :param int end_range:
            End of byte range to use for downloading a section of the file.
            If end_range is given, start_range must be provided.
            The start_range and end_range params are inclusive.
            Ex: start_range=0, end_range=511 will download the first 512 bytes
            of the file.
        :param bool validate_content:
            If set to true, validates an MD5 hash for each retrieved portion of
            the file. This is primarily valuable for detecting bitflips on the
            wire if using http instead of https as https (the default) will
            already validate. Note that the service will only return
            transactional MD5s for chunks 4MB or less so the first get request
            will be of size self.MAX_CHUNK_GET_SIZE instead of
            self.MAX_SINGLE_GET_SIZE. If self.MAX_CHUNK_GET_SIZE was set to
            greater than 4MB an error will be thrown. As computing the MD5
            takes processing time and more requests will need to be done due to
            the reduced chunk size there may be some increase in latency.
        :param progress_callback:
            Callback for progress with signature function(current, total)
            where current is the number of bytes transferred so far, and total
            is the size of the file if known.
        :type progress_callback: func(current, total)
        :param int max_connections:
            If set to 2 or greater, an initial get will be done for the first
            self.MAX_SINGLE_GET_SIZE bytes of the file. If this is the entire
            file, the method returns at this point. If it is not, it will
            download the remaining data in parallel using a number of threads
            equal to max_connections. Each chunk will be of size
            self.MAX_CHUNK_GET_SIZE. If set to 1, a single large get request
            will be done. This is not generally recommended but available if
            very few threads should be used, network requests are very
            expensive, or a non-seekable stream prevents parallel download.
            This may also be valuable if the file is being concurrently
            modified to enforce atomicity, or if many files are expected to be
            empty, as an extra request is required for empty files if
            max_connections is greater than 1.
        :param int timeout:
            The timeout parameter is expressed in seconds. This method may make
            multiple calls to the Azure service and the timeout will apply to
            each call individually.
        :param str snapshot:
            A string that represents the snapshot version, if applicable.
        :return: A File with properties and metadata.
:rtype: :class:`~azure.storage.file.models.File` ''' _validate_not_none('share_name', share_name) _validate_not_none('file_name', file_name) _validate_not_none('file_path', file_path) _validate_not_none('open_mode', open_mode) if max_connections > 1 and 'a' in open_mode: raise ValueError(_ERROR_PARALLEL_NOT_SEEKABLE) with open(file_path, open_mode) as stream: file = self.get_file_to_stream( share_name, directory_name, file_name, stream, start_range, end_range, validate_content, progress_callback, max_connections, timeout, snapshot) return file def get_file_to_stream( self, share_name, directory_name, file_name, stream, start_range=None, end_range=None, validate_content=False, progress_callback=None, max_connections=2, timeout=None, snapshot=None): ''' Downloads a file to a stream, with automatic chunking and progress notifications. Returns an instance of :class:`~azure.storage.file.models.File` with properties and metadata. :param str share_name: Name of existing share. :param str directory_name: The path to the directory. :param str file_name: Name of existing file. :param io.IOBase stream: Opened file/stream to write to. :param int start_range: Start of byte range to use for downloading a section of the file. If no end_range is given, all bytes after the start_range will be downloaded. The start_range and end_range params are inclusive. Ex: start_range=0, end_range=511 will download first 512 bytes of file. :param int end_range: End of byte range to use for downloading a section of the file. If end_range is given, start_range must be provided. The start_range and end_range params are inclusive. Ex: start_range=0, end_range=511 will download first 512 bytes of file. :param bool validate_content: If set to true, validates an MD5 hash for each retrieved portion of the file. This is primarily valuable for detecting bitflips on the wire if using http instead of https as https (the default) will already validate. Note that the service will only return transactional MD5s for chunks 4MB or less so the first get request will be of size self.MAX_CHUNK_GET_SIZE instead of self.MAX_SINGLE_GET_SIZE. If self.MAX_CHUNK_GET_SIZE was set to greater than 4MB an error will be thrown. As computing the MD5 takes processing time and more requests will need to be done due to the reduced chunk size there may be some increase in latency. :param progress_callback: Callback for progress with signature function(current, total) where current is the number of bytes transfered so far, and total is the size of the file if known. :type progress_callback: func(current, total) :param int max_connections: If set to 2 or greater, an initial get will be done for the first self.MAX_SINGLE_GET_SIZE bytes of the file. If this is the entire file, the method returns at this point. If it is not, it will download the remaining data parallel using the number of threads equal to max_connections. Each chunk will be of size self.MAX_CHUNK_GET_SIZE. If set to 1, a single large get request will be done. This is not generally recommended but available if very few threads should be used, network requests are very expensive, or a non-seekable stream prevents parallel download. This may also be valuable if the file is being concurrently modified to enforce atomicity or if many files are expected to be empty as an extra request is required for empty files if max_connections is greater than 1. :param int timeout: The timeout parameter is expressed in seconds. 
This method may make multiple calls to the Azure service and the timeout will apply to each call individually. :param str snapshot: A string that represents the snapshot version, if applicable. :return: A File with properties and metadata. :rtype: :class:`~azure.storage.file.models.File` ''' _validate_not_none('share_name', share_name) _validate_not_none('file_name', file_name) _validate_not_none('stream', stream) # If the user explicitly sets max_connections to 1, do a single shot download if max_connections == 1: file = self._get_file(share_name, directory_name, file_name, start_range=start_range, end_range=end_range, validate_content=validate_content, timeout=timeout, snapshot=snapshot) # Set the download size download_size = file.properties.content_length # If max_connections is greater than 1, do the first get to establish the # size of the file and get the first segment of data else: if sys.version_info >= (3,) and not stream.seekable(): raise ValueError(_ERROR_PARALLEL_NOT_SEEKABLE) # The service only provides transactional MD5s for chunks under 4MB. # If validate_content is on, get only self.MAX_CHUNK_GET_SIZE for the first # chunk so a transactional MD5 can be retrieved. first_get_size = self.MAX_SINGLE_GET_SIZE if not validate_content else self.MAX_CHUNK_GET_SIZE initial_request_start = start_range if start_range else 0 if end_range is not None and end_range - start_range < first_get_size: initial_request_end = end_range else: initial_request_end = initial_request_start + first_get_size - 1 # Send a context object to make sure we always retry to the initial location operation_context = _OperationContext(location_lock=True) try: file = self._get_file(share_name, directory_name, file_name, start_range=initial_request_start, end_range=initial_request_end, validate_content=validate_content, timeout=timeout, _context=operation_context, snapshot=snapshot) # Parse the total file size and adjust the download size if ranges # were specified file_size = _parse_length_from_content_range(file.properties.content_range) if end_range: # Use the end_range unless it is over the end of the file download_size = min(file_size, end_range - start_range + 1) elif start_range: download_size = file_size - start_range else: download_size = file_size except AzureHttpError as ex: if not start_range and ex.status_code == 416: # Get range will fail on an empty file. If the user did not # request a range, do a regular get request in order to get # any properties. file = self._get_file(share_name, directory_name, file_name, validate_content=validate_content, timeout=timeout, _context=operation_context, snapshot=snapshot) # Set the download size to empty download_size = 0 else: raise ex # Mark the first progress chunk. If the file is small or this is a single # shot download, this is the only call if progress_callback: progress_callback(file.properties.content_length, download_size) # Write the content to the user stream # Clear file content since output has been written to user stream if file.content is not None: stream.write(file.content) file.content = None # If the file is small or single shot download was used, the download is # complete at this point. If file size is large, use parallel download. if file.properties.content_length != download_size: # At this point would like to lock on something like the etag so that # if the file is modified, we dont get a corrupted download. However, # this feature is not yet available on the file service. 
end_file = file_size if end_range: # Use the end_range unless it is over the end of the file end_file = min(file_size, end_range + 1) _download_file_chunks( self, share_name, directory_name, file_name, download_size, self.MAX_CHUNK_GET_SIZE, first_get_size, initial_request_end + 1, # start where the first download ended end_file, stream, max_connections, progress_callback, validate_content, timeout, operation_context, snapshot ) # Set the content length to the download size instead of the size of # the last range file.properties.content_length = download_size # Overwrite the content range to the user requested range file.properties.content_range = 'bytes {0}-{1}/{2}'.format(start_range, end_range, file_size) # Overwrite the content MD5 as it is the MD5 for the last range instead # of the stored MD5 # TODO: Set to the stored MD5 when the service returns this file.properties.content_md5 = None return file def get_file_to_bytes(self, share_name, directory_name, file_name, start_range=None, end_range=None, validate_content=False, progress_callback=None, max_connections=2, timeout=None, snapshot=None): ''' Downloads a file as an array of bytes, with automatic chunking and progress notifications. Returns an instance of :class:`~azure.storage.file.models.File` with properties, metadata, and content. :param str share_name: Name of existing share. :param str directory_name: The path to the directory. :param str file_name: Name of existing file. :param int start_range: Start of byte range to use for downloading a section of the file. If no end_range is given, all bytes after the start_range will be downloaded. The start_range and end_range params are inclusive. Ex: start_range=0, end_range=511 will download first 512 bytes of file. :param int end_range: End of byte range to use for downloading a section of the file. If end_range is given, start_range must be provided. The start_range and end_range params are inclusive. Ex: start_range=0, end_range=511 will download first 512 bytes of file. :param bool validate_content: If set to true, validates an MD5 hash for each retrieved portion of the file. This is primarily valuable for detecting bitflips on the wire if using http instead of https as https (the default) will already validate. Note that the service will only return transactional MD5s for chunks 4MB or less so the first get request will be of size self.MAX_CHUNK_GET_SIZE instead of self.MAX_SINGLE_GET_SIZE. If self.MAX_CHUNK_GET_SIZE was set to greater than 4MB an error will be thrown. As computing the MD5 takes processing time and more requests will need to be done due to the reduced chunk size there may be some increase in latency. :param progress_callback: Callback for progress with signature function(current, total) where current is the number of bytes transfered so far, and total is the size of the file if known. :type progress_callback: func(current, total) :param int max_connections: If set to 2 or greater, an initial get will be done for the first self.MAX_SINGLE_GET_SIZE bytes of the file. If this is the entire file, the method returns at this point. If it is not, it will download the remaining data parallel using the number of threads equal to max_connections. Each chunk will be of size self.MAX_CHUNK_GET_SIZE. If set to 1, a single large get request will be done. This is not generally recommended but available if very few threads should be used, network requests are very expensive, or a non-seekable stream prevents parallel download. 
This may also be valuable if the file is being concurrently modified to enforce atomicity or if many files are expected to be empty as an extra request is required for empty files if max_connections is greater than 1. :param int timeout: The timeout parameter is expressed in seconds. This method may make multiple calls to the Azure service and the timeout will apply to each call individually. :param str snapshot: A string that represents the snapshot version, if applicable. :return: A File with properties, content, and metadata. :rtype: :class:`~azure.storage.file.models.File` ''' _validate_not_none('share_name', share_name) _validate_not_none('file_name', file_name) stream = BytesIO() file = self.get_file_to_stream( share_name, directory_name, file_name, stream, start_range, end_range, validate_content, progress_callback, max_connections, timeout, snapshot) file.content = stream.getvalue() return file def get_file_to_text( self, share_name, directory_name, file_name, encoding='utf-8', start_range=None, end_range=None, validate_content=False, progress_callback=None, max_connections=2, timeout=None, snapshot=None): ''' Downloads a file as unicode text, with automatic chunking and progress notifications. Returns an instance of :class:`~azure.storage.file.models.File` with properties, metadata, and content. :param str share_name: Name of existing share. :param str directory_name: The path to the directory. :param str file_name: Name of existing file. :param str encoding: Python encoding to use when decoding the file data. :param int start_range: Start of byte range to use for downloading a section of the file. If no end_range is given, all bytes after the start_range will be downloaded. The start_range and end_range params are inclusive. Ex: start_range=0, end_range=511 will download first 512 bytes of file. :param int end_range: End of byte range to use for downloading a section of the file. If end_range is given, start_range must be provided. The start_range and end_range params are inclusive. Ex: start_range=0, end_range=511 will download first 512 bytes of file. :param bool validate_content: If set to true, validates an MD5 hash for each retrieved portion of the file. This is primarily valuable for detecting bitflips on the wire if using http instead of https as https (the default) will already validate. Note that the service will only return transactional MD5s for chunks 4MB or less so the first get request will be of size self.MAX_CHUNK_GET_SIZE instead of self.MAX_SINGLE_GET_SIZE. If self.MAX_CHUNK_GET_SIZE was set to greater than 4MB an error will be thrown. As computing the MD5 takes processing time and more requests will need to be done due to the reduced chunk size there may be some increase in latency. :param progress_callback: Callback for progress with signature function(current, total) where current is the number of bytes transfered so far, and total is the size of the file if known. :type progress_callback: func(current, total) :param int max_connections: If set to 2 or greater, an initial get will be done for the first self.MAX_SINGLE_GET_SIZE bytes of the file. If this is the entire file, the method returns at this point. If it is not, it will download the remaining data parallel using the number of threads equal to max_connections. Each chunk will be of size self.MAX_CHUNK_GET_SIZE. If set to 1, a single large get request will be done. 
This is not generally recommended but available if very few threads should be used, network requests are very expensive, or a non-seekable stream prevents parallel download. This may also be valuable if the file is being concurrently modified to enforce atomicity or if many files are expected to be empty as an extra request is required for empty files if max_connections is greater than 1. :param int timeout: The timeout parameter is expressed in seconds. This method may make multiple calls to the Azure service and the timeout will apply to each call individually. :param str snapshot: A string that represents the snapshot version, if applicable. :return: A File with properties, content, and metadata. :rtype: :class:`~azure.storage.file.models.File` ''' _validate_not_none('share_name', share_name) _validate_not_none('file_name', file_name) _validate_not_none('encoding', encoding) file = self.get_file_to_bytes( share_name, directory_name, file_name, start_range, end_range, validate_content, progress_callback, max_connections, timeout, snapshot) file.content = file.content.decode(encoding) return file def update_range(self, share_name, directory_name, file_name, data, start_range, end_range, validate_content=False, timeout=None): ''' Writes the bytes specified by the request body into the specified range. :param str share_name: Name of existing share. :param str directory_name: The path to the directory. :param str file_name: Name of existing file. :param bytes data: Content of the range. :param int start_range: Start of byte range to use for updating a section of the file. The range can be up to 4 MB in size. The start_range and end_range params are inclusive. Ex: start_range=0, end_range=511 will download first 512 bytes of file. :param int end_range: End of byte range to use for updating a section of the file. The range can be up to 4 MB in size. The start_range and end_range params are inclusive. Ex: start_range=0, end_range=511 will download first 512 bytes of file. :param bool validate_content: If true, calculates an MD5 hash of the page content. The storage service checks the hash of the content that has arrived with the hash that was sent. This is primarily valuable for detecting bitflips on the wire if using http instead of https as https (the default) will already validate. Note that this MD5 hash is not stored with the file. :param int timeout: The timeout parameter is expressed in seconds. ''' _validate_not_none('share_name', share_name) _validate_not_none('file_name', file_name) _validate_not_none('data', data) request = HTTPRequest() request.method = 'PUT' request.host_locations = self._get_host_locations() request.path = _get_path(share_name, directory_name, file_name) request.query = { 'comp': 'range', 'timeout': _int_to_str(timeout), } request.headers = { 'x-ms-write': 'update', } _validate_and_format_range_headers( request, start_range, end_range) request.body = _get_data_bytes_only('data', data) if validate_content: computed_md5 = _get_content_md5(request.body) request.headers['Content-MD5'] = _to_str(computed_md5) self._perform_request(request) def clear_range(self, share_name, directory_name, file_name, start_range, end_range, timeout=None): ''' Clears the specified range and releases the space used in storage for that range. :param str share_name: Name of existing share. :param str directory_name: The path to the directory. :param str file_name: Name of existing file. :param int start_range: Start of byte range to use for clearing a section of the file. 
            The range can be up to 4 MB in size. The start_range and end_range
            params are inclusive. Ex: start_range=0, end_range=511 will clear the
            first 512 bytes of the file.
        :param int end_range:
            End of byte range to use for clearing a section of the file.
            The range can be up to 4 MB in size. The start_range and end_range
            params are inclusive. Ex: start_range=0, end_range=511 will clear the
            first 512 bytes of the file.
        :param int timeout:
            The timeout parameter is expressed in seconds.
        '''
        _validate_not_none('share_name', share_name)
        _validate_not_none('file_name', file_name)
        request = HTTPRequest()
        request.method = 'PUT'
        request.host_locations = self._get_host_locations()
        request.path = _get_path(share_name, directory_name, file_name)
        request.query = {
            'comp': 'range',
            'timeout': _int_to_str(timeout),
        }
        request.headers = {
            'Content-Length': '0',
            'x-ms-write': 'clear',
        }
        _validate_and_format_range_headers(
            request,
            start_range,
            end_range)

        self._perform_request(request)

    def list_ranges(self, share_name, directory_name, file_name,
                    start_range=None, end_range=None, timeout=None, snapshot=None):
        '''
        Retrieves the valid ranges for a file.

        :param str share_name:
            Name of existing share.
        :param str directory_name:
            The path to the directory.
        :param str file_name:
            Name of existing file.
        :param int start_range:
            Specifies the start offset of bytes over which to list ranges.
            The start_range and end_range params are inclusive.
            Ex: start_range=0, end_range=511 will list ranges covering the
            first 512 bytes of the file.
        :param int end_range:
            Specifies the end offset of bytes over which to list ranges.
            The start_range and end_range params are inclusive.
            Ex: start_range=0, end_range=511 will list ranges covering the
            first 512 bytes of the file.
        :param int timeout:
            The timeout parameter is expressed in seconds.
        :param str snapshot:
            A string that represents the snapshot version, if applicable.
        :returns: a list of valid ranges
        :rtype: a list of :class:`~azure.storage.file.models.FileRange`
        '''
        _validate_not_none('share_name', share_name)
        _validate_not_none('file_name', file_name)
        request = HTTPRequest()
        request.method = 'GET'
        request.host_locations = self._get_host_locations()
        request.path = _get_path(share_name, directory_name, file_name)
        request.query = {
            'comp': 'rangelist',
            'timeout': _int_to_str(timeout),
            'sharesnapshot': _to_str(snapshot),
        }

        if start_range is not None:
            _validate_and_format_range_headers(
                request,
                start_range,
                end_range,
                start_range_required=False,
                end_range_required=False)

        return self._perform_request(request, _convert_xml_to_ranges)
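
# ---------------------------------------------------------------------------
# Illustrative usage sketch (not part of the original module). It assumes an
# authenticated service object exposing the methods above (presumably
# FileService from azure.storage.file); account credentials, share and file
# names are placeholders, and ranges follow the inclusive semantics described
# in the docstrings.
#
#   from azure.storage.file import FileService
#
#   service = FileService(account_name='myaccount', account_key='<key>')
#
#   # Chunked, parallel download of a whole file to disk.
#   service.get_file_to_path('myshare', None, 'big.bin', '/tmp/big.bin',
#                            max_connections=4)
#
#   # Download only the first 512 bytes (start_range/end_range are inclusive).
#   partial = service.get_file_to_bytes('myshare', None, 'big.bin',
#                                       start_range=0, end_range=511)
#
#   # Rewrite that range (update_range takes at most 4 MB per call), then
#   # inspect which ranges of the file currently hold data.
#   service.update_range('myshare', None, 'big.bin', partial.content, 0, 511)
#   ranges = service.list_ranges('myshare', None, 'big.bin')
# ---------------------------------------------------------------------------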
PypiClean
/sherpa_client-0.12.7-py3-none-any.whl/sherpa_client/api/alt_texts/get_alt_texts.py
from http import HTTPStatus from typing import Any, Dict, List, Optional import httpx from ... import errors from ...client import Client from ...models.alt_text import AltText from ...types import Response def _get_kwargs( project_name: str, *, client: Client, ) -> Dict[str, Any]: url = "{}/projects/{projectName}/alt_texts".format(client.base_url, projectName=project_name) headers: Dict[str, str] = client.get_headers() cookies: Dict[str, Any] = client.get_cookies() return { "method": "get", "url": url, "headers": headers, "cookies": cookies, "timeout": client.get_timeout(), } def _parse_response(*, client: Client, response: httpx.Response) -> Optional[List["AltText"]]: if response.status_code == HTTPStatus.OK: response_200 = [] _response_200 = response.json() for componentsschemas_alt_text_array_item_data in _response_200: componentsschemas_alt_text_array_item = AltText.from_dict(componentsschemas_alt_text_array_item_data) response_200.append(componentsschemas_alt_text_array_item) return response_200 if client.raise_on_unexpected_status: raise errors.UnexpectedStatus(f"Unexpected status code: {response.status_code}") else: return None def _build_response(*, client: Client, response: httpx.Response) -> Response[List["AltText"]]: return Response( status_code=HTTPStatus(response.status_code), content=response.content, headers=response.headers, parsed=_parse_response(client=client, response=response), ) def sync_detailed( project_name: str, *, client: Client, ) -> Response[List["AltText"]]: """Get alternative document texts Args: project_name (str): Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: Response[List['AltText']] """ kwargs = _get_kwargs( project_name=project_name, client=client, ) response = httpx.request( verify=client.verify_ssl, **kwargs, ) return _build_response(client=client, response=response) def sync( project_name: str, *, client: Client, ) -> Optional[List["AltText"]]: """Get alternative document texts Args: project_name (str): Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: Response[List['AltText']] """ return sync_detailed( project_name=project_name, client=client, ).parsed async def asyncio_detailed( project_name: str, *, client: Client, ) -> Response[List["AltText"]]: """Get alternative document texts Args: project_name (str): Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. httpx.TimeoutException: If the request takes longer than Client.timeout. Returns: Response[List['AltText']] """ kwargs = _get_kwargs( project_name=project_name, client=client, ) async with httpx.AsyncClient(verify=client.verify_ssl) as _client: response = await _client.request(**kwargs) return _build_response(client=client, response=response) async def asyncio( project_name: str, *, client: Client, ) -> Optional[List["AltText"]]: """Get alternative document texts Args: project_name (str): Raises: errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True. httpx.TimeoutException: If the request takes longer than Client.timeout. 
Returns: Response[List['AltText']] """ return ( await asyncio_detailed( project_name=project_name, client=client, ) ).parsed
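
# ---------------------------------------------------------------------------
# Illustrative usage sketch (not part of the generated module). The base URL
# and project name are placeholders; the exact Client construction lives in
# sherpa_client.client and may take additional arguments (e.g. auth).
#
#   from sherpa_client.client import Client
#   from sherpa_client.api.alt_texts import get_alt_texts
#
#   client = Client(base_url="https://sherpa.example.com/api")
#
#   # Blocking call: returns the parsed list of AltText models (or None).
#   alt_texts = get_alt_texts.sync(project_name="my_project", client=client)
#
#   # Same call with access to status code, headers and raw content.
#   response = get_alt_texts.sync_detailed(project_name="my_project",
#                                          client=client)
#   print(response.status_code, response.parsed)
#
#   # Async variants: asyncio() and asyncio_detailed() mirror the two calls
#   # above and must be awaited.
# ---------------------------------------------------------------------------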
PypiClean
/31-2.2.tar.gz/31-2.2/s31/main.py
import argparse import sys import os import json from .config import Config, update_config from .notify import notify from .command import Command from .foreach import MAX_FOREACHES, parse_foreach_args, parse_values from .interruptable_runner import InterruptableRunner from .process_manager import list_procesess, stop_process from .utils import format_assignments, sanitize, set_key from .workers import dispatch_workers def main(): def config_argument(p): p.add_argument( "--config-file", default=os.path.expanduser("~/.31rc"), help="The location of the configuration file", ) parser = argparse.ArgumentParser("31") subparsers = parser.add_subparsers(dest="cmd") subparsers.required = True command_parser = subparsers.add_parser( "command", help="Run a command", aliases=["c"] ) config_argument(command_parser) command_parser.add_argument( "-s", "--sync", action="store_true", help="Run the command synchronously, that is, not in a screen session", ) command_parser.add_argument( "-n", "--screen-name", help="The name of the screen session to create" ) command_parser.add_argument( "-l", "--location", help="The location to run the script" ) command_parser.add_argument( "--no-email", help="Do not send an email when the command is done running", action="store_true", ) command_parser.add_argument( "-d", "--dry-run", help="Print out the commands to be run rather than running them", action="store_true", ) command_parser.add_argument( "-f", "--foreach", metavar=("%var", "vals"), nargs=2, action="append", help="Replaces each occurence of the variable with the corresponding value. " "Variables can be any sequence of characters. " "After the variables, values can be provided, each list of values should be a single argument in CSV format. " "See the documentation for details and examples.", ) command_parser.add_argument( "-fw", "--foreach-worker", nargs=2, metavar=("%var", "vals"), action="append", help="Similar to -f but associates each substitution with a particular variable. " "Notably, this does not lead to a combinatoric explosion if multiple are used, they are " "implicitly zipped together by the worker index", ) for k in range(2, 1 + MAX_FOREACHES): meta = tuple("%var{}".format(i) for i in range(1, k + 1)) meta += tuple("vals{}".format(i) for i in range(1, k + 1)) command_parser.add_argument( "-f" + str(k), "--foreach-" + str(k), metavar=meta, nargs=k * 2, action="append", help="See -f for details, -f2 through -f{0} allow you to zip the values for 2-{0} variables together.".format( MAX_FOREACHES ) if k == 2 else argparse.SUPPRESS, ) # internal use only, specifies which foreach args to use, in json format [(name, value)] command_parser.add_argument( "--foreach-specified-args", type=json.loads, help=argparse.SUPPRESS ) command_parser.add_argument( "-w", "--max-workers", type=int, help="Limit the number of threads that are to be launched at any point. " "This forces the creation of a monitoring thread, for which --sync is applied to", ) command_parser.add_argument( "-wn", "--worker-monitor-name", default="worker-monitor", help="Names the screen for the worker thread. By default is 'worker-monitor'", ) # internal use only, specifies that when the process is done, it should set the given key of the given file. 
command_parser.add_argument( "--when-done-set", nargs=2, metavar=("file", "key"), help=argparse.SUPPRESS ) command_parser.add_argument("command", help="Command to run") command_parser.set_defaults(action=command_action) config_parser = subparsers.add_parser("config", help="Modify configuration") config_argument(config_parser) config_parser.add_argument("key", help="The configuration key to modify") config_parser.add_argument("value", help="The value to assign the given key to") config_parser.set_defaults(action=config_action) list_parser = subparsers.add_parser("list", help="List all commands", aliases=["l"]) config_argument(list_parser) list_parser.add_argument( "prefix", help="Only list commands whose names start with the given prefix", nargs="?", ) list_parser.add_argument( "-o", "--ordering", help="The ordering to use", choices=["timestamp", "name"], default="timestamp", ) list_parser.set_defaults(action=list_action) stop_parser = subparsers.add_parser("stop", help="Stop a command", aliases=["s"]) config_argument(stop_parser) stop_parser.add_argument("name", help="The name of the command to stop") stop_parser.add_argument( "-m", "--multi", help="Stop all commands with the given prefix, even if there are multiple or it isn't the full name", action="store_true", ) stop_parser.set_defaults(action=stop_action) args = parser.parse_args() try: args.action(args) except RuntimeError as e: print(e, file=sys.stderr) def command_action(args): try: return do_command_action(args) finally: if args.when_done_set is not None: set_key(*args.when_done_set) def do_command_action(args): config = Config(args.config_file) assignments = ( [args.foreach_specified_args] if args.foreach_specified_args is not None else parse_foreach_args(args) ) worker_assignments = [] # Validation if args.foreach_worker is not None: for variable, vals in args.foreach_worker: if args.max_workers is None: raise RuntimeError("Cannot provide -fw without -w") vals = parse_values(vals) if len(vals) != args.max_workers: raise RuntimeError( "Mismatch between number of workers and number of provided values" ) worker_assignments.append((variable, vals)) commands = [] for assignment in assignments: screen_name = sanitize( format_assignments(args.screen_name or args.command, assignment) ) cmd = Command(cmd_line=args.command, location=args.location) cmd_to_use = cmd.replace(assignment) commands.append((screen_name, cmd_to_use, assignment)) if args.dry_run: for screen_name, cmd_to_use, _ in commands: if not args.sync: print("# on screen {}".format(screen_name)) cmd_to_use.dry_run() return if not args.sync: if args.max_workers is not None: config.launch_screen(sys.argv + ["--sync"], args.worker_monitor_name) return for screen_name, _, assignment in commands: config.launch_screen( sys.argv + ["--sync", "--foreach-specified-args", json.dumps(assignment)], screen_name, ) return if args.max_workers is not None and args.foreach_specified_args is None: # if foreach-specified-args is set, this is a dispatch thread def launch_worker(worker_idx, assignment, output_file, token): assignment = assignment + [ (var, vals[worker_idx]) for var, vals in worker_assignments ] screen_name = sanitize( format_assignments(args.screen_name or args.command, assignment) ) print("Launching {}".format(screen_name)) # don't need the --sync since at this point it is guaranteed config.launch_screen( sys.argv + [ "--foreach-specified-args", json.dumps(assignment), "--when-done-set", output_file, token, ], screen_name, ) dispatch_workers(args.max_workers, launch_worker, 
assignments) return for screen_name, cmd_to_use, _ in commands: runner = InterruptableRunner(screen_name, os.getpid(), cmd_to_use) if args.no_email: runner.run_checking_interrupt(cmd_to_use.run) else: notify(config, cmd_to_use, runner) def config_action(args): update_config(args.config_file, {args.key: args.value}) def list_action(args): list_procesess(args.prefix, args.ordering) def stop_action(args): stop_process(args.name)
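
# ---------------------------------------------------------------------------
# Illustrative command-line usage (not part of the original module). It
# assumes the package installs a console script that invokes main() (shown
# here as `31`); the command strings and values are made up.
#
#   # Run a command in a detached screen session; an email is sent when it
#   # finishes unless --no-email is given.
#   31 command -n training "python train.py"
#
#   # Substitute a variable over comma-separated values, one screen session
#   # per value; -d previews the generated commands without running anything.
#   31 c -d -f %lr 0.1,0.01,0.001 "python train.py --lr %lr"
#   31 c -f %lr 0.1,0.01,0.001 "python train.py --lr %lr"
#
#   # Limit concurrency to two workers via the worker monitor screen.
#   31 c -w 2 -f %seed 1,2,3,4 "python run.py --seed %seed"
#
#   # List running commands, then stop one by (a prefix of) its name.
#   31 list
#   31 stop training
# ---------------------------------------------------------------------------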
PypiClean
/pyalc7t-1.1.0.tar.gz/pyalc7t-1.1.0/matplotlib/PlotDialog.py
from PyQt4 import QtCore, QtGui from pyqtgraph import GraphicsLayoutWidget import os,time import pyqtgraph as pg import numpy as np class cls_PlotDialog(QtGui.QDialog): def __init__(self,kanal, kanalnummer,parent=None): QtGui.QDialog.__init__(self,None) self.resize(700, 500) self.vlayout=QtGui.QVBoxLayout() self.vlayout.setMargin(20) self.vlayout.setSpacing(20) self.setLayout(self.vlayout) font = QtGui.QFont() font.setPointSize(14) self.label_Titel = QtGui.QLabel(self) self.label_Titel.setFont(font) self.label_Titel.setText("") self.label_Titel.setAlignment(QtCore.Qt.AlignCenter) self.vlayout.addWidget(self.label_Titel) self.hlayout=QtGui.QHBoxLayout() self.tableWidget_Messwerte = QtGui.QTableWidget(self) self.tableWidget_Messwerte.setColumnCount(0) self.tableWidget_Messwerte.setRowCount(0) self.hlayout.addWidget(self.tableWidget_Messwerte) self.plotView = GraphicsLayoutWidget(self) self.hlayout.addWidget(self.plotView) self.vlayout.addLayout(self.hlayout) self.hlayout2=QtGui.QHBoxLayout() self.button_Beenden = QtGui.QPushButton() self.button_Beenden.setText("Beenden") self.button_Beenden.setFixedWidth(80) self.hlayout2.addWidget(self.button_Beenden) self.vlayout.addLayout(self.hlayout2) self.kanalnr=kanalnummer self.kanal=kanal self.starttime=0 self.anzmess=0 self.fromfile= False self.messwertdatei="0_kanal%d.amw" % self.kanalnr self.button_Beenden.clicked.connect(self.do_exit) self.label_Titel.setText("Messwerte für Kanal "+str(self.kanalnr)) self.tableWidget_Messwerte.setColumnCount(3) self.tableWidget_Messwerte.setColumnWidth(0,72) self.tableWidget_Messwerte.setColumnWidth(1,80) self.tableWidget_Messwerte.setColumnWidth(2,80) self.tableWidget_Messwerte.setFixedWidth(254) self.tableWidget_Messwerte.setHorizontalHeaderLabels(('Zeit','Spannung','Strom')) self.tableWidget_Messwerte.setShowGrid(True) self.tableWidget_Messwerte.verticalHeader().setVisible(False) pg.setConfigOptions(antialias=True) self.p1=self.plotView.addPlot() self.p1.setLabel('bottom','Zeit (s)') self.p1.setLabel('left','Spannung (V)') self.plotView.nextRow() self.p2=self.plotView.addPlot() self.p2.setLabel('bottom','Zeit (s)') self.p2.setLabel('left','Strom (A)') self.timer = QtCore.QTimer() self.timer.timeout.connect(self.refresh) # # Timer aktivieren für den Refresh von Messwertanzeige und Plot # def start_timer(self): self.timer.start(5000) # # Messwerte anzeigen, Plot erstellen # def refresh(self): try: status=self.kanal.GetStatus() if status['Aufz'] : if self.fromfile: self.p1.clear() self.p2.clear() self.fromfile= False entries= self.kanal.GetMesswertArray() l=len(entries) t=self.kanal.GetStarttime() if t == self.starttime and l == self.anzmess: return self.starttime=t self.anzmess=l else: if self.fromfile: return self.fromfile= True entries=[ ] if not os.path.isfile(self.messwertdatei): return f= open(self.messwertdatei,"r") for line in f: values=line.split(None) if values[0] == "#" : continue entries.append(values) f.close() except EnvironmentError as e: reply=QtGui.QMessageBox.critical(self,'Fehler',"Zugriff auf Messwertdatei fehlgeschlagen. 
"+e.strerror,QtGui.QMessageBox.Ok,QtGui.QMessageBox.Ok) self.do_exit() self.tableWidget_Messwerte.setRowCount(len(entries)) for i, row in enumerate(entries): for j, col in enumerate(row): item=QtGui.QTableWidgetItem(col) item.setFlags(QtCore.Qt.NoItemFlags) item.setFlags(QtCore.Qt.ItemIsEnabled) self.tableWidget_Messwerte.setItem(i,j,item) self.label_Titel.setText("%d Messwerte für Kanal %d" % (len(entries),self.kanalnr)) if len(entries) < 3 : return t=np.array([]) v=np.array([]) a=np.array([]) for i, row in enumerate (entries): t=np.append(t,[float(row[0])]) v=np.append(v,[float(row[1])]) a=np.append(a,[float(row[2])]) self.p1.plot(t,v,pen=(255,0,0)) self.p2.plot(t,a,pen=(255,255,0)) # # Action Script: Fenster schließen --- # def do_exit(self): self.timer.stop() self.close()
PypiClean
/ShaPy-1.0.1.tar.gz/ShaPy-1.0.1/shapy/framework/netlink/htb.py
import os from shapy.framework.netlink.message import Attr from .constants import * from struct import Struct from shapy.framework.utils import nl_us2ticks class HTBQdiscAttr(Attr): """Representation of HTB qdisc options.""" #struct tc_htb_glob { # __u32 version; /* to match HTB/TC */ # __u32 rate2quantum; /* bps->quantum divisor */ # __u32 defcls; /* default class number */ # __u32 debug; /* debug flags */ # # /* stats */ # __u32 direct_pkts; /* count of non shapped packets */ #}; data_format = Struct('6I') def __init__(self, defcls, r2q=10): data = self.data_format.pack(0x20018, 3, r2q, defcls, 0, 0) Attr.__init__(self, TCA_OPTIONS, data) class HTBParms(Attr): """ Internal units: bytes """ #struct tc_ratespec { # unsigned char cell_log; # unsigned char __reserved; # unsigned short overhead; # short cell_align; # unsigned short mpu; # __u32 rate; #}; #struct tc_htb_opt { # struct tc_ratespec rate; # struct tc_ratespec ceil; # __u32 buffer; # __u32 cbuffer; # __u32 quantum; # __u32 level; /* out only */ # __u32 prio; #}; tc_ratespec = Struct("BxHhHI") tc_htb_opt = Struct("5I") data_format = Struct("8xI8xI5I") # tc_htb_opt @classmethod def unpack(cls, data): attr, rest = Attr.unpack(data) d = cls.data_format.unpack(attr.data) return cls(d[0], d[1], d[5], d[7]) def __init__(self, rate, ceil=0, mtu=1600, quantum=0, prio=0): """ rate, ceil, mtu: bytes """ if not ceil: ceil = rate r = self.tc_ratespec.pack(3, 0, -1, 0, rate) c = self.tc_ratespec.pack(3, 0, -1, 0, ceil) hz = os.sysconf('SC_CLK_TCK') buffer = tc_calc_xmittime(rate, (rate / hz) + mtu) cbuffer = tc_calc_xmittime(ceil, (rate / hz) + mtu) t = self.tc_htb_opt.pack(buffer, cbuffer, quantum, 0, prio) data = r + c + t Attr.__init__(self, TCA_HTB_PARMS, data) class RTab(Attr): """ Rate table attribute, 256 integers representing an estimate how long it takes to send packets of various lengths. """ data_format = Struct("256I") def __init__(self, rate, mtu, cell_log=3): rtab = tc_calc_rtable(rate, cell_log, mtu) data = self.data_format.pack(*rtab) Attr.__init__(self, TCA_HTB_RTAB, data) class CTab(RTab): def __init__(self, rate, mtu, cell_log=3): rtab = tc_calc_rtable(rate, cell_log, mtu) data = self.data_format.pack(*rtab) Attr.__init__(self, TCA_HTB_CTAB, data) def tc_calc_rtable(rate, cell_log, mtu): """ rtab[pkt_len>>cell_log] = pkt_xmit_time cell - The cell size determines he granularity of packet transmission time calculations. Has a sensible default. """ # http://kerneltrap.org/mailarchive/linux-netdev/2009/11/2/6259456/thread rtab = [] bps = rate if mtu == 0: mtu = 2047 if cell_log < 0: cell_log = 0 while (mtu >> cell_log) > 255: cell_log += 1 for i in range(0, 256): size = (i + 1) << cell_log rtab.append(tc_calc_xmittime(bps, size)) return rtab; def tc_calc_xmittime(rate, size): TIME_UNITS_PER_SEC = 1000000#000 return int(nl_us2ticks(int(TIME_UNITS_PER_SEC*(float(size)/rate))))
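
# ---------------------------------------------------------------------------
# Worked example (illustrative, not part of the original module). Rates are
# handled internally in bytes per second ("Internal units: bytes" above), so
# 1 Mbit/s corresponds to 125000 B/s. With TIME_UNITS_PER_SEC = 1000000,
# transmitting a 1500-byte packet at that rate takes
#
#     1000000 * 1500 / 125000 = 12000 microseconds
#
# before nl_us2ticks() converts the value into kernel clock ticks, so the
# final tick count depends on the running kernel's clock resolution.
#
# A sketch of how the attributes above might be combined for an HTB class
# limited to 1 Mbit/s with a 2 Mbit/s ceiling (values are placeholders; the
# attributes still have to be packed into a netlink tc message elsewhere in
# shapy):
#
#   opts = HTBQdiscAttr(defcls=0x10, r2q=10)     # qdisc-level options
#   parms = HTBParms(rate=125000, ceil=250000)   # per-class rate and ceiling
#   rtab = RTab(125000, mtu=1600)                # xmit-time lookup table
#   ctab = CTab(250000, mtu=1600)
# ---------------------------------------------------------------------------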
PypiClean
/arrivy-google-cloud-bigquery-0.0.6.tar.gz/arrivy-google-cloud-bigquery-0.0.6/arrivy/google/cloud/bigquery/query.py
import six from google.cloud.iterator import HTTPIterator from arrivy.google.cloud.bigquery._helpers import _TypedProperty from arrivy.google.cloud.bigquery._helpers import _rows_from_json from arrivy.google.cloud.bigquery.dataset import Dataset from arrivy.google.cloud.bigquery.job import QueryJob from arrivy.google.cloud.bigquery.table import _parse_schema_resource from arrivy.google.cloud.bigquery._helpers import QueryParametersProperty from arrivy.google.cloud.bigquery._helpers import UDFResourcesProperty from arrivy.google.cloud.bigquery._helpers import _item_to_row from arrivy.google.cloud.bigquery._helpers import _rows_page_start class _SyncQueryConfiguration(object): """User-settable configuration options for synchronous query jobs. Values which are ``None`` -> server defaults. """ _default_dataset = None _dry_run = None _max_results = None _timeout_ms = None _preserve_nulls = None _use_query_cache = None _use_legacy_sql = None class QueryResults(object): """Synchronous job: query tables. :type query: str :param query: SQL query string :type client: :class:`arrivy.google.cloud.bigquery.client.Client` :param client: A client which holds credentials and project configuration for the dataset (which requires a project). :type udf_resources: tuple :param udf_resources: An iterable of :class:`arrivy.google.cloud.bigquery.job.UDFResource` (empty by default) :type query_parameters: tuple :param query_parameters: An iterable of :class:`arrivy.google.cloud.bigquery._helpers.AbstractQueryParameter` (empty by default) """ _UDF_KEY = 'userDefinedFunctionResources' _QUERY_PARAMETERS_KEY = 'queryParameters' def __init__(self, query, client, udf_resources=(), query_parameters=()): self._client = client self._properties = {} self.query = query self._configuration = _SyncQueryConfiguration() self.udf_resources = udf_resources self.query_parameters = query_parameters self._job = None @classmethod def from_query_job(cls, job): """Factory: construct from an existing job. :type job: :class:`~arrivy.google.cloud.bigquery.job.QueryJob` :param job: existing job :rtype: :class:`QueryResults` :returns: the instance, bound to the job """ instance = cls(job.query, job._client, job.udf_resources) instance._job = job job_ref = instance._properties.setdefault('jobReference', {}) job_ref['jobId'] = job.name if job.default_dataset is not None: instance.default_dataset = job.default_dataset if job.use_query_cache is not None: instance.use_query_cache = job.use_query_cache if job.use_legacy_sql is not None: instance.use_legacy_sql = job.use_legacy_sql return instance @property def project(self): """Project bound to the job. :rtype: str :returns: the project (derived from the client). """ return self._client.project def _require_client(self, client): """Check client or verify over-ride. :type client: :class:`~arrivy.google.cloud.bigquery.client.Client` or ``NoneType`` :param client: the client to use. If not passed, falls back to the ``client`` stored on the current dataset. :rtype: :class:`arrivy.google.cloud.bigquery.client.Client` :returns: The client passed in or the currently bound client. """ if client is None: client = self._client return client @property def cache_hit(self): """Query results served from cache. See https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#cacheHit :rtype: bool or ``NoneType`` :returns: True if the query results were served from cache (None until set by the server). """ return self._properties.get('cacheHit') @property def complete(self): """Server completed query. 
See https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#jobComplete :rtype: bool or ``NoneType`` :returns: True if the query completed on the server (None until set by the server). """ return self._properties.get('jobComplete') @property def errors(self): """Errors generated by the query. See https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#errors :rtype: list of mapping, or ``NoneType`` :returns: Mappings describing errors generated on the server (None until set by the server). """ return self._properties.get('errors') @property def name(self): """Job name, generated by the back-end. See https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#jobReference :rtype: list of mapping, or ``NoneType`` :returns: Mappings describing errors generated on the server (None until set by the server). """ return self._properties.get('jobReference', {}).get('jobId') @property def job(self): """Job instance used to run the query. :rtype: :class:`arrivy.google.cloud.bigquery.job.QueryJob`, or ``NoneType`` :returns: Job instance used to run the query (None until ``jobReference`` property is set by the server). """ if self._job is None: job_ref = self._properties.get('jobReference') if job_ref is not None: self._job = QueryJob(job_ref['jobId'], self.query, self._client) return self._job @property def page_token(self): """Token for fetching next bach of results. See https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#pageToken :rtype: str, or ``NoneType`` :returns: Token generated on the server (None until set by the server). """ return self._properties.get('pageToken') @property def total_rows(self): """Total number of rows returned by the query. See https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#totalRows :rtype: int, or ``NoneType`` :returns: Count generated on the server (None until set by the server). """ total_rows = self._properties.get('totalRows') if total_rows is not None: return int(total_rows) @property def total_bytes_processed(self): """Total number of bytes processed by the query. See https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#totalBytesProcessed :rtype: int, or ``NoneType`` :returns: Count generated on the server (None until set by the server). """ total_bytes_processed = self._properties.get('totalBytesProcessed') if total_bytes_processed is not None: return int(total_bytes_processed) @property def num_dml_affected_rows(self): """Total number of rows affected by a DML query. See https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#numDmlAffectedRows :rtype: int, or ``NoneType`` :returns: Count generated on the server (None until set by the server). """ num_dml_affected_rows = self._properties.get('numDmlAffectedRows') if num_dml_affected_rows is not None: return int(num_dml_affected_rows) @property def rows(self): """Query results. See https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#rows :rtype: list of tuples of row values, or ``NoneType`` :returns: fields describing the schema (None until set by the server). """ return _rows_from_json(self._properties.get('rows', ()), self.schema) @property def schema(self): """Schema for query results. See https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#schema :rtype: list of :class:`SchemaField`, or ``NoneType`` :returns: fields describing the schema (None until set by the server). 
""" return _parse_schema_resource(self._properties.get('schema', {})) default_dataset = _TypedProperty('default_dataset', Dataset) """See https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#defaultDataset """ dry_run = _TypedProperty('dry_run', bool) """See https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#dryRun """ max_results = _TypedProperty('max_results', six.integer_types) """See https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#maxResults """ preserve_nulls = _TypedProperty('preserve_nulls', bool) """See https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#preserveNulls """ query_parameters = QueryParametersProperty() timeout_ms = _TypedProperty('timeout_ms', six.integer_types) """See https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#timeoutMs """ udf_resources = UDFResourcesProperty() use_query_cache = _TypedProperty('use_query_cache', bool) """See https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#useQueryCache """ use_legacy_sql = _TypedProperty('use_legacy_sql', bool) """See https://cloud.google.com/bigquery/docs/\ reference/v2/jobs/query#useLegacySql """ def _set_properties(self, api_response): """Update properties from resource in body of ``api_response`` :type api_response: httplib2.Response :param api_response: response returned from an API call """ self._properties.clear() self._properties.update(api_response) def _build_resource(self): """Generate a resource for :meth:`begin`.""" resource = {'query': self.query} if self.default_dataset is not None: resource['defaultDataset'] = { 'projectId': self.project, 'datasetId': self.default_dataset.name, } if self.max_results is not None: resource['maxResults'] = self.max_results if self.preserve_nulls is not None: resource['preserveNulls'] = self.preserve_nulls if self.timeout_ms is not None: resource['timeoutMs'] = self.timeout_ms if self.use_query_cache is not None: resource['useQueryCache'] = self.use_query_cache if self.use_legacy_sql is not None: resource['useLegacySql'] = self.use_legacy_sql if self.dry_run is not None: resource['dryRun'] = self.dry_run if len(self._udf_resources) > 0: resource[self._UDF_KEY] = [ {udf_resource.udf_type: udf_resource.value} for udf_resource in self._udf_resources ] if len(self._query_parameters) > 0: resource[self._QUERY_PARAMETERS_KEY] = [ query_parameter.to_api_repr() for query_parameter in self._query_parameters ] if self._query_parameters[0].name is None: resource['parameterMode'] = 'POSITIONAL' else: resource['parameterMode'] = 'NAMED' return resource def run(self, client=None): """API call: run the query via a POST request See https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query :type client: :class:`~arrivy.google.cloud.bigquery.client.Client` or ``NoneType`` :param client: the client to use. If not passed, falls back to the ``client`` stored on the current dataset. 
""" if self.job is not None: raise ValueError("Query job is already running.") client = self._require_client(client) path = '/projects/%s/queries' % (self.project,) api_response = client._connection.api_request( method='POST', path=path, data=self._build_resource()) self._set_properties(api_response) def fetch_data(self, max_results=None, page_token=None, start_index=None, timeout_ms=None, client=None): """API call: fetch a page of query result data via a GET request See https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/getQueryResults :type max_results: int :param max_results: (Optional) maximum number of rows to return. :type page_token: str :param page_token: (Optional) token representing a cursor into the table's rows. :type start_index: int :param start_index: (Optional) zero-based index of starting row :type timeout_ms: int :param timeout_ms: (Optional) How long to wait for the query to complete, in milliseconds, before the request times out and returns. Note that this is only a timeout for the request, not the query. If the query takes longer to run than the timeout value, the call returns without any results and with the 'jobComplete' flag set to false. You can call GetQueryResults() to wait for the query to complete and read the results. The default value is 10000 milliseconds (10 seconds). :type client: :class:`~arrivy.google.cloud.bigquery.client.Client` or ``NoneType`` :param client: the client to use. If not passed, falls back to the ``client`` stored on the current dataset. :rtype: tuple :returns: ``(row_data, total_rows, page_token)``, where ``row_data`` is a list of tuples, one per result row, containing only the values; ``total_rows`` is a count of the total number of rows in the table; and ``page_token`` is an opaque string which can be used to fetch the next batch of rows (``None`` if no further batches can be fetched). :raises: ValueError if the query has not yet been executed. """ if self.name is None: raise ValueError("Query not yet executed: call 'run()'") client = self._require_client(client) params = {} if start_index is not None: params['startIndex'] = start_index if timeout_ms is not None: params['timeoutMs'] = timeout_ms path = '/projects/%s/queries/%s' % (self.project, self.name) iterator = HTTPIterator(client=client, path=path, item_to_value=_item_to_row, items_key='rows', page_token=page_token, max_results=max_results, page_start=_rows_page_start_query, extra_params=params) iterator.query_result = self # Over-ride the key used to retrieve the next page token. iterator._NEXT_TOKEN = 'pageToken' return iterator def _rows_page_start_query(iterator, page, response): """Update query response when :class:`~arrivy.google.cloud.iterator.Page` starts. .. note:: This assumes that the ``query_response`` attribute has been added to the iterator after being created, which should be done by the caller. :type iterator: :class:`~arrivy.google.cloud.iterator.Iterator` :param iterator: The iterator that is currently in use. :type page: :class:`~arrivy.google.cloud.iterator.Page` :param page: The page that was just created. :type response: dict :param response: The JSON API response for a page of rows in a table. """ iterator.query_result._set_properties(response) iterator.schema = iterator.query_result.schema _rows_page_start(iterator, page, response)
PypiClean
/root_pack-1.1.1.tar.gz/root_pack-1.1.1/root_pack/__init__.py
name = "root_pack" import pandas as pd import re import string import os def pathfile(file): THIS_FOLDER = os.path.dirname(os.path.abspath(__file__)) my_file = os.path.join(THIS_FOLDER, file) return my_file def li(file): my_file = pathfile(file) dictfile=open(my_file,"r") dictlines=dictfile.read() mydict=[] mydict=dictlines.split() return mydict mydict=li("dictionary.txt") mysuffix=li("suffixmorph.txt") vedict=li("verb.txt") ex=pathfile('example.csv') file=pd.read_csv(ex) loop = file['letter'].copy() ch1=pathfile('ch.csv') file1=pd.read_csv(ch1) myloop = file1['letter'].copy() ch2=pathfile('change.csv') filee=pd.read_csv(ch2) myloop2 = filee['letter'].copy() stop1=["വല്‍","ടി","ര്‍ക്ക്"] stop2=["നാല്","യ്യ്","ല്ല്","ള്ള്","മ്മ്","\u0d3e"+"യ്","\u0d47"+"യ്"] def dvithva(x): n=len(x) mysuffix1=li("suffixdvithva.txt") mylist = [] mylist = ["ക്ക","പ്പ","ത്ത", "ശ്ശ","ച്ച"] if any(dl in x for dl in mylist): for k in mylist: if k in x: lis=[m.start() for m in re.finditer(k,x)] for N in lis: if k == x[N:n]: return x else: if k[2:3]+x[N+3:n] in mysuffix1: wordd=fn(x[0:N]) if wordd==None: wordd=x[0:N] wordt=sp(wordd) if wordt==None: wordt=wordd if wordt in mydict: return wordt def stem(word): flag = False word=word.replace(" ","") stoplist=["കടത്ത്","ഫോട്ടോ","ആദായ","ഓയില്‍","നടത്ത്","പഞ്ചായത്ത്","മതില്‍","വാതില്‍","അബ്ദുള്ള","സുഹൃത്ത്","അഭിപ്രായ","നിങ്ങള്‍","ജില്ല","കത്ത്","കരയ്","തിരയ്","പറയ്","അറിയ്","പിരിയ്","നേതാവും","മക്കള്‍","അനുയായി"] stop=["റയ്","പ്പിക്ക്","ന്നെ","കുമാര്‍","\u0d42"+"ത്ത്","യ്യും","ഴിയ്","രും","സമ്പത്ത്","തൊഴില്‍","രോ","താവും","വില്‍","ര്‍ത്ത്","ഹാം","ടുവ്","യ്യ്","ന്ന്","ല്ല്","ള്ള്","ണ്ണ്","\u0d3e"+"ത്ത്","\u0d3f"+"ത്ത്","\u0d41"+"ത്ത്","യിന്‍","\u0d3e"+"വ്","ന്നില്‍","നാല്","ശായി"] #print (len(word),";len") adlist=["\u0d41","\u0d42","\u0d46","\u0d47","\u0d4a","\u0d4b","\u0d3e","\u0d3f"] if word.endswith("\u0d47"+"യ്") and len(word)==4: return word if word in stoplist: return word for patt in stop: if word.endswith(patt): return word for pattern in loop: if word.endswith(pattern): if len(word)>len(pattern)+1: n=file.loc[file['letter']==pattern].index[0] flag = True word1=re.sub(pattern+'$',file.loc[n,'first_change'],word) break if flag: if word!=word1: return stem(word1) else: return word def verb(word,myloop,file1,stop): word=word.replace(" ","") for patt in stop: if word.endswith(patt): return word for pattern in myloop: if word.endswith(pattern): n=file1.loc[file1['letter']==pattern].index[0] word1=re.sub(pattern+'$',file1.loc[n,'first_change'],word) if word1 in vedict: return word1 return word def change(patt): if patt=="\u0d41": return "ഉ" elif patt=="\u0d42": return "ഊ" elif patt=="\u0d46": return "എ" elif patt=="\u0d47": return "ഏ" elif patt=="\u0d4a": return "ഒ" elif patt=="\u0d4b": return "ഓ" elif patt=="\u0d3e": return "ആ" elif patt=="\u0d3f": return "ഇ" else: return patt def spst(word): x= stem(word) if x==None: x=word d= dvithva(x) if d!=None: x=d return x def sp(x): n=len(x) flag = False x=x.replace(" ","") stoplist=["മനസ്സിലാക്ക്"] if x in stoplist: return x adlist=["\u0d41","\u0d42","\u0d46","\u0d47","\u0d4a","\u0d4b","\u0d3e","\u0d3f"] x=spst(x) for pattern in adlist: if pattern in x: indx=[m.start() for m in re.finditer(pattern,x)] N=indx[-1] if x[0:N].endswith("ല്ല") or x[0:N].endswith("ണ്ട"): word1=x[0:N] else: word1=x[0:N]+"\u0d4d" word1=spst(word1) suff=change(pattern)+x[N+1:n] if suff in mysuffix: flag = True break if flag: word = word1 word=word.replace(" ","") if word in mydict: return word else: wor=fn(word) if wor!=None: word=wor wor=dvithva(word) 
if wor!=None: word=wor wor=ni(word) if wor!=None: word=wor wor= last(word) if wor!=None: word=wor hj=verb(word,myloop2,filee,stop1) if hj in mydict: return hj if word in mydict: return word return sp(word) else: if x in mydict: return x else: if x.endswith("വ്"): x=x.replace("വ്"+'$',"\u0d02") if x in mydict: return x for patt in adlist: if x.endswith(patt): x=x[:-1]+"\u0d4d" wo= stem(x) wor1=wo[0] w=sp(wor1) if w!=None: wo=w if wo in mydict: return wo def ni(x): adlist4=["ല്‍","ര്‍", "ന്‍", "ള്‍","ണ്‍","നെ"] for patt in adlist4: if x.endswith(patt)==False: if patt in x: ind=x.index(patt) n=len(patt) if x[ind+n:] in mysuffix or x[ind+n:] in mydict: stem5=x[:ind+n] stem6=spst(stem5) if stem6!=None and stem6 in mydict: x=stem6 else: stem7=sp(stem6) if stem7!=None: x=stem7 return x def last(x): adlistd=["ത","ല","ന","ച","ണ","യ","ര","ക്ക","ള"] for patt in adlistd: if x.endswith(patt)==False: if patt in x: indx=[m.start() for m in re.finditer(patt,x)] n=len(patt) for N in indx: suf="അ"+x[N+n:] if (suf in mysuffix) or (suf in mydict): k=sp(x[:N+n]+"\u0d4d") if k in mydict: return k def fn(word): if word in mydict: return word adlist1=["\u0d41","\u0d42","\u0d46","\u0d47","\u0d4a","\u0d4b","\u0d3e","\u0d3f"] for patt in adlist1: if patt in word: indx=[m.start() for m in re.finditer(patt,word)] for N in indx: if change(patt)+word[N+1:] in mysuffix: x=verb(word[:-1]+"\u0d4d",myloop,file1,stop2) if x in mydict: return x k=sp(word[:N]+"\u0d4d") if k in mydict: return k if (word[N+1:] in mysuffix) or (word[N+1:] in mydict): if word[:N+1] in mydict: return word[:N+1] else: if word[:N] in mydict: return word[:N] k=sp(word[:N]+"\u0d4d") if k==None: k=word[:N]+"\u0d4d" k=verb(k,myloop,file1,stop2) if k in mydict: return k if word.endswith(patt): if word[:-1] in mydict: return word[:-1] elif word[:-1]+"\u0d4d" in mydict: k1=word[:-1]+"\u0d4d" k2=sp(k1) ki=verb(k1,myloop,file1,stop2) return ki def root(word): x=re.sub('[0-9]+', '',word) remove=str.maketrans('', '', string.punctuation) x=x.translate(remove) x=x.replace("ർ","ര്‍").replace("ൾ","ള്‍").replace("ൽ","ല്‍").replace("ൺ","ണ്‍").replace("ൻ","ന്‍").replace("‌","") steml=stem(x) if steml==None: steml=x adlist1=["\u0d41","\u0d42","\u0d46","\u0d47","\u0d4a","\u0d4b","\u0d3e","\u0d3f"] adlist2=["ല്‍","ര്‍", "ന്‍", "ള്‍","ണ്‍","നെ"] adlist3=["ക്ക","പ്പ", "ത്ത", "ശ്ശ","ച്ച"] adlistd=["ത","ല","ന","ച","ണ","യ","ര","ക്ക","ള"] if any(ltr in steml for ltr in adlist2): stemj=ni(steml) if stemj!=None: steml=stemj if any(ltr in steml for ltr in adlist3): stem1=dvithva(steml) if stem1!=None: steml=stem1 if any(ltr in steml for ltr in adlist1): stem1=sp(steml) if stem1!=None: steml=stem1 words=fn(steml) if words!=None: steml=words if any(ltr in steml for ltr in adlistd): stemdi=last(steml) if stemdi!=None: steml=stemdi stem2=verb(steml,myloop2,filee,stop1) if stem2!=steml and stem2!=None: return stem2 else: stem2=verb(steml,myloop,file1,stop2) if stem2!=None and stem2 in mydict: return stem2 return (steml)
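
# ---------------------------------------------------------------------------
# Illustrative usage sketch (not part of the original module). The analyser
# is driven entirely by the bundled dictionary and suffix files, so the stems
# it returns depend on that data and are not asserted here; the sample words
# are taken from the stop lists above.
#
#   from root_pack import root
#
#   for surface_form in ["പഞ്ചായത്ത്", "നിങ്ങള്‍"]:
#       print(surface_form, "->", root(surface_form))
# ---------------------------------------------------------------------------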
PypiClean
/matrix_nio-0.21.2-py3-none-any.whl/nio/events/common.py
# Copyright © 2020 Damir Jelić <[email protected]> # # Permission to use, copy, modify, and/or distribute this software for # any purpose with or without fee is hereby granted, provided that the # above copyright notice and this permission notice appear in all copies. # # THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES # WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY # SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER # RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF # CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN # CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. """nio common event type mixins This module contains mixin classes for events that can be found in the to-device part of a sync response or in a room timeline of a sync response. """ from dataclasses import dataclass, field from typing import Dict, List @dataclass class KeyVerificationEventMixin: """Base class for key verification events. Attributes: transaction_id (str): An opaque identifier for the verification process. Must be unique with respect to the devices involved. """ transaction_id: str = field() @dataclass class KeyVerificationStartMixin: """Event signaling the start of a SAS key verification process. Attributes: from_device (str): The device ID which is initiating the process. method (str): The verification method to use. key_agreement_protocols (list): A list of strings specifying the key agreement protocols the sending device understands. hashes (list): A list of strings specifying the hash methods the sending device understands. message_authentication_codes (list): A list of strings specifying the message authentication codes that the sending device understands. short_authentication_string (list): A list of strings specifying the SAS methods the sending device (and the sending device's user) understands. """ from_device: str = field() method: str = field() key_agreement_protocols: List[str] = field() hashes: List[str] = field() message_authentication_codes: List[str] = field() short_authentication_string: List[str] = field() @dataclass class KeyVerificationAcceptMixin: """Event signaling that the SAS verification start has been accepted. Attributes: commitment (str): The commitment value of the verification process. key_agreement_protocol (str): The key agreement protocol the device is choosing to use hash (str): A list of strings specifying the hash methods the sending device understands. message_authentication_code (str): The message authentication code the device is choosing to use. short_authentication_string (list): A list of strings specifying the SAS methods that can be used in the verification process. """ commitment: str = field() key_agreement_protocol: str = field() hash: str = field() message_authentication_code: str = field() short_authentication_string: List[str] = field() @dataclass class KeyVerificationKeyMixin: """Event carrying a key verification key. After this event is received the short authentication string can be shown to the user. Attributes: key (str): The device's ephemeral public key, encoded as unpadded base64. """ key: str = field() @dataclass class KeyVerificationMacMixin: """Event holding a message authentication code of the verification process. 
After this event is received the device that we are verifying will be marked as verified given that we have accepted the short authentication string as well. Attributes: mac (dict): A map of the key ID to the MAC of the key, using the algorithm in the verification process. The MAC is encoded as unpadded base64. keys (str): The MAC of the comma-separated, sorted, list of key IDs given in the mac property, encoded as unpadded base64. """ mac: Dict[str, str] = field() keys: str = field() @dataclass class KeyVerificationCancelMixin: """Event signaling that a key verification process has been canceled. Attributes: code (str): The error code for why the process/request was canceled by the user. reason (str): A human readable description of the cancellation code. """ code: str = field() reason: str = field()
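# Hedged sketch of how these mixins are meant to be combined: a concrete event
# class derives from the relevant mixins and is itself a dataclass. The name
# ExampleKeyVerificationKey is hypothetical; nio's real event classes also
# derive from a to-device event base class that is omitted here.
@dataclass
class ExampleKeyVerificationKey(KeyVerificationEventMixin, KeyVerificationKeyMixin):
    """Carries the transaction id together with the ephemeral public key."""


if __name__ == "__main__":
    event = ExampleKeyVerificationKey(
        transaction_id="txn-1234",
        key="hQlWIk1mP...",  # unpadded base64 ephemeral key (placeholder)
    )
    print(event)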
PypiClean
/carp-0.0.3.tar.gz/carp-0.0.3/src/carp.py
import abc import argparse import collections import logging import os import shutil import clepy import jinja2 import jinja2.meta log = logging.getLogger('carp') class CarpScript(object): __metaclass__ = abc.ABCMeta @staticmethod def find_carpdir(): for d in clepy.walkup(): if 'carpdir' in os.listdir(d): return os.path.join(d, 'carpdir') @abc.abstractmethod def set_up_args(self): raise NotImplementedError def template_is_a_single_file(self, carpdir, template_name): return os.path.isfile(os.path.join( carpdir, template_name)) def template_is_a_folder(self, carpdir, template_name): return os.path.isdir(os.path.join( carpdir, template_name)) class CarpLister(CarpScript): def set_up_args(self): ap = argparse.ArgumentParser( description='List available templates') ap.add_argument('--carpdir', help='Use this as the carp directory') return ap.parse_args() @classmethod def list_templates(cls): self = cls() args = self.set_up_args() carpdir = args.carpdir or self.find_carpdir() if not carpdir: print("no carpdir found!") return else: for tmpl in self.yield_stored_templates(carpdir): print('{0}'.format(os.path.basename(tmpl))) def yield_stored_templates(self, carpdir): for tmpl in os.listdir(carpdir): if os.path.isdir(os.path.join(carpdir, tmpl)) \ and os.path.basename(tmpl) == '.svn': continue else: yield(tmpl) class CarpAdder(CarpScript): def set_up_args(self): ap = argparse.ArgumentParser( description='Add a template') ap.add_argument('--carpdir', help='Use this as the carp directory') ap.add_argument('file_to_add', help='This is the file to store') return ap.parse_args() @classmethod def add_template(cls): self = cls() args = self.set_up_args() carpdir = args.carpdir or self.find_carpdir() self.copy_template(args.file_to_add, carpdir) def copy_template(self, thing_to_add, carpdir): if os.path.isfile(thing_to_add): self.copy_single_file(thing_to_add, carpdir) elif os.path.isdir(thing_to_add): self.copy_folder(thing_to_add, carpdir) def copy_single_file(self, single_file, carpdir): basename = os.path.basename(single_file) shutil.copyfile( single_file, os.path.join( carpdir, basename)) def copy_folder(self, folder, carpdir): basename = os.path.basename(folder) shutil.copytree( folder, os.path.join( carpdir, basename)) class CarpRenderer(CarpScript): def set_up_args(self): ap = argparse.ArgumentParser( description='Render a stored template') ap.add_argument('--carpdir', help='Use this as the carp directory') ap.add_argument('--define', action='append', metavar="key=value", help='Define a value to use in the template, like ' 'projname=foo. 
Can use multiple times.') ap.add_argument('template', help='This is the template to render') ap.add_argument('target', help='This is the directory to copy stuff to.', nargs='?', default=None) return ap.parse_args() @classmethod def render(cls): self = cls() args = self.set_up_args() carpdir = args.carpdir or self.find_carpdir() defined_values = dict([s.split('=') for s in args.define]) \ if args.define else {} print(self.render_template(carpdir, args.template, defined_values, args.target)) def render_template(self, carpdir, template, values, target=None): if self.template_is_a_single_file(carpdir, template): env = jinja2.Environment( loader=jinja2.FileSystemLoader(carpdir), undefined=jinja2.StrictUndefined) j = env.get_template(template) return j.render(**values) elif self.template_is_a_folder(carpdir, template): if not target: raise TargetRequired("Provide a destination!") else: return self.render_folder(carpdir, template, values, target) def render_folder(self, carpdir, template, values, target): for dirpath, dirnames, filenames \ in os.walk(os.path.join(os.path.join(carpdir, template))): parts = dirpath.partition(carpdir) stripped_dirpath = parts[-1].lstrip('/') target_dirpath = os.path.join(target, stripped_dirpath) rendered_target_dirpath = jinja2.Template(target_dirpath).render(**values) os.mkdir(rendered_target_dirpath) for filename in filenames: rendered_text = self.render_template( dirpath, filename, values) rendered_filename = jinja2.Template(filename).render(**values) with open(os.path.join( rendered_target_dirpath, rendered_filename), 'w') as f: f.write(rendered_text) return target class CarpInfoGetter(CarpScript): def set_up_args(self): ap = argparse.ArgumentParser( description='Get information on a stored template') ap.add_argument('--log-level', default='ERROR', choices=['DEBUG', 'INFO', 'WARN', 'ERROR', 'CRITICAL']) ap.add_argument('--carpdir', help='Use this as the carp directory') ap.add_argument('template', help='This is the template to get information on') return ap.parse_args() @classmethod def get_info(cls): self = cls() args = self.set_up_args() logging.basicConfig(level=args.log_level) carpdir = args.carpdir or self.find_carpdir() required_vars = self.get_info_on_template(carpdir, args.template) if not required_vars: print("{0} doesn't need any variables\n".format( args.template)) else: print("{0} required variables\n".format(args.template)) for var in required_vars: print("* {0}".format(var)) def get_info_on_template(self, carpdir, template_name): if self.template_is_a_single_file(carpdir, template_name): return self.get_info_on_single_file(carpdir, template_name) elif self.template_is_a_folder(carpdir, template_name): return self.get_info_on_folder(carpdir, template_name) def get_info_on_single_file(self, carpdir, template_name): env = jinja2.Environment( loader=jinja2.FileSystemLoader(carpdir), undefined=jinja2.StrictUndefined) text = env.loader.get_source(env, template_name) ast = env.parse(text) return jinja2.meta.find_undeclared_variables(ast) def get_info_on_folder(self, carpdir, template_name): required_variables = set([]) for dirpath, dirnames, filenames \ in os.walk(os.path.join(carpdir, template_name)): for f in filenames: for var in self.get_required_variables_from_file_name(carpdir, f): required_variables.add(var) for var in self.get_info_on_single_file(dirpath, f): required_variables.add(var) return required_variables def get_required_variables_from_file_name(self, carpdir, file_name): env = jinja2.Environment() ast = env.parse(file_name) return 
jinja2.meta.find_undeclared_variables(ast) class TargetRequired(ValueError): """ You cannot render a directory template without specifying where it goes. """
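# Hedged usage sketch: driving the classes above from Python rather than through
# argparse. The carp directory path, template name and template variables are
# placeholders.
if __name__ == "__main__":
    carpdir = "/path/to/carpdir"
    required = CarpInfoGetter().get_info_on_template(carpdir, "setup.py.tmpl")
    print("required variables:", sorted(required or []))

    rendered = CarpRenderer().render_template(carpdir, "setup.py.tmpl",
                                              {"projname": "foo"})
    print(rendered)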
PypiClean
/azureml_core-1.53.0-py3-none-any.whl/azureml/_restclient/models/iot_service_response.py
from .service_response_base import ServiceResponseBase class IotServiceResponse(ServiceResponseBase): """IotServiceResponse. :param id: The service Id. :type id: str :param name: The service name. :type name: str :param description: The service description. :type description: str :param kv_tags: The service tag dictionary. Tags are mutable. :type kv_tags: dict[str, str] :param properties: The service property dictionary. Properties are immutable. :type properties: dict[str, str] :param operation_id: The ID of the latest asynchronous operation for this service. :type operation_id: str :param state: The current state of the service. Possible values include: 'Transitioning', 'Healthy', 'Unhealthy', 'Failed', 'Unschedulable' :type state: str or ~_restclient.models.WebServiceState :param created_time: The time the service was created. :type created_time: datetime :param updated_time: The time the service was updated. :type updated_time: datetime :param error: The error details. :type error: ~_restclient.models.ServiceResponseBaseError :param deployment_type: The deployment type for the service. Possible values include: 'GRPCRealtimeEndpoint', 'HttpRealtimeEndpoint', 'Batch' :type deployment_type: str or ~_restclient.models.DeploymentType :param created_by: The individual last responsible for creating or updating the service. :type created_by: ~_restclient.models.ServiceResponseBaseCreatedBy :param compute_type: Constant filled by server. :type compute_type: str :param iot_device_id: :type iot_device_id: str :param routes: :type routes: dict[str, str] :param compute_name: :type compute_name: str :param iot_edge_modules: :type iot_edge_modules: list[~_restclient.models.IotModuleSettings] :param auth_enabled: :type auth_enabled: bool :param image_details: :type image_details: ~_restclient.models.IotServiceResponseImageDetails :param image_id: :type image_id: str :param image_digest: :type image_digest: str """ _validation = { 'compute_type': {'required': True}, } _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'kv_tags': {'key': 'kvTags', 'type': '{str}'}, 'properties': {'key': 'properties', 'type': '{str}'}, 'operation_id': {'key': 'operationId', 'type': 'str'}, 'state': {'key': 'state', 'type': 'WebServiceState'}, 'created_time': {'key': 'createdTime', 'type': 'iso-8601'}, 'updated_time': {'key': 'updatedTime', 'type': 'iso-8601'}, 'error': {'key': 'error', 'type': 'ServiceResponseBaseError'}, 'deployment_type': {'key': 'deploymentType', 'type': 'DeploymentType'}, 'created_by': {'key': 'createdBy', 'type': 'ServiceResponseBaseCreatedBy'}, 'compute_type': {'key': 'computeType', 'type': 'str'}, 'iot_device_id': {'key': 'iotDeviceId', 'type': 'str'}, 'routes': {'key': 'routes', 'type': '{str}'}, 'compute_name': {'key': 'computeName', 'type': 'str'}, 'iot_edge_modules': {'key': 'iotEdgeModules', 'type': '[IotModuleSettings]'}, 'auth_enabled': {'key': 'authEnabled', 'type': 'bool'}, 'image_details': {'key': 'imageDetails', 'type': 'IotServiceResponseImageDetails'}, 'image_id': {'key': 'imageId', 'type': 'str'}, 'image_digest': {'key': 'imageDigest', 'type': 'str'}, } def __init__(self, id=None, name=None, description=None, kv_tags=None, properties=None, operation_id=None, state=None, created_time=None, updated_time=None, error=None, deployment_type=None, created_by=None, iot_device_id=None, routes=None, compute_name=None, iot_edge_modules=None, auth_enabled=None, image_details=None, image_id=None, 
image_digest=None): super(IotServiceResponse, self).__init__(id=id, name=name, description=description, kv_tags=kv_tags, properties=properties, operation_id=operation_id, state=state, created_time=created_time, updated_time=updated_time, error=error, deployment_type=deployment_type, created_by=created_by) self.iot_device_id = iot_device_id self.routes = routes self.compute_name = compute_name self.iot_edge_modules = iot_edge_modules self.auth_enabled = auth_enabled self.image_details = image_details self.image_id = image_id self.image_digest = image_digest self.compute_type = 'IOT'
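# Hedged usage sketch: constructing the model directly; every value below is a
# placeholder and only a subset of the optional fields is shown.
if __name__ == "__main__":
    svc = IotServiceResponse(
        id="svc-0001",
        name="edge-scoring",
        description="scoring module deployed to an IoT Edge device",
        iot_device_id="edge-device-42",
        compute_name="my-edge-compute",
        auth_enabled=True,
    )
    print(svc.compute_type)  # this subclass always reports 'IOT'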
PypiClean
/jupyterlab_remote_contents-0.1.1.tar.gz/jupyterlab_remote_contents-0.1.1/node_modules/jsdom/lib/jsdom/living/generated/DOMParser.js
"use strict"; const conversions = require("webidl-conversions"); const utils = require("./utils.js"); const convertSupportedType = require("./SupportedType.js").convert; const impl = utils.implSymbol; class DOMParser { constructor() { return iface.setup(Object.create(new.target.prototype)); } parseFromString(str, type) { if (!this || !module.exports.is(this)) { throw new TypeError("Illegal invocation"); } if (arguments.length < 2) { throw new TypeError( "Failed to execute 'parseFromString' on 'DOMParser': 2 arguments required, but only " + arguments.length + " present." ); } const args = []; { let curArg = arguments[0]; curArg = conversions["DOMString"](curArg, { context: "Failed to execute 'parseFromString' on 'DOMParser': parameter 1" }); args.push(curArg); } { let curArg = arguments[1]; curArg = convertSupportedType(curArg, { context: "Failed to execute 'parseFromString' on 'DOMParser': parameter 2" }); args.push(curArg); } return utils.tryWrapperForImpl(this[impl].parseFromString(...args)); } } Object.defineProperties(DOMParser.prototype, { parseFromString: { enumerable: true }, [Symbol.toStringTag]: { value: "DOMParser", configurable: true } }); const iface = { // When an interface-module that implements this interface as a mixin is loaded, it will append its own `.is()` // method into this array. It allows objects that directly implements *those* interfaces to be recognized as // implementing this mixin interface. _mixedIntoPredicates: [], is(obj) { if (obj) { if (utils.hasOwn(obj, impl) && obj[impl] instanceof Impl.implementation) { return true; } for (const isMixedInto of module.exports._mixedIntoPredicates) { if (isMixedInto(obj)) { return true; } } } return false; }, isImpl(obj) { if (obj) { if (obj instanceof Impl.implementation) { return true; } const wrapper = utils.wrapperForImpl(obj); for (const isMixedInto of module.exports._mixedIntoPredicates) { if (isMixedInto(wrapper)) { return true; } } } return false; }, convert(obj, { context = "The provided value" } = {}) { if (module.exports.is(obj)) { return utils.implForWrapper(obj); } throw new TypeError(`${context} is not of type 'DOMParser'.`); }, create(constructorArgs, privateData) { let obj = Object.create(DOMParser.prototype); obj = this.setup(obj, constructorArgs, privateData); return obj; }, createImpl(constructorArgs, privateData) { let obj = Object.create(DOMParser.prototype); obj = this.setup(obj, constructorArgs, privateData); return utils.implForWrapper(obj); }, _internalSetup(obj) {}, setup(obj, constructorArgs, privateData) { if (!privateData) privateData = {}; privateData.wrapper = obj; this._internalSetup(obj); Object.defineProperty(obj, impl, { value: new Impl.implementation(constructorArgs, privateData), configurable: true }); obj[impl][utils.wrapperSymbol] = obj; if (Impl.init) { Impl.init(obj[impl], privateData); } return obj; }, interface: DOMParser, expose: { Window: { DOMParser } } }; // iface module.exports = iface; const Impl = require("../domparsing/DOMParser-impl.js");
PypiClean
/inspursmsdk-2.1.2-py3-none-any.whl/ism.py
# -*- coding:utf-8 -*- from __future__ import (absolute_import, division, print_function) __metaclass__ = type import os import sys import json import signal from importlib import import_module from inspur_sm_sdk.command import RestFunc import time import collections try: from inspur_sm_sdk.util import configUtil, HostTypeJudge, parameterConversion, RequestClient ISM_EXIST = True except ImportError: ISM_EXIST = False sys.path.append(os.path.join(sys.path[0], "interface")) current_time = time.strftime( '%Y-%m-%d %H:%M:%S', time.localtime( time.time())) __version__ = '2.1.2' ERR_dict = { 'ERR_CODE_CMN_FAIL': 'data acquisition exception', 'ERR_CODE_PARAM_NULL': 'parameter is null', 'ERR_CODE_INPUT_ERROR': 'parameter error', 'ERR_CODE_INTF_FAIL': 'create link exception', 'ERR_CODE_INTERNAL_ERROR': 'internal error', 'ERR_CODE_ALLOC_MEM': 'allocated memory exception', 'ERR_CODE_NETWORK_CONNECT_FAIL': 'network connection failed', 'ERR_CODE_AUTH_NAME_OR_PWD_ERROR': 'incorrect user name or password', 'ERR_CODE_USER_NOT_EXIST': 'user not exist' } def main(params): def logout(signum, frame): if hasattr(client, "header"): RestFunc.logout(client) signal.signal(signal.SIGINT, logout) signal.signal(signal.SIGTERM, logout) signal.signal(signal.SIGABRT, logout) # windows下注释下面两行 signal.signal(signal.SIGHUP, logout) signal.signal(signal.SIGQUIT, logout) res = {} if not ISM_EXIST: res['State'] = "Failure" res['Message'] = ["Please install the requests library"] return res param = parameterConversion.getParam(params) args = dict_to_object(param) args.port = None configutil = configUtil.configUtil() if args.subcommand is not None and args.subcommand == 'support_model': res['State'] = "Success" res['Message'] = configutil.getModelSupport() return res # 使用fru获取机型信息 hostTypeClient = HostTypeJudge.HostTypeClient() productName, firmwareVersion = hostTypeClient.getProductNameByIPMI(args) if productName is None: res['State'] = "Not Support" res['Message'] = ["cannot get productName"] return res elif productName in ERR_dict: res['State'] = "Failure" res['Message'] = [ERR_dict.get(productName)] return res if firmwareVersion is None: res['State'] = "Failure" res['Message'] = ["cannot get BMC version"] return args.hostPlatform = productName configutil = configUtil.configUtil() impl = configutil.getRouteOption(productName, firmwareVersion) if 'Error' in impl: res['State'] = "Failure" res['Message'] = [impl] return res # if 'M5' not in impl and 'M6' not in impl and 'A5' not in impl and 'A6' not in impl: # res['State'] = "Failure" # res['Message'] = ['Not Support'] # return res module_impl = 'inspur_sm_sdk.interface.' 
+ impl obj = import_module(module_impl) targetclass = getattr(obj, impl) obj = targetclass() if args.subcommand is None: res['State'] = "Failure" res['Message'] = ["please input a subcommand"] return res targetMed = getattr(obj, args.subcommand) client = RequestClient.RequestClient() client.setself( args.host, args.username, args.passcode, args.port, 'lanplus') try: resultJson = targetMed(client, args) except Exception as e: # 保留日志 # import traceback # utool_path = os.path.dirname(os.path.abspath(__file__)) # # print(utool_path) # log_path = os.path.join(utool_path, "log") # if not os.path.exists(log_path): # os.makedirs(log_path) # # TIME # localtime = time.localtime() # f_localdate = time.strftime("%Y-%m-%d", localtime) # f_localtime = time.strftime("%Y-%m-%dT%H:%M:%S ", localtime) # # log_file = os.path.join(log_path, f_localdate) # args.items() # res_info = "[" + args.subcommand + "]" + traceback.format_exc() # with open(log_file, 'a+') as logfile: # utoollog = "[ERROR]" + f_localtime + res_info + json.dumps(param, default=lambda o: o.__dict__, indent=4, ensure_ascii=True) # logfile.write(utoollog) # logfile.write("\n") res['State'] = "Failure" res['Message'] = ["Error occurs, request failed..."] return res sortedRes = collections.OrderedDict() sortedRes["State"] = resultJson.State sortedRes["Message"] = resultJson.Message return sortedRes class Dict(dict): __setattr__ = dict.__setitem__ __getattr__ = dict.__getitem__ def dict_to_object(dictobj): if not isinstance(dictobj, dict): return dictobj inst = Dict() for k, v in dictobj.items(): if k == 'password': k = 'passcode' inst[k] = dict_to_object(v) return inst
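# Hedged usage sketch: the host and credentials below are placeholders, and the
# exact key names expected by parameterConversion.getParam() are an assumption.
# 'support_model' is handled early in main() and returns the supported-model
# list without contacting a BMC.
if __name__ == "__main__":
    result = main({
        "host": "192.168.1.100",
        "username": "admin",
        "password": "admin",
        "subcommand": "support_model",
    })
    print(result["State"])
    print(result["Message"])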
PypiClean
/gamification-engine-0.4.0.tar.gz/gamification-engine-0.4.0/gengine/app/jsscripts/node_modules/webpack/lib/optimize/AggressiveSplittingPlugin.js
"use strict"; const identifierUtils = require("../util/identifier"); function toIndexOf(list) { return function(item) { return list.indexOf(item); }; } function toChunkModuleIndices(modules) { return function(idx) { return modules[idx]; }; } function moveModuleBetween(oldChunk, newChunk) { return function(module) { oldChunk.moveModule(module, newChunk); }; } function isNotAEntryModule(entryModule) { return function(module) { return entryModule !== module; }; } function copyWithReason(obj) { const newObj = {}; Object.keys(obj).forEach((key) => { newObj[key] = obj[key]; }); if(!newObj.reasons || newObj.reasons.indexOf("aggressive-splitted") < 0) newObj.reasons = (newObj.reasons || []).concat("aggressive-splitted"); return newObj; } class AggressiveSplittingPlugin { constructor(options) { this.options = options || {}; if(typeof this.options.minSize !== "number") this.options.minSize = 30 * 1024; if(typeof this.options.maxSize !== "number") this.options.maxSize = 50 * 1024; if(typeof this.options.chunkOverhead !== "number") this.options.chunkOverhead = 0; if(typeof this.options.entryChunkMultiplicator !== "number") this.options.entryChunkMultiplicator = 1; } apply(compiler) { compiler.plugin("compilation", (compilation) => { compilation.plugin("optimize-chunks-advanced", (chunks) => { const savedSplits = compilation.records && compilation.records.aggressiveSplits || []; const usedSplits = compilation._aggressiveSplittingSplits ? savedSplits.concat(compilation._aggressiveSplittingSplits) : savedSplits; const minSize = this.options.minSize; const maxSize = this.options.maxSize; // 1. try to restore to recorded splitting for(let j = 0; j < usedSplits.length; j++) { const splitData = usedSplits[j]; for(let i = 0; i < chunks.length; i++) { const chunk = chunks[i]; const chunkModuleNames = chunk.modules.map(m => identifierUtils.makePathsRelative(compiler.context, m.identifier())); if(chunkModuleNames.length < splitData.modules.length) continue; const moduleIndicies = splitData.modules.map(toIndexOf(chunkModuleNames)); const hasAllModules = moduleIndicies.every((idx) => { return idx >= 0; }); if(hasAllModules) { if(chunkModuleNames.length > splitData.modules.length) { const selectedModules = moduleIndicies.map(toChunkModuleIndices(chunk.modules)); const newChunk = compilation.addChunk(); selectedModules.forEach(moveModuleBetween(chunk, newChunk)); chunk.split(newChunk); chunk.name = null; newChunk._fromAggressiveSplitting = true; if(j < savedSplits.length) newChunk._fromAggressiveSplittingIndex = j; if(splitData.id !== null && splitData.id !== undefined) { newChunk.id = splitData.id; } newChunk.origins = chunk.origins.map(copyWithReason); chunk.origins = chunk.origins.map(copyWithReason); return true; } else { if(j < savedSplits.length) chunk._fromAggressiveSplittingIndex = j; chunk.name = null; if(splitData.id !== null && splitData.id !== undefined) { chunk.id = splitData.id; } } } } } // 2. 
for any other chunk which isn't splitted yet, split it for(let i = 0; i < chunks.length; i++) { const chunk = chunks[i]; const size = chunk.size(this.options); if(size > maxSize && chunk.modules.length > 1) { const newChunk = compilation.addChunk(); const modules = chunk.modules .filter(isNotAEntryModule(chunk.entryModule)) .sort((a, b) => { a = a.identifier(); b = b.identifier(); if(a > b) return 1; if(a < b) return -1; return 0; }); for(let k = 0; k < modules.length; k++) { chunk.moveModule(modules[k], newChunk); const newSize = newChunk.size(this.options); const chunkSize = chunk.size(this.options); // break early if it's fine if(chunkSize < maxSize && newSize < maxSize && newSize >= minSize && chunkSize >= minSize) break; if(newSize > maxSize && k === 0) { // break if there is a single module which is bigger than maxSize break; } if(newSize > maxSize || chunkSize < minSize) { // move it back newChunk.moveModule(modules[k], chunk); // check if it's fine now if(newSize < maxSize && newSize >= minSize && chunkSize >= minSize) break; } } if(newChunk.modules.length > 0) { chunk.split(newChunk); chunk.name = null; newChunk.origins = chunk.origins.map(copyWithReason); chunk.origins = chunk.origins.map(copyWithReason); compilation._aggressiveSplittingSplits = (compilation._aggressiveSplittingSplits || []).concat({ modules: newChunk.modules.map(m => identifierUtils.makePathsRelative(compiler.context, m.identifier())) }); return true; } else { chunks.splice(chunks.indexOf(newChunk), 1); } } } }); compilation.plugin("record-hash", (records) => { // 3. save to made splittings to records const minSize = this.options.minSize; if(!records.aggressiveSplits) records.aggressiveSplits = []; compilation.chunks.forEach((chunk) => { if(chunk.hasEntryModule()) return; const size = chunk.size(this.options); const incorrectSize = size < minSize; const modules = chunk.modules.map(m => identifierUtils.makePathsRelative(compiler.context, m.identifier())); if(typeof chunk._fromAggressiveSplittingIndex === "undefined") { if(incorrectSize) return; chunk.recorded = true; records.aggressiveSplits.push({ modules: modules, hash: chunk.hash, id: chunk.id }); } else { const splitData = records.aggressiveSplits[chunk._fromAggressiveSplittingIndex]; if(splitData.hash !== chunk.hash || incorrectSize) { if(chunk._fromAggressiveSplitting) { chunk._aggressiveSplittingInvalid = true; splitData.invalid = true; } else { splitData.hash = chunk.hash; } } } }); records.aggressiveSplits = records.aggressiveSplits.filter((splitData) => { return !splitData.invalid; }); }); compilation.plugin("need-additional-seal", (callback) => { const invalid = compilation.chunks.some((chunk) => { return chunk._aggressiveSplittingInvalid; }); if(invalid) return true; }); }); } } module.exports = AggressiveSplittingPlugin;
PypiClean
/ScopeFoundry-1.2.2.tar.gz/ScopeFoundry-1.2.2/measurement.py
from __future__ import absolute_import, print_function from qtpy import QtCore, QtWidgets import threading import time from .logged_quantity import LQCollection from .helper_funcs import load_qt_ui_file from collections import OrderedDict import pyqtgraph as pg from ScopeFoundry.helper_funcs import get_logger_from_class import traceback import sys class MeasurementQThread(QtCore.QThread): def __init__(self, measurement, parent=None): super(MeasurementQThread, self).__init__(parent) self.measurement = measurement def run(self): self.measurement._thread_run() class Measurement(QtCore.QObject): """ Base class for ScopeFoundry Measurement objects to subclass, implement :meth:`setup`, :meth:`run` for measurements with graphical interfaces, subclass and additionally implement :meth:`setup_figure`, :meth:`update_display` Run States: stop_first -> run_starting -> run_pre_run --> run_thread_starting --> run_thread_run --> run_thread_end --> run_post_run --> stop_success | stop_interrupted | stop_failure """ measurement_sucessfully_completed = QtCore.Signal(()) """signal sent when full measurement is complete""" measurement_interrupted = QtCore.Signal(()) """signal sent when measurement is complete due to an interruption""" #measurement_state_changed = QtCore.Signal(bool) # signal sent when measurement started or stopped def __init__(self, app, name=None): """ :type app: BaseMicroscopeApp """ QtCore.QObject.__init__(self) self.log = get_logger_from_class(self) if not hasattr(self, 'name'): self.name = self.__class__.__name__ if name is not None: self.name = name self.app = app self.display_update_period = 0.1 # seconds self.display_update_timer = QtCore.QTimer(self) self.display_update_timer.timeout.connect(self._on_display_update_timer) self.acq_thread = None self.interrupt_measurement_called = False #self.logged_quantities = OrderedDict() self.settings = LQCollection() self.operations = OrderedDict() self.activation = self.settings.New('activation', dtype=bool, ro=False, description=f'<i>{self.name}</i>') # does the user want to the thread to be running #self.running = self.settings.New('running', dtype=bool, ro=True) # is the thread actually running? self.run_state = self.settings.New('run_state', dtype=str, initial='stop_first') self.progress = self.settings.New('progress', dtype=float, unit="%", si=False, ro=True) self.settings.New('profile', dtype=bool, initial=False) # Run a profile on the run to find performance problems self.activation.updated_value[bool].connect(self.start_stop) self.add_operation("start", self.start) self.add_operation("interrupt", self.interrupt) #self.add_operation('terminate', self.terminate) #self.add_operation("setup", self.setup) #self.add_operation("setup_figure", self.setup_figure) self.add_operation("update_display", self.update_display) self.add_operation('show_ui', self.show_ui) self.add_operation('Reload_Code', self.reload_code) if hasattr(self, 'ui_filename'): self.load_ui() self.setup() def setup(self): """Override this to set up logged quantities and gui connections Runs during __init__, before the hardware connection is established Should generate desired LoggedQuantities""" pass #raise NotImplementedError() def setup_figure(self): """ Override setup_figure to build graphical interfaces. This function is run on ScopeFoundry startup. 
""" self.log.info("Empty setup_figure called") pass def start(self): """ Starts the measurement calls *pre_run* creates acquisition thread runs thread starts display timer which calls update_display periodically calls post run when thread is finished """ #self.start_stop(True) self.activation.update_value(True) def _start(self): """ INTERNAL DO NOT CALL DIRECTLY Starts the measurement calls *pre_run* creates acquisition thread runs thread starts display timer which calls update_display periodically connects a signal/slot that calls post run when thread is finished """ self.interrupt_measurement_called = False self.run_state.update_value('run_starting') self.log.info("measurement {} start called from thread: {}".format(self.name, repr(threading.get_ident()))) if self.is_thread_alive(): raise RuntimeError("Cannot start a new measurement while still measuring {} {}".format(self.acq_thread, self.is_measuring())) # remove previous qthread with delete later #if self.acq_thread is not None: # self.acq_thread.deleteLater() self.acq_thread = MeasurementQThread(self) self.acq_thread.finished.connect(self._call_post_run) #self.measurement_state_changed.emit(True) #self.running.update_value(True) self.run_state.update_value('run_prerun') try: self.pre_run() except Exception as err: #print("err", err) self.run_state.update_value('stop_failure') self.activation.update_value(False) raise self.run_state.update_value('run_thread_starting') self.acq_thread.start() self.run_state.update_value('run_thread_run') self.t_start = time.time() self.display_update_timer.start(int(self.display_update_period*1000)) def pre_run(self): """Override this method to enable main-thread initialization prior to measurement thread start""" pass def run(self): """ *run* method runs in an separate thread and is used for data acquisition No GUI updates should occur within the *run* function, any Qt related GUI work should occur in :meth:`update_display` Don't call this directly! """ if hasattr(self, '_run'): self.log.warning("warning _run is deprecated, use run") self._run() else: raise NotImplementedError("Measurement {}.run() not defined".format(self.name)) @QtCore.Slot() def _call_post_run(self): """ Don't call this directly! """ self.run_state.update_value('run_post_run') try: self.post_run() except Exception as err: self.end_state = 'stop_failure' raise finally: self.activation.update_value(False) self.run_state.update_value(self.end_state) def post_run(self): """Override this method to enable main-thread finalization after to measurement thread completes""" pass def _thread_run(self): """ This function governs the behavior of the measurement thread. """ print(self.name, "_thread_run thread_id:", threading.get_ident()) self.set_progress(50.) # set progress bars to default run position at 50% try: if self.settings['profile']: import cProfile profile = cProfile.Profile() profile.enable() self.run() success = True except Exception as err: success = False raise finally: self.run_state.update_value('run_thread_end') #self.running.update_value(False) self.set_progress(0.) 
# set progress bars back to zero #self.measurement_state_changed.emit(False) if self.interrupt_measurement_called: self.measurement_interrupted.emit() self.interrupt_measurement_called = False end_state = 'stop_interrupted' elif not success: end_state = "stop_failure" else: self.measurement_sucessfully_completed.emit() end_state = "stop_success" if self.settings['profile']: profile.disable() profile.print_stats(sort='time') self.end_state = end_state @property def gui(self): self.log.warning("Measurement.gui is deprecated, use Measurement.app " + repr(DeprecationWarning)) return self.app def set_progress(self, pct): """ This function updates the logged quantity progress which is used for the display of progress bars in the UI. ============== ============================================================================================== **Arguments:** pct The percentage of progress given by a measurement module ============== ============================================================================================== """ self.progress.update_value(pct) @QtCore.Slot() def _interrupt(self): """ Kindly ask the measurement to stop. This raises the :attr:`interrupt_measurement_called` flag To actually stop, the threaded :meth:`run` method must check for this flag and exit """ self.log.info("measurement {} stopping {}".format(self.name, self.settings['run_state'])) # print("{} interrupt(): run_state={}".format(self.name, self.settings['run_state'])) if self.settings['run_state'].startswith('run'): self.log.info("measurement {} interrupt called".format(self.name)) self.interrupt_measurement_called = True #self.activation.update_value(False) #Make sure display is up to date #self._on_display_update_timer() def interrupt(self): self.activation.update_value(False) def terminate(self): """ Terminate MeasurementQThread. Usually a bad idea: This will not clean up the thread correctly and usually requires a reboot of the App """ self.acq_thread.terminate() def start_stop(self, start): """ Use boolean *start* to either start (True) or interrupt (False) measurement. Test. """ self.log.info("{} start_stop {}".format(self.name, start)) if start: self._start() else: self._interrupt() def is_measuring(self): """ Returns whether the acquisition thread is running """ #print(self.name, "is_measuring run_state", self.settings['run_state']) return self.settings['run_state'].startswith('run') """ if self.acq_thread is None: self.running.update_value(False) self.activation.update_value(False) self.settings['progress'] = 0.0 return False else: #resp = self.acq_thread.is_alive() resp = self.acq_thread.isRunning() self.running.update_value(resp) return resp """ def is_thread_alive(self): if self.acq_thread is None: return False else: #resp = self.acq_thread.is_alive() resp = self.acq_thread.isRunning() return resp def update_display(self): "Override this function to provide figure updates when the display timer runs" pass @QtCore.Slot() def _on_display_update_timer(self): try: self.update_display() except Exception as err: exc_type, exc_value, exc_traceback = sys.exc_info() self.log.error("{} Failed to update figure1: {}. 
{}".format(self.name, err, traceback.format_exception(exc_type, exc_value, exc_traceback))) finally: if not self.is_measuring(): self.display_update_timer.stop() def add_logged_quantity(self, name, **kwargs): """ Create a new :class:`LoggedQuantity` and adds it to the measurement's :attr:`settings` (:class:`LQCollection`) """ lq = self.settings.New(name=name, **kwargs) return lq def add_operation(self, name, op_func): """ Used to create a logged quantity connection between a button in the Measurement tree and a function. ============== ================= **type name:** **type op_func:** str QtCore.Slot ============== ================= """ self.operations[name] = op_func def start_nested_measure_and_wait(self, measure, nested_interrupt = True, polling_func=None, polling_time=0.1): """ Start another nested measurement *measure* and wait until completion. Should be called with run function. Optionally it can call a polling function *polling_func* with no arguments at an interval *polling_time* in seconds. if *nested_interrupt* is True then interrupting the nested *measure* will also interrupt the outer measurement. *nested_interrupt* defaults to True returns True if successful run, otherwise returns false for a run failure or interrupted measurement """ self.log.info("Starting nested measurement {} from {} on thread id {}".format(measure.name, self.name, threading.get_ident())) measure.start() # Wait until measurement has started, timeout of 1 second t0 = time.time() while not measure.is_measuring(): time.sleep(0.010) if time.time() - t0 > 1.0: print(self.name, ': nested measurement', measure.name, 'has not started before timeout', ) return measure.settings['run_state'] == 'stop_success' last_polling = time.time() # Now that it is running, wait until done while measure.is_measuring(): if self.interrupt_measurement_called: #print('nest outer interrupted', self.interrupt_measurement_called) measure.interrupt() if measure.interrupt_measurement_called and nested_interrupt: # THIS IS MAYBE UNSAFE???: measure.interrupt_measurement_called might be also TRUE if measure finished successfully? # IDEA to TEST: also check the measure.settings['run_state'].startswidth('stop') print("nested interrupt bubbling up", measure.interrupt_measurement_called, self.interrupt_measurement_called) self.interrupt() time.sleep(0.010) # polling if measure.settings['run_state'] == 'run_thread_run': if polling_func: t = time.time() if t - last_polling > polling_time: try: polling_func() except Exception as err: self.log.error('start_nested_measure_and_wait polling failed {}'.format(err)) last_polling = t #returns True if successful run, otherwise, #returns false for a run failure or interrupted measurement return measure.settings['run_state'] == 'stop_success' def load_ui(self, ui_fname=None): """ Loads and shows user interface. ============== =============================================================== **Arguments:** ui_fname filename of user interface file (usually made with Qt Designer) ============== =============================================================== """ # TODO destroy and rebuild UI if it already exists if ui_fname is not None: self.ui_filename = ui_fname # Load Qt UI from .ui file self.ui = load_qt_ui_file(self.ui_filename) #self.show_ui() def show_ui(self): """ Shows the graphical user interface of this measurement. 
:attr:`ui` """ self.app.bring_measure_ui_to_front(self) # if self.app.mdi and self.ui.parent(): # self.ui.parent().raise_() # return # self.ui.show() # self.ui.activateWindow() # self.ui.raise_() #just to be sure it's on top # if self.app.mdi and self.ui.parent(): # self.ui.parent().raise_() def new_control_widgets(self): self.controls_groupBox = QtWidgets.QGroupBox(self.name) self.controls_formLayout = QtWidgets.QFormLayout() self.controls_groupBox.setLayout(self.controls_formLayout) self.control_widgets = OrderedDict() for lqname, lq in self.settings.as_dict().items(): #: :type lq: LoggedQuantity if lq.choices is not None: widget = QtWidgets.QComboBox() elif lq.dtype in [int, float]: if lq.si: widget = pg.SpinBox() else: widget = QtWidgets.QDoubleSpinBox() elif lq.dtype in [bool]: widget = QtWidgets.QCheckBox() elif lq.dtype in [str]: widget = QtWidgets.QLineEdit() lq.connect_bidir_to_widget(widget) # Add to formlayout self.controls_formLayout.addRow(lqname, widget) self.control_widgets[lqname] = widget self.op_buttons = OrderedDict() for op_name, op_func in self.operations.items(): op_button = QtWidgets.QPushButton(op_name) op_button.clicked.connect(op_func) self.controls_formLayout.addRow(op_name, op_button) return self.controls_groupBox def add_widgets_to_tree(self, tree): """ Adds Measurement items and their controls to Measurements tree in the user interface. """ #if tree is None: # tree = self.app.ui.measurements_treeWidget tree.setColumnCount(2) tree.setHeaderLabels(["Measurements", "Value"]) self.tree_item = QtWidgets.QTreeWidgetItem(tree, [self.name, ""]) tree.insertTopLevelItem(0, self.tree_item) #self.tree_item.setFirstColumnSpanned(True) self.tree_progressBar = QtWidgets.QProgressBar() tree.setItemWidget(self.tree_item, 1, self.tree_progressBar) #self.progress.updated_value.connect(self.tree_progressBar.setValue) self.progress.connect_to_widget(self.tree_progressBar) # Add logged quantities to tree self.settings.add_widgets_to_subtree(self.tree_item) # Add operation buttons to tree self.op_buttons = OrderedDict() for op_name, op_func in self.operations.items(): op_button = QtWidgets.QPushButton(op_name) op_button.clicked.connect(op_func) self.op_buttons[op_name] = op_button #self.controls_formLayout.addRow(op_name, op_button) op_tree_item = QtWidgets.QTreeWidgetItem(self.tree_item, [op_name, ""]) tree.setItemWidget(op_tree_item, 1, op_button) def web_ui(self): return "Hardware {}".format(self.name) def reload_code(self): import inspect import xreload mod = inspect.getmodule(self) x = xreload.xreload(mod) print("Reloading from code", mod, x)
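# Hedged sketch of a minimal Measurement subclass. The class name and the
# 'delay' setting are hypothetical; the pattern (implement setup() and run(),
# poll interrupt_measurement_called, report progress) follows the base class
# defined above.
class SimpleDelayMeasurement(Measurement):

    name = "simple_delay"

    def setup(self):
        self.settings.New('delay', dtype=float, initial=0.1, unit='s')

    def run(self):
        pct = 0.0
        while not self.interrupt_measurement_called:
            time.sleep(self.settings['delay'])
            pct = (pct + 1.0) % 100.0
            self.set_progress(pct)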
PypiClean
/openapi_jira_client-3.0.1-py3-none-any.whl/openapi_jira_client/api/issue_custom_field_contexts/add_issue_types_to_context.py
from typing import Any, Dict, List, Optional, Union, cast import httpx from attr import asdict from ...client import AuthenticatedClient, Client from ...models.issue_type_ids import IssueTypeIds from ...types import UNSET, Response def _get_kwargs( *, client: AuthenticatedClient, field_id: str, context_id: int, json_body: IssueTypeIds, ) -> Dict[str, Any]: url = "{}/rest/api/3/field/{fieldId}/context/{contextId}/issuetype".format( client.base_url, fieldId=field_id, contextId=context_id ) headers: Dict[str, Any] = client.get_headers() cookies: Dict[str, Any] = client.get_cookies() json_json_body = json_body.to_dict() return { "url": url, "headers": headers, "cookies": cookies, "timeout": client.get_timeout(), "json": json_json_body, } def _parse_response(*, response: httpx.Response) -> Optional[Union[None, None, None, None, None, None]]: if response.status_code == 204: response_204 = None return response_204 if response.status_code == 400: response_400 = None return response_400 if response.status_code == 401: response_401 = None return response_401 if response.status_code == 403: response_403 = None return response_403 if response.status_code == 404: response_404 = None return response_404 if response.status_code == 409: response_409 = None return response_409 return None def _build_response(*, response: httpx.Response) -> Response[Union[None, None, None, None, None, None]]: return Response( status_code=response.status_code, content=response.content, headers=response.headers, parsed=_parse_response(response=response), ) def sync_detailed( *, client: AuthenticatedClient, field_id: str, context_id: int, json_body: IssueTypeIds, ) -> Response[Union[None, None, None, None, None, None]]: kwargs = _get_kwargs( client=client, field_id=field_id, context_id=context_id, json_body=json_body, ) response = httpx.put( **kwargs, ) return _build_response(response=response) def sync( *, client: AuthenticatedClient, field_id: str, context_id: int, json_body: IssueTypeIds, ) -> Optional[Union[None, None, None, None, None, None]]: """Adds issue types to a custom field context, appending the issue types to the issue types list. A custom field context without any issue types applies to all issue types. Adding issue types to such a custom field context would result in it applying to only the listed issue types. If any of the issue types exists in the custom field context, the operation fails and no issue types are added. **[Permissions](#permissions) required:** *Administer Jira* [global permission](https://confluence.atlassian.com/x/x4dKLg).""" return sync_detailed( client=client, field_id=field_id, context_id=context_id, json_body=json_body, ).parsed async def asyncio_detailed( *, client: AuthenticatedClient, field_id: str, context_id: int, json_body: IssueTypeIds, ) -> Response[Union[None, None, None, None, None, None]]: kwargs = _get_kwargs( client=client, field_id=field_id, context_id=context_id, json_body=json_body, ) async with httpx.AsyncClient() as _client: response = await _client.put(**kwargs) return _build_response(response=response) async def asyncio( *, client: AuthenticatedClient, field_id: str, context_id: int, json_body: IssueTypeIds, ) -> Optional[Union[None, None, None, None, None, None]]: """Adds issue types to a custom field context, appending the issue types to the issue types list. A custom field context without any issue types applies to all issue types. Adding issue types to such a custom field context would result in it applying to only the listed issue types. 
If any of the issue types exists in the custom field context, the operation fails and no issue types are added. **[Permissions](#permissions) required:** *Administer Jira* [global permission](https://confluence.atlassian.com/x/x4dKLg).""" return ( await asyncio_detailed( client=client, field_id=field_id, context_id=context_id, json_body=json_body, ) ).parsed
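# Hedged usage sketch: AuthenticatedClient and IssueTypeIds are already imported
# at the top of this module; their constructor arguments below follow common
# openapi-python-client conventions and are assumptions, as are the field id and
# context id values.
if __name__ == "__main__":
    jira_client = AuthenticatedClient(
        base_url="https://your-domain.atlassian.net", token="API_TOKEN"
    )
    response = sync_detailed(
        client=jira_client,
        field_id="customfield_10010",
        context_id=10001,
        json_body=IssueTypeIds(issue_type_ids=["10000", "10002"]),
    )
    print(response.status_code)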
PypiClean
/odooku_odoo_base-11.0.7-py35-none-any.whl/odoo/addons/web/static/lib/jquery.mjs.nestedSortable/jquery.mjs.nestedSortable.js
(function( factory ) { "use strict"; if ( typeof define === "function" && define.amd ) { // AMD. Register as an anonymous module. define([ "jquery", "jquery-ui/sortable" ], factory ); } else { // Browser globals factory( window.jQuery ); } }(function($) { "use strict"; function isOverAxis( x, reference, size ) { return ( x > reference ) && ( x < ( reference + size ) ); } $.widget("mjs.nestedSortable", $.extend({}, $.ui.sortable.prototype, { options: { disableParentChange: false, doNotClear: false, expandOnHover: 700, isAllowed: function() { return true; }, isTree: false, listType: "ol", maxLevels: 0, protectRoot: false, rootID: null, rtl: false, startCollapsed: false, tabSize: 20, branchClass: "mjs-nestedSortable-branch", collapsedClass: "mjs-nestedSortable-collapsed", disableNestingClass: "mjs-nestedSortable-no-nesting", errorClass: "mjs-nestedSortable-error", expandedClass: "mjs-nestedSortable-expanded", hoveringClass: "mjs-nestedSortable-hovering", leafClass: "mjs-nestedSortable-leaf", disabledClass: "mjs-nestedSortable-disabled" }, _create: function() { var self = this, err; this.element.data("ui-sortable", this.element.data("mjs-nestedSortable")); // mjs - prevent browser from freezing if the HTML is not correct if (!this.element.is(this.options.listType)) { err = "nestedSortable: " + "Please check that the listType option is set to your actual list type"; throw new Error(err); } // if we have a tree with expanding/collapsing functionality, // force 'intersect' tolerance method if (this.options.isTree && this.options.expandOnHover) { this.options.tolerance = "intersect"; } $.ui.sortable.prototype._create.apply(this, arguments); // prepare the tree by applying the right classes // (the CSS is responsible for actual hide/show functionality) if (this.options.isTree) { $(this.items).each(function() { var $li = this.item, hasCollapsedClass = $li.hasClass(self.options.collapsedClass), hasExpandedClass = $li.hasClass(self.options.expandedClass); if ($li.children(self.options.listType).length) { $li.addClass(self.options.branchClass); // expand/collapse class only if they have children if ( !hasCollapsedClass && !hasExpandedClass ) { if (self.options.startCollapsed) { $li.addClass(self.options.collapsedClass); } else { $li.addClass(self.options.expandedClass); } } } else { $li.addClass(self.options.leafClass); } }); } }, _destroy: function() { this.element .removeData("mjs-nestedSortable") .removeData("ui-sortable"); return $.ui.sortable.prototype._destroy.apply(this, arguments); }, _mouseDrag: function(event) { var i, item, itemElement, intersection, self = this, o = this.options, scrolled = false, $document = $(document), previousTopOffset, parentItem, level, childLevels, itemAfter, itemBefore, newList, method, a, previousItem, nextItem, helperIsNotSibling; //Compute the helpers position this.position = this._generatePosition(event); this.positionAbs = this._convertPositionTo("absolute"); if (!this.lastPositionAbs) { this.lastPositionAbs = this.positionAbs; } //Do scrolling if (this.options.scroll) { if (this.scrollParent[0] !== document && this.scrollParent[0].tagName !== "HTML") { if ( ( this.overflowOffset.top + this.scrollParent[0].offsetHeight ) - event.pageY < o.scrollSensitivity ) { scrolled = this.scrollParent.scrollTop() + o.scrollSpeed; this.scrollParent.scrollTop(scrolled); } else if ( event.pageY - this.overflowOffset.top < o.scrollSensitivity ) { scrolled = this.scrollParent.scrollTop() - o.scrollSpeed; this.scrollParent.scrollTop(scrolled); } if ( ( this.overflowOffset.left + 
this.scrollParent[0].offsetWidth ) - event.pageX < o.scrollSensitivity ) { scrolled = this.scrollParent.scrollLeft() + o.scrollSpeed; this.scrollParent.scrollLeft(scrolled); } else if ( event.pageX - this.overflowOffset.left < o.scrollSensitivity ) { scrolled = this.scrollParent.scrollLeft() - o.scrollSpeed; this.scrollParent.scrollLeft(scrolled); } } else { if ( event.pageY - $document.scrollTop() < o.scrollSensitivity ) { scrolled = $document.scrollTop() - o.scrollSpeed; $document.scrollTop(scrolled); } else if ( $(window).height() - ( event.pageY - $document.scrollTop() ) < o.scrollSensitivity ) { scrolled = $document.scrollTop() + o.scrollSpeed; $document.scrollTop(scrolled); } if ( event.pageX - $document.scrollLeft() < o.scrollSensitivity ) { scrolled = $document.scrollLeft() - o.scrollSpeed; $document.scrollLeft(scrolled); } else if ( $(window).width() - ( event.pageX - $document.scrollLeft() ) < o.scrollSensitivity ) { scrolled = $document.scrollLeft() + o.scrollSpeed; $document.scrollLeft(scrolled); } } if (scrolled !== false && $.ui.ddmanager && !o.dropBehaviour) { $.ui.ddmanager.prepareOffsets(this, event); } } //Regenerate the absolute position used for position checks this.positionAbs = this._convertPositionTo("absolute"); // mjs - find the top offset before rearrangement, previousTopOffset = this.placeholder.offset().top; //Set the helper position if (!this.options.axis || this.options.axis !== "y") { this.helper[0].style.left = this.position.left + "px"; } if (!this.options.axis || this.options.axis !== "x") { this.helper[0].style.top = (this.position.top) + "px"; } // mjs - check and reset hovering state at each cycle this.hovering = this.hovering ? this.hovering : null; this.mouseentered = this.mouseentered ? this.mouseentered : false; // mjs - let's start caching some variables (function() { var _parentItem = this.placeholder.parent().parent(); if (_parentItem && _parentItem.closest(".ui-sortable").length) { parentItem = _parentItem; } }.call(this)); level = this._getLevel(this.placeholder); childLevels = this._getChildLevels(this.helper); newList = document.createElement(o.listType); //Rearrange for (i = this.items.length - 1; i >= 0; i--) { //Cache variables and intersection, continue if no intersection item = this.items[i]; itemElement = item.item[0]; intersection = this._intersectsWithPointer(item); if (!intersection) { continue; } // Only put the placeholder inside the current Container, skip all // items form other containers. This works because when moving // an item from one container to another the // currentContainer is switched before the placeholder is moved. // // Without this moving items in "sub-sortables" can cause the placeholder to jitter // beetween the outer and inner container. if (item.instance !== this.currentContainer) { continue; } // No action if intersected item is disabled // and the element above or below in the direction we're going is also disabled if (itemElement.className.indexOf(o.disabledClass) !== -1) { // Note: intersection hardcoded direction values from // jquery.ui.sortable.js:_intersectsWithPointer if (intersection === 2) { // Going down itemAfter = this.items[i + 1]; if (itemAfter && itemAfter.item.hasClass(o.disabledClass)) { continue; } } else if (intersection === 1) { // Going up itemBefore = this.items[i - 1]; if (itemBefore && itemBefore.item.hasClass(o.disabledClass)) { continue; } } } method = intersection === 1 ? 
"next" : "prev"; // cannot intersect with itself // no useless actions that have been done before // no action if the item moved is the parent of the item checked if (itemElement !== this.currentItem[0] && this.placeholder[method]()[0] !== itemElement && !$.contains(this.placeholder[0], itemElement) && ( this.options.type === "semi-dynamic" ? !$.contains(this.element[0], itemElement) : true ) ) { // mjs - we are intersecting an element: // trigger the mouseenter event and store this state if (!this.mouseentered) { $(itemElement).mouseenter(); this.mouseentered = true; } // mjs - if the element has children and they are hidden, // show them after a delay (CSS responsible) if (o.isTree && $(itemElement).hasClass(o.collapsedClass) && o.expandOnHover) { if (!this.hovering) { $(itemElement).addClass(o.hoveringClass); this.hovering = window.setTimeout(function() { $(itemElement) .removeClass(o.collapsedClass) .addClass(o.expandedClass); self.refreshPositions(); self._trigger("expand", event, self._uiHash()); }, o.expandOnHover); } } this.direction = intersection === 1 ? "down" : "up"; // mjs - rearrange the elements and reset timeouts and hovering state if (this.options.tolerance === "pointer" || this._intersectsWithSides(item)) { $(itemElement).mouseleave(); this.mouseentered = false; $(itemElement).removeClass(o.hoveringClass); if (this.hovering) { window.clearTimeout(this.hovering); } this.hovering = null; // mjs - do not switch container if // it's a root item and 'protectRoot' is true // or if it's not a root item but we are trying to make it root if (o.protectRoot && !( this.currentItem[0].parentNode === this.element[0] && // it's a root item itemElement.parentNode !== this.element[0] // it's intersecting a non-root item ) ) { if (this.currentItem[0].parentNode !== this.element[0] && itemElement.parentNode === this.element[0] ) { if ( !$(itemElement).children(o.listType).length) { itemElement.appendChild(newList); if (o.isTree) { $(itemElement) .removeClass(o.leafClass) .addClass(o.branchClass + " " + o.expandedClass); } } if (this.direction === "down") { a = $(itemElement).prev().children(o.listType); } else { a = $(itemElement).children(o.listType); } if (a[0] !== undefined) { this._rearrange(event, null, a); } } else { this._rearrange(event, item); } } else if (!o.protectRoot) { this._rearrange(event, item); } } else { break; } // Clear emtpy ul's/ol's this._clearEmpty(itemElement); this._trigger("change", event, this._uiHash()); break; } } // mjs - to find the previous sibling in the list, // keep backtracking until we hit a valid list item. (function() { var _previousItem = this.placeholder.prev(); if (_previousItem.length) { previousItem = _previousItem; } else { previousItem = null; } }.call(this)); if (previousItem != null) { while ( previousItem[0].nodeName.toLowerCase() !== "li" || previousItem[0].className.indexOf(o.disabledClass) !== -1 || previousItem[0] === this.currentItem[0] || previousItem[0] === this.helper[0] ) { if (previousItem[0].previousSibling) { previousItem = $(previousItem[0].previousSibling); } else { previousItem = null; break; } } } // mjs - to find the next sibling in the list, // keep stepping forward until we hit a valid list item. 
        (function() {
            var _nextItem = this.placeholder.next();
            if (_nextItem.length) {
                nextItem = _nextItem;
            } else {
                nextItem = null;
            }
        }.call(this));

        if (nextItem != null) {
            while (
                nextItem[0].nodeName.toLowerCase() !== "li" ||
                nextItem[0].className.indexOf(o.disabledClass) !== -1 ||
                nextItem[0] === this.currentItem[0] ||
                nextItem[0] === this.helper[0]
            ) {
                if (nextItem[0].nextSibling) {
                    nextItem = $(nextItem[0].nextSibling);
                } else {
                    nextItem = null;
                    break;
                }
            }
        }

        this.beyondMaxLevels = 0;

        // mjs - if the item is moved to the left, send it one level up
        // but only if it's at the bottom of the list
        if (parentItem != null &&
            nextItem == null &&
            !(o.protectRoot && parentItem[0].parentNode == this.element[0]) &&
            (
                o.rtl &&
                (
                    this.positionAbs.left + this.helper.outerWidth() >
                    parentItem.offset().left + parentItem.outerWidth()
                ) ||
                !o.rtl && (this.positionAbs.left < parentItem.offset().left)
            )
        ) {
            parentItem.after(this.placeholder[0]);
            helperIsNotSibling = !parentItem
                .children(o.listItem)
                .children("li:visible:not(.ui-sortable-helper)")
                .length;
            if (o.isTree && helperIsNotSibling) {
                parentItem
                    .removeClass(this.options.branchClass + " " + this.options.expandedClass)
                    .addClass(this.options.leafClass);
            }
            if (typeof parentItem !== 'undefined') {
                this._clearEmpty(parentItem[0]);
            }
            this._trigger("change", event, this._uiHash());

        // mjs - if the item is below a sibling and is moved to the right,
        // make it a child of that sibling
        } else if (previousItem != null &&
            !previousItem.hasClass(o.disableNestingClass) &&
            (
                previousItem.children(o.listType).length &&
                previousItem.children(o.listType).is(":visible") ||
                !previousItem.children(o.listType).length
            ) &&
            !(o.protectRoot && this.currentItem[0].parentNode === this.element[0]) &&
            (
                o.rtl &&
                (
                    this.positionAbs.left + this.helper.outerWidth() <
                    previousItem.offset().left + previousItem.outerWidth() - o.tabSize
                ) ||
                !o.rtl && (this.positionAbs.left > previousItem.offset().left + o.tabSize)
            )
        ) {
            this._isAllowed(previousItem, level, level + childLevels + 1);
            if (!previousItem.children(o.listType).length) {
                previousItem[0].appendChild(newList);
                if (o.isTree) {
                    previousItem
                        .removeClass(o.leafClass)
                        .addClass(o.branchClass + " " + o.expandedClass);
                }
            }
            // mjs - if this item is being moved from the top, add it to the top of the list.
            if (previousTopOffset && (previousTopOffset <= previousItem.offset().top)) {
                previousItem.children(o.listType).prepend(this.placeholder);
            } else {
                // mjs - otherwise, add it to the bottom of the list.
                previousItem.children(o.listType)[0].appendChild(this.placeholder[0]);
            }
            if (typeof parentItem !== 'undefined') {
                this._clearEmpty(parentItem[0]);
            }
            this._trigger("change", event, this._uiHash());
        } else {
            this._isAllowed(parentItem, level, level + childLevels);
        }

        // Post events to containers
        this._contactContainers(event);

        // Interconnect with droppables
        if ($.ui.ddmanager) {
            $.ui.ddmanager.drag(this, event);
        }

        // Call callbacks
        this._trigger("sort", event, this._uiHash());

        this.lastPositionAbs = this.positionAbs;

        return false;
    },

    _mouseStop: function(event) {
        // mjs - if the item is in a position not allowed, send it back
        if (this.beyondMaxLevels) {
            this.placeholder.removeClass(this.options.errorClass);

            if (this.domPosition.prev) {
                $(this.domPosition.prev).after(this.placeholder);
            } else {
                $(this.domPosition.parent).prepend(this.placeholder);
            }

            this._trigger("revert", event, this._uiHash());
        }

        // mjs - clear the hovering timeout, just to be sure
        $("." +
            this.options.hoveringClass)
            .mouseleave()
            .removeClass(this.options.hoveringClass);
        this.mouseentered = false;
        if (this.hovering) {
            window.clearTimeout(this.hovering);
        }
        this.hovering = null;

        this._relocate_event = event;
        this._pid_current = $(this.domPosition.parent).parent().attr("id");
        this._sort_current = this.domPosition.prev ? $(this.domPosition.prev).next().index() : 0;
        $.ui.sortable.prototype._mouseStop.apply(this, arguments);
        // asynchronous execution, @see _clear for the relocate event.
    },

    // mjs - this function is slightly modified
    // to make it easier to hover over a collapsed element and have it expand
    _intersectsWithSides: function(item) {

        var half = this.options.isTree ? .8 : .5,
            isOverBottomHalf = isOverAxis(
                this.positionAbs.top + this.offset.click.top,
                item.top + (item.height * half),
                item.height
            ),
            isOverTopHalf = isOverAxis(
                this.positionAbs.top + this.offset.click.top,
                item.top - (item.height * half),
                item.height
            ),
            isOverRightHalf = isOverAxis(
                this.positionAbs.left + this.offset.click.left,
                item.left + (item.width / 2),
                item.width
            ),
            verticalDirection = this._getDragVerticalDirection(),
            horizontalDirection = this._getDragHorizontalDirection();

        if (this.floating && horizontalDirection) {
            return (
                (horizontalDirection === "right" && isOverRightHalf) ||
                (horizontalDirection === "left" && !isOverRightHalf)
            );
        } else {
            return verticalDirection && (
                (verticalDirection === "down" && isOverBottomHalf) ||
                (verticalDirection === "up" && isOverTopHalf)
            );
        }
    },

    _contactContainers: function() {

        if (this.options.protectRoot &&
            this.currentItem[0].parentNode === this.element[0]
        ) {
            return;
        }

        $.ui.sortable.prototype._contactContainers.apply(this, arguments);
    },

    _clear: function() {
        var i, item;

        $.ui.sortable.prototype._clear.apply(this, arguments);

        // relocate event
        if (!(this._pid_current === this._uiHash().item.parent().parent().attr("id") &&
            this._sort_current === this._uiHash().item.index())
        ) {
            this._trigger("relocate", this._relocate_event, this._uiHash());
        }

        // mjs - clean last empty ul/ol
        for (i = this.items.length - 1; i >= 0; i--) {
            item = this.items[i].item[0];
            this._clearEmpty(item);
        }
    },

    serialize: function(options) {

        var o = $.extend({}, this.options, options),
            items = this._getItemsAsjQuery(o && o.connected),
            str = [];

        $(items).each(function() {
            var res = ($(o.item || this).attr(o.attribute || "id") || "")
                    .match(o.expression || (/(.+)[-=_](.+)/)),
                pid = ($(o.item || this).parent(o.listType)
                    .parent(o.items)
                    .attr(o.attribute || "id") || "")
                    .match(o.expression || (/(.+)[-=_](.+)/));

            if (res) {
                str.push(
                    (
                        (o.key || res[1]) +
                        "[" +
                        (o.key && o.expression ? res[1] : res[2]) +
                        "]"
                    ) +
                    "=" +
                    (pid ? (o.key && o.expression ?
                        pid[1] : pid[2]) : o.rootID));
            }
        });

        if (!str.length && o.key) {
            str.push(o.key + "=");
        }

        return str.join("&");
    },

    toHierarchy: function(options) {

        var o = $.extend({}, this.options, options),
            ret = [];

        $(this.element).children(o.items).each(function() {
            var level = _recursiveItems(this);
            ret.push(level);
        });

        return ret;

        function _recursiveItems(item) {
            var id = ($(item).attr(o.attribute || "id") || "").match(o.expression || (/(.+)[-=_](.+)/)),
                currentItem;

            var data = $(item).data();
            if (data.nestedSortableItem) {
                delete data.nestedSortableItem; // Remove the nestedSortableItem object from the data
            }

            if (id) {
                currentItem = {"id": id[2]};
                currentItem = $.extend({}, currentItem, data); // Combine the two objects

                if ($(item).children(o.listType).children(o.items).length > 0) {
                    currentItem.children = [];
                    $(item).children(o.listType).children(o.items).each(function() {
                        var level = _recursiveItems(this);
                        currentItem.children.push(level);
                    });
                }
                return currentItem;
            }
        }
    },

    toArray: function(options) {

        var o = $.extend({}, this.options, options),
            sDepth = o.startDepthCount || 0,
            ret = [],
            left = 1;

        if (!o.excludeRoot) {
            ret.push({
                "item_id": o.rootID,
                "parent_id": null,
                "depth": sDepth,
                "left": left,
                "right": ($(o.items, this.element).length + 1) * 2
            });
            left++;
        }

        $(this.element).children(o.items).each(function() {
            left = _recursiveArray(this, sDepth, left);
        });

        ret = ret.sort(function(a, b) { return (a.left - b.left); });

        return ret;

        function _recursiveArray(item, depth, _left) {

            var right = _left + 1,
                id,
                pid,
                parentItem;

            if ($(item).children(o.listType).children(o.items).length > 0) {
                depth++;
                $(item).children(o.listType).children(o.items).each(function() {
                    right = _recursiveArray($(this), depth, right);
                });
                depth--;
            }

            id = ($(item).attr(o.attribute || "id")).match(o.expression || (/(.+)[-=_](.+)/));

            if (depth === sDepth) {
                pid = o.rootID;
            } else {
                parentItem = ($(item).parent(o.listType)
                    .parent(o.items)
                    .attr(o.attribute || "id"))
                    .match(o.expression || (/(.+)[-=_](.+)/));
                pid = parentItem[2];
            }

            if (id) {
                var data = $(item).children('div').data();
                var itemObj = $.extend(data, {
                    "id": id[2],
                    "parent_id": pid,
                    "depth": depth,
                    "left": _left,
                    "right": right
                });
                ret.push(itemObj);
            }

            _left = right + 1;
            return _left;
        }
    },

    _clearEmpty: function (item) {

        function replaceClass(elem, search, replace, swap) {
            if (swap) {
                search = [replace, replace = search][0];
            }

            $(elem).removeClass(search).addClass(replace);
        }

        var o = this.options,
            childrenList = $(item).children(o.listType),
            hasChildren = childrenList.has('li').length;

        var doNotClear =
            o.doNotClear ||
            hasChildren ||
            o.protectRoot && $(item)[0] === this.element[0];

        if (o.isTree) {
            replaceClass(item, o.branchClass, o.leafClass, doNotClear);
        }

        if (!doNotClear) {
            childrenList.parent().removeClass(o.expandedClass);
            childrenList.remove();
        }
    },

    _getLevel: function(item) {

        var level = 1,
            list;

        if (this.options.listType) {
            list = item.closest(this.options.listType);
            while (list && list.length > 0 && !list.is(".ui-sortable")) {
                level++;
                list = list.parent().closest(this.options.listType);
            }
        }

        return level;
    },

    _getChildLevels: function(parent, depth) {
        var self = this,
            o = this.options,
            result = 0;

        depth = depth || 0;
        $(parent).children(o.listType).children(o.items).each(function(index, child) {
            result = Math.max(self._getChildLevels(child, depth + 1), result);
        });

        return depth ?
            result + 1 : result;
    },

    _isAllowed: function(parentItem, level, levels) {
        var o = this.options,
            // this takes into account the maxLevels set to the recipient list
            maxLevels = this
                .placeholder
                .closest(".ui-sortable")
                .nestedSortable("option", "maxLevels"),

            // Check if the parent has changed to prevent it, when o.disableParentChange is true
            oldParent = this.currentItem.parent().parent(),
            disabledByParentchange = o.disableParentChange && (
                // From somewhere to somewhere else, except the root
                typeof parentItem !== 'undefined' && !oldParent.is(parentItem) ||
                typeof parentItem === 'undefined' && oldParent.is("li") // From somewhere to the root
            );

        // mjs - is the root protected?
        // mjs - are we nesting too deep?
        if (disabledByParentchange ||
            !o.isAllowed(this.placeholder, parentItem, this.currentItem)
        ) {
            this.placeholder.addClass(o.errorClass);
            if (maxLevels < levels && maxLevels !== 0) {
                this.beyondMaxLevels = levels - maxLevels;
            } else {
                this.beyondMaxLevels = 1;
            }
        } else {
            if (maxLevels < levels && maxLevels !== 0) {
                this.placeholder.addClass(o.errorClass);
                this.beyondMaxLevels = levels - maxLevels;
            } else {
                this.placeholder.removeClass(o.errorClass);
                this.beyondMaxLevels = 0;
            }
        }
    }
}));

$.mjs.nestedSortable.prototype.options = $.extend(
    {},
    $.ui.sortable.prototype.options,
    $.mjs.nestedSortable.prototype.options
);
}));
/qg.core-0.1.6.tar.gz/qg.core-0.1.6/qg/core/log.py
# Copyright 2011 OpenStack Foundation.
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""Openstack logging handler.

This module adds to logging functionality by adding the option to specify
a context object when calling the various log methods.  If the context object
is not specified, default formatting is used. Additionally, an instance uuid
may be passed as part of the log message, which is intended to make it easier
for admins to find messages related to a specific instance.

It also allows setting of formatting information through conf.

"""

import ConfigParser
import cStringIO
import inspect
import itertools
import logging
import logging.config
import logging.handlers
import os
import sys
import traceback

from oslo_config import cfg

from qg.core.gettextutils import _
from qg.core import jsonutils
from qg.core import local


_DEFAULT_LOG_DATE_FORMAT = "%Y-%m-%d %H:%M:%S"

common_cli_opts = [
    cfg.BoolOpt('debug',
                short='d',
                default=False,
                help='Print debugging output (set logging level to '
                     'DEBUG instead of default WARNING level).'),
    cfg.BoolOpt('verbose',
                short='v',
                default=False,
                help='Print more verbose output (set logging level to '
                     'INFO instead of default WARNING level).'),
]

logging_cli_opts = [
    cfg.StrOpt('log-config',
               metavar='PATH',
               help='If this option is specified, the logging configuration '
                    'file specified is used and overrides any other logging '
                    'options specified. Please see the Python logging module '
                    'documentation for details on logging configuration '
                    'files.'),
    cfg.StrOpt('log-format',
               default=None,
               metavar='FORMAT',
               help='DEPRECATED. '
                    'A logging.Formatter log message format string which may '
                    'use any of the available logging.LogRecord attributes. '
                    'This option is deprecated. Please use '
                    'logging_context_format_string and '
                    'logging_default_format_string instead.'),
    cfg.StrOpt('log-date-format',
               default=_DEFAULT_LOG_DATE_FORMAT,
               metavar='DATE_FORMAT',
               help='Format string for %%(asctime)s in log records. '
                    'Default: %(default)s'),
    cfg.StrOpt('log-file',
               metavar='PATH',
               deprecated_name='logfile',
               help='(Optional) Name of log file to output to. '
                    'If no default is set, logging will go to stdout.'),
    cfg.StrOpt('log-dir',
               deprecated_name='logdir',
               help='(Optional) The base directory used for relative '
                    '--log-file paths'),
    cfg.BoolOpt('use-syslog',
                default=False,
                help='Use syslog for logging.'),
    cfg.StrOpt('syslog-log-facility',
               default='LOG_USER',
               help='syslog facility to receive log lines')
]

generic_log_opts = [
    cfg.BoolOpt('use_stderr',
                default=True,
                help='Log output to standard error')
]

log_opts = [
    cfg.StrOpt('logging_context_format_string',
               default='%(asctime)s.%(msecs)03d %(process)d %(levelname)s '
                       '%(name)s [%(request_id)s %(user)s] '
                       '%(instance)s%(message)s',
               help='format string to use for log messages with context'),
    cfg.StrOpt('logging_default_format_string',
               default='%(asctime)s.%(msecs)03d %(process)d %(levelname)s '
                       '%(name)s [-] %(instance)s%(message)s',
               help='format string to use for log messages without context'),
    cfg.StrOpt('logging_debug_format_suffix',
               default='%(funcName)s %(pathname)s:%(lineno)d',
               help='data to append to log format when level is DEBUG'),
    cfg.StrOpt('logging_exception_prefix',
               default='%(asctime)s.%(msecs)03d %(process)d TRACE %(name)s '
                       '%(instance)s',
               help='prefix each line of exception output with this format'),
    cfg.ListOpt('default_log_levels',
                default=[
                    'amqplib=WARN',
                    'sqlalchemy=WARN',
                    'boto=WARN',
                    'suds=INFO',
                    'keystone=INFO',
                    'eventlet.wsgi.server=WARN'
                ],
                help='list of logger=LEVEL pairs'),
    cfg.BoolOpt('fatal_deprecations',
                default=False,
                help='make deprecations fatal'),

    # NOTE(mikal): there are two options here because sometimes we are handed
    # a full instance (and could include more information), and other times we
    # are just handed a UUID for the instance.
    cfg.StrOpt('instance_format',
               default='[instance: %(uuid)s] ',
               help='If an instance is passed with the log message, format '
                    'it like this'),
    cfg.StrOpt('instance_uuid_format',
               default='[instance: %(uuid)s] ',
               help='If an instance UUID is passed with the log message, '
                    'format it like this'),
]

CONF = cfg.CONF
CONF.register_cli_opts(common_cli_opts)
CONF.register_cli_opts(logging_cli_opts)
CONF.register_opts(generic_log_opts)
CONF.register_opts(log_opts)

# our new audit level
# NOTE(jkoelker) Since we synthesized an audit level, make the logging
#                module aware of it so it acts like other levels.
logging.AUDIT = logging.INFO + 1
logging.addLevelName(logging.AUDIT, 'AUDIT')

try:
    NullHandler = logging.NullHandler
except AttributeError:  # NOTE(jkoelker) NullHandler added in Python 2.7
    class NullHandler(logging.Handler):
        def handle(self, record):
            pass

        def emit(self, record):
            pass

        def createLock(self):
            self.lock = None


def _dictify_context(context):
    if context is None:
        return None
    if not isinstance(context, dict) and getattr(context, 'to_dict', None):
        context = context.to_dict()
    return context


def _get_binary_name():
    return os.path.basename(inspect.stack()[-1][1])


def _get_log_file_path(binary=None):
    logfile = CONF.log_file
    logdir = CONF.log_dir

    if logfile and not logdir:
        return logfile

    if logfile and logdir:
        return os.path.join(logdir, logfile)

    if logdir:
        binary = binary or _get_binary_name()
        return '%s.log' % (os.path.join(logdir, binary),)


class BaseLoggerAdapter(logging.LoggerAdapter):

    def audit(self, msg, *args, **kwargs):
        self.log(logging.AUDIT, msg, *args, **kwargs)


class LazyAdapter(BaseLoggerAdapter):
    def __init__(self, name='unknown', version='unknown'):
        self._logger = None
        self.extra = {}
        self.name = name
        self.version = version

    @property
    def logger(self):
        if not self._logger:
            self._logger = getLogger(self.name, self.version)
        return self._logger


class ContextAdapter(BaseLoggerAdapter):
    warn = logging.LoggerAdapter.warning

    def __init__(self, logger, project_name, version_string):
        self.logger = logger
        self.project = project_name
        self.version = version_string

    @property
    def handlers(self):
        return self.logger.handlers

    def deprecated(self, msg, *args, **kwargs):
        stdmsg = _("Deprecated: %s") % msg
        if CONF.fatal_deprecations:
            self.critical(stdmsg, *args, **kwargs)
            raise DeprecatedConfig(msg=stdmsg)
        else:
            self.warn(stdmsg, *args, **kwargs)

    def process(self, msg, kwargs):
        if 'extra' not in kwargs:
            kwargs['extra'] = {}
        extra = kwargs['extra']

        context = kwargs.pop('context', None)
        if not context:
            context = getattr(local.store, 'context', None)
        if context:
            extra.update(_dictify_context(context))

        instance = kwargs.pop('instance', None)
        instance_extra = ''
        if instance:
            instance_extra = CONF.instance_format % instance
        else:
            instance_uuid = kwargs.pop('instance_uuid', None)
            if instance_uuid:
                instance_extra = (CONF.instance_uuid_format
                                  % {'uuid': instance_uuid})
        extra.update({'instance': instance_extra})

        extra.update({"project": self.project})
        extra.update({"version": self.version})

        extra['extra'] = extra.copy()
        return msg, kwargs


class JSONFormatter(logging.Formatter):
    def __init__(self, fmt=None, datefmt=None):
        # NOTE(jkoelker) we ignore the fmt argument, but it's still there
        #                since logging.config.fileConfig passes it.
        self.datefmt = datefmt

    def formatException(self, ei, strip_newlines=True):
        lines = traceback.format_exception(*ei)
        if strip_newlines:
            lines = [itertools.ifilter(
                lambda x: x,
                line.rstrip().splitlines()) for line in lines]
            lines = list(itertools.chain(*lines))
        return lines

    def format(self, record):
        message = {'message': record.getMessage(),
                   'asctime': self.formatTime(record, self.datefmt),
                   'name': record.name,
                   'msg': record.msg,
                   'args': record.args,
                   'levelname': record.levelname,
                   'levelno': record.levelno,
                   'pathname': record.pathname,
                   'filename': record.filename,
                   'module': record.module,
                   'lineno': record.lineno,
                   'funcname': record.funcName,
                   'created': record.created,
                   'msecs': record.msecs,
                   'relative_created': record.relativeCreated,
                   'thread': record.thread,
                   'thread_name': record.threadName,
                   'process_name': record.processName,
                   'process': record.process,
                   'traceback': None}

        if hasattr(record, 'extra'):
            message['extra'] = record.extra

        if record.exc_info:
            message['traceback'] = self.formatException(record.exc_info)

        return jsonutils.dumps(message)


def _create_logging_excepthook(product_name):
    def logging_excepthook(type, value, tb):
        extra = {}
        if CONF.verbose:
            extra['exc_info'] = (type, value, tb)
        getLogger(product_name).critical(str(value), **extra)
    return logging_excepthook


class LogConfigError(Exception):

    message = _('Error loading logging config %(log_config)s: %(err_msg)s')

    def __init__(self, log_config, err_msg):
        self.log_config = log_config
        self.err_msg = err_msg

    def __str__(self):
        return self.message % dict(log_config=self.log_config,
                                   err_msg=self.err_msg)


def _load_log_config(log_config):
    try:
        logging.config.fileConfig(log_config)
    except ConfigParser.Error as exc:
        raise LogConfigError(log_config, str(exc))


def setup(product_name):
    """Setup logging."""
    if CONF.log_config:
        _load_log_config(CONF.log_config)
    else:
        _setup_logging_from_conf()
    sys.excepthook = _create_logging_excepthook(product_name)


def set_defaults(logging_context_format_string):
    cfg.set_defaults(
        log_opts,
        logging_context_format_string=logging_context_format_string)


def _find_facility_from_conf():
    facility_names = logging.handlers.SysLogHandler.facility_names
    facility = getattr(logging.handlers.SysLogHandler,
                       CONF.syslog_log_facility,
                       None)

    if facility is None and CONF.syslog_log_facility in facility_names:
        facility = facility_names.get(CONF.syslog_log_facility)

    if facility is None:
        valid_facilities = facility_names.keys()
        consts = ['LOG_AUTH', 'LOG_AUTHPRIV', 'LOG_CRON', 'LOG_DAEMON',
                  'LOG_FTP', 'LOG_KERN', 'LOG_LPR', 'LOG_MAIL', 'LOG_NEWS',
                  'LOG_AUTH', 'LOG_SYSLOG', 'LOG_USER', 'LOG_UUCP',
                  'LOG_LOCAL0', 'LOG_LOCAL1', 'LOG_LOCAL2', 'LOG_LOCAL3',
                  'LOG_LOCAL4', 'LOG_LOCAL5', 'LOG_LOCAL6', 'LOG_LOCAL7']
        valid_facilities.extend(consts)
        raise TypeError(_('syslog facility must be one of: %s') %
                        ', '.join("'%s'" % fac for fac in valid_facilities))

    return facility


def _setup_logging_from_conf():
    log_root = getLogger(None).logger
    for handler in log_root.handlers:
        log_root.removeHandler(handler)

    if CONF.use_syslog:
        facility = _find_facility_from_conf()
        syslog = logging.handlers.SysLogHandler(address='/dev/log',
                                                facility=facility)
        log_root.addHandler(syslog)

    logpath = _get_log_file_path()
    if logpath:
        filelog = logging.handlers.WatchedFileHandler(logpath)
        log_root.addHandler(filelog)

    if CONF.use_stderr:
        streamlog = ColorHandler()
        log_root.addHandler(streamlog)

    elif not CONF.log_file:
        # pass sys.stdout as a positional argument
        # python2.6 calls the argument strm, in 2.7 it's stream
        streamlog = logging.StreamHandler(sys.stdout)
        log_root.addHandler(streamlog)

    datefmt = CONF.log_date_format
    for handler in log_root.handlers:
        # NOTE(alaski): CONF.log_format overrides everything currently. This
        # should be deprecated in favor of context aware formatting.
        if CONF.log_format:
            handler.setFormatter(logging.Formatter(fmt=CONF.log_format,
                                                   datefmt=datefmt))
            log_root.info('Deprecated: log_format is now deprecated and will '
                          'be removed in the next release')
        else:
            handler.setFormatter(ContextFormatter(datefmt=datefmt))

    if CONF.debug:
        log_root.setLevel(logging.DEBUG)
    elif CONF.verbose:
        log_root.setLevel(logging.INFO)
    else:
        log_root.setLevel(logging.WARNING)

    for pair in CONF.default_log_levels:
        mod, _sep, level_name = pair.partition('=')
        level = logging.getLevelName(level_name)
        logger = logging.getLogger(mod)
        logger.setLevel(level)


_loggers = {}


def getLogger(name='unknown', version='unknown'):
    if name not in _loggers:
        _loggers[name] = ContextAdapter(logging.getLogger(name),
                                        name,
                                        version)
    return _loggers[name]


def getLazyLogger(name='unknown', version='unknown'):
    """Returns lazy logger.

    Creates a pass-through logger that does not create the real logger
    until it is really needed and delegates all calls to the real logger
    once it is created.
    """
    return LazyAdapter(name, version)


class WritableLogger(object):
    """A thin wrapper that responds to `write` and logs."""

    def __init__(self, logger, level=logging.INFO):
        self.logger = logger
        self.level = level

    def write(self, msg):
        self.logger.log(self.level, msg)


class ContextFormatter(logging.Formatter):
    """A context.RequestContext aware formatter configured through flags.

    The flags used to set format strings are: logging_context_format_string
    and logging_default_format_string.  You can also specify
    logging_debug_format_suffix to append extra formatting if the log level
    is debug.

    For information about what variables are available for the formatter see:
    http://docs.python.org/library/logging.html#formatter

    """

    def format(self, record):
        """Uses contextstring if request_id is set, otherwise default."""
        # NOTE(sdague): default the fancier formatting params
        # to an empty string so we don't throw an exception if
        # they get used
        for key in ('instance', 'color'):
            if key not in record.__dict__:
                record.__dict__[key] = ''

        if record.__dict__.get('request_id', None):
            self._fmt = CONF.logging_context_format_string
        else:
            self._fmt = CONF.logging_default_format_string

        if (record.levelno == logging.DEBUG and
                CONF.logging_debug_format_suffix):
            self._fmt += " " + CONF.logging_debug_format_suffix

        # Cache this on the record, Logger will respect our formatted copy
        if record.exc_info:
            record.exc_text = self.formatException(record.exc_info, record)

        return logging.Formatter.format(self, record)

    def formatException(self, exc_info, record=None):
        """Format exception output with CONF.logging_exception_prefix."""
        if not record:
            return logging.Formatter.formatException(self, exc_info)

        stringbuffer = cStringIO.StringIO()
        traceback.print_exception(exc_info[0], exc_info[1], exc_info[2],
                                  None, stringbuffer)
        lines = stringbuffer.getvalue().split('\n')
        stringbuffer.close()

        if CONF.logging_exception_prefix.find('%(asctime)') != -1:
            record.asctime = self.formatTime(record, self.datefmt)

        formatted_lines = []
        for line in lines:
            pl = CONF.logging_exception_prefix % record.__dict__
            fl = '%s%s' % (pl, line)
            formatted_lines.append(fl)
        return '\n'.join(formatted_lines)


class ColorHandler(logging.StreamHandler):
    LEVEL_COLORS = {
        logging.DEBUG: '\033[00;32m',  # GREEN
        logging.INFO: '\033[00;36m',  # CYAN
        logging.AUDIT: '\033[01;36m',  # BOLD CYAN
        logging.WARN: '\033[01;33m',  # BOLD YELLOW
        logging.ERROR: '\033[01;31m',  # BOLD RED
        logging.CRITICAL: '\033[01;31m',  # BOLD RED
    }

    def format(self, record):
        record.color = self.LEVEL_COLORS[record.levelno]
        return logging.StreamHandler.format(self, record)


class DeprecatedConfig(Exception):
    message = _("Fatal call to deprecated config: %(msg)s")

    def __init__(self, msg):
        super(Exception, self).__init__(self.message % dict(msg=msg))
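The sketch below is not part of the packaged file; it is a minimal usage example of how a service might wire this module up, assuming a Python 2 environment (the module imports ConfigParser and cStringIO) and that qg.core and oslo_config are importable. The project name "myservice" and the context values are hypothetical placeholders.

```python
# Hedged usage sketch -- "myservice", the request id and the user below are
# hypothetical; any object exposing to_dict() also works as a context.
import sys

from oslo_config import cfg

from qg.core import log as logging

CONF = cfg.CONF


def main():
    # Parse CLI/config options first so --debug, --verbose, --log-file,
    # --use-syslog and friends are honoured by setup().
    CONF(sys.argv[1:], project='myservice')

    # Install handlers and formatters on the root logger and hook
    # sys.excepthook via _create_logging_excepthook().
    logging.setup('myservice')

    LOG = logging.getLogger(__name__)

    # Without a context, logging_default_format_string is used.
    LOG.info('service starting')

    # Passing a context switches the record to logging_context_format_string;
    # instance_uuid adds the "[instance: ...]" prefix.
    ctxt = {'request_id': 'req-123', 'user': 'admin'}
    LOG.warn('rebooting instance', context=ctxt,
             instance_uuid='00000000-0000-0000-0000-000000000000')

    # AUDIT is the synthesized level sitting just above INFO.
    LOG.audit('reboot requested')


if __name__ == '__main__':
    main()
```

Because getLogger() returns a ContextAdapter, the context and instance_uuid keyword arguments are consumed in process() and never reach the standard library logger; everything else behaves like a normal logging call.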