Dataset columns:
- repo_name: string (length 5-100)
- path: string (length 4-375)
- copies: string (991 distinct values)
- size: string (length 4-7)
- content: string (length 666-1M)
- license: string (15 distinct values)
repo_name: 3liz/qgis-wps4server
path: filters/PyWPS/tests/schema_validation.py
copies: 1
size: 17114
content:
import os
import sys

pywpsPath = os.path.abspath(os.path.join(
    os.path.split(os.path.abspath(__file__))[0], ".."))
# sys.path.append(pywpsPath)
sys.path.insert(0, pywpsPath)

import pywps
import pywps.Process
import unittest
import time
from lxml import etree
import urllib
import StringIO
from pywps import Soap

if os.name != "java":
    from osgeo import ogr
else:
    os.putenv("PYWPS_CFG", os.path.join(pywpsPath, "pywps", "default"))
    os.environ["PYWPS_CFG"] = os.path.join(pywpsPath, "pywps", "default.cfg")

os.putenv("PYWPS_TEMPLATES", os.path.join(pywpsPath, "tests", "Templates"))
os.environ["PYWPS_TEMPLATES"] = os.path.join(
    pywpsPath, "tests", "Templates")
os.putenv("PYWPS_PROCESSES", os.path.join(pywpsPath, "tests", "processes"))
os.environ["PYWPS_PROCESSES"] = os.path.join(
    pywpsPath, "tests", "processes")


class SchemaTestCase(unittest.TestCase):
    # The class takes some time to load, since it is here that the schema
    # objects are created and the schema URLs contacted.

    getCapabilitiesRequest = "service=wps&request=getcapabilities"
    getDescribeProcessRequest = "service=wps&request=describeprocess&version=1.0.0&identifier=bboxprocess,complexprocess,literalprocess,complexRaster,complexVector,ogrbuffer"

    postExecuteBBOXRequest = open(os.path.join(
        pywpsPath, "tests", "requests", "wps_execute_request-bbox.xml"))
    # 1 raster + 1 vector output, no definition of response doc
    postExecuteComplexInputRequest = open(os.path.join(
        pywpsPath, "tests", "requests",
        "wps_execute_request-complexinput-direct.xml"))
    postExecuteComplexInputOneOutputRequest = open(os.path.join(
        pywpsPath, "tests", "requests",
        "wps_execute_request-complexinput-one-output-as-reference.xml"))
    postExecuteLiteraDataRequest = open(os.path.join(
        pywpsPath, "tests", "requests",
        "wps_execute_request-literalinput-responsedocument.xml"))

    base_url = "http://schemas.opengis.net/wps/1.0.0/"
    getCapabilitiesSchemaResponse = "http://schemas.opengis.net/wps/1.0.0/wpsGetCapabilities_response.xsd"
    describeProcessSchemaResponse = "http://schemas.opengis.net/wps/1.0.0/wpsDescribeProcess_response.xsd"
    executeSchemaResponse = "http://schemas.opengis.net/wps/1.0.0/wpsExecute_response.xsd"
    wsdlSchema = "http://schemas.xmlsoap.org/wsdl/"
    soap11Schema = "http://schemas.xmlsoap.org/soap/envelope/"
    soap12Schema = "http://www.w3.org/2003/05/soap-envelope/"
    parser = etree.XMLParser(no_network=False)

    def setUp(self):
        # Silence PyWPS "Warning: Usage of ..." messages
        sys.stderr = open("/dev/null", "w")

    def testStatusLocation(self):
        """Test: status=false, storeExecuteResponse=true; the statusLocation
        file should NOT be empty"""
        self._setFromEnv()

        schemaDocExecute = etree.XML(urllib.urlopen(
            self.executeSchemaResponse).read(), parser=self.parser,
            base_url=self.base_url)
        schemaExecute = etree.XMLSchema(schemaDocExecute)

        mypywps = pywps.Pywps(pywps.METHOD_GET)
        inputs = mypywps.parseRequest(
            "service=wps&request=execute&version=1.0.0&identifier=ultimatequestionprocess&status=false&storeExecuteResponse=true")
        mypywps.performRequest()

        # First parse
        executeAssyncGET = etree.XML(mypywps.response, self.parser)
        self.assertEquals(schemaExecute.assertValid(executeAssyncGET), None)

        # get path to status document
        fileName = os.path.basename(executeAssyncGET.xpath(
            "//*[local-name()='ExecuteResponse']/@statusLocation")[0])
        filePath = pywps.config.getConfigValue(
            "server", "outputPath") + "/" + fileName
        self.assertEquals(True, os.path.exists(filePath))
        fileOpen = open(filePath)
        self.assertEquals(fileOpen.read(), mypywps.response)

    def testAssync(self):
        """Test assync status document"""
        self._setFromEnv()
        pid = os.getpid()

        schemaDocExecute = etree.XML(urllib.urlopen(
            self.executeSchemaResponse).read(), parser=self.parser,
            base_url=self.base_url)
        schemaExecute = etree.XMLSchema(schemaDocExecute)

        mypywps = pywps.Pywps(pywps.METHOD_GET)
        inputs = mypywps.parseRequest(
            "service=wps&request=execute&version=1.0.0&identifier=ultimatequestionprocess&status=true&storeExecuteResponse=true")
        mypywps.performRequest()

        # Killing the child from os.fork in pywps
        if (os.getpid() != pid):
            os._exit(0)

        # First parse
        executeAssyncGET = etree.XML(mypywps.response, self.parser)
        self.assertEquals(schemaExecute.assertValid(executeAssyncGET), None)

        # get path to status document
        fileName = os.path.basename(executeAssyncGET.xpath(
            "//*[local-name()='ExecuteResponse']/@statusLocation")[0])
        filePath = pywps.config.getConfigValue(
            "server", "outputPath") + "/" + fileName
        time.sleep(2)
        executeAssyncStatus = etree.parse(
            open(filePath, "r"), parser=self.parser)
        self.assertEquals(schemaExecute.assertValid(executeAssyncStatus), None)

        # Looping, waiting for ProcessSucceeded.
        # Will loop max 20 times and wait 5 sec each round;
        # if the assync run is taking too long, it might be a problem.
        counter = 0
        while counter < 20:
            executeAssyncStatus = etree.parse(
                open(filePath, "r"), parser=self.parser)
            processStatus = executeAssyncStatus.xpath(
                "//*[local-name()='ProcessAccepted' or local-name()='ProcessStarted' or local-name()='ProcessPaused']")
            self.assertEquals(schemaExecute.assertValid(
                executeAssyncStatus), None)
            if len(processStatus) > 0:
                counter = counter + 1
                time.sleep(5)
            else:
                break
        if counter >= 20:
            self.assertEquals("The assync process is taking too long", None)

    def testGetCapabilities(self):
        """Test if GetCapabilities request returns a valid XML document"""
        # Note: schemaGetCapabilities.assertValid(getCapabilitiesDoc) will dump
        # the location of the error; schemaGetCapabilities.validate(getCapabilitiesDoc)
        # will give true or false.
        # Note2: Setting the Process class constructor (Process/__init__.py)
        # without a default processVersion value,
        #   def __init__(self, identifier,...,profile=[],version=None,...):
        # will make the parser invalidate the request; this is a way to test
        # that the parser is working ok:
        #   DocumentInvalid: Element '{http://www.opengis.net/wps/1.0.0}Process':
        #   The attribute '{http://www.opengis.net/wps/1.0.0}processVersion' is
        #   required but missing., line 74
        # Note3: complexVector has mimeTypes None (application/x-empty)
        self._setFromEnv()

        schemaDocGetCap = etree.XML(urllib.urlopen(
            self.getCapabilitiesSchemaResponse).read(), parser=self.parser,
            base_url=self.base_url)
        schemaGetCapabilities = etree.XMLSchema(schemaDocGetCap)

        getpywps = pywps.Pywps(pywps.METHOD_GET)
        getinputs = getpywps.parseRequest(self.getCapabilitiesRequest)
        getpywps.performRequest(getinputs)
        getCapabilitiesGET = etree.XML(getpywps.response, self.parser)

        # Validate GET response
        self.assertEquals(schemaGetCapabilities.assertValid(
            getCapabilitiesGET), None)

        # POST request should be the same, since the response is generated
        # from the same inputs. But you never know....
        #postpywps = pywps.Pywps(pywps.METHOD_POST)
        #postinputs = postpywps.parseRequest(self.getCapabilitiesRequestFile)
        #postpywps.performRequest(postinputs)
        #getCapabilitiesPOST = etree.XML(postpywps.response, self.parser)
        #self.assertEquals(schemaGetCapabilities.assertValid(getCapabilitiesPOST), None)

    def testDescribeProcess(self):
        """Test if DescribeProcess requests return a valid XML document"""
        # Note: assyncprocess fails since it has no inputs and outputs
        # Note2: Processes that miss the format list (formats), e.g.
        # complexVector, will have <MimeType>None</MimeType>:
        #   element MimeType: Schemas validity error : Element 'MimeType':
        #   [facet 'pattern'] The value 'None' is not accepted by the pattern
        #   '(application|audio|image|text|video|message|multipart|model)/.+(;\s*.+=.+)*'
        # Note3: processes ok:
        #   bboxprocess,complexprocess,literalprocess,complexRaster
        self._setFromEnv()

        schemaDocDescribe = etree.XML(urllib.urlopen(
            self.describeProcessSchemaResponse).read(), parser=self.parser,
            base_url=self.base_url)
        schemaDescribeProcess = etree.XMLSchema(schemaDocDescribe)

        getpywps = pywps.Pywps(pywps.METHOD_GET)
        getinputs = getpywps.parseRequest(self.getDescribeProcessRequest)
        getpywps.performRequest(getinputs)
        describeProcessGET = etree.XML(getpywps.response, self.parser)
        self.assertEquals(schemaDescribeProcess.assertValid(
            describeProcessGET), None)

    def testExecuteBBOXProcess(self):
        """Test execute with bbox"""
        self._setFromEnv()

        schemaDocExecute = etree.XML(urllib.urlopen(
            self.executeSchemaResponse).read(), parser=self.parser,
            base_url=self.base_url)
        schemaExecute = etree.XMLSchema(schemaDocExecute)

        postpywps = pywps.Pywps(pywps.METHOD_POST)
        postinputs = postpywps.parseRequest(self.postExecuteBBOXRequest)
        self.postExecuteBBOXRequest.seek(0)
        postpywps.performRequest(postinputs)
        executeBBOXPOST = etree.XML(postpywps.response, self.parser)
        self.assertEquals(schemaExecute.assertValid(executeBBOXPOST), None)

    def testExecuteComplexInputDirect(self):
        """Test standard Execute direct output of raster and vector"""
        # wps_execute_request-complexinput-direct.xml
        self._setFromEnv()

        schemaDocExecute = etree.XML(urllib.urlopen(
            self.executeSchemaResponse).read(), parser=self.parser,
            base_url=self.base_url)
        schemaExecute = etree.XMLSchema(schemaDocExecute)

        postpywps = pywps.Pywps(pywps.METHOD_POST)
        postinputs = postpywps.parseRequest(
            self.postExecuteComplexInputRequest)
        self.postExecuteComplexInputRequest.seek(0)
        postpywps.performRequest(postinputs)
        executeComplexInputPOST = etree.XML(postpywps.response, self.parser)
        self.assertEquals(schemaExecute.assertValid(
            executeComplexInputPOST), None)

    def testExecuteComplexInputOutputDirect(self):
        """Testing raster and vector I/O"""
        self._setFromEnv()

        # Testing simple request with 2 complexdata, one raster another vector
        schemaDocExecute = etree.XML(urllib.urlopen(
            self.executeSchemaResponse).read(), parser=self.parser,
            base_url=self.base_url)
        schemaExecute = etree.XMLSchema(schemaDocExecute)

        postpywps = pywps.Pywps(pywps.METHOD_POST)
        postinputs = postpywps.parseRequest(
            self.postExecuteComplexInputRequest)
        self.postExecuteComplexInputRequest.seek(0)
        #postinputs = postpywps.parseRequest(self.postExecuteComplexInputOneOutputRequest)
        postpywps.performRequest(postinputs)
        executeComplexInputOneOutputPOST = etree.XML(
            postpywps.response, self.parser)
        self.assertEquals(schemaExecute.assertValid(
            executeComplexInputOneOutputPOST), None)

    def testExecuteComplexInputOneOutputReference(self):
        """Test lineage and output as reference"""
        self._setFromEnv()

        schemaDocExecute = etree.XML(urllib.urlopen(
            self.executeSchemaResponse).read(), parser=self.parser,
            base_url=self.base_url)
        schemaExecute = etree.XMLSchema(schemaDocExecute)

        postpywps = pywps.Pywps(pywps.METHOD_POST)
        postinputs = postpywps.parseRequest(
            self.postExecuteComplexInputOneOutputRequest)
        self.postExecuteComplexInputOneOutputRequest.seek(0)
        postpywps.performRequest(postinputs)
        executeComplexInputOneOutputPOST = etree.XML(
            postpywps.response, self.parser)
        self.assertEquals(schemaExecute.assertValid(
            executeComplexInputOneOutputPOST), None)

    def testExecuteLiteraData(self):
        """Test literaldata lineage and response document"""
        # Literal data doesn't support reference output, yet
        self._setFromEnv()

        schemaDocExecute = etree.XML(urllib.urlopen(
            self.executeSchemaResponse).read(), parser=self.parser,
            base_url=self.base_url)
        schemaExecute = etree.XMLSchema(schemaDocExecute)

        postpywps = pywps.Pywps(pywps.METHOD_POST)
        postinputs = postpywps.parseRequest(self.postExecuteLiteraDataRequest)
        self.postExecuteLiteraDataRequest.seek(0)
        postpywps.performRequest(postinputs)
        executeComplexInputOneOutputPOST = etree.XML(
            postpywps.response, self.parser)
        self.assertEquals(schemaExecute.assertValid(
            executeComplexInputOneOutputPOST), None)

    def testWSDL(self):
        """Test WSDL output content"""
        self._setFromEnv()

        schemaDocWSDL = etree.XML(urllib.urlopen(
            self.wsdlSchema).read(), parser=self.parser,
            base_url=self.base_url)
        schemaWSDL = etree.XMLSchema(schemaDocWSDL)

        getpywps = pywps.Pywps(pywps.METHOD_GET)
        inputs = getpywps.parseRequest("WSDL")
        # print inputs
        getpywps.performRequest()
        wsdlDoc = etree.XML(getpywps.response, self.parser)
        self.assertEquals(schemaWSDL.assertValid(wsdlDoc), None)

    def testSOAP11(self):
        """Test SOAP1.1 returned envelope"""
        # Same as testGetCapabilities in soap_tests
        self._setFromEnv()

        schemaDocSOAP = etree.XML(urllib.urlopen(
            self.soap11Schema).read(), parser=self.parser,
            base_url=self.base_url)
        schemaSOAP = etree.XMLSchema(schemaDocSOAP)

        postpywps = pywps.Pywps(pywps.METHOD_POST)
        getCapabilitiesSOAP11RequestFile = open(os.path.join(
            pywpsPath, "tests", "requests",
            "wps_getcapabilities_request_SOAP11.xml"))
        postpywps.parseRequest(getCapabilitiesSOAP11RequestFile)
        postpywps.performRequest()
        soap = Soap.SOAP()
        response = soap.getResponse(postpywps.response,
                                    soapVersion=postpywps.parser.soapVersion,
                                    isSoapExecute=postpywps.parser.isSoapExecute,
                                    isPromoteStatus=False)
        soapDoc = etree.XML(response, self.parser)
        self.assertEquals(schemaSOAP.assertValid(soapDoc), None)

    def testSOAP12(self):
        """Test SOAP1.2 returned envelope"""
        self._setFromEnv()

        schemaDocSOAP = etree.XML(urllib.urlopen(
            self.soap12Schema).read(), parser=self.parser,
            base_url=self.base_url)
        schemaSOAP = etree.XMLSchema(schemaDocSOAP)

        postpywps = pywps.Pywps(pywps.METHOD_POST)
        getCapabilitiesSOAP12RequestFile = open(os.path.join(
            pywpsPath, "tests", "requests",
            "wps_getcapabilities_request_SOAP12.xml"))
        postpywps.parseRequest(getCapabilitiesSOAP12RequestFile)
        postpywps.performRequest()
        soap = Soap.SOAP()
        response = soap.getResponse(postpywps.response,
                                    soapVersion=postpywps.parser.soapVersion,
                                    isSoapExecute=postpywps.parser.isSoapExecute,
                                    isPromoteStatus=False)
        soapDoc = etree.XML(response, self.parser)
        self.assertEquals(schemaSOAP.assertValid(soapDoc), None)

    def testSOAP11Fault(self):
        """Test Fault SOAP1.1"""
        schemaDocSOAP = etree.XML(urllib.urlopen(
            self.soap11Schema).read(), parser=self.parser,
            base_url=self.base_url)
        schemaSOAP = etree.XMLSchema(schemaDocSOAP)

        postpywps = pywps.Pywps(pywps.METHOD_POST)
        exceptionFile = open(os.path.join(
            pywpsPath, "tests", "requests",
            "wps_describeprocess_exception_SOAP11.xml"))
        postpywps.parseRequest(exceptionFile)
        try:
            postpywps.performRequest()
        except pywps.Exceptions.InvalidParameterValue, e:
            postpywps.response = e.getResponse()
        soap = Soap.SOAP()
        response = soap.getResponse(postpywps.response,
                                    soapVersion=postpywps.parser.soapVersion,
                                    isSoapExecute=postpywps.parser.isSoapExecute,
                                    isPromoteStatus=False)
        soapDoc = etree.XML(response, self.parser)
        self.assertEquals(schemaSOAP.assertValid(soapDoc), None)

    def _setFromEnv(self):
        os.putenv("PYWPS_PROCESSES", os.path.join(
            pywpsPath, "tests", "processes"))
        os.environ["PYWPS_PROCESSES"] = os.path.join(
            pywpsPath, "tests", "processes")


if __name__ == "__main__":
    # unittest.main()
    suite = unittest.TestLoader().loadTestsFromTestCase(SchemaTestCase)
    unittest.TextTestRunner(verbosity=2).run(suite)
license: gpl-3.0
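Every test above follows the same lxml validation pattern: download an OGC XSD, build an XMLSchema object, then assert the service response against it. A minimal standalone sketch of that pattern (Python 3 here, whereas the file above is Python 2; the sample schema URL is one of those used above):

import sys
from io import BytesIO
from urllib.request import urlopen
from lxml import etree

SCHEMA_URL = "http://schemas.opengis.net/wps/1.0.0/wpsExecute_response.xsd"

def assert_valid_wps_response(xml_bytes, schema_url=SCHEMA_URL):
    # Parse the schema with a base_url so relative xsd:include/import
    # references inside the OGC schema tree resolve correctly.
    parser = etree.XMLParser(no_network=False)
    schema_doc = etree.parse(BytesIO(urlopen(schema_url).read()),
                             parser=parser, base_url=schema_url)
    schema = etree.XMLSchema(schema_doc)
    # assertValid() returns None on success and raises DocumentInvalid
    # (with a line number) on failure, which is why the tests compare
    # its return value to None.
    schema.assertValid(etree.fromstring(xml_bytes))

if __name__ == "__main__":
    assert_valid_wps_response(sys.stdin.buffer.read())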
repo_name: kdungs/lhcb-hltflow
path: hltflow/latex.py
copies: 1
size: 1423
content:
""" Adds functionality to make LaTeX figures from the raw TikZ code produced by core.StreamerFlowchart. """ FIGURE = r'''\begin{{figure}} \centering \begin{{tikzpicture}}[{tikzoptions}] {tikz} \end{{tikzpicture}} \caption{{Flowchart of {name}}} \end{{figure}}''' DOCUMENT = r'''\documentclass{{scrartcl}} \usepackage{{fontspec}} \usepackage{{tikz}} \usetikzlibrary{{shapes, arrows, positioning}} \usepackage{{xcolor}} \input{{colors}} \input{{tikzstyles}} \begin{{document}} {figures} \end{{document}} ''' def indent(spaces, multilinestring): """ Indents a given multilinestring by a given number of spaces. This is used to produce properly formatted LaTeX documents. """ indentation = ' ' * spaces return '\n{}'.format(indentation).join(multilinestring.split('\n')) def make_figure(sf, tikzoptions='node distance=.75cm and 2.75cm'): """ Generates a LaTeX figure from a given hltflow.core.StreamerFlowchart. Additionally, tikzoptions can be supplied manually. """ from .core import StreamerFlowchart assert type(sf) is StreamerFlowchart return FIGURE.format(tikz=indent(4, sf.tikz), name=sf.name, tikzoptions=tikzoptions) def make_document(figures): """ Generates a LaTeX document from a given list of figures produced by hltflow.latex.make_figure. """ return DOCUMENT.format(figures='\n\n'.join(figures))
license: mit
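A short usage sketch for the module above. Since core.py is not shown, the StreamerFlowchart constructor arguments are assumptions; make_figure only requires that the object be a StreamerFlowchart exposing .name and .tikz:

from hltflow.core import StreamerFlowchart  # not shown in this record
from hltflow.latex import make_figure, make_document

sf = StreamerFlowchart('ExampleLine', 'streamer properties')  # hypothetical arguments
with open('flowcharts.tex', 'w') as tex:
    tex.write(make_document([make_figure(sf)]))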
repo_name: cigroup-ol/metaopt
path: docs/_extensions/gen_rst.py
copies: 1
size: 2094
content:
import os

from example_builder import ExampleBuilder

RST_TEMPLATE = """

.. _%(sphinx_tag)s:

%(docstring)s

%(image_list)s

.. raw:: html

    <div class="toggle_trigger"><a href="#">

**Code output:**

.. raw:: html

    </a></div>
    <div class="toggle_container">

.. literalinclude:: %(stdout)s

.. raw:: html

    </div>
    <div class="toggle_trigger" id="start_open"><a href="#">

**Python source code:**

.. raw:: html

    </a></div>
    <div class="toggle_container">

.. literalinclude:: %(fname)s
    :lines: %(end_line)s-

.. raw:: html

    </div>
    <div align="right">

:download:`[download source: %(fname)s] <%(fname)s>`

.. raw:: html

    </div>

"""


class CustomExampleBuilder(ExampleBuilder):
    def subdir_contents(self, path, subdirs):
        subdirs = [os.path.join(path, subdir) for subdir in subdirs]
        subdir_contents = ("\n\n"
                           ".. toctree::\n"
                           "   :maxdepth: 2\n\n")
        # for subdir in subdirs:
        #     index = os.path.splitext(self.rst_index_filename(subdir))[0]
        #     subdir_contents += '   %s\n' % os.path.relpath(index, path)
        subdir_contents += '\n'
        return subdir_contents


def main(app):
    target_dir = os.path.join(app.builder.srcdir, 'examples')
    source_dir = os.path.abspath(app.builder.srcdir + '/../' +
                                 'examples/showcase')
    try:
        plot_gallery = eval(app.builder.config.plot_gallery)
    except TypeError:
        plot_gallery = bool(app.builder.config.plot_gallery)

    if not os.path.exists(source_dir):
        os.makedirs(source_dir)
    if not os.path.exists(target_dir):
        os.makedirs(target_dir)

    EB = CustomExampleBuilder(source_dir, target_dir,
                              execute_files=plot_gallery,
                              dir_info_file='README.rst',
                              sphinx_tag_base='example',
                              template_example=RST_TEMPLATE)
    EB.run()


def setup(app):
    app.connect('builder-inited', main)
    app.add_config_value('plot_gallery', True, 'html')
license: bsd-3-clause
repo_name: amitt001/Analytics-App
path: API/rate/reviews_sentiment_read.py
copies: 1
size: 5952
content:
""" READ FILE: THIS FILE READS FROM THE ALREADY PICKLED FILES. FIRST RUN 'reviews_sentiment_write.py' RUN THIS ONLY TO CHECK ACCURACY FROM THE ALREADY PICKLED FILES. ALL THE CHNAGES MUST BE MADE TO THE 'reviews_sentiment_write.py'. Play Store apps reviews sentiment analysis using NLTK module of Python. Tagging reviews as positive and negative (and neutral) """ import re import pickle import random from collections import OrderedDict from statistics import mode from unidecode import unidecode from sklearn.svm import SVC, LinearSVC, NuSVC from sklearn.linear_model import LogisticRegression,SGDClassifier from sklearn.naive_bayes import MultinomialNB, GaussianNB, BernoulliNB import nltk from nltk.corpus import stopwords from nltk.classify import ClassifierI from nltk.tokenize import word_tokenize from nltk.classify.scikitlearn import SklearnClassifier class VoteClassifier(ClassifierI): def __init__(self, *classifiers): self._classifiers = classifiers self.votes = [] def classify(self, features): #classify the feature with all the 7 algos #and based on the mode return the result self.votes = [c.classify(features) for c in self._classifiers] return mode(self.votes) def confidence(self, features): choice_votes = self.votes.count(mode(self.votes)) conf = choice_votes / float(len(self.votes)) return conf def find_features(document): """ Thsi fucntion takes a list of words as input. For each word it checks that word is in the most_frequest words list or not. If word in most_frequent words list feature_dict[word] = True else word not in most_frequent words list feature_dict = Flase """ words = set(word_tokenize(document)) return dict((w,True if w in words else False) for w in word_features) short_pos = unidecode(open('positive.txt', 'r').read()) short_neg = unidecode(open('negative.txt', 'r').read()) stpwrd = dict((sw,True) for sw in stopwords.words('english')+['film', 'movie'] if sw not in ['not','below']) all_words = [w.lower() for w in word_tokenize(short_pos) + word_tokenize(short_neg) if len(w) > 1 and not stpwrd.get(w)] with open("pickle/documents.pickle","rb") as doc: documents = pickle.load(doc) all_words = nltk.FreqDist(all_words) all_words = OrderedDict(sorted(all_words.items(), key=lambda x:x[1], reverse=True)) with open("pickle/word_features5k.pickle","rb") as save_word_features: word_features = pickle.load(save_word_features) featuresets = [(find_features(rev), category) for (rev, category) in documents] random.shuffle(featuresets) train_set = featuresets[:8000] test_set = featuresets[8000:] ####DELETE Variables to Free up some space#### del short_neg del short_pos del stpwrd del all_words del word_features del documents del featuresets ################# ## CLASSIFIERS ## ################# with open('pickle/naive_bayes.pickle', 'rb') as saviour: classifier = pickle.load(saviour) print("Naive bayes Algo accuracy", (nltk.classify.accuracy(classifier, test_set))*100) classifier.show_most_informative_features(30) with open('pickle/mnb_classifier.pickle', 'rb') as saviour: MNB_classifier = pickle.load(saviour) print("MNB_classifier accuracy percent:", (nltk.classify.accuracy(MNB_classifier, test_set))*100) with open('pickle/bernoullinb_classifier.pickle', 'rb') as saviour: BernoulliNB_classifier = pickle.load(saviour) print("BernoulliNB_classifier accuracy percent:", (nltk.classify.accuracy(BernoulliNB_classifier, test_set))*100) with open('pickle/logisticregression_classifier.pickle', 'rb') as saviour: LogisticRegression_classifier = pickle.load(saviour) print("LogisticRegression_classifier 
accuracy percent:", (nltk.classify.accuracy(LogisticRegression_classifier, test_set))*100) with open('pickle/sgdcclassifier_classifier.pickle', 'rb') as saviour: SGDClassifier_classifier = pickle.load(saviour) print("SGDClassifier_classifier accuracy percent:", (nltk.classify.accuracy(SGDClassifier_classifier, test_set))*100) with open('pickle/linearsvc_classifier.pickle', 'rb') as saviour: LinearSVC_classifier = pickle.load(saviour) print("LinearSVC_classifier accuracy percent:", (nltk.classify.accuracy(LinearSVC_classifier, test_set))*100) with open('pickle/nusvc_classifier.pickle', 'rb') as saviour: NuSVC_classifier = pickle.load(saviour) print("NuSVC_classifier accuracy percent:", (nltk.classify.accuracy(NuSVC_classifier, test_set))*100) voted_classifier = VoteClassifier(classifier, NuSVC_classifier, LinearSVC_classifier, SGDClassifier_classifier, MNB_classifier, BernoulliNB_classifier, LogisticRegression_classifier) print("#"*30) print("Voted_classifier accuracy percent:", (nltk.classify.accuracy(voted_classifier, test_set))*100) print("#"*30) print("Classification:", voted_classifier.classify(test_set[0][0]), "Confidence %:",voted_classifier.confidence(test_set[0][0])*100) print("Classification:", voted_classifier.classify(test_set[1][0]), "Confidence %:",voted_classifier.confidence(test_set[1][0])*100) print("Classification:", voted_classifier.classify(test_set[2][0]), "Confidence %:",voted_classifier.confidence(test_set[2][0])*100) print("Classification:", voted_classifier.classify(test_set[3][0]), "Confidence %:",voted_classifier.confidence(test_set[3][0])*100) print("Classification:", voted_classifier.classify(test_set[4][0]), "Confidence %:",voted_classifier.confidence(test_set[4][0])*100) print("Classification:", voted_classifier.classify(test_set[5][0]), "Confidence %:",voted_classifier.confidence(test_set[5][0])*100)
license: mit
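The ensemble logic above reduces to taking the modal label across the seven pickled classifiers and reporting the fraction of agreeing votes as confidence. A self-contained sketch of just that voting step:

from statistics import mode

def vote(labels):
    winner = mode(labels)  # most common label; raises on ties before Python 3.8
    return winner, labels.count(winner) / float(len(labels))

print(vote(['pos', 'pos', 'neg', 'pos', 'neg', 'pos', 'pos']))  # ('pos', 0.714...)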
repo_name: flotre/Sick-Beard
path: sickbeard/providers/binnewz/binsearch.py
copies: 29
size: 3163
content:
# Author: Guillaume Serre <[email protected]>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of Sick Beard.
#
# Sick Beard is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Sick Beard is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Sick Beard. If not, see <http://www.gnu.org/licenses/>.

import urllib
from bs4 import BeautifulSoup
import re

from nzbdownloader import NZBDownloader
from nzbdownloader import NZBPostURLSearchResult


class BinSearch(NZBDownloader):

    def search(self, filename, minSize, newsgroup=None):

        if newsgroup != None:
            binSearchURLs = [
                urllib.urlencode({'server': 1, 'max': '250', 'adv_g': newsgroup,
                                  'q': filename, 'adv_sort': 'date',
                                  'minsize': str(minSize)}),
                urllib.urlencode({'server': 2, 'max': '250', 'adv_g': newsgroup,
                                  'q': filename, 'adv_sort': 'date',
                                  'minsize': str(minSize)})]
        else:
            binSearchURLs = [
                urllib.urlencode({'server': 1, 'max': '250', 'q': filename,
                                  'adv_sort': 'date', 'minsize': str(minSize)}),
                urllib.urlencode({'server': 2, 'max': '250', 'q': filename,
                                  'adv_sort': 'date', 'minsize': str(minSize)})]

        for suffixURL in binSearchURLs:
            binSearchURL = "https://binsearch.info/index.php?" + suffixURL

            binSearchSoup = BeautifulSoup(self.open(binSearchURL))

            foundName = None
            sizeInMegs = None
            for elem in binSearchSoup.findAll(
                    lambda tag: tag.name == 'tr'
                    and tag.get('bgcolor') == '#FFFFFF'
                    and 'size:' in tag.text):
                if foundName:
                    break
                for checkbox in elem.findAll(
                        lambda tag: tag.name == 'input'
                        and tag.get('type') == 'checkbox'):
                    if foundName:
                        break
                    sizeStr = re.search("size:\s+([^B]*)B", elem.text).group(1).strip()
                    if "G" in sizeStr:
                        sizeInMegs = float(re.search("([0-9\\.]+)", sizeStr).group(1)) * 1024
                    elif "K" in sizeStr:
                        sizeInMegs = 0
                    else:
                        sizeInMegs = float(re.search("([0-9\\.]+)", sizeStr).group(1))

                    if sizeInMegs > minSize:
                        foundName = checkbox.get('name')
                        break

            if foundName:
                postData = urllib.urlencode({foundName: 'on', 'action': 'nzb'})
                nzbURL = binSearchURL
                return NZBPostURLSearchResult(self, nzbURL, postData,
                                              sizeInMegs, binSearchURL)
license: gpl-3.0
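The size handling in search() normalizes binsearch's human-readable size strings to megabytes, deliberately collapsing kilobyte-sized posts to 0 so they never pass the minSize filter. The same logic in isolation:

import re

def size_to_megs(size_str):
    value = float(re.search(r"([0-9.]+)", size_str).group(1))
    if "G" in size_str:
        return value * 1024  # gigabytes -> megabytes
    if "K" in size_str:
        return 0             # kilobyte posts are treated as too small
    return value             # assumed to already be megabytes

print(size_to_megs("1.4 G"))    # 1433.6
print(size_to_megs("700.5 M"))  # 700.5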
repo_name: bzbarsky/servo
path: tests/wpt/web-platform-tests/tools/html5lib/parse.py
copies: 420
size: 8783
content:
#!/usr/bin/env python
"""usage: %prog [options] filename

Parse a document to a tree, with optional profiling
"""

import sys
import os
import traceback
from optparse import OptionParser

from html5lib import html5parser, sanitizer
from html5lib.tokenizer import HTMLTokenizer
from html5lib import treebuilders, serializer, treewalkers
from html5lib import constants


def parse():
    optParser = getOptParser()
    opts, args = optParser.parse_args()
    encoding = "utf8"

    try:
        f = args[-1]
        # Try opening from the internet
        if f.startswith('http://'):
            try:
                import urllib.request, urllib.parse, urllib.error, cgi
                f = urllib.request.urlopen(f)
                contentType = f.headers.get('content-type')
                if contentType:
                    (mediaType, params) = cgi.parse_header(contentType)
                    encoding = params.get('charset')
            except:
                pass
        elif f == '-':
            f = sys.stdin
            if sys.version_info[0] >= 3:
                encoding = None
        else:
            try:
                # Try opening from file system
                f = open(f, "rb")
            except IOError as e:
                sys.stderr.write("Unable to open file: %s\n" % e)
                sys.exit(1)
    except IndexError:
        sys.stderr.write("No filename provided. Use -h for help\n")
        sys.exit(1)

    treebuilder = treebuilders.getTreeBuilder(opts.treebuilder)

    if opts.sanitize:
        tokenizer = sanitizer.HTMLSanitizer
    else:
        tokenizer = HTMLTokenizer

    p = html5parser.HTMLParser(tree=treebuilder, tokenizer=tokenizer,
                               debug=opts.log)

    if opts.fragment:
        parseMethod = p.parseFragment
    else:
        parseMethod = p.parse

    if opts.profile:
        import cProfile
        import pstats
        cProfile.runctx("run(parseMethod, f, encoding)", None,
                        {"run": run, "parseMethod": parseMethod, "f": f,
                         "encoding": encoding}, "stats.prof")
        # XXX - We should use a temp file here
        stats = pstats.Stats('stats.prof')
        stats.strip_dirs()
        stats.sort_stats('time')
        stats.print_stats()
    elif opts.time:
        import time
        t0 = time.time()
        document = run(parseMethod, f, encoding)
        t1 = time.time()
        if document:
            printOutput(p, document, opts)
            t2 = time.time()
            sys.stderr.write("\n\nRun took: %fs (plus %fs to print the output)" % (t1 - t0, t2 - t1))
        else:
            sys.stderr.write("\n\nRun took: %fs" % (t1 - t0))
    else:
        document = run(parseMethod, f, encoding)
        if document:
            printOutput(p, document, opts)


def run(parseMethod, f, encoding):
    try:
        document = parseMethod(f, encoding=encoding)
    except:
        document = None
        traceback.print_exc()
    return document


def printOutput(parser, document, opts):
    if opts.encoding:
        print("Encoding:", parser.tokenizer.stream.charEncoding)

    for item in parser.log:
        print(item)

    if document is not None:
        if opts.xml:
            sys.stdout.write(document.toxml("utf-8"))
        elif opts.tree:
            if not hasattr(document, '__getitem__'):
                document = [document]
            for fragment in document:
                print(parser.tree.testSerializer(fragment))
        elif opts.hilite:
            sys.stdout.write(document.hilite("utf-8"))
        elif opts.html:
            kwargs = {}
            for opt in serializer.HTMLSerializer.options:
                try:
                    kwargs[opt] = getattr(opts, opt)
                except:
                    pass
            if not kwargs['quote_char']:
                del kwargs['quote_char']
            tokens = treewalkers.getTreeWalker(opts.treebuilder)(document)
            if sys.version_info[0] >= 3:
                encoding = None
            else:
                encoding = "utf-8"
            for text in serializer.HTMLSerializer(**kwargs).serialize(tokens, encoding=encoding):
                sys.stdout.write(text)
            if not text.endswith('\n'):
                sys.stdout.write('\n')
    if opts.error:
        errList = []
        for pos, errorcode, datavars in parser.errors:
            errList.append("Line %i Col %i" % pos + " " +
                           constants.E.get(errorcode, 'Unknown error "%s"' % errorcode) % datavars)
        sys.stdout.write("\nParse errors:\n" + "\n".join(errList) + "\n")


def getOptParser():
    parser = OptionParser(usage=__doc__)

    parser.add_option("-p", "--profile", action="store_true", default=False,
                      dest="profile", help="Use cProfile to "
                      "produce a detailed log of the run")

    parser.add_option("-t", "--time",
                      action="store_true", default=False, dest="time",
                      help="Time the run using time.time (may not be accurate on all platforms, especially for short runs)")

    parser.add_option("-b", "--treebuilder", action="store", type="string",
                      dest="treebuilder", default="simpleTree")

    parser.add_option("-e", "--error", action="store_true", default=False,
                      dest="error", help="Print a list of parse errors")

    parser.add_option("-f", "--fragment", action="store_true", default=False,
                      dest="fragment", help="Parse as a fragment")

    parser.add_option("", "--tree", action="store_true", default=False,
                      dest="tree", help="Output as debug tree")

    parser.add_option("-x", "--xml", action="store_true", default=False,
                      dest="xml", help="Output as xml")

    parser.add_option("", "--no-html", action="store_false", default=True,
                      dest="html", help="Don't output html")

    parser.add_option("", "--hilite", action="store_true", default=False,
                      dest="hilite", help="Output as formatted highlighted code.")

    parser.add_option("-c", "--encoding", action="store_true", default=False,
                      dest="encoding", help="Print character encoding used")

    parser.add_option("", "--inject-meta-charset", action="store_true",
                      default=False, dest="inject_meta_charset",
                      help="inject <meta charset>")

    parser.add_option("", "--strip-whitespace", action="store_true",
                      default=False, dest="strip_whitespace",
                      help="strip whitespace")

    parser.add_option("", "--omit-optional-tags", action="store_true",
                      default=False, dest="omit_optional_tags",
                      help="omit optional tags")

    parser.add_option("", "--quote-attr-values", action="store_true",
                      default=False, dest="quote_attr_values",
                      help="quote attribute values")

    parser.add_option("", "--use-best-quote-char", action="store_true",
                      default=False, dest="use_best_quote_char",
                      help="use best quote character")

    parser.add_option("", "--quote-char", action="store",
                      default=None, dest="quote_char",
                      help="quote character")

    parser.add_option("", "--no-minimize-boolean-attributes",
                      action="store_false", default=True,
                      dest="minimize_boolean_attributes",
                      help="minimize boolean attributes")

    parser.add_option("", "--use-trailing-solidus", action="store_true",
                      default=False, dest="use_trailing_solidus",
                      help="use trailing solidus")

    parser.add_option("", "--space-before-trailing-solidus",
                      action="store_true", default=False,
                      dest="space_before_trailing_solidus",
                      help="add space before trailing solidus")

    parser.add_option("", "--escape-lt-in-attrs", action="store_true",
                      default=False, dest="escape_lt_in_attrs",
                      help="escape less than signs in attribute values")

    parser.add_option("", "--escape-rcdata", action="store_true",
                      default=False, dest="escape_rcdata",
                      help="escape rcdata element values")

    parser.add_option("", "--sanitize", action="store_true", default=False,
                      dest="sanitize", help="sanitize")

    parser.add_option("-l", "--log", action="store_true", default=False,
                      dest="log", help="log state transitions")

    return parser


if __name__ == "__main__":
    parse()
license: mpl-2.0
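For contrast with the full command-line driver above, the minimal html5lib call it ultimately wraps looks like this ("etree" is one of the tree builder names the -b option accepts; the input filename is hypothetical):

import html5lib

with open("page.html", "rb") as f:
    # Returns the parsed document as an ElementTree element.
    document = html5lib.parse(f, treebuilder="etree")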
repo_name: fnouama/intellij-community
path: python/lib/Lib/site-packages/django/contrib/sessions/backends/file.py
copies: 91
size: 5318
content:
import errno
import os
import tempfile

from django.conf import settings
from django.contrib.sessions.backends.base import SessionBase, CreateError
from django.core.exceptions import SuspiciousOperation, ImproperlyConfigured


class SessionStore(SessionBase):
    """
    Implements a file based session store.
    """
    def __init__(self, session_key=None):
        self.storage_path = getattr(settings, "SESSION_FILE_PATH", None)
        if not self.storage_path:
            self.storage_path = tempfile.gettempdir()

        # Make sure the storage path is valid.
        if not os.path.isdir(self.storage_path):
            raise ImproperlyConfigured(
                "The session storage path %r doesn't exist. Please set your"
                " SESSION_FILE_PATH setting to an existing directory in which"
                " Django can store session data." % self.storage_path)

        self.file_prefix = settings.SESSION_COOKIE_NAME
        super(SessionStore, self).__init__(session_key)

    def _key_to_file(self, session_key=None):
        """
        Get the file associated with this session key.
        """
        if session_key is None:
            session_key = self.session_key

        # Make sure we're not vulnerable to directory traversal. Session keys
        # should always be md5s, so they should never contain directory
        # components.
        if os.path.sep in session_key:
            raise SuspiciousOperation(
                "Invalid characters (directory components) in session key")

        return os.path.join(self.storage_path, self.file_prefix + session_key)

    def load(self):
        session_data = {}
        try:
            session_file = open(self._key_to_file(), "rb")
            try:
                file_data = session_file.read()
                # Don't fail if there is no data in the session file.
                # We may have opened the empty placeholder file.
                if file_data:
                    try:
                        session_data = self.decode(file_data)
                    except (EOFError, SuspiciousOperation):
                        self.create()
            finally:
                session_file.close()
        except IOError:
            self.create()
        return session_data

    def create(self):
        while True:
            self._session_key = self._get_new_session_key()
            try:
                self.save(must_create=True)
            except CreateError:
                continue
            self.modified = True
            self._session_cache = {}
            return

    def save(self, must_create=False):
        # Get the session data now, before we start messing
        # with the file it is stored within.
        session_data = self._get_session(no_load=must_create)

        session_file_name = self._key_to_file()

        try:
            # Make sure the file exists. If it does not already exist, an
            # empty placeholder file is created.
            flags = os.O_WRONLY | os.O_CREAT | getattr(os, 'O_BINARY', 0)
            if must_create:
                flags |= os.O_EXCL
            fd = os.open(session_file_name, flags)
            os.close(fd)
        except OSError, e:
            if must_create and e.errno == errno.EEXIST:
                raise CreateError
            raise

        # Write the session file without interfering with other threads
        # or processes. By writing to an atomically generated temporary
        # file and then using the atomic os.rename() to make the complete
        # file visible, we avoid having to lock the session file, while
        # still maintaining its integrity.
        #
        # Note: Locking the session file was explored, but rejected in part
        # because in order to be atomic and cross-platform, it required a
        # long-lived lock file for each session, doubling the number of
        # files in the session storage directory at any given time. This
        # rename solution is cleaner and avoids any additional overhead
        # when reading the session data, which is the more common case
        # unless SESSION_SAVE_EVERY_REQUEST = True.
        #
        # See ticket #8616.
        dir, prefix = os.path.split(session_file_name)

        try:
            output_file_fd, output_file_name = tempfile.mkstemp(
                dir=dir, prefix=prefix + '_out_')
            renamed = False
            try:
                try:
                    os.write(output_file_fd, self.encode(session_data))
                finally:
                    os.close(output_file_fd)
                os.rename(output_file_name, session_file_name)
                renamed = True
            finally:
                if not renamed:
                    os.unlink(output_file_name)
        except (OSError, IOError, EOFError):
            pass

    def exists(self, session_key):
        if os.path.exists(self._key_to_file(session_key)):
            return True
        return False

    def delete(self, session_key=None):
        if session_key is None:
            if self._session_key is None:
                return
            session_key = self._session_key
        try:
            os.unlink(self._key_to_file(session_key))
        except OSError:
            pass

    def clean(self):
        pass
license: apache-2.0
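The long comment in save() describes the classic write-to-temp-then-rename idiom. Stripped of the session bookkeeping, the pattern looks like this (a sketch; rename is only atomic on POSIX when the temporary file lives on the same filesystem as the target, which is why mkstemp is pointed at the target's own directory):

import os
import tempfile

def atomic_write(path, data):
    dirname, basename = os.path.split(path)
    fd, tmp_path = tempfile.mkstemp(dir=dirname, prefix=basename + '_out_')
    try:
        try:
            os.write(fd, data)
        finally:
            os.close(fd)
        # Concurrent readers see either the complete old file or the
        # complete new one, never a partially written file.
        os.rename(tmp_path, path)
    except OSError:
        os.unlink(tmp_path)  # clean up the orphaned temp file on failure
        raise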
repo_name: Artemkaaas/indy-sdk
path: vcx/wrappers/python3/tests/test_issuer_credential.py
copies: 1
size: 13555
content:
import pytest
import json

from vcx.error import ErrorCode, VcxError
from vcx.state import State
from vcx.api.issuer_credential import IssuerCredential
from vcx.api.connection import Connection
from vcx.api.credential_def import CredentialDef
from vcx.api.credential import Credential

source_id = '1'
schema_no = 1234
cred_def_id = 'cred_def_id1'
cred_def_handle = 1
attrs = {'key': 'value', 'key2': 'value2', 'key3': 'value3'}
name = 'Credential Name'
issuer_did = '8XFh8yBzrpJQmNyZzgoTqB'
connection_options = '{"connection_type":"SMS","phone":"8019119191","use_public_did":true}'
price = '1'
schema_id = '123'
req = {'libindy_cred_req': '', 'libindy_cred_req_meta': '', 'cred_def_id': '',
       'tid': '', 'to_did': '', 'from_did': '', 'version': '', 'mid': '',
       'msg_ref_id': '123'}


@pytest.mark.asyncio
@pytest.mark.usefixtures('vcx_init_test_mode')
async def test_create_issuer_credential():
    cred_def = await CredentialDef.create(source_id, name, schema_id, 0)
    issuer_credential = await IssuerCredential.create(source_id, attrs, cred_def.handle, name, price)
    assert issuer_credential.source_id == source_id
    assert issuer_credential.handle > 0
    assert await issuer_credential.get_state() == State.Initialized


@pytest.mark.asyncio
@pytest.mark.usefixtures('vcx_init_test_mode')
async def test_serialize():
    cred_def = await CredentialDef.create(source_id, name, schema_id, 0)
    issuer_credential = await IssuerCredential.create(source_id, attrs, cred_def.handle, name, price)
    data = await issuer_credential.serialize()
    assert data.get('data').get('source_id') == source_id
    assert data.get('data').get('credential_name') == name


@pytest.mark.asyncio
@pytest.mark.usefixtures('vcx_init_test_mode')
async def test_serialize_with_bad_handle():
    with pytest.raises(VcxError) as e:
        cred_def = await CredentialDef.create(source_id, name, schema_id, 0)
        issuer_credential = await IssuerCredential.create(source_id, attrs, cred_def.handle, name, price)
        issuer_credential.handle = 0
        await issuer_credential.serialize()
    assert ErrorCode.InvalidIssuerCredentialHandle == e.value.error_code
    assert 'Invalid Credential Issuer Handle' == e.value.error_msg


@pytest.mark.asyncio
@pytest.mark.usefixtures('vcx_init_test_mode')
async def test_deserialize():
    cred_def = await CredentialDef.create(source_id, name, schema_id, 0)
    issuer_credential = await IssuerCredential.create(source_id, attrs, cred_def.handle, name, price)
    data = await issuer_credential.serialize()
    data['data']['handle'] = 99999
    data['data']['state'] = State.Expired
    issuer_credential2 = await IssuerCredential.deserialize(data)
    assert issuer_credential2.source_id == data.get('data').get('source_id')


@pytest.mark.asyncio
@pytest.mark.usefixtures('vcx_init_test_mode')
async def test_deserialize_with_invalid_data():
    with pytest.raises(VcxError) as e:
        data = {'data': {'invalid': -99}}
        await IssuerCredential.deserialize(data)
    assert ErrorCode.InvalidJson == e.value.error_code
    assert 'Invalid JSON string' == e.value.error_msg


@pytest.mark.asyncio
@pytest.mark.usefixtures('vcx_init_test_mode')
async def test_serialize_deserialize_and_then_serialize():
    cred_def = await CredentialDef.create(source_id, name, schema_id, 0)
    issuer_credential = await IssuerCredential.create(source_id, attrs, cred_def.handle, name, price)
    data1 = await issuer_credential.serialize()
    print("data1: %s" % data1)
    issuer_credential2 = await IssuerCredential.deserialize(data1)
    data2 = await issuer_credential2.serialize()
    assert data1 == data2


@pytest.mark.asyncio
@pytest.mark.usefixtures('vcx_init_test_mode')
async def test_update_state():
    cred_def = await CredentialDef.create(source_id, name, schema_id, 0)
    issuer_credential = await IssuerCredential.create(source_id, attrs, cred_def.handle, name, price)
    assert await issuer_credential.update_state() == State.Initialized


@pytest.mark.asyncio
@pytest.mark.usefixtures('vcx_init_test_mode')
async def test_update_state_with_invalid_handle():
    with pytest.raises(VcxError) as e:
        cred_def = await CredentialDef.create(source_id, name, schema_id, 0)
        issuer_credential = await IssuerCredential.create(source_id, attrs, cred_def.handle, name, price)
        issuer_credential.handle = 0
        await issuer_credential.update_state()
    assert ErrorCode.InvalidIssuerCredentialHandle == e.value.error_code
    assert 'Invalid Credential Issuer Handle' == e.value.error_msg


@pytest.mark.asyncio
@pytest.mark.usefixtures('vcx_init_test_mode')
async def test_get_state():
    cred_def = await CredentialDef.create(source_id, name, schema_id, 0)
    issuer_credential = await IssuerCredential.create(source_id, attrs, cred_def.handle, name, price)
    assert await issuer_credential.get_state() == State.Initialized


@pytest.mark.asyncio
@pytest.mark.usefixtures('vcx_init_test_mode')
async def test_issuer_credential_release():
    with pytest.raises(VcxError) as e:
        cred_def = await CredentialDef.create(source_id, name, schema_id, 0)
        issuer_credential = await IssuerCredential.create(source_id, attrs, cred_def.handle, name, price)
        assert issuer_credential.handle > 0
        issuer_credential.release()
        await issuer_credential.serialize()
    assert ErrorCode.InvalidIssuerCredentialHandle == e.value.error_code
    assert 'Invalid Credential Issuer Handle' == e.value.error_msg


@pytest.mark.asyncio
@pytest.mark.usefixtures('vcx_init_test_mode')
async def test_send_offer():
    connection = await Connection.create(source_id)
    await connection.connect(connection_options)
    cred_def = await CredentialDef.create(source_id, name, schema_id, 0)
    issuer_credential = await IssuerCredential.create(source_id, attrs, cred_def.handle, name, price)
    await issuer_credential.send_offer(connection)
    assert await issuer_credential.update_state() == State.OfferSent
    txn = await issuer_credential.get_payment_txn()
    assert (txn)


@pytest.mark.asyncio
@pytest.mark.usefixtures('vcx_init_test_mode')
async def test_get_msgs():
    connection = await Connection.create(source_id)
    await connection.connect(connection_options)
    my_pw_did = await connection.get_my_pw_did()
    their_pw_did = await connection.get_their_pw_did()
    cred_def = await CredentialDef.create(source_id, name, schema_id, 0)
    issuer_credential = await IssuerCredential.create(source_id, attrs, cred_def.handle, name, price)
    offer = await issuer_credential.get_offer_msg()
    assert (offer)
    cred = await Credential.create("cred", offer)
    assert (cred)
    request = await cred.get_request_msg(my_pw_did, their_pw_did, 0)
    print(request)
    await issuer_credential.update_state_with_message(json.dumps(request))
    assert await issuer_credential.get_state() == State.RequestReceived
    cred_msg = await issuer_credential.get_credential_msg(my_pw_did)
    await cred.update_state_with_message(json.dumps(cred_msg))
    assert (await cred.get_state() == State.Accepted)


@pytest.mark.asyncio
@pytest.mark.usefixtures('vcx_init_test_mode')
async def test_send_offer_with_invalid_state():
    with pytest.raises(VcxError) as e:
        connection = await Connection.create(source_id)
        await connection.connect(connection_options)
        cred_def = await CredentialDef.create(source_id, name, schema_id, 0)
        issuer_credential = await IssuerCredential.create(source_id, attrs, cred_def.handle, name, price)
        data = await issuer_credential.serialize()
        data['data']['state'] = State.Expired
        issuer_credential2 = await IssuerCredential.deserialize(data)
        await issuer_credential2.send_offer(connection)
    assert ErrorCode.NotReady == e.value.error_code
    assert 'Object not ready for specified action' == e.value.error_msg


@pytest.mark.asyncio
@pytest.mark.usefixtures('vcx_init_test_mode')
async def test_send_offer_with_bad_connection():
    with pytest.raises(VcxError) as e:
        connection = Connection(source_id)
        cred_def = await CredentialDef.create(source_id, name, schema_id, 0)
        issuer_credential = await IssuerCredential.create(source_id, attrs, cred_def.handle, name, price)
        await issuer_credential.send_offer(connection)
    assert ErrorCode.InvalidConnectionHandle == e.value.error_code
    assert 'Invalid Connection Handle' == e.value.error_msg


@pytest.mark.asyncio
@pytest.mark.usefixtures('vcx_init_test_mode')
async def test_send_credential():
    connection = await Connection.create(source_id)
    await connection.connect(connection_options)
    cred_def = await CredentialDef.create(source_id, name, schema_id, 0)
    issuer_credential = await IssuerCredential.create(source_id, attrs, cred_def.handle, name, price)
    await issuer_credential.send_offer(connection)
    assert await issuer_credential.update_state() == State.OfferSent
    # simulate consumer sending credential_req
    data = await issuer_credential.serialize()
    data['data']['state'] = State.RequestReceived
    data['data']['credential_request'] = req
    issuer_credential2 = await issuer_credential.deserialize(data)
    await issuer_credential2.send_credential(connection)
    assert await issuer_credential2.get_state() == State.Accepted


@pytest.mark.asyncio
@pytest.mark.usefixtures('vcx_init_test_mode')
async def test_get_credential_msg():
    connection = await Connection.create(source_id)
    await connection.connect(connection_options)
    cred_def = await CredentialDef.create(source_id, name, schema_id, 0)
    issuer_credential = await IssuerCredential.create(source_id, attrs, cred_def.handle, name, price)
    await issuer_credential.send_offer(connection)
    assert await issuer_credential.update_state() == State.OfferSent
    # simulate consumer sending credential_req
    data = await issuer_credential.serialize()
    data['data']['state'] = State.RequestReceived
    data['data']['credential_request'] = req
    issuer_credential2 = await issuer_credential.deserialize(data)
    my_pw_did = await connection.get_my_pw_did()
    msg = await issuer_credential2.get_credential_msg(my_pw_did)
    assert (msg)


@pytest.mark.asyncio
@pytest.mark.usefixtures('vcx_init_test_mode')
async def test_send_credential_with_invalid_issuer_credential():
    with pytest.raises(VcxError) as e:
        cred_def = await CredentialDef.create(source_id, name, schema_id, 0)
        issuer_credential = IssuerCredential(source_id, attrs, cred_def.handle, name, price)
        await issuer_credential.send_credential(Connection(source_id))
    assert ErrorCode.InvalidIssuerCredentialHandle == e.value.error_code


@pytest.mark.asyncio
@pytest.mark.usefixtures('vcx_init_test_mode')
async def test_send_credential_with_invalid_connection():
    with pytest.raises(VcxError) as e:
        cred_def = await CredentialDef.create(source_id, name, schema_id, 0)
        issuer_credential = await IssuerCredential.create(source_id, attrs, cred_def.handle, name, price)
        await issuer_credential.send_credential(Connection(source_id))
    assert ErrorCode.InvalidConnectionHandle == e.value.error_code


@pytest.mark.asyncio
@pytest.mark.usefixtures('vcx_init_test_mode')
async def test_send_credential_with_no_prior_offer():
    with pytest.raises(VcxError) as e:
        connection = await Connection.create(source_id)
        await connection.connect(connection_options)
        cred_def = await CredentialDef.create(source_id, name, schema_id, 0)
        issuer_credential = await IssuerCredential.create(source_id, attrs, cred_def.handle, name, price)
        await issuer_credential.send_credential(connection)
    assert ErrorCode.NotReady == e.value.error_code


@pytest.mark.asyncio
@pytest.mark.usefixtures('vcx_init_test_mode')
async def test_revoke_credential_fails_with_invalid_rev_details():
    cred_def = await CredentialDef.create(source_id, name, schema_id, 0)
    issuer_credential = await IssuerCredential.create(source_id, attrs, cred_def.handle, name, price)
    with pytest.raises(VcxError) as e:
        await issuer_credential.revoke_credential()
    assert ErrorCode.InvalidRevocationDetails == e.value.error_code


@pytest.mark.asyncio
@pytest.mark.usefixtures('vcx_init_test_mode')
async def test_revoke_credential_success():
    cred_def = await CredentialDef.create(source_id, name, schema_id, 0)
    issuer_credential = await IssuerCredential.create(source_id, attrs, cred_def.handle, name, price)
    serialized = await issuer_credential.serialize()

    issuer_credential2 = await IssuerCredential.deserialize(serialized)
    with pytest.raises(VcxError) as e:
        await issuer_credential2.revoke_credential()
    assert ErrorCode.InvalidRevocationDetails == e.value.error_code

    serialized['data']['cred_rev_id'] = '123'
    issuer_credential3 = await IssuerCredential.deserialize(serialized)
    with pytest.raises(VcxError) as e:
        await issuer_credential3.revoke_credential()
    assert ErrorCode.InvalidRevocationDetails == e.value.error_code

    serialized['data']['rev_reg_id'] = '456'
    issuer_credential4 = await IssuerCredential.deserialize(serialized)
    with pytest.raises(VcxError) as e:
        await issuer_credential4.revoke_credential()
    assert ErrorCode.InvalidRevocationDetails == e.value.error_code

    serialized['data']['tails_file'] = 'file'
    issuer_credential5 = await IssuerCredential.deserialize(serialized)
    await issuer_credential5.revoke_credential()
license: apache-2.0
repo_name: omnirom/android_external_chromium-org
path: tools/telemetry/telemetry/page/actions/loop.py
copies: 47
size: 1727
content:
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""A Telemetry page_action that loops media playback.

Action parameters are:
- loop_count: The number of times to loop media.
- selector: If no selector is defined then the action attempts to loop the
            first media element on the page. If 'all' then loop all media
            elements.
- timeout_in_seconds: Timeout to wait for media to loop. Default is
                      60 sec x loop_count. 0 means do not wait.
"""

from telemetry.core import exceptions
from telemetry.page.actions import media_action
from telemetry.page.actions import page_action


class LoopAction(media_action.MediaAction):
  def __init__(self, loop_count, selector=None, timeout_in_seconds=None):
    super(LoopAction, self).__init__()
    self._loop_count = loop_count
    self._selector = selector if selector else ''
    self._timeout_in_seconds = (
        timeout_in_seconds if timeout_in_seconds else 60 * loop_count)

  def WillRunAction(self, tab):
    """Load the media metrics JS code prior to running the action."""
    super(LoopAction, self).WillRunAction(tab)
    self.LoadJS(tab, 'loop.js')

  def RunAction(self, tab):
    try:
      tab.ExecuteJavaScript('window.__loopMedia("%s", %i);' %
                            (self._selector, self._loop_count))
      if self._timeout_in_seconds > 0:
        self.WaitForEvent(tab, self._selector, 'loop',
                          self._timeout_in_seconds)
    except exceptions.EvaluateException:
      raise page_action.PageActionFailed('Cannot loop media element(s) with '
                                         'selector = %s.' % self._selector)
license: bsd-3-clause
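A usage sketch for the action above. The tab object and the page-runner wiring come from the surrounding telemetry framework, so only the constructor call is taken from the code itself; the selector value is illustrative:

from telemetry.page.actions.loop import LoopAction

# Loop the element matching '#video1' twice, waiting up to 120 s for the
# 'loop' event instead of the default 60 s per requested loop.
action = LoopAction(loop_count=2, selector='#video1', timeout_in_seconds=120)
# The telemetry runner then calls action.WillRunAction(tab) and
# action.RunAction(tab) with a live tab object.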
repo_name: mirror/vbox
path: src/VBox/ValidationKit/testmanager/core/testresults.py
copies: 3
size: 66539
content:
# -*- coding: utf-8 -*-
# $Id$
# pylint: disable=C0302

## @todo Rename this file to testresult.py!

"""
Test Manager - Fetch test results.
"""

__copyright__ = \
"""
Copyright (C) 2012-2014 Oracle Corporation

This file is part of VirtualBox Open Source Edition (OSE), as
available from http://www.virtualbox.org. This file is free software;
you can redistribute it and/or modify it under the terms of the GNU
General Public License (GPL) as published by the Free Software
Foundation, in version 2 as it comes in the "COPYING" file of the
VirtualBox OSE distribution. VirtualBox OSE is distributed in the
hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.

The contents of this file may alternatively be used under the terms
of the Common Development and Distribution License Version 1.0
(CDDL) only, as it comes in the "COPYING.CDDL" file of the
VirtualBox OSE distribution, in which case the provisions of the
CDDL are applicable instead of those of the GPL.

You may elect to license modified versions of this file under the
terms and conditions of either the GPL or the CDDL or both.
"""
__version__ = "$Revision$"

# Standard python imports.
import unittest;

# Validation Kit imports.
from common import constants;
from testmanager import config;
from testmanager.core.base import ModelDataBase, ModelLogicBase, ModelDataBaseTestCase, TMExceptionBase, TMTooManyRows;
from testmanager.core.testgroup import TestGroupData
from testmanager.core.build import BuildDataEx
from testmanager.core.testbox import TestBoxData
from testmanager.core.testcase import TestCaseData
from testmanager.core.schedgroup import SchedGroupData
from testmanager.core.systemlog import SystemLogData, SystemLogLogic;


class TestResultData(ModelDataBase):
    """
    Test case execution result data.
    """

    ## @name TestStatus_T
    # @{
    ksTestStatus_Running    = 'running';
    ksTestStatus_Success    = 'success';
    ksTestStatus_Skipped    = 'skipped';
    ksTestStatus_BadTestBox = 'bad-testbox';
    ksTestStatus_Aborted    = 'aborted';
    ksTestStatus_Failure    = 'failure';
    ksTestStatus_TimedOut   = 'timed-out';
    ksTestStatus_Rebooted   = 'rebooted';
    ## @}

    ## List of relatively harmless (to testgroup/case) statuses.
    kasHarmlessTestStatuses = [ ksTestStatus_Skipped, ksTestStatus_BadTestBox, ksTestStatus_Aborted, ];
    ## List of bad statuses.
    kasBadTestStatuses      = [ ksTestStatus_Failure, ksTestStatus_TimedOut, ksTestStatus_Rebooted, ];

    ksIdAttr = 'idTestResult';

    ksParam_idTestResult       = 'TestResultData_idTestResult';
    ksParam_idTestResultParent = 'TestResultData_idTestResultParent';
    ksParam_idTestSet          = 'TestResultData_idTestSet';
    ksParam_tsCreated          = 'TestResultData_tsCreated';
    ksParam_tsElapsed          = 'TestResultData_tsElapsed';
    ksParam_idStrName          = 'TestResultData_idStrName';
    ksParam_cErrors            = 'TestResultData_cErrors';
    ksParam_enmStatus          = 'TestResultData_enmStatus';
    ksParam_iNestingDepth      = 'TestResultData_iNestingDepth';

    kasValidValues_enmStatus = [
        ksTestStatus_Running,
        ksTestStatus_Success,
        ksTestStatus_Skipped,
        ksTestStatus_BadTestBox,
        ksTestStatus_Aborted,
        ksTestStatus_Failure,
        ksTestStatus_TimedOut,
        ksTestStatus_Rebooted
    ];

    def __init__(self):
        ModelDataBase.__init__(self)
        self.idTestResult       = None
        self.idTestResultParent = None
        self.idTestSet          = None
        self.tsCreated          = None
        self.tsElapsed          = None
        self.idStrName          = None
        self.cErrors            = 0;
        self.enmStatus          = None
        self.iNestingDepth      = None

    def initFromDbRow(self, aoRow):
        """
        Reinitialize from a SELECT * FROM TestResults row.
        Return self. Raises exception if no row.
        """
        if aoRow is None:
            raise TMExceptionBase('Test result record not found.')

        self.idTestResult       = aoRow[0]
        self.idTestResultParent = aoRow[1]
        self.idTestSet          = aoRow[2]
        self.tsCreated          = aoRow[3]
        self.tsElapsed          = aoRow[4]
        self.idStrName          = aoRow[5]
        self.cErrors            = aoRow[6]
        self.enmStatus          = aoRow[7]
        self.iNestingDepth      = aoRow[8]
        return self;

    def isFailure(self):
        """ Check if it's a real failure. """
        return self.enmStatus in self.kasBadTestStatuses;


class TestResultDataEx(TestResultData):
    """
    Extended test result data class.

    This is intended for use as a node in a result tree. This is not intended
    for serialization to parameters or vice versa. Use TestResultLogic to
    construct the tree.
    """

    def __init__(self):
        TestResultData.__init__(self)
        self.sName      = None; # idStrName resolved.
        self.oParent    = None; # idTestResultParent within the tree.

        self.aoChildren = [];   # TestResultDataEx;
        self.aoValues   = [];   # TestResultValue;
        self.aoMsgs     = [];   # TestResultMsg;
        self.aoFiles    = [];   # TestResultFile;

    def initFromDbRow(self, aoRow):
        """
        Initialize from a query like this:
            SELECT TestResults.*, TestResultStrTab.sValue
            FROM TestResults, TestResultStrTab
            WHERE TestResultStrTab.idStr = TestResults.idStrName

        Note! The caller is expected to fetch children, values, failure
              details, and files.
        """
        self.sName      = None;
        self.oParent    = None;
        self.aoChildren = [];
        self.aoValues   = [];
        self.aoMsgs     = [];
        self.aoFiles    = [];

        TestResultData.initFromDbRow(self, aoRow);

        self.sName = aoRow[9];
        return self;


class TestResultValueData(ModelDataBase):
    """
    Test result value data.
    """

    ksIdAttr = 'idTestResultValue';

    ksParam_idTestResultValue = 'TestResultValue_idTestResultValue';
    ksParam_idTestResult      = 'TestResultValue_idTestResult';
    ksParam_idTestSet         = 'TestResultValue_idTestSet';
    ksParam_tsCreated         = 'TestResultValue_tsCreated';
    ksParam_idStrName         = 'TestResultValue_idStrName';
    ksParam_lValue            = 'TestResultValue_lValue';
    ksParam_iUnit             = 'TestResultValue_iUnit';

    def __init__(self):
        ModelDataBase.__init__(self)
        self.idTestResultValue = None;
        self.idTestResult      = None;
        self.idTestSet         = None;
        self.tsCreated         = None;
        self.idStrName         = None;
        self.lValue            = None;
        self.iUnit             = 0;

    def initFromDbRow(self, aoRow):
        """
        Reinitialize from a SELECT * FROM TestResultValues row.
        Return self. Raises exception if no row.
        """
        if aoRow is None:
            raise TMExceptionBase('Test result value record not found.')

        self.idTestResultValue = aoRow[0];
        self.idTestResult      = aoRow[1];
        self.idTestSet         = aoRow[2];
        self.tsCreated         = aoRow[3];
        self.idStrName         = aoRow[4];
        self.lValue            = aoRow[5];
        self.iUnit             = aoRow[6];
        return self;


class TestResultValueDataEx(TestResultValueData):
    """
    Extends TestResultValue by resolving the value name and unit string.
    """

    def __init__(self):
        TestResultValueData.__init__(self)
        self.sName = None;
        self.sUnit = '';

    def initFromDbRow(self, aoRow):
        """
        Reinitialize from a query like this:
            SELECT TestResultValues.*, TestResultStrTab.sValue
            FROM TestResultValues, TestResultStrTab
            WHERE TestResultStrTab.idStr = TestResultValues.idStrName

        Return self. Raises exception if no row.
        """
        TestResultValueData.initFromDbRow(self, aoRow);
        self.sName = aoRow[7];
        if self.iUnit < len(constants.valueunit.g_asNames):
            self.sUnit = constants.valueunit.g_asNames[self.iUnit];
        else:
            self.sUnit = '<%d>' % (self.iUnit,);
        return self;


class TestResultMsgData(ModelDataBase):
    """
    Test result message data.
    """

    ksIdAttr = 'idTestResultMsg';

    ksParam_idTestResultMsg = 'TestResultValue_idTestResultMsg';
    ksParam_idTestResult    = 'TestResultValue_idTestResult';
    ksParam_tsCreated       = 'TestResultValue_tsCreated';
    ksParam_idStrMsg        = 'TestResultValue_idStrMsg';
    ksParam_enmLevel        = 'TestResultValue_enmLevel';

    def __init__(self):
        ModelDataBase.__init__(self)
        self.idTestResultMsg = None;
        self.idTestResult    = None;
        self.tsCreated       = None;
        self.idStrMsg        = None;
        self.enmLevel        = None;

    def initFromDbRow(self, aoRow):
        """
        Reinitialize from a SELECT * FROM TestResultMsgs row.
        Return self. Raises exception if no row.
        """
        if aoRow is None:
            raise TMExceptionBase('Test result value record not found.')

        self.idTestResultMsg = aoRow[0];
        self.idTestResult    = aoRow[1];
        self.tsCreated       = aoRow[2];
        self.idStrMsg        = aoRow[3];
        self.enmLevel        = aoRow[4];
        return self;


class TestResultMsgDataEx(TestResultMsgData):
    """
    Extends TestResultMsg by resolving the message string.
    """

    def __init__(self):
        TestResultMsgData.__init__(self)
        self.sMsg = None;

    def initFromDbRow(self, aoRow):
        """
        Reinitialize from a query like this:
            SELECT TestResultMsg.*, TestResultStrTab.sValue
            FROM TestResultMsg, TestResultStrTab
            WHERE TestResultStrTab.idStr = TestResultMsgs.idStrName

        Return self. Raises exception if no row.
        """
        TestResultMsgData.initFromDbRow(self, aoRow);
        self.sMsg = aoRow[5];
        return self;


class TestResultFileData(ModelDataBase):
    """
    Test result file data.
    """

    ksIdAttr = 'idTestResultFile';

    ksParam_idTestResultFile = 'TestResultFile_idTestResultFile';
    ksParam_idTestResult     = 'TestResultFile_idTestResult';
    ksParam_tsCreated        = 'TestResultFile_tsCreated';
    ksParam_idStrFile        = 'TestResultFile_idStrFile';
    ksParam_idStrDescription = 'TestResultFile_idStrDescription';
    ksParam_idStrKind        = 'TestResultFile_idStrKind';
    ksParam_idStrMime        = 'TestResultFile_idStrMime';

    def __init__(self):
        ModelDataBase.__init__(self)
        self.idTestResultFile = None;
        self.idTestResult     = None;
        self.tsCreated        = None;
        self.idStrFile        = None;
        self.idStrDescription = None;
        self.idStrKind        = None;
        self.idStrMime        = None;

    def initFromDbRow(self, aoRow):
        """
        Reinitialize from a SELECT * FROM TestResultFiles row.
        Return self. Raises exception if no row.
        """
        if aoRow is None:
            raise TMExceptionBase('Test result file record not found.')

        self.idTestResultFile = aoRow[0];
        self.idTestResult     = aoRow[1];
        self.tsCreated        = aoRow[2];
        self.idStrFile        = aoRow[3];
        self.idStrDescription = aoRow[4];
        self.idStrKind        = aoRow[5];
        self.idStrMime        = aoRow[6];
        return self;


class TestResultFileDataEx(TestResultFileData):
    """
    Extends TestResultFile by resolving the strings.
    """

    def __init__(self):
        TestResultFileData.__init__(self)
        self.sFile        = None;
        self.sDescription = None;
        self.sKind        = None;
        self.sMime        = None;

    def initFromDbRow(self, aoRow):
        """
        Reinitialize from a query like this:
            SELECT TestResultFiles.*,
                   StrTabFile.sValue AS sFile,
                   StrTabDesc.sValue AS sDescription,
                   StrTabKind.sValue AS sKind,
                   StrTabMime.sValue AS sMime,
            FROM ...

        Return self. Raises exception if no row.
        """
        TestResultFileData.initFromDbRow(self, aoRow);
        self.sFile        = aoRow[7];
        self.sDescription = aoRow[8];
        self.sKind        = aoRow[9];
        self.sMime        = aoRow[10];
        return self;

    def initFakeMainLog(self, oTestSet):
        """
        Reinitializes to represent the main.log object (not in DB).
        Returns self.
        """
        self.idTestResultFile = 0;
        self.idTestResult     = oTestSet.idTestResult;
        self.tsCreated        = oTestSet.tsCreated;
        self.idStrFile        = None;
        self.idStrDescription = None;
        self.idStrKind        = None;
        self.idStrMime        = None;

        self.sFile        = 'main.log';
        self.sDescription = '';
        self.sKind        = 'log/main';
        self.sMime        = 'text/plain';
        return self;

    def isProbablyUtf8Encoded(self):
        """ Checks if the file is likely to be UTF-8 encoded. """
        if self.sMime in [ 'text/plain', 'text/html' ]:
            return True;
        return False;

    def getMimeWithEncoding(self):
        """ Gets the MIME type with encoding if likely to be UTF-8. """
        if self.isProbablyUtf8Encoded():
            return '%s; charset=utf-8' % (self.sMime,);
        return self.sMime;


class TestResultListingData(ModelDataBase): # pylint: disable=R0902
    """
    Test case result data representation for table listing.
    """

    def __init__(self):
        """Initialize"""
        ModelDataBase.__init__(self)

        self.idTestSet            = None

        self.idBuildCategory      = None;
        self.sProduct             = None
        self.sRepository          = None;
        self.sBranch              = None
        self.sType                = None
        self.idBuild              = None;
        self.sVersion             = None;
        self.iRevision            = None

        self.sOs                  = None;
        self.sOsVersion           = None;
        self.sArch                = None;
        self.sCpuVendor           = None;
        self.sCpuName             = None;
        self.cCpus                = None;
        self.fCpuHwVirt           = None;
        self.fCpuNestedPaging     = None;
        self.fCpu64BitGuest       = None;
        self.idTestBox            = None
        self.sTestBoxName         = None

        self.tsCreated            = None
        self.tsElapsed            = None
        self.enmStatus            = None
        self.cErrors              = None;

        self.idTestCase           = None
        self.sTestCaseName        = None
        self.sBaseCmd             = None
        self.sArgs                = None

        self.idBuildTestSuite     = None;
        self.iRevisionTestSuite   = None;

    def initFromDbRow(self, aoRow):
        """
        Reinitialize from a database query.
        Return self. Raises exception if no row.
        """
        if aoRow is None:
            raise TMExceptionBase('Test result record not found.')

        self.idTestSet            = aoRow[0];

        self.idBuildCategory      = aoRow[1];
        self.sProduct             = aoRow[2];
        self.sRepository          = aoRow[3];
        self.sBranch              = aoRow[4];
        self.sType                = aoRow[5];
        self.idBuild              = aoRow[6];
        self.sVersion             = aoRow[7];
        self.iRevision            = aoRow[8];

        self.sOs                  = aoRow[9];
        self.sOsVersion           = aoRow[10];
        self.sArch                = aoRow[11];
        self.sCpuVendor           = aoRow[12];
        self.sCpuName             = aoRow[13];
        self.cCpus                = aoRow[14];
        self.fCpuHwVirt           = aoRow[15];
        self.fCpuNestedPaging     = aoRow[16];
        self.fCpu64BitGuest       = aoRow[17];
        self.idTestBox            = aoRow[18];
        self.sTestBoxName         = aoRow[19];

        self.tsCreated            = aoRow[20];
        self.tsElapsed            = aoRow[21];
        self.enmStatus            = aoRow[22];
        self.cErrors              = aoRow[23];

        self.idTestCase           = aoRow[24];
        self.sTestCaseName        = aoRow[25];
        self.sBaseCmd             = aoRow[26];
        self.sArgs                = aoRow[27];

        self.idBuildTestSuite     = aoRow[28];
        self.iRevisionTestSuite   = aoRow[29];

        return self


class TestResultHangingOffence(TMExceptionBase):
    """Hanging offence committed by test case."""
    pass;


class TestResultLogic(ModelLogicBase): # pylint: disable=R0903
    """
    Results grouped by scheduling group.
    """

    #
    # Result grinding for displaying in the WUI.
# ksResultsGroupingTypeNone = 'ResultsGroupingTypeNone' ksResultsGroupingTypeTestGroup = 'ResultsGroupingTypeTestGroup' ksResultsGroupingTypeBuildRev = 'ResultsGroupingTypeBuild' ksResultsGroupingTypeTestBox = 'ResultsGroupingTypeTestBox' ksResultsGroupingTypeTestCase = 'ResultsGroupingTypeTestCase' ksResultsGroupingTypeSchedGroup = 'ResultsGroupingTypeSchedGroup' ksBaseTables = 'BuildCategories, Builds, TestBoxes, TestResults, TestCases, TestCaseArgs,\n' \ + ' TestSets LEFT OUTER JOIN Builds AS TestSuiteBits\n' \ ' ON TestSets.idBuildTestSuite = TestSuiteBits.idBuild\n'; ksBasePreCondition = 'TestSets.idTestSet = TestResults.idTestSet\n' \ + ' AND TestResults.idTestResultParent is NULL\n' \ + ' AND TestSets.idBuild = Builds.idBuild\n' \ + ' AND Builds.tsExpire > TestSets.tsCreated\n' \ + ' AND Builds.tsEffective <= TestSets.tsCreated\n' \ + ' AND Builds.idBuildCategory = BuildCategories.idBuildCategory\n' \ + ' AND TestSets.idGenTestBox = TestBoxes.idGenTestBox\n' \ + ' AND TestSets.idGenTestCase = TestCases.idGenTestCase\n' \ + ' AND TestSets.idGenTestCaseArgs = TestCaseArgs.idGenTestCaseArgs\n' kdResultGroupingMap = { ksResultsGroupingTypeNone: (ksBaseTables, ksBasePreCondition,), ksResultsGroupingTypeTestGroup: (ksBaseTables, ksBasePreCondition + ' AND TestSets.idTestGroup',), ksResultsGroupingTypeBuildRev: (ksBaseTables, ksBasePreCondition + ' AND Builds.iRevision',), ksResultsGroupingTypeTestBox: (ksBaseTables, ksBasePreCondition + ' AND TestSets.idTestBox',), ksResultsGroupingTypeTestCase: (ksBaseTables, ksBasePreCondition + ' AND TestSets.idTestCase',), ksResultsGroupingTypeSchedGroup: (ksBaseTables, ksBasePreCondition + ' AND TestBoxes.idSchedGroup',), } def _getTimePeriodQueryPart(self, tsNow, sInterval): """ Get part of SQL query responsible for SELECT data within specified period of time. """ assert sInterval is not None; # too many rows. cMonthsMourningPeriod = 2; # Stop reminding everyone about testboxes after 2 months. (May also speed up the query.) if tsNow is None: sRet = '(TestSets.tsDone IS NULL OR TestSets.tsDone >= (CURRENT_TIMESTAMP - \'%s\'::interval))\n' \ ' AND TestSets.tsCreated >= (CURRENT_TIMESTAMP - \'%s\'::interval - \'%u months\'::interval)\n' \ % (sInterval, sInterval, cMonthsMourningPeriod); else: sTsNow = '\'%s\'::TIMESTAMP' % (tsNow,); # It's actually a string already. duh. sRet = 'TestSets.tsCreated <= %s\n' \ ' AND TestSets.tsCreated >= (%s - \'%s\'::interval - \'%u months\'::interval)\n' \ ' AND (TestSets.tsDone IS NULL OR TestSets.tsDone >= (%s - \'%s\'::interval))\n' \ % ( sTsNow, sTsNow, sInterval, cMonthsMourningPeriod, sTsNow, sInterval ); return sRet def _getSqlQueryForGroupSearch(self, sWhat, tsNow, sInterval, enmResultsGroupingType, iResultsGroupingValue, fOnlyFailures): """ Returns an SQL query that limits SELECT result in order to satisfy @param enmResultsGroupingType. """ if enmResultsGroupingType is None: raise TMExceptionBase('Unknown grouping type') if enmResultsGroupingType not in self.kdResultGroupingMap: raise TMExceptionBase('Unknown grouping type') # Get SQL query parameters sTables, sCondition = self.kdResultGroupingMap[enmResultsGroupingType] # Extend SQL query with time period limitation sTimePeriodQuery = self._getTimePeriodQueryPart(tsNow, sInterval) if iResultsGroupingValue is not None: sCondition += ' = %d' % iResultsGroupingValue + '\n'; sCondition += ' AND ' + sTimePeriodQuery # Extend the condition with test status limitations if requested. 
if fOnlyFailures: sCondition += '\n AND TestSets.enmStatus != \'success\'::TestStatus_T' \ '\n AND TestSets.enmStatus != \'running\'::TestStatus_T'; # Assemble the query. sQuery = 'SELECT DISTINCT %s\n' % sWhat sQuery += 'FROM %s\n' % sTables sQuery += 'WHERE %s\n' % sCondition return sQuery def fetchResultsForListing(self, iStart, cMaxRows, tsNow, sInterval, enmResultsGroupingType, iResultsGroupingValue, fOnlyFailures): """ Fetches TestResults table content. If @param enmResultsGroupingType and @param iResultsGroupingValue are not None, then the resulting (returned) list contains only records that match the specified @param enmResultsGroupingType. If @param enmResultsGroupingType is None, then @param iResultsGroupingValue is ignored. Returns an array (list) of TestResultData items, empty list if none. Raises exception on error. """ sWhat = 'TestSets.idTestSet,\n' \ ' BuildCategories.idBuildCategory,\n' \ ' BuildCategories.sProduct,\n' \ ' BuildCategories.sRepository,\n' \ ' BuildCategories.sBranch,\n' \ ' BuildCategories.sType,\n' \ ' Builds.idBuild,\n' \ ' Builds.sVersion,\n' \ ' Builds.iRevision,\n' \ ' TestBoxes.sOs,\n' \ ' TestBoxes.sOsVersion,\n' \ ' TestBoxes.sCpuArch,\n' \ ' TestBoxes.sCpuVendor,\n' \ ' TestBoxes.sCpuName,\n' \ ' TestBoxes.cCpus,\n' \ ' TestBoxes.fCpuHwVirt,\n' \ ' TestBoxes.fCpuNestedPaging,\n' \ ' TestBoxes.fCpu64BitGuest,\n' \ ' TestBoxes.idTestBox,\n' \ ' TestBoxes.sName,\n' \ ' TestResults.tsCreated,\n' \ ' COALESCE(TestResults.tsElapsed, CURRENT_TIMESTAMP - TestResults.tsCreated),\n' \ ' TestSets.enmStatus,\n' \ ' TestResults.cErrors,\n' \ ' TestCases.idTestCase,\n' \ ' TestCases.sName,\n' \ ' TestCases.sBaseCmd,\n' \ ' TestCaseArgs.sArgs,\n' \ ' TestSuiteBits.idBuild AS idBuildTestSuite,\n' \ ' TestSuiteBits.iRevision AS iRevisionTestSuite,\n' \ ' (TestSets.tsDone IS NULL) SortRunningFirst' \ ; sSqlQuery = self._getSqlQueryForGroupSearch(sWhat, tsNow, sInterval, enmResultsGroupingType, iResultsGroupingValue, fOnlyFailures); sSqlQuery += 'ORDER BY SortRunningFirst DESC, TestSets.idTestSet DESC\n'; sSqlQuery += 'LIMIT %s OFFSET %s\n' % (cMaxRows, iStart,); self._oDb.execute(sSqlQuery); aoRows = []; for aoRow in self._oDb.fetchAll(): aoRows.append(TestResultListingData().initFromDbRow(aoRow)) return aoRows def getEntriesCount(self, tsNow, sInterval, enmResultsGroupingType, iResultsGroupingValue, fOnlyFailures): """ Get number of table records. If @param enmResultsGroupingType and @param iResultsGroupingValue are not None, then we count only those records that match the specified @param enmResultsGroupingType. If @param enmResultsGroupingType is None, then @param iResultsGroupingValue is ignored. """ sSqlQuery = self._getSqlQueryForGroupSearch('COUNT(TestSets.idTestSet)', tsNow, sInterval, enmResultsGroupingType, iResultsGroupingValue, fOnlyFailures) self._oDb.execute(sSqlQuery) return self._oDb.fetchOne()[0] def getTestGroups(self, tsNow, sPeriod): """ Get a list of unique TestGroupData objects found in all test results. """ self._oDb.execute('SELECT DISTINCT TestGroups.*\n' 'FROM TestGroups, TestSets\n' 'WHERE TestSets.idTestGroup = TestGroups.idTestGroup\n' ' AND TestGroups.tsExpire > TestSets.tsCreated\n' ' AND TestGroups.tsEffective <= TestSets.tsCreated' ' AND ' + self._getTimePeriodQueryPart(tsNow, sPeriod)) aaoRows = self._oDb.fetchAll() aoRet = [] for aoRow in aaoRows: ## @todo Need to take time into consideration. Will go belly up if we delete a testgroup.
aoRet.append(TestGroupData().initFromDbRow(aoRow)) return aoRet def getBuilds(self, tsNow, sPeriod): """ Get a list of unique BuildDataEx objects found in all test results. """ self._oDb.execute('SELECT DISTINCT Builds.*, BuildCategories.*\n' 'FROM Builds, BuildCategories, TestSets\n' 'WHERE TestSets.idBuild = Builds.idBuild\n' ' AND Builds.idBuildCategory = BuildCategories.idBuildCategory\n' ' AND Builds.tsExpire > TestSets.tsCreated\n' ' AND Builds.tsEffective <= TestSets.tsCreated' ' AND ' + self._getTimePeriodQueryPart(tsNow, sPeriod)) aaoRows = self._oDb.fetchAll() aoRet = [] for aoRow in aaoRows: aoRet.append(BuildDataEx().initFromDbRow(aoRow)) return aoRet def getTestBoxes(self, tsNow, sPeriod): """ Get a list of unique TestBoxData objects found in all test results. """ ## @todo do all in one query. self._oDb.execute('SELECT DISTINCT TestBoxes.idTestBox, TestBoxes.idGenTestBox\n' 'FROM TestBoxes, TestSets\n' 'WHERE TestSets.idGenTestBox = TestBoxes.idGenTestBox\n' ' AND ' + self._getTimePeriodQueryPart(tsNow, sPeriod) + 'ORDER BY TestBoxes.idTestBox, TestBoxes.idGenTestBox DESC' ); idPrevTestBox = -1; asIdGenTestBoxes = []; for aoRow in self._oDb.fetchAll(): if aoRow[0] != idPrevTestBox: idPrevTestBox = aoRow[0]; asIdGenTestBoxes.append(str(aoRow[1])); aoRet = [] if len(asIdGenTestBoxes) > 0: self._oDb.execute('SELECT *\n' 'FROM TestBoxes\n' 'WHERE idGenTestBox IN (' + ','.join(asIdGenTestBoxes) + ')\n' 'ORDER BY sName'); for aoRow in self._oDb.fetchAll(): aoRet.append(TestBoxData().initFromDbRow(aoRow)); return aoRet def getTestCases(self, tsNow, sPeriod): """ Get a list of unique TestCaseData objects that appear in the specified test result period. """ self._oDb.execute('SELECT DISTINCT TestCases.idTestCase, TestCases.idGenTestCase, TestSets.tsConfig\n' 'FROM TestCases, TestSets\n' 'WHERE TestSets.idTestCase = TestCases.idTestCase\n' ' AND TestCases.tsExpire > TestSets.tsCreated\n' ' AND TestCases.tsEffective <= TestSets.tsCreated\n' ' AND ' + self._getTimePeriodQueryPart(tsNow, sPeriod) + 'ORDER BY TestCases.idTestCase, TestCases.idGenTestCase DESC\n'); aaoRows = self._oDb.fetchAll() aoRet = [] idPrevTestCase = -1; for aoRow in aaoRows: ## @todo reduce subqueries if aoRow[0] != idPrevTestCase: idPrevTestCase = aoRow[0]; aoRet.append(TestCaseData().initFromDbWithGenId(self._oDb, aoRow[1], aoRow[2])) return aoRet def getSchedGroups(self, tsNow, sPeriod): """ Get a list of unique SchedGroupData objects found in all test results. """ self._oDb.execute('SELECT DISTINCT TestBoxes.idSchedGroup\n' 'FROM TestBoxes, TestSets\n' 'WHERE TestSets.idGenTestBox = TestBoxes.idGenTestBox\n' ' AND TestBoxes.tsExpire > TestSets.tsCreated\n' ' AND TestBoxes.tsEffective <= TestSets.tsCreated' ' AND ' + self._getTimePeriodQueryPart(tsNow, sPeriod)) aiRows = self._oDb.fetchAll() aoRet = [] for iRow in aiRows: ## @todo reduce subqueries aoRet.append(SchedGroupData().initFromDbWithId(self._oDb, iRow)) return aoRet def getById(self, idTestResult): """ Get a test result record by its id """ self._oDb.execute('SELECT *\n' 'FROM TestResults\n' 'WHERE idTestResult = %s\n', (idTestResult,)) aRows = self._oDb.fetchAll() if len(aRows) not in (0, 1): raise TMExceptionBase('Found more than one test result with the same credentials. Database structure is corrupted.') try: return TestResultData().initFromDbRow(aRows[0]) except IndexError: return None # # Details view and interface. # def fetchResultTree(self, idTestSet, cMaxDepth = None): """ Fetches the result tree for the given test set.
Returns a tree of TestResultDataEx nodes. Raises exception on invalid input and database issues. """ # Depth first, i.e. just like the XML added them. ## @todo this still isn't performing extremely well, consider optimizations. sQuery = self._oDb.formatBindArgs( 'SELECT TestResults.*,\n' ' TestResultStrTab.sValue,\n' ' EXISTS ( SELECT idTestResultValue\n' ' FROM TestResultValues\n' ' WHERE TestResultValues.idTestResult = TestResults.idTestResult ) AS fHasValues,\n' ' EXISTS ( SELECT idTestResultMsg\n' ' FROM TestResultMsgs\n' ' WHERE TestResultMsgs.idTestResult = TestResults.idTestResult ) AS fHasMsgs,\n' ' EXISTS ( SELECT idTestResultFile\n' ' FROM TestResultFiles\n' ' WHERE TestResultFiles.idTestResult = TestResults.idTestResult ) AS fHasFiles\n' 'FROM TestResults, TestResultStrTab\n' 'WHERE TestResults.idTestSet = %s\n' ' AND TestResults.idStrName = TestResultStrTab.idStr\n' , ( idTestSet, )); if cMaxDepth is not None: sQuery += self._oDb.formatBindArgs(' AND TestResults.iNestingDepth <= %s\n', (cMaxDepth,)); sQuery += 'ORDER BY idTestResult ASC\n' self._oDb.execute(sQuery); cRows = self._oDb.getRowCount(); if cRows > 65536: raise TMTooManyRows('Too many rows returned for idTestSet=%d: %d' % (idTestSet, cRows,)); aaoRows = self._oDb.fetchAll(); if len(aaoRows) == 0: raise TMExceptionBase('No test results for idTestSet=%d.' % (idTestSet,)); # Set up the root node first. aoRow = aaoRows[0]; oRoot = TestResultDataEx().initFromDbRow(aoRow); if oRoot.idTestResultParent is not None: raise self._oDb.integrityException('The root TestResult (#%s) has a parent (#%s)!' % (oRoot.idTestResult, oRoot.idTestResultParent)); self._fetchResultTreeNodeExtras(oRoot, aoRow[-3], aoRow[-2], aoRow[-1]); # The children (if any). dLookup = { oRoot.idTestResult: oRoot }; oParent = oRoot; for iRow in range(1, len(aaoRows)): aoRow = aaoRows[iRow]; oCur = TestResultDataEx().initFromDbRow(aoRow); self._fetchResultTreeNodeExtras(oCur, aoRow[-3], aoRow[-2], aoRow[-1]); # Figure out and vet the parent. if oParent.idTestResult != oCur.idTestResultParent: oParent = dLookup.get(oCur.idTestResultParent, None); if oParent is None: raise self._oDb.integrityException('TestResult #%d is orphaned from its parent #%s.' % (oCur.idTestResult, oCur.idTestResultParent,)); if oParent.iNestingDepth + 1 != oCur.iNestingDepth: raise self._oDb.integrityException('TestResult #%d has incorrect nesting depth (%d instead of %d)' % (oCur.idTestResult, oCur.iNestingDepth, oParent.iNestingDepth + 1,)); # Link it up. oCur.oParent = oParent; oParent.aoChildren.append(oCur); dLookup[oCur.idTestResult] = oCur; return (oRoot, dLookup); def _fetchResultTreeNodeExtras(self, oCurNode, fHasValues, fHasMsgs, fHasFiles): """ fetchResultTree worker that fetches values, messages and files for the specified node.
""" assert(oCurNode.aoValues == []); assert(oCurNode.aoMsgs == []); assert(oCurNode.aoFiles == []); if fHasValues: self._oDb.execute('SELECT TestResultValues.*,\n' ' TestResultStrTab.sValue\n' 'FROM TestResultValues, TestResultStrTab\n' 'WHERE TestResultValues.idTestResult = %s\n' ' AND TestResultValues.idStrName = TestResultStrTab.idStr\n' 'ORDER BY idTestResultValue ASC\n' , ( oCurNode.idTestResult, )); for aoRow in self._oDb.fetchAll(): oCurNode.aoValues.append(TestResultValueDataEx().initFromDbRow(aoRow)); if fHasMsgs: self._oDb.execute('SELECT TestResultMsgs.*,\n' ' TestResultStrTab.sValue\n' 'FROM TestResultMsgs, TestResultStrTab\n' 'WHERE TestResultMsgs.idTestResult = %s\n' ' AND TestResultMsgs.idStrMsg = TestResultStrTab.idStr\n' 'ORDER BY idTestResultMsg ASC\n' , ( oCurNode.idTestResult, )); for aoRow in self._oDb.fetchAll(): oCurNode.aoMsgs.append(TestResultMsgDataEx().initFromDbRow(aoRow)); if fHasFiles: self._oDb.execute('SELECT TestResultFiles.*,\n' ' StrTabFile.sValue AS sFile,\n' ' StrTabDesc.sValue AS sDescription,\n' ' StrTabKind.sValue AS sKind,\n' ' StrTabMime.sValue AS sMime\n' 'FROM TestResultFiles,\n' ' TestResultStrTab AS StrTabFile,\n' ' TestResultStrTab AS StrTabDesc,\n' ' TestResultStrTab AS StrTabKind,\n' ' TestResultStrTab AS StrTabMime\n' 'WHERE TestResultFiles.idTestResult = %s\n' ' AND TestResultFiles.idStrFile = StrTabFile.idStr\n' ' AND TestResultFiles.idStrDescription = StrTabDesc.idStr\n' ' AND TestResultFiles.idStrKind = StrTabKind.idStr\n' ' AND TestResultFiles.idStrMime = StrTabMime.idStr\n' 'ORDER BY idTestResultFile ASC\n' , ( oCurNode.idTestResult, )); for aoRow in self._oDb.fetchAll(): oCurNode.aoFiles.append(TestResultFileDataEx().initFromDbRow(aoRow)); return True; # # TestBoxController interface(s). # def _inhumeTestResults(self, aoStack, idTestSet, sError): """ The test produces too much output, kill and bury it. Note! We leave the test set open, only the test result records are completed. Thus, _getResultStack will return an empty stack and cause XML processing to fail immediately, while we can still record when it actually completed in the test set the normal way. """ self._oDb.dprint('** _inhumeTestResults: idTestSet=%d\n%s' % (idTestSet, self._stringifyStack(aoStack),)); # # First add a message. # self._newFailureDetails(aoStack[0].idTestResult, sError, None); # # The complete all open test results. # for oTestResult in aoStack: oTestResult.cErrors += 1; self._completeTestResults(oTestResult, None, TestResultData.ksTestStatus_Failure, oTestResult.cErrors); # A bit of paranoia. self._oDb.execute('UPDATE TestResults\n' 'SET cErrors = cErrors + 1,\n' ' enmStatus = \'failure\'::TestStatus_T,\n' ' tsElapsed = CURRENT_TIMESTAMP - tsCreated\n' 'WHERE idTestSet = %s\n' ' AND enmStatus = \'running\'::TestStatus_T\n' , ( idTestSet, )); self._oDb.commit(); return None; def strTabString(self, sString, fCommit = False): """ Gets the string table id for the given string, adding it if new. Note! A copy of this code is also in TestSetLogic. """ ## @todo move this and make a stored procedure for it. 
self._oDb.execute('SELECT idStr\n' 'FROM TestResultStrTab\n' 'WHERE sValue = %s' , (sString,)); if self._oDb.getRowCount() == 0: self._oDb.execute('INSERT INTO TestResultStrTab (sValue)\n' 'VALUES (%s)\n' 'RETURNING idStr\n' , (sString,)); if fCommit: self._oDb.commit(); return self._oDb.fetchOne()[0]; @staticmethod def _stringifyStack(aoStack): """Returns a string rep of the stack.""" sRet = ''; for i in range(len(aoStack)): sRet += 'aoStack[%d]=%s\n' % (i, aoStack[i]); return sRet; def _getResultStack(self, idTestSet): """ Gets the current stack of result sets. """ self._oDb.execute('SELECT *\n' 'FROM TestResults\n' 'WHERE idTestSet = %s\n' ' AND enmStatus = \'running\'::TestStatus_T\n' 'ORDER BY idTestResult DESC' , ( idTestSet, )); aoStack = []; for aoRow in self._oDb.fetchAll(): aoStack.append(TestResultData().initFromDbRow(aoRow)); for i in range(len(aoStack)): assert aoStack[i].iNestingDepth == len(aoStack) - i - 1, self._stringifyStack(aoStack); return aoStack; def _newTestResult(self, idTestResultParent, idTestSet, iNestingDepth, tsCreated, sName, dCounts, fCommit = False): """ Creates a new test result. Returns the TestResultData object for the new record. May raise exception on database error. """ assert idTestResultParent is not None; assert idTestResultParent > 1; # # This isn't necessarily very efficient, but it's necessary to prevent # a wild test or testbox from filling up the database. # sCountName = 'cTestResults'; if sCountName not in dCounts: self._oDb.execute('SELECT COUNT(idTestResult)\n' 'FROM TestResults\n' 'WHERE idTestSet = %s\n' , ( idTestSet,)); dCounts[sCountName] = self._oDb.fetchOne()[0]; dCounts[sCountName] += 1; if dCounts[sCountName] > config.g_kcMaxTestResultsPerTS: raise TestResultHangingOffence('Too many sub-tests in total!'); sCountName = 'cTestResultsIn%d' % (idTestResultParent,); if sCountName not in dCounts: self._oDb.execute('SELECT COUNT(idTestResult)\n' 'FROM TestResults\n' 'WHERE idTestResultParent = %s\n' , ( idTestResultParent,)); dCounts[sCountName] = self._oDb.fetchOne()[0]; dCounts[sCountName] += 1; if dCounts[sCountName] > config.g_kcMaxTestResultsPerTR: raise TestResultHangingOffence('Too many immediate sub-tests!'); # This is also a hanging offence. if iNestingDepth > config.g_kcMaxTestResultDepth: raise TestResultHangingOffence('Too deep sub-test nesting!'); # Ditto. if len(sName) > config.g_kcchMaxTestResultName: raise TestResultHangingOffence('Test name is too long: %d chars - "%s"' % (len(sName), sName)); # # Within bounds, do the job. # idStrName = self.strTabString(sName, fCommit); self._oDb.execute('INSERT INTO TestResults (\n' ' idTestResultParent,\n' ' idTestSet,\n' ' tsCreated,\n' ' idStrName,\n' ' iNestingDepth )\n' 'VALUES (%s, %s, TIMESTAMP WITH TIME ZONE %s, %s, %s)\n' 'RETURNING *\n' , ( idTestResultParent, idTestSet, tsCreated, idStrName, iNestingDepth) ) oData = TestResultData().initFromDbRow(self._oDb.fetchOne()); self._oDb.maybeCommit(fCommit); return oData; def _newTestValue(self, idTestResult, idTestSet, sName, lValue, sUnit, dCounts, tsCreated = None, fCommit = False): """ Creates a test value. May raise exception on database error. """ # # Bounds checking.
# sCountName = 'cTestValues'; if sCountName not in dCounts: self._oDb.execute('SELECT COUNT(idTestResultValue)\n' 'FROM TestResultValues, TestResults\n' 'WHERE TestResultValues.idTestResult = TestResults.idTestResult\n' ' AND TestResults.idTestSet = %s\n' , ( idTestSet,)); dCounts[sCountName] = self._oDb.fetchOne()[0]; dCounts[sCountName] += 1; if dCounts[sCountName] > config.g_kcMaxTestValuesPerTS: raise TestResultHangingOffence('Too many values in total!'); sCountName = 'cTestValuesIn%d' % (idTestResult,); if sCountName not in dCounts: self._oDb.execute('SELECT COUNT(idTestResultValue)\n' 'FROM TestResultValues\n' 'WHERE idTestResult = %s\n' , ( idTestResult,)); dCounts[sCountName] = self._oDb.fetchOne()[0]; dCounts[sCountName] += 1; if dCounts[sCountName] > config.g_kcMaxTestValuesPerTR: raise TestResultHangingOffence('Too many immediate values for one test result!'); if len(sName) > config.g_kcchMaxTestValueName: raise TestResultHangingOffence('Value name is too long: %d chars - "%s"' % (len(sName), sName)); # # Do the job. # iUnit = constants.valueunit.g_kdNameToConst.get(sUnit, constants.valueunit.NONE); idStrName = self.strTabString(sName, fCommit); if tsCreated is None: self._oDb.execute('INSERT INTO TestResultValues (\n' ' idTestResult,\n' ' idTestSet,\n' ' idStrName,\n' ' lValue,\n' ' iUnit)\n' 'VALUES ( %s, %s, %s, %s, %s )\n' , ( idTestResult, idTestSet, idStrName, lValue, iUnit,) ); else: self._oDb.execute('INSERT INTO TestResultValues (\n' ' idTestResult,\n' ' idTestSet,\n' ' tsCreated,\n' ' idStrName,\n' ' lValue,\n' ' iUnit)\n' 'VALUES ( %s, %s, TIMESTAMP WITH TIME ZONE %s, %s, %s, %s )\n' , ( idTestResult, idTestSet, tsCreated, idStrName, lValue, iUnit,) ); self._oDb.maybeCommit(fCommit); return True; def _newFailureDetails(self, idTestResult, sText, dCounts, tsCreated = None, fCommit = False): """ Creates a record detailing cause of failure. May raise exception on database error. """ # # Overflow protection. # if dCounts is not None: sCountName = 'cTestMsgsIn%d' % (idTestResult,); if sCountName not in dCounts: self._oDb.execute('SELECT COUNT(idTestResultMsg)\n' 'FROM TestResultMsgs\n' 'WHERE idTestResult = %s\n' , ( idTestResult,)); dCounts[sCountName] = self._oDb.fetchOne()[0]; dCounts[sCountName] += 1; if dCounts[sCountName] > config.g_kcMaxTestMsgsPerTR: raise TestResultHangingOffence('Too many messages for one test result!'); if len(sText) > config.g_kcchMaxTestMsg: raise TestResultHangingOffence('Failure details message is too long: %d chars - "%s"' % (len(sText), sText)); # # Do the job. # idStrMsg = self.strTabString(sText, fCommit); if tsCreated is None: self._oDb.execute('INSERT INTO TestResultMsgs (\n' ' idTestResult,\n' ' idStrMsg,\n' ' enmLevel)\n' 'VALUES ( %s, %s, %s)\n' , ( idTestResult, idStrMsg, 'failure',) ); else: self._oDb.execute('INSERT INTO TestResultMsgs (\n' ' idTestResult,\n' ' tsCreated,\n' ' idStrMsg,\n' ' enmLevel)\n' 'VALUES ( %s, TIMESTAMP WITH TIME ZONE %s, %s, %s)\n' , ( idTestResult, tsCreated, idStrMsg, 'failure',) ); self._oDb.maybeCommit(fCommit); return True; def _completeTestResults(self, oTestResult, tsDone, enmStatus, cErrors = 0, fCommit = False): """ Completes a test result. Updates the oTestResult object. May raise exception on database error. """ self._oDb.dprint('** _completeTestResults: cErrors=%s tsDone=%s enmStatus=%s oTestResults=\n%s' % (cErrors, tsDone, enmStatus, oTestResult,)); # # Sanity check: No open sub tests (aoStack should make sure about this!).
# self._oDb.execute('SELECT COUNT(idTestResult)\n' 'FROM TestResults\n' 'WHERE idTestResultParent = %s\n' ' AND enmStatus = %s\n' , ( oTestResult.idTestResult, TestResultData.ksTestStatus_Running,)); cOpenSubTest = self._oDb.fetchOne()[0]; assert cOpenSubTest == 0, 'cOpenSubTest=%d - %s' % (cOpenSubTest, oTestResult,); assert oTestResult.enmStatus == TestResultData.ksTestStatus_Running; # # Make sure the reporter isn't lying about successes or error counts. # self._oDb.execute('SELECT COALESCE(SUM(cErrors), 0)\n' 'FROM TestResults\n' 'WHERE idTestResultParent = %s\n' , ( oTestResult.idTestResult, )); cMinErrors = self._oDb.fetchOne()[0] + oTestResult.cErrors; if cErrors < cMinErrors: cErrors = cMinErrors; if cErrors > 0 and enmStatus == TestResultData.ksTestStatus_Success: enmStatus = TestResultData.ksTestStatus_Failure # # Do the update. # if tsDone is None: self._oDb.execute('UPDATE TestResults\n' 'SET cErrors = %s,\n' ' enmStatus = %s,\n' ' tsElapsed = CURRENT_TIMESTAMP - tsCreated\n' 'WHERE idTestResult = %s\n' 'RETURNING tsElapsed' , ( cErrors, enmStatus, oTestResult.idTestResult,) ); else: self._oDb.execute('UPDATE TestResults\n' 'SET cErrors = %s,\n' ' enmStatus = %s,\n' ' tsElapsed = TIMESTAMP WITH TIME ZONE %s - tsCreated\n' 'WHERE idTestResult = %s\n' 'RETURNING tsElapsed' , ( cErrors, enmStatus, tsDone, oTestResult.idTestResult,) ); oTestResult.tsElapsed = self._oDb.fetchOne()[0]; oTestResult.enmStatus = enmStatus; oTestResult.cErrors = cErrors; self._oDb.maybeCommit(fCommit); return None; def _doPopHint(self, aoStack, cStackEntries, dCounts): """ Executes a PopHint. """ assert cStackEntries >= 0; while len(aoStack) > cStackEntries: if aoStack[0].enmStatus == TestResultData.ksTestStatus_Running: self._newFailureDetails(aoStack[0].idTestResult, 'XML error: Missing </Test>', dCounts); self._completeTestResults(aoStack[0], tsDone = None, cErrors = 1, enmStatus = TestResultData.ksTestStatus_Failure, fCommit = True); aoStack.pop(0); return True; @staticmethod def _validateElement(sName, dAttribs, fClosed): """ Validates an element and its attributes. """ # # Validate attributes by name. # # Validate integer attributes. for sAttr in [ 'errors', 'testdepth' ]: if sAttr in dAttribs: try: _ = int(dAttribs[sAttr]); except: return 'Element %s has an invalid %s attribute value: %s.' % (sName, sAttr, dAttribs[sAttr],); # Validate long attributes. for sAttr in [ 'value', ]: if sAttr in dAttribs: try: _ = long(dAttribs[sAttr]); except: return 'Element %s has an invalid %s attribute value: %s.' % (sName, sAttr, dAttribs[sAttr],); # Validate string attributes. for sAttr in [ 'name', 'unit', 'text' ]: if sAttr in dAttribs and len(dAttribs[sAttr]) == 0: return 'Element %s has an empty %s attribute value.' % (sName, sAttr,); # Validate the timestamp attribute. if 'timestamp' in dAttribs: (dAttribs['timestamp'], sError) = ModelDataBase.validateTs(dAttribs['timestamp'], fAllowNull = False); if sError is not None: return 'Element %s has an invalid timestamp ("%s"): %s' % (sName, dAttribs['timestamp'], sError,); # # Check that attributes that are required are present. # We ignore extra attributes. 
# dElementAttribs = \ { 'Test': [ 'timestamp', 'name', ], 'Value': [ 'timestamp', 'name', 'unit', 'value', ], 'FailureDetails': [ 'timestamp', 'text', ], 'Passed': [ 'timestamp', ], 'Skipped': [ 'timestamp', ], 'Failed': [ 'timestamp', 'errors', ], 'TimedOut': [ 'timestamp', 'errors', ], 'End': [ 'timestamp', ], 'PushHint': [ 'testdepth', ], 'PopHint': [ 'testdepth', ], }; if sName not in dElementAttribs: return 'Unknown element "%s".' % (sName,); for sAttr in dElementAttribs[sName]: if sAttr not in dAttribs: return 'Element %s requires attribute "%s".' % (sName, sAttr); # # Only the Test element can (and must) remain open. # if sName == 'Test' and fClosed: return '<Test/> is not allowed.'; if sName != 'Test' and not fClosed: return 'All elements except <Test> must be closed.'; return None; @staticmethod def _parseElement(sElement): """ Parses an element. """ # # Element level bits. # sName = sElement.split()[0]; sElement = sElement[len(sName):]; fClosed = sElement[-1] == '/'; if fClosed: sElement = sElement[:-1]; # # Attributes. # sError = None; dAttribs = {}; sElement = sElement.strip(); while len(sElement) > 0: # Extract attribute name. off = sElement.find('='); if off < 0 or not sElement[:off].isalnum(): sError = 'Attributes shall have alphanumerical names and values.'; break; sAttr = sElement[:off]; # Extract attribute value. if off + 2 >= len(sElement) or sElement[off + 1] != '"': sError = 'Attribute (%s) value is missing or not in double quotes.' % (sAttr,); break; off += 2; offEndQuote = sElement.find('"', off); if offEndQuote < 0: sError = 'Attribute (%s) value is missing end quotation mark.' % (sAttr,); break; sValue = sElement[off:offEndQuote]; # Check for duplicates. if sAttr in dAttribs: sError = 'Attribute "%s" appears more than once.' % (sAttr,); break; # Unescape the value. sValue = sValue.replace('&lt;', '<'); sValue = sValue.replace('&gt;', '>'); sValue = sValue.replace('&apos;', '\''); sValue = sValue.replace('&quot;', '"'); sValue = sValue.replace('&#xA;', '\n'); sValue = sValue.replace('&#xD;', '\r'); sValue = sValue.replace('&amp;', '&'); # last # Done. dAttribs[sAttr] = sValue; # advance sElement = sElement[offEndQuote + 1:]; sElement = sElement.lstrip(); # # Validate the element before we return. # if sError is None: sError = TestResultLogic._validateElement(sName, dAttribs, fClosed); return (sName, dAttribs, sError) def _handleElement(self, sName, dAttribs, idTestSet, aoStack, aaiHints, dCounts): """ Worker for processXmlStream that handles one element. Returns None on success, error string on bad XML or similar. Raises exception on hanging offence and on database error.
""" if sName == 'Test': iNestingDepth = aoStack[0].iNestingDepth + 1 if len(aoStack) > 0 else 0; aoStack.insert(0, self._newTestResult(idTestResultParent = aoStack[0].idTestResult, idTestSet = idTestSet, tsCreated = dAttribs['timestamp'], sName = dAttribs['name'], iNestingDepth = iNestingDepth, dCounts = dCounts, fCommit = True) ); elif sName == 'Value': self._newTestValue(idTestResult = aoStack[0].idTestResult, idTestSet = idTestSet, tsCreated = dAttribs['timestamp'], sName = dAttribs['name'], sUnit = dAttribs['unit'], lValue = long(dAttribs['value']), dCounts = dCounts, fCommit = True); elif sName == 'FailureDetails': self._newFailureDetails(idTestResult = aoStack[0].idTestResult, tsCreated = dAttribs['timestamp'], sText = dAttribs['text'], dCounts = dCounts, fCommit = True); elif sName == 'Passed': self._completeTestResults(aoStack[0], tsDone = dAttribs['timestamp'], enmStatus = TestResultData.ksTestStatus_Success, fCommit = True); elif sName == 'Skipped': self._completeTestResults(aoStack[0], tsDone = dAttribs['timestamp'], enmStatus = TestResultData.ksTestStatus_Skipped, fCommit = True); elif sName == 'Failed': self._completeTestResults(aoStack[0], tsDone = dAttribs['timestamp'], cErrors = int(dAttribs['errors']), enmStatus = TestResultData.ksTestStatus_Failure, fCommit = True); elif sName == 'TimedOut': self._completeTestResults(aoStack[0], tsDone = dAttribs['timestamp'], cErrors = int(dAttribs['errors']), enmStatus = TestResultData.ksTestStatus_TimedOut, fCommit = True); elif sName == 'End': self._completeTestResults(aoStack[0], tsDone = dAttribs['timestamp'], cErrors = int(dAttribs.get('errors', '1')), enmStatus = TestResultData.ksTestStatus_Success, fCommit = True); elif sName == 'PushHint': if len(aaiHints) > 1: return 'PushHint cannot be nested.' aaiHints.insert(0, [len(aoStack), int(dAttribs['testdepth'])]); elif sName == 'PopHint': if len(aaiHints) < 1: return 'No hint to pop.' iDesiredTestDepth = int(dAttribs['testdepth']); cStackEntries, iTestDepth = aaiHints.pop(0); self._doPopHint(aoStack, cStackEntries, dCounts); # Fake the necessary '<End/></Test>' tags. if iDesiredTestDepth != iTestDepth: return 'PopHint tag has different testdepth: %d, on stack %d.' % (iDesiredTestDepth, iTestDepth); else: return 'Unexpected element "%s".' % (sName,); return None; def processXmlStream(self, sXml, idTestSet): """ Processes the "XML" stream section given in sXml. The sXml isn't a complete XML document, even should we save up all sXml for a given set, they may not form a complete and well formed XML document since the test may be aborted, abend or simply be buggy. We therefore do our own parsing and treat the XML tags as commands more than anything else. Returns (sError, fUnforgivable), where sError is None on success. May raise database exception. """ aoStack = self._getResultStack(idTestSet); # [0] == top; [-1] == bottom. if len(aoStack) == 0: return ('No open results', True); self._oDb.dprint('** processXmlStream len(aoStack)=%s' % (len(aoStack),)); #self._oDb.dprint('processXmlStream: %s' % (self._stringifyStack(aoStack),)); #self._oDb.dprint('processXmlStream: sXml=%s' % (sXml,)); dCounts = {}; aaiHints = []; sError = None; fExpectCloseTest = False; sXml = sXml.strip(); while len(sXml) > 0: if sXml.startswith('</Test>'): # Only closing tag. offNext = len('</Test>'); if len(aoStack) <= 1: sError = 'Trying to close the top test results.' break; # ASSUMES that we've just seen an <End/>, <Passed/>, <Failed/>, # <TimedOut/> or <Skipped/> tag earlier in this call! 
if aoStack[0].enmStatus == TestResultData.ksTestStatus_Running or not fExpectCloseTest: sError = 'Missing <End/>, <Passed/>, <Failed/>, <TimedOut/> or <Skipped/> tag.'; break; aoStack.pop(0); fExpectCloseTest = False; elif fExpectCloseTest: sError = 'Expected </Test>.' break; elif sXml.startswith('<?xml '): # Ignore (included files). offNext = sXml.find('?>'); if offNext < 0: sError = 'Unterminated <?xml ?> element.'; break; offNext += 2; elif sXml[0] == '<': # Parse and check the tag. if not sXml[1].isalpha(): sError = 'Malformed element.'; break; offNext = sXml.find('>') if offNext < 0: sError = 'Unterminated element.'; break; (sName, dAttribs, sError) = self._parseElement(sXml[1:offNext]); offNext += 1; if sError is not None: break; # Handle it. try: sError = self._handleElement(sName, dAttribs, idTestSet, aoStack, aaiHints, dCounts); except TestResultHangingOffence as oXcpt: self._inhumeTestResults(aoStack, idTestSet, str(oXcpt)); return (str(oXcpt), True); fExpectCloseTest = sName in [ 'End', 'Passed', 'Failed', 'TimedOut', 'Skipped', ]; else: sError = 'Unexpected content.'; break; # Advance. sXml = sXml[offNext:]; sXml = sXml.lstrip(); # # Post processing checks. # if sError is None and fExpectCloseTest: sError = 'Expected </Test> before the end of the XML section.' elif sError is None and len(aaiHints) > 0: sError = 'Expected </PopHint> before the end of the XML section.' if len(aaiHints) > 0: self._doPopHint(aoStack, aaiHints[-1][0], dCounts); # # Log the error. # if sError is not None: SystemLogLogic(self._oDb).addEntry(SystemLogData.ksEvent_XmlResultMalformed, 'idTestSet=%s idTestResult=%s XML="%s" %s' % ( idTestSet, aoStack[0].idTestResult if len(aoStack) > 0 else -1, sXml[:30 if len(sXml) >= 30 else len(sXml)], sError, ), cHoursRepeat = 6, fCommit = True); return (sError, False); # # Unit testing. # # pylint: disable=C0111 class TestResultDataTestCase(ModelDataBaseTestCase): def setUp(self): self.aoSamples = [TestResultData(),]; class TestResultValueDataTestCase(ModelDataBaseTestCase): def setUp(self): self.aoSamples = [TestResultValueData(),]; if __name__ == '__main__': unittest.main(); # not reached.
gpl-2.0
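The processXmlStream/_parseElement pair in the file above treats the incoming pseudo-XML as a flat command stream rather than a well-formed document. The following is a minimal, self-contained sketch of that stream format and the tag-splitting step; the sample fragment and the simplified parseElement helper are invented for illustration (no entity unescaping, quoting edge cases, or duplicate-attribute checks, unlike the real _parseElement) and are not part of the Test Manager.

# Self-contained sketch: walk a pseudo-XML command stream the way
# processXmlStream above does.  sSample is an invented fragment; parseElement
# is a simplified stand-in for _parseElement (no unescaping or validation,
# and attribute values must not contain spaces).

sSample = '<Test timestamp="2014-01-01T00:00:00Z" name="tstExample">' \
          '<Value timestamp="2014-01-01T00:00:01Z" name="mem" unit="bytes" value="4096"/>' \
          '<Passed timestamp="2014-01-01T00:00:02Z"/>' \
          '</Test>'

def parseElement(sElement):
    """Split 'Name attr="value" ...' into (sName, dAttribs, fClosed)."""
    fClosed = sElement.endswith('/')
    if fClosed:
        sElement = sElement[:-1]
    asParts = sElement.split()
    dAttribs = dict(sPart.split('=', 1) for sPart in asParts[1:])
    for sAttr in dAttribs:
        dAttribs[sAttr] = dAttribs[sAttr].strip('"')
    return asParts[0], dAttribs, fClosed

sXml = sSample
while sXml:
    if sXml.startswith('</Test>'):   # the only closing tag in the stream
        print('pop one entry off the result stack')
        sXml = sXml[len('</Test>'):]
        continue
    offEnd = sXml.find('>')
    sName, dAttribs, fClosed = parseElement(sXml[1:offEnd])
    print('%-6s closed=%-5s %s' % (sName, fClosed, dAttribs))
    sXml = sXml[offEnd + 1:]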
imbasimba/astroquery
astroquery/dace/tests/test_dace.py
2
1203
import os import unittest import json from astroquery.dace import Dace DATA_FILES = { 'parameter_list': 'parameter_list.json', } class TestDaceClass(unittest.TestCase): def test_transform_data_as_dict(self): expected_parameter_dict = {'ccf_noise': [0.005320016177906, 0.00393390440796704, 0.0032324617496158], 'ins_name': ['CORALIE98', 'CORALIE98', 'HARPS'], 'drs_qc': [True, True, False], 'rjd': [51031, 51039, 51088], 'rv': [31300.4226771379, 31295.5671320506, 31294.3391634734], 'rv_err': [5.420218247708816, 4.0697289792344185, 3.4386352834851026]} with open(self._data_path('parameter_list.json'), 'r') as file: parameter_list = json.load(file) parameter_dict = Dace.transform_data_as_dict(parameter_list) assert parameter_dict == expected_parameter_dict @staticmethod def _data_path(filename): data_dir = os.path.join(os.path.dirname(__file__), 'data') return os.path.join(data_dir, filename) if __name__ == "__main__": unittest.main()
bsd-3-clause
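The expected_parameter_dict in the test above is the column-oriented shape that Dace.transform_data_as_dict is expected to return. As a rough illustration of that reshaping, here is a small standalone version; transform_as_dict and the sample rows below are invented for this sketch and are not astroquery's implementation.

# Standalone sketch of the rows-to-columns reshaping exercised by the test
# above; an illustration only, not astroquery.dace's actual code.

def transform_as_dict(parameter_list):
    parameter_dict = {}
    for record in parameter_list:
        for key, value in record.items():
            parameter_dict.setdefault(key, []).append(value)
    return parameter_dict

rows = [{'rjd': 51031, 'ins_name': 'CORALIE98'},
        {'rjd': 51039, 'ins_name': 'HARPS'}]
assert transform_as_dict(rows) == {'rjd': [51031, 51039],
                                   'ins_name': ['CORALIE98', 'HARPS']}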
danielpalomino/gem5
src/dev/mips/Malta.py
18
2863
# Copyright (c) 2007 The Regents of The University of Michigan # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer; # redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution; # neither the name of the copyright holders nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # # Authors: Korey Sewell from m5.params import * from m5.proxy import * from BadDevice import BadDevice from Device import BasicPioDevice from Pci import PciConfigAll from Platform import Platform from Uart import Uart8250 class MaltaCChip(BasicPioDevice): type = 'MaltaCChip' malta = Param.Malta(Parent.any, "Malta") class MaltaIO(BasicPioDevice): type = 'MaltaIO' time = Param.Time('01/01/2009', "System time to use (0 for actual time, default is 1/1/09)") year_is_bcd = Param.Bool(False, "The RTC should interpret the year as a BCD value") malta = Param.Malta(Parent.any, "Malta") frequency = Param.Frequency('1024Hz', "frequency of interrupts") class MaltaPChip(BasicPioDevice): type = 'MaltaPChip' malta = Param.Malta(Parent.any, "Malta") class Malta(Platform): type = 'Malta' system = Param.System(Parent.any, "system") cchip = MaltaCChip(pio_addr=0x801a0000000) io = MaltaIO(pio_addr=0x801fc000000) uart = Uart8250(pio_addr=0xBFD003F8) # Attach I/O devices to specified bus object. Can't do this # earlier, since the bus object itself is typically defined at the # System level. def attachIO(self, bus): self.cchip.pio = bus.master self.io.pio = bus.master self.uart.pio = bus.master
bsd-3-clause
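The Param declarations in the gem5 file above are class-level descriptors that the simulator turns into per-instance configuration values. As a toy illustration of that pattern only (Param and SimObject below are invented stand-ins, not gem5's m5.params machinery), class-level defaults can be copied into instances like this:

# Toy mimic of the declarative SimObject parameter pattern; everything here
# is an invented stand-in for illustration, not gem5 code.

class Param(object):
    def __init__(self, default, desc):
        self.default = default
        self.desc = desc

class SimObject(object):
    def __init__(self, **overrides):
        # Copy class-level Param defaults into the instance, applying overrides.
        for name in dir(type(self)):
            decl = getattr(type(self), name)
            if isinstance(decl, Param):
                setattr(self, name, overrides.get(name, decl.default))

class MaltaIO(SimObject):
    pio_addr = Param(0, "physical address of the PIO range")
    frequency = Param('1024Hz', "frequency of interrupts")

io = MaltaIO(pio_addr=0x801fc000000)
print('pio_addr=0x%x frequency=%s' % (io.pio_addr, io.frequency))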
rwboyer/marilyn-project
node_modules/grunt-docker/node_modules/docker/node_modules/pygmentize-bundled/vendor/pygments/pygments/formatters/img.py
268
18059
# -*- coding: utf-8 -*- """ pygments.formatters.img ~~~~~~~~~~~~~~~~~~~~~~~ Formatter for Pixmap output. :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ import sys from pygments.formatter import Formatter from pygments.util import get_bool_opt, get_int_opt, \ get_list_opt, get_choice_opt # Import this carefully try: from PIL import Image, ImageDraw, ImageFont pil_available = True except ImportError: pil_available = False try: import _winreg except ImportError: _winreg = None __all__ = ['ImageFormatter', 'GifImageFormatter', 'JpgImageFormatter', 'BmpImageFormatter'] # For some unknown reason every font calls it something different STYLES = { 'NORMAL': ['', 'Roman', 'Book', 'Normal', 'Regular', 'Medium'], 'ITALIC': ['Oblique', 'Italic'], 'BOLD': ['Bold'], 'BOLDITALIC': ['Bold Oblique', 'Bold Italic'], } # A sane default for modern systems DEFAULT_FONT_NAME_NIX = 'Bitstream Vera Sans Mono' DEFAULT_FONT_NAME_WIN = 'Courier New' class PilNotAvailable(ImportError): """When Python imaging library is not available""" class FontNotFound(Exception): """When there are no usable fonts specified""" class FontManager(object): """ Manages a set of fonts: normal, italic, bold, etc... """ def __init__(self, font_name, font_size=14): self.font_name = font_name self.font_size = font_size self.fonts = {} self.encoding = None if sys.platform.startswith('win'): if not font_name: self.font_name = DEFAULT_FONT_NAME_WIN self._create_win() else: if not font_name: self.font_name = DEFAULT_FONT_NAME_NIX self._create_nix() def _get_nix_font_path(self, name, style): from commands import getstatusoutput exit, out = getstatusoutput('fc-list "%s:style=%s" file' % (name, style)) if not exit: lines = out.splitlines() if lines: path = lines[0].strip().strip(':') return path def _create_nix(self): for name in STYLES['NORMAL']: path = self._get_nix_font_path(self.font_name, name) if path is not None: self.fonts['NORMAL'] = ImageFont.truetype(path, self.font_size) break else: raise FontNotFound('No usable fonts named: "%s"' % self.font_name) for style in ('ITALIC', 'BOLD', 'BOLDITALIC'): for stylename in STYLES[style]: path = self._get_nix_font_path(self.font_name, stylename) if path is not None: self.fonts[style] = ImageFont.truetype(path, self.font_size) break else: if style == 'BOLDITALIC': self.fonts[style] = self.fonts['BOLD'] else: self.fonts[style] = self.fonts['NORMAL'] def _lookup_win(self, key, basename, styles, fail=False): for suffix in ('', ' (TrueType)'): for style in styles: try: valname = '%s%s%s' % (basename, style and ' '+style, suffix) val, _ = _winreg.QueryValueEx(key, valname) return val except EnvironmentError: continue else: if fail: raise FontNotFound('Font %s (%s) not found in registry' % (basename, styles[0])) return None def _create_win(self): try: key = _winreg.OpenKey( _winreg.HKEY_LOCAL_MACHINE, r'Software\Microsoft\Windows NT\CurrentVersion\Fonts') except EnvironmentError: try: key = _winreg.OpenKey( _winreg.HKEY_LOCAL_MACHINE, r'Software\Microsoft\Windows\CurrentVersion\Fonts') except EnvironmentError: raise FontNotFound('Can\'t open Windows font registry key') try: path = self._lookup_win(key, self.font_name, STYLES['NORMAL'], True) self.fonts['NORMAL'] = ImageFont.truetype(path, self.font_size) for style in ('ITALIC', 'BOLD', 'BOLDITALIC'): path = self._lookup_win(key, self.font_name, STYLES[style]) if path: self.fonts[style] = ImageFont.truetype(path, self.font_size) else: if style == 'BOLDITALIC': self.fonts[style] = 
self.fonts['BOLD'] else: self.fonts[style] = self.fonts['NORMAL'] finally: _winreg.CloseKey(key) def get_char_size(self): """ Get the character size. """ return self.fonts['NORMAL'].getsize('M') def get_font(self, bold, oblique): """ Get the font based on bold and italic flags. """ if bold and oblique: return self.fonts['BOLDITALIC'] elif bold: return self.fonts['BOLD'] elif oblique: return self.fonts['ITALIC'] else: return self.fonts['NORMAL'] class ImageFormatter(Formatter): """ Create a PNG image from source code. This uses the Python Imaging Library to generate a pixmap from the source code. *New in Pygments 0.10.* Additional options accepted: `image_format` An image format to output to that is recognised by PIL, these include: * "PNG" (default) * "JPEG" * "BMP" * "GIF" `line_pad` The extra spacing (in pixels) between each line of text. Default: 2 `font_name` The font name to be used as the base font from which others, such as bold and italic fonts will be generated. This really should be a monospace font to look sane. Default: "Bitstream Vera Sans Mono" `font_size` The font size in points to be used. Default: 14 `image_pad` The padding, in pixels to be used at each edge of the resulting image. Default: 10 `line_numbers` Whether line numbers should be shown: True/False Default: True `line_number_start` The line number of the first line. Default: 1 `line_number_step` The step used when printing line numbers. Default: 1 `line_number_bg` The background colour (in "#123456" format) of the line number bar, or None to use the style background color. Default: "#eed" `line_number_fg` The text color of the line numbers (in "#123456"-like format). Default: "#886" `line_number_chars` The number of columns of line numbers allowable in the line number margin. Default: 2 `line_number_bold` Whether line numbers will be bold: True/False Default: False `line_number_italic` Whether line numbers will be italicized: True/False Default: False `line_number_separator` Whether a line will be drawn between the line number area and the source code area: True/False Default: True `line_number_pad` The horizontal padding (in pixels) between the line number margin, and the source code area. Default: 6 `hl_lines` Specify a list of lines to be highlighted. *New in Pygments 1.2.* Default: empty list `hl_color` Specify the color for highlighting lines. *New in Pygments 1.2.* Default: highlight color of the selected style """ # Required by the pygments mapper name = 'img' aliases = ['img', 'IMG', 'png'] filenames = ['*.png'] unicodeoutput = False default_image_format = 'png' def __init__(self, **options): """ See the class docstring for explanation of options. 
""" if not pil_available: raise PilNotAvailable( 'Python Imaging Library is required for this formatter') Formatter.__init__(self, **options) # Read the style self.styles = dict(self.style) if self.style.background_color is None: self.background_color = '#fff' else: self.background_color = self.style.background_color # Image options self.image_format = get_choice_opt( options, 'image_format', ['png', 'jpeg', 'gif', 'bmp'], self.default_image_format, normcase=True) self.image_pad = get_int_opt(options, 'image_pad', 10) self.line_pad = get_int_opt(options, 'line_pad', 2) # The fonts fontsize = get_int_opt(options, 'font_size', 14) self.fonts = FontManager(options.get('font_name', ''), fontsize) self.fontw, self.fonth = self.fonts.get_char_size() # Line number options self.line_number_fg = options.get('line_number_fg', '#886') self.line_number_bg = options.get('line_number_bg', '#eed') self.line_number_chars = get_int_opt(options, 'line_number_chars', 2) self.line_number_bold = get_bool_opt(options, 'line_number_bold', False) self.line_number_italic = get_bool_opt(options, 'line_number_italic', False) self.line_number_pad = get_int_opt(options, 'line_number_pad', 6) self.line_numbers = get_bool_opt(options, 'line_numbers', True) self.line_number_separator = get_bool_opt(options, 'line_number_separator', True) self.line_number_step = get_int_opt(options, 'line_number_step', 1) self.line_number_start = get_int_opt(options, 'line_number_start', 1) if self.line_numbers: self.line_number_width = (self.fontw * self.line_number_chars + self.line_number_pad * 2) else: self.line_number_width = 0 self.hl_lines = [] hl_lines_str = get_list_opt(options, 'hl_lines', []) for line in hl_lines_str: try: self.hl_lines.append(int(line)) except ValueError: pass self.hl_color = options.get('hl_color', self.style.highlight_color) or '#f90' self.drawables = [] def get_style_defs(self, arg=''): raise NotImplementedError('The -S option is meaningless for the image ' 'formatter. Use -O style=<stylename> instead.') def _get_line_height(self): """ Get the height of a line. """ return self.fonth + self.line_pad def _get_line_y(self, lineno): """ Get the Y coordinate of a line number. """ return lineno * self._get_line_height() + self.image_pad def _get_char_width(self): """ Get the width of a character. """ return self.fontw def _get_char_x(self, charno): """ Get the X coordinate of a character position. """ return charno * self.fontw + self.image_pad + self.line_number_width def _get_text_pos(self, charno, lineno): """ Get the actual position for a character and line position. """ return self._get_char_x(charno), self._get_line_y(lineno) def _get_linenumber_pos(self, lineno): """ Get the actual position for the start of a line number. """ return (self.image_pad, self._get_line_y(lineno)) def _get_text_color(self, style): """ Get the correct color for the token from the style. """ if style['color'] is not None: fill = '#' + style['color'] else: fill = '#000' return fill def _get_style_font(self, style): """ Get the correct font for the style. """ return self.fonts.get_font(style['bold'], style['italic']) def _get_image_size(self, maxcharno, maxlineno): """ Get the required image size. """ return (self._get_char_x(maxcharno) + self.image_pad, self._get_line_y(maxlineno + 0) + self.image_pad) def _draw_linenumber(self, posno, lineno): """ Remember a line number drawable to paint later. 
""" self._draw_text( self._get_linenumber_pos(posno), str(lineno).rjust(self.line_number_chars), font=self.fonts.get_font(self.line_number_bold, self.line_number_italic), fill=self.line_number_fg, ) def _draw_text(self, pos, text, font, **kw): """ Remember a single drawable tuple to paint later. """ self.drawables.append((pos, text, font, kw)) def _create_drawables(self, tokensource): """ Create drawables for the token content. """ lineno = charno = maxcharno = 0 for ttype, value in tokensource: while ttype not in self.styles: ttype = ttype.parent style = self.styles[ttype] # TODO: make sure tab expansion happens earlier in the chain. It # really ought to be done on the input, as to do it right here is # quite complex. value = value.expandtabs(4) lines = value.splitlines(True) #print lines for i, line in enumerate(lines): temp = line.rstrip('\n') if temp: self._draw_text( self._get_text_pos(charno, lineno), temp, font = self._get_style_font(style), fill = self._get_text_color(style) ) charno += len(temp) maxcharno = max(maxcharno, charno) if line.endswith('\n'): # add a line for each extra line in the value charno = 0 lineno += 1 self.maxcharno = maxcharno self.maxlineno = lineno def _draw_line_numbers(self): """ Create drawables for the line numbers. """ if not self.line_numbers: return for p in xrange(self.maxlineno): n = p + self.line_number_start if (n % self.line_number_step) == 0: self._draw_linenumber(p, n) def _paint_line_number_bg(self, im): """ Paint the line number background on the image. """ if not self.line_numbers: return if self.line_number_fg is None: return draw = ImageDraw.Draw(im) recth = im.size[-1] rectw = self.image_pad + self.line_number_width - self.line_number_pad draw.rectangle([(0, 0), (rectw, recth)], fill=self.line_number_bg) draw.line([(rectw, 0), (rectw, recth)], fill=self.line_number_fg) del draw def format(self, tokensource, outfile): """ Format ``tokensource``, an iterable of ``(tokentype, tokenstring)`` tuples and write it into ``outfile``. This implementation calculates where it should draw each token on the pixmap, then calculates the required pixmap size and draws the items. """ self._create_drawables(tokensource) self._draw_line_numbers() im = Image.new( 'RGB', self._get_image_size(self.maxcharno, self.maxlineno), self.background_color ) self._paint_line_number_bg(im) draw = ImageDraw.Draw(im) # Highlight if self.hl_lines: x = self.image_pad + self.line_number_width - self.line_number_pad + 1 recth = self._get_line_height() rectw = im.size[0] - x for linenumber in self.hl_lines: y = self._get_line_y(linenumber - 1) draw.rectangle([(x, y), (x + rectw, y + recth)], fill=self.hl_color) for pos, value, font, kw in self.drawables: draw.text(pos, value, font=font, **kw) im.save(outfile, self.image_format.upper()) # Add one formatter per format, so that the "-f gif" option gives the correct result # when used in pygmentize. class GifImageFormatter(ImageFormatter): """ Create a GIF image from source code. This uses the Python Imaging Library to generate a pixmap from the source code. *New in Pygments 1.0.* (You could create GIF images before by passing a suitable `image_format` option to the `ImageFormatter`.) """ name = 'img_gif' aliases = ['gif'] filenames = ['*.gif'] default_image_format = 'gif' class JpgImageFormatter(ImageFormatter): """ Create a JPEG image from source code. This uses the Python Imaging Library to generate a pixmap from the source code. 
*New in Pygments 1.0.* (You could create JPEG images before by passing a suitable `image_format` option to the `ImageFormatter`.) """ name = 'img_jpg' aliases = ['jpg', 'jpeg'] filenames = ['*.jpg'] default_image_format = 'jpeg' class BmpImageFormatter(ImageFormatter): """ Create a bitmap image from source code. This uses the Python Imaging Library to generate a pixmap from the source code. *New in Pygments 1.0.* (You could create bitmap images before by passing a suitable `image_format` option to the `ImageFormatter`.) """ name = 'img_bmp' aliases = ['bmp', 'bitmap'] filenames = ['*.bmp'] default_image_format = 'bmp'
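The subclasses above only swap the default output format; all the work happens in ImageFormatter.format(). A minimal, hedged usage sketch (the snippet text and output filename are illustrative; PIL must be installed):

# Render a code snippet to a PNG via pygments.highlight().
from pygments import highlight
from pygments.lexers import PythonLexer
from pygments.formatters import ImageFormatter

snippet = 'def add(a, b):\n    return a + b\n'
formatter = ImageFormatter(font_size=14, line_numbers=True, hl_lines=[2])
with open('snippet.png', 'wb') as outfile:   # binary mode: format() writes image bytes
    highlight(snippet, PythonLexer(), formatter, outfile)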
mit
nishad-jobsglobal/odoo-marriot
addons/mrp/product.py
180
4590
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## from openerp.osv import fields, osv class product_template(osv.osv): _inherit = "product.template" def _bom_orders_count(self, cr, uid, ids, field_name, arg, context=None): Bom = self.pool('mrp.bom') res = {} for product_tmpl_id in ids: nb = Bom.search_count(cr, uid, [('product_tmpl_id', '=', product_tmpl_id)], context=context) res[product_tmpl_id] = { 'bom_count': nb, } return res def _bom_orders_count_mo(self, cr, uid, ids, name, arg, context=None): res = {} for product_tmpl_id in self.browse(cr, uid, ids): res[product_tmpl_id.id] = sum([p.mo_count for p in product_tmpl_id.product_variant_ids]) return res _columns = { 'bom_ids': fields.one2many('mrp.bom', 'product_tmpl_id','Bill of Materials'), 'bom_count': fields.function(_bom_orders_count, string='# Bill of Material', type='integer', multi="_bom_order_count"), 'mo_count': fields.function(_bom_orders_count_mo, string='# Manufacturing Orders', type='integer'), 'produce_delay': fields.float('Manufacturing Lead Time', help="Average delay in days to produce this product. 
In the case of multi-level BOM, the manufacturing lead times of the components will be added."), 'track_production': fields.boolean('Track Manufacturing Lots', help="Forces to specify a Serial Number for all moves containing this product and generated by a Manufacturing Order"), } _defaults = { 'produce_delay': 1, } def action_view_mos(self, cr, uid, ids, context=None): products = self._get_products(cr, uid, ids, context=context) result = self._get_act_window_dict(cr, uid, 'mrp.act_product_mrp_production', context=context) if len(ids) == 1 and len(products) == 1: result['context'] = "{'default_product_id': " + str(products[0]) + ", 'search_default_product_id': " + str(products[0]) + "}" else: result['domain'] = "[('product_id','in',[" + ','.join(map(str, products)) + "])]" result['context'] = "{}" return result class product_product(osv.osv): _inherit = "product.product" def _bom_orders_count(self, cr, uid, ids, field_name, arg, context=None): Production = self.pool('mrp.production') res = {} for product_id in ids: res[product_id] = Production.search_count(cr,uid, [('product_id', '=', product_id)], context=context) return res _columns = { 'mo_count': fields.function(_bom_orders_count, string='# Manufacturing Orders', type='integer'), } def action_view_bom(self, cr, uid, ids, context=None): tmpl_obj = self.pool.get("product.template") products = set() for product in self.browse(cr, uid, ids, context=context): products.add(product.product_tmpl_id.id) result = tmpl_obj._get_act_window_dict(cr, uid, 'mrp.product_open_bom', context=context) # bom specific to this variant or global to template domain = [ '|', ('product_id', 'in', ids), '&', ('product_id', '=', False), ('product_tmpl_id', 'in', list(products)), ] result['context'] = "{'default_product_id': active_id, 'search_default_product_id': active_id, 'default_product_tmpl_id': %s}" % (len(products) and products.pop() or 'False') result['domain'] = str(domain) return result # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
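A hedged sketch of the old-API function-field contract both count methods above follow: the ORM passes the record ids and expects a dict keyed by id (with multi=..., each value is itself a dict of field values). The helper name, ids and counts below are illustrative assumptions:

# Hypothetical call site inside product.template (old osv API).
def show_bom_counts(self, cr, uid):
    counts = self._bom_orders_count(cr, uid, [7, 8], 'bom_count', None, context=None)
    # e.g. {7: {'bom_count': 2}, 8: {'bom_count': 0}}  -- counts are illustrative
    return counts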
agpl-3.0
canaltinova/servo
components/script/dom/bindings/codegen/parser/tests/test_replaceable.py
138
1833
# This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. def should_throw(parser, harness, message, code): parser = parser.reset(); threw = False try: parser.parse(code) parser.finish() except: threw = True harness.ok(threw, "Should have thrown: %s" % message) def WebIDLTest(parser, harness): # The [Replaceable] extended attribute MUST take no arguments. should_throw(parser, harness, "no arguments", """ interface I { [Replaceable=X] readonly attribute long A; }; """) # An attribute with the [Replaceable] extended attribute MUST NOT also be # declared with the [PutForwards] extended attribute. should_throw(parser, harness, "PutForwards", """ interface I { [PutForwards=B, Replaceable] readonly attribute J A; }; interface J { attribute long B; }; """) # The [Replaceable] extended attribute MUST NOT be used on an attribute # that is not read only. should_throw(parser, harness, "writable attribute", """ interface I { [Replaceable] attribute long A; }; """) # The [Replaceable] extended attribute MUST NOT be used on a static # attribute. should_throw(parser, harness, "static attribute", """ interface I { [Replaceable] static readonly attribute long A; }; """) # The [Replaceable] extended attribute MUST NOT be used on an attribute # declared on a callback interface. should_throw(parser, harness, "callback interface", """ callback interface I { [Replaceable] readonly attribute long A; }; """)
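By contrast, the plain form should parse cleanly. A hedged positive-case sketch in the same harness style (the function name is an assumption, not part of the original suite):

def WebIDLTestValidReplaceable(parser, harness):
    # [Replaceable] on a read-only, non-static attribute of a regular
    # (non-callback) interface satisfies every restriction exercised above.
    parser = parser.reset()
    parser.parse("""
        interface I {
          [Replaceable] readonly attribute long A;
        };
    """)
    parser.finish()
    harness.ok(True, "[Replaceable] readonly attribute should parse")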
mpl-2.0
jesusesc/Informatica13-14
in_out.py
1
2096
# -*- coding: utf-8 -*-
'''
Input / output examples
'''
from math import pi

def circle_length(radius):
    return 2*pi*radius

def half(valor):
    return valor/2

def main():
    '''
    Reading data from the keyboard: raw_input
    We must specify which type of data we want to read
    '''
    # Read the radius from the keyboard
    rad = float(raw_input())
    print circle_length(rad)

    # Better: an introductory message
    rad = float(raw_input("radius = "))
    print circle_length(rad)

def main2():
    # Be careful about what you want to read
    x = raw_input("value = ")
    y = int(raw_input("value = "))
    z = float(raw_input("value = "))
    #print half(x), half(y), half(z)
    print half(y), half(z)

def main3():
    # Improving print
    r = float(raw_input("radius = "))
    print "The circle length is", circle_length(r)
    # print can show several messages separated by commas

    # Another way
    print "The circle length is",
    print circle_length(r)

def main4():
    # FORMATTED OUTPUT
    n = int(raw_input("n = "))

    # Print to the screen without any kind of format
    for k in range(2, 11):
        print n, "to the power of", k, "equals", n**k
    print '='*10

    # Print to the screen using an output format
    for k in range(2, 11):
        print '%d to the power of %d equals %d' % (n, k, n**k)
    print '='*10

    '''
    %<number>d --> integer format
    %<number1.number2>f --> float format
    %s --> strings
    '''

    # With a good format, we improve the presentation
    for k in range(2, 11):
        print '%d to the power of %2d equals %9d' % (n, k, n**k)
    print '='*10

    # We can mix floats and integers
    for k in range(1, 10):
        print 'the circle of radius %d has length %f' % (k, circle_length(k))
    print '='*10

    for k in range(1, 10):
        print 'the circle of radius %d has length %5.1f' % (k, circle_length(k))
    print '='*10

    for k in range(1, 10):
        r = 1.0/k
        print 'the circle of radius %4.2f has length %5.1f' % (r, circle_length(r))
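A worked example of the width/precision codes used above (the printed results are shown as comments):

# '%5.1f' pads to a total width of 5 with 1 digit after the point.
print '%5.1f' % 3.14159    # prints "  3.1"
print '%9d' % 2**10        # prints "     1024"
print '%4.2f' % (1.0/3)    # prints "0.33"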
mit
AsimmHirani/ISpyPi
tensorflow/contrib/tensorflow-master/tensorflow/python/kernel_tests/identity_op_py_test.py
115
2643
# Copyright 2015 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Tests for IdentityOp.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import numpy as np from tensorflow.python.framework import constant_op from tensorflow.python.framework import dtypes from tensorflow.python.ops import array_ops from tensorflow.python.ops import gen_array_ops from tensorflow.python.ops import variables from tensorflow.python.platform import test class IdentityOpTest(test.TestCase): def testInt32_6(self): with self.test_session(): value = array_ops.identity([1, 2, 3, 4, 5, 6]).eval() self.assertAllEqual(np.array([1, 2, 3, 4, 5, 6]), value) def testInt32_2_3(self): with self.test_session(): inp = constant_op.constant([10, 20, 30, 40, 50, 60], shape=[2, 3]) value = array_ops.identity(inp).eval() self.assertAllEqual(np.array([[10, 20, 30], [40, 50, 60]]), value) def testString(self): source = [b"A", b"b", b"C", b"d", b"E", b"f"] with self.test_session(): value = array_ops.identity(source).eval() self.assertAllEqual(source, value) def testIdentityShape(self): with self.test_session(): shape = [2, 3] array_2x3 = [[1, 2, 3], [6, 5, 4]] tensor = constant_op.constant(array_2x3) self.assertEquals(shape, tensor.get_shape()) self.assertEquals(shape, array_ops.identity(tensor).get_shape()) self.assertEquals(shape, array_ops.identity(array_2x3).get_shape()) self.assertEquals(shape, array_ops.identity(np.array(array_2x3)).get_shape()) def testRefIdentityShape(self): with self.test_session(): shape = [2, 3] tensor = variables.Variable( constant_op.constant( [[1, 2, 3], [6, 5, 4]], dtype=dtypes.int32)) self.assertEquals(shape, tensor.get_shape()) self.assertEquals(shape, gen_array_ops._ref_identity(tensor).get_shape()) if __name__ == "__main__": test.main()
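A minimal companion sketch against the same internal API, reusing the imports at the top of this file (the test name and tensor values are illustrative assumptions):

# identity forwards its input unchanged: same dtype, shape and contents.
class IdentityFloatSketch(test.TestCase):

  def testFloat(self):
    with self.test_session():
      value = array_ops.identity([1.0, 2.5, -3.0]).eval()
      self.assertAllClose(np.array([1.0, 2.5, -3.0]), value)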
apache-2.0
Chemcy/vnpy
vn.trader/gateway/femasGateway/femasGateway.py
6
37219
# encoding: UTF-8

'''
vn.femas gateway integration.

Since FEMAS only covers futures (currently only CFFEX),
vtSymbol uses symbol directly.
'''

import os
import json

from vnfemasmd import MdApi
from vnfemastd import TdApi
from femasDataType import *
from vtGateway import *

# Mapping dictionaries between VT types and CTP-style types

# Price type mapping
priceTypeMap = {}
priceTypeMap[PRICETYPE_LIMITPRICE] = defineDict["USTP_FTDC_OPT_LimitPrice"]
priceTypeMap[PRICETYPE_MARKETPRICE] = defineDict["USTP_FTDC_OPT_AnyPrice"]
priceTypeMapReverse = {v: k for k, v in priceTypeMap.items()}

# Direction type mapping
directionMap = {}
directionMap[DIRECTION_LONG] = defineDict['USTP_FTDC_D_Buy']
directionMap[DIRECTION_SHORT] = defineDict['USTP_FTDC_D_Sell']
directionMapReverse = {v: k for k, v in directionMap.items()}

# Offset (open/close) type mapping
offsetMap = {}
offsetMap[OFFSET_OPEN] = defineDict['USTP_FTDC_OF_Open']
offsetMap[OFFSET_CLOSE] = defineDict['USTP_FTDC_OF_Close']
offsetMap[OFFSET_CLOSETODAY] = defineDict['USTP_FTDC_OF_CloseToday']
offsetMap[OFFSET_CLOSEYESTERDAY] = defineDict['USTP_FTDC_OF_CloseYesterday']
offsetMapReverse = {v:k for k,v in offsetMap.items()}

# Exchange type mapping
exchangeMap = {}
#exchangeMap[EXCHANGE_CFFEX] = defineDict['USTP_FTDC_EIDT_CFFEX']
#exchangeMap[EXCHANGE_SHFE] = defineDict['USTP_FTDC_EIDT_SHFE']
#exchangeMap[EXCHANGE_CZCE] = defineDict['USTP_FTDC_EIDT_CZCE']
#exchangeMap[EXCHANGE_DCE] = defineDict['USTP_FTDC_EIDT_DCE']
exchangeMap[EXCHANGE_CFFEX] = 'CFFEX'
exchangeMap[EXCHANGE_SHFE] = 'SHFE'
exchangeMap[EXCHANGE_CZCE] = 'CZCE'
exchangeMap[EXCHANGE_DCE] = 'DCE'
exchangeMap[EXCHANGE_UNKNOWN] = ''
exchangeMapReverse = {v:k for k,v in exchangeMap.items()}

# Position direction mapping
posiDirectionMap = {}
posiDirectionMap[DIRECTION_LONG] = defineDict["USTP_FTDC_D_Buy"]
posiDirectionMap[DIRECTION_SHORT] = defineDict["USTP_FTDC_D_Sell"]
posiDirectionMapReverse = {v:k for k,v in posiDirectionMap.items()}


########################################################################
class FemasGateway(VtGateway):
    """FEMAS gateway"""

    #----------------------------------------------------------------------
    def __init__(self, eventEngine, gatewayName='FEMAS'):
        """Constructor"""
        super(FemasGateway, self).__init__(eventEngine, gatewayName)

        self.mdApi = FemasMdApi(self)     # market data API
        self.tdApi = FemasTdApi(self)     # trading API

        self.mdConnected = False          # market data API connection status; True once login completes
        self.tdConnected = False          # trading API connection status

        self.qryEnabled = False           # whether to start the polling queries

    #----------------------------------------------------------------------
    def connect(self):
        """Connect"""
        # Load the json settings file
        fileName = self.gatewayName + '_connect.json'
        path = os.path.abspath(os.path.dirname(__file__))
        fileName = os.path.join(path, fileName)

        try:
            f = file(fileName)
        except IOError:
            log = VtLogData()
            log.gatewayName = self.gatewayName
            log.logContent = u'Failed to read the connection settings, please check'
            self.onLog(log)
            return

        # Parse the json file
        setting = json.load(f)
        try:
            userID = str(setting['userID'])
            password = str(setting['password'])
            brokerID = str(setting['brokerID'])
            tdAddress = str(setting['tdAddress'])
            mdAddress = str(setting['mdAddress'])
        except KeyError:
            log = VtLogData()
            log.gatewayName = self.gatewayName
            log.logContent = u'A field is missing from the connection settings, please check'
            self.onLog(log)
            return

        # Create the market data and trading interface objects
        self.mdApi.connect(userID, password, brokerID, mdAddress)
        self.tdApi.connect(userID, password, brokerID, tdAddress)

        # Initialize and start the polling queries
        self.initQuery()

    #----------------------------------------------------------------------
    def subscribe(self, subscribeReq):
        """Subscribe to market data"""
        self.mdApi.subscribe(subscribeReq)

    #----------------------------------------------------------------------
    def sendOrder(self, orderReq):
        """Send an order"""
        return self.tdApi.sendOrder(orderReq)
    #----------------------------------------------------------------------
    def cancelOrder(self, cancelOrderReq):
        """Cancel an order"""
        self.tdApi.cancelOrder(cancelOrderReq)

    #----------------------------------------------------------------------
    def qryAccount(self):
        """Query account funds"""
        self.tdApi.qryAccount()

    #----------------------------------------------------------------------
    def qryPosition(self):
        """Query positions"""
        self.tdApi.qryPosition()

    #----------------------------------------------------------------------
    def close(self):
        """Close"""
        if self.mdConnected:
            self.mdApi.close()
        if self.tdConnected:
            self.tdApi.close()

    #----------------------------------------------------------------------
    def initQuery(self):
        """Initialize the polling queries"""
        if self.qryEnabled:
            # List of query functions to run in rotation
            self.qryFunctionList = [self.qryAccount, self.qryPosition]

            self.qryCount = 0           # query trigger countdown
            self.qryTrigger = 2         # query trigger point
            self.qryNextFunction = 0    # index of the query function run last time

            self.startQuery()

    #----------------------------------------------------------------------
    def query(self, event):
        """Query function registered with the event engine"""
        self.qryCount += 1

        if self.qryCount > self.qryTrigger:
            # Reset the countdown
            self.qryCount = 0

            # Run the query function
            function = self.qryFunctionList[self.qryNextFunction]
            function()

            # Compute the index of the next query function; wrap to 0 past the end of the list
            self.qryNextFunction += 1
            if self.qryNextFunction == len(self.qryFunctionList):
                self.qryNextFunction = 0

    #----------------------------------------------------------------------
    def startQuery(self):
        """Start the polling queries"""
        self.eventEngine.register(EVENT_TIMER, self.query)

    #----------------------------------------------------------------------
    def setQryEnabled(self, qryEnabled):
        """Set whether to start the polling queries"""
        self.qryEnabled = qryEnabled


########################################################################
class FemasMdApi(MdApi):
    """FEMAS market data API implementation"""

    #----------------------------------------------------------------------
    def __init__(self, gateway):
        """Constructor"""
        super(FemasMdApi, self).__init__()

        self.gateway = gateway                  # gateway object
        self.gatewayName = gateway.gatewayName  # gateway object name

        self.reqID = EMPTY_INT                  # request id counter

        self.connectionStatus = False           # connection status
        self.loginStatus = False                # login status

        self.subscribedSymbols = set()          # symbols already subscribed

        self.userID = EMPTY_STRING              # account
        self.password = EMPTY_STRING            # password
        self.brokerID = EMPTY_STRING            # broker id
        self.address = EMPTY_STRING             # server address

    #----------------------------------------------------------------------
    def onFrontConnected(self):
        """Server connected"""
        self.connectionStatus = True

        log = VtLogData()
        log.gatewayName = self.gatewayName
        log.logContent = u'Market data server connected'
        self.gateway.onLog(log)

        self.login()

    #----------------------------------------------------------------------
    def onFrontDisconnected(self, n):
        """Server disconnected"""
        self.connectionStatus = False
        self.loginStatus = False
        self.gateway.mdConnected = False

        log = VtLogData()
        log.gatewayName = self.gatewayName
        log.logContent = u'Market data server disconnected'
        self.gateway.onLog(log)

    #----------------------------------------------------------------------
    def onHeartBeatWarning(self, n):
        """Heartbeat warning"""
        # The API's heartbeat warning fires frequently and has little to do
        # with the API's actual work, so it is ignored
        pass

    #----------------------------------------------------------------------
    def onRspError(self, error, n, last):
        """Error callback"""
        err = VtErrorData()
        err.gatewayName = self.gatewayName
        err.errorID = error['ErrorID']
        err.errorMsg = error['ErrorMsg'].decode('gbk')
        self.gateway.onError(err)

    #----------------------------------------------------------------------
    def onRspUserLogin(self, data, error, n, last):
        """Login callback"""
        # On successful login, push a log message
        if error['ErrorID'] == 0:
            self.loginStatus = True
            self.gateway.mdConnected = True

            log = VtLogData()
            log.gatewayName = self.gatewayName
            log.logContent = u'Market data server login completed'
            self.gateway.onLog(log)

            # Re-subscribe the symbols that were subscribed before
            for subscribeReq in self.subscribedSymbols:
                self.subscribe(subscribeReq)

        # Otherwise, push the error message
        else:
            err = VtErrorData()
            err.gatewayName = self.gatewayName
            err.errorID = error['ErrorID']
            err.errorMsg = error['ErrorMsg'].decode('gbk')
            self.gateway.onError(err)

    #----------------------------------------------------------------------
    def onRspUserLogout(self, data, error, n, last):
        """Logout callback"""
        # On successful logout, push a log message
        if error['ErrorID'] == 0:
            self.loginStatus = False
            self.gateway.mdConnected = False    # bug fix: this is the market data API, not tdConnected

            log = VtLogData()
            log.gatewayName = self.gatewayName
            log.logContent = u'Market data server logout completed'
            self.gateway.onLog(log)

        # Otherwise, push the error message
        else:
            err = VtErrorData()
            err.gatewayName = self.gatewayName
            err.errorID = error['ErrorID']
            err.errorMsg = error['ErrorMsg'].decode('gbk')
            self.gateway.onError(err)

    #----------------------------------------------------------------------
    def onRspSubMarketData(self, data, error, n, last):
        """Subscription callback"""
        # Subscription errors are usually not of interest; ignore
        pass

    #----------------------------------------------------------------------
    def onRspUnSubMarketData(self, data, error, n, last):
        """Unsubscription callback"""
        # Same as above
        pass

    #----------------------------------------------------------------------
    def onRspSubscribeTopic(self, data, error, n, last):
        """"""
        # Same as above
        pass

    #----------------------------------------------------------------------
    def onRspQryTopic(self, data, error, n, last):
        """"""
        # Same as above
        pass

    #----------------------------------------------------------------------
    def onRtnDepthMarketData(self, data):
        """Market data push"""
        tick = VtTickData()
        tick.gatewayName = self.gatewayName

        tick.symbol = data['InstrumentID']
        tick.vtSymbol = tick.symbol

        tick.lastPrice = data['LastPrice']
        tick.volume = data['Volume']
        tick.openInterest = data['OpenInterest']
        tick.time = '.'.join([data['UpdateTime'], str(data['UpdateMillisec']/100)])
        tick.date = data['TradingDay']

        tick.openPrice = data['OpenPrice']
        tick.highPrice = data['HighestPrice']
        tick.lowPrice = data['LowestPrice']
        tick.preClosePrice = data['PreClosePrice']

        tick.upperLimit = data['UpperLimitPrice']
        tick.lowerLimit = data['LowerLimitPrice']

        # Like CTP, only one level of market depth is available
        tick.bidPrice1 = data['BidPrice1']
        tick.bidVolume1 = data['BidVolume1']
        tick.askPrice1 = data['AskPrice1']
        tick.askVolume1 = data['AskVolume1']

        self.gateway.onTick(tick)

    #----------------------------------------------------------------------
    def connect(self, userID, password, brokerID, address):
        """Initialize the connection"""
        self.userID = userID                # account
        self.password = password            # password
        self.brokerID = brokerID            # broker id
        self.address = address              # server address

        # Connect to the server if no connection has been established yet
        if not self.connectionStatus:
            # Create the API object on the C++ side; the argument is the folder
            # path used to store the .con files
            path = os.getcwd() + '/temp/' + self.gatewayName + '/'
            if not os.path.exists(path):
                os.makedirs(path)
            self.createFtdcMdApi(path)

            # Subscribe to topics
            self.subscribeMarketDataTopic(100, 2)

            # Register the server address
            self.registerFront(self.address)

            # Initialize the connection; on success onFrontConnected is called
            self.init()

        # If already connected but not yet logged in, log in
        else:
            if not self.loginStatus:
                self.login()

    #----------------------------------------------------------------------
    def subscribe(self, subscribeReq):
        """Subscribe to a contract"""
        # The design here: if subscribe is called before login has finished,
        # the request is saved first and replayed automatically once login completes
        if self.loginStatus:
            self.subMarketData(subscribeReq.symbol)
        self.subscribedSymbols.add(subscribeReq)

    #----------------------------------------------------------------------
    def login(self):
        """Log in"""
        # Log in only if user name, password etc. have been filled in
        if self.userID and self.password and self.brokerID:
            req = {}
            req['UserID'] = self.userID
            req['Password'] = self.password
            req['BrokerID'] = self.brokerID
            self.reqID += 1
            self.reqUserLogin(req, self.reqID)

    #----------------------------------------------------------------------
    def close(self):
        """Close"""
        self.exit()


########################################################################
class FemasTdApi(TdApi):
    """FEMAS trading API implementation"""

    #----------------------------------------------------------------------
    def __init__(self, gateway):
        """API object initializer"""
        super(FemasTdApi, self).__init__()

        self.gateway = gateway                  # gateway object
        self.gatewayName = gateway.gatewayName  # gateway object name

        self.reqID = EMPTY_INT                  # request id counter
        self.localID = EMPTY_INT                # local order id counter

        self.connectionStatus = False           # connection status
        self.loginStatus = False                # login status

        self.userID = EMPTY_STRING              # account
        self.password = EMPTY_STRING            # password
        self.brokerID = EMPTY_STRING            # broker id
        self.address = EMPTY_STRING             # server address

        self.frontID = EMPTY_INT                # front id
        self.sessionID = EMPTY_INT              # session id

    #----------------------------------------------------------------------
    def connect(self, userID, password, brokerID, address):
        """Initialize the connection"""
        self.userID = userID                # account
        self.password = password            # password
        self.brokerID = brokerID            # broker id
        self.address = address              # server address

        # Connect to the server if no connection has been established yet
        if not self.connectionStatus:
            # Create the API object on the C++ side; the argument is the folder
            # path used to store the .con files
            path = os.getcwd() + '/temp/' + self.gatewayName + '/'
            if not os.path.exists(path):
                os.makedirs(path)
            self.createFtdcTraderApi(path)

            # Subscribe to topics
            self.subscribePrivateTopic(0)
            self.subscribePublicTopic(0)
            #self.subscribeUserTopic(0)

            # Register the server address
            self.registerFront(self.address)

            # Initialize the connection; on success onFrontConnected is called
            self.init()

        # If already connected but not yet logged in, log in
        else:
            if not self.loginStatus:
                self.login()

    #----------------------------------------------------------------------
    def login(self):
        """Connect to the server"""
        # Log in only if user name, password etc. have been filled in
        if self.userID and self.password and self.brokerID:
            req = {}
            req['UserID'] = self.userID
            req['Password'] = self.password
            req['BrokerID'] = self.brokerID
            self.reqID += 1
            self.reqUserLogin(req, self.reqID)

    #----------------------------------------------------------------------
    def qryAccount(self):
        """Query the account"""
        self.reqID += 1
        req = {}
        req['BrokerID'] = self.brokerID
        req['InvestorID'] = self.userID
        self.reqQryInvestorAccount(req, self.reqID)

    #----------------------------------------------------------------------
    def qryPosition(self):
        """Query positions"""
        self.reqID += 1
        req = {}
        req['BrokerID'] = self.brokerID
        req['InvestorID'] = self.userID
        self.reqQryInvestorPosition(req, self.reqID)

    #----------------------------------------------------------------------
    def sendOrder(self, orderReq):
        """Send an order"""
        self.reqID += 1
        self.localID += 1
        strLocalID = generateStrLocalID(self.localID)

        req = {}

        req['InstrumentID'] = orderReq.symbol
        req['ExchangeID'] = orderReq.exchange
        req['LimitPrice'] = orderReq.price
        req['Volume'] = orderReq.volume

        # If a requested type below is not supported by this interface,
        # an empty string is returned
        try:
            req['OrderPriceType'] = priceTypeMap[orderReq.priceType]
            req['Direction'] = directionMap[orderReq.direction]
            req['OffsetFlag'] = offsetMap[orderReq.offset]
        except KeyError:
            return ''

        req['UserOrderLocalID'] = strLocalID
        req['InvestorID'] = self.userID
        req['UserID'] = self.userID
        req['BrokerID'] = self.brokerID

        req['HedgeFlag'] = defineDict['USTP_FTDC_CHF_Speculation']           # speculation
        req['ForceCloseReason'] = defineDict['USTP_FTDC_FCR_NotForceClose']  # not a forced close
        req['IsAutoSuspend'] = 0                                             # no auto suspend
        req['TimeCondition'] = defineDict['USTP_FTDC_TC_GFD']                # good for day
        req['VolumeCondition'] = defineDict['USTP_FTDC_VC_AV']               # any volume
        req['MinVolume'] = 1                                                 # minimum fill volume of 1

        self.reqOrderInsert(req, self.reqID)

        # Return the order id (a string) so that algorithms can manage the order dynamically
        vtOrderID = '.'.join([self.gatewayName, strLocalID])
        return vtOrderID
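    # --- Added illustration (not part of the original gateway) ---
    # A hedged sketch of driving sendOrder from strategy code; VtOrderReq and
    # the constant names follow the vtGateway conventions imported above, and
    # the field values are purely illustrative:
    #     req = VtOrderReq()
    #     req.symbol = 'IF1509'
    #     req.exchange = EXCHANGE_CFFEX
    #     req.price = 4000.0
    #     req.volume = 1
    #     req.priceType = PRICETYPE_LIMITPRICE
    #     req.direction = DIRECTION_LONG
    #     req.offset = OFFSET_OPEN
    #     vtOrderID = gateway.sendOrder(req)   # e.g. 'FEMAS.000000000001'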
    #----------------------------------------------------------------------
    def cancelOrder(self, cancelOrderReq):
        """Cancel an order"""
        self.reqID += 1
        self.localID += 1
        strLocalID = generateStrLocalID(self.localID)

        req = {}

        req['ExchangeID'] = cancelOrderReq.exchange
        req['UserOrderLocalID'] = cancelOrderReq.orderID
        req['UserOrderActionLocalID'] = strLocalID   # FEMAS requires a cancel id, i.e. a unique id for this cancel action
        req['ActionFlag'] = defineDict['USTP_FTDC_AF_Delete']

        req['BrokerID'] = self.brokerID
        req['InvestorID'] = self.userID
        req['UserID'] = self.userID    # FEMAS requires the UserID field (CTP does not)

        self.reqOrderAction(req, self.reqID)

    #----------------------------------------------------------------------
    def close(self):
        """Close"""
        self.exit()

    #----------------------------------------------------------------------
    def onFrontConnected(self):
        """Server connected"""
        self.connectionStatus = True

        log = VtLogData()
        log.gatewayName = self.gatewayName
        log.logContent = u'Trading server connected'
        self.gateway.onLog(log)

        self.login()

    #----------------------------------------------------------------------
    def onFrontDisconnected(self, n):
        """Server disconnected"""
        self.connectionStatus = False
        self.loginStatus = False
        self.gateway.tdConnected = False

        log = VtLogData()
        log.gatewayName = self.gatewayName
        log.logContent = u'Trading server disconnected'
        self.gateway.onLog(log)

    #----------------------------------------------------------------------
    def onHeartBeatWarning(self, n):
        """"""
        pass

    #----------------------------------------------------------------------
    def onRspError(self, error, n, last):
        """Error callback"""
        err = VtErrorData()
        err.gatewayName = self.gatewayName
        err.errorID = error['ErrorID']
        err.errorMsg = error['ErrorMsg'].decode('gbk')
        self.gateway.onError(err)

    #----------------------------------------------------------------------
    def onRspUserLogin(self, data, error, n, last):
        """Login callback"""
        # On successful login, push a log message
        if error['ErrorID'] == 0:
            for k, v in data.items():
                print k, ':', v
            if data['MaxOrderLocalID']:
                self.localID = int(data['MaxOrderLocalID'])    # current largest local order id
            print 'id now', self.localID

            self.loginStatus = True
            self.gateway.tdConnected = True    # bug fix: this is the trading API, not mdConnected

            log = VtLogData()
            log.gatewayName = self.gatewayName
            log.logContent = u'Trading server login completed'
            self.gateway.onLog(log)

            # Query contract codes
            self.reqID += 1
            self.reqQryInstrument({}, self.reqID)

        # Otherwise, push the error message
        else:
            err = VtErrorData()
            err.gatewayName = self.gatewayName    # bug fix: was self.gateway (the object, not its name)
            err.errorID = error['ErrorID']
            err.errorMsg = error['ErrorMsg'].decode('gbk')
            self.gateway.onError(err)

    #----------------------------------------------------------------------
    def onRspUserLogout(self, data, error, n, last):
        """Logout callback"""
        # On successful logout, push a log message
        if error['ErrorID'] == 0:
            self.loginStatus = False
            self.gateway.tdConnected = False

            log = VtLogData()
            log.gatewayName = self.gatewayName
            log.logContent = u'Trading server logout completed'
            self.gateway.onLog(log)

        # Otherwise, push the error message
        else:
            err = VtErrorData()
            err.gatewayName = self.gatewayName
            err.errorID = error['ErrorID']
            err.errorMsg = error['ErrorMsg'].decode('gbk')
            self.gateway.onError(err)

    #----------------------------------------------------------------------
    def onRspUserPasswordUpdate(self, data, error, n, last):
        """"""
        pass

    #----------------------------------------------------------------------
    def onRspOrderInsert(self, data, error, n, last):
        """Order insert error (broker side)"""
        # FEMAS may push this even when there is no error (the content is then
        # correct), in which case there is no error id
        if error['ErrorID']:
            err = VtErrorData()
            err.gatewayName = self.gatewayName
            err.errorID = error['ErrorID']
            err.errorMsg = error['ErrorMsg'].decode('gbk')
            self.gateway.onError(err)

    #----------------------------------------------------------------------
    def onRspOrderAction(self, data, error, n, last):
        """Order cancel error (broker side)"""
        if error['ErrorID']:
            err = VtErrorData()
            err.gatewayName = self.gatewayName
            err.errorID = error['ErrorID']
            err.errorMsg = error['ErrorMsg'].decode('gbk')
            self.gateway.onError(err)

    #----------------------------------------------------------------------
    def onRtnFlowMessageCancel(self, data):
        """"""
        pass

    #----------------------------------------------------------------------
    def onRtnTrade(self, data):
        """Trade callback"""
        # Create the trade data object
        trade = VtTradeData()
        trade.gatewayName = self.gatewayName

        # Save the symbol and order id
        trade.symbol = data['InstrumentID']
        trade.exchange = exchangeMapReverse[data['ExchangeID']]
        trade.vtSymbol = trade.symbol  #'.'.join([trade.symbol, trade.exchange])

        trade.tradeID = data['TradeID']
        trade.vtTradeID = '.'.join([self.gatewayName, trade.tradeID])

        trade.orderID = data['UserOrderLocalID']
        trade.vtOrderID = '.'.join([self.gatewayName, trade.orderID])

        # Direction
        trade.direction = directionMapReverse.get(data['Direction'], '')

        # Offset
        trade.offset = offsetMapReverse.get(data['OffsetFlag'], '')

        # Price, volume and related values
        trade.price = data['TradePrice']
        trade.volume = data['TradeVolume']
        trade.tradeTime = data['TradeTime']

        # Push
        self.gateway.onTrade(trade)

    #----------------------------------------------------------------------
    def onRtnOrder(self, data):
        """Order callback"""
        # Update the largest order id; check and advance the local order id
        self.localID = max(self.localID, int(data['UserOrderLocalID']))

        # Create the order data object
        order = VtOrderData()
        order.gatewayName = self.gatewayName

        # Save the symbol and order id
        order.symbol = data['InstrumentID']
        order.exchange = exchangeMapReverse[data['ExchangeID']]
        order.vtSymbol = order.symbol  #'.'.join([order.symbol, order.exchange])

        order.orderID = data['UserOrderLocalID']   # FEMAS tracks orders with this single string field

        # Direction
        if data['Direction'] == '0':
            order.direction = DIRECTION_LONG
        elif data['Direction'] == '1':
            order.direction = DIRECTION_SHORT
        else:
            order.direction = DIRECTION_UNKNOWN

        # Offset
        if data['OffsetFlag'] == '0':
            order.offset = OFFSET_OPEN
        elif data['OffsetFlag'] == '1':
            order.offset = OFFSET_CLOSE
        elif data['OffsetFlag'] == '3':
            order.offset = OFFSET_CLOSETODAY
        elif data['OffsetFlag'] == '4':
            order.offset = OFFSET_CLOSEYESTERDAY
        else:
            order.offset = OFFSET_UNKNOWN

        # Status
        if data['OrderStatus'] == '0':
            order.status = STATUS_ALLTRADED
        elif data['OrderStatus'] == '1':
            order.status = STATUS_PARTTRADED
        elif data['OrderStatus'] == '3':
            order.status = STATUS_NOTTRADED
        elif data['OrderStatus'] == '5':
            order.status = STATUS_CANCELLED
        else:
            order.status = STATUS_UNKNOWN

        # Price, volume and related values
        order.price = data['LimitPrice']
        order.totalVolume = data['Volume']
        order.tradedVolume = data['VolumeTraded']
        order.orderTime = data['InsertTime']
        order.cancelTime = data['CancelTime']

        # Keeping CTP order ids consistent requires the three fields frontID,
        # sessionID and orderID, but this interface's design already relies on
        # the auto-increment of CTP's OrderRef to avoid duplicates. The only way
        # an OrderRef could repeat is logging in from several places and sending
        # orders at almost exactly the same time; given VtTrader's usage
        # scenarios, this is not considered a problem.
        order.vtOrderID = '.'.join([self.gatewayName, order.orderID])

        # Push
        self.gateway.onOrder(order)

    #----------------------------------------------------------------------
    def onErrRtnOrderInsert(self, data, error):
        """Order insert error callback (exchange side)"""
        if error['ErrorID']:
            err = VtErrorData()
            err.gatewayName = self.gatewayName
            err.errorID = error['ErrorID']
            err.errorMsg = error['ErrorMsg'].decode('gbk')
            self.gateway.onError(err)

    #----------------------------------------------------------------------
    def onErrRtnOrderAction(self, data, error):
        """Order cancel error callback (exchange side)"""
        if error['ErrorID']:
            err = VtErrorData()
            err.gatewayName = self.gatewayName
            err.errorID = error['ErrorID']
            err.errorMsg = error['ErrorMsg'].decode('gbk')
            self.gateway.onError(err)

    #----------------------------------------------------------------------
    def onRtnInstrumentStatus(self, data):
        """"""
        pass

    #----------------------------------------------------------------------
    def onRtnInvestorAccountDeposit(self, data):
        """"""
        pass

    #----------------------------------------------------------------------
    def onRspQryOrder(self, data, error, n, last):
        """"""
        pass

    #----------------------------------------------------------------------
    def onRspQryTrade(self, data, error, n, last):
        """"""
        pass

    #----------------------------------------------------------------------
    def onRspQryUserInvestor(self, data, error, n, last):
        """"""
        pass

    #----------------------------------------------------------------------
    def onRspQryTradingCode(self, data, error, n, last):
        """"""
        pass

    #----------------------------------------------------------------------
    def onRspQryInvestorAccount(self, data, error, n, last):
        """Account query callback"""
        account = VtAccountData()
        account.gatewayName = self.gatewayName

        # Account id
        account.accountID = data['AccountID']
        account.vtAccountID = '.'.join([self.gatewayName, account.accountID])

        # Numeric fields
        account.preBalance = data['PreBalance']
        account.available = data['Available']
        account.commission = data['Fee']
        account.margin = data['Margin']
        account.closeProfit = data['CloseProfit']
        account.positionProfit = data['PositionProfit']

        # It is not certain that this balance matches the account shown in the
        # KuaiQi client; needs testing
        #account.balance = (data['PreBalance'] - data['Withdraw'] + data['Deposit'] +
                           #data['CloseProfit'] + data['PositionProfit'] + data['TodayInOut'] -
                           #data['Fee'])
        account.balance = data['DynamicRights']    # FEMAS provides a dynamic equity field directly

        # Push
        self.gateway.onAccount(account)

    #----------------------------------------------------------------------
    def onRspQryInstrument(self, data, error, n, last):
        """Contract query callback"""
        contract = VtContractData()
        contract.gatewayName = self.gatewayName

        contract.symbol = data['InstrumentID']
        contract.exchange = exchangeMapReverse[data['ExchangeID']]
        contract.vtSymbol = contract.symbol  #'.'.join([contract.symbol, contract.exchange])
        contract.name = data['InstrumentName'].decode('GBK')

        # Contract numeric fields
        contract.size = data['VolumeMultiple']
        contract.priceTick = data['PriceTick']
        contract.strikePrice = data['StrikePrice']
        contract.underlyingSymbol = data['UnderlyingInstrID']

        # Option type
        if data['OptionsType'] == '1':
            contract.productClass = PRODUCT_OPTION
            contract.optionType = OPTION_CALL
        elif data['OptionsType'] == '2':
            contract.productClass = PRODUCT_OPTION
            contract.optionType = OPTION_PUT
        elif data['OptionsType'] == '3':
            contract.productClass = PRODUCT_FUTURES
            contract.optionType = ''

        # Push
        self.gateway.onContract(contract)

        if last:
            log = VtLogData()
            log.gatewayName = self.gatewayName
            log.logContent = u'Contract information retrieval completed'
            self.gateway.onLog(log)

    #----------------------------------------------------------------------
    def onRspQryExchange(self, data, error, n, last):
        """"""
        pass

    #----------------------------------------------------------------------
    def onRspQryInvestorPosition(self, data, error, n, last):
        """Position query callback"""
        pos = VtPositionData()
        pos.gatewayName = self.gatewayName

        # Save the symbol
        pos.symbol = data['InstrumentID']
        pos.vtSymbol = pos.symbol    # data carries no ExchangeID field here

        # Direction and frozen position volume
        pos.direction = posiDirectionMapReverse.get(data['Direction'], '')
        pos.frozen = data['FrozenPosition']

        # Position volume
        pos.position = data['Position']
        pos.ydPosition = data['YdPosition']

        # Average position price
        if pos.position:
            pos.price = data['PositionCost'] / pos.position

        # VT position name
        pos.vtPositionName = '.'.join([pos.vtSymbol, pos.direction])

        # Push
        self.gateway.onPosition(pos)

    #----------------------------------------------------------------------
    def onRspSubscribeTopic(self, data, error, n, last):
        """"""
        pass
    #----------------------------------------------------------------------
    def onRspQryComplianceParam(self, data, error, n, last):
        """"""
        pass

    #----------------------------------------------------------------------
    def onRspQryTopic(self, data, error, n, last):
        """"""
        pass

    #----------------------------------------------------------------------
    def onRspQryInvestorFee(self, data, error, n, last):
        """"""
        pass

    #----------------------------------------------------------------------
    def onRspQryInvestorMargin(self, data, error, n, last):
        """"""
        pass


#----------------------------------------------------------------------
def test():
    """Test"""
    from PyQt4 import QtCore
    import sys

    def print_log(event):
        log = event.dict_['data']
        print ':'.join([log.logTime, log.logContent])

    app = QtCore.QCoreApplication(sys.argv)

    eventEngine = EventEngine()
    eventEngine.register(EVENT_LOG, print_log)
    eventEngine.start()

    gateway = FemasGateway(eventEngine)
    gateway.connect()

    sys.exit(app.exec_())

#----------------------------------------------------------------------
def generateStrLocalID(localID):
    """Convert the integer local order id to a string"""
    return str(localID).rjust(12, '0')


if __name__ == '__main__':
    test()
mit
MrKulu/webdam
motsclefs.py
1
5307
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import math

separateurs = ".,;:/?!&\n\\\"\'()[]{}#-|"
mots_a_retirer = ['http','www','youtube','com']
mots_filtres = ['le','la','les','un','une','des','de','du','d','l','m','t','s','mais','ou','et','douc','or','ni','car','qui','que','quoi','dont','où','à','dans','par','pour','en','vers','avec','sans','sur','sous','entre','derrière','devant','en','je','tu','il','elle','nou','vous','ils','elles','son','sa','ses','mon','ma','mes','au','ton','ta','tes','notre','nos','votre','vos','leur','leurs','ai','as','a','avons','avez','ont','eu','suis','es','est','sommes','êtes','sont','été','plus','moins','moi','lui','toi','me','te','se','eux','and','by','http','https','www','com','youtube','to','of','the','for','you','on','in','it','is','with','this','my','from','your','are','if','how','org','can','out','one','will','that','user','video','watch','like','upload','was','but','now','have','our','there','some','all','do','vs','fr']

# mots: list of strings
def frequences_mots(mots):
    frequences = dict({})
    for mot in mots:
        if mot in frequences:
            frequences[mot] += 1
        else:
            frequences[mot] = 1
    try:
        i = frequences.iterkeys()
        while True:
            mot = i.next()
            frequences[mot] /= float(len(mots))
    except StopIteration:
        return frequences

# textes: list of lists of strings
def tf(textes):
    resultat = []
    for texte in textes:
        resultat.append(frequences_mots(texte))
    return resultat

# table_frequences: dict string -> frequency
def idf(table_frequences):
    resultat = []
    occurrences = dict({})
    for frequences in table_frequences:
        for mot in frequences:
            try:
                occurrences[mot] += 1
            except KeyError:
                occurrences[mot] = 1
    for frequences in table_frequences:
        idfrequences = dict({})
        for mot in frequences:
            idfrequences[mot] = math.log((float(len(table_frequences))+1) / occurrences[mot])
        resultat.append(idfrequences)
    return resultat

# textes: list of lists of strings
def tfidf(textes):
    tfs = tf(textes)
    idfs = idf(tfs)
    resultat = []
    for i in xrange(len(textes)):
        tfidfs = dict({})
        for mot in tfs[i]:
            tfidfs[mot] = tfs[i][mot]*idfs[i][mot]
        resultat.append(tfidfs)
    return resultat

def one_space(s):
    # Collapse runs of spaces: replace double spaces until a fixed point is
    # reached (the two-space literal was collapsed in extraction; restored here)
    a = s.replace("  ", " ")
    b = s
    while(a != b):
        b = a
        a = a.replace("  ", " ")
    return a

# texte: string
# chaines_a_retirer: list of strings
def simplifie(texte, chaines_a_retirer):
    texte2 = texte.lower()
    texte2 = one_space(texte2)
    for x in separateurs:
        texte2 = texte2.replace(x, ' ')
    texte2 = one_space(texte2)
    for x in mots_a_retirer:
        texte2 = texte2.replace(x, ' ')
    texte2 = one_space(texte2)
    for x in chaines_a_retirer:
        texte2 = texte2.replace(x, ' ')
    texte2 = one_space(texte2)
    return texte2

# texte: string
def groupe_mots(texte, n):
    mots = texte.split()
    mots2 = []
    mots_recents = ["" for i in range(n)]
    if n <= 0:
        return []
    for mot in mots:
        del mots_recents[0]
        mots_recents.append(mot)
        if mots_recents[0] != "":
            groupe = " ".join(mots_recents)
            if groupe not in mots_filtres and len(groupe) > 1:
                mots2.append(groupe)
    return mots2

# texte: string
# chaines_a_retirer: list of strings
def preparation(texte, chaines_a_retirer, n):
    texte2 = simplifie(texte, chaines_a_retirer)
    return groupe_mots(texte2, n)

# tfidfs: list of dict string -> weight
def mots_pertinents(tfidfs, n):
    resultat = []
    for poids in tfidfs:
        mots = poids.items()
        mots.sort(lambda x, y: cmp(x[1], y[1]), None, True)
        resultat_partiel = []
        i = 0
        while i < len(mots) and len(resultat_partiel) < n:
            p = mots[i][1]
            while i < len(mots) and mots[i][1] == p:
                resultat_partiel.append(mots[i][0])
                i += 1
        resultat.append(resultat_partiel)
    return resultat

# tfs: dict string -> weight
def mots_frequents(tfs, n, seuil):
    resultat = []
    mots = tfs.items()
    mots.sort(lambda x, y: cmp(x[1], y[1]), None, True)
    i = 0
    if seuil:
        while i < len(mots) and mots[i][1] >= (mots[0][1]*0.75):
            resultat.append(mots[i][0])
            i += 1
        if len(resultat) > n:
            resultat = []
    else:
        while i < len(mots) and len(resultat) < n:
            p = mots[i][1]
            while i < len(mots) and mots[i][1] == p:
                resultat.append(mots[i][0])
                i += 1
    return resultat

# texte: list of strings
def mots_clefs(texte, n, seuil):
    tfs = tf([texte])[0]
    return mots_frequents(tfs, n, seuil)

# textes: list of lists of strings
def mots_clefs_multiple(textes, n):
    tfidfs = tfidf(textes)
    return mots_pertinents(tfidfs, n)

def entropie(texte):
    f = frequences_mots(texte.split())
    h = 0
    for i in f.keys():
        e = 1/((1/f[i])-1)
        h += -(e*math.log(e, 2))
    return h

def entropie_vids(vid):
    t = ""
    for v in vid:
        t += v.name + v.desc
    return(entropie(t))
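A quick usage sketch of the keyword pipeline above (the two sample sentences are illustrative):

# tf-idf keywords over two toy "documents", 1-grams only.
textes = [preparation("Le chat dort sur le tapis du salon", [], 1),
          preparation("Le chien court dans le parc", [], 1)]
print mots_clefs_multiple(textes, 2)   # top distinguishing terms per text
print mots_clefs(textes[0], 3, False)  # most frequent terms of the first text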
gpl-2.0
spcui/virt-test
qemu/tests/usb_storage.py
3
8667
import logging import re import uuid from autotest.client.shared import error from virttest import utils_test, aexpect, utils_misc @error.context_aware def run_usb_storage(test, params, env): """ Test usb storage devices in the guest. 1) Create a image file by qemu-img 2) Boot up a guest add this image as a usb device 3) Check usb device information via monitor 4) Check usb information by executing guest command 5) Check usb serial option (optional) 6) Check usb removable option (optional) 7) Check usb min_io_size/opt_io_size option (optional) :param test: QEMU test object :param params: Dictionary with the test parameters :param env: Dictionary with test environment. """ @error.context_aware def _verify_string(regex_str, string, expect_result, search_opt=0): """ Verify USB storage device in monitor :param regex_str: Regex for checking command output :param string: The string which will be checked :param expect_result: The expected string :param search_opt: Search option for re module. """ def _compare_str(act, exp, ignore_case): str_func = lambda x: x if ignore_case: str_func = lambda x: x.lower() if str_func(act) != str_func(exp): return ("Expected: '%s', Actual: '%s'" % (str_func(exp), str_func(act))) return "" ignore_case = False if search_opt & re.I == re.I: ignore_case = True error.context("Finding matched sub-string with regex pattern %s" % regex_str) m = re.findall(regex_str, string, search_opt) if not m: logging.debug(string) raise error.TestError("Could not find matched sub-string") error.context("Verify matched string is same as expected") actual_result = m[0] fail_log = [] if isinstance(actual_result, tuple): for i, v in enumerate(expect_result): ret = _compare_str(actual_result[i], v, ignore_case) if ret: fail_log.append(ret) else: ret = _compare_str(actual_result, expect_result[0], ignore_case) if ret: fail_log.append(ret) if fail_log: logging.debug(string) raise error.TestFail("Could not find expected string:\n %s" % ("\n".join(fail_log))) def _do_io_test_guest(session): utils_test.run_virt_sub_test(test, params, env, "format_disk") @error.context_aware def _restart_vm(options): if vm.is_alive(): vm.destroy() new_params = params.copy() for option, value in options.iteritems(): new_params[option] = value error.context("Restarting VM") vm.create(params=new_params) vm.verify_alive() def _login(): return vm.wait_for_login(timeout=login_timeout) def _get_usb_disk_name_in_guest(session): def _get_output(): cmd = "ls -l /dev/disk/by-path/* | grep usb" try: return session.cmd(cmd).strip() except aexpect.ShellCmdError: return "" output = utils_misc.wait_for(_get_output, login_timeout, step=5, text="Wait for getting USB disk name") devname = re.findall("sd\w", output) if devname: return devname[0] return "sda" @error.context_aware def _check_serial_option(serial, regex_str, expect_str): error.context("Set serial option to '%s'" % serial, logging.info) _restart_vm({"drive_serial_stg": serial}) error.context("Check serial option in monitor", logging.info) output = str(vm.monitor.info("qtree")) _verify_string(regex_str, output, [expect_str], re.S) error.context("Check serial option in guest", logging.info) session = _login() output = session.cmd("lsusb -v") if not ("EMPTY_STRING" in serial or "NO_EQUAL_STRING" in serial): # Verify in guest when serial is set to empty/null is meaningless. 
_verify_string(serial, output, [serial]) _do_io_test_guest(session) session.close() @error.context_aware def _check_removable_option(removable, expect_str): error.context("Set removable option to '%s'" % removable, logging.info) _restart_vm({"removable_stg": removable}) error.context("Check removable option in monitor", logging.info) output = str(vm.monitor.info("qtree")) regex_str = 'usb-storage.*?removable = (.*?)\n' _verify_string(regex_str, output, [removable], re.S) error.context("Check removable option in guest", logging.info) session = _login() cmd = "dmesg | grep %s" % _get_usb_disk_name_in_guest(session) output = session.cmd(cmd) _verify_string(expect_str, output, [expect_str], re.I) _do_io_test_guest(session) session.close() @error.context_aware def _check_io_size_option(min_io_size="512", opt_io_size="0"): error.context("Set min_io_size to %s, opt_io_size to %s" % (min_io_size, opt_io_size), logging.info) opt = {} opt["min_io_size_stg"] = min_io_size opt["opt_io_size_stg"] = opt_io_size _restart_vm(opt) error.context("Check min/opt io_size option in monitor", logging.info) output = str(vm.monitor.info("qtree")) regex_str = "usb-storage.*?min_io_size = (\d+).*?opt_io_size = (\d+)" _verify_string(regex_str, output, [min_io_size, opt_io_size], re.S) error.context("Check min/opt io_size option in guest", logging.info) session = _login() d = _get_usb_disk_name_in_guest(session) cmd = ("cat /sys/block/%s/queue/{minimum,optimal}_io_size" % d) output = session.cmd(cmd) # Note: If set min_io_size = 0, guest min_io_size would be set to # 512 by default. if min_io_size != "0": expected_min_size = min_io_size else: expected_min_size = "512" _verify_string( "(\d+)\n(\d+)", output, [expected_min_size, opt_io_size]) _do_io_test_guest(session) session.close() vm = env.get_vm(params["main_vm"]) vm.verify_alive() login_timeout = int(params.get("login_timeout", 360)) error.context("Check usb device information in monitor", logging.info) output = str(vm.monitor.info("usb")) if "Product QEMU USB MSD" not in output: logging.debug(output) raise error.TestFail("Could not find mass storage device") error.context("Check usb device information in guest", logging.info) session = _login() output = session.cmd(params["chk_usb_info_cmd"]) # No bus specified, default using "usb.0" for "usb-storage" for i in params["chk_usb_info_keyword"].split(","): _verify_string(i, output, [i]) _do_io_test_guest(session) session.close() if params.get("check_serial_option") == "yes": error.context("Check usb serial option", logging.info) serial = str(uuid.uuid4()) regex_str = 'usb-storage.*?serial = "(.*?)"\n' _check_serial_option(serial, regex_str, serial) logging.info("Check this option with some illegal string") logging.info("Set usb serial to a empty string") # An empty string, "" serial = "EMPTY_STRING" regex_str = 'usb-storage.*?serial = (.*?)\n' _check_serial_option(serial, regex_str, '""') logging.info("Leave usb serial option blank") serial = "NO_EQUAL_STRING" regex_str = 'usb-storage.*?serial = (.*?)\n' _check_serial_option(serial, regex_str, '"on"') if params.get("check_removable_option") == "yes": error.context("Check usb removable option", logging.info) removable = "on" expect_str = "Attached SCSI removable disk" _check_removable_option(removable, expect_str) removable = "off" expect_str = "Attached SCSI disk" _check_removable_option(removable, expect_str) if params.get("check_io_size_option") == "yes": error.context("Check usb min/opt io_size option", logging.info) _check_io_size_option("0", "0") # Guest can't 
recognize correct value which we set now, # So comment these test temporary. #_check_io_size_option("1024", "1024") #_check_io_size_option("4096", "4096")
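A hedged illustration of _verify_string's contract (the patterns and strings below are made up for the example):

# With capture groups, each group is compared positionally against
# expect_result; without groups, the single match is compared to
# expect_result[0].
#   _verify_string('serial = "(.*?)"', 'serial = "abc123"', ['abc123'])   # passes
#   _verify_string('min = (\\d+).*?opt = (\\d+)',
#                  'min = 512 opt = 0', ['512', '0'], re.S)               # passes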
gpl-2.0
square/pants
tests/python/pants_test/base/test_payload_field.py
2
9060
# coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).

from __future__ import (nested_scopes, generators, division, absolute_import, with_statement,
                        print_function, unicode_literals)

from pants.backend.jvm.targets.exclude import Exclude
from pants.backend.jvm.targets.jar_dependency import Artifact, JarDependency
from pants.backend.jvm.targets.jvm_binary import Bundle
from pants.backend.python.python_requirement import PythonRequirement
from pants.base.payload import Payload, PayloadFieldAlreadyDefinedError, PayloadFrozenError
from pants.base.payload_field import (BundleField, ExcludesField, JarsField, PrimitiveField,
                                      PythonRequirementsField, SourcesField)
from pants_test.base_test import BaseTest


class PayloadTest(BaseTest):
  def test_excludes_field(self):
    empty = ExcludesField()
    empty_fp = empty.fingerprint()
    self.assertEqual(empty_fp, empty.fingerprint())
    normal = ExcludesField([Exclude('com', 'foozle'), Exclude('org')])
    normal_fp = normal.fingerprint()
    self.assertEqual(normal_fp, normal.fingerprint())
    normal_dup = ExcludesField([Exclude('com', 'foozle'), Exclude('org')])
    self.assertEqual(normal_fp, normal_dup.fingerprint())
    self.assertNotEqual(empty_fp, normal_fp)

  def test_jars_field_order(self):
    jar1 = JarDependency('com', 'foo', '1.0.0')
    jar2 = JarDependency('org', 'baz')

    self.assertNotEqual(
      JarsField([jar1, jar2]).fingerprint(),
      JarsField([jar2, jar1]).fingerprint(),
    )

  def test_jars_field_artifacts(self):
    jar1 = JarDependency('com', 'foo', '1.0.0').with_artifact('com', 'baz')
    jar2 = JarDependency('com', 'foo', '1.0.0')

    self.assertNotEqual(
      JarsField([jar1]).fingerprint(),
      JarsField([jar2]).fingerprint(),
    )

  def test_jars_field_multiple_artifacts(self):
    # Different artifact subsets must fingerprint differently; identical
    # artifact sets must fingerprint the same regardless of insertion order.
    jar1 = (JarDependency('com', 'foo', '1.0.0')
            .with_artifact('com', 'baz')
            .with_artifact('org', 'bat'))
    jar2 = (JarDependency('com', 'foo', '1.0.0')
            .with_artifact('org', 'bat')
            .with_artifact('com', 'baz'))
    jar3 = (JarDependency('com', 'foo', '1.0.0')
            .with_artifact('org', 'bat'))
    jar4 = JarDependency('com', 'foo', '1.0.0')

    self.assertEqual(
      JarsField([jar1]).fingerprint(),
      JarsField([jar2]).fingerprint(),
    )
    self.assertNotEqual(
      JarsField([jar1]).fingerprint(),
      JarsField([jar3]).fingerprint(),
    )
    self.assertNotEqual(
      JarsField([jar1]).fingerprint(),
      JarsField([jar4]).fingerprint(),
    )
    self.assertNotEqual(
      JarsField([jar3]).fingerprint(),
      JarsField([jar4]).fingerprint(),
    )

  def test_jars_field_artifacts_ordering(self):
    """JarDependencies throw away ordering information about their artifacts in the cache key.

    But they do not throw it away in their internal representation! In the future, this should
    be fixed: either they should sort them as they are added and keep a canonical representation,
    or the order information should be preserved.
    """
    jar1 = (JarDependency('com', 'foo', '1.0.0')
            .with_artifact('com', 'baz')
            .with_artifact('org', 'bat'))
    jar2 = (JarDependency('com', 'foo', '1.0.0')
            .with_artifact('org', 'bat')
            .with_artifact('com', 'baz'))

    self.assertEqual(
      JarsField([jar1]).fingerprint(),
      JarsField([jar2]).fingerprint(),
    )

  def test_jars_field_configuration_order(self):
    """Like artifacts, JarDependencies throw away order information about their configurations.

    But only in the hash key; the internal representation is in the order inserted.
    """
""" jar1 = (JarDependency('com', 'foo', '1.0.0') .with_docs() .with_sources()) jar2 = (JarDependency('com', 'foo', '1.0.0') .with_sources() .with_docs()) self.assertEqual( JarsField([jar1]).fingerprint(), JarsField([jar2]).fingerprint(), ) def test_jars_field_configuration(self): jar1 = (JarDependency('com', 'foo', '1.0.0') .with_sources()) jar2 = (JarDependency('com', 'foo', '1.0.0') .with_docs()) self.assertNotEqual( JarsField([jar1]).fingerprint(), JarsField([jar2]).fingerprint(), ) def test_jars_field_artifact_configuration(self): """Like artifacts, JarDependencies throw away order information about their configurations. But only in the hash key, the internal representation is in the order inserted. """ jar1 = (JarDependency('com', 'foo', '1.0.0') .with_sources()) jar2 = (JarDependency('com', 'foo', '1.0.0') .with_docs()) self.assertNotEqual( JarsField([jar1]).fingerprint(), JarsField([jar2]).fingerprint(), ) def test_jars_field_apidocs(self): """apidocs are not properly rolled into the cache key right now. Is this intentional?""" jar1 = JarDependency('com', 'foo', '1.0.0', apidocs='pantsbuild.github.io') jar2 = JarDependency('com', 'foo', '1.0.0', apidocs='someother.pantsbuild.github.io') self.assertEqual( JarsField([jar1]).fingerprint(), JarsField([jar2]).fingerprint(), ) def test_python_requirements_field(self): req1 = PythonRequirement('foo==1.0') req2 = PythonRequirement('bar==1.0') self.assertNotEqual( PythonRequirementsField([req1]).fingerprint(), PythonRequirementsField([req2]).fingerprint(), ) def test_python_requirements_field_version_filter(self): """version_filter is a lambda and can't be hashed properly. Since in practice this is only ever used to differentiate between py3k and py2, it should use a tuple of strings or even just a flag instead. 
""" req1 = PythonRequirement('foo==1.0', version_filter=lambda py, pl: False) req2 = PythonRequirement('foo==1.0') self.assertEqual( PythonRequirementsField([req1]).fingerprint(), PythonRequirementsField([req2]).fingerprint(), ) def test_primitive_field(self): self.assertEqual( PrimitiveField({'foo': 'bar'}).fingerprint(), PrimitiveField({'foo': 'bar'}).fingerprint(), ) self.assertEqual( PrimitiveField(['foo', 'bar']).fingerprint(), PrimitiveField(('foo', 'bar')).fingerprint(), ) self.assertEqual( PrimitiveField(['foo', 'bar']).fingerprint(), PrimitiveField(('foo', 'bar')).fingerprint(), ) self.assertEqual( PrimitiveField('foo').fingerprint(), PrimitiveField(b'foo').fingerprint(), ) self.assertNotEqual( PrimitiveField('foo').fingerprint(), PrimitiveField('bar').fingerprint(), ) def test_excludes_field(self): self.assertEqual( ExcludesField([Exclude('com', 'foo')]).fingerprint(), ExcludesField([Exclude('com', 'foo')]).fingerprint(), ) self.assertEqual( ExcludesField([]).fingerprint(), ExcludesField().fingerprint(), ) self.assertNotEqual( ExcludesField([Exclude('com', 'foo')]).fingerprint(), ExcludesField([Exclude('com')]).fingerprint(), ) self.assertNotEqual( ExcludesField([Exclude('com', 'foo'), Exclude('org', 'bar')]).fingerprint(), ExcludesField([Exclude('org', 'bar'), Exclude('com', 'foo')]).fingerprint(), ) def test_sources_field(self): self.create_file('foo/bar/a.txt', 'a_contents') self.create_file('foo/bar/b.txt', 'b_contents') self.assertNotEqual( SourcesField( sources_rel_path='foo/bar', sources=['a.txt'], ).fingerprint(), SourcesField( sources_rel_path='foo/bar', sources=['b.txt'], ).fingerprint(), ) self.assertEqual( SourcesField( sources_rel_path='foo/bar', sources=['a.txt'], ).fingerprint(), SourcesField( sources_rel_path='foo/bar', sources=['a.txt'], ).fingerprint(), ) self.assertEqual( SourcesField( sources_rel_path='foo/bar', sources=['a.txt'], ).fingerprint(), SourcesField( sources_rel_path='foo/bar', sources=['a.txt'], ).fingerprint(), ) self.assertEqual( SourcesField( sources_rel_path='foo/bar', sources=['a.txt', 'b.txt'], ).fingerprint(), SourcesField( sources_rel_path='foo/bar', sources=['b.txt', 'a.txt'], ).fingerprint(), ) fp1 = SourcesField( sources_rel_path='foo/bar', sources=['a.txt'], ).fingerprint() self.create_file('foo/bar/a.txt', 'a_contents_different') fp2 = SourcesField( sources_rel_path='foo/bar', sources=['a.txt'], ).fingerprint() self.assertNotEqual(fp1, fp2)
apache-2.0
warreee/aim-3
Assignment2/Tableau/src/data_cleaner.py
1
4904
import graphlab as gl
import graphlab.aggregate as agg

data_in_path = "/home/warreee/projects/2016-SS-Assignments/Assignment2/Tableau/raw_data/"
data_out_path = "/home/warreee/projects/2016-SS-Assignments/Assignment2/Tableau/clean_data/"

# Every dataset below gets the identical treatment: strip the trailing comma
# from X2, truncate X3 to hour:minute, average X1 per (X3, X2) pair, then
# save the sorted result as CSV.
datasets = [
    'agora', 'biomedisch', 'cba', 'centrale', 'economie', 'kulak',
    'letteren', 'psychologie', 'rechten', 'socialewet', 'wijsbegeerte',
]

for name in datasets:
    frame = gl.SFrame.read_csv(data_in_path + name + "1.txt",
                               delimiter=' ', header=False)
    frame['X2'] = frame['X2'].apply(lambda x: x.replace(',', ''))
    frame['X3'] = frame['X3'].apply(lambda x: ':'.join(x.split(':')[0:2]))
    frame = frame.groupby(key_columns=['X3', 'X2'],
                          operations={'average': agg.MEAN('X1')})
    frame = frame.sort(sort_columns=['X2', 'X3'])
    frame.save(data_out_path + name + ".csv", format='csv')
apache-2.0
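A note on the record above: the same clean-group-average pipeline can be reproduced without GraphLab Create (now discontinued). The following is a hypothetical pandas equivalent of one iteration, assuming the X1/X2/X3 column names that SFrame.read_csv assigns by default; it is not part of the original script.

import pandas as pd

# Hypothetical pandas rewrite of one iteration of the pipeline above.
df = pd.read_csv("agora1.txt", sep=" ", header=None, names=["X1", "X2", "X3"])
df["X2"] = df["X2"].str.replace(",", "", regex=False)      # drop trailing comma
df["X3"] = df["X3"].str.split(":").str[:2].str.join(":")   # keep hour:minute
out = (df.groupby(["X3", "X2"], as_index=False)["X1"].mean()
         .rename(columns={"X1": "average"})
         .sort_values(["X2", "X3"]))
out.to_csv("agora.csv", index=False)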
PaloAltoNetworks-BD/SplunkforPaloAltoNetworks
Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/chardet/mbcssm.py
289
25481
######################## BEGIN LICENSE BLOCK ######################## # The Original Code is mozilla.org code. # # The Initial Developer of the Original Code is # Netscape Communications Corporation. # Portions created by the Initial Developer are Copyright (C) 1998 # the Initial Developer. All Rights Reserved. # # Contributor(s): # Mark Pilgrim - port to Python # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA # 02110-1301 USA ######################### END LICENSE BLOCK ######################### from .enums import MachineState # BIG5 BIG5_CLS = ( 1,1,1,1,1,1,1,1, # 00 - 07 #allow 0x00 as legal value 1,1,1,1,1,1,0,0, # 08 - 0f 1,1,1,1,1,1,1,1, # 10 - 17 1,1,1,0,1,1,1,1, # 18 - 1f 1,1,1,1,1,1,1,1, # 20 - 27 1,1,1,1,1,1,1,1, # 28 - 2f 1,1,1,1,1,1,1,1, # 30 - 37 1,1,1,1,1,1,1,1, # 38 - 3f 2,2,2,2,2,2,2,2, # 40 - 47 2,2,2,2,2,2,2,2, # 48 - 4f 2,2,2,2,2,2,2,2, # 50 - 57 2,2,2,2,2,2,2,2, # 58 - 5f 2,2,2,2,2,2,2,2, # 60 - 67 2,2,2,2,2,2,2,2, # 68 - 6f 2,2,2,2,2,2,2,2, # 70 - 77 2,2,2,2,2,2,2,1, # 78 - 7f 4,4,4,4,4,4,4,4, # 80 - 87 4,4,4,4,4,4,4,4, # 88 - 8f 4,4,4,4,4,4,4,4, # 90 - 97 4,4,4,4,4,4,4,4, # 98 - 9f 4,3,3,3,3,3,3,3, # a0 - a7 3,3,3,3,3,3,3,3, # a8 - af 3,3,3,3,3,3,3,3, # b0 - b7 3,3,3,3,3,3,3,3, # b8 - bf 3,3,3,3,3,3,3,3, # c0 - c7 3,3,3,3,3,3,3,3, # c8 - cf 3,3,3,3,3,3,3,3, # d0 - d7 3,3,3,3,3,3,3,3, # d8 - df 3,3,3,3,3,3,3,3, # e0 - e7 3,3,3,3,3,3,3,3, # e8 - ef 3,3,3,3,3,3,3,3, # f0 - f7 3,3,3,3,3,3,3,0 # f8 - ff ) BIG5_ST = ( MachineState.ERROR,MachineState.START,MachineState.START, 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,#08-0f MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START#10-17 ) BIG5_CHAR_LEN_TABLE = (0, 1, 1, 2, 0) BIG5_SM_MODEL = {'class_table': BIG5_CLS, 'class_factor': 5, 'state_table': BIG5_ST, 'char_len_table': BIG5_CHAR_LEN_TABLE, 'name': 'Big5'} # CP949 CP949_CLS = ( 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,0,0, # 00 - 0f 1,1,1,1,1,1,1,1, 1,1,1,0,1,1,1,1, # 10 - 1f 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, # 20 - 2f 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, # 30 - 3f 1,4,4,4,4,4,4,4, 4,4,4,4,4,4,4,4, # 40 - 4f 4,4,5,5,5,5,5,5, 5,5,5,1,1,1,1,1, # 50 - 5f 1,5,5,5,5,5,5,5, 5,5,5,5,5,5,5,5, # 60 - 6f 5,5,5,5,5,5,5,5, 5,5,5,1,1,1,1,1, # 70 - 7f 0,6,6,6,6,6,6,6, 6,6,6,6,6,6,6,6, # 80 - 8f 6,6,6,6,6,6,6,6, 6,6,6,6,6,6,6,6, # 90 - 9f 6,7,7,7,7,7,7,7, 7,7,7,7,7,8,8,8, # a0 - af 7,7,7,7,7,7,7,7, 7,7,7,7,7,7,7,7, # b0 - bf 7,7,7,7,7,7,9,2, 2,3,2,2,2,2,2,2, # c0 - cf 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2, # d0 - df 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2, # e0 - ef 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,0, # f0 - ff ) CP949_ST = ( #cls= 0 1 2 3 4 5 6 7 8 9 # previous state = 
MachineState.ERROR,MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START, 4, 5,MachineState.ERROR, 6, # MachineState.START MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, # MachineState.ERROR MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME, # MachineState.ITS_ME MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START, # 3 MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START, # 4 MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START, # 5 MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START, # 6 ) CP949_CHAR_LEN_TABLE = (0, 1, 2, 0, 1, 1, 2, 2, 0, 2) CP949_SM_MODEL = {'class_table': CP949_CLS, 'class_factor': 10, 'state_table': CP949_ST, 'char_len_table': CP949_CHAR_LEN_TABLE, 'name': 'CP949'} # EUC-JP EUCJP_CLS = ( 4,4,4,4,4,4,4,4, # 00 - 07 4,4,4,4,4,4,5,5, # 08 - 0f 4,4,4,4,4,4,4,4, # 10 - 17 4,4,4,5,4,4,4,4, # 18 - 1f 4,4,4,4,4,4,4,4, # 20 - 27 4,4,4,4,4,4,4,4, # 28 - 2f 4,4,4,4,4,4,4,4, # 30 - 37 4,4,4,4,4,4,4,4, # 38 - 3f 4,4,4,4,4,4,4,4, # 40 - 47 4,4,4,4,4,4,4,4, # 48 - 4f 4,4,4,4,4,4,4,4, # 50 - 57 4,4,4,4,4,4,4,4, # 58 - 5f 4,4,4,4,4,4,4,4, # 60 - 67 4,4,4,4,4,4,4,4, # 68 - 6f 4,4,4,4,4,4,4,4, # 70 - 77 4,4,4,4,4,4,4,4, # 78 - 7f 5,5,5,5,5,5,5,5, # 80 - 87 5,5,5,5,5,5,1,3, # 88 - 8f 5,5,5,5,5,5,5,5, # 90 - 97 5,5,5,5,5,5,5,5, # 98 - 9f 5,2,2,2,2,2,2,2, # a0 - a7 2,2,2,2,2,2,2,2, # a8 - af 2,2,2,2,2,2,2,2, # b0 - b7 2,2,2,2,2,2,2,2, # b8 - bf 2,2,2,2,2,2,2,2, # c0 - c7 2,2,2,2,2,2,2,2, # c8 - cf 2,2,2,2,2,2,2,2, # d0 - d7 2,2,2,2,2,2,2,2, # d8 - df 0,0,0,0,0,0,0,0, # e0 - e7 0,0,0,0,0,0,0,0, # e8 - ef 0,0,0,0,0,0,0,0, # f0 - f7 0,0,0,0,0,0,0,5 # f8 - ff ) EUCJP_ST = ( 3, 4, 3, 5,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f MachineState.ITS_ME,MachineState.ITS_ME,MachineState.START,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#10-17 MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 3,MachineState.ERROR,#18-1f 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START#20-27 ) EUCJP_CHAR_LEN_TABLE = (2, 2, 2, 3, 1, 0) EUCJP_SM_MODEL = {'class_table': EUCJP_CLS, 'class_factor': 6, 'state_table': EUCJP_ST, 'char_len_table': EUCJP_CHAR_LEN_TABLE, 'name': 'EUC-JP'} # EUC-KR EUCKR_CLS = ( 1,1,1,1,1,1,1,1, # 00 - 07 1,1,1,1,1,1,0,0, # 08 - 0f 1,1,1,1,1,1,1,1, # 10 - 17 1,1,1,0,1,1,1,1, # 18 - 1f 1,1,1,1,1,1,1,1, # 20 - 27 1,1,1,1,1,1,1,1, # 28 - 2f 
1,1,1,1,1,1,1,1, # 30 - 37 1,1,1,1,1,1,1,1, # 38 - 3f 1,1,1,1,1,1,1,1, # 40 - 47 1,1,1,1,1,1,1,1, # 48 - 4f 1,1,1,1,1,1,1,1, # 50 - 57 1,1,1,1,1,1,1,1, # 58 - 5f 1,1,1,1,1,1,1,1, # 60 - 67 1,1,1,1,1,1,1,1, # 68 - 6f 1,1,1,1,1,1,1,1, # 70 - 77 1,1,1,1,1,1,1,1, # 78 - 7f 0,0,0,0,0,0,0,0, # 80 - 87 0,0,0,0,0,0,0,0, # 88 - 8f 0,0,0,0,0,0,0,0, # 90 - 97 0,0,0,0,0,0,0,0, # 98 - 9f 0,2,2,2,2,2,2,2, # a0 - a7 2,2,2,2,2,3,3,3, # a8 - af 2,2,2,2,2,2,2,2, # b0 - b7 2,2,2,2,2,2,2,2, # b8 - bf 2,2,2,2,2,2,2,2, # c0 - c7 2,3,2,2,2,2,2,2, # c8 - cf 2,2,2,2,2,2,2,2, # d0 - d7 2,2,2,2,2,2,2,2, # d8 - df 2,2,2,2,2,2,2,2, # e0 - e7 2,2,2,2,2,2,2,2, # e8 - ef 2,2,2,2,2,2,2,2, # f0 - f7 2,2,2,2,2,2,2,0 # f8 - ff ) EUCKR_ST = ( MachineState.ERROR,MachineState.START, 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START #08-0f ) EUCKR_CHAR_LEN_TABLE = (0, 1, 2, 0) EUCKR_SM_MODEL = {'class_table': EUCKR_CLS, 'class_factor': 4, 'state_table': EUCKR_ST, 'char_len_table': EUCKR_CHAR_LEN_TABLE, 'name': 'EUC-KR'} # EUC-TW EUCTW_CLS = ( 2,2,2,2,2,2,2,2, # 00 - 07 2,2,2,2,2,2,0,0, # 08 - 0f 2,2,2,2,2,2,2,2, # 10 - 17 2,2,2,0,2,2,2,2, # 18 - 1f 2,2,2,2,2,2,2,2, # 20 - 27 2,2,2,2,2,2,2,2, # 28 - 2f 2,2,2,2,2,2,2,2, # 30 - 37 2,2,2,2,2,2,2,2, # 38 - 3f 2,2,2,2,2,2,2,2, # 40 - 47 2,2,2,2,2,2,2,2, # 48 - 4f 2,2,2,2,2,2,2,2, # 50 - 57 2,2,2,2,2,2,2,2, # 58 - 5f 2,2,2,2,2,2,2,2, # 60 - 67 2,2,2,2,2,2,2,2, # 68 - 6f 2,2,2,2,2,2,2,2, # 70 - 77 2,2,2,2,2,2,2,2, # 78 - 7f 0,0,0,0,0,0,0,0, # 80 - 87 0,0,0,0,0,0,6,0, # 88 - 8f 0,0,0,0,0,0,0,0, # 90 - 97 0,0,0,0,0,0,0,0, # 98 - 9f 0,3,4,4,4,4,4,4, # a0 - a7 5,5,1,1,1,1,1,1, # a8 - af 1,1,1,1,1,1,1,1, # b0 - b7 1,1,1,1,1,1,1,1, # b8 - bf 1,1,3,1,3,3,3,3, # c0 - c7 3,3,3,3,3,3,3,3, # c8 - cf 3,3,3,3,3,3,3,3, # d0 - d7 3,3,3,3,3,3,3,3, # d8 - df 3,3,3,3,3,3,3,3, # e0 - e7 3,3,3,3,3,3,3,3, # e8 - ef 3,3,3,3,3,3,3,3, # f0 - f7 3,3,3,3,3,3,3,0 # f8 - ff ) EUCTW_ST = ( MachineState.ERROR,MachineState.ERROR,MachineState.START, 3, 3, 3, 4,MachineState.ERROR,#00-07 MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.START,MachineState.ERROR,#10-17 MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.START,MachineState.START,#20-27 MachineState.START,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START #28-2f ) EUCTW_CHAR_LEN_TABLE = (0, 0, 1, 2, 2, 2, 3) EUCTW_SM_MODEL = {'class_table': EUCTW_CLS, 'class_factor': 7, 'state_table': EUCTW_ST, 'char_len_table': EUCTW_CHAR_LEN_TABLE, 'name': 'x-euc-tw'} # GB2312 GB2312_CLS = ( 1,1,1,1,1,1,1,1, # 00 - 07 1,1,1,1,1,1,0,0, # 08 - 0f 1,1,1,1,1,1,1,1, # 10 - 17 1,1,1,0,1,1,1,1, # 18 - 1f 1,1,1,1,1,1,1,1, # 20 - 27 1,1,1,1,1,1,1,1, # 28 - 2f 3,3,3,3,3,3,3,3, # 30 - 37 3,3,1,1,1,1,1,1, # 38 - 3f 2,2,2,2,2,2,2,2, # 40 - 47 2,2,2,2,2,2,2,2, # 48 - 4f 2,2,2,2,2,2,2,2, # 50 - 57 2,2,2,2,2,2,2,2, # 58 - 5f 2,2,2,2,2,2,2,2, # 60 - 
67 2,2,2,2,2,2,2,2, # 68 - 6f 2,2,2,2,2,2,2,2, # 70 - 77 2,2,2,2,2,2,2,4, # 78 - 7f 5,6,6,6,6,6,6,6, # 80 - 87 6,6,6,6,6,6,6,6, # 88 - 8f 6,6,6,6,6,6,6,6, # 90 - 97 6,6,6,6,6,6,6,6, # 98 - 9f 6,6,6,6,6,6,6,6, # a0 - a7 6,6,6,6,6,6,6,6, # a8 - af 6,6,6,6,6,6,6,6, # b0 - b7 6,6,6,6,6,6,6,6, # b8 - bf 6,6,6,6,6,6,6,6, # c0 - c7 6,6,6,6,6,6,6,6, # c8 - cf 6,6,6,6,6,6,6,6, # d0 - d7 6,6,6,6,6,6,6,6, # d8 - df 6,6,6,6,6,6,6,6, # e0 - e7 6,6,6,6,6,6,6,6, # e8 - ef 6,6,6,6,6,6,6,6, # f0 - f7 6,6,6,6,6,6,6,0 # f8 - ff ) GB2312_ST = ( MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START, 3,MachineState.ERROR,#00-07 MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,#10-17 4,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f MachineState.ERROR,MachineState.ERROR, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,#20-27 MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START #28-2f ) # To be accurate, the length of class 6 can be either 2 or 4. # But it is not necessary to discriminate between the two since # it is used for frequency analysis only, and we are validating # each code range there as well. So it is safe to set it to be # 2 here. GB2312_CHAR_LEN_TABLE = (0, 1, 1, 1, 1, 1, 2) GB2312_SM_MODEL = {'class_table': GB2312_CLS, 'class_factor': 7, 'state_table': GB2312_ST, 'char_len_table': GB2312_CHAR_LEN_TABLE, 'name': 'GB2312'} # Shift_JIS SJIS_CLS = ( 1,1,1,1,1,1,1,1, # 00 - 07 1,1,1,1,1,1,0,0, # 08 - 0f 1,1,1,1,1,1,1,1, # 10 - 17 1,1,1,0,1,1,1,1, # 18 - 1f 1,1,1,1,1,1,1,1, # 20 - 27 1,1,1,1,1,1,1,1, # 28 - 2f 1,1,1,1,1,1,1,1, # 30 - 37 1,1,1,1,1,1,1,1, # 38 - 3f 2,2,2,2,2,2,2,2, # 40 - 47 2,2,2,2,2,2,2,2, # 48 - 4f 2,2,2,2,2,2,2,2, # 50 - 57 2,2,2,2,2,2,2,2, # 58 - 5f 2,2,2,2,2,2,2,2, # 60 - 67 2,2,2,2,2,2,2,2, # 68 - 6f 2,2,2,2,2,2,2,2, # 70 - 77 2,2,2,2,2,2,2,1, # 78 - 7f 3,3,3,3,3,2,2,3, # 80 - 87 3,3,3,3,3,3,3,3, # 88 - 8f 3,3,3,3,3,3,3,3, # 90 - 97 3,3,3,3,3,3,3,3, # 98 - 9f #0xa0 is illegal in sjis encoding, but some pages does #contain such byte. We need to be more error forgiven. 
2,2,2,2,2,2,2,2, # a0 - a7 2,2,2,2,2,2,2,2, # a8 - af 2,2,2,2,2,2,2,2, # b0 - b7 2,2,2,2,2,2,2,2, # b8 - bf 2,2,2,2,2,2,2,2, # c0 - c7 2,2,2,2,2,2,2,2, # c8 - cf 2,2,2,2,2,2,2,2, # d0 - d7 2,2,2,2,2,2,2,2, # d8 - df 3,3,3,3,3,3,3,3, # e0 - e7 3,3,3,3,3,4,4,4, # e8 - ef 3,3,3,3,3,3,3,3, # f0 - f7 3,3,3,3,3,0,0,0) # f8 - ff SJIS_ST = ( MachineState.ERROR,MachineState.START,MachineState.START, 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START #10-17 ) SJIS_CHAR_LEN_TABLE = (0, 1, 1, 2, 0, 0) SJIS_SM_MODEL = {'class_table': SJIS_CLS, 'class_factor': 6, 'state_table': SJIS_ST, 'char_len_table': SJIS_CHAR_LEN_TABLE, 'name': 'Shift_JIS'} # UCS2-BE UCS2BE_CLS = ( 0,0,0,0,0,0,0,0, # 00 - 07 0,0,1,0,0,2,0,0, # 08 - 0f 0,0,0,0,0,0,0,0, # 10 - 17 0,0,0,3,0,0,0,0, # 18 - 1f 0,0,0,0,0,0,0,0, # 20 - 27 0,3,3,3,3,3,0,0, # 28 - 2f 0,0,0,0,0,0,0,0, # 30 - 37 0,0,0,0,0,0,0,0, # 38 - 3f 0,0,0,0,0,0,0,0, # 40 - 47 0,0,0,0,0,0,0,0, # 48 - 4f 0,0,0,0,0,0,0,0, # 50 - 57 0,0,0,0,0,0,0,0, # 58 - 5f 0,0,0,0,0,0,0,0, # 60 - 67 0,0,0,0,0,0,0,0, # 68 - 6f 0,0,0,0,0,0,0,0, # 70 - 77 0,0,0,0,0,0,0,0, # 78 - 7f 0,0,0,0,0,0,0,0, # 80 - 87 0,0,0,0,0,0,0,0, # 88 - 8f 0,0,0,0,0,0,0,0, # 90 - 97 0,0,0,0,0,0,0,0, # 98 - 9f 0,0,0,0,0,0,0,0, # a0 - a7 0,0,0,0,0,0,0,0, # a8 - af 0,0,0,0,0,0,0,0, # b0 - b7 0,0,0,0,0,0,0,0, # b8 - bf 0,0,0,0,0,0,0,0, # c0 - c7 0,0,0,0,0,0,0,0, # c8 - cf 0,0,0,0,0,0,0,0, # d0 - d7 0,0,0,0,0,0,0,0, # d8 - df 0,0,0,0,0,0,0,0, # e0 - e7 0,0,0,0,0,0,0,0, # e8 - ef 0,0,0,0,0,0,0,0, # f0 - f7 0,0,0,0,0,0,4,5 # f8 - ff ) UCS2BE_ST = ( 5, 7, 7,MachineState.ERROR, 4, 3,MachineState.ERROR,MachineState.ERROR,#00-07 MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f MachineState.ITS_ME,MachineState.ITS_ME, 6, 6, 6, 6,MachineState.ERROR,MachineState.ERROR,#10-17 6, 6, 6, 6, 6,MachineState.ITS_ME, 6, 6,#18-1f 6, 6, 6, 6, 5, 7, 7,MachineState.ERROR,#20-27 5, 8, 6, 6,MachineState.ERROR, 6, 6, 6,#28-2f 6, 6, 6, 6,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START #30-37 ) UCS2BE_CHAR_LEN_TABLE = (2, 2, 2, 0, 2, 2) UCS2BE_SM_MODEL = {'class_table': UCS2BE_CLS, 'class_factor': 6, 'state_table': UCS2BE_ST, 'char_len_table': UCS2BE_CHAR_LEN_TABLE, 'name': 'UTF-16BE'} # UCS2-LE UCS2LE_CLS = ( 0,0,0,0,0,0,0,0, # 00 - 07 0,0,1,0,0,2,0,0, # 08 - 0f 0,0,0,0,0,0,0,0, # 10 - 17 0,0,0,3,0,0,0,0, # 18 - 1f 0,0,0,0,0,0,0,0, # 20 - 27 0,3,3,3,3,3,0,0, # 28 - 2f 0,0,0,0,0,0,0,0, # 30 - 37 0,0,0,0,0,0,0,0, # 38 - 3f 0,0,0,0,0,0,0,0, # 40 - 47 0,0,0,0,0,0,0,0, # 48 - 4f 0,0,0,0,0,0,0,0, # 50 - 57 0,0,0,0,0,0,0,0, # 58 - 5f 0,0,0,0,0,0,0,0, # 60 - 67 0,0,0,0,0,0,0,0, # 68 - 6f 0,0,0,0,0,0,0,0, # 70 - 77 0,0,0,0,0,0,0,0, # 78 - 7f 0,0,0,0,0,0,0,0, # 80 - 87 0,0,0,0,0,0,0,0, # 88 - 8f 0,0,0,0,0,0,0,0, # 90 - 97 0,0,0,0,0,0,0,0, # 98 - 9f 0,0,0,0,0,0,0,0, # a0 - a7 0,0,0,0,0,0,0,0, # a8 - af 0,0,0,0,0,0,0,0, # b0 - b7 0,0,0,0,0,0,0,0, # b8 - bf 0,0,0,0,0,0,0,0, # c0 - c7 0,0,0,0,0,0,0,0, # c8 - cf 0,0,0,0,0,0,0,0, # d0 - d7 0,0,0,0,0,0,0,0, # d8 - df 0,0,0,0,0,0,0,0, # e0 - e7 0,0,0,0,0,0,0,0, # e8 - ef 0,0,0,0,0,0,0,0, # f0 - f7 0,0,0,0,0,0,4,5 # f8 - ff ) 
UCS2LE_ST = ( 6, 6, 7, 6, 4, 3,MachineState.ERROR,MachineState.ERROR,#00-07 MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f MachineState.ITS_ME,MachineState.ITS_ME, 5, 5, 5,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,#10-17 5, 5, 5,MachineState.ERROR, 5,MachineState.ERROR, 6, 6,#18-1f 7, 6, 8, 8, 5, 5, 5,MachineState.ERROR,#20-27 5, 5, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 5, 5,#28-2f 5, 5, 5,MachineState.ERROR, 5,MachineState.ERROR,MachineState.START,MachineState.START #30-37 ) UCS2LE_CHAR_LEN_TABLE = (2, 2, 2, 2, 2, 2) UCS2LE_SM_MODEL = {'class_table': UCS2LE_CLS, 'class_factor': 6, 'state_table': UCS2LE_ST, 'char_len_table': UCS2LE_CHAR_LEN_TABLE, 'name': 'UTF-16LE'} # UTF-8 UTF8_CLS = ( 1,1,1,1,1,1,1,1, # 00 - 07 #allow 0x00 as a legal value 1,1,1,1,1,1,0,0, # 08 - 0f 1,1,1,1,1,1,1,1, # 10 - 17 1,1,1,0,1,1,1,1, # 18 - 1f 1,1,1,1,1,1,1,1, # 20 - 27 1,1,1,1,1,1,1,1, # 28 - 2f 1,1,1,1,1,1,1,1, # 30 - 37 1,1,1,1,1,1,1,1, # 38 - 3f 1,1,1,1,1,1,1,1, # 40 - 47 1,1,1,1,1,1,1,1, # 48 - 4f 1,1,1,1,1,1,1,1, # 50 - 57 1,1,1,1,1,1,1,1, # 58 - 5f 1,1,1,1,1,1,1,1, # 60 - 67 1,1,1,1,1,1,1,1, # 68 - 6f 1,1,1,1,1,1,1,1, # 70 - 77 1,1,1,1,1,1,1,1, # 78 - 7f 2,2,2,2,3,3,3,3, # 80 - 87 4,4,4,4,4,4,4,4, # 88 - 8f 4,4,4,4,4,4,4,4, # 90 - 97 4,4,4,4,4,4,4,4, # 98 - 9f 5,5,5,5,5,5,5,5, # a0 - a7 5,5,5,5,5,5,5,5, # a8 - af 5,5,5,5,5,5,5,5, # b0 - b7 5,5,5,5,5,5,5,5, # b8 - bf 0,0,6,6,6,6,6,6, # c0 - c7 6,6,6,6,6,6,6,6, # c8 - cf 6,6,6,6,6,6,6,6, # d0 - d7 6,6,6,6,6,6,6,6, # d8 - df 7,8,8,8,8,8,8,8, # e0 - e7 8,8,8,8,8,9,8,8, # e8 - ef 10,11,11,11,11,11,11,11, # f0 - f7 12,13,13,13,14,15,0,0 # f8 - ff ) UTF8_ST = ( MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 12, 10,#00-07 9, 11, 8, 7, 6, 5, 4, 3,#08-0f MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#10-17 MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#20-27 MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#28-2f MachineState.ERROR,MachineState.ERROR, 5, 5, 5, 5,MachineState.ERROR,MachineState.ERROR,#30-37 MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#38-3f MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 5, 5, 5,MachineState.ERROR,MachineState.ERROR,#40-47 MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#48-4f MachineState.ERROR,MachineState.ERROR, 7, 7, 7, 7,MachineState.ERROR,MachineState.ERROR,#50-57 MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#58-5f MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 7, 7,MachineState.ERROR,MachineState.ERROR,#60-67 
MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#68-6f MachineState.ERROR,MachineState.ERROR, 9, 9, 9, 9,MachineState.ERROR,MachineState.ERROR,#70-77 MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#78-7f MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 9,MachineState.ERROR,MachineState.ERROR,#80-87 MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#88-8f MachineState.ERROR,MachineState.ERROR, 12, 12, 12, 12,MachineState.ERROR,MachineState.ERROR,#90-97 MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#98-9f MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 12,MachineState.ERROR,MachineState.ERROR,#a0-a7 MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#a8-af MachineState.ERROR,MachineState.ERROR, 12, 12, 12,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#b0-b7 MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#b8-bf MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,#c0-c7 MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR #c8-cf ) UTF8_CHAR_LEN_TABLE = (0, 1, 0, 0, 0, 0, 2, 3, 3, 3, 4, 4, 5, 5, 6, 6) UTF8_SM_MODEL = {'class_table': UTF8_CLS, 'class_factor': 16, 'state_table': UTF8_ST, 'char_len_table': UTF8_CHAR_LEN_TABLE, 'name': 'UTF-8'}
isc
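The state tables in mbcssm.py above are consumed by chardet's coding state machine. A minimal sketch of that use, assuming chardet's CodingStateMachine/MachineState API (real classes in the package, though the exact import paths should be verified against the vendored copy):

from chardet.codingstatemachine import CodingStateMachine
from chardet.enums import MachineState
from chardet.mbcssm import UTF8_SM_MODEL

sm = CodingStateMachine(UTF8_SM_MODEL)
for byte in b"\xe4\xb8\xad":  # one three-byte UTF-8 character
    # next_state() looks up class_table[byte], then
    # state_table[current_state * class_factor + byte_class].
    if sm.next_state(byte) == MachineState.ERROR:
        print("byte stream is not valid UTF-8")
        break
else:
    print("byte stream is consistent with UTF-8")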
xydinesh/youtube-dl
youtube_dl/extractor/traileraddict.py
146
2690
from __future__ import unicode_literals

import re

from .common import InfoExtractor


class TrailerAddictIE(InfoExtractor):
    _WORKING = False
    _VALID_URL = r'(?:http://)?(?:www\.)?traileraddict\.com/(?:trailer|clip)/(?P<movie>.+?)/(?P<trailer_name>.+)'
    _TEST = {
        'url': 'http://www.traileraddict.com/trailer/prince-avalanche/trailer',
        'md5': '41365557f3c8c397d091da510e73ceb4',
        'info_dict': {
            'id': '76184',
            'ext': 'mp4',
            'title': 'Prince Avalanche Trailer',
            'description': 'Trailer for Prince Avalanche.\n\nTwo highway road workers spend the summer of 1988 away from their city lives. The isolated landscape becomes a place of misadventure as the men find themselves at odds with each other and the women they left behind.',
        }
    }

    def _real_extract(self, url):
        mobj = re.match(self._VALID_URL, url)
        name = mobj.group('movie') + '/' + mobj.group('trailer_name')

        webpage = self._download_webpage(url, name)
        title = self._search_regex(
            r'<title>(.+?)</title>',
            webpage, 'video title').replace(' - Trailer Addict', '')
        view_count_str = self._search_regex(
            r'<span class="views_n">([0-9,.]+)</span>',
            webpage, 'view count', fatal=False)
        view_count = (
            None if view_count_str is None
            else int(view_count_str.replace(',', '')))
        video_id = self._search_regex(
            r'<param\s+name="movie"\s+value="/emb/([0-9]+)"\s*/>',
            webpage, 'video id')

        # Presence of (no)watchplus function indicates HD quality is available.
        # Note: the trailing '()' is an empty capture group, not literal
        # parentheses ('\(\)' was probably intended); the original pattern is
        # kept here to preserve behaviour.
        if re.search(r'function (no)?watchplus()', webpage):
            fvar = "fvarhd"
        else:
            fvar = "fvar"

        info_url = "http://www.traileraddict.com/%s.php?tid=%s" % (fvar, str(video_id))
        info_webpage = self._download_webpage(
            info_url, video_id, "Downloading the info webpage")

        final_url = self._search_regex(
            r'&fileurl=(.+)',
            info_webpage, 'Download url').replace('%3F', '?')
        thumbnail_url = self._search_regex(
            r'&image=(.+?)&',
            info_webpage, 'thumbnail url')

        description = self._html_search_regex(
            r'(?s)<div class="synopsis">.*?<div class="movie_label_info"[^>]*>(.*?)</div>',
            webpage, 'description', fatal=False)

        return {
            'id': video_id,
            'url': final_url,
            'title': title,
            'thumbnail': thumbnail_url,
            'description': description,
            'view_count': view_count,
        }
unlicense
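Since TrailerAddictIE registers itself like any other youtube-dl extractor, it can be exercised through the public API. A hedged sketch (the URL comes from the extractor's own _TEST block; with _WORKING = False the live request is expected to fail):

import youtube_dl

ydl = youtube_dl.YoutubeDL({'quiet': True})
info = ydl.extract_info(
    'http://www.traileraddict.com/trailer/prince-avalanche/trailer',
    download=False)
print(info.get('title'), info.get('view_count'))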
ZhangXinNan/tensorflow
tensorflow/contrib/slim/__init__.py
101
2359
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Slim is an interface to contrib functions, examples and models.

TODO(nsilberman): flesh out documentation.
"""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

# pylint: disable=unused-import,line-too-long,g-importing-member,wildcard-import
# TODO(jart): Delete non-slim imports
from tensorflow.contrib import losses
from tensorflow.contrib import metrics
from tensorflow.contrib.framework.python.ops.arg_scope import *
from tensorflow.contrib.framework.python.ops.variables import *
from tensorflow.contrib.layers.python.layers import *
from tensorflow.contrib.layers.python.layers.initializers import *
from tensorflow.contrib.layers.python.layers.regularizers import *
from tensorflow.contrib.slim.python.slim import evaluation
from tensorflow.contrib.slim.python.slim import learning
from tensorflow.contrib.slim.python.slim import model_analyzer
from tensorflow.contrib.slim.python.slim import queues
from tensorflow.contrib.slim.python.slim import summaries
from tensorflow.contrib.slim.python.slim.data import data_decoder
from tensorflow.contrib.slim.python.slim.data import data_provider
from tensorflow.contrib.slim.python.slim.data import dataset
from tensorflow.contrib.slim.python.slim.data import dataset_data_provider
from tensorflow.contrib.slim.python.slim.data import parallel_reader
from tensorflow.contrib.slim.python.slim.data import prefetch_queue
from tensorflow.contrib.slim.python.slim.data import tfexample_decoder
from tensorflow.python.util.all_util import make_all
# pylint: enable=unused-import,line-too-long,g-importing-member,wildcard-import

__all__ = make_all(__name__)
apache-2.0
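Everything this __init__.py re-exports is reachable through the single tf.contrib.slim symbol. A minimal usage sketch under the TF 1.x contrib API (layer names and sizes are illustrative; fully_connected, arg_scope and l2_regularizer come in via the wildcard imports above):

import tensorflow as tf

slim = tf.contrib.slim

def mlp(inputs):
    # arg_scope applies the shared regularizer to every fully_connected call.
    with slim.arg_scope([slim.fully_connected],
                        weights_regularizer=slim.l2_regularizer(1e-4)):
        net = slim.fully_connected(inputs, 64, scope='fc1')
        return slim.fully_connected(net, 10, activation_fn=None, scope='logits')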
kaxel/tdsftp
vendor/ruby/1.9.1/gems/nokogiri-1.6.1/ext/nokogiri/tmp/x86_64-apple-darwin13.0.0/ports/libxml2/2.8.0/libxml2-2.8.0/python/tests/compareNodes.py
87
1507
#!/usr/bin/python -u
import sys
import libxml2

# Memory debug specific
libxml2.debugMemory(1)

#
# Testing XML Node comparison and Node hash-value
#
doc = libxml2.parseDoc("""<root><foo/></root>""")
root = doc.getRootElement()

# Create two different Python objects which both point to foo
foonode1 = root.children
foonode2 = root.children

# Now check that [in]equality tests work ok
if not (foonode1 == foonode2):
    print "Error comparing nodes with ==, nodes should be equal but are unequal"
    sys.exit(1)
if not (foonode1 != root):
    print "Error comparing nodes with !=, nodes should not be equal but are equal"
    sys.exit(1)
if foonode1 != foonode2:
    print "Error comparing nodes with !=, nodes should be equal but are unequal"
    sys.exit(1)

# Next check that the hash function for the objects also works ok
if not (hash(foonode1) == hash(foonode2)):
    print "Error hash values for two equal nodes are different"
    sys.exit(1)
if hash(foonode1) == hash(root):
    print "Error hash values for two unequal nodes are equal"
    sys.exit(1)

# Basic tests successful
doc.freeDoc()

# Memory debug specific
libxml2.cleanupParser()
if libxml2.debugMemory(1) == 0:
    print "OK"
else:
    print "Memory leak %d bytes" % (libxml2.debugMemory(1))
    libxml2.dumpMemory()
apache-2.0
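The invariant compareNodes.py exercises, restated as a plain-Python sketch (not libxml2-specific): wrapper objects that compare equal must also hash equal, which is what lets two distinct proxies for the same underlying C node behave as a single dictionary key.

class NodeProxy(object):
    def __init__(self, ptr):
        self._ptr = ptr  # identity of the underlying C node

    def __eq__(self, other):
        return isinstance(other, NodeProxy) and self._ptr == other._ptr

    def __ne__(self, other):  # must be spelled out explicitly on Python 2
        return not self.__eq__(other)

    def __hash__(self):
        return hash(self._ptr)

a, b = NodeProxy(0x1234), NodeProxy(0x1234)
assert a == b and hash(a) == hash(b)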
andreatulimiero/js_closure_compiler_in_python
closure_compiler.py
2
3294
import os
import json
from functools import reduce

import requests
from termcolor import colored
import colorama

# Default settings
config = {
    'url': 'http://closure-compiler.appspot.com/compile',
    'compilation_level': 'WHITESPACE_ONLY',
    'output_format': 'text',
    'output_info': 'compiled_code',
    'output_file': 'app.min.js'
}
files_names = []


def print_info(info):
    print(colored(info, 'white'))


def print_success(success):
    print(colored(success, 'green'))


def print_warning(warning):
    print(colored(warning, 'yellow'))


def print_error(error):
    print(colored(error, 'red'))


def print_filename(filename):
    print(colored(filename, 'grey'))


def print_stat(stat):
    print(colored(stat, 'cyan'))


def switch_to_current_dir():
    abspath = os.path.abspath(__file__)
    dname = os.path.dirname(abspath)
    os.chdir(dname)


def load_configs():
    print_info('Loading configurations ...')
    try:
        f = open('.closure_compiler_config.json')
        try:
            loaded_config = json.loads(f.read().strip())
        except json.JSONDecodeError:
            print_warning('#Malformed config file, using defaults ...')
            return
        for key in loaded_config:
            if key not in config:
                print_warning('#' + key + ' is not a valid configuration')
            else:
                config[key] = loaded_config[key]
    except FileNotFoundError:
        print_warning('#No .closure_compile_config.json file found. Using default values')


def get_js_code():
    print('\nReading files specified in .to_closure_compile ...')
    joined_js = ''
    try:
        to_closure_compile = open('.to_closure_compile')
        for filename in to_closure_compile.read().split('\n'):
            try:
                f = open(filename.strip())
                joined_js += f.read()
                files_names.append(filename.strip())
                print_filename('|-' + filename)
            except FileNotFoundError:
                print_error('|-No ' + filename + ' file found')
    except FileNotFoundError:
        print_warning('#No .to_closure_compile file found')
    return joined_js


if __name__ == '__main__':
    colorama.init()
    switch_to_current_dir()
    load_configs()
    data = {
        'js_code': get_js_code(),
        'compilation_level': config['compilation_level'],
        'output_format': config['output_format'],
        'output_info': config['output_info']
    }
    print_info('\nRequesting compilation ...')
    if not len(data['js_code']):
        print_error('#No file specified, aborting compilation\n')
        exit(2)
    try:
        r = requests.post(config['url'], data=data)
    except:
        print_error('Impossible to communicate with the server, check your internet connection')
        exit(1)
    with open(config['output_file'], mode='w+') as output_file:
        print(r.text, file=output_file)
    print_success('Compilation saved in ' + config['output_file'] + '\n')
    current_size = os.path.getsize(config['output_file']) // 1024
    previous_size = reduce(lambda x, y: x + y,
                           [os.path.getsize(x) for x in files_names]) // 1024
    print_info('Previous size: ' + str(previous_size) + ' Kb')
    print_info('Current size: ' + str(current_size) + ' Kb')
    saved_percentage = int((previous_size - current_size) / previous_size * 100)
    print_stat('Saved: ~' + str(saved_percentage if saved_percentage > 0 else '100') +
               '% (' + str(previous_size - current_size) + ' Kb) of room')
    exit(0)
mit
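The helper script above is a thin wrapper over one HTTP POST; the request can be issued directly with the endpoint and parameter names hard-coded in its config dict. (The appspot Closure Compiler service has since been retired, so treat this as a sketch of the protocol rather than a working call.)

import requests

r = requests.post(
    'http://closure-compiler.appspot.com/compile',
    data={
        'js_code': "function hello(name){alert('Hello, ' + name);}",
        'compilation_level': 'WHITESPACE_ONLY',
        'output_format': 'text',
        'output_info': 'compiled_code',
    })
print(r.text)  # the minified source, given the settings above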
lnybrave/zzbook
subject/migrations/0001_initial.py
1
10442
# -*- coding: utf-8 -*- # Generated by Django 1.10.7 on 2017-09-07 11:29 from __future__ import unicode_literals from django.db import migrations, models import django.db.models.deletion import mptt.fields import utils.storage class Migration(migrations.Migration): initial = True dependencies = [ ('books', '0001_initial'), ] operations = [ migrations.CreateModel( name='Classification', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=50, unique=True, verbose_name='\u540d\u79f0')), ('order', models.IntegerField(default=0, verbose_name='\u6392\u5e8f')), ('status', models.IntegerField(default=1, verbose_name='\u72b6\u6001')), ('icon', models.ImageField(blank=True, null=True, storage=utils.storage.ImageStorage(), upload_to=b'icons/')), ('create_time', models.DateTimeField(auto_now_add=True, verbose_name='\u521b\u5efa\u65f6\u95f4')), ('update_time', models.DateTimeField(auto_now=True, verbose_name='\u4fee\u6539\u65f6\u95f4')), ('del_flag', models.IntegerField(default=0, verbose_name='\u5220\u9664')), ('lft', models.PositiveIntegerField(db_index=True, editable=False)), ('rght', models.PositiveIntegerField(db_index=True, editable=False)), ('tree_id', models.PositiveIntegerField(db_index=True, editable=False)), ('level', models.PositiveIntegerField(db_index=True, editable=False)), ('parent', mptt.fields.TreeForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='children', to='subject.Classification')), ], options={ 'db_table': 't_classification', 'verbose_name': '\u5206\u7c7b', 'verbose_name_plural': '\u5206\u7c7b', }, ), migrations.CreateModel( name='ClassificationConfig', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('order', models.IntegerField(default=0, verbose_name='\u6392\u5e8f')), ('status', models.IntegerField(choices=[(0, '\u5ba1\u6838'), (1, '\u4e0a\u67b6'), (2, '\u4e0b\u67b6')], default=0, verbose_name='\u72b6\u6001')), ('book', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='books.Book', verbose_name=b'\xe5\x9b\xbe\xe4\xb9\xa6')), ('item', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='subject.Classification', verbose_name='\u5206\u7c7b')), ], options={ 'ordering': ('order',), 'db_table': 't_classification_config', 'verbose_name': '\u5206\u7c7b\u914d\u7f6e', 'verbose_name_plural': '\u5206\u7c7b\u914d\u7f6e', }, ), migrations.CreateModel( name='Column', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=128, verbose_name='\u540d\u79f0')), ('desc', models.CharField(max_length=256, verbose_name='\u63cf\u8ff0')), ('order', models.IntegerField(default=0, verbose_name='\u6392\u5e8f')), ('create_time', models.DateTimeField(auto_now_add=True, verbose_name='\u521b\u5efa\u65f6\u95f4')), ('update_time', models.DateTimeField(auto_now=True, verbose_name='\u4fee\u6539\u65f6\u95f4')), ('del_flag', models.IntegerField(default=0, verbose_name='\u5220\u9664')), ], options={ 'ordering': ('order',), 'db_table': 't_column', 'verbose_name': '\u680f\u76ee', 'verbose_name_plural': '\u680f\u76ee', }, ), migrations.CreateModel( name='ColumnConfig', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('order', models.IntegerField(default=0, verbose_name='\u6392\u5e8f')), ('status', 
models.IntegerField(choices=[(0, '\u5ba1\u6838'), (1, '\u4e0a\u67b6'), (2, '\u4e0b\u67b6')], default=0, verbose_name='\u72b6\u6001')), ('book', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='books.Book', verbose_name=b'\xe5\x9b\xbe\xe4\xb9\xa6')), ('item', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='subject.Column', verbose_name='\u680f\u76ee')), ], options={ 'ordering': ('order',), 'db_table': 't_column_config', 'verbose_name': '\u680f\u76ee\u914d\u7f6e', 'verbose_name_plural': '\u680f\u76ee\u914d\u7f6e', }, ), migrations.CreateModel( name='Ranking', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=50, verbose_name='\u540d\u79f0')), ('order', models.IntegerField(default=0, verbose_name='\u6392\u5e8f')), ('status', models.IntegerField(default=1, verbose_name='\u72b6\u6001')), ('create_time', models.DateTimeField(auto_now_add=True, verbose_name='\u521b\u5efa\u65f6\u95f4')), ('update_time', models.DateTimeField(auto_now=True, verbose_name='\u4fee\u6539\u65f6\u95f4')), ('del_flag', models.IntegerField(default=0, verbose_name='\u5220\u9664')), ('lft', models.PositiveIntegerField(db_index=True, editable=False)), ('rght', models.PositiveIntegerField(db_index=True, editable=False)), ('tree_id', models.PositiveIntegerField(db_index=True, editable=False)), ('level', models.PositiveIntegerField(db_index=True, editable=False)), ('parent', mptt.fields.TreeForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='children', to='subject.Ranking')), ], options={ 'db_table': 't_ranking', 'verbose_name': '\u6392\u884c', 'verbose_name_plural': '\u6392\u884c', }, ), migrations.CreateModel( name='RankingConfig', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('order', models.IntegerField(default=0, verbose_name='\u6392\u5e8f')), ('status', models.IntegerField(choices=[(0, '\u5ba1\u6838'), (1, '\u4e0a\u67b6'), (2, '\u4e0b\u67b6')], default=0, verbose_name='\u72b6\u6001')), ('book', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='books.Book', verbose_name=b'\xe5\x9b\xbe\xe4\xb9\xa6')), ('item', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='subject.Ranking', verbose_name='\u6392\u884c')), ], options={ 'ordering': ('order',), 'db_table': 't_ranking_config', 'verbose_name': '\u6392\u884c\u914d\u7f6e', 'verbose_name_plural': '\u6392\u884c\u914d\u7f6e', }, ), migrations.CreateModel( name='Topic', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=128, verbose_name='\u540d\u79f0')), ('desc', models.CharField(max_length=256, verbose_name='\u63cf\u8ff0')), ('type', models.IntegerField(choices=[(1, '\u5c01\u9762+\u4e66\u540d'), (2, '\u5c01\u9762+\u4e66\u540d+\u7b80\u4ecb')], default=0, verbose_name='\u7c7b\u578b')), ('status', models.IntegerField(choices=[(0, '\u5ba1\u6838'), (1, '\u4e0a\u67b6'), (2, '\u4e0b\u67b6')], default=0, verbose_name='\u72b6\u6001')), ('create_time', models.DateTimeField(auto_now_add=True, verbose_name='\u521b\u5efa\u65f6\u95f4')), ('update_time', models.DateTimeField(auto_now=True, verbose_name='\u4fee\u6539\u65f6\u95f4')), ('del_flag', models.IntegerField(default=0, verbose_name='\u5220\u9664')), ], options={ 'ordering': ('-create_time',), 'db_table': 't_topic', 'verbose_name': 
'\u4e13\u9898', 'verbose_name_plural': '\u4e13\u9898', }, ), migrations.CreateModel( name='TopicConfig', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('order', models.IntegerField(default=0, verbose_name='\u6392\u5e8f')), ('status', models.IntegerField(choices=[(0, '\u5ba1\u6838'), (1, '\u4e0a\u67b6'), (2, '\u4e0b\u67b6')], default=0, verbose_name='\u72b6\u6001')), ('book', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='books.Book', verbose_name=b'\xe5\x9b\xbe\xe4\xb9\xa6')), ('item', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='subject.Topic', verbose_name='\u4e13\u9898')), ], options={ 'ordering': ('order',), 'db_table': 't_topic_config', 'verbose_name': '\u4e13\u9898\u914d\u7f6e', 'verbose_name_plural': '\u4e13\u9898\u914d\u7f6e', }, ), migrations.AddField( model_name='columnconfig', name='topic', field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='subject.Topic', verbose_name=b'\xe4\xb8\x93\xe9\xa2\x98'), ), migrations.AlterUniqueTogether( name='topicconfig', unique_together=set([('item', 'book')]), ), ]
apache-2.0
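Migration 0001_initial above declares a dependency on books.0001_initial, so Django orders the two automatically. A hedged sketch of applying it programmatically (the settings module path is hypothetical; `manage.py migrate subject` is the usual CLI route):

import os

import django
from django.core.management import call_command

os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'zzbook.settings')  # hypothetical path
django.setup()
call_command('migrate', 'subject', '0001_initial')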
EvenStrangest/tensorflow
tensorflow/python/platform/default/flags_test.py
20
3131
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for our flags implementation."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import sys

from tensorflow.python.platform.default import _googletest as googletest
from tensorflow.python.platform.default import _flags as flags

flags.DEFINE_string("string_foo", "default_val", "HelpString")
flags.DEFINE_integer("int_foo", 42, "HelpString")
flags.DEFINE_float("float_foo", 42.0, "HelpString")
flags.DEFINE_boolean("bool_foo", True, "HelpString")
flags.DEFINE_boolean("bool_negation", True, "HelpString")
flags.DEFINE_boolean("bool_a", False, "HelpString")
flags.DEFINE_boolean("bool_c", False, "HelpString")
flags.DEFINE_boolean("bool_d", True, "HelpString")
flags.DEFINE_bool("bool_e", True, "HelpString")

FLAGS = flags.FLAGS


class FlagsTest(googletest.TestCase):

  def testString(self):
    res = FLAGS.string_foo
    self.assertEqual(res, "default_val")
    FLAGS.string_foo = "bar"
    self.assertEqual("bar", FLAGS.string_foo)

  def testBool(self):
    res = FLAGS.bool_foo
    self.assertTrue(res)
    FLAGS.bool_foo = False
    self.assertFalse(FLAGS.bool_foo)

  def testBoolCommandLines(self):
    # Specified on command line with no args, sets to True,
    # even if default is False.
    self.assertEqual(True, FLAGS.bool_a)

    # --no before the flag forces it to False, even if the
    # default is True
    self.assertEqual(False, FLAGS.bool_negation)

    # --bool_flag=True sets to True
    self.assertEqual(True, FLAGS.bool_c)

    # --bool_flag=False sets to False
    self.assertEqual(False, FLAGS.bool_d)

    # --bool_flag=gibberish sets to False
    self.assertEqual(False, FLAGS.bool_e)

  def testInt(self):
    res = FLAGS.int_foo
    self.assertEquals(res, 42)
    FLAGS.int_foo = -1
    self.assertEqual(-1, FLAGS.int_foo)

  def testFloat(self):
    res = FLAGS.float_foo
    self.assertEquals(42.0, res)
    FLAGS.float_foo = -1.0
    self.assertEqual(-1.0, FLAGS.float_foo)


if __name__ == "__main__":
  # Test command lines
  sys.argv.extend(["--bool_a", "--nobool_negation", "--bool_c=True",
                   "--bool_d=False", "--bool_e=gibberish", "--unknown_flag",
                   "and_argument"])

  # googletest.main() tries to interpret the above flags, so use the
  # direct functions instead.
  runner = googletest.TextTestRunner()
  itersuite = googletest.TestLoader().loadTestsFromTestCase(FlagsTest)
  runner.run(itersuite)
apache-2.0
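The test above drives parsing through sys.argv, which works because this old flags wrapper parses the command line on first attribute access to FLAGS (hence the test extends sys.argv before touching any flag). A minimal sketch of the same DEFINE_*/FLAGS pattern as a user script would rely on it, against the same internal module path:

from tensorflow.python.platform.default import _flags as flags

flags.DEFINE_integer("batch_size", 32, "Minibatch size")
FLAGS = flags.FLAGS

# `--batch_size=64` on the command line is already reflected here,
# since access triggers parsing.
print("batch_size=%d" % FLAGS.batch_size)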
jhsenjaliya/incubator-airflow
airflow/www/forms.py
9
1469
# -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

from datetime import datetime

from flask_admin.form import DateTimePickerWidget
from wtforms import DateTimeField, SelectField
from flask_wtf import Form


class DateTimeForm(Form):
    # Date filter form needed for gantt and graph view
    execution_date = DateTimeField(
        "Execution date", widget=DateTimePickerWidget())


class DateTimeWithNumRunsForm(Form):
    # Date time and number of runs form for tree view, task duration
    # and landing times
    base_date = DateTimeField(
        "Anchor date", widget=DateTimePickerWidget(),
        # Pass the callable, not utcnow(), so the default is evaluated per
        # request instead of once at import time.
        default=datetime.utcnow)
    num_runs = SelectField("Number of runs", default=25, choices=(
        (5, "5"),
        (25, "25"),
        (50, "50"),
        (100, "100"),
        (365, "365"),
    ))
apache-2.0
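These forms are populated by the webserver views rather than submitted as regular POSTs. A hedged sketch of that pattern (the handler name is illustrative; wtforms' `data=` constructor argument, which takes Python objects such as a datetime, is real):

from datetime import datetime

def graph_view(execution_date=None):  # illustrative handler, not Airflow's actual view
    # Pre-populate the date filter shown next to the gantt/graph views.
    dttm = execution_date or datetime.utcnow()
    form = DateTimeForm(data={'execution_date': dttm})
    return form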
miyataken999/weblate
weblate/trans/views/widgets.py
2
4560
# -*- coding: utf-8 -*-
#
# Copyright © 2012 - 2015 Michal Čihař <[email protected]>
#
# This file is part of Weblate <https://weblate.org/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#

from django.http import HttpResponse, Http404
from django.shortcuts import render, redirect
from django.core.urlresolvers import reverse
from django.views.decorators.cache import cache_page

from weblate.trans.site import get_site_url
from weblate.lang.models import Language
from weblate.trans.forms import EnageLanguageForm
from weblate.trans.widgets import WIDGETS
from weblate.trans.views.helper import get_project, try_set_language


def widgets_root(request):
    return render(
        request,
        'widgets-root.html',
    )


def widgets_sorter(widget):
    """
    Provides better ordering of widgets.
    """
    return WIDGETS[widget].order


def widgets(request, project):
    obj = get_project(request, project)

    # Parse possible language selection
    form = EnageLanguageForm(obj, request.GET)
    lang = None
    if form.is_valid() and form.cleaned_data['lang'] != '':
        lang = Language.objects.get(code=form.cleaned_data['lang'])

    if lang is None:
        engage_base = reverse('engage', kwargs={'project': obj.slug})
    else:
        engage_base = reverse(
            'engage-lang',
            kwargs={'project': obj.slug, 'lang': lang.code}
        )
    engage_url = get_site_url(engage_base)
    engage_url_track = '%s?utm_source=widget' % engage_url
    widget_base_url = get_site_url(
        reverse('widgets', kwargs={'project': obj.slug})
    )
    widget_list = []
    for widget_name in sorted(WIDGETS, key=widgets_sorter):
        widget_class = WIDGETS[widget_name]
        color_list = []
        for color in widget_class.colors:
            if lang is None:
                color_url = reverse(
                    'widget-image',
                    kwargs={
                        'project': obj.slug,
                        'widget': widget_name,
                        'color': color,
                        'extension': widget_class.extension,
                    }
                )
            else:
                color_url = reverse(
                    'widget-image-lang',
                    kwargs={
                        'project': obj.slug,
                        'widget': widget_name,
                        'color': color,
                        'lang': lang.code,
                        'extension': widget_class.extension,
                    }
                )
            color_list.append({
                'name': color,
                'url': get_site_url(color_url),
            })
        widget_list.append({
            'name': widget_name,
            'colors': color_list,
        })

    return render(
        request,
        'widgets.html',
        {
            'engage_url': engage_url,
            'engage_url_track': engage_url_track,
            'widget_list': widget_list,
            'widget_base_url': widget_base_url,
            'object': obj,
            'image_src': widget_list[0]['colors'][0]['url'],
            'form': form,
        }
    )


@cache_page(3600)
def render_widget(request, project, widget='287x66', color=None, lang=None,
                  extension='png'):
    # We intentionally skip ACL here to allow widget sharing
    obj = get_project(request, project, skip_acl=True)

    # Handle language parameter
    if lang is not None:
        lang = try_set_language(lang)

    # Get widget class
    try:
        widget_class = WIDGETS[widget]
    except KeyError:
        raise Http404()

    # Construct object
    widget = widget_class(obj, color, lang)

    # Redirect widget
    if hasattr(widget, 'redirect'):
        return redirect(widget.redirect())

    # Render widget
    widget.render()

    # Get image data
    data = widget.get_image()

    return HttpResponse(
        content_type=widget.content_type,
        content=data
    )
gpl-3.0
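Because render_widget is reached through the same 'widget-image' URL name that widgets() reverses above, it is easy to smoke-test with Django's test client. A hedged sketch (the project slug and color are illustrative; valid colors come from each widget class):

from django.core.urlresolvers import reverse
from django.test import Client

url = reverse('widget-image', kwargs={
    'project': 'hello',   # illustrative slug
    'widget': '287x66',   # the view's default widget name
    'color': 'grey',      # illustrative; must appear in widget_class.colors
    'extension': 'png',
})
resp = Client().get(url)
print(resp.status_code, resp['Content-Type'])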
coxley/ansible
lib/ansible/module_utils/rax.py
280
11974
# This code is part of Ansible, but is an independent component. # This particular file snippet, and this file snippet only, is BSD licensed. # Modules you write using this snippet, which is embedded dynamically by # Ansible still belong to the author of the module, and may assign their own # license to the complete work. # # Copyright (c), Michael DeHaan <[email protected]>, 2012-2013 # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. from uuid import UUID FINAL_STATUSES = ('ACTIVE', 'ERROR') VOLUME_STATUS = ('available', 'attaching', 'creating', 'deleting', 'in-use', 'error', 'error_deleting') CLB_ALGORITHMS = ['RANDOM', 'LEAST_CONNECTIONS', 'ROUND_ROBIN', 'WEIGHTED_LEAST_CONNECTIONS', 'WEIGHTED_ROUND_ROBIN'] CLB_PROTOCOLS = ['DNS_TCP', 'DNS_UDP', 'FTP', 'HTTP', 'HTTPS', 'IMAPS', 'IMAPv4', 'LDAP', 'LDAPS', 'MYSQL', 'POP3', 'POP3S', 'SMTP', 'TCP', 'TCP_CLIENT_FIRST', 'UDP', 'UDP_STREAM', 'SFTP'] NON_CALLABLES = (basestring, bool, dict, int, list, type(None)) PUBLIC_NET_ID = "00000000-0000-0000-0000-000000000000" SERVICE_NET_ID = "11111111-1111-1111-1111-111111111111" def rax_slugify(value): """Prepend a key with rax_ and normalize the key name""" return 'rax_%s' % (re.sub('[^\w-]', '_', value).lower().lstrip('_')) def rax_clb_node_to_dict(obj): """Function to convert a CLB Node object to a dict""" if not obj: return {} node = obj.to_dict() node['id'] = obj.id node['weight'] = obj.weight return node def rax_to_dict(obj, obj_type='standard'): """Generic function to convert a pyrax object to a dict obj_type values: standard clb server """ instance = {} for key in dir(obj): value = getattr(obj, key) if obj_type == 'clb' and key == 'nodes': instance[key] = [] for node in value: instance[key].append(rax_clb_node_to_dict(node)) elif (isinstance(value, list) and len(value) > 0 and not isinstance(value[0], NON_CALLABLES)): instance[key] = [] for item in value: instance[key].append(rax_to_dict(item)) elif (isinstance(value, NON_CALLABLES) and not key.startswith('_')): if obj_type == 'server': if key == 'image': if not value: instance['rax_boot_source'] = 'volume' else: instance['rax_boot_source'] = 'local' key = rax_slugify(key) instance[key] = value if obj_type == 'server': for attr in ['id', 'accessIPv4', 'name', 'status']: instance[attr] = instance.get(rax_slugify(attr)) 
return instance def rax_find_bootable_volume(module, rax_module, server, exit=True): """Find a servers bootable volume""" cs = rax_module.cloudservers cbs = rax_module.cloud_blockstorage server_id = rax_module.utils.get_id(server) volumes = cs.volumes.get_server_volumes(server_id) bootable_volumes = [] for volume in volumes: vol = cbs.get(volume) if module.boolean(vol.bootable): bootable_volumes.append(vol) if not bootable_volumes: if exit: module.fail_json(msg='No bootable volumes could be found for ' 'server %s' % server_id) else: return False elif len(bootable_volumes) > 1: if exit: module.fail_json(msg='Multiple bootable volumes found for server ' '%s' % server_id) else: return False return bootable_volumes[0] def rax_find_image(module, rax_module, image, exit=True): """Find a server image by ID or Name""" cs = rax_module.cloudservers try: UUID(image) except ValueError: try: image = cs.images.find(human_id=image) except(cs.exceptions.NotFound, cs.exceptions.NoUniqueMatch): try: image = cs.images.find(name=image) except (cs.exceptions.NotFound, cs.exceptions.NoUniqueMatch): if exit: module.fail_json(msg='No matching image found (%s)' % image) else: return False return rax_module.utils.get_id(image) def rax_find_volume(module, rax_module, name): """Find a Block storage volume by ID or name""" cbs = rax_module.cloud_blockstorage try: UUID(name) volume = cbs.get(name) except ValueError: try: volume = cbs.find(name=name) except rax_module.exc.NotFound: volume = None except Exception, e: module.fail_json(msg='%s' % e) return volume def rax_find_network(module, rax_module, network): """Find a cloud network by ID or name""" cnw = rax_module.cloud_networks try: UUID(network) except ValueError: if network.lower() == 'public': return cnw.get_server_networks(PUBLIC_NET_ID) elif network.lower() == 'private': return cnw.get_server_networks(SERVICE_NET_ID) else: try: network_obj = cnw.find_network_by_label(network) except (rax_module.exceptions.NetworkNotFound, rax_module.exceptions.NetworkLabelNotUnique): module.fail_json(msg='No matching network found (%s)' % network) else: return cnw.get_server_networks(network_obj) else: return cnw.get_server_networks(network) def rax_find_server(module, rax_module, server): """Find a Cloud Server by ID or name""" cs = rax_module.cloudservers try: UUID(server) server = cs.servers.get(server) except ValueError: servers = cs.servers.list(search_opts=dict(name='^%s$' % server)) if not servers: module.fail_json(msg='No Server was matched by name, ' 'try using the Server ID instead') if len(servers) > 1: module.fail_json(msg='Multiple servers matched by name, ' 'try using the Server ID instead') # We made it this far, grab the first and hopefully only server # in the list server = servers[0] return server def rax_find_loadbalancer(module, rax_module, loadbalancer): """Find a Cloud Load Balancer by ID or name""" clb = rax_module.cloud_loadbalancers try: found = clb.get(loadbalancer) except: found = [] for lb in clb.list(): if loadbalancer == lb.name: found.append(lb) if not found: module.fail_json(msg='No loadbalancer was matched') if len(found) > 1: module.fail_json(msg='Multiple loadbalancers matched') # We made it this far, grab the first and hopefully only item # in the list found = found[0] return found def rax_argument_spec(): """Return standard base dictionary used for the argument_spec argument in AnsibleModule """ return dict( api_key=dict(type='str', aliases=['password'], no_log=True), auth_endpoint=dict(type='str'), credentials=dict(type='str', 
aliases=['creds_file']), env=dict(type='str'), identity_type=dict(type='str', default='rackspace'), region=dict(type='str'), tenant_id=dict(type='str'), tenant_name=dict(type='str'), username=dict(type='str'), verify_ssl=dict(choices=BOOLEANS, type='bool'), ) def rax_required_together(): """Return the default list used for the required_together argument to AnsibleModule""" return [['api_key', 'username']] def setup_rax_module(module, rax_module, region_required=True): """Set up pyrax in a standard way for all modules""" rax_module.USER_AGENT = 'ansible/%s %s' % (ANSIBLE_VERSION, rax_module.USER_AGENT) api_key = module.params.get('api_key') auth_endpoint = module.params.get('auth_endpoint') credentials = module.params.get('credentials') env = module.params.get('env') identity_type = module.params.get('identity_type') region = module.params.get('region') tenant_id = module.params.get('tenant_id') tenant_name = module.params.get('tenant_name') username = module.params.get('username') verify_ssl = module.params.get('verify_ssl') if env is not None: rax_module.set_environment(env) rax_module.set_setting('identity_type', identity_type) if verify_ssl is not None: rax_module.set_setting('verify_ssl', verify_ssl) if auth_endpoint is not None: rax_module.set_setting('auth_endpoint', auth_endpoint) if tenant_id is not None: rax_module.set_setting('tenant_id', tenant_id) if tenant_name is not None: rax_module.set_setting('tenant_name', tenant_name) try: username = username or os.environ.get('RAX_USERNAME') if not username: username = rax_module.get_setting('keyring_username') if username: api_key = 'USE_KEYRING' if not api_key: api_key = os.environ.get('RAX_API_KEY') credentials = (credentials or os.environ.get('RAX_CREDENTIALS') or os.environ.get('RAX_CREDS_FILE')) region = (region or os.environ.get('RAX_REGION') or rax_module.get_setting('region')) except KeyError, e: module.fail_json(msg='Unable to load %s' % e.message) try: if api_key and username: if api_key == 'USE_KEYRING': rax_module.keyring_auth(username, region=region) else: rax_module.set_credentials(username, api_key=api_key, region=region) elif credentials: credentials = os.path.expanduser(credentials) rax_module.set_credential_file(credentials, region=region) else: raise Exception('No credentials supplied!') except Exception, e: if e.message: msg = str(e.message) else: msg = repr(e) module.fail_json(msg=msg) if region_required and region not in rax_module.regions: module.fail_json(msg='%s is not a valid region, must be one of: %s' % (region, ','.join(rax_module.regions))) return rax_module
gpl-3.0
youprofit/zato
code/zato-web-admin/src/zato/admin/web/views/cluster.py
6
11873
# -*- coding: utf-8 -*- """ Copyright (C) 2010 Dariusz Suchojad <dsuch at zato.io> Licensed under LGPLv3, see LICENSE.txt for terms and conditions. """ from __future__ import absolute_import, division, print_function, unicode_literals # stdlib import logging from string import whitespace from traceback import format_exc # anyjson from anyjson import dumps # Bunch from bunch import Bunch # Django from django.http import HttpResponse, HttpResponseServerError from django.template import loader from django.template.response import TemplateResponse # pytz from pytz import UTC # Zato from zato.admin.web import from_utc_to_user from zato.admin.web.forms.cluster import DeleteClusterForm, EditClusterForm, EditServerForm from zato.admin.web.views import Delete as _Delete, get_lb_client, method_allowed, set_servers_state from zato.admin.settings import DATABASE_ENGINE, DATABASE_HOST, DATABASE_NAME, DATABASE_PORT, \ DATABASE_USER, sqlalchemy_django_engine from zato.common import SERVER_UP_STATUS from zato.common.odb.model import Cluster, Server logger = logging.getLogger(__name__) def _edit_create_response(item, verb): if item.lb_config: has_lb_config = True addresses = loader.render_to_string('zato/cluster/addresses.html', {'item':item}) else: has_lb_config = False addresses = '' return_data = { 'id': item.id, 'message': 'Successfully {0} the cluster [{1}]'.format(verb, item.name), 'addresses': addresses, 'has_lb_config': has_lb_config } return HttpResponse(dumps(return_data), mimetype='application/javascript') def _create_edit(req, verb, item, form_class, prefix=''): join = '-' if prefix else '' try: for s in whitespace: if s in req.POST[prefix + join + 'name']: return HttpResponseServerError('Cluster name must not contain whitespace.') description = req.POST[prefix + join + 'description'].strip() description = description if description else None item.name = req.POST[prefix + join + 'name'].strip() item.description = description item.lb_host = req.POST[prefix + join + 'lb_host'].strip() item.lb_port = req.POST[prefix + join + 'lb_port'].strip() item.lb_agent_port = req.POST[prefix + join + 'lb_agent_port'].strip() try: req.zato.odb.add(item) req.zato.odb.commit() try: item.lb_config = get_lb_client(item).get_config() except Exception, e: item.lb_config = None msg = "Exception caught while fetching the load balancer's config, e:[{0}]".format(format_exc(e)) logger.error(msg) return _edit_create_response(item, verb) except Exception, e: msg = 'Exception caught, e:[{0}]'.format(format_exc(e)) logger.error(msg) return HttpResponseServerError(msg) except Exception, e: req.zato.odb.rollback() return HttpResponseServerError(str(format_exc(e))) def _get_server_data(client, server_name): """ Gets the server's state as seen by the load balancer. """ lb_server_data = client.get_server_data_dict(server_name) if lb_server_data: in_lb = True state = lb_server_data[server_name]['state'] lb_address = lb_server_data[server_name]['address'] else: in_lb = False state = '(unknown)' lb_address = '(unknown)' return Bunch({ 'in_lb': in_lb, 'state': state, 'lb_address': lb_address, }) def _common_edit_message(client, success_msg, id, name, host, up_status, up_mod_date, cluster_id, user_profile, fetch_lb_data=True): """ Returns a common JSON message for both the actual 'edit' and 'add/remove to/from LB' actions. 
""" return_data = { 'id': id, 'name': name, 'host': host if host else '(unknown)', 'up_status': up_status if up_status else '(unknown)', 'up_mod_date': from_utc_to_user(up_mod_date+'+00:00', user_profile) if up_mod_date else '(unknown)', 'cluster_id': cluster_id if cluster_id else '', 'lb_state': '(unknown)', 'lb_address': '(unknown)', 'in_lb': '(unknown)', 'message': success_msg.format(name) } if fetch_lb_data: lb_server_data = _get_server_data(client, name) return_data.update({ 'lb_state': lb_server_data.state, 'lb_address': lb_server_data.lb_address, 'in_lb': lb_server_data.in_lb, }) return HttpResponse(dumps(return_data), mimetype='application/javascript') @method_allowed('GET') def index(req): initial = {} initial['odb_type'] = sqlalchemy_django_engine[DATABASE_ENGINE.replace('django.db.backends.', '')] initial['odb_host'] = DATABASE_HOST initial['odb_port'] = DATABASE_PORT initial['odb_user'] = DATABASE_USER initial['odb_db_name'] = DATABASE_NAME delete_form = DeleteClusterForm(prefix='delete') items = req.zato.odb.query(Cluster).order_by('name').all() for item in items: client = get_lb_client(item) try: lb_config = client.get_config() item.lb_config = lb_config # Assign the flags indicating whether servers are DOWN or in the MAINT mode. set_servers_state(item, client) except Exception, e: msg = 'Could not invoke agent, client:[{client!r}], e:[{e}]'.format(client=client, e=format_exc(e)) logger.error(msg) item.lb_config = None return_data = {'delete_form':delete_form, 'edit_form':EditClusterForm(prefix='edit'), 'items':items} return TemplateResponse(req, 'zato/cluster/index.html', return_data) @method_allowed('POST') def edit(req): return _create_edit(req, 'updated', req.zato.odb.query(Cluster).filter_by(id=req.POST['id']).one(), EditClusterForm, 'edit') def _get(req, **filter): cluster = req.zato.odb.query(Cluster).filter_by(**filter).one() return HttpResponse(cluster.to_json(), mimetype='application/javascript') @method_allowed('GET') def get_by_id(req, cluster_id): return _get(req, id=cluster_id) @method_allowed('GET') def get_by_name(req, cluster_name): return _get(req, name=cluster_name) @method_allowed('GET') def get_servers_state(req, cluster_id): cluster = req.zato.odb.query(Cluster).filter_by(id=cluster_id).one() client = get_lb_client(cluster) # Assign the flags indicating whether servers are DOWN or in the MAINT mode. try: set_servers_state(cluster, client) except Exception, e: msg = "Failed to invoke the load-balancer's agent and set the state of servers, e:[{e}]".format(e=format_exc(e)) logger.error(msg) return HttpResponseServerError(msg) return TemplateResponse(req, 'zato/cluster/servers_state.html', {'cluster':cluster}) @method_allowed('POST') def delete(req, id): """ Deletes a cluster *permanently*. """ try: cluster = req.zato.odb.query(Cluster).filter_by(id=id).one() req.zato.odb.delete(cluster) req.zato.odb.commit() except Exception, e: msg = 'Could not delete the cluster, e:[{e}]'.format(e=format_exc(e)) logger.error(msg) return HttpResponseServerError(msg) else: return HttpResponse() @method_allowed('GET') def servers(req): """ A view for server management. 
""" items = req.zato.odb.query(Server).order_by('name').all() try: client = get_lb_client(req.zato.get('cluster')) server_data_dict = client.get_server_data_dict() bck_http_plain = client.get_config()['backend']['bck_http_plain'] lb_client_invoked = True except Exception, e: logger.error(format_exc(e)) lb_client_invoked = False if lb_client_invoked: def _update_item(server_name, lb_address, lb_state): for item in items: if item.name == server_name: item.in_lb = True item.lb_address = lb_address item.lb_state = lb_state if item.up_mod_date: item.up_mod_date_user = from_utc_to_user(item.up_mod_date.replace(tzinfo=UTC).isoformat(), req.zato.user_profile) if item.up_status == SERVER_UP_STATUS.RUNNING: item.may_be_deleted = False else: item.may_be_deleted = True for server_name in bck_http_plain: lb_address = '{}:{}'.format(bck_http_plain[server_name]['address'], bck_http_plain[server_name]['port']) _update_item(server_name, lb_address, server_data_dict[server_name]['state']) return_data = { 'items':items, 'choose_cluster_form':req.zato.choose_cluster_form, 'zato_clusters':req.zato.clusters, 'cluster':req.zato.get('cluster'), 'edit_form':EditServerForm(prefix='edit') } return TemplateResponse(req, 'zato/cluster/servers.html', return_data) @method_allowed('POST') def servers_edit(req): """ Updates a server in both ODB and the load balancer. """ try: client = get_lb_client(req.zato.cluster) server_id = req.POST['id'] server = req.zato.odb.query(Server).filter_by(id=server_id).one() if client.get_server_data_dict(server.name): fetch_lb_data = True client.rename_server(req.POST['edit-old_name'], req.POST['edit-name']) else: fetch_lb_data = False response = req.zato.client.invoke('zato.cluster.server.edit', {'id':server_id, 'name':req.POST['edit-name']}) return _common_edit_message(client, 'Server [{}] updated', response.data.id, response.data.name, response.data.host, response.data.up_status, response.data.up_mod_date, req.zato.cluster_id, req.zato.user_profile, fetch_lb_data) except Exception, e: return HttpResponseServerError(format_exc(e)) @method_allowed('POST') def servers_add_remove_lb(req, action, server_id): """ Adds or removes a server from the load balancer's configuration. """ server = req.zato.odb.query(Server).filter_by(id=server_id).one() up_mod_date = server.up_mod_date.isoformat() if server.up_mod_date else '' client = get_lb_client(req.zato.cluster) client.add_remove_server(action, server.name) if action == 'add': success_msg = 'added to' fetch_lb_data = True else: success_msg = 'removed from' fetch_lb_data = False return _common_edit_message(client, 'Server [{{}}] {} the load balancer'.format(success_msg), server.id, server.name, server.host, server.up_status, up_mod_date, server.cluster_id, req.zato.user_profile, fetch_lb_data) class ServerDelete(_Delete): url_name = 'cluster-servers-delete' error_message = 'Could not delete the server' service_name = 'zato.server.delete' def __call__(self, req, *args, **kwargs): response = req.zato.client.invoke('zato.server.get-by-id', {'id':req.zato.id}) server = req.zato.odb.query(Server).filter_by(id=req.zato.id).one() client = get_lb_client(req.zato.cluster) # Checks whether the server is known by LB if client.get_server_data_dict(server.name): client.add_remove_server('remove', response.data.name) return super(ServerDelete, self).__call__(req, *args, **kwargs)
gpl-3.0
berezovskyi/nikola
nikola/data/themes/base/messages/messages_pt.py
8
1751
# -*- encoding:utf-8 -*-
from __future__ import unicode_literals

MESSAGES = {
    "%d min remaining to read": "%d minutos restantes para leitura",
    "(active)": "(ativo)",
    "Also available in:": "Também disponível em:",
    "Archive": "Arquivo",
    "Authors": "",
    "Categories": "Categorias",
    "Comments": "Comentários",
    "LANGUAGE": "Português",
    "Languages:": "Idiomas:",
    "More posts about %s": "Mais textos publicados sobre %s",
    "Newer posts": "Textos publicados mais recentes",
    "Next post": "Próximo texto publicado",
    "No posts found.": "Nenhum texto publicado foi encontrado.",
    "Nothing found.": "Nada encontrado.",
    "Older posts": "Textos publicados mais antigos",
    "Original site": "Sítio original",
    "Posted:": "Publicado:",
    "Posts about %s": "Textos publicados sobre %s",
    "Posts by %s": "",
    "Posts for year %s": "Textos publicados do ano %s",
    "Posts for {month} {day}, {year}": "Textos publicados de {day} {month} {year}",
    "Posts for {month} {year}": "Textos publicados de {month} {year}",
    "Previous post": "Texto publicado anterior",
    "Publication date": "Data de publicação",
    "RSS feed": "Feed RSS",
    "Read in English": "Ler em português",
    "Read more": "Ler mais",
    "Skip to main content": "Saltar para o conteúdo principal",
    "Source": "Código",
    "Subcategories:": "Sub-Categorias:",
    "Tags and Categories": "Etiquetas e Categorias",
    "Tags": "Etiquetas",
    "Uncategorized": "",
    "Updates": "",
    "Write your page here.": "Escreva a sua página aqui.",
    "Write your post here.": "Escreva o seu texto para publicar aqui.",
    "old posts, page %d": "Textos publicados antigos, página %d",
    "page %d": "página %d",
}
mit
simonwydooghe/ansible
lib/ansible/plugins/doc_fragments/dimensiondata.py
44
1562
# -*- coding: utf-8 -*- # # Copyright: (c) 2016, Dimension Data # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) # Authors: # - Adam Friedman <[email protected]> class ModuleDocFragment(object): # Dimension Data doc fragment DOCUMENTATION = r''' options: region: description: - The target region. choices: - Regions are defined in Apache libcloud project [libcloud/common/dimensiondata.py] - They are also listed in U(https://libcloud.readthedocs.io/en/latest/compute/drivers/dimensiondata.html) - Note that the default value "na" stands for "North America". - The module prepends 'dd-' to the region choice. type: str default: na mcp_user: description: - The username used to authenticate to the CloudControl API. - If not specified, will fall back to C(MCP_USER) from environment variable or C(~/.dimensiondata). type: str mcp_password: description: - The password used to authenticate to the CloudControl API. - If not specified, will fall back to C(MCP_PASSWORD) from environment variable or C(~/.dimensiondata). - Required if I(mcp_user) is specified. type: str location: description: - The target datacenter. type: str required: true validate_certs: description: - If C(false), SSL certificates will not be validated. - This should only be used on private instances of the CloudControl API that use self-signed certificates. type: bool default: yes '''
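# How a module pulls the shared options in: 'extends_documentation_fragment'
# is the real mechanism; the module shown here is hypothetical.
DOCUMENTATION = r'''
---
module: dimensiondata_example
short_description: Illustrative module reusing the Dimension Data options
extends_documentation_fragment:
- dimensiondata
options:
  name:
    description:
      - A module-specific option layered on top of the shared fragment.
    type: str
    required: true
'''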
gpl-3.0
paran0ids0ul/infernal-twin
build/pip/build/lib.linux-i686-2.7/pip/utils/deprecation.py
271
2152
""" A module that implments tooling to enable easy warnings about deprecations. """ from __future__ import absolute_import import logging import warnings class PipDeprecationWarning(Warning): pass class RemovedInPip8Warning(PipDeprecationWarning, PendingDeprecationWarning): pass class RemovedInPip9Warning(PipDeprecationWarning, PendingDeprecationWarning): pass DEPRECATIONS = [RemovedInPip8Warning, RemovedInPip9Warning] # Warnings <-> Logging Integration _warnings_showwarning = None def _showwarning(message, category, filename, lineno, file=None, line=None): if file is not None: if _warnings_showwarning is not None: _warnings_showwarning( message, category, filename, lineno, file, line, ) else: if issubclass(category, PipDeprecationWarning): # We use a specially named logger which will handle all of the # deprecation messages for pip. logger = logging.getLogger("pip.deprecations") # This is purposely using the % formatter here instead of letting # the logging module handle the interpolation. This is because we # want it to appear as if someone typed this entire message out. log_message = "DEPRECATION: %s" % message # Things that are DeprecationWarnings will be removed in the very # next version of pip. We want these to be more obvious so we # use the ERROR logging level while the PendingDeprecationWarnings # are still have at least 2 versions to go until they are removed # so they can just be warnings. if issubclass(category, DeprecationWarning): logger.error(log_message) else: logger.warning(log_message) else: _warnings_showwarning( message, category, filename, lineno, file, line, ) def install_warning_logger(): global _warnings_showwarning if _warnings_showwarning is None: _warnings_showwarning = warnings.showwarning warnings.showwarning = _showwarning
gpl-3.0
Parisson/django-paypal
paypal/standard/widgets.py
30
1124
#!/usr/bin/env python # -*- coding: utf-8 -*- from django import forms try: from django.forms.utils import flatatt # Django 1.7 and later except ImportError: from django.forms.util import flatatt # earlier from django.utils.safestring import mark_safe from django.utils.encoding import force_text class ValueHiddenInput(forms.HiddenInput): """ Widget that renders only if it has a value. Used to remove unused fields from PayPal buttons. """ def render(self, name, value, attrs=None): if value is None: return u'' else: return super(ValueHiddenInput, self).render(name, value, attrs) class ReservedValueHiddenInput(ValueHiddenInput): """ Overrides the default name attribute of the form. Used for the PayPal `return` field. """ def render(self, name, value, attrs=None): if value is None: value = '' final_attrs = self.build_attrs(attrs, type=self.input_type) if value != '': final_attrs['value'] = force_text(value) return mark_safe(u'<input%s />' % flatatt(final_attrs))
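# A hypothetical form showing the widget's one behaviour: fields with no
# value render as an empty string rather than an empty hidden input, so
# unused fields simply vanish from the PayPal button.
from django import forms

from paypal.standard.widgets import ValueHiddenInput


class PayPalButtonForm(forms.Form):
    invoice = forms.CharField(required=False, widget=ValueHiddenInput())
    custom = forms.CharField(required=False, widget=ValueHiddenInput())


form = PayPalButtonForm(initial={'invoice': '1234'})
str(form['invoice'])  # renders a hidden input carrying the value '1234'
str(form['custom'])   # renders u'' because the field has no value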
mit
phlax/translate
translate/storage/placeables/base.py
4
2877
# -*- coding: utf-8 -*- # # Copyright 2008-2009 Zuza Software Foundation # # This file is part of the Translate Toolkit. # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, see <http://www.gnu.org/licenses/>. """ Contains base placeable classes with names based on XLIFF placeables. See the XLIFF standard for more information about what the names mean. """ from translate.storage.placeables.interfaces import * from translate.storage.placeables.strelem import StringElem __all__ = ( 'Bpt', 'Ept', 'Ph', 'It', 'G', 'Bx', 'Ex', 'X', 'Sub', 'to_base_placeables' ) # Basic placeable types. class Bpt(MaskingPlaceable, PairedDelimiter): has_content = True class Ept(MaskingPlaceable, PairedDelimiter): has_content = True class Ph(MaskingPlaceable): has_content = True istranslatable = False class It(MaskingPlaceable, Delimiter): has_content = True class G(ReplacementPlaceable): has_content = True class Bx(ReplacementPlaceable, PairedDelimiter): has_content = False istranslatable = False def __init__(self, id=None, xid=None, **kwargs): # kwargs is ignored ReplacementPlaceable.__init__(self, id=id, xid=xid, **kwargs) class Ex(ReplacementPlaceable, PairedDelimiter): has_content = False istranslatable = False def __init__(self, id=None, xid=None, **kwargs): # kwargs is ignored ReplacementPlaceable.__init__(self, id=id, xid=xid, **kwargs) class X(ReplacementPlaceable, Delimiter): has_content = False iseditable = False isfragile = True istranslatable = False def __init__(self, id=None, xid=None, **kwargs): ReplacementPlaceable.__init__(self, id=id, xid=xid, **kwargs) class Sub(SubflowPlaceable): has_content = True def to_base_placeables(tree): if not isinstance(tree, StringElem): return tree base_class = [klass for klass in tree.__class__.__bases__ if klass in [Bpt, Ept, Ph, It, G, Bx, Ex, X, Sub]] if not base_class: base_class = tree.__class__ else: base_class = base_class[0] newtree = base_class() newtree.id = tree.id newtree.rid = tree.rid newtree.xid = tree.xid newtree.sub = [] for subtree in tree.sub: newtree.sub.append(to_base_placeables(subtree)) return newtree
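# A minimal sketch: a format-specific placeable (the XliffG subclass here is
# made up) collapses back to its base class, keeping id/rid/xid and the
# subtree. Assumes StringElem's usual (sub, id, rid, xid) constructor.
from translate.storage.placeables.base import G, to_base_placeables
from translate.storage.placeables.strelem import StringElem


class XliffG(G):
    """Stand-in for a format-specific subclass of G."""


tree = XliffG(sub=[StringElem(u'bold text')], id='g1')
base_tree = to_base_placeables(tree)
# base_tree is now a plain G with id 'g1' and a converted copy of the subtree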
gpl-2.0
75651/kbengine_cloud
kbe/res/scripts/common/Lib/multiprocessing/popen_forkserver.py
94
1967
import io import os from . import reduction if not reduction.HAVE_SEND_HANDLE: raise ImportError('No support for sending fds between processes') from . import context from . import forkserver from . import popen_fork from . import spawn from . import util __all__ = ['Popen'] # # Wrapper for an fd used while launching a process # class _DupFd(object): def __init__(self, ind): self.ind = ind def detach(self): return forkserver.get_inherited_fds()[self.ind] # # Start child process using a server process # class Popen(popen_fork.Popen): method = 'forkserver' DupFd = _DupFd def __init__(self, process_obj): self._fds = [] super().__init__(process_obj) def duplicate_for_child(self, fd): self._fds.append(fd) return len(self._fds) - 1 def _launch(self, process_obj): prep_data = spawn.get_preparation_data(process_obj._name) buf = io.BytesIO() context.set_spawning_popen(self) try: reduction.dump(prep_data, buf) reduction.dump(process_obj, buf) finally: context.set_spawning_popen(None) self.sentinel, w = forkserver.connect_to_new_process(self._fds) util.Finalize(self, os.close, (self.sentinel,)) with open(w, 'wb', closefd=True) as f: f.write(buf.getbuffer()) self.pid = forkserver.read_unsigned(self.sentinel) def poll(self, flag=os.WNOHANG): if self.returncode is None: from multiprocessing.connection import wait timeout = 0 if flag == os.WNOHANG else None if not wait([self.sentinel], timeout): return None try: self.returncode = forkserver.read_unsigned(self.sentinel) except (OSError, EOFError): # The process ended abnormally perhaps because of a signal self.returncode = 255 return self.returncode
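# This Popen class backs the 'forkserver' start method; user code reaches it
# through the public multiprocessing API rather than instantiating it directly.
import multiprocessing as mp


def worker(q):
    q.put('hello from a forkserver child')


if __name__ == '__main__':
    mp.set_start_method('forkserver')
    q = mp.Queue()
    p = mp.Process(target=worker, args=(q,))
    p.start()       # pickled and handed to the fork server, as in _launch above
    print(q.get())
    p.join()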
lgpl-3.0
matllubos/django-sms-operator
sms_operator/models.py
1
4032
from __future__ import unicode_literals from django.db import models from django.utils.translation import ugettext_lazy as _ from django.utils.datastructures import SortedDict from easymode.i18n.decorators import I18n from chamber.utils.datastructures import ChoicesNumEnum from chamber.models.fields import SouthMixin class SMSChoicesNumEnum(ChoicesNumEnum): def __init__(self, *items): """ Receives item with four values key, label, i and send state choices """ super(SMSChoicesNumEnum, self).__init__(*((key, label, i) for key, label, i, _ in items)) self.sender_enum = SortedDict() for (key, label, i, sender_choices) in items: for j, choice_label in sender_choices: self.sender_enum[j] = (choice_label, i) @property def sender_choices(self): return [(val, choice[0]) for val, choice in self.sender_enum.items()] def get_value_from_sender_value(self, sender_val): """ Return value according to sender_val """ return self.sender_enum.get(sender_val)[1] if sender_val in self.sender_enum else self.ERROR class SMSState(SouthMixin, models.PositiveIntegerField): def __init__(self, *args, **kwargs): self.enum = kwargs.pop('enum', None) if self.enum: kwargs['choices'] = self.enum.choices super(SMSState, self).__init__(*args, **kwargs) def pre_save(self, model_instance, add): """ This model field is not editable value is set according to sender_state value """ if self.enum: value = self.enum.get_value_from_sender_value(getattr(model_instance, 'sender_state')) setattr(model_instance, self.attname, value) return value class SMSMessage(models.Model): STATE = SMSChoicesNumEnum( ('NEW', _('New'), 0, ( (17, _('Not send')), )), ('WAITING', _('Waiting'), 1, ( (11, _('Unknown state')), (12, _('Only partly delivered')), (13, _('Only partly delivered')), (14, _('Only partly delivered')), )), ('DELIVERED', _('Delivered'), 2, ( (0, _('Delivered')), )), ('ERROR', _('Error'), 3, ( (1, _('Failed')), (2, _('Number does not exist')), (3, _('Timeout')), (4, _('Number has wrong format')), (5, _('GSM operator error')), (6, _('GSM operator error')), (7, _('SMS text too long')), (10, _('Only partly delivered')), (15, _('Message not found')), (16, _('Connection error')), )), ('DEBUG', _('Debug'), 4, ( (18, _('Debug')), )), ) created_at = models.DateTimeField(verbose_name=_('created at'), null=False, blank=False, auto_now_add=True) state = SMSState(verbose_name=_('state'), null=False, blank=False, enum=STATE, editable=False) sender_state = models.PositiveIntegerField(verbose_name=_('sender state'), null=False, blank=False, choices=STATE.sender_choices, default=17) phone = models.CharField(verbose_name=_('phone'), null=False, blank=False, max_length=20) text = models.TextField(verbose_name=_('text'), null=False, blank=False) @property def failed(self): return self.state == self.STATE.ERROR def __unicode__(self): return self.phone class Meta: verbose_name = _('Log SMS message') verbose_name_plural = _('Log SMS messages') ordering = ('-created_at',) @I18n('body') class SMSTemplate(models.Model): slug = models.SlugField(max_length=100, null=False, blank=False, unique=True, verbose_name=_('slug')) body = models.TextField(null=True, blank=False, verbose_name=_('message body')) def __unicode__(self): return self.slug class Meta: verbose_name = _('SMS template') verbose_name_plural = _('SMS templates')
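# A pure-Python sketch of the state mapping (no database required): the
# operator's sender code is translated by SMSChoicesNumEnum, which is exactly
# what SMSState.pre_save applies before a row is written. This assumes
# chamber's ChoicesNumEnum exposes states as attributes, as the 'failed'
# property above already relies on.
from sms_operator.models import SMSMessage

STATE = SMSMessage.STATE
assert STATE.get_value_from_sender_value(0) == STATE.DELIVERED   # 'Delivered'
assert STATE.get_value_from_sender_value(17) == STATE.NEW        # 'Not send'
assert STATE.get_value_from_sender_value(99) == STATE.ERROR      # unknown code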
lgpl-3.0
ganeshnalawade/ansible
lib/ansible/module_utils/common/sys_info.py
47
5641
# Copyright (c), Michael DeHaan <[email protected]>, 2012-2013
# Copyright (c), Toshio Kuratomi <[email protected]> 2016
# Simplified BSD License (see licenses/simplified_bsd.txt or https://opensource.org/licenses/BSD-2-Clause)

from __future__ import absolute_import, division, print_function
__metaclass__ = type

import platform

from ansible.module_utils import distro
from ansible.module_utils.common._utils import get_all_subclasses


__all__ = ('get_distribution', 'get_distribution_version', 'get_platform_subclass')


def get_distribution():
    '''
    Return the name of the distribution the module is running on

    :rtype: NativeString or None
    :returns: Name of the distribution the module is running on

    This function attempts to determine what Linux distribution the code is
    running on and return a string representing that value. If the distribution
    cannot be determined, it returns ``OtherLinux``. If not run on Linux it
    returns None.
    '''
    distribution = None

    if platform.system() == 'Linux':
        distribution = distro.id().capitalize()

        if distribution == 'Amzn':
            distribution = 'Amazon'
        elif distribution == 'Rhel':
            distribution = 'Redhat'
        elif not distribution:
            distribution = 'OtherLinux'

    return distribution


def get_distribution_version():
    '''
    Get the version of the Linux distribution the code is running on

    :rtype: NativeString or None
    :returns: A string representation of the version of the distribution. If it
        cannot determine the version, it returns an empty string. If this is not
        run on a Linux machine it returns None
    '''
    version = None

    needs_best_version = frozenset((
        u'centos',
        u'debian',
    ))

    if platform.system() == 'Linux':
        version = distro.version()
        distro_id = distro.id()

        if version is not None:
            if distro_id in needs_best_version:
                version_best = distro.version(best=True)

                # CentOS maintainers believe only the major version is appropriate
                # but Ansible users desire minor version information, e.g., 7.5.
                # https://github.com/ansible/ansible/issues/50141#issuecomment-449452781
                if distro_id == u'centos':
                    version = u'.'.join(version_best.split(u'.')[:2])

                # Debian does not include minor version in /etc/os-release.
                # Bug report filed upstream requesting this be added to /etc/os-release
                # https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=931197
                if distro_id == u'debian':
                    version = version_best
        else:
            version = u''

    return version


def get_distribution_codename():
    '''
    Return the code name for this Linux Distribution

    :rtype: NativeString or None
    :returns: A string representation of the distribution's codename or None if
        not a Linux distro
    '''
    codename = None
    if platform.system() == 'Linux':
        # Until this gets merged and we update our bundled copy of distro:
        # https://github.com/nir0s/distro/pull/230
        # Fixes Fedora 28+ not having a code name and Ubuntu Xenial Xerus needing to be "xenial"
        os_release_info = distro.os_release_info()
        codename = os_release_info.get('version_codename')

        if codename is None:
            codename = os_release_info.get('ubuntu_codename')

        if codename is None and distro.id() == 'ubuntu':
            lsb_release_info = distro.lsb_release_info()
            codename = lsb_release_info.get('codename')

        if codename is None:
            codename = distro.codename()
            if codename == u'':
                codename = None

    return codename


def get_platform_subclass(cls):
    '''
    Finds a subclass implementing desired functionality on the platform the code is running on

    :arg cls: Class to find an appropriate subclass for
    :returns: A class that implements the functionality on this platform

    Some Ansible modules have different implementations depending on the
    platform they run on.
This function is used to select between the various implementations and choose one. You can look at the implementation of the Ansible :ref:`User module<user_module>` module for an example of how to use this. This function replaces ``basic.load_platform_subclass()``. When you port code, you need to change the callers to be explicit about instantiating the class. For instance, code in the Ansible User module changed from:: .. code-block:: python # Old class User: def __new__(cls, args, kwargs): return load_platform_subclass(User, args, kwargs) # New class User: def __new__(cls, *args, **kwargs): new_cls = get_platform_subclass(User) return super(cls, new_cls).__new__(new_cls) ''' this_platform = platform.system() distribution = get_distribution() subclass = None # get the most specific superclass for this platform if distribution is not None: for sc in get_all_subclasses(cls): if sc.distribution is not None and sc.distribution == distribution and sc.platform == this_platform: subclass = sc if subclass is None: for sc in get_all_subclasses(cls): if sc.platform == this_platform and sc.distribution is None: subclass = sc if subclass is None: subclass = cls return subclass
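# Quick illustration of the three distro helpers; the commented values are
# what one might see on an Ubuntu 18.04 host, not guaranteed output.
from ansible.module_utils.common.sys_info import (
    get_distribution,
    get_distribution_codename,
    get_distribution_version,
)

print(get_distribution())           # e.g. 'Ubuntu'
print(get_distribution_version())   # e.g. '18.04'
print(get_distribution_codename())  # e.g. 'bionic'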
gpl-3.0
cjordog/NRLWebsite
demo/auto_generate_plots.py
1
5007
'''
auto_generate_plots.py

This script generates plots for experimental result data that has not yet
been plotted.

This script uses the matplotlib plotting library, and the mplot3d toolkit,
for generating data plots:
http://matplotlib.org/mpl_toolkits/mplot3d/tutorial.html

TODO: Make this script more web-facing. Enable interactive use so users can
choose viewing angle, axes labels, title, zoom, projection, etc.
'''

import json, sys, mysql.connector
from matplotlib import pyplot
import pylab
from mpl_toolkits.mplot3d import Axes3D, proj3d
import numpy as np
import matplotlib.pyplot as plt
from scipy.interpolate import griddata

'''
orthogonal_proj(zfront, zback)

Transforms 3D plots from perspective to orthogonal projection. But it breaks
automatic axes repositioning! TODO: fix
'''
def orthogonal_proj(zfront, zback):
    a = (zfront+zback)/(zfront-zback)
    b = -2*(zfront*zback)/(zfront-zback)
    return np.array([[1, 0, 0, 0],
                     [0, 1, 0, 0],
                     [0, 0, a, b],
                     [0, 0, 0, zback]])

'''
TODO: the following line breaks automatic axes repositioning! Fix, then
uncomment the following line
'''
# proj3d.persp_transformation = orthogonal_proj

### booleans to determine which plots to generate
scatter = False
wireframe = True

### remove the following line, Mark's system is messed up and required this
sys.path.append('/usr/lib/python2.7/dist-packages')

'''
TODO: change the call to connect()
Pass variables that contain local db login info
'''
cnx = mysql.connector.connect(user='ruolinfan', password='pass', host='localhost', database='UWNet')
cursor = cnx.cursor()

data = {}

### find the experiments for which plots still need to be generated
getUnplottedExpIds = ("SELECT id, testData FROM InputQueue WHERE plotsGenerated = FALSE")
cursor.execute(getUnplottedExpIds)

fileSizes = {}

### TODO: do the following two for-loops in one for-loop
### collect file sizes in dictionary keyed by experiment id
for (id, testData) in cursor:
    ### TODO: calculate file size
    fileSize = 1
    fileSizes[id] = fileSize
    filePath = fileSizes[id]

'''
TODO: fix the following for-loop
Right now it assumes that the user submitted form with:
mpwr 10 lpwr 10 ppwr 10 mmod 5 lmod 1 pmod 1 mbkn 16 lbkn 1 pbkn 1 rptt 1
Fix to allow arbitrary input
'''
### for each experiment, generate plots and save to files
for key in fileSizes:
    id = int(key)
    getResults = ("SELECT parameters, results FROM Results WHERE experimentID = {}".format(id))
    cursor.execute(getResults)

    ### create a list of all the points
    bkns = []
    mods = []
    delays = []

    ### collect bkns, mods, and delays
    for (parameters, results) in cursor:
        bkns.append(json.loads(parameters)['bkn'])
        mods.append(json.loads(parameters)['mod'])
        delays.append(json.loads(results)['0']['delay'])

    plot_data = {
        'bkns': bkns,
        'mods': mods,
        'delays': delays
    }

    ### change the following two lines when the user can submit arbitrary parameters
    bkn_vals = range(1, 16 + 1)  ### change
    mod_vals = range(1, 5 + 1)  ### change

    delay_vals = plot_data['delays']

    X, Y = np.meshgrid(bkn_vals, mod_vals)
    ### change the following lines when the user can submit arbitrary parameters
    Z = np.asarray([np.asarray(delay_vals[0:16]), np.asarray(delay_vals[16:32]),
                    np.asarray(delay_vals[32:48]), np.asarray(delay_vals[48:64]),
                    np.asarray(delay_vals[64:80])])  ### change

    ### now create the plots
    el = 15  ### elevation of the viewing camera for plots

    ### scatter plot
    if scatter == True:
        fig = plt.figure()
        ax = fig.add_subplot(111, projection='3d')
        ax.scatter(plot_data['bkns'], plot_data['mods'], delay_vals)
        ax.set_title('{} file transmission'.format(fileSizes[id]))
ax.set_xlabel('blocks per packet') ax.set_ylabel('transmission mode') ax.set_zlabel('file transmission delay (s)') plt.xticks(np.arange(min(bkn_vals), max(bkn_vals) + 1, 1)) plt.yticks(np.arange(min(mod_vals), max(mod_vals) + 1, 1)) ax.view_init(elev=el, azim=45) plt.savefig("plots/exp{0}_scat_elev{1}degrees.png".format(id, el)) ### use plt.show() for the interactive version of this script, see top TODO # plt.show() ### wireframe plot if wireframe == True: fig = plt.figure() ax = fig.add_subplot(111, projection ='3d') ax.plot_wireframe(X, Y, Z) ax.set_title('{} file transmission'.format(fileSizes[id])) ax.set_xlabel('blocks per packet') ax.set_ylabel('transmission mode') ax.set_zlabel('file transmission delay (s)') plt.xticks(np.arange(min(bkn_vals), max(bkn_vals) + 1, 1)) plt.yticks(np.arange(min(mod_vals), max(mod_vals) + 1, 1)) ax.view_init(elev=el, azim=45) plt.savefig("plots/exp{0}_wire_elev{1}degrees.png".format(id, el)) ### update InputQueue to notify that plots were generated plots_generated = ("UPDATE InputQueue SET plotsGenerated = 1 WHERE id = {0}".format(id)) cursor.execute(plots_generated) cnx.commit() cursor.close() cnx.close()
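# A database-free sketch of the same wireframe path the script takes, with
# random delays standing in for the MySQL results and the 'Agg' backend
# assumed so the figure can be written without a display.
import numpy as np
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D  # noqa: F401 -- registers the '3d' projection

X, Y = np.meshgrid(range(1, 17), range(1, 6))
Z = np.random.rand(5, 16)  # stand-in for the 5x16 delay matrix

fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
ax.plot_wireframe(X, Y, Z)
ax.set_xlabel('blocks per packet')
ax.set_ylabel('transmission mode')
ax.set_zlabel('file transmission delay (s)')
ax.view_init(elev=15, azim=45)
plt.savefig('demo_wire.png')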
mit
Aloomaio/googleads-python-lib
examples/ad_manager/v201811/proposal_service/create_proposals.py
1
2591
#!/usr/bin/env python # # Copyright 2015 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """This code example creates new proposals. To determine which proposals exist, run get_all_proposals.py. """ import uuid # Import appropriate modules from the client library. from googleads import ad_manager ADVERTISER_ID = 'INSERT_ADVERTISER_ID_HERE' PRIMARY_SALESPERSON_ID = 'INSERT_PRIMARY_SALESPERSON_ID_HERE' SECONDARY_SALESPERSON_ID = 'INSERT_SECONDARY_SALESPERSON_ID_HERE' PRIMARY_TRAFFICKER_ID = 'INSERT_PRIMARY_TRAFFICKER_ID_HERE' def main(client, advertiser_id, primary_salesperson_id, secondary_salesperson_id, primary_trafficker_id): # Initialize appropriate services. proposal_service = client.GetService('ProposalService', version='v201811') network_service = client.GetService('NetworkService', version='v201811') # Create proposal objects. proposal = { 'name': 'Proposal #%s' % uuid.uuid4(), 'advertiser': { 'companyId': advertiser_id, 'type': 'ADVERTISER' }, 'primarySalesperson': { 'userId': primary_salesperson_id, 'split': '75000' }, 'secondarySalespeople': [{ 'userId': secondary_salesperson_id, 'split': '25000' }], 'primaryTraffickerId': primary_trafficker_id, 'probabilityOfClose': '100000', 'budget': { 'microAmount': '100000000', 'currencyCode': network_service.getCurrentNetwork()['currencyCode'] }, 'billingCap': 'CAPPED_CUMULATIVE', 'billingSource': 'DFP_VOLUME' } # Add proposals. proposals = proposal_service.createProposals([proposal]) # Display results. for proposal in proposals: print ('Proposal with id "%s" and name "%s" was created.' % (proposal['id'], proposal['name'])) if __name__ == '__main__': # Initialize client object. ad_manager_client = ad_manager.AdManagerClient.LoadFromStorage() main(ad_manager_client, ADVERTISER_ID, PRIMARY_SALESPERSON_ID, SECONDARY_SALESPERSON_ID, PRIMARY_TRAFFICKER_ID)
apache-2.0
opendroid-Team/enigma2-4.1
lib/python/Plugins/Extensions/PluginHider/PluginHiderSetup.py
11
6978
from . import _ # GUI (Screens) from Screens.Screen import Screen from Components.ConfigList import ConfigListScreen # GUI (Summary) from Screens.HelpMenu import HelpableScreen from Screens.MessageBox import MessageBox from Screens.Setup import SetupSummary # GUI (Components) from Components.ActionMap import HelpableActionMap from Components.SelectionList import SelectionList, SelectionEntryComponent from Components.Sources.StaticText import StaticText from Components.Pixmap import MultiPixmap # Configuration from Components.config import config from Components.PluginComponent import plugins from Plugins.Plugin import PluginDescriptor import inspect LIST_PLUGINS = 0 LIST_EXTENSIONS = 1 LIST_EVENTINFO = 2 class PluginHiderSetup(Screen, HelpableScreen): skin = """<screen name="PluginHiderSetup" title="PluginHider Setup" position="center,center" size="565,395"> <ePixmap position="0,358" size="140,40" pixmap="skin_default/buttons/red.png" transparent="1" alphatest="on" /> <ePixmap position="140,358" size="140,40" pixmap="skin_default/buttons/green.png" transparent="1" alphatest="on" /> <ePixmap position="280,358" size="140,40" pixmap="skin_default/buttons/yellow.png" transparent="1" alphatest="on" /> <ePixmap position="420,358" size="140,40" pixmap="skin_default/buttons/blue.png" transparent="1" alphatest="on" /> <widget source="key_red" render="Label" position="0,358" zPosition="1" size="140,40" valign="center" halign="center" font="Regular;21" transparent="1" foregroundColor="white" shadowColor="black" shadowOffset="-1,-1" /> <widget source="key_green" render="Label" position="140,358" zPosition="1" size="140,40" valign="center" halign="center" font="Regular;21" transparent="1" foregroundColor="white" shadowColor="black" shadowOffset="-1,-1" /> <widget source="key_yellow" render="Label" position="280,358" zPosition="1" size="140,40" valign="center" halign="center" font="Regular;21" transparent="1" foregroundColor="white" shadowColor="black" shadowOffset="-1,-1" /> <widget source="key_blue" render="Label" position="420,358" zPosition="1" size="140,40" valign="center" halign="center" font="Regular;21" transparent="1" foregroundColor="white" shadowColor="black" shadowOffset="-1,-1" /> <ePixmap size="551,336" alphatest="on" position="5,21" pixmap="skin_default/border_epg.png" zPosition="3" /> <widget size="320,25" alphatest="on" position="5,1" zPosition="1" name="tabbar" pixmaps="skin_default/epg_now.png,skin_default/epg_next.png,skin_default/epg_more.png" /> <widget valign="center" transparent="1" size="108,22" backgroundColor="#25062748" position="5,1" zPosition="2" source="plugins" render="Label" halign="center" font="Regular;18" /> <widget valign="center" transparent="1" size="108,22" backgroundColor="#25062748" position="111,1" zPosition="2" source="extensions" render="Label" halign="center" font="Regular;18" /> <widget valign="center" transparent="1" size="108,22" backgroundColor="#25062748" position="216,1" zPosition="2" source="eventinfo" render="Label" halign="center" font="Regular;18" /> <widget name="list" position="11,26" size="540,330" scrollbarMode="showOnDemand" /> </screen>""" def __init__(self, session): Screen.__init__(self, session) HelpableScreen.__init__(self) # Initialize widgets self["key_green"] = StaticText(_("OK")) self["key_red"] = StaticText(_("Cancel")) self["key_yellow"] = StaticText("") self["key_blue"] = StaticText(_("Run")) self["plugins"] = StaticText(_("Plugins")) self["extensions"] = StaticText(_("Extensions")) self["eventinfo"] = StaticText(_("Eventinfo")) 
self["tabbar"] = MultiPixmap() self["list"] = SelectionList([]) self.selectedList = LIST_PLUGINS self.updateList() self["PluginHiderSetupActions"] = HelpableActionMap(self, "PluginHiderSetupActions", { "ok": (self["list"].toggleSelection, _("toggle selection")), "cancel": (self.cancel, _("end editing")), "green": (self.save, _("save")), "blue": (self.run, _("run selected plugin")), "next": (self.next, _("select next tab")), "previous": (self.previous, _("select previous tab")), }, -1 ) self.onLayoutFinish.append(self.setCustomTitle) def run(self): cur = self["list"].getCurrent() cur = cur and cur[0] if cur: plugin = cur[1] if self.selectedList == LIST_PLUGINS: plugin(session=self.session) else: #if self.selectedList == LIST_EXTENSIONS or self.selectedList == LIST_EVENTINFO: from Screens.InfoBar import InfoBar instance = InfoBar.instance args = inspect.getargspec(plugin.__call__)[0] if len(args) == 1: plugin(session=self.session) elif instance and instance.servicelist: plugin(session=self.session,servicelist=instance.servicelist) else: session.open(MessageBox, _("Could not start Plugin:") + "\n" + _("Unable to access InfoBar."), type=MessageBox.TYPE_ERROR) def cancel(self): config.plugins.pluginhider.hideplugins.cancel() config.plugins.pluginhider.hideextensions.cancel() self.close() def save(self): self.keepCurrent() config.plugins.pluginhider.save() self.close() def previous(self): self.keepCurrent() self.selectedList -= 1 if self.selectedList < 0: self.selectedList = LIST_EVENTINFO self.updateList() def next(self): self.keepCurrent() self.selectedList += 1 if self.selectedList > LIST_EVENTINFO: self.selectedList = LIST_PLUGINS self.updateList() def setCustomTitle(self): self.setTitle(_("PluginHider Setup")) def updateList(self): if hasattr(plugins, 'pluginHider_baseGetPlugins'): fnc = plugins.pluginHider_baseGetPlugins else: fnc = plugins.getPlugins if self.selectedList == LIST_PLUGINS: list = fnc([PluginDescriptor.WHERE_PLUGINMENU]) selected = config.plugins.pluginhider.hideplugins.value elif self.selectedList == LIST_EXTENSIONS: list = fnc([PluginDescriptor.WHERE_EXTENSIONSMENU]) selected = config.plugins.pluginhider.hideextensions.value else: #if self.selectedList == LIST_EVENTINFO: list = fnc([PluginDescriptor.WHERE_EVENTINFO]) selected = config.plugins.pluginhider.hideeventinfo.value self["tabbar"].setPixmapNum(self.selectedList) res = [] i = 0 for plugin in list: if plugin.description: name = "%s (%s)" % (plugin.name, plugin.description) else: name = plugin.name res.append(SelectionEntryComponent( name, plugin, i, plugin.name in selected, )) i += 1 self["list"].setList(res) if res: self["list"].moveToIndex(0) def keepCurrent(self): selected = self["list"].getSelectionsList() if self.selectedList == LIST_PLUGINS: config.plugins.pluginhider.hideplugins.value = [x[1].name for x in selected] elif self.selectedList == LIST_EXTENSIONS: config.plugins.pluginhider.hideextensions.value = [x[1].name for x in selected] else: #if self.selectedList == LIST_EVENTINFO: config.plugins.pluginhider.hideeventinfo.value = [x[1].name for x in selected]
gpl-2.0
ruiaylin/percona-xtrabackup
storage/innobase/xtrabackup/test/python/testtools/tests/test_helpers.py
42
3583
# Copyright (c) 2010 testtools developers. See LICENSE for details. from testtools import TestCase from testtools.helpers import ( try_import, try_imports, ) from testtools.matchers import ( Equals, Is, ) class TestTryImport(TestCase): def test_doesnt_exist(self): # try_import('thing', foo) returns foo if 'thing' doesn't exist. marker = object() result = try_import('doesntexist', marker) self.assertThat(result, Is(marker)) def test_None_is_default_alternative(self): # try_import('thing') returns None if 'thing' doesn't exist. result = try_import('doesntexist') self.assertThat(result, Is(None)) def test_existing_module(self): # try_import('thing', foo) imports 'thing' and returns it if it's a # module that exists. result = try_import('os', object()) import os self.assertThat(result, Is(os)) def test_existing_submodule(self): # try_import('thing.another', foo) imports 'thing' and returns it if # it's a module that exists. result = try_import('os.path', object()) import os self.assertThat(result, Is(os.path)) def test_nonexistent_submodule(self): # try_import('thing.another', foo) imports 'thing' and returns foo if # 'another' doesn't exist. marker = object() result = try_import('os.doesntexist', marker) self.assertThat(result, Is(marker)) def test_object_from_module(self): # try_import('thing.object') imports 'thing' and returns # 'thing.object' if 'thing' is a module and 'object' is not. result = try_import('os.path.join') import os self.assertThat(result, Is(os.path.join)) class TestTryImports(TestCase): def test_doesnt_exist(self): # try_imports('thing', foo) returns foo if 'thing' doesn't exist. marker = object() result = try_imports(['doesntexist'], marker) self.assertThat(result, Is(marker)) def test_fallback(self): result = try_imports(['doesntexist', 'os']) import os self.assertThat(result, Is(os)) def test_None_is_default_alternative(self): # try_imports('thing') returns None if 'thing' doesn't exist. e = self.assertRaises( ImportError, try_imports, ['doesntexist', 'noreally']) self.assertThat( str(e), Equals("Could not import any of: doesntexist, noreally")) def test_existing_module(self): # try_imports('thing', foo) imports 'thing' and returns it if it's a # module that exists. result = try_imports(['os'], object()) import os self.assertThat(result, Is(os)) def test_existing_submodule(self): # try_imports('thing.another', foo) imports 'thing' and returns it if # it's a module that exists. result = try_imports(['os.path'], object()) import os self.assertThat(result, Is(os.path)) def test_nonexistent_submodule(self): # try_imports('thing.another', foo) imports 'thing' and returns foo if # 'another' doesn't exist. marker = object() result = try_imports(['os.doesntexist'], marker) self.assertThat(result, Is(marker)) def test_fallback_submodule(self): result = try_imports(['os.doesntexist', 'os.path']) import os self.assertThat(result, Is(os.path)) def test_suite(): from unittest import TestLoader return TestLoader().loadTestsFromName(__name__)
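# The helpers in action, mirroring the tests above: dotted paths may name
# objects inside modules, and the list form falls through until one import
# succeeds.
from testtools.helpers import try_import, try_imports

join = try_import('os.path.join')  # imports 'os' and returns os.path.join

# Classic fallback chain; whichever implementation exists first wins.
StringIO = try_imports(['cStringIO.StringIO', 'StringIO.StringIO',
                        'io.StringIO'])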
gpl-2.0
dneural/python-nullnude
nullnude/client.py
1
2209
#!/usr/bin/env python # -*- encoding: utf-8 -*- # # Copyright (c) 2015, dNeural.com # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. # # Except as contained in this notice, the name of dNeural and or its trademarks # (and among others NullNude) shall not be used in advertising or otherwise to # promote the sale, use or other dealings in this Software without prior # written authorization from dNeural. """NullNude SDK interface for users.""" from __future__ import absolute_import from .wrapper_api import WrapperApi from .nudity import NudityManager from .roi import RoiManager from .moderate import ModerateManager class Nullnude(object): """SDK interface to get cloud services from NullNude.""" def __init__(self, api_key, api_secret): """Create the main interface of the SDK. :param api_key: API key of your NullNude account :param api_secret: API secret of your NullNude account """ self.endpoint = 'https://api.dneural.com/1.0' self._api = WrapperApi(api_key, api_secret) self.nudity = NudityManager(self._api, self) self.roi = RoiManager(self._api, self) self.moderate = ModerateManager(self._api, self)
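# Constructing the SDK entry point; the credentials are placeholders.
from nullnude.client import Nullnude

sdk = Nullnude('my-api-key', 'my-api-secret')
# The three managers share one authenticated WrapperApi instance:
sdk.nudity    # NudityManager
sdk.roi       # RoiManager
sdk.moderate  # ModerateManager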
mit
fuzeman/Catalytic
deluge/ui/gtkui/systemtray.py
2
19639
# # systemtray.py # # Copyright (C) 2007, 2008 Andrew Resch <[email protected]> # # Deluge is free software. # # You may redistribute it and/or modify it under the terms of the # GNU General Public License, as published by the Free Software # Foundation; either version 3 of the License, or (at your option) # any later version. # # deluge is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. # See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with deluge. If not, write to: # The Free Software Foundation, Inc., # 51 Franklin Street, Fifth Floor # Boston, MA 02110-1301, USA. # # In addition, as a special exception, the copyright holders give # permission to link the code of portions of this program with the OpenSSL # library. # You must obey the GNU General Public License in all respects for all of # the code used other than OpenSSL. If you modify file(s) with this # exception, you may extend this exception to your version of the file(s), # but you are not obligated to do so. If you do not wish to do so, delete # this exception statement from your version. If you delete this exception # statement from all source files in the program, then also delete it here. # # try: import appindicator except ImportError: appindicator = None import gtk import pkg_resources import deluge.component as component from deluge.ui.client import client import deluge.common from deluge.configmanager import ConfigManager from deluge.log import LOG as log import common class SystemTray(component.Component): def __init__(self): component.Component.__init__(self, "SystemTray", interval=4) self.window = component.get("MainWindow") self.config = ConfigManager("gtkui.conf") # List of widgets that need to be hidden when not connected to a host self.hide_widget_list = [ "menuitem_add_torrent", "menuitem_pause_all", "menuitem_resume_all", "menuitem_download_limit", "menuitem_upload_limit", "menuitem_quitdaemon", "separatormenuitem1", "separatormenuitem2", "separatormenuitem3", "separatormenuitem4" ] self.config.register_set_function("enable_system_tray", self.on_enable_system_tray_set) # bit of a hack to prevent function from doing something on startup self.__enabled_set_once = False self.config.register_set_function("enable_appindicator", self.on_enable_appindicator_set) self.max_download_speed = -1.0 self.download_rate = 0.0 self.max_upload_speed = -1.0 self.upload_rate = 0.0 self.config_value_changed_dict = { "max_download_speed": self._on_max_download_speed, "max_upload_speed": self._on_max_upload_speed } def enable(self): """Enables the system tray icon.""" self.tray_glade = gtk.glade.XML( pkg_resources.resource_filename("deluge.ui.gtkui", "glade/tray_menu.glade")) self.tray_glade.signal_autoconnect({ "on_menuitem_show_deluge_activate": \ self.on_menuitem_show_deluge_activate, "on_menuitem_add_torrent_activate": \ self.on_menuitem_add_torrent_activate, "on_menuitem_pause_all_activate": \ self.on_menuitem_pause_all_activate, "on_menuitem_resume_all_activate": \ self.on_menuitem_resume_all_activate, "on_menuitem_quit_activate": self.on_menuitem_quit_activate, "on_menuitem_quitdaemon_activate": \ self.on_menuitem_quitdaemon_activate }) self.tray_menu = self.tray_glade.get_widget("tray_menu") if appindicator and self.config["enable_appindicator"]: log.debug("Enabling the Application Indicator..") self.indicator = 
appindicator.Indicator ( "deluge", "deluge", appindicator.CATEGORY_APPLICATION_STATUS) # Pass the menu to the Application Indicator self.indicator.set_menu(self.tray_menu) # Make sure the status of the Show Window MenuItem is correct self._sig_win_hide = self.window.window.connect("hide", self._on_window_hide) self._sig_win_show = self.window.window.connect("show", self._on_window_show) if self.window.visible(): self.tray_glade.get_widget("menuitem_show_deluge").set_active(True) else: self.tray_glade.get_widget("menuitem_show_deluge").set_active(False) # Show the Application Indicator self.indicator.set_status(appindicator.STATUS_ACTIVE) else: log.debug("Enabling the system tray icon..") if deluge.common.windows_check() or deluge.common.osx_check(): self.tray = gtk.status_icon_new_from_pixbuf( common.get_logo(32)) else: try: self.tray = gtk.status_icon_new_from_icon_name("deluge") except: log.warning("Update PyGTK to 2.10 or greater for SystemTray..") return self.tray.connect("activate", self.on_tray_clicked) self.tray.connect("popup-menu", self.on_tray_popup) # For some reason these icons do not display in appindicator self.tray_glade.get_widget("download-limit-image").set_from_file( deluge.common.get_pixmap("downloading16.png")) self.tray_glade.get_widget("upload-limit-image").set_from_file( deluge.common.get_pixmap("seeding16.png")) client.register_event_handler("ConfigValueChangedEvent", self.config_value_changed) if client.connected(): # We're connected so we need to get some values from the core self.__start() else: # Hide menu widgets because we're not connected to a host. for widget in self.hide_widget_list: self.tray_glade.get_widget(widget).hide() def __start(self): if self.config["enable_system_tray"]: if self.config["classic_mode"]: self.hide_widget_list.remove("menuitem_quitdaemon") self.hide_widget_list.remove("separatormenuitem4") self.tray_glade.get_widget("menuitem_quitdaemon").hide() self.tray_glade.get_widget("separatormenuitem4").hide() # These do not work with appindicator currently and can crash Deluge. 
# Related to Launchpad bug #608219 if appindicator and self.config["enable_appindicator"]: self.hide_widget_list.remove("menuitem_download_limit") self.hide_widget_list.remove("menuitem_upload_limit") self.hide_widget_list.remove("separatormenuitem3") self.tray_glade.get_widget("menuitem_download_limit").hide() self.tray_glade.get_widget("menuitem_upload_limit").hide() self.tray_glade.get_widget("separatormenuitem3").hide() # Show widgets in the hide list because we've connected to a host for widget in self.hide_widget_list: self.tray_glade.get_widget(widget).show() # Build the bandwidth speed limit menus self.build_tray_bwsetsubmenu() # Get some config values client.core.get_config_value( "max_download_speed").addCallback(self._on_max_download_speed) client.core.get_config_value( "max_upload_speed").addCallback(self._on_max_upload_speed) self.send_status_request() def start(self): self.__start() def stop(self): if self.config["enable_system_tray"] and not self.config["enable_appindicator"]: try: # Hide widgets in hide list because we're not connected to a host for widget in self.hide_widget_list: self.tray_glade.get_widget(widget).hide() except Exception, e: log.debug("Unable to hide system tray menu widgets: %s", e) self.tray.set_tooltip(_("Deluge") + "\n" + _("Not Connected...")) def shutdown(self): if self.config["enable_system_tray"]: if appindicator and self.config["enable_appindicator"]: self.indicator.set_status(appindicator.STATUS_PASSIVE) else: self.tray.set_visible(False) def send_status_request(self): client.core.get_session_status([ "payload_upload_rate", "payload_download_rate"]).addCallback(self._on_get_session_status) def config_value_changed(self, key, value): """This is called when we received a config_value_changed signal from the core.""" if key in self.config_value_changed_dict.keys(): self.config_value_changed_dict[key](value) def _on_max_download_speed(self, max_download_speed): if self.max_download_speed != max_download_speed: self.max_download_speed = max_download_speed self.build_tray_bwsetsubmenu() def _on_max_upload_speed(self, max_upload_speed): if self.max_upload_speed != max_upload_speed: self.max_upload_speed = max_upload_speed self.build_tray_bwsetsubmenu() def _on_get_session_status(self, status): self.download_rate = deluge.common.fsize(status["payload_download_rate"]) self.upload_rate = deluge.common.fsize(status["payload_upload_rate"]) def update(self): if not self.config["enable_system_tray"]: return # Tool tip text not available for appindicator if appindicator and self.config["enable_appindicator"]: return # Set the tool tip text max_download_speed = self.max_download_speed max_upload_speed = self.max_upload_speed if max_download_speed == -1: max_download_speed = _("Unlimited") else: max_download_speed = "%s %s" % (max_download_speed, _("KiB/s")) if max_upload_speed == -1: max_upload_speed = _("Unlimited") else: max_upload_speed = "%s %s" % (max_upload_speed, _("KiB/s")) msg = '%s\n%s: %s (%s)\n%s: %s (%s)' % (\ _("Deluge"), _("Down"), self.download_rate, \ max_download_speed, _("Up"), self.upload_rate, max_upload_speed) # Set the tooltip self.tray.set_tooltip(msg) self.send_status_request() def build_tray_bwsetsubmenu(self): # Create the Download speed list sub-menu submenu_bwdownset = common.build_menu_radio_list( self.config["tray_download_speed_list"], self.tray_setbwdown, self.max_download_speed, _("KiB/s"), show_notset=True, show_other=True) # Create the Upload speed list sub-menu submenu_bwupset = common.build_menu_radio_list( 
self.config["tray_upload_speed_list"], self.tray_setbwup, self.max_upload_speed, _("KiB/s"), show_notset=True, show_other=True) # Add the sub-menus to the tray menu self.tray_glade.get_widget("menuitem_download_limit").set_submenu( submenu_bwdownset) self.tray_glade.get_widget("menuitem_upload_limit").set_submenu( submenu_bwupset) # Show the sub-menus for all to see submenu_bwdownset.show_all() submenu_bwupset.show_all() # Re-set the menu to partly work around Launchpad bug #608219 if appindicator and self.config["enable_appindicator"]: self.indicator.set_menu(self.tray_menu) def disable(self,invert_app_ind_conf=False): """Disables the system tray icon or appindicator.""" try: if invert_app_ind_conf: app_ind_conf = not self.config["enable_appindicator"] else: app_ind_conf = self.config["enable_appindicator"] if appindicator and app_ind_conf: if hasattr(self, "_sig_win_hide"): self.window.window.disconnect(self._sig_win_hide) self.window.window.disconnect(self._sig_win_show) log.debug("Disabling the application indicator..") self.indicator.set_status(appindicator.STATUS_PASSIVE) del self.indicator else: log.debug("Disabling the system tray icon..") self.tray.set_visible(False) del self.tray del self.tray_glade del self.tray_menu except Exception, e: log.debug("Unable to disable system tray: %s", e) def blink(self, value): try: self.tray.set_blinking(value) except AttributeError: # If self.tray is not defined then ignore. This happens when the # tray icon is not being used. pass def on_enable_system_tray_set(self, key, value): """Called whenever the 'enable_system_tray' config key is modified""" if value: self.enable() else: self.disable() def on_enable_appindicator_set(self, key, value): """Called whenever the 'enable_appindicator' config key is modified""" if self.__enabled_set_once: self.disable(True) self.enable() self.__enabled_set_once = True def on_tray_clicked(self, icon): """Called when the tray icon is left clicked.""" self.blink(False) if self.window.active(): self.window.hide() else: if self.config["lock_tray"]: self.unlock_tray() else: self.window.present() def on_tray_popup(self, status_icon, button, activate_time): """Called when the tray icon is right clicked.""" self.blink(False) if self.window.visible(): self.tray_glade.get_widget("menuitem_show_deluge").set_active(True) else: self.tray_glade.get_widget("menuitem_show_deluge").set_active(False) popup_function = gtk.status_icon_position_menu if deluge.common.windows_check(): popup_function = None self.tray_menu.popup(None, None, popup_function, button, activate_time, status_icon) def on_menuitem_show_deluge_activate(self, menuitem): log.debug("on_menuitem_show_deluge_activate") if menuitem.get_active() and not self.window.visible(): if self.config["lock_tray"]: self.unlock_tray() else: self.window.present() elif not menuitem.get_active() and self.window.visible(): self.window.hide() def on_menuitem_add_torrent_activate(self, menuitem): log.debug("on_menuitem_add_torrent_activate") component.get("AddTorrentDialog").show() def on_menuitem_pause_all_activate(self, menuitem): log.debug("on_menuitem_pause_all_activate") client.core.pause_all_torrents() def on_menuitem_resume_all_activate(self, menuitem): log.debug("on_menuitem_resume_all_activate") client.core.resume_all_torrents() def on_menuitem_quit_activate(self, menuitem): log.debug("on_menuitem_quit_activate") if self.config["lock_tray"] and not self.window.visible(): self.unlock_tray() self.window.quit() def on_menuitem_quitdaemon_activate(self, menuitem): 
log.debug("on_menuitem_quitdaemon_activate") if self.config["lock_tray"] and not self.window.visible(): self.unlock_tray() self.window.quit(shutdown=True) def tray_setbwdown(self, widget, data=None): self.setbwlimit(widget, _("Set Maximum Download Speed"), "max_download_speed", "tray_download_speed_list", self.max_download_speed, "downloading.svg") def _on_window_hide(self, widget, data=None): """_on_window_hide - update the menuitem's status""" log.debug("_on_window_hide") self.tray_glade.get_widget("menuitem_show_deluge").set_active(False) def _on_window_show(self, widget, data=None): """_on_window_show - update the menuitem's status""" log.debug("_on_window_show") self.tray_glade.get_widget("menuitem_show_deluge").set_active(True) def tray_setbwup(self, widget, data=None): self.setbwlimit(widget, _("Set Maximum Upload Speed"), "max_upload_speed", "tray_upload_speed_list", self.max_upload_speed, "seeding.svg") def setbwlimit(self, widget, string, core_key, ui_key, default, image): """Sets the bandwidth limit based on the user selection.""" value = widget.get_children()[0].get_text().rstrip(" " + _("KiB/s")) if value == _("Unlimited"): value = -1 if value == _("Other..."): value = common.show_other_dialog(string, _("KiB/s"), None, image, default) if value == None: return # Set the config in the core client.core.set_config({core_key: value}) self.build_tray_bwsetsubmenu() def unlock_tray(self, is_showing_dlg=[False]): try: from hashlib import sha1 as sha_hash except ImportError: from sha import new as sha_hash log.debug("Show tray lock dialog") if is_showing_dlg[0]: return is_showing_dlg[0] = True entered_pass = gtk.Entry(25) entered_pass.set_activates_default(True) entered_pass.set_width_chars(25) entered_pass.set_visibility(False) tray_lock = gtk.Dialog(title="", parent=self.window.window, buttons=(gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL, gtk.STOCK_OK, gtk.RESPONSE_OK)) tray_lock.set_default_response(gtk.RESPONSE_OK) tray_lock.set_has_separator(False) tray_lock.set_border_width(5) hbox = gtk.HBox(spacing=5) image = gtk.image_new_from_file(deluge.common.get_pixmap("lock48.png")) image.set_alignment(0.5, 0.0) hbox.pack_start(image, False) vbox = gtk.VBox(spacing=5) hbox.pack_start(vbox, False) label = gtk.Label("<b><big>%s</big></b>" % _("Deluge is password protected!")) label.set_use_markup(True) label.set_alignment(0.0, 0.5) label.set_line_wrap(True) vbox.pack_start(label, False) tlabel = gtk.Label("<i>%s</i>" % _("Enter your password to continue")) tlabel.set_use_markup(True) tlabel.set_alignment(0.0, 0.5) tlabel.set_line_wrap(True) vbox.pack_start(tlabel, False) vbox.pack_start(entered_pass) tray_lock.vbox.pack_start(hbox) def on_response(dialog, response_id): if response_id == gtk.RESPONSE_OK: if self.config["tray_password"] == sha_hash(entered_pass.get_text()).hexdigest(): self.window.present() tray_lock.destroy() is_showing_dlg[0] = False tray_lock.connect("response", on_response) tray_lock.show_all()
gpl-3.0
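The tray lock in the Deluge code above never stores the password itself: unlock_tray hashes the entered text with SHA-1 and compares hexdigests against the stored tray_password value. A minimal Python 3 sketch of that check, with a hypothetical config dict and password:

from hashlib import sha1

def check_tray_password(config, entered):
    # Compare SHA-1 hexdigests, mirroring the on_response handler in unlock_tray.
    return config["tray_password"] == sha1(entered.encode("utf-8")).hexdigest()

config = {"tray_password": sha1(b"secret").hexdigest()}  # hypothetical stored digest
print(check_tray_password(config, "secret"))  # True
print(check_tray_password(config, "wrong"))   # False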
ovnicraft/odoo
addons/marketing_campaign/report/__init__.py
441
1071
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## import campaign_analysis # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
crlang/sublime-text---front-end-config
Data/Packages/mdpopups/st3/mdpopups/colorbox.py
5
7341
""" Sublime tooltip color box. Licensed under MIT Copyright (c) 2015 - 2016 Isaac Muse <[email protected]> """ from .png import Writer from .rgba import RGBA import base64 import io CHECK_LIGHT = "#FFFFFF" CHECK_DARK = "#CCCCCC" LIGHT = 0 DARK = 1 TOP = 1 RIGHT = 2 BOTTOM = 4 LEFT = 8 X = 0 Y = 1 __all__ = ('color_box',) def to_list(rgb, alpha=False): """ Break rgb channel itno a list. Take a color of the format #RRGGBBAA (alpha optional and will be stripped) and convert to a list with format [r, g, b]. """ if alpha: return [ int(rgb[1:3], 16), int(rgb[3:5], 16), int(rgb[5:7], 16), int(rgb[7:9], 16) if len(rgb) > 7 else 255 ] else: return [ int(rgb[1:3], 16), int(rgb[3:5], 16), int(rgb[5:7], 16) ] def checkered_color(color, background): """Mix color with the checkered color.""" checkered = RGBA(color) checkered.apply_alpha(background) return checkered.get_rgb() def get_border_size(direction, border_map): """Get size of border map.""" size = 0 if direction == X: if border_map & LEFT: size += 1 if border_map & RIGHT: size += 1 elif direction == Y: if border_map & TOP: size += 1 if border_map & BOTTOM: size += 1 return size def color_box_raw( colors, border="#000000", border2=None, height=32, width=32, border_size=1, check_size=4, max_colors=5, alpha=False, border_map=0xF ): """ Generate palette preview. Create a color box with the specified RGBA color(s) and RGB(A) border (alpha will be stripped out of border color). Colors is a list of colors, but only up to 5 Border can be up to 2 colors (double border). Hight, width and border thickness can all be defined. If using a transparent color, you can define the checkerboard pattern size that shows through. If using multiple colors, you can control the max colors to display. Colors currently are done horizontally only. Define size of swatch, border width, and size of checkerboard squares. """ assert height - (border_size * 2) >= 0, "Border size too big!" assert width - (border_size * 2) >= 0, "Border size too big!" 
# Gather preview colors preview_colors = [] count = max_colors if len(colors) >= max_colors else len(colors) border = to_list(border, False) if border2 is not None: border2 = to_list(border2, False) border1_size = border2_size = int(border_size / 2) border1_size += border_size % 2 if border2 is None: border1_size += border2_size border2_size = 0 if count: for c in range(0, count): if alpha: preview_colors.append( ( to_list(colors[c], True), to_list(colors[c], True) ) ) else: preview_colors.append( ( to_list(checkered_color(colors[c], CHECK_LIGHT)), to_list(checkered_color(colors[c], CHECK_DARK)) ) ) else: if alpha: preview_colors.append( (to_list('#00000000'), to_list('#00000000')) ) else: preview_colors.append( (to_list(CHECK_LIGHT), to_list(CHECK_DARK)) ) color_height = height - (border_size * get_border_size(Y, border_map)) color_width = width - (border_size * get_border_size(X, border_map)) if count: dividers = int(color_width / count) if color_width % count: dividers += 1 else: dividers = 0 color_size_x = color_width p = [] # Top Border if border_map & TOP: for x in range(0, border1_size): row = list(border * width) p.append(row) for x in range(0, border2_size): row = [] if border_map & LEFT and border_map & RIGHT: row += list(border * border1_size) row += list(border2 * border2_size) row += list(border2 * color_width) row += list(border2 * border2_size) row += list(border * border1_size) elif border_map & RIGHT: row += list(border2 * color_width) row += list(border2 * border2_size) row += list(border * border1_size) elif border_map & LEFT: row += list(border * border1_size) row += list(border2 * border2_size) row += list(border2 * color_width) else: row += list(border2 * color_width) p.append(row) check_color_y = DARK for y in range(0, color_height): index = 0 if y % check_size == 0: check_color_y = DARK if check_color_y == LIGHT else LIGHT # Left border row = [] if border_map & LEFT: row += list(border * border1_size) if border2: row += list(border2 * border2_size) check_color_x = check_color_y for x in range(0, color_size_x): if x != 0 and dividers != 0 and x % dividers == 0: index += 1 if x % check_size == 0: check_color_x = DARK if check_color_x == LIGHT else LIGHT row += (preview_colors[index][1] if check_color_x == DARK else preview_colors[index][0]) if border_map & RIGHT: # Right border if border2: row += list(border2 * border2_size) row += list(border * border1_size) p.append(row) if border_map & BOTTOM: # Bottom border for x in range(0, border2_size): row = [] if border_map & LEFT and border_map & RIGHT: row += list(border * border1_size) row += list(border2 * border2_size) row += list(border2 * color_width) row += list(border2 * border2_size) row += list(border * border1_size) elif border_map & LEFT: row += list(border * border1_size) row += list(border2 * border2_size) row += list(border2 * color_width) elif border_map & RIGHT: row += list(border2 * color_width) row += list(border2 * border2_size) row += list(border * border1_size) else: row += list(border2 * color_width) p.append(row) for x in range(0, border1_size): row = list(border * width) p.append(row) # Create bytes buffer for png with io.BytesIO() as f: # Write out png img = Writer(width, height, alpha=alpha) img.write(f, p) # Read out png bytes and base64 encode f.seek(0) return f.read() def color_box(*args, **kwargs): """Generate palette preview and base64 encode it.""" return "<img src=\"data:image/png;base64,%s\">" % ( base64.b64encode(color_box_raw(*args, **kwargs)).decode('ascii') )
mit
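to_list in the colorbox module above parses '#RRGGBB' / '#RRGGBBAA' strings into channel lists before the PNG rows are assembled. A self-contained restatement of that parsing, with illustrative values:

def hex_to_channels(rgb, alpha=False):
    # Parse '#RRGGBB' or '#RRGGBBAA' into [r, g, b(, a)], as to_list does;
    # a missing alpha byte defaults to fully opaque (255).
    channels = [int(rgb[i:i + 2], 16) for i in (1, 3, 5)]
    if alpha:
        channels.append(int(rgb[7:9], 16) if len(rgb) > 7 else 255)
    return channels

print(hex_to_channels("#FF8800"))           # [255, 136, 0]
print(hex_to_channels("#FF880080", True))   # [255, 136, 0, 128]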
tedder/ansible
lib/ansible/modules/network/cnos/cnos_linkagg.py
27
12973
#!/usr/bin/python # -*- coding: utf-8 -*- from __future__ import (absolute_import, division, print_function) __metaclass__ = type # # Copyright (C) 2017 Lenovo, Inc. # (c) 2017, Ansible by Red Hat, inc # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # # Module to work on Link Aggregation with Lenovo Switches # Lenovo Networking # ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'network'} DOCUMENTATION = """ --- module: cnos_linkagg version_added: "2.8" author: "Anil Kumar Muraleedharan (@auraleedhar)" short_description: Manage link aggregation groups on Lenovo CNOS devices description: - This module provides declarative management of link aggregation groups on Lenovo CNOS network devices. notes: - Tested against CNOS 10.8.1 options: group: description: - Channel-group number for the port-channel Link aggregation group. Range 1-255. mode: description: - Mode of the link aggregation group. choices: ['active', 'on', 'passive'] members: description: - List of members of the link aggregation group. aggregate: description: List of link aggregation definitions. state: description: - State of the link aggregation group. default: present choices: ['present', 'absent'] purge: description: - Purge links not defined in the I(aggregate) parameter. type: bool default: no provider: description: - B(Deprecated) - "Starting with Ansible 2.5 we recommend using C(connection: network_cli)." - For more information please see the L(CNOS Platform Options guide, ../network/user_guide/platform_cnos.html). - HORIZONTALLINE - A dict object containing connection details. version_added: "2.8" suboptions: host: description: - Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport. required: true port: description: - Specifies the port to use when building the connection to the remote device. default: 22 username: description: - Configures the username to use to authenticate the connection to the remote device. This value is used to authenticate the SSH session. If the value is not specified in the task, the value of environment variable C(ANSIBLE_NET_USERNAME) will be used instead. password: description: - Specifies the password to use to authenticate the connection to the remote device. This value is used to authenticate the SSH session. If the value is not specified in the task, the value of environment variable C(ANSIBLE_NET_PASSWORD) will be used instead. timeout: description: - Specifies the timeout in seconds for communicating with the network device for either connecting or sending commands. If the timeout is exceeded before the operation is completed, the module will error. default: 10 ssh_keyfile: description: - Specifies the SSH key to use to authenticate the connection to the remote device. This value is the path to the key used to authenticate the SSH session. 
If the value is not specified in the task, the value of environment variable C(ANSIBLE_NET_SSH_KEYFILE) will be used instead. authorize: description: - Instructs the module to enter privileged mode on the remote device before sending any commands. If not specified, the device will attempt to execute all commands in non-privileged mode. If the value is not specified in the task, the value of environment variable C(ANSIBLE_NET_AUTHORIZE) will be used instead. type: bool default: 'no' auth_pass: description: - Specifies the password to use if required to enter privileged mode on the remote device. If I(authorize) is false, then this argument does nothing. If the value is not specified in the task, the value of environment variable C(ANSIBLE_NET_AUTH_PASS) will be used instead. """ EXAMPLES = """ - name: create link aggregation group cnos_linkagg: group: 10 state: present - name: delete link aggregation group cnos_linkagg: group: 10 state: absent - name: set link aggregation group to members cnos_linkagg: group: 200 mode: active members: - Ethernet1/33 - Ethernet1/44 - name: remove link aggregation group from GigabitEthernet0/0 cnos_linkagg: group: 200 mode: active members: - Ethernet1/33 - name: Create aggregate of linkagg definitions cnos_linkagg: aggregate: - { group: 3, mode: on, members: [Ethernet1/33] } - { group: 100, mode: passive, members: [Ethernet1/44] } """ RETURN = """ commands: description: The list of configuration mode commands to send to the device returned: always, except for the platforms that use Netconf transport to manage the device. type: list sample: - interface port-channel 30 - interface Ethernet1/33 - channel-group 30 mode on - no interface port-channel 30 """ import re from copy import deepcopy from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.network.common.config import CustomNetworkConfig from ansible.module_utils.network.common.utils import remove_default_spec from ansible.module_utils.network.cnos.cnos import get_config, load_config from ansible.module_utils.network.cnos.cnos import cnos_argument_spec def search_obj_in_list(group, lst): for o in lst: if o['group'] == group: return o def map_obj_to_commands(updates, module): commands = list() want, have = updates purge = module.params['purge'] for w in want: group = w['group'] mode = w['mode'] members = w.get('members') or [] state = w['state'] del w['state'] obj_in_have = search_obj_in_list(group, have) if state == 'absent': if obj_in_have: commands.append('no interface port-channel {0}'.format(group)) elif state == 'present': cmd = ['interface port-channel {0}'.format(group), 'exit'] if not obj_in_have: if not group: module.fail_json(msg='group is a required option') commands.extend(cmd) if members: for m in members: commands.append('interface {0}'.format(m)) commands.append('channel-group {0} mode {1}'.format(group, mode)) else: if members: if 'members' not in obj_in_have.keys(): for m in members: commands.extend(cmd) commands.append('interface {0}'.format(m)) commands.append('channel-group {0} mode {1}'.format(group, mode)) elif set(members) != set(obj_in_have['members']): missing_members = list(set(members) - set(obj_in_have['members'])) for m in missing_members: commands.extend(cmd) commands.append('interface {0}'.format(m)) commands.append('channel-group {0} mode {1}'.format(group, mode)) superfluous_members = list(set(obj_in_have['members']) - set(members)) for m in superfluous_members: commands.extend(cmd) commands.append('interface {0}'.format(m)) commands.append('no 
channel-group') if purge: for h in have: obj_in_want = search_obj_in_list(h['group'], want) if not obj_in_want: commands.append('no interface port-channel {0}'.format(h['group'])) return commands def map_params_to_obj(module): obj = [] aggregate = module.params.get('aggregate') if aggregate: for item in aggregate: for key in item: if item.get(key) is None: item[key] = module.params[key] d = item.copy() d['group'] = str(d['group']) obj.append(d) else: obj.append({ 'group': str(module.params['group']), 'mode': module.params['mode'], 'members': module.params['members'], 'state': module.params['state'] }) return obj def parse_mode(module, config, group, member): mode = None netcfg = CustomNetworkConfig(indent=1, contents=config) parents = ['interface {0}'.format(member)] body = netcfg.get_section(parents) match_int = re.findall(r'interface {0}\n'.format(member), body, re.M) if match_int: match = re.search(r'channel-group {0} mode (\S+)'.format(group), body, re.M) if match: mode = match.group(1) return mode def parse_members(module, config, group): members = [] for line in config.strip().split('!'): l = line.strip() if l.startswith('interface'): match_group = re.findall(r'channel-group {0} mode'.format(group), l, re.M) if match_group: match = re.search(r'interface (\S+)', l, re.M) if match: members.append(match.group(1)) return members def get_channel(module, config, group): match = re.findall(r'^interface (\S+)', config, re.M) if not match: return {} channel = {} for item in set(match): member = item channel['mode'] = parse_mode(module, config, group, member) channel['members'] = parse_members(module, config, group) return channel def map_config_to_obj(module): objs = list() config = get_config(module) for line in config.split('\n'): l = line.strip() match = re.search(r'interface port-channel(\S+)', l, re.M) if match: obj = {} group = match.group(1) obj['group'] = group obj.update(get_channel(module, config, group)) objs.append(obj) return objs def main(): """ main entry point for module execution """ element_spec = dict( group=dict(type='int'), mode=dict(choices=['active', 'on', 'passive']), members=dict(type='list'), state=dict(default='present', choices=['present', 'absent']) ) aggregate_spec = deepcopy(element_spec) aggregate_spec['group'] = dict(required=True) required_one_of = [['group', 'aggregate']] required_together = [['members', 'mode']] mutually_exclusive = [['group', 'aggregate']] # remove default in aggregate spec, to handle common arguments remove_default_spec(aggregate_spec) argument_spec = dict( aggregate=dict(type='list', elements='dict', options=aggregate_spec, required_together=required_together), purge=dict(default=False, type='bool') ) argument_spec.update(element_spec) argument_spec.update(cnos_argument_spec) module = AnsibleModule(argument_spec=argument_spec, required_one_of=required_one_of, required_together=required_together, mutually_exclusive=mutually_exclusive, supports_check_mode=True) warnings = list() result = {'changed': False} if warnings: result['warnings'] = warnings want = map_params_to_obj(module) have = map_config_to_obj(module) commands = map_obj_to_commands((want, have), module) result['commands'] = commands if commands: if not module.check_mode: load_config(module, commands) result['changed'] = True module.exit_json(**result) if __name__ == '__main__': main()
gpl-3.0
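The member reconciliation inside map_obj_to_commands above reduces to set arithmetic between wanted and configured members: the difference in one direction yields channel-group commands, the other direction yields 'no channel-group'. A standalone sketch of that step, with invented interface names:

def member_changes(want, have):
    # Members to add get 'channel-group <n> mode <m>'; members to drop get
    # 'no channel-group', matching the logic in map_obj_to_commands.
    missing = sorted(set(want) - set(have))
    superfluous = sorted(set(have) - set(want))
    return missing, superfluous

add, drop = member_changes(["Ethernet1/33", "Ethernet1/44"],
                           ["Ethernet1/44", "Ethernet1/45"])
print(add)   # ['Ethernet1/33']
print(drop)  # ['Ethernet1/45']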
Tom3141/Marlin
create_speed_lookuptable.py
333
1382
#!/usr/bin/env python """ Generate the stepper delay lookup table for Marlin firmware. """ import argparse __author__ = "Ben Gamari <[email protected]>" __copyright__ = "Copyright 2012, Ben Gamari" __license__ = "GPL" parser = argparse.ArgumentParser(description=__doc__) parser.add_argument('-f', '--cpu-freq', type=int, default=16, help='CPU clockrate in MHz (default=16)') parser.add_argument('-d', '--divider', type=int, default=8, help='Timer/counter pre-scale divider (default=8)') args = parser.parse_args() cpu_freq = args.cpu_freq * 1000000 timer_freq = cpu_freq / args.divider print "#ifndef SPEED_LOOKUPTABLE_H" print "#define SPEED_LOOKUPTABLE_H" print print '#include "Marlin.h"' print print "const uint16_t speed_lookuptable_fast[256][2] PROGMEM = {" a = [ timer_freq / ((i*256)+(args.cpu_freq*2)) for i in range(256) ] b = [ a[i] - a[i+1] for i in range(255) ] b.append(b[-1]) for i in range(32): print " ", for j in range(8): print "{%d, %d}," % (a[8*i+j], b[8*i+j]), print print "};" print print "const uint16_t speed_lookuptable_slow[256][2] PROGMEM = {" a = [ timer_freq / ((i*8)+(args.cpu_freq*2)) for i in range(256) ] b = [ a[i] - a[i+1] for i in range(255) ] b.append(b[-1]) for i in range(32): print " ", for j in range(8): print "{%d, %d}," % (a[8*i+j], b[8*i+j]), print print "};" print print "#endif"
gpl-3.0
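The generator above is Python 2 (statement-form print). Each fast-table entry is just integer division of the prescaled timer frequency, with per-entry deltas in the second column; the same arithmetic in Python 3 for the default 16 MHz clock and /8 divider:

cpu_freq_mhz = 16
timer_freq = cpu_freq_mhz * 1_000_000 // 8  # 16 MHz clock, /8 prescaler -> 2 MHz

# Fast table: denominators step by 256, offset by 2 * cpu_freq_mhz, with the
# same truncating integer division the original Python 2 '/' performs.
a = [timer_freq // (i * 256 + cpu_freq_mhz * 2) for i in range(256)]
b = [a[i] - a[i + 1] for i in range(255)]
b.append(b[-1])

print(a[:3])  # [62500, 6944, 3676]
print(b[:3])  # [55556, 3268, 1176]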
pniedzielski/fb-hackathon-2013-11-21
src/repl.it/jsrepl/extern/python/closured/lib/python2.7/lib2to3/fixes/fix_types.py
304
1806
# Copyright 2007 Google, Inc. All Rights Reserved. # Licensed to PSF under a Contributor Agreement. """Fixer for removing uses of the types module. These work for only the known names in the types module. The forms above can include types. or not. ie, It is assumed the module is imported either as: import types from types import ... # either * or specific types The import statements are not modified. There should be another fixer that handles at least the following constants: type([]) -> list type(()) -> tuple type('') -> str """ # Local imports from ..pgen2 import token from .. import fixer_base from ..fixer_util import Name _TYPE_MAPPING = { 'BooleanType' : 'bool', 'BufferType' : 'memoryview', 'ClassType' : 'type', 'ComplexType' : 'complex', 'DictType': 'dict', 'DictionaryType' : 'dict', 'EllipsisType' : 'type(Ellipsis)', #'FileType' : 'io.IOBase', 'FloatType': 'float', 'IntType': 'int', 'ListType': 'list', 'LongType': 'int', 'ObjectType' : 'object', 'NoneType': 'type(None)', 'NotImplementedType' : 'type(NotImplemented)', 'SliceType' : 'slice', 'StringType': 'bytes', # XXX ? 'StringTypes' : 'str', # XXX ? 'TupleType': 'tuple', 'TypeType' : 'type', 'UnicodeType': 'str', 'XRangeType' : 'range', } _pats = ["power< 'types' trailer< '.' name='%s' > >" % t for t in _TYPE_MAPPING] class FixTypes(fixer_base.BaseFix): BM_compatible = True PATTERN = '|'.join(_pats) def transform(self, node, results): new_value = unicode(_TYPE_MAPPING.get(results["name"].value)) if new_value: return Name(new_value, prefix=node.prefix) return None
agpl-3.0
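The fixer above drives everything from the _TYPE_MAPPING table via lib2to3's pattern grammar. For illustration only, the same mapping can be exercised with a plain regex, a rough stand-in for the real "power< 'types' trailer< '.' name >>" match rather than how lib2to3 actually applies it:

import re

TYPE_MAPPING = {"IntType": "int", "ListType": "list", "NoneType": "type(None)"}

def replace_types(source):
    # Rewrite 'types.<Name>' attribute accesses using the mapping;
    # names outside the table are left untouched.
    return re.sub(
        r"\btypes\.([A-Za-z]+)\b",
        lambda m: TYPE_MAPPING.get(m.group(1), m.group(0)),
        source,
    )

print(replace_types("isinstance(x, types.IntType) or y is types.NoneType"))
# isinstance(x, int) or y is type(None)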
crocs-muni/RTTWebInterface
SubmitExperiment/views.py
1
7105
from django.shortcuts import render, redirect from django.conf import settings from django.core.files.storage import FileSystemStorage from django.contrib import messages from .forms import ExperimentForm from .models import PredefinedConfiguration from .models import AccessCode import subprocess import shlex import os import _thread import logging logger = logging.getLogger(__name__) # Checks whether user is authenticated. If he is not, # access code (if any) is checked. If this function # returns anything other than None it is rendered # appropriate error page and authentication failed. def get_auth_error(request): # Is user authenticated? if not request.user.is_authenticated: # User is not logged in. But maybe he has stored access code in session? access_code = request.session.get('access_code', None) if access_code is not None: # Yup, he has. Let's check it. try: # Is the code even in the database? ac = AccessCode.objects.get(access_code=access_code) # Hmm, yes it is. But is it still valid? if not ac.is_valid(): # No it is not. Get out. return render(request, 'SubmitExperiment/access_code_expired.html') # Okay, he has stored correct code that is still valid, let him in. return None except AccessCode.DoesNotExist: # Wrong code, sorry buddy. return render(request, 'SubmitExperiment/access_code_bad.html') else: # Nope, he has not. So kick him out, he has no business here. return render(request, 'access_denied.html') else: # User is logged in, everything is okay. return None def submit_experiment(form, email, in_file_path, cfg_file_path): args_str = settings.SUBMIT_EXP_BINARY if form.cleaned_data['batt_sts']: args_str += ' --nist_sts ' if form.cleaned_data['batt_die']: args_str += ' --dieharder ' if form.cleaned_data['batt_tu_sc']: args_str += ' --tu01_smallcrush ' if form.cleaned_data['batt_tu_c']: args_str += ' --tu01_crush ' if form.cleaned_data['batt_tu_bc']: args_str += ' --tu01_bigcrush ' if form.cleaned_data['batt_tu_rab']: args_str += ' --tu01_rabbit ' if form.cleaned_data['batt_tu_ab']: args_str += ' --tu01_alphabit ' if form.cleaned_data['batt_tu_bab']: args_str += ' --tu01_blockalphabit ' if form.cleaned_data['batt_bool1']: args_str += ' --booltest-1 ' if form.cleaned_data['batt_bool2']: args_str += ' --booltest-2 ' if email is not None and len(email) > 0: args_str += ' --email {} '.format(email) args_str += ' --name \'{}\' --cfg \'{}\' --file \'{}\' ' \ .format(form.cleaned_data['exp_name'], cfg_file_path, in_file_path) log_file = os.path.splitext(settings.SUBMIT_EXP_BINARY)[0] log_file += ".log" with open(log_file, 'a') as f_null: logger.info('Executing: %s' % (args_str,)) subprocess.call(shlex.split(args_str), stdout=f_null, stderr=subprocess.STDOUT) # Remove files on the server os.remove(in_file_path) os.remove(cfg_file_path) # Sole purpose of this view is to save access code into # session. # Code will be verified when accessing index (so after redirect) def gain_access(request, access_code): request.session['access_code'] = access_code return redirect('SubmitExperiment:index') # Create your views here. 
def index(request): # Authentication auth_error = get_auth_error(request) if auth_error is not None: return auth_error # Finally process the request if request.method != 'POST': # GET processing # Render the form return render(request, 'SubmitExperiment/index.html', {'form': ExperimentForm()}) # POST processing # Validate form and submit experiment form = ExperimentForm(request.POST, request.FILES) if not form.is_valid(): messages.error(request, 'Submitted form was not valid.') messages.info(request, 'Please fix the errors and try again.') return render(request, 'SubmitExperiment/index.html', {'form': form}) # I shouldn't be validating fields here, but in form # I can't know whether user is logged in or not if request.user.is_authenticated: email = request.user.email else: email = form.cleaned_data['author_email'] if email is None or len(email) == 0: messages.error(request, 'Submitted form was not valid.') messages.info(request, 'Please fix the errors and try again.') form.add_error('author_email', 'This field is required.') return render(request, 'SubmitExperiment/index.html', {'form': form}) # Form is valid here, continue in_file = request.FILES['in_file'] if form.cleaned_data['default_cfg']: # Picking default configuration if possible config_list = PredefinedConfiguration.objects.all().order_by('required_bytes') if len(config_list) == 0: raise AssertionError('there are no predefined configurations') last_leq_id = None for c in config_list: if in_file.size >= c.required_bytes: last_leq_id = c.id else: break if last_leq_id is None: last_leq_id = config_list[0].id messages.warning(request, "Provided file is too small for all configurations.") chosen_cfg = PredefinedConfiguration.objects.get(id=last_leq_id) messages.info(request, "Best possible configuration was chosen and requires {} bytes." .format(chosen_cfg.required_bytes)) cfg_file = chosen_cfg.cfg_file elif form.cleaned_data['choose_cfg'] is not None: # User picked one of predefined configurations cfg = PredefinedConfiguration.objects.get(id=form.cleaned_data['choose_cfg'].id) cfg_file = cfg.cfg_file if in_file.size < cfg.required_bytes: messages.warning(request, "Your file is smaller than" " recommended file size for chosen configuration.") messages.warning(request, "Recommended file size: {} bytes".format(cfg.required_bytes)) messages.warning(request, "Size of provided file: {} bytes".format(in_file.size)) else: # User provided his own configuration cfg_file = request.FILES['own_cfg'] fs = FileSystemStorage() in_file_path = fs.path(fs.save(in_file.name, in_file)) cfg_file_path = fs.path(fs.save(cfg_file.name, cfg_file)) try: _thread.start_new_thread(submit_experiment, (form, email, in_file_path, cfg_file_path)) except BaseException as e: print('Could not start a thread: {}'.format(e)) messages.success(request, 'Experiment {} was created.'.format(form.cleaned_data['exp_name'])) return redirect('SubmitExperiment:index')
mit
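The default-configuration pick in the index view above walks configurations sorted ascending by required_bytes and keeps the last one the uploaded file still satisfies, falling back to the smallest with a warning. The same selection in isolation, with invented (name, required_bytes) pairs:

def pick_config(configs, file_size):
    # configs must be sorted ascending by required_bytes; keep the largest
    # one whose requirement the file meets, else fall back to the smallest.
    chosen = None
    for name, required in configs:
        if file_size >= required:
            chosen = (name, required)
        else:
            break
    return chosen or configs[0]

configs = [("small", 1_000), ("medium", 100_000), ("large", 10_000_000)]
print(pick_config(configs, 500_000))  # ('medium', 100000)
print(pick_config(configs, 100))      # ('small', 1000); the view also warns here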
o3project/odenos
src/main/python/org/o3project/odenos/remoteobject/transport/__init__.py
233
1026
# -*- coding:utf-8 -*- # Copyright 2015 NEC Corporation. # # # # Licensed under the Apache License, Version 2.0 (the "License"); # # you may not use this file except in compliance with the License. # # You may obtain a copy of the License at # # # # http://www.apache.org/licenses/LICENSE-2.0 # # # # Unless required by applicable law or agreed to in writing, software # # distributed under the License is distributed on an "AS IS" BASIS, # # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # # See the License for the specific language governing permissions and # # limitations under the License. #
apache-2.0
ltn22/SCHC
python/examples/coap_client/CoAP.py
1
3377
''' SCHC compressor, Copyright (c) <2017><IMT Atlantique and Philippe Clavier> This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/> ''' import struct CON = 0 """Confirmable message type.""" NON = 1 """Non-confirmable message type.""" ACK = 2 """Acknowledgement message type.""" RST = 3 """Reset message type""" # types = {0: 'CON', # 1: 'NON', # 2: 'ACK', # 3: 'RST'} EMPTY = 0 GET = 1 POST = 2 PUT = 3 DELETE = 4 mid = 1 class Message: """ class CoAP for client and server """ def __init__( self, buf = b'' ): """ Coap message constructor """ self.buffer = buf self.option = 0 def __dump_buffer( self ): """Dumps the content of the message as hexa""" for bytes in self.buffer: print ( hex( bytes ), end = '-' ) def new_header ( self, type = CON, code = GET, token = 0x12, midSize = 16 ): "Creates a new message header" global mid self.buffer = bytearray() # First 32 bit word byte = ( ( 1 ) << 6 ) | ( type << 4 ) | 0x01 # need to compute token length # /!\ Token is one byte long, should be changed to allow different sizes self.buffer = struct.pack ( '!BBHB', byte, code, mid, token ) # In some cases the Message ID size must be limited to a smaller number of bits # To allow rule selection, especially with MSB the size must be controlled mid = ( mid + 1 ) % ( 1 << midSize ) if ( mid == 0 ): mid = 1 # mid = 0 may be ack with a random number print( "MID = ", mid ) def __add_option_TL ( self, T, L ): """ adds an option at a specific length """ delta = T - self.option self.option = T if ( delta < 13 ) and ( L < 13 ) is True: self.buffer += struct.pack( 'B', ( delta << 4 ) | L ) else: print( 'Not Done' ) def add_option_path( self, path = '' ): "Adds a path element to the message" self.__add_option_TL( 11, len( path ) ) self.buffer += path def add_option_query( self, query = '' ): "Adds a CoAP query to the message" self.__add_option_TL( 15, len( query ) ) self.buffer += query def end_option( self ): "Marks the end of the coap option list" self.buffer += struct.pack( 'B', 0xFF ) def add_value( self, pvalue = '' ): '''Adds a value to the message''' print ( 'Type = ', type( pvalue ) ) if isinstance(pvalue, str): self.buffer = pvalue self.__dump_buffer() def to_coap( self ): """ Returns the message's buffer""" return self.buffer def type ( self ): """Returns the message's CoAP type""" return( ( self.buffer[0] & 0x30 ) >> 4 )
gpl-3.0
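new_header above packs the fixed CoAP header plus a one-byte token with struct. A standalone sketch of the same packing, and of reading the type back out the way Message.type() does (version 1, token length fixed at 1 byte, as in the code above):

import struct

CON, NON, ACK, RST = range(4)

def pack_header(msg_type, code, mid, token):
    # Byte 0: version=1 in bits 6-7, message type in bits 4-5,
    # token length=1 in bits 0-3.
    first = (1 << 6) | (msg_type << 4) | 0x01
    return struct.pack("!BBHB", first, code, mid, token)

buf = pack_header(CON, 1, 42, 0x12)  # code 1 == GET
print(buf.hex())                     # '4101002a12'
print((buf[0] & 0x30) >> 4)          # 0 == CON, as Message.type() computes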
frishberg/django
django/contrib/admin/widgets.py
14
14518
""" Form Widget classes specific to the Django admin site. """ from __future__ import unicode_literals import copy from django import forms from django.db.models.deletion import CASCADE from django.forms.utils import flatatt from django.forms.widgets import RadioFieldRenderer from django.template.loader import render_to_string from django.urls import reverse from django.urls.exceptions import NoReverseMatch from django.utils import six from django.utils.encoding import force_text from django.utils.html import format_html, format_html_join, smart_urlquote from django.utils.safestring import mark_safe from django.utils.text import Truncator from django.utils.translation import ugettext as _ class FilteredSelectMultiple(forms.SelectMultiple): """ A SelectMultiple with a JavaScript filter interface. Note that the resulting JavaScript assumes that the jsi18n catalog has been loaded in the page """ @property def media(self): js = ["core.js", "SelectBox.js", "SelectFilter2.js"] return forms.Media(js=["admin/js/%s" % path for path in js]) def __init__(self, verbose_name, is_stacked, attrs=None, choices=()): self.verbose_name = verbose_name self.is_stacked = is_stacked super(FilteredSelectMultiple, self).__init__(attrs, choices) def render(self, name, value, attrs=None): if attrs is None: attrs = {} attrs['class'] = 'selectfilter' if self.is_stacked: attrs['class'] += 'stacked' attrs['data-field-name'] = self.verbose_name attrs['data-is-stacked'] = int(self.is_stacked) output = super(FilteredSelectMultiple, self).render(name, value, attrs) return mark_safe(output) class AdminDateWidget(forms.DateInput): @property def media(self): js = ["calendar.js", "admin/DateTimeShortcuts.js"] return forms.Media(js=["admin/js/%s" % path for path in js]) def __init__(self, attrs=None, format=None): final_attrs = {'class': 'vDateField', 'size': '10'} if attrs is not None: final_attrs.update(attrs) super(AdminDateWidget, self).__init__(attrs=final_attrs, format=format) class AdminTimeWidget(forms.TimeInput): @property def media(self): js = ["calendar.js", "admin/DateTimeShortcuts.js"] return forms.Media(js=["admin/js/%s" % path for path in js]) def __init__(self, attrs=None, format=None): final_attrs = {'class': 'vTimeField', 'size': '8'} if attrs is not None: final_attrs.update(attrs) super(AdminTimeWidget, self).__init__(attrs=final_attrs, format=format) class AdminSplitDateTime(forms.SplitDateTimeWidget): """ A SplitDateTime Widget that has some admin-specific styling. """ def __init__(self, attrs=None): widgets = [AdminDateWidget, AdminTimeWidget] # Note that we're calling MultiWidget, not SplitDateTimeWidget, because # we want to define widgets. 
forms.MultiWidget.__init__(self, widgets, attrs) def format_output(self, rendered_widgets): return format_html('<p class="datetime">{} {}<br />{} {}</p>', _('Date:'), rendered_widgets[0], _('Time:'), rendered_widgets[1]) class AdminRadioFieldRenderer(RadioFieldRenderer): def render(self): """Outputs a <ul> for this set of radio fields.""" return format_html('<ul{}>\n{}\n</ul>', flatatt(self.attrs), format_html_join('\n', '<li>{}</li>', ((force_text(w),) for w in self))) class AdminRadioSelect(forms.RadioSelect): renderer = AdminRadioFieldRenderer class AdminFileWidget(forms.ClearableFileInput): template_with_initial = ( '<p class="file-upload">%s</p>' % forms.ClearableFileInput.template_with_initial ) template_with_clear = ( '<span class="clearable-file-input">%s</span>' % forms.ClearableFileInput.template_with_clear ) def url_params_from_lookup_dict(lookups): """ Converts the type of lookups specified in a ForeignKey limit_choices_to attribute to a dictionary of query parameters """ params = {} if lookups and hasattr(lookups, 'items'): items = [] for k, v in lookups.items(): if callable(v): v = v() if isinstance(v, (tuple, list)): v = ','.join(str(x) for x in v) elif isinstance(v, bool): v = ('0', '1')[v] else: v = six.text_type(v) items.append((k, v)) params.update(dict(items)) return params class ForeignKeyRawIdWidget(forms.TextInput): """ A Widget for displaying ForeignKeys in the "raw_id" interface rather than in a <select> box. """ def __init__(self, rel, admin_site, attrs=None, using=None): self.rel = rel self.admin_site = admin_site self.db = using super(ForeignKeyRawIdWidget, self).__init__(attrs) def render(self, name, value, attrs=None): rel_to = self.rel.model if attrs is None: attrs = {} extra = [] if rel_to in self.admin_site._registry: # The related object is registered with the same AdminSite related_url = reverse( 'admin:%s_%s_changelist' % ( rel_to._meta.app_label, rel_to._meta.model_name, ), current_app=self.admin_site.name, ) params = self.url_parameters() if params: url = '?' + '&amp;'.join('%s=%s' % (k, v) for k, v in params.items()) else: url = '' if "class" not in attrs: attrs['class'] = 'vForeignKeyRawIdAdminField' # The JavaScript code looks for this hook. # TODO: "lookup_id_" is hard-coded here. This should instead use # the correct API to determine the ID dynamically. extra.append( '<a href="%s%s" class="related-lookup" id="lookup_id_%s" title="%s"></a>' % (related_url, url, name, _('Lookup')) ) output = [super(ForeignKeyRawIdWidget, self).render(name, value, attrs)] + extra if value: output.append(self.label_for_value(value)) return mark_safe(''.join(output)) def base_url_parameters(self): limit_choices_to = self.rel.limit_choices_to if callable(limit_choices_to): limit_choices_to = limit_choices_to() return url_params_from_lookup_dict(limit_choices_to) def url_parameters(self): from django.contrib.admin.views.main import TO_FIELD_VAR params = self.base_url_parameters() params.update({TO_FIELD_VAR: self.rel.get_related_field().name}) return params def label_for_value(self, value): key = self.rel.get_related_field().name try: obj = self.rel.model._default_manager.using(self.db).get(**{key: value}) except (ValueError, self.rel.model.DoesNotExist): return '' label = '&nbsp;<strong>{}</strong>' text = Truncator(obj).words(14, truncate='...') try: change_url = reverse( '%s:%s_%s_change' % ( self.admin_site.name, obj._meta.app_label, obj._meta.object_name.lower(), ), args=(obj.pk,) ) except NoReverseMatch: pass # Admin not registered for target model. 
else: text = format_html('<a href="{}">{}</a>', change_url, text) return format_html(label, text) class ManyToManyRawIdWidget(ForeignKeyRawIdWidget): """ A Widget for displaying ManyToMany ids in the "raw_id" interface rather than in a <select multiple> box. """ def render(self, name, value, attrs=None): if attrs is None: attrs = {} if self.rel.model in self.admin_site._registry: # The related object is registered with the same AdminSite attrs['class'] = 'vManyToManyRawIdAdminField' if value: value = ','.join(force_text(v) for v in value) else: value = '' return super(ManyToManyRawIdWidget, self).render(name, value, attrs) def url_parameters(self): return self.base_url_parameters() def label_for_value(self, value): return '' def value_from_datadict(self, data, files, name): value = data.get(name) if value: return value.split(',') class RelatedFieldWidgetWrapper(forms.Widget): """ This class is a wrapper to a given widget to add the add icon for the admin interface. """ template = 'admin/related_widget_wrapper.html' def __init__(self, widget, rel, admin_site, can_add_related=None, can_change_related=False, can_delete_related=False): self.needs_multipart_form = widget.needs_multipart_form self.attrs = widget.attrs self.choices = widget.choices self.widget = widget self.rel = rel # Backwards compatible check for whether a user can add related # objects. if can_add_related is None: can_add_related = rel.model in admin_site._registry self.can_add_related = can_add_related # XXX: The UX does not support multiple selected values. multiple = getattr(widget, 'allow_multiple_selected', False) self.can_change_related = not multiple and can_change_related # XXX: The deletion UX can be confusing when dealing with cascading deletion. cascade = getattr(rel, 'on_delete', None) is CASCADE self.can_delete_related = not multiple and not cascade and can_delete_related # so we can check if the related object is registered with this AdminSite self.admin_site = admin_site def __deepcopy__(self, memo): obj = copy.copy(self) obj.widget = copy.deepcopy(self.widget, memo) obj.attrs = self.widget.attrs memo[id(self)] = obj return obj @property def is_hidden(self): return self.widget.is_hidden @property def media(self): return self.widget.media def get_related_url(self, info, action, *args): return reverse("admin:%s_%s_%s" % (info + (action,)), current_app=self.admin_site.name, args=args) def render(self, name, value, *args, **kwargs): from django.contrib.admin.views.main import IS_POPUP_VAR, TO_FIELD_VAR rel_opts = self.rel.model._meta info = (rel_opts.app_label, rel_opts.model_name) self.widget.choices = self.choices url_params = '&'.join("%s=%s" % param for param in [ (TO_FIELD_VAR, self.rel.get_related_field().name), (IS_POPUP_VAR, 1), ]) context = { 'widget': self.widget.render(name, value, *args, **kwargs), 'name': name, 'url_params': url_params, 'model': rel_opts.verbose_name, } if self.can_change_related: change_related_template_url = self.get_related_url(info, 'change', '__fk__') context.update( can_change_related=True, change_related_template_url=change_related_template_url, ) if self.can_add_related: add_related_url = self.get_related_url(info, 'add') context.update( can_add_related=True, add_related_url=add_related_url, ) if self.can_delete_related: delete_related_template_url = self.get_related_url(info, 'delete', '__fk__') context.update( can_delete_related=True, delete_related_template_url=delete_related_template_url, ) return mark_safe(render_to_string(self.template, context)) def build_attrs(self, 
extra_attrs=None, **kwargs): "Helper function for building an attribute dictionary." self.attrs = self.widget.build_attrs(extra_attrs=None, **kwargs) return self.attrs def value_from_datadict(self, data, files, name): return self.widget.value_from_datadict(data, files, name) def id_for_label(self, id_): return self.widget.id_for_label(id_) class AdminTextareaWidget(forms.Textarea): def __init__(self, attrs=None): final_attrs = {'class': 'vLargeTextField'} if attrs is not None: final_attrs.update(attrs) super(AdminTextareaWidget, self).__init__(attrs=final_attrs) class AdminTextInputWidget(forms.TextInput): def __init__(self, attrs=None): final_attrs = {'class': 'vTextField'} if attrs is not None: final_attrs.update(attrs) super(AdminTextInputWidget, self).__init__(attrs=final_attrs) class AdminEmailInputWidget(forms.EmailInput): def __init__(self, attrs=None): final_attrs = {'class': 'vTextField'} if attrs is not None: final_attrs.update(attrs) super(AdminEmailInputWidget, self).__init__(attrs=final_attrs) class AdminURLFieldWidget(forms.URLInput): def __init__(self, attrs=None): final_attrs = {'class': 'vURLField'} if attrs is not None: final_attrs.update(attrs) super(AdminURLFieldWidget, self).__init__(attrs=final_attrs) def render(self, name, value, attrs=None): html = super(AdminURLFieldWidget, self).render(name, value, attrs) if value: value = force_text(self.format_value(value)) final_attrs = {'href': smart_urlquote(value)} html = format_html( '<p class="url">{} <a{}>{}</a><br />{} {}</p>', _('Currently:'), flatatt(final_attrs), value, _('Change:'), html ) return html class AdminIntegerFieldWidget(forms.NumberInput): class_name = 'vIntegerField' def __init__(self, attrs=None): final_attrs = {'class': self.class_name} if attrs is not None: final_attrs.update(attrs) super(AdminIntegerFieldWidget, self).__init__(attrs=final_attrs) class AdminBigIntegerFieldWidget(AdminIntegerFieldWidget): class_name = 'vBigIntegerField'
bsd-3-clause
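url_params_from_lookup_dict above normalizes a limit_choices_to mapping into string query parameters: callables are called, sequences joined with commas, booleans mapped to '0'/'1'. A small Python 3 restatement of that normalization, with example lookup keys:

def lookup_to_params(lookups):
    # Mirror url_params_from_lookup_dict: call callables, join sequences,
    # map booleans to '0'/'1', stringify everything else.
    params = {}
    for k, v in (lookups or {}).items():
        if callable(v):
            v = v()
        if isinstance(v, (tuple, list)):
            v = ",".join(str(x) for x in v)
        elif isinstance(v, bool):
            v = "01"[v]
        else:
            v = str(v)
        params[k] = v
    return params

print(lookup_to_params({"pk__in": [1, 2, 3], "active": True, "limit": lambda: 5}))
# {'pk__in': '1,2,3', 'active': '1', 'limit': '5'}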
pamfilos/invenio
modules/weblinkback/lib/weblinkback_config.py
28
2599
# -*- coding: utf-8 -*- ## ## This file is part of Invenio. ## Copyright (C) 2011 CERN. ## ## Invenio is free software; you can redistribute it and/or ## modify it under the terms of the GNU General Public License as ## published by the Free Software Foundation; either version 2 of the ## License, or (at your option) any later version. ## ## Invenio is distributed in the hope that it will be useful, but ## WITHOUT ANY WARRANTY; without even the implied warranty of ## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ## General Public License for more details. ## ## You should have received a copy of the GNU General Public License ## along with Invenio; if not, write to the Free Software Foundation, Inc., ## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA. """WebLinkback - Configuration Parameters""" CFG_WEBLINKBACK_STATUS = {'APPROVED': 'approved', 'PENDING': 'pending', 'REJECTED': 'rejected', 'INSERTED': 'inserted', 'BROKEN': 'broken'} CFG_WEBLINKBACK_TYPE = {'TRACKBACK': 'trackback', 'REFBACK': 'refback', 'PINGBACK': 'pingback'} CFG_WEBLINKBACK_LIST_TYPE = {'WHITELIST': 'whitelist', 'BLACKLIST': 'blacklist'} CFG_WEBLINKBACK_ORDER_BY_INSERTION_TIME = {'ASC': 'ASC', 'DESC': 'DESC'} CFG_WEBLINKBACK_ADMIN_MODERATION_ACTION = {'REJECT': 'reject', 'APPROVE': 'approve', 'INSERT': 'insert', 'DELETE': 'delete'} CFG_WEBLINKBACK_ACTION_RETURN_CODE = {'OK': 0, 'INVALID_ACTION': 1, 'DUPLICATE': 2, 'BAD_INPUT': 3} CFG_WEBLINKBACK_PAGE_TITLE_STATUS = {'NEW': 'n', 'OLD': 'o', 'MANUALLY_SET': 'm'} CFG_WEBLINKBACK_LATEST_COUNT_VALUES = (10, 20, 50, 100, 200) CFG_WEBLINKBACK_LATEST_COUNT_DEFAULT = 10 CFG_WEBLINKBACK_BROKEN_COUNT = 5 CFG_WEBLINKBACK_SUBSCRIPTION_DEFAULT_ARGUMENT_NAME = 'default' CFG_WEBLINKBACK_TRACKBACK_SUBSCRIPTION_ERROR_MESSAGE= {'BAD_ARGUMENT': 'Refused: URL argument not set', 'BLACKLIST': 'Refused: URL in blacklist'} CFG_WEBLINKBACK_DEFAULT_USER = 0 CFG_WEBLINKBACK_LATEST_FACTOR = 3 CFG_WEBLINKBACK_MAX_LINKBACKS_IN_EMAIL = 100
gpl-2.0
cindyyu/kuma
vendor/packages/translate/storage/oo.py
25
15103
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright 2002-2008 Zuza Software Foundation # # This file is part of translate. # # translate is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # translate is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, see <http://www.gnu.org/licenses/>. """ Classes that hold units of .oo files (oounit) or entire files (oofile). These are specific .oo files for localisation exported by OpenOffice.org - SDF format (previously knows as GSI files). .. There used to be an overview of the format here http://l10n.openoffice.org/L10N_Framework/Intermediate_file_format.html The behaviour in terms of escaping is explained in detail in the programming comments. """ # FIXME: add simple test which reads in a file and writes it out again import os import re import warnings from translate.misc import quote, wStringIO # File normalisation normalfilenamechars = "/#.0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ" normalizetable = "" for i in map(chr, range(256)): if i in normalfilenamechars: normalizetable += i else: normalizetable += "_" class unormalizechar(dict): def __init__(self, normalchars): self.normalchars = {} for char in normalchars: self.normalchars[ord(char)] = char def __getitem__(self, key): return self.normalchars.get(key, u"_") unormalizetable = unormalizechar(normalfilenamechars.decode("ascii")) def normalizefilename(filename): """converts any non-alphanumeric (standard roman) characters to _""" if isinstance(filename, str): return filename.translate(normalizetable) else: return filename.translate(unormalizetable) def makekey(ookey, long_keys): """converts an oo key tuple into a unique identifier :param ookey: an oo key :type ookey: tuple :param long_keys: Use long keys :type long_keys: Boolean :rtype: str :return: unique ascii identifier """ project, sourcefile, resourcetype, groupid, localid, platform = ookey sourcefile = sourcefile.replace('\\', '/') if long_keys: sourcebase = os.path.join(project, sourcefile) else: sourceparts = sourcefile.split('/') sourcebase = "".join(sourceparts[-1:]) if len(groupid) == 0 or len(localid) == 0: fullid = groupid + localid else: fullid = groupid + "." + localid if resourcetype: fullid = fullid + "." + resourcetype key = "%s#%s" % (sourcebase, fullid) return normalizefilename(key) # These are functions that deal with escaping and unescaping of the text fields # of the SDF file. These should only be applied to the text column. # The fields quickhelptext and title are assumed to carry no escaping. # # The escaping of all strings except those coming from .xhp (helpcontent2) # sourcefiles work as follows: # (newline) -> \n # (carriage return) -> \r # (tab) -> \t # Backslash characters (\) and single quotes (') are not consistently escaped, # and are therefore left as they are. 
# # For strings coming from .xhp (helpcontent2) sourcefiles the following # characters are escaped inside XML tags only: # < -> \< when used with lowercase tagnames (with some exceptions) # > -> \> when used with lowercase tagnames (with some exceptions) # " -> \" around XML properties # The following is consistently escaped in .xhp strings (not only in XML tags): # \ -> \\ def escape_text(text): """Escapes SDF text to be suitable for unit consumption.""" return text.replace("\n", "\\n").replace("\t", "\\t").replace("\r", "\\r") def unescape_text(text): """Unescapes SDF text to be suitable for unit consumption.""" return text.replace("\\\\", "\a").replace("\\n", "\n").replace("\\t", "\t").\ replace("\\r", "\r").replace("\a", "\\\\") helptagre = re.compile('''<[/]??[a-z_\-]+?(?:| +[a-z]+?=".*?") *[/]??>''') def escape_help_text(text): """Escapes the help text as it would be in an SDF file. <, >, " are only escaped in <[[:lower:]]> tags. Some HTML tags make it in in lowercase so those are dealt with. Some OpenOffice.org help tags are not escaped. """ text = text.replace("\\", "\\\\") for tag in helptagre.findall(text): escapethistag = False for escape_tag in ["ahelp", "link", "item", "emph", "defaultinline", "switchinline", "caseinline", "variable", "bookmark_value", "image", "embedvar", "alt"]: if tag.startswith("<%s" % escape_tag) or tag == "</%s>" % escape_tag: escapethistag = True if tag in ["<br/>", "<help-id-missing/>"]: escapethistag = True if escapethistag: escaped_tag = ("\\<" + tag[1:-1] + "\\>").replace('"', '\\"') text = text.replace(tag, escaped_tag) return text def unescape_help_text(text): """Unescapes normal text to be suitable for writing to the SDF file.""" return text.replace(r"\<", "<").replace(r"\>", ">").replace(r'\"', '"').replace(r"\\", "\\") def encode_if_needed_utf8(text): """Encode a Unicode string the the specified encoding""" if isinstance(text, unicode): return text.encode('UTF-8') return text class ooline(object): """this represents one line, one translation in an .oo file""" def __init__(self, parts=None): """construct an ooline from its parts""" if parts is None: self.project, self.sourcefile, self.dummy, self.resourcetype, \ self.groupid, self.localid, self.helpid, self.platform, \ self.width, self.languageid, self.text, self.helptext, \ self.quickhelptext, self.title, self.timestamp = [""] * 15 else: self.setparts(parts) def setparts(self, parts): """create a line from its tab-delimited parts""" if len(parts) != 15: warnings.warn("oo line contains %d parts, it should contain 15: %r" % (len(parts), parts)) newparts = list(parts) if len(newparts) < 15: newparts = newparts + [""] * (15 - len(newparts)) else: newparts = newparts[:15] parts = tuple(newparts) self.project, self.sourcefile, self.dummy, self.resourcetype, \ self.groupid, self.localid, self.helpid, self.platform, \ self.width, self.languageid, self._text, self.helptext, \ self.quickhelptext, self.title, self.timestamp = parts def getparts(self): """return a list of parts in this line""" return (self.project, self.sourcefile, self.dummy, self.resourcetype, self.groupid, self.localid, self.helpid, self.platform, self.width, self.languageid, self._text, self.helptext, self.quickhelptext, self.title, self.timestamp) def gettext(self): """Obtains the text column and handle escaping.""" if self.sourcefile.endswith(".xhp"): return unescape_help_text(self._text) else: return unescape_text(self._text) def settext(self, text): """Sets the text column and handle escaping.""" if 
self.sourcefile.endswith(".xhp"): self._text = escape_help_text(text) else: self._text = escape_text(text) text = property(gettext, settext) def __str__(self): """convert to a string. double check that unicode is handled""" return encode_if_needed_utf8(self.getoutput()) def getoutput(self): """return a line in tab-delimited form""" parts = self.getparts() return "\t".join(parts) def getkey(self): """get the key that identifies the resource""" return (self.project, self.sourcefile, self.resourcetype, self.groupid, self.localid, self.platform) class oounit: """this represents a number of translations of a resource""" def __init__(self): """construct the oounit""" self.languages = {} self.lines = [] def addline(self, line): """add a line to the oounit""" self.languages[line.languageid] = line self.lines.append(line) def __str__(self): """convert to a string. double check that unicode is handled""" return encode_if_needed_utf8(self.getoutput()) def getoutput(self, skip_source=False, fallback_lang=None): """return the lines in tab-delimited form""" if skip_source: lines = self.lines[1:] if not lines: # Untranslated, so let's do fall-back: (bug 1883) new_line = ooline(self.lines[0].getparts()) new_line.languageid = fallback_lang lines = [new_line] else: lines = self.lines return "\r\n".join([str(line) for line in lines]) class oofile: """this represents an entire .oo file""" UnitClass = oounit def __init__(self, input=None): """constructs the oofile""" self.oolines = [] self.units = [] self.ookeys = {} self.filename = "" self.languages = [] if input is not None: self.parse(input) def addline(self, thisline): """adds a parsed line to the file""" key = thisline.getkey() element = self.ookeys.get(key, None) if element is None: element = self.UnitClass() self.units.append(element) self.ookeys[key] = element element.addline(thisline) self.oolines.append(thisline) if thisline.languageid not in self.languages: self.languages.append(thisline.languageid) def parse(self, input): """parses lines and adds them to the file""" if not self.filename: self.filename = getattr(input, 'name', '') if hasattr(input, "read"): src = input.read() input.close() else: src = input for line in src.split("\n"): line = quote.rstripeol(line) if not line: continue parts = line.split("\t") thisline = ooline(parts) self.addline(thisline) def __str__(self, skip_source=False, fallback_lang=None): """convert to a string. 
double check that unicode is handled""" return encode_if_needed_utf8(self.getoutput(skip_source, fallback_lang)) def getoutput(self, skip_source=False, fallback_lang=None): """converts all the lines back to tab-delimited form""" lines = [] for oe in self.units: if len(oe.lines) > 2: warnings.warn("contains %d lines (should be 2 at most): languages %r" % (len(oe.lines), oe.languages)) oekeys = [line.getkey() for line in oe.lines] warnings.warn("contains %d lines (should be 2 at most): keys %r" % (len(oe.lines), oekeys)) oeline = oe.getoutput(skip_source, fallback_lang) + "\r\n" lines.append(oeline) return "".join(lines) class oomultifile: """this takes a huge GSI file and represents it as multiple smaller files...""" def __init__(self, filename, mode=None, multifilestyle="single"): """initialises oomultifile from a seekable inputfile or writable outputfile""" self.filename = filename if mode is None: if os.path.exists(filename): mode = 'r' else: mode = 'w' self.mode = mode self.multifilestyle = multifilestyle self.multifilename = os.path.splitext(filename)[0] self.multifile = open(filename, mode) self.subfilelines = {} if mode == "r": self.createsubfileindex() def createsubfileindex(self): """reads in all the lines and works out the subfiles""" linenum = 0 for line in self.multifile: subfile = self.getsubfilename(line) if not subfile in self.subfilelines: self.subfilelines[subfile] = [] self.subfilelines[subfile].append(linenum) linenum += 1 def getsubfilename(self, line): """looks up the subfile name for the line""" if line.count("\t") < 2: raise ValueError("invalid tab-delimited line: %r" % line) lineparts = line.split("\t", 2) module, filename = lineparts[0], lineparts[1] if self.multifilestyle == "onefile": ooname = self.multifilename elif self.multifilestyle == "toplevel": ooname = module else: filename = filename.replace("\\", "/") fileparts = [module] + filename.split("/") ooname = os.path.join(*fileparts[:-1]) return ooname + os.extsep + "oo" def listsubfiles(self): """returns a list of subfiles in the file""" return self.subfilelines.keys() def __iter__(self): """iterates through the subfile names""" for subfile in self.listsubfiles(): yield subfile def __contains__(self, pathname): """checks if this pathname is a valid subfile""" return pathname in self.subfilelines def getsubfilesrc(self, subfile): """returns the list of lines matching the subfile""" lines = [] requiredlines = dict.fromkeys(self.subfilelines[subfile]) linenum = 0 self.multifile.seek(0) for line in self.multifile: if linenum in requiredlines: lines.append(line) linenum += 1 return "".join(lines) def openinputfile(self, subfile): """returns a pseudo-file object for the given subfile""" subfilesrc = self.getsubfilesrc(subfile) inputfile = wStringIO.StringIO(subfilesrc) inputfile.filename = subfile return inputfile def openoutputfile(self, subfile): """returns a pseudo-file object for the given subfile""" def onclose(contents): self.multifile.write(contents) self.multifile.flush() outputfile = wStringIO.CatchStringOutput(onclose) outputfile.filename = subfile return outputfile def getoofile(self, subfile): """returns an oofile built up from the given subfile's lines""" subfilesrc = self.getsubfilesrc(subfile) oosubfile = oofile() oosubfile.filename = subfile oosubfile.parse(subfilesrc) return oosubfile
mpl-2.0
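The oo module above is easiest to understand from a small round trip: build an ooline from its fifteen tab-separated SDF columns, read back the unescaped text, and derive the unique key used to split GSI files. A minimal sketch, assuming the Translate Toolkit layout (translate.storage.oo); the sample SDF field values are invented for illustration.

from translate.storage import oo

# 15 SDF columns: project, sourcefile, dummy, resourcetype, groupid, localid,
# helpid, platform, width, languageid, text, helptext, quickhelptext, title,
# timestamp (sample values invented).
parts = ["helpcontent2", "source\\text\\shared\\digitalsign.xhp", "0", "help",
         "hd_id123", "par_id456", "", "", "0", "en-US",
         "Digital \\<emph\\>Signatures\\</emph\\>", "", "", "", "2002-02-02"]
line = oo.ooline(parts)
print(line.text)  # .xhp source, so help escaping is undone: Digital <emph>Signatures</emph>
print(oo.makekey(line.getkey(), long_keys=False))  # digitalsign.xhp#hd_id123.par_id456.help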
sekikn/incubator-airflow
tests/providers/http/operators/test_http.py
7
3204
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

import unittest
from unittest import mock

import requests_mock

from airflow.exceptions import AirflowException
from airflow.providers.http.operators.http import SimpleHttpOperator


@mock.patch.dict('os.environ', AIRFLOW_CONN_HTTP_EXAMPLE='http://www.example.com')
class TestSimpleHttpOp(unittest.TestCase):
    @requests_mock.mock()
    def test_response_in_logs(self, m):
        """
        Test that when using SimpleHttpOperator with 'GET' and log_response=True,
        the mocked response body 'Example.com fake response' appears in the log
        """
        m.get('http://www.example.com', text='Example.com fake response')
        operator = SimpleHttpOperator(
            task_id='test_HTTP_op',
            method='GET',
            endpoint='/',
            http_conn_id='HTTP_EXAMPLE',
            log_response=True,
        )

        with mock.patch.object(operator.log, 'info') as mock_info:
            operator.execute(None)
            calls = [mock.call('Example.com fake response')]
            mock_info.assert_has_calls(calls)

    @requests_mock.mock()
    def test_response_in_logs_after_failed_check(self, m):
        """
        Test that when using SimpleHttpOperator with log_response=True,
        the response is logged even if response_check fails
        """

        def response_check(response):
            return response.text != 'invalid response'

        m.get('http://www.example.com', text='invalid response')
        operator = SimpleHttpOperator(
            task_id='test_HTTP_op',
            method='GET',
            endpoint='/',
            http_conn_id='HTTP_EXAMPLE',
            log_response=True,
            response_check=response_check,
        )

        with mock.patch.object(operator.log, 'info') as mock_info:
            self.assertRaises(AirflowException, operator.execute, {})
            calls = [mock.call('Calling HTTP method'), mock.call('invalid response')]
            mock_info.assert_has_calls(calls, any_order=True)

    @requests_mock.mock()
    def test_filters_response(self, m):
        m.get('http://www.example.com', json={'value': 5})
        operator = SimpleHttpOperator(
            task_id='test_HTTP_op',
            method='GET',
            endpoint='/',
            http_conn_id='HTTP_EXAMPLE',
            response_filter=lambda response: response.json(),
        )
        result = operator.execute({})
        assert result == {'value': 5}
apache-2.0
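For context, the operator under test is typically wired into a DAG like the sketch below; the connection id matches the one mocked in the tests, while the DAG id and start date are placeholders.

from datetime import datetime

from airflow import DAG
from airflow.providers.http.operators.http import SimpleHttpOperator

with DAG('http_example', start_date=datetime(2021, 1, 1), schedule_interval=None) as dag:
    fetch = SimpleHttpOperator(
        task_id='fetch_value',
        method='GET',
        endpoint='/',
        http_conn_id='HTTP_EXAMPLE',   # resolved from AIRFLOW_CONN_HTTP_EXAMPLE
        log_response=True,
        response_filter=lambda response: response.json(),  # return value lands in XCom
    )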
opps/opps
opps/archives/migrations/0002_auto__add_field_file_archive_link__chg_field_file_archive.py
5
6181
# -*- coding: utf-8 -*- import datetime from south.db import db from south.v2 import SchemaMigration from django.db import models from django.contrib.auth import get_user_model User = get_user_model() class Migration(SchemaMigration): def forwards(self, orm): # Adding field 'File.archive_link' db.add_column(u'archives_file', 'archive_link', self.gf('django.db.models.fields.URLField')(max_length=255, null=True, blank=True), keep_default=False) try: # Adding M2M table for field mirror_site on 'File' m2m_table_name = db.shorten_name(u'archives_file_mirror_site') db.create_table(m2m_table_name, ( ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)), ('file', models.ForeignKey(orm[u'archives.file'], null=False)), ('site', models.ForeignKey(orm[u'sites.site'], null=False)) )) db.create_unique(m2m_table_name, ['file_id', 'site_id']) except: pass # Changing field 'File.archive' db.alter_column(u'archives_file', 'archive', self.gf('django.db.models.fields.files.FileField')(max_length=255, null=True)) def backwards(self, orm): # Deleting field 'File.archive_link' db.delete_column(u'archives_file', 'archive_link') try: # Removing M2M table for field mirror_site on 'File' db.delete_table(db.shorten_name(u'archives_file_mirror_site')) except: pass # Changing field 'File.archive' db.alter_column(u'archives_file', 'archive', self.gf('django.db.models.fields.files.FileField')(default='', max_length=255)) models = { u'%s.%s' % (User._meta.app_label, User._meta.module_name): { 'Meta': {'object_name': User.__name__}, }, u'archives.file': { 'Meta': {'object_name': 'File'}, 'archive': ('django.db.models.fields.files.FileField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'archive_link': ('django.db.models.fields.URLField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'date_available': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}), 'date_insert': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), 'date_update': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}), 'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'mirror_site': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "u'archives_file_mirror_site'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['sites.Site']"}), 'published': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}), 'site': ('django.db.models.fields.related.ForeignKey', [], {'default': '1', 'to': u"orm['sites.Site']"}), 'site_domain': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '100', 'null': 'True', 'blank': 'True'}), 'site_iid': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True', 'max_length': '4', 'null': 'True', 'blank': 'True'}), 'slug': ('django.db.models.fields.SlugField', [], {'max_length': '150'}), 'source': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'tags': ('django.db.models.fields.CharField', [], {'max_length': '4000', 'null': 'True', 'blank': 'True'}), 'title': ('django.db.models.fields.CharField', [], {'max_length': '140', 'db_index': 'True'}), 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['%s.%s']" % (User._meta.app_label, 
User._meta.object_name)}) }, u'auth.group': { 'Meta': {'object_name': 'Group'}, u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) }, u'auth.permission': { 'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'}, 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) }, u'contenttypes.contenttype': { 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) }, u'sites.site': { 'Meta': {'ordering': "('domain',)", 'object_name': 'Site', 'db_table': "'django_site'"}, 'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) } } complete_apps = ['archives']
mit
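The migration above follows the standard South pattern: forwards() applies the schema change and backwards() reverts it. A minimal sketch of that skeleton, reusing the same archives_file table but with a hypothetical 'caption' column:

from south.db import db
from south.v2 import SchemaMigration


class Migration(SchemaMigration):

    def forwards(self, orm):
        # Add a nullable column so existing rows need no default value.
        db.add_column(u'archives_file', 'caption',
                      self.gf('django.db.models.fields.CharField')(
                          max_length=140, null=True, blank=True),
                      keep_default=False)

    def backwards(self, orm):
        db.delete_column(u'archives_file', 'caption')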
ngannguyen/immunoseq
src/tests/vjusageTest.py
1
1986
#!/usr/bin/env python

import immunoseq.src.vjusage as vju
import unittest


class TestVjusage( unittest.TestCase ):
    def test_vjusage(self):
        indir = "/hive/users/nknguyen/reconGit/immunoseq/data/vjusageTest/seqs"
        samples = vju.readFiles(indir)
        for s in samples:
            s.getVJusage()

        # V usage:
        s2v2c = {'sample1': {'TRBV1-1': 35, 'TRBV1-2': 15, 'TRBV2': 20, 'TRBV3': 20, 'TRBV4': 10, 'TRBV5': 0},
                 'sample2': {'TRBV1-1': 95, 'TRBV1-2': 15, 'TRBV2': 20, 'TRBV3': 20, 'TRBV4': 0, 'TRBV5': 50}}
        s2v2uc = {'sample1': {'TRBV1-1': 1, 'TRBV1-2': 0, 'TRBV2': 0, 'TRBV3': 0, 'TRBV4': 1, 'TRBV5': 0},
                  'sample2': {'TRBV1-1': 2, 'TRBV1-2': 0, 'TRBV2': 0, 'TRBV3': 0, 'TRBV4': 0, 'TRBV5': 1}}
        knownVs = 'TRBV1-1,TRBV1-2,TRBV2,TRBV3,TRBV4,TRBV5'.split(',')
        vgenes = vju.getUnionGeneList(samples, 'v')
        for v in knownVs:
            self.assertTrue( v in vgenes )
        for s in samples:
            if s.name not in ['sample1', 'sample2']:
                print s.name
                continue
            v2c = s.usage['v']
            for v in v2c:
                count = int( v2c[v][0] )
                uniq = int( v2c[v][1] )
                knowncount = s2v2c[s.name][v]
                knownuniq = s2v2uc[s.name][v]
                self.assertTrue( count == knowncount )
                self.assertTrue( uniq == knownuniq )

        # VJ usage:
        vjs = vju.getUnionGeneList(samples, 'vj')
        vj2c = samples[0].usage['vj']
        count = int( vj2c['TRBV1-1|TRBJ1-1'][0] )
        self.assertTrue( count == 35 )
        uniqcount = int( vj2c['TRBV1-1|TRBJ1-1'][1] )
        self.assertTrue( uniqcount == 1 )
        count = int( vj2c['TRBV2|TRBJ2'][0] )
        self.assertTrue( count == 10 )
        uniqcount = int( vj2c['TRBV2|TRBJ2'][1] )
        self.assertTrue( uniqcount == 0 )


if __name__ == '__main__':
    unittest.main()
mit
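The assertions above boil down to per-gene aggregation of read counts and unique-clone counts; the same bookkeeping can be sketched with plain dictionaries (sample clones invented):

from collections import defaultdict

clones = [('TRBV1-1', 35), ('TRBV1-2', 15), ('TRBV1-1', 1)]
usage = defaultdict(lambda: [0, 0])  # gene -> [total reads, clones seen]
for gene, reads in clones:
    usage[gene][0] += reads
    usage[gene][1] += 1
print(dict(usage))  # {'TRBV1-1': [36, 2], 'TRBV1-2': [15, 1]}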
fxfitz/ansible
lib/ansible/modules/network/cnos/cnos_interface.py
9
27599
#!/usr/bin/python # -*- coding: utf-8 -*- from __future__ import (absolute_import, division, print_function) __metaclass__ = type # # Copyright (C) 2017 Lenovo, Inc. # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # # Module to send Port channel commands to Lenovo Switches # Lenovo Networking # ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = ''' --- module: cnos_interface author: "Anil Kumar Muraleedharan (@amuraleedhar)" short_description: Manage interface configuration on devices running Lenovo CNOS description: - This module allows you to work with interface related configurations. The operators used are overloaded to ensure control over switch interface configurations. Apart from the regular device connection related attributes, there are seven interface arguments that will perform further configurations. They are interfaceArg1, interfaceArg2, interfaceArg3, interfaceArg4, interfaceArg5, interfaceArg6, and interfaceArg7. For more details on how to use these arguments, see [Overloaded Variables]. Interface configurations are taken care at six contexts in a regular CLI. They are 1. Interface Name - Configurations 2. Ethernet Interface - Configurations 3. Loopback Interface Configurations 4. Management Interface Configurations 5. Port Aggregation - Configurations 6. VLAN Configurations This module uses SSH to manage network device configuration. The results of the operation will be placed in a directory named 'results' that must be created by the user in their local directory to where the playbook is run. For more information about this module from Lenovo and customizing it usage for your use cases, please visit U(http://systemx.lenovofiles.com/help/index.jsp?topic=%2Fcom.lenovo.switchmgt.ansible.doc%2Fcnos_interface.html) version_added: "2.3" extends_documentation_fragment: cnos options: interfaceRange: description: - This specifies the interface range in which the port aggregation is envisaged required: Yes default: Null interfaceOption: description: - This specifies the attribute you specify subsequent to interface command required: Yes default: Null choices: [None, ethernet, loopback, mgmt, port-aggregation, vlan] interfaceArg1: description: - This is an overloaded interface first argument. Usage of this argument can be found is the User Guide referenced above. required: Yes default: Null choices: [aggregation-group, bfd, bridgeport, description, duplex, flowcontrol, ip, ipv6, lacp, lldp, load-interval, mac, mac-address, mac-learn, microburst-detection, mtu, service, service-policy, shutdown, snmp, spanning-tree, speed, storm-control, vlan, vrrp, port-aggregation] interfaceArg2: description: - This is an overloaded interface second argument. Usage of this argument can be found is the User Guide referenced above. 
required: No default: Null choices: [aggregation-group number, access or mode or trunk, description, auto or full or half, receive or send, port-priority, suspend-individual, timeout, receive or transmit or trap-notification, tlv-select, Load interval delay in seconds, counter, Name for the MAC Access List, mac-address in HHHH.HHHH.HHHH format, THRESHOLD Value in unit of buffer cell, <64-9216> MTU in bytes-<64-9216> for L2 packet,<576-9216> for L3 IPv4 packet, <1280-9216> for L3 IPv6 packet, enter the instance id, input or output, copp-system-policy, type, 1000 or 10000 or 40000 or auto, broadcast or multicast or unicast, disable or enable or egress-only, Virtual router identifier, destination-ip or destination-mac or destination-port or source-dest-ip or source-dest-mac or source-dest-port or source-interface or source-ip or source-mac or source-port] interfaceArg3: description: - This is an overloaded interface third argument. Usage of this argument can be found is the User Guide referenced above. required: No default: Null choices: [active or on or passive, on or off, LACP port priority, long or short, link-aggregation or mac-phy-status or management-address or max-frame-size or port-description or port-protocol-vlan or port-vlan or power-mdi or protocol-identity or system-capabilities or system-description or system-name or vid-management or vlan-name, counter for load interval, policy input name, all or Copp class name to attach, qos, queueing, Enter the allowed traffic level, ipv6] interfaceArg4: description: - This is an overloaded interface fourth argument. Usage of this argument can be found is the User Guide referenced above. required: No default: Null choices: [key-chain, key-id, keyed-md5 or keyed-sha1 or meticulous-keyed-md5 or meticulous-keyed-sha1 or simple, Interval value in milliseconds, Destination IP (Both IPV4 and IPV6),in or out, MAC address, Time-out value in seconds, class-id, request, Specify the IPv4 address, OSPF area ID as a decimal value, OSPF area ID in IP address format, anycast or secondary, ethernet, vlan, MAC (hardware) address in HHHH.HHHH.HHHH format, Load interval delay in seconds, Specify policy input name, input or output, cost, port-priority, BFD minimum receive interval,source-interface] interfaceArg5: description: - This is an overloaded interface fifth argument. Usage of this argument can be found is the User Guide referenced above. required: No default: Null choices: [name of key-chain, key-Id Value, key-chain , key-id, BFD minimum receive interval, Value of Hello Multiplier, admin-down or multihop or non-persistent, Vendor class-identifier name, bootfile-name or host-name or log-server or ntp-server or tftp-server-name, Slot/chassis number, Vlan interface, Specify policy input name, Port path cost or auto, Port priority increments of 32] interfaceArg6: description: - This is an overloaded interface sixth argument. Usage of this argument can be found is the User Guide referenced above. required: No default: Null choices: [Authentication key string, name of key-chain, key-Id Value, Value of Hello Multiplier, admin-down or non-persistent] interfaceArg7: description: - This is an overloaded interface seventh argument. Usage of this argument can be found is the User Guide referenced above. required: No default: Null choices: [Authentication key string, admin-down] ''' EXAMPLES = ''' Tasks : The following are examples of using the module cnos_interface. These are written in the main.yml file of the tasks directory. 
--- - name: Test Interface Ethernet - aggregation-group cnos_interface: host: "{{ inventory_hostname }}" username: "{{ hostvars[inventory_hostname]['ansible_ssh_user'] }}" password: "{{ hostvars[inventory_hostname]['ansible_ssh_pass'] }}" deviceType: "{{ hostvars[inventory_hostname]['deviceType'] }}" enablePassword: "{{ hostvars[inventory_hostname]['enablePassword'] }}" outputfile: "./results/test_interface_{{ inventory_hostname }}_output.txt" interfaceOption: 'ethernet' interfaceRange: 1 interfaceArg1: "aggregation-group" interfaceArg2: 33 interfaceArg3: "on" - name: Test Interface Ethernet - bridge-port cnos_interface: host: "{{ inventory_hostname }}" username: "{{ hostvars[inventory_hostname]['ansible_ssh_user'] }}" password: "{{ hostvars[inventory_hostname]['ansible_ssh_pass'] }}" deviceType: "{{ hostvars[inventory_hostname]['deviceType'] }}" enablePassword: "{{ hostvars[inventory_hostname]['enablePassword'] }}" outputfile: "./results/test_interface_{{ inventory_hostname }}_output.txt" interfaceOption: 'ethernet' interfaceRange: 33 interfaceArg1: "bridge-port" interfaceArg2: "access" interfaceArg3: 33 - name: Test Interface Ethernet - bridgeport mode cnos_interface: host: "{{ inventory_hostname }}" username: "{{ hostvars[inventory_hostname]['ansible_ssh_user'] }}" password: "{{ hostvars[inventory_hostname]['ansible_ssh_pass'] }}" deviceType: "{{ hostvars[inventory_hostname]['deviceType'] }}" enablePassword: "{{ hostvars[inventory_hostname]['enablePassword'] }}" outputfile: "./results/test_interface_{{ inventory_hostname }}_output.txt" interfaceOption: 'ethernet' interfaceRange: 33 interfaceArg1: "bridge-port" interfaceArg2: "mode" interfaceArg3: "access" - name: Test Interface Ethernet - Description cnos_interface: host: "{{ inventory_hostname }}" username: "{{ hostvars[inventory_hostname]['ansible_ssh_user'] }}" password: "{{ hostvars[inventory_hostname]['ansible_ssh_pass'] }}" deviceType: "{{ hostvars[inventory_hostname]['deviceType'] }}" enablePassword: "{{ hostvars[inventory_hostname]['enablePassword'] }}" outputfile: "./results/test_interface_{{ inventory_hostname }}_output.txt" interfaceOption: 'ethernet' interfaceRange: 33 interfaceArg1: "description" interfaceArg2: "Hentammoo " - name: Test Interface Ethernet - Duplex cnos_interface: host: "{{ inventory_hostname }}" username: "{{ hostvars[inventory_hostname]['ansible_ssh_user'] }}" password: "{{ hostvars[inventory_hostname]['ansible_ssh_pass'] }}" deviceType: "{{ hostvars[inventory_hostname]['deviceType'] }}" enablePassword: "{{ hostvars[inventory_hostname]['enablePassword'] }}" outputfile: "./results/test_interface_{{ inventory_hostname }}_output.txt" interfaceOption: 'ethernet' interfaceRange: 1 interfaceArg1: "duplex" interfaceArg2: "auto" - name: Test Interface Ethernet - flowcontrol cnos_interface: host: "{{ inventory_hostname }}" username: "{{ hostvars[inventory_hostname]['ansible_ssh_user'] }}" password: "{{ hostvars[inventory_hostname]['ansible_ssh_pass'] }}" deviceType: "{{ hostvars[inventory_hostname]['deviceType'] }}" enablePassword: "{{ hostvars[inventory_hostname]['enablePassword'] }}" outputfile: "./results/test_interface_{{ inventory_hostname }}_output.txt" interfaceOption: 'ethernet' interfaceRange: 33 interfaceArg1: "flowcontrol" interfaceArg2: "send" interfaceArg3: "off" - name: Test Interface Ethernet - lacp cnos_interface: host: "{{ inventory_hostname }}" username: "{{ hostvars[inventory_hostname]['ansible_ssh_user'] }}" password: "{{ hostvars[inventory_hostname]['ansible_ssh_pass'] }}" deviceType: "{{ 
hostvars[inventory_hostname]['deviceType'] }}" enablePassword: "{{ hostvars[inventory_hostname]['enablePassword'] }}" outputfile: "./results/test_interface_{{ inventory_hostname }}_output.txt" interfaceOption: 'ethernet' interfaceRange: 33 interfaceArg1: "lacp" interfaceArg2: "port-priority" interfaceArg3: 33 - name: Test Interface Ethernet - lldp cnos_interface: host: "{{ inventory_hostname }}" username: "{{ hostvars[inventory_hostname]['ansible_ssh_user'] }}" password: "{{ hostvars[inventory_hostname]['ansible_ssh_pass'] }}" deviceType: "{{ hostvars[inventory_hostname]['deviceType'] }}" enablePassword: "{{ hostvars[inventory_hostname]['enablePassword'] }}" outputfile: "./results/test_interface_{{ inventory_hostname }}_output.txt" interfaceOption: 'ethernet' interfaceRange: 33 interfaceArg1: "lldp" interfaceArg2: "tlv-select" interfaceArg3: "max-frame-size" - name: Test Interface Ethernet - load-interval cnos_interface: host: "{{ inventory_hostname }}" username: "{{ hostvars[inventory_hostname]['ansible_ssh_user'] }}" password: "{{ hostvars[inventory_hostname]['ansible_ssh_pass'] }}" deviceType: "{{ hostvars[inventory_hostname]['deviceType'] }}" enablePassword: "{{ hostvars[inventory_hostname]['enablePassword'] }}" outputfile: "./results/test_interface_{{ inventory_hostname }}_output.txt" interfaceOption: 'ethernet' interfaceRange: 33 interfaceArg1: "load-interval" interfaceArg2: "counter" interfaceArg3: 2 interfaceArg4: 33 - name: Test Interface Ethernet - mac cnos_interface: host: "{{ inventory_hostname }}" username: "{{ hostvars[inventory_hostname]['ansible_ssh_user'] }}" password: "{{ hostvars[inventory_hostname]['ansible_ssh_pass'] }}" deviceType: "{{ hostvars[inventory_hostname]['deviceType'] }}" enablePassword: "{{ hostvars[inventory_hostname]['enablePassword'] }}" outputfile: "./results/test_interface_{{ inventory_hostname }}_output.txt" interfaceOption: 'ethernet' interfaceRange: 33 interfaceArg1: "mac" interfaceArg2: "copp-system-acl-vlag-hc" - name: Test Interface Ethernet - microburst-detection cnos_interface: host: "{{ inventory_hostname }}" username: "{{ hostvars[inventory_hostname]['ansible_ssh_user'] }}" password: "{{ hostvars[inventory_hostname]['ansible_ssh_pass'] }}" deviceType: "{{ hostvars[inventory_hostname]['deviceType'] }}" enablePassword: "{{ hostvars[inventory_hostname]['enablePassword'] }}" outputfile: "./results/test_interface_{{ inventory_hostname }}_output.txt" interfaceOption: 'ethernet' interfaceRange: 33 interfaceArg1: "microburst-detection" interfaceArg2: 25 - name: Test Interface Ethernet - mtu cnos_interface: host: "{{ inventory_hostname }}" username: "{{ hostvars[inventory_hostname]['ansible_ssh_user'] }}" password: "{{ hostvars[inventory_hostname]['ansible_ssh_pass'] }}" deviceType: "{{ hostvars[inventory_hostname]['deviceType'] }}" enablePassword: "{{ hostvars[inventory_hostname]['enablePassword'] }}" outputfile: "./results/test_interface_{{ inventory_hostname }}_output.txt" interfaceOption: 'ethernet' interfaceRange: 33 interfaceArg1: "mtu" interfaceArg2: 66 - name: Test Interface Ethernet - service-policy cnos_interface: host: "{{ inventory_hostname }}" username: "{{ hostvars[inventory_hostname]['ansible_ssh_user'] }}" password: "{{ hostvars[inventory_hostname]['ansible_ssh_pass'] }}" deviceType: "{{ hostvars[inventory_hostname]['deviceType'] }}" enablePassword: "{{ hostvars[inventory_hostname]['enablePassword'] }}" outputfile: "./results/test_interface_{{ inventory_hostname }}_output.txt" interfaceOption: 'ethernet' interfaceRange: 33 
interfaceArg1: "service-policy" interfaceArg2: "input" interfaceArg3: "Anil" - name: Test Interface Ethernet - speed cnos_interface: host: "{{ inventory_hostname }}" username: "{{ hostvars[inventory_hostname]['ansible_ssh_user'] }}" password: "{{ hostvars[inventory_hostname]['ansible_ssh_pass'] }}" deviceType: "{{ hostvars[inventory_hostname]['deviceType'] }}" enablePassword: "{{ hostvars[inventory_hostname]['enablePassword'] }}" outputfile: "./results/test_interface_{{ inventory_hostname }}_output.txt" interfaceOption: 'ethernet' interfaceRange: 1 interfaceArg1: "speed" interfaceArg2: "auto" - name: Test Interface Ethernet - storm cnos_interface: host: "{{ inventory_hostname }}" username: "{{ hostvars[inventory_hostname]['ansible_ssh_user'] }}" password: "{{ hostvars[inventory_hostname]['ansible_ssh_pass'] }}" deviceType: "{{ hostvars[inventory_hostname]['deviceType'] }}" enablePassword: "{{ hostvars[inventory_hostname]['enablePassword'] }}" outputfile: "./results/test_interface_{{ inventory_hostname }}_output.txt" interfaceOption: 'ethernet' interfaceRange: 33 interfaceArg1: "storm-control" interfaceArg2: "broadcast" interfaceArg3: 12.5 - name: Test Interface Ethernet - vlan cnos_interface: host: "{{ inventory_hostname }}" username: "{{ hostvars[inventory_hostname]['ansible_ssh_user'] }}" password: "{{ hostvars[inventory_hostname]['ansible_ssh_pass'] }}" deviceType: "{{ hostvars[inventory_hostname]['deviceType'] }}" enablePassword: "{{ hostvars[inventory_hostname]['enablePassword'] }}" outputfile: "./results/test_interface_{{ inventory_hostname }}_output.txt" interfaceOption: 'ethernet' interfaceRange: 33 interfaceArg1: "vlan" interfaceArg2: "disable" - name: Test Interface Ethernet - vrrp cnos_interface: host: "{{ inventory_hostname }}" username: "{{ hostvars[inventory_hostname]['ansible_ssh_user'] }}" password: "{{ hostvars[inventory_hostname]['ansible_ssh_pass'] }}" deviceType: "{{ hostvars[inventory_hostname]['deviceType'] }}" enablePassword: "{{ hostvars[inventory_hostname]['enablePassword'] }}" outputfile: "./results/test_interface_{{ inventory_hostname }}_output.txt" interfaceOption: 'ethernet' interfaceRange: 33 interfaceArg1: "vrrp" interfaceArg2: 33 - name: Test Interface Ethernet - spanning tree1 cnos_interface: host: "{{ inventory_hostname }}" username: "{{ hostvars[inventory_hostname]['ansible_ssh_user'] }}" password: "{{ hostvars[inventory_hostname]['ansible_ssh_pass'] }}" deviceType: "{{ hostvars[inventory_hostname]['deviceType'] }}" enablePassword: "{{ hostvars[inventory_hostname]['enablePassword'] }}" outputfile: "./results/test_interface_{{ inventory_hostname }}_output.txt" interfaceOption: 'ethernet' interfaceRange: 33 interfaceArg1: "spanning-tree" interfaceArg2: "bpduguard" interfaceArg3: "enable" - name: Test Interface Ethernet - spanning tree 2 cnos_interface: host: "{{ inventory_hostname }}" username: "{{ hostvars[inventory_hostname]['ansible_ssh_user'] }}" password: "{{ hostvars[inventory_hostname]['ansible_ssh_pass'] }}" deviceType: "{{ hostvars[inventory_hostname]['deviceType'] }}" enablePassword: "{{ hostvars[inventory_hostname]['enablePassword'] }}" outputfile: "./results/test_interface_{{ inventory_hostname }}_output.txt" interfaceOption: 'ethernet' interfaceRange: 33 interfaceArg1: "spanning-tree" interfaceArg2: "mst" interfaceArg3: "33-35" interfaceArg4: "cost" interfaceArg5: 33 - name: Test Interface Ethernet - ip1 cnos_interface: host: "{{ inventory_hostname }}" username: "{{ hostvars[inventory_hostname]['ansible_ssh_user'] }}" password: "{{ 
hostvars[inventory_hostname]['ansible_ssh_pass'] }}" deviceType: "{{ hostvars[inventory_hostname]['deviceType'] }}" enablePassword: "{{ hostvars[inventory_hostname]['enablePassword'] }}" outputfile: "./results/test_interface_{{ inventory_hostname }}_output.txt" interfaceOption: 'ethernet' interfaceRange: 33 interfaceArg1: "ip" interfaceArg2: "access-group" interfaceArg3: "anil" interfaceArg4: "in" - name: Test Interface Ethernet - ip2 cnos_interface: host: "{{ inventory_hostname }}" username: "{{ hostvars[inventory_hostname]['ansible_ssh_user'] }}" password: "{{ hostvars[inventory_hostname]['ansible_ssh_pass'] }}" deviceType: "{{ hostvars[inventory_hostname]['deviceType'] }}" enablePassword: "{{ hostvars[inventory_hostname]['enablePassword'] }}" outputfile: "./results/test_interface_{{ inventory_hostname }}_output.txt" interfaceOption: 'ethernet' interfaceRange: 33 interfaceArg1: "ip" interfaceArg2: "port" interfaceArg3: "anil" - name: Test Interface Ethernet - bfd cnos_interface: host: "{{ inventory_hostname }}" username: "{{ hostvars[inventory_hostname]['ansible_ssh_user'] }}" password: "{{ hostvars[inventory_hostname]['ansible_ssh_pass'] }}" deviceType: "{{ hostvars[inventory_hostname]['deviceType'] }}" enablePassword: "{{ hostvars[inventory_hostname]['enablePassword'] }}" outputfile: "./results/test_interface_{{ inventory_hostname }}_output.txt" interfaceOption: 'ethernet' interfaceRange: 33 interfaceArg1: "bfd" interfaceArg2: "interval" interfaceArg3: 55 interfaceArg4: 55 interfaceArg5: 33 - name: Test Interface Ethernet - bfd cnos_interface: host: "{{ inventory_hostname }}" username: "{{ hostvars[inventory_hostname]['ansible_ssh_user'] }}" password: "{{ hostvars[inventory_hostname]['ansible_ssh_pass'] }}" deviceType: "{{ hostvars[inventory_hostname]['deviceType'] }}" enablePassword: "{{ hostvars[inventory_hostname]['enablePassword'] }}" outputfile: "./results/test_interface_{{ inventory_hostname }}_output.txt" interfaceOption: 'ethernet' interfaceRange: 33 interfaceArg1: "bfd" interfaceArg2: "ipv4" interfaceArg3: "authentication" interfaceArg4: "meticulous-keyed-md5" interfaceArg5: "key-chain" interfaceArg6: "mychain" ''' RETURN = ''' msg: description: Success or failure message returned: always type: string sample: "Interface configurations accomplished." 
'''

import time

try:
    import paramiko
    HAS_PARAMIKO = True
except ImportError:
    HAS_PARAMIKO = False

try:
    from ansible.module_utils.network.cnos import cnos
    HAS_LIB = True
except ImportError:
    HAS_LIB = False

from ansible.module_utils.basic import AnsibleModule


def main():
    module = AnsibleModule(
        argument_spec=dict(
            outputfile=dict(required=True),
            host=dict(required=True),
            username=dict(required=True),
            password=dict(required=True, no_log=True),
            enablePassword=dict(required=False, no_log=True),
            deviceType=dict(required=True),
            interfaceRange=dict(required=False),
            interfaceOption=dict(required=False),
            interfaceArg1=dict(required=True),
            interfaceArg2=dict(required=False),
            interfaceArg3=dict(required=False),
            interfaceArg4=dict(required=False),
            interfaceArg5=dict(required=False),
            interfaceArg6=dict(required=False),
            interfaceArg7=dict(required=False),),
        supports_check_mode=False)
    username = module.params['username']
    password = module.params['password']
    enablePassword = module.params['enablePassword']
    interfaceRange = module.params['interfaceRange']
    interfaceOption = module.params['interfaceOption']
    interfaceArg1 = module.params['interfaceArg1']
    interfaceArg2 = module.params['interfaceArg2']
    interfaceArg3 = module.params['interfaceArg3']
    interfaceArg4 = module.params['interfaceArg4']
    interfaceArg5 = module.params['interfaceArg5']
    interfaceArg6 = module.params['interfaceArg6']
    interfaceArg7 = module.params['interfaceArg7']
    outputfile = module.params['outputfile']
    hostIP = module.params['host']
    deviceType = module.params['deviceType']
    output = ""

    if not HAS_PARAMIKO:
        module.fail_json(msg='paramiko is required for this module')

    # Create instance of SSHClient object
    remote_conn_pre = paramiko.SSHClient()

    # Automatically add untrusted hosts (make sure okay for security policy
    # in your environment)
    remote_conn_pre.set_missing_host_key_policy(paramiko.AutoAddPolicy())

    # initiate SSH connection with the switch
    remote_conn_pre.connect(hostIP, username=username, password=password)
    time.sleep(2)

    # Use invoke_shell to establish an 'interactive session'
    remote_conn = remote_conn_pre.invoke_shell()
    time.sleep(2)

    # Enable and enter configure terminal then send command
    output = output + cnos.waitForDeviceResponse("\n", ">", 2, remote_conn)
    output = output + cnos.enterEnableModeForDevice(enablePassword, 3, remote_conn)

    # Make terminal length = 0
    output = output + cnos.waitForDeviceResponse("terminal length 0\n", "#", 2, remote_conn)

    # Go to config mode
    output = output + cnos.waitForDeviceResponse("configure device\n", "(config)#", 2, remote_conn)

    # Send the CLI command; every interface option maps onto the same
    # cnos.interfaceConfig() call, differing only in the option string.
    if interfaceOption is None or interfaceOption == "":
        option = None
    elif interfaceOption in ("ethernet", "loopback", "mgmt", "port-aggregation", "vlan"):
        option = interfaceOption
    else:
        option = False

    if option is False:
        output = "Invalid interface option \n"
    else:
        output = output + cnos.interfaceConfig(
            remote_conn, deviceType, "(config)#", 2, option, interfaceRange,
            interfaceArg1, interfaceArg2, interfaceArg3, interfaceArg4,
            interfaceArg5, interfaceArg6, interfaceArg7)

    # Save it into the file
    with open(outputfile, "a") as outfile:
        outfile.write(output)

    # Logic to check when changes occur or not
    errorMsg = cnos.checkOutputForError(output)
    if errorMsg is None:
        module.exit_json(changed=True, msg="Interface Configuration is done")
    else:
        module.fail_json(msg=errorMsg)


if __name__ == '__main__':
    main()
gpl-3.0
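main() above leans on cnos.waitForDeviceResponse() to drive the interactive shell; a minimal read-until-prompt helper in the same spirit is sketched below (an illustration only, not the module_utils implementation):

import time


def wait_for_prompt(shell, command, prompt, timeout=10):
    """Send a command over a paramiko shell and collect output until
    the prompt string appears or the timeout expires."""
    shell.send(command)
    buf = ''
    deadline = time.time() + timeout
    while prompt not in buf and time.time() < deadline:
        if shell.recv_ready():
            buf += shell.recv(65535).decode('utf-8', errors='replace')
        else:
            time.sleep(0.1)
    return buf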
byterom/android_external_chromium_org
tools/binary_size/PRESUBMIT.py
52
1112
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""binary_size presubmit script

See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details about the presubmit API built into gcl.
"""


def CommonChecks(input_api, output_api):
  output = []
  output.extend(input_api.canned_checks.RunPylint(input_api, output_api))
  output.extend(
      input_api.canned_checks.RunUnitTestsInDirectory(
          input_api, output_api,
          input_api.PresubmitLocalPath(),
          whitelist=[r'.+_unittest\.py$']))

  if input_api.is_committing:
    output.extend(input_api.canned_checks.PanProjectChecks(input_api,
                                                           output_api,
                                                           owners_check=False))
  return output


def CheckChangeOnUpload(input_api, output_api):
  return CommonChecks(input_api, output_api)


def CheckChangeOnCommit(input_api, output_api):
  return CommonChecks(input_api, output_api)
bsd-3-clause
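A quick sanity check of the unit-test whitelist pattern used above; the matching filename is hypothetical:

import re

pattern = re.compile(r'.+_unittest\.py$')
assert pattern.match('binary_size_delta_unittest.py')
assert not pattern.match('run_binary_size_analysis.py')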
SEC-i/ecoControl
server/hooks.py
1
4591
import sys
import logging
from time import time
import json

from django.contrib.auth import authenticate, login, logout
from django.core.exceptions import ObjectDoesNotExist
from django.core.exceptions import PermissionDenied
from django.db import connection
from django.db.models import Count, Min, Sum, Avg
from django.forms.models import model_to_dict
from django.http import HttpResponse
from django.shortcuts import redirect
from django.utils.timezone import utc
from django.views.decorators.debug import sensitive_post_parameters
from django.views.decorators.http import require_POST

import functions
from models import Device, Configuration, DeviceConfiguration, Sensor, Notification
from helpers import create_json_response

logger = logging.getLogger('django')


def index(request):
    return redirect('/static/index.html', permanent=True)


def api_index(request):
    return create_json_response({'version': 0.2}, request)


@require_POST
@sensitive_post_parameters('password')
def login_user(request):
    if 'username' in request.POST and 'password' in request.POST:
        user = authenticate(username=request.POST['username'],
                            password=request.POST['password'])
        if user is not None:
            if user.is_active:
                login(request, user)
                return create_json_response({"login": "successful", "user": request.user.get_full_name()}, request)
            else:
                return create_json_response({"login": "disabled", "user": request.user.get_full_name()}, request)
        else:
            return create_json_response({"login": "invalid"}, request)
    else:
        return create_json_response({"login": "failed"}, request)


def logout_user(request):
    logout(request)
    return create_json_response({"logout": "successful"}, request)


def status(request):
    output = [
        ("system_status", functions.get_configuration("system_status", False))]
    output.append(
        ("system_mode", functions.get_configuration("system_mode", False)))
    output.append(("login", request.user.is_authenticated()))
    if request.user.is_authenticated():
        output.append(("user", request.user.get_full_name()))
        output.append(("admin", request.user.is_superuser))
        output.append(
            ("auto_optimization", functions.get_configuration("auto_optimization", False)))
    return create_json_response(dict(output), request)


@require_POST
def export_csv(request):
    if not request.user.is_authenticated():
        raise PermissionDenied

    response = HttpResponse(content_type='text/csv')
    response['Content-Disposition'] = 'attachment; filename="export_%s.csv"' % time()
    if 'csv' in request.POST:
        response.write(request.POST['csv'])
    return response


def list_settings(request):
    if not request.user.is_authenticated():
        raise PermissionDenied

    output = []
    output += functions.get_configurations()
    output += functions.get_device_configurations()
    return create_json_response(dict(output), request)


def list_sensors(request):
    if not request.user.is_authenticated():
        raise PermissionDenied

    sensors = Sensor.objects.filter(in_diagram=True).values(
        'id', 'name', 'unit', 'device__name', 'aggregate_sum', 'aggregate_avg')
    # rename device__name to device for convenience
    output = [{'id': x['id'], 'name': x['name'], 'unit': x['unit'],
               'device': x['device__name'], 'sum': x['aggregate_sum'],
               'avg': x['aggregate_avg']} for x in sensors]
    return create_json_response(output, request)


def list_notifications(request, start, end):
    if not request.user.is_authenticated():
        raise PermissionDenied

    start = 0 if start is None else start
    end = 25 if end is None else end

    if request.user.is_superuser:
        notifications = Notification.objects.all()
    else:
        notifications = Notification.objects.filter(
            threshold__show_manager=True)

    notifications = notifications.select_related()
    output = {
        'total': len(notifications),
        'notifications': []
    }
    for notification in notifications.order_by('-sensor_value__timestamp')[int(start):int(end)]:
        output['notifications'].append({
            'id': notification.id,
            'threshold': model_to_dict(notification.threshold),
            'sensor_value': model_to_dict(notification.sensor_value),
            'read': notification.read,
            'target': notification.target,
        })
    return create_json_response(output, request)
mit
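hooks.py imports create_json_response from a local helpers module that is not shown; a plausible minimal implementation is sketched below (an assumption, not the project's actual helper):

import json

from django.http import HttpResponse


def create_json_response(data, request):
    # Assumed behaviour: serialize the payload and return it as JSON;
    # any JSONP/callback handling the real helper may do is omitted.
    return HttpResponse(json.dumps(data), content_type='application/json')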
gdementen/PyTables
tables/conditions.py
5
16123
# -*- coding: utf-8 -*- ######################################################################## # # License: BSD # Created: 2006-09-19 # Author: Ivan Vilata i Balaguer -- [email protected] # :Notes: Heavily modified by Francesc Alted for multi-index support. # 2008-04-09 # Combined common & pro version. # 2011-06-04 # # $Id$ # ######################################################################## """Utility functions and classes for supporting query conditions. Classes: `CompileCondition` Container for a compiled condition. Functions: `compile_condition` Compile a condition and extract usable index conditions. `call_on_recarr` Evaluate a function over a structured array. """ from __future__ import absolute_import import re from numexpr.necompiler import typecode_to_kind from numexpr.necompiler import expressionToAST, typeCompileAst from numexpr.necompiler import stringToExpression, NumExpr from numexpr.expressions import ExpressionNode from .utilsextension import get_nested_field from .utils import lazyattr from six.moves import zip _no_matching_opcode = re.compile(r"[^a-z]([a-z]+)_([a-z]+)[^a-z]") # E.g. "gt" and "bfc" from "couldn't find matching opcode for 'gt_bfc'". def _unsupported_operation_error(exception): """Make the \"no matching opcode\" Numexpr `exception` more clear. A new exception of the same kind is returned. """ message = exception.args[0] op, types = _no_matching_opcode.search(message).groups() newmessage = "unsupported operand types for *%s*: " % op newmessage += ', '.join([typecode_to_kind[t] for t in types[1:]]) return exception.__class__(newmessage) def _check_indexable_cmp(getidxcmp): """Decorate `getidxcmp` to check the returned indexable comparison. This does some extra checking that Numexpr would perform later on the comparison if it was compiled within a complete condition. """ def newfunc(exprnode, indexedcols): result = getidxcmp(exprnode, indexedcols) if result[0] is not None: try: typeCompileAst(expressionToAST(exprnode)) except NotImplementedError as nie: # Try to make this Numexpr error less cryptic. raise _unsupported_operation_error(nie) return result newfunc.__name__ = getidxcmp.__name__ newfunc.__doc__ = getidxcmp.__doc__ return newfunc @_check_indexable_cmp def _get_indexable_cmp(exprnode, indexedcols): """Get the indexable variable-constant comparison in `exprnode`. A tuple of (variable, operation, constant) is returned if `exprnode` is a variable-constant (or constant-variable) comparison, and the variable is in `indexedcols`. A normal variable can also be used instead of a constant: a tuple with its name will appear instead of its value. Otherwise, the values in the tuple are ``None``. """ not_indexable = (None, None, None) turncmp = {'lt': 'gt', 'le': 'ge', 'eq': 'eq', 'ge': 'le', 'gt': 'lt', } def get_cmp(var, const, op): var_value, const_value = var.value, const.value if (var.astType == 'variable' and var_value in indexedcols and const.astType in ['constant', 'variable']): if const.astType == 'variable': const_value = (const_value, ) return (var_value, op, const_value) return None def is_indexed_boolean(node): return (node.astType == 'variable' and node.astKind == 'bool' and node.value in indexedcols) # Boolean variables are indexable by themselves. if is_indexed_boolean(exprnode): return (exprnode.value, 'eq', True) # And so are negations of boolean variables. 
if exprnode.astType == 'op' and exprnode.value == 'invert': child = exprnode.children[0] if is_indexed_boolean(child): return (child.value, 'eq', False) # A negation of an expression will be returned as ``~child``. # The indexability of the negated expression will be decided later on. if child.astKind == "bool": return (child, 'invert', None) # Check node type. Only comparisons are indexable from now on. if exprnode.astType != 'op': return not_indexable cmpop = exprnode.value if cmpop not in turncmp: return not_indexable # Look for a variable-constant comparison in both directions. left, right = exprnode.children cmp_ = get_cmp(left, right, cmpop) if cmp_: return cmp_ cmp_ = get_cmp(right, left, turncmp[cmpop]) if cmp_: return cmp_ return not_indexable def _equiv_expr_node(x, y): """Returns whether two ExpressionNodes are equivalent. This is needed because '==' is overridden on ExpressionNode to return a new ExpressionNode. """ if not isinstance(x, ExpressionNode) and not isinstance(y, ExpressionNode): return x == y elif (type(x) is not type(y) or not isinstance(x, ExpressionNode) or not isinstance(y, ExpressionNode) or x.value != y.value or x.astKind != y.astKind or len(x.children) != len(y.children)): return False for xchild, ychild in zip(x.children, y.children): if not _equiv_expr_node(xchild, ychild): return False return True def _get_idx_expr_recurse(exprnode, indexedcols, idxexprs, strexpr): """Here lives the actual implementation of the get_idx_expr() wrapper. 'idxexprs' is a list of expressions in the form ``(var, (ops), (limits))``. 'strexpr' is the indexable expression in string format. These parameters will be received empty (i.e. [], ['']) for the first time and populated during the different recursive calls. Finally, they are returned in the last level to the original wrapper. If 'exprnode' is not indexable, it will return the tuple ([], ['']) so as to signal this. """ not_indexable = ([], ['']) op_conv = { 'and': '&', 'or': '|', 'not': '~', } negcmp = { 'lt': 'ge', 'le': 'gt', 'ge': 'lt', 'gt': 'le', } def fix_invert(idxcmp, exprnode, indexedcols): invert = False # Loop until all leading negations have been dealt with while idxcmp[1] == "invert": invert ^= True # The information about the negated node is in first position exprnode = idxcmp[0] idxcmp = _get_indexable_cmp(exprnode, indexedcols) return idxcmp, exprnode, invert # Indexable variable-constant comparison. idxcmp = _get_indexable_cmp(exprnode, indexedcols) idxcmp, exprnode, invert = fix_invert(idxcmp, exprnode, indexedcols) if idxcmp[0]: if invert: var, op, value = idxcmp if op == 'eq' and value in [True, False]: # ``var`` must be a boolean index. Flip its value. value ^= True else: op = negcmp[op] expr = (var, (op,), (value,)) invert = False else: expr = (idxcmp[0], (idxcmp[1],), (idxcmp[2],)) return [expr] # For now negations of complex expressions will be not supported as # forming part of an indexable condition. This might be supported in # the future. if invert: return not_indexable # Only conjunctions and disjunctions of comparisons are considered # for the moment. if exprnode.astType != 'op' or exprnode.value not in ['and', 'or']: return not_indexable left, right = exprnode.children # Get the expression at left lcolvar, lop, llim = _get_indexable_cmp(left, indexedcols) # Get the expression at right rcolvar, rop, rlim = _get_indexable_cmp(right, indexedcols) # Use conjunction of indexable VC comparisons like # ``(a <[=] x) & (x <[=] b)`` or ``(a >[=] x) & (x >[=] b)`` # as ``a <[=] x <[=] b``, for the moment. 
op = exprnode.value if (lcolvar is not None and rcolvar is not None and _equiv_expr_node(lcolvar, rcolvar) and op == 'and'): if lop in ['gt', 'ge'] and rop in ['lt', 'le']: # l <= x <= r expr = (lcolvar, (lop, rop), (llim, rlim)) return [expr] if lop in ['lt', 'le'] and rop in ['gt', 'ge']: # l >= x >= r expr = (rcolvar, (rop, lop), (rlim, llim)) return [expr] # Recursively get the expressions at the left and the right lexpr = _get_idx_expr_recurse(left, indexedcols, idxexprs, strexpr) rexpr = _get_idx_expr_recurse(right, indexedcols, idxexprs, strexpr) def add_expr(expr, idxexprs, strexpr): """Add a single expression to the list.""" if isinstance(expr, list): # expr is a single expression idxexprs.append(expr[0]) lenexprs = len(idxexprs) # Mutate the strexpr string if lenexprs == 1: strexpr[:] = ["e0"] else: strexpr[:] = [ "(%s %s e%d)" % (strexpr[0], op_conv[op], lenexprs - 1)] # Add expressions to the indexable list when they are and'ed, or # they are both indexable. if lexpr != not_indexable and (op == "and" or rexpr != not_indexable): add_expr(lexpr, idxexprs, strexpr) if rexpr != not_indexable: add_expr(rexpr, idxexprs, strexpr) return (idxexprs, strexpr) if rexpr != not_indexable and op == "and": add_expr(rexpr, idxexprs, strexpr) return (idxexprs, strexpr) # Can not use indexed column. return not_indexable def _get_idx_expr(expr, indexedcols): """Extract an indexable expression out of `exprnode`. Looks for variable-constant comparisons in the expression node `exprnode` involving variables in `indexedcols`. It returns a tuple of (idxexprs, strexpr) where 'idxexprs' is a list of expressions in the form ``(var, (ops), (limits))`` and 'strexpr' is the indexable expression in string format. Expressions such as ``0 < c1 <= 1`` do not work as expected. Right now only some of the *indexable comparisons* are considered: * ``a <[=] x``, ``a == x`` and ``a >[=] x`` * ``(a <[=] x) & (y <[=] b)`` and ``(a == x) | (b == y)`` * ``~(~c_bool)``, ``~~c_bool`` and ``~(~c_bool) & (c_extra != 2)`` (where ``a``, ``b`` and ``c_bool`` are indexed columns, but ``c_extra`` is not) Particularly, the ``!=`` operator and negations of complex boolean expressions are *not considered* as valid candidates: * ``a != 1`` and ``c_bool != False`` * ``~((a > 0) & (c_bool))`` """ return _get_idx_expr_recurse(expr, indexedcols, [], ['']) class CompiledCondition(object): """Container for a compiled condition.""" # Lazy attributes # ``````````````` @lazyattr def index_variables(self): """The columns participating in the index expression.""" idxexprs = self.index_expressions idxvars = [] for expr in idxexprs: idxvar = expr[0] if idxvar not in idxvars: idxvars.append(idxvar) return frozenset(idxvars) def __init__(self, func, params, idxexprs, strexpr): self.function = func """The compiled function object corresponding to this condition.""" self.parameters = params """A list of parameter names for this condition.""" self.index_expressions = idxexprs """A list of expressions in the form ``(var, (ops), (limits))``.""" self.string_expression = strexpr """The indexable expression in string format.""" def __repr__(self): return ("idxexprs: %s\nstrexpr: %s\nidxvars: %s" % (self.index_expressions, self.string_expression, self.index_variables)) def with_replaced_vars(self, condvars): """Replace index limit variables with their values in-place. A new compiled condition is returned. Values are taken from the `condvars` mapping and converted to Python scalars. 
""" exprs = self.index_expressions exprs2 = [] for expr in exprs: idxlims = expr[2] # the limits are in third place limit_values = [] for idxlim in idxlims: if isinstance(idxlim, tuple): # variable idxlim = condvars[idxlim[0]] # look up value idxlim = idxlim.tolist() # convert back to Python limit_values.append(idxlim) # Add this replaced entry to the new exprs2 var, ops, _ = expr exprs2.append((var, ops, tuple(limit_values))) # Create a new container for the converted values newcc = CompiledCondition( self.function, self.parameters, exprs2, self.string_expression) return newcc def _get_variable_names(expression): """Return the list of variable names in the Numexpr `expression`.""" names = [] stack = [expression] while stack: node = stack.pop() if node.astType == 'variable': names.append(node.value) elif hasattr(node, 'children'): stack.extend(node.children) return list(set(names)) # remove repeated names def compile_condition(condition, typemap, indexedcols): """Compile a condition and extract usable index conditions. Looks for variable-constant comparisons in the `condition` string involving the indexed columns whose variable names appear in `indexedcols`. The part of `condition` having usable indexes is returned as a compiled condition in a `CompiledCondition` container. Expressions such as '0 < c1 <= 1' do not work as expected. The Numexpr types of *all* variables must be given in the `typemap` mapping. The ``function`` of the resulting `CompiledCondition` instance is a Numexpr function object, and the ``parameters`` list indicates the order of its parameters. """ # Get the expression tree and extract index conditions. expr = stringToExpression(condition, typemap, {}) if expr.astKind != 'bool': raise TypeError("condition ``%s`` does not have a boolean type" % condition) idxexprs = _get_idx_expr(expr, indexedcols) # Post-process the answer if isinstance(idxexprs, list): # Simple expression strexpr = ['e0'] else: # Complex expression idxexprs, strexpr = idxexprs # Get rid of the unneccessary list wrapper for strexpr strexpr = strexpr[0] # Get the variable names used in the condition. # At the same time, build its signature. varnames = _get_variable_names(expr) signature = [(var, typemap[var]) for var in varnames] try: # See the comments in `numexpr.evaluate()` for the # reasons of inserting copy operators for unaligned, # *unidimensional* arrays. func = NumExpr(expr, signature) except NotImplementedError as nie: # Try to make this Numexpr error less cryptic. raise _unsupported_operation_error(nie) params = varnames # This is more comfortable to handle about than a tuple. return CompiledCondition(func, params, idxexprs, strexpr) def call_on_recarr(func, params, recarr, param2arg=None): """Call `func` with `params` over `recarr`. The `param2arg` function, when specified, is used to get an argument given a parameter name; otherwise, the parameter itself is used as an argument. When the argument is a `Column` object, the proper column from `recarr` is used as its value. """ args = [] for param in params: if param2arg: arg = param2arg(param) else: arg = param if hasattr(arg, 'pathname'): # looks like a column arg = get_nested_field(recarr, arg.pathname) args.append(arg) return func(*args)
bsd-3-clause
pbaesse/Sissens
lib/python2.7/site-packages/eventlet/debug.py
14
6326
"""The debug module contains utilities and functions for better debugging Eventlet-powered applications.""" from __future__ import print_function import os import sys import linecache import re import inspect __all__ = ['spew', 'unspew', 'format_hub_listeners', 'format_hub_timers', 'hub_listener_stacks', 'hub_exceptions', 'tpool_exceptions', 'hub_prevent_multiple_readers', 'hub_timer_stacks', 'hub_blocking_detection'] _token_splitter = re.compile('\W+') class Spew(object): def __init__(self, trace_names=None, show_values=True): self.trace_names = trace_names self.show_values = show_values def __call__(self, frame, event, arg): if event == 'line': lineno = frame.f_lineno if '__file__' in frame.f_globals: filename = frame.f_globals['__file__'] if (filename.endswith('.pyc') or filename.endswith('.pyo')): filename = filename[:-1] name = frame.f_globals['__name__'] line = linecache.getline(filename, lineno) else: name = '[unknown]' try: src = inspect.getsourcelines(frame) line = src[lineno] except IOError: line = 'Unknown code named [%s]. VM instruction #%d' % ( frame.f_code.co_name, frame.f_lasti) if self.trace_names is None or name in self.trace_names: print('%s:%s: %s' % (name, lineno, line.rstrip())) if not self.show_values: return self details = [] tokens = _token_splitter.split(line) for tok in tokens: if tok in frame.f_globals: details.append('%s=%r' % (tok, frame.f_globals[tok])) if tok in frame.f_locals: details.append('%s=%r' % (tok, frame.f_locals[tok])) if details: print("\t%s" % ' '.join(details)) return self def spew(trace_names=None, show_values=False): """Install a trace hook which writes incredibly detailed logs about what code is being executed to stdout. """ sys.settrace(Spew(trace_names, show_values)) def unspew(): """Remove the trace hook installed by spew. """ sys.settrace(None) def format_hub_listeners(): """ Returns a formatted string of the current listeners on the current hub. This can be useful in determining what's going on in the event system, especially when used in conjunction with :func:`hub_listener_stacks`. """ from eventlet import hubs hub = hubs.get_hub() result = ['READERS:'] for l in hub.get_readers(): result.append(repr(l)) result.append('WRITERS:') for l in hub.get_writers(): result.append(repr(l)) return os.linesep.join(result) def format_hub_timers(): """ Returns a formatted string of the current timers on the current hub. This can be useful in determining what's going on in the event system, especially when used in conjunction with :func:`hub_timer_stacks`. """ from eventlet import hubs hub = hubs.get_hub() result = ['TIMERS:'] for l in hub.timers: result.append(repr(l)) return os.linesep.join(result) def hub_listener_stacks(state=False): """Toggles whether or not the hub records the stack when clients register listeners on file descriptors. This can be useful when trying to figure out what the hub is up to at any given moment. To inspect the stacks of the current listeners, call :func:`format_hub_listeners` at critical junctures in the application logic. """ from eventlet import hubs hubs.get_hub().set_debug_listeners(state) def hub_timer_stacks(state=False): """Toggles whether or not the hub records the stack when timers are set. To inspect the stacks of the current timers, call :func:`format_hub_timers` at critical junctures in the application logic. 
""" from eventlet.hubs import timer timer._g_debug = state def hub_prevent_multiple_readers(state=True): """Toggle prevention of multiple greenlets reading from a socket When multiple greenlets read from the same socket it is often hard to predict which greenlet will receive what data. To achieve resource sharing consider using ``eventlet.pools.Pool`` instead. But if you really know what you are doing you can change the state to ``False`` to stop the hub from protecting against this mistake. """ from eventlet.hubs import hub hub.g_prevent_multiple_readers = state def hub_exceptions(state=True): """Toggles whether the hub prints exceptions that are raised from its timers. This can be useful to see how greenthreads are terminating. """ from eventlet import hubs hubs.get_hub().set_timer_exceptions(state) from eventlet import greenpool greenpool.DEBUG = state def tpool_exceptions(state=False): """Toggles whether tpool itself prints exceptions that are raised from functions that are executed in it, in addition to raising them like it normally does.""" from eventlet import tpool tpool.QUIET = not state def hub_blocking_detection(state=False, resolution=1): """Toggles whether Eventlet makes an effort to detect blocking behavior in an application. It does this by telling the kernel to raise a SIGALARM after a short timeout, and clearing the timeout every time the hub greenlet is resumed. Therefore, any code that runs for a long time without yielding to the hub will get interrupted by the blocking detector (don't use it in production!). The *resolution* argument governs how long the SIGALARM timeout waits in seconds. The implementation uses :func:`signal.setitimer` and can be specified as a floating-point value. The shorter the resolution, the greater the chance of false positives. """ from eventlet import hubs assert resolution > 0 hubs.get_hub().debug_blocking = state hubs.get_hub().debug_blocking_resolution = resolution if not state: hubs.get_hub().block_detect_post()
gpl-3.0
lanmomo/gameserver-notifier
src/notifier.py
1
2023
#!/usr/bin/env python3 import sys import subprocess import time import json import getopt import requests def query_server(token, game_id): data = {} command_ip = 'ip addr | grep "inet " | grep -v "127." | head -n 1 | tr -s " " | cut -d " " -f3 | cut -d "/" -f1' data['hostname'] = subprocess.check_output('hostname -s', shell=True).rstrip().decode() data['ip'] = subprocess.check_output(command_ip, shell=True).rstrip().decode() data['game'] = game_id data['token'] = token return data def notify_master(url, data): headers = {'Content-type': 'application/json', 'Accept': 'application/json'} return requests.post(url + '/api/servers', data=json.dumps(data), headers=headers).json() # python3 notifier.py --token=abc123 --interval=60 --url=https://lanmomo.ca css def main(): opts, args = getopt.getopt(sys.argv[1:], 't:i:u:v', ['token=', 'interval=', 'url=', 'verbose']) token = None interval = None url = None verbose = None invalid = False for opt in opts: if opt[0] in ('-t', '--token'): token = opt[1] elif opt[0] in ('-i', '--interval'): interval = int(opt[1]) elif opt[0] in ('-u', '--url'): url = opt[1] elif opt[0] in ('-v', '--verbose'): verbose = True if not token: print('No token specified') invalid = True if not url: print('No url specified') invalid = True if not args: print('No game_id specified') invalid = True if invalid: exit(1) game_id = args[0] while True: try: query_result = query_server(token, game_id) if verbose: print(query_result) result = notify_master(url, query_result) if verbose: print(result) except Exception as e: print(e) if not interval: break time.sleep(interval) if __name__ == '__main__': main()
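# Hedged usage sketch, expanding on the usage comment in main(); the token,
# URL and game id below are placeholders.
#
#     python3 notifier.py --token=abc123 --interval=60 \
#         --url=https://master.example.com 42
#
# posts {'hostname': ..., 'ip': ..., 'game': '42', 'token': 'abc123'} as
# JSON to https://master.example.com/api/servers every 60 seconds; without
# --interval the notification is sent once and the script exits.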
gpl-2.0
tinkhaven-organization/odoo
openerp/cli/start.py
240
2748
#!/usr/bin/env python # -*- coding: utf-8 -*- import argparse import glob import os import sys from . import Command from .server import main from openerp.modules.module import get_module_root, MANIFEST from openerp.service.db import _create_empty_database, DatabaseExists class Start(Command): """Quick start the Odoo server for your project""" def get_module_list(self, path): mods = glob.glob(os.path.join(path, '*/%s' % MANIFEST)) return [mod.split(os.path.sep)[-2] for mod in mods] def run(self, cmdargs): parser = argparse.ArgumentParser( prog="%s start" % sys.argv[0].split(os.path.sep)[-1], description=self.__doc__ ) parser.add_argument('--path', default=".", help="Directory where your project's modules are stored (will autodetect from current dir)") parser.add_argument("-d", "--database", dest="db_name", default=None, help="Specify the database name (defaults to the project's directory name)") args, unknown = parser.parse_known_args(args=cmdargs) project_path = os.path.abspath(os.path.expanduser(os.path.expandvars(args.path))) module_root = get_module_root(project_path) db_name = None if module_root: # started in a module so we choose this module name for database db_name = project_path.split(os.path.sep)[-1] # go to the parent's directory of the module root project_path = os.path.abspath(os.path.join(project_path, os.pardir)) # check if one of the subfolders has at least one module mods = self.get_module_list(project_path) if mods and '--addons-path' not in cmdargs: cmdargs.append('--addons-path=%s' % project_path) if not args.db_name: args.db_name = db_name or project_path.split(os.path.sep)[-1] cmdargs.extend(('-d', args.db_name)) # TODO: forbid some database names ? eg template1, ... try: _create_empty_database(args.db_name) except DatabaseExists: pass except Exception, e: die("Could not create database `%s`. (%s)" % (args.db_name, e)) if '--db-filter' not in cmdargs: cmdargs.append('--db-filter=^%s$' % args.db_name) # Remove --path /-p options from the command arguments def to_remove(i, l): return l[i] == '-p' or l[i].startswith('--path') or \ (i > 0 and l[i-1] in ['-p', '--path']) cmdargs = [v for i, v in enumerate(cmdargs) if not to_remove(i, cmdargs)] main(cmdargs) def die(message, code=1): print >>sys.stderr, message sys.exit(code)
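# Hedged usage sketch: the launcher name and paths are illustrative.
#
#     $ odoo.py start --path ~/projects/my_addons -d mydb
#
# scans ~/projects/my_addons for module manifests, appends a matching
# --addons-path, creates the ``mydb`` database if it does not exist yet,
# adds --db-filter=^mydb$, and hands the remaining arguments to the regular
# server entry point. Run from inside a module directory with no arguments,
# the database name defaults to that directory's name.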
agpl-3.0
charukiewicz/beer-manager
venv/lib/python3.4/site-packages/sqlalchemy/testing/schema.py
10
3433
# testing/schema.py # Copyright (C) 2005-2014 the SQLAlchemy authors and contributors # <see AUTHORS file> # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php from . import exclusions from .. import schema, event from . import config __all__ = 'Table', 'Column', table_options = {} def Table(*args, **kw): """A schema.Table wrapper/hook for dialect-specific tweaks.""" test_opts = dict([(k, kw.pop(k)) for k in list(kw) if k.startswith('test_')]) kw.update(table_options) if exclusions.against(config._current, 'mysql'): if 'mysql_engine' not in kw and 'mysql_type' not in kw: if 'test_needs_fk' in test_opts or 'test_needs_acid' in test_opts: kw['mysql_engine'] = 'InnoDB' else: kw['mysql_engine'] = 'MyISAM' # Apply some default cascading rules for self-referential foreign keys. # MySQL InnoDB has some issues around selecting self-refs too. if exclusions.against(config._current, 'firebird'): table_name = args[0] unpack = (config.db.dialect. identifier_preparer.unformat_identifiers) # Only going after ForeignKeys in Columns. May need to # expand to ForeignKeyConstraint too. fks = [fk for col in args if isinstance(col, schema.Column) for fk in col.foreign_keys] for fk in fks: # root around in raw spec ref = fk._colspec if isinstance(ref, schema.Column): name = ref.table.name else: # take just the table name: on FB there cannot be # a schema, so the first element is always the # table name, possibly followed by the field name name = unpack(ref)[0] if name == table_name: if fk.ondelete is None: fk.ondelete = 'CASCADE' if fk.onupdate is None: fk.onupdate = 'CASCADE' return schema.Table(*args, **kw) def Column(*args, **kw): """A schema.Column wrapper/hook for dialect-specific tweaks.""" test_opts = dict([(k, kw.pop(k)) for k in list(kw) if k.startswith('test_')]) if config.requirements.foreign_key_ddl.predicate(config): args = [arg for arg in args if not isinstance(arg, schema.ForeignKey)] col = schema.Column(*args, **kw) if 'test_needs_autoincrement' in test_opts and \ kw.get('primary_key', False): # allow any test suite to pick up on this col.info['test_needs_autoincrement'] = True # hardcoded rule for firebird, oracle; this should # be moved out if exclusions.against(config._current, 'firebird', 'oracle'): def add_seq(c, tbl): c._init_items( schema.Sequence(_truncate_name( config.db.dialect, tbl.name + '_' + c.name + '_seq'), optional=True) ) event.listen(col, 'after_parent_attach', add_seq, propagate=True) return col def _truncate_name(dialect, name): if len(name) > dialect.max_identifier_length: return name[0:max(dialect.max_identifier_length - 6, 0)] + \ "_" + hex(hash(name) % 64)[2:] else: return name
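# Hedged usage sketch (not part of the module): a test-suite table declared
# through the wrappers above. The table and column names are illustrative.
#
#     from sqlalchemy import Integer, String, MetaData
#     from sqlalchemy.testing.schema import Table, Column
#
#     users = Table(
#         'users', MetaData(),
#         Column('id', Integer, primary_key=True,
#                test_needs_autoincrement=True),
#         Column('name', String(50)),
#         test_needs_fk=True)
#
# ``test_needs_fk`` makes the MySQL branch pick InnoDB instead of MyISAM,
# and ``test_needs_autoincrement`` on a primary key marks the column so the
# firebird/oracle hook attaches an optional Sequence.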
mit
doismellburning/edx-platform
common/djangoapps/external_auth/views.py
51
36636
import functools import json import logging import random import re import string # pylint: disable=deprecated-module import fnmatch import unicodedata import urllib from textwrap import dedent from external_auth.models import ExternalAuthMap from external_auth.djangostore import DjangoOpenIDStore from django.conf import settings from django.contrib.auth import REDIRECT_FIELD_NAME, authenticate, login from django.contrib.auth.models import User from django.core.urlresolvers import reverse from django.core.validators import validate_email from django.core.exceptions import ValidationError if settings.FEATURES.get('AUTH_USE_CAS'): from django_cas.views import login as django_cas_login from student.helpers import get_next_url_for_login_page from student.models import UserProfile from django.http import HttpResponse, HttpResponseRedirect, HttpResponseForbidden from django.utils.http import urlquote, is_safe_url from django.shortcuts import redirect from django.utils.translation import ugettext as _ from edxmako.shortcuts import render_to_response, render_to_string try: from django.views.decorators.csrf import csrf_exempt except ImportError: from django.contrib.csrf.middleware import csrf_exempt from django.views.decorators.csrf import ensure_csrf_cookie import django_openid_auth.views as openid_views from django_openid_auth import auth as openid_auth from openid.consumer.consumer import SUCCESS from openid.server.server import Server, ProtocolError, UntrustedReturnURL from openid.server.trustroot import TrustRoot from openid.extensions import ax, sreg from ratelimitbackend.exceptions import RateLimitException import student.views from xmodule.modulestore.django import modulestore from opaque_keys.edx.locations import SlashSeparatedCourseKey log = logging.getLogger("edx.external_auth") AUDIT_LOG = logging.getLogger("audit") SHIBBOLETH_DOMAIN_PREFIX = settings.SHIBBOLETH_DOMAIN_PREFIX OPENID_DOMAIN_PREFIX = settings.OPENID_DOMAIN_PREFIX # ----------------------------------------------------------------------------- # OpenID Common # ----------------------------------------------------------------------------- @csrf_exempt def default_render_failure(request, message, status=403, template_name='extauth_failure.html', exception=None): """Render an Openid error page to the user""" log.debug("In openid_failure " + message) data = render_to_string(template_name, dict(message=message, exception=exception)) return HttpResponse(data, status=status) # ----------------------------------------------------------------------------- # OpenID Authentication # ----------------------------------------------------------------------------- def generate_password(length=12, chars=string.letters + string.digits): """Generate internal password for externally authenticated user""" choice = random.SystemRandom().choice return ''.join([choice(chars) for _i in range(length)]) @csrf_exempt def openid_login_complete(request, redirect_field_name=REDIRECT_FIELD_NAME, render_failure=None): """Complete the openid login process""" render_failure = (render_failure or default_render_failure) openid_response = openid_views.parse_openid_response(request) if not openid_response: return render_failure(request, 'This is an OpenID relying party endpoint.') if openid_response.status == SUCCESS: external_id = openid_response.identity_url oid_backend = openid_auth.OpenIDBackend() details = oid_backend._extract_user_details(openid_response) log.debug('openid success, details=%s', details) url = getattr(settings, 'OPENID_SSO_SERVER_URL', 
None) external_domain = "{0}{1}".format(OPENID_DOMAIN_PREFIX, url) fullname = '%s %s' % (details.get('first_name', ''), details.get('last_name', '')) return _external_login_or_signup( request, external_id, external_domain, details, details.get('email', ''), fullname, retfun=functools.partial(redirect, get_next_url_for_login_page(request)), ) return render_failure(request, 'Openid failure') def _external_login_or_signup(request, external_id, external_domain, credentials, email, fullname, retfun=None): """Generic external auth login or signup""" # see if we have a map from this external_id to an edX username try: eamap = ExternalAuthMap.objects.get(external_id=external_id, external_domain=external_domain) log.debug(u'Found eamap=%s', eamap) except ExternalAuthMap.DoesNotExist: # go render form for creating edX user eamap = ExternalAuthMap(external_id=external_id, external_domain=external_domain, external_credentials=json.dumps(credentials)) eamap.external_email = email eamap.external_name = fullname eamap.internal_password = generate_password() log.debug(u'Created eamap=%s', eamap) eamap.save() log.info(u"External_Auth login_or_signup for %s : %s : %s : %s", external_domain, external_id, email, fullname) uses_shibboleth = settings.FEATURES.get('AUTH_USE_SHIB') and external_domain.startswith(SHIBBOLETH_DOMAIN_PREFIX) uses_certs = settings.FEATURES.get('AUTH_USE_CERTIFICATES') internal_user = eamap.user if internal_user is None: if uses_shibboleth: # If we are using shib, try to link accounts # For Stanford shib, the email the idp returns is actually under the control of the user. # Since the id the idps return is not user-editable, and is of the form "[email protected]", # use the id to link accounts instead. try: link_user = User.objects.get(email=eamap.external_id) if not ExternalAuthMap.objects.filter(user=link_user).exists(): # if there's no pre-existing linked eamap, we link the user eamap.user = link_user eamap.save() internal_user = link_user log.info(u'SHIB: Linking existing account for %s', eamap.external_id) # now pass through to log in else: # otherwise, there must have been an error, b/c we've already linked a user with these external # creds failure_msg = _( "You have already created an account using " "an external login like WebAuth or Shibboleth. " "Please contact {tech_support_email} for support." ).format( tech_support_email=settings.TECH_SUPPORT_EMAIL, ) return default_render_failure(request, failure_msg) except User.DoesNotExist: log.info(u'SHIB: No user for %s yet, doing signup', eamap.external_email) return _signup(request, eamap, retfun) else: log.info(u'No user for %s yet. 
doing signup', eamap.external_email) return _signup(request, eamap, retfun) # We trust shib's authentication, so no need to authenticate using the password again uname = internal_user.username if uses_shibboleth: user = internal_user # Assuming this 'AUTHENTICATION_BACKENDS' is set in settings, which I think is safe if settings.AUTHENTICATION_BACKENDS: auth_backend = settings.AUTHENTICATION_BACKENDS[0] else: auth_backend = 'django.contrib.auth.backends.ModelBackend' user.backend = auth_backend if settings.FEATURES['SQUELCH_PII_IN_LOGS']: AUDIT_LOG.info(u'Linked user.id: {0} logged in via Shibboleth'.format(user.id)) else: AUDIT_LOG.info(u'Linked user "{0}" logged in via Shibboleth'.format(user.email)) elif uses_certs: # Certificates are trusted, so just link the user and log the action user = internal_user user.backend = 'django.contrib.auth.backends.ModelBackend' if settings.FEATURES['SQUELCH_PII_IN_LOGS']: AUDIT_LOG.info(u'Linked user_id {0} logged in via SSL certificate'.format(user.id)) else: AUDIT_LOG.info(u'Linked user "{0}" logged in via SSL certificate'.format(user.email)) else: user = authenticate(username=uname, password=eamap.internal_password, request=request) if user is None: # we want to log the failure, but don't want to log the password attempted: if settings.FEATURES['SQUELCH_PII_IN_LOGS']: AUDIT_LOG.warning(u'External Auth Login failed') else: AUDIT_LOG.warning(u'External Auth Login failed for "{0}"'.format(uname)) return _signup(request, eamap, retfun) if not user.is_active: if settings.FEATURES.get('BYPASS_ACTIVATION_EMAIL_FOR_EXTAUTH'): # if BYPASS_ACTIVATION_EMAIL_FOR_EXTAUTH, we trust external auth and activate any users # that aren't already active user.is_active = True user.save() if settings.FEATURES['SQUELCH_PII_IN_LOGS']: AUDIT_LOG.info(u'Activating user {0} due to external auth'.format(user.id)) else: AUDIT_LOG.info(u'Activating user "{0}" due to external auth'.format(uname)) else: if settings.FEATURES['SQUELCH_PII_IN_LOGS']: AUDIT_LOG.warning(u'User {0} is not active after external login'.format(user.id)) else: AUDIT_LOG.warning(u'User "{0}" is not active after external login'.format(uname)) # TODO: improve error page msg = 'Account not yet activated: please look for link in your email' return default_render_failure(request, msg) login(request, user) request.session.set_expiry(0) if settings.FEATURES['SQUELCH_PII_IN_LOGS']: AUDIT_LOG.info(u"Login success - user.id: {0}".format(user.id)) else: AUDIT_LOG.info(u"Login success - {0} ({1})".format(user.username, user.email)) if retfun is None: return redirect('/') return retfun() def _flatten_to_ascii(txt): """ Flattens possibly unicode txt to ascii (django username limitation) @param name: @return: the flattened txt (in the same type as was originally passed in) """ if isinstance(txt, str): txt = txt.decode('utf-8') return unicodedata.normalize('NFKD', txt).encode('ASCII', 'ignore') else: return unicode(unicodedata.normalize('NFKD', txt).encode('ASCII', 'ignore')) @ensure_csrf_cookie def _signup(request, eamap, retfun=None): """ Present form to complete for signup via external authentication. Even though the user has external credentials, he/she still needs to create an account on the edX system, and fill in the user registration form. eamap is an ExternalAuthMap object, specifying the external user for which to complete the signup. retfun is a function to execute for the return value, if immediate signup is used. That allows @ssl_login_shortcut() to work. 
""" # save this for use by student.views.create_account request.session['ExternalAuthMap'] = eamap if settings.FEATURES.get('AUTH_USE_CERTIFICATES_IMMEDIATE_SIGNUP', ''): # do signin immediately, by calling create_account, instead of asking # student to fill in form. MIT students already have information filed. username = eamap.external_email.split('@', 1)[0] username = username.replace('.', '_') post_vars = dict(username=username, honor_code=u'true', terms_of_service=u'true') log.info(u'doing immediate signup for %s, params=%s', username, post_vars) student.views.create_account(request, post_vars) # should check return content for successful completion before if retfun is not None: return retfun() else: return redirect('/') # default conjoin name, no spaces, flattened to ascii b/c django can't handle unicode usernames, sadly # but this only affects username, not fullname username = re.sub(r'\s', '', _flatten_to_ascii(eamap.external_name), flags=re.UNICODE) context = {'has_extauth_info': True, 'show_signup_immediately': True, 'extauth_domain': eamap.external_domain, 'extauth_id': eamap.external_id, 'extauth_email': eamap.external_email, 'extauth_username': username, 'extauth_name': eamap.external_name, 'ask_for_tos': True, } # Some openEdX instances can't have terms of service for shib users, like # according to Stanford's Office of General Counsel uses_shibboleth = (settings.FEATURES.get('AUTH_USE_SHIB') and eamap.external_domain.startswith(SHIBBOLETH_DOMAIN_PREFIX)) if uses_shibboleth and settings.FEATURES.get('SHIB_DISABLE_TOS'): context['ask_for_tos'] = False # detect if full name is blank and ask for it from user context['ask_for_fullname'] = eamap.external_name.strip() == '' # validate provided mail and if it's not valid ask the user try: validate_email(eamap.external_email) context['ask_for_email'] = False except ValidationError: context['ask_for_email'] = True log.info(u'EXTAUTH: Doing signup for %s', eamap.external_id) return student.views.register_user(request, extra_context=context) # ----------------------------------------------------------------------------- # MIT SSL # ----------------------------------------------------------------------------- def _ssl_dn_extract_info(dn_string): """ Extract username, email address (may be [email protected]) and full name from the SSL DN string. Return (user,email,fullname) if successful, and None otherwise. """ ss = re.search('/emailAddress=(.*)@([^/]+)', dn_string) if ss: user = ss.group(1) email = "%s@%s" % (user, ss.group(2)) else: return None ss = re.search('/CN=([^/]+)/', dn_string) if ss: fullname = ss.group(1) else: return None return (user, email, fullname) def ssl_get_cert_from_request(request): """ Extract user information from certificate, if it exists, returning (user, email, fullname). Else return None. """ certkey = "SSL_CLIENT_S_DN" # specify the request.META field to use cert = request.META.get(certkey, '') if not cert: cert = request.META.get('HTTP_' + certkey, '') if not cert: try: # try the direct apache2 SSL key cert = request._req.subprocess_env.get(certkey, '') except Exception: return '' return cert def ssl_login_shortcut(fn): """ Python function decorator for login procedures, to allow direct login based on existing ExternalAuth record and MIT ssl certificate. """ def wrapped(*args, **kwargs): """ This manages the function wrapping, by determining whether to inject the _external signup or just continuing to the internal function call. 
""" if not settings.FEATURES['AUTH_USE_CERTIFICATES']: return fn(*args, **kwargs) request = args[0] if request.user and request.user.is_authenticated(): # don't re-authenticate return fn(*args, **kwargs) cert = ssl_get_cert_from_request(request) if not cert: # no certificate information - show normal login window return fn(*args, **kwargs) def retfun(): """Wrap function again for call by _external_login_or_signup""" return fn(*args, **kwargs) (_user, email, fullname) = _ssl_dn_extract_info(cert) return _external_login_or_signup( request, external_id=email, external_domain="ssl:MIT", credentials=cert, email=email, fullname=fullname, retfun=retfun ) return wrapped @csrf_exempt def ssl_login(request): """ This is called by branding.views.index when FEATURES['AUTH_USE_CERTIFICATES'] = True Used for MIT user authentication. This presumes the web server (nginx) has been configured to require specific client certificates. If the incoming protocol is HTTPS (SSL) then authenticate via client certificate. The certificate provides user email and fullname; this populates the ExternalAuthMap. The user is nevertheless still asked to complete the edX signup. Else continues on with student.views.index, and no authentication. """ # Just to make sure we're calling this only at MIT: if not settings.FEATURES['AUTH_USE_CERTIFICATES']: return HttpResponseForbidden() cert = ssl_get_cert_from_request(request) if not cert: # no certificate information - go onward to main index return student.views.index(request) (_user, email, fullname) = _ssl_dn_extract_info(cert) redirect_to = get_next_url_for_login_page(request) retfun = functools.partial(redirect, redirect_to) return _external_login_or_signup( request, external_id=email, external_domain="ssl:MIT", credentials=cert, email=email, fullname=fullname, retfun=retfun ) # ----------------------------------------------------------------------------- # CAS (Central Authentication Service) # ----------------------------------------------------------------------------- def cas_login(request, next_page=None, required=False): """ Uses django_cas for authentication. CAS is a common authentcation method pioneered by Yale. See http://en.wikipedia.org/wiki/Central_Authentication_Service Does normal CAS login then generates user_profile if nonexistent, and if login was successful. We assume that user details are maintained by the central service, and thus an empty user profile is appropriate. """ ret = django_cas_login(request, next_page, required) if request.user.is_authenticated(): user = request.user if not UserProfile.objects.filter(user=user): user_profile = UserProfile(name=user.username, user=user) user_profile.save() return ret # ----------------------------------------------------------------------------- # Shibboleth (Stanford and others. Uses *Apache* environment variables) # ----------------------------------------------------------------------------- def shib_login(request): """ Uses Apache's REMOTE_USER environment variable as the external id. This in turn typically uses EduPersonPrincipalName http://www.incommonfederation.org/attributesummary.html#eduPersonPrincipal but the configuration is in the shibboleth software. """ shib_error_msg = _(dedent( """ Your university identity server did not return your ID information to us. Please try logging in again. (You may need to restart your browser.) 
""")) if not request.META.get('REMOTE_USER'): log.error(u"SHIB: no REMOTE_USER found in request.META") return default_render_failure(request, shib_error_msg) elif not request.META.get('Shib-Identity-Provider'): log.error(u"SHIB: no Shib-Identity-Provider in request.META") return default_render_failure(request, shib_error_msg) else: # If we get here, the user has authenticated properly shib = {attr: request.META.get(attr, '').decode('utf-8') for attr in ['REMOTE_USER', 'givenName', 'sn', 'mail', 'Shib-Identity-Provider', 'displayName']} # Clean up first name, last name, and email address # TODO: Make this less hardcoded re: format, but split will work # even if ";" is not present, since we are accessing 1st element shib['sn'] = shib['sn'].split(";")[0].strip().capitalize() shib['givenName'] = shib['givenName'].split(";")[0].strip().capitalize() # TODO: should we be logging creds here, at info level? log.info(u"SHIB creds returned: %r", shib) fullname = shib['displayName'] if shib['displayName'] else u'%s %s' % (shib['givenName'], shib['sn']) redirect_to = get_next_url_for_login_page(request) retfun = functools.partial(_safe_postlogin_redirect, redirect_to, request.get_host()) return _external_login_or_signup( request, external_id=shib['REMOTE_USER'], external_domain=SHIBBOLETH_DOMAIN_PREFIX + shib['Shib-Identity-Provider'], credentials=shib, email=shib['mail'], fullname=fullname, retfun=retfun ) def _safe_postlogin_redirect(redirect_to, safehost, default_redirect='/'): """ If redirect_to param is safe (not off this host), then perform the redirect. Otherwise just redirect to '/'. Basically copied from django.contrib.auth.views.login @param redirect_to: user-supplied redirect url @param safehost: which host is safe to redirect to @return: an HttpResponseRedirect """ if is_safe_url(url=redirect_to, host=safehost): return redirect(redirect_to) return redirect(default_redirect) def course_specific_login(request, course_id): """ Dispatcher function for selecting the specific login method required by the course """ course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id) course = modulestore().get_course(course_key) if not course: # couldn't find the course, will just return vanilla signin page return redirect_with_get('signin_user', request.GET) # now the dispatching conditionals. Only shib for now if ( settings.FEATURES.get('AUTH_USE_SHIB') and course.enrollment_domain and course.enrollment_domain.startswith(SHIBBOLETH_DOMAIN_PREFIX) ): return redirect_with_get('shib-login', request.GET) # Default fallthrough to normal signin page return redirect_with_get('signin_user', request.GET) def course_specific_register(request, course_id): """ Dispatcher function for selecting the specific registration method required by the course """ course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id) course = modulestore().get_course(course_key) if not course: # couldn't find the course, will just return vanilla registration page return redirect_with_get('register_user', request.GET) # now the dispatching conditionals. 
Only shib for now if ( settings.FEATURES.get('AUTH_USE_SHIB') and course.enrollment_domain and course.enrollment_domain.startswith(SHIBBOLETH_DOMAIN_PREFIX) ): # shib-login takes care of both registration and login flows return redirect_with_get('shib-login', request.GET) # Default fallthrough to normal registration page return redirect_with_get('register_user', request.GET) def redirect_with_get(view_name, get_querydict, do_reverse=True): """ Helper function to carry over get parameters across redirects Using urlencode(safe='/') because the @login_required decorator generates 'next' queryparams with '/' unencoded """ if do_reverse: url = reverse(view_name) else: url = view_name if get_querydict: return redirect("%s?%s" % (url, get_querydict.urlencode(safe='/'))) return redirect(view_name) # ----------------------------------------------------------------------------- # OpenID Provider # ----------------------------------------------------------------------------- def get_xrds_url(resource, request): """ Return the XRDS url for a resource """ host = request.get_host() location = host + '/openid/provider/' + resource + '/' if request.is_secure(): return 'https://' + location else: return 'http://' + location def add_openid_simple_registration(request, response, data): sreg_data = {} sreg_request = sreg.SRegRequest.fromOpenIDRequest(request) sreg_fields = sreg_request.allRequestedFields() # if consumer requested simple registration fields, add them if sreg_fields: for field in sreg_fields: if field == 'email' and 'email' in data: sreg_data['email'] = data['email'] elif field == 'fullname' and 'fullname' in data: sreg_data['fullname'] = data['fullname'] elif field == 'nickname' and 'nickname' in data: sreg_data['nickname'] = data['nickname'] # construct sreg response sreg_response = sreg.SRegResponse.extractResponse(sreg_request, sreg_data) sreg_response.toMessage(response.fields) def add_openid_attribute_exchange(request, response, data): try: ax_request = ax.FetchRequest.fromOpenIDRequest(request) except ax.AXError: # not using OpenID attribute exchange extension pass else: ax_response = ax.FetchResponse() # if consumer requested attribute exchange fields, add them if ax_request and ax_request.requested_attributes: for type_uri in ax_request.requested_attributes.iterkeys(): email_schema = 'http://axschema.org/contact/email' name_schema = 'http://axschema.org/namePerson' if type_uri == email_schema and 'email' in data: ax_response.addValue(email_schema, data['email']) elif type_uri == name_schema and 'fullname' in data: ax_response.addValue(name_schema, data['fullname']) # construct ax response ax_response.toMessage(response.fields) def provider_respond(server, request, response, data): """ Respond to an OpenID request """ # get and add extensions add_openid_simple_registration(request, response, data) add_openid_attribute_exchange(request, response, data) # create http response from OpenID response webresponse = server.encodeResponse(response) http_response = HttpResponse(webresponse.body) http_response.status_code = webresponse.code # add OpenID headers to response for k, v in webresponse.headers.iteritems(): http_response[k] = v return http_response def validate_trust_root(openid_request): """ Only allow OpenID requests from valid trust roots """ trusted_roots = getattr(settings, 'OPENID_PROVIDER_TRUSTED_ROOT', None) if not trusted_roots: # not using trusted roots return True # don't allow empty trust roots if (not hasattr(openid_request, 'trust_root') or not openid_request.trust_root): 
log.error('no trust_root') return False # ensure trust root parses cleanly (one wildcard, of form *.foo.com, etc.) trust_root = TrustRoot.parse(openid_request.trust_root) if not trust_root: log.error('invalid trust_root') return False # don't allow empty return tos if (not hasattr(openid_request, 'return_to') or not openid_request.return_to): log.error('empty return_to') return False # ensure return to is within trust root if not trust_root.validateURL(openid_request.return_to): log.error('invalid return_to') return False # check that the root matches the ones we trust if not any(r for r in trusted_roots if fnmatch.fnmatch(trust_root, r)): log.error('non-trusted root') return False return True @csrf_exempt def provider_login(request): """ OpenID login endpoint """ # make and validate endpoint endpoint = get_xrds_url('login', request) if not endpoint: return default_render_failure(request, "Invalid OpenID request") # initialize store and server store = DjangoOpenIDStore() server = Server(store, endpoint) # first check to see if the request is an OpenID request. # If so, the client will have specified an 'openid.mode' as part # of the request. querydict = dict(request.REQUEST.items()) error = False if 'openid.mode' in request.GET or 'openid.mode' in request.POST: # decode request try: openid_request = server.decodeRequest(querydict) except (UntrustedReturnURL, ProtocolError): openid_request = None if not openid_request: return default_render_failure(request, "Invalid OpenID request") # don't allow invalid and non-trusted trust roots if not validate_trust_root(openid_request): return default_render_failure(request, "Invalid OpenID trust root") # checkid_immediate not supported, require user interaction if openid_request.mode == 'checkid_immediate': return provider_respond(server, openid_request, openid_request.answer(False), {}) # checkid_setup, so display login page # (by falling through to the provider_login at the # bottom of this method). elif openid_request.mode == 'checkid_setup': if openid_request.idSelect(): # remember request and original path request.session['openid_setup'] = { 'request': openid_request, 'url': request.get_full_path(), 'post_params': request.POST, } # user failed login on previous attempt if 'openid_error' in request.session: error = True del request.session['openid_error'] # OpenID response else: return provider_respond(server, openid_request, server.handleRequest(openid_request), {}) # handle login redirection: these are also sent to this view function, # but are distinguished by lacking the openid mode. We also know that # they are posts, because they come from the popup elif request.method == 'POST' and 'openid_setup' in request.session: # get OpenID request from session openid_setup = request.session['openid_setup'] openid_request = openid_setup['request'] openid_request_url = openid_setup['url'] post_params = openid_setup['post_params'] # We need to preserve the parameters, and the easiest way to do this is # through the URL url_post_params = { param: post_params[param] for param in post_params if param.startswith('openid') } encoded_params = urllib.urlencode(url_post_params) if '?' not in openid_request_url: openid_request_url = openid_request_url + '?' 
+ encoded_params else: openid_request_url = openid_request_url + '&' + encoded_params del request.session['openid_setup'] # don't allow invalid trust roots if not validate_trust_root(openid_request): return default_render_failure(request, "Invalid OpenID trust root") # check if user with given email exists # Failure is redirected to this method (by using the original URL), # which will bring up the login dialog. email = request.POST.get('email', None) try: user = User.objects.get(email=email) except User.DoesNotExist: request.session['openid_error'] = True if settings.FEATURES['SQUELCH_PII_IN_LOGS']: AUDIT_LOG.warning("OpenID login failed - Unknown user email") else: msg = "OpenID login failed - Unknown user email: {0}".format(email) AUDIT_LOG.warning(msg) return HttpResponseRedirect(openid_request_url) # attempt to authenticate user (but not actually log them in...) # Failure is again redirected to the login dialog. username = user.username password = request.POST.get('password', None) try: user = authenticate(username=username, password=password, request=request) except RateLimitException: AUDIT_LOG.warning('OpenID - Too many failed login attempts.') return HttpResponseRedirect(openid_request_url) if user is None: request.session['openid_error'] = True if settings.FEATURES['SQUELCH_PII_IN_LOGS']: AUDIT_LOG.warning("OpenID login failed - invalid password") else: msg = "OpenID login failed - password for {0} is invalid".format(email) AUDIT_LOG.warning(msg) return HttpResponseRedirect(openid_request_url) # authentication succeeded, so fetch user information # that was requested if user is not None and user.is_active: # remove error from session since login succeeded if 'openid_error' in request.session: del request.session['openid_error'] if settings.FEATURES['SQUELCH_PII_IN_LOGS']: AUDIT_LOG.info("OpenID login success - user.id: {0}".format(user.id)) else: AUDIT_LOG.info("OpenID login success - {0} ({1})".format( user.username, user.email)) # redirect user to return_to location url = endpoint + urlquote(user.username) response = openid_request.answer(True, None, url) # Note too that this is hardcoded, and not really responding to # the extensions that were registered in the first place. 
results = { 'nickname': user.username, 'email': user.email, 'fullname': user.profile.name, } # the request succeeded: return provider_respond(server, openid_request, response, results) # the account is not active, so redirect back to the login page: request.session['openid_error'] = True if settings.FEATURES['SQUELCH_PII_IN_LOGS']: AUDIT_LOG.warning("Login failed - Account not active for user.id {0}".format(user.id)) else: msg = "Login failed - Account not active for user {0}".format(username) AUDIT_LOG.warning(msg) return HttpResponseRedirect(openid_request_url) # determine consumer domain if applicable return_to = '' if 'openid.return_to' in request.REQUEST: return_to = request.REQUEST['openid.return_to'] matches = re.match(r'\w+:\/\/([\w\.-]+)', return_to) return_to = matches.group(1) # display login page response = render_to_response('provider_login.html', { 'error': error, 'return_to': return_to }) # add custom XRDS header necessary for discovery process response['X-XRDS-Location'] = get_xrds_url('xrds', request) return response def provider_identity(request): """ XRDS for identity discovery """ response = render_to_response('identity.xml', {'url': get_xrds_url('login', request)}, mimetype='text/xml') # custom XRDS header necessary for discovery process response['X-XRDS-Location'] = get_xrds_url('identity', request) return response def provider_xrds(request): """ XRDS for endpoint discovery """ response = render_to_response('xrds.xml', {'url': get_xrds_url('login', request)}, mimetype='text/xml') # custom XRDS header necessary for discovery process response['X-XRDS-Location'] = get_xrds_url('xrds', request) return response
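# Hedged usage sketch (not part of the module): a hypothetical login view
# wrapped with the ssl_login_shortcut decorator defined earlier in this
# module. Certificate holders are logged in (or sent to signup) directly;
# everyone else falls through to the normal view. The template name is a
# placeholder.
#
#     @ssl_login_shortcut
#     def signin_index(request):
#         return render_to_response('login.html', {})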
agpl-3.0
mgardne8/discord.py
examples/basic_voice.py
1
3846
import asyncio import discord import youtube_dl from discord.ext import commands # Suppress noise about console usage from errors youtube_dl.utils.bug_reports_message = lambda: '' ytdl_format_options = { 'format': 'bestaudio/best', 'outtmpl': '%(extractor)s-%(id)s-%(title)s.%(ext)s', 'restrictfilenames': True, 'noplaylist': True, 'nocheckcertificate': True, 'ignoreerrors': False, 'logtostderr': False, 'quiet': True, 'no_warnings': True, 'default_search': 'auto', 'source_address': '0.0.0.0' # ipv6 addresses cause issues sometimes } ffmpeg_options = { 'before_options': '-nostdin', 'options': '-vn' } ytdl = youtube_dl.YoutubeDL(ytdl_format_options) class YTDLSource(discord.PCMVolumeTransformer): def __init__(self, source, *, data, volume=0.5): super().__init__(source, volume) self.data = data self.title = data.get('title') self.url = data.get('url') @classmethod async def from_url(cls, url, *, loop=None): loop = loop or asyncio.get_event_loop() data = await loop.run_in_executor(None, ytdl.extract_info, url) if 'entries' in data: # take first item from a playlist data = data['entries'][0] filename = ytdl.prepare_filename(data) return cls(discord.FFmpegPCMAudio(filename, **ffmpeg_options), data=data) class Music: def __init__(self, bot): self.bot = bot @commands.command() async def join(self, ctx, *, channel: discord.VoiceChannel): """Joins a voice channel""" if ctx.voice_client is not None: return await ctx.voice_client.move_to(channel) await channel.connect() @commands.command() async def play(self, ctx, *, query): """Plays a file from the local filesystem""" if ctx.voice_client is None: if ctx.author.voice.channel: await ctx.author.voice.channel.connect() else: return await ctx.send("Not connected to a voice channel.") if ctx.voice_client.is_playing(): ctx.voice_client.stop() source = discord.PCMVolumeTransformer(discord.FFmpegPCMAudio(query)) ctx.voice_client.play(source, after=lambda e: print('Player error: %s' % e) if e else None) await ctx.send('Now playing: {}'.format(query)) @commands.command() async def yt(self, ctx, *, url): """Streams from a url (almost anything youtube_dl supports)""" if ctx.voice_client is None: if ctx.author.voice.channel: await ctx.author.voice.channel.connect() else: return await ctx.send("Not connected to a voice channel.") if ctx.voice_client.is_playing(): ctx.voice_client.stop() player = await YTDLSource.from_url(url, loop=self.bot.loop) ctx.voice_client.play(player, after=lambda e: print('Player error: %s' % e) if e else None) await ctx.send('Now playing: {}'.format(player.title)) @commands.command() async def volume(self, ctx, volume: int): """Changes the player's volume""" if ctx.voice_client is None: return await ctx.send("Not connected to a voice channel.") # PCMVolumeTransformer takes a float multiplier, so convert the percentage ctx.voice_client.source.volume = volume / 100 await ctx.send("Changed volume to {}%".format(volume)) @commands.command() async def stop(self, ctx): """Stops and disconnects the bot from voice""" await ctx.voice_client.disconnect() bot = commands.Bot(command_prefix=commands.when_mentioned_or("!"), description='Music bot example') @bot.event async def on_ready(): print('Logged in as {0.id}/{0}'.format(bot.user)) print('------') bot.add_cog(Music(bot)) bot.run('token')
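# Hedged usage sketch (not part of the example): with a real bot token in
# place of 'token' above and FFmpeg on PATH, typical commands in a guild
# text channel would be (the channel name and URL are placeholders):
#
#     !join General
#     !play some_local_file.mp3
#     !yt https://www.youtube.com/watch?v=...
#     !volume 50
#     !stop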
mit
tgsd96/gargnotes
venv/lib/python2.7/site-packages/pip/_vendor/requests/packages/chardet/chardetect.py
1786
2504
#!/usr/bin/env python """ Script which takes one or more file paths and reports on their detected encodings Example:: % chardetect somefile someotherfile somefile: windows-1252 with confidence 0.5 someotherfile: ascii with confidence 1.0 If no paths are provided, it takes its input from stdin. """ from __future__ import absolute_import, print_function, unicode_literals import argparse import sys from io import open from chardet import __version__ from chardet.universaldetector import UniversalDetector def description_of(lines, name='stdin'): """ Return a string describing the probable encoding of a file or list of strings. :param lines: The lines to get the encoding of. :type lines: Iterable of bytes :param name: Name of file or collection of lines :type name: str """ u = UniversalDetector() for line in lines: u.feed(line) u.close() result = u.result if result['encoding']: return '{0}: {1} with confidence {2}'.format(name, result['encoding'], result['confidence']) else: return '{0}: no result'.format(name) def main(argv=None): ''' Handles command line arguments and gets things started. :param argv: List of arguments, as if specified on the command-line. If None, ``sys.argv[1:]`` is used instead. :type argv: list of str ''' # Get command line arguments parser = argparse.ArgumentParser( description="Takes one or more file paths and reports their detected \ encodings", formatter_class=argparse.ArgumentDefaultsHelpFormatter, conflict_handler='resolve') parser.add_argument('input', help='File whose encoding we would like to determine.', type=argparse.FileType('rb'), nargs='*', default=[sys.stdin]) parser.add_argument('--version', action='version', version='%(prog)s {0}'.format(__version__)) args = parser.parse_args(argv) for f in args.input: if f.isatty(): print("You are running chardetect interactively. Press " + "CTRL-D twice at the start of a blank line to signal the " + "end of your input. If you want help, run chardetect " + "--help\n", file=sys.stderr) print(description_of(f, f.name)) if __name__ == '__main__': main()
mit
mwilliamson/locket.py
locket/__init__.py
1
4604
import time import errno import threading import weakref __all__ = ["lock_file"] try: import fcntl except ImportError: try: import msvcrt except ImportError: raise ImportError("Platform not supported (failed to import fcntl, msvcrt)") else: _lock_file_blocking_available = False def _lock_file_non_blocking(file_): try: msvcrt.locking(file_.fileno(), msvcrt.LK_NBLCK, 1) return True # TODO: check errno except IOError: return False def _unlock_file(file_): msvcrt.locking(file_.fileno(), msvcrt.LK_UNLCK, 1) else: _lock_file_blocking_available = True def _lock_file_blocking(file_): fcntl.flock(file_.fileno(), fcntl.LOCK_EX) def _lock_file_non_blocking(file_): try: fcntl.flock(file_.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB) return True except IOError as error: if error.errno in [errno.EACCES, errno.EAGAIN]: return False else: raise def _unlock_file(file_): fcntl.flock(file_.fileno(), fcntl.LOCK_UN) _locks_lock = threading.Lock() _locks = weakref.WeakValueDictionary() def lock_file(path, **kwargs): _locks_lock.acquire() try: lock = _locks.get(path) if lock is None: lock = _create_lock_file(path) _locks[path] = lock finally: _locks_lock.release() return _Locker(lock, **kwargs) def _create_lock_file(path): thread_lock = _ThreadLock(path) file_lock = _LockFile(path) return _LockSet([thread_lock, file_lock]) class LockError(Exception): pass def _acquire_non_blocking(acquire, timeout, retry_period, path): if retry_period is None: retry_period = 0.05 start_time = time.time() while True: success = acquire() if success: return elif (timeout is not None and time.time() - start_time > timeout): raise LockError("Couldn't lock {0}".format(path)) else: time.sleep(retry_period) class _LockSet(object): def __init__(self, locks): self._locks = locks def acquire(self, timeout, retry_period): acquired_locks = [] try: for lock in self._locks: lock.acquire(timeout, retry_period) acquired_locks.append(lock) except: for acquired_lock in reversed(acquired_locks): # TODO: handle exceptions acquired_lock.release() raise def release(self): for lock in reversed(self._locks): # TODO: Handle exceptions lock.release() class _ThreadLock(object): def __init__(self, path): self._path = path self._lock = threading.Lock() def acquire(self, timeout=None, retry_period=None): if timeout is None: self._lock.acquire() else: _acquire_non_blocking( acquire=lambda: self._lock.acquire(False), timeout=timeout, retry_period=retry_period, path=self._path, ) def release(self): self._lock.release() class _LockFile(object): def __init__(self, path): self._path = path self._file = None def acquire(self, timeout=None, retry_period=None): if self._file is None: self._file = open(self._path, "wb") if timeout is None and _lock_file_blocking_available: _lock_file_blocking(self._file) else: _acquire_non_blocking( acquire=lambda: _lock_file_non_blocking(self._file), timeout=timeout, retry_period=retry_period, path=self._path, ) def release(self): _unlock_file(self._file) self._file.close() self._file = None class _Locker(object): """ A lock wrapper to always apply the given *timeout* and *retry_period* to acquire() calls. """ def __init__(self, lock, timeout=None, retry_period=None): self._lock = lock self._timeout = timeout self._retry_period = retry_period def acquire(self): self._lock.acquire(self._timeout, self._retry_period) def release(self): self._lock.release() def __enter__(self): self.acquire() return self def __exit__(self, *args): self.release()
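# Hedged usage sketch (not part of the module): the path, timeout and the
# critical-section function are illustrative.
#
#     import locket
#
#     with locket.lock_file("/tmp/myapp.lock", timeout=2, retry_period=0.1):
#         do_exclusive_work()  # hypothetical critical section
#
# LockError is raised if the lock cannot be acquired within the timeout;
# the same path returns the same underlying lock within a process, so the
# file lock also serializes threads.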
bsd-2-clause
IceCubeDev/SpaceOrNot
psycopg2/tests/test_connection.py
39
39512
#!/usr/bin/env python # test_connection.py - unit test for connection attributes # # Copyright (C) 2008-2011 James Henstridge <[email protected]> # # psycopg2 is free software: you can redistribute it and/or modify it # under the terms of the GNU Lesser General Public License as published # by the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # In addition, as a special exception, the copyright holders give # permission to link this program with the OpenSSL library (or with # modified versions of OpenSSL that use the same license as OpenSSL), # and distribute linked combinations including the two. # # You must obey the GNU Lesser General Public License in all respects for # all of the code used other than OpenSSL. # # psycopg2 is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or # FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public # License for more details. import os import time import threading from operator import attrgetter import psycopg2 import psycopg2.errorcodes import psycopg2.extensions from testutils import unittest, decorate_all_tests, skip_if_no_superuser from testutils import skip_before_postgres, skip_after_postgres from testutils import ConnectingTestCase, skip_if_tpc_disabled from testutils import skip_if_windows from testconfig import dsn, dbname class ConnectionTests(ConnectingTestCase): def test_closed_attribute(self): conn = self.conn self.assertEqual(conn.closed, False) conn.close() self.assertEqual(conn.closed, True) def test_close_idempotent(self): conn = self.conn conn.close() conn.close() self.assert_(conn.closed) def test_cursor_closed_attribute(self): conn = self.conn curs = conn.cursor() self.assertEqual(curs.closed, False) curs.close() self.assertEqual(curs.closed, True) # Closing the connection closes the cursor: curs = conn.cursor() conn.close() self.assertEqual(curs.closed, True) @skip_before_postgres(8, 4) @skip_if_no_superuser @skip_if_windows def test_cleanup_on_badconn_close(self): # ticket #148 conn = self.conn cur = conn.cursor() try: cur.execute("select pg_terminate_backend(pg_backend_pid())") except psycopg2.OperationalError, e: if e.pgcode != psycopg2.errorcodes.ADMIN_SHUTDOWN: raise except psycopg2.DatabaseError, e: # curiously when disconnected in green mode we get a DatabaseError # without pgcode. 
if e.pgcode is not None: raise self.assertEqual(conn.closed, 2) conn.close() self.assertEqual(conn.closed, 1) def test_reset(self): conn = self.conn # switch isolation level, then reset level = conn.isolation_level conn.set_isolation_level(0) self.assertEqual(conn.isolation_level, 0) conn.reset() # now the isolation level should be equal to saved one self.assertEqual(conn.isolation_level, level) def test_notices(self): conn = self.conn cur = conn.cursor() if self.conn.server_version >= 90300: cur.execute("set client_min_messages=debug1") cur.execute("create temp table chatty (id serial primary key);") self.assertEqual("CREATE TABLE", cur.statusmessage) self.assert_(conn.notices) def test_notices_consistent_order(self): conn = self.conn cur = conn.cursor() if self.conn.server_version >= 90300: cur.execute("set client_min_messages=debug1") cur.execute("create temp table table1 (id serial); create temp table table2 (id serial);") cur.execute("create temp table table3 (id serial); create temp table table4 (id serial);") self.assertEqual(4, len(conn.notices)) self.assert_('table1' in conn.notices[0]) self.assert_('table2' in conn.notices[1]) self.assert_('table3' in conn.notices[2]) self.assert_('table4' in conn.notices[3]) def test_notices_limited(self): conn = self.conn cur = conn.cursor() if self.conn.server_version >= 90300: cur.execute("set client_min_messages=debug1") for i in range(0, 100, 10): sql = " ".join(["create temp table table%d (id serial);" % j for j in range(i, i+10)]) cur.execute(sql) self.assertEqual(50, len(conn.notices)) self.assert_('table99' in conn.notices[-1], conn.notices[-1]) def test_server_version(self): self.assert_(self.conn.server_version) def test_protocol_version(self): self.assert_(self.conn.protocol_version in (2,3), self.conn.protocol_version) def test_tpc_unsupported(self): cnn = self.conn if cnn.server_version >= 80100: return self.skipTest("tpc is supported") self.assertRaises(psycopg2.NotSupportedError, cnn.xid, 42, "foo", "bar") @skip_before_postgres(8, 2) def test_concurrent_execution(self): def slave(): cnn = self.connect() cur = cnn.cursor() cur.execute("select pg_sleep(4)") cur.close() cnn.close() t1 = threading.Thread(target=slave) t2 = threading.Thread(target=slave) t0 = time.time() t1.start() t2.start() t1.join() t2.join() self.assert_(time.time() - t0 < 7, "something broken in concurrency") def test_encoding_name(self): self.conn.set_client_encoding("EUC_JP") # conn.encoding is 'EUCJP' now. cur = self.conn.cursor() psycopg2.extensions.register_type(psycopg2.extensions.UNICODE, cur) cur.execute("select 'foo'::text;") self.assertEqual(cur.fetchone()[0], u'foo') def test_connect_nonnormal_envvar(self): # We must perform encoding normalization at connection time self.conn.close() oldenc = os.environ.get('PGCLIENTENCODING') os.environ['PGCLIENTENCODING'] = 'utf-8' # malformed spelling try: self.conn = self.connect() finally: if oldenc is not None: os.environ['PGCLIENTENCODING'] = oldenc else: del os.environ['PGCLIENTENCODING'] def test_weakref(self): from weakref import ref import gc conn = psycopg2.connect(dsn) w = ref(conn) conn.close() del conn gc.collect() self.assert_(w() is None) def test_commit_concurrency(self): # The problem is the one reported in ticket #103. Because of bad # status check, we commit even when a commit is already on its way. # We can detect this condition by the warnings. 
conn = self.conn notices = [] stop = [] def committer(): while not stop: conn.commit() while conn.notices: notices.append((2, conn.notices.pop())) cur = conn.cursor() t1 = threading.Thread(target=committer) t1.start() i = 1 for i in range(1000): cur.execute("select %s;",(i,)) conn.commit() while conn.notices: notices.append((1, conn.notices.pop())) # Stop the committer thread stop.append(True) self.assert_(not notices, "%d notices raised" % len(notices)) def test_connect_cursor_factory(self): import psycopg2.extras conn = self.connect(cursor_factory=psycopg2.extras.DictCursor) cur = conn.cursor() cur.execute("select 1 as a") self.assertEqual(cur.fetchone()['a'], 1) def test_cursor_factory(self): self.assertEqual(self.conn.cursor_factory, None) cur = self.conn.cursor() cur.execute("select 1 as a") self.assertRaises(TypeError, (lambda r: r['a']), cur.fetchone()) self.conn.cursor_factory = psycopg2.extras.DictCursor self.assertEqual(self.conn.cursor_factory, psycopg2.extras.DictCursor) cur = self.conn.cursor() cur.execute("select 1 as a") self.assertEqual(cur.fetchone()['a'], 1) self.conn.cursor_factory = None self.assertEqual(self.conn.cursor_factory, None) cur = self.conn.cursor() cur.execute("select 1 as a") self.assertRaises(TypeError, (lambda r: r['a']), cur.fetchone()) def test_cursor_factory_none(self): # issue #210 conn = self.connect() cur = conn.cursor(cursor_factory=None) self.assertEqual(type(cur), psycopg2.extensions.cursor) conn = self.connect(cursor_factory=psycopg2.extras.DictCursor) cur = conn.cursor(cursor_factory=None) self.assertEqual(type(cur), psycopg2.extras.DictCursor) def test_failed_init_status(self): class SubConnection(psycopg2.extensions.connection): def __init__(self, dsn): try: super(SubConnection, self).__init__(dsn) except Exception: pass c = SubConnection("dbname=thereisnosuchdatabasemate password=foobar") self.assert_(c.closed, "connection failed so it must be closed") self.assert_('foobar' not in c.dsn, "password was not obscured") class IsolationLevelsTestCase(ConnectingTestCase): def setUp(self): ConnectingTestCase.setUp(self) conn = self.connect() cur = conn.cursor() try: cur.execute("drop table isolevel;") except psycopg2.ProgrammingError: conn.rollback() cur.execute("create table isolevel (id integer);") conn.commit() conn.close() def test_isolation_level(self): conn = self.connect() self.assertEqual( conn.isolation_level, psycopg2.extensions.ISOLATION_LEVEL_READ_COMMITTED) def test_encoding(self): conn = self.connect() self.assert_(conn.encoding in psycopg2.extensions.encodings) def test_set_isolation_level(self): conn = self.connect() curs = conn.cursor() levels = [ (None, psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT), ('read uncommitted', psycopg2.extensions.ISOLATION_LEVEL_READ_UNCOMMITTED), ('read committed', psycopg2.extensions.ISOLATION_LEVEL_READ_COMMITTED), ('repeatable read', psycopg2.extensions.ISOLATION_LEVEL_REPEATABLE_READ), ('serializable', psycopg2.extensions.ISOLATION_LEVEL_SERIALIZABLE), ] for name, level in levels: conn.set_isolation_level(level) # the only values available on prehistoric PG versions if conn.server_version < 80000: if level in ( psycopg2.extensions.ISOLATION_LEVEL_READ_UNCOMMITTED, psycopg2.extensions.ISOLATION_LEVEL_REPEATABLE_READ): name, level = levels[levels.index((name, level)) + 1] self.assertEqual(conn.isolation_level, level) curs.execute('show transaction_isolation;') got_name = curs.fetchone()[0] if name is None: curs.execute('show default_transaction_isolation;') name = curs.fetchone()[0] 
self.assertEqual(name, got_name) conn.commit() self.assertRaises(ValueError, conn.set_isolation_level, -1) self.assertRaises(ValueError, conn.set_isolation_level, 5) def test_set_isolation_level_abort(self): conn = self.connect() cur = conn.cursor() self.assertEqual(psycopg2.extensions.TRANSACTION_STATUS_IDLE, conn.get_transaction_status()) cur.execute("insert into isolevel values (10);") self.assertEqual(psycopg2.extensions.TRANSACTION_STATUS_INTRANS, conn.get_transaction_status()) conn.set_isolation_level( psycopg2.extensions.ISOLATION_LEVEL_SERIALIZABLE) self.assertEqual(psycopg2.extensions.TRANSACTION_STATUS_IDLE, conn.get_transaction_status()) cur.execute("select count(*) from isolevel;") self.assertEqual(0, cur.fetchone()[0]) cur.execute("insert into isolevel values (10);") self.assertEqual(psycopg2.extensions.TRANSACTION_STATUS_INTRANS, conn.get_transaction_status()) conn.set_isolation_level( psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT) self.assertEqual(psycopg2.extensions.TRANSACTION_STATUS_IDLE, conn.get_transaction_status()) cur.execute("select count(*) from isolevel;") self.assertEqual(0, cur.fetchone()[0]) cur.execute("insert into isolevel values (10);") self.assertEqual(psycopg2.extensions.TRANSACTION_STATUS_IDLE, conn.get_transaction_status()) conn.set_isolation_level( psycopg2.extensions.ISOLATION_LEVEL_READ_COMMITTED) self.assertEqual(psycopg2.extensions.TRANSACTION_STATUS_IDLE, conn.get_transaction_status()) cur.execute("select count(*) from isolevel;") self.assertEqual(1, cur.fetchone()[0]) def test_isolation_level_autocommit(self): cnn1 = self.connect() cnn2 = self.connect() cnn2.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT) cur1 = cnn1.cursor() cur1.execute("select count(*) from isolevel;") self.assertEqual(0, cur1.fetchone()[0]) cnn1.commit() cur2 = cnn2.cursor() cur2.execute("insert into isolevel values (10);") cur1.execute("select count(*) from isolevel;") self.assertEqual(1, cur1.fetchone()[0]) def test_isolation_level_read_committed(self): cnn1 = self.connect() cnn2 = self.connect() cnn2.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_READ_COMMITTED) cur1 = cnn1.cursor() cur1.execute("select count(*) from isolevel;") self.assertEqual(0, cur1.fetchone()[0]) cnn1.commit() cur2 = cnn2.cursor() cur2.execute("insert into isolevel values (10);") cur1.execute("insert into isolevel values (20);") cur2.execute("select count(*) from isolevel;") self.assertEqual(1, cur2.fetchone()[0]) cnn1.commit() cur2.execute("select count(*) from isolevel;") self.assertEqual(2, cur2.fetchone()[0]) cur1.execute("select count(*) from isolevel;") self.assertEqual(1, cur1.fetchone()[0]) cnn2.commit() cur1.execute("select count(*) from isolevel;") self.assertEqual(2, cur1.fetchone()[0]) def test_isolation_level_serializable(self): cnn1 = self.connect() cnn2 = self.connect() cnn2.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_SERIALIZABLE) cur1 = cnn1.cursor() cur1.execute("select count(*) from isolevel;") self.assertEqual(0, cur1.fetchone()[0]) cnn1.commit() cur2 = cnn2.cursor() cur2.execute("insert into isolevel values (10);") cur1.execute("insert into isolevel values (20);") cur2.execute("select count(*) from isolevel;") self.assertEqual(1, cur2.fetchone()[0]) cnn1.commit() cur2.execute("select count(*) from isolevel;") self.assertEqual(1, cur2.fetchone()[0]) cur1.execute("select count(*) from isolevel;") self.assertEqual(1, cur1.fetchone()[0]) cnn2.commit() cur1.execute("select count(*) from isolevel;") self.assertEqual(2, cur1.fetchone()[0]) 
cur2.execute("select count(*) from isolevel;") self.assertEqual(2, cur2.fetchone()[0]) def test_isolation_level_closed(self): cnn = self.connect() cnn.close() self.assertRaises(psycopg2.InterfaceError, getattr, cnn, 'isolation_level') self.assertRaises(psycopg2.InterfaceError, cnn.set_isolation_level, 0) self.assertRaises(psycopg2.InterfaceError, cnn.set_isolation_level, 1) class ConnectionTwoPhaseTests(ConnectingTestCase): def setUp(self): ConnectingTestCase.setUp(self) self.make_test_table() self.clear_test_xacts() def tearDown(self): self.clear_test_xacts() ConnectingTestCase.tearDown(self) def clear_test_xacts(self): """Rollback all the prepared transaction in the testing db.""" cnn = self.connect() cnn.set_isolation_level(0) cur = cnn.cursor() try: cur.execute( "select gid from pg_prepared_xacts where database = %s", (dbname,)) except psycopg2.ProgrammingError: cnn.rollback() cnn.close() return gids = [ r[0] for r in cur ] for gid in gids: cur.execute("rollback prepared %s;", (gid,)) cnn.close() def make_test_table(self): cnn = self.connect() cur = cnn.cursor() try: cur.execute("DROP TABLE test_tpc;") except psycopg2.ProgrammingError: cnn.rollback() cur.execute("CREATE TABLE test_tpc (data text);") cnn.commit() cnn.close() def count_xacts(self): """Return the number of prepared xacts currently in the test db.""" cnn = self.connect() cur = cnn.cursor() cur.execute(""" select count(*) from pg_prepared_xacts where database = %s;""", (dbname,)) rv = cur.fetchone()[0] cnn.close() return rv def count_test_records(self): """Return the number of records in the test table.""" cnn = self.connect() cur = cnn.cursor() cur.execute("select count(*) from test_tpc;") rv = cur.fetchone()[0] cnn.close() return rv def test_tpc_commit(self): cnn = self.connect() xid = cnn.xid(1, "gtrid", "bqual") self.assertEqual(cnn.status, psycopg2.extensions.STATUS_READY) cnn.tpc_begin(xid) self.assertEqual(cnn.status, psycopg2.extensions.STATUS_BEGIN) cur = cnn.cursor() cur.execute("insert into test_tpc values ('test_tpc_commit');") self.assertEqual(0, self.count_xacts()) self.assertEqual(0, self.count_test_records()) cnn.tpc_prepare() self.assertEqual(cnn.status, psycopg2.extensions.STATUS_PREPARED) self.assertEqual(1, self.count_xacts()) self.assertEqual(0, self.count_test_records()) cnn.tpc_commit() self.assertEqual(cnn.status, psycopg2.extensions.STATUS_READY) self.assertEqual(0, self.count_xacts()) self.assertEqual(1, self.count_test_records()) def test_tpc_commit_one_phase(self): cnn = self.connect() xid = cnn.xid(1, "gtrid", "bqual") self.assertEqual(cnn.status, psycopg2.extensions.STATUS_READY) cnn.tpc_begin(xid) self.assertEqual(cnn.status, psycopg2.extensions.STATUS_BEGIN) cur = cnn.cursor() cur.execute("insert into test_tpc values ('test_tpc_commit_1p');") self.assertEqual(0, self.count_xacts()) self.assertEqual(0, self.count_test_records()) cnn.tpc_commit() self.assertEqual(cnn.status, psycopg2.extensions.STATUS_READY) self.assertEqual(0, self.count_xacts()) self.assertEqual(1, self.count_test_records()) def test_tpc_commit_recovered(self): cnn = self.connect() xid = cnn.xid(1, "gtrid", "bqual") self.assertEqual(cnn.status, psycopg2.extensions.STATUS_READY) cnn.tpc_begin(xid) self.assertEqual(cnn.status, psycopg2.extensions.STATUS_BEGIN) cur = cnn.cursor() cur.execute("insert into test_tpc values ('test_tpc_commit_rec');") self.assertEqual(0, self.count_xacts()) self.assertEqual(0, self.count_test_records()) cnn.tpc_prepare() cnn.close() self.assertEqual(1, self.count_xacts()) self.assertEqual(0, 
self.count_test_records()) cnn = self.connect() xid = cnn.xid(1, "gtrid", "bqual") cnn.tpc_commit(xid) self.assertEqual(cnn.status, psycopg2.extensions.STATUS_READY) self.assertEqual(0, self.count_xacts()) self.assertEqual(1, self.count_test_records()) def test_tpc_rollback(self): cnn = self.connect() xid = cnn.xid(1, "gtrid", "bqual") self.assertEqual(cnn.status, psycopg2.extensions.STATUS_READY) cnn.tpc_begin(xid) self.assertEqual(cnn.status, psycopg2.extensions.STATUS_BEGIN) cur = cnn.cursor() cur.execute("insert into test_tpc values ('test_tpc_rollback');") self.assertEqual(0, self.count_xacts()) self.assertEqual(0, self.count_test_records()) cnn.tpc_prepare() self.assertEqual(cnn.status, psycopg2.extensions.STATUS_PREPARED) self.assertEqual(1, self.count_xacts()) self.assertEqual(0, self.count_test_records()) cnn.tpc_rollback() self.assertEqual(cnn.status, psycopg2.extensions.STATUS_READY) self.assertEqual(0, self.count_xacts()) self.assertEqual(0, self.count_test_records()) def test_tpc_rollback_one_phase(self): cnn = self.connect() xid = cnn.xid(1, "gtrid", "bqual") self.assertEqual(cnn.status, psycopg2.extensions.STATUS_READY) cnn.tpc_begin(xid) self.assertEqual(cnn.status, psycopg2.extensions.STATUS_BEGIN) cur = cnn.cursor() cur.execute("insert into test_tpc values ('test_tpc_rollback_1p');") self.assertEqual(0, self.count_xacts()) self.assertEqual(0, self.count_test_records()) cnn.tpc_rollback() self.assertEqual(cnn.status, psycopg2.extensions.STATUS_READY) self.assertEqual(0, self.count_xacts()) self.assertEqual(0, self.count_test_records()) def test_tpc_rollback_recovered(self): cnn = self.connect() xid = cnn.xid(1, "gtrid", "bqual") self.assertEqual(cnn.status, psycopg2.extensions.STATUS_READY) cnn.tpc_begin(xid) self.assertEqual(cnn.status, psycopg2.extensions.STATUS_BEGIN) cur = cnn.cursor() cur.execute("insert into test_tpc values ('test_tpc_commit_rec');") self.assertEqual(0, self.count_xacts()) self.assertEqual(0, self.count_test_records()) cnn.tpc_prepare() cnn.close() self.assertEqual(1, self.count_xacts()) self.assertEqual(0, self.count_test_records()) cnn = self.connect() xid = cnn.xid(1, "gtrid", "bqual") cnn.tpc_rollback(xid) self.assertEqual(cnn.status, psycopg2.extensions.STATUS_READY) self.assertEqual(0, self.count_xacts()) self.assertEqual(0, self.count_test_records()) def test_status_after_recover(self): cnn = self.connect() self.assertEqual(psycopg2.extensions.STATUS_READY, cnn.status) xns = cnn.tpc_recover() self.assertEqual(psycopg2.extensions.STATUS_READY, cnn.status) cur = cnn.cursor() cur.execute("select 1") self.assertEqual(psycopg2.extensions.STATUS_BEGIN, cnn.status) xns = cnn.tpc_recover() self.assertEqual(psycopg2.extensions.STATUS_BEGIN, cnn.status) def test_recovered_xids(self): # insert a few test xns cnn = self.connect() cnn.set_isolation_level(0) cur = cnn.cursor() cur.execute("begin; prepare transaction '1-foo';") cur.execute("begin; prepare transaction '2-bar';") # read the values to return cur.execute(""" select gid, prepared, owner, database from pg_prepared_xacts where database = %s;""", (dbname,)) okvals = cur.fetchall() okvals.sort() cnn = self.connect() xids = cnn.tpc_recover() xids = [ xid for xid in xids if xid.database == dbname ] xids.sort(key=attrgetter('gtrid')) # check the values returned self.assertEqual(len(okvals), len(xids)) for (xid, (gid, prepared, owner, database)) in zip (xids, okvals): self.assertEqual(xid.gtrid, gid) self.assertEqual(xid.prepared, prepared) self.assertEqual(xid.owner, owner) 
self.assertEqual(xid.database, database) def test_xid_encoding(self): cnn = self.connect() xid = cnn.xid(42, "gtrid", "bqual") cnn.tpc_begin(xid) cnn.tpc_prepare() cnn = self.connect() cur = cnn.cursor() cur.execute("select gid from pg_prepared_xacts where database = %s;", (dbname,)) self.assertEqual('42_Z3RyaWQ=_YnF1YWw=', cur.fetchone()[0]) def test_xid_roundtrip(self): for fid, gtrid, bqual in [ (0, "", ""), (42, "gtrid", "bqual"), (0x7fffffff, "x" * 64, "y" * 64), ]: cnn = self.connect() xid = cnn.xid(fid, gtrid, bqual) cnn.tpc_begin(xid) cnn.tpc_prepare() cnn.close() cnn = self.connect() xids = [ xid for xid in cnn.tpc_recover() if xid.database == dbname ] self.assertEqual(1, len(xids)) xid = xids[0] self.assertEqual(xid.format_id, fid) self.assertEqual(xid.gtrid, gtrid) self.assertEqual(xid.bqual, bqual) cnn.tpc_rollback(xid) def test_unparsed_roundtrip(self): for tid in [ '', 'hello, world!', 'x' * 199, # PostgreSQL's limit in transaction id length ]: cnn = self.connect() cnn.tpc_begin(tid) cnn.tpc_prepare() cnn.close() cnn = self.connect() xids = [ xid for xid in cnn.tpc_recover() if xid.database == dbname ] self.assertEqual(1, len(xids)) xid = xids[0] self.assertEqual(xid.format_id, None) self.assertEqual(xid.gtrid, tid) self.assertEqual(xid.bqual, None) cnn.tpc_rollback(xid) def test_xid_construction(self): from psycopg2.extensions import Xid x1 = Xid(74, 'foo', 'bar') self.assertEqual(74, x1.format_id) self.assertEqual('foo', x1.gtrid) self.assertEqual('bar', x1.bqual) def test_xid_from_string(self): from psycopg2.extensions import Xid x2 = Xid.from_string('42_Z3RyaWQ=_YnF1YWw=') self.assertEqual(42, x2.format_id) self.assertEqual('gtrid', x2.gtrid) self.assertEqual('bqual', x2.bqual) x3 = Xid.from_string('99_xxx_yyy') self.assertEqual(None, x3.format_id) self.assertEqual('99_xxx_yyy', x3.gtrid) self.assertEqual(None, x3.bqual) def test_xid_to_string(self): from psycopg2.extensions import Xid x1 = Xid.from_string('42_Z3RyaWQ=_YnF1YWw=') self.assertEqual(str(x1), '42_Z3RyaWQ=_YnF1YWw=') x2 = Xid.from_string('99_xxx_yyy') self.assertEqual(str(x2), '99_xxx_yyy') def test_xid_unicode(self): cnn = self.connect() x1 = cnn.xid(10, u'uni', u'code') cnn.tpc_begin(x1) cnn.tpc_prepare() cnn.reset() xid = [ xid for xid in cnn.tpc_recover() if xid.database == dbname ][0] self.assertEqual(10, xid.format_id) self.assertEqual('uni', xid.gtrid) self.assertEqual('code', xid.bqual) def test_xid_unicode_unparsed(self): # We don't expect people shooting snowmen as transaction ids, # so if something explodes in an encode error I don't mind. # Let's just check uniconde is accepted as type. 
cnn = self.connect() cnn.set_client_encoding('utf8') cnn.tpc_begin(u"transaction-id") cnn.tpc_prepare() cnn.reset() xid = [ xid for xid in cnn.tpc_recover() if xid.database == dbname ][0] self.assertEqual(None, xid.format_id) self.assertEqual('transaction-id', xid.gtrid) self.assertEqual(None, xid.bqual) def test_cancel_fails_prepared(self): cnn = self.connect() cnn.tpc_begin('cancel') cnn.tpc_prepare() self.assertRaises(psycopg2.ProgrammingError, cnn.cancel) def test_tpc_recover_non_dbapi_connection(self): from psycopg2.extras import RealDictConnection cnn = self.connect(connection_factory=RealDictConnection) cnn.tpc_begin('dict-connection') cnn.tpc_prepare() cnn.reset() xids = cnn.tpc_recover() xid = [ xid for xid in xids if xid.database == dbname ][0] self.assertEqual(None, xid.format_id) self.assertEqual('dict-connection', xid.gtrid) self.assertEqual(None, xid.bqual) decorate_all_tests(ConnectionTwoPhaseTests, skip_if_tpc_disabled) class TransactionControlTests(ConnectingTestCase): def test_closed(self): self.conn.close() self.assertRaises(psycopg2.InterfaceError, self.conn.set_session, psycopg2.extensions.ISOLATION_LEVEL_SERIALIZABLE) def test_not_in_transaction(self): cur = self.conn.cursor() cur.execute("select 1") self.assertRaises(psycopg2.ProgrammingError, self.conn.set_session, psycopg2.extensions.ISOLATION_LEVEL_SERIALIZABLE) def test_set_isolation_level(self): cur = self.conn.cursor() self.conn.set_session( psycopg2.extensions.ISOLATION_LEVEL_SERIALIZABLE) cur.execute("SHOW default_transaction_isolation;") self.assertEqual(cur.fetchone()[0], 'serializable') self.conn.rollback() self.conn.set_session( psycopg2.extensions.ISOLATION_LEVEL_REPEATABLE_READ) cur.execute("SHOW default_transaction_isolation;") if self.conn.server_version > 80000: self.assertEqual(cur.fetchone()[0], 'repeatable read') else: self.assertEqual(cur.fetchone()[0], 'serializable') self.conn.rollback() self.conn.set_session( isolation_level=psycopg2.extensions.ISOLATION_LEVEL_READ_COMMITTED) cur.execute("SHOW default_transaction_isolation;") self.assertEqual(cur.fetchone()[0], 'read committed') self.conn.rollback() self.conn.set_session( isolation_level=psycopg2.extensions.ISOLATION_LEVEL_READ_UNCOMMITTED) cur.execute("SHOW default_transaction_isolation;") if self.conn.server_version > 80000: self.assertEqual(cur.fetchone()[0], 'read uncommitted') else: self.assertEqual(cur.fetchone()[0], 'read committed') self.conn.rollback() def test_set_isolation_level_str(self): cur = self.conn.cursor() self.conn.set_session("serializable") cur.execute("SHOW default_transaction_isolation;") self.assertEqual(cur.fetchone()[0], 'serializable') self.conn.rollback() self.conn.set_session("repeatable read") cur.execute("SHOW default_transaction_isolation;") if self.conn.server_version > 80000: self.assertEqual(cur.fetchone()[0], 'repeatable read') else: self.assertEqual(cur.fetchone()[0], 'serializable') self.conn.rollback() self.conn.set_session("read committed") cur.execute("SHOW default_transaction_isolation;") self.assertEqual(cur.fetchone()[0], 'read committed') self.conn.rollback() self.conn.set_session("read uncommitted") cur.execute("SHOW default_transaction_isolation;") if self.conn.server_version > 80000: self.assertEqual(cur.fetchone()[0], 'read uncommitted') else: self.assertEqual(cur.fetchone()[0], 'read committed') self.conn.rollback() def test_bad_isolation_level(self): self.assertRaises(ValueError, self.conn.set_session, 0) self.assertRaises(ValueError, self.conn.set_session, 5) self.assertRaises(ValueError, 
self.conn.set_session, 'whatever') def test_set_read_only(self): cur = self.conn.cursor() self.conn.set_session(readonly=True) cur.execute("SHOW default_transaction_read_only;") self.assertEqual(cur.fetchone()[0], 'on') self.conn.rollback() cur.execute("SHOW default_transaction_read_only;") self.assertEqual(cur.fetchone()[0], 'on') self.conn.rollback() cur = self.conn.cursor() self.conn.set_session(readonly=None) cur.execute("SHOW default_transaction_read_only;") self.assertEqual(cur.fetchone()[0], 'on') self.conn.rollback() self.conn.set_session(readonly=False) cur.execute("SHOW default_transaction_read_only;") self.assertEqual(cur.fetchone()[0], 'off') self.conn.rollback() def test_set_default(self): cur = self.conn.cursor() cur.execute("SHOW default_transaction_isolation;") default_isolevel = cur.fetchone()[0] cur.execute("SHOW default_transaction_read_only;") default_readonly = cur.fetchone()[0] self.conn.rollback() self.conn.set_session(isolation_level='serializable', readonly=True) self.conn.set_session(isolation_level='default', readonly='default') cur.execute("SHOW default_transaction_isolation;") self.assertEqual(cur.fetchone()[0], default_isolevel) cur.execute("SHOW default_transaction_read_only;") self.assertEqual(cur.fetchone()[0], default_readonly) @skip_before_postgres(9, 1) def test_set_deferrable(self): cur = self.conn.cursor() self.conn.set_session(readonly=True, deferrable=True) cur.execute("SHOW default_transaction_read_only;") self.assertEqual(cur.fetchone()[0], 'on') cur.execute("SHOW default_transaction_deferrable;") self.assertEqual(cur.fetchone()[0], 'on') self.conn.rollback() cur.execute("SHOW default_transaction_deferrable;") self.assertEqual(cur.fetchone()[0], 'on') self.conn.rollback() self.conn.set_session(deferrable=False) cur.execute("SHOW default_transaction_read_only;") self.assertEqual(cur.fetchone()[0], 'on') cur.execute("SHOW default_transaction_deferrable;") self.assertEqual(cur.fetchone()[0], 'off') self.conn.rollback() @skip_after_postgres(9, 1) def test_set_deferrable_error(self): self.assertRaises(psycopg2.ProgrammingError, self.conn.set_session, readonly=True, deferrable=True) class AutocommitTests(ConnectingTestCase): def test_closed(self): self.conn.close() self.assertRaises(psycopg2.InterfaceError, setattr, self.conn, 'autocommit', True) # The getter doesn't have a guard. We may change this in future # to make it consistent with other methods; meanwhile let's just check # it doesn't explode. 
try: self.assert_(self.conn.autocommit in (True, False)) except psycopg2.InterfaceError: pass def test_default_no_autocommit(self): self.assert_(not self.conn.autocommit) self.assertEqual(self.conn.status, psycopg2.extensions.STATUS_READY) self.assertEqual(self.conn.get_transaction_status(), psycopg2.extensions.TRANSACTION_STATUS_IDLE) cur = self.conn.cursor() cur.execute('select 1;') self.assertEqual(self.conn.status, psycopg2.extensions.STATUS_BEGIN) self.assertEqual(self.conn.get_transaction_status(), psycopg2.extensions.TRANSACTION_STATUS_INTRANS) self.conn.rollback() self.assertEqual(self.conn.status, psycopg2.extensions.STATUS_READY) self.assertEqual(self.conn.get_transaction_status(), psycopg2.extensions.TRANSACTION_STATUS_IDLE) def test_set_autocommit(self): self.conn.autocommit = True self.assert_(self.conn.autocommit) self.assertEqual(self.conn.status, psycopg2.extensions.STATUS_READY) self.assertEqual(self.conn.get_transaction_status(), psycopg2.extensions.TRANSACTION_STATUS_IDLE) cur = self.conn.cursor() cur.execute('select 1;') self.assertEqual(self.conn.status, psycopg2.extensions.STATUS_READY) self.assertEqual(self.conn.get_transaction_status(), psycopg2.extensions.TRANSACTION_STATUS_IDLE) self.conn.autocommit = False self.assert_(not self.conn.autocommit) self.assertEqual(self.conn.status, psycopg2.extensions.STATUS_READY) self.assertEqual(self.conn.get_transaction_status(), psycopg2.extensions.TRANSACTION_STATUS_IDLE) cur.execute('select 1;') self.assertEqual(self.conn.status, psycopg2.extensions.STATUS_BEGIN) self.assertEqual(self.conn.get_transaction_status(), psycopg2.extensions.TRANSACTION_STATUS_INTRANS) def test_set_intrans_error(self): cur = self.conn.cursor() cur.execute('select 1;') self.assertRaises(psycopg2.ProgrammingError, setattr, self.conn, 'autocommit', True) def test_set_session_autocommit(self): self.conn.set_session(autocommit=True) self.assert_(self.conn.autocommit) self.assertEqual(self.conn.status, psycopg2.extensions.STATUS_READY) self.assertEqual(self.conn.get_transaction_status(), psycopg2.extensions.TRANSACTION_STATUS_IDLE) cur = self.conn.cursor() cur.execute('select 1;') self.assertEqual(self.conn.status, psycopg2.extensions.STATUS_READY) self.assertEqual(self.conn.get_transaction_status(), psycopg2.extensions.TRANSACTION_STATUS_IDLE) self.conn.set_session(autocommit=False) self.assert_(not self.conn.autocommit) self.assertEqual(self.conn.status, psycopg2.extensions.STATUS_READY) self.assertEqual(self.conn.get_transaction_status(), psycopg2.extensions.TRANSACTION_STATUS_IDLE) cur.execute('select 1;') self.assertEqual(self.conn.status, psycopg2.extensions.STATUS_BEGIN) self.assertEqual(self.conn.get_transaction_status(), psycopg2.extensions.TRANSACTION_STATUS_INTRANS) self.conn.rollback() self.conn.set_session('serializable', readonly=True, autocommit=True) self.assert_(self.conn.autocommit) cur.execute('select 1;') self.assertEqual(self.conn.status, psycopg2.extensions.STATUS_READY) self.assertEqual(self.conn.get_transaction_status(), psycopg2.extensions.TRANSACTION_STATUS_IDLE) cur.execute("SHOW default_transaction_isolation;") self.assertEqual(cur.fetchone()[0], 'serializable') cur.execute("SHOW default_transaction_read_only;") self.assertEqual(cur.fetchone()[0], 'on') def test_suite(): return unittest.TestLoader().loadTestsFromName(__name__) if __name__ == "__main__": unittest.main()
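
# --- Editor's note: illustrative sketch, not part of the original test module. ---
# The ConnectionTwoPhaseTests above exercise the tpc_* API piece by piece; a
# minimal end-to-end usage sketch looks like the following. It reuses the `dsn`
# imported from testconfig and the `test_tpc` table created by the tests; the
# gtrid/bqual values are hypothetical. Defined but never called here, since
# unittest.main() above does not return.
def _example_tpc_usage():
    cnn = psycopg2.connect(dsn)
    xid = cnn.xid(1, "example-gtrid", "example-bqual")
    cnn.tpc_begin(xid)                  # start the distributed transaction
    cur = cnn.cursor()
    cur.execute("insert into test_tpc values ('example');")
    cnn.tpc_prepare()                   # phase 1: persist the prepared xact
    cnn.tpc_commit()                    # phase 2: make it durable
    cnn.close()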
apache-2.0
naziris/HomeSecPi
venv/lib/python2.7/site-packages/werkzeug/contrib/lint.py
295
12282
# -*- coding: utf-8 -*-
"""
    werkzeug.contrib.lint
    ~~~~~~~~~~~~~~~~~~~~~

    .. versionadded:: 0.5

    This module provides a middleware that performs sanity checks of the WSGI
    application.  It checks that :pep:`333` is properly implemented and warns
    on some common HTTP errors such as non-empty responses for 304 status
    codes.

    This module provides a middleware, the :class:`LintMiddleware`.  Wrap
    your application with it and it will warn about common problems with
    WSGI and HTTP while your application is running.

    It's strongly recommended to use it during development.

    :copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details.
    :license: BSD, see LICENSE for more details.
"""
from urlparse import urlparse
from warnings import warn

from werkzeug.datastructures import Headers
from werkzeug.http import is_entity_header
from werkzeug.wsgi import FileWrapper
from werkzeug._compat import string_types


class WSGIWarning(Warning):
    """Warning class for WSGI warnings."""


class HTTPWarning(Warning):
    """Warning class for HTTP warnings."""


def check_string(context, obj, stacklevel=3):
    if type(obj) is not str:
        warn(WSGIWarning('%s requires bytestrings, got %s' %
                         (context, obj.__class__.__name__)))


class InputStream(object):

    def __init__(self, stream):
        self._stream = stream

    def read(self, *args):
        if len(args) == 0:
            warn(WSGIWarning('wsgi does not guarantee an EOF marker on the '
                             'input stream, thus making calls to '
                             'wsgi.input.read() unsafe.  Conforming servers '
                             'may never return from this call.'),
                 stacklevel=2)
        elif len(args) != 1:
            warn(WSGIWarning('too many parameters passed to '
                             'wsgi.input.read()'), stacklevel=2)
        return self._stream.read(*args)

    def readline(self, *args):
        if len(args) == 0:
            warn(WSGIWarning('Calls to wsgi.input.readline() without arguments'
                             ' are unsafe.  Use wsgi.input.read() instead.'),
                 stacklevel=2)
        elif len(args) == 1:
            warn(WSGIWarning('wsgi.input.readline() was called with a size '
                             'hint.  WSGI does not support this, although '
                             'it\'s available on all major servers.'),
                 stacklevel=2)
        else:
            raise TypeError('too many arguments passed to '
                            'wsgi.input.readline()')
        return self._stream.readline(*args)

    def __iter__(self):
        try:
            return iter(self._stream)
        except TypeError:
            warn(WSGIWarning('wsgi.input is not iterable.'), stacklevel=2)
            return iter(())

    def close(self):
        warn(WSGIWarning('application closed the input stream!'),
             stacklevel=2)
        self._stream.close()


class ErrorStream(object):

    def __init__(self, stream):
        self._stream = stream

    def write(self, s):
        check_string('wsgi.error.write()', s)
        self._stream.write(s)

    def flush(self):
        self._stream.flush()

    def writelines(self, seq):
        for line in seq:
            # fixed: the original wrote `seq` (the whole sequence) on
            # every iteration instead of the current line
            self.write(line)

    def close(self):
        warn(WSGIWarning('application closed the error stream!'),
             stacklevel=2)
        self._stream.close()


class GuardedWrite(object):

    def __init__(self, write, chunks):
        self._write = write
        self._chunks = chunks

    def __call__(self, s):
        check_string('write()', s)
        # fixed: `self._write` is the write callable returned by
        # start_response, not a stream, so it must be called directly
        self._write(s)
        self._chunks.append(len(s))


class GuardedIterator(object):

    def __init__(self, iterator, headers_set, chunks):
        self._iterator = iterator
        self._next = iter(iterator).next
        self.closed = False
        self.headers_set = headers_set
        self.chunks = chunks

    def __iter__(self):
        return self

    def next(self):
        if self.closed:
            warn(WSGIWarning('iterated over closed app_iter'),
                 stacklevel=2)
        rv = self._next()
        if not self.headers_set:
            warn(WSGIWarning('Application returned before it '
                             'started the response'), stacklevel=2)
        check_string('application iterator items', rv)
        self.chunks.append(len(rv))
        return rv

    def close(self):
        self.closed = True
        if hasattr(self._iterator, 'close'):
            self._iterator.close()

        if self.headers_set:
            status_code, headers = self.headers_set
            bytes_sent = sum(self.chunks)
            content_length = headers.get('content-length', type=int)

            if status_code == 304:
                for key, value in headers:
                    key = key.lower()
                    if key not in ('expires', 'content-location') and \
                       is_entity_header(key):
                        warn(HTTPWarning('entity header %r found in 304 '
                                         'response' % key))
                if bytes_sent:
                    warn(HTTPWarning('304 responses must not have a body'))
            elif 100 <= status_code < 200 or status_code == 204:
                if content_length != 0:
                    # fixed: the `% status_code` was applied to the warning
                    # object instead of the message string
                    warn(HTTPWarning('%r responses must have an empty '
                                     'content length' % status_code))
                if bytes_sent:
                    warn(HTTPWarning('%r responses must not have a body' %
                                     status_code))
            elif content_length is not None and content_length != bytes_sent:
                warn(WSGIWarning('Content-Length and the number of bytes '
                                 'sent to the client do not match.'))

    def __del__(self):
        if not self.closed:
            try:
                warn(WSGIWarning('Iterator was garbage collected before '
                                 'it was closed.'))
            except Exception:
                pass


class LintMiddleware(object):
    """This middleware wraps an application and warns on common errors.
    Among other things it currently checks for the following problems:

    -   invalid status codes
    -   non-bytestrings sent to the WSGI server
    -   strings returned from the WSGI application
    -   non-empty conditional responses
    -   unquoted etags
    -   relative URLs in the Location header
    -   unsafe calls to wsgi.input
    -   unclosed iterators

    Detected errors are emitted using the standard Python :mod:`warnings`
    system and usually end up on :data:`stderr`.

    ::

        from werkzeug.contrib.lint import LintMiddleware
        app = LintMiddleware(app)

    :param app: the application to wrap
    """

    def __init__(self, app):
        self.app = app

    def check_environ(self, environ):
        if type(environ) is not dict:
            warn(WSGIWarning('WSGI environment is not a standard '
                             'python dict.'), stacklevel=4)
        for key in ('REQUEST_METHOD', 'SERVER_NAME', 'SERVER_PORT',
                    'wsgi.version', 'wsgi.input', 'wsgi.errors',
                    'wsgi.multithread', 'wsgi.multiprocess',
                    'wsgi.run_once'):
            if key not in environ:
                warn(WSGIWarning('required environment key %r not found'
                                 % key), stacklevel=3)
        if environ['wsgi.version'] != (1, 0):
            warn(WSGIWarning('environ is not a WSGI 1.0 environ'),
                 stacklevel=3)

        script_name = environ.get('SCRIPT_NAME', '')
        if script_name and script_name[:1] != '/':
            warn(WSGIWarning('SCRIPT_NAME does not start with a slash: %r'
                             % script_name), stacklevel=3)
        path_info = environ.get('PATH_INFO', '')
        if path_info[:1] != '/':
            warn(WSGIWarning('PATH_INFO does not start with a slash: %r'
                             % path_info), stacklevel=3)

    def check_start_response(self, status, headers, exc_info):
        check_string('status', status)
        status_code = status.split(None, 1)[0]
        if len(status_code) != 3 or not status_code.isdigit():
            warn(WSGIWarning('Status code must be three digits'),
                 stacklevel=3)
        if len(status) < 4 or status[3] != ' ':
            # fixed: the %r placeholder was never filled in the original
            warn(WSGIWarning('Invalid value for status %r.  Valid status '
                             'strings are three digits, a space and a '
                             'status explanation' % status), stacklevel=3)
        status_code = int(status_code)
        if status_code < 100:
            warn(WSGIWarning('status code < 100 detected'), stacklevel=3)

        if type(headers) is not list:
            warn(WSGIWarning('header list is not a list'), stacklevel=3)
        for item in headers:
            if type(item) is not tuple or len(item) != 2:
                warn(WSGIWarning('Headers must be a list of 2-item tuples'),
                     stacklevel=3)
            name, value = item
            if type(name) is not str or type(value) is not str:
                warn(WSGIWarning('header items must be strings'),
                     stacklevel=3)
            if name.lower() == 'status':
                warn(WSGIWarning('The status header is not supported due to '
                                 'conflicts with the CGI spec.'),
                     stacklevel=3)

        if exc_info is not None and not isinstance(exc_info, tuple):
            warn(WSGIWarning('invalid value for exc_info'), stacklevel=3)

        headers = Headers(headers)
        self.check_headers(headers)

        return status_code, headers

    def check_headers(self, headers):
        etag = headers.get('etag')
        if etag is not None:
            # fixed: weak etags are emitted with an uppercase 'W/' prefix,
            # which the original check missed
            if etag.startswith(('W/', 'w/')):
                etag = etag[2:]
            if not (etag[:1] == etag[-1:] == '"'):
                warn(HTTPWarning('unquoted etag emitted.'), stacklevel=4)

        location = headers.get('location')
        if location is not None:
            if not urlparse(location).netloc:
                warn(HTTPWarning('absolute URLs required for location header'),
                     stacklevel=4)

    def check_iterator(self, app_iter):
        if isinstance(app_iter, string_types):
            warn(WSGIWarning('application returned string.  Response will '
                             'send character for character to the client '
                             'which will kill the performance.  Return a '
                             'list or iterable instead.'), stacklevel=3)

    def __call__(self, *args, **kwargs):
        if len(args) != 2:
            warn(WSGIWarning('Two arguments to WSGI app required'),
                 stacklevel=2)
        if kwargs:
            warn(WSGIWarning('No keyword arguments to WSGI app allowed'),
                 stacklevel=2)
        environ, start_response = args

        self.check_environ(environ)
        environ['wsgi.input'] = InputStream(environ['wsgi.input'])
        environ['wsgi.errors'] = ErrorStream(environ['wsgi.errors'])

        # hook our own file wrapper in so that applications will always
        # iterate to the end and we can check the content length
        environ['wsgi.file_wrapper'] = FileWrapper

        headers_set = []
        chunks = []

        def checking_start_response(*args, **kwargs):
            if len(args) not in (2, 3):
                # fixed: stacklevel was passed to WSGIWarning instead of
                # warn() in the original
                warn(WSGIWarning('Invalid number of arguments: %s, expected '
                                 '2 or 3' % len(args)), stacklevel=2)
            if kwargs:
                warn(WSGIWarning('no keyword arguments allowed.'))

            status, headers = args[:2]
            if len(args) == 3:
                exc_info = args[2]
            else:
                exc_info = None

            headers_set[:] = self.check_start_response(status, headers,
                                                       exc_info)
            return GuardedWrite(start_response(status, headers, exc_info),
                                chunks)

        app_iter = self.app(environ, checking_start_response)
        self.check_iterator(app_iter)
        return GuardedIterator(app_iter, headers_set, chunks)
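
# --- Editor's note: illustrative sketch, not part of the original module. ---
# Minimal usage, as hinted in the LintMiddleware docstring: wrap any WSGI app
# and run it; WSGI/HTTP violations are reported through the stdlib `warnings`
# module. The demo app and port below are hypothetical.
if __name__ == '__main__':
    from werkzeug.serving import run_simple

    def demo_app(environ, start_response):
        start_response('200 OK', [('Content-Type', 'text/plain')])
        return ['Hello, lint!\n']   # bytestrings under Python 2, as lint expects

    run_simple('localhost', 5000, LintMiddleware(demo_app))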
apache-2.0
dpiekacz/cumulus-linux-ansible-modules
tests/test_cl_prefix_check.py
6
6062
import mock
from nose.tools import set_trace
from library.cl_prefix_check import main, loop_route_check
from asserts import assert_equals

global_values = {
    'prefix': '1.1.1.1/24',
    'poll_interval': '1',
    'timeout': '2',
    'state': 'present',
    'nexthop': '1.2.3.4',
    'nonexthop': '1.2.3.4'
}


def mod_args_generator(values, *args):
    newvalues = global_values.copy()
    newvalues.update(values)

    def mod_args(args):
        return newvalues[args]
    return mod_args


@mock.patch('library.cl_prefix_check.loop_route_check')
@mock.patch('library.cl_prefix_check.AnsibleModule')
def test_module_args(mock_module, mock_loop_route_check):
    """ cl_prefix_check - test module arguments """
    instance = mock_module.return_value
    values = {'nexthop': '1.1.1.1'}
    instance.params.get.side_effect = mod_args_generator(values)
    main()
    mock_module.assert_called_with(
        argument_spec={'prefix': {'required': True, 'type': 'str'},
                       'poll_interval': {'type': 'int', 'default': 1},
                       'timeout': {'type': 'int', 'default': 2},
                       'state': {'type': 'str', 'default': 'present',
                                 'choices': ['present', 'absent']},
                       'nexthop': {'type': 'str', 'default': ''},
                       'nonexthop': {'type': 'str', 'default': ''}
                       })


@mock.patch('library.cl_prefix_check.loop_route_check')
@mock.patch('library.cl_prefix_check.AnsibleModule')
def test_printing_module_exit_msg_loop_passed(mock_module,
                                              mock_loop_route_check):
    """ cl_prefix_check - test exit_json messages when loop check is
    true or false
    """
    instance = mock_module.return_value
    values = {'nexthop': '2.2.2.2'}
    instance.params.get.side_effect = mod_args_generator(values)
    # loop check is true, i.e. condition is matched
    mock_loop_route_check.return_value = True
    main()
    _msg = 'Testing whether route is present. Condition Met'
    instance.exit_json.assert_called_with(msg=_msg, changed=False)
    # loop check is false, i.e. condition is not matched
    mock_loop_route_check.return_value = False
    main()
    # (sic: the misspelled message below mirrors what the module under
    # test actually emits, so it is left unchanged)
    _msg = 'paremeters not found'
    instance.fail_json.assert_called_with(msg=_msg)


def mock_loop_check_arg(arg):
    values = {
        'prefix': '10.1.1.1/24',
        'state': 'present',
        'timeout': '10',
        'poll_interval': '2',
        'nexthop': '',
        'nonexthop': ''
    }
    return values[arg]


def mock_loop_check_arg_absent(arg):
    values = {
        'prefix': '10.1.1.1/24',
        'state': 'absent',
        'timeout': '10',
        'poll_interval': '2',
        'nexthop': '',
        'nonexthop': ''
    }
    return values[arg]


@mock.patch('library.cl_prefix_check.run_cl_cmd')
@mock.patch('library.cl_prefix_check.AnsibleModule')
def test_loop_route_check_state_present(mock_module, mock_run_cl_cmd):
    """ cl_prefix_check - state is present, route is present """
    instance = mock_module.return_value
    instance.params.get.side_effect = mock_loop_check_arg
    # run_cl_cmd returns a non-empty array if there is a match,
    # and an empty array if nothing is found.
    mock_run_cl_cmd.return_value = ['something']
    # state is present, route is found
    assert_equals(loop_route_check(instance), True)


@mock.patch('library.cl_prefix_check.run_cl_cmd')
@mock.patch('library.cl_prefix_check.AnsibleModule')
def test_loop_route_check_state_absent(mock_module, mock_run_cl_cmd):
    """ cl_prefix_check - state is absent, route is absent,
    should return True
    """
    instance = mock_module.return_value
    instance.params.get.side_effect = mock_loop_check_arg_absent
    # run_cl_cmd returns a non-empty array if there is a match,
    # and an empty array if nothing is found.
    mock_run_cl_cmd.return_value = []
    # state is absent, route is not found
    assert_equals(loop_route_check(instance), True)


@mock.patch('library.cl_prefix_check.time.sleep')
@mock.patch('library.cl_prefix_check.run_cl_cmd')
@mock.patch('library.cl_prefix_check.AnsibleModule')
def test_loop_route_check_state_absent_route_present(mock_module,
                                                     mock_run_cl_cmd,
                                                     mock_sleep):
    """ cl_prefix_check - state is absent, route is present,
    should return False
    """
    instance = mock_module.return_value
    instance.params.get.side_effect = mock_loop_check_arg_absent
    # run_cl_cmd returns a non-empty array if there is a match,
    # and an empty array if nothing is found.
    mock_run_cl_cmd.return_value = ['something']
    # state is absent, but the route is found
    assert_equals(loop_route_check(instance), False)
    # test command that outputs route
    mock_run_cl_cmd.assert_called_with(instance,
                                       '/sbin/ip route show 10.1.1.1/24')


@mock.patch('library.cl_prefix_check.time.sleep')
@mock.patch('library.cl_prefix_check.run_cl_cmd')
@mock.patch('library.cl_prefix_check.AnsibleModule')
def test_loop_route_check_state_present_route_failed(mock_module,
                                                     mock_run_cl_cmd,
                                                     mock_sleep):
    """ cl_prefix_check - state is present, route is not present,
    timeout occurs
    """
    # state is present, route is not found; the function polls until the
    # 10 second timeout expires. Timers are difficult to test in nose, so
    # the sleep call itself is mocked and counted instead.
    instance = mock_module.return_value
    instance.params.get.side_effect = mock_loop_check_arg
    mock_run_cl_cmd.return_value = []
    assert_equals(loop_route_check(instance), False)
    # sleep should be called 5 times with a poll interval of 2
    assert_equals(mock_sleep.call_count, 5)
    mock_sleep.assert_called_with(2)
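
# --- Editor's note: illustrative, not part of the original test module. ---
# These tests are written for nose (see the nose.tools import above); a
# typical invocation from the repository root would be:
#
#     nosetests tests/test_cl_prefix_check.py
#
if __name__ == '__main__':
    import nose
    nose.runmodule()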
gpl-3.0
deweller/PHPCodeIntel
phpdaemon.py
1
3844
import sublime
import os
import json
import subprocess
import threading
import socket
import time


# sends a remote command to the php daemon
# and returns the result
def runRemoteCommandInPHPDaemon(prefs, command, args, aSync=False):
    payload = {}
    payload['cmd'] = command
    payload['args'] = args

    json_string = sendMessageToPHPDaemon(prefs, json.dumps(payload), aSync)
    if aSync:
        return

    if json_string == None or json_string == '':
        debugMsg(prefs, "runRemoteCommandInPHPDaemon response: None")
        return

    response = json.loads(json_string)
    debugMsg(prefs, "runRemoteCommandInPHPDaemon response: " + json.dumps(response['msg']))
    return response['msg']


def runAsyncRemoteCommandInPHPDaemon(prefs, command, args):
    runRemoteCommandInPHPDaemon(prefs, command, args, True)


# connects to the socket, sends the message and returns the result
def sendMessageToPHPDaemon(prefs, message, aSync=False):
    sock = None
    try:
        sock = connectToSocket(prefs)
    except socket.error as e:
        debugMsg(prefs, "e.errno=" + str(e.errno))
        if e.errno == 61:
            # connection refused - try restarting daemon
            debugMsg(prefs, "starting daemon")
            startPHPDaemon(prefs)
            # wait 250ms for daemon to start
            time.sleep(0.25)
            # connect again
            sock = connectToSocket(prefs)
    except Exception as e:
        debugMsg(prefs, "error starting PHP daemon: %s" % e)

    if not sock:
        warnMsg("unable to connect to socket on port " + str(prefs.daemon_port))
        return

    netstring = str(len(message)) + ":" + message + ","
    sent = sock.send(netstring.encode('utf-8'))

    if aSync:
        threading.Thread(target=processAsyncResponse, args=(prefs, sock)).start()
        return

    data = readDataFromSocket(sock)
    return data


def processAsyncResponse(prefs, sock):
    json_string = readDataFromSocket(sock)
    if json_string == None or json_string == '':
        warnMsg("runRemoteCommandInPHPDaemon response (async): None")
        return

    response = json.loads(json_string)
    # print("response read: "+json.dumps(response['msg']))
    # do something with response['msg']
    sublime.status_message("PHPCI: done")


def readDataFromSocket(sock):
    data = ''
    while True:
        chunk = sock.recv(1024)
        if not chunk:
            break
        data = data + chunk.decode('utf-8')
    sock.close()
    return data


# initiates a socket connection
def connectToSocket(prefs):
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.settimeout(5)
    sock.connect(('127.0.0.1', int(prefs.daemon_port)))
    return sock


# starts the PHP daemon that processes commands
def startPHPDaemon(prefs):
    debugMsg(prefs, "startPHPDaemon")

    args = []
    args.append(prefs.php_path)
    args.append("daemon.php")
    args.append(str(prefs.daemon_port))

    # Hide the console window on Windows
    startupinfo = None
    if os.name == "nt":
        startupinfo = subprocess.STARTUPINFO()
        startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW

    proc_env = os.environ.copy()

    debugMsg(prefs, "starting proc " + ' '.join(args))
    bin_path = os.path.join(sublime.packages_path(), 'PHPCodeIntel', 'PHP', 'bin')
    proc = subprocess.Popen(args, stdout=subprocess.PIPE, startupinfo=startupinfo, env=proc_env, cwd=bin_path)


##############################################################################
# debug
##############################################################################

def debugMsg(prefs, msg):
    if prefs.debug_enabled == True:
        print("[PHPCodeIntel] " + str(msg))


def warnMsg(msg):
    print("[PHPCodeIntel] WARN: " + str(msg))
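
# --- Editor's note: illustrative sketch, not part of the original file. ---
# The daemon protocol above is JSON wrapped in a netstring
# ("<length>:<payload>,"); a round-trip of that framing looks like this:
def _netstring_demo():
    payload = json.dumps({'cmd': 'ping', 'args': []})   # hypothetical command
    framed = str(len(payload)) + ":" + payload + ","    # encode
    length, rest = framed.split(":", 1)                 # decode
    body = rest[:int(length)]
    assert body == payload and rest[int(length)] == ","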
mit
40123103/2015cd_40123103
static/Brython3.1.1-20150328-091302/Lib/warnings.py
752
13825
"""Python part of the warnings subsystem.""" # Note: function level imports should *not* be used # in this module as it may cause import lock deadlock. # See bug 683658. import linecache import sys __all__ = ["warn", "showwarning", "formatwarning", "filterwarnings", "resetwarnings", "catch_warnings"] def showwarning(message, category, filename, lineno, file=None, line=None): """Hook to write a warning to a file; replace if you like.""" if file is None: file = sys.stderr try: file.write(formatwarning(message, category, filename, lineno, line)) except IOError: pass # the file (probably stderr) is invalid - this warning gets lost. def formatwarning(message, category, filename, lineno, line=None): """Function to format a warning the standard way.""" s = "%s:%s: %s: %s\n" % (filename, lineno, category.__name__, message) line = linecache.getline(filename, lineno) if line is None else line if line: line = line.strip() s += " %s\n" % line return s def filterwarnings(action, message="", category=Warning, module="", lineno=0, append=False): """Insert an entry into the list of warnings filters (at the front). 'action' -- one of "error", "ignore", "always", "default", "module", or "once" 'message' -- a regex that the warning message must match 'category' -- a class that the warning must be a subclass of 'module' -- a regex that the module name must match 'lineno' -- an integer line number, 0 matches all warnings 'append' -- if true, append to the list of filters """ import re assert action in ("error", "ignore", "always", "default", "module", "once"), "invalid action: %r" % (action,) assert isinstance(message, str), "message must be a string" assert isinstance(category, type), "category must be a class" assert issubclass(category, Warning), "category must be a Warning subclass" assert isinstance(module, str), "module must be a string" assert isinstance(lineno, int) and lineno >= 0, \ "lineno must be an int >= 0" item = (action, re.compile(message, re.I), category, re.compile(module), lineno) if append: filters.append(item) else: filters.insert(0, item) def simplefilter(action, category=Warning, lineno=0, append=False): """Insert a simple entry into the list of warnings filters (at the front). A simple filter matches all modules and messages. 
'action' -- one of "error", "ignore", "always", "default", "module", or "once" 'category' -- a class that the warning must be a subclass of 'lineno' -- an integer line number, 0 matches all warnings 'append' -- if true, append to the list of filters """ assert action in ("error", "ignore", "always", "default", "module", "once"), "invalid action: %r" % (action,) assert isinstance(lineno, int) and lineno >= 0, \ "lineno must be an int >= 0" item = (action, None, category, None, lineno) if append: filters.append(item) else: filters.insert(0, item) def resetwarnings(): """Clear the list of warning filters, so that no filters are active.""" filters[:] = [] class _OptionError(Exception): """Exception used by option processing helpers.""" pass # Helper to process -W options passed via sys.warnoptions def _processoptions(args): for arg in args: try: _setoption(arg) except _OptionError as msg: print("Invalid -W option ignored:", msg, file=sys.stderr) # Helper for _processoptions() def _setoption(arg): import re parts = arg.split(':') if len(parts) > 5: raise _OptionError("too many fields (max 5): %r" % (arg,)) while len(parts) < 5: parts.append('') action, message, category, module, lineno = [s.strip() for s in parts] action = _getaction(action) message = re.escape(message) category = _getcategory(category) module = re.escape(module) if module: module = module + '$' if lineno: try: lineno = int(lineno) if lineno < 0: raise ValueError except (ValueError, OverflowError): raise _OptionError("invalid lineno %r" % (lineno,)) else: lineno = 0 filterwarnings(action, message, category, module, lineno) # Helper for _setoption() def _getaction(action): if not action: return "default" if action == "all": return "always" # Alias for a in ('default', 'always', 'ignore', 'module', 'once', 'error'): if a.startswith(action): return a raise _OptionError("invalid action: %r" % (action,)) # Helper for _setoption() def _getcategory(category): import re if not category: return Warning if re.match("^[a-zA-Z0-9_]+$", category): try: cat = eval(category) except NameError: raise _OptionError("unknown warning category: %r" % (category,)) else: i = category.rfind(".") module = category[:i] klass = category[i+1:] try: m = __import__(module, None, None, [klass]) except ImportError: raise _OptionError("invalid module name: %r" % (module,)) try: cat = getattr(m, klass) except AttributeError: raise _OptionError("unknown warning category: %r" % (category,)) if not issubclass(cat, Warning): raise _OptionError("invalid warning category: %r" % (category,)) return cat # Code typically replaced by _warnings def warn(message, category=None, stacklevel=1): """Issue a warning, or maybe ignore it or raise an exception.""" # Check if message is already a Warning object if isinstance(message, Warning): category = message.__class__ # Check category argument if category is None: category = UserWarning assert issubclass(category, Warning) # Get context information try: caller = sys._getframe(stacklevel) except ValueError: globals = sys.__dict__ lineno = 1 else: globals = caller.f_globals lineno = caller.f_lineno if '__name__' in globals: module = globals['__name__'] else: module = "<string>" filename = globals.get('__file__') if filename: fnl = filename.lower() if fnl.endswith((".pyc", ".pyo")): filename = filename[:-1] else: if module == "__main__": try: filename = sys.argv[0] except AttributeError: # embedded interpreters don't have sys.argv, see bug #839151 filename = '__main__' if not filename: filename = module registry = 
globals.setdefault("__warningregistry__", {}) warn_explicit(message, category, filename, lineno, module, registry, globals) def warn_explicit(message, category, filename, lineno, module=None, registry=None, module_globals=None): lineno = int(lineno) if module is None: module = filename or "<unknown>" if module[-3:].lower() == ".py": module = module[:-3] # XXX What about leading pathname? if registry is None: registry = {} if isinstance(message, Warning): text = str(message) category = message.__class__ else: text = message message = category(message) key = (text, category, lineno) # Quick test for common case if registry.get(key): return # Search the filters for item in filters: action, msg, cat, mod, ln = item if ((msg is None or msg.match(text)) and issubclass(category, cat) and (mod is None or mod.match(module)) and (ln == 0 or lineno == ln)): break else: action = defaultaction # Early exit actions if action == "ignore": registry[key] = 1 return # Prime the linecache for formatting, in case the # "file" is actually in a zipfile or something. linecache.getlines(filename, module_globals) if action == "error": raise message # Other actions if action == "once": registry[key] = 1 oncekey = (text, category) if onceregistry.get(oncekey): return onceregistry[oncekey] = 1 elif action == "always": pass elif action == "module": registry[key] = 1 altkey = (text, category, 0) if registry.get(altkey): return registry[altkey] = 1 elif action == "default": registry[key] = 1 else: # Unrecognized actions are errors raise RuntimeError( "Unrecognized action (%r) in warnings.filters:\n %s" % (action, item)) if not callable(showwarning): raise TypeError("warnings.showwarning() must be set to a " "function or method") # Print message and context showwarning(message, category, filename, lineno) class WarningMessage(object): """Holds the result of a single showwarning() call.""" _WARNING_DETAILS = ("message", "category", "filename", "lineno", "file", "line") def __init__(self, message, category, filename, lineno, file=None, line=None): local_values = locals() for attr in self._WARNING_DETAILS: setattr(self, attr, local_values[attr]) self._category_name = category.__name__ if category else None def __str__(self): return ("{message : %r, category : %r, filename : %r, lineno : %s, " "line : %r}" % (self.message, self._category_name, self.filename, self.lineno, self.line)) class catch_warnings(object): """A context manager that copies and restores the warnings filter upon exiting the context. The 'record' argument specifies whether warnings should be captured by a custom implementation of warnings.showwarning() and be appended to a list returned by the context manager. Otherwise None is returned by the context manager. The objects appended to the list are arguments whose attributes mirror the arguments to showwarning(). The 'module' argument is to specify an alternative module to the module named 'warnings' and imported under that name. This argument is only useful when testing the warnings module itself. """ def __init__(self, *, record=False, module=None): """Specify whether to record warnings and if an alternative module should be used other than sys.modules['warnings']. For compatibility with Python 3.0, please consider all arguments to be keyword-only. 
""" self._record = record self._module = sys.modules['warnings'] if module is None else module self._entered = False def __repr__(self): args = [] if self._record: args.append("record=True") if self._module is not sys.modules['warnings']: args.append("module=%r" % self._module) name = type(self).__name__ return "%s(%s)" % (name, ", ".join(args)) def __enter__(self): if self._entered: raise RuntimeError("Cannot enter %r twice" % self) self._entered = True self._filters = self._module.filters self._module.filters = self._filters[:] self._showwarning = self._module.showwarning if self._record: log = [] def showwarning(*args, **kwargs): log.append(WarningMessage(*args, **kwargs)) self._module.showwarning = showwarning return log else: return None def __exit__(self, *exc_info): if not self._entered: raise RuntimeError("Cannot exit %r without entering first" % self) self._module.filters = self._filters self._module.showwarning = self._showwarning # filters contains a sequence of filter 5-tuples # The components of the 5-tuple are: # - an action: error, ignore, always, default, module, or once # - a compiled regex that must match the warning message # - a class representing the warning category # - a compiled regex that must match the module that is being warned # - a line number for the line being warning, or 0 to mean any line # If either if the compiled regexs are None, match anything. _warnings_defaults = False try: from _warnings import (filters, _defaultaction, _onceregistry, warn, warn_explicit) defaultaction = _defaultaction onceregistry = _onceregistry _warnings_defaults = True except ImportError: filters = [] defaultaction = "default" onceregistry = {} # Module initialization _processoptions(sys.warnoptions) if not _warnings_defaults: silence = [ImportWarning, PendingDeprecationWarning] silence.append(DeprecationWarning) for cls in silence: simplefilter("ignore", category=cls) bytes_warning = sys.flags.bytes_warning if bytes_warning > 1: bytes_action = "error" elif bytes_warning: bytes_action = "default" else: bytes_action = "ignore" simplefilter(bytes_action, category=BytesWarning, append=1) # resource usage warnings are enabled by default in pydebug mode if hasattr(sys, 'gettotalrefcount'): resource_action = "always" else: resource_action = "ignore" simplefilter(resource_action, category=ResourceWarning, append=1) del _warnings_defaults
gpl-3.0
Mozta/pagina-diagnostijuego
venv/lib/python2.7/site-packages/pip/_vendor/requests/packages/chardet/escprober.py
2936
3187
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

from . import constants
from .escsm import (HZSMModel, ISO2022CNSMModel, ISO2022JPSMModel,
                    ISO2022KRSMModel)
from .charsetprober import CharSetProber
from .codingstatemachine import CodingStateMachine
from .compat import wrap_ord


class EscCharSetProber(CharSetProber):
    def __init__(self):
        CharSetProber.__init__(self)
        self._mCodingSM = [
            CodingStateMachine(HZSMModel),
            CodingStateMachine(ISO2022CNSMModel),
            CodingStateMachine(ISO2022JPSMModel),
            CodingStateMachine(ISO2022KRSMModel)
        ]
        self.reset()

    def reset(self):
        CharSetProber.reset(self)
        for codingSM in self._mCodingSM:
            if not codingSM:
                continue
            codingSM.active = True
            codingSM.reset()
        self._mActiveSM = len(self._mCodingSM)
        self._mDetectedCharset = None

    def get_charset_name(self):
        return self._mDetectedCharset

    def get_confidence(self):
        if self._mDetectedCharset:
            return 0.99
        else:
            return 0.00

    def feed(self, aBuf):
        for c in aBuf:
            # PY3K: aBuf is a byte array, so c is an int, not a byte
            for codingSM in self._mCodingSM:
                if not codingSM:
                    continue
                if not codingSM.active:
                    continue
                codingState = codingSM.next_state(wrap_ord(c))
                if codingState == constants.eError:
                    codingSM.active = False
                    self._mActiveSM -= 1
                    if self._mActiveSM <= 0:
                        self._mState = constants.eNotMe
                        return self.get_state()
                elif codingState == constants.eItsMe:
                    self._mState = constants.eFoundIt
                    self._mDetectedCharset = codingSM.get_coding_state_machine()  # nopep8
                    return self.get_state()

        return self.get_state()
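
# --- Editor's note: illustrative sketch, not part of the original module. ---
# Feeding bytes that contain an ISO-2022 escape sequence should normally flip
# the prober to the "found it" state; the sample bytes below are an assumed
# ISO-2022-JP fragment (ESC $ B ... ESC ( B).
def _demo():
    prober = EscCharSetProber()
    prober.feed(b'\x1b$B$3$s$K$A$O\x1b(B')
    # typically ('ISO-2022-JP', 0.99) once the escape sequence is seen
    return prober.get_charset_name(), prober.get_confidence()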
gpl-3.0
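EscCharSetProber runs every byte through four escape-sequence state machines in parallel and reports the first machine that reaches eItsMe, which is why its confidence jumps straight to 0.99. With the chardet package installed, ISO-2022 input exercises exactly this prober; a small sketch (the result dict layout varies slightly across chardet versions):

import chardet

# ISO-2022-JP signals the charset with ESC sequences, so detection is
# routed through EscCharSetProber rather than the statistical probers.
payload = u"こんにちは世界".encode("iso2022_jp")
print(chardet.detect(payload))
# expected: something like {'encoding': 'ISO-2022-JP', 'confidence': 0.99}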
ryfeus/lambda-packs
Opencv_pil/source/numpy/distutils/__config__.py
6
1287
# This file is generated by /tmp/pip-alns09-build/-c # It contains system_info results at the time of building this package. __all__ = ["get_info","show"] lapack_opt_info={'libraries': ['openblas', 'openblas'], 'library_dirs': ['/usr/local/lib'], 'language': 'c', 'define_macros': [('HAVE_CBLAS', None)]} openblas_lapack_info={'libraries': ['openblas', 'openblas'], 'library_dirs': ['/usr/local/lib'], 'language': 'c', 'define_macros': [('HAVE_CBLAS', None)]} blas_mkl_info={} openblas_info={'libraries': ['openblas', 'openblas'], 'library_dirs': ['/usr/local/lib'], 'language': 'c', 'define_macros': [('HAVE_CBLAS', None)]} blas_opt_info={'libraries': ['openblas', 'openblas'], 'library_dirs': ['/usr/local/lib'], 'language': 'c', 'define_macros': [('HAVE_CBLAS', None)]} def get_info(name): g = globals() return g.get(name, g.get(name + "_info", {})) def show(): for name,info_dict in globals().items(): if name[0] == "_" or type(info_dict) is not type({}): continue print(name + ":") if not info_dict: print(" NOT AVAILABLE") for k,v in info_dict.items(): v = str(v) if k == "sources" and len(v) > 200: v = v[:60] + " ...\n... " + v[-60:] print(" %s = %s" % (k,v))
mit
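get_info above looks the name up in the module's globals and falls back to the conventional name + "_info" alias, returning an empty dict for unknown keys. The pattern works in any module; a self-contained sketch:

# Module-level "registry" dicts, mirroring numpy.distutils.__config__.
blas_opt_info = {'libraries': ['openblas'], 'library_dirs': ['/usr/local/lib']}
lapack_opt_info = {'libraries': ['openblas'], 'language': 'c'}

def get_info(name):
    g = globals()
    # Exact name first, then the "<name>_info" alias, then an empty dict.
    return g.get(name, g.get(name + "_info", {}))

print(get_info("blas_opt"))  # found via the "_info" fallback
print(get_info("atlas"))     # unknown -> {}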
hdinsight/hue
desktop/core/ext-py/pycrypto-2.6.1/lib/Crypto/SelfTest/Random/OSRNG/test_generic.py
131
1746
# -*- coding: utf-8 -*- # # SelfTest/Util/test_generic.py: Self-test for the OSRNG.new() function # # Written in 2008 by Dwayne C. Litzenberger <[email protected]> # # =================================================================== # The contents of this file are dedicated to the public domain. To # the extent that dedication to the public domain is not available, # everyone is granted a worldwide, perpetual, royalty-free, # non-exclusive license to exercise all rights associated with the # contents of this file for any purpose whatsoever. # No rights are reserved. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS # BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN # ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN # CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. # =================================================================== """Self-test suite for Crypto.Random.OSRNG""" __revision__ = "$Id$" import unittest class SimpleTest(unittest.TestCase): def runTest(self): """Crypto.Random.OSRNG.new()""" # Import the OSRNG module and try to use it import Crypto.Random.OSRNG randobj = Crypto.Random.OSRNG.new() x = randobj.read(16) y = randobj.read(16) self.assertNotEqual(x, y) def get_tests(config={}): return [SimpleTest()] if __name__ == '__main__': suite = lambda: unittest.TestSuite(get_tests()) unittest.main(defaultTest='suite') # vim:set ts=4 sw=4 sts=4 expandtab:
apache-2.0
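The self-test above only asserts that two successive 16-byte reads differ, a cheap sanity check rather than a statistical test. The same check against the operating-system RNG needs nothing beyond the standard library; a sketch in the same get_tests/suite shape:

import os
import unittest

class OSRandomSanityTest(unittest.TestCase):
    def runTest(self):
        """Two independent 16-byte reads should never collide."""
        self.assertNotEqual(os.urandom(16), os.urandom(16))

def get_tests(config={}):
    return [OSRandomSanityTest()]

if __name__ == '__main__':
    suite = lambda: unittest.TestSuite(get_tests())
    unittest.main(defaultTest='suite')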
skosukhin/spack
var/spack/repos/builtin/packages/font-winitzki-cyrillic/package.py
1
2138
############################################################################## # Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC. # Produced at the Lawrence Livermore National Laboratory. # # This file is part of Spack. # Created by Todd Gamblin, [email protected], All rights reserved. # LLNL-CODE-647188 # # For details, see https://github.com/spack/spack # Please also see the NOTICE and LICENSE files for our notice and the LGPL. # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License (as # published by the Free Software Foundation) version 2.1, February 1999. # # This program is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and # conditions of the GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## from spack import * class FontWinitzkiCyrillic(Package): """X.org winitzki-cyrillic font.""" homepage = "http://cgit.freedesktop.org/xorg/font/winitzki-cyrillic" url = "https://www.x.org/archive/individual/font/font-winitzki-cyrillic-1.0.3.tar.gz" version('1.0.3', '777c667b080b33793528d5abf3247a48') depends_on('font-util') depends_on('fontconfig', type='build') depends_on('mkfontdir', type='build') depends_on('bdftopcf', type='build') depends_on('[email protected]:', type='build') depends_on('util-macros', type='build') def install(self, spec, prefix): configure('--prefix={0}'.format(prefix)) make() make('install') # `make install` copies the files to the font-util installation. # Create a fake directory to convince Spack that we actually # installed something. mkdir(prefix.lib)
lgpl-2.1
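The recipe above is the standard Spack autotools shape: declare versions and build-time dependencies, then drive configure/make/make install with the wrappers Spack injects into the build environment. A minimal sketch of the same shape, matching the Spack vintage shown here; the package name, URL, and checksum are placeholders, not real metadata:

from spack import *

class FontExample(Package):
    """Hypothetical X.org font package mirroring the recipe above."""
    homepage = "https://www.x.org/"
    url = "https://www.x.org/archive/individual/font/font-example-1.0.0.tar.gz"

    version('1.0.0', '00000000000000000000000000000000')  # placeholder md5

    depends_on('font-util')
    depends_on('bdftopcf', type='build')
    depends_on('pkg-config', type='build')

    def install(self, spec, prefix):
        configure('--prefix={0}'.format(prefix))
        make()
        make('install')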
johan--/Geotrek
geotrek/maintenance/filters.py
3
1944
from django.utils.translation import ugettext_lazy as _ from mapentity.filters import PolygonFilter, PythonPolygonFilter from geotrek.core.models import Topology from geotrek.common.filters import ( StructureRelatedFilterSet, YearFilter, YearBetweenFilter) from geotrek.common.widgets import YearSelect from .models import Intervention, Project class PolygonTopologyFilter(PolygonFilter): def filter(self, qs, value): if not value: return qs lookup = self.lookup_type inner_qs = Topology.objects.filter(**{'geom__%s' % lookup: value}) return qs.filter(**{'%s__in' % self.name: inner_qs}) class InterventionYearSelect(YearSelect): label = _(u"Year") def get_years(self): return Intervention.objects.all_years() class InterventionFilterSet(StructureRelatedFilterSet): bbox = PolygonTopologyFilter(name='topology', lookup_type='intersects') year = YearFilter(name='date', widget=InterventionYearSelect, label=_(u"Year")) class Meta(StructureRelatedFilterSet.Meta): model = Intervention fields = StructureRelatedFilterSet.Meta.fields + [ 'status', 'type', 'stake', 'subcontracting', 'project' ] class ProjectYearSelect(YearSelect): label = _(u"Year of activity") def get_years(self): return Project.objects.all_years() class ProjectFilterSet(StructureRelatedFilterSet): bbox = PythonPolygonFilter(name='geom') in_year = YearBetweenFilter(name=('begin_year', 'end_year'), widget=ProjectYearSelect, label=_(u"Year of activity")) class Meta(StructureRelatedFilterSet.Meta): model = Project fields = StructureRelatedFilterSet.Meta.fields + [ 'in_year', 'type', 'domain', 'contractors', 'project_owner', 'project_manager', 'founders' ]
bsd-2-clause
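PolygonTopologyFilter narrows the queryset in two steps: collect the Topology rows whose geometry matches the drawn polygon, then keep only objects whose named relation points into that set. The same two-step spatial filter as a stand-alone django-filter sketch; the model import is illustrative, a configured GeoDjango backend is assumed, and newer django-filter spells the attribute field_name where the code above uses name:

from django_filters import Filter
from myapp.models import Topology  # hypothetical app providing Topology.geom

class PolygonTopologyFilter(Filter):
    """Keep objects whose related Topology intersects the drawn polygon."""
    def filter(self, qs, value):
        if not value:
            return qs  # no polygon drawn: leave the queryset untouched
        inner_qs = Topology.objects.filter(geom__intersects=value)
        return qs.filter(**{'%s__in' % self.field_name: inner_qs})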
YUNZHONGTAN/MY_Python_script
soft_ware/pymongo-3.2/ez_setup.py
164
12155
#!python """Bootstrap setuptools installation If you want to use setuptools in your package's setup.py, just include this file in the same directory with it, and add this to the top of your setup.py:: from ez_setup import use_setuptools use_setuptools() If you want to require a specific version of setuptools, set a download mirror, or use an alternate download directory, you can do so by supplying the appropriate options to ``use_setuptools()``. This file can also be run as a script to install or upgrade setuptools. """ import os import shutil import sys import tempfile import tarfile import optparse import subprocess import platform from distutils import log try: from site import USER_SITE except ImportError: USER_SITE = None DEFAULT_VERSION = "1.4.2" DEFAULT_URL = "https://pypi.python.org/packages/source/s/setuptools/" def _python_cmd(*args): args = (sys.executable,) + args return subprocess.call(args) == 0 def _check_call_py24(cmd, *args, **kwargs): res = subprocess.call(cmd, *args, **kwargs) class CalledProcessError(Exception): pass if not res == 0: msg = "Command '%s' return non-zero exit status %d" % (cmd, res) raise CalledProcessError(msg) vars(subprocess).setdefault('check_call', _check_call_py24) def _install(tarball, install_args=()): # extracting the tarball tmpdir = tempfile.mkdtemp() log.warn('Extracting in %s', tmpdir) old_wd = os.getcwd() try: os.chdir(tmpdir) tar = tarfile.open(tarball) _extractall(tar) tar.close() # going in the directory subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0]) os.chdir(subdir) log.warn('Now working in %s', subdir) # installing log.warn('Installing Setuptools') if not _python_cmd('setup.py', 'install', *install_args): log.warn('Something went wrong during the installation.') log.warn('See the error message above.') # exitcode will be 2 return 2 finally: os.chdir(old_wd) shutil.rmtree(tmpdir) def _build_egg(egg, tarball, to_dir): # extracting the tarball tmpdir = tempfile.mkdtemp() log.warn('Extracting in %s', tmpdir) old_wd = os.getcwd() try: os.chdir(tmpdir) tar = tarfile.open(tarball) _extractall(tar) tar.close() # going in the directory subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0]) os.chdir(subdir) log.warn('Now working in %s', subdir) # building an egg log.warn('Building a Setuptools egg in %s', to_dir) _python_cmd('setup.py', '-q', 'bdist_egg', '--dist-dir', to_dir) finally: os.chdir(old_wd) shutil.rmtree(tmpdir) # returning the result log.warn(egg) if not os.path.exists(egg): raise IOError('Could not build the egg.') def _do_download(version, download_base, to_dir, download_delay): egg = os.path.join(to_dir, 'setuptools-%s-py%d.%d.egg' % (version, sys.version_info[0], sys.version_info[1])) if not os.path.exists(egg): tarball = download_setuptools(version, download_base, to_dir, download_delay) _build_egg(egg, tarball, to_dir) sys.path.insert(0, egg) # Remove previously-imported pkg_resources if present (see # https://bitbucket.org/pypa/setuptools/pull-request/7/ for details). 
if 'pkg_resources' in sys.modules: del sys.modules['pkg_resources'] import setuptools setuptools.bootstrap_install_from = egg def use_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=os.curdir, download_delay=15): # making sure we use the absolute path to_dir = os.path.abspath(to_dir) was_imported = 'pkg_resources' in sys.modules or \ 'setuptools' in sys.modules try: import pkg_resources except ImportError: return _do_download(version, download_base, to_dir, download_delay) try: pkg_resources.require("setuptools>=" + version) return except pkg_resources.VersionConflict: e = sys.exc_info()[1] if was_imported: sys.stderr.write( "The required version of setuptools (>=%s) is not available,\n" "and can't be installed while this script is running. Please\n" "install a more recent version first, using\n" "'easy_install -U setuptools'." "\n\n(Currently using %r)\n" % (version, e.args[0])) sys.exit(2) else: del pkg_resources, sys.modules['pkg_resources'] # reload ok return _do_download(version, download_base, to_dir, download_delay) except pkg_resources.DistributionNotFound: return _do_download(version, download_base, to_dir, download_delay) def _clean_check(cmd, target): """ Run the command to download target. If the command fails, clean up before re-raising the error. """ try: subprocess.check_call(cmd) except subprocess.CalledProcessError: if os.access(target, os.F_OK): os.unlink(target) raise def download_file_powershell(url, target): """ Download the file at url to target using Powershell (which will validate trust). Raise an exception if the command cannot complete. """ target = os.path.abspath(target) cmd = [ 'powershell', '-Command', "(new-object System.Net.WebClient).DownloadFile(%(url)r, %(target)r)" % vars(), ] _clean_check(cmd, target) def has_powershell(): if platform.system() != 'Windows': return False cmd = ['powershell', '-Command', 'echo test'] devnull = open(os.path.devnull, 'wb') try: try: subprocess.check_call(cmd, stdout=devnull, stderr=devnull) except: return False finally: devnull.close() return True download_file_powershell.viable = has_powershell def download_file_curl(url, target): cmd = ['curl', url, '--silent', '--output', target] _clean_check(cmd, target) def has_curl(): cmd = ['curl', '--version'] devnull = open(os.path.devnull, 'wb') try: try: subprocess.check_call(cmd, stdout=devnull, stderr=devnull) except: return False finally: devnull.close() return True download_file_curl.viable = has_curl def download_file_wget(url, target): cmd = ['wget', url, '--quiet', '--output-document', target] _clean_check(cmd, target) def has_wget(): cmd = ['wget', '--version'] devnull = open(os.path.devnull, 'wb') try: try: subprocess.check_call(cmd, stdout=devnull, stderr=devnull) except: return False finally: devnull.close() return True download_file_wget.viable = has_wget def download_file_insecure(url, target): """ Use Python to download the file, even though it cannot authenticate the connection. """ try: from urllib.request import urlopen except ImportError: from urllib2 import urlopen src = dst = None try: src = urlopen(url) # Read/write all in one block, so we don't create a corrupt file # if the download is interrupted. 
data = src.read() dst = open(target, "wb") dst.write(data) finally: if src: src.close() if dst: dst.close() download_file_insecure.viable = lambda: True def get_best_downloader(): downloaders = [ download_file_powershell, download_file_curl, download_file_wget, download_file_insecure, ] for dl in downloaders: if dl.viable(): return dl def download_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=os.curdir, delay=15, downloader_factory=get_best_downloader): """Download setuptools from a specified location and return its filename `version` should be a valid setuptools version number that is available as an egg for download under the `download_base` URL (which should end with a '/'). `to_dir` is the directory where the egg will be downloaded. `delay` is the number of seconds to pause before an actual download attempt. ``downloader_factory`` should be a function taking no arguments and returning a function for downloading a URL to a target. """ # making sure we use the absolute path to_dir = os.path.abspath(to_dir) tgz_name = "setuptools-%s.tar.gz" % version url = download_base + tgz_name saveto = os.path.join(to_dir, tgz_name) if not os.path.exists(saveto): # Avoid repeated downloads log.warn("Downloading %s", url) downloader = downloader_factory() downloader(url, saveto) return os.path.realpath(saveto) def _extractall(self, path=".", members=None): """Extract all members from the archive to the current working directory and set owner, modification time and permissions on directories afterwards. `path' specifies a different directory to extract to. `members' is optional and must be a subset of the list returned by getmembers(). """ import copy import operator from tarfile import ExtractError directories = [] if members is None: members = self for tarinfo in members: if tarinfo.isdir(): # Extract directories with a safe mode. directories.append(tarinfo) tarinfo = copy.copy(tarinfo) tarinfo.mode = 448 # decimal for oct 0700 self.extract(tarinfo, path) # Reverse sort directories. if sys.version_info < (2, 4): def sorter(dir1, dir2): return cmp(dir1.name, dir2.name) directories.sort(sorter) directories.reverse() else: directories.sort(key=operator.attrgetter('name'), reverse=True) # Set correct owner, mtime and filemode on directories. 
for tarinfo in directories: dirpath = os.path.join(path, tarinfo.name) try: self.chown(tarinfo, dirpath) self.utime(tarinfo, dirpath) self.chmod(tarinfo, dirpath) except ExtractError: e = sys.exc_info()[1] if self.errorlevel > 1: raise else: self._dbg(1, "tarfile: %s" % e) def _build_install_args(options): """ Build the arguments to 'python setup.py install' on the setuptools package """ install_args = [] if options.user_install: if sys.version_info < (2, 6): log.warn("--user requires Python 2.6 or later") raise SystemExit(1) install_args.append('--user') return install_args def _parse_args(): """ Parse the command line for options """ parser = optparse.OptionParser() parser.add_option( '--user', dest='user_install', action='store_true', default=False, help='install in user site package (requires Python 2.6 or later)') parser.add_option( '--download-base', dest='download_base', metavar="URL", default=DEFAULT_URL, help='alternative URL from where to download the setuptools package') parser.add_option( '--insecure', dest='downloader_factory', action='store_const', const=lambda: download_file_insecure, default=get_best_downloader, help='Use internal, non-validating downloader' ) options, args = parser.parse_args() # positional arguments are ignored return options def main(version=DEFAULT_VERSION): """Install or upgrade setuptools and EasyInstall""" options = _parse_args() tarball = download_setuptools(download_base=options.download_base, downloader_factory=options.downloader_factory) return _install(tarball, _build_install_args(options)) if __name__ == '__main__': sys.exit(main())
gpl-3.0
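get_best_downloader implements a small strategy chain: each download function carries a viable attribute holding a zero-argument probe, and the first strategy whose probe passes wins, with the insecure urllib fallback guaranteeing a hit. The pattern stands alone; a Python 3 sketch using shutil.which as the probe:

import shutil

def download_with_curl(url, target):
    print("would run: curl %s --silent --output %s" % (url, target))
download_with_curl.viable = lambda: shutil.which("curl") is not None

def download_with_wget(url, target):
    print("would run: wget %s --quiet --output-document %s" % (url, target))
download_with_wget.viable = lambda: shutil.which("wget") is not None

def download_insecure(url, target):
    print("would fetch %s with urllib" % url)
download_insecure.viable = lambda: True  # last-resort fallback always works

def get_best_downloader():
    # The first strategy whose probe passes wins.
    for dl in (download_with_curl, download_with_wget, download_insecure):
        if dl.viable():
            return dl

get_best_downloader()("https://example.com/pkg.tar.gz", "/tmp/pkg.tar.gz")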
rupran/ansible
lib/ansible/modules/network/cumulus/_cl_img_install.py
60
10997
#!/usr/bin/python # -*- coding: utf-8 -*- # (c) 2016, Cumulus Networks <[email protected]> # # This file is part of Ansible # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. ANSIBLE_METADATA = {'metadata_version': '1.0', 'status': ['deprecated'], 'supported_by': 'community'} DOCUMENTATION = ''' --- module: cl_img_install version_added: "2.1" author: "Cumulus Networks (@CumulusLinux)" short_description: Install a different Cumulus Linux version. deprecated: Deprecated in 2.3. The image slot system no longer exists in Cumulus Linux. description: - install a different version of Cumulus Linux in the inactive slot. For more details go the Image Management User Guide at U(http://docs.cumulusnetworks.com/). options: src: description: - The full path to the Cumulus Linux binary image. Can be a local path, http or https URL. If the code version is in the name of the file, the module will assume this is the version of code you wish to install. required: true version: description: - Inform the module of the exact version one is installing. This overrides the automatic check of version in the file name. For example, if the binary file name is called CumulusLinux-2.2.3.bin, and version is set to '2.5.0', then the module will assume it is installing '2.5.0' not '2.2.3'. If version is not included, then the module will assume '2.2.3' is the version to install. default: None required: false switch_slot: description: - Switch slots after installing the image. To run the installed code, reboot the switch. choices: ['yes', 'no'] default: 'no' required: false requirements: ["Cumulus Linux OS"] ''' EXAMPLES = ''' ## Download and install the image from a webserver. - name: Install image using using http url. Switch slots so the subsequent will load the new version cl_img_install: version: 2.0.1 src: http://10.1.1.1/CumulusLinux-2.0.1.bin switch_slot: yes ## Copy the software from the ansible server to the switch. ## The module will get the code version from the filename ## The code will be installed in the alternate slot but the slot will not be primary ## A subsequent reload will not run the new code - name: Download cumulus linux to local system get_url: src: ftp://cumuluslinux.bin dest: /root/CumulusLinux-2.0.1.bin - name: Install image from local filesystem. Get version from the filename. cl_img_install: src: /root/CumulusLinux-2.0.1.bin ## If the image name has been changed from the original name, use the `version` option ## to inform the module exactly what code version is been installed - name: Download cumulus linux to local system get_url: src: ftp://CumulusLinux-2.0.1.bin dest: /root/image.bin - name: install image and switch slots. 
Only reboot needed cl_img_install: version: 2.0.1 src: /root/image.bin switch_slot: yes ''' RETURN = ''' changed: description: whether the interface was changed returned: changed type: bool sample: True msg: description: human-readable report of success or failure returned: always type: string sample: "interface bond0 config updated" ''' def check_url(module, url): parsed_url = urlparse(url) if len(parsed_url.path) > 0: sch = parsed_url.scheme if (sch == 'http' or sch == 'https' or len(parsed_url.scheme) == 0): return True module.fail_json(msg="Image Path URL. Wrong Format %s" % (url)) return False def run_cl_cmd(module, cmd, check_rc=True): try: (rc, out, err) = module.run_command(cmd, check_rc=check_rc) except Exception: e = get_exception() module.fail_json(msg=e.strerror) # trim last line as it is always empty ret = out.splitlines() return ret def get_slot_info(module): slots = {} slots['1'] = {} slots['2'] = {} active_slotnum = get_active_slot(module) primary_slotnum = get_primary_slot_num(module) for _num in range(1, 3): slot = slots[str(_num)] slot['version'] = get_slot_version(module, str(_num)) if _num == int(active_slotnum): slot['active'] = True if _num == int(primary_slotnum): slot['primary'] = True return slots def get_slot_version(module, slot_num): lsb_release = check_mnt_root_lsb_release(slot_num) switch_firm_ver = check_fw_print_env(module, slot_num) _version = module.sw_version if lsb_release == _version or switch_firm_ver == _version: return _version elif lsb_release: return lsb_release else: return switch_firm_ver def check_mnt_root_lsb_release(slot_num): _path = '/mnt/root-rw/config%s/etc/lsb-release' % (slot_num) try: lsb_release = open(_path) lines = lsb_release.readlines() for line in lines: _match = re.search('DISTRIB_RELEASE=([0-9a-zA-Z.]+)', line) if _match: return _match.group(1).split('-')[0] except: pass return None def check_fw_print_env(module, slot_num): cmd = None if platform.machine() == 'ppc': cmd = "/usr/sbin/fw_printenv -n cl.ver%s" % (slot_num) fw_output = run_cl_cmd(module, cmd) return fw_output[0].split('-')[0] elif platform.machine() == 'x86_64': cmd = "/usr/bin/grub-editenv list" grub_output = run_cl_cmd(module, cmd) for _line in grub_output: _regex_str = re.compile('cl.ver' + slot_num + '=([\w.]+)-') m0 = re.match(_regex_str, _line) if m0: return m0.group(1) def get_primary_slot_num(module): cmd = None if platform.machine() == 'ppc': cmd = "/usr/sbin/fw_printenv -n cl.active" return ''.join(run_cl_cmd(module, cmd)) elif platform.machine() == 'x86_64': cmd = "/usr/bin/grub-editenv list" grub_output = run_cl_cmd(module, cmd) for _line in grub_output: _regex_str = re.compile('cl.active=(\d)') m0 = re.match(_regex_str, _line) if m0: return m0.group(1) def get_active_slot(module): try: cmdline = open('/proc/cmdline').readline() except: module.fail_json(msg='Failed to open /proc/cmdline. 
' + 'Unable to determine active slot') _match = re.search('active=(\d+)', cmdline) if _match: return _match.group(1) return None def install_img(module): src = module.params.get('src') _version = module.sw_version app_path = '/usr/cumulus/bin/cl-img-install -f %s' % (src) run_cl_cmd(module, app_path) perform_switch_slot = module.params.get('switch_slot') if perform_switch_slot is True: check_sw_version(module) else: _changed = True _msg = "Cumulus Linux Version " + _version + " successfully" + \ " installed in alternate slot" module.exit_json(changed=_changed, msg=_msg) def switch_slot(module, slotnum): _switch_slot = module.params.get('switch_slot') if _switch_slot is True: app_path = '/usr/cumulus/bin/cl-img-select %s' % (slotnum) run_cl_cmd(module, app_path) def determine_sw_version(module): _version = module.params.get('version') _filename = '' # Use _version if user defines it if _version: module.sw_version = _version return else: _filename = module.params.get('src').split('/')[-1] _match = re.search('\d+\W\d+\W\w+', _filename) if _match: module.sw_version = re.sub('\W', '.', _match.group()) return _msg = 'Unable to determine version from file %s' % (_filename) module.exit_json(changed=False, msg=_msg) def check_sw_version(module): slots = get_slot_info(module) _version = module.sw_version perform_switch_slot = module.params.get('switch_slot') for _num, slot in slots.items(): if slot['version'] == _version: if 'active' in slot: _msg = "Version %s is installed in the active slot" \ % (_version) module.exit_json(changed=False, msg=_msg) else: _msg = "Version " + _version + \ " is installed in the alternate slot. " if 'primary' not in slot: if perform_switch_slot is True: switch_slot(module, _num) _msg = _msg + \ "cl-img-select has made the alternate " + \ "slot the primary slot. " +\ "Next reboot, switch will load " + _version + "." module.exit_json(changed=True, msg=_msg) else: _msg = _msg + \ "Next reboot will not load " + _version + ". " + \ "switch_slot keyword set to 'no'." module.exit_json(changed=False, msg=_msg) else: if perform_switch_slot is True: _msg = _msg + \ "Next reboot, switch will load " + _version + "." module.exit_json(changed=False, msg=_msg) else: _msg = _msg + \ 'switch_slot set to "no". ' + \ 'No further action to take' module.exit_json(changed=False, msg=_msg) def main(): module = AnsibleModule( argument_spec=dict( src=dict(required=True, type='str'), version=dict(type='str'), switch_slot=dict(type='bool', choices=BOOLEANS, default=False), ), ) determine_sw_version(module) _url = module.params.get('src') check_sw_version(module) check_url(module, _url) install_img(module) # import module snippets from ansible.module_utils.basic import * # incompatible with ansible 1.4.4 - ubuntu 12.04 version # from ansible.module_utils.urls import * from urlparse import urlparse import re if __name__ == '__main__': main()
gpl-3.0
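determine_sw_version falls back to pulling the version out of the image filename: find the first digits-separator-digits-separator-word run, then normalize the separators to dots. That step works outside Ansible; a self-contained sketch:

import re

def version_from_filename(filename):
    """Extract e.g. '2.0.1' from 'CumulusLinux-2.0.1.bin', else None."""
    match = re.search(r'\d+\W\d+\W\w+', filename)
    if match:
        # Whatever separators were matched are normalized to dots.
        return re.sub(r'\W', '.', match.group())
    return None

print(version_from_filename("CumulusLinux-2.0.1.bin"))  # 2.0.1
print(version_from_filename("image.bin"))               # None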
myles-archive/asgard-calendar
events/feeds.py
2
1617
from django.contrib.auth.models import User from django.core.urlresolvers import reverse from django.contrib.sites.models import Site from django.contrib.syndication.views import Feed, FeedDoesNotExist from events.models import Event current_site = Site.objects.get_current() class BaseFeed(Feed): subtitle = u"More than a hapax legomenon." title_template = 'feeds/events_event_title.html' description_template = 'feeds/events_event_description.html' def item_pubdate(self, item): return item.published def item_updated(self, item): return item.date_modified def item_id(self, item): return item.get_absolute_url() def item_author_name(self, item): return u"%s %s" % (item.author.first_name, item.author.last_name) def item_author_email(self, item): return u"%s" % (item.author.email) def item_author_link(self, item): return reverse('events_index') def item_categories(self, item): return item.tags.all() def item_copyright(self, item): return u"Copyright (c) %s, %s %s" % (current_site.name, item.author.first_name, item.author.last_name) def feed_title(self): return u"%s" % current_site.name def feed_authors(self): return ({"name": user.get_full_name()} for user in User.objects.filter(is_staff=True)) class EventsFeed(BaseFeed): title = u"%s: events calendar." % current_site.name def link(self): return reverse('events_index') def items(self): return Event.objects.upcoming()[:10] def item_link(self, item): return item.get_absolute_url() def item_enclosure_url(self, item): return "http://%s%s" % (current_site.domain, item.get_ical_url())
bsd-3-clause
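BaseFeed centralizes the per-item hooks that Django's syndication framework looks up by name, and EventsFeed only supplies the queryset and links. Hooking the feed into a URLconf is one line, since Feed instances are callable views; a sketch in the URL style of this code's Django era (pattern and name are illustrative):

# urls.py -- expose the events feed.
from django.conf.urls import url
from events.feeds import EventsFeed

urlpatterns = [
    url(r'^events/feed/$', EventsFeed(), name='events_feed'),
]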
gangadhar-kadam/smrterpfrappe
frappe/patches/v4_0/rename_sitemap_to_route.py
4
1443
import frappe from frappe.model import rename_field def execute(): tables = frappe.db.sql_list("show tables") if "tabWebsite Route" not in tables: frappe.rename_doc("DocType", "Website Sitemap", "Website Route", force=True) if "tabWebsite Template" not in tables: frappe.rename_doc("DocType", "Website Sitemap Config", "Website Template", force=True) if "tabWebsite Route Permission" not in tables: frappe.rename_doc("DocType", "Website Sitemap Permission", "Website Route Permission", force=True) for d in ("Blog Category", "Blog Post", "Web Page", "Website Route", "Website Group"): frappe.reload_doc("website", "doctype", frappe.scrub(d)) rename_field_if_exists(d, "parent_website_sitemap", "parent_website_route") #frappe.reload_doc("website", "doctype", "website_template") frappe.reload_doc("website", "doctype", "website_route") frappe.reload_doc("website", "doctype", "website_route_permission") #rename_field_if_exists("Website Route", "website_sitemap_config", "website_template") rename_field_if_exists("Website Route Permission", "website_sitemap", "website_route") for d in ("blog_category", "blog_post", "web_page", "website_route", "website_group", "post", "user_vote"): frappe.reload_doc("website", "doctype", d) def rename_field_if_exists(doctype, old_fieldname, new_fieldname): try: rename_field(doctype, old_fieldname, new_fieldname) except Exception, e: if e.args[0] != 1054: raise
mit
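rename_field_if_exists makes the patch idempotent: MySQL raises error code 1054 ("Unknown column") once the old field has already been renamed, and the patch swallows exactly that code while re-raising everything else. The guard in isolation, in Python 3 syntax with a stand-in driver exception:

class OperationalError(Exception):
    """Stand-in for the DB driver's error; args[0] is the MySQL error code."""

def rename_column(table, old, new):
    # Simulate a second run: the old column is already gone.
    raise OperationalError(1054, "Unknown column '%s' in '%s'" % (old, table))

def rename_column_if_exists(table, old, new):
    try:
        rename_column(table, old, new)
    except OperationalError as e:
        if e.args[0] != 1054:  # swallow only "column already renamed"
            raise

rename_column_if_exists("tabWebsite Route Permission", "website_sitemap", "website_route")
print("patch is safe to re-run")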
YangSongzhou/django
django/conf/locale/sr_Latn/formats.py
1008
2011
# -*- encoding: utf-8 -*- # This file is distributed under the same license as the Django package. # from __future__ import unicode_literals # The *_FORMAT strings use the Django date format syntax, # see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date DATE_FORMAT = 'j. F Y.' TIME_FORMAT = 'H:i' DATETIME_FORMAT = 'j. F Y. H:i' YEAR_MONTH_FORMAT = 'F Y.' MONTH_DAY_FORMAT = 'j. F' SHORT_DATE_FORMAT = 'j.m.Y.' SHORT_DATETIME_FORMAT = 'j.m.Y. H:i' FIRST_DAY_OF_WEEK = 1 # The *_INPUT_FORMATS strings use the Python strftime format syntax, # see http://docs.python.org/library/datetime.html#strftime-strptime-behavior DATE_INPUT_FORMATS = [ '%d.%m.%Y.', '%d.%m.%y.', # '25.10.2006.', '25.10.06.' '%d. %m. %Y.', '%d. %m. %y.', # '25. 10. 2006.', '25. 10. 06.' # '%d. %b %y.', '%d. %B %y.', # '25. Oct 06.', '25. October 06.' # '%d. %b \'%y.', '%d. %B \'%y.', # '25. Oct '06.', '25. October '06.' # '%d. %b %Y.', '%d. %B %Y.', # '25. Oct 2006.', '25. October 2006.' ] DATETIME_INPUT_FORMATS = [ '%d.%m.%Y. %H:%M:%S', # '25.10.2006. 14:30:59' '%d.%m.%Y. %H:%M:%S.%f', # '25.10.2006. 14:30:59.000200' '%d.%m.%Y. %H:%M', # '25.10.2006. 14:30' '%d.%m.%Y.', # '25.10.2006.' '%d.%m.%y. %H:%M:%S', # '25.10.06. 14:30:59' '%d.%m.%y. %H:%M:%S.%f', # '25.10.06. 14:30:59.000200' '%d.%m.%y. %H:%M', # '25.10.06. 14:30' '%d.%m.%y.', # '25.10.06.' '%d. %m. %Y. %H:%M:%S', # '25. 10. 2006. 14:30:59' '%d. %m. %Y. %H:%M:%S.%f', # '25. 10. 2006. 14:30:59.000200' '%d. %m. %Y. %H:%M', # '25. 10. 2006. 14:30' '%d. %m. %Y.', # '25. 10. 2006.' '%d. %m. %y. %H:%M:%S', # '25. 10. 06. 14:30:59' '%d. %m. %y. %H:%M:%S.%f', # '25. 10. 06. 14:30:59.000200' '%d. %m. %y. %H:%M', # '25. 10. 06. 14:30' '%d. %m. %y.', # '25. 10. 06.' ] DECIMAL_SEPARATOR = ',' THOUSAND_SEPARATOR = '.' NUMBER_GROUPING = 3
bsd-3-clause
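The *_INPUT_FORMATS entries are plain strftime patterns, so any of them can be exercised directly with datetime.strptime, trailing dots included. A quick standalone check of the sr_Latn date formats:

from datetime import datetime

DATE_INPUT_FORMATS = ['%d.%m.%Y.', '%d.%m.%y.', '%d. %m. %Y.', '%d. %m. %y.']

def parse_date(text):
    for fmt in DATE_INPUT_FORMATS:
        try:
            return datetime.strptime(text, fmt).date()
        except ValueError:
            continue  # try the next accepted format
    raise ValueError("no format matched %r" % text)

print(parse_date("25.10.2006."))  # 2006-10-25
print(parse_date("25. 10. 06."))  # 2006-10-25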
alanchavez88/theHarvester
discovery/dnssearch.py
1
7399
import IPy import dns import string import sys class DNSReverse(): def __init__(self, range, verbose=True): self.range = range self.iplist = '' self.results = [] self.verbose = verbose try: DNS.ParseResolvConf("/etc/resolv.conf") except: print("Error in DNS resolvers") sys.exit() def run(self, host): a = string.split(host, '.') a.reverse() b = string.join(a, '.') + '.in-addr.arpa' if self.verbose: ESC = chr(27) sys.stdout.write(ESC + '[2K' + ESC + '[G') sys.stdout.write("\r\t" + host) sys.stdout.flush() try: name = DNS.Base.DnsRequest(b, qtype='ptr').req().answers[0]['data'] return host + ":" + name except DNS.exception: pass def get_ip_list(self, ips): try: list = IPy.IP(ips) except DNS.exception: print("Error in IP format, check the input and try again. (Eg. 192.168.1.0/24)") sys.exit() name = [] for x in list: name.append(str(x)) return name def list(self): self.iplist = self.get_ip_list(self.range) return self.iplist def process(self): for x in self.iplist: host = self.run(x) if host is not None: self.results.append(host) return self.results class DNSForce(): def __init__(self, domain, dnsserver, verbose=False): self.domain = domain self.nameserver = dnsserver self.file = "dns-names.txt" self.subdo = False self.verbose = verbose try: f = open(self.file, "r") except DNS.exception: print("Error opening dns dictionary file") sys.exit() self.list = f.readlines() def getdns(self, domain): DNS.ParseResolvConf("/etc/resolv.conf") nameserver = DNS.defaults['server'][0] dom = domain if self.subdo: dom = domain.split(".") dom.pop(0) rootdom = ".".join(dom) else: rootdom = dom if not self.nameserver: r = DNS.Request(rootdom, qtype='SOA').req() primary, email, serial, refresh, retry, expire, minimum = r.answers[0]['data'] test = DNS.Request(rootdom, qtype='NS', server=primary, aa=1).req() if test.header['status'] != "NOERROR": print("Error") sys.exit() self.nameserver = test.answers[0]['data'] elif self.nameserver == "local": self.nameserver = nameserver return self.nameserver def run(self, host): if self.nameserver == "": self.nameserver = self.getdns(self.domain) hostname = str(host.split("\n")[0]) + "." 
+ str(self.domain) if self.verbose: esc = chr(27) sys.stdout.write(esc + '[2K' + esc + '[G') sys.stdout.write("\r" + hostname) sys.stdout.flush() try: test = DNS.Request(hostname, qtype='a', server=self.nameserver).req() hostip = test.answers[0]['data'] return hostip + ":" + hostname except DNS.exception: pass def process(self): results = [] for x in self.list: host = self.run(x) if host is not None: results.append(host) return results class DNSTLD(): def __init__(self, domain, dnsserver, verbose=False): self.domain = domain self.nameserver = dnsserver self.subdo = False self.verbose = verbose self.tlds = ["com", "org", "net", "edu", "mil", "gov", "uk", "af", "al", "dz", "as", "ad", "ao", "ai", "aq", "ag", "ar", "am", "aw", "ac", "au", "at", "az", "bs", "bh", "bd", "bb", "by", "be", "bz", "bj", "bm", "bt", "bo", "ba", "bw", "bv", "br", "io", "bn", "bg", "bf", "bi", "kh", "cm", "ca", "cv", "ky", "cf", "td", "cl", "cn", "cx", "cc", "co", "km", "cd", "cg", "ck", "cr", "ci", "hr", "cu", "cy", "cz", "dk", "dj", "dm", "do", "tp", "ec", "eg", "sv", "gq", "er", "ee", "et", "fk", "fo", "fj", "fi", "fr", "gf", "pf", "tf", "ga", "gm", "ge", "de", "gh", "gi", "gr", "gl", "gd", "gp", "gu", "gt", "gg", "gn", "gw", "gy", "ht", "hm", "va", "hn", "hk", "hu", "is", "in", "id", "ir", "iq", "ie", "im", "il", "it", "jm", "jp", "je", "jo", "kz", "ke", "ki", "kp", "kr", "kw", "kg", "la", "lv", "lb", "ls", "lr", "ly", "li", "lt", "lu", "mo", "mk", "mg", "mw", "my", "mv", "ml", "mt", "mh", "mq", "mr", "mu", "yt", "mx", "fm", "md", "mc", "mn", "ms", "ma", "mz", "mm", "na", "nr", "np", "nl", "an", "nc", "nz", "ni", "ne", "ng", "nu", "nf", "mp", "no", "om", "pk", "pw", "pa", "pg", "py", "pe", "ph", "pn", "pl", "pt", "pr", "qa", "re", "ro", "ru", "rw", "kn", "lc", "vc", "ws", "sm", "st", "sa", "sn", "sc", "sl", "sg", "sk", "si", "sb", "so", "za", "gz", "es", "lk", "sh", "pm", "sd", "sr", "sj", "sz", "se", "ch", "sy", "tw", "tj", "tz", "th", "tg", "tk", "to", "tt", "tn", "tr", "tm", "tc", "tv", "ug", "ua", "ae", "gb", "us", "um", "uy", "uz", "vu", "ve", "vn", "vg", "vi", "wf", "eh", "ye", "yu", "za", "zr", "zm", "zw", "int", "gs", "info", "biz", "su", "name", "coop", "aero"] def getdns(self, domain): DNS.ParseResolvConf("/etc/resolv.conf") nameserver = DNS.defaults['server'][0] dom = domain if self.subdo: dom = domain.split(".") dom.pop(0) rootdom = ".".join(dom) else: rootdom = dom if not self.nameserver: r = DNS.Request(rootdom, qtype='SOA').req() primary, email, serial, refresh, retry, expire, minimum = r.answers[0]['data'] test = DNS.Request(rootdom, qtype='NS', server=primary, aa=1).req() if test.header['status'] != "NOERROR": print("Error") sys.exit() self.nameserver = test.answers[0]['data'] elif self.nameserver == "local": self.nameserver = nameserver return self.nameserver def run(self, tld): self.nameserver = self.getdns(self.domain) hostname = self.domain.split(".")[0] + "." + tld if self.verbose: esc = chr(27) sys.stdout.write(esc + '[2K' + esc + '[G') sys.stdout.write("\r\tSearching for: " + hostname) sys.stdout.flush() try: test = DNS.Request(hostname, qtype='a', server=self.nameserver).req() hostip = test.answers[0]['data'] return hostip + ":" + hostname except DNS.exception: pass def process(self): results = [] for x in self.tlds: host = self.run(x) if host is not None: results.append(host) return results
gpl-2.0
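DNSReverse.run builds the PTR query name by reversing the address's octets and appending in-addr.arpa, then asks PyDNS for the record. The standard library can do the equivalent lookup without that dependency; a sketch (needs network access, and the answer depends on the PTR records the address owner publishes):

import socket

def ptr_name(ip):
    """The PTR query name: octets reversed, then the in-addr.arpa suffix."""
    return '.'.join(reversed(ip.split('.'))) + '.in-addr.arpa'

def reverse_lookup(ip):
    try:
        hostname, _, _ = socket.gethostbyaddr(ip)  # resolver issues the PTR query
        return "%s:%s" % (ip, hostname)
    except socket.herror:
        return None  # no PTR record published for this address

print(ptr_name("192.0.2.1"))      # 1.2.0.192.in-addr.arpa
print(reverse_lookup("8.8.8.8"))  # e.g. 8.8.8.8:dns.google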
fperez/sympy
sympy/functions/elementary/integers.py
1
5025
from sympy.core.basic import Basic, S, C, sympify from sympy.core.function import Lambda, Function from sympy.core.evalf import get_integer_part, PrecisionExhausted from sympy.utilities.decorator import deprecated ############################################################################### ######################### FLOOR and CEILING FUNCTIONS ######################### ############################################################################### class RoundFunction(Function): nargs = 1 @classmethod def eval(cls, arg): if arg.is_integer: return arg if arg.is_imaginary: return cls(C.im(arg))*S.ImaginaryUnit v = cls._eval_number(arg) if v is not None: return v # Integral, numerical, symbolic part ipart = npart = spart = S.Zero # Extract integral (or complex integral) terms if arg.is_Add: terms = arg.args else: terms = [arg] for t in terms: if t.is_integer or (t.is_imaginary and C.im(t).is_integer): ipart += t elif t.atoms(C.Symbol): spart += t else: npart += t if not (npart or spart): return ipart # Evaluate npart numerically if independent of spart orthogonal = (npart.is_real and spart.is_imaginary) or \ (npart.is_imaginary and spart.is_real) if npart and ((not spart) or orthogonal): try: re, im = get_integer_part(npart, cls._dir, {}, return_ints=True) ipart += C.Integer(re) + C.Integer(im)*S.ImaginaryUnit npart = S.Zero except (PrecisionExhausted, NotImplementedError): pass spart = npart + spart if not spart: return ipart elif spart.is_imaginary: return ipart + cls(C.im(spart),evaluate=False)*S.ImaginaryUnit else: return ipart + cls(spart, evaluate=False) def _eval_is_bounded(self): return self.args[0].is_bounded def _eval_is_real(self): return self.args[0].is_real def _eval_is_integer(self): return self.args[0].is_real class floor(RoundFunction): """ Floor is a univariate function which returns the largest integer value not greater than its argument. However this implementation generalizes floor to complex numbers. More information can be found in "Concrete mathematics" by Graham, pp. 87 or visit http://mathworld.wolfram.com/FloorFunction.html. >>> from sympy import floor, E, I, Real, Rational >>> floor(17) 17 >>> floor(Rational(23, 10)) 2 >>> floor(2*E) 5 >>> floor(-Real(0.567)) -1 >>> floor(-I/2) -I """ _dir = -1 @classmethod def _eval_number(cls, arg): if arg.is_Number: if arg.is_Rational: if not arg.q: return arg return C.Integer(arg.p // arg.q) elif arg.is_Real: return C.Integer(int(arg.floor())) if arg.is_NumberSymbol: return arg.approximation_interval(C.Integer)[0] def _eval_nseries(self, x, x0, n): r = self.subs(x, x0) args = self.args[0] if args.subs(x, x0) == r: direction = (args.subs(x, x+x0) - args.subs(x, x0)).leadterm(x)[0] if direction.is_positive: return r else: return r-1 else: return r class ceiling(RoundFunction): """ Ceiling is a univariate function which returns the smallest integer value not less than its argument. Ceiling function is generalized in this implementation to complex numbers. More information can be found in "Concrete mathematics" by Graham, pp. 87 or visit http://mathworld.wolfram.com/CeilingFunction.html. 
>>> from sympy import ceiling, E, I, Real, Rational >>> ceiling(17) 17 >>> ceiling(Rational(23, 10)) 3 >>> ceiling(2*E) 6 >>> ceiling(-Real(0.567)) 0 >>> ceiling(I/2) I """ _dir = 1 @classmethod def _eval_number(cls, arg): if arg.is_Number: if arg.is_Rational: if not arg.q: return arg return -C.Integer(-arg.p // arg.q) elif arg.is_Real: return C.Integer(int(arg.ceiling())) if arg.is_NumberSymbol: return arg.approximation_interval(C.Integer)[1] def _eval_nseries(self, x, x0, n): r = self.subs(x, x0) args = self.args[0] if args.subs(x,x0) == r: direction = (args.subs(x, x+x0) - args.subs(x, x0)).leadterm(x)[0] if direction.is_positive: return r+1 else: return r else: return r
bsd-3-clause
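Both round functions split their argument into integer, numeric, and symbolic parts, so they stay exact on rationals, imaginary values, and symbols alike. A few calls against SymPy (note that modern releases spell the float class Float where this vintage used Real):

from sympy import I, Rational, Symbol, ceiling, floor

print(floor(Rational(23, 10)))    # 2
print(ceiling(Rational(23, 10)))  # 3
print(floor(-I/2))                # -I, floor applied to the imaginary part

n = Symbol('n', integer=True)
print(floor(n + Rational(1, 2)))  # n: the integer term is pulled out exactly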
antaril/AGK
scripts/rt-tester/rt-tester.py
11005
5307
#!/usr/bin/python # # rt-mutex tester # # (C) 2006 Thomas Gleixner <[email protected]> # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License version 2 as # published by the Free Software Foundation. # import os import sys import getopt import shutil import string # Globals quiet = 0 test = 0 comments = 0 sysfsprefix = "/sys/devices/system/rttest/rttest" statusfile = "/status" commandfile = "/command" # Command opcodes cmd_opcodes = { "schedother" : "1", "schedfifo" : "2", "lock" : "3", "locknowait" : "4", "lockint" : "5", "lockintnowait" : "6", "lockcont" : "7", "unlock" : "8", "signal" : "11", "resetevent" : "98", "reset" : "99", } test_opcodes = { "prioeq" : ["P" , "eq" , None], "priolt" : ["P" , "lt" , None], "priogt" : ["P" , "gt" , None], "nprioeq" : ["N" , "eq" , None], "npriolt" : ["N" , "lt" , None], "npriogt" : ["N" , "gt" , None], "unlocked" : ["M" , "eq" , 0], "trylock" : ["M" , "eq" , 1], "blocked" : ["M" , "eq" , 2], "blockedwake" : ["M" , "eq" , 3], "locked" : ["M" , "eq" , 4], "opcodeeq" : ["O" , "eq" , None], "opcodelt" : ["O" , "lt" , None], "opcodegt" : ["O" , "gt" , None], "eventeq" : ["E" , "eq" , None], "eventlt" : ["E" , "lt" , None], "eventgt" : ["E" , "gt" , None], } # Print usage information def usage(): print "rt-tester.py <-c -h -q -t> <testfile>" print " -c display comments after first command" print " -h help" print " -q quiet mode" print " -t test mode (syntax check)" print " testfile: read test specification from testfile" print " otherwise from stdin" return # Print progress when not in quiet mode def progress(str): if not quiet: print str # Analyse a status value def analyse(val, top, arg): intval = int(val) if top[0] == "M": intval = intval / (10 ** int(arg)) intval = intval % 10 argval = top[2] elif top[0] == "O": argval = int(cmd_opcodes.get(arg, arg)) else: argval = int(arg) # progress("%d %s %d" %(intval, top[1], argval)) if top[1] == "eq" and intval == argval: return 1 if top[1] == "lt" and intval < argval: return 1 if top[1] == "gt" and intval > argval: return 1 return 0 # Parse the commandline try: (options, arguments) = getopt.getopt(sys.argv[1:],'chqt') except getopt.GetoptError, ex: usage() sys.exit(1) # Parse commandline options for option, value in options: if option == "-c": comments = 1 elif option == "-q": quiet = 1 elif option == "-t": test = 1 elif option == '-h': usage() sys.exit(0) # Select the input source if arguments: try: fd = open(arguments[0]) except Exception,ex: sys.stderr.write("File not found %s\n" %(arguments[0])) sys.exit(1) else: fd = sys.stdin linenr = 0 # Read the test patterns while 1: linenr = linenr + 1 line = fd.readline() if not len(line): break line = line.strip() parts = line.split(":") if not parts or len(parts) < 1: continue if len(parts[0]) == 0: continue if parts[0].startswith("#"): if comments > 1: progress(line) continue if comments == 1: comments = 2 progress(line) cmd = parts[0].strip().lower() opc = parts[1].strip().lower() tid = parts[2].strip() dat = parts[3].strip() try: # Test or wait for a status value if cmd == "t" or cmd == "w": testop = test_opcodes[opc] fname = "%s%s%s" %(sysfsprefix, tid, statusfile) if test: print fname continue while 1: query = 1 fsta = open(fname, 'r') status = fsta.readline().strip() fsta.close() stat = status.split(",") for s in stat: s = s.strip() if s.startswith(testop[0]): # Separate status value val = s[2:].strip() query = analyse(val, testop, dat) break if query or cmd == "t": break progress(" " + 
status) if not query: sys.stderr.write("Test failed in line %d\n" %(linenr)) sys.exit(1) # Issue a command to the tester elif cmd == "c": cmdnr = cmd_opcodes[opc] # Build command string and sys filename cmdstr = "%s:%s" %(cmdnr, dat) fname = "%s%s%s" %(sysfsprefix, tid, commandfile) if test: print fname continue fcmd = open(fname, 'w') fcmd.write(cmdstr) fcmd.close() except Exception,ex: sys.stderr.write(str(ex)) sys.stderr.write("\nSyntax error in line %d\n" %(linenr)) if not test: fd.close() sys.exit(1) # Normal exit pass print "Pass" sys.exit(0)
gpl-2.0
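For "M" (mutex) status checks, analyse treats the status value as a base-10 packed vector and extracts one digit: integer-divide by 10**arg, then take the remainder mod 10. The digit trick on its own, in Python 3:

def digit_at(packed, position):
    """Decimal digit of `packed` at `position` (0 = least significant)."""
    return (packed // 10 ** position) % 10

# Suppose status 4230 packs one state digit per lock.
status = 4230
print(digit_at(status, 0))  # 0
print(digit_at(status, 2))  # 2  -> e.g. "blocked" in the opcode table above
print(digit_at(status, 3))  # 4  -> e.g. "locked"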
itbabu/django-oscar
src/oscar/apps/dashboard/nav.py
23
3617
import re from django.core.exceptions import ImproperlyConfigured from django.core.urlresolvers import NoReverseMatch, resolve, reverse from django.http import Http404 from oscar.core.loading import AppNotFoundError, get_class from oscar.views.decorators import check_permissions class Node(object): """ A node in the dashboard navigation menu """ def __init__(self, label, url_name=None, url_args=None, url_kwargs=None, access_fn=None, icon=None): self.label = label self.icon = icon self.url_name = url_name self.url_args = url_args self.url_kwargs = url_kwargs self.access_fn = access_fn self.children = [] @property def is_heading(self): return self.url_name is None @property def url(self): return reverse(self.url_name, args=self.url_args, kwargs=self.url_kwargs) def add_child(self, node): self.children.append(node) def is_visible(self, user): return self.access_fn is None or self.access_fn( user, self.url_name, self.url_args, self.url_kwargs) def filter(self, user): if not self.is_visible(user): return None node = Node( label=self.label, url_name=self.url_name, url_args=self.url_args, url_kwargs=self.url_kwargs, access_fn=self.access_fn, icon=self.icon ) for child in self.children: if child.is_visible(user): node.add_child(child) return node def has_children(self): return len(self.children) > 0 def default_access_fn(user, url_name, url_args=None, url_kwargs=None): """ Given a url_name and a user, this function tries to assess whether the user has the right to access the URL. The application instance of the view is fetched via dynamic imports, and those assumptions will only hold true if the standard Oscar layout is followed. Once the permissions for the view are known, the access logic used by the dashboard decorator is evaluated This function might seem costly, but a simple comparison with DTT did not show any change in response time """ exception = ImproperlyConfigured( "Please follow Oscar's default dashboard app layout or set a " "custom access_fn") if url_name is None: # it's a heading return True # get view module string try: url = reverse(url_name, args=url_args, kwargs=url_kwargs) view_module = resolve(url).func.__module__ except (NoReverseMatch, Http404): # if there's no match, no need to display it return False # We can't assume that the view has the same parent module as the app, # as either the app or view can be customised. So we turn the module # string (e.g. 'oscar.apps.dashboard.catalogue.views') into an app # label that can be loaded by get_class (e.g. # 'dashboard.catalogue.app), which then essentially checks # INSTALLED_APPS for the right module to load match = re.search('(dashboard[\w\.]*)\.views$', view_module) if not match: raise exception app_label_str = match.groups()[0] + '.app' try: app_instance = get_class(app_label_str, 'application') except AppNotFoundError: raise exception # handle name-spaced view names if ':' in url_name: view_name = url_name.split(':')[1] else: view_name = url_name permissions = app_instance.get_permissions(view_name) return check_permissions(user, permissions)
bsd-3-clause
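default_access_fn maps a resolved view module such as oscar.apps.dashboard.catalogue.views onto the loadable label dashboard.catalogue.app with a single regex, which is what lets a customized app shadow the stock one. The mapping step in isolation:

import re

def app_label_for_view_module(view_module):
    """Map '...dashboard.<app>.views' to the '<...>.app' label, or None."""
    match = re.search(r'(dashboard[\w.]*)\.views$', view_module)
    if match:
        return match.groups()[0] + '.app'
    return None

print(app_label_for_view_module('oscar.apps.dashboard.catalogue.views'))
# dashboard.catalogue.app
print(app_label_for_view_module('myproject.frontend.views'))  # None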
bodi000/odoo
addons/l10n_at/account_wizard.py
379
1234
# -*- encoding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) conexus.at # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## from openerp import tools from openerp.osv import osv from openerp import addons class AccountWizard_cd(osv.osv_memory): _inherit='wizard.multi.charts.accounts' _defaults = { 'code_digits' : 0, } # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
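_defaults entries may be plain values, as above, or callables evaluated per record with the legacy cursor-based signature. A hedged sketch in the same old OpenERP API (the class name and computed default are hypothetical):

from openerp.osv import osv

class AccountWizardExample(osv.osv_memory):
    """Hypothetical further extension of the chart-of-accounts wizard."""
    _inherit = 'wizard.multi.charts.accounts'
    _defaults = {
        # A callable default uses the (self, cr, uid, context) signature.
        'code_digits': lambda self, cr, uid, context=None: 6,
    }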
penuel-leo/justniffer
python/webserver.py
2
5432
#!/usr/bin/python # -*- coding: utf-8 -*- # Copyright (c) 2009 Plecno s.r.l. All Rights Reserved # [email protected] # via Giovio 8, 20144 Milano, Italy # Released under the terms of the GPLv3 or later # Author: Oreste Notelli <[email protected]> import SimpleHTTPServer import BaseHTTPServer import urlparse import urllib import os import posixpath import string from StringIO import StringIO import cgi import mimetypes # minimal web server. serves files relative to the # current directory. def is_of_main_content_type(path, content_type): try: return mimetypes.guess_type(path)[0].split("/")[0].startswith(content_type) except: return False def is_image(path): return is_of_main_content_type(path, "image") def is_text(path): return is_of_main_content_type(path, "text") docuemnt_root = "/tmp/pippo/" class dir_link: def __init__(self, path, name): self.path = path self.name = name def get_html(self): displayname = self.name + "/" linkname = self.name + "/" linkname = urllib.quote(linkname) return '<li><a href="%s">%s</a>\n' % (linkname, cgi.escape(displayname)) class link_link: def __init__(self, path, name): self.path = path self.name = name def get_html(self): displayname = self.name + "@" linkname = self.name + "/" linkname = urllib.quote(linkname) return '<li><a href="%s">%s</a>\n' % (linkname, cgi.escape(displayname)) class host_link: def __init__(self, path, name): self.path = path self.name = name def get_html(self): linkname="http://"+self.name displayname = linkname return '<li><a href="%s">%s</a>\n' % (linkname, cgi.escape(displayname)) class None_link: def __init__(self, path, name): self.path = path self.name = name def get_html(self): displayname = self.name linkname = self.name linkname = urllib.quote(linkname) return '<li><a href="%s">%s</a>\n' % (linkname, cgi.escape(displayname)) class img_link: def __init__(self, path, name): self.path = path self.name = name def get_html(self): linkname = self.name linkname = urllib.quote(linkname) return '<li><img src="%s"></img>\n' % (linkname) class text_link: def __init__(self, path, name): self.path = path self.name = name def get_html(self): linkname = self.name linkname = urllib.quote(linkname) return '<li><iframe src="%s"></iframe>\n' % (linkname) class MyHTTPRequestHandler (SimpleHTTPServer.SimpleHTTPRequestHandler): def __init__(self, *args, **kargs): return SimpleHTTPServer.SimpleHTTPRequestHandler.__init__(self, *args, **kargs) def translate_path(self, path): """Translate a /-separated PATH to the local filename syntax. Components that mean special things to the local file system (e.g. drive or directory names) are ignored. (XXX They should probably be diagnosed.) """ # abandon query parameters parsed = urlparse.urlparse(path) host = parsed[1] if (host.startswith("localhost")): host ="" path = parsed[2] path = posixpath.normpath(urllib.unquote(path)) words = path.split('/') words = filter(None, words) path = string.rstrip(self.get_root()+host, "/") for word in words: drive, word = os.path.splitdrive(word) head, word = os.path.split(word) if word in (os.curdir, os.pardir): continue path = os.path.join(path, word) print "path=%s"%(path) return path def list_directory(self, path): """Helper to produce a directory listing (absent index.html). Return value is either a file object, or None (indicating an error). In either case, the headers are sent, making the interface the same as for send_head(). 
""" try: list = os.listdir(path) except os.error: self.send_error(404, "No permission to list directory") return None list.sort(key=lambda a: a.lower()) f = StringIO() displaypath = cgi.escape(urllib.unquote(self.path)) f.write("<title>Directory listing for %s</title>\n" % displaypath) f.write("<h2>Directory listing for %s</h2>\n" % displaypath) f.write("<hr>\n<ul>\n") for name in list: fullname = os.path.join(path, name) displayname = linkname = name l = None_link(path, name) # Append / for directories or @ for symbolic links if os.path.isdir(fullname): l = dir_link(path, name) if os.path.islink(fullname): l = link_link(path, name) if (os.path.isfile(fullname)): if (is_image(fullname)): l = img_link(path, name) elif (is_text(fullname)): l = text_link(path, name) # Note: a link to a directory displays with @ and links with / if (path.rstrip("/") == docuemnt_root.rstrip("/")): l = host_link(path, name) f.write(l.get_html()) f.write("</ul>\n<hr>\n") length = f.tell() f.seek(0) self.send_response(200) self.send_header("Content-type", "text/html") self.send_header("Content-Length", str(length)) self.end_headers() return f def get_root(self): #return self.root return docuemnt_root def MyFactory(root): def _Factory(*args, **kargs): h = MyHTTPRequestHandler(*args, **kargs) setattr(h, "root", root) return h return _Factory PORT = 8000 Handler = MyHTTPRequestHandler httpd = BaseHTTPServer.HTTPServer(("", PORT), MyFactory(docuemnt_root)) print "serving at port", PORT httpd.serve_forever()
gpl-3.0
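is_of_main_content_type keys off the major half of the guessed MIME type, which is what decides whether a directory entry renders as an <img>, an <iframe>, or a bare link. The check itself is two standard-library calls:

import mimetypes

def main_content_type(path):
    """Return 'image', 'text', ... or None when no type can be guessed."""
    guessed, _ = mimetypes.guess_type(path)
    return guessed.split('/')[0] if guessed else None

print(main_content_type("photo.jpg"))    # image
print(main_content_type("notes.txt"))    # text
print(main_content_type("archive.xyz"))  # None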
chen2aaron/SnirteneCodes
PythonCookbookPractise/chapter8/create_managed_attributes.py
1
1177
# 8.6. Creating Managed Attributes class Person: def __init__(self, first_name): self.first_name = first_name # Getter function @property def first_name(self): return self._first_name # Setter function @first_name.setter def first_name(self, value): if not isinstance(value, str): raise TypeError('Expected a string') self._first_name = value # Deleter function (optional) @first_name.deleter def first_name(self): raise AttributeError("Can't delete attribute") class Person2: def __init__(self, first_name): self.set_first_name(first_name) # Getter function def get_first_name(self): return self._first_name # Setter function def set_first_name(self, value): if not isinstance(value, str): raise TypeError('Expected a string') self._first_name = value # Deleter function (optional) def del_first_name(self): raise AttributeError("Can't delete attribute") # Make a property from existing get/set methods name = property(get_first_name, set_first_name, del_first_name) p = Person('Ver')
gpl-2.0
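Because __init__ assigns through self.first_name, the property version validates even the constructor argument; the property() call at the end of Person2 achieves the same with explicit getter/setter/deleter functions. A short check reusing the Person class defined above:

p = Person('Guido')       # the __init__ assignment runs the setter
print(p.first_name)       # Guido

try:
    p.first_name = 42     # the setter rejects non-strings
except TypeError as e:
    print(e)              # Expected a string

try:
    del p.first_name      # the deleter refuses outright
except AttributeError as e:
    print(e)              # Can't delete attribute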
caot/intellij-community
python/lib/Lib/site-packages/django/contrib/gis/gdal/srs.py
291
11717
""" The Spatial Reference class, represensents OGR Spatial Reference objects. Example: >>> from django.contrib.gis.gdal import SpatialReference >>> srs = SpatialReference('WGS84') >>> print srs GEOGCS["WGS 84", DATUM["WGS_1984", SPHEROID["WGS 84",6378137,298.257223563, AUTHORITY["EPSG","7030"]], TOWGS84[0,0,0,0,0,0,0], AUTHORITY["EPSG","6326"]], PRIMEM["Greenwich",0, AUTHORITY["EPSG","8901"]], UNIT["degree",0.01745329251994328, AUTHORITY["EPSG","9122"]], AUTHORITY["EPSG","4326"]] >>> print srs.proj +proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs >>> print srs.ellipsoid (6378137.0, 6356752.3142451793, 298.25722356300003) >>> print srs.projected, srs.geographic False True >>> srs.import_epsg(32140) >>> print srs.name NAD83 / Texas South Central """ import re from ctypes import byref, c_char_p, c_int, c_void_p # Getting the error checking routine and exceptions from django.contrib.gis.gdal.base import GDALBase from django.contrib.gis.gdal.error import OGRException, SRSException from django.contrib.gis.gdal.prototypes import srs as capi #### Spatial Reference class. #### class SpatialReference(GDALBase): """ A wrapper for the OGRSpatialReference object. According to the GDAL Web site, the SpatialReference object "provide[s] services to represent coordinate systems (projections and datums) and to transform between them." """ #### Python 'magic' routines #### def __init__(self, srs_input=''): """ Creates a GDAL OSR Spatial Reference object from the given input. The input may be string of OGC Well Known Text (WKT), an integer EPSG code, a PROJ.4 string, and/or a projection "well known" shorthand string (one of 'WGS84', 'WGS72', 'NAD27', 'NAD83'). """ buf = c_char_p('') srs_type = 'user' if isinstance(srs_input, basestring): # Encoding to ASCII if unicode passed in. if isinstance(srs_input, unicode): srs_input = srs_input.encode('ascii') try: # If SRID is a string, e.g., '4326', then make acceptable # as user input. srid = int(srs_input) srs_input = 'EPSG:%d' % srid except ValueError: pass elif isinstance(srs_input, (int, long)): # EPSG integer code was input. srs_type = 'epsg' elif isinstance(srs_input, self.ptr_type): srs = srs_input srs_type = 'ogr' else: raise TypeError('Invalid SRS type "%s"' % srs_type) if srs_type == 'ogr': # Input is already an SRS pointer. srs = srs_input else: # Creating a new SRS pointer, using the string buffer. srs = capi.new_srs(buf) # If the pointer is NULL, throw an exception. if not srs: raise SRSException('Could not create spatial reference from: %s' % srs_input) else: self.ptr = srs # Importing from either the user input string or an integer SRID. if srs_type == 'user': self.import_user_input(srs_input) elif srs_type == 'epsg': self.import_epsg(srs_input) def __del__(self): "Destroys this spatial reference." if self._ptr: capi.release_srs(self._ptr) def __getitem__(self, target): """ Returns the value of the given string attribute node, None if the node doesn't exist. Can also take a tuple as a parameter, (target, child), where child is the index of the attribute in the WKT. For example: >>> wkt = 'GEOGCS["WGS 84", DATUM["WGS_1984, ... AUTHORITY["EPSG","4326"]]') >>> srs = SpatialReference(wkt) # could also use 'WGS84', or 4326 >>> print srs['GEOGCS'] WGS 84 >>> print srs['DATUM'] WGS_1984 >>> print srs['AUTHORITY'] EPSG >>> print srs['AUTHORITY', 1] # The authority value 4326 >>> print srs['TOWGS84', 4] # the fourth value in this wkt 0 >>> print srs['UNIT|AUTHORITY'] # For the units authority, have to use the pipe symbole. 
EPSG >>> print srs['UNIT|AUTHORITY', 1] # The authority value for the untis 9122 """ if isinstance(target, tuple): return self.attr_value(*target) else: return self.attr_value(target) def __str__(self): "The string representation uses 'pretty' WKT." return self.pretty_wkt #### SpatialReference Methods #### def attr_value(self, target, index=0): """ The attribute value for the given target node (e.g. 'PROJCS'). The index keyword specifies an index of the child node to return. """ if not isinstance(target, basestring) or not isinstance(index, int): raise TypeError return capi.get_attr_value(self.ptr, target, index) def auth_name(self, target): "Returns the authority name for the given string target node." return capi.get_auth_name(self.ptr, target) def auth_code(self, target): "Returns the authority code for the given string target node." return capi.get_auth_code(self.ptr, target) def clone(self): "Returns a clone of this SpatialReference object." return SpatialReference(capi.clone_srs(self.ptr)) def from_esri(self): "Morphs this SpatialReference from ESRI's format to EPSG." capi.morph_from_esri(self.ptr) def identify_epsg(self): """ This method inspects the WKT of this SpatialReference, and will add EPSG authority nodes where an EPSG identifier is applicable. """ capi.identify_epsg(self.ptr) def to_esri(self): "Morphs this SpatialReference to ESRI's format." capi.morph_to_esri(self.ptr) def validate(self): "Checks to see if the given spatial reference is valid." capi.srs_validate(self.ptr) #### Name & SRID properties #### @property def name(self): "Returns the name of this Spatial Reference." if self.projected: return self.attr_value('PROJCS') elif self.geographic: return self.attr_value('GEOGCS') elif self.local: return self.attr_value('LOCAL_CS') else: return None @property def srid(self): "Returns the SRID of top-level authority, or None if undefined." try: return int(self.attr_value('AUTHORITY', 1)) except (TypeError, ValueError): return None #### Unit Properties #### @property def linear_name(self): "Returns the name of the linear units." units, name = capi.linear_units(self.ptr, byref(c_char_p())) return name @property def linear_units(self): "Returns the value of the linear units." units, name = capi.linear_units(self.ptr, byref(c_char_p())) return units @property def angular_name(self): "Returns the name of the angular units." units, name = capi.angular_units(self.ptr, byref(c_char_p())) return name @property def angular_units(self): "Returns the value of the angular units." units, name = capi.angular_units(self.ptr, byref(c_char_p())) return units @property def units(self): """ Returns a 2-tuple of the units value and the units name, and will automatically determines whether to return the linear or angular units. """ if self.projected or self.local: return capi.linear_units(self.ptr, byref(c_char_p())) elif self.geographic: return capi.angular_units(self.ptr, byref(c_char_p())) else: return (None, None) #### Spheroid/Ellipsoid Properties #### @property def ellipsoid(self): """ Returns a tuple of the ellipsoid parameters: (semimajor axis, semiminor axis, and inverse flattening) """ return (self.semi_major, self.semi_minor, self.inverse_flattening) @property def semi_major(self): "Returns the Semi Major Axis for this Spatial Reference." return capi.semi_major(self.ptr, byref(c_int())) @property def semi_minor(self): "Returns the Semi Minor Axis for this Spatial Reference." 
return capi.semi_minor(self.ptr, byref(c_int())) @property def inverse_flattening(self): "Returns the Inverse Flattening for this Spatial Reference." return capi.invflattening(self.ptr, byref(c_int())) #### Boolean Properties #### @property def geographic(self): """ Returns True if this SpatialReference is geographic (root node is GEOGCS). """ return bool(capi.isgeographic(self.ptr)) @property def local(self): "Returns True if this SpatialReference is local (root node is LOCAL_CS)." return bool(capi.islocal(self.ptr)) @property def projected(self): """ Returns True if this SpatialReference is a projected coordinate system (root node is PROJCS). """ return bool(capi.isprojected(self.ptr)) #### Import Routines ##### def import_epsg(self, epsg): "Imports the Spatial Reference from the EPSG code (an integer)." capi.from_epsg(self.ptr, epsg) def import_proj(self, proj): "Imports the Spatial Reference from a PROJ.4 string." capi.from_proj(self.ptr, proj) def import_user_input(self, user_input): "Imports the Spatial Reference from the given user input string." capi.from_user_input(self.ptr, user_input) def import_wkt(self, wkt): "Imports the Spatial Reference from OGC WKT (string)" capi.from_wkt(self.ptr, byref(c_char_p(wkt))) def import_xml(self, xml): "Imports the Spatial Reference from an XML string." capi.from_xml(self.ptr, xml) #### Export Properties #### @property def wkt(self): "Returns the WKT representation of this Spatial Reference." return capi.to_wkt(self.ptr, byref(c_char_p())) @property def pretty_wkt(self, simplify=0): "Returns the 'pretty' representation of the WKT." return capi.to_pretty_wkt(self.ptr, byref(c_char_p()), simplify) @property def proj(self): "Returns the PROJ.4 representation for this Spatial Reference." return capi.to_proj(self.ptr, byref(c_char_p())) @property def proj4(self): "Alias for proj()." return self.proj @property def xml(self, dialect=''): "Returns the XML representation of this Spatial Reference." return capi.to_xml(self.ptr, byref(c_char_p()), dialect) class CoordTransform(GDALBase): "The coordinate system transformation object." def __init__(self, source, target): "Initializes on a source and target SpatialReference objects." if not isinstance(source, SpatialReference) or not isinstance(target, SpatialReference): raise TypeError('source and target must be of type SpatialReference') self.ptr = capi.new_ct(source._ptr, target._ptr) self._srs1_name = source.name self._srs2_name = target.name def __del__(self): "Deletes this Coordinate Transformation object." if self._ptr: capi.destroy_ct(self._ptr) def __str__(self): return 'Transform from "%s" to "%s"' % (self._srs1_name, self._srs2_name)
apache-2.0
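The doctest above covers construction only; below is a minimal sketch of pairing two SpatialReference objects through the CoordTransform class defined at the end of the file. It assumes a GeoDjango environment with GDAL installed; the EPSG codes are arbitrary examples.

# Minimal sketch: build two spatial references and a transform between them.
# Assumes GeoDjango's GDAL bindings are importable; EPSG codes are arbitrary.
from django.contrib.gis.gdal import SpatialReference, CoordTransform

wgs84 = SpatialReference(4326)          # integer input takes the EPSG path
texas = SpatialReference('EPSG:32140')  # string input goes via import_user_input
ct = CoordTransform(wgs84, texas)
print(ct)          # Transform from "WGS 84" to "NAD83 / Texas South Central"
print(wgs84.srid)  # 4326, read back from the AUTHORITY node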
chaffra/sympy
sympy/functions/special/tests/test_bessel.py
36
22192
from itertools import product from sympy import (jn, yn, symbols, Symbol, sin, cos, pi, S, jn_zeros, besselj, bessely, besseli, besselk, hankel1, hankel2, hn1, hn2, expand_func, sqrt, sinh, cosh, diff, series, gamma, hyper, Abs, I, O, oo, conjugate) from sympy.functions.special.bessel import fn from sympy.functions.special.bessel import (airyai, airybi, airyaiprime, airybiprime) from sympy.utilities.randtest import (random_complex_number as randcplx, verify_numerically as tn, test_derivative_numerically as td, _randint) from sympy.utilities.pytest import raises from sympy.abc import z, n, k, x randint = _randint() def test_bessel_rand(): for f in [besselj, bessely, besseli, besselk, hankel1, hankel2]: assert td(f(randcplx(), z), z) for f in [jn, yn, hn1, hn2]: assert td(f(randint(-10, 10), z), z) def test_bessel_twoinputs(): for f in [besselj, bessely, besseli, besselk, hankel1, hankel2, jn, yn]: raises(TypeError, lambda: f(1)) raises(TypeError, lambda: f(1, 2, 3)) def test_diff(): assert besselj(n, z).diff(z) == besselj(n - 1, z)/2 - besselj(n + 1, z)/2 assert bessely(n, z).diff(z) == bessely(n - 1, z)/2 - bessely(n + 1, z)/2 assert besseli(n, z).diff(z) == besseli(n - 1, z)/2 + besseli(n + 1, z)/2 assert besselk(n, z).diff(z) == -besselk(n - 1, z)/2 - besselk(n + 1, z)/2 assert hankel1(n, z).diff(z) == hankel1(n - 1, z)/2 - hankel1(n + 1, z)/2 assert hankel2(n, z).diff(z) == hankel2(n - 1, z)/2 - hankel2(n + 1, z)/2 def test_rewrite(): from sympy import polar_lift, exp, I assert besselj(n, z).rewrite(jn) == sqrt(2*z/pi)*jn(n - S(1)/2, z) assert bessely(n, z).rewrite(yn) == sqrt(2*z/pi)*yn(n - S(1)/2, z) assert besseli(n, z).rewrite(besselj) == \ exp(-I*n*pi/2)*besselj(n, polar_lift(I)*z) assert besselj(n, z).rewrite(besseli) == \ exp(I*n*pi/2)*besseli(n, polar_lift(-I)*z) nu = randcplx() assert tn(besselj(nu, z), besselj(nu, z).rewrite(besseli), z) assert tn(besselj(nu, z), besselj(nu, z).rewrite(bessely), z) assert tn(besseli(nu, z), besseli(nu, z).rewrite(besselj), z) assert tn(besseli(nu, z), besseli(nu, z).rewrite(bessely), z) assert tn(bessely(nu, z), bessely(nu, z).rewrite(besselj), z) assert tn(bessely(nu, z), bessely(nu, z).rewrite(besseli), z) assert tn(besselk(nu, z), besselk(nu, z).rewrite(besselj), z) assert tn(besselk(nu, z), besselk(nu, z).rewrite(besseli), z) assert tn(besselk(nu, z), besselk(nu, z).rewrite(bessely), z) # check that a rewrite was triggered, when the order is set to a generic # symbol 'nu' assert yn(nu, z) != yn(nu, z).rewrite(jn) assert hn1(nu, z) != hn1(nu, z).rewrite(jn) assert hn2(nu, z) != hn2(nu, z).rewrite(jn) assert jn(nu, z) != jn(nu, z).rewrite(yn) assert hn1(nu, z) != hn1(nu, z).rewrite(yn) assert hn2(nu, z) != hn2(nu, z).rewrite(yn) # rewriting spherical bessel functions (SBFs) w.r.t. 
besselj, bessely is # not allowed if a generic symbol 'nu' is used as the order of the SBFs # to avoid inconsistencies (the order of bessel[jy] is allowed to be # complex-valued, whereas SBFs are defined only for integer orders) order = nu for f in (besselj, bessely): assert yn(order, z) == yn(order, z).rewrite(f) assert jn(order, z) == jn(order, z).rewrite(f) assert hn1(order, z) == hn1(order, z).rewrite(f) assert hn2(order, z) == hn2(order, z).rewrite(f) # for integral orders rewriting SBFs w.r.t bessel[jy] is allowed N = Symbol('n', integer=True) ri = randint(-11, 10) for order in (ri, N): for f in (besselj, bessely): assert yn(order, z) != yn(order, z).rewrite(f) assert jn(order, z) != jn(order, z).rewrite(f) assert hn1(order, z) != hn1(order, z).rewrite(f) assert hn2(order, z) != hn2(order, z).rewrite(f) for func, refunc in product((yn, jn, hn1, hn2), (jn, yn, besselj, bessely)): assert tn(func(ri, z), func(ri, z).rewrite(refunc), z) def test_expand(): from sympy import besselsimp, Symbol, exp, exp_polar, I assert expand_func(besselj(S(1)/2, z).rewrite(jn)) == \ sqrt(2)*sin(z)/(sqrt(pi)*sqrt(z)) assert expand_func(bessely(S(1)/2, z).rewrite(yn)) == \ -sqrt(2)*cos(z)/(sqrt(pi)*sqrt(z)) # XXX: teach sin/cos to work around arguments like # x*exp_polar(I*pi*n/2). Then change besselsimp -> expand_func assert besselsimp(besselj(S(1)/2, z)) == sqrt(2)*sin(z)/(sqrt(pi)*sqrt(z)) assert besselsimp(besselj(S(-1)/2, z)) == sqrt(2)*cos(z)/(sqrt(pi)*sqrt(z)) assert besselsimp(besselj(S(5)/2, z)) == \ -sqrt(2)*(z**2*sin(z) + 3*z*cos(z) - 3*sin(z))/(sqrt(pi)*z**(S(5)/2)) assert besselsimp(besselj(-S(5)/2, z)) == \ -sqrt(2)*(z**2*cos(z) - 3*z*sin(z) - 3*cos(z))/(sqrt(pi)*z**(S(5)/2)) assert besselsimp(bessely(S(1)/2, z)) == \ -(sqrt(2)*cos(z))/(sqrt(pi)*sqrt(z)) assert besselsimp(bessely(S(-1)/2, z)) == sqrt(2)*sin(z)/(sqrt(pi)*sqrt(z)) assert besselsimp(bessely(S(5)/2, z)) == \ sqrt(2)*(z**2*cos(z) - 3*z*sin(z) - 3*cos(z))/(sqrt(pi)*z**(S(5)/2)) assert besselsimp(bessely(S(-5)/2, z)) == \ -sqrt(2)*(z**2*sin(z) + 3*z*cos(z) - 3*sin(z))/(sqrt(pi)*z**(S(5)/2)) assert besselsimp(besseli(S(1)/2, z)) == sqrt(2)*sinh(z)/(sqrt(pi)*sqrt(z)) assert besselsimp(besseli(S(-1)/2, z)) == \ sqrt(2)*cosh(z)/(sqrt(pi)*sqrt(z)) assert besselsimp(besseli(S(5)/2, z)) == \ sqrt(2)*(z**2*sinh(z) - 3*z*cosh(z) + 3*sinh(z))/(sqrt(pi)*z**(S(5)/2)) assert besselsimp(besseli(S(-5)/2, z)) == \ sqrt(2)*(z**2*cosh(z) - 3*z*sinh(z) + 3*cosh(z))/(sqrt(pi)*z**(S(5)/2)) assert besselsimp(besselk(S(1)/2, z)) == \ besselsimp(besselk(S(-1)/2, z)) == sqrt(pi)*exp(-z)/(sqrt(2)*sqrt(z)) assert besselsimp(besselk(S(5)/2, z)) == \ besselsimp(besselk(S(-5)/2, z)) == \ sqrt(2)*sqrt(pi)*(z**2 + 3*z + 3)*exp(-z)/(2*z**(S(5)/2)) def check(eq, ans): return tn(eq, ans) and eq == ans rn = randcplx(a=1, b=0, d=0, c=2) for besselx in [besselj, bessely, besseli, besselk]: ri = S(2*randint(-11, 10) + 1) / 2 # half integer in [-21/2, 21/2] assert tn(besselsimp(besselx(ri, z)), besselx(ri, z)) assert check(expand_func(besseli(rn, x)), besseli(rn - 2, x) - 2*(rn - 1)*besseli(rn - 1, x)/x) assert check(expand_func(besseli(-rn, x)), besseli(-rn + 2, x) + 2*(-rn + 1)*besseli(-rn + 1, x)/x) assert check(expand_func(besselj(rn, x)), -besselj(rn - 2, x) + 2*(rn - 1)*besselj(rn - 1, x)/x) assert check(expand_func(besselj(-rn, x)), -besselj(-rn + 2, x) + 2*(-rn + 1)*besselj(-rn + 1, x)/x) assert check(expand_func(besselk(rn, x)), besselk(rn - 2, x) + 2*(rn - 1)*besselk(rn - 1, x)/x) assert check(expand_func(besselk(-rn, x)), besselk(-rn + 2, x) - 2*(-rn + 
1)*besselk(-rn + 1, x)/x) assert check(expand_func(bessely(rn, x)), -bessely(rn - 2, x) + 2*(rn - 1)*bessely(rn - 1, x)/x) assert check(expand_func(bessely(-rn, x)), -bessely(-rn + 2, x) + 2*(-rn + 1)*bessely(-rn + 1, x)/x) n = Symbol('n', integer=True, positive=True) assert expand_func(besseli(n + 2, z)) == \ besseli(n, z) + (-2*n - 2)*(-2*n*besseli(n, z)/z + besseli(n - 1, z))/z assert expand_func(besselj(n + 2, z)) == \ -besselj(n, z) + (2*n + 2)*(2*n*besselj(n, z)/z - besselj(n - 1, z))/z assert expand_func(besselk(n + 2, z)) == \ besselk(n, z) + (2*n + 2)*(2*n*besselk(n, z)/z + besselk(n - 1, z))/z assert expand_func(bessely(n + 2, z)) == \ -bessely(n, z) + (2*n + 2)*(2*n*bessely(n, z)/z - bessely(n - 1, z))/z assert expand_func(besseli(n + S(1)/2, z).rewrite(jn)) == \ (sqrt(2)*sqrt(z)*exp(-I*pi*(n + S(1)/2)/2) * exp_polar(I*pi/4)*jn(n, z*exp_polar(I*pi/2))/sqrt(pi)) assert expand_func(besselj(n + S(1)/2, z).rewrite(jn)) == \ sqrt(2)*sqrt(z)*jn(n, z)/sqrt(pi) r = Symbol('r', real=True) p = Symbol('p', positive=True) i = Symbol('i', integer=True) for besselx in [besselj, bessely, besseli, besselk]: assert besselx(i, p).is_real assert besselx(i, x).is_real is None assert besselx(x, z).is_real is None for besselx in [besselj, besseli]: assert besselx(i, r).is_real for besselx in [bessely, besselk]: assert besselx(i, r).is_real is None def test_fn(): x, z = symbols("x z") assert fn(1, z) == 1/z**2 assert fn(2, z) == -1/z + 3/z**3 assert fn(3, z) == -6/z**2 + 15/z**4 assert fn(4, z) == 1/z - 45/z**3 + 105/z**5 def mjn(n, z): return expand_func(jn(n, z)) def myn(n, z): return expand_func(yn(n, z)) def test_jn(): z = symbols("z") assert mjn(0, z) == sin(z)/z assert mjn(1, z) == sin(z)/z**2 - cos(z)/z assert mjn(2, z) == (3/z**3 - 1/z)*sin(z) - (3/z**2) * cos(z) assert mjn(3, z) == (15/z**4 - 6/z**2)*sin(z) + (1/z - 15/z**3)*cos(z) assert mjn(4, z) == (1/z + 105/z**5 - 45/z**3)*sin(z) + \ (-105/z**4 + 10/z**2)*cos(z) assert mjn(5, z) == (945/z**6 - 420/z**4 + 15/z**2)*sin(z) + \ (-1/z - 945/z**5 + 105/z**3)*cos(z) assert mjn(6, z) == (-1/z + 10395/z**7 - 4725/z**5 + 210/z**3)*sin(z) + \ (-10395/z**6 + 1260/z**4 - 21/z**2)*cos(z) assert expand_func(jn(n, z)) == jn(n, z) # SBFs not defined for complex-valued orders assert jn(2+3j, 5.2+0.3j).evalf() == jn(2+3j, 5.2+0.3j) assert eq([jn(2, 5.2+0.3j).evalf(10)], [0.09941975672 - 0.05452508024*I]) def test_yn(): z = symbols("z") assert myn(0, z) == -cos(z)/z assert myn(1, z) == -cos(z)/z**2 - sin(z)/z assert myn(2, z) == -((3/z**3 - 1/z)*cos(z) + (3/z**2)*sin(z)) assert expand_func(yn(n, z)) == yn(n, z) # SBFs not defined for complex-valued orders assert yn(2+3j, 5.2+0.3j).evalf() == yn(2+3j, 5.2+0.3j) assert eq([yn(2, 5.2+0.3j).evalf(10)], [0.185250342 + 0.01489557397*I]) def test_sympify_yn(): assert S(15) in myn(3, pi).atoms() assert myn(3, pi) == 15/pi**4 - 6/pi**2 def eq(a, b, tol=1e-6): for x, y in zip(a, b): if not (abs(x - y) < tol): return False return True def test_jn_zeros(): assert eq(jn_zeros(0, 4), [3.141592, 6.283185, 9.424777, 12.566370]) assert eq(jn_zeros(1, 4), [4.493409, 7.725251, 10.904121, 14.066193]) assert eq(jn_zeros(2, 4), [5.763459, 9.095011, 12.322940, 15.514603]) assert eq(jn_zeros(3, 4), [6.987932, 10.417118, 13.698023, 16.923621]) assert eq(jn_zeros(4, 4), [8.182561, 11.704907, 15.039664, 18.301255]) def test_bessel_eval(): from sympy import I, Symbol n, m, k = Symbol('n', integer=True), Symbol('m'), Symbol('k', integer=True, zero=False) for f in [besselj, besseli]: assert f(0, 0) == S.One assert f(2.1, 0) == S.Zero 
assert f(-3, 0) == S.Zero assert f(-10.2, 0) == S.ComplexInfinity assert f(1 + 3*I, 0) == S.Zero assert f(-3 + I, 0) == S.ComplexInfinity assert f(-2*I, 0) == S.NaN assert f(n, 0) != S.One and f(n, 0) != S.Zero assert f(m, 0) != S.One and f(m, 0) != S.Zero assert f(k, 0) == S.Zero assert bessely(0, 0) == S.NegativeInfinity assert besselk(0, 0) == S.Infinity for f in [bessely, besselk]: assert f(1 + I, 0) == S.ComplexInfinity assert f(I, 0) == S.NaN for f in [besselj, bessely]: assert f(m, S.Infinity) == S.Zero assert f(m, S.NegativeInfinity) == S.Zero for f in [besseli, besselk]: assert f(m, I*S.Infinity) == S.Zero assert f(m, I*S.NegativeInfinity) == S.Zero for f in [besseli, besselk]: assert f(-4, z) == f(4, z) assert f(-3, z) == f(3, z) assert f(-n, z) == f(n, z) assert f(-m, z) != f(m, z) for f in [besselj, bessely]: assert f(-4, z) == f(4, z) assert f(-3, z) == -f(3, z) assert f(-n, z) == (-1)**n*f(n, z) assert f(-m, z) != (-1)**m*f(m, z) for f in [besselj, besseli]: assert f(m, -z) == (-z)**m*z**(-m)*f(m, z) assert besseli(2, -z) == besseli(2, z) assert besseli(3, -z) == -besseli(3, z) assert besselj(0, -z) == besselj(0, z) assert besselj(1, -z) == -besselj(1, z) assert besseli(0, I*z) == besselj(0, z) assert besseli(1, I*z) == I*besselj(1, z) assert besselj(3, I*z) == -I*besseli(3, z) def test_bessel_nan(): # FIXME: could have these return NaN; for now just fix infinite recursion for f in [besselj, bessely, besseli, besselk, hankel1, hankel2, yn, jn]: assert f(1, S.NaN) == f(1, S.NaN, evaluate=False) def test_conjugate(): from sympy import conjugate, I, Symbol n, z, x = Symbol('n'), Symbol('z', real=False), Symbol('x', real=True) y, t = Symbol('y', real=True, positive=True), Symbol('t', negative=True) for f in [besseli, besselj, besselk, bessely, hankel1, hankel2]: assert f(n, -1).conjugate() != f(conjugate(n), -1) assert f(n, x).conjugate() != f(conjugate(n), x) assert f(n, t).conjugate() != f(conjugate(n), t) rz = randcplx(b=0.5) for f in [besseli, besselj, besselk, bessely]: assert f(n, 1 + I).conjugate() == f(conjugate(n), 1 - I) assert f(n, 0).conjugate() == f(conjugate(n), 0) assert f(n, 1).conjugate() == f(conjugate(n), 1) assert f(n, z).conjugate() == f(conjugate(n), conjugate(z)) assert f(n, y).conjugate() == f(conjugate(n), y) assert tn(f(n, rz).conjugate(), f(conjugate(n), conjugate(rz))) assert hankel1(n, 1 + I).conjugate() == hankel2(conjugate(n), 1 - I) assert hankel1(n, 0).conjugate() == hankel2(conjugate(n), 0) assert hankel1(n, 1).conjugate() == hankel2(conjugate(n), 1) assert hankel1(n, y).conjugate() == hankel2(conjugate(n), y) assert hankel1(n, z).conjugate() == hankel2(conjugate(n), conjugate(z)) assert tn(hankel1(n, rz).conjugate(), hankel2(conjugate(n), conjugate(rz))) assert hankel2(n, 1 + I).conjugate() == hankel1(conjugate(n), 1 - I) assert hankel2(n, 0).conjugate() == hankel1(conjugate(n), 0) assert hankel2(n, 1).conjugate() == hankel1(conjugate(n), 1) assert hankel2(n, y).conjugate() == hankel1(conjugate(n), y) assert hankel2(n, z).conjugate() == hankel1(conjugate(n), conjugate(z)) assert tn(hankel2(n, rz).conjugate(), hankel1(conjugate(n), conjugate(rz))) def test_branching(): from sympy import exp_polar, polar_lift, Symbol, I, exp assert besselj(polar_lift(k), x) == besselj(k, x) assert besseli(polar_lift(k), x) == besseli(k, x) n = Symbol('n', integer=True) assert besselj(n, exp_polar(2*pi*I)*x) == besselj(n, x) assert besselj(n, polar_lift(x)) == besselj(n, x) assert besseli(n, exp_polar(2*pi*I)*x) == besseli(n, x) assert besseli(n, polar_lift(x)) == 
besseli(n, x) def tn(func, s): from random import uniform c = uniform(1, 5) expr = func(s, c*exp_polar(I*pi)) - func(s, c*exp_polar(-I*pi)) eps = 1e-15 expr2 = func(s + eps, -c + eps*I) - func(s + eps, -c - eps*I) return abs(expr.n() - expr2.n()).n() < 1e-10 nu = Symbol('nu') assert besselj(nu, exp_polar(2*pi*I)*x) == exp(2*pi*I*nu)*besselj(nu, x) assert besseli(nu, exp_polar(2*pi*I)*x) == exp(2*pi*I*nu)*besseli(nu, x) assert tn(besselj, 2) assert tn(besselj, pi) assert tn(besselj, I) assert tn(besseli, 2) assert tn(besseli, pi) assert tn(besseli, I) def test_airy_base(): z = Symbol('z') x = Symbol('x', real=True) y = Symbol('y', real=True) assert conjugate(airyai(z)) == airyai(conjugate(z)) assert airyai(x).is_real assert airyai(x+I*y).as_real_imag() == ( airyai(x - I*x*Abs(y)/Abs(x))/2 + airyai(x + I*x*Abs(y)/Abs(x))/2, I*x*(airyai(x - I*x*Abs(y)/Abs(x)) - airyai(x + I*x*Abs(y)/Abs(x)))*Abs(y)/(2*y*Abs(x))) def test_airyai(): z = Symbol('z', real=False) t = Symbol('t', negative=True) p = Symbol('p', positive=True) assert isinstance(airyai(z), airyai) assert airyai(0) == 3**(S(1)/3)/(3*gamma(S(2)/3)) assert airyai(oo) == 0 assert airyai(-oo) == 0 assert diff(airyai(z), z) == airyaiprime(z) assert series(airyai(z), z, 0, 3) == ( 3**(S(5)/6)*gamma(S(1)/3)/(6*pi) - 3**(S(1)/6)*z*gamma(S(2)/3)/(2*pi) + O(z**3)) assert airyai(z).rewrite(hyper) == ( -3**(S(2)/3)*z*hyper((), (S(4)/3,), z**S(3)/9)/(3*gamma(S(1)/3)) + 3**(S(1)/3)*hyper((), (S(2)/3,), z**S(3)/9)/(3*gamma(S(2)/3))) assert isinstance(airyai(z).rewrite(besselj), airyai) assert airyai(t).rewrite(besselj) == ( sqrt(-t)*(besselj(-S(1)/3, 2*(-t)**(S(3)/2)/3) + besselj(S(1)/3, 2*(-t)**(S(3)/2)/3))/3) assert airyai(z).rewrite(besseli) == ( -z*besseli(S(1)/3, 2*z**(S(3)/2)/3)/(3*(z**(S(3)/2))**(S(1)/3)) + (z**(S(3)/2))**(S(1)/3)*besseli(-S(1)/3, 2*z**(S(3)/2)/3)/3) assert airyai(p).rewrite(besseli) == ( sqrt(p)*(besseli(-S(1)/3, 2*p**(S(3)/2)/3) - besseli(S(1)/3, 2*p**(S(3)/2)/3))/3) assert expand_func(airyai(2*(3*z**5)**(S(1)/3))) == ( -sqrt(3)*(-1 + (z**5)**(S(1)/3)/z**(S(5)/3))*airybi(2*3**(S(1)/3)*z**(S(5)/3))/6 + (1 + (z**5)**(S(1)/3)/z**(S(5)/3))*airyai(2*3**(S(1)/3)*z**(S(5)/3))/2) def test_airybi(): z = Symbol('z', real=False) t = Symbol('t', negative=True) p = Symbol('p', positive=True) assert isinstance(airybi(z), airybi) assert airybi(0) == 3**(S(5)/6)/(3*gamma(S(2)/3)) assert airybi(oo) == oo assert airybi(-oo) == 0 assert diff(airybi(z), z) == airybiprime(z) assert series(airybi(z), z, 0, 3) == ( 3**(S(1)/3)*gamma(S(1)/3)/(2*pi) + 3**(S(2)/3)*z*gamma(S(2)/3)/(2*pi) + O(z**3)) assert airybi(z).rewrite(hyper) == ( 3**(S(1)/6)*z*hyper((), (S(4)/3,), z**S(3)/9)/gamma(S(1)/3) + 3**(S(5)/6)*hyper((), (S(2)/3,), z**S(3)/9)/(3*gamma(S(2)/3))) assert isinstance(airybi(z).rewrite(besselj), airybi) assert airyai(t).rewrite(besselj) == ( sqrt(-t)*(besselj(-S(1)/3, 2*(-t)**(S(3)/2)/3) + besselj(S(1)/3, 2*(-t)**(S(3)/2)/3))/3) assert airybi(z).rewrite(besseli) == ( sqrt(3)*(z*besseli(S(1)/3, 2*z**(S(3)/2)/3)/(z**(S(3)/2))**(S(1)/3) + (z**(S(3)/2))**(S(1)/3)*besseli(-S(1)/3, 2*z**(S(3)/2)/3))/3) assert airybi(p).rewrite(besseli) == ( sqrt(3)*sqrt(p)*(besseli(-S(1)/3, 2*p**(S(3)/2)/3) + besseli(S(1)/3, 2*p**(S(3)/2)/3))/3) assert expand_func(airybi(2*(3*z**5)**(S(1)/3))) == ( sqrt(3)*(1 - (z**5)**(S(1)/3)/z**(S(5)/3))*airyai(2*3**(S(1)/3)*z**(S(5)/3))/2 + (1 + (z**5)**(S(1)/3)/z**(S(5)/3))*airybi(2*3**(S(1)/3)*z**(S(5)/3))/2) def test_airyaiprime(): z = Symbol('z', real=False) t = Symbol('t', negative=True) p = Symbol('p', positive=True) assert 
isinstance(airyaiprime(z), airyaiprime) assert airyaiprime(0) == -3**(S(2)/3)/(3*gamma(S(1)/3)) assert airyaiprime(oo) == 0 assert diff(airyaiprime(z), z) == z*airyai(z) assert series(airyaiprime(z), z, 0, 3) == ( -3**(S(2)/3)/(3*gamma(S(1)/3)) + 3**(S(1)/3)*z**2/(6*gamma(S(2)/3)) + O(z**3)) assert airyaiprime(z).rewrite(hyper) == ( 3**(S(1)/3)*z**2*hyper((), (S(5)/3,), z**S(3)/9)/(6*gamma(S(2)/3)) - 3**(S(2)/3)*hyper((), (S(1)/3,), z**S(3)/9)/(3*gamma(S(1)/3))) assert isinstance(airyaiprime(z).rewrite(besselj), airyaiprime) assert airyai(t).rewrite(besselj) == ( sqrt(-t)*(besselj(-S(1)/3, 2*(-t)**(S(3)/2)/3) + besselj(S(1)/3, 2*(-t)**(S(3)/2)/3))/3) assert airyaiprime(z).rewrite(besseli) == ( z**2*besseli(S(2)/3, 2*z**(S(3)/2)/3)/(3*(z**(S(3)/2))**(S(2)/3)) - (z**(S(3)/2))**(S(2)/3)*besseli(-S(1)/3, 2*z**(S(3)/2)/3)/3) assert airyaiprime(p).rewrite(besseli) == ( p*(-besseli(-S(2)/3, 2*p**(S(3)/2)/3) + besseli(S(2)/3, 2*p**(S(3)/2)/3))/3) assert expand_func(airyaiprime(2*(3*z**5)**(S(1)/3))) == ( sqrt(3)*(z**(S(5)/3)/(z**5)**(S(1)/3) - 1)*airybiprime(2*3**(S(1)/3)*z**(S(5)/3))/6 + (z**(S(5)/3)/(z**5)**(S(1)/3) + 1)*airyaiprime(2*3**(S(1)/3)*z**(S(5)/3))/2) def test_airybiprime(): z = Symbol('z', real=False) t = Symbol('t', negative=True) p = Symbol('p', positive=True) assert isinstance(airybiprime(z), airybiprime) assert airybiprime(0) == 3**(S(1)/6)/gamma(S(1)/3) assert airybiprime(oo) == oo assert airybiprime(-oo) == 0 assert diff(airybiprime(z), z) == z*airybi(z) assert series(airybiprime(z), z, 0, 3) == ( 3**(S(1)/6)/gamma(S(1)/3) + 3**(S(5)/6)*z**2/(6*gamma(S(2)/3)) + O(z**3)) assert airybiprime(z).rewrite(hyper) == ( 3**(S(5)/6)*z**2*hyper((), (S(5)/3,), z**S(3)/9)/(6*gamma(S(2)/3)) + 3**(S(1)/6)*hyper((), (S(1)/3,), z**S(3)/9)/gamma(S(1)/3)) assert isinstance(airybiprime(z).rewrite(besselj), airybiprime) assert airyai(t).rewrite(besselj) == ( sqrt(-t)*(besselj(-S(1)/3, 2*(-t)**(S(3)/2)/3) + besselj(S(1)/3, 2*(-t)**(S(3)/2)/3))/3) assert airybiprime(z).rewrite(besseli) == ( sqrt(3)*(z**2*besseli(S(2)/3, 2*z**(S(3)/2)/3)/(z**(S(3)/2))**(S(2)/3) + (z**(S(3)/2))**(S(2)/3)*besseli(-S(2)/3, 2*z**(S(3)/2)/3))/3) assert airybiprime(p).rewrite(besseli) == ( sqrt(3)*p*(besseli(-S(2)/3, 2*p**(S(3)/2)/3) + besseli(S(2)/3, 2*p**(S(3)/2)/3))/3) assert expand_func(airybiprime(2*(3*z**5)**(S(1)/3))) == ( sqrt(3)*(z**(S(5)/3)/(z**5)**(S(1)/3) - 1)*airyaiprime(2*3**(S(1)/3)*z**(S(5)/3))/2 + (z**(S(5)/3)/(z**5)**(S(1)/3) + 1)*airybiprime(2*3**(S(1)/3)*z**(S(5)/3))/2)
bsd-3-clause
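test_diff above asserts the standard two-term derivative recurrences symbolically; the following is a minimal sketch that checks one of them directly, outside the test harness. The sample point n=1, z=2.0 is an arbitrary choice.

# Minimal sketch: verify d/dz besselj(n, z) == (besselj(n-1, z) - besselj(n+1, z))/2,
# the identity asserted in test_diff, then evaluate it at a sample point.
from sympy import besselj, diff, symbols, simplify

n, z = symbols('n z')
lhs = diff(besselj(n, z), z)
rhs = (besselj(n - 1, z) - besselj(n + 1, z)) / 2
assert simplify(lhs - rhs) == 0
print(lhs.subs({n: 1, z: 2.0}).evalf())  # numeric value at an arbitrary point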
plusseed/ViewCamCloud-API-Tool
VccReqRes/VccToken.py
1
2102
# -*- coding: utf-8 -*-

import requests
from requests_toolbelt.utils import dump

from PySide import QtCore, QtGui, QtWebKit

from utils import *
from VccReqResBase import *


class VccToken(VccReqResBase):
    """
    Common > get an API token
    """

    def __init__(self, parent, grid):
        """
        Set up the UI.
        """
        super(VccToken, self).__init__(parent, grid)

        ###############################################################
        (label, param) = self.set_defaultUI_request_Param()
        self.grid.addWidget(label, 0, 0)
        self.grid.addWidget(param, 1, 0)

        ###############################################################
        button = self.set_defaultUI_request_Button(self.on_click)
        self.grid.addWidget(button, 2, 0)

        ###############################################################
        (label, view) = self.set_defaultUI_response_TreeView()
        self.grid.addWidget(label, 3, 0)
        self.grid.addWidget(view, 4, 0)

        ###############################################################
        (label, raw) = self.set_defaultUI_raw_TextView()
        self.grid.addWidget(label, 5, 0)
        self.grid.addWidget(raw, 6, 0)

    def communicate(self):
        """
        Perform the HTTP request.
        """
        url = '%s/token/' % (confv("HOST"))
        params = {}
        headers = {
            'X-VCC-API-KEYSET': 'key=%s&secret=%s' % (confv("API_KEY"), confv("API_SECRET")),
        }
        r = requests.get(url, params=params, headers=headers)
        return r

    def on_click(self):
        """
        Handle a click on the request button.
        """
        r = self.communicate()

        rawstr = dump.dump_all(r)
        self.inside('raw_TextView.raw').setPlainText(rawstr.decode('utf-8'))

        self.inside('response_TreeView.view').clear()

        if r.status_code == 200:
            save_history(rawstr, r)
            data = r.json()
            widget = self.inside('response_TreeView.view')
            self.set_response_TreeView_columnset(widget, "root", data)
lgpl-2.1
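The class above wires the request into a PySide widget, but the HTTP exchange itself is just a GET with a keyset header. A minimal standalone sketch of the same call; the host, key, and secret values are placeholders, not real credentials.

# Standalone sketch of the token request made by VccToken.communicate().
# HOST, API_KEY, and API_SECRET are placeholder values.
import requests

HOST = 'https://example.invalid/api'  # placeholder endpoint
API_KEY = 'your-key'                  # placeholder
API_SECRET = 'your-secret'            # placeholder

headers = {
    'X-VCC-API-KEYSET': 'key=%s&secret=%s' % (API_KEY, API_SECRET),
}
r = requests.get('%s/token/' % HOST, headers=headers)
if r.status_code == 200:
    print(r.json())  # token payload; structure depends on the service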
Vimos/scikit-learn
sklearn/mixture/bayesian_mixture.py
17
32965
"""Bayesian Gaussian Mixture Model.""" # Author: Wei Xue <[email protected]> # Thierry Guillemot <[email protected]> # License: BSD 3 clause import math import numpy as np from scipy.special import betaln, digamma, gammaln from .base import BaseMixture, _check_shape from .gaussian_mixture import _check_precision_matrix from .gaussian_mixture import _check_precision_positivity from .gaussian_mixture import _compute_log_det_cholesky from .gaussian_mixture import _compute_precision_cholesky from .gaussian_mixture import _estimate_gaussian_parameters from .gaussian_mixture import _estimate_log_gaussian_prob from ..utils import check_array from ..utils.validation import check_is_fitted def _log_dirichlet_norm(dirichlet_concentration): """Compute the log of the Dirichlet distribution normalization term. Parameters ---------- dirichlet_concentration : array-like, shape (n_samples,) The parameters values of the Dirichlet distribution. Returns ------- log_dirichlet_norm : float The log normalization of the Dirichlet distribution. """ return (gammaln(np.sum(dirichlet_concentration)) - np.sum(gammaln(dirichlet_concentration))) def _log_wishart_norm(degrees_of_freedom, log_det_precisions_chol, n_features): """Compute the log of the Wishart distribution normalization term. Parameters ---------- degrees_of_freedom : array-like, shape (n_components,) The number of degrees of freedom on the covariance Wishart distributions. log_det_precision_chol : array-like, shape (n_components,) The determinant of the precision matrix for each component. n_features : int The number of features. Return ------ log_wishart_norm : array-like, shape (n_components,) The log normalization of the Wishart distribution. """ # To simplify the computation we have removed the np.log(np.pi) term return -(degrees_of_freedom * log_det_precisions_chol + degrees_of_freedom * n_features * .5 * math.log(2.) + np.sum(gammaln(.5 * (degrees_of_freedom - np.arange(n_features)[:, np.newaxis])), 0)) class BayesianGaussianMixture(BaseMixture): """Variational Bayesian estimation of a Gaussian mixture. This class allows to infer an approximate posterior distribution over the parameters of a Gaussian mixture distribution. The effective number of components can be inferred from the data. This class implements two types of prior for the weights distribution: a finite mixture model with Dirichlet distribution and an infinite mixture model with the Dirichlet Process. In practice Dirichlet Process inference algorithm is approximated and uses a truncated distribution with a fixed maximum number of components (called the Stick-breaking representation). The number of components actually used almost always depends on the data. .. versionadded:: 0.18 *BayesianGaussianMixture*. Read more in the :ref:`User Guide <bgmm>`. Parameters ---------- n_components : int, defaults to 1. The number of mixture components. Depending on the data and the value of the `weight_concentration_prior` the model can decide to not use all the components by setting some component `weights_` to values very close to zero. The number of effective components is therefore smaller than n_components. covariance_type : {'full', 'tied', 'diag', 'spherical'}, defaults to 'full' String describing the type of covariance parameters to use. 
Must be one of:: 'full' (each component has its own general covariance matrix), 'tied' (all components share the same general covariance matrix), 'diag' (each component has its own diagonal covariance matrix), 'spherical' (each component has its own single variance). tol : float, defaults to 1e-3. The convergence threshold. EM iterations will stop when the lower bound average gain on the likelihood (of the training data with respect to the model) is below this threshold. reg_covar : float, defaults to 1e-6. Non-negative regularization added to the diagonal of covariance. Allows to assure that the covariance matrices are all positive. max_iter : int, defaults to 100. The number of EM iterations to perform. n_init : int, defaults to 1. The number of initializations to perform. The result with the highest lower bound value on the likelihood is kept. init_params : {'kmeans', 'random'}, defaults to 'kmeans'. The method used to initialize the weights, the means and the covariances. Must be one of:: 'kmeans' : responsibilities are initialized using kmeans. 'random' : responsibilities are initialized randomly. weight_concentration_prior_type : str, defaults to 'dirichlet_process'. String describing the type of the weight concentration prior. Must be one of:: 'dirichlet_process' (using the Stick-breaking representation), 'dirichlet_distribution' (can favor more uniform weights). weight_concentration_prior : float | None, optional. The dirichlet concentration of each component on the weight distribution (Dirichlet). The higher concentration puts more mass in the center and will lead to more components being active, while a lower concentration parameter will lead to more mass at the edge of the mixture weights simplex. The value of the parameter must be greater than 0. If it is None, it's set to ``1. / n_components``. mean_precision_prior : float | None, optional. The precision prior on the mean distribution (Gaussian). Controls the extend to where means can be placed. Smaller values concentrate the means of each clusters around `mean_prior`. The value of the parameter must be greater than 0. If it is None, it's set to 1. mean_prior : array-like, shape (n_features,), optional The prior on the mean distribution (Gaussian). If it is None, it's set to the mean of X. degrees_of_freedom_prior : float | None, optional. The prior of the number of degrees of freedom on the covariance distributions (Wishart). If it is None, it's set to `n_features`. covariance_prior : float or array-like, optional The prior on the covariance distribution (Wishart). If it is None, the emiprical covariance prior is initialized using the covariance of X. The shape depends on `covariance_type`:: (n_features, n_features) if 'full', (n_features, n_features) if 'tied', (n_features) if 'diag', float if 'spherical' random_state : int, RandomState instance or None, optional (default=None) If int, random_state is the seed used by the random number generator; If RandomState instance, random_state is the random number generator; If None, the random number generator is the RandomState instance used by `np.random`. warm_start : bool, default to False. If 'warm_start' is True, the solution of the last fitting is used as initialization for the next call of fit(). This can speed up convergence when fit is called several time on similar problems. verbose : int, default to 0. Enable verbose output. If 1 then it prints the current initialization and each iteration step. 
If greater than 1 then it prints also the log probability and the time needed for each step. verbose_interval : int, default to 10. Number of iteration done before the next print. Attributes ---------- weights_ : array-like, shape (n_components,) The weights of each mixture components. means_ : array-like, shape (n_components, n_features) The mean of each mixture component. covariances_ : array-like The covariance of each mixture component. The shape depends on `covariance_type`:: (n_components,) if 'spherical', (n_features, n_features) if 'tied', (n_components, n_features) if 'diag', (n_components, n_features, n_features) if 'full' precisions_ : array-like The precision matrices for each component in the mixture. A precision matrix is the inverse of a covariance matrix. A covariance matrix is symmetric positive definite so the mixture of Gaussian can be equivalently parameterized by the precision matrices. Storing the precision matrices instead of the covariance matrices makes it more efficient to compute the log-likelihood of new samples at test time. The shape depends on ``covariance_type``:: (n_components,) if 'spherical', (n_features, n_features) if 'tied', (n_components, n_features) if 'diag', (n_components, n_features, n_features) if 'full' precisions_cholesky_ : array-like The cholesky decomposition of the precision matrices of each mixture component. A precision matrix is the inverse of a covariance matrix. A covariance matrix is symmetric positive definite so the mixture of Gaussian can be equivalently parameterized by the precision matrices. Storing the precision matrices instead of the covariance matrices makes it more efficient to compute the log-likelihood of new samples at test time. The shape depends on ``covariance_type``:: (n_components,) if 'spherical', (n_features, n_features) if 'tied', (n_components, n_features) if 'diag', (n_components, n_features, n_features) if 'full' converged_ : bool True when convergence was reached in fit(), False otherwise. n_iter_ : int Number of step used by the best fit of inference to reach the convergence. lower_bound_ : float Lower bound value on the likelihood (of the training data with respect to the model) of the best fit of inference. weight_concentration_prior_ : tuple or float The dirichlet concentration of each component on the weight distribution (Dirichlet). The type depends on ``weight_concentration_prior_type``:: (float, float) if 'dirichlet_process' (Beta parameters), float if 'dirichlet_distribution' (Dirichlet parameters). The higher concentration puts more mass in the center and will lead to more components being active, while a lower concentration parameter will lead to more mass at the edge of the simplex. weight_concentration_ : array-like, shape (n_components,) The dirichlet concentration of each component on the weight distribution (Dirichlet). mean_precision_prior : float The precision prior on the mean distribution (Gaussian). Controls the extend to where means can be placed. Smaller values concentrate the means of each clusters around `mean_prior`. mean_precision_ : array-like, shape (n_components,) The precision of each components on the mean distribution (Gaussian). means_prior_ : array-like, shape (n_features,) The prior on the mean distribution (Gaussian). degrees_of_freedom_prior_ : float The prior of the number of degrees of freedom on the covariance distributions (Wishart). degrees_of_freedom_ : array-like, shape (n_components,) The number of degrees of freedom of each components in the model. 
covariance_prior_ : float or array-like The prior on the covariance distribution (Wishart). The shape depends on `covariance_type`:: (n_features, n_features) if 'full', (n_features, n_features) if 'tied', (n_features) if 'diag', float if 'spherical' See Also -------- GaussianMixture : Finite Gaussian mixture fit with EM. References ---------- .. [1] `Bishop, Christopher M. (2006). "Pattern recognition and machine learning". Vol. 4 No. 4. New York: Springer. <http://www.springer.com/kr/book/9780387310732>`_ .. [2] `Hagai Attias. (2000). "A Variational Bayesian Framework for Graphical Models". In Advances in Neural Information Processing Systems 12. <http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.36.2841&rep=rep1&type=pdf>`_ .. [3] `Blei, David M. and Michael I. Jordan. (2006). "Variational inference for Dirichlet process mixtures". Bayesian analysis 1.1 <http://www.cs.princeton.edu/courses/archive/fall11/cos597C/reading/BleiJordan2005.pdf>`_ """ def __init__(self, n_components=1, covariance_type='full', tol=1e-3, reg_covar=1e-6, max_iter=100, n_init=1, init_params='kmeans', weight_concentration_prior_type='dirichlet_process', weight_concentration_prior=None, mean_precision_prior=None, mean_prior=None, degrees_of_freedom_prior=None, covariance_prior=None, random_state=None, warm_start=False, verbose=0, verbose_interval=10): super(BayesianGaussianMixture, self).__init__( n_components=n_components, tol=tol, reg_covar=reg_covar, max_iter=max_iter, n_init=n_init, init_params=init_params, random_state=random_state, warm_start=warm_start, verbose=verbose, verbose_interval=verbose_interval) self.covariance_type = covariance_type self.weight_concentration_prior_type = weight_concentration_prior_type self.weight_concentration_prior = weight_concentration_prior self.mean_precision_prior = mean_precision_prior self.mean_prior = mean_prior self.degrees_of_freedom_prior = degrees_of_freedom_prior self.covariance_prior = covariance_prior def _check_parameters(self, X): """Check that the parameters are well defined. Parameters ---------- X : array-like, shape (n_samples, n_features) """ if self.covariance_type not in ['spherical', 'tied', 'diag', 'full']: raise ValueError("Invalid value for 'covariance_type': %s " "'covariance_type' should be in " "['spherical', 'tied', 'diag', 'full']" % self.covariance_type) if (self.weight_concentration_prior_type not in ['dirichlet_process', 'dirichlet_distribution']): raise ValueError( "Invalid value for 'weight_concentration_prior_type': %s " "'weight_concentration_prior_type' should be in " "['dirichlet_process', 'dirichlet_distribution']" % self.weight_concentration_prior_type) self._check_weights_parameters() self._check_means_parameters(X) self._check_precision_parameters(X) self._checkcovariance_prior_parameter(X) def _check_weights_parameters(self): """Check the parameter of the Dirichlet distribution.""" if self.weight_concentration_prior is None: self.weight_concentration_prior_ = 1. / self.n_components elif self.weight_concentration_prior > 0.: self.weight_concentration_prior_ = ( self.weight_concentration_prior) else: raise ValueError("The parameter 'weight_concentration_prior' " "should be greater than 0., but got %.3f." % self.weight_concentration_prior) def _check_means_parameters(self, X): """Check the parameters of the Gaussian distribution. Parameters ---------- X : array-like, shape (n_samples, n_features) """ _, n_features = X.shape if self.mean_precision_prior is None: self.mean_precision_prior_ = 1. 
elif self.mean_precision_prior > 0.: self.mean_precision_prior_ = self.mean_precision_prior else: raise ValueError("The parameter 'mean_precision_prior' should be " "greater than 0., but got %.3f." % self.mean_precision_prior) if self.mean_prior is None: self.mean_prior_ = X.mean(axis=0) else: self.mean_prior_ = check_array(self.mean_prior, dtype=[np.float64, np.float32], ensure_2d=False) _check_shape(self.mean_prior_, (n_features, ), 'means') def _check_precision_parameters(self, X): """Check the prior parameters of the precision distribution. Parameters ---------- X : array-like, shape (n_samples, n_features) """ _, n_features = X.shape if self.degrees_of_freedom_prior is None: self.degrees_of_freedom_prior_ = n_features elif self.degrees_of_freedom_prior > n_features - 1.: self.degrees_of_freedom_prior_ = self.degrees_of_freedom_prior else: raise ValueError("The parameter 'degrees_of_freedom_prior' " "should be greater than %d, but got %.3f." % (n_features - 1, self.degrees_of_freedom_prior)) def _checkcovariance_prior_parameter(self, X): """Check the `covariance_prior_`. Parameters ---------- X : array-like, shape (n_samples, n_features) """ _, n_features = X.shape if self.covariance_prior is None: self.covariance_prior_ = { 'full': np.atleast_2d(np.cov(X.T)), 'tied': np.atleast_2d(np.cov(X.T)), 'diag': np.var(X, axis=0, ddof=1), 'spherical': np.var(X, axis=0, ddof=1).mean() }[self.covariance_type] elif self.covariance_type in ['full', 'tied']: self.covariance_prior_ = check_array( self.covariance_prior, dtype=[np.float64, np.float32], ensure_2d=False) _check_shape(self.covariance_prior_, (n_features, n_features), '%s covariance_prior' % self.covariance_type) _check_precision_matrix(self.covariance_prior_, self.covariance_type) elif self.covariance_type == 'diag': self.covariance_prior_ = check_array( self.covariance_prior, dtype=[np.float64, np.float32], ensure_2d=False) _check_shape(self.covariance_prior_, (n_features,), '%s covariance_prior' % self.covariance_type) _check_precision_positivity(self.covariance_prior_, self.covariance_type) # spherical case elif self.covariance_prior > 0.: self.covariance_prior_ = self.covariance_prior else: raise ValueError("The parameter 'spherical covariance_prior' " "should be greater than 0., but got %.3f." % self.covariance_prior) def _initialize(self, X, resp): """Initialization of the mixture parameters. Parameters ---------- X : array-like, shape (n_samples, n_features) resp : array-like, shape (n_samples, n_components) """ nk, xk, sk = _estimate_gaussian_parameters(X, resp, self.reg_covar, self.covariance_type) self._estimate_weights(nk) self._estimate_means(nk, xk) self._estimate_precisions(nk, xk, sk) def _estimate_weights(self, nk): """Estimate the parameters of the Dirichlet distribution. Parameters ---------- nk : array-like, shape (n_components,) """ if self.weight_concentration_prior_type == 'dirichlet_process': # For dirichlet process weight_concentration will be a tuple # containing the two parameters of the beta distribution self.weight_concentration_ = ( 1. + nk, (self.weight_concentration_prior_ + np.hstack((np.cumsum(nk[::-1])[-2::-1], 0)))) else: # case Variationnal Gaussian mixture with dirichlet distribution self.weight_concentration_ = self.weight_concentration_prior_ + nk def _estimate_means(self, nk, xk): """Estimate the parameters of the Gaussian distribution. 
Parameters ---------- nk : array-like, shape (n_components,) xk : array-like, shape (n_components, n_features) """ self.mean_precision_ = self.mean_precision_prior_ + nk self.means_ = ((self.mean_precision_prior_ * self.mean_prior_ + nk[:, np.newaxis] * xk) / self.mean_precision_[:, np.newaxis]) def _estimate_precisions(self, nk, xk, sk): """Estimate the precisions parameters of the precision distribution. Parameters ---------- nk : array-like, shape (n_components,) xk : array-like, shape (n_components, n_features) sk : array-like The shape depends of `covariance_type`: 'full' : (n_components, n_features, n_features) 'tied' : (n_features, n_features) 'diag' : (n_components, n_features) 'spherical' : (n_components,) """ {"full": self._estimate_wishart_full, "tied": self._estimate_wishart_tied, "diag": self._estimate_wishart_diag, "spherical": self._estimate_wishart_spherical }[self.covariance_type](nk, xk, sk) self.precisions_cholesky_ = _compute_precision_cholesky( self.covariances_, self.covariance_type) def _estimate_wishart_full(self, nk, xk, sk): """Estimate the full Wishart distribution parameters. Parameters ---------- X : array-like, shape (n_samples, n_features) nk : array-like, shape (n_components,) xk : array-like, shape (n_components, n_features) sk : array-like, shape (n_components, n_features, n_features) """ _, n_features = xk.shape # Warning : in some Bishop book, there is a typo on the formula 10.63 # `degrees_of_freedom_k = degrees_of_freedom_0 + Nk` is # the correct formula self.degrees_of_freedom_ = self.degrees_of_freedom_prior_ + nk self.covariances_ = np.empty((self.n_components, n_features, n_features)) for k in range(self.n_components): diff = xk[k] - self.mean_prior_ self.covariances_[k] = (self.covariance_prior_ + nk[k] * sk[k] + nk[k] * self.mean_precision_prior_ / self.mean_precision_[k] * np.outer(diff, diff)) # Contrary to the original bishop book, we normalize the covariances self.covariances_ /= ( self.degrees_of_freedom_[:, np.newaxis, np.newaxis]) def _estimate_wishart_tied(self, nk, xk, sk): """Estimate the tied Wishart distribution parameters. Parameters ---------- X : array-like, shape (n_samples, n_features) nk : array-like, shape (n_components,) xk : array-like, shape (n_components, n_features) sk : array-like, shape (n_features, n_features) """ _, n_features = xk.shape # Warning : in some Bishop book, there is a typo on the formula 10.63 # `degrees_of_freedom_k = degrees_of_freedom_0 + Nk` # is the correct formula self.degrees_of_freedom_ = ( self.degrees_of_freedom_prior_ + nk.sum() / self.n_components) diff = xk - self.mean_prior_ self.covariances_ = ( self.covariance_prior_ + sk * nk.sum() / self.n_components + self.mean_precision_prior_ / self.n_components * np.dot( (nk / self.mean_precision_) * diff.T, diff)) # Contrary to the original bishop book, we normalize the covariances self.covariances_ /= self.degrees_of_freedom_ def _estimate_wishart_diag(self, nk, xk, sk): """Estimate the diag Wishart distribution parameters. 
Parameters ---------- X : array-like, shape (n_samples, n_features) nk : array-like, shape (n_components,) xk : array-like, shape (n_components, n_features) sk : array-like, shape (n_components, n_features) """ _, n_features = xk.shape # Warning : in some Bishop book, there is a typo on the formula 10.63 # `degrees_of_freedom_k = degrees_of_freedom_0 + Nk` # is the correct formula self.degrees_of_freedom_ = self.degrees_of_freedom_prior_ + nk diff = xk - self.mean_prior_ self.covariances_ = ( self.covariance_prior_ + nk[:, np.newaxis] * ( sk + (self.mean_precision_prior_ / self.mean_precision_)[:, np.newaxis] * np.square(diff))) # Contrary to the original bishop book, we normalize the covariances self.covariances_ /= self.degrees_of_freedom_[:, np.newaxis] def _estimate_wishart_spherical(self, nk, xk, sk): """Estimate the spherical Wishart distribution parameters. Parameters ---------- X : array-like, shape (n_samples, n_features) nk : array-like, shape (n_components,) xk : array-like, shape (n_components, n_features) sk : array-like, shape (n_components,) """ _, n_features = xk.shape # Warning : in some Bishop book, there is a typo on the formula 10.63 # `degrees_of_freedom_k = degrees_of_freedom_0 + Nk` # is the correct formula self.degrees_of_freedom_ = self.degrees_of_freedom_prior_ + nk diff = xk - self.mean_prior_ self.covariances_ = ( self.covariance_prior_ + nk * ( sk + self.mean_precision_prior_ / self.mean_precision_ * np.mean(np.square(diff), 1))) # Contrary to the original bishop book, we normalize the covariances self.covariances_ /= self.degrees_of_freedom_ def _check_is_fitted(self): check_is_fitted(self, ['weight_concentration_', 'mean_precision_', 'means_', 'degrees_of_freedom_', 'covariances_', 'precisions_', 'precisions_cholesky_']) def _m_step(self, X, log_resp): """M step. Parameters ---------- X : array-like, shape (n_samples, n_features) log_resp : array-like, shape (n_samples, n_components) Logarithm of the posterior probabilities (or responsibilities) of the point of each sample in X. """ n_samples, _ = X.shape nk, xk, sk = _estimate_gaussian_parameters( X, np.exp(log_resp), self.reg_covar, self.covariance_type) self._estimate_weights(nk) self._estimate_means(nk, xk) self._estimate_precisions(nk, xk, sk) def _estimate_log_weights(self): if self.weight_concentration_prior_type == 'dirichlet_process': digamma_sum = digamma(self.weight_concentration_[0] + self.weight_concentration_[1]) digamma_a = digamma(self.weight_concentration_[0]) digamma_b = digamma(self.weight_concentration_[1]) return (digamma_a - digamma_sum + np.hstack((0, np.cumsum(digamma_b - digamma_sum)[:-1]))) else: # case Variationnal Gaussian mixture with dirichlet distribution return (digamma(self.weight_concentration_) - digamma(np.sum(self.weight_concentration_))) def _estimate_log_prob(self, X): _, n_features = X.shape # We remove `n_features * np.log(self.degrees_of_freedom_)` because # the precision matrix is normalized log_gauss = (_estimate_log_gaussian_prob( X, self.means_, self.precisions_cholesky_, self.covariance_type) - .5 * n_features * np.log(self.degrees_of_freedom_)) log_lambda = n_features * np.log(2.) + np.sum(digamma( .5 * (self.degrees_of_freedom_ - np.arange(0, n_features)[:, np.newaxis])), 0) return log_gauss + .5 * (log_lambda - n_features / self.mean_precision_) def _compute_lower_bound(self, log_resp, log_prob_norm): """Estimate the lower bound of the model. 
The lower bound on the likelihood (of the training data with respect to the model) is used to detect the convergence and has to decrease at each iteration. Parameters ---------- X : array-like, shape (n_samples, n_features) log_resp : array, shape (n_samples, n_components) Logarithm of the posterior probabilities (or responsibilities) of the point of each sample in X. log_prob_norm : float Logarithm of the probability of each sample in X. Returns ------- lower_bound : float """ # Contrary to the original formula, we have done some simplification # and removed all the constant terms. n_features, = self.mean_prior_.shape # We removed `.5 * n_features * np.log(self.degrees_of_freedom_)` # because the precision matrix is normalized. log_det_precisions_chol = (_compute_log_det_cholesky( self.precisions_cholesky_, self.covariance_type, n_features) - .5 * n_features * np.log(self.degrees_of_freedom_)) if self.covariance_type == 'tied': log_wishart = self.n_components * np.float64(_log_wishart_norm( self.degrees_of_freedom_, log_det_precisions_chol, n_features)) else: log_wishart = np.sum(_log_wishart_norm( self.degrees_of_freedom_, log_det_precisions_chol, n_features)) if self.weight_concentration_prior_type == 'dirichlet_process': log_norm_weight = -np.sum(betaln(self.weight_concentration_[0], self.weight_concentration_[1])) else: log_norm_weight = _log_dirichlet_norm(self.weight_concentration_) return (-np.sum(np.exp(log_resp) * log_resp) - log_wishart - log_norm_weight - 0.5 * n_features * np.sum(np.log(self.mean_precision_))) def _get_parameters(self): return (self.weight_concentration_, self.mean_precision_, self.means_, self.degrees_of_freedom_, self.covariances_, self.precisions_cholesky_) def _set_parameters(self, params): (self.weight_concentration_, self.mean_precision_, self.means_, self.degrees_of_freedom_, self.covariances_, self.precisions_cholesky_) = params # Weights computation if self.weight_concentration_prior_type == "dirichlet_process": weight_dirichlet_sum = (self.weight_concentration_[0] + self.weight_concentration_[1]) tmp = self.weight_concentration_[1] / weight_dirichlet_sum self.weights_ = ( self.weight_concentration_[0] / weight_dirichlet_sum * np.hstack((1, np.cumprod(tmp[:-1])))) self.weights_ /= np.sum(self.weights_) else: self. weights_ = (self.weight_concentration_ / np.sum(self.weight_concentration_)) # Precisions matrices computation if self.covariance_type == 'full': self.precisions_ = np.array([ np.dot(prec_chol, prec_chol.T) for prec_chol in self.precisions_cholesky_]) elif self.covariance_type == 'tied': self.precisions_ = np.dot(self.precisions_cholesky_, self.precisions_cholesky_.T) else: self.precisions_ = self.precisions_cholesky_ ** 2
bsd-3-clause
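The class docstring explains that with a Dirichlet-process prior the model can drive surplus component weights toward zero. A minimal fit sketch illustrating this on synthetic data; the sample sizes, component count, and seeds are arbitrary choices.

# Minimal sketch: fit a Dirichlet-process mixture with more components than
# clusters and inspect which weights the model effectively switches off.
import numpy as np
from sklearn.mixture import BayesianGaussianMixture

rng = np.random.RandomState(0)
X = np.vstack([rng.randn(200, 2) + [0, 0],
               rng.randn(200, 2) + [6, 6]])  # two well-separated blobs

bgmm = BayesianGaussianMixture(
    n_components=8,  # deliberately more than the two true clusters
    weight_concentration_prior_type='dirichlet_process',
    max_iter=200, random_state=0).fit(X)
print(np.round(bgmm.weights_, 3))  # most weights collapse toward zero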
GbalsaC/bitnamiP
venv/lib/python2.7/site-packages/rest_framework/serializers.py
5
41575
""" Serializers and ModelSerializers are similar to Forms and ModelForms. Unlike forms, they are not constrained to dealing with HTML output, and form encoded input. Serialization in REST framework is a two-phase process: 1. Serializers marshal between complex types like model instances, and python primitives. 2. The process of marshalling between python primitives and request and response content is handled by parsers and renderers. """ from __future__ import unicode_literals import copy import datetime import inspect import types from decimal import Decimal from django.contrib.contenttypes.generic import GenericForeignKey from django.core.paginator import Page from django.db import models from django.forms import widgets from django.utils.datastructures import SortedDict from django.core.exceptions import ObjectDoesNotExist from rest_framework.compat import get_concrete_model, six from rest_framework.settings import api_settings # Note: We do the following so that users of the framework can use this style: # # example_field = serializers.CharField(...) # # This helps keep the separation between model fields, form fields, and # serializer fields more explicit. from rest_framework.relations import * # NOQA from rest_framework.fields import * # NOQA def _resolve_model(obj): """ Resolve supplied `obj` to a Django model class. `obj` must be a Django model class itself, or a string representation of one. Useful in situtations like GH #1225 where Django may not have resolved a string-based reference to a model in another model's foreign key definition. String representations should have the format: 'appname.ModelName' """ if isinstance(obj, six.string_types) and len(obj.split('.')) == 2: app_name, model_name = obj.split('.') return models.get_model(app_name, model_name) elif inspect.isclass(obj) and issubclass(obj, models.Model): return obj else: raise ValueError("{0} is not a Django model".format(obj)) def pretty_name(name): """Converts 'first_name' to 'First name'""" if not name: return '' return name.replace('_', ' ').capitalize() class RelationsList(list): _deleted = [] class NestedValidationError(ValidationError): """ The default ValidationError behavior is to stringify each item in the list if the messages are a list of error messages. In the case of nested serializers, where the parent has many children, then the child's `serializer.errors` will be a list of dicts. In the case of a single child, the `serializer.errors` will be a dict. We need to override the default behavior to get properly nested error dicts. """ def __init__(self, message): if isinstance(message, dict): self._messages = [message] else: self._messages = message @property def messages(self): return self._messages class DictWithMetadata(dict): """ A dict-like object, that can have additional properties attached. """ def __getstate__(self): """ Used by pickle (e.g., caching). Overridden to remove the metadata from the dict, since it shouldn't be pickled and may in some instances be unpickleable. """ return dict(self) class SortedDictWithMetadata(SortedDict): """ A sorted dict-like object, that can have additional properties attached. """ def __getstate__(self): """ Used by pickle (e.g., caching). Overriden to remove the metadata from the dict, since it shouldn't be pickle and may in some instances be unpickleable. """ return SortedDict(self).__dict__ def _is_protected_type(obj): """ True if the object is a native datatype that does not need to be serialized further. 
""" return isinstance(obj, ( types.NoneType, int, long, datetime.datetime, datetime.date, datetime.time, float, Decimal, basestring) ) def _get_declared_fields(bases, attrs): """ Create a list of serializer field instances from the passed in 'attrs', plus any fields on the base classes (in 'bases'). Note that all fields from the base classes are used. """ fields = [(field_name, attrs.pop(field_name)) for field_name, obj in list(six.iteritems(attrs)) if isinstance(obj, Field)] fields.sort(key=lambda x: x[1].creation_counter) # If this class is subclassing another Serializer, add that Serializer's # fields. Note that we loop over the bases in *reverse*. This is necessary # in order to maintain the correct order of fields. for base in bases[::-1]: if hasattr(base, 'base_fields'): fields = list(base.base_fields.items()) + fields return SortedDict(fields) class SerializerMetaclass(type): def __new__(cls, name, bases, attrs): attrs['base_fields'] = _get_declared_fields(bases, attrs) return super(SerializerMetaclass, cls).__new__(cls, name, bases, attrs) class SerializerOptions(object): """ Meta class options for Serializer """ def __init__(self, meta): self.depth = getattr(meta, 'depth', 0) self.fields = getattr(meta, 'fields', ()) self.exclude = getattr(meta, 'exclude', ()) class BaseSerializer(WritableField): """ This is the Serializer implementation. We need to implement it as `BaseSerializer` due to metaclass magicks. """ class Meta(object): pass _options_class = SerializerOptions _dict_class = SortedDictWithMetadata def __init__(self, instance=None, data=None, files=None, context=None, partial=False, many=None, allow_add_remove=False, **kwargs): super(BaseSerializer, self).__init__(**kwargs) self.opts = self._options_class(self.Meta) self.parent = None self.root = None self.partial = partial self.many = many self.allow_add_remove = allow_add_remove self.context = context or {} self.init_data = data self.init_files = files self.object = instance self.fields = self.get_fields() self._data = None self._files = None self._errors = None if many and instance is not None and not hasattr(instance, '__iter__'): raise ValueError('instance should be a queryset or other iterable with many=True') if allow_add_remove and not many: raise ValueError('allow_add_remove should only be used for bulk updates, but you have not set many=True') ##### # Methods to determine which fields to use when (de)serializing objects. def get_default_fields(self): """ Return the complete set of default fields for the object, as a dict. """ return {} def get_fields(self): """ Returns the complete set of fields for the object as a dict. This will be the set of any explicitly declared fields, plus the set of fields returned by get_default_fields(). """ ret = SortedDict() # Get the explicitly declared fields base_fields = copy.deepcopy(self.base_fields) for key, field in base_fields.items(): ret[key] = field # Add in the default fields default_fields = self.get_default_fields() for key, val in default_fields.items(): if key not in ret: ret[key] = val # If 'fields' is specified, use those fields, in that order. 

        # If 'fields' is specified, use those fields, in that order.
        if self.opts.fields:
            assert isinstance(self.opts.fields, (list, tuple)), '`fields` must be a list or tuple'
            new = SortedDict()
            for key in self.opts.fields:
                new[key] = ret[key]
            ret = new

        # Remove anything in 'exclude'
        if self.opts.exclude:
            assert isinstance(self.opts.exclude, (list, tuple)), '`exclude` must be a list or tuple'
            for key in self.opts.exclude:
                ret.pop(key, None)

        for key, field in ret.items():
            field.initialize(parent=self, field_name=key)

        return ret

    #####
    # Methods to convert or revert from objects <--> primitive representations.

    def get_field_key(self, field_name):
        """
        Return the key that should be used for a given field.
        """
        return field_name

    def restore_fields(self, data, files):
        """
        Core of deserialization, together with `restore_object`.
        Converts a dictionary of data into a dictionary of deserialized fields.
        """
        reverted_data = {}

        if data is not None and not isinstance(data, dict):
            self._errors['non_field_errors'] = ['Invalid data']
            return None

        for field_name, field in self.fields.items():
            field.initialize(parent=self, field_name=field_name)
            try:
                field.field_from_native(data, files, field_name, reverted_data)
            except ValidationError as err:
                self._errors[field_name] = list(err.messages)

        return reverted_data

    def perform_validation(self, attrs):
        """
        Run `validate_<fieldname>()` and `validate()` methods on the serializer
        """
        for field_name, field in self.fields.items():
            if field_name in self._errors:
                continue

            source = field.source or field_name
            if self.partial and source not in attrs:
                continue
            try:
                validate_method = getattr(self, 'validate_%s' % field_name, None)
                if validate_method:
                    attrs = validate_method(attrs, source)
            except ValidationError as err:
                self._errors[field_name] = self._errors.get(field_name, []) + list(err.messages)

        # If there are already errors, we don't run .validate() because
        # field-validation failed and thus `attrs` may not be complete,
        # which in turn can cause inconsistent validation errors.
        if not self._errors:
            try:
                attrs = self.validate(attrs)
            except ValidationError as err:
                if hasattr(err, 'message_dict'):
                    for field_name, error_messages in err.message_dict.items():
                        self._errors[field_name] = self._errors.get(field_name, []) + list(error_messages)
                elif hasattr(err, 'messages'):
                    self._errors['non_field_errors'] = err.messages

        return attrs

    def validate(self, attrs):
        """
        Stub method, to be overridden in Serializer subclasses
        """
        return attrs

    def restore_object(self, attrs, instance=None):
        """
        Deserialize a dictionary of attributes into an object instance.
        You should override this method to control how deserialized objects
        are instantiated.
        """
        if instance is not None:
            instance.update(attrs)
            return instance
        return attrs

    def to_native(self, obj):
        """
        Serialize objects -> primitives.
        """
        ret = self._dict_class()
        ret.fields = self._dict_class()

        for field_name, field in self.fields.items():
            if field.read_only and obj is None:
                continue
            field.initialize(parent=self, field_name=field_name)
            key = self.get_field_key(field_name)
            value = field.field_to_native(obj, field_name)
            method = getattr(self, 'transform_%s' % field_name, None)
            if callable(method):
                value = method(obj, value)
            if not getattr(field, 'write_only', False):
                ret[key] = value
            ret.fields[key] = self.augment_field(field, field_name, key, value)

        return ret
""" self._errors = {} if data is not None or files is not None: attrs = self.restore_fields(data, files) if attrs is not None: attrs = self.perform_validation(attrs) else: self._errors['non_field_errors'] = ['No input provided'] if not self._errors: return self.restore_object(attrs, instance=getattr(self, 'object', None)) def augment_field(self, field, field_name, key, value): # This horrible stuff is to manage serializers rendering to HTML field._errors = self._errors.get(key) if self._errors else None field._name = field_name field._value = self.init_data.get(key) if self._errors and self.init_data else value if not field.label: field.label = pretty_name(key) return field def field_to_native(self, obj, field_name): """ Override default so that the serializer can be used as a nested field across relationships. """ if self.write_only: return None if self.source == '*': return self.to_native(obj) # Get the raw field value try: source = self.source or field_name value = obj for component in source.split('.'): if value is None: break value = get_component(value, component) except ObjectDoesNotExist: return None if is_simple_callable(getattr(value, 'all', None)): return [self.to_native(item) for item in value.all()] if value is None: return None if self.many is not None: many = self.many else: many = hasattr(value, '__iter__') and not isinstance(value, (Page, dict, six.text_type)) if many: return [self.to_native(item) for item in value] return self.to_native(value) def field_from_native(self, data, files, field_name, into): """ Override default so that the serializer can be used as a writable nested field across relationships. """ if self.read_only: return try: value = data[field_name] except KeyError: if self.default is not None and not self.partial: # Note: partial updates shouldn't set defaults value = copy.deepcopy(self.default) else: if self.required: raise ValidationError(self.error_messages['required']) return if self.source == '*': if value: reverted_data = self.restore_fields(value, {}) if not self._errors: into.update(reverted_data) else: if value in (None, ''): into[(self.source or field_name)] = None else: # Set the serializer object if it exists obj = get_component(self.parent.object, self.source or field_name) if self.parent.object else None # If we have a model manager or similar object then we need # to iterate through each instance. if (self.many and not hasattr(obj, '__iter__') and is_simple_callable(getattr(obj, 'all', None))): obj = obj.all() kwargs = { 'instance': obj, 'data': value, 'context': self.context, 'partial': self.partial, 'many': self.many, 'allow_add_remove': self.allow_add_remove } serializer = self.__class__(**kwargs) if serializer.is_valid(): into[self.source or field_name] = serializer.object else: # Propagate errors up to our parent raise NestedValidationError(serializer.errors) def get_identity(self, data): """ This hook is required for bulk update. It is used to determine the canonical identity of a given object. Note that the data has not been validated at this point, so we need to make sure that we catch any cases of incorrect datatypes being passed to this method. """ try: return data.get('id', None) except AttributeError: return None @property def errors(self): """ Run deserialization and return error data, setting self.object if no errors occurred. 
""" if self._errors is None: data, files = self.init_data, self.init_files if self.many is not None: many = self.many else: many = hasattr(data, '__iter__') and not isinstance(data, (Page, dict, six.text_type)) if many: warnings.warn('Implicit list/queryset serialization is deprecated. ' 'Use the `many=True` flag when instantiating the serializer.', DeprecationWarning, stacklevel=3) if many: ret = RelationsList() errors = [] update = self.object is not None if update: # If this is a bulk update we need to map all the objects # to a canonical identity so we can determine which # individual object is being updated for each item in the # incoming data objects = self.object identities = [self.get_identity(self.to_native(obj)) for obj in objects] identity_to_objects = dict(zip(identities, objects)) if hasattr(data, '__iter__') and not isinstance(data, (dict, six.text_type)): for item in data: if update: # Determine which object we're updating identity = self.get_identity(item) self.object = identity_to_objects.pop(identity, None) if self.object is None and not self.allow_add_remove: ret.append(None) errors.append({'non_field_errors': ['Cannot create a new item, only existing items may be updated.']}) continue ret.append(self.from_native(item, None)) errors.append(self._errors) if update and self.allow_add_remove: ret._deleted = identity_to_objects.values() self._errors = any(errors) and errors or [] else: self._errors = {'non_field_errors': ['Expected a list of items.']} else: ret = self.from_native(data, files) if not self._errors: self.object = ret return self._errors def is_valid(self): return not self.errors @property def data(self): """ Returns the serialized data on the serializer. """ if self._data is None: obj = self.object if self.many is not None: many = self.many else: many = hasattr(obj, '__iter__') and not isinstance(obj, (Page, dict)) if many: warnings.warn('Implicit list/queryset serialization is deprecated. ' 'Use the `many=True` flag when instantiating the serializer.', DeprecationWarning, stacklevel=2) if many: self._data = [self.to_native(item) for item in obj] else: self._data = self.to_native(obj) return self._data def save_object(self, obj, **kwargs): obj.save(**kwargs) def delete_object(self, obj): obj.delete() def save(self, **kwargs): """ Save the deserialized object and return it. """ # Clear cached _data, which may be invalidated by `save()` self._data = None if isinstance(self.object, list): [self.save_object(item, **kwargs) for item in self.object] if self.object._deleted: [self.delete_object(item) for item in self.object._deleted] else: self.save_object(self.object, **kwargs) return self.object def metadata(self): """ Return a dictionary of metadata about the fields on the serializer. Useful for things like responding to OPTIONS requests, or generating API schemas for auto-documentation. """ return SortedDict( [(field_name, field.metadata()) for field_name, field in six.iteritems(self.fields)] ) class Serializer(six.with_metaclass(SerializerMetaclass, BaseSerializer)): pass class ModelSerializerOptions(SerializerOptions): """ Meta class options for ModelSerializer """ def __init__(self, meta): super(ModelSerializerOptions, self).__init__(meta) self.model = getattr(meta, 'model', None) self.read_only_fields = getattr(meta, 'read_only_fields', ()) self.write_only_fields = getattr(meta, 'write_only_fields', ()) class ModelSerializer(Serializer): """ A serializer that deals with model instances and querysets. 
""" _options_class = ModelSerializerOptions field_mapping = { models.AutoField: IntegerField, models.FloatField: FloatField, models.IntegerField: IntegerField, models.PositiveIntegerField: IntegerField, models.SmallIntegerField: IntegerField, models.PositiveSmallIntegerField: IntegerField, models.DateTimeField: DateTimeField, models.DateField: DateField, models.TimeField: TimeField, models.DecimalField: DecimalField, models.EmailField: EmailField, models.CharField: CharField, models.URLField: URLField, models.SlugField: SlugField, models.TextField: CharField, models.CommaSeparatedIntegerField: CharField, models.BooleanField: BooleanField, models.NullBooleanField: BooleanField, models.FileField: FileField, models.ImageField: ImageField, } def get_default_fields(self): """ Return all the fields that should be serialized for the model. """ cls = self.opts.model assert cls is not None, \ "Serializer class '%s' is missing 'model' Meta option" % self.__class__.__name__ opts = get_concrete_model(cls)._meta ret = SortedDict() nested = bool(self.opts.depth) # Deal with adding the primary key field pk_field = opts.pk while pk_field.rel and pk_field.rel.parent_link: # If model is a child via multitable inheritance, use parent's pk pk_field = pk_field.rel.to._meta.pk field = self.get_pk_field(pk_field) if field: ret[pk_field.name] = field # Deal with forward relationships forward_rels = [field for field in opts.fields if field.serialize] forward_rels += [field for field in opts.many_to_many if field.serialize] for model_field in forward_rels: has_through_model = False if model_field.rel: to_many = isinstance(model_field, models.fields.related.ManyToManyField) related_model = _resolve_model(model_field.rel.to) if to_many and not model_field.rel.through._meta.auto_created: has_through_model = True if model_field.rel and nested: if len(inspect.getargspec(self.get_nested_field).args) == 2: warnings.warn( 'The `get_nested_field(model_field)` call signature ' 'is due to be deprecated. ' 'Use `get_nested_field(model_field, related_model, ' 'to_many) instead', PendingDeprecationWarning ) field = self.get_nested_field(model_field) else: field = self.get_nested_field(model_field, related_model, to_many) elif model_field.rel: if len(inspect.getargspec(self.get_nested_field).args) == 3: warnings.warn( 'The `get_related_field(model_field, to_many)` call ' 'signature is due to be deprecated. 
            elif model_field.rel:
                if len(inspect.getargspec(self.get_related_field).args) == 3:
                    warnings.warn(
                        'The `get_related_field(model_field, to_many)` call '
                        'signature is due to be deprecated. '
                        'Use `get_related_field(model_field, related_model, '
                        'to_many)` instead',
                        PendingDeprecationWarning
                    )
                    field = self.get_related_field(model_field, to_many=to_many)
                else:
                    field = self.get_related_field(model_field, related_model, to_many)
            else:
                field = self.get_field(model_field)

            if field:
                if has_through_model:
                    field.read_only = True

                ret[model_field.name] = field

        # Deal with reverse relationships
        if not self.opts.fields:
            reverse_rels = []
        else:
            # Reverse relationships are only included if they are explicitly
            # present in the `fields` option on the serializer
            reverse_rels = opts.get_all_related_objects()
            reverse_rels += opts.get_all_related_many_to_many_objects()

        for relation in reverse_rels:
            accessor_name = relation.get_accessor_name()
            if not self.opts.fields or accessor_name not in self.opts.fields:
                continue
            related_model = relation.model
            to_many = relation.field.rel.multiple
            has_through_model = False
            is_m2m = isinstance(relation.field,
                                models.fields.related.ManyToManyField)

            if (is_m2m and
                    hasattr(relation.field.rel, 'through') and
                    not relation.field.rel.through._meta.auto_created):
                has_through_model = True

            if nested:
                field = self.get_nested_field(None, related_model, to_many)
            else:
                field = self.get_related_field(None, related_model, to_many)

            if field:
                if has_through_model:
                    field.read_only = True

                ret[accessor_name] = field

        # Ensure that 'read_only_fields' is an iterable
        assert isinstance(self.opts.read_only_fields, (list, tuple)), '`read_only_fields` must be a list or tuple'

        # Add the `read_only` flag to any fields that have been specified
        # in the `read_only_fields` option
        for field_name in self.opts.read_only_fields:
            assert field_name not in self.base_fields.keys(), (
                "field '%s' on serializer '%s' specified in "
                "`read_only_fields`, but also added "
                "as an explicit field.  Remove it from `read_only_fields`." %
                (field_name, self.__class__.__name__))
            assert field_name in ret, (
                "Non-existent field '%s' specified in `read_only_fields` "
                "on serializer '%s'." %
                (field_name, self.__class__.__name__))
            ret[field_name].read_only = True

        # Ensure that 'write_only_fields' is an iterable
        assert isinstance(self.opts.write_only_fields, (list, tuple)), '`write_only_fields` must be a list or tuple'

        for field_name in self.opts.write_only_fields:
            assert field_name not in self.base_fields.keys(), (
                "field '%s' on serializer '%s' specified in "
                "`write_only_fields`, but also added "
                "as an explicit field.  Remove it from `write_only_fields`." %
                (field_name, self.__class__.__name__))
            assert field_name in ret, (
                "Non-existent field '%s' specified in `write_only_fields` "
                "on serializer '%s'." %
                (field_name, self.__class__.__name__))
            ret[field_name].write_only = True

        return ret

    def get_pk_field(self, model_field):
        """
        Returns a default instance of the pk field.
        """
        return self.get_field(model_field)

    def get_nested_field(self, model_field, related_model, to_many):
        """
        Creates a default instance of a nested relational field.

        Note that model_field will be `None` for reverse relationships.
        """
        class NestedModelSerializer(ModelSerializer):
            class Meta:
                model = related_model
                depth = self.opts.depth - 1

        return NestedModelSerializer(many=to_many)
""" # TODO: filter queryset using: # .using(db).complex_filter(self.rel.limit_choices_to) kwargs = { 'queryset': related_model._default_manager, 'many': to_many } if model_field: kwargs['required'] = not(model_field.null or model_field.blank) if model_field.help_text is not None: kwargs['help_text'] = model_field.help_text if model_field.verbose_name is not None: kwargs['label'] = model_field.verbose_name if not model_field.editable: kwargs['read_only'] = True if model_field.verbose_name is not None: kwargs['label'] = model_field.verbose_name if model_field.help_text is not None: kwargs['help_text'] = model_field.help_text return PrimaryKeyRelatedField(**kwargs) def get_field(self, model_field): """ Creates a default instance of a basic non-relational field. """ kwargs = {} if model_field.null or model_field.blank: kwargs['required'] = False if isinstance(model_field, models.AutoField) or not model_field.editable: kwargs['read_only'] = True if model_field.has_default(): kwargs['default'] = model_field.get_default() if issubclass(model_field.__class__, models.TextField): kwargs['widget'] = widgets.Textarea if model_field.verbose_name is not None: kwargs['label'] = model_field.verbose_name if model_field.help_text is not None: kwargs['help_text'] = model_field.help_text # TODO: TypedChoiceField? if model_field.flatchoices: # This ModelField contains choices kwargs['choices'] = model_field.flatchoices if model_field.null: kwargs['empty'] = None return ChoiceField(**kwargs) # put this below the ChoiceField because min_value isn't a valid initializer if issubclass(model_field.__class__, models.PositiveIntegerField) or\ issubclass(model_field.__class__, models.PositiveSmallIntegerField): kwargs['min_value'] = 0 attribute_dict = { models.CharField: ['max_length'], models.CommaSeparatedIntegerField: ['max_length'], models.DecimalField: ['max_digits', 'decimal_places'], models.EmailField: ['max_length'], models.FileField: ['max_length'], models.ImageField: ['max_length'], models.SlugField: ['max_length'], models.URLField: ['max_length'], } if model_field.__class__ in attribute_dict: attributes = attribute_dict[model_field.__class__] for attribute in attributes: kwargs.update({attribute: getattr(model_field, attribute)}) try: return self.field_mapping[model_field.__class__](**kwargs) except KeyError: return ModelField(model_field=model_field, **kwargs) def get_validation_exclusions(self, instance=None): """ Return a list of field names to exclude from model validation. """ cls = self.opts.model opts = get_concrete_model(cls)._meta exclusions = [field.name for field in opts.fields + opts.many_to_many] for field_name, field in self.fields.items(): field_name = field.source or field_name if field_name in exclusions \ and not field.read_only \ and (field.required or hasattr(instance, field_name)) \ and not isinstance(field, Serializer): exclusions.remove(field_name) return exclusions def full_clean(self, instance): """ Perform Django's full_clean, and populate the `errors` dictionary if any validation errors occur. Note that we don't perform this inside the `.restore_object()` method, so that subclasses can override `.restore_object()`, and still get the full_clean validation checking. """ try: instance.full_clean(exclude=self.get_validation_exclusions(instance)) except ValidationError as err: self._errors = err.message_dict return None return instance def restore_object(self, attrs, instance=None): """ Restore the model instance. 
""" m2m_data = {} related_data = {} nested_forward_relations = {} meta = self.opts.model._meta # Reverse fk or one-to-one relations for (obj, model) in meta.get_all_related_objects_with_model(): field_name = obj.get_accessor_name() if field_name in attrs: related_data[field_name] = attrs.pop(field_name) # Reverse m2m relations for (obj, model) in meta.get_all_related_m2m_objects_with_model(): field_name = obj.get_accessor_name() if field_name in attrs: m2m_data[field_name] = attrs.pop(field_name) # Forward m2m relations for field in meta.many_to_many + meta.virtual_fields: if isinstance(field, GenericForeignKey): continue if field.name in attrs: m2m_data[field.name] = attrs.pop(field.name) # Nested forward relations - These need to be marked so we can save # them before saving the parent model instance. for field_name in attrs.keys(): if isinstance(self.fields.get(field_name, None), Serializer): nested_forward_relations[field_name] = attrs[field_name] # Create an empty instance of the model if instance is None: instance = self.opts.model() for key, val in attrs.items(): try: setattr(instance, key, val) except ValueError: self._errors[key] = self.error_messages['required'] # Any relations that cannot be set until we've # saved the model get hidden away on these # private attributes, so we can deal with them # at the point of save. instance._related_data = related_data instance._m2m_data = m2m_data instance._nested_forward_relations = nested_forward_relations return instance def from_native(self, data, files): """ Override the default method to also include model field validation. """ instance = super(ModelSerializer, self).from_native(data, files) if not self._errors: return self.full_clean(instance) def save_object(self, obj, **kwargs): """ Save the deserialized object. """ if getattr(obj, '_nested_forward_relations', None): # Nested relationships need to be saved before we can save the # parent instance. 

    def save_object(self, obj, **kwargs):
        """
        Save the deserialized object.
        """
        if getattr(obj, '_nested_forward_relations', None):
            # Nested relationships need to be saved before we can save the
            # parent instance.
            for field_name, sub_object in obj._nested_forward_relations.items():
                if sub_object:
                    self.save_object(sub_object)
                setattr(obj, field_name, sub_object)

        obj.save(**kwargs)

        if getattr(obj, '_m2m_data', None):
            for accessor_name, object_list in obj._m2m_data.items():
                setattr(obj, accessor_name, object_list)
            del(obj._m2m_data)

        if getattr(obj, '_related_data', None):
            related_fields = dict([
                (field.get_accessor_name(), field)
                for field, model
                in obj._meta.get_all_related_objects_with_model()
            ])
            for accessor_name, related in obj._related_data.items():
                if isinstance(related, RelationsList):
                    # Nested reverse fk relationship
                    for related_item in related:
                        fk_field = related_fields[accessor_name].field.name
                        setattr(related_item, fk_field, obj)
                        self.save_object(related_item)

                    # Delete any removed objects
                    if related._deleted:
                        [self.delete_object(item) for item in related._deleted]

                elif isinstance(related, models.Model):
                    # Nested reverse one-one relationship
                    fk_field = obj._meta.get_field_by_name(accessor_name)[0].field.name
                    setattr(related, fk_field, obj)
                    self.save_object(related)
                else:
                    # Reverse FK or reverse one-one
                    setattr(obj, accessor_name, related)
            del(obj._related_data)


class HyperlinkedModelSerializerOptions(ModelSerializerOptions):
    """
    Options for HyperlinkedModelSerializer
    """
    def __init__(self, meta):
        super(HyperlinkedModelSerializerOptions, self).__init__(meta)
        self.view_name = getattr(meta, 'view_name', None)
        self.lookup_field = getattr(meta, 'lookup_field', None)
        self.url_field_name = getattr(meta, 'url_field_name', api_settings.URL_FIELD_NAME)


class HyperlinkedModelSerializer(ModelSerializer):
    """
    A subclass of ModelSerializer that uses hyperlinked relationships,
    instead of primary key relationships.
    """
    _options_class = HyperlinkedModelSerializerOptions
    _default_view_name = '%(model_name)s-detail'
    _hyperlink_field_class = HyperlinkedRelatedField
    _hyperlink_identify_field_class = HyperlinkedIdentityField

    def get_default_fields(self):
        fields = super(HyperlinkedModelSerializer, self).get_default_fields()

        if self.opts.view_name is None:
            self.opts.view_name = self._get_default_view_name(self.opts.model)

        if self.opts.url_field_name not in fields:
            url_field = self._hyperlink_identify_field_class(
                view_name=self.opts.view_name,
                lookup_field=self.opts.lookup_field
            )
            ret = self._dict_class()
            ret[self.opts.url_field_name] = url_field
            ret.update(fields)
            fields = ret

        return fields

    def get_pk_field(self, model_field):
        if self.opts.fields and model_field.name in self.opts.fields:
            return self.get_field(model_field)

    def get_related_field(self, model_field, related_model, to_many):
        """
        Creates a default instance of a flat relational field.
        """
        # TODO: filter queryset using:
        # .using(db).complex_filter(self.rel.limit_choices_to)
        kwargs = {
            'queryset': related_model._default_manager,
            'view_name': self._get_default_view_name(related_model),
            'many': to_many
        }

        if model_field:
            kwargs['required'] = not(model_field.null or model_field.blank)
            if model_field.help_text is not None:
                kwargs['help_text'] = model_field.help_text
            if model_field.verbose_name is not None:
                kwargs['label'] = model_field.verbose_name

        if self.opts.lookup_field:
            kwargs['lookup_field'] = self.opts.lookup_field

        return self._hyperlink_field_class(**kwargs)
""" try: return data.get(self.opts.url_field_name, None) except AttributeError: return None def _get_default_view_name(self, model): """ Return the view name to use if 'view_name' is not specified in 'Meta' """ model_meta = model._meta format_kwargs = { 'app_label': model_meta.app_label, 'model_name': model_meta.object_name.lower() } return self._default_view_name % format_kwargs
agpl-3.0