repo_name
stringlengths 5
100
| path
stringlengths 4
375
| copies
stringclasses 991
values | size
stringlengths 4
7
| content
stringlengths 666
1M
| license
stringclasses 15
values |
---|---|---|---|---|---|
PXke/invenio | invenio/legacy/refextract/xml.py | 4 | 35251 | # -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
from __future__ import absolute_import
import re
from xml.sax.saxutils import escape as encode_for_xml
from datetime import datetime
from invenio.legacy.refextract.regexs import re_num
from invenio.legacy.docextract.utils import write_message
from invenio.legacy.refextract.config import \
CFG_REFEXTRACT_TAG_ID_REFERENCE, \
CFG_REFEXTRACT_IND1_REFERENCE, \
CFG_REFEXTRACT_IND2_REFERENCE, \
CFG_REFEXTRACT_SUBFIELD_MARKER, \
CFG_REFEXTRACT_SUBFIELD_AUTH, \
CFG_REFEXTRACT_SUBFIELD_TITLE, \
CFG_REFEXTRACT_SUBFIELD_MISC, \
CGF_REFEXTRACT_SEMI_COLON_MISC_TEXT_SENSITIVITY, \
CFG_REFEXTRACT_SUBFIELD_REPORT_NUM, \
CFG_REFEXTRACT_XML_RECORD_OPEN, \
CFG_REFEXTRACT_CTRL_FIELD_RECID, \
CFG_REFEXTRACT_TAG_ID_EXTRACTION_STATS, \
CFG_REFEXTRACT_IND1_EXTRACTION_STATS, \
CFG_REFEXTRACT_IND2_EXTRACTION_STATS, \
CFG_REFEXTRACT_SUBFIELD_EXTRACTION_STATS, \
CFG_REFEXTRACT_SUBFIELD_EXTRACTION_TIME, \
CFG_REFEXTRACT_SUBFIELD_EXTRACTION_VERSION, \
CFG_REFEXTRACT_VERSION, \
CFG_REFEXTRACT_XML_RECORD_CLOSE, \
CFG_REFEXTRACT_SUBFIELD_URL_DESCR, \
CFG_REFEXTRACT_SUBFIELD_URL, \
CFG_REFEXTRACT_SUBFIELD_DOI, \
CGF_REFEXTRACT_ADJACENT_AUTH_MISC_SEPARATION, \
CFG_REFEXTRACT_SUBFIELD_QUOTED, \
CFG_REFEXTRACT_SUBFIELD_ISBN, \
CFG_REFEXTRACT_SUBFIELD_PUBLISHER, \
CFG_REFEXTRACT_SUBFIELD_YEAR, \
CFG_REFEXTRACT_SUBFIELD_BOOK
from invenio import config
CFG_INSPIRE_SITE = getattr(config, 'CFG_INSPIRE_SITE', False)
def format_marker(line_marker):
    """Reduce a reference-line marker to its numeric core.

    If the marker (e.g. "[19]") contains a number matched by ``re_num``,
    return only that matched part; otherwise return the marker unchanged.
    Falsy markers (empty string / None) are returned as-is.
    """
    if not line_marker:
        return line_marker
    num_match = re_num.search(line_marker)
    return num_match.group(0) if num_match else line_marker
def create_xml_record(counts, recid, xml_lines, status_code=0):
    """Build the complete MARC XML record for a document's extracted references.

    The output takes the following structure:
        <record>
            <controlfield tag="001">recid</controlfield>
            <datafield tag="999" ind1="C" ind2="5">...</datafield>   (one per reference)
            [...]
            <datafield tag="999" ind1="C" ind2="6">
                <subfield code="a">
                    Invenio/X.XX.X refextract/X.XX.X-timestamp-err-repnum-title-URL-misc
                </subfield>
            </datafield>
        </record>

    @param counts: (dict) recognition statistics with keys 'reportnum',
        'title', 'auth_group', 'url', 'doi' and 'misc' -- the number of
        citations of each kind found in the document's reference lines.
        (The docstring previously described separate count_* parameters
        that do not exist in the signature.)
    @param recid: (int) the record-id of the given document (001 field).
    @param xml_lines: (list) of strings; each contains a group of MARC XML
        999C5 datafields, making up a single reference line.
    @param status_code: (int) status of reference-extraction for the given
        record: 0 = no error; 1 = error.
    @return: (string) the entire MARC XML textual output, plus recognition
        statistics.
    """
    out = []
    ## Start with the opening record tag.
    ## BUGFIX: the previous code did `out += u"..."` on a list, which
    ## extends the list with the string's individual characters; append()
    ## whole strings instead (the final ''.join() result is identical,
    ## but without thousands of one-character list entries).
    out.append(u"%(record-open)s\n"
               % {'record-open': CFG_REFEXTRACT_XML_RECORD_OPEN, })
    ## The record-id controlfield:
    out.append(u""" <controlfield tag="%(cf-tag-recid)s">%(recid)d</controlfield>\n"""
               % {'cf-tag-recid': CFG_REFEXTRACT_CTRL_FIELD_RECID,
                  'recid': recid,
                  })
    ## All pre-built reference-line datafields:
    out.extend(xml_lines)
    ## The 999C6 status/stats subfields:
    out.append(u""" <datafield tag="%(df-tag-ref-stats)s" ind1="%(df-ind1-ref-stats)s" ind2="%(df-ind2-ref-stats)s">
<subfield code="%(sf-code-ref-stats)s">%(status)s-%(reportnum)s-%(title)s-%(author)s-%(url)s-%(doi)s-%(misc)s</subfield>
<subfield code="%(sf-code-ref-time)s">%(timestamp)s</subfield>
<subfield code="%(sf-code-ref-version)s">%(version)s</subfield>
</datafield>\n"""
               % {'df-tag-ref-stats': CFG_REFEXTRACT_TAG_ID_EXTRACTION_STATS,
                  'df-ind1-ref-stats': CFG_REFEXTRACT_IND1_EXTRACTION_STATS,
                  'df-ind2-ref-stats': CFG_REFEXTRACT_IND2_EXTRACTION_STATS,
                  'sf-code-ref-stats': CFG_REFEXTRACT_SUBFIELD_EXTRACTION_STATS,
                  'sf-code-ref-time': CFG_REFEXTRACT_SUBFIELD_EXTRACTION_TIME,
                  'sf-code-ref-version': CFG_REFEXTRACT_SUBFIELD_EXTRACTION_VERSION,
                  'version': CFG_REFEXTRACT_VERSION,
                  'timestamp': datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
                  'status': status_code,
                  'reportnum': counts['reportnum'],
                  'title': counts['title'],
                  'author': counts['auth_group'],
                  'url': counts['url'],
                  'doi': counts['doi'],
                  'misc': counts['misc'],
                  })
    ## Now add the closing tag to the record:
    out.append(u"%(record-close)s\n"
               % {'record-close': CFG_REFEXTRACT_XML_RECORD_CLOSE, })
    ## Be sure to call this BEFORE compress_subfields
    out = filter_processed_references(''.join(out))
    ## Compress multiple 'm' subfields in a datafield
    out = compress_subfields(out, CFG_REFEXTRACT_SUBFIELD_MISC)
    ## Compress multiple 'h' subfields in a datafield
    out = compress_subfields(out, CFG_REFEXTRACT_SUBFIELD_AUTH)
    return out
def build_xml_citations(splitted_citations, line_marker):
    """Render each split citation as its own MARC XML reference line.

    @param splitted_citations: (list of lists) citation elements, one inner
        list per citation produced by the splitting heuristics.
    @param line_marker: (string) the marker shared by every produced line.
    @return: (list) one MARC XML string per citation.
    """
    xml_citations = []
    for citation_elements in splitted_citations:
        xml_citations.append(build_xml_citation(citation_elements, line_marker))
    return xml_citations
def build_xml_citation(citation_elements, line_marker, inspire_format=None):
    """ Create the MARC-XML string of the found reference information which was taken
    from a tagged reference line.
    @param citation_elements: (list) an ordered list of dictionary elements,
    with each element corresponding to a found piece of information from a reference line.
    Each dict has a 'type' key (JOURNAL/REPORTNUMBER/URL/DOI/AUTH/QUOTED/ISBN/
    BOOK/PUBLISHER/YEAR) plus a 'misc_txt' key and type-specific keys.
    @param line_marker: (string) The line marker for this single reference line (e.g. [19])
    @param inspire_format: (bool or None) render journal titles in the INSPIRE
    "title,volume,page" form when true; defaults to CFG_INSPIRE_SITE when None.
    @return xml_line: (string) The MARC-XML representation of the list of reference elements
    """
    if inspire_format is None:
        inspire_format = CFG_INSPIRE_SITE
    ## Begin the datafield element
    xml_line = start_datafield_element(line_marker)
    ## This will hold the ordering of tags which have been appended to the xml line
    ## This list will be used to control the decisions involving the creation of new citation lines
    ## (in the event of a new set of authors being recognised, or strange title ordering...)
    line_elements = []
    ## This is a list which will hold the current 'over-view' of a single reference line,
    ## as a list of lists, where each list corresponds to the contents of a datafield element
    ## in the xml mark-up
    citation_structure = []
    ## Author element held back so it can be repeated in front of a later IBID
    auth_for_ibid = None
    for element in citation_elements:
        ## Before going onto checking 'what' the next element is, handle misc text and semi-colons
        ## Multiple misc text subfields will be compressed later
        ## This will also be the only part of the code that deals with MISC tag_typed elements
        if element['misc_txt'].strip(".,:;- []"):
            xml_line = append_subfield_element(xml_line,
                                               CFG_REFEXTRACT_SUBFIELD_MISC,
                                               element['misc_txt'].strip(".,:;- []"))
        # Now handle the type dependent actions
        # TITLE
        if element['type'] == "JOURNAL":
            # Select the journal title output format
            if inspire_format:
                # ADD to current datafield (INSPIRE style: "title,volume,page")
                xml_line += """
<subfield code="%(sf-code-ref-title)s">%(title)s,%(volume)s,%(page)s</subfield>""" \
                    % {'sf-code-ref-title': CFG_REFEXTRACT_SUBFIELD_TITLE,
                       'title': encode_for_xml(element['title']),
                       'volume': encode_for_xml(element['volume']),
                       'page': encode_for_xml(element['page']),
                       }
            else:
                # ADD to current datafield (default style: "title volume (year) page")
                xml_line += """
<subfield code="%(sf-code-ref-title)s">%(title)s %(volume)s (%(year)s) %(page)s</subfield>""" \
                    % {'sf-code-ref-title': CFG_REFEXTRACT_SUBFIELD_TITLE,
                       'title': encode_for_xml(element['title']),
                       'volume': encode_for_xml(element['volume']),
                       'year': encode_for_xml(element['year']),
                       'page': encode_for_xml(element['page']),
                       }
            # Now, if there are any extra (numeration based) IBID's after this title
            if len(element['extra_ibids']) > 0:
                # At least one IBID is present, these are to be outputted each into their own datafield
                for ibid in element['extra_ibids']:
                    # %%%%% Set as NEW citation line %%%%%
                    (xml_line, auth_for_ibid) = append_datafield_element(line_marker,
                                                                        citation_structure,
                                                                        line_elements,
                                                                        auth_for_ibid,
                                                                        xml_line)
                    if inspire_format:
                        xml_line += """
<subfield code="%(sf-code-ref-title)s">%(title)s,%(volume)s,%(page)s</subfield>""" \
                            % {'sf-code-ref-title': CFG_REFEXTRACT_SUBFIELD_TITLE,
                               'title': encode_for_xml(ibid['title']),
                               'volume': encode_for_xml(ibid['volume']),
                               'page': encode_for_xml(ibid['page']),
                               }
                    else:
                        xml_line += """
<subfield code="%(sf-code-ref-title)s">%(title)s %(volume)s (%(year)s) %(page)s</subfield>""" \
                            % {'sf-code-ref-title': CFG_REFEXTRACT_SUBFIELD_TITLE,
                               'title': encode_for_xml(ibid['title']),
                               'volume': encode_for_xml(ibid['volume']),
                               'year': encode_for_xml(ibid['year']),
                               'page': encode_for_xml(ibid['page']),
                               }
            # Add a Title element to the past elements list, since we last found an IBID
            line_elements.append(element)
        # REPORT NUMBER
        elif element['type'] == "REPORTNUMBER":
            # ADD to current datafield
            xml_line = append_subfield_element(xml_line,
                                               CFG_REFEXTRACT_SUBFIELD_REPORT_NUM,
                                               element['report_num'])
            line_elements.append(element)
        # URL
        elif element['type'] == "URL":
            if element['url_string'] == element['url_desc']:
                # Build the datafield for the URL segment of the reference line:
                xml_line = append_subfield_element(xml_line,
                                                   CFG_REFEXTRACT_SUBFIELD_URL,
                                                   element['url_string'])
            # Else, in the case that the url string and the description differ in some way, include them both
            else:
                # Build the datafield for the URL segment of the reference line:
                xml_line += """
<subfield code="%(sf-code-ref-url)s">%(url)s</subfield>
<subfield code="%(sf-code-ref-url-desc)s">%(url-desc)s</subfield>""" \
                    % {'sf-code-ref-url': CFG_REFEXTRACT_SUBFIELD_URL,
                       'sf-code-ref-url-desc': CFG_REFEXTRACT_SUBFIELD_URL_DESCR,
                       'url': encode_for_xml(element['url_string']),
                       'url-desc': encode_for_xml(element['url_desc'])
                       }
            line_elements.append(element)
        # DOI
        elif element['type'] == "DOI":
            ## Split on hitting another DOI in the same line
            if is_in_line_elements("DOI", line_elements):
                ## %%%%% Set as NEW citation line %%%%%
                xml_line, auth_for_ibid = append_datafield_element(line_marker,
                                                                   citation_structure,
                                                                   line_elements,
                                                                   auth_for_ibid,
                                                                   xml_line)
            xml_line = append_subfield_element(xml_line,
                                               CFG_REFEXTRACT_SUBFIELD_DOI,
                                               element['doi_string'])
            line_elements.append(element)
        # AUTHOR
        elif element['type'] == "AUTH":
            value = element['auth_txt']
            if element['auth_type'] == 'incl':
                # 'incl' (included) author groups are wrapped in parentheses
                value = "(%s)" % value
            ## A second, non-adjacent author group in the same datafield is
            ## demoted to misc text rather than output as another $h.
            if is_in_line_elements("AUTH", line_elements) and line_elements[-1]['type'] != "AUTH":
                xml_line = append_subfield_element(xml_line,
                                                   CFG_REFEXTRACT_SUBFIELD_MISC,
                                                   value)
            else:
                xml_line = append_subfield_element(xml_line,
                                                   CFG_REFEXTRACT_SUBFIELD_AUTH,
                                                   value)
            line_elements.append(element)
        elif element['type'] == "QUOTED":
            xml_line = append_subfield_element(xml_line,
                                               CFG_REFEXTRACT_SUBFIELD_QUOTED,
                                               element['title'])
            line_elements.append(element)
        elif element['type'] == "ISBN":
            xml_line = append_subfield_element(xml_line,
                                               CFG_REFEXTRACT_SUBFIELD_ISBN,
                                               element['ISBN'])
            line_elements.append(element)
        elif element['type'] == "BOOK":
            # A book is output as a quoted title plus an empty "book" flag subfield
            xml_line = append_subfield_element(xml_line,
                                               CFG_REFEXTRACT_SUBFIELD_QUOTED,
                                               element['title'])
            xml_line += '\n <subfield code="%s" />' % \
                CFG_REFEXTRACT_SUBFIELD_BOOK
            line_elements.append(element)
        elif element['type'] == "PUBLISHER":
            xml_line = append_subfield_element(xml_line,
                                               CFG_REFEXTRACT_SUBFIELD_PUBLISHER,
                                               element['publisher'])
            line_elements.append(element)
        elif element['type'] == "YEAR":
            xml_line = append_subfield_element(xml_line,
                                               CFG_REFEXTRACT_SUBFIELD_YEAR,
                                               element['year'])
            line_elements.append(element)
    # Append the author, if needed for an ibid, for the last element
    # in the entire line. Don't bother setting the author to be used
    # for ibids, since the line is finished
    xml_line += check_author_for_ibid(line_elements, auth_for_ibid)[0]
    # Close the ending datafield element
    xml_line += "\n </datafield>\n"
    return xml_line
def append_subfield_element(xml_line, subfield_code, value):
    """Return *xml_line* with one extra subfield element appended.

    The subfield's ``code`` attribute is *subfield_code* and its content is
    the XML-escaped *value*.
    """
    return '%s\n <subfield code="%s">%s</subfield>' % (
        xml_line, subfield_code, encode_for_xml(value))
def start_datafield_element(line_marker):
    """ Start a brand new datafield element with a marker subfield.
    @param line_marker: (string) The line marker which will be the sole
    content of the newly created marker subfield. This will always be the
    first subfield to be created for a new datafield element.
    @return: (string) The string holding the relevant datafield and
    subfield tags.
    """
    marker_value = encode_for_xml(format_marker(line_marker))
    marker_subfield = '\n<subfield code="%s">%s</subfield>' % (
        CFG_REFEXTRACT_SUBFIELD_MARKER, marker_value)
    return ' <datafield tag="%s" ind1="%s" ind2="%s">%s' % (
        CFG_REFEXTRACT_TAG_ID_REFERENCE,
        CFG_REFEXTRACT_IND1_REFERENCE,
        CFG_REFEXTRACT_IND2_REFERENCE,
        marker_subfield)
def dump_or_split_author(misc_txt, line_elements):
    """Decide how a newly found author group should be used.

    A line is split on author information in two situations:
    1. a previous author group already exists in the same line, or
    2. the only item in the current line is a title with no misc text.
    This heavily assumes that the first author group found in a citation is
    the most reliable (per the IEEE standard, authors overwhelmingly appear
    at the beginning of a citation).

    @param misc_txt: (string) The misc text for this reference line
    @param line_elements: (list) The list of elements found for this current line
    @return: (string) "dump"  -> author identified as bad, put into misc text;
                      "split" -> split the line, author goes in the new datafield;
                      ""      -> add the author normally to the current datafield.
    """
    if is_in_line_elements("AUTH", line_elements):
        ## Two author groups directly adjacent with minimal misc text
        ## between them: this later group is very likely to be wrong.
        directly_after_author = line_elements[-1]['type'] == "AUTH"
        if directly_after_author \
                and len(misc_txt) < CGF_REFEXTRACT_ADJACENT_AUTH_MISC_SEPARATION:
            return "dump"
        ## Otherwise trigger a new reference line
        return "split"
    ## Author directly after a lone title (ibid or normal) with no misc text
    ## also triggers a new reference line.
    lone_title = (is_in_line_elements("JOURNAL", line_elements)
                  and len(line_elements) == 1
                  and len(misc_txt) == 0)
    return "split" if lone_title else ""
def is_in_line_elements(element_type, line_elements):
    """ Checks the list of current elements in the line for the given element type.

    NOTE: returns the tuple (True, element) on a hit, but the bare boolean
    False on a miss -- callers rely on both the truthiness of the result
    and on indexing [1] to retrieve the matching element.
    """
    for candidate in line_elements:
        if candidate['type'] == element_type:
            return (True, candidate)
    return False
def split_on_semi_colon(misc_txt, line_elements, elements_processed, total_elements):
    """ Given some misc text, see if there are any semi-colons which may indicate
    that a reference line is in fact two separate citations.
    @param misc_txt: (string) The misc_txt to look for semi-colons within.
    @param line_elements: (list) The list of single upper-case chars which
    represent an element of a reference which has been processed.
    @param elements_processed: (integer) The number of elements which have been
    *looked at* for this entire reference line, regardless of splits
    @param total_elements: (integer) The total number of elements which
    have been identified in the *entire* reference line
    @return: (string) Depicting where the semi-colon was found in relation to the
    rest of the misc_txt. "" if a semi-colon was not found, or one was found
    relating to an escaped piece of text.
    """
    ## Only consider splitting if meaningful information has already been
    ## found in the reference, and there are still elements to be processed
    ## beyond the element relating to this misc_txt.
    if (is_in_line_elements("JOURNAL", line_elements)
            or is_in_line_elements("REPORTNUMBER", line_elements)
            or len(misc_txt) >= CGF_REFEXTRACT_SEMI_COLON_MISC_TEXT_SENSITIVITY) \
            and elements_processed < total_elements:
        ## BUGFIX: these comparisons previously tested the 5- and 4-character
        ## slices against the single characters '&' and '<', which can never
        ## match; the slice lengths show the intent was the XML entities
        ## '&amp;' (5 chars) and '&lt;' (4 chars) -- i.e. a semi-colon that
        ## merely terminates an escaped entity.
        if len(misc_txt) >= 4 and \
                (misc_txt[-5:] == '&amp;' or misc_txt[-4:] == '&lt;'):
            ## This is a semi-colon which does not indicate a new citation
            return ""
        ## ROBUSTNESS: strip once and use endswith/startswith -- the old
        ## stripped[-1]/stripped[0] indexing raised IndexError when the
        ## misc text stripped down to the empty string.
        stripped = misc_txt.strip(" .,")
        if stripped.endswith(";"):
            ## Semi-colon at the end: append preceding misc_txt to the
            ## current datafield element
            return "after"
        elif stripped.startswith(";"):
            ## Semi-colon at the start: append the misc_txt to the *newly
            ## created datafield element*
            return "before"
    return ""
def check_author_for_ibid(line_elements, author):
    """ Given a list of elements for an *entire* reference line, and the current
    author element to be used for ibids, check to see if that author element needs
    to be inserted into this line, depending on the presence of ibids and whether
    or not there is already an author paired with an ibid.
    Also, if no ibids are present in the line, see if the author element needs
    to be updated, depending on the presence of a normal title and a corresponding
    author group.
    @param line_elements: List of line elements for the entire processed reference
    line
    @param author: The current parent author element to be used with an ibid
    @return: (tuple) - containing a possible new author subfield, and the parent
    author element to be used for future ibids (if any)
    """
    ## Upon splitting, check for ibids in the previous line,
    ## If an appropriate author was found, pair it with this ibid.
    ## (i.e., an author has not been explicitly paired with this ibid already
    ## and an author exists with the parent title to which this ibid refers)
    if is_in_line_elements("JOURNAL", line_elements):
        ## Get the title element for this line
        ## (is_in_line_elements returns (True, element) on a hit)
        title_element = is_in_line_elements("JOURNAL", line_elements)[1]
        if author != None and not is_in_line_elements("AUTH", line_elements) \
                and title_element['is_ibid']:
            ## Return the author subfield which needs to be appended for an ibid in the line
            ## No need to reset the author to be used for ibids, since this line holds an ibid
            ## (the author's parentheses, if any, are stripped before output)
            return """
<subfield code="%(sf-code-ref-auth)s">%(authors)s</subfield>""" \
                % {'authors': encode_for_xml(author['auth_txt'].strip('()')),
                   'sf-code-ref-auth': CFG_REFEXTRACT_SUBFIELD_AUTH,
                   }, author
        ## Set the author to be used for ibids, when a standard title is present in this line,
        ## as well as an author
        if not title_element['is_ibid'] and is_in_line_elements("AUTH", line_elements):
            ## Set the author to be used for ibids, in the event that a subsequent ibid is found
            ## this author element will be repeated.
            ## This author is only used when an ibid is in a line
            ## and there is no other author found in the line.
            author = is_in_line_elements("AUTH", line_elements)[1]
        ## If there is no author associated with this head title, clear the author to be used for ibids
        elif not title_element['is_ibid']:
            author = None
    ## If an author does not need to be replicated for an ibid, append nothing to the xml line
    return "", author
def append_datafield_element(line_marker,
                             citation_structure,
                             line_elements,
                             author,
                             xml_line):
    """ Finish the current datafield element and start a new one, with a new
    marker subfield.
    @param line_marker: (string) The line marker which will be the sole
    content of the newly created marker subfield. This will always be the
    first subfield to be created for a new datafield element.
    @param citation_structure: (list) mutated in place -- receives the
    element list of the datafield being closed.
    @param line_elements: (list) elements of the datafield being closed;
    emptied in place so the caller's reference starts fresh.
    @param author: the parent author element carried for possible ibids.
    @return: (tuple) the extended xml string and the (possibly updated)
    author element to be used for future ibids.
    """
    ## An ibid in the line being closed may need the held-back author
    ## repeated; this may also update the author carried forward.
    ibid_author_subfield, author = check_author_for_ibid(line_elements, author)
    xml_line += ibid_author_subfield
    ## Close the old datafield and open a fresh one with a marker subfield.
    xml_line += """
</datafield>
<datafield tag="%(df-tag-ref)s" ind1="%(df-ind1-ref)s" ind2="%(df-ind2-ref)s">
<subfield code="%(sf-code-ref-marker)s">%(marker-val)s</subfield>""" % {
        'df-tag-ref': CFG_REFEXTRACT_TAG_ID_REFERENCE,
        'df-ind1-ref': CFG_REFEXTRACT_IND1_REFERENCE,
        'df-ind2-ref': CFG_REFEXTRACT_IND2_REFERENCE,
        'sf-code-ref-marker': CFG_REFEXTRACT_SUBFIELD_MARKER,
        'marker-val': encode_for_xml(format_marker(line_marker)),
    }
    ## Record the closed datafield's elements in the caller's overview
    ## structure, then clear the referenced element list in place.
    citation_structure.append(line_elements)
    del line_elements[:]
    return xml_line, author
def filter_processed_references(out):
    """ Apply filters to reference lines found - to remove junk.

    Removes 999C5 datafields that hold only a single too-short/too-long 'm'
    subfield (via restrict_m_subfields) and, when any were removed, rewrites
    the trailing misc count inside the 999C6 stats subfield "a".
    @param out: (string) the full MARC XML output, one element per line.
    @return: (string) the filtered XML, empty lines dropped.
    """
    reference_lines = out.split('\n')
    # Removes too long and too short m tags
    m_restricted, ref_lines = restrict_m_subfields(reference_lines)
    if m_restricted:
        a_tag = re.compile(r'\<subfield code=\"a\"\>(.*?)\<\/subfield\>')
        for i in range(len(ref_lines)):
            # Checks to see that the datafield has the attribute ind2="6",
            # Before looking to see if the subfield code attribute is 'a'
            if ref_lines[i].find('<datafield tag="999" ind1="C" ind2="6">') != -1 \
                    and (len(ref_lines) - 1) > i:
                # For each line in this datafield element, try to find the
                # subfield whose code attribute is 'a'.
                # BUGFIX: this loop previously ran while '</datafield>' WAS
                # present ('!= -1'), so it never advanced past the opening
                # tag and the stats subfield was never rewritten; it must
                # advance *until* the closing tag is reached.
                while ref_lines[i].find('</datafield>') == -1 and (len(ref_lines) - 1) > i:
                    i += 1
                    # <subfield code="a">Invenio/X.XX.X
                    # refextract/X.XX.X-timestamp-err-repnum-title-URL-misc
                    # remake the "a" tag for new number of "m" tags
                    if a_tag.search(ref_lines[i]):
                        data = a_tag.search(ref_lines[i]).group(1)
                        words1 = data.split()
                        # the counts are the dash-separated tail of the last word
                        words2 = words1[-1].split('-')
                        old_m = int(words2[-1])
                        words2[-1] = str(old_m - m_restricted)
                        data1 = '-'.join(words2)
                        words1[-1] = data1
                        new_data = ' '.join(words1)
                        ref_lines[i] = ' <subfield code="a">' + new_data + '</subfield>'
                        break
    new_out = '\n'.join([l for l in [rec.rstrip() for rec in ref_lines] if l])
    # NOTE(review): this compares a *line count* against a *character count*,
    # so the message is logged almost always; likely intended
    # len(new_out.split('\n')) -- kept as-is since it only affects verbose
    # logging. TODO confirm against upstream.
    if len(reference_lines) != len(new_out):
        write_message(" * filter results: unfilter references line length is %d and filtered length is %d"
                      % (len(reference_lines), len(new_out)), verbose=2)
    return new_out
def restrict_m_subfields(reference_lines):
    """Remove complete datafields which hold ONLY a single 'm' subfield,
    AND where the misc content is too short or too long to be of use.
    Min and max lengths derived by inspection of actual data.

    @param reference_lines: (list) of XML output lines, one element each.
    @return: (tuple) (number of datafields removed, surviving lines).
    """
    min_length = 4
    max_length = 1024
    m_tag = re.compile(r'\<subfield code=\"m\"\>(.*?)\<\/subfield\>')
    keep = [1] * len(reference_lines)
    m_restricted = 0
    for idx, line in enumerate(reference_lines):
        misc_match = m_tag.search(line)
        if not misc_match:
            continue
        if idx - 2 < 0 or idx + 1 >= len(reference_lines):
            continue
        ## A solitary 'm' subfield sits exactly between a marker ('o')
        ## subfield and the closing tag, two lines below the opening tag.
        solitary = (reference_lines[idx + 1].find('</datafield>') != -1
                    and reference_lines[idx - 1].find('<subfield code="o">') != -1
                    and reference_lines[idx - 2].find('<datafield') != -1)
        if solitary:
            mlength = len(misc_match.group(1))
            if mlength < min_length or mlength > max_length:
                keep[idx - 2] = keep[idx - 1] = keep[idx] = keep[idx + 1] = 0
                m_restricted += 1
    surviving = [line for flag, line in zip(keep, reference_lines) if flag]
    return m_restricted, surviving
def get_subfield_content(line, subfield_code):
    """ Given a line (subfield element) and a xml code attribute for a subfield element,
    return the contents of the subfield element.
    """
    opening_tag = '<subfield code="%s">' % subfield_code
    after_opening = line.split(opening_tag)[1]
    content, _sep, _tail = after_opening.partition('</subfield>')
    return content
def compress_subfields(out, subfield_code):
    """
    For each datafield, compress multiple subfields of type 'subfield_code' into a single one
    e.g. for MISC text, change xml format from:
        <datafield tag="999" ind1="C" ind2="5">
            <subfield code="o">1.</subfield>
            <subfield code="m">J. Dukelsky, S. Pittel and G. Sierra</subfield>
            <subfield code="s">Rev. Mod. Phys. 76 (2004) 643</subfield>
            <subfield code="m">and this is some more misc text</subfield>
        </datafield>
        <datafield tag="999" ind1="C" ind2="5">
            <subfield code="o">2.</subfield>
            <subfield code="m">J. von Delft and D.C. Ralph,</subfield>
            <subfield code="s">Phys. Rep. 345 (2001) 61</subfield>
        </datafield>
    to:
        <datafield tag="999" ind1="C" ind2="5">
            <subfield code="o">1.</subfield>
            <subfield code="m">J. Dukelsky, S. Pittel and G. Sierra and this is some more misc text</subfield>
            <subfield code="s">Rev. Mod. Phys. 76 (2004) 643</subfield>
        </datafield>
        <datafield tag="999" ind1="C" ind2="5">
            <subfield code="o">2.</subfield>
            <subfield code="m">J. von Delft and D.C. Ralph,</subfield>
            <subfield code="s">Phys. Rep. 345 (2001) 61</subfield>
        </datafield>

    @param out: (string) the XML output, one element per '\n'-separated line.
    @param subfield_code: (string) the single-char code of the subfield kind
    to merge (e.g. 'm' for misc, 'h' for authors).
    @return: (string) the XML with, per datafield, all subfields of that
    code merged into one at the position of the first occurrence.
    """
    in_lines = out.split('\n')
    # hold the subfield compressed version of the xml, line by line
    new_rec_lines = []
    # Index into new_rec_lines of the (still open) merged subfield for the
    # current datafield; 0 means the subfield has not been seen yet
    position = 0
    # Where the concatenated misc text is held before appended at the end
    content_text = ""
    # Components of the misc subfield elements
    subfield_start = " <subfield code=\"%s\">" % subfield_code
    subfield_end = "</subfield>"
    for line in in_lines:
        ## If reached the end of the datafield
        if line.find('</datafield>') != -1:
            if len(content_text) > 0:
                # Insert the concatenated misc contents back where it was first
                # encountered (dont RIGHTstrip semi-colons, as these may be
                # needed for escaped entities) and close the subfield
                if subfield_code == 'm':
                    content_text = content_text.strip(" ,.").lstrip(" ;")
                new_rec_lines[position] = new_rec_lines[position] + \
                    content_text + subfield_end
            # reset the per-datafield accumulation state
            content_text = ""
            position = 0
            new_rec_lines.append(line)
        # Found subfield in question, concatenate subfield contents
        # for this single datafield
        elif line.find(subfield_start.strip()) != -1:
            if position == 0:
                ## Save the position of this found subfield
                ## for later insertion into the same place
                new_rec_lines.append(subfield_start)
                position = len(new_rec_lines) - 1
            new_text = get_subfield_content(line, subfield_code)
            if content_text and new_text:
                ## Append spaces between merged text, if needed
                ## (no space is added when one already borders the join point)
                if (content_text[-1] + new_text[0]).find(" ") == -1:
                    new_text = " " + new_text
            content_text += new_text
        else:
            new_rec_lines.append(line)
    ## Create the readable file from the list of lines, dropping empties.
    new_out = [l.rstrip() for l in new_rec_lines]
    return '\n'.join(filter(None, new_out))
| gpl-2.0 |
pstratem/elements | qa/rpc-tests/netutil.py | 328 | 4562 | #!/usr/bin/env python2
# Copyright (c) 2014 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
# Linux network utilities
import sys
import socket
import fcntl
import struct
import array
import os
import binascii
# Roughly based on http://voorloopnul.com/blog/a-python-netstat-in-less-than-100-lines-of-code/ by Ricardo Pascal
# TCP connection states as they appear (two-digit hex strings) in the
# state column of /proc/net/tcp, used below by netstat()/get_bind_addrs().
STATE_ESTABLISHED = '01'
STATE_SYN_SENT = '02'
STATE_SYN_RECV = '03'
STATE_FIN_WAIT1 = '04'
STATE_FIN_WAIT2 = '05'
STATE_TIME_WAIT = '06'
STATE_CLOSE = '07'
STATE_CLOSE_WAIT = '08'
STATE_LAST_ACK = '09'
STATE_LISTEN = '0A'
STATE_CLOSING = '0B'
def get_socket_inodes(pid):
    '''
    Get list of socket inodes for process pid.

    Reads /proc/<pid>/fd; socket descriptors read back as
    'socket:[<inode>]', from which the inode number is extracted.
    '''
    fd_dir = '/proc/%i/fd' % pid
    inodes = []
    for fd_name in os.listdir(fd_dir):
        link_target = os.readlink(os.path.join(fd_dir, fd_name))
        if link_target.startswith('socket:'):
            # strip the 'socket:[' prefix (8 chars) and trailing ']'
            inodes.append(int(link_target[8:-1]))
    return inodes
def _remove_empty(array):
return [x for x in array if x !='']
def _convert_ip_port(array):
host,port = array.split(':')
# convert host from mangled-per-four-bytes form as used by kernel
host = binascii.unhexlify(host)
host_out = ''
for x in range(0, len(host)/4):
(val,) = struct.unpack('=I', host[x*4:(x+1)*4])
host_out += '%08x' % val
return host_out,int(port,16)
def netstat(typ='tcp'):
    '''
    Function to return a list with status of tcp connections at linux systems
    To get pid of all network process running on system, you must run this script
    as superuser
    '''
    with open('/proc/net/' + typ, 'r') as proc_file:
        rows = proc_file.readlines()
    result = []
    # rows[0] is the column-header line; skip it.
    for row in rows[1:]:
        fields = _remove_empty(row.split(' '))
        tcp_id = fields[0]
        l_addr = _convert_ip_port(fields[1])   # local address
        r_addr = _convert_ip_port(fields[2])   # remote address
        state = fields[3]
        inode = int(fields[9])                 # inode, to match with a pid
        result.append([tcp_id, l_addr, r_addr, state, inode])
    return result
def get_bind_addrs(pid):
    '''
    Get bind addresses as (host,port) tuples for process pid.

    Cross-references the process's socket inodes with the listening
    entries of /proc/net/tcp and /proc/net/tcp6.
    '''
    inodes = get_socket_inodes(pid)
    listening = []
    for conn in netstat('tcp') + netstat('tcp6'):
        state, inode = conn[3], conn[4]
        if state == STATE_LISTEN and inode in inodes:
            listening.append(conn[1])
    return listening
# from: http://code.activestate.com/recipes/439093/
def all_interfaces():
    '''
    Return all interfaces that are up.

    Issues the SIOCGIFCONF ioctl, growing the buffer until the kernel no
    longer fills it completely, then unpacks (name, ipv4_address) pairs.
    NOTE(review): Python 2 only code ('\0' * n as a str buffer,
    array.tostring()); Linux-specific ioctl/struct layout.
    '''
    is_64bits = sys.maxsize > 2**32
    # per-record size of struct ifreq: 40 bytes on 64-bit, 32 on 32-bit
    struct_size = 40 if is_64bits else 32
    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    max_possible = 8  # initial value
    while True:
        # FIX: renamed the buffer-size local from 'bytes', which shadowed
        # the builtin of the same name.
        buf_size = max_possible * struct_size
        names = array.array('B', '\0' * buf_size)
        outbytes = struct.unpack('iL', fcntl.ioctl(
            s.fileno(),
            0x8912,  # SIOCGIFCONF
            struct.pack('iL', buf_size, names.buffer_info()[0])
        ))[0]
        # A completely-filled buffer may have truncated the list:
        # retry with a larger one.
        if outbytes == buf_size:
            max_possible *= 2
        else:
            break
    namestr = names.tostring()
    # each record: interface name in the first 16 bytes, IPv4 address
    # at offset 20..24
    return [(namestr[i:i+16].split('\0', 1)[0],
             socket.inet_ntoa(namestr[i+20:i+24]))
            for i in range(0, outbytes, struct_size)]
def addr_to_hex(addr):
    '''
    Convert string IPv4 or IPv6 address to binary address as returned by
    get_bind_addrs.
    Very naive implementation that certainly doesn't work for all IPv6 variants.
    '''
    if '.' in addr:  # IPv4: four decimal octets
        octets = [int(part) for part in addr.split('.')]
        return binascii.hexlify(bytearray(octets))
    if ':' not in addr:
        raise ValueError('Could not parse address %s' % addr)
    # IPv6: collect bytes before and after the '::' gap, then zero-fill
    parts = addr.split(':')
    halves = ([], [])  # (prefix bytes, suffix bytes)
    side = 0
    for pos, component in enumerate(parts):
        if component == '':
            # skip the empty component a leading/trailing ':' produces
            if pos == 0 or pos == (len(parts) - 1):
                continue
            side += 1  # '::' switches accumulation to the suffix
            assert(side < 2)
        else:
            # two bytes per component
            word = int(component, 16)
            halves[side].append(word >> 8)
            halves[side].append(word & 0xff)
    fill = 16 - len(halves[0]) - len(halves[1])
    assert((side == 0 and fill == 0) or (side == 1 and fill > 0))
    full = halves[0] + ([0] * fill) + halves[1]
    return binascii.hexlify(bytearray(full))
| mit |
Darthone/bug-free-octo-parakeet | technical-analysis/oneDayOHLC/sma_ema_vol_ohlc_rsi.py | 2 | 6893 | #!/usr/bin/env python
import matplotlib
# matplotlib.use('Agg')
import time
import datetime
import numpy as np
import matplotlib.pyplot as mplot
import matplotlib.ticker as mticker
import matplotlib.dates as mdates
from matplotlib.finance import candlestick_ochl
# custom matplotlib parameters
matplotlib.rcParams.update({'font.size': 9})
import urllib2
# ticker symbols to be charted by graphData() below
stocks = 'AAPL', 'FB', 'UAA'
'''
compute the n period relative strength indicator
n=14 (periods) as a default developed by J. Welles Wilder
momentum oscillator that measures the speed and change of price movements
'''
def rsiFunction(prices, n=14):
    """Compute the n-period Relative Strength Index (RSI) of a price series.

    Wilder's RSI: seed the average gain/loss from the first n price changes,
    then smooth with (prev * (n-1) + current) / n. Returns an array the same
    length as `prices`; the first n slots hold the seed RSI value.
    """
    deltas = np.diff(prices)
    # Seed from exactly the first n deltas. (Was deltas[:n+1], an off-by-one
    # that averaged n+1 changes over n periods and skewed the seed.)
    seed = deltas[:n]
    up = seed[seed >= 0].sum()/n
    down = -seed[seed < 0].sum()/n
    rs = up/down
    rsi = np.zeros_like(prices)
    rsi[:n] = 100. - 100./(1. + rs)
    for i in range(n, len(prices)):
        delta = deltas[i-1]  # diff is 1 shorter than prices
        if delta > 0:
            upval = delta
            downval = 0.
        else:
            upval = 0.
            downval = -delta
        # Wilder's exponential smoothing of average gain and loss.
        up = (up * (n - 1) + upval)/n
        down = (down * (n - 1) + downval)/n
        rs = up/down
        rsi[i] = 100. - 100./(1. + rs)
    return rsi
def movingaverage(values, window):
    """Simple moving average of `values` over `window` periods.

    Returns a numpy array of length len(values) - window + 1 ('valid' mode:
    only positions where the averaging kernel fully overlaps the data).
    """
    kernel = np.repeat(1.0, window) / window
    # Convolution with a flat, normalized kernel smooths the series.
    return np.convolve(values, kernel, 'valid')
def ema(values, window):
    """Exponentially weighted moving average, same length as the input.

    Weights follow exp(linspace(-1, 0, window)), normalized to sum to 1.
    The first `window` outputs are only partially converged, so they are
    backfilled with the first fully-weighted value.
    """
    kernel = np.exp(np.linspace(-1., 0., window))
    kernel /= kernel.sum()
    out = np.convolve(values, kernel, mode='full')[:len(values)]
    out[:window] = out[window]
    return out
'''
macd line = 12ema - 26ema
signal line = 9ema of the macd line
histogram = macd line - signal line
12 - two trading weeks
26 - one trading month
9 - one and half trading week
http://www.forexabode.com/forex-school/technical-indicators/macd/
5-day trading week -> 10,22,7 or 10,22,8
'''
def computeMACD(x, slow=26, fast=12):
    """Return (slow EMA, fast EMA, MACD line) for the price series x.

    MACD line = fast EMA - slow EMA. Pass `slow`/`fast` explicitly to use
    periods other than the 26/12 defaults.

    Fix: the previous version overwrote its parameters with the module-level
    globals nslow/nfast (making the arguments dead and raising NameError when
    the globals were not defined); it now honours its parameters.
    """
    emaslow = ema(x, slow)
    emafast = ema(x, fast)
    return emaslow, emafast, emafast - emaslow
def graphData(stock, MA1, MA2, dateRange):
    """Pull OHLC data for `stock` from the Yahoo chart API and plot it.

    Draws a candlestick chart with two simple moving averages (MA1 and MA2
    periods), an RSI pane on top, volume as a twin-axis overlay and a MACD
    pane at the bottom; shows the figure and saves it to financial_graph.png.

    NOTE(review): relies on the module-level globals nslow/nfast/nema for the
    MACD computation labels and signal line -- confirm they are set first.
    """
    try:
        try:
            print 'pulling data on', stock
            urlToVisit = 'http://chartapi.finance.yahoo.com/instrument/1.0/' + stock + '/chartdata;type=quote;range=' + dateRange + '/csv'
            stockFile = []
            try:
                sourceCode = urllib2.urlopen(urlToVisit).read()
                splitSource = sourceCode.split('\n')
                for eachLine in splitSource:
                    splitLine = eachLine.split(',')
                    # keep only the 6-column data rows, skip the header lines
                    if len(splitLine) == 6:
                        if 'values' not in eachLine:
                            stockFile.append(eachLine)
            except Exception, e:
                print str(e), 'error in organization of pulled data'
        except Exception, e:
            print str(e), 'error in pulling price data'
        # load values and format the date
        date, closePrice, highPrice, lowPrice, openPrice, volume = np.loadtxt(stockFile, delimiter=',', unpack=True, converters={0: mdates.strpdate2num('%Y%m%d')})
        # add dates to data for candlestick to be plotted
        i = 0
        k = len(date)
        candles = []
        while i < k:
            newLine = date[i], openPrice[i], closePrice[i], highPrice[i], lowPrice[i], volume[i]
            candles.append(newLine)
            i = i + 1
        av1 = movingaverage(closePrice, MA1)
        av2 = movingaverage(closePrice, MA2)
        # starting point, plot exactly same amount of data
        SP = len(date[MA2-1:])
        label_1 = str(MA1) + ' SMA'
        label_2 = str(MA2) + ' SMA'
        f = mplot.figure()
        # on a 4x4 figure, plot at (0,0)
        a = mplot.subplot2grid((6,4), (1,0), rowspan=4, colspan=4)
        # using matplotlib's candlestick charting
        candlestick_ochl(a, candles[-SP:], width=0.5, colorup='g', colordown='r')
        # moving average applied to data
        a.plot(date[-SP:], av1[-SP:], label=label_1, linewidth=1.5)
        a.plot(date[-SP:], av2[-SP:], label=label_2, linewidth=1.5)
        mplot.gca().yaxis.set_major_locator(mticker.MaxNLocator(prune='upper'))
        mplot.ylabel('Stock Price ($) and Volume')
        mplot.legend(loc=9, ncol=2, prop={'size':7}, fancybox=True)
        a.grid(True)
        minVolume = 0
        # rsi pane (top row of the 6x4 grid)
        rsiCol = '#1a8782'
        posCol = '#386d13'
        negCol = '#8f2020'
        c = mplot.subplot2grid((6,4), (0,0), sharex=a, rowspan=1, colspan=4)
        rsi = rsiFunction(closePrice)
        c.plot(date[-SP:], rsi[-SP:], rsiCol, linewidth=1.5)
        c.axhline(70, color=negCol)
        c.axhline(30, color=posCol)
        c.fill_between(date[-SP:], rsi[-SP:], 70, where=(rsi[-SP:]>=70), facecolor=negCol, edgecolor=negCol)
        c.fill_between(date[-SP:], rsi[-SP:], 30, where=(rsi[-SP:]<=30), facecolor=posCol, edgecolor=posCol)
        # 70 --> red, overbought
        # 30 --> green, oversold
        c.text(0.015, 0.95, 'RSI (14)', va='top', transform=c.transAxes)
        c.tick_params(axis='x')
        c.tick_params(axis='y')
        c.set_yticks([30,70])
        # mplot.gca().yaxis.set_major_locator(mticker.MaxNLocator(prune='lower'))
        # fit 10 dates into graph and format properly
        a.xaxis.set_major_locator(mticker.MaxNLocator(10))
        a.xaxis.set_major_formatter(mdates.DateFormatter('%Y-%m-%d'))
        # volume drawn on a twin y-axis over the candlestick pane
        avol = a.twinx()
        avol.fill_between(date[-SP:], minVolume, volume[-SP:], facecolor='b', alpha=.5)
        avol.axes.yaxis.set_ticklabels([])
        avol.grid(False)
        avol.set_ylim(0,2*volume.max())
        avol.tick_params(axis='x')
        avol.tick_params(axis='y')
        # macd pane (bottom row of the 6x4 grid)
        d = mplot.subplot2grid((6,4), (5,0), sharex=a, rowspan=1, colspan=4)
        d.tick_params(axis='x')
        d.tick_params(axis='y')
        # nslow = 26
        # nfast = 12
        # nema = 9
        emaslow, emafast, macd = computeMACD(closePrice)
        ema9 = ema(macd, nema)
        d.plot(date[-SP:], macd[-SP:])
        d.plot(date[-SP:], ema9[-SP:])
        d.fill_between(date[-SP:], macd[-SP:]-ema9[-SP:], 0, alpha=0.5)
        d.text(0.015, 0.95, 'MACD ' + str(nfast) + ' ' + str(nslow) + ' ' + str(nema), va='top', transform=d.transAxes)
        d.yaxis.set_major_locator(mticker.MaxNLocator(nbins=5, prune='upper'))
        # rotating angles by 90 degrees to fit properly
        for label in d.xaxis.get_ticklabels():
            label.set_rotation(45)
        # subplot profile parameters
        mplot.subplots_adjust(left=.10, bottom=.19, right=.93, top=.95, wspace=.20, hspace=.07)
        # plot profiling
        mplot.xlabel('Date (YYYY-MM-DD)')
        # mplot.ylabel('Stock Price ($)')
        mplot.suptitle(stock + ' Stock Price')
        # remove x axis from first graph, used at bottom already
        mplot.setp(c.get_xticklabels(), visible=False)
        mplot.setp(a.get_xticklabels(), visible=False)
        # adjusting plots in a clean manner
        mplot.subplots_adjust(left=.09, bottom=.18, right=.94, top=.94, wspace=.20, hspace=0)
        mplot.show()
        f.savefig('financial_graph.png')
    except Exception, e:
        print 'error in main:', str(e)
# Interactive entry point: prompt for ticker and indicator parameters.
# nslow/nfast/nema are module-level globals consumed inside graphData.
stockToUse = raw_input('Stock to chart: ')
# Simple Moving Averages (SMA) - 10, 30
sma1 = raw_input('SMA 1: ') or "10"
sma2 = raw_input('SMA 2: ') or "30"
# date range - 1y for 1 year, 10d for 10 days
dateRange = raw_input('Length of Process: ') or "1y"
# EMA Vars
nslow = raw_input('Slow EMA: ') or "26"
nfast = raw_input('Fast EMA: ') or "12"
nema = raw_input('EMA Signal: ') or "9"
nslow = int(nslow)
nfast = int(nfast)
nema = int(nema)
graphData(stockToUse, int(sma1), int(sma2), dateRange)
| mit |
Dhivyap/ansible | lib/ansible/modules/network/check_point/cp_mgmt_multicast_address_range.py | 20 | 6483 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Ansible module to manage Check Point Firewall (c) 2019
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
# Standard Ansible module metadata: community-supported, preview status.
ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'community'}
DOCUMENTATION = """
---
module: cp_mgmt_multicast_address_range
short_description: Manages multicast-address-range objects on Check Point over Web Services API
description:
- Manages multicast-address-range objects on Check Point devices including creating, updating and removing objects.
- All operations are performed over Web Services API.
version_added: "2.9"
author: "Or Soffer (@chkp-orso)"
options:
name:
description:
- Object name.
type: str
required: True
ip_address:
description:
- IPv4 or IPv6 address. If both addresses are required use ipv4-address and ipv6-address fields explicitly.
type: str
ipv4_address:
description:
- IPv4 address.
type: str
ipv6_address:
description:
- IPv6 address.
type: str
ip_address_first:
description:
- First IP address in the range. If both IPv4 and IPv6 address ranges are required, use the ipv4-address-first and the ipv6-address-first fields instead.
type: str
ipv4_address_first:
description:
- First IPv4 address in the range.
type: str
ipv6_address_first:
description:
- First IPv6 address in the range.
type: str
ip_address_last:
description:
- Last IP address in the range. If both IPv4 and IPv6 address ranges are required, use the ipv4-address-first and the ipv6-address-first fields instead.
type: str
ipv4_address_last:
description:
- Last IPv4 address in the range.
type: str
ipv6_address_last:
description:
- Last IPv6 address in the range.
type: str
tags:
description:
- Collection of tag identifiers.
type: list
color:
description:
- Color of the object. Should be one of existing colors.
type: str
choices: ['aquamarine', 'black', 'blue', 'crete blue', 'burlywood', 'cyan', 'dark green', 'khaki', 'orchid', 'dark orange', 'dark sea green',
'pink', 'turquoise', 'dark blue', 'firebrick', 'brown', 'forest green', 'gold', 'dark gold', 'gray', 'dark gray', 'light green', 'lemon chiffon',
'coral', 'sea green', 'sky blue', 'magenta', 'purple', 'slate blue', 'violet red', 'navy blue', 'olive', 'orange', 'red', 'sienna', 'yellow']
comments:
description:
- Comments string.
type: str
details_level:
description:
- The level of detail for some of the fields in the response can vary from showing only the UID value of the object to a fully detailed
representation of the object.
type: str
choices: ['uid', 'standard', 'full']
groups:
description:
- Collection of group identifiers.
type: list
ignore_warnings:
description:
- Apply changes ignoring warnings.
type: bool
ignore_errors:
description:
- Apply changes ignoring errors. You won't be able to publish such a changes. If ignore-warnings flag was omitted - warnings will also be ignored.
type: bool
extends_documentation_fragment: checkpoint_objects
"""
EXAMPLES = """
- name: add-multicast-address-range
cp_mgmt_multicast_address_range:
ip_address_first: 224.0.0.1
ip_address_last: 224.0.0.4
name: New Multicast Address Range
state: present
- name: set-multicast-address-range
cp_mgmt_multicast_address_range:
ip_address_first: 224.0.0.7
ip_address_last: 224.0.0.10
name: New Multicast Address Range
state: present
- name: delete-multicast-address-range
cp_mgmt_multicast_address_range:
name: New Multicast Address Range
state: absent
"""
RETURN = """
cp_mgmt_multicast_address_range:
description: The checkpoint object created or updated.
returned: always, except when deleting the object.
type: dict
"""
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.checkpoint.checkpoint import checkpoint_argument_spec_for_objects, api_call
def main():
    """Module entry point.

    Declares the multicast-address-range argument spec, merges in the shared
    Check Point object options (state, version, etc.) and delegates the actual
    add/set/delete logic to the generic checkpoint api_call helper.
    """
    argument_spec = dict(
        name=dict(type='str', required=True),
        ip_address=dict(type='str'),
        ipv4_address=dict(type='str'),
        ipv6_address=dict(type='str'),
        ip_address_first=dict(type='str'),
        ipv4_address_first=dict(type='str'),
        ipv6_address_first=dict(type='str'),
        ip_address_last=dict(type='str'),
        ipv4_address_last=dict(type='str'),
        ipv6_address_last=dict(type='str'),
        tags=dict(type='list'),
        color=dict(type='str', choices=['aquamarine', 'black', 'blue', 'crete blue', 'burlywood', 'cyan', 'dark green',
                                        'khaki', 'orchid', 'dark orange', 'dark sea green', 'pink', 'turquoise', 'dark blue', 'firebrick', 'brown',
                                        'forest green', 'gold', 'dark gold', 'gray', 'dark gray', 'light green', 'lemon chiffon', 'coral', 'sea green',
                                        'sky blue', 'magenta', 'purple', 'slate blue', 'violet red', 'navy blue', 'olive', 'orange', 'red', 'sienna',
                                        'yellow']),
        comments=dict(type='str'),
        details_level=dict(type='str', choices=['uid', 'standard', 'full']),
        groups=dict(type='list'),
        ignore_warnings=dict(type='bool'),
        ignore_errors=dict(type='bool')
    )
    # Shared options (state, auto_publish_session, version, ...) come from
    # the common checkpoint fragment.
    argument_spec.update(checkpoint_argument_spec_for_objects)
    module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True)
    api_call_object = 'multicast-address-range'
    result = api_call(module, api_call_object)
    module.exit_json(**result)
if __name__ == '__main__':
    main()
| gpl-3.0 |
rwatson/chromium-capsicum | net/tools/testserver/testserver.py | 1 | 39247 | #!/usr/bin/python2.4
# Copyright (c) 2006-2008 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""This is a simple HTTP server used for testing Chrome.
It supports several test URLs, as specified by the handlers in TestPageHandler.
It defaults to living on localhost:8888.
It can use https if you specify the flag --https=CERT where CERT is the path
to a pem file containing the certificate and private key that should be used.
To shut it down properly, visit localhost:8888/kill.
"""
import base64
import BaseHTTPServer
import cgi
import optparse
import os
import re
import shutil
import SocketServer
import sys
import time
import tlslite
import tlslite.api
import pyftpdlib.ftpserver
try:
import hashlib
_new_md5 = hashlib.md5
except ImportError:
import md5
_new_md5 = md5.new
SERVER_HTTP = 0
SERVER_FTP = 1
debug_output = sys.stderr
def debug(msg):
  """Write one line to the shared debug stream and flush it immediately."""
  # Parameter renamed from 'str', which shadowed the builtin.
  debug_output.write(msg + "\n")
  debug_output.flush()
class StoppableHTTPServer(BaseHTTPServer.HTTPServer):
  """This is a specialization of BaseHTTPServer to allow it
  to be exited cleanly (by setting its "stop" member to True)."""
  def serve_forever(self):
    """Handles requests one at a time until a handler sets self.stop."""
    self.stop = False
    self.nonce = None  # presumably per-run state for an auth handler -- TODO confirm
    while not self.stop:
      self.handle_request()
    self.socket.close()
class HTTPSServer(tlslite.api.TLSSocketServerMixIn, StoppableHTTPServer):
"""This is a specialization of StoppableHTTPerver that add https support."""
def __init__(self, server_address, request_hander_class, cert_path):
s = open(cert_path).read()
x509 = tlslite.api.X509()
x509.parse(s)
self.cert_chain = tlslite.api.X509CertChain([x509])
s = open(cert_path).read()
self.private_key = tlslite.api.parsePEMKey(s, private=True)
self.session_cache = tlslite.api.SessionCache()
StoppableHTTPServer.__init__(self, server_address, request_hander_class)
def handshake(self, tlsConnection):
"""Creates the SSL connection."""
try:
tlsConnection.handshakeServer(certChain=self.cert_chain,
privateKey=self.private_key,
sessionCache=self.session_cache)
tlsConnection.ignoreAbruptClose = True
return True
except tlslite.api.TLSError, error:
print "Handshake failure:", str(error)
return False
class ForkingHTTPServer(SocketServer.ForkingMixIn, StoppableHTTPServer):
  """This is a specialization of StoppableHTTPServer which serves each
  request in a separate process"""
  pass
class ForkingHTTPSServer(SocketServer.ForkingMixIn, HTTPSServer):
  """This is a specialization of HTTPSServer which serves each
  request in a separate process"""
  pass
class TestPageHandler(BaseHTTPServer.BaseHTTPRequestHandler):
  def __init__(self, request, client_address, socket_server):
    """Registers the handler dispatch tables, then defers to the base class
    (which immediately parses and dispatches the request)."""
    # Handlers are tried in list order; the first whose path check matches
    # handles the request.
    self._connect_handlers = [
      self.RedirectConnectHandler,
      self.ServerAuthConnectHandler,
      self.DefaultConnectResponseHandler]
    # DefaultResponseHandler must remain last: it is the catch-all.
    self._get_handlers = [
      self.KillHandler,
      self.NoCacheMaxAgeTimeHandler,
      self.NoCacheTimeHandler,
      self.CacheTimeHandler,
      self.CacheExpiresHandler,
      self.CacheProxyRevalidateHandler,
      self.CachePrivateHandler,
      self.CachePublicHandler,
      self.CacheSMaxAgeHandler,
      self.CacheMustRevalidateHandler,
      self.CacheMustRevalidateMaxAgeHandler,
      self.CacheNoStoreHandler,
      self.CacheNoStoreMaxAgeHandler,
      self.CacheNoTransformHandler,
      self.DownloadHandler,
      self.DownloadFinishHandler,
      self.EchoHeader,
      self.EchoHeaderOverride,
      self.EchoAllHandler,
      self.FileHandler,
      self.RealFileWithCommonHeaderHandler,
      self.RealBZ2FileWithCommonHeaderHandler,
      self.SetCookieHandler,
      self.AuthBasicHandler,
      self.AuthDigestHandler,
      self.SlowServerHandler,
      self.ContentTypeHandler,
      self.ServerRedirectHandler,
      self.ClientRedirectHandler,
      self.DefaultResponseHandler]
    # POST tries a few POST-specific handlers first, then the GET set.
    self._post_handlers = [
      self.WriteFile,
      self.EchoTitleHandler,
      self.EchoAllHandler,
      self.EchoHandler] + self._get_handlers

    # Extensions recognized by GetMIMETypeFromName; anything else falls back
    # to self._default_mime_type.
    self._mime_types = {
      'gif': 'image/gif',
      'jpeg' : 'image/jpeg',
      'jpg' : 'image/jpeg',
      'xml' : 'text/xml'
    }
    self._default_mime_type = 'text/html'

    BaseHTTPServer.BaseHTTPRequestHandler.__init__(self, request,
                                                   client_address,
                                                   socket_server)
def _ShouldHandleRequest(self, handler_name):
"""Determines if the path can be handled by the handler.
We consider a handler valid if the path begins with the
handler name. It can optionally be followed by "?*", "/*".
"""
pattern = re.compile('%s($|\?|/).*' % handler_name)
return pattern.match(self.path)
def GetMIMETypeFromName(self, file_name):
"""Returns the mime type for the specified file_name. So far it only looks
at the file extension."""
(shortname, extension) = os.path.splitext(file_name)
if len(extension) == 0:
# no extension.
return self._default_mime_type
# extension starts with a dot, so we need to remove it
return self._mime_types.get(extension[1:], self._default_mime_type)
  def KillHandler(self):
    """This request handler kills the server, for use when we're done
    with a particular test."""
    if (self.path.find("kill") < 0):
      return False
    self.send_response(200)
    self.send_header('Content-type', 'text/html')
    self.send_header('Cache-Control', 'max-age=0')
    self.end_headers()
    self.wfile.write("Time to die")
    # Signals StoppableHTTPServer.serve_forever to exit its request loop.
    self.server.stop = True
    return True
  def NoCacheMaxAgeTimeHandler(self):
    """This request handler yields a page with the title set to the current
    system time, and disallows caching via "Cache-Control: max-age=0"."""
    if not self._ShouldHandleRequest("/nocachetime/maxage"):
      return False
    self.send_response(200)
    self.send_header('Cache-Control', 'max-age=0')
    self.send_header('Content-type', 'text/html')
    self.end_headers()
    self.wfile.write('<html><head><title>%s</title></head></html>' %
                     time.time())
    return True
  def NoCacheTimeHandler(self):
    """This request handler yields a page with the title set to the current
    system time, and disallows caching via "Cache-Control: no-cache"."""
    if not self._ShouldHandleRequest("/nocachetime"):
      return False
    self.send_response(200)
    self.send_header('Cache-Control', 'no-cache')
    self.send_header('Content-type', 'text/html')
    self.end_headers()
    self.wfile.write('<html><head><title>%s</title></head></html>' %
                     time.time())
    return True
  def CacheTimeHandler(self):
    """This request handler yields a page with the title set to the current
    system time, and allows caching for one minute (max-age=60)."""
    if not self._ShouldHandleRequest("/cachetime"):
      return False
    self.send_response(200)
    self.send_header('Cache-Control', 'max-age=60')
    self.send_header('Content-type', 'text/html')
    self.end_headers()
    self.wfile.write('<html><head><title>%s</title></head></html>' %
                     time.time())
    return True
  def CacheExpiresHandler(self):
    """This request handler yields a page with the title set to the current
    system time, and sets the page to expire on 1 Jan 2099 (far future)."""
    if not self._ShouldHandleRequest("/cache/expires"):
      return False
    self.send_response(200)
    self.send_header('Expires', 'Thu, 1 Jan 2099 00:00:00 GMT')
    self.send_header('Content-type', 'text/html')
    self.end_headers()
    self.wfile.write('<html><head><title>%s</title></head></html>' %
                     time.time())
    return True
  def CacheProxyRevalidateHandler(self):
    """This request handler yields a page with the title set to the current
    system time, and allows caching for 60 seconds with proxy-revalidate."""
    if not self._ShouldHandleRequest("/cache/proxy-revalidate"):
      return False
    self.send_response(200)
    self.send_header('Content-type', 'text/html')
    self.send_header('Cache-Control', 'max-age=60, proxy-revalidate')
    self.end_headers()
    self.wfile.write('<html><head><title>%s</title></head></html>' %
                     time.time())
    return True
  def CachePrivateHandler(self):
    """This request handler yields a page with the title set to the current
    system time, and allows private caching for 3 seconds (max-age=3)."""
    if not self._ShouldHandleRequest("/cache/private"):
      return False
    self.send_response(200)
    self.send_header('Content-type', 'text/html')
    self.send_header('Cache-Control', 'max-age=3, private')
    self.end_headers()
    self.wfile.write('<html><head><title>%s</title></head></html>' %
                     time.time())
    return True
  def CachePublicHandler(self):
    """This request handler yields a page with the title set to the current
    system time, and allows public caching for 3 seconds (max-age=3)."""
    if not self._ShouldHandleRequest("/cache/public"):
      return False
    self.send_response(200)
    self.send_header('Content-type', 'text/html')
    self.send_header('Cache-Control', 'max-age=3, public')
    self.end_headers()
    self.wfile.write('<html><head><title>%s</title></head></html>' %
                     time.time())
    return True
  def CacheSMaxAgeHandler(self):
    """This request handler yields a page with the title set to the current
    system time, allowing shared (proxy) caches to keep it for 60 seconds
    while disallowing browser caching (max-age=0)."""
    if not self._ShouldHandleRequest("/cache/s-maxage"):
      return False
    self.send_response(200)
    self.send_header('Content-type', 'text/html')
    self.send_header('Cache-Control', 'public, s-maxage = 60, max-age = 0')
    self.end_headers()
    self.wfile.write('<html><head><title>%s</title></head></html>' %
                     time.time())
    return True
  def CacheMustRevalidateHandler(self):
    """This request handler yields a page with the title set to the current
    system time, and requires caches to revalidate (must-revalidate)."""
    if not self._ShouldHandleRequest("/cache/must-revalidate"):
      return False
    self.send_response(200)
    self.send_header('Content-type', 'text/html')
    self.send_header('Cache-Control', 'must-revalidate')
    self.end_headers()
    self.wfile.write('<html><head><title>%s</title></head></html>' %
                     time.time())
    return True
  def CacheMustRevalidateMaxAgeHandler(self):
    """This request handler yields a page with the title set to the current
    system time, and requires revalidation even though a max-age of 60
    seconds is specified."""
    if not self._ShouldHandleRequest("/cache/must-revalidate/max-age"):
      return False
    self.send_response(200)
    self.send_header('Content-type', 'text/html')
    self.send_header('Cache-Control', 'max-age=60, must-revalidate')
    self.end_headers()
    self.wfile.write('<html><head><title>%s</title></head></html>' %
                     time.time())
    return True
  def CacheNoStoreHandler(self):
    """This request handler yields a page with the title set to the current
    system time, and does not allow the page to be stored (no-store)."""
    if not self._ShouldHandleRequest("/cache/no-store"):
      return False
    self.send_response(200)
    self.send_header('Content-type', 'text/html')
    self.send_header('Cache-Control', 'no-store')
    self.end_headers()
    self.wfile.write('<html><head><title>%s</title></head></html>' %
                     time.time())
    return True
  def CacheNoStoreMaxAgeHandler(self):
    """This request handler yields a page with the title set to the current
    system time, and does not allow the page to be stored even though max-age
    of 60 seconds is specified."""
    if not self._ShouldHandleRequest("/cache/no-store/max-age"):
      return False
    self.send_response(200)
    self.send_header('Content-type', 'text/html')
    self.send_header('Cache-Control', 'max-age=60, no-store')
    self.end_headers()
    self.wfile.write('<html><head><title>%s</title></head></html>' %
                     time.time())
    return True
  def CacheNoTransformHandler(self):
    """This request handler yields a page with the title set to the current
    system time, and does not allow the content to be transformed during
    user-agent caching (no-transform)."""
    if not self._ShouldHandleRequest("/cache/no-transform"):
      return False
    self.send_response(200)
    self.send_header('Content-type', 'text/html')
    self.send_header('Cache-Control', 'no-transform')
    self.end_headers()
    self.wfile.write('<html><head><title>%s</title></head></html>' %
                     time.time())
    return True
def EchoHeader(self):
"""This handler echoes back the value of a specific request header."""
"""The only difference between this function and the EchoHeaderOverride"""
"""function is in the parameter being passed to the helper function"""
return self.EchoHeaderHelper("/echoheader")
def EchoHeaderOverride(self):
"""This handler echoes back the value of a specific request header."""
"""The UrlRequest unit tests also execute for ChromeFrame which uses"""
"""IE to issue HTTP requests using the host network stack."""
"""The Accept and Charset tests which expect the server to echo back"""
"""the corresponding headers fail here as IE returns cached responses"""
"""The EchoHeaderOverride parameter is an easy way to ensure that IE"""
"""treats this request as a new request and does not cache it."""
return self.EchoHeaderHelper("/echoheaderoverride")
def EchoHeaderHelper(self, echo_header):
"""This function echoes back the value of the request header passed in."""
if not self._ShouldHandleRequest(echo_header):
return False
query_char = self.path.find('?')
if query_char != -1:
header_name = self.path[query_char+1:]
self.send_response(200)
self.send_header('Content-type', 'text/plain')
self.send_header('Cache-control', 'max-age=60000')
# insert a vary header to properly indicate that the cachability of this
# request is subject to value of the request header being echoed.
if len(header_name) > 0:
self.send_header('Vary', header_name)
self.end_headers()
if len(header_name) > 0:
self.wfile.write(self.headers.getheader(header_name))
return True
  def EchoHandler(self):
    """This handler just echoes back the payload of the request, for testing
    form submission."""
    if not self._ShouldHandleRequest("/echo"):
      return False
    self.send_response(200)
    self.send_header('Content-type', 'text/html')
    self.end_headers()
    # Read exactly Content-Length bytes of the body and echo them verbatim.
    length = int(self.headers.getheader('content-length'))
    request = self.rfile.read(length)
    self.wfile.write(request)
    return True
def WriteFile(self):
"""This is handler dumps the content of POST request to a disk file into
the data_dir/dump. Sub-directories are not supported."""
prefix='/writefile/'
if not self.path.startswith(prefix):
return False
file_name = self.path[len(prefix):]
# do not allow fancy chars in file name
re.sub('[^a-zA-Z0-9_.-]+', '', file_name)
if len(file_name) and file_name[0] != '.':
path = os.path.join(self.server.data_dir, 'dump', file_name);
length = int(self.headers.getheader('content-length'))
request = self.rfile.read(length)
f = open(path, "wb")
f.write(request);
f.close()
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
self.wfile.write('<html>%s</html>' % file_name)
return True
  def EchoTitleHandler(self):
    """This handler is like Echo, but sets the page title to the request."""
    if not self._ShouldHandleRequest("/echotitle"):
      return False
    self.send_response(200)
    self.send_header('Content-type', 'text/html')
    self.end_headers()
    # Embed the raw POST body as the page title.
    length = int(self.headers.getheader('content-length'))
    request = self.rfile.read(length)
    self.wfile.write('<html><head><title>')
    self.wfile.write(request)
    self.wfile.write('</title></head></html>')
    return True
  def EchoAllHandler(self):
    """This handler yields a (more) human-readable page listing information
    about the request header & contents."""
    if not self._ShouldHandleRequest("/echoall"):
      return False
    self.send_response(200)
    self.send_header('Content-type', 'text/html')
    self.end_headers()
    self.wfile.write('<html><head><style>'
                     'pre { border: 1px solid black; margin: 5px; padding: 5px }'
                     '</style></head><body>'
                     '<div style="float: right">'
                     '<a href="http://localhost:8888/echo">back to referring page</a></div>'
                     '<h1>Request Body:</h1><pre>')
    # POST bodies are decoded as form data and listed one key=value per line.
    if self.command == 'POST':
      length = int(self.headers.getheader('content-length'))
      qs = self.rfile.read(length)
      params = cgi.parse_qs(qs, keep_blank_values=1)
      for param in params:
        self.wfile.write('%s=%s\n' % (param, params[param][0]))
    self.wfile.write('</pre>')
    self.wfile.write('<h1>Request Headers:</h1><pre>%s</pre>' % self.headers)
    self.wfile.write('</body></html>')
    return True
  def DownloadHandler(self):
    """This handler sends a downloadable file (45K total, in two chunks) with
    or without reporting the size via Content-Length."""
    if self.path.startswith("/download-unknown-size"):
      send_length = False
    elif self.path.startswith("/download-known-size"):
      send_length = True
    else:
      return False
    #
    # The test which uses this functionality is attempting to send
    # small chunks of data to the client. Use a fairly large buffer
    # so that we'll fill chrome's IO buffer enough to force it to
    # actually write the data.
    # See also the comments in the client-side of this test in
    # download_uitest.cc
    #
    size_chunk1 = 35*1024
    size_chunk2 = 10*1024
    self.send_response(200)
    self.send_header('Content-type', 'application/octet-stream')
    self.send_header('Cache-Control', 'max-age=0')
    if send_length:
      self.send_header('Content-Length', size_chunk1 + size_chunk2)
    self.end_headers()
    # First chunk of data:
    self.wfile.write("*" * size_chunk1)
    self.wfile.flush()
    # Handle further requests (re-entrantly) until one of them -- normally
    # DownloadFinishHandler -- clears this flag.
    self.server.waitForDownload = True
    while self.server.waitForDownload:
      self.server.handle_request()
    # Second chunk of data:
    self.wfile.write("*" * size_chunk2)
    return True
  def DownloadFinishHandler(self):
    """This handler just tells the server to finish the current download."""
    if not self._ShouldHandleRequest("/download-finish"):
      return False
    # Clearing this flag lets DownloadHandler's nested request loop exit and
    # send its final chunk.
    self.server.waitForDownload = False
    self.send_response(200)
    self.send_header('Content-type', 'text/html')
    self.send_header('Cache-Control', 'max-age=0')
    self.end_headers()
    return True
  def FileHandler(self):
    """This handler sends the contents of the requested file. Wow, it's like
    a real webserver!"""
    prefix = self.server.file_root_url
    if not self.path.startswith(prefix):
      return False
    # Consume a request body if present.
    if self.command == 'POST':
      self.rfile.read(int(self.headers.getheader('content-length')))
    file = self.path[len(prefix):]
    if file.find('?') > -1:
      # Ignore the query parameters entirely.
      # NOTE(review): a path containing more than one '?' would make this
      # split raise ValueError -- confirm callers never send that.
      url, querystring = file.split('?')
    else:
      url = file
    entries = url.split('/')
    path = os.path.join(self.server.data_dir, *entries)
    if os.path.isdir(path):
      # Directory request: serve its index.html.
      path = os.path.join(path, 'index.html')
    if not os.path.isfile(path):
      print "File not found " + file + " full path:" + path
      self.send_error(404)
      return True
    f = open(path, "rb")
    data = f.read()
    f.close()
    # If file.mock-http-headers exists, it contains the headers we
    # should send. Read them in and parse them.
    headers_path = path + '.mock-http-headers'
    if os.path.isfile(headers_path):
      f = open(headers_path, "r")
      # "HTTP/1.1 200 OK"
      response = f.readline()
      status_code = re.findall('HTTP/\d+.\d+ (\d+)', response)[0]
      self.send_response(int(status_code))
      for line in f:
        # "name: value"
        name, value = re.findall('(\S+):\s*(.*)', line)[0]
        self.send_header(name, value)
      f.close()
    else:
      # Could be more generic once we support mime-type sniffing, but for
      # now we need to set it explicitly.
      self.send_response(200)
      self.send_header('Content-type', self.GetMIMETypeFromName(file))
      self.send_header('Content-Length', len(data))
    self.end_headers()
    self.wfile.write(data)
    return True
def RealFileWithCommonHeaderHandler(self):
"""This handler sends the contents of the requested file without the pseudo
http head!"""
prefix='/realfiles/'
if not self.path.startswith(prefix):
return False
file = self.path[len(prefix):]
path = os.path.join(self.server.data_dir, file)
try:
f = open(path, "rb")
data = f.read()
f.close()
# just simply set the MIME as octal stream
self.send_response(200)
self.send_header('Content-type', 'application/octet-stream')
self.end_headers()
self.wfile.write(data)
except:
self.send_error(404)
return True
  def RealBZ2FileWithCommonHeaderHandler(self):
    """This handler sends the bzip2 contents of the requested file with a
    corresponding Content-Encoding field in the http head.

    An optional '?incremental-header' suffix sends the first byte, pauses a
    second, then sends the rest."""
    prefix='/realbz2files/'
    if not self.path.startswith(prefix):
      return False
    parts = self.path.split('?')
    file = parts[0][len(prefix):]
    path = os.path.join(self.server.data_dir, file) + '.bz2'
    if len(parts) > 1:
      options = parts[1]
    else:
      options = ''
    # NOTE(review): the bare 'except:' below maps *any* failure (including a
    # missing Accept-Encoding header, where accept_encoding is None) to 404.
    try:
      self.send_response(200)
      accept_encoding = self.headers.get("Accept-Encoding")
      if accept_encoding.find("bzip2") != -1:
        f = open(path, "rb")
        data = f.read()
        f.close()
        self.send_header('Content-Encoding', 'bzip2')
        self.send_header('Content-type', 'application/x-bzip2')
        self.end_headers()
        if options == 'incremental-header':
          # Send one byte, let the client see the header, then the rest.
          self.wfile.write(data[:1])
          self.wfile.flush()
          time.sleep(1.0)
          self.wfile.write(data[1:])
        else:
          self.wfile.write(data)
      else:
        """client do not support bzip2 format, send pseudo content
        """
        self.send_header('Content-type', 'text/html; charset=ISO-8859-1')
        self.end_headers()
        self.wfile.write("you do not support bzip2 encoding")
    except:
      self.send_error(404)
    return True
def SetCookieHandler(self):
    """Set one cookie per '&'-separated value in the query string.

    /set-cookie?a=1&b=2 replies with a Set-Cookie header for each value and
    echoes the values in the body, for cookie-handling tests.
    """
    if not self._ShouldHandleRequest("/set-cookie"):
        return False
    sep = self.path.find('?')
    cookies = self.path[sep + 1:].split('&') if sep != -1 else ("",)
    self.send_response(200)
    self.send_header('Content-type', 'text/html')
    for value in cookies:
        self.send_header('Set-Cookie', '%s' % value)
    self.end_headers()
    for value in cookies:
        self.wfile.write('%s' % value)
    return True
def AuthBasicHandler(self):
    """This handler tests 'Basic' authentication. It just sends a page with
    title 'user/pass' if you succeed."""
    if not self._ShouldHandleRequest("/auth-basic"):
        return False

    # Defaults keep the failure page printable even when parsing stops early.
    username = userpass = password = b64str = ""

    set_cookie_if_challenged = self.path.find('?set-cookie-if-challenged') > 0

    auth = self.headers.getheader('authorization')
    try:
        if not auth:
            raise Exception('no auth')
        # Credentials arrive as "Basic <base64('user:password')>".
        b64str = re.findall(r'Basic (\S+)', auth)[0]
        userpass = base64.b64decode(b64str)
        username, password = re.findall(r'([^:]+):(\S+)', userpass)[0]
        # Only the fixed password 'secret' is accepted, whatever the user.
        if password != 'secret':
            raise Exception('wrong password')
    except Exception, e:
        # Authentication failed: challenge with a 401 and echo everything we
        # managed to parse so browser tests can inspect the failure.
        self.send_response(401)
        self.send_header('WWW-Authenticate', 'Basic realm="testrealm"')
        self.send_header('Content-type', 'text/html')
        if set_cookie_if_challenged:
            self.send_header('Set-Cookie', 'got_challenged=true')
        self.end_headers()
        self.wfile.write('<html><head>')
        self.wfile.write('<title>Denied: %s</title>' % e)
        self.wfile.write('</head><body>')
        self.wfile.write('auth=%s<p>' % auth)
        self.wfile.write('b64str=%s<p>' % b64str)
        self.wfile.write('username: %s<p>' % username)
        self.wfile.write('userpass: %s<p>' % userpass)
        self.wfile.write('password: %s<p>' % password)
        self.wfile.write('You sent:<br>%s<p>' % self.headers)
        self.wfile.write('</body></html>')
        return True

    # Authentication successful. (Return a cachable response to allow for
    # testing cached pages that require authentication.)
    if_none_match = self.headers.getheader('if-none-match')
    if if_none_match == "abc":
        # Client revalidated against our fixed Etag; nothing to resend.
        self.send_response(304)
        self.end_headers()
    else:
        self.send_response(200)
        self.send_header('Content-type', 'text/html')
        self.send_header('Cache-control', 'max-age=60000')
        self.send_header('Etag', 'abc')
        self.end_headers()
        self.wfile.write('<html><head>')
        self.wfile.write('<title>%s/%s</title>' % (username, password))
        self.wfile.write('</head><body>')
        self.wfile.write('auth=%s<p>' % auth)
        self.wfile.write('You sent:<br>%s<p>' % self.headers)
        self.wfile.write('</body></html>')
    return True
def AuthDigestHandler(self):
    """This handler tests 'Digest' authentication. It just sends a page with
    title 'user/pass' if you succeed."""
    if not self._ShouldHandleRequest("/auth-digest"):
        return False

    # Periodically generate a new nonce.  Technically we should incorporate
    # the request URL into this, but we don't care for testing.
    nonce_life = 10
    stale = False
    if (not self.server.nonce or
        (time.time() - self.server.nonce_time > nonce_life)):
        if self.server.nonce:
            # An expired-but-existing nonce means the client's value is stale,
            # which we report in the challenge below.
            stale = True
        self.server.nonce_time = time.time()
        self.server.nonce = \
            _new_md5(time.ctime(self.server.nonce_time) +
                     'privatekey').hexdigest()

    nonce = self.server.nonce
    opaque = _new_md5('opaque').hexdigest()
    password = 'secret'
    realm = 'testrealm'

    auth = self.headers.getheader('authorization')
    pairs = {}
    try:
        if not auth:
            raise Exception('no auth')
        if not auth.startswith('Digest'):
            raise Exception('not digest')
        # Pull out all the name="value" pairs as a dictionary.
        pairs = dict(re.findall(r'(\b[^ ,=]+)="?([^",]+)"?', auth))

        # Make sure it's all valid.
        if pairs['nonce'] != nonce:
            raise Exception('wrong nonce')
        if pairs['opaque'] != opaque:
            raise Exception('wrong opaque')

        # Check the 'response' value and make sure it matches our magic hash.
        # See http://www.ietf.org/rfc/rfc2617.txt
        # HA1 = MD5(username:realm:password), HA2 = MD5(method:uri).
        hash_a1 = _new_md5(
            ':'.join([pairs['username'], realm, password])).hexdigest()
        hash_a2 = _new_md5(':'.join([self.command, pairs['uri']])).hexdigest()
        if 'qop' in pairs and 'nc' in pairs and 'cnonce' in pairs:
            # qop-style response: MD5(HA1:nonce:nc:cnonce:qop:HA2).
            response = _new_md5(':'.join([hash_a1, nonce, pairs['nc'],
                pairs['cnonce'], pairs['qop'], hash_a2])).hexdigest()
        else:
            # Legacy response: MD5(HA1:nonce:HA2).
            response = _new_md5(':'.join([hash_a1, nonce, hash_a2])).hexdigest()

        if pairs['response'] != response:
            raise Exception('wrong password')
    except Exception, e:
        # Authentication failed: issue the Digest challenge and echo the
        # parsed state back for debugging in the browser test.
        self.send_response(401)
        hdr = ('Digest '
               'realm="%s", '
               'domain="/", '
               'qop="auth", '
               'algorithm=MD5, '
               'nonce="%s", '
               'opaque="%s"') % (realm, nonce, opaque)
        if stale:
            hdr += ', stale="TRUE"'
        self.send_header('WWW-Authenticate', hdr)
        self.send_header('Content-type', 'text/html')
        self.end_headers()
        self.wfile.write('<html><head>')
        self.wfile.write('<title>Denied: %s</title>' % e)
        self.wfile.write('</head><body>')
        self.wfile.write('auth=%s<p>' % auth)
        self.wfile.write('pairs=%s<p>' % pairs)
        self.wfile.write('You sent:<br>%s<p>' % self.headers)
        self.wfile.write('We are replying:<br>%s<p>' % hdr)
        self.wfile.write('</body></html>')
        return True

    # Authentication successful.
    self.send_response(200)
    self.send_header('Content-type', 'text/html')
    self.end_headers()
    self.wfile.write('<html><head>')
    self.wfile.write('<title>%s/%s</title>' % (pairs['username'], password))
    self.wfile.write('</head><body>')
    self.wfile.write('auth=%s<p>' % auth)
    self.wfile.write('pairs=%s<p>' % pairs)
    self.wfile.write('</body></html>')
    return True
def SlowServerHandler(self):
    """Wait for the user suggested time before responding. The syntax is
    /slow?0.5 to wait for half a second."""
    if not self._ShouldHandleRequest("/slow"):
        return False
    query_char = self.path.find('?')
    wait_sec = 1.0
    if query_char >= 0:
        try:
            # Bug fix: parse as float so fractional delays like /slow?0.5
            # work as documented. int() raised ValueError for them and the
            # delay silently fell back to 1 second.
            wait_sec = float(self.path[query_char + 1:])
        except ValueError:
            pass
    time.sleep(wait_sec)
    self.send_response(200)
    self.send_header('Content-type', 'text/plain')
    self.end_headers()
    # %d truncates fractional waits; the body text is informational only and
    # kept byte-compatible with the previous integer behavior.
    self.wfile.write("waited %d seconds" % wait_sec)
    return True
def ContentTypeHandler(self):
    """Serve a tiny HTML body under a caller-chosen Content-Type.

    E.g. /contenttype?text/css replies with Content-Type: text/css.  Falls
    back to text/html when the computed value is empty.
    """
    if not self._ShouldHandleRequest("/contenttype"):
        return False
    sep = self.path.find('?')
    requested_type = self.path[sep + 1:].strip() or 'text/html'
    self.send_response(200)
    self.send_header('Content-Type', requested_type)
    self.end_headers()
    self.wfile.write("<html>\n<body>\n<p>HTML text</p>\n</body>\n</html>\n")
    return True
def ServerRedirectHandler(self):
    """Issue an HTTP 301 redirect to the URL given after '?'.

    '/server-redirect?http://foo.bar/asdf' redirects to http://foo.bar/asdf.
    Without a destination, a short help page is served instead.
    """
    test_name = "/server-redirect"
    if not self._ShouldHandleRequest(test_name):
        return False

    sep = self.path.find('?')
    if sep < 0 or len(self.path) <= sep + 1:
        self.sendRedirectHelp(test_name)
        return True
    target = self.path[sep + 1:]

    self.send_response(301)  # moved permanently
    self.send_header('Location', target)
    self.send_header('Content-type', 'text/html')
    self.end_headers()
    self.wfile.write('<html><head>')
    self.wfile.write('</head><body>Redirecting to %s</body></html>' % target)
    return True
def ClientRedirectHandler(self):
    """Send a meta-refresh page redirecting to the URL given after '?'.

    '/client-redirect?http://foo.bar/asdf' redirects to http://foo.bar/asdf.
    Without a destination, a short help page is served instead.
    """
    test_name = "/client-redirect"
    if not self._ShouldHandleRequest(test_name):
        return False

    sep = self.path.find('?')
    if sep < 0 or len(self.path) <= sep + 1:
        self.sendRedirectHelp(test_name)
        return True
    target = self.path[sep + 1:]

    self.send_response(200)
    self.send_header('Content-type', 'text/html')
    self.end_headers()
    self.wfile.write('<html><head>')
    self.wfile.write('<meta http-equiv="refresh" content="0;url=%s">' % target)
    self.wfile.write('</head><body>Redirecting to %s</body></html>' % target)
    return True
def DefaultResponseHandler(self):
    """Catch-all GET/POST handler for paths no other handler claimed.

    Content-Length is set explicitly; without it the https connection is not
    closed properly and the browser keeps expecting data.
    """
    body = "Default response given for path: " + self.path
    self.send_response(200)
    self.send_header('Content-type', 'text/html')
    self.send_header("Content-Length", len(body))
    self.end_headers()
    self.wfile.write(body)
    return True
def RedirectConnectHandler(self):
    """Answer CONNECT requests for www.redirect.com with a 302.

    Such a redirect is not specified by the RFC, so the browser should
    refuse to follow it.
    """
    if self.path.find("www.redirect.com") < 0:
        return False
    self.send_response(302)  # moved temporarily
    self.send_header('Location', "http://www.destination.com/foo.js")
    self.send_header('Connection', 'close')
    self.end_headers()
    return True
def ServerAuthConnectHandler(self):
    """Answer CONNECT requests for www.server-auth.com with a 401.

    This response doesn't make sense because the proxy server cannot request
    server authentication.
    """
    if self.path.find("www.server-auth.com") < 0:
        return False
    self.send_response(401)  # unauthorized
    self.send_header('WWW-Authenticate', 'Basic realm="WallyWorld"')
    self.send_header('Connection', 'close')
    self.end_headers()
    return True
def DefaultConnectResponseHandler(self):
    """Catch-all CONNECT handler.

    Real Web servers respond to unexpected CONNECT requests with 400.
    """
    body = "Your client has issued a malformed or illegal request."
    self.send_response(400)  # bad request
    self.send_header('Content-type', 'text/html')
    self.send_header("Content-Length", len(body))
    self.end_headers()
    self.wfile.write(body)
    return True
def do_CONNECT(self):
    """Dispatch a CONNECT request to the first handler that claims it."""
    for candidate in self._connect_handlers:
        if candidate():
            break
def do_GET(self):
    """Dispatch a GET request to the first handler that claims it."""
    for candidate in self._get_handlers:
        if candidate():
            break
def do_POST(self):
    """Dispatch a POST request to the first handler that claims it."""
    for candidate in self._post_handlers:
        if candidate():
            break
# called by the redirect handling function when there is no parameter
def sendRedirectHelp(self, redirect_name):
    """Serve a short HTML usage note for a redirect endpoint."""
    self.send_response(200)
    self.send_header('Content-type', 'text/html')
    self.end_headers()
    help_html = ('<html><body><h1>Error: no redirect destination</h1>'
                 'Use <pre>%s?http://dest...</pre>'
                 '</body></html>' % redirect_name)
    self.wfile.write(help_html)
def MakeDumpDir(data_dir):
    """Create (or reset) the 'dump' directory used to store HTTP POST uploads.

    If the directory already exists, it and everything beneath it are deleted
    first, so the result is always an empty directory.
    """
    dump_dir = os.path.join(data_dir, 'dump')
    if os.path.isdir(dump_dir):
        shutil.rmtree(dump_dir)
    os.mkdir(dump_dir)
def MakeDataDir():
    """Return the directory the server should serve files from.

    Honors --data-dir when given (the directory must already exist);
    otherwise falls back to a path relative to this script's location.
    Returns None when the user-specified directory is missing.

    NOTE(review): relies on the module-global 'options' having been parsed
    before this is called — confirm call order from __main__.
    """
    if options.data_dir:
        if not os.path.isdir(options.data_dir):
            print 'specified data dir not found: ' + options.data_dir + ' exiting...'
            return None
        my_data_dir = options.data_dir
    else:
        # Create the default path to our data dir, relative to the exe dir.
        my_data_dir = os.path.dirname(sys.argv[0])
        my_data_dir = os.path.join(my_data_dir, "..", "..", "..", "..",
                                   "test", "data")

        #TODO(ibrar): Must use Find* funtion defined in google\tools
        #i.e my_data_dir = FindUpward(my_data_dir, "test", "data")

    return my_data_dir
def main(options, args):
    """Start an HTTP(S) or FTP test server on 127.0.0.1 and serve until
    interrupted or told to stop."""
    # redirect output to a log file so it doesn't spam the unit test output
    logfile = open('testserver.log', 'w')
    sys.stderr = sys.stdout = logfile

    port = options.port

    if options.server_type == SERVER_HTTP:
        if options.cert:
            # let's make sure the cert file exists.
            if not os.path.isfile(options.cert):
                print 'specified cert file not found: ' + options.cert + ' exiting...'
                return
            # Forking variant serves each request in its own process.
            if options.forking:
                server_class = ForkingHTTPSServer
            else:
                server_class = HTTPSServer
            server = server_class(('127.0.0.1', port), TestPageHandler, options.cert)
            print 'HTTPS server started on port %d...' % port
        else:
            if options.forking:
                server_class = ForkingHTTPServer
            else:
                server_class = StoppableHTTPServer
            server = server_class(('127.0.0.1', port), TestPageHandler)
            print 'HTTP server started on port %d...' % port
        # Handlers read these attributes off the server object.
        server.data_dir = MakeDataDir()
        server.file_root_url = options.file_root_url
        MakeDumpDir(server.data_dir)

    # means FTP Server
    else:
        my_data_dir = MakeDataDir()

        def line_logger(msg):
            # pyftpdlib log hook: shut the server down when any logged line
            # contains "kill" (sent by the test harness as a command).
            if (msg.find("kill") >= 0):
                server.stop = True
                print 'shutting down server'
                sys.exit(0)

        # Instantiate a dummy authorizer for managing 'virtual' users
        authorizer = pyftpdlib.ftpserver.DummyAuthorizer()

        # Define a new user having full r/w permissions and a read-only
        # anonymous user
        authorizer.add_user('chrome', 'chrome', my_data_dir, perm='elradfmw')

        authorizer.add_anonymous(my_data_dir)

        # Instantiate FTP handler class
        ftp_handler = pyftpdlib.ftpserver.FTPHandler
        ftp_handler.authorizer = authorizer
        pyftpdlib.ftpserver.logline = line_logger

        # Define a customized banner (string returned when client connects)
        ftp_handler.banner = ("pyftpdlib %s based ftpd ready." %
                              pyftpdlib.ftpserver.__ver__)

        # Instantiate FTP server class and listen to 127.0.0.1:port
        address = ('127.0.0.1', port)
        server = pyftpdlib.ftpserver.FTPServer(address, ftp_handler)
        print 'FTP server started on port %d...' % port

    try:
        server.serve_forever()
    except KeyboardInterrupt:
        print 'shutting down server'
        server.stop = True
if __name__ == '__main__':
    # Command-line front end: build the option parser and hand off to main().
    option_parser = optparse.OptionParser()
    option_parser.add_option("-f", '--ftp', action='store_const',
                             const=SERVER_FTP, default=SERVER_HTTP,
                             dest='server_type',
                             help='FTP or HTTP server default HTTP')
    option_parser.add_option('--forking', action='store_true', default=False,
                             dest='forking',
                             help='Serve each request in a separate process')
    option_parser.add_option('', '--port', default='8888', type='int',
                             help='Port used by the server')
    option_parser.add_option('', '--data-dir', dest='data_dir',
                             help='Directory from which to read the files')
    option_parser.add_option('', '--https', dest='cert',
                             help='Specify that https should be used, specify '
                             'the path to the cert containing the private key '
                             'the server should use')
    option_parser.add_option('', '--file-root-url', default='/files/',
                             help='Specify a root URL for files served.')
    options, args = option_parser.parse_args()

    sys.exit(main(options, args))
| bsd-3-clause |
pactools/pactools | pactools/utils/deprecation.py | 1 | 5676 | import sys
import warnings
from functools import wraps
__all__ = ["deprecated", ]
class deprecated(object):
    """Decorator to mark a function or class as deprecated.

    Issue a warning when the function is called/the class is instantiated and
    adds a warning to the docstring.

    The optional extra argument will be appended to the deprecation message
    and the docstring. Note: to use this with the default value for extra, put
    in an empty of parentheses:

    >>> from pactools.utils.deprecation import deprecated
    >>> deprecated() # doctest: +ELLIPSIS
    <pactools.utils.deprecation.deprecated object at ...>

    >>> @deprecated()
    ... def some_function(): pass
    """

    # Copied from scikit-learn
    def __init__(self, extra=''):
        """
        Parameters
        ----------
        extra : string
            to be added to the deprecation messages
        """
        self.extra = extra

    def __call__(self, obj):
        # Reset module warning registries first so the DeprecationWarning is
        # not suppressed by earlier "already warned" state.
        clean_warning_registry()
        if isinstance(obj, type):
            return self._decorate_class(obj)
        return self._decorate_fun(obj)

    def _decorate_class(self, cls):
        message = "Class %s is deprecated" % cls.__name__
        if self.extra:
            message += "; %s" % self.extra

        # FIXME: we should probably reset __new__ for full generality
        original_init = cls.__init__

        def wrapped(*args, **kwargs):
            # Warn at instantiation time, then run the real initializer.
            warnings.warn(message, category=DeprecationWarning)
            return original_init(*args, **kwargs)

        cls.__init__ = wrapped
        wrapped.__name__ = '__init__'
        wrapped.__doc__ = self._update_doc(original_init.__doc__)
        wrapped.deprecated_original = original_init
        return cls

    def _decorate_fun(self, fun):
        """Decorate function fun"""
        message = "Function %s is deprecated" % fun.__name__
        if self.extra:
            message += "; %s" % self.extra

        def wrapped(*args, **kwargs):
            warnings.warn(message, category=DeprecationWarning)
            return fun(*args, **kwargs)

        # Mirror the wrapped function's metadata by hand.
        wrapped.__name__ = fun.__name__
        wrapped.__dict__ = fun.__dict__
        wrapped.__doc__ = self._update_doc(fun.__doc__)
        return wrapped

    def _update_doc(self, olddoc):
        # Prefix the original docstring with a DEPRECATED banner.
        newdoc = "DEPRECATED"
        if self.extra:
            newdoc = "%s: %s" % (newdoc, self.extra)
        if olddoc:
            newdoc = "%s\n\n%s" % (newdoc, olddoc)
        return newdoc
def ignore_warnings(obj=None, category=Warning):
    """Context manager and decorator to ignore warnings.

    Note. Using this (in both variants) will clear all warnings
    from all python modules loaded. In case you need to test
    cross-module-warning-logging this is not your tool of choice.

    Parameters
    ----------
    category : warning class, defaults to Warning.
        The category to filter. If Warning, all categories will be muted.

    Examples
    --------
    >>> with ignore_warnings():
    ...     warnings.warn('buhuhuhu')

    >>> def nasty_warn():
    ...     warnings.warn('buhuhuhu')
    ...     print(42)

    >>> ignore_warnings(nasty_warn)()
    42
    """
    # Bare-decorator use (obj is the function) versus parameterized use or
    # context-manager use (obj is None).
    if callable(obj):
        return _IgnoreWarnings(category=category)(obj)
    return _IgnoreWarnings(category=category)
class _IgnoreWarnings(object):
    """Improved and simplified Python warnings context manager and decorator.

    This class allows to ignore the warnings raise by a function.
    Copied from Python 2.7.5 and modified as required.

    Parameters
    ----------
    category : tuple of warning class, default to Warning
        The category to filter. By default, all the categories will be muted.

    """

    def __init__(self, category):
        self._record = True
        # Resolve the warnings module through sys.modules so a reload of the
        # module is honored, as in the original CPython implementation.
        self._module = sys.modules['warnings']
        self._entered = False
        self.log = []
        self.category = category

    def __call__(self, fn):
        """Decorator to catch and hide warnings without visual nesting."""
        @wraps(fn)
        def wrapper(*args, **kwargs):
            # very important to avoid uncontrolled state propagation
            clean_warning_registry()
            with warnings.catch_warnings():
                warnings.simplefilter("ignore", self.category)
                return fn(*args, **kwargs)

        return wrapper

    def __repr__(self):
        args = []
        if self._record:
            args.append("record=True")
        if self._module is not sys.modules['warnings']:
            args.append("module=%r" % self._module)
        name = type(self).__name__
        return "%s(%s)" % (name, ", ".join(args))

    def __enter__(self):
        clean_warning_registry()  # be safe and not propagate state + chaos
        warnings.simplefilter("ignore", self.category)
        if self._entered:
            raise RuntimeError("Cannot enter %r twice" % self)
        self._entered = True
        # Save the filter list and showwarning hook so __exit__ can restore
        # them; copying the list keeps filter changes local to this block.
        self._filters = self._module.filters
        self._module.filters = self._filters[:]
        self._showwarning = self._module.showwarning

    def __exit__(self, *exc_info):
        if not self._entered:
            raise RuntimeError("Cannot exit %r without entering first" % self)
        self._module.filters = self._filters
        self._module.showwarning = self._showwarning
        self.log[:] = []
        clean_warning_registry()  # be safe and not propagate state + chaos
def clean_warning_registry():
    """Safe way to reset warnings.

    Resets the global filters and clears every loaded module's
    __warningregistry__ so previously-raised warnings fire again.
    """
    warnings.resetwarnings()
    registry_name = "__warningregistry__"
    for module_name, module in list(sys.modules.items()):
        if 'six.moves' in module_name:
            # six.moves lazy modules can raise on attribute access; skip them.
            continue
        if hasattr(module, registry_name):
            getattr(module, registry_name).clear()
| bsd-3-clause |
jaruba/chromium.src | third_party/pycoverage/coverage/templite.py | 160 | 6868 | """A simple Python template renderer, for a nano-subset of Django syntax."""
# Coincidentally named the same as http://code.activestate.com/recipes/496702/
import re
from coverage.backward import set # pylint: disable=W0622
class CodeBuilder(object):
    """Accumulate lines (and nested sections) of Python source code."""

    def __init__(self, indent=0):
        self.code = []
        self.indent_amount = indent

    def add_line(self, line):
        """Append one line of source at the current indentation.

        The caller supplies neither leading whitespace nor a trailing
        newline; both are added here.
        """
        self.code.extend([" " * self.indent_amount, line, "\n"])

    def add_section(self):
        """Reserve a nested CodeBuilder at this point in the output."""
        section = CodeBuilder(self.indent_amount)
        self.code.append(section)
        return section

    def indent(self):
        """Shift subsequent lines one level (four spaces) to the right."""
        self.indent_amount += 4

    def dedent(self):
        """Shift subsequent lines one level (four spaces) back left."""
        self.indent_amount -= 4

    def __str__(self):
        # str() on a nested section renders it recursively.
        return "".join(str(piece) for piece in self.code)

    def get_function(self, fn_name):
        """Compile the accumulated source and return the function `fn_name`."""
        assert self.indent_amount == 0
        namespace = {}
        exec(str(self), namespace)
        return namespace[fn_name]
class Templite(object):
    """A simple template renderer, for a nano-subset of Django syntax.

    Supported constructs are extended variable access::

        {{var.modifer.modifier|filter|filter}}

    loops::

        {% for var in list %}...{% endfor %}

    and ifs::

        {% if var %}...{% endif %}

    Comments are within curly-hash markers::

        {# This will be ignored #}

    Construct a Templite with the template text, then use `render` against a
    dictionary context to create a finished string.

    """
    def __init__(self, text, *contexts):
        """Construct a Templite with the given `text`.

        `contexts` are dictionaries of values to use for future renderings.
        These are good for filters and global values.

        """
        self.text = text
        self.context = {}
        for context in contexts:
            self.context.update(context)

        # We construct a function in source form, then compile it and hold onto
        # it, and execute it to render the template.
        code = CodeBuilder()

        code.add_line("def render(ctx, dot):")
        code.indent()
        # Placeholder section: the context-variable extraction lines are
        # filled in at the end, once all template variables are known.
        vars_code = code.add_section()
        self.all_vars = set()
        self.loop_vars = set()
        code.add_line("result = []")
        # Short aliases keep the generated code small and lookups fast.
        code.add_line("a = result.append")
        code.add_line("e = result.extend")
        code.add_line("s = str")

        buffered = []
        def flush_output():
            """Force `buffered` to the code builder."""
            if len(buffered) == 1:
                code.add_line("a(%s)" % buffered[0])
            elif len(buffered) > 1:
                code.add_line("e([%s])" % ",".join(buffered))
            del buffered[:]

        # Split the text to form a list of tokens.
        toks = re.split(r"(?s)({{.*?}}|{%.*?%}|{#.*?#})", text)

        ops_stack = []
        for tok in toks:
            if tok.startswith('{{'):
                # An expression to evaluate.
                buffered.append("s(%s)" % self.expr_code(tok[2:-2].strip()))
            elif tok.startswith('{#'):
                # Comment: ignore it and move on.
                continue
            elif tok.startswith('{%'):
                # Action tag: split into words and parse further.
                flush_output()
                words = tok[2:-2].strip().split()
                if words[0] == 'if':
                    # An if statement: evaluate the expression to determine if.
                    assert len(words) == 2
                    ops_stack.append('if')
                    code.add_line("if %s:" % self.expr_code(words[1]))
                    code.indent()
                elif words[0] == 'for':
                    # A loop: iterate over expression result.
                    assert len(words) == 4 and words[2] == 'in'
                    ops_stack.append('for')
                    self.loop_vars.add(words[1])
                    code.add_line(
                        "for c_%s in %s:" % (
                            words[1],
                            self.expr_code(words[3])
                        )
                    )
                    code.indent()
                elif words[0].startswith('end'):
                    # Endsomething.  Pop the ops stack
                    end_what = words[0][3:]
                    if ops_stack[-1] != end_what:
                        raise SyntaxError("Mismatched end tag: %r" % end_what)
                    ops_stack.pop()
                    code.dedent()
                else:
                    raise SyntaxError("Don't understand tag: %r" % words[0])
            else:
                # Literal content.  If it isn't empty, output it.
                if tok:
                    buffered.append("%r" % tok)
        flush_output()

        # Non-loop variables must come from the caller's context: emit one
        # lookup line per variable into the placeholder section.
        for var_name in self.all_vars - self.loop_vars:
            vars_code.add_line("c_%s = ctx[%r]" % (var_name, var_name))

        if ops_stack:
            raise SyntaxError("Unmatched action tag: %r" % ops_stack[-1])

        code.add_line("return ''.join(result)")
        code.dedent()
        self.render_function = code.get_function('render')

    def expr_code(self, expr):
        """Generate a Python expression for `expr`."""
        if "|" in expr:
            # Filters apply left-to-right: "x|f|g" becomes c_g(c_f(c_x)).
            pipes = expr.split("|")
            code = self.expr_code(pipes[0])
            for func in pipes[1:]:
                self.all_vars.add(func)
                code = "c_%s(%s)" % (func, code)
        elif "." in expr:
            # Dotted access is resolved at render time by do_dots().
            dots = expr.split(".")
            code = self.expr_code(dots[0])
            args = [repr(d) for d in dots[1:]]
            code = "dot(%s, %s)" % (code, ", ".join(args))
        else:
            self.all_vars.add(expr)
            code = "c_%s" % expr
        return code

    def render(self, context=None):
        """Render this template by applying it to `context`.

        `context` is a dictionary of values to use in this rendering.

        """
        # Make the complete context we'll use.
        ctx = dict(self.context)
        if context:
            ctx.update(context)
        return self.render_function(ctx, self.do_dots)

    def do_dots(self, value, *dots):
        """Evaluate dotted expressions at runtime."""
        for dot in dots:
            try:
                # Attribute access first, falling back to item access.
                value = getattr(value, dot)
            except AttributeError:
                value = value[dot]
            # Zero-argument callables (e.g. methods) are invoked implicitly.
            if hasattr(value, '__call__'):
                value = value()
        return value
| bsd-3-clause |
sklnet/openblackhole-enigma2 | lib/python/Components/About.py | 1 | 2777 | # -*- coding: utf-8 -*-
import sys, os, time
from Tools.HardwareInfo import HardwareInfo
def getVersionString():
    """Compatibility alias for getImageVersionString()."""
    return getImageVersionString()
def getImageVersionString():
    """Return the image build date from the package manager's status file.

    Uses the mtime of the opkg (or legacy ipkg) status file; years before
    2011 are treated as bogus and reported as unavailable.
    """
    try:
        status_file = '/var/lib/opkg/status'
        if not os.path.isfile(status_file):
            status_file = '/usr/lib/ipkg/status'
        tm = time.localtime(os.stat(status_file).st_mtime)
        if tm.tm_year >= 2011:
            return time.strftime("%Y-%m-%d %H:%M:%S", tm)
    except:
        pass
    return _("unavailable")
def getFlashDateString():
    """Return the ctime of /boot, i.e. when the image was flashed."""
    try:
        flashed = os.stat("/boot").st_ctime
        return time.strftime(_("%Y-%m-%d %H:%M"), time.localtime(flashed))
    except:
        return _("unknown")
def getEnigmaVersionString():
    """Return enigma2's version, stripped of a '-(no branch)' git suffix."""
    import enigma
    version = enigma.getEnigmaVersionString()
    if '-(no branch)' in version:
        version = version[:-12]
    return version
def getGStreamerVersionString():
    """Return the GStreamer version as reported by the enigma core."""
    import enigma
    return enigma.getGStreamerVersionString()
def getKernelVersionString():
    """Extract the kernel release (e.g. '3.2.1') from /proc/version."""
    try:
        version_line = open("/proc/version", "r").read()
        return version_line.split(' ', 4)[2].split('-', 2)[0]
    except:
        return _("unknown")
def getHardwareTypeString():
    """Return the human-readable device name from HardwareInfo."""
    return HardwareInfo().get_device_string()
def getImageTypeString():
    """Derive the image name from the second-to-last line of /etc/issue."""
    try:
        issue_line = open("/etc/issue").readlines()[-2]
        # Capitalize and drop the trailing ' \n \l' login-prompt markers.
        return issue_line.capitalize().strip()[:-6]
    except:
        return _("undefined")
def getCPUInfoString():
    """Summarize processor model, clock speed, core count and, when the
    frontprocessor sensor file exists, the temperature."""
    try:
        core_count = 0
        for raw_line in open("/proc/cpuinfo").readlines():
            fields = [field.strip() for field in raw_line.strip().split(":")]
            if fields[0] == "system type":
                processor = fields[1].split()[0]
            if fields[0] == "cpu MHz":
                cpu_speed = "%1.0f" % float(fields[1])
                core_count += 1
        cores = ngettext("%d core", "%d cores", core_count) % core_count
        if os.path.isfile('/proc/stb/fp/temp_sensor_avs'):
            temperature = open("/proc/stb/fp/temp_sensor_avs").readline().replace('\n', '')
            return "%s %s MHz (%s) %s°C" % (processor, cpu_speed, cores, temperature)
        return "%s %s MHz (%s)" % (processor, cpu_speed, cores)
    except:
        return _("undefined")
def getDriverInstalledDate():
    """Read the DVB driver build date (YYYY-MM-DD) from its opkg control file.

    The control file's Version field ends in a YYYYMMDD stamp which is
    reformatted with dashes.
    """
    try:
        from glob import glob
        control_file = glob("/var/lib/opkg/info/*-dvb-modules-*.control")[0]
        for line in open(control_file, "r"):
            if line.startswith("Version:"):
                stamp = line.split("-")[-2:-1][0][-8:]
                return "%s-%s-%s" % (stamp[:4], stamp[4:6], stamp[6:])
        raise ValueError("no Version line")
    except:
        return _("unknown")
def getPythonVersionString():
    """Return the interpreter version by shelling out to 'python -V'."""
    try:
        import commands
        rc, output = commands.getstatusoutput("python -V")
        return output.split(' ')[1]
    except:
        return _("unknown")
def getDriverVuInstalledDate():
    """Return the version segment of the installed vuplus-dvb driver package."""
    try:
        listing = os.popen("opkg list-installed | grep vuplus-dvb-").read().strip()
        fields = listing.split("-")
        # Field 5 of the dash-separated package listing holds the version.
        return fields[5]
    except:
        return "unknown"
# For modules that do "from About import about"
# 'about' is this very module object, so both import styles expose the same API.
about = sys.modules[__name__]
| gpl-2.0 |
class Solution:
    def firstMissingPositive(self, nums):
        """Return the smallest positive integer missing from nums.

        O(n) time, O(1) extra space: every value v in [1, len(nums)] is
        moved to index v - 1, out-of-range values are zeroed, then the
        first slot whose value does not match its index reveals the answer.

        :type nums: List[int]
        :rtype: int
        """
        for index in range(0, len(nums)):
            n = index + 1
            if nums[index] == n:  # value already sits in its slot
                continue
            elif not self.isvalid(nums[index], nums):  # unusable value: zero it
                nums[index] = 0
                continue
            else:  # valid value, wrong slot: relocate it
                numToMove = nums[index]
                nums[index] = 0
                self.move(numToMove, nums)
        for index in range(0, len(nums)):
            if nums[index] != index + 1:
                return index + 1
        return len(nums) + 1

    def move(self, numToMove, nums):
        """Place numToMove at index numToMove - 1, relocating displaced values.

        Iterative on purpose: the previous recursive version followed the
        relocation chain with one stack frame per step and could raise
        RecursionError on chains longer than the recursion limit.
        """
        while True:
            displaced = nums[numToMove - 1]
            if displaced == numToMove:
                return  # the slot already holds the right number
            nums[numToMove - 1] = numToMove
            if not self.isvalid(displaced, nums):
                return  # the displaced value is unusable; drop it
            numToMove = displaced  # continue the chain with the displaced value

    def isvalid(self, numberToCheck, nums):
        """Return True if numberToCheck could occupy a slot (1..len(nums))."""
        return 0 < numberToCheck <= len(nums)
if __name__ == '__main__':
    # Sanity checks: the classic LeetCode examples plus duplicates and a
    # fully-reversed permutation.
    assert 3 == Solution().firstMissingPositive([1, 2, 0])
    assert 2 == Solution().firstMissingPositive([3, 4, -1, 1])
    assert 1 == Solution().firstMissingPositive([7, 8, 9, 11, 12])
    assert 5 == Solution().firstMissingPositive([4, 3, 2, 1])
    assert 2 == Solution().firstMissingPositive([1, 1])
iradul/phantomjs-clone | src/qt/qtwebkit/Tools/Scripts/webkitpy/tool/steps/preparechangelogforrevert.py | 121 | 3180 | # Copyright (C) 2010 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from webkitpy.common.checkout.changelog import ChangeLog
from webkitpy.common.config import urls
from webkitpy.tool.grammar import join_with_separators
from webkitpy.tool.steps.abstractstep import AbstractStep
class PrepareChangeLogForRevert(AbstractStep):
    """Step that runs prepare-ChangeLog and rewrites the freshly generated
    ChangeLog entries with a canonical rollout message."""

    @classmethod
    def _message_for_revert(cls, revision_list, reason, bug_url=None):
        # Header: "Unreviewed, rolling out r1, r2 and r3." followed by one
        # revision-view URL per rolled-out revision.
        message = "Unreviewed, rolling out %s.\n" % join_with_separators(['r' + str(revision) for revision in revision_list])
        for revision in revision_list:
            message += "%s\n" % urls.view_revision_url(revision)
        if bug_url:
            message += "%s\n" % bug_url
        # Add an extra new line after the rollout links, before any reason.
        message += "\n"
        if reason:
            message += "%s\n\n" % reason
        return message

    def run(self, state):
        # This could move to prepare-ChangeLog by adding a --revert= option.
        self._tool.executive.run_and_throw_if_fail(self._tool.deprecated_port().prepare_changelog_command(), cwd=self._tool.scm().checkout_root)
        changelog_paths = self._tool.checkout().modified_changelogs(git_commit=None)
        # state["bug_id"] may be None/empty; only then is bug_url omitted.
        bug_url = self._tool.bugs.bug_url_for_bug_id(state["bug_id"]) if state["bug_id"] else None
        message = self._message_for_revert(state["revision_list"], state["reason"], bug_url)
        for changelog_path in changelog_paths:
            # FIXME: Seems we should prepare the message outside of changelogs.py and then just pass in
            # text that we want to use to replace the reviewed by line.
            ChangeLog(changelog_path).update_with_unreviewed_message(message)
| bsd-3-clause |
BigRocky/xiaohuangji | tools/crawl_to_chat.py | 7 | 1278 | #-*-coding:utf-8-*-
# 主动聊天
import sys
sys.path.append('..')
import random
from redis import Redis
from renren import RenRen
from my_accounts import accounts
import time
from crawl_info_config import crawl_info_list
# Module-level singletons: a local redis connection holding the
# already-commented bitmap, and one logged-in RenRen bot account used for
# all searches and replies.
kv = Redis(host='localhost')
account = accounts[0]
bot = RenRen(account[0], account[1])
def handle(keyword, responses):
    """Search recent statuses for `keyword` and post one random canned reply.

    A bit per status id is kept in the redis bitmap 'status_record' so each
    status is only ever commented on once.
    """
    statuses = bot.searchStatus(keyword, max_length=10)
    for status in statuses:
        user_id, status_id, status_time = status
        # Drop the leading digit so the id fits the bitmap offset range.
        status_id_hash = int(str(status_id)[1:])
        if not kv.getbit('status_record', status_id_hash):
            print keyword, user_id, status_id, status_time
            bot.addComment({
                'type': 'status',
                'source_id': status_id,
                'owner_id': user_id,
                'message': random.choice(responses)
            })
            # Mark as handled only after the comment was posted.
            kv.setbit('status_record', status_id_hash, 1)
def main():
    """Run one crawl pass over every configured keyword, replying where needed."""
    for crawl_info in crawl_info_list:
        for keyword in crawl_info['keywords']:
            try:
                handle(keyword, crawl_info['responses'])
            except Exception, e:
                # Best-effort: one bad keyword must not stop the whole pass.
                print e
                continue
if __name__ == '__main__':
    # Poll forever: one crawl pass every 30 seconds.
    while True:
        print 'fetching...'
        main()
        time.sleep(30)
| mit |
sneakywombat/capirca | definate/generator_factory.py | 9 | 1735 | #!/usr/bin/python
#
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Functionality to allow easily retrieving the right definition generator."""
__author__ = '[email protected] (Martin Suess)'
import dns_generator
class Error(Exception):
    """Base error class for all exceptions raised by this module."""
class GeneratorIdentificationError(Error):
    """Raised when GeneratorFactory cannot map an identifier to a generator."""
class GeneratorFactory(object):
  """Functionality to get a definition generator easily based on its name."""

  def __init__(self):
    """Initializer."""
    # Registry mapping identifier strings to generator classes.
    self._generators = {
        'DnsGenerator': dns_generator.DnsGenerator,
    }

  def GetGenerator(self, identifier):
    """Returns a specific generator instance based on the identifier.

    Args:
      identifier: String identifier for the generator to get.

    Raises:
      GeneratorIdentificationError: If the generator cannot be identified.

    Returns:
      Generator instance based on the identifier passed in.
    """
    generator_class = self._generators.get(identifier)
    if generator_class is None:
      raise GeneratorIdentificationError(
          'Generator \'%s\' could not be identified.' % identifier)
    return generator_class()
| apache-2.0 |
kustodian/ansible | lib/ansible/plugins/netconf/junos.py | 10 | 9534 | #
# (c) 2017 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = """
---
author: Ansible Networking Team
netconf: junos
short_description: Use junos netconf plugin to run netconf commands on Juniper JUNOS platform
description:
- This junos plugin provides low level abstraction apis for
sending and receiving netconf commands from Juniper JUNOS network devices.
version_added: "2.9"
options:
ncclient_device_handler:
type: str
default: junos
description:
- Specifies the ncclient device handler name for Juniper junos network os. To
identify the ncclient device handler name refer ncclient library documentation.
"""
import json
import re
from ansible.module_utils._text import to_text, to_native
from ansible.module_utils.six import string_types
from ansible.errors import AnsibleConnectionFailure
from ansible.plugins.netconf import NetconfBase, ensure_ncclient
try:
from ncclient import manager
from ncclient.operations import RPCError
from ncclient.transport.errors import SSHUnknownHostError
from ncclient.xml_ import to_ele, to_xml, new_ele, sub_ele
HAS_NCCLIENT = True
except (ImportError, AttributeError): # paramiko and gssapi are incompatible and raise AttributeError not ImportError
HAS_NCCLIENT = False
class Netconf(NetconfBase):
    """Netconf plugin for Juniper JUNOS devices, layered over ncclient's
    'junos' device handler.
    """

    def get_text(self, ele, tag):
        # Return the stripped text of child *tag* of *ele*; returns None when
        # the tag is absent (ele.find() yields None -> AttributeError on .text).
        try:
            return to_text(ele.find(tag).text, errors='surrogate_then_replace').strip()
        except AttributeError:
            pass

    @ensure_ncclient
    def get_device_info(self):
        # Ask the device for <get-software-information/> and translate the
        # reply into the standard device-info mapping.
        device_info = dict()
        device_info['network_os'] = 'junos'
        ele = new_ele('get-software-information')
        data = self.execute_rpc(to_xml(ele))
        reply = to_ele(data)
        sw_info = reply.find('.//software-information')
        device_info['network_os_version'] = self.get_text(sw_info, 'junos-version')
        device_info['network_os_hostname'] = self.get_text(sw_info, 'host-name')
        device_info['network_os_model'] = self.get_text(sw_info, 'product-model')
        return device_info

    def execute_rpc(self, name):
        """
        RPC to be executed on remote device
        :param name: Name of rpc in string format
        :return: Received rpc response from remote host
        """
        return self.rpc(name)

    @ensure_ncclient
    def load_configuration(self, format='xml', action='merge', target='candidate', config=None):
        """
        Load given configuration on device
        :param format: Format of configuration (xml, text, set)
        :param action: Action to be performed (merge, replace, override, update)
        :param target: The name of the configuration datastore being edited
        :param config: The configuration to be loaded on remote host in string format
        :return: Received rpc response from remote host in string format
        """
        if config:
            # Only XML payloads need to be parsed into an element first.
            if format == 'xml':
                config = to_ele(config)

        try:
            return self.m.load_configuration(format=format, action=action, target=target, config=config).data_xml
        except RPCError as exc:
            # Surface the device's RPC error body to the caller.
            raise Exception(to_xml(exc.xml))

    def get_capabilities(self):
        # Advertise supported RPC names plus device facts as a JSON string.
        result = dict()
        # NOTE(review): 'copy_copy' looks like a typo for 'copy_config' --
        # kept as-is since callers may match this literal; confirm upstream.
        result['rpc'] = self.get_base_rpc() + ['commit', 'discard_changes', 'validate', 'lock', 'unlock', 'copy_copy',
                                               'execute_rpc', 'load_configuration', 'get_configuration', 'command',
                                               'reboot', 'halt']
        result['network_api'] = 'netconf'
        result['device_info'] = self.get_device_info()
        result['server_capabilities'] = [c for c in self.m.server_capabilities]
        result['client_capabilities'] = [c for c in self.m.client_capabilities]
        result['session_id'] = self.m.session_id
        result['device_operations'] = self.get_device_operations(result['server_capabilities'])
        return json.dumps(result)

    @staticmethod
    @ensure_ncclient
    def guess_network_os(obj):
        """
        Guess the remote network os name
        :param obj: Netconf connection class object
        :return: Network OS name
        """
        try:
            m = manager.connect(
                host=obj._play_context.remote_addr,
                port=obj._play_context.port or 830,
                username=obj._play_context.remote_user,
                password=obj._play_context.password,
                key_filename=obj.key_filename,
                hostkey_verify=obj.get_option('host_key_checking'),
                look_for_keys=obj.get_option('look_for_keys'),
                allow_agent=obj._play_context.allow_agent,
                timeout=obj.get_option('persistent_connect_timeout'),
                # We need to pass in the path to the ssh_config file when guessing
                # the network_os so that a jumphost is correctly used if defined
                ssh_config=obj._ssh_config
            )
        except SSHUnknownHostError as exc:
            raise AnsibleConnectionFailure(to_native(exc))

        # Scan the advertised capabilities for the junos marker.
        guessed_os = None
        for c in m.server_capabilities:
            if re.search('junos', c):
                guessed_os = 'junos'

        m.close_session()
        return guessed_os

    def get_configuration(self, format='xml', filter=None):
        """
        Retrieve all or part of a specified configuration.
        :param format: format in which configuration should be retrieved
        :param filter: specifies the portion of the configuration to retrieve
        as either xml string rooted in <configuration> element
        :return: Received rpc response from remote host in string format
        """
        if filter is not None:
            if not isinstance(filter, string_types):
                raise AnsibleConnectionFailure("get configuration filter should be of type string,"
                                               " received value '%s' is of type '%s'" % (filter, type(filter)))
            filter = to_ele(filter)

        return self.m.get_configuration(format=format, filter=filter).data_xml

    def compare_configuration(self, rollback=0):
        """
        Compare the candidate configuration with running configuration
        by default. The candidate configuration can be compared with older
        committed configuration by providing rollback id.
        :param rollback: Rollback id of previously commited configuration
        :return: Received rpc response from remote host in string format
        """
        return self.m.compare_configuration(rollback=rollback).data_xml

    def halt(self):
        """halt the device"""
        return self.m.halt().data_xml

    def reboot(self):
        """reboot the device"""
        return self.m.reboot().data_xml

    # Due to issue in ncclient commit() method for Juniper (https://github.com/ncclient/ncclient/issues/238)
    # below commit() is a workaround which build's raw `commit-configuration` xml with required tags and uses
    # ncclient generic rpc() method to execute rpc on remote host.
    # Remove below method after the issue in ncclient is fixed.
    @ensure_ncclient
    def commit(self, confirmed=False, check=False, timeout=None, comment=None, synchronize=False, at_time=None):
        """
        Commit the candidate configuration as the device's new current configuration.
        Depends on the `:candidate` capability.
        A confirmed commit (i.e. if *confirmed* is `True`) is reverted if there is no
        followup commit within the *timeout* interval. If no timeout is specified the
        confirm timeout defaults to 600 seconds (10 minutes).
        A confirming commit may have the *confirmed* parameter but this is not required.
        Depends on the `:confirmed-commit` capability.
        :param confirmed: whether this is a confirmed commit
        :param check: Check correctness of syntax
        :param timeout: specifies the confirm timeout in seconds
        :param comment: Message to write to commit log
        :param synchronize: Synchronize commit on remote peers
        :param at_time: Time at which to activate configuration changes
        :return: Received rpc response from remote host
        """
        # Build the raw <commit-configuration> element by hand (see note above).
        obj = new_ele('commit-configuration')
        if confirmed:
            sub_ele(obj, 'confirmed')
        if check:
            sub_ele(obj, 'check')
        if synchronize:
            sub_ele(obj, 'synchronize')
        if at_time:
            subele = sub_ele(obj, 'at-time')
            subele.text = str(at_time)
        if comment:
            subele = sub_ele(obj, 'log')
            subele.text = str(comment)
        if timeout:
            subele = sub_ele(obj, 'confirm-timeout')
            subele.text = str(timeout)
        return self.rpc(obj)
| gpl-3.0 |
alexandrujuncu/sos | sos/plugins/system.py | 12 | 1198 | # This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
from sos.plugins import Plugin, RedHatPlugin, DebianPlugin, UbuntuPlugin
class System(Plugin, RedHatPlugin, DebianPlugin, UbuntuPlugin):
    """core system information
    """

    plugin_name = "system"
    profiles = ('system', 'kernel')

    def setup(self):
        # Collect the kernel tunables exposed under /proc/sys.
        self.add_copy_spec("/proc/sys")
        # Exclude the per-interface IPv6 neighbour timer entries.
        for forbidden in (
                "/proc/sys/net/ipv6/neigh/*/retrans_time",
                "/proc/sys/net/ipv6/neigh/*/base_reachable_time"):
            self.add_forbidden_path(forbidden)
| gpl-2.0 |
clstl/servo | tests/wpt/web-platform-tests/tools/pytest/testing/test_runner_xunit.py | 202 | 7133 | #
# test correct setup/teardowns at
# module, class, and instance level
def test_module_and_function_setup(testdir):
    """Module- and function-level setup/teardown hooks run and are visible to tests."""
    reprec = testdir.inline_runsource("""
        modlevel = []
        def setup_module(module):
            assert not modlevel
            module.modlevel.append(42)
        def teardown_module(module):
            modlevel.pop()
        def setup_function(function):
            function.answer = 17
        def teardown_function(function):
            del function.answer
        def test_modlevel():
            assert modlevel[0] == 42
            assert test_modlevel.answer == 17
        class TestFromClass:
            def test_module(self):
                assert modlevel[0] == 42
                assert not hasattr(test_modlevel, 'answer')
    """)
    rep = reprec.matchreport("test_modlevel")
    assert rep.passed
    rep = reprec.matchreport("test_module")
    assert rep.passed
def test_module_setup_failure_no_teardown(testdir):
    """A failing setup_module must prevent teardown_module from running."""
    reprec = testdir.inline_runsource("""
        l = []
        def setup_module(module):
            l.append(1)
            0/0
        def test_nothing():
            pass
        def teardown_module(module):
            l.append(2)
    """)
    reprec.assertoutcome(failed=1)
    calls = reprec.getcalls("pytest_runtest_setup")
    # Only the setup side effect is present; teardown never appended.
    assert calls[0].item.module.l == [1]
def test_setup_function_failure_no_teardown(testdir):
    """A failing setup_function must leave teardown_function unexecuted."""
    reprec = testdir.inline_runsource("""
        modlevel = []
        def setup_function(function):
            modlevel.append(1)
            0/0
        def teardown_function(module):
            modlevel.append(2)
        def test_func():
            pass
    """)
    calls = reprec.getcalls("pytest_runtest_setup")
    # Only the setup side effect is present; teardown never appended.
    assert calls[0].item.module.modlevel == [1]
def test_class_setup(testdir):
    """setup_class/teardown_class run per class and are inherited by subclasses."""
    reprec = testdir.inline_runsource("""
        class TestSimpleClassSetup:
            clslevel = []
            def setup_class(cls):
                cls.clslevel.append(23)
            def teardown_class(cls):
                cls.clslevel.pop()
            def test_classlevel(self):
                assert self.clslevel[0] == 23
        class TestInheritedClassSetupStillWorks(TestSimpleClassSetup):
            def test_classlevel_anothertime(self):
                assert self.clslevel == [23]
        def test_cleanup():
            assert not TestSimpleClassSetup.clslevel
            assert not TestInheritedClassSetupStillWorks.clslevel
    """)
    reprec.assertoutcome(passed=1+2+1)
def test_class_setup_failure_no_teardown(testdir):
    """A failing setup_class must prevent teardown_class from running."""
    reprec = testdir.inline_runsource("""
        class TestSimpleClassSetup:
            clslevel = []
            def setup_class(cls):
                0/0
            def teardown_class(cls):
                cls.clslevel.append(1)
            def test_classlevel(self):
                pass
        def test_cleanup():
            assert not TestSimpleClassSetup.clslevel
    """)
    reprec.assertoutcome(failed=1, passed=1)
def test_method_setup(testdir):
    """setup_method/teardown_method bracket each test method individually."""
    reprec = testdir.inline_runsource("""
        class TestSetupMethod:
            def setup_method(self, meth):
                self.methsetup = meth
            def teardown_method(self, meth):
                del self.methsetup
            def test_some(self):
                assert self.methsetup == self.test_some
            def test_other(self):
                assert self.methsetup == self.test_other
    """)
    reprec.assertoutcome(passed=2)
def test_method_setup_failure_no_teardown(testdir):
    """A failing setup_method must prevent teardown_method from running."""
    reprec = testdir.inline_runsource("""
        class TestMethodSetup:
            clslevel = []
            def setup_method(self, method):
                self.clslevel.append(1)
                0/0
            def teardown_method(self, method):
                self.clslevel.append(2)
            def test_method(self):
                pass
        def test_cleanup():
            assert TestMethodSetup.clslevel == [1]
    """)
    reprec.assertoutcome(failed=1, passed=1)
def test_method_generator_setup(testdir):
    """Yielded (generated) tests see class- and method-level setup state."""
    reprec = testdir.inline_runsource("""
        class TestSetupTeardownOnInstance:
            def setup_class(cls):
                cls.classsetup = True
            def setup_method(self, method):
                self.methsetup = method
            def test_generate(self):
                assert self.classsetup
                assert self.methsetup == self.test_generate
                yield self.generated, 5
                yield self.generated, 2
            def generated(self, value):
                assert self.classsetup
                assert self.methsetup == self.test_generate
                assert value == 5
    """)
    # The value==2 case intentionally fails.
    reprec.assertoutcome(passed=1, failed=1)
def test_func_generator_setup(testdir):
    """setup_function/teardown_function wrap a generator test exactly once."""
    reprec = testdir.inline_runsource("""
        import sys
        def setup_module(mod):
            print ("setup_module")
            mod.x = []
        def setup_function(fun):
            print ("setup_function")
            x.append(1)
        def teardown_function(fun):
            print ("teardown_function")
            x.pop()
        def test_one():
            assert x == [1]
            def check():
                print ("check")
                sys.stderr.write("e\\n")
                assert x == [1]
            yield check
            assert x == [1]
    """)
    rep = reprec.matchreport("test_one", names="pytest_runtest_logreport")
    assert rep.passed
def test_method_setup_uses_fresh_instances(testdir):
    """Each test method must run on a brand-new instance of its class."""
    reprec = testdir.inline_runsource("""
        class TestSelfState1:
            memory = []
            def test_hello(self):
                self.memory.append(self)
            def test_afterhello(self):
                assert self != self.memory[0]
    """)
    reprec.assertoutcome(passed=2, failed=0)
def test_setup_that_skips_calledagain(testdir):
    """A skipping setup_module skips every test in the module."""
    p = testdir.makepyfile("""
        import pytest
        def setup_module(mod):
            pytest.skip("x")
        def test_function1():
            pass
        def test_function2():
            pass
    """)
    reprec = testdir.inline_run(p)
    reprec.assertoutcome(skipped=2)
def test_setup_fails_again_on_all_tests(testdir):
    """A raising setup_module fails every test in the module."""
    p = testdir.makepyfile("""
        import pytest
        def setup_module(mod):
            raise ValueError(42)
        def test_function1():
            pass
        def test_function2():
            pass
    """)
    reprec = testdir.inline_run(p)
    reprec.assertoutcome(failed=2)
def test_setup_funcarg_setup_when_outer_scope_fails(testdir):
    """When module-level setup fails, funcarg factories must never be invoked."""
    p = testdir.makepyfile("""
        import pytest
        def setup_module(mod):
            raise ValueError(42)
        def pytest_funcarg__hello(request):
            raise ValueError("xyz43")
        def test_function1(hello):
            pass
        def test_function2(hello):
            pass
    """)
    result = testdir.runpytest(p)
    result.stdout.fnmatch_lines([
        "*function1*",
        "*ValueError*42*",
        "*function2*",
        "*ValueError*42*",
        "*2 error*"
    ])
    # The funcarg factory's marker string must not appear: it never ran.
    assert "xyz43" not in result.stdout.str()
| mpl-2.0 |
ar7z1/ansible | lib/ansible/modules/cloud/vmware/vmware_host_ntp.py | 18 | 7607 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2018, Abhijeet Kasurde <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = r'''
---
module: vmware_host_ntp
short_description: Manage NTP configurations about an ESXi host
description:
- This module can be used to manage NTP configuration information about an ESXi host.
- User can specify an ESXi hostname or Cluster name. In case of cluster name, all ESXi hosts are updated.
version_added: '2.5'
author:
- Abhijeet Kasurde (@Akasurde)
notes:
- Tested on vSphere 6.5
requirements:
- python >= 2.6
- PyVmomi
options:
cluster_name:
description:
- Name of the cluster.
- NTP settings are applied to every ESXi host system in the given cluster.
- If C(esxi_hostname) is not given, this parameter is required.
esxi_hostname:
description:
- ESXi hostname.
- NTP settings are applied to this ESXi host system.
- If C(cluster_name) is not given, this parameter is required.
ntp_servers:
description:
- "IP or FQDN of NTP server/s."
- This accepts a list of NTP servers. For multiple servers, please look at the examples.
required: True
state:
description:
- "present: Add NTP server/s, if it specified server/s are absent else do nothing."
- "absent: Remove NTP server/s, if specified server/s are present else do nothing."
default: present
choices: [ present, absent ]
extends_documentation_fragment: vmware.documentation
'''
EXAMPLES = r'''
- name: Set NTP setting for all ESXi Host in given Cluster
vmware_host_ntp:
hostname: '{{ vcenter_hostname }}'
username: '{{ vcenter_username }}'
password: '{{ vcenter_password }}'
cluster_name: cluster_name
state: present
ntp_servers:
- 0.pool.ntp.org
- 1.pool.ntp.org
delegate_to: localhost
- name: Set NTP setting for an ESXi Host
vmware_host_ntp:
hostname: '{{ vcenter_hostname }}'
username: '{{ vcenter_username }}'
password: '{{ vcenter_password }}'
esxi_hostname: '{{ esxi_hostname }}'
state: present
ntp_servers:
- 0.pool.ntp.org
- 1.pool.ntp.org
delegate_to: localhost
- name: Remove NTP setting for an ESXi Host
vmware_host_ntp:
hostname: '{{ vcenter_hostname }}'
username: '{{ vcenter_username }}'
password: '{{ vcenter_password }}'
esxi_hostname: '{{ esxi_hostname }}'
state: absent
ntp_servers:
- bad.server.ntp.org
delegate_to: localhost
'''
RETURN = r'''#
'''
try:
from pyVmomi import vim
except ImportError:
pass
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.vmware import vmware_argument_spec, PyVmomi
from ansible.module_utils._text import to_native
class VmwareNtpConfigManager(PyVmomi):
    """Add or remove NTP servers on the ESXi hosts selected by cluster_name
    or esxi_hostname, honouring Ansible check mode.
    """

    def __init__(self, module):
        super(VmwareNtpConfigManager, self).__init__(module)
        cluster_name = self.params.get('cluster_name', None)
        esxi_host_name = self.params.get('esxi_hostname', None)
        # NTP servers requested by the task.
        self.ntp_servers = self.params.get('ntp_servers', list())
        # Every ESXi host object targeted by the cluster/host selection.
        self.hosts = self.get_all_host_objs(cluster_name=cluster_name, esxi_host_name=esxi_host_name)
        # Per-host result payload, keyed by host name.
        self.results = {}
        self.desired_state = module.params['state']

    def update_ntp_servers(self, host, ntp_servers, operation='add'):
        """Apply *ntp_servers* to *host* ('add' or 'delete'); return True on change."""
        changed = False
        host_date_time_manager = host.configManager.dateTimeSystem
        if host_date_time_manager:
            available_ntp_servers = host_date_time_manager.dateTimeInfo.ntpConfig.server
            # Drop empty entries the host may report.
            available_ntp_servers = list(filter(None, available_ntp_servers))
            if operation == 'add':
                available_ntp_servers = available_ntp_servers + ntp_servers
            elif operation == 'delete':
                for server in ntp_servers:
                    if server in available_ntp_servers:
                        available_ntp_servers.remove(server)
            ntp_config_spec = vim.host.NtpConfig()
            ntp_config_spec.server = available_ntp_servers
            date_config_spec = vim.host.DateTimeConfig()
            date_config_spec.ntpConfig = ntp_config_spec
            try:
                if self.module.check_mode:
                    # NOTE(review): in check mode `changed` stays False even
                    # though a change would be applied -- confirm intended.
                    self.results[host.name]['after_change_ntp_servers'] = available_ntp_servers
                else:
                    host_date_time_manager.UpdateDateTimeConfig(date_config_spec)
                    self.results[host.name]['after_change_ntp_servers'] = host_date_time_manager.dateTimeInfo.ntpConfig.server
                    changed = True
            except vim.fault.HostConfigFault as e:
                self.results[host.name]['error'] = to_native(e.msg)
            except Exception as e:
                self.results[host.name]['error'] = to_native(e)
        return changed

    def check_host_state(self):
        """Compute and apply per-host changes, then exit the module with results."""
        change_list = []
        changed = False
        for host in self.hosts:
            ntp_servers_to_change = self.check_ntp_servers(host=host)
            self.results[host.name].update(dict(
                ntp_servers_to_change=ntp_servers_to_change,
                desired_state=self.desired_state,
            )
            )
            if not ntp_servers_to_change:
                # Host already in the desired state.
                change_list.append(False)
                self.results[host.name]['current_state'] = self.desired_state
            elif ntp_servers_to_change:
                if self.desired_state == 'present':
                    changed = self.update_ntp_servers(host=host, ntp_servers=ntp_servers_to_change)
                    change_list.append(changed)
                elif self.desired_state == 'absent':
                    changed = self.update_ntp_servers(host=host, ntp_servers=ntp_servers_to_change, operation='delete')
                    change_list.append(changed)
                self.results[host.name]['current_state'] = self.desired_state
        if any(change_list):
            changed = True
        self.module.exit_json(changed=changed, results=self.results)

    def check_ntp_servers(self, host):
        """Return the requested servers that must be added/removed on *host*."""
        update_ntp_list = []
        host_datetime_system = host.configManager.dateTimeSystem
        if host_datetime_system:
            ntp_servers = host_datetime_system.dateTimeInfo.ntpConfig.server
            self.results[host.name] = dict(available_ntp_servers=ntp_servers)
            for ntp_server in self.ntp_servers:
                if self.desired_state == 'present' and ntp_server not in ntp_servers:
                    update_ntp_list.append(ntp_server)
                if self.desired_state == 'absent' and ntp_server in ntp_servers:
                    update_ntp_list.append(ntp_server)
        return update_ntp_list
def main():
    """Ansible module entry point: build the argument spec and apply NTP state."""
    argument_spec = vmware_argument_spec()
    argument_spec.update(
        cluster_name=dict(type='str', required=False),
        esxi_hostname=dict(type='str', required=False),
        ntp_servers=dict(type='list', required=True),
        state=dict(type='str', default='present', choices=['absent', 'present']),
    )
    module = AnsibleModule(
        argument_spec=argument_spec,
        # At least one way of selecting target hosts must be given.
        required_one_of=[
            ['cluster_name', 'esxi_hostname'],
        ],
        supports_check_mode=True
    )
    vmware_host_ntp_config = VmwareNtpConfigManager(module)
    # check_host_state() calls module.exit_json() and does not return.
    vmware_host_ntp_config.check_host_state()
if __name__ == "__main__":
main()
| gpl-3.0 |
karna41317/personal_blog | node_modules/grunt-docker/node_modules/docker/node_modules/pygmentize-bundled/vendor/pygments/build-3.3/pygments/lexers/_vimbuiltins.py | 376 | 39772 | # Split up in multiple functions so it's importable by jython, which has a
# per-method size limit.
def _getauto():
return [('BufAdd','BufAdd'),('BufCreate','BufCreate'),('BufDelete','BufDelete'),('BufEnter','BufEnter'),('BufFilePost','BufFilePost'),('BufFilePre','BufFilePre'),('BufHidden','BufHidden'),('BufLeave','BufLeave'),('BufNew','BufNew'),('BufNewFile','BufNewFile'),('BufRead','BufRead'),('BufReadCmd','BufReadCmd'),('BufReadPost','BufReadPost'),('BufReadPre','BufReadPre'),('BufUnload','BufUnload'),('BufWinEnter','BufWinEnter'),('BufWinLeave','BufWinLeave'),('BufWipeout','BufWipeout'),('BufWrite','BufWrite'),('BufWriteCmd','BufWriteCmd'),('BufWritePost','BufWritePost'),('BufWritePre','BufWritePre'),('Cmd','Cmd'),('CmdwinEnter','CmdwinEnter'),('CmdwinLeave','CmdwinLeave'),('ColorScheme','ColorScheme'),('CursorHold','CursorHold'),('CursorHoldI','CursorHoldI'),('CursorMoved','CursorMoved'),('CursorMovedI','CursorMovedI'),('EncodingChanged','EncodingChanged'),('FileAppendCmd','FileAppendCmd'),('FileAppendPost','FileAppendPost'),('FileAppendPre','FileAppendPre'),('FileChangedRO','FileChangedRO'),('FileChangedShell','FileChangedShell'),('FileChangedShellPost','FileChangedShellPost'),('FileEncoding','FileEncoding'),('FileReadCmd','FileReadCmd'),('FileReadPost','FileReadPost'),('FileReadPre','FileReadPre'),('FileType','FileType'),('FileWriteCmd','FileWriteCmd'),('FileWritePost','FileWritePost'),('FileWritePre','FileWritePre'),('FilterReadPost','FilterReadPost'),('FilterReadPre','FilterReadPre'),('FilterWritePost','FilterWritePost'),('FilterWritePre','FilterWritePre'),('FocusGained','FocusGained'),('FocusLost','FocusLost'),('FuncUndefined','FuncUndefined'),('GUIEnter','GUIEnter'),('GUIFailed','GUIFailed'),('InsertChange','InsertChange'),('InsertCharPre','InsertCharPre'),('InsertEnter','InsertEnter'),('InsertLeave','InsertLeave'),('MenuPopup','MenuPopup'),('QuickFixCmdPost','QuickFixCmdPost'),('QuickFixCmdPre','QuickFixCmdPre'),('RemoteReply','RemoteReply'),('SessionLoadPost','SessionLoadPost'),('ShellCmdPost','ShellCmdPost'),('ShellFilterPost','ShellFilterPost'),('SourceCmd','Source
Cmd'),('SourcePre','SourcePre'),('SpellFileMissing','SpellFileMissing'),('StdinReadPost','StdinReadPost'),('StdinReadPre','StdinReadPre'),('SwapExists','SwapExists'),('Syntax','Syntax'),('TabEnter','TabEnter'),('TabLeave','TabLeave'),('TermChanged','TermChanged'),('TermResponse','TermResponse'),('User','User'),('UserGettingBored','UserGettingBored'),('VimEnter','VimEnter'),('VimLeave','VimLeave'),('VimLeavePre','VimLeavePre'),('VimResized','VimResized'),('WinEnter','WinEnter'),('WinLeave','WinLeave'),('event','event')]
def _getcommand():
return [('Allargs','Allargs'),('DiffOrig','DiffOrig'),('Error','Error'),('Man','Man'),('MyCommand','MyCommand'),('Mycmd','Mycmd'),('N','N'),('N','Next'),('P','P'),('P','Print'),('Ren','Ren'),('Rena','Rena'),('Renu','Renu'),('TOhtml','TOhtml'),('X','X'),('XMLent','XMLent'),('XMLns','XMLns'),('a','a'),('ab','ab'),('abc','abclear'),('abo','aboveleft'),('al','all'),('ar','ar'),('ar','args'),('arga','argadd'),('argd','argdelete'),('argdo','argdo'),('arge','argedit'),('argg','argglobal'),('argl','arglocal'),('argu','argument'),('as','ascii'),('au','au'),('b','buffer'),('bN','bNext'),('ba','ball'),('bad','badd'),('bar','bar'),('bd','bdelete'),('bel','belowright'),('bf','bfirst'),('bl','blast'),('bm','bmodified'),('bn','bnext'),('bo','botright'),('bp','bprevious'),('br','br'),('br','brewind'),('brea','break'),('breaka','breakadd'),('breakd','breakdel'),('breakl','breaklist'),('bro','browse'),('browseset','browseset'),('bu','bu'),('buf','buf'),('bufdo','bufdo'),('buffers','buffers'),('bun','bunload'),('bw','bwipeout'),('c','c'),('c','change'),('cN','cN'),('cN','cNext'),('cNf','cNf'),('cNf','cNfile'),('cabc','cabclear'),('cad','cad'),('cad','caddexpr'),('caddb','caddbuffer'),('caddf','caddfile'),('cal','call'),('cat','catch'),('cb','cbuffer'),('cc','cc'),('ccl','cclose'),('cd','cd'),('ce','center'),('cex','cexpr'),('cf','cfile'),('cfir','cfirst'),('cg','cgetfile'),('cgetb','cgetbuffer'),('cgete','cgetexpr'),('changes','changes'),('chd','chdir'),('che','checkpath'),('checkt','checktime'),('cl','cl'),('cl','clist'),('cla','clast'),('clo','close'),('cmapc','cmapclear'),('cmdname','cmdname'),('cn','cn'),('cn','cnext'),('cnew','cnewer'),('cnf','cnf'),('cnf','cnfile'),('co','copy'),('col','colder'),('colo','colorscheme'),('com','com'),('comc','comclear'),('comment','comment'),('comp','compiler'),('con','con'),('con','continue'),('conf','confirm'),('cope','copen'),('count','count'),('cp','cprevious'),('cpf','cpfile'),('cq','cquit'),('cr','crewind'),('cs','cs'),('cscope','cscope'),('
cstag','cstag'),('cuna','cunabbrev'),('cw','cwindow'),('d','d'),('d','delete'),('de','de'),('debug','debug'),('debugg','debuggreedy'),('del','del'),('delc','delcommand'),('delf','delf'),('delf','delfunction'),('delm','delmarks'),('di','di'),('di','display'),('diffg','diffget'),('diffo','diffo'),('diffoff','diffoff'),('diffp','diffp'),('diffpatch','diffpatch'),('diffpu','diffput'),('diffsplit','diffsplit'),('difft','difft'),('diffthis','diffthis'),('diffu','diffupdate'),('dig','dig'),('dig','digraphs'),('dj','djump'),('dl','dlist'),('do','do'),('doau','doau'),('dr','drop'),('ds','dsearch'),('dsp','dsplit'),('dwim','dwim'),('e','e'),('e','e'),('e','e'),('e','e'),('e','e'),('e','e'),('e','e'),('e','e'),('e','e'),('e','edit'),('ea','ea'),('earlier','earlier'),('ec','ec'),('echoe','echoerr'),('echom','echomsg'),('echon','echon'),('el','else'),('elsei','elseif'),('em','emenu'),('emenu','emenu'),('en','en'),('en','endif'),('endf','endf'),('endf','endfunction'),('endfo','endfor'),('endfun','endfun'),('endt','endtry'),('endw','endwhile'),('ene','enew'),('ex','ex'),('exi','exit'),('exu','exusage'),('f','f'),('f','file'),('filename','filename'),('files','files'),('filet','filet'),('filetype','filetype'),('fin','fin'),('fin','find'),('fina','finally'),('fini','finish'),('fir','first'),('fix','fixdel'),('fo','fold'),('foldc','foldclose'),('foldd','folddoopen'),('folddoc','folddoclosed'),('foldo','foldopen'),('for','for'),('fu','fu'),('fu','function'),('fun','fun'),('g','g'),('get','get'),('go','goto'),('gr','grep'),('grepa','grepadd'),('gs','gs'),('gs','gs'),('gui','gui'),('gvim','gvim'),('h','h'),('h','h'),('h','h'),('h','h'),('h','help'),('ha','hardcopy'),('helpf','helpfind'),('helpg','helpgrep'),('helpt','helptags'),('hi','hi'),('hid','hide'),('his','history'),('i','i'),('ia','ia'),('iabc','iabclear'),('if','if'),('ij','ijump'),('il','ilist'),('imapc','imapclear'),('in','in'),('index','index'),('intro','intro'),('is','isearch'),('isp','isplit'),('iuna','iunabbrev'),('j','join
'),('ju','jumps'),('k','k'),('kee','keepmarks'),('keepa','keepa'),('keepalt','keepalt'),('keepj','keepjumps'),('l','l'),('l','list'),('lN','lN'),('lN','lNext'),('lNf','lNf'),('lNf','lNfile'),('la','la'),('la','last'),('lad','lad'),('lad','laddexpr'),('laddb','laddbuffer'),('laddf','laddfile'),('lan','lan'),('lan','language'),('lat','lat'),('later','later'),('lb','lbuffer'),('lc','lcd'),('lch','lchdir'),('lcl','lclose'),('lcs','lcs'),('lcscope','lcscope'),('le','left'),('lefta','leftabove'),('let','let'),('lex','lexpr'),('lf','lfile'),('lfir','lfirst'),('lg','lgetfile'),('lgetb','lgetbuffer'),('lgete','lgetexpr'),('lgr','lgrep'),('lgrepa','lgrepadd'),('lh','lhelpgrep'),('ll','ll'),('lla','llast'),('lli','llist'),('lmak','lmake'),('lmapc','lmapclear'),('lne','lne'),('lne','lnext'),('lnew','lnewer'),('lnf','lnf'),('lnf','lnfile'),('lo','lo'),('lo','loadview'),('loadk','loadk'),('loadkeymap','loadkeymap'),('loc','lockmarks'),('locale','locale'),('lockv','lockvar'),('lol','lolder'),('lop','lopen'),('lp','lprevious'),('lpf','lpfile'),('lr','lrewind'),('ls','ls'),('lt','ltag'),('lua','lua'),('luado','luado'),('luafile','luafile'),('lv','lvimgrep'),('lvimgrepa','lvimgrepadd'),('lw','lwindow'),('m','move'),('ma','ma'),('ma','mark'),('main','main'),('main','main'),('mak','make'),('marks','marks'),('mat','match'),('menut','menut'),('menut','menutranslate'),('mes','mes'),('messages','messages'),('mk','mk'),('mk','mkexrc'),('mkdir','mkdir'),('mks','mksession'),('mksp','mkspell'),('mkv','mkv'),('mkv','mkvimrc'),('mkvie','mkview'),('mo','mo'),('mod','mode'),('mv','mv'),('mz','mz'),('mz','mzscheme'),('mzf','mzfile'),('n','n'),('n','n'),('n','next'),('nb','nbkey'),('nbc','nbclose'),('nbs','nbstart'),('ne','ne'),('new','new'),('nkf','nkf'),('nmapc','nmapclear'),('noa','noa'),('noautocmd','noautocmd'),('noh','nohlsearch'),('nu','number'),('o','o'),('o','open'),('ol','oldfiles'),('omapc','omapclear'),('on','only'),('opt','options'),('ownsyntax','ownsyntax'),('p','p'),('p','p'),('p','p'
),('p','p'),('p','p'),('p','p'),('p','p'),('p','p'),('p','p'),('p','print'),('pat','pat'),('pat','pat'),('pc','pclose'),('pe','pe'),('pe','perl'),('ped','pedit'),('perld','perldo'),('po','pop'),('popu','popu'),('popu','popup'),('pp','ppop'),('pr','pr'),('pre','preserve'),('prev','previous'),('pro','pro'),('prof','profile'),('profd','profdel'),('promptf','promptfind'),('promptr','promptrepl'),('ps','psearch'),('ptN','ptN'),('ptN','ptNext'),('pta','ptag'),('ptf','ptfirst'),('ptj','ptjump'),('ptl','ptlast'),('ptn','ptn'),('ptn','ptnext'),('ptp','ptprevious'),('ptr','ptrewind'),('pts','ptselect'),('pu','put'),('pw','pwd'),('py','py'),('py','python'),('py3','py3'),('py3','py3'),('py3file','py3file'),('pyf','pyfile'),('python3','python3'),('q','q'),('q','quit'),('qa','qall'),('quita','quitall'),('quote','quote'),('r','r'),('r','r'),('r','r'),('r','r'),('r','r'),('r','r'),('r','r'),('r','r'),('r','r'),('r','r'),('r','r'),('r','read'),('re','re'),('rec','recover'),('red','red'),('red','redo'),('redi','redir'),('redr','redraw'),('redraws','redrawstatus'),('reg','registers'),('res','resize'),('ret','retab'),('retu','return'),('rew','rewind'),('ri','right'),('rightb','rightbelow'),('ru','ru'),('ru','runtime'),('rub','ruby'),('rubyd','rubydo'),('rubyf','rubyfile'),('rundo','rundo'),('rv','rviminfo'),('s','s'),('s','s'),('s','s'),('s','s'),('sN','sNext'),('sa','sargument'),('sal','sall'),('san','sandbox'),('sav','saveas'),('sb','sbuffer'),('sbN','sbNext'),('sba','sball'),('sbf','sbfirst'),('sbl','sblast'),('sbm','sbmodified'),('sbn','sbnext'),('sbp','sbprevious'),('sbr','sbrewind'),('scrip','scrip'),('scrip','scriptnames'),('scripte','scriptencoding'),('scs','scs'),('scscope','scscope'),('se','set'),('setf','setfiletype'),('setg','setglobal'),('setl','setlocal'),('sf','sfind'),('sfir','sfirst'),('sh','shell'),('si','si'),('sig','sig'),('sign','sign'),('sil','silent'),('sim','simalt'),('sl','sl'),('sl','sleep'),('sla','slast'),('sm','smagic'),('sm','smap'),('sme','sme'),('smenu',
'smenu'),('sn','snext'),('sni','sniff'),('sno','snomagic'),('snoreme','snoreme'),('snoremenu','snoremenu'),('so','so'),('so','source'),('sor','sort'),('sp','split'),('spe','spe'),('spe','spellgood'),('spelld','spelldump'),('spelli','spellinfo'),('spellr','spellrepall'),('spellu','spellundo'),('spellw','spellwrong'),('spr','sprevious'),('sre','srewind'),('st','st'),('st','stop'),('sta','stag'),('star','star'),('star','startinsert'),('start','start'),('startg','startgreplace'),('startr','startreplace'),('stj','stjump'),('stopi','stopinsert'),('sts','stselect'),('sub','sub'),('sub','sub'),('sun','sunhide'),('sunme','sunme'),('sunmenu','sunmenu'),('sus','suspend'),('sv','sview'),('sw','swapname'),('sy','sy'),('syn','syn'),('sync','sync'),('syncbind','syncbind'),('synlist','synlist'),('t','t'),('t','t'),('t','t'),('tN','tN'),('tN','tNext'),('ta','ta'),('ta','tag'),('tab','tab'),('tabN','tabN'),('tabN','tabNext'),('tabc','tabclose'),('tabd','tabdo'),('tabe','tabedit'),('tabf','tabfind'),('tabfir','tabfirst'),('tabl','tablast'),('tabm','tabmove'),('tabn','tabnext'),('tabnew','tabnew'),('tabo','tabonly'),('tabp','tabprevious'),('tabr','tabrewind'),('tabs','tabs'),('tags','tags'),('tc','tcl'),('tcld','tcldo'),('tclf','tclfile'),('te','tearoff'),('tf','tfirst'),('th','throw'),('tj','tjump'),('tl','tlast'),('tm','tm'),('tm','tmenu'),('tn','tn'),('tn','tnext'),('to','topleft'),('tp','tprevious'),('tr','tr'),('tr','trewind'),('try','try'),('ts','tselect'),('tu','tu'),('tu','tunmenu'),('u','u'),('u','undo'),('un','un'),('una','unabbreviate'),('undoj','undojoin'),('undol','undolist'),('unh','unhide'),('unl','unl'),('unlo','unlockvar'),('uns','unsilent'),('up','update'),('v','v'),('ve','ve'),('ve','version'),('verb','verbose'),('version','version'),('version','version'),('vert','vertical'),('vi','vi'),('vi','visual'),('vie','view'),('vim','vimgrep'),('vimgrepa','vimgrepadd'),('viu','viusage'),('vmapc','vmapclear'),('vne','vnew'),('vs','vsplit'),('w','w'),('w','write'),('wN','wNext'
),('wa','wall'),('wh','while'),('win','win'),('win','winsize'),('winc','wincmd'),('windo','windo'),('winp','winpos'),('wn','wnext'),('wp','wprevious'),('wq','wq'),('wqa','wqall'),('ws','wsverb'),('wundo','wundo'),('wv','wviminfo'),('x','x'),('x','xit'),('xa','xall'),('xmapc','xmapclear'),('xme','xme'),('xmenu','xmenu'),('xnoreme','xnoreme'),('xnoremenu','xnoremenu'),('xterm','xterm'),('xunme','xunme'),('xunmenu','xunmenu'),('xwininfo','xwininfo'),('y','yank')]
def _getoption():
return [('acd','acd'),('ai','ai'),('akm','akm'),('al','al'),('aleph','aleph'),('allowrevins','allowrevins'),('altkeymap','altkeymap'),('ambiwidth','ambiwidth'),('ambw','ambw'),('anti','anti'),('antialias','antialias'),('ar','ar'),('arab','arab'),('arabic','arabic'),('arabicshape','arabicshape'),('ari','ari'),('arshape','arshape'),('autochdir','autochdir'),('autoindent','autoindent'),('autoread','autoread'),('autowrite','autowrite'),('autowriteall','autowriteall'),('aw','aw'),('awa','awa'),('background','background'),('backspace','backspace'),('backup','backup'),('backupcopy','backupcopy'),('backupdir','backupdir'),('backupext','backupext'),('backupskip','backupskip'),('balloondelay','balloondelay'),('ballooneval','ballooneval'),('balloonexpr','balloonexpr'),('bdir','bdir'),('bdlay','bdlay'),('beval','beval'),('bex','bex'),('bexpr','bexpr'),('bg','bg'),('bh','bh'),('bin','bin'),('binary','binary'),('biosk','biosk'),('bioskey','bioskey'),('bk','bk'),('bkc','bkc'),('bl','bl'),('bomb','bomb'),('breakat','breakat'),('brk','brk'),('browsedir','browsedir'),('bs','bs'),('bsdir','bsdir'),('bsk','bsk'),('bt','bt'),('bufhidden','bufhidden'),('buflisted','buflisted'),('buftype','buftype'),('casemap','casemap'),('cb','cb'),('cc','cc'),('ccv','ccv'),('cd','cd'),('cdpath','cdpath'),('cedit','cedit'),('cf','cf'),('cfu','cfu'),('ch','ch'),('charconvert','charconvert'),('ci','ci'),('cin','cin'),('cindent','cindent'),('cink','cink'),('cinkeys','cinkeys'),('cino','cino'),('cinoptions','cinoptions'),('cinw','cinw'),('cinwords','cinwords'),('clipboard','clipboard'),('cmdheight','cmdheight'),('cmdwinheight','cmdwinheight'),('cmp','cmp'),('cms','cms'),('co','co'),('cocu','cocu'),('cole','cole'),('colorcolumn','colorcolumn'),('columns','columns'),('com','com'),('comments','comments'),('commentstring','commentstring'),('compatible','compatible'),('complete','complete'),('completefunc','completefunc'),('completeopt','completeopt'),('concealcursor','concealcursor'),('conceallevel','concealleve
l'),('confirm','confirm'),('consk','consk'),('conskey','conskey'),('copyindent','copyindent'),('cot','cot'),('cp','cp'),('cpo','cpo'),('cpoptions','cpoptions'),('cpt','cpt'),('crb','crb'),('cryptmethod','cryptmethod'),('cscopepathcomp','cscopepathcomp'),('cscopeprg','cscopeprg'),('cscopequickfix','cscopequickfix'),('cscoperelative','cscoperelative'),('cscopetag','cscopetag'),('cscopetagorder','cscopetagorder'),('cscopeverbose','cscopeverbose'),('cspc','cspc'),('csprg','csprg'),('csqf','csqf'),('csre','csre'),('cst','cst'),('csto','csto'),('csverb','csverb'),('cuc','cuc'),('cul','cul'),('cursorbind','cursorbind'),('cursorcolumn','cursorcolumn'),('cursorline','cursorline'),('cwh','cwh'),('debug','debug'),('deco','deco'),('def','def'),('define','define'),('delcombine','delcombine'),('dex','dex'),('dg','dg'),('dict','dict'),('dictionary','dictionary'),('diff','diff'),('diffexpr','diffexpr'),('diffopt','diffopt'),('digraph','digraph'),('dip','dip'),('dir','dir'),('directory','directory'),('display','display'),('dy','dy'),('ea','ea'),('ead','ead'),('eadirection','eadirection'),('eb','eb'),('ed','ed'),('edcompatible','edcompatible'),('ef','ef'),('efm','efm'),('ei','ei'),('ek','ek'),('enc','enc'),('encoding','encoding'),('endofline','endofline'),('eol','eol'),('ep','ep'),('equalalways','equalalways'),('equalprg','equalprg'),('errorbells','errorbells'),('errorfile','errorfile'),('errorformat','errorformat'),('esckeys','esckeys'),('et','et'),('eventignore','eventignore'),('ex','ex'),('expandtab','expandtab'),('exrc','exrc'),('fcl','fcl'),('fcs','fcs'),('fdc','fdc'),('fde','fde'),('fdi','fdi'),('fdl','fdl'),('fdls','fdls'),('fdm','fdm'),('fdn','fdn'),('fdo','fdo'),('fdt','fdt'),('fen','fen'),('fenc','fenc'),('fencs','fencs'),('fex','fex'),('ff','ff'),('ffs','ffs'),('fileencoding','fileencoding'),('fileencodings','fileencodings'),('fileformat','fileformat'),('fileformats','fileformats'),('filetype','filetype'),('fillchars','fillchars'),('fk','fk'),('fkmap','fkmap'),('flp','flp'
),('fml','fml'),('fmr','fmr'),('fo','fo'),('foldclose','foldclose'),('foldcolumn','foldcolumn'),('foldenable','foldenable'),('foldexpr','foldexpr'),('foldignore','foldignore'),('foldlevel','foldlevel'),('foldlevelstart','foldlevelstart'),('foldmarker','foldmarker'),('foldmethod','foldmethod'),('foldminlines','foldminlines'),('foldnestmax','foldnestmax'),('foldopen','foldopen'),('foldtext','foldtext'),('formatexpr','formatexpr'),('formatlistpat','formatlistpat'),('formatoptions','formatoptions'),('formatprg','formatprg'),('fp','fp'),('fs','fs'),('fsync','fsync'),('ft','ft'),('gcr','gcr'),('gd','gd'),('gdefault','gdefault'),('gfm','gfm'),('gfn','gfn'),('gfs','gfs'),('gfw','gfw'),('ghr','ghr'),('go','go'),('gp','gp'),('grepformat','grepformat'),('grepprg','grepprg'),('gtl','gtl'),('gtt','gtt'),('guicursor','guicursor'),('guifont','guifont'),('guifontset','guifontset'),('guifontwide','guifontwide'),('guiheadroom','guiheadroom'),('guioptions','guioptions'),('guipty','guipty'),('guitablabel','guitablabel'),('guitabtooltip','guitabtooltip'),('helpfile','helpfile'),('helpheight','helpheight'),('helplang','helplang'),('hf','hf'),('hh','hh'),('hi','hi'),('hid','hid'),('hidden','hidden'),('highlight','highlight'),('history','history'),('hk','hk'),('hkmap','hkmap'),('hkmapp','hkmapp'),('hkp','hkp'),('hl','hl'),('hlg','hlg'),('hls','hls'),('hlsearch','hlsearch'),('ic','ic'),('icon','icon'),('iconstring','iconstring'),('ignorecase','ignorecase'),('im','im'),('imactivatekey','imactivatekey'),('imak','imak'),('imc','imc'),('imcmdline','imcmdline'),('imd','imd'),('imdisable','imdisable'),('imi','imi'),('iminsert','iminsert'),('ims','ims'),('imsearch','imsearch'),('inc','inc'),('include','include'),('includeexpr','includeexpr'),('incsearch','incsearch'),('inde','inde'),('indentexpr','indentexpr'),('indentkeys','indentkeys'),('indk','indk'),('inex','inex'),('inf','inf'),('infercase','infercase'),('inoremap','inoremap'),('insertmode','insertmode'),('invacd','invacd'),('invai','invai'),
('invakm','invakm'),('invallowrevins','invallowrevins'),('invaltkeymap','invaltkeymap'),('invanti','invanti'),('invantialias','invantialias'),('invar','invar'),('invarab','invarab'),('invarabic','invarabic'),('invarabicshape','invarabicshape'),('invari','invari'),('invarshape','invarshape'),('invautochdir','invautochdir'),('invautoindent','invautoindent'),('invautoread','invautoread'),('invautowrite','invautowrite'),('invautowriteall','invautowriteall'),('invaw','invaw'),('invawa','invawa'),('invbackup','invbackup'),('invballooneval','invballooneval'),('invbeval','invbeval'),('invbin','invbin'),('invbinary','invbinary'),('invbiosk','invbiosk'),('invbioskey','invbioskey'),('invbk','invbk'),('invbl','invbl'),('invbomb','invbomb'),('invbuflisted','invbuflisted'),('invcf','invcf'),('invci','invci'),('invcin','invcin'),('invcindent','invcindent'),('invcompatible','invcompatible'),('invconfirm','invconfirm'),('invconsk','invconsk'),('invconskey','invconskey'),('invcopyindent','invcopyindent'),('invcp','invcp'),('invcrb','invcrb'),('invcscopetag','invcscopetag'),('invcscopeverbose','invcscopeverbose'),('invcst','invcst'),('invcsverb','invcsverb'),('invcuc','invcuc'),('invcul','invcul'),('invcursorbind','invcursorbind'),('invcursorcolumn','invcursorcolumn'),('invcursorline','invcursorline'),('invdeco','invdeco'),('invdelcombine','invdelcombine'),('invdg','invdg'),('invdiff','invdiff'),('invdigraph','invdigraph'),('invea','invea'),('inveb','inveb'),('inved','inved'),('invedcompatible','invedcompatible'),('invek','invek'),('invendofline','invendofline'),('inveol','inveol'),('invequalalways','invequalalways'),('inverrorbells','inverrorbells'),('invesckeys','invesckeys'),('invet','invet'),('invex','invex'),('invexpandtab','invexpandtab'),('invexrc','invexrc'),('invfen','invfen'),('invfk','invfk'),('invfkmap','invfkmap'),('invfoldenable','invfoldenable'),('invgd','invgd'),('invgdefault','invgdefault'),('invguipty','invguipty'),('invhid','invhid'),('invhidden','invhidden'),('invh
k','invhk'),('invhkmap','invhkmap'),('invhkmapp','invhkmapp'),('invhkp','invhkp'),('invhls','invhls'),('invhlsearch','invhlsearch'),('invic','invic'),('invicon','invicon'),('invignorecase','invignorecase'),('invim','invim'),('invimc','invimc'),('invimcmdline','invimcmdline'),('invimd','invimd'),('invimdisable','invimdisable'),('invincsearch','invincsearch'),('invinf','invinf'),('invinfercase','invinfercase'),('invinsertmode','invinsertmode'),('invis','invis'),('invjoinspaces','invjoinspaces'),('invjs','invjs'),('invlazyredraw','invlazyredraw'),('invlbr','invlbr'),('invlinebreak','invlinebreak'),('invlisp','invlisp'),('invlist','invlist'),('invloadplugins','invloadplugins'),('invlpl','invlpl'),('invlz','invlz'),('invma','invma'),('invmacatsui','invmacatsui'),('invmagic','invmagic'),('invmh','invmh'),('invml','invml'),('invmod','invmod'),('invmodeline','invmodeline'),('invmodifiable','invmodifiable'),('invmodified','invmodified'),('invmore','invmore'),('invmousef','invmousef'),('invmousefocus','invmousefocus'),('invmousehide','invmousehide'),('invnu','invnu'),('invnumber','invnumber'),('invodev','invodev'),('invopendevice','invopendevice'),('invpaste','invpaste'),('invpi','invpi'),('invpreserveindent','invpreserveindent'),('invpreviewwindow','invpreviewwindow'),('invprompt','invprompt'),('invpvw','invpvw'),('invreadonly','invreadonly'),('invrelativenumber','invrelativenumber'),('invremap','invremap'),('invrestorescreen','invrestorescreen'),('invrevins','invrevins'),('invri','invri'),('invrightleft','invrightleft'),('invrl','invrl'),('invrnu','invrnu'),('invro','invro'),('invrs','invrs'),('invru','invru'),('invruler','invruler'),('invsb','invsb'),('invsc','invsc'),('invscb','invscb'),('invscrollbind','invscrollbind'),('invscs','invscs'),('invsecure','invsecure'),('invsft','invsft'),('invshellslash','invshellslash'),('invshelltemp','invshelltemp'),('invshiftround','invshiftround'),('invshortname','invshortname'),('invshowcmd','invshowcmd'),('invshowfulltag','invshowfull
tag'),('invshowmatch','invshowmatch'),('invshowmode','invshowmode'),('invsi','invsi'),('invsm','invsm'),('invsmartcase','invsmartcase'),('invsmartindent','invsmartindent'),('invsmarttab','invsmarttab'),('invsmd','invsmd'),('invsn','invsn'),('invsol','invsol'),('invspell','invspell'),('invsplitbelow','invsplitbelow'),('invsplitright','invsplitright'),('invspr','invspr'),('invsr','invsr'),('invssl','invssl'),('invsta','invsta'),('invstartofline','invstartofline'),('invstmp','invstmp'),('invswapfile','invswapfile'),('invswf','invswf'),('invta','invta'),('invtagbsearch','invtagbsearch'),('invtagrelative','invtagrelative'),('invtagstack','invtagstack'),('invtbi','invtbi'),('invtbidi','invtbidi'),('invtbs','invtbs'),('invtermbidi','invtermbidi'),('invterse','invterse'),('invtextauto','invtextauto'),('invtextmode','invtextmode'),('invtf','invtf'),('invtgst','invtgst'),('invtildeop','invtildeop'),('invtimeout','invtimeout'),('invtitle','invtitle'),('invto','invto'),('invtop','invtop'),('invtr','invtr'),('invttimeout','invttimeout'),('invttybuiltin','invttybuiltin'),('invttyfast','invttyfast'),('invtx','invtx'),('invvb','invvb'),('invvisualbell','invvisualbell'),('invwa','invwa'),('invwarn','invwarn'),('invwb','invwb'),('invweirdinvert','invweirdinvert'),('invwfh','invwfh'),('invwfw','invwfw'),('invwildignorecase','invwildignorecase'),('invwildmenu','invwildmenu'),('invwinfixheight','invwinfixheight'),('invwinfixwidth','invwinfixwidth'),('invwiv','invwiv'),('invwmnu','invwmnu'),('invwrap','invwrap'),('invwrapscan','invwrapscan'),('invwrite','invwrite'),('invwriteany','invwriteany'),('invwritebackup','invwritebackup'),('invws','invws'),('is','is'),('isf','isf'),('isfname','isfname'),('isi','isi'),('isident','isident'),('isk','isk'),('iskeyword','iskeyword'),('isp','isp'),('isprint','isprint'),('joinspaces','joinspaces'),('js','js'),('key','key'),('keymap','keymap'),('keymodel','keymodel'),('keywordprg','keywordprg'),('km','km'),('kmp','kmp'),('kp','kp'),('langmap','langmap'),
('langmenu','langmenu'),('laststatus','laststatus'),('lazyredraw','lazyredraw'),('lbr','lbr'),('lcs','lcs'),('linebreak','linebreak'),('lines','lines'),('linespace','linespace'),('lisp','lisp'),('lispwords','lispwords'),('list','list'),('listchars','listchars'),('lm','lm'),('lmap','lmap'),('loadplugins','loadplugins'),('lpl','lpl'),('ls','ls'),('lsp','lsp'),('lw','lw'),('lz','lz'),('ma','ma'),('macatsui','macatsui'),('magic','magic'),('makeef','makeef'),('makeprg','makeprg'),('mat','mat'),('matchpairs','matchpairs'),('matchtime','matchtime'),('maxcombine','maxcombine'),('maxfuncdepth','maxfuncdepth'),('maxmapdepth','maxmapdepth'),('maxmem','maxmem'),('maxmempattern','maxmempattern'),('maxmemtot','maxmemtot'),('mco','mco'),('mef','mef'),('menuitems','menuitems'),('mfd','mfd'),('mh','mh'),('mis','mis'),('mkspellmem','mkspellmem'),('ml','ml'),('mls','mls'),('mm','mm'),('mmd','mmd'),('mmp','mmp'),('mmt','mmt'),('mod','mod'),('modeline','modeline'),('modelines','modelines'),('modifiable','modifiable'),('modified','modified'),('more','more'),('mouse','mouse'),('mousef','mousef'),('mousefocus','mousefocus'),('mousehide','mousehide'),('mousem','mousem'),('mousemodel','mousemodel'),('mouses','mouses'),('mouseshape','mouseshape'),('mouset','mouset'),('mousetime','mousetime'),('mp','mp'),('mps','mps'),('msm','msm'),('mzq','mzq'),('mzquantum','mzquantum'),('nf','nf'),('nnoremap','nnoremap'),('noacd','noacd'),('noai','noai'),('noakm','noakm'),('noallowrevins','noallowrevins'),('noaltkeymap','noaltkeymap'),('noanti','noanti'),('noantialias','noantialias'),('noar','noar'),('noarab','noarab'),('noarabic','noarabic'),('noarabicshape','noarabicshape'),('noari','noari'),('noarshape','noarshape'),('noautochdir','noautochdir'),('noautoindent','noautoindent'),('noautoread','noautoread'),('noautowrite','noautowrite'),('noautowriteall','noautowriteall'),('noaw','noaw'),('noawa','noawa'),('nobackup','nobackup'),('noballooneval','noballooneval'),('nobeval','nobeval'),('nobin','nobin'),('nobi
nary','nobinary'),('nobiosk','nobiosk'),('nobioskey','nobioskey'),('nobk','nobk'),('nobl','nobl'),('nobomb','nobomb'),('nobuflisted','nobuflisted'),('nocf','nocf'),('noci','noci'),('nocin','nocin'),('nocindent','nocindent'),('nocompatible','nocompatible'),('noconfirm','noconfirm'),('noconsk','noconsk'),('noconskey','noconskey'),('nocopyindent','nocopyindent'),('nocp','nocp'),('nocrb','nocrb'),('nocscopetag','nocscopetag'),('nocscopeverbose','nocscopeverbose'),('nocst','nocst'),('nocsverb','nocsverb'),('nocuc','nocuc'),('nocul','nocul'),('nocursorbind','nocursorbind'),('nocursorcolumn','nocursorcolumn'),('nocursorline','nocursorline'),('nodeco','nodeco'),('nodelcombine','nodelcombine'),('nodg','nodg'),('nodiff','nodiff'),('nodigraph','nodigraph'),('noea','noea'),('noeb','noeb'),('noed','noed'),('noedcompatible','noedcompatible'),('noek','noek'),('noendofline','noendofline'),('noeol','noeol'),('noequalalways','noequalalways'),('noerrorbells','noerrorbells'),('noesckeys','noesckeys'),('noet','noet'),('noex','noex'),('noexpandtab','noexpandtab'),('noexrc','noexrc'),('nofen','nofen'),('nofk','nofk'),('nofkmap','nofkmap'),('nofoldenable','nofoldenable'),('nogd','nogd'),('nogdefault','nogdefault'),('noguipty','noguipty'),('nohid','nohid'),('nohidden','nohidden'),('nohk','nohk'),('nohkmap','nohkmap'),('nohkmapp','nohkmapp'),('nohkp','nohkp'),('nohls','nohls'),('nohlsearch','nohlsearch'),('noic','noic'),('noicon','noicon'),('noignorecase','noignorecase'),('noim','noim'),('noimc','noimc'),('noimcmdline','noimcmdline'),('noimd','noimd'),('noimdisable','noimdisable'),('noincsearch','noincsearch'),('noinf','noinf'),('noinfercase','noinfercase'),('noinsertmode','noinsertmode'),('nois','nois'),('nojoinspaces','nojoinspaces'),('nojs','nojs'),('nolazyredraw','nolazyredraw'),('nolbr','nolbr'),('nolinebreak','nolinebreak'),('nolisp','nolisp'),('nolist','nolist'),('noloadplugins','noloadplugins'),('nolpl','nolpl'),('nolz','nolz'),('noma','noma'),('nomacatsui','nomacatsui'),('nomagic','
nomagic'),('nomh','nomh'),('noml','noml'),('nomod','nomod'),('nomodeline','nomodeline'),('nomodifiable','nomodifiable'),('nomodified','nomodified'),('nomore','nomore'),('nomousef','nomousef'),('nomousefocus','nomousefocus'),('nomousehide','nomousehide'),('nonu','nonu'),('nonumber','nonumber'),('noodev','noodev'),('noopendevice','noopendevice'),('nopaste','nopaste'),('nopi','nopi'),('nopreserveindent','nopreserveindent'),('nopreviewwindow','nopreviewwindow'),('noprompt','noprompt'),('nopvw','nopvw'),('noreadonly','noreadonly'),('norelativenumber','norelativenumber'),('noremap','noremap'),('norestorescreen','norestorescreen'),('norevins','norevins'),('nori','nori'),('norightleft','norightleft'),('norl','norl'),('nornu','nornu'),('noro','noro'),('nors','nors'),('noru','noru'),('noruler','noruler'),('nosb','nosb'),('nosc','nosc'),('noscb','noscb'),('noscrollbind','noscrollbind'),('noscs','noscs'),('nosecure','nosecure'),('nosft','nosft'),('noshellslash','noshellslash'),('noshelltemp','noshelltemp'),('noshiftround','noshiftround'),('noshortname','noshortname'),('noshowcmd','noshowcmd'),('noshowfulltag','noshowfulltag'),('noshowmatch','noshowmatch'),('noshowmode','noshowmode'),('nosi','nosi'),('nosm','nosm'),('nosmartcase','nosmartcase'),('nosmartindent','nosmartindent'),('nosmarttab','nosmarttab'),('nosmd','nosmd'),('nosn','nosn'),('nosol','nosol'),('nospell','nospell'),('nosplitbelow','nosplitbelow'),('nosplitright','nosplitright'),('nospr','nospr'),('nosr','nosr'),('nossl','nossl'),('nosta','nosta'),('nostartofline','nostartofline'),('nostmp','nostmp'),('noswapfile','noswapfile'),('noswf','noswf'),('nota','nota'),('notagbsearch','notagbsearch'),('notagrelative','notagrelative'),('notagstack','notagstack'),('notbi','notbi'),('notbidi','notbidi'),('notbs','notbs'),('notermbidi','notermbidi'),('noterse','noterse'),('notextauto','notextauto'),('notextmode','notextmode'),('notf','notf'),('notgst','notgst'),('notildeop','notildeop'),('notimeout','notimeout'),('notitle','noti
tle'),('noto','noto'),('notop','notop'),('notr','notr'),('nottimeout','nottimeout'),('nottybuiltin','nottybuiltin'),('nottyfast','nottyfast'),('notx','notx'),('novb','novb'),('novisualbell','novisualbell'),('nowa','nowa'),('nowarn','nowarn'),('nowb','nowb'),('noweirdinvert','noweirdinvert'),('nowfh','nowfh'),('nowfw','nowfw'),('nowildignorecase','nowildignorecase'),('nowildmenu','nowildmenu'),('nowinfixheight','nowinfixheight'),('nowinfixwidth','nowinfixwidth'),('nowiv','nowiv'),('nowmnu','nowmnu'),('nowrap','nowrap'),('nowrapscan','nowrapscan'),('nowrite','nowrite'),('nowriteany','nowriteany'),('nowritebackup','nowritebackup'),('nows','nows'),('nrformats','nrformats'),('nu','nu'),('number','number'),('numberwidth','numberwidth'),('nuw','nuw'),('odev','odev'),('oft','oft'),('ofu','ofu'),('omnifunc','omnifunc'),('opendevice','opendevice'),('operatorfunc','operatorfunc'),('opfunc','opfunc'),('osfiletype','osfiletype'),('pa','pa'),('para','para'),('paragraphs','paragraphs'),('paste','paste'),('pastetoggle','pastetoggle'),('patchexpr','patchexpr'),('patchmode','patchmode'),('path','path'),('pdev','pdev'),('penc','penc'),('pex','pex'),('pexpr','pexpr'),('pfn','pfn'),('ph','ph'),('pheader','pheader'),('pi','pi'),('pm','pm'),('pmbcs','pmbcs'),('pmbfn','pmbfn'),('popt','popt'),('preserveindent','preserveindent'),('previewheight','previewheight'),('previewwindow','previewwindow'),('printdevice','printdevice'),('printencoding','printencoding'),('printexpr','printexpr'),('printfont','printfont'),('printheader','printheader'),('printmbcharset','printmbcharset'),('printmbfont','printmbfont'),('printoptions','printoptions'),('prompt','prompt'),('pt','pt'),('pumheight','pumheight'),('pvh','pvh'),('pvw','pvw'),('qe','qe'),('quoteescape','quoteescape'),('rdt','rdt'),('readonly','readonly'),('redrawtime','redrawtime'),('relativenumber','relativenumber'),('remap','remap'),('report','report'),('restorescreen','restorescreen'),('revins','revins'),('ri','ri'),('rightleft','rightleft'),('
rightleftcmd','rightleftcmd'),('rl','rl'),('rlc','rlc'),('rnu','rnu'),('ro','ro'),('rs','rs'),('rtp','rtp'),('ru','ru'),('ruf','ruf'),('ruler','ruler'),('rulerformat','rulerformat'),('runtimepath','runtimepath'),('sb','sb'),('sbo','sbo'),('sbr','sbr'),('sc','sc'),('scb','scb'),('scr','scr'),('scroll','scroll'),('scrollbind','scrollbind'),('scrolljump','scrolljump'),('scrolloff','scrolloff'),('scrollopt','scrollopt'),('scs','scs'),('sect','sect'),('sections','sections'),('secure','secure'),('sel','sel'),('selection','selection'),('selectmode','selectmode'),('sessionoptions','sessionoptions'),('sft','sft'),('sh','sh'),('shcf','shcf'),('shell','shell'),('shellcmdflag','shellcmdflag'),('shellpipe','shellpipe'),('shellquote','shellquote'),('shellredir','shellredir'),('shellslash','shellslash'),('shelltemp','shelltemp'),('shelltype','shelltype'),('shellxquote','shellxquote'),('shiftround','shiftround'),('shiftwidth','shiftwidth'),('shm','shm'),('shortmess','shortmess'),('shortname','shortname'),('showbreak','showbreak'),('showcmd','showcmd'),('showfulltag','showfulltag'),('showmatch','showmatch'),('showmode','showmode'),('showtabline','showtabline'),('shq','shq'),('si','si'),('sidescroll','sidescroll'),('sidescrolloff','sidescrolloff'),('siso','siso'),('sj','sj'),('slm','slm'),('sm','sm'),('smartcase','smartcase'),('smartindent','smartindent'),('smarttab','smarttab'),('smc','smc'),('smd','smd'),('sn','sn'),('so','so'),('softtabstop','softtabstop'),('sol','sol'),('sp','sp'),('spc','spc'),('spell','spell'),('spellcapcheck','spellcapcheck'),('spellfile','spellfile'),('spelllang','spelllang'),('spellsuggest','spellsuggest'),('spf','spf'),('spl','spl'),('splitbelow','splitbelow'),('splitright','splitright'),('spr','spr'),('sps','sps'),('sr','sr'),('srr','srr'),('ss','ss'),('ssl','ssl'),('ssop','ssop'),('st','st'),('sta','sta'),('stal','stal'),('startofline','startofline'),('statusline','statusline'),('stl','stl'),('stmp','stmp'),('sts','sts'),('su','su'),('sua','sua'),('suffix
es','suffixes'),('suffixesadd','suffixesadd'),('sw','sw'),('swapfile','swapfile'),('swapsync','swapsync'),('swb','swb'),('swf','swf'),('switchbuf','switchbuf'),('sws','sws'),('sxq','sxq'),('syn','syn'),('synmaxcol','synmaxcol'),('syntax','syntax'),('t_AB','t_AB'),('t_AF','t_AF'),('t_AL','t_AL'),('t_CS','t_CS'),('t_CV','t_CV'),('t_Ce','t_Ce'),('t_Co','t_Co'),('t_Cs','t_Cs'),('t_DL','t_DL'),('t_EI','t_EI'),('t_F1','t_F1'),('t_F2','t_F2'),('t_F3','t_F3'),('t_F4','t_F4'),('t_F5','t_F5'),('t_F6','t_F6'),('t_F7','t_F7'),('t_F8','t_F8'),('t_F9','t_F9'),('t_IE','t_IE'),('t_IS','t_IS'),('t_K1','t_K1'),('t_K3','t_K3'),('t_K4','t_K4'),('t_K5','t_K5'),('t_K6','t_K6'),('t_K7','t_K7'),('t_K8','t_K8'),('t_K9','t_K9'),('t_KA','t_KA'),('t_KB','t_KB'),('t_KC','t_KC'),('t_KD','t_KD'),('t_KE','t_KE'),('t_KF','t_KF'),('t_KG','t_KG'),('t_KH','t_KH'),('t_KI','t_KI'),('t_KJ','t_KJ'),('t_KK','t_KK'),('t_KL','t_KL'),('t_RI','t_RI'),('t_RV','t_RV'),('t_SI','t_SI'),('t_Sb','t_Sb'),('t_Sf','t_Sf'),('t_WP','t_WP'),('t_WS','t_WS'),('t_ZH','t_ZH'),('t_ZR','t_ZR'),('t_al','t_al'),('t_bc','t_bc'),('t_cd','t_cd'),('t_ce','t_ce'),('t_cl','t_cl'),('t_cm','t_cm'),('t_cs','t_cs'),('t_da','t_da'),('t_db','t_db'),('t_dl','t_dl'),('t_fs','t_fs'),('t_k1','t_k1'),('t_k2','t_k2'),('t_k3','t_k3'),('t_k4','t_k4'),('t_k5','t_k5'),('t_k6','t_k6'),('t_k7','t_k7'),('t_k8','t_k8'),('t_k9','t_k9'),('t_kB','t_kB'),('t_kD','t_kD'),('t_kI','t_kI'),('t_kN','t_kN'),('t_kP','t_kP'),('t_kb','t_kb'),('t_kd','t_kd'),('t_ke','t_ke'),('t_kh','t_kh'),('t_kl','t_kl'),('t_kr','t_kr'),('t_ks','t_ks'),('t_ku','t_ku'),('t_le','t_le'),('t_mb','t_mb'),('t_md','t_md'),('t_me','t_me'),('t_mr','t_mr'),('t_ms','t_ms'),('t_nd','t_nd'),('t_op','t_op'),('t_se','t_se'),('t_so','t_so'),('t_sr','t_sr'),('t_te','t_te'),('t_ti','t_ti'),('t_ts','t_ts'),('t_ue','t_ue'),('t_us','t_us'),('t_ut','t_ut'),('t_vb','t_vb'),('t_ve','t_ve'),('t_vi','t_vi'),('t_vs','t_vs'),('t_xs','t_xs'),('ta','ta'),('tabline','tabline'),('tabpagemax','tabpagemax'),('tabstop'
,'tabstop'),('tag','tag'),('tagbsearch','tagbsearch'),('taglength','taglength'),('tagrelative','tagrelative'),('tags','tags'),('tagstack','tagstack'),('tal','tal'),('tb','tb'),('tbi','tbi'),('tbidi','tbidi'),('tbis','tbis'),('tbs','tbs'),('tenc','tenc'),('term','term'),('termbidi','termbidi'),('termencoding','termencoding'),('terse','terse'),('textauto','textauto'),('textmode','textmode'),('textwidth','textwidth'),('tf','tf'),('tgst','tgst'),('thesaurus','thesaurus'),('tildeop','tildeop'),('timeout','timeout'),('timeoutlen','timeoutlen'),('title','title'),('titlelen','titlelen'),('titleold','titleold'),('titlestring','titlestring'),('tl','tl'),('tm','tm'),('to','to'),('toolbar','toolbar'),('toolbariconsize','toolbariconsize'),('top','top'),('tpm','tpm'),('tr','tr'),('ts','ts'),('tsl','tsl'),('tsr','tsr'),('ttimeout','ttimeout'),('ttimeoutlen','ttimeoutlen'),('ttm','ttm'),('tty','tty'),('ttybuiltin','ttybuiltin'),('ttyfast','ttyfast'),('ttym','ttym'),('ttymouse','ttymouse'),('ttyscroll','ttyscroll'),('ttytype','ttytype'),('tw','tw'),('tx','tx'),('uc','uc'),('udf','udf'),('udir','udir'),('ul','ul'),('undodir','undodir'),('undofile','undofile'),('undolevels','undolevels'),('undoreload','undoreload'),('updatecount','updatecount'),('updatetime','updatetime'),('ur','ur'),('ut','ut'),('vb','vb'),('vbs','vbs'),('vdir','vdir'),('ve','ve'),('verbose','verbose'),('verbosefile','verbosefile'),('vfile','vfile'),('vi','vi'),('viewdir','viewdir'),('viewoptions','viewoptions'),('viminfo','viminfo'),('virtualedit','virtualedit'),('visualbell','visualbell'),('vnoremap','vnoremap'),('vop','vop'),('wa','wa'),('wak','wak'),('warn','warn'),('wb','wb'),('wc','wc'),('wcm','wcm'),('wd','wd'),('weirdinvert','weirdinvert'),('wfh','wfh'),('wfw','wfw'),('wh','wh'),('whichwrap','whichwrap'),('wi','wi'),('wic','wic'),('wig','wig'),('wildchar','wildchar'),('wildcharm','wildcharm'),('wildignore','wildignore'),('wildignorecase','wildignorecase'),('wildmenu','wildmenu'),('wildmode','wildmode'),('wild
options','wildoptions'),('wim','wim'),('winaltkeys','winaltkeys'),('window','window'),('winfixheight','winfixheight'),('winfixwidth','winfixwidth'),('winheight','winheight'),('winminheight','winminheight'),('winminwidth','winminwidth'),('winwidth','winwidth'),('wiv','wiv'),('wiw','wiw'),('wm','wm'),('wmh','wmh'),('wmnu','wmnu'),('wmw','wmw'),('wop','wop'),('wrap','wrap'),('wrapmargin','wrapmargin'),('wrapscan','wrapscan'),('write','write'),('writeany','writeany'),('writebackup','writebackup'),('writedelay','writedelay'),('ws','ws'),('ww','ww')]
# Build the module-level completion tables once, at import time, from the
# generated (abbreviation, full-name) pair lists defined above.
option = _getoption()    # ':set' option names, e.g. ('ai', 'ai'), ('wrap', 'wrap')
command = _getcommand()  # Ex command names, e.g. ('w', 'write'), ('q', 'quit')
auto = _getauto()        # presumably autocmd event names -- _getauto() is defined above, outside this view
| mit |
kchodorow/tensorflow | tensorflow/python/debug/cli/ui_factory.py | 97 | 2489 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""TensorFlow Debugger (tfdbg) User-Interface Factory."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import copy
SUPPORTED_UI_TYPES = ["curses", "readline"]
def get_ui(ui_type, on_ui_exit=None, available_ui_types=None):
"""Create a `base_ui.BaseUI` subtype.
This factory method attempts to fallback to other available ui_types on
ImportError. For example, if `ui_type` is `curses`, but `curses` cannot be
imported properly, e.g., on Windows, will fallback to `readline`.
Args:
ui_type: (`str`) requested UI type. Currently supported:
(curses | readline)
on_ui_exit: (`Callable`) the callback to be called when the UI exits.
available_ui_types: (`None` or `list` of `str`) Manually-set available
ui_types.
Returns:
A `base_ui.BaseUI` subtype object.
Raises:
ValueError: on invalid ui_type or on exhausting or fallback ui_types.
"""
if available_ui_types is None:
available_ui_types = copy.deepcopy(SUPPORTED_UI_TYPES)
if ui_type and (ui_type not in available_ui_types):
raise ValueError("Invalid ui_type: '%s'" % ui_type)
try:
# pylint: disable=g-import-not-at-top
if not ui_type or ui_type == "curses":
from tensorflow.python.debug.cli import curses_ui
return curses_ui.CursesUI(on_ui_exit=on_ui_exit)
elif ui_type == "readline":
from tensorflow.python.debug.cli import readline_ui
return readline_ui.ReadlineUI(on_ui_exit=on_ui_exit)
# pylint: enable=g-import-not-at-top
except ImportError:
available_ui_types.remove(ui_type)
if not available_ui_types:
raise ValueError("Exhausted all fallback ui_types.")
return get_ui(available_ui_types[0],
available_ui_types=available_ui_types)
| apache-2.0 |
danieljaouen/ansible | lib/ansible/modules/cloud/opennebula/one_vm.py | 16 | 50014 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
"""
(c) 2017, Milan Ilic <[email protected]>
This file is part of Ansible
Ansible is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Ansible is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Ansible. If not, see <http://www.gnu.org/licenses/>.
"""
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: one_vm
short_description: Creates or terminates OpenNebula instances
description:
- Manages OpenNebula instances
version_added: "2.6"
requirements:
- python-oca
options:
api_url:
description:
- URL of the OpenNebula RPC server.
- It is recommended to use HTTPS so that the username/password are not
- transferred over the network unencrypted.
- If not set then the value of the C(ONE_URL) environment variable is used.
api_username:
description:
- Name of the user to login into the OpenNebula RPC server. If not set
- then the value of the C(ONE_USERNAME) environment variable is used.
api_password:
description:
- Password of the user to login into OpenNebula RPC server. If not set
template_name:
description:
      - Name of VM template to use to create a new instance
template_id:
description:
- ID of a VM template to use to create a new instance
instance_ids:
description:
- A list of instance ids used for states':' C(absent), C(running), C(rebooted), C(poweredoff)
aliases: ['ids']
state:
description:
- C(present) - create instances from a template specified with C(template_id)/C(template_name).
- C(running) - run instances
- C(poweredoff) - power-off instances
- C(rebooted) - reboot instances
- C(absent) - terminate instances
choices: ["present", "absent", "running", "rebooted", "poweredoff"]
default: present
hard:
description:
- Reboot, power-off or terminate instances C(hard)
default: no
type: bool
wait:
description:
- Wait for the instance to reach its desired state before returning. Keep
- in mind if you are waiting for instance to be in running state it
- doesn't mean that you will be able to SSH on that machine only that
- boot process have started on that instance, see 'wait_for' example for
- details.
default: yes
type: bool
wait_timeout:
description:
- How long before wait gives up, in seconds
default: 300
attributes:
description:
- A dictionary of key/value attributes to add to new instances, or for
- setting C(state) of instances with these attributes.
- Keys are case insensitive and OpenNebula automatically converts them to upper case.
- Be aware C(NAME) is a special attribute which sets the name of the VM when it's deployed.
- C(#) character(s) can be appended to the C(NAME) and the module will automatically add
- indexes to the names of VMs.
- For example':' C(NAME':' foo-###) would create VMs with names C(foo-000), C(foo-001),...
- When used with C(count_attributes) and C(exact_count) the module will
- match the base name without the index part.
default: {}
labels:
description:
- A list of labels to associate with new instances, or for setting
- C(state) of instances with these labels.
default: []
count_attributes:
description:
- A dictionary of key/value attributes that can only be used with
- C(exact_count) to determine how many nodes based on a specific
- attributes criteria should be deployed. This can be expressed in
- multiple ways and is shown in the EXAMPLES section.
count_labels:
description:
- A list of labels that can only be used with C(exact_count) to determine
- how many nodes based on a specific labels criteria should be deployed.
- This can be expressed in multiple ways and is shown in the EXAMPLES
- section.
count:
description:
- Number of instances to launch
default: 1
exact_count:
description:
- Indicates how many instances that match C(count_attributes) and
- C(count_labels) parameters should be deployed. Instances are either
- created or terminated based on this value.
- NOTE':' Instances with the least IDs will be terminated first.
mode:
description:
- Set permission mode of the instance in octet format, e.g. C(600) to give owner C(use) and C(manage) and nothing to group and others.
owner_id:
description:
- ID of the user which will be set as the owner of the instance
group_id:
description:
- ID of the group which will be set as the group of the instance
memory:
description:
- The size of the memory for new instances (in MB, GB, ...)
disk_size:
description:
- The size of the disk created for new instances (in MB, GB, TB,...).
- NOTE':' This option can be used only if the VM template specified with
- C(template_id)/C(template_name) has exactly one disk.
cpu:
description:
- Percentage of CPU divided by 100 required for the new instance. Half a
- processor is written 0.5.
vcpu:
description:
- Number of CPUs (cores) new VM will have.
networks:
description:
- A list of dictionaries with network parameters. See examples for more details.
default: []
disk_saveas:
description:
- Creates an image from a VM disk.
      - It is a dictionary where you have to specify C(name) of the new image.
      - Optionally you can specify C(disk_id) of the disk you want to save. By default C(disk_id) is 0.
- I(NOTE)':' This operation will only be performed on the first VM (if more than one VM ID is passed)
- and the VM has to be in the C(poweredoff) state.
- Also this operation will fail if an image with specified C(name) already exists.
author:
- "Milan Ilic (@ilicmilan)"
'''
EXAMPLES = '''
# Create a new instance
- one_vm:
template_id: 90
register: result
# Print VM properties
- debug:
msg: result
# Deploy a new VM and set its name to 'foo'
- one_vm:
template_name: 'app1_template'
attributes:
name: foo
# Deploy a new VM and set its group_id and mode
- one_vm:
template_id: 90
group_id: 16
mode: 660
# Change VM's permissions to 640
- one_vm:
instance_ids: 5
mode: 640
# Deploy 2 new instances and set memory, vcpu, disk_size and 3 networks
- one_vm:
template_id: 15
disk_size: 35.2 GB
memory: 4 GB
vcpu: 4
count: 2
networks:
- NETWORK_ID: 27
- NETWORK: "default-network"
NETWORK_UNAME: "app-user"
SECURITY_GROUPS: "120,124"
- NETWORK_ID: 27
SECURITY_GROUPS: "10"
# Deploy a new instance with attribute 'bar: bar1' and set its name to 'foo'
- one_vm:
template_id: 53
attributes:
name: foo
bar: bar1
# Enforce that 2 instances with attributes 'foo1: app1' and 'foo2: app2' are deployed
- one_vm:
template_id: 53
attributes:
foo1: app1
foo2: app2
exact_count: 2
count_attributes:
foo1: app1
foo2: app2
# Enforce that 4 instances with an attribute 'bar' are deployed
- one_vm:
template_id: 53
attributes:
name: app
bar: bar2
exact_count: 4
count_attributes:
bar:
# Deploy 2 new instances with attribute 'foo: bar' and labels 'app1' and 'app2' and names in format 'fooapp-##'
# Names will be: fooapp-00 and fooapp-01
- one_vm:
template_id: 53
attributes:
name: fooapp-##
foo: bar
labels:
- app1
- app2
count: 2
# Deploy 2 new instances with attribute 'app: app1' and names in format 'fooapp-###'
# Names will be: fooapp-002 and fooapp-003
- one_vm:
template_id: 53
attributes:
name: fooapp-###
app: app1
count: 2
# Reboot all instances with name in format 'fooapp-#'
# Instances 'fooapp-00', 'fooapp-01', 'fooapp-002' and 'fooapp-003' will be rebooted
- one_vm:
attributes:
name: fooapp-#
state: rebooted
# Enforce that only 1 instance with name in format 'fooapp-#' is deployed
# The task will delete oldest instances, so only the 'fooapp-003' will remain
- one_vm:
template_id: 53
exact_count: 1
count_attributes:
name: fooapp-#
# Deploy a new instance with a network
- one_vm:
template_id: 53
networks:
- NETWORK_ID: 27
register: vm
# Wait for SSH to come up
- wait_for_connection:
delegate_to: '{{ vm.instances[0].networks[0].ip }}'
# Terminate VMs by ids
- one_vm:
instance_ids:
- 153
- 160
state: absent
# Reboot all VMs that have labels 'foo' and 'app1'
- one_vm:
labels:
- foo
- app1
state: rebooted
# Fetch all VMs that have name 'foo' and attribute 'app: bar'
- one_vm:
attributes:
name: foo
app: bar
register: results
# Deploy 2 new instances with labels 'foo1' and 'foo2'
- one_vm:
template_name: app_template
labels:
- foo1
- foo2
count: 2
# Enforce that only 1 instance with label 'foo1' will be running
- one_vm:
template_name: app_template
labels:
- foo1
exact_count: 1
count_labels:
- foo1
# Terminate all instances that have attribute foo
- one_vm:
template_id: 53
exact_count: 0
count_attributes:
foo:
# Power-off the VM and save VM's disk with id=0 to the image with name 'foo-image'
- one_vm:
instance_ids: 351
    state: poweredoff
disk_saveas:
name: foo-image
# Save VM's disk with id=1 to the image with name 'bar-image'
- one_vm:
instance_ids: 351
disk_saveas:
name: bar-image
disk_id: 1
'''
RETURN = '''
instances_ids:
description: a list of instances ids whose state is changed or which are fetched with C(instance_ids) option.
type: list
returned: success
sample: [ 1234, 1235 ]
instances:
description: a list of instances info whose state is changed or which are fetched with C(instance_ids) option.
type: complex
returned: success
contains:
vm_id:
description: vm id
type: integer
sample: 153
vm_name:
description: vm name
type: string
sample: foo
template_id:
description: vm's template id
type: integer
sample: 153
group_id:
description: vm's group id
type: integer
sample: 1
group_name:
description: vm's group name
type: string
sample: one-users
owner_id:
description: vm's owner id
type: integer
sample: 143
owner_name:
description: vm's owner name
type: string
sample: app-user
mode:
description: vm's mode
type: string
returned: success
sample: 660
state:
description: state of an instance
type: string
sample: ACTIVE
lcm_state:
description: lcm state of an instance that is only relevant when the state is ACTIVE
type: string
sample: RUNNING
cpu:
description: Percentage of CPU divided by 100
type: float
sample: 0.2
vcpu:
description: Number of CPUs (cores)
type: int
sample: 2
memory:
description: The size of the memory in MB
type: string
sample: 4096 MB
disk_size:
description: The size of the disk in MB
type: string
sample: 20480 MB
networks:
description: a list of dictionaries with info about IP, NAME, MAC, SECURITY_GROUPS for each NIC
type: list
sample: [
{
"ip": "10.120.5.33",
"mac": "02:00:0a:78:05:21",
"name": "default-test-private",
"security_groups": "0,10"
},
{
"ip": "10.120.5.34",
"mac": "02:00:0a:78:05:22",
"name": "default-test-private",
"security_groups": "0"
}
]
uptime_h:
description: Uptime of the instance in hours
type: integer
sample: 35
labels:
description: A list of string labels that are associated with the instance
type: list
sample: [
"foo",
"spec-label"
]
attributes:
description: A dictionary of key/values attributes that are associated with the instance
type: dict
sample: {
"HYPERVISOR": "kvm",
"LOGO": "images/logos/centos.png",
"TE_GALAXY": "bar",
"USER_INPUTS": null
}
tagged_instances:
description:
- A list of instances info based on a specific attributes and/or
- labels that are specified with C(count_attributes) and C(count_labels)
- options.
type: complex
returned: success
contains:
vm_id:
description: vm id
type: integer
sample: 153
vm_name:
description: vm name
type: string
sample: foo
template_id:
description: vm's template id
type: integer
sample: 153
group_id:
description: vm's group id
type: integer
sample: 1
group_name:
description: vm's group name
type: string
sample: one-users
owner_id:
description: vm's user id
type: integer
sample: 143
owner_name:
description: vm's user name
type: string
sample: app-user
mode:
description: vm's mode
type: string
returned: success
sample: 660
state:
description: state of an instance
type: string
sample: ACTIVE
lcm_state:
description: lcm state of an instance that is only relevant when the state is ACTIVE
type: string
sample: RUNNING
cpu:
description: Percentage of CPU divided by 100
type: float
sample: 0.2
vcpu:
description: Number of CPUs (cores)
type: int
sample: 2
memory:
description: The size of the memory in MB
type: string
sample: 4096 MB
disk_size:
description: The size of the disk in MB
type: string
sample: 20480 MB
networks:
description: a list of dictionaries with info about IP, NAME, MAC, SECURITY_GROUPS for each NIC
type: list
sample: [
{
"ip": "10.120.5.33",
"mac": "02:00:0a:78:05:21",
"name": "default-test-private",
"security_groups": "0,10"
},
{
"ip": "10.120.5.34",
"mac": "02:00:0a:78:05:22",
"name": "default-test-private",
"security_groups": "0"
}
]
uptime_h:
description: Uptime of the instance in hours
type: integer
sample: 35
labels:
description: A list of string labels that are associated with the instance
type: list
sample: [
"foo",
"spec-label"
]
attributes:
description: A dictionary of key/values attributes that are associated with the instance
type: dict
sample: {
"HYPERVISOR": "kvm",
"LOGO": "images/logos/centos.png",
"TE_GALAXY": "bar",
"USER_INPUTS": null
}
'''
try:
import oca
HAS_OCA = True
except ImportError:
HAS_OCA = False
from ansible.module_utils.basic import AnsibleModule
import os
def get_template(module, client, predicate):
    """Return the single template for which `predicate` is true.

    Returns None when nothing matches; fails the module when the predicate
    matches more than one template.
    """
    pool = oca.VmTemplatePool(client)
    # Filter -2 means fetch all templates user can Use
    pool.info(filter=-2)
    matches = [template for template in pool if predicate(template)]
    if not matches:
        return None
    if len(matches) > 1:
        module.fail_json(msg='There are more templates with name: ' + matches[-1].name)
    return matches[0]
def get_template_by_name(module, client, template_name):
    """Look up a template by its exact name."""
    return get_template(module, client, lambda template: template.name == template_name)


def get_template_by_id(module, client, template_id):
    """Look up a template by its numeric id."""
    return get_template(module, client, lambda template: template.id == template_id)
def get_template_id(module, client, requested_id, requested_name):
    """Resolve a template id from either an explicit id or a name.

    Bug fix: the original tested `if requested_id`, so a legal template id of
    0 fell through to a name lookup (with name None) and returned None.
    Testing `is not None` keeps all other behavior identical.

    Returns:
        The template id, or None when no matching template exists.
    """
    if requested_id is not None:
        template = get_template_by_id(module, client, requested_id)
    else:
        template = get_template_by_name(module, client, requested_name)
    if template:
        return template.id
    else:
        return None
def get_vm_by_id(client, vm_id):
    """Return the VM whose id equals `vm_id`, or None when absent."""
    pool = oca.VirtualMachinePool(client)
    # Retrieves information for all or part of the vms pool
    # -4: Vms belonging to the user's primary group
    # -3: Vms belonging to the user
    # -2: All vms user can Use
    # -1: Vms belonging to the user and any of his groups - default
    # >= 0: UID User's vms
    pool.info(filter=-2)
    wanted = str(vm_id)
    return next((vm for vm in pool if str(vm.id) == wanted), None)
def get_vms_by_ids(module, client, state, ids):
    """Fetch VM objects for the given ids.

    A missing VM is tolerated (appended as None) only when `state` is
    'absent', since the VM is about to be deleted anyway.
    """
    found = []
    for requested_id in ids:
        instance = get_vm_by_id(client, requested_id)
        if instance is None and state != 'absent':
            module.fail_json(msg='There is no VM with id=' + str(requested_id))
        found.append(instance)
    return found
def get_vm_info(client, vm):
    """Collect a serializable dict of facts about one VM.

    Refreshes the VM object, then gathers template data (disk size, NICs,
    memory, cpu), ownership, permissions, state and user-template
    labels/attributes via extra XML-RPC lookups.

    Args:
        client: oca client used for the extra lookups.
        vm: oca virtual-machine object; refreshed in place via vm.info().

    Returns:
        dict with the keys documented in the module's RETURN section.
    """
    vm.info()
    networks_info = []
    disk_size = ''
    # NOTE(review): only the first disk's size is reported, even when the
    # VM has several disks — confirm whether that is intentional.
    if hasattr(vm.template, 'disks'):
        disk_size = vm.template.disks[0].size + ' MB'
    if hasattr(vm.template, 'nics'):
        for nic in vm.template.nics:
            networks_info.append({'ip': nic.ip, 'mac': nic.mac, 'name': nic.network, 'security_groups': nic.security_groups})
    import time
    current_time = time.localtime()
    vm_start_time = time.localtime(vm.stime)
    # Uptime is computed in seconds, then converted to whole hours below.
    vm_uptime = time.mktime(current_time) - time.mktime(vm_start_time)
    vm_uptime /= (60 * 60)
    permissions_str = parse_vm_permissions(client, vm)
    # LCM_STATE is VM's sub-state that is relevant only when STATE is ACTIVE
    vm_lcm_state = None
    if vm.state == VM_STATES.index('ACTIVE'):
        vm_lcm_state = LCM_STATES[vm.lcm_state]
    vm_labels, vm_attributes = get_vm_labels_and_attributes_dict(client, vm.id)
    info = {
        'template_id': int(vm.template.template_id),
        'vm_id': vm.id,
        'vm_name': vm.name,
        'state': VM_STATES[vm.state],
        'lcm_state': vm_lcm_state,
        'owner_name': vm.uname,
        'owner_id': vm.uid,
        'networks': networks_info,
        'disk_size': disk_size,
        'memory': vm.template.memory + ' MB',
        'vcpu': vm.template.vcpu,
        'cpu': vm.template.cpu,
        'group_name': vm.gname,
        'group_id': vm.gid,
        'uptime_h': int(vm_uptime),
        'attributes': vm_attributes,
        'mode': permissions_str,
        'labels': vm_labels
    }
    return info
def parse_vm_permissions(client, vm):
    """Return the VM's permissions as a chmod-style octal string, e.g. '660'.

    OpenNebula reports nine one-bit flags under the PERMISSIONS element:
    USE (U), MANAGE (M) and ADMIN (A) for each of OWNER, GROUP and OTHER.
    Each U/M/A triple is folded into one octal digit (U=4, M=2, A=1).
    """
    import xml.etree.ElementTree as ET
    vm_XML = client.call('vm.info', vm.id)
    permissions_element = ET.fromstring(vm_XML).find('PERMISSIONS')
    perm_dict = {child.tag: child.text for child in permissions_element}
    digits = []
    for who in ('OWNER', 'GROUP', 'OTHER'):
        octal = (int(perm_dict[who + '_U']) * 4
                 + int(perm_dict[who + '_M']) * 2
                 + int(perm_dict[who + '_A']))
        digits.append(str(octal))
    return ''.join(digits)
def set_vm_permissions(module, client, vms, permissions):
    """Apply an octal permission string (e.g. '600') to each VM.

    Args:
        module: AnsibleModule (supplies check_mode and failure reporting).
        client: oca client used for the vm.chmod RPC call.
        vms: list of oca VM objects.
        permissions: three-digit octal string, chmod-style.

    Returns:
        bool: True if any VM's current permissions differed from the target.
    """
    changed = False
    for vm in vms:
        vm.info()
        old_permissions = parse_vm_permissions(client, vm)
        changed = changed or old_permissions != permissions
        if not module.check_mode and old_permissions != permissions:
            # Expand the octal string into nine U/M/A bits for
            # owner/group/other, e.g. 600 -> '110000000'.
            # Bug fix: zero-pad to 9 bits — without zfill, modes whose owner
            # digit is below 4 (leading zero bits) yield a short list and an
            # IndexError on mode_bits[8] below. Also removed a stray debug
            # print of vm.id.
            permissions_str = bin(int(permissions, base=8))[2:].zfill(9)
            mode_bits = [int(d) for d in permissions_str]
            try:
                client.call('vm.chmod', vm.id, mode_bits[0], mode_bits[1], mode_bits[2], mode_bits[3],
                            mode_bits[4], mode_bits[5], mode_bits[6], mode_bits[7], mode_bits[8])
            except oca.OpenNebulaException:
                module.fail_json(msg="Permissions changing is unsuccessful, but instances are present if you deployed them.")
    return changed
def set_vm_ownership(module, client, vms, owner_id, group_id):
    """Change owner and/or group of each VM.

    When owner_id/group_id is None it is filled in from the first VM
    inspected, and that value is then reused for the remaining VMs
    (deliberately preserved from the original implementation).

    Returns:
        bool: True if any VM needed a chown.
    """
    changed = False
    for vm in vms:
        vm.info()
        owner_id = vm.uid if owner_id is None else owner_id
        group_id = vm.gid if group_id is None else group_id
        needs_chown = owner_id != vm.uid or group_id != vm.gid
        changed = changed or needs_chown
        if needs_chown and not module.check_mode:
            try:
                client.call('vm.chown', vm.id, owner_id, group_id)
            except oca.OpenNebulaException:
                module.fail_json(msg="Ownership changing is unsuccessful, but instances are present if you deployed them.")
    return changed
def get_size_in_MB(module, size_str):
    """Convert a size string such as '2 GB' or '512 MB' into megabytes.

    Args:
        module: AnsibleModule used to report parse failures via fail_json.
        size_str: a number (digits, optional decimal point) followed by one
            of the unit symbols B, KB, MB, GB or TB.

    Returns:
        float: the size expressed in MB.
    """
    SYMBOLS = ['B', 'KB', 'MB', 'GB', 'TB']
    init = size_str
    s = size_str
    num = ""
    # Consume the leading numeric part (digits and a decimal point). The
    # original condition relied on operator precedence; parenthesized here.
    while s and (s[0].isdigit() or s[0] == '.'):
        num += s[0]
        s = s[1:]
    # Bug fix: float("") raised an unhandled ValueError when the string had
    # no leading number; fail the module with a clear message instead.
    try:
        num = float(num)
    except ValueError:
        module.fail_json(msg="Cannot interpret %r as a size" % init)
    symbol = s.strip()
    if symbol not in SYMBOLS:
        module.fail_json(msg="Cannot interpret %r %r %d" % (init, symbol, num))
    # Byte multiplier per symbol: KB = 2**10, MB = 2**20, ...
    prefix = {'B': 1}
    for i, sym in enumerate(SYMBOLS[1:]):
        prefix[sym] = 1 << ((i + 1) * 10)
    size_in_bytes = int(num * prefix[symbol])
    return size_in_bytes / (1024 * 1024)
def create_disk_str(module, client, template_id, disk_size_str):
    """Build a DISK = [...] override string with a new SIZE for the template's disk.

    Returns '' when no disk size override was requested. Fails unless the
    template contains exactly one DISK element.
    """
    if not disk_size_str:
        return ''
    import xml.etree.ElementTree as ET
    template_XML = client.call('template.info', template_id)
    tree_root = ET.fromstring(template_XML)
    disk_elements = tree_root.find('TEMPLATE').findall('DISK')
    if len(disk_elements) != 1:
        module.fail_json(msg='You can pass disk_size only if template has exact one disk. This template has ' + str(len(disk_elements)) + ' disks.')
    # Keep every existing disk attribute (e.g. IMAGE_ID) except SIZE, which
    # is replaced with the requested value.
    disk = {child.tag: child.text for child in disk_elements[0]}
    kept_attrs = ','.join('{key}="{val}"'.format(key=key, val=val) for key, val in disk.items() if key != 'SIZE')
    new_size = str(int(get_size_in_MB(module, disk_size_str)))
    return 'DISK = [' + kept_attrs + ', SIZE=' + new_size + ']\n'
def create_attributes_str(attributes_dict, labels_list):
    """Render labels and key/value attributes as OpenNebula template text.

    Labels become a single LABELS="a,b" line; attribute keys are upper-cased
    and emitted one KEY="value" pair per line.
    """
    sections = []
    if labels_list:
        joined = ','.join('{label}'.format(label=label) for label in labels_list)
        sections.append('LABELS="' + joined + '"\n')
    if attributes_dict:
        pairs = ['{key}="{val}"'.format(key=key.upper(), val=val)
                 for key, val in attributes_dict.items()]
        sections.append('\n'.join(pairs) + '\n')
    return ''.join(sections)
def create_nics_str(network_attrs_list):
    """Render each network dict as one NIC = [key="value",...] template line."""
    lines = []
    for network in network_attrs_list:
        # Packing key-value dict in string with format key="value", key="value"
        attrs = ','.join('{key}="{val}"'.format(key=key, val=val) for key, val in network.items())
        lines.append('NIC = [' + attrs + ']\n')
    return ''.join(lines)
def create_vm(module, client, template_id, attributes_dict, labels_list, disk_size, network_attrs_list):
    """Instantiate one VM from a template with optional overrides.

    Args:
        module: AnsibleModule.
        client: oca client for the template.instantiate RPC call.
        template_id: id of the template to instantiate.
        attributes_dict: extra attributes; its NAME key, if present, names the VM.
        labels_list: labels to attach to the new VM.
        disk_size: optional disk size override string (e.g. '10 GB').
        network_attrs_list: list of NIC attribute dicts.

    Returns:
        dict of VM facts as produced by get_vm_info().
    """
    # Bug fix: default the name first — the original only assigned vm_name
    # inside `if attributes_dict:`, so a falsy attributes_dict (the module
    # default {}) caused a NameError at the instantiate call below.
    vm_name = ''
    if attributes_dict:
        vm_name = attributes_dict.get('NAME', '')
    disk_str = create_disk_str(module, client, template_id, disk_size)
    vm_extra_template_str = create_attributes_str(attributes_dict, labels_list) + create_nics_str(network_attrs_list) + disk_str
    vm_id = client.call('template.instantiate', template_id, vm_name, False, vm_extra_template_str)
    vm = get_vm_by_id(client, vm_id)
    return get_vm_info(client, vm)
def generate_next_index(vm_filled_indexes_list, num_sign_cnt):
    """Return the smallest zero-padded index string not already in use.

    E.g. with used indexes ['000', '001'] and width 3, returns '002'.
    """
    candidate_value = 0
    while True:
        candidate = str(candidate_value).zfill(num_sign_cnt)
        if candidate not in vm_filled_indexes_list:
            return candidate
        candidate_value += 1
def get_vm_labels_and_attributes_dict(client, vm_id):
    """Fetch a VM's USER_TEMPLATE and split it into labels and attributes.

    Returns:
        (labels_list, attrs_dict): labels parsed from the comma-separated
        LABELS element; every other child becomes a tag->text dict entry.
    """
    import xml.etree.ElementTree as ET
    vm_XML = client.call('vm.info', vm_id)
    user_template = ET.fromstring(vm_XML).find('USER_TEMPLATE')
    attrs_dict = {}
    labels_list = []
    for child in user_template:
        if child.tag == 'LABELS':
            if child.text is not None:
                labels_list = child.text.split(',')
        else:
            attrs_dict[child.tag] = child.text
    return labels_list, attrs_dict
def get_all_vms_by_attributes(client, attributes_dict, labels_list):
    """Return all VMs matching the given attributes and labels.

    NAME is handled specially: a trailing run of '#' acts as an index
    wildcard (e.g. 'app-##' matches 'app-01'); otherwise the name must match
    exactly. Every remaining attribute must be present on the VM (and equal,
    when a value is given), and every label must be attached.

    WARNING: mutates `attributes_dict` in place — the NAME key is popped.

    Args:
        client: oca client.
        attributes_dict: attribute filters (may be None/empty).
        labels_list: label filters (may be None/empty).

    Returns:
        list of matching oca VM objects.
    """
    pool = oca.VirtualMachinePool(client)
    # Retrieves information for all or part of the vms pool
    # -4: Vms belonging to the user's primary group
    # -3: Vms belonging to the user
    # -2: All vms user can Use
    # -1: Vms belonging to the user and any of his groups - default
    # >= 0: UID User's vms
    pool.info(filter=-2)
    vm_list = []
    name = ''
    if attributes_dict:
        name = attributes_dict.pop('NAME', '')
    if name != '':
        # base_name is NAME with its trailing '#' placeholders stripped.
        base_name = name[:len(name) - name.count('#')]
        # Check does the name have indexed format
        with_hash = name.endswith('#')
        for vm in pool:
            if vm.name.startswith(base_name):
                if with_hash and vm.name[len(base_name):].isdigit():
                    # If the name has indexed format and after base_name it has only digits it'll be matched
                    vm_list.append(vm)
                elif not with_hash and vm.name == name:
                    # If the name is not indexed it has to be same
                    vm_list.append(vm)
        pool = vm_list
    import copy
    # Iterate over the name-filtered candidates while removing non-matching
    # VMs from a shallow copy, so removal does not disturb the iteration.
    vm_list = copy.copy(pool)
    for vm in pool:
        vm_labels_list, vm_attributes_dict = get_vm_labels_and_attributes_dict(client, vm.id)
        if attributes_dict and len(attributes_dict) > 0:
            for key, val in attributes_dict.items():
                if key in vm_attributes_dict:
                    # An empty filter value only requires the key to exist.
                    if val and vm_attributes_dict[key] != val and vm in vm_list:
                        vm_list.remove(vm)
                        break
                else:
                    if vm in vm_list:
                        vm_list.remove(vm)
                        break
        if labels_list and len(labels_list) > 0:
            for label in labels_list:
                if label not in vm_labels_list and vm in vm_list:
                    vm_list.remove(vm)
                    break
    return vm_list
def create_count_of_vms(module, client, template_id, count, attributes_dict, labels_list, disk_size, network_attrs_list, wait, wait_timeout):
    """Deploy `count` new VMs from a template.

    When the NAME attribute ends in '#' placeholders, already-used indexes
    are collected and each new VM gets the next free zero-padded index.

    NOTE(review): `attributes_dict` is mutated (NAME is overwritten per VM);
    callers pass the module default {} so this works, but a None value would
    raise here — confirm callers never pass None.

    Returns:
        (True, list_of_new_vm_objects, []) — shaped like the exact_count
        variant's (changed, instances, tagged_instances) triple.
    """
    new_vms_list = []
    vm_name = ''
    if attributes_dict:
        vm_name = attributes_dict.get('NAME', '')
    if module.check_mode:
        return True, [], []
    # Create list of used indexes
    vm_filled_indexes_list = None
    num_sign_cnt = vm_name.count('#')
    if vm_name != '' and num_sign_cnt > 0:
        vm_list = get_all_vms_by_attributes(client, {'NAME': vm_name}, None)
        base_name = vm_name[:len(vm_name) - num_sign_cnt]
        vm_name = base_name
        # Make list which contains used indexes in format ['000', '001',...]
        vm_filled_indexes_list = list((vm.name[len(base_name):].zfill(num_sign_cnt)) for vm in vm_list)
    while count > 0:
        new_vm_name = vm_name
        # Create indexed name
        if vm_filled_indexes_list is not None:
            next_index = generate_next_index(vm_filled_indexes_list, num_sign_cnt)
            vm_filled_indexes_list.append(next_index)
            new_vm_name += next_index
        # Update NAME value in the attributes in case there is index
        attributes_dict['NAME'] = new_vm_name
        new_vm_dict = create_vm(module, client, template_id, attributes_dict, labels_list, disk_size, network_attrs_list)
        new_vm_id = new_vm_dict.get('vm_id')
        new_vm = get_vm_by_id(client, new_vm_id)
        new_vms_list.append(new_vm)
        count -= 1
    if wait:
        for vm in new_vms_list:
            wait_for_running(module, vm, wait_timeout)
    return True, new_vms_list, []
def create_exact_count_of_vms(module, client, template_id, exact_count, attributes_dict, count_attributes_dict,
                              labels_list, count_labels_list, disk_size, network_attrs_list, hard, wait, wait_timeout):
    """Converge the number of VMs matching the count filters to `exact_count`.

    Counts existing VMs matching count_attributes_dict/count_labels_list,
    then either deploys the missing VMs or terminates the surplus ones
    (oldest first, since the pool listing is popped from the front).

    Returns:
        (changed, instances_list, tagged_instances_list): instances_list
        holds the VMs created or terminated; tagged_instances_list holds all
        VMs matching the count filters after convergence.
    """
    vm_list = get_all_vms_by_attributes(client, count_attributes_dict, count_labels_list)
    vm_count_diff = exact_count - len(vm_list)
    changed = vm_count_diff != 0
    new_vms_list = []
    instances_list = []
    tagged_instances_list = vm_list
    if module.check_mode:
        return changed, instances_list, tagged_instances_list
    if vm_count_diff > 0:
        # Add more VMs
        changed, instances_list, tagged_instances = create_count_of_vms(module, client, template_id, vm_count_diff, attributes_dict,
                                                                        labels_list, disk_size, network_attrs_list, wait, wait_timeout)
        tagged_instances_list += instances_list
    elif vm_count_diff < 0:
        # Delete surplus VMs
        old_vms_list = []
        while vm_count_diff < 0:
            old_vm = vm_list.pop(0)
            old_vms_list.append(old_vm)
            terminate_vm(module, client, old_vm, hard)
            vm_count_diff += 1
        if wait:
            for vm in old_vms_list:
                wait_for_done(module, vm, wait_timeout)
        instances_list = old_vms_list
        # store only the remaining instances
        old_vms_set = set(old_vms_list)
        tagged_instances_list = [vm for vm in vm_list if vm not in old_vms_set]
    return changed, instances_list, tagged_instances_list
# Numeric OpenNebula VM state names; the list index equals the state code
# reported by the API (index 7 is an unused gap in the numbering).
VM_STATES = ['INIT', 'PENDING', 'HOLD', 'ACTIVE', 'STOPPED', 'SUSPENDED', 'DONE', '', 'POWEROFF', 'UNDEPLOYED', 'CLONING', 'CLONING_FAILURE']
# LCM (life-cycle manager) sub-state names; only meaningful while the VM
# state is ACTIVE. The list index equals the LCM state code.
# NOTE(review): 'HOTPULG_SAVEAS_SUSPENDED' looks like a typo of
# 'HOTPLUG_SAVEAS_SUSPENDED', but the string is surfaced to users as-is —
# confirm against the OpenNebula state table before renaming it.
LCM_STATES = ['LCM_INIT', 'PROLOG', 'BOOT', 'RUNNING', 'MIGRATE', 'SAVE_STOP',
              'SAVE_SUSPEND', 'SAVE_MIGRATE', 'PROLOG_MIGRATE', 'PROLOG_RESUME',
              'EPILOG_STOP', 'EPILOG', 'SHUTDOWN', 'STATE13', 'STATE14', 'CLEANUP_RESUBMIT', 'UNKNOWN', 'HOTPLUG', 'SHUTDOWN_POWEROFF',
              'BOOT_UNKNOWN', 'BOOT_POWEROFF', 'BOOT_SUSPENDED', 'BOOT_STOPPED', 'CLEANUP_DELETE', 'HOTPLUG_SNAPSHOT', 'HOTPLUG_NIC',
              'HOTPLUG_SAVEAS', 'HOTPLUG_SAVEAS_POWEROFF', 'HOTPULG_SAVEAS_SUSPENDED', 'SHUTDOWN_UNDEPLOY']
def wait_for_state(module, vm, wait_timeout, state_predicate):
    """Poll the VM once per second until `state_predicate(state, lcm_state)`.

    Fails the module when the VM enters an unexpected state or when
    `wait_timeout` seconds elapse without the predicate becoming true.
    """
    import time
    deadline = time.time() + wait_timeout
    while time.time() < deadline:
        vm.info()
        state = vm.state
        lcm_state = vm.lcm_state
        if state_predicate(state, lcm_state):
            return vm
        if state not in [VM_STATES.index('INIT'), VM_STATES.index('PENDING'), VM_STATES.index('HOLD'),
                         VM_STATES.index('ACTIVE'), VM_STATES.index('POWEROFF')]:
            module.fail_json(msg='Action is unsuccessful. VM state: ' + VM_STATES[state])
        time.sleep(1)
    module.fail_json(msg="Wait timeout has expired!")
def wait_for_running(module, vm, wait_timeout):
    """Block until the VM is ACTIVE with LCM sub-state RUNNING."""
    def predicate(state, lcm_state):
        return state == VM_STATES.index('ACTIVE') and lcm_state == LCM_STATES.index('RUNNING')
    return wait_for_state(module, vm, wait_timeout, predicate)


def wait_for_done(module, vm, wait_timeout):
    """Block until the VM reaches the DONE state."""
    return wait_for_state(module, vm, wait_timeout, lambda state, lcm_state: state == VM_STATES.index('DONE'))


def wait_for_poweroff(module, vm, wait_timeout):
    """Block until the VM reaches the POWEROFF state."""
    return wait_for_state(module, vm, wait_timeout, lambda state, lcm_state: state == VM_STATES.index('POWEROFF'))
def terminate_vm(module, client, vm, hard=False):
    """Issue a terminate (or terminate-hard) action for one VM.

    Returns False for a missing VM (vm is None/falsy), True otherwise —
    i.e. whether a change was (or would be, in check mode) made.
    """
    if not vm:
        return False
    if not module.check_mode:
        action = 'terminate-hard' if hard else 'terminate'
        client.call('vm.action', action, vm.id)
    return True
def terminate_vms(module, client, vms, hard):
    """Terminate every VM in the list; True if any terminate was issued."""
    results = [terminate_vm(module, client, vm, hard) for vm in vms]
    return any(results)
def poweroff_vm(module, vm, hard):
    """Power off one VM unless it is already (being) powered off.

    Returns True when a power-off was (or would be, in check mode) issued.
    """
    vm.info()
    already_stopping = vm.lcm_state in [LCM_STATES.index('SHUTDOWN'), LCM_STATES.index('SHUTDOWN_POWEROFF')]
    already_off = vm.state == VM_STATES.index('POWEROFF')
    changed = not already_stopping and not already_off
    if changed and not module.check_mode:
        if hard:
            vm.poweroff_hard()
        else:
            vm.poweroff()
    return changed
def poweroff_vms(module, client, vms, hard):
    """Power off every VM in the list; True if any VM changed."""
    results = [poweroff_vm(module, vm, hard) for vm in vms]
    return any(results)
def reboot_vms(module, client, vms, wait_timeout, hard):
    """Reboot VMs by powering them off, waiting, then resuming them.

    Always reports a change (returns True), matching the original behavior.
    """
    if module.check_mode:
        return True
    # Firstly, power-off all instances
    for vm in vms:
        vm.info()
        if vm.lcm_state != LCM_STATES.index('SHUTDOWN_POWEROFF') and vm.state != VM_STATES.index('POWEROFF'):
            poweroff_vm(module, vm, hard)
    # Wait for all to be power-off
    for vm in vms:
        wait_for_poweroff(module, vm, wait_timeout)
    for vm in vms:
        resume_vm(module, vm)
    return True
def resume_vm(module, vm):
    """Resume one VM; True when a resume was (or would be) issued.

    Fails while the VM is still shutting down, because resume is not a
    valid action in the SHUTDOWN_POWEROFF sub-state.
    """
    vm.info()
    lcm_state = vm.lcm_state
    if lcm_state == LCM_STATES.index('SHUTDOWN_POWEROFF'):
        module.fail_json(msg="Cannot perform action 'resume' because this action is not available " +
                         "for LCM_STATE: 'SHUTDOWN_POWEROFF'. Wait for the VM to shutdown properly")
    changed = lcm_state != LCM_STATES.index('RUNNING')
    if changed and not module.check_mode:
        vm.resume()
    return changed
def resume_vms(module, client, vms):
    """Resume every VM in the list; True if any VM changed."""
    results = [resume_vm(module, vm) for vm in vms]
    return any(results)
def check_name_attribute(module, attributes):
    """Validate the special NAME attribute.

    NAME may optionally end in one or more '#' index placeholders, but must
    contain at least one non-'#' character before them. Fails the module on
    violation; does nothing when NAME is absent.
    """
    if attributes.get("NAME"):
        import re
        # One or more non-'#' characters, followed only by trailing '#'s.
        if re.match(r'^[^#]+#*$', attributes.get("NAME")) is None:
            # Bug fix: corrected the user-facing message ("Ilegal" -> "Illegal",
            # "' .Signs" -> "'. Signs").
            module.fail_json(msg="Illegal 'NAME' attribute: '" + attributes.get("NAME") +
                             "'. Signs '#' are allowed only at the end of the name and the name cannot contain only '#'.")
# Attribute names that OpenNebula reserves for templates and that therefore
# cannot be used as free-form VM attributes.
TEMPLATE_RESTRICTED_ATTRIBUTES = ["CPU", "VCPU", "OS", "FEATURES", "MEMORY", "DISK", "NIC", "INPUT", "GRAPHICS",
                                  "CONTEXT", "CREATED_BY", "CPU_COST", "DISK_COST", "MEMORY_COST",
                                  "TEMPLATE_ID", "VMID", "AUTOMATIC_DS_REQUIREMENTS", "DEPLOY_FOLDER", "LABELS"]


def check_attributes(module, attributes):
    """Reject template-reserved attribute keys, then validate NAME format."""
    restricted = [key for key in attributes if key in TEMPLATE_RESTRICTED_ATTRIBUTES]
    if restricted:
        module.fail_json(msg='Restricted attribute `' + restricted[0] + '` cannot be used when filtering VMs.')
    # Check the format of the name attribute
    check_name_attribute(module, attributes)
def disk_save_as(module, client, vm, disk_saveas, wait_timeout):
    """Save one of the VM's disks as a new image.

    `disk_saveas` must contain 'name' (new image name) and may contain
    'disk_id' (defaults to 0). The VM must be in the POWEROFF state.
    """
    image_name = disk_saveas.get('name')
    if not image_name:
        module.fail_json(msg="Key 'name' is required for 'disk_saveas' option")
    disk_id = disk_saveas.get('disk_id', 0)
    if module.check_mode:
        return
    if vm.state != VM_STATES.index('POWEROFF'):
        module.fail_json(msg="'disksaveas' option can be used only when the VM is in 'POWEROFF' state")
    client.call('vm.disksaveas', vm.id, disk_id, image_name, 'OS', -1)
    # wait for VM to leave the hotplug_saveas_poweroff state
    wait_for_poweroff(module, vm, wait_timeout)
def get_connection_info(module):
    """Resolve the OpenNebula endpoint credentials.

    Module parameters take precedence; the ONE_URL / ONE_USERNAME /
    ONE_PASSWORD environment variables are the fallback. Fails the module
    when any of the three is still missing.

    Returns:
        namedtuple('auth', ('url', 'username', 'password'))
    """
    url = module.params.get('api_url') or os.environ.get('ONE_URL')
    username = module.params.get('api_username') or os.environ.get('ONE_USERNAME')
    password = module.params.get('api_password') or os.environ.get('ONE_PASSWORD')
    if not (url and username and password):
        module.fail_json(msg="One or more connection parameters (api_url, api_username, api_password) were not specified")
    from collections import namedtuple
    auth_params = namedtuple('auth', ('url', 'username', 'password'))
    return auth_params(url=url, username=username, password=password)
def main():
    """Entry point for the one_vm module.

    Parses the module arguments, validates option combinations, then either
    deploys VMs (count / exact_count), changes the state of existing VMs,
    or just fetches their info, and finally reports results via exit_json.
    """
    fields = {
        "api_url": {"required": False, "type": "str"},
        "api_username": {"required": False, "type": "str"},
        "api_password": {"required": False, "type": "str", "no_log": True},
        "instance_ids": {"required": False, "aliases": ['ids'], "type": "list"},
        "template_name": {"required": False, "type": "str"},
        "template_id": {"required": False, "type": "int"},
        "state": {
            "default": "present",
            "choices": ['present', 'absent', 'rebooted', 'poweredoff', 'running'],
            "type": "str"
        },
        "mode": {"required": False, "type": "str"},
        "owner_id": {"required": False, "type": "int"},
        "group_id": {"required": False, "type": "int"},
        "wait": {"default": True, "type": "bool"},
        "wait_timeout": {"default": 300, "type": "int"},
        "hard": {"default": False, "type": "bool"},
        "memory": {"required": False, "type": "str"},
        "cpu": {"required": False, "type": "float"},
        "vcpu": {"required": False, "type": "int"},
        "disk_size": {"required": False, "type": "str"},
        "networks": {"default": [], "type": "list"},
        "count": {"default": 1, "type": "int"},
        "exact_count": {"required": False, "type": "int"},
        "attributes": {"default": {}, "type": "dict"},
        "count_attributes": {"required": False, "type": "dict"},
        "labels": {"default": [], "type": "list"},
        "count_labels": {"required": False, "type": "list"},
        "disk_saveas": {"type": "dict"}
    }

    module = AnsibleModule(argument_spec=fields,
                           mutually_exclusive=[
                               ['template_id', 'template_name', 'instance_ids'],
                               ['template_id', 'template_name', 'disk_saveas'],
                               ['instance_ids', 'count_attributes', 'count'],
                               ['instance_ids', 'count_labels', 'count'],
                               ['instance_ids', 'exact_count'],
                               ['instance_ids', 'attributes'],
                               ['instance_ids', 'labels'],
                               ['disk_saveas', 'attributes'],
                               ['disk_saveas', 'labels'],
                               ['exact_count', 'count'],
                               ['count', 'hard'],
                               ['instance_ids', 'cpu'], ['instance_ids', 'vcpu'],
                               ['instance_ids', 'memory'], ['instance_ids', 'disk_size'],
                               ['instance_ids', 'networks']
                           ],
                           supports_check_mode=True)

    if not HAS_OCA:
        module.fail_json(msg='This module requires python-oca to work!')

    auth = get_connection_info(module)
    params = module.params
    instance_ids = params.get('instance_ids')
    requested_template_name = params.get('template_name')
    requested_template_id = params.get('template_id')
    state = params.get('state')
    permissions = params.get('mode')
    owner_id = params.get('owner_id')
    group_id = params.get('group_id')
    wait = params.get('wait')
    wait_timeout = params.get('wait_timeout')
    hard = params.get('hard')
    memory = params.get('memory')
    cpu = params.get('cpu')
    vcpu = params.get('vcpu')
    disk_size = params.get('disk_size')
    networks = params.get('networks')
    count = params.get('count')
    exact_count = params.get('exact_count')
    attributes = params.get('attributes')
    count_attributes = params.get('count_attributes')
    labels = params.get('labels')
    count_labels = params.get('count_labels')
    disk_saveas = params.get('disk_saveas')

    client = oca.Client(auth.username + ':' + auth.password, auth.url)

    # Attribute keys are case-insensitive on the OpenNebula side; normalize
    # them to upper case before validating against the restricted list.
    if attributes:
        attributes = dict((key.upper(), value) for key, value in attributes.items())
        check_attributes(module, attributes)

    if count_attributes:
        count_attributes = dict((key.upper(), value) for key, value in count_attributes.items())
        if not attributes:
            import copy
            module.warn('When you pass `count_attributes` without `attributes` option when deploying, `attributes` option will have same values implicitly.')
            attributes = copy.copy(count_attributes)
        check_attributes(module, count_attributes)

    if count_labels and not labels:
        module.warn('When you pass `count_labels` without `labels` option when deploying, `labels` option will have same values implicitly.')
        labels = count_labels

    # Fetch template
    template_id = None
    if requested_template_id or requested_template_name:
        template_id = get_template_id(module, client, requested_template_id, requested_template_name)
        if not template_id:
            if requested_template_id:
                module.fail_json(msg='There is no template with template_id: ' + str(requested_template_id))
            elif requested_template_name:
                module.fail_json(msg="There is no template with name: " + requested_template_name)

    if exact_count and not template_id:
        module.fail_json(msg='Option `exact_count` needs template_id or template_name')

    if exact_count is not None and not (count_attributes or count_labels):
        module.fail_json(msg='Either `count_attributes` or `count_labels` has to be specified with option `exact_count`.')
    if (count_attributes or count_labels) and exact_count is None:
        module.fail_json(msg='Option `exact_count` has to be specified when either `count_attributes` or `count_labels` is used.')
    if template_id and state != 'present':
        module.fail_json(msg="Only state 'present' is valid for the template")

    # Hardware options are folded into the template attributes for deployment.
    if memory:
        attributes['MEMORY'] = str(int(get_size_in_MB(module, memory)))
    if cpu:
        attributes['CPU'] = str(cpu)
    if vcpu:
        attributes['VCPU'] = str(vcpu)

    if exact_count is not None and state != 'present':
        module.fail_json(msg='The `exact_count` option is valid only for the `present` state')
    if exact_count is not None and exact_count < 0:
        module.fail_json(msg='`exact_count` cannot be less than 0')
    if count <= 0:
        # Typo fixed: "grater" -> "greater".
        module.fail_json(msg='`count` has to be greater than 0')

    if permissions is not None:
        import re
        # Permissions must be a 3-digit octal string, e.g. "600".
        if re.match("^[0-7]{3}$", permissions) is None:
            module.fail_json(msg="Option `mode` has to have exactly 3 digits and be in the octet format e.g. 600")

    if exact_count is not None:
        # Deploy an exact count of VMs
        changed, instances_list, tagged_instances_list = create_exact_count_of_vms(module, client, template_id, exact_count, attributes,
                                                                                   count_attributes, labels, count_labels, disk_size,
                                                                                   networks, hard, wait, wait_timeout)
        vms = tagged_instances_list
    elif template_id and state == 'present':
        # Deploy count VMs
        changed, instances_list, tagged_instances_list = create_count_of_vms(module, client, template_id, count,
                                                                             attributes, labels, disk_size, networks, wait, wait_timeout)
        # instances_list - new instances
        # tagged_instances_list - all instances with specified `count_attributes` and `count_labels`
        vms = instances_list
    else:
        # Fetch data of instances, or change their state
        if not (instance_ids or attributes or labels):
            module.fail_json(msg="At least one of `instance_ids`,`attributes`,`labels` must be passed!")

        if memory or cpu or vcpu or disk_size or networks:
            module.fail_json(msg="Parameters as `memory`, `cpu`, `vcpu`, `disk_size` and `networks` you can only set when deploying a VM!")

        if hard and state not in ['rebooted', 'poweredoff', 'absent', 'present']:
            module.fail_json(msg="The 'hard' option can be used only for one of these states: 'rebooted', 'poweredoff', 'absent' and 'present'")

        vms = []
        tagged = False
        changed = False

        if instance_ids:
            vms = get_vms_by_ids(module, client, state, instance_ids)
        else:
            tagged = True
            vms = get_all_vms_by_attributes(client, attributes, labels)

        if len(vms) == 0 and state != 'absent' and state != 'present':
            module.fail_json(msg='There are no instances with specified `instance_ids`, `attributes` and/or `labels`')

        if len(vms) == 0 and state == 'present' and not tagged:
            module.fail_json(msg='There are no instances with specified `instance_ids`.')

        if tagged and state == 'absent':
            module.fail_json(msg='Option `instance_ids` is required when state is `absent`.')

        if state == 'absent':
            changed = terminate_vms(module, client, vms, hard)
        elif state == 'rebooted':
            changed = reboot_vms(module, client, vms, wait_timeout, hard)
        elif state == 'poweredoff':
            changed = poweroff_vms(module, client, vms, hard)
        elif state == 'running':
            changed = resume_vms(module, client, vms)

        instances_list = vms
        tagged_instances_list = []

    if permissions is not None:
        changed = set_vm_permissions(module, client, vms, permissions) or changed

    if owner_id is not None or group_id is not None:
        changed = set_vm_ownership(module, client, vms, owner_id, group_id) or changed

    # Optionally block until every touched VM reaches the state we asked for.
    if wait and not module.check_mode and state != 'present':
        wait_for = {
            'absent': wait_for_done,
            'rebooted': wait_for_running,
            'poweredoff': wait_for_poweroff,
            'running': wait_for_running
        }
        for vm in vms:
            if vm is not None:
                wait_for[state](module, vm, wait_timeout)

    if disk_saveas is not None:
        if len(vms) == 0:
            module.fail_json(msg="There is no VM whose disk will be saved.")
        disk_save_as(module, client, vms[0], disk_saveas, wait_timeout)
        changed = True

    # instances - a list of instances info whose state is changed or which are fetched with C(instance_ids) option
    instances = list(get_vm_info(client, vm) for vm in instances_list if vm is not None)
    instances_ids = list(vm.id for vm in instances_list if vm is not None)
    # tagged_instances - A list of instances info based on a specific attributes and/or labels that are specified with C(count_attributes) and C(count_labels)
    tagged_instances = list(get_vm_info(client, vm) for vm in tagged_instances_list if vm is not None)

    result = {'changed': changed, 'instances': instances, 'instances_ids': instances_ids, 'tagged_instances': tagged_instances}

    module.exit_json(**result)


if __name__ == '__main__':
    main()
| gpl-3.0 |
jn7163/django | django/contrib/gis/gdal/datasource.py | 357 | 4777 | """
DataSource is a wrapper for the OGR Data Source object, which provides
an interface for reading vector geometry data from many different file
formats (including ESRI shapefiles).
When instantiating a DataSource object, use the filename of a
GDAL-supported data source. For example, a SHP file or a
TIGER/Line file from the government.
The ds_driver keyword is used internally when a ctypes pointer
is passed in directly.
Example:
ds = DataSource('/home/foo/bar.shp')
for layer in ds:
for feature in layer:
# Getting the geometry for the feature.
g = feature.geom
# Getting the 'description' field for the feature.
desc = feature['description']
# We can also increment through all of the fields
# attached to this feature.
for field in feature:
# Get the name of the field (e.g. 'description')
nm = field.name
# Get the type (integer) of the field, e.g. 0 => OFTInteger
t = field.type
# Returns the value the field; OFTIntegers return ints,
# OFTReal returns floats, all else returns string.
val = field.value
"""
from ctypes import byref
from django.contrib.gis.gdal.base import GDALBase
from django.contrib.gis.gdal.driver import Driver
from django.contrib.gis.gdal.error import GDALException, OGRIndexError
from django.contrib.gis.gdal.layer import Layer
from django.contrib.gis.gdal.prototypes import ds as capi
from django.utils import six
from django.utils.encoding import force_bytes, force_text
from django.utils.six.moves import range
# For more information, see the OGR C API source code:
# http://www.gdal.org/ogr/ogr__api_8h.html
#
# The OGR_DS_* routines are relevant here.
class DataSource(GDALBase):
    "Wraps an OGR Data Source object."

    def __init__(self, ds_input, ds_driver=False, write=False, encoding='utf-8'):
        # The write flag (OGR expects an int, not a bool).
        if write:
            self._write = 1
        else:
            self._write = 0
        # See also http://trac.osgeo.org/gdal/wiki/rfc23_ogr_unicode
        self.encoding = encoding

        # Make sure the OGR drivers are registered before opening anything.
        Driver.ensure_registered()

        if isinstance(ds_input, six.string_types):
            # The data source driver is a void pointer.
            ds_driver = Driver.ptr_type()
            try:
                # OGROpen will auto-detect the data source type.
                ds = capi.open_ds(force_bytes(ds_input), self._write, byref(ds_driver))
            except GDALException:
                # Making the error message more clear rather than something
                # like "Invalid pointer returned from OGROpen".
                raise GDALException('Could not open the datasource at "%s"' % ds_input)
        elif isinstance(ds_input, self.ptr_type) and isinstance(ds_driver, Driver.ptr_type):
            # A ctypes pointer was passed in directly (internal use).
            ds = ds_input
        else:
            raise GDALException('Invalid data source input type: %s' % type(ds_input))

        if ds:
            self.ptr = ds
            self.driver = Driver(ds_driver)
        else:
            # Raise an exception if the returned pointer is NULL
            raise GDALException('Invalid data source file "%s"' % ds_input)

    def __del__(self):
        "Destroys this DataStructure object."
        # The `capi` check guards against module teardown during interpreter
        # shutdown, when the capi module globals may already be None.
        if self._ptr and capi:
            capi.destroy_ds(self._ptr)

    def __iter__(self):
        "Allows for iteration over the layers in a data source."
        for i in range(self.layer_count):
            yield self[i]

    def __getitem__(self, index):
        "Allows use of the index [] operator to get a layer at the index."
        if isinstance(index, six.string_types):
            # Lookup by layer name.
            l = capi.get_layer_by_name(self.ptr, force_bytes(index))
            if not l:
                raise OGRIndexError('invalid OGR Layer name given: "%s"' % index)
        elif isinstance(index, int):
            # Lookup by position; negative indices are not supported.
            if index < 0 or index >= self.layer_count:
                raise OGRIndexError('index out of range')
            l = capi.get_layer(self._ptr, index)
        else:
            raise TypeError('Invalid index type: %s' % type(index))
        return Layer(l, self)

    def __len__(self):
        "Returns the number of layers within the data source."
        return self.layer_count

    def __str__(self):
        "Returns OGR GetName and Driver for the Data Source."
        return '%s (%s)' % (self.name, str(self.driver))

    @property
    def layer_count(self):
        "Returns the number of layers in the data source."
        return capi.get_layer_count(self._ptr)

    @property
    def name(self):
        "Returns the name of the data source."
        name = capi.get_ds_name(self._ptr)
        return force_text(name, self.encoding, strings_only=True)
| bsd-3-clause |
somehume/namebench | nb_third_party/dns/rdtypes/ANY/NXT.py | 248 | 3725 | # Copyright (C) 2003-2007, 2009, 2010 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import dns.exception
import dns.rdata
import dns.rdatatype
import dns.name
class NXT(dns.rdata.Rdata):
    """NXT record

    @ivar next: the next name
    @type next: dns.name.Name object
    @ivar bitmap: the type bitmap
    @type bitmap: string
    @see: RFC 2535"""

    __slots__ = ['next', 'bitmap']

    def __init__(self, rdclass, rdtype, next, bitmap):
        super(NXT, self).__init__(rdclass, rdtype)
        self.next = next
        self.bitmap = bitmap

    def to_text(self, origin=None, relativize=True, **kw):
        """Render as "<next-name> <TYPE1> <TYPE2> ..." by expanding every
        set bit of the bitmap into its rdatatype mnemonic."""
        next = self.next.choose_relativity(origin, relativize)
        bits = []
        for i in xrange(0, len(self.bitmap)):
            byte = ord(self.bitmap[i])
            for j in xrange(0, 8):
                # Bit 0 of each byte is the most significant (0x80).
                if byte & (0x80 >> j):
                    bits.append(dns.rdatatype.to_text(i * 8 + j))
        text = ' '.join(bits)
        return '%s %s' % (next, text)

    def from_text(cls, rdclass, rdtype, tok, origin = None, relativize = True):
        """Parse "<next-name> <type> ..." tokens into an NXT rdata.

        The bitmap starts as 16 zero bytes (128 bits), one bit per rdatatype;
        only types 1..127 are representable, per the checks below."""
        next = tok.get_name()
        next = next.choose_relativity(origin, relativize)
        bitmap = ['\x00', '\x00', '\x00', '\x00',
                  '\x00', '\x00', '\x00', '\x00',
                  '\x00', '\x00', '\x00', '\x00',
                  '\x00', '\x00', '\x00', '\x00' ]
        while 1:
            token = tok.get().unescape()
            if token.is_eol_or_eof():
                break
            # Types may be given numerically or by mnemonic.
            if token.value.isdigit():
                nrdtype = int(token.value)
            else:
                nrdtype = dns.rdatatype.from_text(token.value)
            if nrdtype == 0:
                raise dns.exception.SyntaxError("NXT with bit 0")
            if nrdtype > 127:
                raise dns.exception.SyntaxError("NXT with bit > 127")
            i = nrdtype // 8
            bitmap[i] = chr(ord(bitmap[i]) | (0x80 >> (nrdtype % 8)))
        # Drop trailing all-zero bytes from the bitmap.
        bitmap = dns.rdata._truncate_bitmap(bitmap)
        return cls(rdclass, rdtype, next, bitmap)

    from_text = classmethod(from_text)

    def to_wire(self, file, compress = None, origin = None):
        # NXT names are never compressed on the wire (compress arg is None).
        self.next.to_wire(file, None, origin)
        file.write(self.bitmap)

    def to_digestable(self, origin = None):
        return self.next.to_digestable(origin) + self.bitmap

    def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin = None):
        # The next-name occupies the first `cused` bytes; everything that
        # remains within rdlen is the type bitmap.
        (next, cused) = dns.name.from_wire(wire[: current + rdlen], current)
        current += cused
        rdlen -= cused
        bitmap = wire[current : current + rdlen]
        if not origin is None:
            next = next.relativize(origin)
        return cls(rdclass, rdtype, next, bitmap)

    from_wire = classmethod(from_wire)

    def choose_relativity(self, origin = None, relativize = True):
        self.next = self.next.choose_relativity(origin, relativize)

    def _cmp(self, other):
        # Compare by next-name first, then by bitmap (Python 2 cmp protocol).
        v = cmp(self.next, other.next)
        if v == 0:
            v = cmp(self.bitmap, other.bitmap)
        return v
| apache-2.0 |
chjw8016/GreenOdoo7-haibao | python/Scripts/rst2odt_prepstyles.py | 1 | 1704 | #!X:\Python27\python.exe
# $Id: rst2odt_prepstyles.py 5839 2009-01-07 19:09:28Z dkuhlman $
# Author: Dave Kuhlman <[email protected]>
# Copyright: This module has been placed in the public domain.
"""
Fix a word-processor-generated styles.odt for odtwriter use: Drop page size
specifications from styles.xml in STYLE_FILE.odt.
"""
#
# Author: Michael Schutte <[email protected]>
from lxml import etree
import sys
import zipfile
from tempfile import mkstemp
import shutil
import os
# XML namespace URIs used when querying styles.xml:
#   "style" - ODF style elements (style:page-layout-properties, ...)
#   "fo"    - XSL-FO compatible formatting attributes (the ones we strip)
NAMESPACES = {
    "style": "urn:oasis:names:tc:opendocument:xmlns:style:1.0",
    "fo": "urn:oasis:names:tc:opendocument:xmlns:xsl-fo-compatible:1.0"
}
def prepstyle(filename):
    """Strip page-size (fo:*) properties from styles.xml inside an ODT file.

    The archive at *filename* is rewritten in place: a new zip is built with
    the cleaned styles.xml (all other members copied verbatim) and then
    moved over the original.
    """
    zin = zipfile.ZipFile(filename)
    styles = zin.read("styles.xml")
    root = etree.fromstring(styles)
    for el in root.xpath("//style:page-layout-properties",
                         namespaces=NAMESPACES):
        # Iterate over a snapshot of the attribute names: deleting from
        # el.attrib while iterating it directly is unsafe.
        for attr in list(el.attrib):
            if attr.startswith("{%s}" % NAMESPACES["fo"]):
                del el.attrib[attr]
    tempname = mkstemp()
    # The temp file must be opened in *binary* mode; text mode ("w") corrupts
    # the zip archive on platforms that translate line endings.
    zout = zipfile.ZipFile(os.fdopen(tempname[0], "wb"), "w",
                           zipfile.ZIP_DEFLATED)
    for item in zin.infolist():
        if item.filename == "styles.xml":
            zout.writestr(item, etree.tostring(root))
        else:
            zout.writestr(item, zin.read(item.filename))
    zout.close()
    zin.close()
    shutil.move(tempname[1], filename)
def main():
    """Command-line entry point: expect exactly one ODT filename argument
    and strip the page-size properties from its styles.xml."""
    args = sys.argv[1:]
    if len(args) != 1:
        # Wrong usage: show the module docstring and a usage line, then bail.
        print >> sys.stderr, __doc__
        print >> sys.stderr, "Usage: %s STYLE_FILE.odt\n" % sys.argv[0]
        sys.exit(1)
    prepstyle(args[0])


if __name__ == '__main__':
    main()
# vim:tw=78:sw=4:sts=4:et:
| mit |
airbnb/caravel | tests/sql_parse_tests.py | 1 | 15978 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import unittest
from superset import sql_parse
class SupersetTestCase(unittest.TestCase):
    """Unit tests for superset.sql_parse: table-name extraction from SQL and
    ParsedQuery statement helpers.

    Note: the deprecated ``assertEquals`` alias has been replaced throughout
    with the documented ``assertEqual``.
    """

    def extract_tables(self, query):
        """Return the set of table names referenced by *query*."""
        sq = sql_parse.ParsedQuery(query)
        return sq.tables

    def test_simple_select(self):
        query = 'SELECT * FROM tbname'
        self.assertEqual({'tbname'}, self.extract_tables(query))

        # underscores
        query = 'SELECT * FROM tb_name'
        self.assertEqual({'tb_name'},
                         self.extract_tables(query))

        # quotes
        query = 'SELECT * FROM "tbname"'
        self.assertEqual({'tbname'}, self.extract_tables(query))

        # unicode encoding
        query = 'SELECT * FROM "tb_name" WHERE city = "Lübeck"'
        self.assertEqual({'tb_name'}, self.extract_tables(query))

        # schema
        self.assertEqual(
            {'schemaname.tbname'},
            self.extract_tables('SELECT * FROM schemaname.tbname'))

        # quotes
        query = 'SELECT field1, field2 FROM tb_name'
        self.assertEqual({'tb_name'}, self.extract_tables(query))

        query = 'SELECT t1.f1, t2.f2 FROM t1, t2'
        self.assertEqual({'t1', 't2'}, self.extract_tables(query))

    def test_select_named_table(self):
        query = 'SELECT a.date, a.field FROM left_table a LIMIT 10'
        self.assertEqual(
            {'left_table'}, self.extract_tables(query))

    def test_reverse_select(self):
        query = 'FROM t1 SELECT field'
        self.assertEqual({'t1'}, self.extract_tables(query))

    def test_subselect(self):
        query = """
            SELECT sub.*
            FROM (
                SELECT *
                  FROM s1.t1
                 WHERE day_of_week = 'Friday'
              ) sub, s2.t2
            WHERE sub.resolution = 'NONE'
        """
        self.assertEqual({'s1.t1', 's2.t2'},
                         self.extract_tables(query))

        query = """
            SELECT sub.*
            FROM (
                SELECT *
                  FROM s1.t1
                 WHERE day_of_week = 'Friday'
            ) sub
            WHERE sub.resolution = 'NONE'
        """
        self.assertEqual({'s1.t1'}, self.extract_tables(query))

        query = """
            SELECT * FROM t1
            WHERE s11 > ANY
            (SELECT COUNT(*) /* no hint */ FROM t2
              WHERE NOT EXISTS
                (SELECT * FROM t3
                  WHERE ROW(5*t2.s1,77)=
                    (SELECT 50,11*s1 FROM t4)));
        """
        self.assertEqual({'t1', 't2', 't3', 't4'},
                         self.extract_tables(query))

    def test_select_in_expression(self):
        query = 'SELECT f1, (SELECT count(1) FROM t2) FROM t1'
        self.assertEqual({'t1', 't2'}, self.extract_tables(query))

    def test_union(self):
        query = 'SELECT * FROM t1 UNION SELECT * FROM t2'
        self.assertEqual({'t1', 't2'}, self.extract_tables(query))

        query = 'SELECT * FROM t1 UNION ALL SELECT * FROM t2'
        self.assertEqual({'t1', 't2'}, self.extract_tables(query))

        query = 'SELECT * FROM t1 INTERSECT ALL SELECT * FROM t2'
        self.assertEqual({'t1', 't2'}, self.extract_tables(query))

    def test_select_from_values(self):
        query = 'SELECT * FROM VALUES (13, 42)'
        self.assertFalse(self.extract_tables(query))

    def test_select_array(self):
        query = """
            SELECT ARRAY[1, 2, 3] AS my_array
            FROM t1 LIMIT 10
        """
        self.assertEqual({'t1'}, self.extract_tables(query))

    def test_select_if(self):
        query = """
            SELECT IF(CARDINALITY(my_array) >= 3, my_array[3], NULL)
            FROM t1 LIMIT 10
        """
        self.assertEqual({'t1'}, self.extract_tables(query))

    # SHOW TABLES ((FROM | IN) qualifiedName)? (LIKE pattern=STRING)?
    def test_show_tables(self):
        query = "SHOW TABLES FROM s1 like '%order%'"
        # TODO: figure out what should code do here
        self.assertEqual({'s1'}, self.extract_tables(query))

    # SHOW COLUMNS (FROM | IN) qualifiedName
    def test_show_columns(self):
        query = 'SHOW COLUMNS FROM t1'
        self.assertEqual({'t1'}, self.extract_tables(query))

    def test_where_subquery(self):
        query = """
            SELECT name
            FROM t1
            WHERE regionkey = (SELECT max(regionkey) FROM t2)
        """
        self.assertEqual({'t1', 't2'}, self.extract_tables(query))

        query = """
            SELECT name
            FROM t1
            WHERE regionkey IN (SELECT regionkey FROM t2)
        """
        self.assertEqual({'t1', 't2'}, self.extract_tables(query))

        query = """
            SELECT name
            FROM t1
            WHERE regionkey EXISTS (SELECT regionkey FROM t2)
        """
        self.assertEqual({'t1', 't2'}, self.extract_tables(query))

    # DESCRIBE | DESC qualifiedName
    def test_describe(self):
        self.assertEqual({'t1'}, self.extract_tables('DESCRIBE t1'))

    # SHOW PARTITIONS FROM qualifiedName (WHERE booleanExpression)?
    # (ORDER BY sortItem (',' sortItem)*)? (LIMIT limit=(INTEGER_VALUE | ALL))?
    def test_show_partitions(self):
        query = """
            SHOW PARTITIONS FROM orders
            WHERE ds >= '2013-01-01' ORDER BY ds DESC;
        """
        self.assertEqual({'orders'}, self.extract_tables(query))

    def test_join(self):
        query = 'SELECT t1.*, t2.* FROM t1 JOIN t2 ON t1.a = t2.a;'
        self.assertEqual({'t1', 't2'}, self.extract_tables(query))

        # subquery + join
        query = """
            SELECT a.date, b.name FROM
                left_table a
                JOIN (
                  SELECT
                    CAST((b.year) as VARCHAR) date,
                    name
                  FROM right_table
                ) b
                ON a.date = b.date
        """
        self.assertEqual({'left_table', 'right_table'},
                         self.extract_tables(query))

        query = """
            SELECT a.date, b.name FROM
                left_table a
                LEFT INNER JOIN (
                  SELECT
                    CAST((b.year) as VARCHAR) date,
                    name
                  FROM right_table
                ) b
                ON a.date = b.date
        """
        self.assertEqual({'left_table', 'right_table'},
                         self.extract_tables(query))

        query = """
            SELECT a.date, b.name FROM
                left_table a
                RIGHT OUTER JOIN (
                  SELECT
                    CAST((b.year) as VARCHAR) date,
                    name
                  FROM right_table
                ) b
                ON a.date = b.date
        """
        self.assertEqual({'left_table', 'right_table'},
                         self.extract_tables(query))

        query = """
            SELECT a.date, b.name FROM
                left_table a
                FULL OUTER JOIN (
                  SELECT
                    CAST((b.year) as VARCHAR) date,
                    name
                  FROM right_table
                ) b
                ON a.date = b.date
        """
        self.assertEqual({'left_table', 'right_table'},
                         self.extract_tables(query))

        # TODO: add SEMI join support, SQL Parse does not handle it.
        # query = """
        #     SELECT a.date, b.name FROM
        #         left_table a
        #         LEFT SEMI JOIN (
        #           SELECT
        #             CAST((b.year) as VARCHAR) date,
        #             name
        #           FROM right_table
        #         ) b
        #         ON a.date = b.date
        # """
        # self.assertEqual({'left_table', 'right_table'},
        #                  sql_parse.extract_tables(query))

    def test_combinations(self):
        query = """
            SELECT * FROM t1
            WHERE s11 > ANY
             (SELECT * FROM t1 UNION ALL SELECT * FROM (
               SELECT t6.*, t3.* FROM t6 JOIN t3 ON t6.a = t3.a) tmp_join
              WHERE NOT EXISTS
               (SELECT * FROM t3
                WHERE ROW(5*t3.s1,77)=
                 (SELECT 50,11*s1 FROM t4)));
        """
        self.assertEqual({'t1', 't3', 't4', 't6'},
                         self.extract_tables(query))

        query = """
        SELECT * FROM (SELECT * FROM (SELECT * FROM (SELECT * FROM EmployeeS)
            AS S1) AS S2) AS S3;
        """
        self.assertEqual({'EmployeeS'}, self.extract_tables(query))

    def test_with(self):
        query = """
            WITH
              x AS (SELECT a FROM t1),
              y AS (SELECT a AS b FROM t2),
              z AS (SELECT b AS c FROM t3)
            SELECT c FROM z;
        """
        self.assertEqual({'t1', 't2', 't3'},
                         self.extract_tables(query))

        query = """
            WITH
              x AS (SELECT a FROM t1),
              y AS (SELECT a AS b FROM x),
              z AS (SELECT b AS c FROM y)
            SELECT c FROM z;
        """
        self.assertEqual({'t1'}, self.extract_tables(query))

    def test_reusing_aliases(self):
        query = """
            with q1 as ( select key from q2 where key = '5'),
            q2 as ( select key from src where key = '5')
            select * from (select key from q1) a;
        """
        self.assertEqual({'src'}, self.extract_tables(query))

    def test_multistatement(self):
        query = 'SELECT * FROM t1; SELECT * FROM t2'
        self.assertEqual({'t1', 't2'}, self.extract_tables(query))

        query = 'SELECT * FROM t1; SELECT * FROM t2;'
        self.assertEqual({'t1', 't2'}, self.extract_tables(query))

    def test_update_not_select(self):
        sql = sql_parse.ParsedQuery('UPDATE t1 SET col1 = NULL')
        self.assertEqual(False, sql.is_select())
        self.assertEqual(False, sql.is_readonly())

    def test_explain(self):
        sql = sql_parse.ParsedQuery('EXPLAIN SELECT 1')
        self.assertEqual(True, sql.is_explain())
        self.assertEqual(False, sql.is_select())
        self.assertEqual(True, sql.is_readonly())

    def test_complex_extract_tables(self):
        query = """SELECT sum(m_examples) AS "sum__m_example"
            FROM
              (SELECT COUNT(DISTINCT id_userid) AS m_examples,
                      some_more_info
               FROM my_b_table b
               JOIN my_t_table t ON b.ds=t.ds
               JOIN my_l_table l ON b.uid=l.uid
               WHERE b.rid IN
                   (SELECT other_col
                    FROM inner_table)
                 AND l.bla IN ('x', 'y')
               GROUP BY 2
               ORDER BY 2 ASC) AS "meh"
            ORDER BY "sum__m_example" DESC
            LIMIT 10;"""
        self.assertEqual(
            {'my_l_table', 'my_b_table', 'my_t_table', 'inner_table'},
            self.extract_tables(query))

    def test_complex_extract_tables2(self):
        query = """SELECT *
            FROM table_a AS a, table_b AS b, table_c as c
            WHERE a.id = b.id and b.id = c.id"""
        self.assertEqual(
            {'table_a', 'table_b', 'table_c'},
            self.extract_tables(query))

    def test_mixed_from_clause(self):
        query = """SELECT *
            FROM table_a AS a, (select * from table_b) AS b, table_c as c
            WHERE a.id = b.id and b.id = c.id"""
        self.assertEqual(
            {'table_a', 'table_b', 'table_c'},
            self.extract_tables(query))

    def test_nested_selects(self):
        query = """
            select (extractvalue(1,concat(0x7e,(select GROUP_CONCAT(TABLE_NAME)
            from INFORMATION_SCHEMA.COLUMNS
            WHERE TABLE_SCHEMA like "%bi%"),0x7e)));
        """
        self.assertEqual(
            {'INFORMATION_SCHEMA.COLUMNS'},
            self.extract_tables(query))

        query = """
            select (extractvalue(1,concat(0x7e,(select GROUP_CONCAT(COLUMN_NAME)
            from INFORMATION_SCHEMA.COLUMNS
            WHERE TABLE_NAME="bi_achivement_daily"),0x7e)));
        """
        self.assertEqual(
            {'INFORMATION_SCHEMA.COLUMNS'},
            self.extract_tables(query))

    def test_complex_extract_tables3(self):
        query = """SELECT somecol AS somecol
            FROM
              (WITH bla AS
                 (SELECT col_a
                  FROM a
                  WHERE 1=1
                    AND column_of_choice NOT IN
                      ( SELECT interesting_col
                       FROM b ) ),
                    rb AS
                 ( SELECT yet_another_column
                  FROM
                    ( SELECT a
                     FROM c
                     GROUP BY the_other_col ) not_table
                  LEFT JOIN bla foo ON foo.prop = not_table.bad_col0
                  WHERE 1=1
                  GROUP BY not_table.bad_col1 ,
                           not_table.bad_col2 ,
                  ORDER BY not_table.bad_col_3 DESC , not_table.bad_col4 ,
                           not_table.bad_col5) SELECT random_col
               FROM d
               WHERE 1=1
               UNION ALL SELECT even_more_cols
               FROM e
               WHERE 1=1
               UNION ALL SELECT lets_go_deeper
               FROM f
               WHERE 1=1
               WHERE 2=2
               GROUP BY last_col
               LIMIT 50000;"""
        self.assertEqual(
            {'a', 'b', 'c', 'd', 'e', 'f'},
            self.extract_tables(query))

    def test_complex_cte_with_prefix(self):
        query = """
            WITH CTE__test (SalesPersonID, SalesOrderID, SalesYear)
            AS (
                SELECT SalesPersonID, SalesOrderID, YEAR(OrderDate) AS SalesYear
                FROM SalesOrderHeader
                WHERE SalesPersonID IS NOT NULL
            )
            SELECT SalesPersonID, COUNT(SalesOrderID) AS TotalSales, SalesYear
            FROM CTE__test
            GROUP BY SalesYear, SalesPersonID
            ORDER BY SalesPersonID, SalesYear;
        """
        self.assertEqual({'SalesOrderHeader'}, self.extract_tables(query))

    def test_basic_breakdown_statements(self):
        multi_sql = """
        SELECT * FROM ab_user;
        SELECT * FROM ab_user LIMIT 1;
        """
        parsed = sql_parse.ParsedQuery(multi_sql)
        statements = parsed.get_statements()
        self.assertEqual(len(statements), 2)
        expected = [
            'SELECT * FROM ab_user',
            'SELECT * FROM ab_user LIMIT 1',
        ]
        self.assertEqual(statements, expected)

    def test_messy_breakdown_statements(self):
        multi_sql = """
        SELECT 1;\t\n\n\n  \t
        \t\nSELECT 2;
        SELECT * FROM ab_user;;;
        SELECT * FROM ab_user LIMIT 1
        """
        parsed = sql_parse.ParsedQuery(multi_sql)
        statements = parsed.get_statements()
        self.assertEqual(len(statements), 4)
        expected = [
            'SELECT 1',
            'SELECT 2',
            'SELECT * FROM ab_user',
            'SELECT * FROM ab_user LIMIT 1',
        ]
        self.assertEqual(statements, expected)
| apache-2.0 |
def-/commandergenius | project/jni/python/src/Lib/lib2to3/tests/test_refactor.py | 49 | 5263 | """
Unit tests for refactor.py.
"""
import sys
import os
import operator
import StringIO
import tempfile
import unittest
from lib2to3 import refactor, pygram, fixer_base
from . import support
# Directory holding the test fixer packages used by these tests.
FIXER_DIR = os.path.join(os.path.dirname(__file__), "data/fixers")

# Temporarily put the fixer directory on sys.path so the "myfixes" package
# can be imported; the finally clause restores sys.path even if the import
# fails, so a broken fixture cannot leak path entries into other tests.
sys.path.append(FIXER_DIR)
try:
    _DEFAULT_FIXERS = refactor.get_fixers_from_package("myfixes")
finally:
    sys.path.pop()
class TestRefactoringTool(unittest.TestCase):
def setUp(self):
sys.path.append(FIXER_DIR)
def tearDown(self):
sys.path.pop()
def check_instances(self, instances, classes):
for inst, cls in zip(instances, classes):
if not isinstance(inst, cls):
self.fail("%s are not instances of %s" % instances, classes)
def rt(self, options=None, fixers=_DEFAULT_FIXERS, explicit=None):
return refactor.RefactoringTool(fixers, options, explicit)
def test_print_function_option(self):
gram = pygram.python_grammar
save = gram.keywords["print"]
try:
rt = self.rt({"print_function" : True})
self.assertRaises(KeyError, operator.itemgetter("print"),
gram.keywords)
finally:
gram.keywords["print"] = save
def test_fixer_loading_helpers(self):
contents = ["explicit", "first", "last", "parrot", "preorder"]
non_prefixed = refactor.get_all_fix_names("myfixes")
prefixed = refactor.get_all_fix_names("myfixes", False)
full_names = refactor.get_fixers_from_package("myfixes")
self.assertEqual(prefixed, ["fix_" + name for name in contents])
self.assertEqual(non_prefixed, contents)
self.assertEqual(full_names,
["myfixes.fix_" + name for name in contents])
def test_get_headnode_dict(self):
class NoneFix(fixer_base.BaseFix):
PATTERN = None
class FileInputFix(fixer_base.BaseFix):
PATTERN = "file_input< any * >"
no_head = NoneFix({}, [])
with_head = FileInputFix({}, [])
d = refactor.get_headnode_dict([no_head, with_head])
expected = {None: [no_head],
pygram.python_symbols.file_input : [with_head]}
self.assertEqual(d, expected)
def test_fixer_loading(self):
from myfixes.fix_first import FixFirst
from myfixes.fix_last import FixLast
from myfixes.fix_parrot import FixParrot
from myfixes.fix_preorder import FixPreorder
rt = self.rt()
pre, post = rt.get_fixers()
self.check_instances(pre, [FixPreorder])
self.check_instances(post, [FixFirst, FixParrot, FixLast])
def test_naughty_fixers(self):
self.assertRaises(ImportError, self.rt, fixers=["not_here"])
self.assertRaises(refactor.FixerError, self.rt, fixers=["no_fixer_cls"])
self.assertRaises(refactor.FixerError, self.rt, fixers=["bad_order"])
def test_refactor_string(self):
rt = self.rt()
input = "def parrot(): pass\n\n"
tree = rt.refactor_string(input, "<test>")
self.assertNotEqual(str(tree), input)
input = "def f(): pass\n\n"
tree = rt.refactor_string(input, "<test>")
self.assertEqual(str(tree), input)
    def test_refactor_stdin(self):
        """refactor_stdin() reads source from stdin and emits a unified diff."""
        class MyRT(refactor.RefactoringTool):
            def print_output(self, lines):
                # Capture the diff instead of printing it.
                diff_lines.extend(lines)
        diff_lines = []
        rt = MyRT(_DEFAULT_FIXERS)
        save = sys.stdin
        sys.stdin = StringIO.StringIO("def parrot(): pass\n\n")
        try:
            rt.refactor_stdin()
        finally:
            # Always restore the real stdin for later tests.
            sys.stdin = save
        expected = """--- <stdin> (original)
+++ <stdin> (refactored)
@@ -1,2 +1,2 @@
-def parrot(): pass
+def cheese(): pass""".splitlines()
        # NOTE(review): the final captured line is dropped from the
        # comparison -- presumably a trailing blank/context line; confirm.
        self.assertEqual(diff_lines[:-1], expected)
def test_refactor_file(self):
test_file = os.path.join(FIXER_DIR, "parrot_example.py")
old_contents = open(test_file, "r").read()
rt = self.rt()
rt.refactor_file(test_file)
self.assertEqual(old_contents, open(test_file, "r").read())
rt.refactor_file(test_file, True)
try:
self.assertNotEqual(old_contents, open(test_file, "r").read())
finally:
open(test_file, "w").write(old_contents)
    def test_refactor_docstring(self):
        """Doctest examples inside docstrings are refactored; non-matching ones are not."""
        rt = self.rt()
        # This docstring's doctest has nothing for the fixers to change...
        def example():
            """
            >>> example()
            42
            """
        out = rt.refactor_docstring(example.__doc__, "<test>")
        self.assertEqual(out, example.__doc__)
        # ...while this one defines parrot(), which the parrot fixer rewrites.
        def parrot():
            """
            >>> def parrot():
            ...     return 43
            """
        out = rt.refactor_docstring(parrot.__doc__, "<test>")
        self.assertNotEqual(out, parrot.__doc__)
    def test_explicit(self):
        """Explicit fixers load only when requested by name via `explicit`."""
        from myfixes.fix_explicit import FixExplicit
        # Listing the fixer under `fixers` alone must not activate it...
        rt = self.rt(fixers=["myfixes.fix_explicit"])
        self.assertEqual(len(rt.post_order), 0)
        # ...but naming it in `explicit` must load it.
        rt = self.rt(explicit=["myfixes.fix_explicit"])
        for fix in rt.post_order:
            if isinstance(fix, FixExplicit):
                break
        else:
            self.fail("explicit fixer not loaded")
| lgpl-2.1 |
grahesh/Stock-Market-Event-Analysis | epydoc-3.0.1/epydoc/markup/javadoc.py | 100 | 10031 | #
# javadoc.py: javadoc docstring parsing
# Edward Loper
#
# Created [07/03/03 12:37 PM]
# $Id: javadoc.py 1574 2007-03-07 02:55:14Z dvarrazzo $
#
"""
Epydoc parser for U{Javadoc<http://java.sun.com/j2se/javadoc/>}
docstrings. Javadoc is an HTML-based markup language that was
developed for documenting Java APIs with inline comments. It consists
of raw HTML, augmented by Javadoc tags. There are two types of
Javadoc tag:
- X{Javadoc block tags} correspond to Epydoc fields. They are
marked by starting a line with a string of the form \"C{@M{tag}
[M{arg}]}\", where C{M{tag}} indicates the type of block, and
C{M{arg}} is an optional argument. (For fields that take
arguments, Javadoc assumes that the single word immediately
following the tag is an argument; multi-word arguments cannot be
used with javadoc.)
- X{inline Javadoc tags} are used for inline markup. In particular,
epydoc uses them for crossreference links between documentation.
Inline tags may appear anywhere in the text, and have the form
\"C{{@M{tag} M{[args...]}}}\", where C{M{tag}} indicates the
type of inline markup, and C{M{args}} are optional arguments.
Epydoc supports all Javadoc tags, I{except}:
- C{{@docRoot}}, which gives the (relative) URL of the generated
documentation's root.
- C{{@inheritDoc}}, which copies the documentation of the nearest
overridden object. This can be used to combine the documentation
of the overridden object with the documentation of the
overridding object.
- C{@serial}, C{@serialField}, and C{@serialData} which describe the
serialization (pickling) of an object.
- C{{@value}}, which copies the value of a constant.
@warning: Epydoc only supports HTML output for Javadoc docstrings.
"""
__docformat__ = 'epytext en'
# Imports
import re
from xml.dom.minidom import *
from epydoc.markup import *
def parse_docstring(docstring, errors, **options):
    """
    Build a C{ParsedDocstring} from a Javadoc-formatted docstring.

    @param docstring: The docstring to parse
    @type docstring: C{string}
    @param errors: A list where any errors generated during parsing
        will be stored.
    @type errors: C{list} of L{ParseError}
    @param options: Extra options. Unknown options are ignored.
        Currently, no extra options are defined.
    @rtype: L{ParsedDocstring}
    """
    parsed = ParsedJavadocDocstring(docstring, errors)
    return parsed
class ParsedJavadocDocstring(ParsedDocstring):
    """
    An encoded version of a Javadoc docstring.  Since Javadoc is a
    fairly simple markup language, we don't do any processing in
    advance; instead, we wait to split fields or resolve
    crossreference links until we need to.

    @group Field Splitting: split_fields, _ARG_FIELDS, _FIELD_RE
    @cvar _ARG_FIELDS: A list of the fields that take arguments.
        Since Javadoc doesn't mark arguments in any special way, we
        must consult this list to decide whether the first word of a
        field is an argument or not.
    @cvar _FIELD_RE: A regular expression used to search for Javadoc
        block tags.

    @group HTML Output: to_html, _LINK_SPLIT_RE, _LINK_RE
    @cvar _LINK_SPLIT_RE: A regular expression used to search for
        Javadoc inline tags.
    @cvar _LINK_RE: A regular expression used to process Javadoc
        inline tags.
    """

    def __init__(self, docstring, errors=None):
        """
        Create a new C{ParsedJavadocDocstring}.

        @param docstring: The docstring that should be used to
            construct this C{ParsedJavadocDocstring}.
        @type docstring: C{string}
        @param errors: A list where any errors generated during
            parsing will be stored.  If no list is given, then
            all errors are ignored.
        @type errors: C{list} of L{ParseError}
        """
        # Keep the raw docstring; all real processing is deferred until
        # split_fields()/to_html() are called.
        self._docstring = docstring
        if errors is None: errors = []
        # Validate {@link ...} tags eagerly so errors surface at parse time.
        self._check_links(errors)

    #////////////////////////////////////////////////////////////
    # Field Splitting
    #////////////////////////////////////////////////////////////

    _ARG_FIELDS = ('group variable var type cvariable cvar ivariable '+
                   'ivar param '+
                   'parameter arg argument raise raises exception '+
                   'except deffield newfield keyword kwarg kwparam').split()
    # NOTE(review): the character class [\s$] matches whitespace or a
    # literal "$", not end-of-string; a tag with no trailing whitespace at
    # the very end of the docstring is therefore not matched -- confirm
    # whether that is intentional.
    _FIELD_RE = re.compile(r'(^\s*\@\w+[\s$])', re.MULTILINE)

    # Inherit docs from ParsedDocstring.
    def split_fields(self, errors=None):
        # Split the docstring into an alternating list of field tags
        # and text (odd pieces are field tags).
        pieces = self._FIELD_RE.split(self._docstring)

        # The first piece is the description.
        descr = ParsedJavadocDocstring(pieces[0])

        # The remaining pieces are the block fields (alternating tags
        # and bodies; odd pieces are tags).
        fields = []
        for i in range(1, len(pieces)):
            if i%2 == 1:
                # Get the field tag.
                tag = pieces[i].strip()[1:]
            else:
                # Get the field argument (if appropriate).
                if tag in self._ARG_FIELDS:
                    # Pad with empty strings so the unpack below is safe
                    # even when the body is empty.
                    subpieces = pieces[i].strip().split(None, 1)+['','']
                    (arg, body) = subpieces[:2]
                else:
                    (arg, body) = (None, pieces[i])

                # Special processing for @see fields, since Epydoc
                # allows unrestricted text in them, but Javadoc just
                # uses them for xref links:
                if tag == 'see' and body:
                    if body[0] in '"\'':
                        if body[-1] == body[0]: body = body[1:-1]
                    elif body[0] == '<': pass
                    else: body = '{@link %s}' % body

                # Construct the field.
                parsed_body = ParsedJavadocDocstring(body)
                fields.append(Field(tag, arg, parsed_body))

        if pieces[0].strip():
            return (descr, fields)
        else:
            return (None, fields)

    #////////////////////////////////////////////////////////////
    # HTML Output.
    #////////////////////////////////////////////////////////////

    _LINK_SPLIT_RE = re.compile(r'({@link(?:plain)?\s[^}]+})')
    _LINK_RE = re.compile(r'{@link(?:plain)?\s+' + r'([\w#.]+)' +
                          r'(?:\([^\)]*\))?' + r'(\s+.*)?' + r'}')

    # Inherit docs from ParsedDocstring.
    def to_html(self, docstring_linker, **options):
        # Split the docstring into an alternating list of HTML and
        # links (odd pieces are links).
        pieces = self._LINK_SPLIT_RE.split(self._docstring)

        # This function is used to translate {@link ...}s to HTML.
        translate_xref = docstring_linker.translate_identifier_xref

        # Build up the HTML string from the pieces.  For HTML pieces
        # (even), just add it to html.  For link pieces (odd), use
        # docstring_linker to translate the crossreference link to
        # HTML for us.
        html = ''
        for i in range(len(pieces)):
            if i%2 == 0:
                html += pieces[i]
            else:
                # Decompose the link into pieces.
                m = self._LINK_RE.match(pieces[i])
                if m is None: continue # Error flagged by _check_links
                (target, name) = m.groups()

                # Normalize the target name: "#member" -> "member",
                # "Class#member" -> "Class.member", drop any arg list.
                if target[0] == '#': target = target[1:]
                target = target.replace('#', '.')
                target = re.sub(r'\(.*\)', '', target)

                # Provide a name, if it wasn't specified.
                if name is None: name = target
                else: name = name.strip()

                # Use docstring_linker to convert the name to html.
                html += translate_xref(target, name)
        return html

    def _check_links(self, errors):
        """
        Make sure that all {@link}s are valid.  We need a separate
        method for this because we want to do this at parse time, not
        html output time.  Any errors found are appended to C{errors}.
        """
        pieces = self._LINK_SPLIT_RE.split(self._docstring)
        linenum = 0
        for i in range(len(pieces)):
            if i%2 == 1 and not self._LINK_RE.match(pieces[i]):
                estr = 'Bad link %r' % pieces[i]
                errors.append(ParseError(estr, linenum, is_fatal=0))
            # Track the line number by counting newlines in each piece.
            linenum += pieces[i].count('\n')

    #////////////////////////////////////////////////////////////
    # Plaintext Output.
    #////////////////////////////////////////////////////////////

    # Inherit docs from ParsedDocstring.  Since we don't define
    # to_latex, this is used when generating latex output.
    def to_plaintext(self, docstring_linker, **options):
        return self._docstring

    _SUMMARY_RE = re.compile(r'(\s*[\w\W]*?\.)(\s|$)')

    # Jeff's hack to get summary working
    def summary(self):
        # Drop tags
        doc = "\n".join([ row for row in self._docstring.split('\n')
                          if not row.lstrip().startswith('@') ])

        m = self._SUMMARY_RE.match(doc)
        if m:
            other = doc[m.end():]
            # Second element: True when there is more text after the summary.
            return (ParsedJavadocDocstring(m.group(1)),
                    other != '' and not other.isspace())

        else:
            # No complete sentence found: fall back to the first line.
            parts = doc.strip('\n').split('\n', 1)
            if len(parts) == 1:
                summary = parts[0]
                other = False
            else:
                summary = parts[0] + '...'
                other = True

            return ParsedJavadocDocstring(summary), other

#     def concatenate(self, other):
#         if not isinstance(other, ParsedJavadocDocstring):
#             raise ValueError, 'Could not concatenate docstrings'
#         return ParsedJavadocDocstring(self._docstring+other._docstring)
| bsd-3-clause |
retr0h/ansible-modules-core | database/postgresql_db.py | 35 | 10201 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: postgresql_db
short_description: Add or remove PostgreSQL databases from a remote host.
description:
- Add or remove PostgreSQL databases from a remote host.
version_added: "0.6"
options:
name:
description:
- name of the database to add or remove
required: true
default: null
login_user:
description:
- The username used to authenticate with
required: false
default: null
login_password:
description:
- The password used to authenticate with
required: false
default: null
login_host:
description:
- Host running the database
required: false
default: localhost
owner:
description:
- Name of the role to set as owner of the database
required: false
default: null
port:
description:
- Database port to connect to.
required: false
default: 5432
template:
description:
- Template used to create the database
required: false
default: null
  encoding:
    description:
      - Encoding of the database
    required: false
    default: null
lc_collate:
description:
- Collation order (LC_COLLATE) to use in the database. Must match collation order of template database unless C(template0) is used as template.
required: false
default: null
lc_ctype:
description:
- Character classification (LC_CTYPE) to use in the database (e.g. lower, upper, ...) Must match LC_CTYPE of template database unless C(template0) is used as template.
required: false
default: null
state:
description:
- The database state
required: false
default: present
choices: [ "present", "absent" ]
notes:
- The default authentication assumes that you are either logging in as or sudo'ing to the C(postgres) account on the host.
- This module uses I(psycopg2), a Python PostgreSQL database adapter. You must ensure that psycopg2 is installed on
the host before using this module. If the remote host is the PostgreSQL server (which is the default case), then PostgreSQL must also be installed on the remote host. For Ubuntu-based systems, install the C(postgresql), C(libpq-dev), and C(python-psycopg2) packages on the remote host before using this module.
requirements: [ psycopg2 ]
author: Lorin Hochstein
'''
EXAMPLES = '''
# Create a new database with name "acme"
- postgresql_db: name=acme
# Create a new database with name "acme" and specific encoding and locale
# settings. If a template different from "template0" is specified, encoding
# and locale settings must match those of the template.
- postgresql_db: name=acme
encoding='UTF-8'
lc_collate='de_DE.UTF-8'
lc_ctype='de_DE.UTF-8'
template='template0'
'''
# psycopg2 is optional at import time; main() reports a clear failure
# via fail_json() when the driver is missing.
try:
    import psycopg2
    import psycopg2.extras
except ImportError:
    postgresqldb_found = False
else:
    postgresqldb_found = True
class NotSupportedError(Exception):
    """Raised when a requested database change cannot be applied in place."""
# ===========================================
# PostgreSQL module specific support methods.
#
def set_owner(cursor, db, owner):
    """Reassign ownership of database *db* to role *owner*.  Always returns True."""
    cursor.execute('ALTER DATABASE "%s" OWNER TO "%s"' % (db, owner))
    return True
def get_encoding_id(cursor, encoding):
    """Translate an encoding name to PostgreSQL's numeric encoding id."""
    cursor.execute("SELECT pg_char_to_encoding(%(encoding)s) AS encoding_id;",
                   {'encoding': encoding})
    row = cursor.fetchone()
    return row['encoding_id']
def get_db_info(cursor, db):
    """Return owner, encoding (name and id) and locale settings for *db* as one row."""
    query = """
    SELECT rolname AS owner,
    pg_encoding_to_char(encoding) AS encoding, encoding AS encoding_id,
    datcollate AS lc_collate, datctype AS lc_ctype
    FROM pg_database JOIN pg_roles ON pg_roles.oid = pg_database.datdba
    WHERE datname = %(db)s
    """
    cursor.execute(query, {'db':db})
    # Caller relies on a dict-like row (the cursor uses DictCursor).
    return cursor.fetchone()
def db_exists(cursor, db):
    """Return True when a database named *db* already exists."""
    cursor.execute("SELECT * FROM pg_database WHERE datname=%(db)s", {'db': db})
    return cursor.rowcount == 1
def db_delete(cursor, db):
    """Drop database *db*; returns True when a DROP was issued, False otherwise."""
    if not db_exists(cursor, db):
        return False
    cursor.execute('DROP DATABASE "%s"' % db)
    return True
def db_create(cursor, db, owner, template, encoding, lc_collate, lc_ctype):
    """Create database *db*, or reconcile an existing one.

    Returns True when a change was made and False when the database
    already matches the request.  Raises NotSupportedError for settings
    that cannot be altered in place (encoding, LC_COLLATE, LC_CTYPE).

    Fix: the LC_CTYPE error message was missing the space between its
    two implicitly-concatenated string halves ("supported.Current").
    """
    if not db_exists(cursor, db):
        # Rewrite each optional parameter in place to either an SQL
        # fragment or the empty string, then assemble CREATE DATABASE.
        if owner:
            owner = " OWNER \"%s\"" % owner
        if template:
            template = " TEMPLATE \"%s\"" % template
        if encoding:
            encoding = " ENCODING '%s'" % encoding
        if lc_collate:
            lc_collate = " LC_COLLATE '%s'" % lc_collate
        if lc_ctype:
            lc_ctype = " LC_CTYPE '%s'" % lc_ctype
        query = 'CREATE DATABASE "%s"%s%s%s%s%s' % (db, owner,
                                                    template, encoding,
                                                    lc_collate, lc_ctype)
        cursor.execute(query)
        return True
    else:
        db_info = get_db_info(cursor, db)
        if (encoding and
            get_encoding_id(cursor, encoding) != db_info['encoding_id']):
            raise NotSupportedError(
                'Changing database encoding is not supported. '
                'Current encoding: %s' % db_info['encoding']
            )
        elif lc_collate and lc_collate != db_info['lc_collate']:
            raise NotSupportedError(
                'Changing LC_COLLATE is not supported. '
                'Current LC_COLLATE: %s' % db_info['lc_collate']
            )
        elif lc_ctype and lc_ctype != db_info['lc_ctype']:
            raise NotSupportedError(
                'Changing LC_CTYPE is not supported. '
                'Current LC_CTYPE: %s' % db_info['lc_ctype']
            )
        elif owner and owner != db_info['owner']:
            # Ownership is the one attribute we can change in place.
            return set_owner(cursor, db, owner)
        else:
            return False
def db_matches(cursor, db, owner, template, encoding, lc_collate, lc_ctype):
    """Return True when database *db* exists and matches every requested setting."""
    if not db_exists(cursor, db):
        return False
    db_info = get_db_info(cursor, db)
    # Each setting is only checked when the caller actually requested it.
    if encoding and get_encoding_id(cursor, encoding) != db_info['encoding_id']:
        return False
    if lc_collate and lc_collate != db_info['lc_collate']:
        return False
    if lc_ctype and lc_ctype != db_info['lc_ctype']:
        return False
    if owner and owner != db_info['owner']:
        return False
    return True
# ===========================================
# Module execution.
#
def main():
module = AnsibleModule(
argument_spec=dict(
login_user=dict(default="postgres"),
login_password=dict(default=""),
login_host=dict(default=""),
port=dict(default="5432"),
db=dict(required=True, aliases=['name']),
owner=dict(default=""),
template=dict(default=""),
encoding=dict(default=""),
lc_collate=dict(default=""),
lc_ctype=dict(default=""),
state=dict(default="present", choices=["absent", "present"]),
),
supports_check_mode = True
)
if not postgresqldb_found:
module.fail_json(msg="the python psycopg2 module is required")
db = module.params["db"]
port = module.params["port"]
owner = module.params["owner"]
template = module.params["template"]
encoding = module.params["encoding"]
lc_collate = module.params["lc_collate"]
lc_ctype = module.params["lc_ctype"]
state = module.params["state"]
changed = False
# To use defaults values, keyword arguments must be absent, so
# check which values are empty and don't include in the **kw
# dictionary
params_map = {
"login_host":"host",
"login_user":"user",
"login_password":"password",
"port":"port"
}
kw = dict( (params_map[k], v) for (k, v) in module.params.iteritems()
if k in params_map and v != '' )
try:
db_connection = psycopg2.connect(database="template1", **kw)
# Enable autocommit so we can create databases
if psycopg2.__version__ >= '2.4.2':
db_connection.autocommit = True
else:
db_connection.set_isolation_level(psycopg2
.extensions
.ISOLATION_LEVEL_AUTOCOMMIT)
cursor = db_connection.cursor(
cursor_factory=psycopg2.extras.DictCursor)
except Exception, e:
module.fail_json(msg="unable to connect to database: %s" % e)
try:
if module.check_mode:
if state == "absent":
changed = not db_exists(cursor, db)
elif state == "present":
changed = not db_matches(cursor, db, owner, template, encoding,
lc_collate, lc_ctype)
module.exit_json(changed=changed,db=db)
if state == "absent":
changed = db_delete(cursor, db)
elif state == "present":
changed = db_create(cursor, db, owner, template, encoding,
lc_collate, lc_ctype)
except NotSupportedError, e:
module.fail_json(msg=str(e))
except Exception, e:
module.fail_json(msg="Database query failed: %s" % e)
module.exit_json(changed=changed, db=db)
# import module snippets
from ansible.module_utils.basic import *
main()
| gpl-3.0 |
BitcoinUnlimited/BitcoinUnlimited | contrib/devtools/fix-whitespace.py | 5 | 1123 | #!/usr/bin/env python
'''
This tool removes trailing whitespace from files passed on command line.
Usage: remove-trailing-space.py [filepath ...]
Limitations:
- makes no backups of modified files
- modifies in place
- does not care what files you pass to it
- assumes it can keep the entire stripped file in memory
Always use only on files that are under version control!
Copyright (c) 2017 The Bitcoin Unlimited developers
Distributed under the MIT software license, see the accompanying
file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
import sys
SPACES_PER_TAB = 8
if __name__ == "__main__":
    for filename in sys.argv[1:]:
        lines = []
        # open file in universal newline mode, then
        # read it in, strip off trailing whitespace
        # NOTE(review): mode='U' was deprecated and removed in Python 3.11;
        # this script targets Python 2, where 'U' enables universal newlines.
        with open(filename, mode='U') as f:
            for line in f.readlines():
                # Tabs are replaced *after* rstrip, and each tab becomes a
                # fixed run of SPACES_PER_TAB spaces (not true column-wise
                # tab expansion).
                lines.append(line.rstrip().replace("\t", " " * SPACES_PER_TAB))
        # overwrite with stripped content
        with open(filename, mode='w') as f:
            # seek(0) is redundant here: mode 'w' already truncates and
            # positions at offset 0.
            f.seek(0)
            for line in lines:
                f.write(line + '\n')
| mit |
djgroen/flee-release | outputanalysis/CalculateDiagnostics.py | 1 | 2712 | import matplotlib
import matplotlib.pyplot as plt
import numpy as np
import StoreDiagnostics as dd
import analysis as a
def calculate_errors(out_dir, data, name, naieve_model=True):
    """
    Advanced plotting function for validation of refugee registration numbers in camps.
    """
    plt.clf()

    # data.loc[:,["%s sim" % name,"%s data" % name]]).as_matrix()
    # NOTE(review): DataFrame.as_matrix() was removed in pandas 1.0; these
    # calls should migrate to .to_numpy() -- confirm the pinned pandas version.
    y1 = data["%s sim" % name].as_matrix()
    y2 = data["%s data" % name].as_matrix()
    # NOTE(review): `days` is computed but never used below.
    days = np.arange(len(y1))

    naieve_early_day = 7
    naieve_training_day = 30

    # Rescaled values
    plt.clf()
    plt.xlabel("Days elapsed")
    plt.ylabel("Number of refugees")

    simtot = data["refugees in camps (simulation)"].as_matrix().flatten()
    untot = data["refugees in camps (UNHCR)"].as_matrix().flatten()

    # Rescale the simulated counts so camp totals line up with UNHCR totals.
    y1_rescaled = np.zeros(len(y1))
    for i in range(0, len(y1_rescaled)):
        # Only rescale if simtot > 0
        if simtot[i] > 0:
            y1_rescaled[i] = y1[i] * untot[i] / simtot[i]

    """
    Error quantification phase:
    - Quantify the errors and mismatches for this camp.
    """
    lerr = dd.LocationErrors()

    # absolute difference
    lerr.errors["absolute difference"] = a.abs_diffs(y1, y2)

    # absolute difference (rescaled)
    lerr.errors["absolute difference rescaled"] = a.abs_diffs(y1_rescaled, y2)

    # ratio difference
    lerr.errors["ratio difference"] = a.abs_diffs(y1, y2) / (np.maximum(untot, np.ones(len(untot))))

    """ Errors of which I'm usure whether to report:
     - accuracy ratio (forecast / true value), because it crashes if denominator is 0.
     - ln(accuracy ratio).
    """

    # We can only calculate the Mean Absolute Scaled Error if we have a naieve model in our plot.
    if naieve_model:
        # Number of observations (aggregate refugee days in UNHCR data set for this location)
        lerr.errors["N"] = np.sum(y2)

        # NOTE(review): n1..n6 are not defined anywhere in this function or
        # module scope, so every branch below raises NameError when
        # naieve_model=True.  The naieve-model series computation appears to
        # be missing -- confirm against the upstream plotting module.
        # flat naieve model (7 day)
        lerr.errors["MASE7"] = a.calculate_MASE(y1_rescaled, y2, n1, naieve_early_day)
        lerr.errors["MASE7-sloped"] = a.calculate_MASE(y1_rescaled, y2, n3, naieve_early_day)
        lerr.errors["MASE7-ratio"] = a.calculate_MASE(y1_rescaled, y2, n5, naieve_early_day)

        # flat naieve model (30 day)
        lerr.errors["MASE30"] = a.calculate_MASE(y1_rescaled, y2, n2, naieve_training_day)
        lerr.errors["MASE30-sloped"] = a.calculate_MASE(y1_rescaled, y2, n4, naieve_training_day)
        lerr.errors["MASE30-ratio"] = a.calculate_MASE(y1_rescaled, y2, n6, naieve_training_day)

        # Accuracy ratio doesn't work because of 0 values in the data.
        print("%s,%s,%s,%s,%s,%s,%s,%s,%s" % (out_dir, name, lerr.errors["MASE7"],lerr.errors["MASE7-sloped"], lerr.errors["MASE7-ratio"],lerr.errors["MASE30"],lerr.errors["MASE30-sloped"],lerr.errors["MASE30-ratio"],lerr.errors["N"]))

    return lerr
| bsd-3-clause |
ArcherSys/ArcherSys | eclipse/plugins/org.python.pydev.jython_4.5.5.201603221110/Lib/xml/etree/ElementInclude.py | 198 | 5062 | #
# ElementTree
# $Id: ElementInclude.py 3375 2008-02-13 08:05:08Z fredrik $
#
# limited xinclude support for element trees
#
# history:
# 2003-08-15 fl created
# 2003-11-14 fl fixed default loader
#
# Copyright (c) 2003-2004 by Fredrik Lundh. All rights reserved.
#
# [email protected]
# http://www.pythonware.com
#
# --------------------------------------------------------------------
# The ElementTree toolkit is
#
# Copyright (c) 1999-2008 by Fredrik Lundh
#
# By obtaining, using, and/or copying this software and/or its
# associated documentation, you agree that you have read, understood,
# and will comply with the following terms and conditions:
#
# Permission to use, copy, modify, and distribute this software and
# its associated documentation for any purpose and without fee is
# hereby granted, provided that the above copyright notice appears in
# all copies, and that both that copyright notice and this permission
# notice appear in supporting documentation, and that the name of
# Secret Labs AB or the author not be used in advertising or publicity
# pertaining to distribution of the software without specific, written
# prior permission.
#
# SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD
# TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANT-
# ABILITY AND FITNESS. IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR
# BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY
# DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
# OF THIS SOFTWARE.
# --------------------------------------------------------------------
# Licensed to PSF under a Contributor Agreement.
# See http://www.python.org/psf/license for licensing details.
##
# Limited XInclude support for the ElementTree package.
##
import copy
from . import ElementTree
XINCLUDE = "{http://www.w3.org/2001/XInclude}"
XINCLUDE_INCLUDE = XINCLUDE + "include"
XINCLUDE_FALLBACK = XINCLUDE + "fallback"
##
# Fatal include error.
class FatalIncludeError(SyntaxError):
    """Raised when an XInclude directive is malformed or cannot be expanded."""
##
# Default loader. This loader reads an included resource from disk.
#
# @param href Resource reference.
# @param parse Parse mode. Either "xml" or "text".
# @param encoding Optional text encoding.
# @return The expanded resource. If the parse mode is "xml", this
# is an ElementTree instance. If the parse mode is "text", this
# is a Unicode string. If the loader fails, it can return None
# or raise an IOError exception.
# @throws IOError If the loader fails to load the resource.
def default_loader(href, parse, encoding=None):
    """Default resource loader: read the included resource *href* from disk.

    For parse mode "xml" the parsed root element is returned; for "text"
    the file contents are returned, decoded with *encoding* if given.

    Fix: the original called open()/close() manually and leaked the file
    handle when ElementTree.parse() or decode() raised; a context manager
    guarantees the handle is closed.
    """
    with open(href) as file:
        if parse == "xml":
            data = ElementTree.parse(file).getroot()
        else:
            data = file.read()
            if encoding:
                data = data.decode(encoding)
    return data
##
# Expand XInclude directives.
#
# @param elem Root element.
# @param loader Optional resource loader. If omitted, it defaults
# to {@link default_loader}. If given, it should be a callable
# that implements the same interface as <b>default_loader</b>.
# @throws FatalIncludeError If the function fails to include a given
# resource, or if the tree contains malformed XInclude elements.
# @throws IOError If the function fails to load a given resource.
def include(elem, loader=None):
    """Expand all xi:include directives under *elem*, mutating the tree in place."""
    if loader is None:
        loader = default_loader
    # look for xinclude elements
    i = 0
    while i < len(elem):
        e = elem[i]
        if e.tag == XINCLUDE_INCLUDE:
            # process xinclude directive
            href = e.get("href")
            parse = e.get("parse", "xml")
            if parse == "xml":
                node = loader(href, parse)
                if node is None:
                    raise FatalIncludeError(
                        "cannot load %r as %r" % (href, parse)
                        )
                # Copy so including the same resource twice is safe.
                node = copy.copy(node)
                if e.tail:
                    node.tail = (node.tail or "") + e.tail
                # Replace the xi:include element with the loaded subtree.
                elem[i] = node
            elif parse == "text":
                text = loader(href, parse, e.get("encoding"))
                if text is None:
                    raise FatalIncludeError(
                        "cannot load %r as %r" % (href, parse)
                        )
                # Text has no element of its own: splice it into the tail
                # of the previous sibling, or into the parent's text.
                if i:
                    node = elem[i-1]
                    node.tail = (node.tail or "") + text + (e.tail or "")
                else:
                    elem.text = (elem.text or "") + text + (e.tail or "")
                del elem[i]
                # Do not advance i: the next child has shifted into slot i.
                continue
            else:
                raise FatalIncludeError(
                    "unknown parse type in xi:include tag (%r)" % parse
                    )
        elif e.tag == XINCLUDE_FALLBACK:
            # Fallback outside an include context is a structural error.
            raise FatalIncludeError(
                "xi:fallback tag must be child of xi:include (%r)" % e.tag
                )
        else:
            # Recurse into ordinary elements.
            include(e, loader)
        i = i + 1
| mit |
stoianivanov/Connect-Four | start_menu.py | 1 | 1510 | import sys
from PyQt5.QtCore import *
from PyQt5.QtGui import *
from PyQt5.QtWidgets import (QApplication, QGridLayout,
QLCDNumber, QPushButton, QWidget)
from PySide.QtCore import *
from PySide.QtGui import *
from connect_four import TableWindow, Table
class StartGame(QWidget):
    """
    Main window of the game.

    Offers a choice between single-player (against the computer) and
    two-player (human vs. human) Connect Four.
    """
    def __init__(self):
        super(StartGame, self).__init__()
        self.table_window = TableWindow()

        onePlayerButton = QPushButton("&Single Player")
        onePlayerButton.setFocusPolicy(Qt.NoFocus)
        onePlayerButton.clicked.connect(self.start_single_player)

        multiPlayerButton = QPushButton("&Multi Player")
        multiPlayerButton.setFocusPolicy(Qt.NoFocus)
        multiPlayerButton.clicked.connect(self.start_multi_player)

        layout = QGridLayout()
        layout.addWidget(onePlayerButton, 0, 1)
        layout.addWidget(multiPlayerButton, 1, 1)
        self.setLayout(layout)

    def start_single_player(self):
        # Reset the board before showing the game window.
        # NOTE(review): assumes Table.MATRIX is a pristine (empty) grid and
        # is not shared/mutated by a previous game -- confirm.
        self.table_window.t.matrix = Table.MATRIX
        self.table_window.show()

    def start_multi_player(self):
        self.table_window.t.matrix = Table.MATRIX
        # Disable the computer opponent for a two-human game.
        self.table_window.vsPC = False
        self.table_window.show()
if __name__ == '__main__':
    # Standard Qt bootstrap: create the application, show the start menu,
    # and hand control to the event loop until the user quits.
    app = QApplication(sys.argv)
    window = StartGame()
    window.show()
    sys.exit(app.exec_())
| gpl-2.0 |
aldian/tensorflow | tensorflow/contrib/training/python/training/evaluation.py | 32 | 18000 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Contains functions for evaluation and summarization of metrics.
The evaluation.py module contains helper functions for evaluating TensorFlow
modules using a variety of metrics and summarizing the results.
****************************************
* Evaluating a Checkpointed Model Once *
****************************************
Once we've trained a model, we'll want to evaluate it. The simplest way to do
this is to evaluate the performance of a saved model a single time. In order
to do this, we can specify a number of metrics we'll want to evaluate as well
as specify the summaries we want to save to disk. Furthermore, we can print
out the metrics values to stdout:
# Specify where the checkpoint is stored:
checkpoint_path = ...
# Create model and obtain the predictions:
images, labels = LoadData(...)
predictions = MyModel(images)
# Choose the metrics to compute:
names_to_values, names_to_updates = tf.contrib.metrics.aggregate_metric_map({
"accuracy": tf.contrib.metrics.streaming_accuracy(predictions, labels),
"mse": tf.contrib.metrics.streaming_mean_squared_error(
predictions, labels),
})
# Define the summaries to write:
  for metric_name, metric_value in names_to_values.iteritems():
tf.summary.scalar(metric_name, metric_value)
checkpoint_dir = '/tmp/my_model_dir/'
log_dir = '/tmp/my_model_eval/'
# We'll evaluate 1000 batches:
num_evals = 1000
names_to_values = evaluate_once(
checkpoint_path=checkpoint_path,
eval_ops=names_to_updates.values(),
final_ops=names_to_values,
hooks=[
tf.contrib.training.StopAfterNEvalsHook(num_evals),
tf.contrib.training.SummaryAtEndHook(logdir),
],
config=None)
for name in names_to_values:
print('Metric %s has value %f.' % (name, names_to_values[name]))
************************************************
* Evaluating a Checkpointed Model with Metrics *
************************************************
Often, one wants to evaluate a model checkpoint saved on disk. This can be
performed once or repeatedly on a set schedule.
To evaluate a particular model, users define zero or more metrics and zero or
more summaries and call the evaluate_repeatedly method:
# Create model and obtain the predictions:
images, labels = LoadData(...)
predictions = MyModel(images)
# Choose the metrics to compute:
names_to_values, names_to_updates = tf.contrib.metrics.aggregate_metric_map({
"accuracy": tf.contrib.metrics.streaming_accuracy(predictions, labels),
"mse": tf.contrib.metrics.streaming_mean_squared_error(
predictions, labels),
})
# Define the summaries to write:
  for metric_name, metric_value in names_to_values.iteritems():
tf.summary.scalar(metric_name, metric_value)
checkpoint_dir = '/tmp/my_model_dir/'
log_dir = '/tmp/my_model_eval/'
# We'll evaluate 1000 batches:
num_evals = 1000
# Evaluate every 10 minutes:
tf.contrib.training.evaluate_repeatedly(
checkpoint_dir,
eval_ops=names_to_updates.values(),
hooks=[
tf.contrib.training.StopAfterNEvalsHook(num_evals),
tf.contrib.training.SummaryAtEndHook(logdir),
],
eval_interval_secs=600)
*******************************************************
* Evaluating a Checkpointed Model with Summaries Only *
*******************************************************
At times, an evaluation can be performed without metrics at all but rather
with only summaries. The user need only leave out the 'eval_ops' argument:
# Create model and obtain the predictions:
images, labels = LoadData(...)
predictions = MyModel(images)
# Define the summaries to write:
tf.summary.scalar(...)
tf.summary.histogram(...)
checkpoint_dir = '/tmp/my_model_dir/'
log_dir = '/tmp/my_model_eval/'
# Evaluate once every 10 minutes.
tf.contrib.training.evaluate_repeatedly(
checkpoint_dir,
hooks=[
tf.contrib.training.SummaryAtEndHook(logdir),
],
eval_interval_secs=600)
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import time
from tensorflow.contrib.framework.python.ops import variables
from tensorflow.python.ops import state_ops
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.summary import summary
from tensorflow.python.training import basic_session_run_hooks
from tensorflow.python.training import evaluation
from tensorflow.python.training import monitored_session
from tensorflow.python.training import saver as tf_saver
from tensorflow.python.training import session_run_hook
from tensorflow.python.training import training_util
# Names re-exported as the public API of this module.
__all__ = [
    'StopAfterNEvalsHook',
    'SummaryAtEndHook',
    'checkpoints_iterator',
    'evaluate_once',
    'evaluate_repeatedly',
    'get_or_create_eval_step',
    'wait_for_new_checkpoint',
]
# Re-export private helpers from tensorflow.python.training.evaluation under
# their public contrib names; the pylint pragmas silence the resulting
# protected-access and invalid-name warnings for these aliases only.
# pylint: disable=protected-access
# pylint: disable=invalid-name
StopAfterNEvalsHook = evaluation._StopAfterNEvalsHook
evaluate_once = evaluation._evaluate_once
get_or_create_eval_step = evaluation._get_or_create_eval_step
# pylint: enable=invalid-name
# pylint: enable=protected-access
def wait_for_new_checkpoint(checkpoint_dir,
                            last_checkpoint=None,
                            seconds_to_sleep=1,
                            timeout=None):
  """Blocks until a checkpoint other than `last_checkpoint` appears.

  Polls `checkpoint_dir` once per `seconds_to_sleep` seconds and returns as
  soon as the latest checkpoint path differs from `last_checkpoint`.

  Args:
    checkpoint_dir: The directory in which checkpoints are saved.
    last_checkpoint: The last checkpoint path used or `None` if we're expecting
      a checkpoint for the first time.
    seconds_to_sleep: Number of seconds to sleep between polls of the
      directory.
    timeout: The maximum amount of time to wait. If left as `None`, then the
      process will wait indefinitely.

  Returns:
    a new checkpoint path, or None if the timeout was reached.
  """
  logging.info('Waiting for new checkpoint at %s', checkpoint_dir)
  deadline = None if timeout is None else time.time() + timeout
  while True:
    latest = tf_saver.latest_checkpoint(checkpoint_dir)
    if latest is not None and latest != last_checkpoint:
      logging.info('Found new checkpoint at %s', latest)
      return latest
    # Nothing new yet; give up if sleeping again would overshoot the deadline.
    if deadline is not None and time.time() + seconds_to_sleep > deadline:
      return None
    time.sleep(seconds_to_sleep)
def checkpoints_iterator(checkpoint_dir,
                         min_interval_secs=0,
                         timeout=None,
                         timeout_fn=None):
  """Continuously yields new checkpoint paths found in `checkpoint_dir`.

  New checkpoints are only noticed when control returns to the generator, so
  checkpoints can be missed if the caller's loop body takes longer than
  `min_interval_secs` (or than the interval at which checkpoints are written).

  The `timeout` argument bounds how long a single wait for the next
  checkpoint may block, and interacts with `timeout_fn` as follows:

  * No `timeout_fn` was given: the iterator stops yielding when the timeout
    expires.
  * `timeout_fn` returns a true value: the iterator stops yielding.
  * `timeout_fn` returns a false value: the iterator resumes waiting for new
    checkpoints, with the timeout logic applying afresh.

  This gives callers control over what to do when checkpoints stop arriving,
  e.g. by supplying a `timeout_fn` that returns `True` once training is known
  to have finished and `False` while it is still running.

  Args:
    checkpoint_dir: The directory in which checkpoints are saved.
    min_interval_secs: The minimum number of seconds between yielding
      checkpoints.
    timeout: The maximum amount of time to wait between checkpoints. If left
      as `None`, then the process will wait indefinitely.
    timeout_fn: Optional function to call after a timeout. If the function
      returns True, then it means that no new checkpoints will be generated
      and the iterator will exit. The function is called with no arguments.

  Yields:
    String paths to latest checkpoint files as they arrive.
  """
  last_path = None
  while True:
    next_path = wait_for_new_checkpoint(
        checkpoint_dir, last_path, timeout=timeout)
    if next_path is None:
      # The wait timed out. Without a timeout_fn we are done; otherwise the
      # callback decides whether to stop (truthy) or keep waiting (falsy).
      if not timeout_fn:
        logging.info('Timed-out waiting for a checkpoint.')
        return
      if timeout_fn():
        return
      continue
    started = time.time()
    last_path = next_path
    yield last_path
    # Throttle so consecutive yields are at least min_interval_secs apart.
    remaining = started + min_interval_secs - time.time()
    if remaining > 0:
      time.sleep(remaining)
class SummaryAtEndHook(session_run_hook.SessionRunHook):
  """Session hook that evaluates summaries once, when the session ends."""

  def __init__(self,
               log_dir=None,
               summary_writer=None,
               summary_op=None,
               feed_dict=None):
    """Constructs the Summary Hook.

    Args:
      log_dir: Directory where the summary events are written. Consulted only
        when `summary_writer` is not specified.
      summary_writer: A `tf.summary.FileWriter` used to write summary events.
      summary_op: The summary op to run. If left as `None`, then all summaries
        in the tf.GraphKeys.SUMMARIES collection are used.
      feed_dict: An optional feed dictionary to use when evaluating the
        summaries.

    Raises:
      ValueError: If both `log_dir` and `summary_writer` are `None`.
    """
    self._summary_op = summary_op
    # Remember whether to lazily merge all summaries in begin().
    self._replace_summary_op = summary_op is None
    self._feed_dict = feed_dict
    self._summary_writer = summary_writer
    self._log_dir = log_dir
    if self._log_dir is None and self._summary_writer is None:
      raise ValueError('One of log_dir or summary_writer should be used.')

  def begin(self):
    # Resolve the merged summary op lazily so summaries registered after
    # construction are still picked up.
    if self._replace_summary_op:
      self._summary_op = summary.merge_all()
    self._global_step = variables.get_or_create_global_step()

  def after_create_session(self, session, coord):
    # Create the writer from log_dir only if one was not handed in.
    if self._summary_writer is None and self._log_dir:
      self._summary_writer = summary.FileWriterCache.get(self._log_dir)

  def end(self, session):
    step = training_util.global_step(session, self._global_step)
    serialized = session.run(self._summary_op, self._feed_dict)
    if self._summary_writer:
      self._summary_writer.add_summary(serialized, step)
      self._summary_writer.flush()
def _scaffold_with_init(scaffold, saver, checkpoint_path):
  """Returns a scaffold whose init_fn restores the given checkpoint.

  Args:
    scaffold: The scaffold to copy.
    saver: The saver to use when restoring the checkpoint.
    checkpoint_path: An absolute path to a checkpoint.

  Returns:
    A scaffold with an init_fn that loads the given checkpoint. If the
    scaffold provided already has an init_fn, the scaffold is returned
    unchanged.
  """
  if scaffold.init_fn:
    # The caller supplied their own init_fn; trust it to restore the model.
    return scaffold

  def restore_fn(_, session):
    saver.restore(session, checkpoint_path)

  return monitored_session.Scaffold(
      init_op=scaffold.init_op,
      init_feed_dict=scaffold.init_feed_dict,
      init_fn=restore_fn,
      ready_op=scaffold.ready_op,
      local_init_op=scaffold.local_init_op,
      summary_op=scaffold.summary_op,
      saver=scaffold.saver)
def evaluate_repeatedly(checkpoint_dir,
                        master='',
                        scaffold=None,
                        eval_ops=None,
                        feed_dict=None,
                        final_ops=None,
                        final_ops_feed_dict=None,
                        eval_interval_secs=60,
                        hooks=None,
                        config=None,
                        max_number_of_evaluations=None,
                        timeout=None,
                        timeout_fn=None):
  """Repeatedly searches for a checkpoint in `checkpoint_dir` and evaluates it.

  During a single evaluation, the `eval_ops` is run until the session is
  interrupted or requested to finish. This is typically requested via a
  `tf.contrib.training.StopAfterNEvalsHook` which results in `eval_ops` running
  the requested number of times.

  Optionally, a user can pass in `final_ops`, a single `Tensor`, a list of
  `Tensors` or a dictionary from names to `Tensors`. The `final_ops` is
  evaluated a single time after `eval_ops` has finished running and the fetched
  values of `final_ops` are returned. If `final_ops` is left as `None`, then
  `None` is returned.

  One may also consider using a `tf.contrib.training.SummaryAtEndHook` to
  record summaries after the `eval_ops` have run. If `eval_ops` is `None`, the
  summaries run immediately after the model checkpoint has been restored.

  Note that `evaluate_once` creates a local variable used to track the number
  of evaluations run via `tf.contrib.training.get_or_create_eval_step`.
  Consequently, if a custom local init op is provided via a `scaffold`, the
  caller should ensure that the local init op also initializes the eval step.

  Args:
    checkpoint_dir: The directory where checkpoints are stored.
    master: The address of the TensorFlow master.
    scaffold: An tf.train.Scaffold instance for initializing variables and
      restoring variables. Note that `scaffold.init_fn` is used by the function
      to restore the checkpoint. If you supply a custom init_fn, then it must
      also take care of restoring the model from its checkpoint.
    eval_ops: A single `Tensor`, a list of `Tensors` or a dictionary of names
      to `Tensors`, which is run until the session is requested to stop,
      commonly done by a `tf.contrib.training.StopAfterNEvalsHook`.
    feed_dict: The feed dictionary to use when executing the `eval_ops`.
    final_ops: A single `Tensor`, a list of `Tensors` or a dictionary of names
      to `Tensors`.
    final_ops_feed_dict: A feed dictionary to use when evaluating `final_ops`.
    eval_interval_secs: The minimum number of seconds between evaluations.
    hooks: List of `tf.train.SessionRunHook` callbacks which are run inside the
      evaluation loop. The list is not mutated by this function.
    config: An instance of `tf.ConfigProto` that will be used to
      configure the `Session`. If left as `None`, the default will be used.
    max_number_of_evaluations: The maximum times to run the evaluation. If left
      as `None`, then evaluation runs indefinitely.
    timeout: The maximum amount of time to wait between checkpoints. If left as
      `None`, then the process will wait indefinitely.
    timeout_fn: Optional function to call after a timeout. If the function
      returns True, then it means that no new checkpoints will be generated and
      the iterator will exit. The function is called with no arguments.

  Returns:
    The fetched values of `final_ops` or `None` if `final_ops` is `None`.
  """
  eval_step = get_or_create_eval_step()

  # Prepare the run hooks. Copy the caller's list: previously a non-empty
  # `hooks` argument was used directly, so appending the FinalOpsHook below
  # mutated the caller-owned list on every call.
  hooks = list(hooks or [])

  if eval_ops is not None:
    update_eval_step = state_ops.assign_add(eval_step, 1)

    # Let any StopAfterNEvalsHook observe the eval-step counter.
    for h in hooks:
      if isinstance(h, StopAfterNEvalsHook):
        h._set_evals_completed_tensor(update_eval_step)  # pylint: disable=protected-access

    # Piggy-back the eval-step increment onto every run of eval_ops.
    if isinstance(eval_ops, dict):
      eval_ops['update_eval_step'] = update_eval_step
    elif isinstance(eval_ops, (tuple, list)):
      eval_ops = list(eval_ops) + [update_eval_step]
    else:
      eval_ops = [eval_ops, update_eval_step]

  final_ops_hook = basic_session_run_hooks.FinalOpsHook(final_ops,
                                                        final_ops_feed_dict)
  hooks.append(final_ops_hook)

  num_evaluations = 0
  for checkpoint_path in checkpoints_iterator(
      checkpoint_dir,
      min_interval_secs=eval_interval_secs,
      timeout=timeout,
      timeout_fn=timeout_fn):

    session_creator = monitored_session.ChiefSessionCreator(
        scaffold=scaffold,
        checkpoint_filename_with_path=checkpoint_path,
        master=master,
        config=config)

    with monitored_session.MonitoredSession(
        session_creator=session_creator, hooks=hooks) as session:
      logging.info('Starting evaluation at ' + time.strftime(
          '%Y-%m-%d-%H:%M:%S', time.gmtime()))
      if eval_ops is not None:
        # Run until a hook (e.g. StopAfterNEvalsHook) requests a stop.
        while not session.should_stop():
          session.run(eval_ops, feed_dict)

      logging.info('Finished evaluation at ' + time.strftime(
          '%Y-%m-%d-%H:%M:%S', time.gmtime()))
    num_evaluations += 1

    if (max_number_of_evaluations is not None and
        num_evaluations >= max_number_of_evaluations):
      return final_ops_hook.final_ops_values

  return final_ops_hook.final_ops_values
| apache-2.0 |
pamfilos/invenio | modules/websubmit/lib/functions/Send_Approval_Request.py | 27 | 6385 | ## This file is part of Invenio.
## Copyright (C) 2004, 2005, 2006, 2007, 2008, 2010, 2011 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
# Legacy CVS keyword; expanded to the file revision by the old VCS tooling.
__revision__ = "$Id$"
## Description: function Send_Approval_Request
## This function sends an email to the referee asking him/her
## to approve/reject a document
## Author: T.Baron
## PARAMETERS: directory: parameter to the link manager program
## addressesDAM: address of the referee(s)
## categformatDAM: variable needed to extract the category
## of the document and use it to derive the
## address.
## authorfile: name of the file containing the author list
## titleFile: name of the file containing the title
import os
import re
from invenio.config import CFG_SITE_NAME, \
CFG_SITE_URL, \
CFG_SITE_SUPPORT_EMAIL, \
CFG_SITE_RECORD
from invenio.dbquery import run_sql
from invenio.access_control_admin import acc_get_role_users,acc_get_role_id
from invenio.websubmit_config import CFG_WEBSUBMIT_COPY_MAILS_TO_ADMIN
from invenio.mailutils import send_email
def Send_Approval_Request (parameters, curdir, form, user_info=None):
    """
    This function sends an email to the referee in order to start the
    simple approval process. This function is very CERN-specific and
    should be changed in case of external use. Must be called after
    the Get_Report_Number function.

    Parameters:

       * addressesDAM: email addresses of the people who will receive
                       this email (comma separated list). this
                       parameter may contain the <CATEG> string. In
                       which case the variable computed from the
                       [categformatDAM] parameter replaces this
                       string.
                       eg.: "<CATEG>[email protected]"

       * categformatDAM: contains a regular expression used to compute
                         the category of the document given the
                         reference of the document.
                         eg.: if [categformatAFP]="TEST-<CATEG>-.*"
                         and the reference of the document is
                         "TEST-CATEGORY1-2001-001", then the computed
                         category equals "CATEGORY1"

       * authorfile: name of the file containing the author list

       * titleFile: name of the file containing the title

       * directory: parameter used to create the URL to access the
                    files.
    """
    global rn, sysno
    # variables declaration
    doctype = re.search(".*/([^/]*)/([^/]*)/[^/]*$", curdir).group(2)
    FROMADDR = '%s Submission Engine <%s>' % (CFG_SITE_NAME, CFG_SITE_SUPPORT_EMAIL)
    otheraddresses = parameters['addressesDAM']
    categformat = parameters['categformatDAM']
    # retrieve category: turn the <CATEG> placeholder into a capture group
    # and match it against the report number
    categformat = categformat.replace("<CATEG>", "([^-]*)")
    m_categ_search = re.match(categformat, rn)
    if m_categ_search is not None:
        if len(m_categ_search.groups()) > 0:
            ## Found a match for the category of this document. Get it:
            category = m_categ_search.group(1)
        else:
            ## This document has no category.
            category = "unknown"
    else:
        category = "unknown"

    # create TI: title, suffixed with the submission date when available
    if os.path.exists("%s/date" % curdir):
        fp = open("%s/date" % curdir, "r")
        date = fp.read()
        fp.close()
    else:
        date = ""
    if os.path.exists("%s/%s" % (curdir, parameters['titleFile'])):
        fp = open("%s/%s" % (curdir, parameters['titleFile']), "r")
        title = fp.read()
        fp.close()
        title = title.replace("\n", "")
    else:
        title = ""
    title += " - %s" % date
    # create AU: author list read verbatim from the author file
    if os.path.exists("%s/%s" % (curdir, parameters['authorfile'])):
        fp = open("%s/%s" % (curdir, parameters['authorfile']), "r")
        author = fp.read()
        fp.close()
    else:
        author = ""
    # We get the referee access password. Default to an empty string so the
    # mail can still be composed: previously `access` was left undefined
    # (NameError) when no sbmAPPROVAL row existed for this report number.
    access = ""
    sth = run_sql("SELECT access FROM sbmAPPROVAL WHERE rn=%s", (rn,))
    if len(sth) > 0:
        access = sth[0][0]
    # Build referee's email address
    refereeaddress = ""
    # Try to retrieve the referee's email from the referee's database
    for user in acc_get_role_users(acc_get_role_id("referee_%s_%s" % (doctype, category))):
        refereeaddress += user[1] + ","
    # And if there are general referees
    for user in acc_get_role_users(acc_get_role_id("referee_%s_*" % doctype)):
        refereeaddress += user[1] + ","
    refereeaddress = re.sub(",$", "", refereeaddress)
    # Creation of the mail for the referee
    addresses = ""
    if refereeaddress != "":
        addresses = refereeaddress + ","
    if otheraddresses != "":
        addresses += otheraddresses
    else:
        # No extra addresses: drop the trailing separator left above.
        addresses = re.sub(",$", "", addresses)
    title_referee = "Request for approval of %s" % rn
    mail_referee = "The document %s has been submitted to the %s Server..\nYour approval is requested on it.\n\n" % (rn, CFG_SITE_NAME)
    mail_referee += "Title: %s\n\nAuthor(s): %s\n\n" % (title, author)
    mail_referee += "To access the document(s), select the file(s) from the location:<%s/%s/%s/files/>\n\n" % (CFG_SITE_URL, CFG_SITE_RECORD, sysno)
    mail_referee += "To approve/reject the document, you should go to this URL:\n<%s/approve.py?access=%s>\n" % (CFG_SITE_URL, access)
    mail_referee += "---------------------------------------------\nBest regards.\nThe submission team.\n"
    # Send mail to referee
    send_email(FROMADDR, addresses, title_referee, mail_referee, copy_to_admin=CFG_WEBSUBMIT_COPY_MAILS_TO_ADMIN)
    return ""
| gpl-2.0 |
WZQ1397/automatic-repo | project/locust/locustcircle_url_test3.py | 1 | 1134 | from locust import TaskSet, task, HttpLocust
import queue
class UserBehavior(TaskSet):
    """Task set that registers users with credentials drawn from a shared queue."""

    @task
    def test_register(self):
        """Pop one account from the queue, register it, then recycle it.

        Uses a non-blocking get: ``Queue.get()`` without ``block=False``
        would wait forever on an empty queue and never raise
        ``queue.Empty``, making the exhaustion branch below unreachable.
        """
        try:
            data = self.locust.user_data_queue.get_nowait()
        except queue.Empty:
            print('account data run out, test ended.')
            exit(0)
        print('register with user: {}, pwd: {}'\
              .format(data['username'], data['password']))
        payload = {
            'username': data['username'],
            'password': data['password']
        }
        self.client.post('/register', data=payload)
        # Put the account back so the data is reused cyclically while each
        # concurrent user still holds a unique credential at any moment.
        self.locust.user_data_queue.put_nowait(data)
class WebsiteUser(HttpLocust):
    """Locust user that runs UserBehavior tasks against the target host."""

    host = 'http://debugtalk.com'
    task_set = UserBehavior

    # Each simulated user waits 1-3 seconds between consecutive tasks.
    min_wait = 1000
    max_wait = 3000

    # Pre-populate a shared queue with 100 synthetic accounts; tasks draw
    # from it so concurrent users operate on distinct credentials.
    user_data_queue = queue.Queue()
    for index in range(100):
        data = {
            "username": "test%04d" % index,
            "password": "pwd%04d" % index,
            "email": "test%[email protected]" % index,
            "phone": "186%08d" % index,
        }
        user_data_queue.put_nowait(data)
geekaia/edx-platform | lms/djangoapps/shoppingcart/migrations/0001_initial.py | 182 | 12544 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """Initial South migration for the shoppingcart app.

    Creates the Order, OrderItem, PaidCourseRegistration and CertificateItem
    tables. The ``models`` dictionary below is South's frozen snapshot of the
    ORM at the time this migration was generated; it must not be edited by
    hand.
    """

    def forwards(self, orm):
        """Create the four shoppingcart tables and emit post-create signals."""
        # Adding model 'Order'
        db.create_table('shoppingcart_order', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'])),
            ('currency', self.gf('django.db.models.fields.CharField')(default='usd', max_length=8)),
            ('status', self.gf('django.db.models.fields.CharField')(default='cart', max_length=32)),
            ('purchase_time', self.gf('django.db.models.fields.DateTimeField')(null=True, blank=True)),
            ('bill_to_first', self.gf('django.db.models.fields.CharField')(max_length=64, blank=True)),
            ('bill_to_last', self.gf('django.db.models.fields.CharField')(max_length=64, blank=True)),
            ('bill_to_street1', self.gf('django.db.models.fields.CharField')(max_length=128, blank=True)),
            ('bill_to_street2', self.gf('django.db.models.fields.CharField')(max_length=128, blank=True)),
            ('bill_to_city', self.gf('django.db.models.fields.CharField')(max_length=64, blank=True)),
            ('bill_to_state', self.gf('django.db.models.fields.CharField')(max_length=8, blank=True)),
            ('bill_to_postalcode', self.gf('django.db.models.fields.CharField')(max_length=16, blank=True)),
            ('bill_to_country', self.gf('django.db.models.fields.CharField')(max_length=64, blank=True)),
            ('bill_to_ccnum', self.gf('django.db.models.fields.CharField')(max_length=8, blank=True)),
            ('bill_to_cardtype', self.gf('django.db.models.fields.CharField')(max_length=32, blank=True)),
            ('processor_reply_dump', self.gf('django.db.models.fields.TextField')(blank=True)),
        ))
        db.send_create_signal('shoppingcart', ['Order'])
        # Adding model 'OrderItem'
        db.create_table('shoppingcart_orderitem', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('order', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['shoppingcart.Order'])),
            ('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'])),
            ('status', self.gf('django.db.models.fields.CharField')(default='cart', max_length=32)),
            ('qty', self.gf('django.db.models.fields.IntegerField')(default=1)),
            ('unit_cost', self.gf('django.db.models.fields.DecimalField')(default=0.0, max_digits=30, decimal_places=2)),
            ('line_cost', self.gf('django.db.models.fields.DecimalField')(default=0.0, max_digits=30, decimal_places=2)),
            ('line_desc', self.gf('django.db.models.fields.CharField')(default='Misc. Item', max_length=1024)),
            ('currency', self.gf('django.db.models.fields.CharField')(default='usd', max_length=8)),
        ))
        db.send_create_signal('shoppingcart', ['OrderItem'])
        # Adding model 'PaidCourseRegistration'
        db.create_table('shoppingcart_paidcourseregistration', (
            ('orderitem_ptr', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['shoppingcart.OrderItem'], unique=True, primary_key=True)),
            ('course_id', self.gf('django.db.models.fields.CharField')(max_length=128, db_index=True)),
        ))
        db.send_create_signal('shoppingcart', ['PaidCourseRegistration'])
        # Adding model 'CertificateItem'
        db.create_table('shoppingcart_certificateitem', (
            ('orderitem_ptr', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['shoppingcart.OrderItem'], unique=True, primary_key=True)),
            ('course_id', self.gf('django.db.models.fields.CharField')(max_length=128, db_index=True)),
            ('course_enrollment', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['student.CourseEnrollment'])),
            ('mode', self.gf('django.db.models.fields.SlugField')(max_length=50)),
        ))
        db.send_create_signal('shoppingcart', ['CertificateItem'])

    def backwards(self, orm):
        """Drop the four tables created by forwards(), reversing the migration."""
        # Deleting model 'Order'
        db.delete_table('shoppingcart_order')
        # Deleting model 'OrderItem'
        db.delete_table('shoppingcart_orderitem')
        # Deleting model 'PaidCourseRegistration'
        db.delete_table('shoppingcart_paidcourseregistration')
        # Deleting model 'CertificateItem'
        db.delete_table('shoppingcart_certificateitem')

    # South's frozen ORM definitions (auto-generated; do not edit by hand).
    models = {
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'shoppingcart.certificateitem': {
            'Meta': {'object_name': 'CertificateItem', '_ormbases': ['shoppingcart.OrderItem']},
            'course_enrollment': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['student.CourseEnrollment']"}),
            'course_id': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_index': 'True'}),
            'mode': ('django.db.models.fields.SlugField', [], {'max_length': '50'}),
            'orderitem_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['shoppingcart.OrderItem']", 'unique': 'True', 'primary_key': 'True'})
        },
        'shoppingcart.order': {
            'Meta': {'object_name': 'Order'},
            'bill_to_cardtype': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
            'bill_to_ccnum': ('django.db.models.fields.CharField', [], {'max_length': '8', 'blank': 'True'}),
            'bill_to_city': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
            'bill_to_country': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
            'bill_to_first': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
            'bill_to_last': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
            'bill_to_postalcode': ('django.db.models.fields.CharField', [], {'max_length': '16', 'blank': 'True'}),
            'bill_to_state': ('django.db.models.fields.CharField', [], {'max_length': '8', 'blank': 'True'}),
            'bill_to_street1': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
            'bill_to_street2': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
            'currency': ('django.db.models.fields.CharField', [], {'default': "'usd'", 'max_length': '8'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'processor_reply_dump': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'purchase_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'status': ('django.db.models.fields.CharField', [], {'default': "'cart'", 'max_length': '32'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
        },
        'shoppingcart.orderitem': {
            'Meta': {'object_name': 'OrderItem'},
            'currency': ('django.db.models.fields.CharField', [], {'default': "'usd'", 'max_length': '8'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'line_cost': ('django.db.models.fields.DecimalField', [], {'default': '0.0', 'max_digits': '30', 'decimal_places': '2'}),
            'line_desc': ('django.db.models.fields.CharField', [], {'default': "'Misc. Item'", 'max_length': '1024'}),
            'order': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['shoppingcart.Order']"}),
            'qty': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
            'status': ('django.db.models.fields.CharField', [], {'default': "'cart'", 'max_length': '32'}),
            'unit_cost': ('django.db.models.fields.DecimalField', [], {'default': '0.0', 'max_digits': '30', 'decimal_places': '2'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
        },
        'shoppingcart.paidcourseregistration': {
            'Meta': {'object_name': 'PaidCourseRegistration', '_ormbases': ['shoppingcart.OrderItem']},
            'course_id': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_index': 'True'}),
            'orderitem_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['shoppingcart.OrderItem']", 'unique': 'True', 'primary_key': 'True'})
        },
        'student.courseenrollment': {
            'Meta': {'ordering': "('user', 'course_id')", 'unique_together': "(('user', 'course_id'),)", 'object_name': 'CourseEnrollment'},
            'course_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'db_index': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'mode': ('django.db.models.fields.CharField', [], {'default': "'honor'", 'max_length': '100'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
        }
    }

    complete_apps = ['shoppingcart']
| agpl-3.0 |
apllicationCOM/youtube-dl-api-server | youtube_dl_server/youtube_dl/extractor/daum.py | 118 | 2755 | # encoding: utf-8
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..compat import (
compat_urllib_parse,
)
class DaumIE(InfoExtractor):
    """Extractor for clips hosted on tvpot.daum.net."""

    _VALID_URL = r'https?://(?:m\.)?tvpot\.daum\.net/(?:v/|.*?clipid=)(?P<id>[^?#&]+)'
    IE_NAME = 'daum.net'

    _TESTS = [{
        'url': 'http://tvpot.daum.net/clip/ClipView.do?clipid=52554690',
        'info_dict': {
            'id': '52554690',
            'ext': 'mp4',
            'title': 'DOTA 2GETHER 시즌2 6회 - 2부',
            'description': 'DOTA 2GETHER 시즌2 6회 - 2부',
            'upload_date': '20130831',
            'duration': 3868,
        },
    }, {
        'url': 'http://tvpot.daum.net/v/vab4dyeDBysyBssyukBUjBz',
        'only_matching': True,
    }, {
        'url': 'http://tvpot.daum.net/v/07dXWRka62Y%24',
        'only_matching': True,
    }]

    def _real_extract(self, url):
        video_id = re.match(self._VALID_URL, url).group('id')
        canonical_url = 'http://tvpot.daum.net/v/%s' % video_id
        webpage = self._download_webpage(canonical_url, video_id)

        # The page embeds a viewer iframe whose 'vid' parameter carries the
        # full internal id needed by the videofarm APIs.
        full_id = self._search_regex(
            r'src=["\']http://videofarm\.daum\.net/controller/video/viewer/Video\.html\?.*?vid=(.+?)[&"\']',
            webpage, 'full id')
        query = compat_urllib_parse.urlencode({'vid': full_id})

        info = self._download_xml(
            'http://tvpot.daum.net/clip/ClipInfoXml.do?' + query, video_id,
            'Downloading video info')
        urls = self._download_xml(
            'http://videofarm.daum.net/controller/api/open/v1_2/MovieData.apixml?' + query,
            video_id, 'Downloading video formats info')

        # One download URL per advertised quality profile.
        formats = []
        for format_el in urls.findall('result/output_list/output_list'):
            profile = format_el.attrib['profile']
            format_query = compat_urllib_parse.urlencode({
                'vid': full_id,
                'profile': profile,
            })
            url_doc = self._download_xml(
                'http://videofarm.daum.net/controller/api/open/v1_2/MovieLocation.apixml?' + format_query,
                video_id, note='Downloading video data for %s format' % profile)
            formats.append({
                'url': url_doc.find('result/url').text,
                'format_id': profile,
            })

        return {
            'id': video_id,
            'title': info.find('TITLE').text,
            'formats': formats,
            'thumbnail': self._og_search_thumbnail(webpage),
            'description': info.find('CONTENTS').text,
            'duration': int(info.find('DURATION').text),
            'upload_date': info.find('REGDTTM').text[:8],
        }
| unlicense |
gramps-project/gramps | gramps/gen/merge/mergenotequery.py | 10 | 4964 | #
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2010 Michiel D. Nauta
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
"""
Provide merge capabilities for notes.
"""
#-------------------------------------------------------------------------
#
# Gramps modules
#
#-------------------------------------------------------------------------
from ..lib import (Person, Family, Event, Place, Source, Citation, Repository,
Media)
from ..db import DbTxn
from ..const import GRAMPS_LOCALE as glocale
_ = glocale.translation.sgettext
from ..errors import MergeError
#-------------------------------------------------------------------------
#
# MergeNoteQuery
#
#-------------------------------------------------------------------------
class MergeNoteQuery:
    """
    Create database query to merge two notes.
    """
    def __init__(self, dbstate, phoenix, titanic):
        """
        :param dbstate: database state whose db the merge operates on.
        :param phoenix: the note that survives the merge.
        :param titanic: the note that is absorbed and then removed.
        """
        self.database = dbstate.db
        self.phoenix = phoenix
        self.titanic = titanic

    def execute(self):
        """
        Merges two notes into a single note.

        All objects holding a backlink to the old note are rewritten to
        reference the surviving note, then the old note is removed.

        :raises MergeError: if a backlink comes from an unknown object type.
        """
        new_handle = self.phoenix.get_handle()
        old_handle = self.titanic.get_handle()
        self.phoenix.merge(self.titanic)
        # Class name -> (getter, committer) pairs; replaces a long duplicated
        # if/elif chain.  Built at run time inside the method so nothing is
        # evaluated at class-definition time.
        backlink_methods = {
            Person.__name__: (self.database.get_person_from_handle,
                              self.database.commit_person),
            Family.__name__: (self.database.get_family_from_handle,
                              self.database.commit_family),
            Event.__name__: (self.database.get_event_from_handle,
                             self.database.commit_event),
            Source.__name__: (self.database.get_source_from_handle,
                              self.database.commit_source),
            Citation.__name__: (self.database.get_citation_from_handle,
                                self.database.commit_citation),
            Place.__name__: (self.database.get_place_from_handle,
                             self.database.commit_place),
            Media.__name__: (self.database.get_media_from_handle,
                             self.database.commit_media),
            Repository.__name__: (self.database.get_repository_from_handle,
                                  self.database.commit_repository),
        }
        with DbTxn(_("Merge Notes"), self.database) as trans:
            self.database.commit_note(self.phoenix, trans)
            for (class_name, handle) in self.database.find_backlink_handles(
                    old_handle):
                methods = backlink_methods.get(class_name)
                if methods is None:
                    raise MergeError("Encounter object of type %s that has "
                                     "a note reference." % class_name)
                get_from_handle, commit = methods
                obj = get_from_handle(handle)
                assert obj.has_note_reference(old_handle)
                obj.replace_note_references(old_handle, new_handle)
                commit(obj, trans)
            self.database.remove_note(old_handle, trans)
| gpl-2.0 |
ABaldwinHunter/django-clone-classic | tests/forms_tests/tests/test_validators.py | 119 | 1587 | from __future__ import unicode_literals
import re
from unittest import TestCase
from django import forms
from django.core import validators
from django.core.exceptions import ValidationError
class UserForm(forms.Form):
    """Form exercised by the validator tests below."""
    # Deliberately stacks two conflicting validators (no value is both an
    # integer and an email) so cleaning always yields two error messages.
    full_name = forms.CharField(
        max_length=50,
        validators=[
            validators.validate_integer,
            validators.validate_email,
        ]
    )
    # ASCII letters only; anything else produces "Letters only.".
    string = forms.CharField(
        max_length=50,
        validators=[
            validators.RegexValidator(
                regex='^[a-zA-Z]*$',
                message="Letters only.",
            )
        ]
    )
    # Same constraint as 'string' but case-insensitive via re.IGNORECASE.
    ignore_case_string = forms.CharField(
        max_length=50,
        validators=[
            validators.RegexValidator(
                regex='^[a-z]*$',
                message="Letters only.",
                flags=re.IGNORECASE,
            )
        ]
    )
class TestFieldWithValidators(TestCase):
    def test_all_errors_get_reported(self):
        """All invalid fields must surface their validator messages."""
        form = UserForm({
            'full_name': 'not int nor mail',
            'string': '2 is not correct',
            'ignore_case_string': "IgnORE Case strIng",
        })
        self.assertRaises(ValidationError, form.fields['full_name'].clean, 'not int nor mail')
        try:
            form.fields['full_name'].clean('not int nor mail')
        except ValidationError as e:
            # Both stacked validators (integer + email) must report.
            self.assertEqual(2, len(e.messages))
        self.assertFalse(form.is_valid())
        self.assertEqual(form.errors['string'], ["Letters only."])
        # Fixed a copy-paste duplicate: the second assertion re-checked
        # 'string', leaving 'ignore_case_string' (which fails because of the
        # spaces even with re.IGNORECASE) untested.
        self.assertEqual(form.errors['ignore_case_string'], ["Letters only."])
| bsd-3-clause |
Nicolas570/chris_db | components/faker/faker/providers/person/hi_IN/__init__.py | 19 | 3626 | # coding=utf-8
from __future__ import unicode_literals
from .. import Provider as PersonProvider
class Provider(PersonProvider):
    """Hindi (hi_IN) person-name provider with Devanagari name data."""
    # '{{first_name}} {{last_name}}' is listed twice to make that layout
    # twice as likely as the "last, first" form.
    formats = (
        '{{first_name}} {{last_name}}',
        '{{first_name}} {{last_name}}',
        '{{last_name}}, {{first_name}}'
    )
    # Given names in Devanagari script.
    first_names = (
        'अभय','आदित्य','अजित','अखिल','अमर','आनन्द','अंकुर','अनुपम','अशोक','चन्दना','गणेश','गौतम','गोविंदा','हनुमान्','इन्द्रजित','ईश',
        'जगन्नाथ','जगदीश','जयदेव','जितेन्द्र','कैलाश','कालिदास','कम्बोज','किरण','ललित','मानदीप','मोहन','मुकेश','नरेन्द्र','नारायण','निखिल','प्रभाकर',
        'प्रबोध','प्रदीप','प्रणव','प्रेम','राजीव','रतन','रोहन','विष्णु','विक्रम','विजया','विजय','विवेक','यश',
        'अभिलाषा','अदिती','ऐश्वर्या','अमिता','अंकिता','आशा','अवनी','भरत','चेतना','दिव्या','एषा','इन्दु','जया','जयन्ती','ज्योत्सना','कान्ती','कुमारी',
        'लता','लीला','मालती','मोहिनी','निशा','पूर्णिमा','पुष्पा','रचना','रजनी','रश्मी','रिया','सरला','सरस्वती','सावित्री','शक्ति','शान्ता','शर्मिला','श्यामा',
        'सुलभा','तृष्णा','विद्या'
    )
    # Family names in Devanagari script.
    last_names = (
        'पाटिल','शर्मा','आचार्य','अग्रवाल','सिंह','अहलुवालिया','आहूजा','पुष्कर','शिरोळे','गायकवाड','गावित','शिरोळे','बापट','अरोड़ा','बाबू',
        'बादामी','जमानत','बजाज','बक्षी','बालकृष्णन','बालासुब्रमणियम','बसु','भंडारी','चौधरी','चौहान','छाबरा','दादा','डानी','डार', 'दारा', 'दत्ता',
        'दवे', 'दयाल', 'धालीवाल','दीक्षित', 'दोषी', 'दुआ', 'दूबे' ,'ढींगरा','वाल', 'साया', 'बना', 'ड़ाल' ,'गर्ग' ,'गणेश','गांगुली','गुप्ता',
        'हेगडे','जोशी','काले','कृष्णा', 'कृष्णमूर्ति', 'कृष्णन' ,'कुलकर्णी', 'कुमार', 'कुण्डा', 'नाम', 'रामलला', 'लता', 'लोदी', 'लोकनाट्यों',
        'विकावि', 'लाल', 'लाला' ,'वफादार', 'लूथरा' ,'मदन', 'मगर' ,'भारत', 'महावीर' , 'महादेव', 'महाजन', 'महाराज', 'मजूमदार', 'मल्लिक' ,'सेनाधीश',
        'माने' ,'मंगल', 'मंगत', 'रामशर्मा' ,'मणि', 'मान', 'श्रीविमल', 'कुमार', 'मंडल'
    )
| mit |
blacklin/kbengine | kbe/src/lib/python/Lib/idlelib/CallTips.py | 97 | 5932 | """CallTips.py - An IDLE Extension to Jog Your Memory
Call Tips are floating windows which display function, class, and method
parameter and docstring information when you type an opening parenthesis, and
which disappear when you type a closing parenthesis.
"""
import __main__
import inspect
import re
import sys
import textwrap
import types
from idlelib import CallTipWindow
from idlelib.HyperParser import HyperParser
class CallTips:
    """IDLE extension showing a floating window with call signature hints.

    A tip is opened when the caret sits inside a call's parentheses and
    removed again when the call is closed or the tip is dismissed.
    """
    menudefs = [
        ('edit', [
            ("Show call tip", "<<force-open-calltip>>"),
        ])
    ]
    def __init__(self, editwin=None):
        if editwin is None:  # subprocess and test
            self.editwin = None
        else:
            self.editwin = editwin
            self.text = editwin.text
            self.active_calltip = None
            # Factory attribute so tests can substitute a fake window class.
            self._calltip_window = self._make_tk_calltip_window
    def close(self):
        # Drop the window factory; no further tips can be created.
        self._calltip_window = None
    def _make_tk_calltip_window(self):
        # See __init__ for usage
        return CallTipWindow.CallTip(self.text)
    def _remove_calltip_window(self, event=None):
        if self.active_calltip:
            self.active_calltip.hidetip()
            self.active_calltip = None
    def force_open_calltip_event(self, event):
        "The user selected the menu entry or hotkey, open the tip."
        self.open_calltip(True)
    def try_open_calltip_event(self, event):
        """Happens when it would be nice to open a CallTip, but not really
        necessary, for example after an opening bracket, so function calls
        won't be made.
        """
        self.open_calltip(False)
    def refresh_calltip_event(self, event):
        # Re-open the tip only if one is already showing.
        if self.active_calltip and self.active_calltip.is_active():
            self.open_calltip(False)
    def open_calltip(self, evalfuncs):
        """Open a tip for the call surrounding the insertion point.

        evalfuncs: if False, refuse expressions containing '(' so no
        function calls are triggered by evaluating the expression.
        """
        self._remove_calltip_window()
        hp = HyperParser(self.editwin, "insert")
        sur_paren = hp.get_surrounding_brackets('(')
        if not sur_paren:
            return
        hp.set_index(sur_paren[0])
        expression = hp.get_expression()
        if not expression:
            return
        if not evalfuncs and (expression.find('(') != -1):
            return
        argspec = self.fetch_tip(expression)
        if not argspec:
            return
        self.active_calltip = self._calltip_window()
        self.active_calltip.showtip(argspec, sur_paren[0], sur_paren[1])
    def fetch_tip(self, expression):
        """Return the argument list and docstring of a function or class.

        If there is a Python subprocess, get the calltip there.  Otherwise,
        either this fetch_tip() is running in the subprocess or it was
        called in an IDLE running without the subprocess.

        The subprocess environment is that of the most recently run script.
        If two unrelated modules are being edited some calltips in the
        current module may be inoperative if the module was not the last
        to run.

        To find methods, fetch_tip must be fed a fully qualified name.
        """
        try:
            rpcclt = self.editwin.flist.pyshell.interp.rpcclt
        except AttributeError:
            # No subprocess available; resolve locally.
            rpcclt = None
        if rpcclt:
            return rpcclt.remotecall("exec", "get_the_calltip",
                                     (expression,), {})
        else:
            return get_argspec(get_entity(expression))
def get_entity(expression):
    """Evaluate *expression* in a namespace spanning sys.modules and
    __main__.__dict__ and return the result, or None on any failure.
    """
    if not expression:
        return None
    namespace = dict(sys.modules)
    namespace.update(__main__.__dict__)
    try:
        return eval(expression, namespace)
    except BaseException:
        # eval can raise literally anything on user input (including from
        # user-defined classes); an uncaught exception would close IDLE.
        return None
# The following are used in get_argspec and some in tests
_MAX_COLS = 85
_MAX_LINES = 5 # enough for bytes
_INDENT = ' '*4 # for wrapped signatures
_first_param = re.compile('(?<=\()\w*\,?\s*')
_default_callable_argspec = "See source or doc"
def get_argspec(ob):
    '''Return a string describing the signature of a callable object, or ''.

    For Python-coded functions and methods, the first line is introspected.
    Delete 'self' parameter for classes (.__init__) and bound methods.
    The next lines are the first lines of the doc string up to the first
    empty line or _MAX_LINES.  For builtins, this typically includes
    the arguments in addition to the return value.
    '''
    argspec = ""
    try:
        ob_call = ob.__call__
    except BaseException:  # attribute access may fail on arbitrary objects
        return argspec
    if isinstance(ob, type):
        fob = ob.__init__
    elif isinstance(ob_call, types.MethodType):
        fob = ob_call
    else:
        fob = ob
    if isinstance(fob, (types.FunctionType, types.MethodType)):
        # inspect.signature() replaces the deprecated getfullargspec/
        # formatargspec pair (formatargspec was removed in Python 3.11).
        try:
            argspec = str(inspect.signature(fob))
        except (TypeError, ValueError):
            argspec = ""
        if isinstance(fob, types.FunctionType) and (
                isinstance(ob, (type, types.MethodType)) or
                isinstance(ob_call, types.MethodType)):
            # signature() already omits 'self' for bound methods; only a
            # plain function reached through a class (SomeClass.__init__)
            # still shows it and needs stripping.
            argspec = _first_param.sub("", argspec)
    lines = (textwrap.wrap(argspec, _MAX_COLS, subsequent_indent=_INDENT)
             if len(argspec) > _MAX_COLS else [argspec] if argspec else [])
    if isinstance(ob_call, types.MethodType):
        doc = ob_call.__doc__
    else:
        doc = getattr(ob, "__doc__", "")
    if doc:
        # Append up to _MAX_LINES docstring lines, stopping at the first
        # blank one; overlong lines are truncated with an ellipsis.
        for line in doc.split('\n', _MAX_LINES)[:_MAX_LINES]:
            line = line.strip()
            if not line:
                break
            if len(line) > _MAX_COLS:
                line = line[: _MAX_COLS - 3] + '...'
            lines.append(line)
    argspec = '\n'.join(lines)
    if not argspec:
        argspec = _default_callable_argspec
    return argspec
if __name__ == '__main__':
    # Run the calltip unit tests when this module is executed directly.
    from unittest import main
    main('idlelib.idle_test.test_calltips', verbosity=2)
| lgpl-3.0 |
fuselock/odoo | addons/l10n_ch/__init__.py | 424 | 1212 | # -*- coding: utf-8 -*-
##############################################################################
#
# Author: Nicolas Bessi. Copyright Camptocamp SA
# Financial contributors: Hasa SA, Open Net SA,
# Prisme Solutions Informatique SA, Quod SA
#
# Translation contributors: brain-tec AG, Agile Business Group
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import account_wizard
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
andrei-karalionak/ggrc-core | src/ggrc/converters/handlers/default_people.py | 3 | 4333 | # Copyright (C) 2016 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
"""Handlers for default people fields in assessment templates.
These should be used on default verifiers and default assessors.
"""
from ggrc.models import AssessmentTemplate, Person
from ggrc.converters import errors
from ggrc.converters.handlers import handlers
class DefaultPersonColumnHandler(handlers.ColumnHandler):
  """Handler for default verifiers and assessors."""

  # Import-column key -> key used inside the template's default_people dict.
  KEY_MAP = {
      "default_assessors": "assessors",
      "default_verifier": "verifiers",
  }

  # Reverse lookup: lower-cased display label -> stored value.
  PEOPLE_LABELS_MAP = {
      display_name.lower(): value
      for value, display_name
      in AssessmentTemplate.DEFAULT_PEOPLE_LABELS.items()
  }

  def _parse_email_values(self):
    """Parse an email list of default assessors.

    This is the "other" option in the default assessor dropdown menu.

    Returns:
      list of person ids matching the given emails (0 stands in for a
      not-yet-committed person during a dry run).
    """
    # This is not good and fast, because it executes query for each
    # field from each row that contains people list.
    # If the feature is used actively, it should be refactored
    # and optimized.
    new_objects = self.row_converter.block_converter.converter.new_objects
    new_people = new_objects[Person]
    people = []
    emails = []
    for email in self.raw_value.splitlines():
      email = email.strip()
      if not email:
        continue
      if email in new_people:
        # In "dry run" mode person.id is None, so it is replaced by int value
        # to pass validation.
        people.append(new_people[email].id or 0)
      else:
        emails.append(email)
    if emails:
      for person in Person.query.filter(Person.email.in_(emails)).all():
        people.append(person.id)
        emails.remove(person.email)
      # Whatever is left in emails matched no existing person: warn per email.
      if emails:
        for email in emails:
          self.add_warning(errors.UNKNOWN_USER_WARNING,
                           column_name=self.display_name,
                           email=email)
    if not people:
      self.add_error(errors.MISSING_VALUE_ERROR, column_name=self.display_name)
    return people

  def _parse_label_values(self):
    """Parse predefined default assessors.

    These values are the normal selection in the default assessor dropdown.
    """
    value = self.PEOPLE_LABELS_MAP.get(self.raw_value.strip().lower())
    if not value:
      self.add_error(errors.WRONG_REQUIRED_VALUE,
                     column_name=self.display_name,
                     value=self.raw_value.strip().lower())
    return value

  def parse_item(self):
    """Parse values for default assessors."""
    # An '@' anywhere marks the value as an email list rather than a label.
    if "@" in self.raw_value:
      return self._parse_email_values()
    else:
      return self._parse_label_values()

  def set_obj_attr(self):
    """Set default_people attribute.

    This is a joint function for default assessors and verifiers. The first
    column that gets handled will save the value to "_default_people" and the
    second column that gets handled will take that value, include it with its
    own and store it into the correct "default_people" field.

    NOTE: This is a temporary hack that that should be refactored once this
    code is merged into the develop branch. The joining of default_assessors
    and default_verifiers should be done by pre_commit_checks for imports.
    """
    if not self.value or self.row_converter.ignore:
      return
    default_people = self.row_converter.obj.default_people or {}
    default_people[self.KEY_MAP[self.key]] = self.value
    # Merge in the value stashed by the other column, if it ran first.
    _default_people = getattr(self.row_converter.obj, "_default_people", {})
    if _default_people:
      default_people.update(_default_people)
      setattr(self.row_converter.obj, "default_people", default_people)
    else:
      setattr(self.row_converter.obj, "_default_people", default_people)

  def get_value(self):
    """Get value from default_people attribute."""
    value = self.row_converter.obj.default_people.get(
        self.KEY_MAP[self.key],
        "ERROR",
    )
    if isinstance(value, list):
      # This is not good and fast, because it executes query for each
      # field from each row that contains people list.
      # If the feature is used actively, it should be refactored
      # and optimized.
      people = Person.query.filter(Person.id.in_(value)).all()
      value = "\n".join(p.email for p in people)
    return value
| apache-2.0 |
jules185/IoT_Hackathon | .homeassistant/deps/mutagen/id3/_id3v1.py | 7 | 5106 | # -*- coding: utf-8 -*-
# Copyright (C) 2005 Michael Urman
# 2006 Lukas Lalinsky
# 2013 Christoph Reiter
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
import errno
from struct import error as StructError, unpack
from mutagen._util import chr_, text_type
from ._frames import TCON, TRCK, COMM, TDRC, TALB, TPE1, TIT2
def find_id3v1(fileobj):
    """Returns a tuple of (id3tag, offset_to_end) or (None, 0)

    offset mainly because we used to write too short tags in some cases and
    we need the offset to delete them.
    """
    # id3v1 is always at the end (after apev2)
    # "TAG" occurs inside "APETAGEX" (at index 3), so read that many extra
    # bytes to tell a real v1 tag from the tail of an APEv2 header.
    extra_read = b"APETAGEX".index(b"TAG")
    try:
        fileobj.seek(-128 - extra_read, 2)
    except IOError as e:
        if e.errno == errno.EINVAL:
            # If the file is too small, might be ok since we wrote too small
            # tags at some point. let's see how the parsing goes..
            fileobj.seek(0, 0)
        else:
            raise
    data = fileobj.read(128 + extra_read)
    try:
        idx = data.index(b"TAG")
    except ValueError:
        return (None, 0)
    else:
        # FIXME: make use of the apev2 parser here
        # if TAG is part of APETAGEX assume this is an APEv2 tag
        try:
            ape_idx = data.index(b"APETAGEX")
        except ValueError:
            pass
        else:
            if idx == ape_idx + extra_read:
                return (None, 0)
        tag = ParseID3v1(data[idx:])
        if tag is None:
            return (None, 0)
        # Negative offset from end-of-file to the start of the found tag.
        offset = idx - len(data)
        return (tag, offset)
# ID3v1.1 support.
def ParseID3v1(data):
    """Parse an ID3v1 tag, returning a list of ID3v2.4 frames.

    Returns a {frame_name: frame} dict or None.
    """
    try:
        data = data[data.index(b"TAG"):]
    except ValueError:
        return None
    # A well-formed tag is 128 bytes; 124-127 are tolerated because of the
    # short year field workaround described below.
    if 128 < len(data) or len(data) < 124:
        return None
    # Issue #69 - Previous versions of Mutagen, when encountering
    # out-of-spec TDRC and TYER frames of less than four characters,
    # wrote only the characters available - e.g. "1" or "" - into the
    # year field. To parse those, reduce the size of the year field.
    # Amazingly, "0s" works as a struct format string.
    unpack_fmt = "3s30s30s30s%ds29sBB" % (len(data) - 124)
    try:
        tag, title, artist, album, year, comment, track, genre = unpack(
            unpack_fmt, data)
    except StructError:
        return None
    if tag != b"TAG":
        return None
    def fix(data):
        # Fields are NUL-terminated, padded latin-1 byte strings.
        return data.split(b"\x00")[0].strip().decode('latin1')
    title, artist, album, year, comment = map(
        fix, [title, artist, album, year, comment])
    frames = {}
    if title:
        frames["TIT2"] = TIT2(encoding=0, text=title)
    if artist:
        frames["TPE1"] = TPE1(encoding=0, text=[artist])
    if album:
        frames["TALB"] = TALB(encoding=0, text=album)
    if year:
        frames["TDRC"] = TDRC(encoding=0, text=year)
    if comment:
        frames["COMM"] = COMM(
            encoding=0, lang="eng", desc="ID3v1 Comment", text=comment)
    # Don't read a track number if it looks like the comment was
    # padded with spaces instead of nulls (thanks, WinAmp).
    if track and ((track != 32) or (data[-3] == b'\x00'[0])):
        frames["TRCK"] = TRCK(encoding=0, text=str(track))
    # Genre byte 255 means "no genre set".
    if genre != 255:
        frames["TCON"] = TCON(encoding=0, text=str(genre))
    return frames
def MakeID3v1(id3):
    """Return an ID3v1.1 tag string from a dict of ID3v2.4 frames."""
    v1 = {}
    # Text fields: latin-1, truncated to 30 bytes and NUL-padded.
    for v2id, name in {"TIT2": "title", "TPE1": "artist",
                       "TALB": "album"}.items():
        if v2id in id3:
            text = id3[v2id].text[0].encode('latin1', 'replace')[:30]
        else:
            text = b""
        v1[name] = text + (b"\x00" * (30 - len(text)))
    if "COMM" in id3:
        cmnt = id3["COMM"].text[0].encode('latin1', 'replace')[:28]
    else:
        cmnt = b""
    # Comment is capped at 28 bytes + NUL so the v1.1 track byte fits.
    v1["comment"] = cmnt + (b"\x00" * (29 - len(cmnt)))
    if "TRCK" in id3:
        try:
            v1["track"] = chr_(+id3["TRCK"])
        except ValueError:
            # Track numbers outside the single-byte range are dropped.
            v1["track"] = b"\x00"
    else:
        v1["track"] = b"\x00"
    if "TCON" in id3:
        try:
            genre = id3["TCON"].genres[0]
        except IndexError:
            pass
        else:
            if genre in TCON.GENRES:
                v1["genre"] = chr_(TCON.GENRES.index(genre))
    if "genre" not in v1:
        # 0xff marks "no genre" in ID3v1.
        v1["genre"] = b"\xff"
    if "TDRC" in id3:
        year = text_type(id3["TDRC"]).encode('ascii')
    elif "TYER" in id3:
        year = text_type(id3["TYER"]).encode('ascii')
    else:
        year = b""
    v1["year"] = (year + b"\x00\x00\x00\x00")[:4]
    return (
        b"TAG" +
        v1["title"] +
        v1["artist"] +
        v1["album"] +
        v1["year"] +
        v1["comment"] +
        v1["track"] +
        v1["genre"]
    )
| mit |
rperier/linux | scripts/gdb/linux/utils.py | 318 | 5151 | #
# gdb helper commands and functions for Linux kernel debugging
#
# common utilities
#
# Copyright (c) Siemens AG, 2011-2013
#
# Authors:
# Jan Kiszka <[email protected]>
#
# This work is licensed under the terms of the GNU GPL version 2.
#
import gdb
class CachedType:
    """Lazily resolved gdb type, invalidated when a new objfile is loaded."""
    def __init__(self, name):
        self._type = None  # cached gdb.Type, resolved on first get_type()
        self._name = name  # type name to look up, e.g. "long"
    def _new_objfile_handler(self, event):
        # Symbols changed: drop the cache and stop listening until the next
        # successful lookup reconnects this handler.
        self._type = None
        gdb.events.new_objfile.disconnect(self._new_objfile_handler)
    def get_type(self):
        if self._type is None:
            self._type = gdb.lookup_type(self._name)
            if self._type is None:
                raise gdb.GdbError(
                    "cannot resolve type '{0}'".format(self._name))
            # Older gdbs lack the events API; only then skip invalidation.
            if hasattr(gdb, 'events') and hasattr(gdb.events, 'new_objfile'):
                gdb.events.new_objfile.connect(self._new_objfile_handler)
        return self._type
# Cached gdb type object for the target's C 'long'.
long_type = CachedType("long")
def get_long_type():
    """Return the (cached) gdb.Type for the target's 'long'."""
    global long_type
    return long_type.get_type()
def offset_of(typeobj, field):
    """Return the byte offset of *field* within structure type *typeobj*."""
    null_ptr = gdb.Value(0).cast(typeobj)
    # With a NULL base pointer the member's address equals its offset; gdb
    # prints the address as a leading hex token, which we parse back.
    addr_str = str(null_ptr[field].address)
    return int(addr_str.split()[0], 16)
def container_of(ptr, typeobj, member):
    """Return a pointer to the structure of type *typeobj* that contains
    *ptr* as its *member* field."""
    base_addr = ptr.cast(get_long_type()) - offset_of(typeobj, member)
    return base_addr.cast(typeobj)
class ContainerOf(gdb.Function):
    """Return pointer to containing data structure.

$container_of(PTR, "TYPE", "ELEMENT"): Given PTR, return a pointer to the
data structure of the type TYPE in which PTR is the address of ELEMENT.
Note that TYPE and ELEMENT have to be quoted as strings."""
    def __init__(self):
        # Registers the convenience function with gdb as "container_of".
        super(ContainerOf, self).__init__("container_of")
    def invoke(self, ptr, typename, elementname):
        # typename/elementname arrive as gdb string values; .string()
        # converts them to Python strings.
        return container_of(ptr, gdb.lookup_type(typename.string()).pointer(),
                            elementname.string())
# Instantiating once makes $container_of available at the gdb prompt.
ContainerOf()
BIG_ENDIAN = 0
LITTLE_ENDIAN = 1
# Cached result of get_target_endianness(); resolved lazily from gdb.
target_endianness = None
def get_target_endianness():
    """Return BIG_ENDIAN or LITTLE_ENDIAN for the debugged target.

    The answer is derived once from gdb's "show endian" output and cached
    in the module-level target_endianness.
    """
    global target_endianness
    if target_endianness is not None:
        return target_endianness
    endian = gdb.execute("show endian", to_string=True)
    if "little endian" in endian:
        target_endianness = LITTLE_ENDIAN
    elif "big endian" in endian:
        target_endianness = BIG_ENDIAN
    else:
        raise gdb.GdbError("unknown endianness '{0}'".format(str(endian)))
    return target_endianness
def read_memoryview(inf, start, length):
    """Read *length* bytes at *start* from inferior *inf* as a memoryview."""
    return memoryview(inf.read_memory(start, length))
def read_u16(buffer, offset):
    """Decode an unsigned 16-bit integer at *offset*, honoring the
    target's byte order."""
    b0 = buffer[offset]
    b1 = buffer[offset + 1]
    # Indexing a buffer yields 1-char strings on Python 2, ints on Python 3.
    if type(b0) is str:
        b0 = ord(b0)
        b1 = ord(b1)
    if get_target_endianness() == LITTLE_ENDIAN:
        return b0 + (b1 << 8)
    else:
        return b1 + (b0 << 8)
def read_u32(buffer, offset):
    """Decode an unsigned 32-bit integer from two 16-bit halves."""
    first = read_u16(buffer, offset)
    second = read_u16(buffer, offset + 2)
    if get_target_endianness() == LITTLE_ENDIAN:
        return first + (second << 16)
    else:
        return second + (first << 16)
def read_u64(buffer, offset):
    """Decode an unsigned 64-bit integer from two 32-bit halves."""
    first = read_u32(buffer, offset)
    second = read_u32(buffer, offset + 4)
    if get_target_endianness() == LITTLE_ENDIAN:
        return first + (second << 32)
    else:
        return second + (first << 32)
def read_ulong(buffer, offset):
    """Decode a target 'unsigned long' (8 bytes when sizeof(long) == 8,
    otherwise 4)."""
    if get_long_type().sizeof == 8:
        return read_u64(buffer, offset)
    else:
        return read_u32(buffer, offset)
# Cached "show architecture" output for gdb versions without
# Frame.architecture(); filled lazily in is_target_arch().
target_arch = None
def is_target_arch(arch):
    """Return True if *arch* occurs in the target's architecture name."""
    if hasattr(gdb.Frame, 'architecture'):
        return arch in gdb.newest_frame().architecture().name()
    else:
        global target_arch
        if target_arch is None:
            target_arch = gdb.execute("show architecture", to_string=True)
        return arch in target_arch
GDBSERVER_QEMU = 0
GDBSERVER_KGDB = 1
# Cached result of get_gdbserver_type(); reset when the inferior exits.
gdbserver_type = None
def get_gdbserver_type():
    """Detect which gdb stub we are talking to.

    Returns GDBSERVER_QEMU, GDBSERVER_KGDB, or None if neither probe
    matched.  The result is cached until the inferior exits.
    """
    def exit_handler(event):
        # Target went away: force re-detection on the next call.
        global gdbserver_type
        gdbserver_type = None
        gdb.events.exited.disconnect(exit_handler)
    def probe_qemu():
        # QEMU answers its monitor command; other stubs raise gdb.error.
        try:
            return gdb.execute("monitor info version", to_string=True) != ""
        except gdb.error:
            return False
    def probe_kgdb():
        # kgdb exposes per-CPU shadow threads named shadowCPU<n>.
        try:
            thread_info = gdb.execute("info thread 2", to_string=True)
            return "shadowCPU0" in thread_info
        except gdb.error:
            return False
    global gdbserver_type
    if gdbserver_type is None:
        if probe_qemu():
            gdbserver_type = GDBSERVER_QEMU
        elif probe_kgdb():
            gdbserver_type = GDBSERVER_KGDB
        if gdbserver_type is not None and hasattr(gdb, 'events'):
            gdb.events.exited.connect(exit_handler)
    return gdbserver_type
def gdb_eval_or_none(expresssion):
    """Parse and evaluate the expression, or return None if gdb rejects it."""
    try:
        result = gdb.parse_and_eval(expresssion)
    except gdb.error:
        return None
    return result
def dentry_name(d):
    """Build the path of dentry *d* by walking d_parent up to the root."""
    parent = d['d_parent']
    # The root dentry is its own parent (or parent is NULL).
    if parent == d or parent == 0:
        return ""
    return dentry_name(parent) + "/" + d['d_iname'].string()
| gpl-2.0 |
rasata/brotli | python/tests/test_utils.py | 98 | 1128 | from __future__ import print_function
import sys
import os
import sysconfig
import filecmp
def diff_q(first_file, second_file):
    """Simulate call to POSIX diff with -q argument"""
    files_match = filecmp.cmp(first_file, second_file, shallow=False)
    if files_match:
        return 0
    print("Files %s and %s differ" % (first_file, second_file),
          file=sys.stderr)
    return 1
# Interpreter used to launch the bro.py script under test.
PYTHON = sys.executable or "python"
# 'bro.py' script should be in parent directory
BRO = os.path.abspath("../bro.py")
# get platform- and version-specific build/lib folder
platform_lib_name = "lib.{platform}-{version[0]}.{version[1]}".format(
    platform=sysconfig.get_platform(),
    version=sys.version_info)
# by default, distutils' build base is in the same location as setup.py
build_base = os.path.abspath(os.path.join("..", "..", "build"))
build_lib = os.path.join(build_base, platform_lib_name)
# prepend build/lib to PYTHONPATH environment variable
TEST_ENV = os.environ.copy()
if 'PYTHONPATH' not in TEST_ENV:
    TEST_ENV['PYTHONPATH'] = build_lib
else:
    TEST_ENV['PYTHONPATH'] = build_lib + os.pathsep + TEST_ENV['PYTHONPATH']
| apache-2.0 |
pekrau/Publications | publications/login.py | 1 | 1889 | "Publications: Login and logout pages."
import logging
import tornado.web
from . import constants
from . import settings
from . import utils
from .requesthandler import RequestHandler
from .account import AccountSaver
class Login(RequestHandler):
    "Login to a account account. Set a secure cookie."
    def get(self):
        "Display login page."
        self.render("login.html",
                    next=self.get_argument("next", self.reverse_url("home")))
    def post(self):
        """Login to a account account. Set a secure cookie.
        Log failed login attempt and disable account if too many recent.
        """
        # NOTE(review): despite the docstring above, no logging of failed
        # attempts or account disabling is implemented here.
        try:
            email = self.get_argument("email")
            password = self.get_argument("password")
        except tornado.web.MissingArgumentError:
            self.set_error_flash("Missing email or password argument.")
            self.see_other("login")
            return
        try:
            account = self.get_account(email)
            # Treat a wrong password the same as a missing account.
            if utils.hashed_password(password) != account.get("password"):
                raise KeyError
        except KeyError:
            # Same flash message for both failure modes.
            self.set_error_flash("No such account or invalid password.")
            self.see_other("login")
        else:
            self.set_secure_cookie(constants.USER_COOKIE,
                                   account["email"],
                                   expires_days=settings["LOGIN_MAX_AGE_DAYS"])
            with AccountSaver(doc=account, rqh=self) as saver:
                saver["login"] = utils.timestamp() # Set last login timestamp.
            self.redirect(self.get_argument("next", self.reverse_url("home")))
class Logout(RequestHandler):
    "Logout; unset the secure cookie to invalidate login session."
    @tornado.web.authenticated
    def post(self):
        # An empty cookie value invalidates the session on the next request.
        self.set_secure_cookie(constants.USER_COOKIE, "")
        self.see_other("home")
| mit |
davcamer/clients | projects-for-testing/zookeeper/src/contrib/zkpython/src/examples/watch_znode_for_changes.py | 138 | 6544 | #!/usr/bin/env python2.6
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" ZNode Change Watcher Skeleton Script
This script shows you how to write a python program that watches a specific
znode for changes and reacts to them.
Steps to understand how this script works:
1. start a standalone ZooKeeper server (by default it listens on localhost:2181)
Did you know you can deploy "local clusters" by using zkconf[1]?
[1] http://github.com/phunt/zkconf
2. enter the command line console
3. create the test node:
[zk: (CONNECTED) 1] create /watch-test dummy-data
Created /watch-test
4. in another shell start this script in verbose mode
$ python watch_znode_for_changes.py -v
# you should see a lot of log messages. have a look over them because
# you can easily understand how zookeeper works
5. update the node data:
[zk: (CONNECTED) 2] set /watch-test new-data
cZxid = 0xa0000001a
ctime = Fri Jul 09 19:14:45 EEST 2010
mZxid = 0xa0000001e
mtime = Fri Jul 09 19:18:18 EEST 2010
pZxid = 0xa0000001a
cversion = 0
dataVersion = 1
aclVersion = 0
ephemeralOwner = 0x0
dataLength = 8
numChildren = 0
... and you should see similar log messages:
2010-07-09 19:18:18,537:11542(0xb6ea5b70):ZOO_DEBUG@process_completions@1765: Calling a watcher for node [/watch-test], type = -1 event=ZOO_CHANGED_EVENT
2010-07-09 19:18:18,537 watch_znode_for_changes.py:83 - Running watcher: zh=0 event=3 state=3 path=/watch-test
2010-07-09 19:18:18,537:11542(0xb6ea5b70):ZOO_DEBUG@zoo_awget@2400: Sending request xid=0x4c374b33 for path [/watch-test] to 127.0.0.1:2181
2010-07-09 19:18:18,545:11542(0xb76a6b70):ZOO_DEBUG@zookeeper_process@1980: Queueing asynchronous response
2010-07-09 19:18:18,545:11542(0xb6ea5b70):ZOO_DEBUG@process_completions@1772: Calling COMPLETION_DATA for xid=0x4c374b33 rc=0
2010-07-09 19:18:18,545 watch_znode_for_changes.py:54 - This is where your application does work.
You can repeat this step multiple times.
6. that's all. in the end you can delete the node and you should see a ZOO_DELETED_EVENT
"""
import logging
import logging.handlers
import signal
import sys
import time
import threading
import zookeeper
from optparse import OptionParser
logger = logging.getLogger()
class MyClass(threading.Thread):
  """Thread that keeps a data watch on ``znode`` and reacts to changes."""
  znode = '/watch-test'
  def __init__(self, options, args):
    threading.Thread.__init__(self)
    logger.debug('Initializing MyClass thread.')
    if options.verbose:
      zookeeper.set_debug_level(zookeeper.LOG_LEVEL_DEBUG)
    self.zh = zookeeper.init(options.servers)
    # aget() is asynchronous: self.handler receives the data response and
    # self.watcher is armed once, firing on the next change of the znode.
    if zookeeper.OK != zookeeper.aget(self.zh, self.znode,
                                      self.watcher, self.handler):
      logger.critical('Unable to get znode! Exiting.')
      sys.exit(1)
  def __del__(self):
    zookeeper.close(self.zh)
  def aget(self):
    # Re-issue the asynchronous get, re-arming the data watch.
    return zookeeper.aget(self.zh, self.znode, self.watcher, self.handler)
  def handler(self, zh, rc, data, stat):
    """Handle zookeeper.aget() responses.

    This code handles the zookeeper.aget callback. It does not handle watches.

    Numeric arguments map to constants. See ``DATA`` in ``help(zookeeper)``
    for more information.

    Args:
      zh Zookeeper handle that made this request.
      rc Return code.
      data Data stored in the znode.

    Does not provide a return value.
    """
    if zookeeper.OK == rc:
      logger.debug('This is where your application does work.')
    else:
      if zookeeper.NONODE == rc:
        # avoid sending too many requests if the node does not yet exists
        logger.info('Node not found. Trying again to set the watch.')
        time.sleep(1)
        if zookeeper.OK != self.aget():
          logger.critical('Unable to get znode! Exiting.')
          sys.exit(1)
  def watcher(self, zh, event, state, path):
    """Handle zookeeper.aget() watches.

    This code is called when a znode changes and triggers a data watch.
    It is not called to handle the zookeeper.aget call itself.

    Numeric arguments map to constants. See ``DATA`` in ``help(zookeeper)``
    for more information.

    Args:
      zh Zookeeper handle that set this watch.
      event Event that caused the watch (often called ``type`` elsewhere).
      state Connection state.
      path Znode that triggered this watch.

    Does not provide a return value.
    """
    out = ['Running watcher:',
           'zh=%d' % zh,
           'event=%d' % event,
           'state=%d' % state,
           'path=%s' % path]
    logger.debug(' '.join(out))
    # Watches are one-shot: re-arm via aget() when our znode changed.
    if event == zookeeper.CHANGED_EVENT and \
       state == zookeeper.CONNECTED_STATE and \
       self.znode == path:
      if zookeeper.OK != self.aget():
        logger.critical('Unable to get znode! Exiting.')
        sys.exit(1)
  def run(self):
    # Sleep forever; all real work happens in the async callbacks above.
    while True:
      time.sleep(86400)
def main(argv=None):
    """Parse command-line options, configure logging, and run the watcher.

    Blocks forever joining the watcher thread; exits on Ctrl-C.
    """
    # Allow Ctrl-C to terminate the process with the default handler.
    signal.signal(signal.SIGINT, signal.SIG_DFL)

    parser = OptionParser()
    parser.add_option('-v', '--verbose',
                      dest='verbose',
                      default=False,
                      action='store_true',
                      help='Verbose logging. (default: %default)')
    parser.add_option('-s', '--servers',
                      dest='servers',
                      default='localhost:2181',
                      help='Comma-separated list of host:port pairs. (default: %default)')
    (options, args) = parser.parse_args()

    # Root logger level follows the --verbose flag.
    logger.setLevel(logging.DEBUG if options.verbose else logging.INFO)

    stream_handler = logging.StreamHandler()
    stream_handler.setFormatter(
        logging.Formatter("%(asctime)s %(filename)s:%(lineno)d - %(message)s"))
    logger.addHandler(stream_handler)

    logger.info('Starting Zookeeper python example: %s' % ' '.join(sys.argv))

    mc = MyClass(options, args)
    mc.start()
    mc.join()
# Script entry point: run only when executed directly, not on import.
if __name__ == '__main__':
    main()
| apache-2.0 |
codeaudit/pattern-1 | pattern/web/pdf/glyphlist.py | 56 | 121501 | #!/usr/bin/env python2
""" Mappings from Adobe glyph names to Unicode characters.
In some CMap tables, Adobe glyph names are used for specifying
Unicode characters instead of using decimal/hex character code.
The following data was obtained by running
$ wget http://www.adobe.com/devnet/opentype/archives/glyphlist.txt
$ python tools/conv_glyphlist.py glyphlist.txt > glyphlist.py
"""
# ###################################################################################
# Copyright (c) 1997,1998,2002,2007 Adobe Systems Incorporated
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this documentation file to use, copy, publish, distribute,
# sublicense, and/or sell copies of the documentation, and to permit
# others to do the same, provided that:
# - No modification, editing or other alteration of this document is
# allowed; and
# - The above copyright notice and this permission notice shall be
# included in all copies of the documentation.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this documentation file, to create their own derivative works
# from the content of this document to use, copy, publish, distribute,
# sublicense, and/or sell the derivative works, and to permit others to do
# the same, provided that the derived work is not represented as being a
# copy or version of this document.
#
# Adobe shall not be liable to any party for any loss of revenue or profit
# or for indirect, incidental, special, consequential, or other similar
# damages, whether based on tort (including without limitation negligence
# or strict liability), contract or other legal or equitable grounds even
# if Adobe has been advised or had reason to know of the possibility of
# such damages. The Adobe materials are provided on an "AS IS" basis.
# Adobe specifically disclaims all express, statutory, or implied
# warranties relating to the Adobe materials, including but not limited to
# those concerning merchantability or fitness for a particular purpose or
# non-infringement of any third party rights regarding the Adobe
# materials.
# ###################################################################################
# Name: Adobe Glyph List
# Table version: 2.0
# Date: September 20, 2002
#
# See http://partners.adobe.com/asn/developer/typeforum/unicodegn.html
#
# Format: Semicolon-delimited fields:
# (1) glyph name
# (2) Unicode scalar value
glyphname2unicode = {
'A': u'\u0041',
'AE': u'\u00C6',
'AEacute': u'\u01FC',
'AEmacron': u'\u01E2',
'AEsmall': u'\uF7E6',
'Aacute': u'\u00C1',
'Aacutesmall': u'\uF7E1',
'Abreve': u'\u0102',
'Abreveacute': u'\u1EAE',
'Abrevecyrillic': u'\u04D0',
'Abrevedotbelow': u'\u1EB6',
'Abrevegrave': u'\u1EB0',
'Abrevehookabove': u'\u1EB2',
'Abrevetilde': u'\u1EB4',
'Acaron': u'\u01CD',
'Acircle': u'\u24B6',
'Acircumflex': u'\u00C2',
'Acircumflexacute': u'\u1EA4',
'Acircumflexdotbelow': u'\u1EAC',
'Acircumflexgrave': u'\u1EA6',
'Acircumflexhookabove': u'\u1EA8',
'Acircumflexsmall': u'\uF7E2',
'Acircumflextilde': u'\u1EAA',
'Acute': u'\uF6C9',
'Acutesmall': u'\uF7B4',
'Acyrillic': u'\u0410',
'Adblgrave': u'\u0200',
'Adieresis': u'\u00C4',
'Adieresiscyrillic': u'\u04D2',
'Adieresismacron': u'\u01DE',
'Adieresissmall': u'\uF7E4',
'Adotbelow': u'\u1EA0',
'Adotmacron': u'\u01E0',
'Agrave': u'\u00C0',
'Agravesmall': u'\uF7E0',
'Ahookabove': u'\u1EA2',
'Aiecyrillic': u'\u04D4',
'Ainvertedbreve': u'\u0202',
'Alpha': u'\u0391',
'Alphatonos': u'\u0386',
'Amacron': u'\u0100',
'Amonospace': u'\uFF21',
'Aogonek': u'\u0104',
'Aring': u'\u00C5',
'Aringacute': u'\u01FA',
'Aringbelow': u'\u1E00',
'Aringsmall': u'\uF7E5',
'Asmall': u'\uF761',
'Atilde': u'\u00C3',
'Atildesmall': u'\uF7E3',
'Aybarmenian': u'\u0531',
'B': u'\u0042',
'Bcircle': u'\u24B7',
'Bdotaccent': u'\u1E02',
'Bdotbelow': u'\u1E04',
'Becyrillic': u'\u0411',
'Benarmenian': u'\u0532',
'Beta': u'\u0392',
'Bhook': u'\u0181',
'Blinebelow': u'\u1E06',
'Bmonospace': u'\uFF22',
'Brevesmall': u'\uF6F4',
'Bsmall': u'\uF762',
'Btopbar': u'\u0182',
'C': u'\u0043',
'Caarmenian': u'\u053E',
'Cacute': u'\u0106',
'Caron': u'\uF6CA',
'Caronsmall': u'\uF6F5',
'Ccaron': u'\u010C',
'Ccedilla': u'\u00C7',
'Ccedillaacute': u'\u1E08',
'Ccedillasmall': u'\uF7E7',
'Ccircle': u'\u24B8',
'Ccircumflex': u'\u0108',
'Cdot': u'\u010A',
'Cdotaccent': u'\u010A',
'Cedillasmall': u'\uF7B8',
'Chaarmenian': u'\u0549',
'Cheabkhasiancyrillic': u'\u04BC',
'Checyrillic': u'\u0427',
'Chedescenderabkhasiancyrillic': u'\u04BE',
'Chedescendercyrillic': u'\u04B6',
'Chedieresiscyrillic': u'\u04F4',
'Cheharmenian': u'\u0543',
'Chekhakassiancyrillic': u'\u04CB',
'Cheverticalstrokecyrillic': u'\u04B8',
'Chi': u'\u03A7',
'Chook': u'\u0187',
'Circumflexsmall': u'\uF6F6',
'Cmonospace': u'\uFF23',
'Coarmenian': u'\u0551',
'Csmall': u'\uF763',
'D': u'\u0044',
'DZ': u'\u01F1',
'DZcaron': u'\u01C4',
'Daarmenian': u'\u0534',
'Dafrican': u'\u0189',
'Dcaron': u'\u010E',
'Dcedilla': u'\u1E10',
'Dcircle': u'\u24B9',
'Dcircumflexbelow': u'\u1E12',
'Dcroat': u'\u0110',
'Ddotaccent': u'\u1E0A',
'Ddotbelow': u'\u1E0C',
'Decyrillic': u'\u0414',
'Deicoptic': u'\u03EE',
'Delta': u'\u2206',
'Deltagreek': u'\u0394',
'Dhook': u'\u018A',
'Dieresis': u'\uF6CB',
'DieresisAcute': u'\uF6CC',
'DieresisGrave': u'\uF6CD',
'Dieresissmall': u'\uF7A8',
'Digammagreek': u'\u03DC',
'Djecyrillic': u'\u0402',
'Dlinebelow': u'\u1E0E',
'Dmonospace': u'\uFF24',
'Dotaccentsmall': u'\uF6F7',
'Dslash': u'\u0110',
'Dsmall': u'\uF764',
'Dtopbar': u'\u018B',
'Dz': u'\u01F2',
'Dzcaron': u'\u01C5',
'Dzeabkhasiancyrillic': u'\u04E0',
'Dzecyrillic': u'\u0405',
'Dzhecyrillic': u'\u040F',
'E': u'\u0045',
'Eacute': u'\u00C9',
'Eacutesmall': u'\uF7E9',
'Ebreve': u'\u0114',
'Ecaron': u'\u011A',
'Ecedillabreve': u'\u1E1C',
'Echarmenian': u'\u0535',
'Ecircle': u'\u24BA',
'Ecircumflex': u'\u00CA',
'Ecircumflexacute': u'\u1EBE',
'Ecircumflexbelow': u'\u1E18',
'Ecircumflexdotbelow': u'\u1EC6',
'Ecircumflexgrave': u'\u1EC0',
'Ecircumflexhookabove': u'\u1EC2',
'Ecircumflexsmall': u'\uF7EA',
'Ecircumflextilde': u'\u1EC4',
'Ecyrillic': u'\u0404',
'Edblgrave': u'\u0204',
'Edieresis': u'\u00CB',
'Edieresissmall': u'\uF7EB',
'Edot': u'\u0116',
'Edotaccent': u'\u0116',
'Edotbelow': u'\u1EB8',
'Efcyrillic': u'\u0424',
'Egrave': u'\u00C8',
'Egravesmall': u'\uF7E8',
'Eharmenian': u'\u0537',
'Ehookabove': u'\u1EBA',
'Eightroman': u'\u2167',
'Einvertedbreve': u'\u0206',
'Eiotifiedcyrillic': u'\u0464',
'Elcyrillic': u'\u041B',
'Elevenroman': u'\u216A',
'Emacron': u'\u0112',
'Emacronacute': u'\u1E16',
'Emacrongrave': u'\u1E14',
'Emcyrillic': u'\u041C',
'Emonospace': u'\uFF25',
'Encyrillic': u'\u041D',
'Endescendercyrillic': u'\u04A2',
'Eng': u'\u014A',
'Enghecyrillic': u'\u04A4',
'Enhookcyrillic': u'\u04C7',
'Eogonek': u'\u0118',
'Eopen': u'\u0190',
'Epsilon': u'\u0395',
'Epsilontonos': u'\u0388',
'Ercyrillic': u'\u0420',
'Ereversed': u'\u018E',
'Ereversedcyrillic': u'\u042D',
'Escyrillic': u'\u0421',
'Esdescendercyrillic': u'\u04AA',
'Esh': u'\u01A9',
'Esmall': u'\uF765',
'Eta': u'\u0397',
'Etarmenian': u'\u0538',
'Etatonos': u'\u0389',
'Eth': u'\u00D0',
'Ethsmall': u'\uF7F0',
'Etilde': u'\u1EBC',
'Etildebelow': u'\u1E1A',
'Euro': u'\u20AC',
'Ezh': u'\u01B7',
'Ezhcaron': u'\u01EE',
'Ezhreversed': u'\u01B8',
'F': u'\u0046',
'Fcircle': u'\u24BB',
'Fdotaccent': u'\u1E1E',
'Feharmenian': u'\u0556',
'Feicoptic': u'\u03E4',
'Fhook': u'\u0191',
'Fitacyrillic': u'\u0472',
'Fiveroman': u'\u2164',
'Fmonospace': u'\uFF26',
'Fourroman': u'\u2163',
'Fsmall': u'\uF766',
'G': u'\u0047',
'GBsquare': u'\u3387',
'Gacute': u'\u01F4',
'Gamma': u'\u0393',
'Gammaafrican': u'\u0194',
'Gangiacoptic': u'\u03EA',
'Gbreve': u'\u011E',
'Gcaron': u'\u01E6',
'Gcedilla': u'\u0122',
'Gcircle': u'\u24BC',
'Gcircumflex': u'\u011C',
'Gcommaaccent': u'\u0122',
'Gdot': u'\u0120',
'Gdotaccent': u'\u0120',
'Gecyrillic': u'\u0413',
'Ghadarmenian': u'\u0542',
'Ghemiddlehookcyrillic': u'\u0494',
'Ghestrokecyrillic': u'\u0492',
'Gheupturncyrillic': u'\u0490',
'Ghook': u'\u0193',
'Gimarmenian': u'\u0533',
'Gjecyrillic': u'\u0403',
'Gmacron': u'\u1E20',
'Gmonospace': u'\uFF27',
'Grave': u'\uF6CE',
'Gravesmall': u'\uF760',
'Gsmall': u'\uF767',
'Gsmallhook': u'\u029B',
'Gstroke': u'\u01E4',
'H': u'\u0048',
'H18533': u'\u25CF',
'H18543': u'\u25AA',
'H18551': u'\u25AB',
'H22073': u'\u25A1',
'HPsquare': u'\u33CB',
'Haabkhasiancyrillic': u'\u04A8',
'Hadescendercyrillic': u'\u04B2',
'Hardsigncyrillic': u'\u042A',
'Hbar': u'\u0126',
'Hbrevebelow': u'\u1E2A',
'Hcedilla': u'\u1E28',
'Hcircle': u'\u24BD',
'Hcircumflex': u'\u0124',
'Hdieresis': u'\u1E26',
'Hdotaccent': u'\u1E22',
'Hdotbelow': u'\u1E24',
'Hmonospace': u'\uFF28',
'Hoarmenian': u'\u0540',
'Horicoptic': u'\u03E8',
'Hsmall': u'\uF768',
'Hungarumlaut': u'\uF6CF',
'Hungarumlautsmall': u'\uF6F8',
'Hzsquare': u'\u3390',
'I': u'\u0049',
'IAcyrillic': u'\u042F',
'IJ': u'\u0132',
'IUcyrillic': u'\u042E',
'Iacute': u'\u00CD',
'Iacutesmall': u'\uF7ED',
'Ibreve': u'\u012C',
'Icaron': u'\u01CF',
'Icircle': u'\u24BE',
'Icircumflex': u'\u00CE',
'Icircumflexsmall': u'\uF7EE',
'Icyrillic': u'\u0406',
'Idblgrave': u'\u0208',
'Idieresis': u'\u00CF',
'Idieresisacute': u'\u1E2E',
'Idieresiscyrillic': u'\u04E4',
'Idieresissmall': u'\uF7EF',
'Idot': u'\u0130',
'Idotaccent': u'\u0130',
'Idotbelow': u'\u1ECA',
'Iebrevecyrillic': u'\u04D6',
'Iecyrillic': u'\u0415',
'Ifraktur': u'\u2111',
'Igrave': u'\u00CC',
'Igravesmall': u'\uF7EC',
'Ihookabove': u'\u1EC8',
'Iicyrillic': u'\u0418',
'Iinvertedbreve': u'\u020A',
'Iishortcyrillic': u'\u0419',
'Imacron': u'\u012A',
'Imacroncyrillic': u'\u04E2',
'Imonospace': u'\uFF29',
'Iniarmenian': u'\u053B',
'Iocyrillic': u'\u0401',
'Iogonek': u'\u012E',
'Iota': u'\u0399',
'Iotaafrican': u'\u0196',
'Iotadieresis': u'\u03AA',
'Iotatonos': u'\u038A',
'Ismall': u'\uF769',
'Istroke': u'\u0197',
'Itilde': u'\u0128',
'Itildebelow': u'\u1E2C',
'Izhitsacyrillic': u'\u0474',
'Izhitsadblgravecyrillic': u'\u0476',
'J': u'\u004A',
'Jaarmenian': u'\u0541',
'Jcircle': u'\u24BF',
'Jcircumflex': u'\u0134',
'Jecyrillic': u'\u0408',
'Jheharmenian': u'\u054B',
'Jmonospace': u'\uFF2A',
'Jsmall': u'\uF76A',
'K': u'\u004B',
'KBsquare': u'\u3385',
'KKsquare': u'\u33CD',
'Kabashkircyrillic': u'\u04A0',
'Kacute': u'\u1E30',
'Kacyrillic': u'\u041A',
'Kadescendercyrillic': u'\u049A',
'Kahookcyrillic': u'\u04C3',
'Kappa': u'\u039A',
'Kastrokecyrillic': u'\u049E',
'Kaverticalstrokecyrillic': u'\u049C',
'Kcaron': u'\u01E8',
'Kcedilla': u'\u0136',
'Kcircle': u'\u24C0',
'Kcommaaccent': u'\u0136',
'Kdotbelow': u'\u1E32',
'Keharmenian': u'\u0554',
'Kenarmenian': u'\u053F',
'Khacyrillic': u'\u0425',
'Kheicoptic': u'\u03E6',
'Khook': u'\u0198',
'Kjecyrillic': u'\u040C',
'Klinebelow': u'\u1E34',
'Kmonospace': u'\uFF2B',
'Koppacyrillic': u'\u0480',
'Koppagreek': u'\u03DE',
'Ksicyrillic': u'\u046E',
'Ksmall': u'\uF76B',
'L': u'\u004C',
'LJ': u'\u01C7',
'LL': u'\uF6BF',
'Lacute': u'\u0139',
'Lambda': u'\u039B',
'Lcaron': u'\u013D',
'Lcedilla': u'\u013B',
'Lcircle': u'\u24C1',
'Lcircumflexbelow': u'\u1E3C',
'Lcommaaccent': u'\u013B',
'Ldot': u'\u013F',
'Ldotaccent': u'\u013F',
'Ldotbelow': u'\u1E36',
'Ldotbelowmacron': u'\u1E38',
'Liwnarmenian': u'\u053C',
'Lj': u'\u01C8',
'Ljecyrillic': u'\u0409',
'Llinebelow': u'\u1E3A',
'Lmonospace': u'\uFF2C',
'Lslash': u'\u0141',
'Lslashsmall': u'\uF6F9',
'Lsmall': u'\uF76C',
'M': u'\u004D',
'MBsquare': u'\u3386',
'Macron': u'\uF6D0',
'Macronsmall': u'\uF7AF',
'Macute': u'\u1E3E',
'Mcircle': u'\u24C2',
'Mdotaccent': u'\u1E40',
'Mdotbelow': u'\u1E42',
'Menarmenian': u'\u0544',
'Mmonospace': u'\uFF2D',
'Msmall': u'\uF76D',
'Mturned': u'\u019C',
'Mu': u'\u039C',
'N': u'\u004E',
'NJ': u'\u01CA',
'Nacute': u'\u0143',
'Ncaron': u'\u0147',
'Ncedilla': u'\u0145',
'Ncircle': u'\u24C3',
'Ncircumflexbelow': u'\u1E4A',
'Ncommaaccent': u'\u0145',
'Ndotaccent': u'\u1E44',
'Ndotbelow': u'\u1E46',
'Nhookleft': u'\u019D',
'Nineroman': u'\u2168',
'Nj': u'\u01CB',
'Njecyrillic': u'\u040A',
'Nlinebelow': u'\u1E48',
'Nmonospace': u'\uFF2E',
'Nowarmenian': u'\u0546',
'Nsmall': u'\uF76E',
'Ntilde': u'\u00D1',
'Ntildesmall': u'\uF7F1',
'Nu': u'\u039D',
'O': u'\u004F',
'OE': u'\u0152',
'OEsmall': u'\uF6FA',
'Oacute': u'\u00D3',
'Oacutesmall': u'\uF7F3',
'Obarredcyrillic': u'\u04E8',
'Obarreddieresiscyrillic': u'\u04EA',
'Obreve': u'\u014E',
'Ocaron': u'\u01D1',
'Ocenteredtilde': u'\u019F',
'Ocircle': u'\u24C4',
'Ocircumflex': u'\u00D4',
'Ocircumflexacute': u'\u1ED0',
'Ocircumflexdotbelow': u'\u1ED8',
'Ocircumflexgrave': u'\u1ED2',
'Ocircumflexhookabove': u'\u1ED4',
'Ocircumflexsmall': u'\uF7F4',
'Ocircumflextilde': u'\u1ED6',
'Ocyrillic': u'\u041E',
'Odblacute': u'\u0150',
'Odblgrave': u'\u020C',
'Odieresis': u'\u00D6',
'Odieresiscyrillic': u'\u04E6',
'Odieresissmall': u'\uF7F6',
'Odotbelow': u'\u1ECC',
'Ogoneksmall': u'\uF6FB',
'Ograve': u'\u00D2',
'Ogravesmall': u'\uF7F2',
'Oharmenian': u'\u0555',
'Ohm': u'\u2126',
'Ohookabove': u'\u1ECE',
'Ohorn': u'\u01A0',
'Ohornacute': u'\u1EDA',
'Ohorndotbelow': u'\u1EE2',
'Ohorngrave': u'\u1EDC',
'Ohornhookabove': u'\u1EDE',
'Ohorntilde': u'\u1EE0',
'Ohungarumlaut': u'\u0150',
'Oi': u'\u01A2',
'Oinvertedbreve': u'\u020E',
'Omacron': u'\u014C',
'Omacronacute': u'\u1E52',
'Omacrongrave': u'\u1E50',
'Omega': u'\u2126',
'Omegacyrillic': u'\u0460',
'Omegagreek': u'\u03A9',
'Omegaroundcyrillic': u'\u047A',
'Omegatitlocyrillic': u'\u047C',
'Omegatonos': u'\u038F',
'Omicron': u'\u039F',
'Omicrontonos': u'\u038C',
'Omonospace': u'\uFF2F',
'Oneroman': u'\u2160',
'Oogonek': u'\u01EA',
'Oogonekmacron': u'\u01EC',
'Oopen': u'\u0186',
'Oslash': u'\u00D8',
'Oslashacute': u'\u01FE',
'Oslashsmall': u'\uF7F8',
'Osmall': u'\uF76F',
'Ostrokeacute': u'\u01FE',
'Otcyrillic': u'\u047E',
'Otilde': u'\u00D5',
'Otildeacute': u'\u1E4C',
'Otildedieresis': u'\u1E4E',
'Otildesmall': u'\uF7F5',
'P': u'\u0050',
'Pacute': u'\u1E54',
'Pcircle': u'\u24C5',
'Pdotaccent': u'\u1E56',
'Pecyrillic': u'\u041F',
'Peharmenian': u'\u054A',
'Pemiddlehookcyrillic': u'\u04A6',
'Phi': u'\u03A6',
'Phook': u'\u01A4',
'Pi': u'\u03A0',
'Piwrarmenian': u'\u0553',
'Pmonospace': u'\uFF30',
'Psi': u'\u03A8',
'Psicyrillic': u'\u0470',
'Psmall': u'\uF770',
'Q': u'\u0051',
'Qcircle': u'\u24C6',
'Qmonospace': u'\uFF31',
'Qsmall': u'\uF771',
'R': u'\u0052',
'Raarmenian': u'\u054C',
'Racute': u'\u0154',
'Rcaron': u'\u0158',
'Rcedilla': u'\u0156',
'Rcircle': u'\u24C7',
'Rcommaaccent': u'\u0156',
'Rdblgrave': u'\u0210',
'Rdotaccent': u'\u1E58',
'Rdotbelow': u'\u1E5A',
'Rdotbelowmacron': u'\u1E5C',
'Reharmenian': u'\u0550',
'Rfraktur': u'\u211C',
'Rho': u'\u03A1',
'Ringsmall': u'\uF6FC',
'Rinvertedbreve': u'\u0212',
'Rlinebelow': u'\u1E5E',
'Rmonospace': u'\uFF32',
'Rsmall': u'\uF772',
'Rsmallinverted': u'\u0281',
'Rsmallinvertedsuperior': u'\u02B6',
'S': u'\u0053',
'SF010000': u'\u250C',
'SF020000': u'\u2514',
'SF030000': u'\u2510',
'SF040000': u'\u2518',
'SF050000': u'\u253C',
'SF060000': u'\u252C',
'SF070000': u'\u2534',
'SF080000': u'\u251C',
'SF090000': u'\u2524',
'SF100000': u'\u2500',
'SF110000': u'\u2502',
'SF190000': u'\u2561',
'SF200000': u'\u2562',
'SF210000': u'\u2556',
'SF220000': u'\u2555',
'SF230000': u'\u2563',
'SF240000': u'\u2551',
'SF250000': u'\u2557',
'SF260000': u'\u255D',
'SF270000': u'\u255C',
'SF280000': u'\u255B',
'SF360000': u'\u255E',
'SF370000': u'\u255F',
'SF380000': u'\u255A',
'SF390000': u'\u2554',
'SF400000': u'\u2569',
'SF410000': u'\u2566',
'SF420000': u'\u2560',
'SF430000': u'\u2550',
'SF440000': u'\u256C',
'SF450000': u'\u2567',
'SF460000': u'\u2568',
'SF470000': u'\u2564',
'SF480000': u'\u2565',
'SF490000': u'\u2559',
'SF500000': u'\u2558',
'SF510000': u'\u2552',
'SF520000': u'\u2553',
'SF530000': u'\u256B',
'SF540000': u'\u256A',
'Sacute': u'\u015A',
'Sacutedotaccent': u'\u1E64',
'Sampigreek': u'\u03E0',
'Scaron': u'\u0160',
'Scarondotaccent': u'\u1E66',
'Scaronsmall': u'\uF6FD',
'Scedilla': u'\u015E',
'Schwa': u'\u018F',
'Schwacyrillic': u'\u04D8',
'Schwadieresiscyrillic': u'\u04DA',
'Scircle': u'\u24C8',
'Scircumflex': u'\u015C',
'Scommaaccent': u'\u0218',
'Sdotaccent': u'\u1E60',
'Sdotbelow': u'\u1E62',
'Sdotbelowdotaccent': u'\u1E68',
'Seharmenian': u'\u054D',
'Sevenroman': u'\u2166',
'Shaarmenian': u'\u0547',
'Shacyrillic': u'\u0428',
'Shchacyrillic': u'\u0429',
'Sheicoptic': u'\u03E2',
'Shhacyrillic': u'\u04BA',
'Shimacoptic': u'\u03EC',
'Sigma': u'\u03A3',
'Sixroman': u'\u2165',
'Smonospace': u'\uFF33',
'Softsigncyrillic': u'\u042C',
'Ssmall': u'\uF773',
'Stigmagreek': u'\u03DA',
'T': u'\u0054',
'Tau': u'\u03A4',
'Tbar': u'\u0166',
'Tcaron': u'\u0164',
'Tcedilla': u'\u0162',
'Tcircle': u'\u24C9',
'Tcircumflexbelow': u'\u1E70',
'Tcommaaccent': u'\u0162',
'Tdotaccent': u'\u1E6A',
'Tdotbelow': u'\u1E6C',
'Tecyrillic': u'\u0422',
'Tedescendercyrillic': u'\u04AC',
'Tenroman': u'\u2169',
'Tetsecyrillic': u'\u04B4',
'Theta': u'\u0398',
'Thook': u'\u01AC',
'Thorn': u'\u00DE',
'Thornsmall': u'\uF7FE',
'Threeroman': u'\u2162',
'Tildesmall': u'\uF6FE',
'Tiwnarmenian': u'\u054F',
'Tlinebelow': u'\u1E6E',
'Tmonospace': u'\uFF34',
'Toarmenian': u'\u0539',
'Tonefive': u'\u01BC',
'Tonesix': u'\u0184',
'Tonetwo': u'\u01A7',
'Tretroflexhook': u'\u01AE',
'Tsecyrillic': u'\u0426',
'Tshecyrillic': u'\u040B',
'Tsmall': u'\uF774',
'Twelveroman': u'\u216B',
'Tworoman': u'\u2161',
'U': u'\u0055',
'Uacute': u'\u00DA',
'Uacutesmall': u'\uF7FA',
'Ubreve': u'\u016C',
'Ucaron': u'\u01D3',
'Ucircle': u'\u24CA',
'Ucircumflex': u'\u00DB',
'Ucircumflexbelow': u'\u1E76',
'Ucircumflexsmall': u'\uF7FB',
'Ucyrillic': u'\u0423',
'Udblacute': u'\u0170',
'Udblgrave': u'\u0214',
'Udieresis': u'\u00DC',
'Udieresisacute': u'\u01D7',
'Udieresisbelow': u'\u1E72',
'Udieresiscaron': u'\u01D9',
'Udieresiscyrillic': u'\u04F0',
'Udieresisgrave': u'\u01DB',
'Udieresismacron': u'\u01D5',
'Udieresissmall': u'\uF7FC',
'Udotbelow': u'\u1EE4',
'Ugrave': u'\u00D9',
'Ugravesmall': u'\uF7F9',
'Uhookabove': u'\u1EE6',
'Uhorn': u'\u01AF',
'Uhornacute': u'\u1EE8',
'Uhorndotbelow': u'\u1EF0',
'Uhorngrave': u'\u1EEA',
'Uhornhookabove': u'\u1EEC',
'Uhorntilde': u'\u1EEE',
'Uhungarumlaut': u'\u0170',
'Uhungarumlautcyrillic': u'\u04F2',
'Uinvertedbreve': u'\u0216',
'Ukcyrillic': u'\u0478',
'Umacron': u'\u016A',
'Umacroncyrillic': u'\u04EE',
'Umacrondieresis': u'\u1E7A',
'Umonospace': u'\uFF35',
'Uogonek': u'\u0172',
'Upsilon': u'\u03A5',
'Upsilon1': u'\u03D2',
'Upsilonacutehooksymbolgreek': u'\u03D3',
'Upsilonafrican': u'\u01B1',
'Upsilondieresis': u'\u03AB',
'Upsilondieresishooksymbolgreek': u'\u03D4',
'Upsilonhooksymbol': u'\u03D2',
'Upsilontonos': u'\u038E',
'Uring': u'\u016E',
'Ushortcyrillic': u'\u040E',
'Usmall': u'\uF775',
'Ustraightcyrillic': u'\u04AE',
'Ustraightstrokecyrillic': u'\u04B0',
'Utilde': u'\u0168',
'Utildeacute': u'\u1E78',
'Utildebelow': u'\u1E74',
'V': u'\u0056',
'Vcircle': u'\u24CB',
'Vdotbelow': u'\u1E7E',
'Vecyrillic': u'\u0412',
'Vewarmenian': u'\u054E',
'Vhook': u'\u01B2',
'Vmonospace': u'\uFF36',
'Voarmenian': u'\u0548',
'Vsmall': u'\uF776',
'Vtilde': u'\u1E7C',
'W': u'\u0057',
'Wacute': u'\u1E82',
'Wcircle': u'\u24CC',
'Wcircumflex': u'\u0174',
'Wdieresis': u'\u1E84',
'Wdotaccent': u'\u1E86',
'Wdotbelow': u'\u1E88',
'Wgrave': u'\u1E80',
'Wmonospace': u'\uFF37',
'Wsmall': u'\uF777',
'X': u'\u0058',
'Xcircle': u'\u24CD',
'Xdieresis': u'\u1E8C',
'Xdotaccent': u'\u1E8A',
'Xeharmenian': u'\u053D',
'Xi': u'\u039E',
'Xmonospace': u'\uFF38',
'Xsmall': u'\uF778',
'Y': u'\u0059',
'Yacute': u'\u00DD',
'Yacutesmall': u'\uF7FD',
'Yatcyrillic': u'\u0462',
'Ycircle': u'\u24CE',
'Ycircumflex': u'\u0176',
'Ydieresis': u'\u0178',
'Ydieresissmall': u'\uF7FF',
'Ydotaccent': u'\u1E8E',
'Ydotbelow': u'\u1EF4',
'Yericyrillic': u'\u042B',
'Yerudieresiscyrillic': u'\u04F8',
'Ygrave': u'\u1EF2',
'Yhook': u'\u01B3',
'Yhookabove': u'\u1EF6',
'Yiarmenian': u'\u0545',
'Yicyrillic': u'\u0407',
'Yiwnarmenian': u'\u0552',
'Ymonospace': u'\uFF39',
'Ysmall': u'\uF779',
'Ytilde': u'\u1EF8',
'Yusbigcyrillic': u'\u046A',
'Yusbigiotifiedcyrillic': u'\u046C',
'Yuslittlecyrillic': u'\u0466',
'Yuslittleiotifiedcyrillic': u'\u0468',
'Z': u'\u005A',
'Zaarmenian': u'\u0536',
'Zacute': u'\u0179',
'Zcaron': u'\u017D',
'Zcaronsmall': u'\uF6FF',
'Zcircle': u'\u24CF',
'Zcircumflex': u'\u1E90',
'Zdot': u'\u017B',
'Zdotaccent': u'\u017B',
'Zdotbelow': u'\u1E92',
'Zecyrillic': u'\u0417',
'Zedescendercyrillic': u'\u0498',
'Zedieresiscyrillic': u'\u04DE',
'Zeta': u'\u0396',
'Zhearmenian': u'\u053A',
'Zhebrevecyrillic': u'\u04C1',
'Zhecyrillic': u'\u0416',
'Zhedescendercyrillic': u'\u0496',
'Zhedieresiscyrillic': u'\u04DC',
'Zlinebelow': u'\u1E94',
'Zmonospace': u'\uFF3A',
'Zsmall': u'\uF77A',
'Zstroke': u'\u01B5',
'a': u'\u0061',
'aabengali': u'\u0986',
'aacute': u'\u00E1',
'aadeva': u'\u0906',
'aagujarati': u'\u0A86',
'aagurmukhi': u'\u0A06',
'aamatragurmukhi': u'\u0A3E',
'aarusquare': u'\u3303',
'aavowelsignbengali': u'\u09BE',
'aavowelsigndeva': u'\u093E',
'aavowelsigngujarati': u'\u0ABE',
'abbreviationmarkarmenian': u'\u055F',
'abbreviationsigndeva': u'\u0970',
'abengali': u'\u0985',
'abopomofo': u'\u311A',
'abreve': u'\u0103',
'abreveacute': u'\u1EAF',
'abrevecyrillic': u'\u04D1',
'abrevedotbelow': u'\u1EB7',
'abrevegrave': u'\u1EB1',
'abrevehookabove': u'\u1EB3',
'abrevetilde': u'\u1EB5',
'acaron': u'\u01CE',
'acircle': u'\u24D0',
'acircumflex': u'\u00E2',
'acircumflexacute': u'\u1EA5',
'acircumflexdotbelow': u'\u1EAD',
'acircumflexgrave': u'\u1EA7',
'acircumflexhookabove': u'\u1EA9',
'acircumflextilde': u'\u1EAB',
'acute': u'\u00B4',
'acutebelowcmb': u'\u0317',
'acutecmb': u'\u0301',
'acutecomb': u'\u0301',
'acutedeva': u'\u0954',
'acutelowmod': u'\u02CF',
'acutetonecmb': u'\u0341',
'acyrillic': u'\u0430',
'adblgrave': u'\u0201',
'addakgurmukhi': u'\u0A71',
'adeva': u'\u0905',
'adieresis': u'\u00E4',
'adieresiscyrillic': u'\u04D3',
'adieresismacron': u'\u01DF',
'adotbelow': u'\u1EA1',
'adotmacron': u'\u01E1',
'ae': u'\u00E6',
'aeacute': u'\u01FD',
'aekorean': u'\u3150',
'aemacron': u'\u01E3',
'afii00208': u'\u2015',
'afii08941': u'\u20A4',
'afii10017': u'\u0410',
'afii10018': u'\u0411',
'afii10019': u'\u0412',
'afii10020': u'\u0413',
'afii10021': u'\u0414',
'afii10022': u'\u0415',
'afii10023': u'\u0401',
'afii10024': u'\u0416',
'afii10025': u'\u0417',
'afii10026': u'\u0418',
'afii10027': u'\u0419',
'afii10028': u'\u041A',
'afii10029': u'\u041B',
'afii10030': u'\u041C',
'afii10031': u'\u041D',
'afii10032': u'\u041E',
'afii10033': u'\u041F',
'afii10034': u'\u0420',
'afii10035': u'\u0421',
'afii10036': u'\u0422',
'afii10037': u'\u0423',
'afii10038': u'\u0424',
'afii10039': u'\u0425',
'afii10040': u'\u0426',
'afii10041': u'\u0427',
'afii10042': u'\u0428',
'afii10043': u'\u0429',
'afii10044': u'\u042A',
'afii10045': u'\u042B',
'afii10046': u'\u042C',
'afii10047': u'\u042D',
'afii10048': u'\u042E',
'afii10049': u'\u042F',
'afii10050': u'\u0490',
'afii10051': u'\u0402',
'afii10052': u'\u0403',
'afii10053': u'\u0404',
'afii10054': u'\u0405',
'afii10055': u'\u0406',
'afii10056': u'\u0407',
'afii10057': u'\u0408',
'afii10058': u'\u0409',
'afii10059': u'\u040A',
'afii10060': u'\u040B',
'afii10061': u'\u040C',
'afii10062': u'\u040E',
'afii10063': u'\uF6C4',
'afii10064': u'\uF6C5',
'afii10065': u'\u0430',
'afii10066': u'\u0431',
'afii10067': u'\u0432',
'afii10068': u'\u0433',
'afii10069': u'\u0434',
'afii10070': u'\u0435',
'afii10071': u'\u0451',
'afii10072': u'\u0436',
'afii10073': u'\u0437',
'afii10074': u'\u0438',
'afii10075': u'\u0439',
'afii10076': u'\u043A',
'afii10077': u'\u043B',
'afii10078': u'\u043C',
'afii10079': u'\u043D',
'afii10080': u'\u043E',
'afii10081': u'\u043F',
'afii10082': u'\u0440',
'afii10083': u'\u0441',
'afii10084': u'\u0442',
'afii10085': u'\u0443',
'afii10086': u'\u0444',
'afii10087': u'\u0445',
'afii10088': u'\u0446',
'afii10089': u'\u0447',
'afii10090': u'\u0448',
'afii10091': u'\u0449',
'afii10092': u'\u044A',
'afii10093': u'\u044B',
'afii10094': u'\u044C',
'afii10095': u'\u044D',
'afii10096': u'\u044E',
'afii10097': u'\u044F',
'afii10098': u'\u0491',
'afii10099': u'\u0452',
'afii10100': u'\u0453',
'afii10101': u'\u0454',
'afii10102': u'\u0455',
'afii10103': u'\u0456',
'afii10104': u'\u0457',
'afii10105': u'\u0458',
'afii10106': u'\u0459',
'afii10107': u'\u045A',
'afii10108': u'\u045B',
'afii10109': u'\u045C',
'afii10110': u'\u045E',
'afii10145': u'\u040F',
'afii10146': u'\u0462',
'afii10147': u'\u0472',
'afii10148': u'\u0474',
'afii10192': u'\uF6C6',
'afii10193': u'\u045F',
'afii10194': u'\u0463',
'afii10195': u'\u0473',
'afii10196': u'\u0475',
'afii10831': u'\uF6C7',
'afii10832': u'\uF6C8',
'afii10846': u'\u04D9',
'afii299': u'\u200E',
'afii300': u'\u200F',
'afii301': u'\u200D',
'afii57381': u'\u066A',
'afii57388': u'\u060C',
'afii57392': u'\u0660',
'afii57393': u'\u0661',
'afii57394': u'\u0662',
'afii57395': u'\u0663',
'afii57396': u'\u0664',
'afii57397': u'\u0665',
'afii57398': u'\u0666',
'afii57399': u'\u0667',
'afii57400': u'\u0668',
'afii57401': u'\u0669',
'afii57403': u'\u061B',
'afii57407': u'\u061F',
'afii57409': u'\u0621',
'afii57410': u'\u0622',
'afii57411': u'\u0623',
'afii57412': u'\u0624',
'afii57413': u'\u0625',
'afii57414': u'\u0626',
'afii57415': u'\u0627',
'afii57416': u'\u0628',
'afii57417': u'\u0629',
'afii57418': u'\u062A',
'afii57419': u'\u062B',
'afii57420': u'\u062C',
'afii57421': u'\u062D',
'afii57422': u'\u062E',
'afii57423': u'\u062F',
'afii57424': u'\u0630',
'afii57425': u'\u0631',
'afii57426': u'\u0632',
'afii57427': u'\u0633',
'afii57428': u'\u0634',
'afii57429': u'\u0635',
'afii57430': u'\u0636',
'afii57431': u'\u0637',
'afii57432': u'\u0638',
'afii57433': u'\u0639',
'afii57434': u'\u063A',
'afii57440': u'\u0640',
'afii57441': u'\u0641',
'afii57442': u'\u0642',
'afii57443': u'\u0643',
'afii57444': u'\u0644',
'afii57445': u'\u0645',
'afii57446': u'\u0646',
'afii57448': u'\u0648',
'afii57449': u'\u0649',
'afii57450': u'\u064A',
'afii57451': u'\u064B',
'afii57452': u'\u064C',
'afii57453': u'\u064D',
'afii57454': u'\u064E',
'afii57455': u'\u064F',
'afii57456': u'\u0650',
'afii57457': u'\u0651',
'afii57458': u'\u0652',
'afii57470': u'\u0647',
'afii57505': u'\u06A4',
'afii57506': u'\u067E',
'afii57507': u'\u0686',
'afii57508': u'\u0698',
'afii57509': u'\u06AF',
'afii57511': u'\u0679',
'afii57512': u'\u0688',
'afii57513': u'\u0691',
'afii57514': u'\u06BA',
'afii57519': u'\u06D2',
'afii57534': u'\u06D5',
'afii57636': u'\u20AA',
'afii57645': u'\u05BE',
'afii57658': u'\u05C3',
'afii57664': u'\u05D0',
'afii57665': u'\u05D1',
'afii57666': u'\u05D2',
'afii57667': u'\u05D3',
'afii57668': u'\u05D4',
'afii57669': u'\u05D5',
'afii57670': u'\u05D6',
'afii57671': u'\u05D7',
'afii57672': u'\u05D8',
'afii57673': u'\u05D9',
'afii57674': u'\u05DA',
'afii57675': u'\u05DB',
'afii57676': u'\u05DC',
'afii57677': u'\u05DD',
'afii57678': u'\u05DE',
'afii57679': u'\u05DF',
'afii57680': u'\u05E0',
'afii57681': u'\u05E1',
'afii57682': u'\u05E2',
'afii57683': u'\u05E3',
'afii57684': u'\u05E4',
'afii57685': u'\u05E5',
'afii57686': u'\u05E6',
'afii57687': u'\u05E7',
'afii57688': u'\u05E8',
'afii57689': u'\u05E9',
'afii57690': u'\u05EA',
'afii57694': u'\uFB2A',
'afii57695': u'\uFB2B',
'afii57700': u'\uFB4B',
'afii57705': u'\uFB1F',
'afii57716': u'\u05F0',
'afii57717': u'\u05F1',
'afii57718': u'\u05F2',
'afii57723': u'\uFB35',
'afii57793': u'\u05B4',
'afii57794': u'\u05B5',
'afii57795': u'\u05B6',
'afii57796': u'\u05BB',
'afii57797': u'\u05B8',
'afii57798': u'\u05B7',
'afii57799': u'\u05B0',
'afii57800': u'\u05B2',
'afii57801': u'\u05B1',
'afii57802': u'\u05B3',
'afii57803': u'\u05C2',
'afii57804': u'\u05C1',
'afii57806': u'\u05B9',
'afii57807': u'\u05BC',
'afii57839': u'\u05BD',
'afii57841': u'\u05BF',
'afii57842': u'\u05C0',
'afii57929': u'\u02BC',
'afii61248': u'\u2105',
'afii61289': u'\u2113',
'afii61352': u'\u2116',
'afii61573': u'\u202C',
'afii61574': u'\u202D',
'afii61575': u'\u202E',
'afii61664': u'\u200C',
'afii63167': u'\u066D',
'afii64937': u'\u02BD',
'agrave': u'\u00E0',
'agujarati': u'\u0A85',
'agurmukhi': u'\u0A05',
'ahiragana': u'\u3042',
'ahookabove': u'\u1EA3',
'aibengali': u'\u0990',
'aibopomofo': u'\u311E',
'aideva': u'\u0910',
'aiecyrillic': u'\u04D5',
'aigujarati': u'\u0A90',
'aigurmukhi': u'\u0A10',
'aimatragurmukhi': u'\u0A48',
'ainarabic': u'\u0639',
'ainfinalarabic': u'\uFECA',
'aininitialarabic': u'\uFECB',
'ainmedialarabic': u'\uFECC',
'ainvertedbreve': u'\u0203',
'aivowelsignbengali': u'\u09C8',
'aivowelsigndeva': u'\u0948',
'aivowelsigngujarati': u'\u0AC8',
'akatakana': u'\u30A2',
'akatakanahalfwidth': u'\uFF71',
'akorean': u'\u314F',
'alef': u'\u05D0',
'alefarabic': u'\u0627',
'alefdageshhebrew': u'\uFB30',
'aleffinalarabic': u'\uFE8E',
'alefhamzaabovearabic': u'\u0623',
'alefhamzaabovefinalarabic': u'\uFE84',
'alefhamzabelowarabic': u'\u0625',
'alefhamzabelowfinalarabic': u'\uFE88',
'alefhebrew': u'\u05D0',
'aleflamedhebrew': u'\uFB4F',
'alefmaddaabovearabic': u'\u0622',
'alefmaddaabovefinalarabic': u'\uFE82',
'alefmaksuraarabic': u'\u0649',
'alefmaksurafinalarabic': u'\uFEF0',
'alefmaksurainitialarabic': u'\uFEF3',
'alefmaksuramedialarabic': u'\uFEF4',
'alefpatahhebrew': u'\uFB2E',
'alefqamatshebrew': u'\uFB2F',
'aleph': u'\u2135',
'allequal': u'\u224C',
'alpha': u'\u03B1',
'alphatonos': u'\u03AC',
'amacron': u'\u0101',
'amonospace': u'\uFF41',
'ampersand': u'\u0026',
'ampersandmonospace': u'\uFF06',
'ampersandsmall': u'\uF726',
'amsquare': u'\u33C2',
'anbopomofo': u'\u3122',
'angbopomofo': u'\u3124',
'angkhankhuthai': u'\u0E5A',
'angle': u'\u2220',
'anglebracketleft': u'\u3008',
'anglebracketleftvertical': u'\uFE3F',
'anglebracketright': u'\u3009',
'anglebracketrightvertical': u'\uFE40',
'angleleft': u'\u2329',
'angleright': u'\u232A',
'angstrom': u'\u212B',
'anoteleia': u'\u0387',
'anudattadeva': u'\u0952',
'anusvarabengali': u'\u0982',
'anusvaradeva': u'\u0902',
'anusvaragujarati': u'\u0A82',
'aogonek': u'\u0105',
'apaatosquare': u'\u3300',
'aparen': u'\u249C',
'apostrophearmenian': u'\u055A',
'apostrophemod': u'\u02BC',
'apple': u'\uF8FF',
'approaches': u'\u2250',
'approxequal': u'\u2248',
'approxequalorimage': u'\u2252',
'approximatelyequal': u'\u2245',
'araeaekorean': u'\u318E',
'araeakorean': u'\u318D',
'arc': u'\u2312',
'arighthalfring': u'\u1E9A',
'aring': u'\u00E5',
'aringacute': u'\u01FB',
'aringbelow': u'\u1E01',
'arrowboth': u'\u2194',
'arrowdashdown': u'\u21E3',
'arrowdashleft': u'\u21E0',
'arrowdashright': u'\u21E2',
'arrowdashup': u'\u21E1',
'arrowdblboth': u'\u21D4',
'arrowdbldown': u'\u21D3',
'arrowdblleft': u'\u21D0',
'arrowdblright': u'\u21D2',
'arrowdblup': u'\u21D1',
'arrowdown': u'\u2193',
'arrowdownleft': u'\u2199',
'arrowdownright': u'\u2198',
'arrowdownwhite': u'\u21E9',
'arrowheaddownmod': u'\u02C5',
'arrowheadleftmod': u'\u02C2',
'arrowheadrightmod': u'\u02C3',
'arrowheadupmod': u'\u02C4',
'arrowhorizex': u'\uF8E7',
'arrowleft': u'\u2190',
'arrowleftdbl': u'\u21D0',
'arrowleftdblstroke': u'\u21CD',
'arrowleftoverright': u'\u21C6',
'arrowleftwhite': u'\u21E6',
'arrowright': u'\u2192',
'arrowrightdblstroke': u'\u21CF',
'arrowrightheavy': u'\u279E',
'arrowrightoverleft': u'\u21C4',
'arrowrightwhite': u'\u21E8',
'arrowtableft': u'\u21E4',
'arrowtabright': u'\u21E5',
'arrowup': u'\u2191',
'arrowupdn': u'\u2195',
'arrowupdnbse': u'\u21A8',
'arrowupdownbase': u'\u21A8',
'arrowupleft': u'\u2196',
'arrowupleftofdown': u'\u21C5',
'arrowupright': u'\u2197',
'arrowupwhite': u'\u21E7',
'arrowvertex': u'\uF8E6',
'asciicircum': u'\u005E',
'asciicircummonospace': u'\uFF3E',
'asciitilde': u'\u007E',
'asciitildemonospace': u'\uFF5E',
'ascript': u'\u0251',
'ascriptturned': u'\u0252',
'asmallhiragana': u'\u3041',
'asmallkatakana': u'\u30A1',
'asmallkatakanahalfwidth': u'\uFF67',
'asterisk': u'\u002A',
'asteriskaltonearabic': u'\u066D',
'asteriskarabic': u'\u066D',
'asteriskmath': u'\u2217',
'asteriskmonospace': u'\uFF0A',
'asterisksmall': u'\uFE61',
'asterism': u'\u2042',
'asuperior': u'\uF6E9',
'asymptoticallyequal': u'\u2243',
'at': u'\u0040',
'atilde': u'\u00E3',
'atmonospace': u'\uFF20',
'atsmall': u'\uFE6B',
'aturned': u'\u0250',
'aubengali': u'\u0994',
'aubopomofo': u'\u3120',
'audeva': u'\u0914',
'augujarati': u'\u0A94',
'augurmukhi': u'\u0A14',
'aulengthmarkbengali': u'\u09D7',
'aumatragurmukhi': u'\u0A4C',
'auvowelsignbengali': u'\u09CC',
'auvowelsigndeva': u'\u094C',
'auvowelsigngujarati': u'\u0ACC',
'avagrahadeva': u'\u093D',
'aybarmenian': u'\u0561',
'ayin': u'\u05E2',
'ayinaltonehebrew': u'\uFB20',
'ayinhebrew': u'\u05E2',
'b': u'\u0062',
'babengali': u'\u09AC',
'backslash': u'\u005C',
'backslashmonospace': u'\uFF3C',
'badeva': u'\u092C',
'bagujarati': u'\u0AAC',
'bagurmukhi': u'\u0A2C',
'bahiragana': u'\u3070',
'bahtthai': u'\u0E3F',
'bakatakana': u'\u30D0',
'bar': u'\u007C',
'barmonospace': u'\uFF5C',
'bbopomofo': u'\u3105',
'bcircle': u'\u24D1',
'bdotaccent': u'\u1E03',
'bdotbelow': u'\u1E05',
'beamedsixteenthnotes': u'\u266C',
'because': u'\u2235',
'becyrillic': u'\u0431',
'beharabic': u'\u0628',
'behfinalarabic': u'\uFE90',
'behinitialarabic': u'\uFE91',
'behiragana': u'\u3079',
'behmedialarabic': u'\uFE92',
'behmeeminitialarabic': u'\uFC9F',
'behmeemisolatedarabic': u'\uFC08',
'behnoonfinalarabic': u'\uFC6D',
'bekatakana': u'\u30D9',
'benarmenian': u'\u0562',
'bet': u'\u05D1',
'beta': u'\u03B2',
'betasymbolgreek': u'\u03D0',
'betdagesh': u'\uFB31',
'betdageshhebrew': u'\uFB31',
'bethebrew': u'\u05D1',
'betrafehebrew': u'\uFB4C',
'bhabengali': u'\u09AD',
'bhadeva': u'\u092D',
'bhagujarati': u'\u0AAD',
'bhagurmukhi': u'\u0A2D',
'bhook': u'\u0253',
'bihiragana': u'\u3073',
'bikatakana': u'\u30D3',
'bilabialclick': u'\u0298',
'bindigurmukhi': u'\u0A02',
'birusquare': u'\u3331',
'blackcircle': u'\u25CF',
'blackdiamond': u'\u25C6',
'blackdownpointingtriangle': u'\u25BC',
'blackleftpointingpointer': u'\u25C4',
'blackleftpointingtriangle': u'\u25C0',
'blacklenticularbracketleft': u'\u3010',
'blacklenticularbracketleftvertical': u'\uFE3B',
'blacklenticularbracketright': u'\u3011',
'blacklenticularbracketrightvertical': u'\uFE3C',
'blacklowerlefttriangle': u'\u25E3',
'blacklowerrighttriangle': u'\u25E2',
'blackrectangle': u'\u25AC',
'blackrightpointingpointer': u'\u25BA',
'blackrightpointingtriangle': u'\u25B6',
'blacksmallsquare': u'\u25AA',
'blacksmilingface': u'\u263B',
'blacksquare': u'\u25A0',
'blackstar': u'\u2605',
'blackupperlefttriangle': u'\u25E4',
'blackupperrighttriangle': u'\u25E5',
'blackuppointingsmalltriangle': u'\u25B4',
'blackuppointingtriangle': u'\u25B2',
'blank': u'\u2423',
'blinebelow': u'\u1E07',
'block': u'\u2588',
'bmonospace': u'\uFF42',
'bobaimaithai': u'\u0E1A',
'bohiragana': u'\u307C',
'bokatakana': u'\u30DC',
'bparen': u'\u249D',
'bqsquare': u'\u33C3',
'braceex': u'\uF8F4',
'braceleft': u'\u007B',
'braceleftbt': u'\uF8F3',
'braceleftmid': u'\uF8F2',
'braceleftmonospace': u'\uFF5B',
'braceleftsmall': u'\uFE5B',
'bracelefttp': u'\uF8F1',
'braceleftvertical': u'\uFE37',
'braceright': u'\u007D',
'bracerightbt': u'\uF8FE',
'bracerightmid': u'\uF8FD',
'bracerightmonospace': u'\uFF5D',
'bracerightsmall': u'\uFE5C',
'bracerighttp': u'\uF8FC',
'bracerightvertical': u'\uFE38',
'bracketleft': u'\u005B',
'bracketleftbt': u'\uF8F0',
'bracketleftex': u'\uF8EF',
'bracketleftmonospace': u'\uFF3B',
'bracketlefttp': u'\uF8EE',
'bracketright': u'\u005D',
'bracketrightbt': u'\uF8FB',
'bracketrightex': u'\uF8FA',
'bracketrightmonospace': u'\uFF3D',
'bracketrighttp': u'\uF8F9',
'breve': u'\u02D8',
'brevebelowcmb': u'\u032E',
'brevecmb': u'\u0306',
'breveinvertedbelowcmb': u'\u032F',
'breveinvertedcmb': u'\u0311',
'breveinverteddoublecmb': u'\u0361',
'bridgebelowcmb': u'\u032A',
'bridgeinvertedbelowcmb': u'\u033A',
'brokenbar': u'\u00A6',
'bstroke': u'\u0180',
'bsuperior': u'\uF6EA',
'btopbar': u'\u0183',
'buhiragana': u'\u3076',
'bukatakana': u'\u30D6',
'bullet': u'\u2022',
'bulletinverse': u'\u25D8',
'bulletoperator': u'\u2219',
'bullseye': u'\u25CE',
'c': u'\u0063',
'caarmenian': u'\u056E',
'cabengali': u'\u099A',
'cacute': u'\u0107',
'cadeva': u'\u091A',
'cagujarati': u'\u0A9A',
'cagurmukhi': u'\u0A1A',
'calsquare': u'\u3388',
'candrabindubengali': u'\u0981',
'candrabinducmb': u'\u0310',
'candrabindudeva': u'\u0901',
'candrabindugujarati': u'\u0A81',
'capslock': u'\u21EA',
'careof': u'\u2105',
'caron': u'\u02C7',
'caronbelowcmb': u'\u032C',
'caroncmb': u'\u030C',
'carriagereturn': u'\u21B5',
'cbopomofo': u'\u3118',
'ccaron': u'\u010D',
'ccedilla': u'\u00E7',
'ccedillaacute': u'\u1E09',
'ccircle': u'\u24D2',
'ccircumflex': u'\u0109',
'ccurl': u'\u0255',
'cdot': u'\u010B',
'cdotaccent': u'\u010B',
'cdsquare': u'\u33C5',
'cedilla': u'\u00B8',
'cedillacmb': u'\u0327',
'cent': u'\u00A2',
'centigrade': u'\u2103',
'centinferior': u'\uF6DF',
'centmonospace': u'\uFFE0',
'centoldstyle': u'\uF7A2',
'centsuperior': u'\uF6E0',
'chaarmenian': u'\u0579',
'chabengali': u'\u099B',
'chadeva': u'\u091B',
'chagujarati': u'\u0A9B',
'chagurmukhi': u'\u0A1B',
'chbopomofo': u'\u3114',
'cheabkhasiancyrillic': u'\u04BD',
'checkmark': u'\u2713',
'checyrillic': u'\u0447',
'chedescenderabkhasiancyrillic': u'\u04BF',
'chedescendercyrillic': u'\u04B7',
'chedieresiscyrillic': u'\u04F5',
'cheharmenian': u'\u0573',
'chekhakassiancyrillic': u'\u04CC',
'cheverticalstrokecyrillic': u'\u04B9',
'chi': u'\u03C7',
'chieuchacirclekorean': u'\u3277',
'chieuchaparenkorean': u'\u3217',
'chieuchcirclekorean': u'\u3269',
'chieuchkorean': u'\u314A',
'chieuchparenkorean': u'\u3209',
'chochangthai': u'\u0E0A',
'chochanthai': u'\u0E08',
'chochingthai': u'\u0E09',
'chochoethai': u'\u0E0C',
'chook': u'\u0188',
'cieucacirclekorean': u'\u3276',
'cieucaparenkorean': u'\u3216',
'cieuccirclekorean': u'\u3268',
'cieuckorean': u'\u3148',
'cieucparenkorean': u'\u3208',
'cieucuparenkorean': u'\u321C',
'circle': u'\u25CB',
'circlemultiply': u'\u2297',
'circleot': u'\u2299',
'circleplus': u'\u2295',
'circlepostalmark': u'\u3036',
'circlewithlefthalfblack': u'\u25D0',
'circlewithrighthalfblack': u'\u25D1',
'circumflex': u'\u02C6',
'circumflexbelowcmb': u'\u032D',
'circumflexcmb': u'\u0302',
'clear': u'\u2327',
'clickalveolar': u'\u01C2',
'clickdental': u'\u01C0',
'clicklateral': u'\u01C1',
'clickretroflex': u'\u01C3',
'club': u'\u2663',
'clubsuitblack': u'\u2663',
'clubsuitwhite': u'\u2667',
'cmcubedsquare': u'\u33A4',
'cmonospace': u'\uFF43',
'cmsquaredsquare': u'\u33A0',
'coarmenian': u'\u0581',
'colon': u'\u003A',
'colonmonetary': u'\u20A1',
'colonmonospace': u'\uFF1A',
'colonsign': u'\u20A1',
'colonsmall': u'\uFE55',
'colontriangularhalfmod': u'\u02D1',
'colontriangularmod': u'\u02D0',
'comma': u'\u002C',
'commaabovecmb': u'\u0313',
'commaaboverightcmb': u'\u0315',
'commaaccent': u'\uF6C3',
'commaarabic': u'\u060C',
'commaarmenian': u'\u055D',
'commainferior': u'\uF6E1',
'commamonospace': u'\uFF0C',
'commareversedabovecmb': u'\u0314',
'commareversedmod': u'\u02BD',
'commasmall': u'\uFE50',
'commasuperior': u'\uF6E2',
'commaturnedabovecmb': u'\u0312',
'commaturnedmod': u'\u02BB',
'compass': u'\u263C',
'congruent': u'\u2245',
'contourintegral': u'\u222E',
'control': u'\u2303',
'controlACK': u'\u0006',
'controlBEL': u'\u0007',
'controlBS': u'\u0008',
'controlCAN': u'\u0018',
'controlCR': u'\u000D',
'controlDC1': u'\u0011',
'controlDC2': u'\u0012',
'controlDC3': u'\u0013',
'controlDC4': u'\u0014',
'controlDEL': u'\u007F',
'controlDLE': u'\u0010',
'controlEM': u'\u0019',
'controlENQ': u'\u0005',
'controlEOT': u'\u0004',
'controlESC': u'\u001B',
'controlETB': u'\u0017',
'controlETX': u'\u0003',
'controlFF': u'\u000C',
'controlFS': u'\u001C',
'controlGS': u'\u001D',
'controlHT': u'\u0009',
'controlLF': u'\u000A',
'controlNAK': u'\u0015',
'controlRS': u'\u001E',
'controlSI': u'\u000F',
'controlSO': u'\u000E',
'controlSOT': u'\u0002',
'controlSTX': u'\u0001',
'controlSUB': u'\u001A',
'controlSYN': u'\u0016',
'controlUS': u'\u001F',
'controlVT': u'\u000B',
'copyright': u'\u00A9',
'copyrightsans': u'\uF8E9',
'copyrightserif': u'\uF6D9',
'cornerbracketleft': u'\u300C',
'cornerbracketlefthalfwidth': u'\uFF62',
'cornerbracketleftvertical': u'\uFE41',
'cornerbracketright': u'\u300D',
'cornerbracketrighthalfwidth': u'\uFF63',
'cornerbracketrightvertical': u'\uFE42',
'corporationsquare': u'\u337F',
'cosquare': u'\u33C7',
'coverkgsquare': u'\u33C6',
'cparen': u'\u249E',
'cruzeiro': u'\u20A2',
'cstretched': u'\u0297',
'curlyand': u'\u22CF',
'curlyor': u'\u22CE',
'currency': u'\u00A4',
'cyrBreve': u'\uF6D1',
'cyrFlex': u'\uF6D2',
'cyrbreve': u'\uF6D4',
'cyrflex': u'\uF6D5',
'd': u'\u0064',
'daarmenian': u'\u0564',
'dabengali': u'\u09A6',
'dadarabic': u'\u0636',
'dadeva': u'\u0926',
'dadfinalarabic': u'\uFEBE',
'dadinitialarabic': u'\uFEBF',
'dadmedialarabic': u'\uFEC0',
'dagesh': u'\u05BC',
'dageshhebrew': u'\u05BC',
'dagger': u'\u2020',
'daggerdbl': u'\u2021',
'dagujarati': u'\u0AA6',
'dagurmukhi': u'\u0A26',
'dahiragana': u'\u3060',
'dakatakana': u'\u30C0',
'dalarabic': u'\u062F',
'dalet': u'\u05D3',
'daletdagesh': u'\uFB33',
'daletdageshhebrew': u'\uFB33',
'dalethatafpatah': u'\u05D3\u05B2',
'dalethatafpatahhebrew': u'\u05D3\u05B2',
'dalethatafsegol': u'\u05D3\u05B1',
'dalethatafsegolhebrew': u'\u05D3\u05B1',
'dalethebrew': u'\u05D3',
'dalethiriq': u'\u05D3\u05B4',
'dalethiriqhebrew': u'\u05D3\u05B4',
'daletholam': u'\u05D3\u05B9',
'daletholamhebrew': u'\u05D3\u05B9',
'daletpatah': u'\u05D3\u05B7',
'daletpatahhebrew': u'\u05D3\u05B7',
'daletqamats': u'\u05D3\u05B8',
'daletqamatshebrew': u'\u05D3\u05B8',
'daletqubuts': u'\u05D3\u05BB',
'daletqubutshebrew': u'\u05D3\u05BB',
'daletsegol': u'\u05D3\u05B6',
'daletsegolhebrew': u'\u05D3\u05B6',
'daletsheva': u'\u05D3\u05B0',
'daletshevahebrew': u'\u05D3\u05B0',
'dalettsere': u'\u05D3\u05B5',
'dalettserehebrew': u'\u05D3\u05B5',
'dalfinalarabic': u'\uFEAA',
'dammaarabic': u'\u064F',
'dammalowarabic': u'\u064F',
'dammatanaltonearabic': u'\u064C',
'dammatanarabic': u'\u064C',
'danda': u'\u0964',
'dargahebrew': u'\u05A7',
'dargalefthebrew': u'\u05A7',
'dasiapneumatacyrilliccmb': u'\u0485',
'dblGrave': u'\uF6D3',
'dblanglebracketleft': u'\u300A',
'dblanglebracketleftvertical': u'\uFE3D',
'dblanglebracketright': u'\u300B',
'dblanglebracketrightvertical': u'\uFE3E',
'dblarchinvertedbelowcmb': u'\u032B',
'dblarrowleft': u'\u21D4',
'dblarrowright': u'\u21D2',
'dbldanda': u'\u0965',
'dblgrave': u'\uF6D6',
'dblgravecmb': u'\u030F',
'dblintegral': u'\u222C',
'dbllowline': u'\u2017',
'dbllowlinecmb': u'\u0333',
'dbloverlinecmb': u'\u033F',
'dblprimemod': u'\u02BA',
'dblverticalbar': u'\u2016',
'dblverticallineabovecmb': u'\u030E',
'dbopomofo': u'\u3109',
'dbsquare': u'\u33C8',
'dcaron': u'\u010F',
'dcedilla': u'\u1E11',
'dcircle': u'\u24D3',
'dcircumflexbelow': u'\u1E13',
'dcroat': u'\u0111',
'ddabengali': u'\u09A1',
'ddadeva': u'\u0921',
'ddagujarati': u'\u0AA1',
'ddagurmukhi': u'\u0A21',
'ddalarabic': u'\u0688',
'ddalfinalarabic': u'\uFB89',
'dddhadeva': u'\u095C',
'ddhabengali': u'\u09A2',
'ddhadeva': u'\u0922',
'ddhagujarati': u'\u0AA2',
'ddhagurmukhi': u'\u0A22',
'ddotaccent': u'\u1E0B',
'ddotbelow': u'\u1E0D',
'decimalseparatorarabic': u'\u066B',
'decimalseparatorpersian': u'\u066B',
'decyrillic': u'\u0434',
'degree': u'\u00B0',
'dehihebrew': u'\u05AD',
'dehiragana': u'\u3067',
'deicoptic': u'\u03EF',
'dekatakana': u'\u30C7',
'deleteleft': u'\u232B',
'deleteright': u'\u2326',
'delta': u'\u03B4',
'deltaturned': u'\u018D',
'denominatorminusonenumeratorbengali': u'\u09F8',
'dezh': u'\u02A4',
'dhabengali': u'\u09A7',
'dhadeva': u'\u0927',
'dhagujarati': u'\u0AA7',
'dhagurmukhi': u'\u0A27',
'dhook': u'\u0257',
'dialytikatonos': u'\u0385',
'dialytikatonoscmb': u'\u0344',
'diamond': u'\u2666',
'diamondsuitwhite': u'\u2662',
'dieresis': u'\u00A8',
'dieresisacute': u'\uF6D7',
'dieresisbelowcmb': u'\u0324',
'dieresiscmb': u'\u0308',
'dieresisgrave': u'\uF6D8',
'dieresistonos': u'\u0385',
'dihiragana': u'\u3062',
'dikatakana': u'\u30C2',
'dittomark': u'\u3003',
'divide': u'\u00F7',
'divides': u'\u2223',
'divisionslash': u'\u2215',
'djecyrillic': u'\u0452',
'dkshade': u'\u2593',
'dlinebelow': u'\u1E0F',
'dlsquare': u'\u3397',
'dmacron': u'\u0111',
'dmonospace': u'\uFF44',
'dnblock': u'\u2584',
'dochadathai': u'\u0E0E',
'dodekthai': u'\u0E14',
'dohiragana': u'\u3069',
'dokatakana': u'\u30C9',
'dollar': u'\u0024',
'dollarinferior': u'\uF6E3',
'dollarmonospace': u'\uFF04',
'dollaroldstyle': u'\uF724',
'dollarsmall': u'\uFE69',
'dollarsuperior': u'\uF6E4',
'dong': u'\u20AB',
'dorusquare': u'\u3326',
'dotaccent': u'\u02D9',
'dotaccentcmb': u'\u0307',
'dotbelowcmb': u'\u0323',
'dotbelowcomb': u'\u0323',
'dotkatakana': u'\u30FB',
'dotlessi': u'\u0131',
'dotlessj': u'\uF6BE',
'dotlessjstrokehook': u'\u0284',
'dotmath': u'\u22C5',
'dottedcircle': u'\u25CC',
'doubleyodpatah': u'\uFB1F',
'doubleyodpatahhebrew': u'\uFB1F',
'downtackbelowcmb': u'\u031E',
'downtackmod': u'\u02D5',
'dparen': u'\u249F',
'dsuperior': u'\uF6EB',
'dtail': u'\u0256',
'dtopbar': u'\u018C',
'duhiragana': u'\u3065',
'dukatakana': u'\u30C5',
'dz': u'\u01F3',
'dzaltone': u'\u02A3',
'dzcaron': u'\u01C6',
'dzcurl': u'\u02A5',
'dzeabkhasiancyrillic': u'\u04E1',
'dzecyrillic': u'\u0455',
'dzhecyrillic': u'\u045F',
'e': u'\u0065',
'eacute': u'\u00E9',
'earth': u'\u2641',
'ebengali': u'\u098F',
'ebopomofo': u'\u311C',
'ebreve': u'\u0115',
'ecandradeva': u'\u090D',
'ecandragujarati': u'\u0A8D',
'ecandravowelsigndeva': u'\u0945',
'ecandravowelsigngujarati': u'\u0AC5',
'ecaron': u'\u011B',
'ecedillabreve': u'\u1E1D',
'echarmenian': u'\u0565',
'echyiwnarmenian': u'\u0587',
'ecircle': u'\u24D4',
'ecircumflex': u'\u00EA',
'ecircumflexacute': u'\u1EBF',
'ecircumflexbelow': u'\u1E19',
'ecircumflexdotbelow': u'\u1EC7',
'ecircumflexgrave': u'\u1EC1',
'ecircumflexhookabove': u'\u1EC3',
'ecircumflextilde': u'\u1EC5',
'ecyrillic': u'\u0454',
'edblgrave': u'\u0205',
'edeva': u'\u090F',
'edieresis': u'\u00EB',
'edot': u'\u0117',
'edotaccent': u'\u0117',
'edotbelow': u'\u1EB9',
'eegurmukhi': u'\u0A0F',
'eematragurmukhi': u'\u0A47',
'efcyrillic': u'\u0444',
'egrave': u'\u00E8',
'egujarati': u'\u0A8F',
'eharmenian': u'\u0567',
'ehbopomofo': u'\u311D',
'ehiragana': u'\u3048',
'ehookabove': u'\u1EBB',
'eibopomofo': u'\u311F',
'eight': u'\u0038',
'eightarabic': u'\u0668',
'eightbengali': u'\u09EE',
'eightcircle': u'\u2467',
'eightcircleinversesansserif': u'\u2791',
'eightdeva': u'\u096E',
'eighteencircle': u'\u2471',
'eighteenparen': u'\u2485',
'eighteenperiod': u'\u2499',
'eightgujarati': u'\u0AEE',
'eightgurmukhi': u'\u0A6E',
'eighthackarabic': u'\u0668',
'eighthangzhou': u'\u3028',
'eighthnotebeamed': u'\u266B',
'eightideographicparen': u'\u3227',
'eightinferior': u'\u2088',
'eightmonospace': u'\uFF18',
'eightoldstyle': u'\uF738',
'eightparen': u'\u247B',
'eightperiod': u'\u248F',
'eightpersian': u'\u06F8',
'eightroman': u'\u2177',
'eightsuperior': u'\u2078',
'eightthai': u'\u0E58',
'einvertedbreve': u'\u0207',
'eiotifiedcyrillic': u'\u0465',
'ekatakana': u'\u30A8',
'ekatakanahalfwidth': u'\uFF74',
'ekonkargurmukhi': u'\u0A74',
'ekorean': u'\u3154',
'elcyrillic': u'\u043B',
'element': u'\u2208',
'elevencircle': u'\u246A',
'elevenparen': u'\u247E',
'elevenperiod': u'\u2492',
'elevenroman': u'\u217A',
'ellipsis': u'\u2026',
'ellipsisvertical': u'\u22EE',
'emacron': u'\u0113',
'emacronacute': u'\u1E17',
'emacrongrave': u'\u1E15',
'emcyrillic': u'\u043C',
'emdash': u'\u2014',
'emdashvertical': u'\uFE31',
'emonospace': u'\uFF45',
'emphasismarkarmenian': u'\u055B',
'emptyset': u'\u2205',
'enbopomofo': u'\u3123',
'encyrillic': u'\u043D',
'endash': u'\u2013',
'endashvertical': u'\uFE32',
'endescendercyrillic': u'\u04A3',
'eng': u'\u014B',
'engbopomofo': u'\u3125',
'enghecyrillic': u'\u04A5',
'enhookcyrillic': u'\u04C8',
'enspace': u'\u2002',
'eogonek': u'\u0119',
'eokorean': u'\u3153',
'eopen': u'\u025B',
'eopenclosed': u'\u029A',
'eopenreversed': u'\u025C',
'eopenreversedclosed': u'\u025E',
'eopenreversedhook': u'\u025D',
'eparen': u'\u24A0',
'epsilon': u'\u03B5',
'epsilontonos': u'\u03AD',
'equal': u'\u003D',
'equalmonospace': u'\uFF1D',
'equalsmall': u'\uFE66',
'equalsuperior': u'\u207C',
'equivalence': u'\u2261',
'erbopomofo': u'\u3126',
'ercyrillic': u'\u0440',
'ereversed': u'\u0258',
'ereversedcyrillic': u'\u044D',
'escyrillic': u'\u0441',
'esdescendercyrillic': u'\u04AB',
'esh': u'\u0283',
'eshcurl': u'\u0286',
'eshortdeva': u'\u090E',
'eshortvowelsigndeva': u'\u0946',
'eshreversedloop': u'\u01AA',
'eshsquatreversed': u'\u0285',
'esmallhiragana': u'\u3047',
'esmallkatakana': u'\u30A7',
'esmallkatakanahalfwidth': u'\uFF6A',
'estimated': u'\u212E',
'esuperior': u'\uF6EC',
'eta': u'\u03B7',
'etarmenian': u'\u0568',
'etatonos': u'\u03AE',
'eth': u'\u00F0',
'etilde': u'\u1EBD',
'etildebelow': u'\u1E1B',
'etnahtafoukhhebrew': u'\u0591',
'etnahtafoukhlefthebrew': u'\u0591',
'etnahtahebrew': u'\u0591',
'etnahtalefthebrew': u'\u0591',
'eturned': u'\u01DD',
'eukorean': u'\u3161',
'euro': u'\u20AC',
'evowelsignbengali': u'\u09C7',
'evowelsigndeva': u'\u0947',
'evowelsigngujarati': u'\u0AC7',
'exclam': u'\u0021',
'exclamarmenian': u'\u055C',
'exclamdbl': u'\u203C',
'exclamdown': u'\u00A1',
'exclamdownsmall': u'\uF7A1',
'exclammonospace': u'\uFF01',
'exclamsmall': u'\uF721',
'existential': u'\u2203',
'ezh': u'\u0292',
'ezhcaron': u'\u01EF',
'ezhcurl': u'\u0293',
'ezhreversed': u'\u01B9',
'ezhtail': u'\u01BA',
'f': u'\u0066',
'fadeva': u'\u095E',
'fagurmukhi': u'\u0A5E',
'fahrenheit': u'\u2109',
'fathaarabic': u'\u064E',
'fathalowarabic': u'\u064E',
'fathatanarabic': u'\u064B',
'fbopomofo': u'\u3108',
'fcircle': u'\u24D5',
'fdotaccent': u'\u1E1F',
'feharabic': u'\u0641',
'feharmenian': u'\u0586',
'fehfinalarabic': u'\uFED2',
'fehinitialarabic': u'\uFED3',
'fehmedialarabic': u'\uFED4',
'feicoptic': u'\u03E5',
'female': u'\u2640',
'ff': u'\uFB00',
'ffi': u'\uFB03',
'ffl': u'\uFB04',
'fi': u'\uFB01',
'fifteencircle': u'\u246E',
'fifteenparen': u'\u2482',
'fifteenperiod': u'\u2496',
'figuredash': u'\u2012',
'filledbox': u'\u25A0',
'filledrect': u'\u25AC',
'finalkaf': u'\u05DA',
'finalkafdagesh': u'\uFB3A',
'finalkafdageshhebrew': u'\uFB3A',
'finalkafhebrew': u'\u05DA',
'finalkafqamats': u'\u05DA\u05B8',
'finalkafqamatshebrew': u'\u05DA\u05B8',
'finalkafsheva': u'\u05DA\u05B0',
'finalkafshevahebrew': u'\u05DA\u05B0',
'finalmem': u'\u05DD',
'finalmemhebrew': u'\u05DD',
'finalnun': u'\u05DF',
'finalnunhebrew': u'\u05DF',
'finalpe': u'\u05E3',
'finalpehebrew': u'\u05E3',
'finaltsadi': u'\u05E5',
'finaltsadihebrew': u'\u05E5',
'firsttonechinese': u'\u02C9',
'fisheye': u'\u25C9',
'fitacyrillic': u'\u0473',
'five': u'\u0035',
'fivearabic': u'\u0665',
'fivebengali': u'\u09EB',
'fivecircle': u'\u2464',
'fivecircleinversesansserif': u'\u278E',
'fivedeva': u'\u096B',
'fiveeighths': u'\u215D',
'fivegujarati': u'\u0AEB',
'fivegurmukhi': u'\u0A6B',
'fivehackarabic': u'\u0665',
'fivehangzhou': u'\u3025',
'fiveideographicparen': u'\u3224',
'fiveinferior': u'\u2085',
'fivemonospace': u'\uFF15',
'fiveoldstyle': u'\uF735',
'fiveparen': u'\u2478',
'fiveperiod': u'\u248C',
'fivepersian': u'\u06F5',
'fiveroman': u'\u2174',
'fivesuperior': u'\u2075',
'fivethai': u'\u0E55',
'fl': u'\uFB02',
'florin': u'\u0192',
'fmonospace': u'\uFF46',
'fmsquare': u'\u3399',
'fofanthai': u'\u0E1F',
'fofathai': u'\u0E1D',
'fongmanthai': u'\u0E4F',
'forall': u'\u2200',
'four': u'\u0034',
'fourarabic': u'\u0664',
'fourbengali': u'\u09EA',
'fourcircle': u'\u2463',
'fourcircleinversesansserif': u'\u278D',
'fourdeva': u'\u096A',
'fourgujarati': u'\u0AEA',
'fourgurmukhi': u'\u0A6A',
'fourhackarabic': u'\u0664',
'fourhangzhou': u'\u3024',
'fourideographicparen': u'\u3223',
'fourinferior': u'\u2084',
'fourmonospace': u'\uFF14',
'fournumeratorbengali': u'\u09F7',
'fouroldstyle': u'\uF734',
'fourparen': u'\u2477',
'fourperiod': u'\u248B',
'fourpersian': u'\u06F4',
'fourroman': u'\u2173',
'foursuperior': u'\u2074',
'fourteencircle': u'\u246D',
'fourteenparen': u'\u2481',
'fourteenperiod': u'\u2495',
'fourthai': u'\u0E54',
'fourthtonechinese': u'\u02CB',
'fparen': u'\u24A1',
'fraction': u'\u2044',
'franc': u'\u20A3',
'g': u'\u0067',
'gabengali': u'\u0997',
'gacute': u'\u01F5',
'gadeva': u'\u0917',
'gafarabic': u'\u06AF',
'gaffinalarabic': u'\uFB93',
'gafinitialarabic': u'\uFB94',
'gafmedialarabic': u'\uFB95',
'gagujarati': u'\u0A97',
'gagurmukhi': u'\u0A17',
'gahiragana': u'\u304C',
'gakatakana': u'\u30AC',
'gamma': u'\u03B3',
'gammalatinsmall': u'\u0263',
'gammasuperior': u'\u02E0',
'gangiacoptic': u'\u03EB',
'gbopomofo': u'\u310D',
'gbreve': u'\u011F',
'gcaron': u'\u01E7',
'gcedilla': u'\u0123',
'gcircle': u'\u24D6',
'gcircumflex': u'\u011D',
'gcommaaccent': u'\u0123',
'gdot': u'\u0121',
'gdotaccent': u'\u0121',
'gecyrillic': u'\u0433',
'gehiragana': u'\u3052',
'gekatakana': u'\u30B2',
'geometricallyequal': u'\u2251',
'gereshaccenthebrew': u'\u059C',
'gereshhebrew': u'\u05F3',
'gereshmuqdamhebrew': u'\u059D',
'germandbls': u'\u00DF',
'gershayimaccenthebrew': u'\u059E',
'gershayimhebrew': u'\u05F4',
'getamark': u'\u3013',
'ghabengali': u'\u0998',
'ghadarmenian': u'\u0572',
'ghadeva': u'\u0918',
'ghagujarati': u'\u0A98',
'ghagurmukhi': u'\u0A18',
'ghainarabic': u'\u063A',
'ghainfinalarabic': u'\uFECE',
'ghaininitialarabic': u'\uFECF',
'ghainmedialarabic': u'\uFED0',
'ghemiddlehookcyrillic': u'\u0495',
'ghestrokecyrillic': u'\u0493',
'gheupturncyrillic': u'\u0491',
'ghhadeva': u'\u095A',
'ghhagurmukhi': u'\u0A5A',
'ghook': u'\u0260',
'ghzsquare': u'\u3393',
'gihiragana': u'\u304E',
'gikatakana': u'\u30AE',
'gimarmenian': u'\u0563',
'gimel': u'\u05D2',
'gimeldagesh': u'\uFB32',
'gimeldageshhebrew': u'\uFB32',
'gimelhebrew': u'\u05D2',
'gjecyrillic': u'\u0453',
'glottalinvertedstroke': u'\u01BE',
'glottalstop': u'\u0294',
'glottalstopinverted': u'\u0296',
'glottalstopmod': u'\u02C0',
'glottalstopreversed': u'\u0295',
'glottalstopreversedmod': u'\u02C1',
'glottalstopreversedsuperior': u'\u02E4',
'glottalstopstroke': u'\u02A1',
'glottalstopstrokereversed': u'\u02A2',
'gmacron': u'\u1E21',
'gmonospace': u'\uFF47',
'gohiragana': u'\u3054',
'gokatakana': u'\u30B4',
'gparen': u'\u24A2',
'gpasquare': u'\u33AC',
'gradient': u'\u2207',
'grave': u'\u0060',
'gravebelowcmb': u'\u0316',
'gravecmb': u'\u0300',
'gravecomb': u'\u0300',
'gravedeva': u'\u0953',
'gravelowmod': u'\u02CE',
'gravemonospace': u'\uFF40',
'gravetonecmb': u'\u0340',
'greater': u'\u003E',
'greaterequal': u'\u2265',
'greaterequalorless': u'\u22DB',
'greatermonospace': u'\uFF1E',
'greaterorequivalent': u'\u2273',
'greaterorless': u'\u2277',
'greateroverequal': u'\u2267',
'greatersmall': u'\uFE65',
'gscript': u'\u0261',
'gstroke': u'\u01E5',
'guhiragana': u'\u3050',
'guillemotleft': u'\u00AB',
'guillemotright': u'\u00BB',
'guilsinglleft': u'\u2039',
'guilsinglright': u'\u203A',
'gukatakana': u'\u30B0',
'guramusquare': u'\u3318',
'gysquare': u'\u33C9',
'h': u'\u0068',
'haabkhasiancyrillic': u'\u04A9',
'haaltonearabic': u'\u06C1',
'habengali': u'\u09B9',
'hadescendercyrillic': u'\u04B3',
'hadeva': u'\u0939',
'hagujarati': u'\u0AB9',
'hagurmukhi': u'\u0A39',
'haharabic': u'\u062D',
'hahfinalarabic': u'\uFEA2',
'hahinitialarabic': u'\uFEA3',
'hahiragana': u'\u306F',
'hahmedialarabic': u'\uFEA4',
'haitusquare': u'\u332A',
'hakatakana': u'\u30CF',
'hakatakanahalfwidth': u'\uFF8A',
'halantgurmukhi': u'\u0A4D',
'hamzaarabic': u'\u0621',
'hamzadammaarabic': u'\u0621\u064F',
'hamzadammatanarabic': u'\u0621\u064C',
'hamzafathaarabic': u'\u0621\u064E',
'hamzafathatanarabic': u'\u0621\u064B',
'hamzalowarabic': u'\u0621',
'hamzalowkasraarabic': u'\u0621\u0650',
'hamzalowkasratanarabic': u'\u0621\u064D',
'hamzasukunarabic': u'\u0621\u0652',
'hangulfiller': u'\u3164',
'hardsigncyrillic': u'\u044A',
'harpoonleftbarbup': u'\u21BC',
'harpoonrightbarbup': u'\u21C0',
'hasquare': u'\u33CA',
'hatafpatah': u'\u05B2',
'hatafpatah16': u'\u05B2',
'hatafpatah23': u'\u05B2',
'hatafpatah2f': u'\u05B2',
'hatafpatahhebrew': u'\u05B2',
'hatafpatahnarrowhebrew': u'\u05B2',
'hatafpatahquarterhebrew': u'\u05B2',
'hatafpatahwidehebrew': u'\u05B2',
'hatafqamats': u'\u05B3',
'hatafqamats1b': u'\u05B3',
'hatafqamats28': u'\u05B3',
'hatafqamats34': u'\u05B3',
'hatafqamatshebrew': u'\u05B3',
'hatafqamatsnarrowhebrew': u'\u05B3',
'hatafqamatsquarterhebrew': u'\u05B3',
'hatafqamatswidehebrew': u'\u05B3',
'hatafsegol': u'\u05B1',
'hatafsegol17': u'\u05B1',
'hatafsegol24': u'\u05B1',
'hatafsegol30': u'\u05B1',
'hatafsegolhebrew': u'\u05B1',
'hatafsegolnarrowhebrew': u'\u05B1',
'hatafsegolquarterhebrew': u'\u05B1',
'hatafsegolwidehebrew': u'\u05B1',
'hbar': u'\u0127',
'hbopomofo': u'\u310F',
'hbrevebelow': u'\u1E2B',
'hcedilla': u'\u1E29',
'hcircle': u'\u24D7',
'hcircumflex': u'\u0125',
'hdieresis': u'\u1E27',
'hdotaccent': u'\u1E23',
'hdotbelow': u'\u1E25',
'he': u'\u05D4',
'heart': u'\u2665',
'heartsuitblack': u'\u2665',
'heartsuitwhite': u'\u2661',
'hedagesh': u'\uFB34',
'hedageshhebrew': u'\uFB34',
'hehaltonearabic': u'\u06C1',
'heharabic': u'\u0647',
'hehebrew': u'\u05D4',
'hehfinalaltonearabic': u'\uFBA7',
'hehfinalalttwoarabic': u'\uFEEA',
'hehfinalarabic': u'\uFEEA',
'hehhamzaabovefinalarabic': u'\uFBA5',
'hehhamzaaboveisolatedarabic': u'\uFBA4',
'hehinitialaltonearabic': u'\uFBA8',
'hehinitialarabic': u'\uFEEB',
'hehiragana': u'\u3078',
'hehmedialaltonearabic': u'\uFBA9',
'hehmedialarabic': u'\uFEEC',
'heiseierasquare': u'\u337B',
'hekatakana': u'\u30D8',
'hekatakanahalfwidth': u'\uFF8D',
'hekutaarusquare': u'\u3336',
'henghook': u'\u0267',
'herutusquare': u'\u3339',
'het': u'\u05D7',
'hethebrew': u'\u05D7',
'hhook': u'\u0266',
'hhooksuperior': u'\u02B1',
'hieuhacirclekorean': u'\u327B',
'hieuhaparenkorean': u'\u321B',
'hieuhcirclekorean': u'\u326D',
'hieuhkorean': u'\u314E',
'hieuhparenkorean': u'\u320D',
'hihiragana': u'\u3072',
'hikatakana': u'\u30D2',
'hikatakanahalfwidth': u'\uFF8B',
'hiriq': u'\u05B4',
'hiriq14': u'\u05B4',
'hiriq21': u'\u05B4',
'hiriq2d': u'\u05B4',
'hiriqhebrew': u'\u05B4',
'hiriqnarrowhebrew': u'\u05B4',
'hiriqquarterhebrew': u'\u05B4',
'hiriqwidehebrew': u'\u05B4',
'hlinebelow': u'\u1E96',
'hmonospace': u'\uFF48',
'hoarmenian': u'\u0570',
'hohipthai': u'\u0E2B',
'hohiragana': u'\u307B',
'hokatakana': u'\u30DB',
'hokatakanahalfwidth': u'\uFF8E',
'holam': u'\u05B9',
'holam19': u'\u05B9',
'holam26': u'\u05B9',
'holam32': u'\u05B9',
'holamhebrew': u'\u05B9',
'holamnarrowhebrew': u'\u05B9',
'holamquarterhebrew': u'\u05B9',
'holamwidehebrew': u'\u05B9',
'honokhukthai': u'\u0E2E',
'hookabovecomb': u'\u0309',
'hookcmb': u'\u0309',
'hookpalatalizedbelowcmb': u'\u0321',
'hookretroflexbelowcmb': u'\u0322',
'hoonsquare': u'\u3342',
'horicoptic': u'\u03E9',
'horizontalbar': u'\u2015',
'horncmb': u'\u031B',
'hotsprings': u'\u2668',
'house': u'\u2302',
'hparen': u'\u24A3',
'hsuperior': u'\u02B0',
'hturned': u'\u0265',
'huhiragana': u'\u3075',
'huiitosquare': u'\u3333',
'hukatakana': u'\u30D5',
'hukatakanahalfwidth': u'\uFF8C',
'hungarumlaut': u'\u02DD',
'hungarumlautcmb': u'\u030B',
'hv': u'\u0195',
'hyphen': u'\u002D',
'hypheninferior': u'\uF6E5',
'hyphenmonospace': u'\uFF0D',
'hyphensmall': u'\uFE63',
'hyphensuperior': u'\uF6E6',
'hyphentwo': u'\u2010',
'i': u'\u0069',
'iacute': u'\u00ED',
'iacyrillic': u'\u044F',
'ibengali': u'\u0987',
'ibopomofo': u'\u3127',
'ibreve': u'\u012D',
'icaron': u'\u01D0',
'icircle': u'\u24D8',
'icircumflex': u'\u00EE',
'icyrillic': u'\u0456',
'idblgrave': u'\u0209',
'ideographearthcircle': u'\u328F',
'ideographfirecircle': u'\u328B',
'ideographicallianceparen': u'\u323F',
'ideographiccallparen': u'\u323A',
'ideographiccentrecircle': u'\u32A5',
'ideographicclose': u'\u3006',
'ideographiccomma': u'\u3001',
'ideographiccommaleft': u'\uFF64',
'ideographiccongratulationparen': u'\u3237',
'ideographiccorrectcircle': u'\u32A3',
'ideographicearthparen': u'\u322F',
'ideographicenterpriseparen': u'\u323D',
'ideographicexcellentcircle': u'\u329D',
'ideographicfestivalparen': u'\u3240',
'ideographicfinancialcircle': u'\u3296',
'ideographicfinancialparen': u'\u3236',
'ideographicfireparen': u'\u322B',
'ideographichaveparen': u'\u3232',
'ideographichighcircle': u'\u32A4',
'ideographiciterationmark': u'\u3005',
'ideographiclaborcircle': u'\u3298',
'ideographiclaborparen': u'\u3238',
'ideographicleftcircle': u'\u32A7',
'ideographiclowcircle': u'\u32A6',
'ideographicmedicinecircle': u'\u32A9',
'ideographicmetalparen': u'\u322E',
'ideographicmoonparen': u'\u322A',
'ideographicnameparen': u'\u3234',
'ideographicperiod': u'\u3002',
'ideographicprintcircle': u'\u329E',
'ideographicreachparen': u'\u3243',
'ideographicrepresentparen': u'\u3239',
'ideographicresourceparen': u'\u323E',
'ideographicrightcircle': u'\u32A8',
'ideographicsecretcircle': u'\u3299',
'ideographicselfparen': u'\u3242',
'ideographicsocietyparen': u'\u3233',
'ideographicspace': u'\u3000',
'ideographicspecialparen': u'\u3235',
'ideographicstockparen': u'\u3231',
'ideographicstudyparen': u'\u323B',
'ideographicsunparen': u'\u3230',
'ideographicsuperviseparen': u'\u323C',
'ideographicwaterparen': u'\u322C',
'ideographicwoodparen': u'\u322D',
'ideographiczero': u'\u3007',
'ideographmetalcircle': u'\u328E',
'ideographmooncircle': u'\u328A',
'ideographnamecircle': u'\u3294',
'ideographsuncircle': u'\u3290',
'ideographwatercircle': u'\u328C',
'ideographwoodcircle': u'\u328D',
'ideva': u'\u0907',
'idieresis': u'\u00EF',
'idieresisacute': u'\u1E2F',
'idieresiscyrillic': u'\u04E5',
'idotbelow': u'\u1ECB',
'iebrevecyrillic': u'\u04D7',
'iecyrillic': u'\u0435',
'ieungacirclekorean': u'\u3275',
'ieungaparenkorean': u'\u3215',
'ieungcirclekorean': u'\u3267',
'ieungkorean': u'\u3147',
'ieungparenkorean': u'\u3207',
'igrave': u'\u00EC',
'igujarati': u'\u0A87',
'igurmukhi': u'\u0A07',
'ihiragana': u'\u3044',
'ihookabove': u'\u1EC9',
'iibengali': u'\u0988',
'iicyrillic': u'\u0438',
'iideva': u'\u0908',
'iigujarati': u'\u0A88',
'iigurmukhi': u'\u0A08',
'iimatragurmukhi': u'\u0A40',
'iinvertedbreve': u'\u020B',
'iishortcyrillic': u'\u0439',
'iivowelsignbengali': u'\u09C0',
'iivowelsigndeva': u'\u0940',
'iivowelsigngujarati': u'\u0AC0',
'ij': u'\u0133',
'ikatakana': u'\u30A4',
'ikatakanahalfwidth': u'\uFF72',
'ikorean': u'\u3163',
'ilde': u'\u02DC',
'iluyhebrew': u'\u05AC',
'imacron': u'\u012B',
'imacroncyrillic': u'\u04E3',
'imageorapproximatelyequal': u'\u2253',
'imatragurmukhi': u'\u0A3F',
'imonospace': u'\uFF49',
'increment': u'\u2206',
'infinity': u'\u221E',
'iniarmenian': u'\u056B',
'integral': u'\u222B',
'integralbottom': u'\u2321',
'integralbt': u'\u2321',
'integralex': u'\uF8F5',
'integraltop': u'\u2320',
'integraltp': u'\u2320',
'intersection': u'\u2229',
'intisquare': u'\u3305',
'invbullet': u'\u25D8',
'invcircle': u'\u25D9',
'invsmileface': u'\u263B',
'iocyrillic': u'\u0451',
'iogonek': u'\u012F',
'iota': u'\u03B9',
'iotadieresis': u'\u03CA',
'iotadieresistonos': u'\u0390',
'iotalatin': u'\u0269',
'iotatonos': u'\u03AF',
'iparen': u'\u24A4',
'irigurmukhi': u'\u0A72',
'ismallhiragana': u'\u3043',
'ismallkatakana': u'\u30A3',
'ismallkatakanahalfwidth': u'\uFF68',
'issharbengali': u'\u09FA',
'istroke': u'\u0268',
'isuperior': u'\uF6ED',
'iterationhiragana': u'\u309D',
'iterationkatakana': u'\u30FD',
'itilde': u'\u0129',
'itildebelow': u'\u1E2D',
'iubopomofo': u'\u3129',
'iucyrillic': u'\u044E',
'ivowelsignbengali': u'\u09BF',
'ivowelsigndeva': u'\u093F',
'ivowelsigngujarati': u'\u0ABF',
'izhitsacyrillic': u'\u0475',
'izhitsadblgravecyrillic': u'\u0477',
'j': u'\u006A',
'jaarmenian': u'\u0571',
'jabengali': u'\u099C',
'jadeva': u'\u091C',
'jagujarati': u'\u0A9C',
'jagurmukhi': u'\u0A1C',
'jbopomofo': u'\u3110',
'jcaron': u'\u01F0',
'jcircle': u'\u24D9',
'jcircumflex': u'\u0135',
'jcrossedtail': u'\u029D',
'jdotlessstroke': u'\u025F',
'jecyrillic': u'\u0458',
'jeemarabic': u'\u062C',
'jeemfinalarabic': u'\uFE9E',
'jeeminitialarabic': u'\uFE9F',
'jeemmedialarabic': u'\uFEA0',
'jeharabic': u'\u0698',
'jehfinalarabic': u'\uFB8B',
'jhabengali': u'\u099D',
'jhadeva': u'\u091D',
'jhagujarati': u'\u0A9D',
'jhagurmukhi': u'\u0A1D',
'jheharmenian': u'\u057B',
'jis': u'\u3004',
'jmonospace': u'\uFF4A',
'jparen': u'\u24A5',
'jsuperior': u'\u02B2',
'k': u'\u006B',
'kabashkircyrillic': u'\u04A1',
'kabengali': u'\u0995',
'kacute': u'\u1E31',
'kacyrillic': u'\u043A',
'kadescendercyrillic': u'\u049B',
'kadeva': u'\u0915',
'kaf': u'\u05DB',
'kafarabic': u'\u0643',
'kafdagesh': u'\uFB3B',
'kafdageshhebrew': u'\uFB3B',
'kaffinalarabic': u'\uFEDA',
'kafhebrew': u'\u05DB',
'kafinitialarabic': u'\uFEDB',
'kafmedialarabic': u'\uFEDC',
'kafrafehebrew': u'\uFB4D',
'kagujarati': u'\u0A95',
'kagurmukhi': u'\u0A15',
'kahiragana': u'\u304B',
'kahookcyrillic': u'\u04C4',
'kakatakana': u'\u30AB',
'kakatakanahalfwidth': u'\uFF76',
'kappa': u'\u03BA',
'kappasymbolgreek': u'\u03F0',
'kapyeounmieumkorean': u'\u3171',
'kapyeounphieuphkorean': u'\u3184',
'kapyeounpieupkorean': u'\u3178',
'kapyeounssangpieupkorean': u'\u3179',
'karoriisquare': u'\u330D',
'kashidaautoarabic': u'\u0640',
'kashidaautonosidebearingarabic': u'\u0640',
'kasmallkatakana': u'\u30F5',
'kasquare': u'\u3384',
'kasraarabic': u'\u0650',
'kasratanarabic': u'\u064D',
'kastrokecyrillic': u'\u049F',
'katahiraprolongmarkhalfwidth': u'\uFF70',
'kaverticalstrokecyrillic': u'\u049D',
'kbopomofo': u'\u310E',
'kcalsquare': u'\u3389',
'kcaron': u'\u01E9',
'kcedilla': u'\u0137',
'kcircle': u'\u24DA',
'kcommaaccent': u'\u0137',
'kdotbelow': u'\u1E33',
'keharmenian': u'\u0584',
'kehiragana': u'\u3051',
'kekatakana': u'\u30B1',
'kekatakanahalfwidth': u'\uFF79',
'kenarmenian': u'\u056F',
'kesmallkatakana': u'\u30F6',
'kgreenlandic': u'\u0138',
'khabengali': u'\u0996',
'khacyrillic': u'\u0445',
'khadeva': u'\u0916',
'khagujarati': u'\u0A96',
'khagurmukhi': u'\u0A16',
'khaharabic': u'\u062E',
'khahfinalarabic': u'\uFEA6',
'khahinitialarabic': u'\uFEA7',
'khahmedialarabic': u'\uFEA8',
'kheicoptic': u'\u03E7',
'khhadeva': u'\u0959',
'khhagurmukhi': u'\u0A59',
'khieukhacirclekorean': u'\u3278',
'khieukhaparenkorean': u'\u3218',
'khieukhcirclekorean': u'\u326A',
'khieukhkorean': u'\u314B',
'khieukhparenkorean': u'\u320A',
'khokhaithai': u'\u0E02',
'khokhonthai': u'\u0E05',
'khokhuatthai': u'\u0E03',
'khokhwaithai': u'\u0E04',
'khomutthai': u'\u0E5B',
'khook': u'\u0199',
'khorakhangthai': u'\u0E06',
'khzsquare': u'\u3391',
'kihiragana': u'\u304D',
'kikatakana': u'\u30AD',
'kikatakanahalfwidth': u'\uFF77',
'kiroguramusquare': u'\u3315',
'kiromeetorusquare': u'\u3316',
'kirosquare': u'\u3314',
'kiyeokacirclekorean': u'\u326E',
'kiyeokaparenkorean': u'\u320E',
'kiyeokcirclekorean': u'\u3260',
'kiyeokkorean': u'\u3131',
'kiyeokparenkorean': u'\u3200',
'kiyeoksioskorean': u'\u3133',
'kjecyrillic': u'\u045C',
'klinebelow': u'\u1E35',
'klsquare': u'\u3398',
'kmcubedsquare': u'\u33A6',
'kmonospace': u'\uFF4B',
'kmsquaredsquare': u'\u33A2',
'kohiragana': u'\u3053',
'kohmsquare': u'\u33C0',
'kokaithai': u'\u0E01',
'kokatakana': u'\u30B3',
'kokatakanahalfwidth': u'\uFF7A',
'kooposquare': u'\u331E',
'koppacyrillic': u'\u0481',
'koreanstandardsymbol': u'\u327F',
'koroniscmb': u'\u0343',
'kparen': u'\u24A6',
'kpasquare': u'\u33AA',
'ksicyrillic': u'\u046F',
'ktsquare': u'\u33CF',
'kturned': u'\u029E',
'kuhiragana': u'\u304F',
'kukatakana': u'\u30AF',
'kukatakanahalfwidth': u'\uFF78',
'kvsquare': u'\u33B8',
'kwsquare': u'\u33BE',
'l': u'\u006C',
'labengali': u'\u09B2',
'lacute': u'\u013A',
'ladeva': u'\u0932',
'lagujarati': u'\u0AB2',
'lagurmukhi': u'\u0A32',
'lakkhangyaothai': u'\u0E45',
'lamaleffinalarabic': u'\uFEFC',
'lamalefhamzaabovefinalarabic': u'\uFEF8',
'lamalefhamzaaboveisolatedarabic': u'\uFEF7',
'lamalefhamzabelowfinalarabic': u'\uFEFA',
'lamalefhamzabelowisolatedarabic': u'\uFEF9',
'lamalefisolatedarabic': u'\uFEFB',
'lamalefmaddaabovefinalarabic': u'\uFEF6',
'lamalefmaddaaboveisolatedarabic': u'\uFEF5',
'lamarabic': u'\u0644',
'lambda': u'\u03BB',
'lambdastroke': u'\u019B',
'lamed': u'\u05DC',
'lameddagesh': u'\uFB3C',
'lameddageshhebrew': u'\uFB3C',
'lamedhebrew': u'\u05DC',
'lamedholam': u'\u05DC\u05B9',
'lamedholamdagesh': u'\u05DC\u05B9\u05BC',
'lamedholamdageshhebrew': u'\u05DC\u05B9\u05BC',
'lamedholamhebrew': u'\u05DC\u05B9',
'lamfinalarabic': u'\uFEDE',
'lamhahinitialarabic': u'\uFCCA',
'laminitialarabic': u'\uFEDF',
'lamjeeminitialarabic': u'\uFCC9',
'lamkhahinitialarabic': u'\uFCCB',
'lamlamhehisolatedarabic': u'\uFDF2',
'lammedialarabic': u'\uFEE0',
'lammeemhahinitialarabic': u'\uFD88',
'lammeeminitialarabic': u'\uFCCC',
'lammeemjeeminitialarabic': u'\uFEDF\uFEE4\uFEA0',
'lammeemkhahinitialarabic': u'\uFEDF\uFEE4\uFEA8',
'largecircle': u'\u25EF',
'lbar': u'\u019A',
'lbelt': u'\u026C',
'lbopomofo': u'\u310C',
'lcaron': u'\u013E',
'lcedilla': u'\u013C',
'lcircle': u'\u24DB',
'lcircumflexbelow': u'\u1E3D',
'lcommaaccent': u'\u013C',
'ldot': u'\u0140',
'ldotaccent': u'\u0140',
'ldotbelow': u'\u1E37',
'ldotbelowmacron': u'\u1E39',
'leftangleabovecmb': u'\u031A',
'lefttackbelowcmb': u'\u0318',
'less': u'\u003C',
'lessequal': u'\u2264',
'lessequalorgreater': u'\u22DA',
'lessmonospace': u'\uFF1C',
'lessorequivalent': u'\u2272',
'lessorgreater': u'\u2276',
'lessoverequal': u'\u2266',
'lesssmall': u'\uFE64',
'lezh': u'\u026E',
'lfblock': u'\u258C',
'lhookretroflex': u'\u026D',
'lira': u'\u20A4',
'liwnarmenian': u'\u056C',
'lj': u'\u01C9',
'ljecyrillic': u'\u0459',
'll': u'\uF6C0',
'lladeva': u'\u0933',
'llagujarati': u'\u0AB3',
'llinebelow': u'\u1E3B',
'llladeva': u'\u0934',
'llvocalicbengali': u'\u09E1',
'llvocalicdeva': u'\u0961',
'llvocalicvowelsignbengali': u'\u09E3',
'llvocalicvowelsigndeva': u'\u0963',
'lmiddletilde': u'\u026B',
'lmonospace': u'\uFF4C',
'lmsquare': u'\u33D0',
'lochulathai': u'\u0E2C',
'logicaland': u'\u2227',
'logicalnot': u'\u00AC',
'logicalnotreversed': u'\u2310',
'logicalor': u'\u2228',
'lolingthai': u'\u0E25',
'longs': u'\u017F',
'lowlinecenterline': u'\uFE4E',
'lowlinecmb': u'\u0332',
'lowlinedashed': u'\uFE4D',
'lozenge': u'\u25CA',
'lparen': u'\u24A7',
'lslash': u'\u0142',
'lsquare': u'\u2113',
'lsuperior': u'\uF6EE',
'ltshade': u'\u2591',
'luthai': u'\u0E26',
'lvocalicbengali': u'\u098C',
'lvocalicdeva': u'\u090C',
'lvocalicvowelsignbengali': u'\u09E2',
'lvocalicvowelsigndeva': u'\u0962',
'lxsquare': u'\u33D3',
'm': u'\u006D',
'mabengali': u'\u09AE',
'macron': u'\u00AF',
'macronbelowcmb': u'\u0331',
'macroncmb': u'\u0304',
'macronlowmod': u'\u02CD',
'macronmonospace': u'\uFFE3',
'macute': u'\u1E3F',
'madeva': u'\u092E',
'magujarati': u'\u0AAE',
'magurmukhi': u'\u0A2E',
'mahapakhhebrew': u'\u05A4',
'mahapakhlefthebrew': u'\u05A4',
'mahiragana': u'\u307E',
'maichattawalowleftthai': u'\uF895',
'maichattawalowrightthai': u'\uF894',
'maichattawathai': u'\u0E4B',
'maichattawaupperleftthai': u'\uF893',
'maieklowleftthai': u'\uF88C',
'maieklowrightthai': u'\uF88B',
'maiekthai': u'\u0E48',
'maiekupperleftthai': u'\uF88A',
'maihanakatleftthai': u'\uF884',
'maihanakatthai': u'\u0E31',
'maitaikhuleftthai': u'\uF889',
'maitaikhuthai': u'\u0E47',
'maitholowleftthai': u'\uF88F',
'maitholowrightthai': u'\uF88E',
'maithothai': u'\u0E49',
'maithoupperleftthai': u'\uF88D',
'maitrilowleftthai': u'\uF892',
'maitrilowrightthai': u'\uF891',
'maitrithai': u'\u0E4A',
'maitriupperleftthai': u'\uF890',
'maiyamokthai': u'\u0E46',
'makatakana': u'\u30DE',
'makatakanahalfwidth': u'\uFF8F',
'male': u'\u2642',
'mansyonsquare': u'\u3347',
'maqafhebrew': u'\u05BE',
'mars': u'\u2642',
'masoracirclehebrew': u'\u05AF',
'masquare': u'\u3383',
'mbopomofo': u'\u3107',
'mbsquare': u'\u33D4',
'mcircle': u'\u24DC',
'mcubedsquare': u'\u33A5',
'mdotaccent': u'\u1E41',
'mdotbelow': u'\u1E43',
'meemarabic': u'\u0645',
'meemfinalarabic': u'\uFEE2',
'meeminitialarabic': u'\uFEE3',
'meemmedialarabic': u'\uFEE4',
'meemmeeminitialarabic': u'\uFCD1',
'meemmeemisolatedarabic': u'\uFC48',
'meetorusquare': u'\u334D',
'mehiragana': u'\u3081',
'meizierasquare': u'\u337E',
'mekatakana': u'\u30E1',
'mekatakanahalfwidth': u'\uFF92',
'mem': u'\u05DE',
'memdagesh': u'\uFB3E',
'memdageshhebrew': u'\uFB3E',
'memhebrew': u'\u05DE',
'menarmenian': u'\u0574',
'merkhahebrew': u'\u05A5',
'merkhakefulahebrew': u'\u05A6',
'merkhakefulalefthebrew': u'\u05A6',
'merkhalefthebrew': u'\u05A5',
'mhook': u'\u0271',
'mhzsquare': u'\u3392',
'middledotkatakanahalfwidth': u'\uFF65',
'middot': u'\u00B7',
'mieumacirclekorean': u'\u3272',
'mieumaparenkorean': u'\u3212',
'mieumcirclekorean': u'\u3264',
'mieumkorean': u'\u3141',
'mieumpansioskorean': u'\u3170',
'mieumparenkorean': u'\u3204',
'mieumpieupkorean': u'\u316E',
'mieumsioskorean': u'\u316F',
'mihiragana': u'\u307F',
'mikatakana': u'\u30DF',
'mikatakanahalfwidth': u'\uFF90',
'minus': u'\u2212',
'minusbelowcmb': u'\u0320',
'minuscircle': u'\u2296',
'minusmod': u'\u02D7',
'minusplus': u'\u2213',
'minute': u'\u2032',
'miribaarusquare': u'\u334A',
'mirisquare': u'\u3349',
'mlonglegturned': u'\u0270',
'mlsquare': u'\u3396',
'mmcubedsquare': u'\u33A3',
'mmonospace': u'\uFF4D',
'mmsquaredsquare': u'\u339F',
'mohiragana': u'\u3082',
'mohmsquare': u'\u33C1',
'mokatakana': u'\u30E2',
'mokatakanahalfwidth': u'\uFF93',
'molsquare': u'\u33D6',
'momathai': u'\u0E21',
'moverssquare': u'\u33A7',
'moverssquaredsquare': u'\u33A8',
'mparen': u'\u24A8',
'mpasquare': u'\u33AB',
'mssquare': u'\u33B3',
'msuperior': u'\uF6EF',
'mturned': u'\u026F',
'mu': u'\u00B5',
'mu1': u'\u00B5',
'muasquare': u'\u3382',
'muchgreater': u'\u226B',
'muchless': u'\u226A',
'mufsquare': u'\u338C',
'mugreek': u'\u03BC',
'mugsquare': u'\u338D',
'muhiragana': u'\u3080',
'mukatakana': u'\u30E0',
'mukatakanahalfwidth': u'\uFF91',
'mulsquare': u'\u3395',
'multiply': u'\u00D7',
'mumsquare': u'\u339B',
'munahhebrew': u'\u05A3',
'munahlefthebrew': u'\u05A3',
'musicalnote': u'\u266A',
'musicalnotedbl': u'\u266B',
'musicflatsign': u'\u266D',
'musicsharpsign': u'\u266F',
'mussquare': u'\u33B2',
'muvsquare': u'\u33B6',
'muwsquare': u'\u33BC',
'mvmegasquare': u'\u33B9',
'mvsquare': u'\u33B7',
'mwmegasquare': u'\u33BF',
'mwsquare': u'\u33BD',
'n': u'\u006E',
'nabengali': u'\u09A8',
'nabla': u'\u2207',
'nacute': u'\u0144',
'nadeva': u'\u0928',
'nagujarati': u'\u0AA8',
'nagurmukhi': u'\u0A28',
'nahiragana': u'\u306A',
'nakatakana': u'\u30CA',
'nakatakanahalfwidth': u'\uFF85',
'napostrophe': u'\u0149',
'nasquare': u'\u3381',
'nbopomofo': u'\u310B',
'nbspace': u'\u00A0',
'ncaron': u'\u0148',
'ncedilla': u'\u0146',
'ncircle': u'\u24DD',
'ncircumflexbelow': u'\u1E4B',
'ncommaaccent': u'\u0146',
'ndotaccent': u'\u1E45',
'ndotbelow': u'\u1E47',
'nehiragana': u'\u306D',
'nekatakana': u'\u30CD',
'nekatakanahalfwidth': u'\uFF88',
'newsheqelsign': u'\u20AA',
'nfsquare': u'\u338B',
'ngabengali': u'\u0999',
'ngadeva': u'\u0919',
'ngagujarati': u'\u0A99',
'ngagurmukhi': u'\u0A19',
'ngonguthai': u'\u0E07',
'nhiragana': u'\u3093',
'nhookleft': u'\u0272',
'nhookretroflex': u'\u0273',
'nieunacirclekorean': u'\u326F',
'nieunaparenkorean': u'\u320F',
'nieuncieuckorean': u'\u3135',
'nieuncirclekorean': u'\u3261',
'nieunhieuhkorean': u'\u3136',
'nieunkorean': u'\u3134',
'nieunpansioskorean': u'\u3168',
'nieunparenkorean': u'\u3201',
'nieunsioskorean': u'\u3167',
'nieuntikeutkorean': u'\u3166',
'nihiragana': u'\u306B',
'nikatakana': u'\u30CB',
'nikatakanahalfwidth': u'\uFF86',
'nikhahitleftthai': u'\uF899',
'nikhahitthai': u'\u0E4D',
'nine': u'\u0039',
'ninearabic': u'\u0669',
'ninebengali': u'\u09EF',
'ninecircle': u'\u2468',
'ninecircleinversesansserif': u'\u2792',
'ninedeva': u'\u096F',
'ninegujarati': u'\u0AEF',
'ninegurmukhi': u'\u0A6F',
'ninehackarabic': u'\u0669',
'ninehangzhou': u'\u3029',
'nineideographicparen': u'\u3228',
'nineinferior': u'\u2089',
'ninemonospace': u'\uFF19',
'nineoldstyle': u'\uF739',
'nineparen': u'\u247C',
'nineperiod': u'\u2490',
'ninepersian': u'\u06F9',
'nineroman': u'\u2178',
'ninesuperior': u'\u2079',
'nineteencircle': u'\u2472',
'nineteenparen': u'\u2486',
'nineteenperiod': u'\u249A',
'ninethai': u'\u0E59',
'nj': u'\u01CC',
'njecyrillic': u'\u045A',
'nkatakana': u'\u30F3',
'nkatakanahalfwidth': u'\uFF9D',
'nlegrightlong': u'\u019E',
'nlinebelow': u'\u1E49',
'nmonospace': u'\uFF4E',
'nmsquare': u'\u339A',
'nnabengali': u'\u09A3',
'nnadeva': u'\u0923',
'nnagujarati': u'\u0AA3',
'nnagurmukhi': u'\u0A23',
'nnnadeva': u'\u0929',
'nohiragana': u'\u306E',
'nokatakana': u'\u30CE',
'nokatakanahalfwidth': u'\uFF89',
'nonbreakingspace': u'\u00A0',
'nonenthai': u'\u0E13',
'nonuthai': u'\u0E19',
'noonarabic': u'\u0646',
'noonfinalarabic': u'\uFEE6',
'noonghunnaarabic': u'\u06BA',
'noonghunnafinalarabic': u'\uFB9F',
'noonhehinitialarabic': u'\uFEE7\uFEEC',
'nooninitialarabic': u'\uFEE7',
'noonjeeminitialarabic': u'\uFCD2',
'noonjeemisolatedarabic': u'\uFC4B',
'noonmedialarabic': u'\uFEE8',
'noonmeeminitialarabic': u'\uFCD5',
'noonmeemisolatedarabic': u'\uFC4E',
'noonnoonfinalarabic': u'\uFC8D',
'notcontains': u'\u220C',
'notelement': u'\u2209',
'notelementof': u'\u2209',
'notequal': u'\u2260',
'notgreater': u'\u226F',
'notgreaternorequal': u'\u2271',
'notgreaternorless': u'\u2279',
'notidentical': u'\u2262',
'notless': u'\u226E',
'notlessnorequal': u'\u2270',
'notparallel': u'\u2226',
'notprecedes': u'\u2280',
'notsubset': u'\u2284',
'notsucceeds': u'\u2281',
'notsuperset': u'\u2285',
'nowarmenian': u'\u0576',
'nparen': u'\u24A9',
'nssquare': u'\u33B1',
'nsuperior': u'\u207F',
'ntilde': u'\u00F1',
'nu': u'\u03BD',
'nuhiragana': u'\u306C',
'nukatakana': u'\u30CC',
'nukatakanahalfwidth': u'\uFF87',
'nuktabengali': u'\u09BC',
'nuktadeva': u'\u093C',
'nuktagujarati': u'\u0ABC',
'nuktagurmukhi': u'\u0A3C',
'numbersign': u'\u0023',
'numbersignmonospace': u'\uFF03',
'numbersignsmall': u'\uFE5F',
'numeralsigngreek': u'\u0374',
'numeralsignlowergreek': u'\u0375',
'numero': u'\u2116',
'nun': u'\u05E0',
'nundagesh': u'\uFB40',
'nundageshhebrew': u'\uFB40',
'nunhebrew': u'\u05E0',
'nvsquare': u'\u33B5',
'nwsquare': u'\u33BB',
'nyabengali': u'\u099E',
'nyadeva': u'\u091E',
'nyagujarati': u'\u0A9E',
'nyagurmukhi': u'\u0A1E',
'o': u'\u006F',
'oacute': u'\u00F3',
'oangthai': u'\u0E2D',
'obarred': u'\u0275',
'obarredcyrillic': u'\u04E9',
'obarreddieresiscyrillic': u'\u04EB',
'obengali': u'\u0993',
'obopomofo': u'\u311B',
'obreve': u'\u014F',
'ocandradeva': u'\u0911',
'ocandragujarati': u'\u0A91',
'ocandravowelsigndeva': u'\u0949',
'ocandravowelsigngujarati': u'\u0AC9',
'ocaron': u'\u01D2',
'ocircle': u'\u24DE',
'ocircumflex': u'\u00F4',
'ocircumflexacute': u'\u1ED1',
'ocircumflexdotbelow': u'\u1ED9',
'ocircumflexgrave': u'\u1ED3',
'ocircumflexhookabove': u'\u1ED5',
'ocircumflextilde': u'\u1ED7',
'ocyrillic': u'\u043E',
'odblacute': u'\u0151',
'odblgrave': u'\u020D',
'odeva': u'\u0913',
'odieresis': u'\u00F6',
'odieresiscyrillic': u'\u04E7',
'odotbelow': u'\u1ECD',
'oe': u'\u0153',
'oekorean': u'\u315A',
'ogonek': u'\u02DB',
'ogonekcmb': u'\u0328',
'ograve': u'\u00F2',
'ogujarati': u'\u0A93',
'oharmenian': u'\u0585',
'ohiragana': u'\u304A',
'ohookabove': u'\u1ECF',
'ohorn': u'\u01A1',
'ohornacute': u'\u1EDB',
'ohorndotbelow': u'\u1EE3',
'ohorngrave': u'\u1EDD',
'ohornhookabove': u'\u1EDF',
'ohorntilde': u'\u1EE1',
'ohungarumlaut': u'\u0151',
'oi': u'\u01A3',
'oinvertedbreve': u'\u020F',
'okatakana': u'\u30AA',
'okatakanahalfwidth': u'\uFF75',
'okorean': u'\u3157',
'olehebrew': u'\u05AB',
'omacron': u'\u014D',
'omacronacute': u'\u1E53',
'omacrongrave': u'\u1E51',
'omdeva': u'\u0950',
'omega': u'\u03C9',
'omega1': u'\u03D6',
'omegacyrillic': u'\u0461',
'omegalatinclosed': u'\u0277',
'omegaroundcyrillic': u'\u047B',
'omegatitlocyrillic': u'\u047D',
'omegatonos': u'\u03CE',
'omgujarati': u'\u0AD0',
'omicron': u'\u03BF',
'omicrontonos': u'\u03CC',
'omonospace': u'\uFF4F',
'one': u'\u0031',
'onearabic': u'\u0661',
'onebengali': u'\u09E7',
'onecircle': u'\u2460',
'onecircleinversesansserif': u'\u278A',
'onedeva': u'\u0967',
'onedotenleader': u'\u2024',
'oneeighth': u'\u215B',
'onefitted': u'\uF6DC',
'onegujarati': u'\u0AE7',
'onegurmukhi': u'\u0A67',
'onehackarabic': u'\u0661',
'onehalf': u'\u00BD',
'onehangzhou': u'\u3021',
'oneideographicparen': u'\u3220',
'oneinferior': u'\u2081',
'onemonospace': u'\uFF11',
'onenumeratorbengali': u'\u09F4',
'oneoldstyle': u'\uF731',
'oneparen': u'\u2474',
'oneperiod': u'\u2488',
'onepersian': u'\u06F1',
'onequarter': u'\u00BC',
'oneroman': u'\u2170',
'onesuperior': u'\u00B9',
'onethai': u'\u0E51',
'onethird': u'\u2153',
'oogonek': u'\u01EB',
'oogonekmacron': u'\u01ED',
'oogurmukhi': u'\u0A13',
'oomatragurmukhi': u'\u0A4B',
'oopen': u'\u0254',
'oparen': u'\u24AA',
'openbullet': u'\u25E6',
'option': u'\u2325',
'ordfeminine': u'\u00AA',
'ordmasculine': u'\u00BA',
'orthogonal': u'\u221F',
'oshortdeva': u'\u0912',
'oshortvowelsigndeva': u'\u094A',
'oslash': u'\u00F8',
'oslashacute': u'\u01FF',
'osmallhiragana': u'\u3049',
'osmallkatakana': u'\u30A9',
'osmallkatakanahalfwidth': u'\uFF6B',
'ostrokeacute': u'\u01FF',
'osuperior': u'\uF6F0',
'otcyrillic': u'\u047F',
'otilde': u'\u00F5',
'otildeacute': u'\u1E4D',
'otildedieresis': u'\u1E4F',
'oubopomofo': u'\u3121',
'overline': u'\u203E',
'overlinecenterline': u'\uFE4A',
'overlinecmb': u'\u0305',
'overlinedashed': u'\uFE49',
'overlinedblwavy': u'\uFE4C',
'overlinewavy': u'\uFE4B',
'overscore': u'\u00AF',
'ovowelsignbengali': u'\u09CB',
'ovowelsigndeva': u'\u094B',
'ovowelsigngujarati': u'\u0ACB',
'p': u'\u0070',
'paampssquare': u'\u3380',
'paasentosquare': u'\u332B',
'pabengali': u'\u09AA',
'pacute': u'\u1E55',
'padeva': u'\u092A',
'pagedown': u'\u21DF',
'pageup': u'\u21DE',
'pagujarati': u'\u0AAA',
'pagurmukhi': u'\u0A2A',
'pahiragana': u'\u3071',
'paiyannoithai': u'\u0E2F',
'pakatakana': u'\u30D1',
'palatalizationcyrilliccmb': u'\u0484',
'palochkacyrillic': u'\u04C0',
'pansioskorean': u'\u317F',
'paragraph': u'\u00B6',
'parallel': u'\u2225',
'parenleft': u'\u0028',
'parenleftaltonearabic': u'\uFD3E',
'parenleftbt': u'\uF8ED',
'parenleftex': u'\uF8EC',
'parenleftinferior': u'\u208D',
'parenleftmonospace': u'\uFF08',
'parenleftsmall': u'\uFE59',
'parenleftsuperior': u'\u207D',
'parenlefttp': u'\uF8EB',
'parenleftvertical': u'\uFE35',
'parenright': u'\u0029',
'parenrightaltonearabic': u'\uFD3F',
'parenrightbt': u'\uF8F8',
'parenrightex': u'\uF8F7',
'parenrightinferior': u'\u208E',
'parenrightmonospace': u'\uFF09',
'parenrightsmall': u'\uFE5A',
'parenrightsuperior': u'\u207E',
'parenrighttp': u'\uF8F6',
'parenrightvertical': u'\uFE36',
'partialdiff': u'\u2202',
'paseqhebrew': u'\u05C0',
'pashtahebrew': u'\u0599',
'pasquare': u'\u33A9',
'patah': u'\u05B7',
'patah11': u'\u05B7',
'patah1d': u'\u05B7',
'patah2a': u'\u05B7',
'patahhebrew': u'\u05B7',
'patahnarrowhebrew': u'\u05B7',
'patahquarterhebrew': u'\u05B7',
'patahwidehebrew': u'\u05B7',
'pazerhebrew': u'\u05A1',
'pbopomofo': u'\u3106',
'pcircle': u'\u24DF',
'pdotaccent': u'\u1E57',
'pe': u'\u05E4',
'pecyrillic': u'\u043F',
'pedagesh': u'\uFB44',
'pedageshhebrew': u'\uFB44',
'peezisquare': u'\u333B',
'pefinaldageshhebrew': u'\uFB43',
'peharabic': u'\u067E',
'peharmenian': u'\u057A',
'pehebrew': u'\u05E4',
'pehfinalarabic': u'\uFB57',
'pehinitialarabic': u'\uFB58',
'pehiragana': u'\u307A',
'pehmedialarabic': u'\uFB59',
'pekatakana': u'\u30DA',
'pemiddlehookcyrillic': u'\u04A7',
'perafehebrew': u'\uFB4E',
'percent': u'\u0025',
'percentarabic': u'\u066A',
'percentmonospace': u'\uFF05',
'percentsmall': u'\uFE6A',
'period': u'\u002E',
'periodarmenian': u'\u0589',
'periodcentered': u'\u00B7',
'periodhalfwidth': u'\uFF61',
'periodinferior': u'\uF6E7',
'periodmonospace': u'\uFF0E',
'periodsmall': u'\uFE52',
'periodsuperior': u'\uF6E8',
'perispomenigreekcmb': u'\u0342',
'perpendicular': u'\u22A5',
'perthousand': u'\u2030',
'peseta': u'\u20A7',
'pfsquare': u'\u338A',
'phabengali': u'\u09AB',
'phadeva': u'\u092B',
'phagujarati': u'\u0AAB',
'phagurmukhi': u'\u0A2B',
'phi': u'\u03C6',
'phi1': u'\u03D5',
'phieuphacirclekorean': u'\u327A',
'phieuphaparenkorean': u'\u321A',
'phieuphcirclekorean': u'\u326C',
'phieuphkorean': u'\u314D',
'phieuphparenkorean': u'\u320C',
'philatin': u'\u0278',
'phinthuthai': u'\u0E3A',
'phisymbolgreek': u'\u03D5',
'phook': u'\u01A5',
'phophanthai': u'\u0E1E',
'phophungthai': u'\u0E1C',
'phosamphaothai': u'\u0E20',
'pi': u'\u03C0',
'pieupacirclekorean': u'\u3273',
'pieupaparenkorean': u'\u3213',
'pieupcieuckorean': u'\u3176',
'pieupcirclekorean': u'\u3265',
'pieupkiyeokkorean': u'\u3172',
'pieupkorean': u'\u3142',
'pieupparenkorean': u'\u3205',
'pieupsioskiyeokkorean': u'\u3174',
'pieupsioskorean': u'\u3144',
'pieupsiostikeutkorean': u'\u3175',
'pieupthieuthkorean': u'\u3177',
'pieuptikeutkorean': u'\u3173',
'pihiragana': u'\u3074',
'pikatakana': u'\u30D4',
'pisymbolgreek': u'\u03D6',
'piwrarmenian': u'\u0583',
'plus': u'\u002B',
'plusbelowcmb': u'\u031F',
'pluscircle': u'\u2295',
'plusminus': u'\u00B1',
'plusmod': u'\u02D6',
'plusmonospace': u'\uFF0B',
'plussmall': u'\uFE62',
'plussuperior': u'\u207A',
'pmonospace': u'\uFF50',
'pmsquare': u'\u33D8',
'pohiragana': u'\u307D',
'pointingindexdownwhite': u'\u261F',
'pointingindexleftwhite': u'\u261C',
'pointingindexrightwhite': u'\u261E',
'pointingindexupwhite': u'\u261D',
'pokatakana': u'\u30DD',
'poplathai': u'\u0E1B',
'postalmark': u'\u3012',
'postalmarkface': u'\u3020',
'pparen': u'\u24AB',
'precedes': u'\u227A',
'prescription': u'\u211E',
'primemod': u'\u02B9',
'primereversed': u'\u2035',
'product': u'\u220F',
'projective': u'\u2305',
'prolongedkana': u'\u30FC',
'propellor': u'\u2318',
'propersubset': u'\u2282',
'propersuperset': u'\u2283',
'proportion': u'\u2237',
'proportional': u'\u221D',
'psi': u'\u03C8',
'psicyrillic': u'\u0471',
'psilipneumatacyrilliccmb': u'\u0486',
'pssquare': u'\u33B0',
'puhiragana': u'\u3077',
'pukatakana': u'\u30D7',
'pvsquare': u'\u33B4',
'pwsquare': u'\u33BA',
'q': u'\u0071',
'qadeva': u'\u0958',
'qadmahebrew': u'\u05A8',
'qafarabic': u'\u0642',
'qaffinalarabic': u'\uFED6',
'qafinitialarabic': u'\uFED7',
'qafmedialarabic': u'\uFED8',
'qamats': u'\u05B8',
'qamats10': u'\u05B8',
'qamats1a': u'\u05B8',
'qamats1c': u'\u05B8',
'qamats27': u'\u05B8',
'qamats29': u'\u05B8',
'qamats33': u'\u05B8',
'qamatsde': u'\u05B8',
'qamatshebrew': u'\u05B8',
'qamatsnarrowhebrew': u'\u05B8',
'qamatsqatanhebrew': u'\u05B8',
'qamatsqatannarrowhebrew': u'\u05B8',
'qamatsqatanquarterhebrew': u'\u05B8',
'qamatsqatanwidehebrew': u'\u05B8',
'qamatsquarterhebrew': u'\u05B8',
'qamatswidehebrew': u'\u05B8',
'qarneyparahebrew': u'\u059F',
'qbopomofo': u'\u3111',
'qcircle': u'\u24E0',
'qhook': u'\u02A0',
'qmonospace': u'\uFF51',
'qof': u'\u05E7',
'qofdagesh': u'\uFB47',
'qofdageshhebrew': u'\uFB47',
'qofhatafpatah': u'\u05E7\u05B2',
'qofhatafpatahhebrew': u'\u05E7\u05B2',
'qofhatafsegol': u'\u05E7\u05B1',
'qofhatafsegolhebrew': u'\u05E7\u05B1',
'qofhebrew': u'\u05E7',
'qofhiriq': u'\u05E7\u05B4',
'qofhiriqhebrew': u'\u05E7\u05B4',
'qofholam': u'\u05E7\u05B9',
'qofholamhebrew': u'\u05E7\u05B9',
'qofpatah': u'\u05E7\u05B7',
'qofpatahhebrew': u'\u05E7\u05B7',
'qofqamats': u'\u05E7\u05B8',
'qofqamatshebrew': u'\u05E7\u05B8',
'qofqubuts': u'\u05E7\u05BB',
'qofqubutshebrew': u'\u05E7\u05BB',
'qofsegol': u'\u05E7\u05B6',
'qofsegolhebrew': u'\u05E7\u05B6',
'qofsheva': u'\u05E7\u05B0',
'qofshevahebrew': u'\u05E7\u05B0',
'qoftsere': u'\u05E7\u05B5',
'qoftserehebrew': u'\u05E7\u05B5',
'qparen': u'\u24AC',
'quarternote': u'\u2669',
'qubuts': u'\u05BB',
'qubuts18': u'\u05BB',
'qubuts25': u'\u05BB',
'qubuts31': u'\u05BB',
'qubutshebrew': u'\u05BB',
'qubutsnarrowhebrew': u'\u05BB',
'qubutsquarterhebrew': u'\u05BB',
'qubutswidehebrew': u'\u05BB',
'question': u'\u003F',
'questionarabic': u'\u061F',
'questionarmenian': u'\u055E',
'questiondown': u'\u00BF',
'questiondownsmall': u'\uF7BF',
'questiongreek': u'\u037E',
'questionmonospace': u'\uFF1F',
'questionsmall': u'\uF73F',
'quotedbl': u'\u0022',
'quotedblbase': u'\u201E',
'quotedblleft': u'\u201C',
'quotedblmonospace': u'\uFF02',
'quotedblprime': u'\u301E',
'quotedblprimereversed': u'\u301D',
'quotedblright': u'\u201D',
'quoteleft': u'\u2018',
'quoteleftreversed': u'\u201B',
'quotereversed': u'\u201B',
'quoteright': u'\u2019',
'quoterightn': u'\u0149',
'quotesinglbase': u'\u201A',
'quotesingle': u'\u0027',
'quotesinglemonospace': u'\uFF07',
'r': u'\u0072',
'raarmenian': u'\u057C',
'rabengali': u'\u09B0',
'racute': u'\u0155',
'radeva': u'\u0930',
'radical': u'\u221A',
'radicalex': u'\uF8E5',
'radoverssquare': u'\u33AE',
'radoverssquaredsquare': u'\u33AF',
'radsquare': u'\u33AD',
'rafe': u'\u05BF',
'rafehebrew': u'\u05BF',
'ragujarati': u'\u0AB0',
'ragurmukhi': u'\u0A30',
'rahiragana': u'\u3089',
'rakatakana': u'\u30E9',
'rakatakanahalfwidth': u'\uFF97',
'ralowerdiagonalbengali': u'\u09F1',
'ramiddlediagonalbengali': u'\u09F0',
'ramshorn': u'\u0264',
'ratio': u'\u2236',
'rbopomofo': u'\u3116',
'rcaron': u'\u0159',
'rcedilla': u'\u0157',
'rcircle': u'\u24E1',
'rcommaaccent': u'\u0157',
'rdblgrave': u'\u0211',
'rdotaccent': u'\u1E59',
'rdotbelow': u'\u1E5B',
'rdotbelowmacron': u'\u1E5D',
'referencemark': u'\u203B',
'reflexsubset': u'\u2286',
'reflexsuperset': u'\u2287',
'registered': u'\u00AE',
'registersans': u'\uF8E8',
'registerserif': u'\uF6DA',
'reharabic': u'\u0631',
'reharmenian': u'\u0580',
'rehfinalarabic': u'\uFEAE',
'rehiragana': u'\u308C',
'rehyehaleflamarabic': u'\u0631\uFEF3\uFE8E\u0644',
'rekatakana': u'\u30EC',
'rekatakanahalfwidth': u'\uFF9A',
'resh': u'\u05E8',
'reshdageshhebrew': u'\uFB48',
'reshhatafpatah': u'\u05E8\u05B2',
'reshhatafpatahhebrew': u'\u05E8\u05B2',
'reshhatafsegol': u'\u05E8\u05B1',
'reshhatafsegolhebrew': u'\u05E8\u05B1',
'reshhebrew': u'\u05E8',
'reshhiriq': u'\u05E8\u05B4',
'reshhiriqhebrew': u'\u05E8\u05B4',
'reshholam': u'\u05E8\u05B9',
'reshholamhebrew': u'\u05E8\u05B9',
'reshpatah': u'\u05E8\u05B7',
'reshpatahhebrew': u'\u05E8\u05B7',
'reshqamats': u'\u05E8\u05B8',
'reshqamatshebrew': u'\u05E8\u05B8',
'reshqubuts': u'\u05E8\u05BB',
'reshqubutshebrew': u'\u05E8\u05BB',
'reshsegol': u'\u05E8\u05B6',
'reshsegolhebrew': u'\u05E8\u05B6',
'reshsheva': u'\u05E8\u05B0',
'reshshevahebrew': u'\u05E8\u05B0',
'reshtsere': u'\u05E8\u05B5',
'reshtserehebrew': u'\u05E8\u05B5',
'reversedtilde': u'\u223D',
'reviahebrew': u'\u0597',
'reviamugrashhebrew': u'\u0597',
'revlogicalnot': u'\u2310',
'rfishhook': u'\u027E',
'rfishhookreversed': u'\u027F',
'rhabengali': u'\u09DD',
'rhadeva': u'\u095D',
'rho': u'\u03C1',
'rhook': u'\u027D',
'rhookturned': u'\u027B',
'rhookturnedsuperior': u'\u02B5',
'rhosymbolgreek': u'\u03F1',
'rhotichookmod': u'\u02DE',
'rieulacirclekorean': u'\u3271',
'rieulaparenkorean': u'\u3211',
'rieulcirclekorean': u'\u3263',
'rieulhieuhkorean': u'\u3140',
'rieulkiyeokkorean': u'\u313A',
'rieulkiyeoksioskorean': u'\u3169',
'rieulkorean': u'\u3139',
'rieulmieumkorean': u'\u313B',
'rieulpansioskorean': u'\u316C',
'rieulparenkorean': u'\u3203',
'rieulphieuphkorean': u'\u313F',
'rieulpieupkorean': u'\u313C',
'rieulpieupsioskorean': u'\u316B',
'rieulsioskorean': u'\u313D',
'rieulthieuthkorean': u'\u313E',
'rieultikeutkorean': u'\u316A',
'rieulyeorinhieuhkorean': u'\u316D',
'rightangle': u'\u221F',
'righttackbelowcmb': u'\u0319',
'righttriangle': u'\u22BF',
'rihiragana': u'\u308A',
'rikatakana': u'\u30EA',
'rikatakanahalfwidth': u'\uFF98',
'ring': u'\u02DA',
'ringbelowcmb': u'\u0325',
'ringcmb': u'\u030A',
'ringhalfleft': u'\u02BF',
'ringhalfleftarmenian': u'\u0559',
'ringhalfleftbelowcmb': u'\u031C',
'ringhalfleftcentered': u'\u02D3',
'ringhalfright': u'\u02BE',
'ringhalfrightbelowcmb': u'\u0339',
'ringhalfrightcentered': u'\u02D2',
'rinvertedbreve': u'\u0213',
'rittorusquare': u'\u3351',
'rlinebelow': u'\u1E5F',
'rlongleg': u'\u027C',
'rlonglegturned': u'\u027A',
'rmonospace': u'\uFF52',
'rohiragana': u'\u308D',
'rokatakana': u'\u30ED',
'rokatakanahalfwidth': u'\uFF9B',
'roruathai': u'\u0E23',
'rparen': u'\u24AD',
'rrabengali': u'\u09DC',
'rradeva': u'\u0931',
'rragurmukhi': u'\u0A5C',
'rreharabic': u'\u0691',
'rrehfinalarabic': u'\uFB8D',
'rrvocalicbengali': u'\u09E0',
'rrvocalicdeva': u'\u0960',
'rrvocalicgujarati': u'\u0AE0',
'rrvocalicvowelsignbengali': u'\u09C4',
'rrvocalicvowelsigndeva': u'\u0944',
'rrvocalicvowelsigngujarati': u'\u0AC4',
'rsuperior': u'\uF6F1',
'rtblock': u'\u2590',
'rturned': u'\u0279',
'rturnedsuperior': u'\u02B4',
'ruhiragana': u'\u308B',
'rukatakana': u'\u30EB',
'rukatakanahalfwidth': u'\uFF99',
'rupeemarkbengali': u'\u09F2',
'rupeesignbengali': u'\u09F3',
'rupiah': u'\uF6DD',
'ruthai': u'\u0E24',
'rvocalicbengali': u'\u098B',
'rvocalicdeva': u'\u090B',
'rvocalicgujarati': u'\u0A8B',
'rvocalicvowelsignbengali': u'\u09C3',
'rvocalicvowelsigndeva': u'\u0943',
'rvocalicvowelsigngujarati': u'\u0AC3',
's': u'\u0073',
'sabengali': u'\u09B8',
'sacute': u'\u015B',
'sacutedotaccent': u'\u1E65',
'sadarabic': u'\u0635',
'sadeva': u'\u0938',
'sadfinalarabic': u'\uFEBA',
'sadinitialarabic': u'\uFEBB',
'sadmedialarabic': u'\uFEBC',
'sagujarati': u'\u0AB8',
'sagurmukhi': u'\u0A38',
'sahiragana': u'\u3055',
'sakatakana': u'\u30B5',
'sakatakanahalfwidth': u'\uFF7B',
'sallallahoualayhewasallamarabic': u'\uFDFA',
'samekh': u'\u05E1',
'samekhdagesh': u'\uFB41',
'samekhdageshhebrew': u'\uFB41',
'samekhhebrew': u'\u05E1',
'saraaathai': u'\u0E32',
'saraaethai': u'\u0E41',
'saraaimaimalaithai': u'\u0E44',
'saraaimaimuanthai': u'\u0E43',
'saraamthai': u'\u0E33',
'saraathai': u'\u0E30',
'saraethai': u'\u0E40',
'saraiileftthai': u'\uF886',
'saraiithai': u'\u0E35',
'saraileftthai': u'\uF885',
'saraithai': u'\u0E34',
'saraothai': u'\u0E42',
'saraueeleftthai': u'\uF888',
'saraueethai': u'\u0E37',
'saraueleftthai': u'\uF887',
'sarauethai': u'\u0E36',
'sarauthai': u'\u0E38',
'sarauuthai': u'\u0E39',
'sbopomofo': u'\u3119',
'scaron': u'\u0161',
'scarondotaccent': u'\u1E67',
'scedilla': u'\u015F',
'schwa': u'\u0259',
'schwacyrillic': u'\u04D9',
'schwadieresiscyrillic': u'\u04DB',
'schwahook': u'\u025A',
'scircle': u'\u24E2',
'scircumflex': u'\u015D',
'scommaaccent': u'\u0219',
'sdotaccent': u'\u1E61',
'sdotbelow': u'\u1E63',
'sdotbelowdotaccent': u'\u1E69',
'seagullbelowcmb': u'\u033C',
'second': u'\u2033',
'secondtonechinese': u'\u02CA',
'section': u'\u00A7',
'seenarabic': u'\u0633',
'seenfinalarabic': u'\uFEB2',
'seeninitialarabic': u'\uFEB3',
'seenmedialarabic': u'\uFEB4',
'segol': u'\u05B6',
'segol13': u'\u05B6',
'segol1f': u'\u05B6',
'segol2c': u'\u05B6',
'segolhebrew': u'\u05B6',
'segolnarrowhebrew': u'\u05B6',
'segolquarterhebrew': u'\u05B6',
'segoltahebrew': u'\u0592',
'segolwidehebrew': u'\u05B6',
'seharmenian': u'\u057D',
'sehiragana': u'\u305B',
'sekatakana': u'\u30BB',
'sekatakanahalfwidth': u'\uFF7E',
'semicolon': u'\u003B',
'semicolonarabic': u'\u061B',
'semicolonmonospace': u'\uFF1B',
'semicolonsmall': u'\uFE54',
'semivoicedmarkkana': u'\u309C',
'semivoicedmarkkanahalfwidth': u'\uFF9F',
'sentisquare': u'\u3322',
'sentosquare': u'\u3323',
'seven': u'\u0037',
'sevenarabic': u'\u0667',
'sevenbengali': u'\u09ED',
'sevencircle': u'\u2466',
'sevencircleinversesansserif': u'\u2790',
'sevendeva': u'\u096D',
'seveneighths': u'\u215E',
'sevengujarati': u'\u0AED',
'sevengurmukhi': u'\u0A6D',
'sevenhackarabic': u'\u0667',
'sevenhangzhou': u'\u3027',
'sevenideographicparen': u'\u3226',
'seveninferior': u'\u2087',
'sevenmonospace': u'\uFF17',
'sevenoldstyle': u'\uF737',
'sevenparen': u'\u247A',
'sevenperiod': u'\u248E',
'sevenpersian': u'\u06F7',
'sevenroman': u'\u2176',
'sevensuperior': u'\u2077',
'seventeencircle': u'\u2470',
'seventeenparen': u'\u2484',
'seventeenperiod': u'\u2498',
'seventhai': u'\u0E57',
'sfthyphen': u'\u00AD',
'shaarmenian': u'\u0577',
'shabengali': u'\u09B6',
'shacyrillic': u'\u0448',
'shaddaarabic': u'\u0651',
'shaddadammaarabic': u'\uFC61',
'shaddadammatanarabic': u'\uFC5E',
'shaddafathaarabic': u'\uFC60',
'shaddafathatanarabic': u'\u0651\u064B',
'shaddakasraarabic': u'\uFC62',
'shaddakasratanarabic': u'\uFC5F',
'shade': u'\u2592',
'shadedark': u'\u2593',
'shadelight': u'\u2591',
'shademedium': u'\u2592',
'shadeva': u'\u0936',
'shagujarati': u'\u0AB6',
'shagurmukhi': u'\u0A36',
'shalshelethebrew': u'\u0593',
'shbopomofo': u'\u3115',
'shchacyrillic': u'\u0449',
'sheenarabic': u'\u0634',
'sheenfinalarabic': u'\uFEB6',
'sheeninitialarabic': u'\uFEB7',
'sheenmedialarabic': u'\uFEB8',
'sheicoptic': u'\u03E3',
'sheqel': u'\u20AA',
'sheqelhebrew': u'\u20AA',
'sheva': u'\u05B0',
'sheva115': u'\u05B0',
'sheva15': u'\u05B0',
'sheva22': u'\u05B0',
'sheva2e': u'\u05B0',
'shevahebrew': u'\u05B0',
'shevanarrowhebrew': u'\u05B0',
'shevaquarterhebrew': u'\u05B0',
'shevawidehebrew': u'\u05B0',
'shhacyrillic': u'\u04BB',
'shimacoptic': u'\u03ED',
'shin': u'\u05E9',
'shindagesh': u'\uFB49',
'shindageshhebrew': u'\uFB49',
'shindageshshindot': u'\uFB2C',
'shindageshshindothebrew': u'\uFB2C',
'shindageshsindot': u'\uFB2D',
'shindageshsindothebrew': u'\uFB2D',
'shindothebrew': u'\u05C1',
'shinhebrew': u'\u05E9',
'shinshindot': u'\uFB2A',
'shinshindothebrew': u'\uFB2A',
'shinsindot': u'\uFB2B',
'shinsindothebrew': u'\uFB2B',
'shook': u'\u0282',
'sigma': u'\u03C3',
'sigma1': u'\u03C2',
'sigmafinal': u'\u03C2',
'sigmalunatesymbolgreek': u'\u03F2',
'sihiragana': u'\u3057',
'sikatakana': u'\u30B7',
'sikatakanahalfwidth': u'\uFF7C',
'siluqhebrew': u'\u05BD',
'siluqlefthebrew': u'\u05BD',
'similar': u'\u223C',
'sindothebrew': u'\u05C2',
'siosacirclekorean': u'\u3274',
'siosaparenkorean': u'\u3214',
'sioscieuckorean': u'\u317E',
'sioscirclekorean': u'\u3266',
'sioskiyeokkorean': u'\u317A',
'sioskorean': u'\u3145',
'siosnieunkorean': u'\u317B',
'siosparenkorean': u'\u3206',
'siospieupkorean': u'\u317D',
'siostikeutkorean': u'\u317C',
'six': u'\u0036',
'sixarabic': u'\u0666',
'sixbengali': u'\u09EC',
'sixcircle': u'\u2465',
'sixcircleinversesansserif': u'\u278F',
'sixdeva': u'\u096C',
'sixgujarati': u'\u0AEC',
'sixgurmukhi': u'\u0A6C',
'sixhackarabic': u'\u0666',
'sixhangzhou': u'\u3026',
'sixideographicparen': u'\u3225',
'sixinferior': u'\u2086',
'sixmonospace': u'\uFF16',
'sixoldstyle': u'\uF736',
'sixparen': u'\u2479',
'sixperiod': u'\u248D',
'sixpersian': u'\u06F6',
'sixroman': u'\u2175',
'sixsuperior': u'\u2076',
'sixteencircle': u'\u246F',
'sixteencurrencydenominatorbengali': u'\u09F9',
'sixteenparen': u'\u2483',
'sixteenperiod': u'\u2497',
'sixthai': u'\u0E56',
'slash': u'\u002F',
'slashmonospace': u'\uFF0F',
'slong': u'\u017F',
'slongdotaccent': u'\u1E9B',
'smileface': u'\u263A',
'smonospace': u'\uFF53',
'sofpasuqhebrew': u'\u05C3',
'softhyphen': u'\u00AD',
'softsigncyrillic': u'\u044C',
'sohiragana': u'\u305D',
'sokatakana': u'\u30BD',
'sokatakanahalfwidth': u'\uFF7F',
'soliduslongoverlaycmb': u'\u0338',
'solidusshortoverlaycmb': u'\u0337',
'sorusithai': u'\u0E29',
'sosalathai': u'\u0E28',
'sosothai': u'\u0E0B',
'sosuathai': u'\u0E2A',
'space': u'\u0020',
'spacehackarabic': u'\u0020',
'spade': u'\u2660',
'spadesuitblack': u'\u2660',
'spadesuitwhite': u'\u2664',
'sparen': u'\u24AE',
'squarebelowcmb': u'\u033B',
'squarecc': u'\u33C4',
'squarecm': u'\u339D',
'squarediagonalcrosshatchfill': u'\u25A9',
'squarehorizontalfill': u'\u25A4',
'squarekg': u'\u338F',
'squarekm': u'\u339E',
'squarekmcapital': u'\u33CE',
'squareln': u'\u33D1',
'squarelog': u'\u33D2',
'squaremg': u'\u338E',
'squaremil': u'\u33D5',
'squaremm': u'\u339C',
'squaremsquared': u'\u33A1',
'squareorthogonalcrosshatchfill': u'\u25A6',
'squareupperlefttolowerrightfill': u'\u25A7',
'squareupperrighttolowerleftfill': u'\u25A8',
'squareverticalfill': u'\u25A5',
'squarewhitewithsmallblack': u'\u25A3',
'srsquare': u'\u33DB',
'ssabengali': u'\u09B7',
'ssadeva': u'\u0937',
'ssagujarati': u'\u0AB7',
'ssangcieuckorean': u'\u3149',
'ssanghieuhkorean': u'\u3185',
'ssangieungkorean': u'\u3180',
'ssangkiyeokkorean': u'\u3132',
'ssangnieunkorean': u'\u3165',
'ssangpieupkorean': u'\u3143',
'ssangsioskorean': u'\u3146',
'ssangtikeutkorean': u'\u3138',
'ssuperior': u'\uF6F2',
'sterling': u'\u00A3',
'sterlingmonospace': u'\uFFE1',
'strokelongoverlaycmb': u'\u0336',
'strokeshortoverlaycmb': u'\u0335',
'subset': u'\u2282',
'subsetnotequal': u'\u228A',
'subsetorequal': u'\u2286',
'succeeds': u'\u227B',
'suchthat': u'\u220B',
'suhiragana': u'\u3059',
'sukatakana': u'\u30B9',
'sukatakanahalfwidth': u'\uFF7D',
'sukunarabic': u'\u0652',
'summation': u'\u2211',
'sun': u'\u263C',
'superset': u'\u2283',
'supersetnotequal': u'\u228B',
'supersetorequal': u'\u2287',
'svsquare': u'\u33DC',
'syouwaerasquare': u'\u337C',
't': u'\u0074',
'tabengali': u'\u09A4',
'tackdown': u'\u22A4',
'tackleft': u'\u22A3',
'tadeva': u'\u0924',
'tagujarati': u'\u0AA4',
'tagurmukhi': u'\u0A24',
'taharabic': u'\u0637',
'tahfinalarabic': u'\uFEC2',
'tahinitialarabic': u'\uFEC3',
'tahiragana': u'\u305F',
'tahmedialarabic': u'\uFEC4',
'taisyouerasquare': u'\u337D',
'takatakana': u'\u30BF',
'takatakanahalfwidth': u'\uFF80',
'tatweelarabic': u'\u0640',
'tau': u'\u03C4',
'tav': u'\u05EA',
'tavdages': u'\uFB4A',
'tavdagesh': u'\uFB4A',
'tavdageshhebrew': u'\uFB4A',
'tavhebrew': u'\u05EA',
'tbar': u'\u0167',
'tbopomofo': u'\u310A',
'tcaron': u'\u0165',
'tccurl': u'\u02A8',
'tcedilla': u'\u0163',
'tcheharabic': u'\u0686',
'tchehfinalarabic': u'\uFB7B',
'tchehinitialarabic': u'\uFB7C',
'tchehmedialarabic': u'\uFB7D',
'tchehmeeminitialarabic': u'\uFB7C\uFEE4',
'tcircle': u'\u24E3',
'tcircumflexbelow': u'\u1E71',
'tcommaaccent': u'\u0163',
'tdieresis': u'\u1E97',
'tdotaccent': u'\u1E6B',
'tdotbelow': u'\u1E6D',
'tecyrillic': u'\u0442',
'tedescendercyrillic': u'\u04AD',
'teharabic': u'\u062A',
'tehfinalarabic': u'\uFE96',
'tehhahinitialarabic': u'\uFCA2',
'tehhahisolatedarabic': u'\uFC0C',
'tehinitialarabic': u'\uFE97',
'tehiragana': u'\u3066',
'tehjeeminitialarabic': u'\uFCA1',
'tehjeemisolatedarabic': u'\uFC0B',
'tehmarbutaarabic': u'\u0629',
'tehmarbutafinalarabic': u'\uFE94',
'tehmedialarabic': u'\uFE98',
'tehmeeminitialarabic': u'\uFCA4',
'tehmeemisolatedarabic': u'\uFC0E',
'tehnoonfinalarabic': u'\uFC73',
'tekatakana': u'\u30C6',
'tekatakanahalfwidth': u'\uFF83',
'telephone': u'\u2121',
'telephoneblack': u'\u260E',
'telishagedolahebrew': u'\u05A0',
'telishaqetanahebrew': u'\u05A9',
'tencircle': u'\u2469',
'tenideographicparen': u'\u3229',
'tenparen': u'\u247D',
'tenperiod': u'\u2491',
'tenroman': u'\u2179',
'tesh': u'\u02A7',
'tet': u'\u05D8',
'tetdagesh': u'\uFB38',
'tetdageshhebrew': u'\uFB38',
'tethebrew': u'\u05D8',
'tetsecyrillic': u'\u04B5',
'tevirhebrew': u'\u059B',
'tevirlefthebrew': u'\u059B',
'thabengali': u'\u09A5',
'thadeva': u'\u0925',
'thagujarati': u'\u0AA5',
'thagurmukhi': u'\u0A25',
'thalarabic': u'\u0630',
'thalfinalarabic': u'\uFEAC',
'thanthakhatlowleftthai': u'\uF898',
'thanthakhatlowrightthai': u'\uF897',
'thanthakhatthai': u'\u0E4C',
'thanthakhatupperleftthai': u'\uF896',
'theharabic': u'\u062B',
'thehfinalarabic': u'\uFE9A',
'thehinitialarabic': u'\uFE9B',
'thehmedialarabic': u'\uFE9C',
'thereexists': u'\u2203',
'therefore': u'\u2234',
'theta': u'\u03B8',
'theta1': u'\u03D1',
'thetasymbolgreek': u'\u03D1',
'thieuthacirclekorean': u'\u3279',
'thieuthaparenkorean': u'\u3219',
'thieuthcirclekorean': u'\u326B',
'thieuthkorean': u'\u314C',
'thieuthparenkorean': u'\u320B',
'thirteencircle': u'\u246C',
'thirteenparen': u'\u2480',
'thirteenperiod': u'\u2494',
'thonangmonthothai': u'\u0E11',
'thook': u'\u01AD',
'thophuthaothai': u'\u0E12',
'thorn': u'\u00FE',
'thothahanthai': u'\u0E17',
'thothanthai': u'\u0E10',
'thothongthai': u'\u0E18',
'thothungthai': u'\u0E16',
'thousandcyrillic': u'\u0482',
'thousandsseparatorarabic': u'\u066C',
'thousandsseparatorpersian': u'\u066C',
'three': u'\u0033',
'threearabic': u'\u0663',
'threebengali': u'\u09E9',
'threecircle': u'\u2462',
'threecircleinversesansserif': u'\u278C',
'threedeva': u'\u0969',
'threeeighths': u'\u215C',
'threegujarati': u'\u0AE9',
'threegurmukhi': u'\u0A69',
'threehackarabic': u'\u0663',
'threehangzhou': u'\u3023',
'threeideographicparen': u'\u3222',
'threeinferior': u'\u2083',
'threemonospace': u'\uFF13',
'threenumeratorbengali': u'\u09F6',
'threeoldstyle': u'\uF733',
'threeparen': u'\u2476',
'threeperiod': u'\u248A',
'threepersian': u'\u06F3',
'threequarters': u'\u00BE',
'threequartersemdash': u'\uF6DE',
'threeroman': u'\u2172',
'threesuperior': u'\u00B3',
'threethai': u'\u0E53',
'thzsquare': u'\u3394',
'tihiragana': u'\u3061',
'tikatakana': u'\u30C1',
'tikatakanahalfwidth': u'\uFF81',
'tikeutacirclekorean': u'\u3270',
'tikeutaparenkorean': u'\u3210',
'tikeutcirclekorean': u'\u3262',
'tikeutkorean': u'\u3137',
'tikeutparenkorean': u'\u3202',
'tilde': u'\u02DC',
'tildebelowcmb': u'\u0330',
'tildecmb': u'\u0303',
'tildecomb': u'\u0303',
'tildedoublecmb': u'\u0360',
'tildeoperator': u'\u223C',
'tildeoverlaycmb': u'\u0334',
'tildeverticalcmb': u'\u033E',
'timescircle': u'\u2297',
'tipehahebrew': u'\u0596',
'tipehalefthebrew': u'\u0596',
'tippigurmukhi': u'\u0A70',
'titlocyrilliccmb': u'\u0483',
'tiwnarmenian': u'\u057F',
'tlinebelow': u'\u1E6F',
'tmonospace': u'\uFF54',
'toarmenian': u'\u0569',
'tohiragana': u'\u3068',
'tokatakana': u'\u30C8',
'tokatakanahalfwidth': u'\uFF84',
'tonebarextrahighmod': u'\u02E5',
'tonebarextralowmod': u'\u02E9',
'tonebarhighmod': u'\u02E6',
'tonebarlowmod': u'\u02E8',
'tonebarmidmod': u'\u02E7',
'tonefive': u'\u01BD',
'tonesix': u'\u0185',
'tonetwo': u'\u01A8',
'tonos': u'\u0384',
'tonsquare': u'\u3327',
'topatakthai': u'\u0E0F',
'tortoiseshellbracketleft': u'\u3014',
'tortoiseshellbracketleftsmall': u'\uFE5D',
'tortoiseshellbracketleftvertical': u'\uFE39',
'tortoiseshellbracketright': u'\u3015',
'tortoiseshellbracketrightsmall': u'\uFE5E',
'tortoiseshellbracketrightvertical': u'\uFE3A',
'totaothai': u'\u0E15',
'tpalatalhook': u'\u01AB',
'tparen': u'\u24AF',
'trademark': u'\u2122',
'trademarksans': u'\uF8EA',
'trademarkserif': u'\uF6DB',
'tretroflexhook': u'\u0288',
'triagdn': u'\u25BC',
'triaglf': u'\u25C4',
'triagrt': u'\u25BA',
'triagup': u'\u25B2',
'ts': u'\u02A6',
'tsadi': u'\u05E6',
'tsadidagesh': u'\uFB46',
'tsadidageshhebrew': u'\uFB46',
'tsadihebrew': u'\u05E6',
'tsecyrillic': u'\u0446',
'tsere': u'\u05B5',
'tsere12': u'\u05B5',
'tsere1e': u'\u05B5',
'tsere2b': u'\u05B5',
'tserehebrew': u'\u05B5',
'tserenarrowhebrew': u'\u05B5',
'tserequarterhebrew': u'\u05B5',
'tserewidehebrew': u'\u05B5',
'tshecyrillic': u'\u045B',
'tsuperior': u'\uF6F3',
'ttabengali': u'\u099F',
'ttadeva': u'\u091F',
'ttagujarati': u'\u0A9F',
'ttagurmukhi': u'\u0A1F',
'tteharabic': u'\u0679',
'ttehfinalarabic': u'\uFB67',
'ttehinitialarabic': u'\uFB68',
'ttehmedialarabic': u'\uFB69',
'tthabengali': u'\u09A0',
'tthadeva': u'\u0920',
'tthagujarati': u'\u0AA0',
'tthagurmukhi': u'\u0A20',
'tturned': u'\u0287',
'tuhiragana': u'\u3064',
'tukatakana': u'\u30C4',
'tukatakanahalfwidth': u'\uFF82',
'tusmallhiragana': u'\u3063',
'tusmallkatakana': u'\u30C3',
'tusmallkatakanahalfwidth': u'\uFF6F',
'twelvecircle': u'\u246B',
'twelveparen': u'\u247F',
'twelveperiod': u'\u2493',
'twelveroman': u'\u217B',
'twentycircle': u'\u2473',
'twentyhangzhou': u'\u5344',
'twentyparen': u'\u2487',
'twentyperiod': u'\u249B',
'two': u'\u0032',
'twoarabic': u'\u0662',
'twobengali': u'\u09E8',
'twocircle': u'\u2461',
'twocircleinversesansserif': u'\u278B',
'twodeva': u'\u0968',
'twodotenleader': u'\u2025',
'twodotleader': u'\u2025',
'twodotleadervertical': u'\uFE30',
'twogujarati': u'\u0AE8',
'twogurmukhi': u'\u0A68',
'twohackarabic': u'\u0662',
'twohangzhou': u'\u3022',
'twoideographicparen': u'\u3221',
'twoinferior': u'\u2082',
'twomonospace': u'\uFF12',
'twonumeratorbengali': u'\u09F5',
'twooldstyle': u'\uF732',
'twoparen': u'\u2475',
'twoperiod': u'\u2489',
'twopersian': u'\u06F2',
'tworoman': u'\u2171',
'twostroke': u'\u01BB',
'twosuperior': u'\u00B2',
'twothai': u'\u0E52',
'twothirds': u'\u2154',
'u': u'\u0075',
'uacute': u'\u00FA',
'ubar': u'\u0289',
'ubengali': u'\u0989',
'ubopomofo': u'\u3128',
'ubreve': u'\u016D',
'ucaron': u'\u01D4',
'ucircle': u'\u24E4',
'ucircumflex': u'\u00FB',
'ucircumflexbelow': u'\u1E77',
'ucyrillic': u'\u0443',
'udattadeva': u'\u0951',
'udblacute': u'\u0171',
'udblgrave': u'\u0215',
'udeva': u'\u0909',
'udieresis': u'\u00FC',
'udieresisacute': u'\u01D8',
'udieresisbelow': u'\u1E73',
'udieresiscaron': u'\u01DA',
'udieresiscyrillic': u'\u04F1',
'udieresisgrave': u'\u01DC',
'udieresismacron': u'\u01D6',
'udotbelow': u'\u1EE5',
'ugrave': u'\u00F9',
'ugujarati': u'\u0A89',
'ugurmukhi': u'\u0A09',
'uhiragana': u'\u3046',
'uhookabove': u'\u1EE7',
'uhorn': u'\u01B0',
'uhornacute': u'\u1EE9',
'uhorndotbelow': u'\u1EF1',
'uhorngrave': u'\u1EEB',
'uhornhookabove': u'\u1EED',
'uhorntilde': u'\u1EEF',
'uhungarumlaut': u'\u0171',
'uhungarumlautcyrillic': u'\u04F3',
'uinvertedbreve': u'\u0217',
'ukatakana': u'\u30A6',
'ukatakanahalfwidth': u'\uFF73',
'ukcyrillic': u'\u0479',
'ukorean': u'\u315C',
'umacron': u'\u016B',
'umacroncyrillic': u'\u04EF',
'umacrondieresis': u'\u1E7B',
'umatragurmukhi': u'\u0A41',
'umonospace': u'\uFF55',
'underscore': u'\u005F',
'underscoredbl': u'\u2017',
'underscoremonospace': u'\uFF3F',
'underscorevertical': u'\uFE33',
'underscorewavy': u'\uFE4F',
'union': u'\u222A',
'universal': u'\u2200',
'uogonek': u'\u0173',
'uparen': u'\u24B0',
'upblock': u'\u2580',
'upperdothebrew': u'\u05C4',
'upsilon': u'\u03C5',
'upsilondieresis': u'\u03CB',
'upsilondieresistonos': u'\u03B0',
'upsilonlatin': u'\u028A',
'upsilontonos': u'\u03CD',
'uptackbelowcmb': u'\u031D',
'uptackmod': u'\u02D4',
'uragurmukhi': u'\u0A73',
'uring': u'\u016F',
'ushortcyrillic': u'\u045E',
'usmallhiragana': u'\u3045',
'usmallkatakana': u'\u30A5',
'usmallkatakanahalfwidth': u'\uFF69',
'ustraightcyrillic': u'\u04AF',
'ustraightstrokecyrillic': u'\u04B1',
'utilde': u'\u0169',
'utildeacute': u'\u1E79',
'utildebelow': u'\u1E75',
'uubengali': u'\u098A',
'uudeva': u'\u090A',
'uugujarati': u'\u0A8A',
'uugurmukhi': u'\u0A0A',
'uumatragurmukhi': u'\u0A42',
'uuvowelsignbengali': u'\u09C2',
'uuvowelsigndeva': u'\u0942',
'uuvowelsigngujarati': u'\u0AC2',
'uvowelsignbengali': u'\u09C1',
'uvowelsigndeva': u'\u0941',
'uvowelsigngujarati': u'\u0AC1',
'v': u'\u0076',
'vadeva': u'\u0935',
'vagujarati': u'\u0AB5',
'vagurmukhi': u'\u0A35',
'vakatakana': u'\u30F7',
'vav': u'\u05D5',
'vavdagesh': u'\uFB35',
'vavdagesh65': u'\uFB35',
'vavdageshhebrew': u'\uFB35',
'vavhebrew': u'\u05D5',
'vavholam': u'\uFB4B',
'vavholamhebrew': u'\uFB4B',
'vavvavhebrew': u'\u05F0',
'vavyodhebrew': u'\u05F1',
'vcircle': u'\u24E5',
'vdotbelow': u'\u1E7F',
'vecyrillic': u'\u0432',
'veharabic': u'\u06A4',
'vehfinalarabic': u'\uFB6B',
'vehinitialarabic': u'\uFB6C',
'vehmedialarabic': u'\uFB6D',
'vekatakana': u'\u30F9',
'venus': u'\u2640',
'verticalbar': u'\u007C',
'verticallineabovecmb': u'\u030D',
'verticallinebelowcmb': u'\u0329',
'verticallinelowmod': u'\u02CC',
'verticallinemod': u'\u02C8',
'vewarmenian': u'\u057E',
'vhook': u'\u028B',
'vikatakana': u'\u30F8',
'viramabengali': u'\u09CD',
'viramadeva': u'\u094D',
'viramagujarati': u'\u0ACD',
'visargabengali': u'\u0983',
'visargadeva': u'\u0903',
'visargagujarati': u'\u0A83',
'vmonospace': u'\uFF56',
'voarmenian': u'\u0578',
'voicediterationhiragana': u'\u309E',
'voicediterationkatakana': u'\u30FE',
'voicedmarkkana': u'\u309B',
'voicedmarkkanahalfwidth': u'\uFF9E',
'vokatakana': u'\u30FA',
'vparen': u'\u24B1',
'vtilde': u'\u1E7D',
'vturned': u'\u028C',
'vuhiragana': u'\u3094',
'vukatakana': u'\u30F4',
'w': u'\u0077',
'wacute': u'\u1E83',
'waekorean': u'\u3159',
'wahiragana': u'\u308F',
'wakatakana': u'\u30EF',
'wakatakanahalfwidth': u'\uFF9C',
'wakorean': u'\u3158',
'wasmallhiragana': u'\u308E',
'wasmallkatakana': u'\u30EE',
'wattosquare': u'\u3357',
'wavedash': u'\u301C',
'wavyunderscorevertical': u'\uFE34',
'wawarabic': u'\u0648',
'wawfinalarabic': u'\uFEEE',
'wawhamzaabovearabic': u'\u0624',
'wawhamzaabovefinalarabic': u'\uFE86',
'wbsquare': u'\u33DD',
'wcircle': u'\u24E6',
'wcircumflex': u'\u0175',
'wdieresis': u'\u1E85',
'wdotaccent': u'\u1E87',
'wdotbelow': u'\u1E89',
'wehiragana': u'\u3091',
'weierstrass': u'\u2118',
'wekatakana': u'\u30F1',
'wekorean': u'\u315E',
'weokorean': u'\u315D',
'wgrave': u'\u1E81',
'whitebullet': u'\u25E6',
'whitecircle': u'\u25CB',
'whitecircleinverse': u'\u25D9',
'whitecornerbracketleft': u'\u300E',
'whitecornerbracketleftvertical': u'\uFE43',
'whitecornerbracketright': u'\u300F',
'whitecornerbracketrightvertical': u'\uFE44',
'whitediamond': u'\u25C7',
'whitediamondcontainingblacksmalldiamond': u'\u25C8',
'whitedownpointingsmalltriangle': u'\u25BF',
'whitedownpointingtriangle': u'\u25BD',
'whiteleftpointingsmalltriangle': u'\u25C3',
'whiteleftpointingtriangle': u'\u25C1',
'whitelenticularbracketleft': u'\u3016',
'whitelenticularbracketright': u'\u3017',
'whiterightpointingsmalltriangle': u'\u25B9',
'whiterightpointingtriangle': u'\u25B7',
'whitesmallsquare': u'\u25AB',
'whitesmilingface': u'\u263A',
'whitesquare': u'\u25A1',
'whitestar': u'\u2606',
'whitetelephone': u'\u260F',
'whitetortoiseshellbracketleft': u'\u3018',
'whitetortoiseshellbracketright': u'\u3019',
'whiteuppointingsmalltriangle': u'\u25B5',
'whiteuppointingtriangle': u'\u25B3',
'wihiragana': u'\u3090',
'wikatakana': u'\u30F0',
'wikorean': u'\u315F',
'wmonospace': u'\uFF57',
'wohiragana': u'\u3092',
'wokatakana': u'\u30F2',
'wokatakanahalfwidth': u'\uFF66',
'won': u'\u20A9',
'wonmonospace': u'\uFFE6',
'wowaenthai': u'\u0E27',
'wparen': u'\u24B2',
'wring': u'\u1E98',
'wsuperior': u'\u02B7',
'wturned': u'\u028D',
'wynn': u'\u01BF',
'x': u'\u0078',
'xabovecmb': u'\u033D',
'xbopomofo': u'\u3112',
'xcircle': u'\u24E7',
'xdieresis': u'\u1E8D',
'xdotaccent': u'\u1E8B',
'xeharmenian': u'\u056D',
'xi': u'\u03BE',
'xmonospace': u'\uFF58',
'xparen': u'\u24B3',
'xsuperior': u'\u02E3',
'y': u'\u0079',
'yaadosquare': u'\u334E',
'yabengali': u'\u09AF',
'yacute': u'\u00FD',
'yadeva': u'\u092F',
'yaekorean': u'\u3152',
'yagujarati': u'\u0AAF',
'yagurmukhi': u'\u0A2F',
'yahiragana': u'\u3084',
'yakatakana': u'\u30E4',
'yakatakanahalfwidth': u'\uFF94',
'yakorean': u'\u3151',
'yamakkanthai': u'\u0E4E',
'yasmallhiragana': u'\u3083',
'yasmallkatakana': u'\u30E3',
'yasmallkatakanahalfwidth': u'\uFF6C',
'yatcyrillic': u'\u0463',
'ycircle': u'\u24E8',
'ycircumflex': u'\u0177',
'ydieresis': u'\u00FF',
'ydotaccent': u'\u1E8F',
'ydotbelow': u'\u1EF5',
'yeharabic': u'\u064A',
'yehbarreearabic': u'\u06D2',
'yehbarreefinalarabic': u'\uFBAF',
'yehfinalarabic': u'\uFEF2',
'yehhamzaabovearabic': u'\u0626',
'yehhamzaabovefinalarabic': u'\uFE8A',
'yehhamzaaboveinitialarabic': u'\uFE8B',
'yehhamzaabovemedialarabic': u'\uFE8C',
'yehinitialarabic': u'\uFEF3',
'yehmedialarabic': u'\uFEF4',
'yehmeeminitialarabic': u'\uFCDD',
'yehmeemisolatedarabic': u'\uFC58',
'yehnoonfinalarabic': u'\uFC94',
'yehthreedotsbelowarabic': u'\u06D1',
'yekorean': u'\u3156',
'yen': u'\u00A5',
'yenmonospace': u'\uFFE5',
'yeokorean': u'\u3155',
'yeorinhieuhkorean': u'\u3186',
'yerahbenyomohebrew': u'\u05AA',
'yerahbenyomolefthebrew': u'\u05AA',
'yericyrillic': u'\u044B',
'yerudieresiscyrillic': u'\u04F9',
'yesieungkorean': u'\u3181',
'yesieungpansioskorean': u'\u3183',
'yesieungsioskorean': u'\u3182',
'yetivhebrew': u'\u059A',
'ygrave': u'\u1EF3',
'yhook': u'\u01B4',
'yhookabove': u'\u1EF7',
'yiarmenian': u'\u0575',
'yicyrillic': u'\u0457',
'yikorean': u'\u3162',
'yinyang': u'\u262F',
'yiwnarmenian': u'\u0582',
'ymonospace': u'\uFF59',
'yod': u'\u05D9',
'yoddagesh': u'\uFB39',
'yoddageshhebrew': u'\uFB39',
'yodhebrew': u'\u05D9',
'yodyodhebrew': u'\u05F2',
'yodyodpatahhebrew': u'\uFB1F',
'yohiragana': u'\u3088',
'yoikorean': u'\u3189',
'yokatakana': u'\u30E8',
'yokatakanahalfwidth': u'\uFF96',
'yokorean': u'\u315B',
'yosmallhiragana': u'\u3087',
'yosmallkatakana': u'\u30E7',
'yosmallkatakanahalfwidth': u'\uFF6E',
'yotgreek': u'\u03F3',
'yoyaekorean': u'\u3188',
'yoyakorean': u'\u3187',
'yoyakthai': u'\u0E22',
'yoyingthai': u'\u0E0D',
'yparen': u'\u24B4',
'ypogegrammeni': u'\u037A',
'ypogegrammenigreekcmb': u'\u0345',
'yr': u'\u01A6',
'yring': u'\u1E99',
'ysuperior': u'\u02B8',
'ytilde': u'\u1EF9',
'yturned': u'\u028E',
'yuhiragana': u'\u3086',
'yuikorean': u'\u318C',
'yukatakana': u'\u30E6',
'yukatakanahalfwidth': u'\uFF95',
'yukorean': u'\u3160',
'yusbigcyrillic': u'\u046B',
'yusbigiotifiedcyrillic': u'\u046D',
'yuslittlecyrillic': u'\u0467',
'yuslittleiotifiedcyrillic': u'\u0469',
'yusmallhiragana': u'\u3085',
'yusmallkatakana': u'\u30E5',
'yusmallkatakanahalfwidth': u'\uFF6D',
'yuyekorean': u'\u318B',
'yuyeokorean': u'\u318A',
'yyabengali': u'\u09DF',
'yyadeva': u'\u095F',
'z': u'\u007A',
'zaarmenian': u'\u0566',
'zacute': u'\u017A',
'zadeva': u'\u095B',
'zagurmukhi': u'\u0A5B',
'zaharabic': u'\u0638',
'zahfinalarabic': u'\uFEC6',
'zahinitialarabic': u'\uFEC7',
'zahiragana': u'\u3056',
'zahmedialarabic': u'\uFEC8',
'zainarabic': u'\u0632',
'zainfinalarabic': u'\uFEB0',
'zakatakana': u'\u30B6',
'zaqefgadolhebrew': u'\u0595',
'zaqefqatanhebrew': u'\u0594',
'zarqahebrew': u'\u0598',
'zayin': u'\u05D6',
'zayindagesh': u'\uFB36',
'zayindageshhebrew': u'\uFB36',
'zayinhebrew': u'\u05D6',
'zbopomofo': u'\u3117',
'zcaron': u'\u017E',
'zcircle': u'\u24E9',
'zcircumflex': u'\u1E91',
'zcurl': u'\u0291',
'zdot': u'\u017C',
'zdotaccent': u'\u017C',
'zdotbelow': u'\u1E93',
'zecyrillic': u'\u0437',
'zedescendercyrillic': u'\u0499',
'zedieresiscyrillic': u'\u04DF',
'zehiragana': u'\u305C',
'zekatakana': u'\u30BC',
'zero': u'\u0030',
'zeroarabic': u'\u0660',
'zerobengali': u'\u09E6',
'zerodeva': u'\u0966',
'zerogujarati': u'\u0AE6',
'zerogurmukhi': u'\u0A66',
'zerohackarabic': u'\u0660',
'zeroinferior': u'\u2080',
'zeromonospace': u'\uFF10',
'zerooldstyle': u'\uF730',
'zeropersian': u'\u06F0',
'zerosuperior': u'\u2070',
'zerothai': u'\u0E50',
'zerowidthjoiner': u'\uFEFF',
'zerowidthnonjoiner': u'\u200C',
'zerowidthspace': u'\u200B',
'zeta': u'\u03B6',
'zhbopomofo': u'\u3113',
'zhearmenian': u'\u056A',
'zhebrevecyrillic': u'\u04C2',
'zhecyrillic': u'\u0436',
'zhedescendercyrillic': u'\u0497',
'zhedieresiscyrillic': u'\u04DD',
'zihiragana': u'\u3058',
'zikatakana': u'\u30B8',
'zinorhebrew': u'\u05AE',
'zlinebelow': u'\u1E95',
'zmonospace': u'\uFF5A',
'zohiragana': u'\u305E',
'zokatakana': u'\u30BE',
'zparen': u'\u24B5',
'zretroflexhook': u'\u0290',
'zstroke': u'\u01B6',
'zuhiragana': u'\u305A',
'zukatakana': u'\u30BA',
}
#--end
| bsd-3-clause |
RicardoJohann/um | erpnext/stock/report/stock_projected_qty/stock_projected_qty.py | 24 | 2178 | # Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
def execute(filters=None):
	"""Entry point for the Stock Projected Qty query report.

	:param filters: report filters (may contain item_code, brand, company, warehouse)
	:returns: (columns, data) tuple in the shape frappe's report runner expects
	"""
	# The condition helpers only splice in %(...)s placeholders, so the actual
	# filter values are still passed separately and escaped by frappe.db.sql.
	query = """select
		item.name, item.item_name, description, item_group, brand, warehouse, item.stock_uom,
		actual_qty, planned_qty, indented_qty, ordered_qty, reserved_qty,
		projected_qty, item.re_order_level, item.re_order_qty,
		(item.re_order_level - projected_qty) as shortage_qty
		from `tabBin` bin,
		(select name, company from tabWarehouse
		{warehouse_conditions}) wh,
		(select name, item_name, description, stock_uom, item_group,
			brand, re_order_level, re_order_qty
			from `tabItem` {item_conditions}) item
		where item_code = item.name and warehouse = wh.name
		order by item.name, wh.name""".format(
		item_conditions=get_item_conditions(filters),
		warehouse_conditions=get_warehouse_conditions(filters))
	return get_columns(), frappe.db.sql(query, filters)
def get_columns():
	"""Column definitions in frappe's "Label:Fieldtype/Options:Width" format."""
	columns = [
		_("Item Code") + ":Link/Item:140",
		_("Item Name") + "::100",
		_("Description") + "::200",
		_("Item Group") + ":Link/Item Group:100",
		_("Brand") + ":Link/Brand:100",
		_("Warehouse") + ":Link/Warehouse:120",
		_("UOM") + ":Link/UOM:100",
		_("Actual Qty") + ":Float:100",
		_("Planned Qty") + ":Float:100",
		_("Requested Qty") + ":Float:110",
		_("Ordered Qty") + ":Float:100",
		_("Reserved Qty") + ":Float:100",
		_("Projected Qty") + ":Float:100",
		_("Reorder Level") + ":Float:100",
		_("Reorder Qty") + ":Float:100",
		_("Shortage Qty") + ":Float:100",
	]
	return columns
def get_item_conditions(filters):
	"""Return a SQL where-clause (with %(...)s placeholders) for the tabItem
	subquery, or an empty string when no item filters are set.

	:param filters: dict-like report filters
	:returns: "where ..." fragment or ""
	"""
	clauses = []
	if filters.get("item_code"):
		clauses.append("name=%(item_code)s")
	if filters.get("brand"):
		clauses.append("brand=%(brand)s")
	if not clauses:
		return ""
	return "where {}".format(" and ".join(clauses))
def get_warehouse_conditions(filters):
	"""Return a SQL where-clause (with %(...)s placeholders) for the
	tabWarehouse subquery, or an empty string when no warehouse filters are set.

	:param filters: dict-like report filters
	:returns: "where ..." fragment or ""
	"""
	clauses = []
	if filters.get("company"):
		clauses.append("company=%(company)s")
	if filters.get("warehouse"):
		clauses.append("name=%(warehouse)s")
	if not clauses:
		return ""
	return "where {}".format(" and ".join(clauses))
thomlake/EbmLib | ebmlib/srrbm/smsrrbm.py | 1 | 4739 | #---------------------------------------#
# This file is part of EbmLib.
#
# EbmLib is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# EbmLib is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EbmLib. If not, see <http://www.gnu.org/licenses/>.
#---------------------------------------#
# author:
# tllake
# email:
# <[email protected]>
# <[email protected]>
# date:
# 2011.08.30
# file:
# srrbm.py
# description:
# Recursive Restricted Boltzmann Machine class
#---------------------------------------#
import numpy as np
from .. units import sigmoid, rthresh, softmax
from .. units import unittypes
class SoftmaxSrrbm(object):
	"""Recursive restricted Boltzmann machine with softmax hidden units.

	The hidden layer doubles as a recurrent context: ``push`` folds a new
	visible vector into the stateful hidden vector ``self.h``, and ``pop``
	reconstructs a visible vector while rewinding ``self.h`` to the prior
	context.

	:param nvis: number of visible units
	:param nhid: number of hidden units
	:type nvis: int
	:type nhid: int
	"""
	def __init__(self, nvis, nhid):
		# unit counts
		self.nvis = nvis
		self.nhid = nhid
		# unit states: visible, context, hidden
		self.v = np.zeros(nvis)
		self.c = np.zeros(nhid)
		self.h = np.zeros(nhid)
		# weights: hidden<->visible and hidden<->context
		self.Whv = np.random.uniform(low = -0.2, high = 0.2, size = (nhid, nvis))
		self.Whc = np.random.uniform(low = -0.2, high = 0.2, size = (nhid, nhid))
		# biases
		self.vb = np.zeros(nvis)
		self.cb = np.zeros(nhid)
		self.hb = np.zeros(nhid)
		# delta weights (gradient accumulators filled in by external training code)
		self.dWhv = np.zeros((nhid, nvis))
		self.dWhc = np.zeros((nhid, nhid))
		# delta biases
		self.dvb = np.zeros(nvis)
		self.dcb = np.zeros(nhid)
		self.dhb = np.zeros(nhid)

	def ff(self, v, c):
		"""compute the hidden softmax activation given visible and context states

		:param v: visible unit state
		:param c: context unit state
		:type v: numpy.array
		:type c: numpy.array
		:returns: hidden softmax activation
		:rtype: numpy.array
		"""
		return softmax(np.dot(self.Whv, v) + np.dot(self.Whc, c) + self.hb)

	def fb(self, h):
		"""compute visible and context activations given a hidden state

		:param h: hidden unit state
		:type h: numpy.array
		:returns: visible state, context state
		:rtype: tuple (numpy.array, numpy.array)
		"""
		return sigmoid(np.dot(self.Whv.T, h) + self.vb), softmax(np.dot(self.Whc.T, h) + self.cb)

	def hid_sample(self, h, det = False, index = False):
		"""sample a one-hot hidden state from a softmax activation

		:param h: hidden softmax activation (a categorical distribution)
		:param det: if True take the argmax, otherwise sample stochastically
		:param index: if True return the winning unit's index instead of a one-hot vector
		:type h: numpy.array
		:type det: bool
		:type index: bool
		:returns: one-hot vector of length nhid, or an int index
		:rtype: numpy.array or int
		"""
		if det:
			if index:
				return h.argmax()
			s = np.zeros(self.nhid)
			s[h.argmax()] = 1
			return s
		else:
			# inverse-transform sampling over the categorical distribution h
			if index:
				return h.cumsum().searchsorted(np.random.random())
			s = np.zeros(self.nhid)
			s[h.cumsum().searchsorted(np.random.random())] = 1
			return s

	def vis_sample(self, v):
		"""sample a binary visible state from activation probabilities

		:param v: visible activation probabilities
		:type v: numpy.array
		:returns: binary visible state
		:rtype: numpy.array
		"""
		return rthresh(v)

	def push(self, x):
		"""push an input x onto the recurrent hidden state

		:param x: input
		:type x: numpy.array
		:rtype: None
		"""
		self.h = self.ff(x, self.hid_sample(self.h))

	def pop(self):
		"""pop a visible state, rewinding the hidden state to the prior context

		:returns: visible state
		:rtype: numpy.array
		"""
		v, self.h = self.fb(self.hid_sample(self.h))
		return self.vis_sample(v)

	def reset(self):
		"""reset the network's stateful hidden units to the bias-only distribution

		:rtype: None
		"""
		#self.h = np.zeros(self.nhid)
		self.h = softmax(self.hb)

	def free_energy(self, v):
		"""compute the free energy of a visible vector under the current context

		:param v: visible unit state
		:type v: numpy.ndarray
		:returns: free energy of v
		:rtype: float
		"""
		vbias_term = -1 * np.sum(v * self.vb)
		cbias_term = -1 * np.sum(self.h * self.cb)
		hidden_term = -1 * np.sum(np.log(1 + np.exp(np.dot(self.Whv, v) + np.dot(self.Whc, self.h) + self.hb)))
		return vbias_term + cbias_term + hidden_term

	def __getstate__(self):
		# Fix: previously read non-existent attributes self.dwv / self.dwc,
		# which raised AttributeError on pickling; the delta-weight arrays are
		# named self.dWhv / self.dWhc (matching __setstate__'s keys).
		d = {
			'nvis': self.nvis,
			'nhid': self.nhid,
			'v': self.v.copy(),
			'c': self.c.copy(),
			'h': self.h.copy(),
			'Whv': self.Whv.copy(),
			'Whc': self.Whc.copy(),
			'vb': self.vb.copy(),
			'cb': self.cb.copy(),
			'hb': self.hb.copy(),
			'dWhv': self.dWhv.copy(),
			'dWhc': self.dWhc.copy(),
			'dvb': self.dvb.copy(),
			'dcb': self.dcb.copy(),
			'dhb': self.dhb.copy()}
		return d

	def __setstate__(self, d):
		self.nvis = d['nvis']
		self.nhid = d['nhid']
		self.v = d['v']
		self.c = d['c']
		self.h = d['h']
		self.Whv = d['Whv']
		self.Whc = d['Whc']
		self.vb = d['vb']
		self.cb = d['cb']
		self.hb = d['hb']
		self.dWhv = d['dWhv']
		self.dWhc = d['dWhc']
		self.dvb = d['dvb']
		self.dcb = d['dcb']
		self.dhb = d['dhb']
| gpl-3.0 |
freedomtan/tensorflow | tensorflow/python/ops/nccl_ops_test.py | 5 | 7058 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for nccl ops. See also the cc test for nccl_communicator."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from functools import partial
import numpy as np
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gradients
from tensorflow.python.ops import nccl_ops
from tensorflow.python.platform import test
def _DeviceTensors(tensors, devices):
  """Returns identity copies of `tensors`, each placed on the matching device.

  Args:
    tensors: A list of tensors.
    devices: A list of device strings, one per tensor.

  Returns:
    A list of identity tensors, where the i-th result is pinned to devices[i].
  """
  placed = []
  for tensor, device in zip(tensors, devices):
    with ops.device(device):
      placed.append(array_ops.identity(tensor))
  return placed
def _NcclAllReduce(nccl_fun, tensors, devices):
  """Applies the nccl all-reduce `nccl_fun` to `tensors` placed on `devices`."""
  placed = _DeviceTensors(tensors, devices)
  return nccl_fun(placed)
def _NcclReduce(nccl_fun, tensors, devices):
  """Reduces `tensors` with `nccl_fun` onto one randomly chosen device."""
  receiver_index = np.random.randint(0, len(devices))
  with ops.device(devices[receiver_index]):
    reduced = nccl_fun(_DeviceTensors(tensors, devices))
  return [reduced]
def _NcclBroadcast(tensors, devices):
  """Broadcasts tensors[0] from one randomly chosen sender to all devices."""
  sender_index = np.random.randint(0, len(devices))
  with ops.device(devices[sender_index]):
    source = array_ops.identity(tensors[0])
    broadcast = nccl_ops.broadcast(source)
  return _DeviceTensors([broadcast] * len(devices), devices)
class NcclTestCase(test.TestCase):
  """Shared harness that checks an nccl reduction against a numpy reference."""

  def _Test(self,
            nccl_reduce,
            numpy_fn,
            device_sets=(['/device:GPU:1', '/device:GPU:2', '/device:GPU:0'],
                         ['/device:GPU:1', '/device:GPU:0'])):
    """Tests that nccl_reduce does the same as reduction with numpy_fn.

    Args:
      nccl_reduce: A function taking a list of tensors and a list of devices,
        and returns a list of reduced tensors and a list of ops to perform the
        reduction.
      numpy_fn: A function taking two tensors and returning the reduction of the
        two.
      device_sets: Tuple of virtual devices to run test on.
    """
    for dtype in [np.float16, np.float32, np.int32, np.int64, np.float64]:
      # Create session inside outer loop to test use of
      # same communicator across multiple sessions.
      with self.test_session(use_gpu=True) as sess:
        for devices in device_sets:
          shape = (3, 4)
          # Values in [-512, 512); every device replica casts the same
          # sample so the reference reduction is exact per dtype.
          random = (np.random.random_sample(shape) - .5) * 1024
          tensors = []
          for _ in devices:
            tensors.append(random.astype(dtype))
          # Fold replicas pairwise to build the numpy reference answer.
          np_ans = tensors[0]
          for t in tensors[1:]:
            np_ans = numpy_fn(np_ans, t)
          reduce_tensors = nccl_reduce(tensors, devices)
          self.assertNotEmpty(reduce_tensors)

          # Test shape inference.
          for r in reduce_tensors:
            self.assertEqual(shape, r.get_shape())

          result_tensors = [array_ops.identity(t) for t in reduce_tensors]

          # Check GPU availability *after* creating session, see b/68975239.
          if not test.is_gpu_available():
            # If no GPU is available, only test graph construction.
            continue

          # Test execution and results.
          for t in self.evaluate(result_tensors):
            self.assertAllClose(t, np_ans)

  def _TestGradient(self, nccl_reduce, numpy_fn):
    """Tests the gradient of nccl_reduce.

    Args:
      nccl_reduce: A function taking a list of tensors and a list of devices,
        and returns a list of reduced tensors and a list of ops to perform the
        reduction.
      numpy_fn: A function taking two tensors and returning the gradient of the
        reduction of the two.
    """

    def _Gradient(tensors, devices):
      # Feed through placeholders so gradients flow back to fresh inputs
      # co-located with the reduction outputs.
      inputs = [array_ops.placeholder(t.dtype, t.shape) for t in tensors]
      reduce_tensors = nccl_reduce(inputs, devices)
      losses = _DeviceTensors(tensors, [t.device for t in reduce_tensors])
      grads = gradients.gradients(
          reduce_tensors, inputs, losses, colocate_gradients_with_ops=True)
      return [g for g in grads if g is not None]

    self._Test(_Gradient, numpy_fn)
class AllReduceTest(NcclTestCase):
  """All-reduce variants plus error handling of invalid inputs."""

  def testAllReduce(self):
    # Each nccl reduction is checked against its elementwise numpy twin,
    # in the same order as before: sum, prod, min, max.
    cases = [
        (nccl_ops.all_sum, lambda x, y: x + y),
        (nccl_ops.all_prod, lambda x, y: x * y),
        (nccl_ops.all_min, np.minimum),
        (nccl_ops.all_max, np.maximum),
    ]
    for nccl_fn, reference_fn in cases:
      self._Test(partial(_NcclAllReduce, nccl_fn), reference_fn)

  def testAllSumGrad(self):
    self._TestGradient(
        partial(_NcclAllReduce, nccl_ops.all_sum), lambda x, y: x + y)

  def testErrors(self):
    with self.assertRaisesRegex(ValueError, 'Device assignment required'):
      nccl_ops.all_sum([array_ops.identity(np.random.random_sample((3, 4)))])
    with self.assertRaisesRegex(ValueError, 'Must pass >0 tensors'):
      nccl_ops.all_sum([])
class SingleReduceTest(NcclTestCase):
  """reduce_sum to a single receiver device, forward and gradient."""

  def testSum(self):
    reduce_fn = partial(_NcclReduce, nccl_ops.reduce_sum)
    self._Test(reduce_fn, lambda x, y: x + y)

  def testSumGrad(self):
    reduce_fn = partial(_NcclReduce, nccl_ops.reduce_sum)
    self._TestGradient(reduce_fn, lambda x, y: x)
class BroadcastTest(NcclTestCase):
  """Broadcast cases, including the unsupported CPU-destination path."""

  def testBroadcast(self):
    # Broadcast leaves every replica equal to tensors[0], so the reference
    # "reduction" simply keeps the first operand.
    self._Test(_NcclBroadcast, lambda x, y: x)

  def testBroadcastSingleDevice(self):
    # Broadcasts on a single device are removed completely during rewrite.
    self._Test(_NcclBroadcast, lambda x, y: x,
               (['/device:GPU:0', '/device:GPU:0'],))

  def testBroadcastToCpuError(self):
    try:
      # Broadcasts to CPU is not supported.
      self._Test(_NcclBroadcast, lambda x, y: x,
                 (['/device:GPU:0', '/device:CPU:0'],))
    except errors.NotFoundError as e:
      # Expected: there is no CPU kernel registered for the receive op.
      self.assertRegex(
          str(e), "No registered '_NcclBroadcastRecv' OpKernel for CPU devices")
    else:
      # Session isn't executed when no GPU is available.
      if test.is_gpu_available():
        self.fail("Didn't raise NotFoundError trying to broadcast to CPU")
class CombinedTest(NcclTestCase):
  """Test all-reduce vs. single-reduce plus broadcast in one session.run."""

  def _Combined(self, tensors, devices):
    # Two equivalent strategies built into the same graph: sum everywhere,
    # and sum once then fan the result back out.
    summed_everywhere = _NcclAllReduce(nccl_ops.all_sum, tensors, devices)
    summed_once = _NcclReduce(nccl_ops.reduce_sum, tensors, devices)
    fanned_out = _NcclBroadcast(summed_once, devices)
    return summed_everywhere + fanned_out

  def testCombined(self):
    self._Test(self._Combined, lambda x, y: x + y)
if __name__ == '__main__':
  # Dispatch through the TensorFlow test runner (handles flags/devices).
  test.main()
| apache-2.0 |
jordanemedlock/psychtruths | temboo/core/Library/PagerDuty/Incidents/ListIncidents.py | 5 | 6795 | # -*- coding: utf-8 -*-
###############################################################################
#
# ListIncidents
# Allows you to list or search PagerDuty incidents.
#
# Python versions 2.6, 2.7, 3.x
#
# Copyright 2014, Temboo Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
#
#
###############################################################################
from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution
import json
class ListIncidents(Choreography):

    """Choreo wrapper: list or search PagerDuty incidents."""

    def __init__(self, temboo_session):
        """
        Create a new instance of the ListIncidents Choreo. A TembooSession object, containing a valid
        set of Temboo credentials, must be supplied.
        """
        Choreography.__init__(
            self, temboo_session, '/Library/PagerDuty/Incidents/ListIncidents')

    def new_input_set(self):
        # Factory for the matching InputSet used by the framework.
        return ListIncidentsInputSet()

    def _make_result_set(self, result, path):
        # Factory for the matching ResultSet used by the framework.
        return ListIncidentsResultSet(result, path)

    def _make_execution(self, session, exec_id, path):
        # Factory for the matching execution tracker used by the framework.
        return ListIncidentsChoreographyExecution(session, exec_id, path)
class ListIncidentsInputSet(InputSet):
    """
    An InputSet with methods appropriate for specifying the inputs to the ListIncidents
    Choreo. The InputSet object is used to specify input parameters when executing this Choreo.

    Each setter stores its value under the input name expected by the
    Temboo server; InputSet._set_input is the next method in the MRO, so
    calling it directly is equivalent to the generated super() form.
    """

    def set_APIKey(self, value):
        """Set the APIKey input. ((required, string) The API Key provided by PagerDuty.)"""
        InputSet._set_input(self, 'APIKey', value)

    def set_AssignedToUser(self, value):
        """Set the AssignedToUser input. ((optional, string) Returns only incidents assigned to the specified user.)"""
        InputSet._set_input(self, 'AssignedToUser', value)

    def set_DateRange(self, value):
        """Set the DateRange input. ((optional, string) When set to "all", this allows you to retrieve all incidents since the account was created.)"""
        InputSet._set_input(self, 'DateRange', value)

    def set_Fields(self, value):
        """Set the Fields input. ((optional, string) Allows you to select specific incident properties to be returned in the response.)"""
        InputSet._set_input(self, 'Fields', value)

    def set_IncidentKey(self, value):
        """Set the IncidentKey input. ((optional, string) Returns only incidents with the specified key.)"""
        InputSet._set_input(self, 'IncidentKey', value)

    def set_Limit(self, value):
        """Set the Limit input. ((optional, integer) The number of incidents returned. Default (and max limit) is 100.)"""
        InputSet._set_input(self, 'Limit', value)

    def set_Offset(self, value):
        """Set the Offset input. ((optional, integer) The offset of the first incident record returned. Default is 0.)"""
        InputSet._set_input(self, 'Offset', value)

    def set_Service(self, value):
        """Set the Service input. ((optional, string) Returns only incidents associated with the specified service.)"""
        InputSet._set_input(self, 'Service', value)

    def set_Since(self, value):
        """Set the Since input. ((optional, date) The start of the date range to search (e.g., 2013-03-06T15:28-05). Note that including the time is optional.)"""
        InputSet._set_input(self, 'Since', value)

    def set_SortBy(self, value):
        """Set the SortBy input. ((optional, string) The field to sort the results on (incident_number, created_on, or resolved_on) plus the direction (asc/desc), e.g. created_on:desc.)"""
        InputSet._set_input(self, 'SortBy', value)

    def set_Status(self, value):
        """Set the Status input. ((optional, string) Returns only the incidents with this specified status. Valid values are: triggered, acknowledged, and resolved.)"""
        InputSet._set_input(self, 'Status', value)

    def set_SubDomain(self, value):
        """Set the SubDomain input. ((required, string) The subdomain of your PagerDuty site address.)"""
        InputSet._set_input(self, 'SubDomain', value)

    def set_TimeZone(self, value):
        """Set the TimeZone input. ((optional, string) The time zone in which dates in the result will be rendered. Defaults to account time zone.)"""
        InputSet._set_input(self, 'TimeZone', value)

    def set_Until(self, value):
        """Set the Until input. ((optional, date) The end of the date range to search (e.g., 2013-03-06T15:28-05). Note that including the time is optional.)"""
        InputSet._set_input(self, 'Until', value)
class ListIncidentsResultSet(ResultSet):
    """
    A ResultSet with methods tailored to the values returned by the ListIncidents Choreo.
    The ResultSet object is used to retrieve the results of a Choreo execution.
    """

    def getJSONFromString(self, str):
        # NOTE: the parameter name 'str' shadows the builtin, but it is part
        # of the generated Temboo interface and is kept for compatibility.
        return json.loads(str)

    def get_Response(self):
        """Retrieve the "Response" output from this Choreo execution. ((json) The response from PagerDuty.)"""
        # dict.get defaults to None when the output is absent.
        return self._output.get('Response')
class ListIncidentsChoreographyExecution(ChoreographyExecution):
    """Tracks an in-flight ListIncidents execution and wraps its results."""

    def _make_result_set(self, response, path):
        result_set = ListIncidentsResultSet(response, path)
        return result_set
ihiji/version_utils | docs/conf.py | 1 | 11784 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# version_utils documentation build configuration file, created by
# sphinx-quickstart on Wed Dec 16 17:15:51 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import shlex
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('..'))
from version_utils.version import __version__
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    'sphinx.ext.autodoc',      # pull API documentation from docstrings
    'sphinx.ext.intersphinx',  # cross-link to other projects' docs
    'sphinx.ext.viewcode',     # link documented objects to highlighted source
]

# NOTE: intersphinx_mapping is assigned again near the end of this file;
# the last assignment wins.
intersphinx_mapping = {'python': ('https://docs.python.org/3', None)}

# Group autodoc output by member type rather than source order.
autodoc_member_order = 'groupwise'
autodoc_default_flags = ['members', 'private-members', 'show-inheritance',
                         'undoc-members']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'version_utils'
copyright = '2015, Matthew Planchard'
author = 'Matthew Planchard'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = __version__
# The full version, including alpha/beta/rc tags.
release = __version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'version_utilsdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'version_utils.tex', 'version\\_utils Documentation',
'Matthew Planchard', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'version_utils', 'version_utils Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'version_utils', 'version_utils Documentation',
author, 'version_utils', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# -- Options for Epub output ----------------------------------------------
# Bibliographic Dublin Core info.
epub_title = project
epub_author = author
epub_publisher = author
epub_copyright = copyright
# The basename for the epub file. It defaults to the project name.
#epub_basename = project
# The HTML theme for the epub output. Since the default themes are not optimized
# for small screen space, using the same theme for HTML and epub output is
# usually not wise. This defaults to 'epub', a theme designed to save visual
# space.
#epub_theme = 'epub'
# The language of the text. It defaults to the language option
# or 'en' if the language is not set.
#epub_language = ''
# The scheme of the identifier. Typical schemes are ISBN or URL.
#epub_scheme = ''
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#epub_identifier = ''
# A unique identification for the text.
#epub_uid = ''
# A tuple containing the cover image and cover page html template filenames.
#epub_cover = ()
# A sequence of (type, uri, title) tuples for the guide element of content.opf.
#epub_guide = ()
# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_pre_files = []
# HTML files that should be inserted after the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_post_files = []
# A list of files that should not be packed into the epub file.
epub_exclude_files = ['search.html']
# The depth of the table of contents in toc.ncx.
#epub_tocdepth = 3
# Allow duplicate toc entries.
#epub_tocdup = True
# Choose between 'default' and 'includehidden'.
#epub_tocscope = 'default'
# Fix unsupported image types using the Pillow.
#epub_fix_images = False
# Scale large images.
#epub_max_image_width = 0
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#epub_show_urls = 'inline'
# If false, no index is generated.
#epub_use_index = True
# Example configuration for intersphinx: refer to the Python standard library.
# NOTE: this assignment previously used the legacy un-named form
# {'https://docs.python.org/': None}, which silently *replaced* the named
# 'python' mapping declared near the top of this file. Keep one consistent,
# named mapping so intersphinx references keep resolving.
intersphinx_mapping = {'python': ('https://docs.python.org/3', None)}
| gpl-3.0 |
UDST/synthpop | synthpop/test/test_censushelpers.py | 2 | 2888 | import pytest
from ..census_helpers import Census
import numpy as np
from pandas.util.testing import assert_series_equal
import os
@pytest.fixture
def c():
    """Census API helper used by every test below.

    The API key can be overridden through the CENSUS_API_KEY environment
    variable; the historical hard-coded key is kept as the default so
    existing setups keep working unchanged.
    """
    api_key = os.environ.get('CENSUS_API_KEY',
                             'bfa6b4e541243011fab6307a31aed9e91015ba90')
    return Census(api_key)
def test_block_group_and_tract_query(c):
    """Merges block-group and tract tables for San Francisco (06/075)."""
    income_columns = ['B19001_0%02dE' % i for i in range(1, 18)]
    vehicle_columns = ['B08201_0%02dE' % i for i in range(1, 7)]
    workers_columns = ['B08202_0%02dE' % i for i in range(1, 6)]
    families_columns = ['B11001_001E', 'B11001_002E']
    block_group_columns = income_columns + families_columns
    tract_columns = vehicle_columns + workers_columns
    # Restricted to one tract: it contains exactly three block groups.
    df = c.block_group_and_tract_query(block_group_columns,
                                       tract_columns, "06", "075",
                                       merge_columns=['tract', 'county',
                                                      'state'],
                                       block_group_size_attr="B11001_001E",
                                       tract_size_attr="B08201_001E",
                                       tract="030600")
    assert len(df) == 3
    # Household totals from the two source tables must agree post-merge.
    assert_series_equal(
        df["B11001_001E"], df["B08201_001E"], check_names=False)
    assert np.all(df.state == "06")
    assert np.all(df.county == "075")
    # tract=None broadens the same query to the whole county.
    df = c.block_group_and_tract_query(block_group_columns,
                                       tract_columns, "06", "075",
                                       merge_columns=['tract', 'county',
                                                      'state'],
                                       block_group_size_attr="B11001_001E",
                                       tract_size_attr="B08201_001E",
                                       tract=None)
    # number of block groups in San Francisco
    assert len(df) == 581
    assert_series_equal(
        df["B11001_001E"], df["B08201_001E"], check_names=False)
    assert np.all(df.state == "06")
    assert np.all(df.county == "075")
def test_wide_block_group_query(c):
    """A query wide enough (>50 columns) to exercise column batching."""
    columns = ['B01001_001E']                                # total population
    columns += ['B01001_002E', 'B01001_026E']                # male / female
    columns += ['B02001_0%02dE' % i for i in range(1, 11)]   # race
    columns += ['B01001_0%02dE' % i for i in range(3, 26)]   # male age bins
    columns += ['B01001_0%02dE' % i for i in range(27, 50)]  # female age bins
    df = c.block_group_query(columns, "06", "075", tract="030600")
    assert len(df) == 3
    assert np.all(df.state == "06")
    assert np.all(df.county == "075")
    assert len(df.columns) > 50
def test_tract_to_puma(c):
    """Tract 030600 in county 06/075 resolves to PUMA 07506."""
    assert c.tract_to_puma("06", "075", "030600")[0] == "07506"
def test_download_pums(c):
    """Downloads PUMS data for one PUMA and for a whole state."""
    state, puma = "06", "07506"
    c.download_population_pums(state, puma)
    c.download_household_pums(state, puma)
    # State 10 (Delaware) exercises the whole-state download path.
    c.download_population_pums("10")
    c.download_household_pums("10")
| bsd-3-clause |
xbmc/atv2 | xbmc/lib/libPython/Python/Lib/plat-mac/lib-scriptpackages/CodeWarrior/Standard_Suite.py | 9 | 12335 | """Suite Standard Suite: Common terms for most applications
Level 1, version 1
Generated from /Volumes/Sap/Applications (Mac OS 9)/Metrowerks CodeWarrior 7.0/Metrowerks CodeWarrior/CodeWarrior IDE 4.2.5
AETE/AEUT resource version 1/0, language 0, script 0
"""
import aetools
import MacOS
_code = 'CoRe'
from StdSuites.Standard_Suite import *
class Standard_Suite_Events(Standard_Suite_Events):
_argmap_close = {
'saving' : 'savo',
'saving_in' : 'kfil',
}
def close(self, _object, _attributes={}, **_arguments):
"""close: close an object
Required argument: the object to close
Keyword argument saving: specifies whether or not changes should be saved before closing
Keyword argument saving_in: the file in which to save the object
Keyword argument _attributes: AppleEvent attribute dictionary
"""
_code = 'core'
_subcode = 'clos'
aetools.keysubst(_arguments, self._argmap_close)
_arguments['----'] = _object
aetools.enumsubst(_arguments, 'savo', _Enum_savo)
_reply, _arguments, _attributes = self.send(_code, _subcode,
_arguments, _attributes)
if _arguments.get('errn', 0):
raise aetools.Error, aetools.decodeerror(_arguments)
# XXXX Optionally decode result
if _arguments.has_key('----'):
return _arguments['----']
_argmap_count = {
'each' : 'kocl',
}
def count(self, _object, _attributes={}, **_arguments):
"""count: return the number of elements of a particular class within an object
Required argument: the object whose elements are to be counted
Keyword argument each: the class of the elements to be counted. Keyword 'each' is optional in AppleScript
Keyword argument _attributes: AppleEvent attribute dictionary
Returns: the number of elements
"""
_code = 'core'
_subcode = 'cnte'
aetools.keysubst(_arguments, self._argmap_count)
_arguments['----'] = _object
_reply, _arguments, _attributes = self.send(_code, _subcode,
_arguments, _attributes)
if _arguments.get('errn', 0):
raise aetools.Error, aetools.decodeerror(_arguments)
# XXXX Optionally decode result
if _arguments.has_key('----'):
return _arguments['----']
_argmap_get = {
'as' : 'rtyp',
}
def get(self, _object, _attributes={}, **_arguments):
"""get: get the data for an object
Required argument: the object whose data is to be returned
Keyword argument as: the desired types for the data, in order of preference
Keyword argument _attributes: AppleEvent attribute dictionary
Returns: The data from the object
"""
_code = 'core'
_subcode = 'getd'
aetools.keysubst(_arguments, self._argmap_get)
_arguments['----'] = _object
_reply, _arguments, _attributes = self.send(_code, _subcode,
_arguments, _attributes)
if _arguments.get('errn', 0):
raise aetools.Error, aetools.decodeerror(_arguments)
# XXXX Optionally decode result
if _arguments.has_key('----'):
return _arguments['----']
_argmap_make = {
'new' : 'kocl',
'as' : 'rtyp',
'at' : 'insh',
'with_data' : 'data',
'with_properties' : 'prdt',
}
def make(self, _no_object=None, _attributes={}, **_arguments):
"""make: make a new element
Keyword argument new: the class of the new element\xd1keyword 'new' is optional in AppleScript
Keyword argument as: the desired types for the data, in order of preference
Keyword argument at: the location at which to insert the element
Keyword argument with_data: the initial data for the element
Keyword argument with_properties: the initial values for the properties of the element
Keyword argument _attributes: AppleEvent attribute dictionary
Returns: to the new object(s)
"""
_code = 'core'
_subcode = 'crel'
aetools.keysubst(_arguments, self._argmap_make)
if _no_object != None: raise TypeError, 'No direct arg expected'
_reply, _arguments, _attributes = self.send(_code, _subcode,
_arguments, _attributes)
if _arguments.get('errn', 0):
raise aetools.Error, aetools.decodeerror(_arguments)
# XXXX Optionally decode result
if _arguments.has_key('----'):
return _arguments['----']
def select(self, _object=None, _attributes={}, **_arguments):
"""select: select the specified object
Required argument: the object to select
Keyword argument _attributes: AppleEvent attribute dictionary
"""
_code = 'misc'
_subcode = 'slct'
if _arguments: raise TypeError, 'No optional args expected'
_arguments['----'] = _object
_reply, _arguments, _attributes = self.send(_code, _subcode,
_arguments, _attributes)
if _arguments.get('errn', 0):
raise aetools.Error, aetools.decodeerror(_arguments)
# XXXX Optionally decode result
if _arguments.has_key('----'):
return _arguments['----']
_argmap_set = {
'to' : 'data',
}
def set(self, _object, _attributes={}, **_arguments):
"""set: set an object's data
Required argument: the object to change
Keyword argument to: the new value
Keyword argument _attributes: AppleEvent attribute dictionary
"""
_code = 'core'
_subcode = 'setd'
aetools.keysubst(_arguments, self._argmap_set)
_arguments['----'] = _object
_reply, _arguments, _attributes = self.send(_code, _subcode,
_arguments, _attributes)
if _arguments.get('errn', 0):
raise aetools.Error, aetools.decodeerror(_arguments)
# XXXX Optionally decode result
if _arguments.has_key('----'):
return _arguments['----']
# AppleEvent object model: each ComponentItem carries its 4-char class code
# in 'want'; each NProperty carries property code 'which' and value type 'want'.
class application(aetools.ComponentItem):
    """application - an application program """
    want = 'capp'
class _Prop_user_interaction(aetools.NProperty):
    """user interaction - user interaction level """
    which = 'inte'
    want = 'Inte'
user_interaction = _Prop_user_interaction()
#        element 'cwin' as ['indx', 'name', 'rang']
#        element 'docu' as ['indx', 'name', 'rang']
class character(aetools.ComponentItem):
    """character - a character """
    want = 'cha '
# _Prop_length/_Prop_offset are shared by several text-like classes below.
class _Prop_length(aetools.NProperty):
    """length - length in characters of this object """
    which = 'pLen'
    want = 'long'
class _Prop_offset(aetools.NProperty):
    """offset - offset of a text object from the beginning of the document (first char has offset 1) """
    which = 'pOff'
    want = 'long'
class insertion_point(aetools.ComponentItem):
    """insertion point - An insertion location between two objects """
    want = 'cins'
class line(aetools.ComponentItem):
    """line - lines of text """
    want = 'clin'
class _Prop_index(aetools.NProperty):
    """index - index of a line object from the beginning of the document (first line has index 1) """
    which = 'pidx'
    want = 'long'
#        element 'cha ' as ['indx', 'rang', 'rele']

# Plural alias used by the AppleScript terminology.
lines = line
class selection_2d_object(aetools.ComponentItem):
    """selection-object - the selection visible to the user """
    want = 'csel'
class _Prop_contents(aetools.NProperty):
    """contents - the contents of the selection """
    which = 'pcnt'
    want = 'type'
#        element 'cha ' as ['indx', 'rele', 'rang', 'test']
#        element 'clin' as ['indx', 'rang', 'rele']
#        element 'ctxt' as ['rang']
class text(aetools.ComponentItem):
    """text - Text """
    want = 'ctxt'
#        element 'cha ' as ['indx', 'rele', 'rang']
#        element 'cins' as ['rele']
#        element 'clin' as ['indx', 'rang', 'rele']
#        element 'ctxt' as ['rang']
class window(aetools.ComponentItem):
    """window - A window """
    want = 'cwin'
class _Prop_bounds(aetools.NProperty):
    """bounds - the boundary rectangle for the window """
    which = 'pbnd'
    want = 'qdrt'
class _Prop_document(aetools.NProperty):
    """document - the document that owns this window """
    which = 'docu'
    want = 'docu'
class _Prop_name(aetools.NProperty):
    """name - the title of the window """
    which = 'pnam'
    want = 'itxt'
class _Prop_position(aetools.NProperty):
    """position - upper left coordinates of window """
    which = 'ppos'
    want = 'QDpt'
class _Prop_visible(aetools.NProperty):
    """visible - is the window visible? """
    which = 'pvis'
    want = 'bool'
class _Prop_zoomed(aetools.NProperty):
    """zoomed - Is the window zoomed? """
    which = 'pzum'
    want = 'bool'

# Plural alias used by the AppleScript terminology.
windows = window
class document(aetools.ComponentItem):
"""document - a document """
want = 'docu'
class _Prop_file_permissions(aetools.NProperty):
"""file permissions - the file permissions for the document """
which = 'PERM'
want = 'PERM'
class _Prop_kind(aetools.NProperty):
"""kind - the kind of document """
which = 'DKND'
want = 'DKND'
class _Prop_location(aetools.NProperty):
"""location - the file of the document """
which = 'FILE'
want = 'fss '
class _Prop_window(aetools.NProperty):
"""window - the window of the document. """
which = 'cwin'
want = 'cwin'
documents = document
class files(aetools.ComponentItem):
"""files - Every file """
want = 'file'
file = files
# NOTE(review): auto-generated aetools glue — attaches property/element
# dictionaries to the classes declared above ('application' is declared
# earlier in this module).  Do not hand-edit; regeneration overwrites it.
application._superclassnames = []
application._privpropdict = {
    'user_interaction' : _Prop_user_interaction,
}
application._privelemdict = {
    'document' : document,
    'window' : window,
}
character._superclassnames = []
character._privpropdict = {
    'length' : _Prop_length,
    'offset' : _Prop_offset,
}
character._privelemdict = {
}
insertion_point._superclassnames = []
insertion_point._privpropdict = {
    'length' : _Prop_length,
    'offset' : _Prop_offset,
}
insertion_point._privelemdict = {
}
line._superclassnames = []
line._privpropdict = {
    'index' : _Prop_index,
    'length' : _Prop_length,
    'offset' : _Prop_offset,
}
line._privelemdict = {
    'character' : character,
}
selection_2d_object._superclassnames = []
selection_2d_object._privpropdict = {
    'contents' : _Prop_contents,
    'length' : _Prop_length,
    'offset' : _Prop_offset,
}
selection_2d_object._privelemdict = {
    'character' : character,
    'line' : line,
    'text' : text,
}
text._superclassnames = []
text._privpropdict = {
    'length' : _Prop_length,
    'offset' : _Prop_offset,
}
text._privelemdict = {
    'character' : character,
    'insertion_point' : insertion_point,
    'line' : line,
    'text' : text,
}
window._superclassnames = []
window._privpropdict = {
    'bounds' : _Prop_bounds,
    'document' : _Prop_document,
    'index' : _Prop_index,
    'name' : _Prop_name,
    'position' : _Prop_position,
    'visible' : _Prop_visible,
    'zoomed' : _Prop_zoomed,
}
window._privelemdict = {
}
document._superclassnames = []
document._privpropdict = {
    'file_permissions' : _Prop_file_permissions,
    'index' : _Prop_index,
    'kind' : _Prop_kind,
    'location' : _Prop_location,
    'name' : _Prop_name,
    'window' : _Prop_window,
}
document._privelemdict = {
}
files._superclassnames = []
files._privpropdict = {
}
files._privelemdict = {
}
#
# Indices of types declared in this module
#
_classdeclarations = {
    'capp' : application,
    'cha ' : character,
    'cins' : insertion_point,
    'clin' : line,
    'csel' : selection_2d_object,
    'ctxt' : text,
    'cwin' : window,
    'docu' : document,
    'file' : files,
}
_propdeclarations = {
    'DKND' : _Prop_kind,
    'FILE' : _Prop_location,
    'PERM' : _Prop_file_permissions,
    'cwin' : _Prop_window,
    'docu' : _Prop_document,
    'inte' : _Prop_user_interaction,
    'pLen' : _Prop_length,
    'pOff' : _Prop_offset,
    'pbnd' : _Prop_bounds,
    'pcnt' : _Prop_contents,
    'pidx' : _Prop_index,
    'pnam' : _Prop_name,
    'ppos' : _Prop_position,
    'pvis' : _Prop_visible,
    'pzum' : _Prop_zoomed,
}
_compdeclarations = {
}
_enumdeclarations = {
}
| gpl-2.0 |
sanmik/brain_network_viz | source/helper.py | 1 | 5017 | """
A set of helper functions that have no other home.
"""
from math import degrees, radians, pi, sqrt, cos, sin
import numpy
def centerOfMass(positions, weights):
    """
    Calculate the center of mass of a set of weighted positions.

    Args:
      positions: A list of (x, y, z) position tuples
      weights: A list of position weights (same length as positions)
    Return:
      A tuple of floats representing the coordinates of the center of mass.
    """
    total_weight = sum(weights)
    assert total_weight != 0
    # Accumulate the weighted sum per axis, then divide once at the end.
    sums = [0.0, 0.0, 0.0]
    for pos, weight in zip(positions, weights):
        for axis in range(3):
            sums[axis] += pos[axis] * weight
    return tuple(component / total_weight for component in sums)
"""
Return the cartesian coordinates corresponding to the given polar coords.
Args:
r: Radial coordinate
theta: Angular coordinate (in degrees)
Return:
Cartesian coordinate 2-tuple
"""
def polar2Cartesian(r, theta):
x = r * cos(radians(theta))
y = r * sin(radians(theta))
return (x, y)
def cartesian2Polar(x, y):
    """
    Return the polar coordinates corresponding to the given cartesian coords.

    Args:
      x: Cartesian x coordinate
      y: Cartesian y coordinate
    Return:
      Polar coordinate tuple (r, theta) with theta in degrees [0, 360)
    """
    radius = sqrt(x ** 2 + y ** 2)
    # arctan2 yields (-180, 180]; the modulo folds it into [0, 360).
    angle = degrees(numpy.arctan2(y, x)) % 360
    return (radius, angle)
"""
Return the angular midpoint of the 2 given angular coordinates.
"""
def midTheta(theta1, theta2):
return (theta1 + theta2) / 2
"""
Find out which Cartesian quadrant the given theta lies in.
Args:
theta Angular coordinate (in degrees)
Return:
Integer quadrant number
"""
def theta2Quadrant(theta):
theta = theta % 360.0
if theta >= 0 and theta < 90.0:
return 1
if theta < 180.0:
return 2
if theta < 270.0:
return 3
return 4
def calcColor(col1, col2, u, min_u, max_u):
    """
    Linearly interpolate between two '#RRGGBB' hex colors.

    Args:
      col1: Hex string of the start color (selected when u == min_u)
      col2: Hex string of the end color (selected when u == max_u)
      u: Interpolation parameter in range [min_u, max_u]
      min_u: Lower bound of the parameter range
      max_u: Upper bound of the parameter range
    Return:
      Hex string '#RRGGBB' of the interpolated color
    """
    assert type(u) is float
    assert type(min_u) is float
    assert type(max_u) is float
    # Strip the leading '#'
    col1 = col1[1:]
    col2 = col2[1:]
    # Decimal RGB components of both endpoint colors
    col1_rgb = [int(col1[:2], 16), int(col1[2:4], 16), int(col1[4:], 16)]
    col2_rgb = [int(col2[:2], 16), int(col2[2:4], 16), int(col2[4:], 16)]
    result_rgb = [0, 0, 0]
    # Normalize u into [0, 1]
    assert max_u - min_u != 0.0
    v = mapRangeParam(u, min_u, max_u, 0.0, 1.0)
    assert 0.0 <= v <= 1.0
    for i in range(3):
        result_rgb[i] = int(v * (col2_rgb[i] - col1_rgb[i]) + col1_rgb[i])
        # Bug fix: this range check was a bare no-op comparison expression;
        # it was clearly intended to be an assertion.
        assert 0 <= result_rgb[i] <= 255
    result_str = "#%0.2X" % result_rgb[0]
    result_str += "%0.2X" % result_rgb[1]
    result_str += "%0.2X" % result_rgb[2]
    return result_str
def mapRangeParam(u, min_u, max_u, min_v, max_v):
    """
    Linearly map a floating point value u in the range [min_u, max_u] to a
    value v in the range defined by [min_v, max_v].

    Args:
      See description.
    Return:
      Floating point value v
    """
    for arg in (u, min_u, max_u, min_v, max_v):
        assert type(arg) is float
    # Same operation order as the textbook formula, so float results are
    # bit-identical across calls.
    span_v = max_v - min_v
    span_u = float(max_u - min_u)
    return (span_v * (u - min_u)) / span_u + min_v
def minNetDiff(a, b):
    """
    Returns an offset s that minimizes (zeros) the net difference between
    the equal length lists a and b.  Net difference is the sum of all
    pairwise differences: a_elem - (b_elem + s), i.e.
    E = -(n * s) + sum(a) - sum(b) for n = len(a).

    Args:
      a: A list of values
      b: A list of values
    Return:
      A floating point offset
    """
    assert len(a) == len(b)
    assert len(a) != 0
    # Setting E = 0 and solving for s gives the closed form below.
    return (sum(a) - sum(b)) / float(len(a))
def topRange(l, s):
    """
    Given a list of values, determines a range in which the values in the
    top s% must lie.

    Args:
      l: A list of values
      s: A percentage in [0, 100]
    Return:
      A tuple (lowest value that could be in top s%, max value in l)
    """
    mx = max(l)
    mn = min(l)
    # Bug fix: this was 'if s is 100:', an identity comparison that only
    # happens to work for CPython's cached small ints and never matches
    # 100.0; compare by value instead.
    if s == 100:
        return (mn, mx)
    dx = (mx - mn) / 100.0  # Width of 1% in the range covered by l's vals
    min_thresh = mx - (s * dx)
    return (min_thresh, mx)
def angularExtentsOverlap(a1, a2, b1, b2):
    """
    Determine if 2 angular intervals overlap.

    Args:
      a1, a2: The start and end angles definining interval a
      b1, b2: The start and end angles definining interval b
    Return:
      True if the intervals overlap, False otherwise.
    """
    # Whichever interval starts first must reach the start of the other.
    return (a2 >= b1) if a1 < b1 else (a1 <= b2)
def findRenderer(fig):
    """
    Look up the matplotlib renderer instance given a figure instance.

    Source: https://stackoverflow.com/questions/22667224/matplotlib-get-text-bounding-box-independent-of-backend?lq=1
    """
    if hasattr(fig.canvas, "get_renderer"):
        return fig.canvas.get_renderer()
    # Backends without get_renderer() only create their renderer during a
    # draw; force a throwaway PDF render and pick up the cached instance.
    import io
    fig.canvas.print_pdf(io.BytesIO())
    return fig._cachedRenderer
| mit |
zenodo/invenio | invenio/modules/formatter/format_elements/bfe_affiliation.py | 13 | 2661 | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2006, 2007, 2008, 2010, 2011 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""BibFormat element - Prints affiliation
"""
__revision__ = "$Id$"
import cgi
from invenio.config import \
CFG_SITE_URL, CFG_SITE_NAME
from invenio.legacy.bibauthority.config import \
CFG_BIBAUTHORITY_AUTHORITY_COLLECTION_NAME
from invenio.legacy.bibauthority.engine import \
get_low_level_recIDs_from_control_no
def format_element(bfo):
    """
    HTML Affiliation display
    """
    out = ""
    for affiliation_dict in bfo.fields('909C1', repeatable_subfields_p=True):
        # Only subfields with a 'u' (the affiliation name) are rendered.
        if 'u' not in affiliation_dict:
            continue
        affiliation = affiliation_dict['u'][0]
        # Resolve any control numbers ('0' subfields) to authority records.
        recIDs = []
        for control_no in affiliation_dict.get('0') or []:
            recIDs.extend(get_low_level_recIDs_from_control_no(control_no))
        affiliation = cgi.escape(affiliation)
        if len(recIDs) == 1:
            # Single match: link straight to the authority record.
            affiliation = ('<a href="' + CFG_SITE_URL +
                           '/record/' + str(recIDs[0]) +
                           '?ln=' + bfo.lang +
                           '">' + affiliation + '</a>')
        elif len(recIDs) > 1:
            # Several matches: link to a search over all candidate records.
            affiliation = ('<a href="' + CFG_SITE_URL +
                           '/search?' +
                           'p=recid:' + " or recid:".join([str(_id) for _id in recIDs]) +
                           '&c=' + CFG_SITE_NAME +
                           '&c=' + CFG_BIBAUTHORITY_AUTHORITY_COLLECTION_NAME +
                           '&ln=' + bfo.lang +
                           '">' + affiliation + '</a>')
        out += affiliation + " "
    if out:
        return "<br/>" + out
def escape_values(bfo):
    """
    Called by BibFormat in order to check if output of this element
    should be escaped.
    """
    # 0 = never escape: this element emits HTML markup itself.
    return 0
| gpl-2.0 |
laperry1/android_external_chromium_org | components/policy/resources/PRESUBMIT.py | 44 | 5372 | # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# If this presubmit check fails or misbehaves, please complain to
# [email protected], [email protected] or [email protected].
import itertools
import sys
import xml.dom.minidom
def _GetPolicyTemplates(template_path):
# Read list of policies in the template. eval() is used instead of a JSON
# parser because policy_templates.json is not quite JSON, and uses some
# python features such as #-comments and '''strings'''. policy_templates.json
# is actually maintained as a python dictionary.
with open(template_path) as f:
template_data = eval(f.read(), {})
policies = ( policy
for policy in template_data['policy_definitions']
if policy['type'] != 'group' )
groups = ( policy['policies']
for policy in template_data['policy_definitions']
if policy['type'] == 'group' )
subpolicies = ( policy for group in groups for policy in group )
return list(itertools.chain(policies, subpolicies))
def _CheckPolicyTemplatesSyntax(input_api, output_api):
    """Run the policy-template syntax checker over policy_templates.json.

    Only runs when policy_templates.json is among the affected files of the
    change.  Returns a list of presubmit errors (empty when clean/untouched).
    """
    local_path = input_api.PresubmitLocalPath()
    filepath = input_api.os_path.join(local_path, 'policy_templates.json')
    if any(f.AbsoluteLocalPath() == filepath
           for f in input_api.AffectedFiles()):
        # The checker module lives in ../tools; temporarily prepend that
        # directory to sys.path and restore the original path no matter what.
        old_sys_path = sys.path
        try:
            tools_path = input_api.os_path.normpath(
                input_api.os_path.join(local_path, input_api.os_path.pardir, 'tools'))
            sys.path = [ tools_path ] + sys.path
            # Optimization: only load this when it's needed.
            import syntax_check_policy_template_json
            checker = syntax_check_policy_template_json.PolicyTemplateChecker()
            if checker.Run([], filepath) > 0:
                return [output_api.PresubmitError('Syntax error(s) in file:',
                                                  [filepath])]
        finally:
            sys.path = old_sys_path
    return []
def _CheckPolicyTestCases(input_api, output_api, policies):
    """Cross-check policy_templates.json against policy_test_cases.json.

    Every defined policy must have a test case, and every test case must
    correspond to a defined policy.  Returns a list of presubmit errors.
    """
    # Read list of policies in chrome/test/data/policy/policy_test_cases.json.
    root = input_api.change.RepositoryRoot()
    policy_test_cases_file = input_api.os_path.join(
        root, 'chrome', 'test', 'data', 'policy', 'policy_test_cases.json')
    # Bug fix: the file handle was opened without ever being closed; use a
    # context manager so it is released promptly.
    with open(policy_test_cases_file) as f:
        test_names = input_api.json.load(f).keys()
    # Test names may be qualified ('Policy.suffix') and '--'-prefixed entries
    # are flags, not policies.
    tested_policies = frozenset(name.partition('.')[0]
                                for name in test_names
                                if name[:2] != '--')
    policy_names = frozenset(policy['name'] for policy in policies)
    # Finally check if any policies are missing.
    missing = policy_names - tested_policies
    extra = tested_policies - policy_names
    error_missing = ('Policy \'%s\' was added to policy_templates.json but not '
                     'to src/chrome/test/data/policy/policy_test_cases.json. '
                     'Please update both files.')
    error_extra = ('Policy \'%s\' is tested by '
                   'src/chrome/test/data/policy/policy_test_cases.json but is not'
                   ' defined in policy_templates.json. Please update both files.')
    results = []
    for policy in missing:
        results.append(output_api.PresubmitError(error_missing % policy))
    for policy in extra:
        results.append(output_api.PresubmitError(error_extra % policy))
    return results
def _CheckPolicyHistograms(input_api, output_api, policies):
    """Check every policy id appears in the EnterprisePolicies histogram enum.

    Returns a list of presubmit errors, one per policy that is missing from
    src/tools/metrics/histograms/histograms.xml.
    """
    root = input_api.change.RepositoryRoot()
    histograms = input_api.os_path.join(
        root, 'tools', 'metrics', 'histograms', 'histograms.xml')
    with open(histograms) as f:
        tree = xml.dom.minidom.parseString(f.read())
    enums = (tree.getElementsByTagName('histogram-configuration')[0]
                 .getElementsByTagName('enums')[0]
                 .getElementsByTagName('enum'))
    policy_enum = [e for e in enums
                   if e.getAttribute('name') == 'EnterprisePolicies'][0]
    policy_ids = frozenset(int(e.getAttribute('value'))
                           for e in policy_enum.getElementsByTagName('int'))
    error_missing = ('Policy \'%s\' was added to policy_templates.json but not '
                     'to src/tools/metrics/histograms/histograms.xml. '
                     'Please update both files.')
    return [output_api.PresubmitError(error_missing % policy['name'])
            for policy in policies
            if policy['id'] not in policy_ids]
def _CommonChecks(input_api, output_api):
    """Run all presubmit checks shared by the upload and commit hooks."""
    results = []
    results.extend(_CheckPolicyTemplatesSyntax(input_api, output_api))

    os_path = input_api.os_path
    local_path = input_api.PresubmitLocalPath()
    template_path = os_path.join(local_path, 'policy_templates.json')
    affected_files = input_api.AffectedFiles()
    if any(f.AbsoluteLocalPath() == template_path for f in affected_files):
        try:
            policies = _GetPolicyTemplates(template_path)
        # Bug fix: this was a bare 'except:', which also swallowed
        # KeyboardInterrupt and SystemExit; catch ordinary exceptions only.
        except Exception:
            results.append(output_api.PresubmitError('Invalid Python/JSON syntax.'))
            return results
        results.extend(_CheckPolicyTestCases(input_api, output_api, policies))
        results.extend(_CheckPolicyHistograms(input_api, output_api, policies))
    return results
def CheckChangeOnUpload(input_api, output_api):
    """Presubmit entry point run at upload time; delegates to shared checks."""
    return _CommonChecks(input_api, output_api)
def CheckChangeOnCommit(input_api, output_api):
    """Presubmit entry point run at commit time; delegates to shared checks."""
    return _CommonChecks(input_api, output_api)
| bsd-3-clause |
ProfessionalIT/professionalit-webiste | sdk/google_appengine/lib/django-1.3/django/contrib/gis/tests/__init__.py | 229 | 4968 | from django.conf import settings
from django.test.simple import build_suite, DjangoTestSuiteRunner
from django.utils import unittest
def run_tests(*args, **kwargs):
    """Thin wrapper around Django's default simple test runner.

    Kept so callers can import run_tests from this package; all arguments
    are forwarded unchanged.
    """
    from django.test.simple import run_tests as base_run_tests
    return base_run_tests(*args, **kwargs)
def run_gis_tests(test_labels, verbosity=1, interactive=True, failfast=False, extra_tests=None):
    """Deprecated test-runner entry point.

    Emits a DeprecationWarning and delegates to GeoDjangoTestSuiteRunner.
    """
    import warnings
    warnings.warn(
        'The run_gis_tests() test runner has been deprecated in favor of GeoDjangoTestSuiteRunner.',
        DeprecationWarning
    )
    test_runner = GeoDjangoTestSuiteRunner(verbosity=verbosity, interactive=interactive, failfast=failfast)
    return test_runner.run_tests(test_labels, extra_tests=extra_tests)
def geo_apps(namespace=True, runtests=False):
    """
    Returns a list of GeoDjango test applications that reside in
    `django.contrib.gis.tests` that can be used with the current
    database and the spatial libraries that are installed.
    """
    from django.db import connection
    from django.contrib.gis.geos import GEOS_PREPARE
    from django.contrib.gis.gdal import HAS_GDAL

    apps = ['geoapp', 'relatedapp']
    if not connection.ops.mysql:
        # No distance queries on MySQL.
        apps.append('distapp')
    if connection.ops.postgis and connection.ops.geography:
        # Test geography support with PostGIS 1.5+.
        apps.append('geogapp')
    if HAS_GDAL:
        # The following GeoDjango test apps depend on GDAL support.
        if connection.ops.postgis and GEOS_PREPARE:
            # 3D apps use LayerMapping, which uses GDAL.
            apps.append('geo3d')
        apps.append('layermap')

    if runtests:
        return [('django.contrib.gis.tests', app) for app in apps]
    if namespace:
        return ['django.contrib.gis.tests.%s' % app for app in apps]
    return apps
def geodjango_suite(apps=True):
    """
    Returns a TestSuite consisting only of GeoDjango tests that can be run.

    Args:
      apps: When True, also include the suites of the GeoDjango test
        applications returned by geo_apps().
    """
    import sys
    from django.db.models import get_app
    suite = unittest.TestSuite()
    # Adding the GEOS tests.
    from django.contrib.gis.geos import tests as geos_tests
    suite.addTest(geos_tests.suite())
    # Adding the measurement tests.
    from django.contrib.gis.tests import test_measure
    suite.addTest(test_measure.suite())
    # Adding GDAL tests, and any test suite that depends on GDAL, to the
    # suite if GDAL is available.
    from django.contrib.gis.gdal import HAS_GDAL
    if HAS_GDAL:
        from django.contrib.gis.gdal import tests as gdal_tests
        suite.addTest(gdal_tests.suite())
        from django.contrib.gis.tests import test_spatialrefsys, test_geoforms
        suite.addTest(test_spatialrefsys.suite())
        suite.addTest(test_geoforms.suite())
    else:
        sys.stderr.write('GDAL not available - no tests requiring GDAL will be run.\n')
    # Add GeoIP tests to the suite, if the library and data is available.
    from django.contrib.gis.utils import HAS_GEOIP
    if HAS_GEOIP and hasattr(settings, 'GEOIP_PATH'):
        from django.contrib.gis.tests import test_geoip
        suite.addTest(test_geoip.suite())
    # Finally, adding the suites for each of the GeoDjango test apps.
    if apps:
        for app_name in geo_apps(namespace=False):
            suite.addTest(build_suite(get_app(app_name)))
    return suite
class GeoDjangoTestSuiteRunner(DjangoTestSuiteRunner):
    """Test runner that installs a GeoDjango-specific settings environment.

    Saves INSTALLED_APPS / ROOT_URLCONF / SITE_ID in setup and restores the
    originals in teardown; always runs the geodjango_suite() regardless of
    the requested labels.
    """

    def setup_test_environment(self, **kwargs):
        super(GeoDjangoTestSuiteRunner, self).setup_test_environment(**kwargs)

        # Saving original values of INSTALLED_APPS, ROOT_URLCONF, and SITE_ID.
        self.old_installed = getattr(settings, 'INSTALLED_APPS', None)
        self.old_root_urlconf = getattr(settings, 'ROOT_URLCONF', '')
        self.old_site_id = getattr(settings, 'SITE_ID', None)

        # Constructing the new INSTALLED_APPS, and including applications
        # within the GeoDjango test namespace.
        new_installed = ['django.contrib.sites',
                         'django.contrib.sitemaps',
                         'django.contrib.gis',
                         ]
        # Calling out to `geo_apps` to get GeoDjango applications supported
        # for testing.
        new_installed.extend(geo_apps())
        settings.INSTALLED_APPS = new_installed

        # SITE_ID needs to be set
        settings.SITE_ID = 1

        # ROOT_URLCONF needs to be set, else `AttributeErrors` are raised
        # when TestCases are torn down that have `urls` defined.
        settings.ROOT_URLCONF = ''

    def teardown_test_environment(self, **kwargs):
        # Restore the settings saved in setup_test_environment().
        super(GeoDjangoTestSuiteRunner, self).teardown_test_environment(**kwargs)
        settings.INSTALLED_APPS = self.old_installed
        settings.ROOT_URLCONF = self.old_root_urlconf
        settings.SITE_ID = self.old_site_id

    def build_suite(self, test_labels, extra_tests=None, **kwargs):
        # The given labels are ignored; the GeoDjango suite is always run.
        return geodjango_suite()
| lgpl-3.0 |
nju520/flask | tests/test_basic.py | 134 | 43285 | # -*- coding: utf-8 -*-
"""
tests.basic
~~~~~~~~~~~~~~~~~~~~~
The basic functionality.
:copyright: (c) 2015 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import pytest
import re
import uuid
import time
import flask
import pickle
from datetime import datetime
from threading import Thread
from flask._compat import text_type
from werkzeug.exceptions import BadRequest, NotFound, Forbidden
from werkzeug.http import parse_date
from werkzeug.routing import BuildError
import werkzeug.serving
def test_options_work():
    app = flask.Flask(__name__)

    @app.route('/', methods=['GET', 'POST'])
    def index():
        return 'Hello World'

    response = app.test_client().open('/', method='OPTIONS')
    # Flask adds HEAD and OPTIONS to the explicitly declared methods.
    assert sorted(response.allow) == ['GET', 'HEAD', 'OPTIONS', 'POST']
    # An automatic OPTIONS response carries no body.
    assert response.data == b''
def test_options_on_multiple_rules():
app = flask.Flask(__name__)
@app.route('/', methods=['GET', 'POST'])
def index():
return 'Hello World'
@app.route('/', methods=['PUT'])
def index_put():
return 'Aha!'
rv = app.test_client().open('/', method='OPTIONS')
assert sorted(rv.allow) == ['GET', 'HEAD', 'OPTIONS', 'POST', 'PUT']
def test_options_handling_disabled():
app = flask.Flask(__name__)
def index():
return 'Hello World!'
index.provide_automatic_options = False
app.route('/')(index)
rv = app.test_client().open('/', method='OPTIONS')
assert rv.status_code == 405
app = flask.Flask(__name__)
def index2():
return 'Hello World!'
index2.provide_automatic_options = True
app.route('/', methods=['OPTIONS'])(index2)
rv = app.test_client().open('/', method='OPTIONS')
assert sorted(rv.allow) == ['OPTIONS']
def test_request_dispatching():
app = flask.Flask(__name__)
@app.route('/')
def index():
return flask.request.method
@app.route('/more', methods=['GET', 'POST'])
def more():
return flask.request.method
c = app.test_client()
assert c.get('/').data == b'GET'
rv = c.post('/')
assert rv.status_code == 405
assert sorted(rv.allow) == ['GET', 'HEAD', 'OPTIONS']
rv = c.head('/')
assert rv.status_code == 200
assert not rv.data # head truncates
assert c.post('/more').data == b'POST'
assert c.get('/more').data == b'GET'
rv = c.delete('/more')
assert rv.status_code == 405
assert sorted(rv.allow) == ['GET', 'HEAD', 'OPTIONS', 'POST']
def test_disallow_string_for_allowed_methods():
app = flask.Flask(__name__)
with pytest.raises(TypeError):
@app.route('/', methods='GET POST')
def index():
return "Hey"
def test_url_mapping():
app = flask.Flask(__name__)
random_uuid4 = "7eb41166-9ebf-4d26-b771-ea3f54f8b383"
def index():
return flask.request.method
def more():
return flask.request.method
def options():
return random_uuid4
app.add_url_rule('/', 'index', index)
app.add_url_rule('/more', 'more', more, methods=['GET', 'POST'])
# Issue 1288: Test that automatic options are not added when non-uppercase 'options' in methods
app.add_url_rule('/options', 'options', options, methods=['options'])
c = app.test_client()
assert c.get('/').data == b'GET'
rv = c.post('/')
assert rv.status_code == 405
assert sorted(rv.allow) == ['GET', 'HEAD', 'OPTIONS']
rv = c.head('/')
assert rv.status_code == 200
assert not rv.data # head truncates
assert c.post('/more').data == b'POST'
assert c.get('/more').data == b'GET'
rv = c.delete('/more')
assert rv.status_code == 405
assert sorted(rv.allow) == ['GET', 'HEAD', 'OPTIONS', 'POST']
rv = c.open('/options', method='OPTIONS')
assert rv.status_code == 200
assert random_uuid4 in rv.data.decode("utf-8")
def test_werkzeug_routing():
from werkzeug.routing import Submount, Rule
app = flask.Flask(__name__)
app.url_map.add(Submount('/foo', [
Rule('/bar', endpoint='bar'),
Rule('/', endpoint='index')
]))
def bar():
return 'bar'
def index():
return 'index'
app.view_functions['bar'] = bar
app.view_functions['index'] = index
c = app.test_client()
assert c.get('/foo/').data == b'index'
assert c.get('/foo/bar').data == b'bar'
def test_endpoint_decorator():
from werkzeug.routing import Submount, Rule
app = flask.Flask(__name__)
app.url_map.add(Submount('/foo', [
Rule('/bar', endpoint='bar'),
Rule('/', endpoint='index')
]))
@app.endpoint('bar')
def bar():
return 'bar'
@app.endpoint('index')
def index():
return 'index'
c = app.test_client()
assert c.get('/foo/').data == b'index'
assert c.get('/foo/bar').data == b'bar'
def test_session():
app = flask.Flask(__name__)
app.secret_key = 'testkey'
@app.route('/set', methods=['POST'])
def set():
flask.session['value'] = flask.request.form['value']
return 'value set'
@app.route('/get')
def get():
return flask.session['value']
c = app.test_client()
assert c.post('/set', data={'value': '42'}).data == b'value set'
assert c.get('/get').data == b'42'
def test_session_using_server_name():
app = flask.Flask(__name__)
app.config.update(
SECRET_KEY='foo',
SERVER_NAME='example.com'
)
@app.route('/')
def index():
flask.session['testing'] = 42
return 'Hello World'
rv = app.test_client().get('/', 'http://example.com/')
assert 'domain=.example.com' in rv.headers['set-cookie'].lower()
assert 'httponly' in rv.headers['set-cookie'].lower()
def test_session_using_server_name_and_port():
app = flask.Flask(__name__)
app.config.update(
SECRET_KEY='foo',
SERVER_NAME='example.com:8080'
)
@app.route('/')
def index():
flask.session['testing'] = 42
return 'Hello World'
rv = app.test_client().get('/', 'http://example.com:8080/')
assert 'domain=.example.com' in rv.headers['set-cookie'].lower()
assert 'httponly' in rv.headers['set-cookie'].lower()
def test_session_using_server_name_port_and_path():
app = flask.Flask(__name__)
app.config.update(
SECRET_KEY='foo',
SERVER_NAME='example.com:8080',
APPLICATION_ROOT='/foo'
)
@app.route('/')
def index():
flask.session['testing'] = 42
return 'Hello World'
rv = app.test_client().get('/', 'http://example.com:8080/foo')
assert 'domain=example.com' in rv.headers['set-cookie'].lower()
assert 'path=/foo' in rv.headers['set-cookie'].lower()
assert 'httponly' in rv.headers['set-cookie'].lower()
def test_session_using_application_root():
class PrefixPathMiddleware(object):
def __init__(self, app, prefix):
self.app = app
self.prefix = prefix
def __call__(self, environ, start_response):
environ['SCRIPT_NAME'] = self.prefix
return self.app(environ, start_response)
app = flask.Flask(__name__)
app.wsgi_app = PrefixPathMiddleware(app.wsgi_app, '/bar')
app.config.update(
SECRET_KEY='foo',
APPLICATION_ROOT='/bar'
)
@app.route('/')
def index():
flask.session['testing'] = 42
return 'Hello World'
rv = app.test_client().get('/', 'http://example.com:8080/')
assert 'path=/bar' in rv.headers['set-cookie'].lower()
def test_session_using_session_settings():
app = flask.Flask(__name__)
app.config.update(
SECRET_KEY='foo',
SERVER_NAME='www.example.com:8080',
APPLICATION_ROOT='/test',
SESSION_COOKIE_DOMAIN='.example.com',
SESSION_COOKIE_HTTPONLY=False,
SESSION_COOKIE_SECURE=True,
SESSION_COOKIE_PATH='/'
)
@app.route('/')
def index():
flask.session['testing'] = 42
return 'Hello World'
rv = app.test_client().get('/', 'http://www.example.com:8080/test/')
cookie = rv.headers['set-cookie'].lower()
assert 'domain=.example.com' in cookie
assert 'path=/' in cookie
assert 'secure' in cookie
assert 'httponly' not in cookie
def test_missing_session():
app = flask.Flask(__name__)
def expect_exception(f, *args, **kwargs):
try:
f(*args, **kwargs)
except RuntimeError as e:
assert e.args and 'session is unavailable' in e.args[0]
else:
assert False, 'expected exception'
with app.test_request_context():
assert flask.session.get('missing_key') is None
expect_exception(flask.session.__setitem__, 'foo', 42)
expect_exception(flask.session.pop, 'foo')
def test_session_expiration():
    """Permanent sessions get an 'expires' cookie attribute; non-permanent
    sessions do not."""
    permanent = True
    app = flask.Flask(__name__)
    app.secret_key = 'testkey'

    @app.route('/')
    def index():
        flask.session['test'] = 42
        flask.session.permanent = permanent
        return ''

    @app.route('/test')
    def test():
        return text_type(flask.session.permanent)

    client = app.test_client()
    rv = client.get('/')
    assert 'set-cookie' in rv.headers
    # Bug fix: the pattern ended with an inline '(?i)' flag, which is
    # deprecated since Python 3.6 and a re.error from 3.11 ("global flags
    # not at the start of the expression"); pass re.IGNORECASE instead.
    # Also parse only the captured date value, not the whole
    # 'expires=...' match text.
    match = re.search(r'\bexpires=([^;]+)', rv.headers['set-cookie'],
                      re.IGNORECASE)
    expires = parse_date(match.group(1))
    expected = datetime.utcnow() + app.permanent_session_lifetime
    assert expires.year == expected.year
    assert expires.month == expected.month
    assert expires.day == expected.day

    rv = client.get('/test')
    assert rv.data == b'True'

    permanent = False
    rv = app.test_client().get('/')
    assert 'set-cookie' in rv.headers
    match = re.search(r'\bexpires=([^;]+)', rv.headers['set-cookie'],
                      re.IGNORECASE)
    assert match is None
def test_session_stored_last():
app = flask.Flask(__name__)
app.secret_key = 'development-key'
app.testing = True
@app.after_request
def modify_session(response):
flask.session['foo'] = 42
return response
@app.route('/')
def dump_session_contents():
return repr(flask.session.get('foo'))
c = app.test_client()
assert c.get('/').data == b'None'
assert c.get('/').data == b'42'
def test_session_special_types():
app = flask.Flask(__name__)
app.secret_key = 'development-key'
app.testing = True
now = datetime.utcnow().replace(microsecond=0)
the_uuid = uuid.uuid4()
@app.after_request
def modify_session(response):
flask.session['m'] = flask.Markup('Hello!')
flask.session['u'] = the_uuid
flask.session['dt'] = now
flask.session['b'] = b'\xff'
flask.session['t'] = (1, 2, 3)
return response
@app.route('/')
def dump_session_contents():
return pickle.dumps(dict(flask.session))
c = app.test_client()
c.get('/')
rv = pickle.loads(c.get('/').data)
assert rv['m'] == flask.Markup('Hello!')
assert type(rv['m']) == flask.Markup
assert rv['dt'] == now
assert rv['u'] == the_uuid
assert rv['b'] == b'\xff'
assert type(rv['b']) == bytes
assert rv['t'] == (1, 2, 3)
def test_session_cookie_setting():
app = flask.Flask(__name__)
app.testing = True
app.secret_key = 'dev key'
is_permanent = True
@app.route('/bump')
def bump():
rv = flask.session['foo'] = flask.session.get('foo', 0) + 1
flask.session.permanent = is_permanent
return str(rv)
@app.route('/read')
def read():
return str(flask.session.get('foo', 0))
def run_test(expect_header):
with app.test_client() as c:
assert c.get('/bump').data == b'1'
assert c.get('/bump').data == b'2'
assert c.get('/bump').data == b'3'
rv = c.get('/read')
set_cookie = rv.headers.get('set-cookie')
assert (set_cookie is not None) == expect_header
assert rv.data == b'3'
is_permanent = True
app.config['SESSION_REFRESH_EACH_REQUEST'] = True
run_test(expect_header=True)
is_permanent = True
app.config['SESSION_REFRESH_EACH_REQUEST'] = False
run_test(expect_header=False)
is_permanent = False
app.config['SESSION_REFRESH_EACH_REQUEST'] = True
run_test(expect_header=False)
is_permanent = False
app.config['SESSION_REFRESH_EACH_REQUEST'] = False
run_test(expect_header=False)
def test_flashes():
app = flask.Flask(__name__)
app.secret_key = 'testkey'
with app.test_request_context():
assert not flask.session.modified
flask.flash('Zap')
flask.session.modified = False
flask.flash('Zip')
assert flask.session.modified
assert list(flask.get_flashed_messages()) == ['Zap', 'Zip']
def test_extended_flashing():
    """Exercise get_flashed_messages() variants: plain, with_categories,
    and category_filter, each against a fresh test client."""
    # Be sure app.testing=True below, else tests can fail silently.
    #
    # Specifically, if app.testing is not set to True, the AssertionErrors
    # in the view functions will cause a 500 response to the test client
    # instead of propagating exceptions.
    app = flask.Flask(__name__)
    app.secret_key = 'testkey'
    app.testing = True
    @app.route('/')
    def index():
        # Flash one message per category: default ('message'), 'error',
        # and 'warning' (the last one a Markup instance).
        flask.flash(u'Hello World')
        flask.flash(u'Hello World', 'error')
        flask.flash(flask.Markup(u'<em>Testing</em>'), 'warning')
        return ''
    @app.route('/test/')
    def test():
        # Without arguments: messages only, no categories.
        messages = flask.get_flashed_messages()
        assert list(messages) == [
            u'Hello World',
            u'Hello World',
            flask.Markup(u'<em>Testing</em>')
        ]
        return ''
    @app.route('/test_with_categories/')
    def test_with_categories():
        # with_categories=True yields (category, message) tuples.
        messages = flask.get_flashed_messages(with_categories=True)
        assert len(messages) == 3
        assert list(messages) == [
            ('message', u'Hello World'),
            ('error', u'Hello World'),
            ('warning', flask.Markup(u'<em>Testing</em>'))
        ]
        return ''
    @app.route('/test_filter/')
    def test_filter():
        # category_filter keeps only the listed categories.
        messages = flask.get_flashed_messages(
            category_filter=['message'], with_categories=True)
        assert list(messages) == [('message', u'Hello World')]
        return ''
    @app.route('/test_filters/')
    def test_filters():
        messages = flask.get_flashed_messages(
            category_filter=['message', 'warning'], with_categories=True)
        assert list(messages) == [
            ('message', u'Hello World'),
            ('warning', flask.Markup(u'<em>Testing</em>'))
        ]
        return ''
    @app.route('/test_filters_without_returning_categories/')
    def test_filters2():
        # Filtering works together with the plain (category-less) form.
        messages = flask.get_flashed_messages(
            category_filter=['message', 'warning'])
        assert len(messages) == 2
        assert messages[0] == u'Hello World'
        assert messages[1] == flask.Markup(u'<em>Testing</em>')
        return ''
    # Create new test client on each test to clean flashed messages.
    c = app.test_client()
    c.get('/')
    c.get('/test/')
    c = app.test_client()
    c.get('/')
    c.get('/test_with_categories/')
    c = app.test_client()
    c.get('/')
    c.get('/test_filter/')
    c = app.test_client()
    c.get('/')
    c.get('/test_filters/')
    c = app.test_client()
    c.get('/')
    c.get('/test_filters_without_returning_categories/')
def test_request_processing():
app = flask.Flask(__name__)
evts = []
@app.before_request
def before_request():
evts.append('before')
@app.after_request
def after_request(response):
response.data += b'|after'
evts.append('after')
return response
@app.route('/')
def index():
assert 'before' in evts
assert 'after' not in evts
return 'request'
assert 'after' not in evts
rv = app.test_client().get('/').data
assert 'after' in evts
assert rv == b'request|after'
def test_request_preprocessing_early_return():
app = flask.Flask(__name__)
evts = []
@app.before_request
def before_request1():
evts.append(1)
@app.before_request
def before_request2():
evts.append(2)
return "hello"
@app.before_request
def before_request3():
evts.append(3)
return "bye"
@app.route('/')
def index():
evts.append('index')
return "damnit"
rv = app.test_client().get('/').data.strip()
assert rv == b'hello'
assert evts == [1, 2]
def test_after_request_processing():
app = flask.Flask(__name__)
app.testing = True
@app.route('/')
def index():
@flask.after_this_request
def foo(response):
response.headers['X-Foo'] = 'a header'
return response
return 'Test'
c = app.test_client()
resp = c.get('/')
assert resp.status_code == 200
assert resp.headers['X-Foo'] == 'a header'
def test_teardown_request_handler():
called = []
app = flask.Flask(__name__)
@app.teardown_request
def teardown_request(exc):
called.append(True)
return "Ignored"
@app.route('/')
def root():
return "Response"
rv = app.test_client().get('/')
assert rv.status_code == 200
assert b'Response' in rv.data
assert len(called) == 1
def test_teardown_request_handler_debug_mode():
called = []
app = flask.Flask(__name__)
app.testing = True
@app.teardown_request
def teardown_request(exc):
called.append(True)
return "Ignored"
@app.route('/')
def root():
return "Response"
rv = app.test_client().get('/')
assert rv.status_code == 200
assert b'Response' in rv.data
assert len(called) == 1
def test_teardown_request_handler_error():
    """Every teardown_request handler must receive the *original* unhandled
    exception, even after a later exception has clobbered sys.exc_info()."""
    called = []
    app = flask.Flask(__name__)
    app.config['LOGGER_HANDLER_POLICY'] = 'never'
    @app.teardown_request
    def teardown_request1(exc):
        assert type(exc) == ZeroDivisionError
        called.append(True)
        # This raises a new error and blows away sys.exc_info(), so we can
        # test that all teardown_requests get passed the same original
        # exception.
        try:
            raise TypeError()
        except:
            pass
    @app.teardown_request
    def teardown_request2(exc):
        assert type(exc) == ZeroDivisionError
        called.append(True)
        # This raises a new error and blows away sys.exc_info(), so we can
        # test that all teardown_requests get passed the same original
        # exception.
        try:
            raise TypeError()
        except:
            pass
    @app.route('/')
    def fails():
        # Deliberately trigger a ZeroDivisionError inside the view.
        1 // 0
    rv = app.test_client().get('/')
    assert rv.status_code == 500
    assert b'Internal Server Error' in rv.data
    # Both teardown handlers ran and both saw the original exception.
    assert len(called) == 2
def test_before_after_request_order():
called = []
app = flask.Flask(__name__)
@app.before_request
def before1():
called.append(1)
@app.before_request
def before2():
called.append(2)
@app.after_request
def after1(response):
called.append(4)
return response
@app.after_request
def after2(response):
called.append(3)
return response
@app.teardown_request
def finish1(exc):
called.append(6)
@app.teardown_request
def finish2(exc):
called.append(5)
@app.route('/')
def index():
return '42'
rv = app.test_client().get('/')
assert rv.data == b'42'
assert called == [1, 2, 3, 4, 5, 6]
def test_error_handling():
app = flask.Flask(__name__)
app.config['LOGGER_HANDLER_POLICY'] = 'never'
@app.errorhandler(404)
def not_found(e):
return 'not found', 404
@app.errorhandler(500)
def internal_server_error(e):
return 'internal server error', 500
@app.errorhandler(Forbidden)
def forbidden(e):
return 'forbidden', 403
@app.route('/')
def index():
flask.abort(404)
@app.route('/error')
def error():
1 // 0
@app.route('/forbidden')
def error2():
flask.abort(403)
c = app.test_client()
rv = c.get('/')
assert rv.status_code == 404
assert rv.data == b'not found'
rv = c.get('/error')
assert rv.status_code == 500
assert b'internal server error' == rv.data
rv = c.get('/forbidden')
assert rv.status_code == 403
assert b'forbidden' == rv.data
def test_before_request_and_routing_errors():
app = flask.Flask(__name__)
@app.before_request
def attach_something():
flask.g.something = 'value'
@app.errorhandler(404)
def return_something(error):
return flask.g.something, 404
rv = app.test_client().get('/')
assert rv.status_code == 404
assert rv.data == b'value'
def test_user_error_handling():
class MyException(Exception):
pass
app = flask.Flask(__name__)
@app.errorhandler(MyException)
def handle_my_exception(e):
assert isinstance(e, MyException)
return '42'
@app.route('/')
def index():
raise MyException()
c = app.test_client()
assert c.get('/').data == b'42'
def test_http_error_subclass_handling():
    """A handler registered for an HTTPException subclass takes precedence
    over the handler registered for the corresponding status code."""
    class ForbiddenSubclass(Forbidden):
        pass
    app = flask.Flask(__name__)
    @app.errorhandler(ForbiddenSubclass)
    def handle_forbidden_subclass(e):
        assert isinstance(e, ForbiddenSubclass)
        return 'banana'
    # NOTE: this second handler previously reused the name
    # handle_forbidden_subclass, silently shadowing the function above
    # (flake8 F811).  Registration happens via the decorator before the
    # rebinding, so behavior was unchanged -- but the name was wrong.
    @app.errorhandler(403)
    def handle_forbidden(e):
        assert not isinstance(e, ForbiddenSubclass)
        assert isinstance(e, Forbidden)
        return 'apple'
    @app.route('/1')
    def index1():
        raise ForbiddenSubclass()
    @app.route('/2')
    def index2():
        flask.abort(403)
    @app.route('/3')
    def index3():
        raise Forbidden()
    c = app.test_client()
    assert c.get('/1').data == b'banana'  # subclass handler wins
    assert c.get('/2').data == b'apple'   # abort(403) -> plain Forbidden
    assert c.get('/3').data == b'apple'
def test_trapping_of_bad_request_key_errors():
app = flask.Flask(__name__)
app.testing = True
@app.route('/fail')
def fail():
flask.request.form['missing_key']
c = app.test_client()
assert c.get('/fail').status_code == 400
app.config['TRAP_BAD_REQUEST_ERRORS'] = True
c = app.test_client()
try:
c.get('/fail')
except KeyError as e:
assert isinstance(e, BadRequest)
else:
assert False, 'Expected exception'
def test_trapping_of_all_http_exceptions():
app = flask.Flask(__name__)
app.testing = True
app.config['TRAP_HTTP_EXCEPTIONS'] = True
@app.route('/fail')
def fail():
flask.abort(404)
c = app.test_client()
with pytest.raises(NotFound):
c.get('/fail')
def test_enctype_debug_helper():
from flask.debughelpers import DebugFilesKeyError
app = flask.Flask(__name__)
app.debug = True
@app.route('/fail', methods=['POST'])
def index():
return flask.request.files['foo'].filename
# with statement is important because we leave an exception on the
# stack otherwise and we want to ensure that this is not the case
# to not negatively affect other tests.
with app.test_client() as c:
try:
c.post('/fail', data={'foo': 'index.txt'})
except DebugFilesKeyError as e:
assert 'no file contents were transmitted' in str(e)
assert 'This was submitted: "index.txt"' in str(e)
else:
assert False, 'Expected exception'
def test_response_creation():
app = flask.Flask(__name__)
@app.route('/unicode')
def from_unicode():
return u'Hällo Wörld'
@app.route('/string')
def from_string():
return u'Hällo Wörld'.encode('utf-8')
@app.route('/args')
def from_tuple():
return 'Meh', 400, {
'X-Foo': 'Testing',
'Content-Type': 'text/plain; charset=utf-8'
}
@app.route('/two_args')
def from_two_args_tuple():
return 'Hello', {
'X-Foo': 'Test',
'Content-Type': 'text/plain; charset=utf-8'
}
@app.route('/args_status')
def from_status_tuple():
return 'Hi, status!', 400
@app.route('/args_header')
def from_response_instance_status_tuple():
return flask.Response('Hello world', 404), {
"X-Foo": "Bar",
"X-Bar": "Foo"
}
c = app.test_client()
assert c.get('/unicode').data == u'Hällo Wörld'.encode('utf-8')
assert c.get('/string').data == u'Hällo Wörld'.encode('utf-8')
rv = c.get('/args')
assert rv.data == b'Meh'
assert rv.headers['X-Foo'] == 'Testing'
assert rv.status_code == 400
assert rv.mimetype == 'text/plain'
rv2 = c.get('/two_args')
assert rv2.data == b'Hello'
assert rv2.headers['X-Foo'] == 'Test'
assert rv2.status_code == 200
assert rv2.mimetype == 'text/plain'
rv3 = c.get('/args_status')
assert rv3.data == b'Hi, status!'
assert rv3.status_code == 400
assert rv3.mimetype == 'text/html'
rv4 = c.get('/args_header')
assert rv4.data == b'Hello world'
assert rv4.headers['X-Foo'] == 'Bar'
assert rv4.headers['X-Bar'] == 'Foo'
assert rv4.status_code == 404
def test_make_response():
    """make_response() defaults to an empty 200 text/html response and
    passes an explicit body/status through unchanged."""
    app = flask.Flask(__name__)
    with app.test_request_context():
        # (args to make_response, expected status, expected body)
        cases = [
            ((), 200, b''),
            (('Awesome',), 200, b'Awesome'),
            (('W00t', 404), 404, b'W00t'),
        ]
        for args, expected_status, expected_body in cases:
            resp = flask.make_response(*args)
            assert resp.status_code == expected_status
            assert resp.data == expected_body
            assert resp.mimetype == 'text/html'
def test_make_response_with_response_instance():
app = flask.Flask(__name__)
with app.test_request_context():
rv = flask.make_response(
flask.jsonify({'msg': 'W00t'}), 400)
assert rv.status_code == 400
assert rv.data == b'{\n "msg": "W00t"\n}\n'
assert rv.mimetype == 'application/json'
rv = flask.make_response(
flask.Response(''), 400)
assert rv.status_code == 400
assert rv.data == b''
assert rv.mimetype == 'text/html'
rv = flask.make_response(
flask.Response('', headers={'Content-Type': 'text/html'}),
400, [('X-Foo', 'bar')])
assert rv.status_code == 400
assert rv.headers['Content-Type'] == 'text/html'
assert rv.headers['X-Foo'] == 'bar'
def test_jsonify_no_prettyprint():
app = flask.Flask(__name__)
app.config.update({"JSONIFY_PRETTYPRINT_REGULAR": False})
with app.test_request_context():
compressed_msg = b'{"msg":{"submsg":"W00t"},"msg2":"foobar"}\n'
uncompressed_msg = {
"msg": {
"submsg": "W00t"
},
"msg2": "foobar"
}
rv = flask.make_response(
flask.jsonify(uncompressed_msg), 200)
assert rv.data == compressed_msg
def test_jsonify_prettyprint():
    """With JSONIFY_PRETTYPRINT_REGULAR enabled, jsonify() output is
    indented and newline-terminated."""
    app = flask.Flask(__name__)
    app.config.update({"JSONIFY_PRETTYPRINT_REGULAR": True})
    with app.test_request_context():
        # The input is a plain dict; it was previously misnamed
        # "compressed_msg" (copy-paste from test_jsonify_no_prettyprint).
        payload = {"msg": {"submsg": "W00t"}, "msg2": "foobar"}
        pretty_response = \
            b'{\n "msg": {\n "submsg": "W00t"\n }, \n "msg2": "foobar"\n}\n'
        rv = flask.make_response(
            flask.jsonify(payload), 200)
        assert rv.data == pretty_response
def test_url_generation():
app = flask.Flask(__name__)
@app.route('/hello/<name>', methods=['POST'])
def hello():
pass
with app.test_request_context():
assert flask.url_for('hello', name='test x') == '/hello/test%20x'
assert flask.url_for('hello', name='test x', _external=True) == \
'http://localhost/hello/test%20x'
def test_build_error_handler():
app = flask.Flask(__name__)
# Test base case, a URL which results in a BuildError.
with app.test_request_context():
pytest.raises(BuildError, flask.url_for, 'spam')
# Verify the error is re-raised if not the current exception.
try:
with app.test_request_context():
flask.url_for('spam')
except BuildError as err:
error = err
try:
raise RuntimeError('Test case where BuildError is not current.')
except RuntimeError:
pytest.raises(
BuildError, app.handle_url_build_error, error, 'spam', {})
# Test a custom handler.
def handler(error, endpoint, values):
# Just a test.
return '/test_handler/'
app.url_build_error_handlers.append(handler)
with app.test_request_context():
assert flask.url_for('spam') == '/test_handler/'
def test_build_error_handler_reraise():
app = flask.Flask(__name__)
# Test a custom handler which reraises the BuildError
def handler_raises_build_error(error, endpoint, values):
raise error
app.url_build_error_handlers.append(handler_raises_build_error)
with app.test_request_context():
pytest.raises(BuildError, flask.url_for, 'not.existing')
def test_custom_converters():
from werkzeug.routing import BaseConverter
class ListConverter(BaseConverter):
def to_python(self, value):
return value.split(',')
def to_url(self, value):
base_to_url = super(ListConverter, self).to_url
return ','.join(base_to_url(x) for x in value)
app = flask.Flask(__name__)
app.url_map.converters['list'] = ListConverter
@app.route('/<list:args>')
def index(args):
return '|'.join(args)
c = app.test_client()
assert c.get('/1,2,3').data == b'1|2|3'
def test_static_files():
app = flask.Flask(__name__)
app.testing = True
rv = app.test_client().get('/static/index.html')
assert rv.status_code == 200
assert rv.data.strip() == b'<h1>Hello World!</h1>'
with app.test_request_context():
assert flask.url_for('static', filename='index.html') == \
'/static/index.html'
rv.close()
def test_none_response():
    """A view returning None must raise a descriptive ValueError."""
    app = flask.Flask(__name__)
    app.testing = True
    @app.route('/')
    def test():
        return None
    try:
        app.test_client().get('/')
    except ValueError as e:
        assert str(e) == 'View function did not return a response'
    else:
        # BUG FIX: this was `assert "Expected ValueError"` -- a non-empty
        # string is always truthy, so the test could never fail when the
        # exception was missing.  Fail explicitly instead.
        assert False, 'Expected ValueError'
def test_request_locals():
assert repr(flask.g) == '<LocalProxy unbound>'
assert not flask.g
def test_test_app_proper_environ():
    """SERVER_NAME routing: host/port matching, scheme handling, and
    subdomain dispatch, with branches for Werkzeug 0.7 vs 0.8 behavior."""
    app = flask.Flask(__name__)
    app.config.update(
        SERVER_NAME='localhost.localdomain:5000'
    )
    @app.route('/')
    def index():
        return 'Foo'
    @app.route('/', subdomain='foo')
    def subdomain():
        return 'Foo SubDomain'
    rv = app.test_client().get('/')
    assert rv.data == b'Foo'
    rv = app.test_client().get('/', 'http://localhost.localdomain:5000')
    assert rv.data == b'Foo'
    rv = app.test_client().get('/', 'https://localhost.localdomain:5000')
    assert rv.data == b'Foo'
    # Without an explicit port, the default scheme ports must still match.
    app.config.update(SERVER_NAME='localhost.localdomain')
    rv = app.test_client().get('/', 'https://localhost.localdomain')
    assert rv.data == b'Foo'
    try:
        # Mismatched port (443 configured, https default) must not match.
        app.config.update(SERVER_NAME='localhost.localdomain:443')
        rv = app.test_client().get('/', 'https://localhost.localdomain')
        # Werkzeug 0.8
        assert rv.status_code == 404
    except ValueError as e:
        # Werkzeug 0.7
        assert str(e) == (
            "the server name provided "
            "('localhost.localdomain:443') does not match the "
            "server name from the WSGI environment ('localhost.localdomain')"
        )
    try:
        # Host that is not the server name and not a known subdomain.
        app.config.update(SERVER_NAME='localhost.localdomain')
        rv = app.test_client().get('/', 'http://foo.localhost')
        # Werkzeug 0.8
        assert rv.status_code == 404
    except ValueError as e:
        # Werkzeug 0.7
        assert str(e) == (
            "the server name provided "
            "('localhost.localdomain') does not match the "
            "server name from the WSGI environment ('foo.localhost')"
        )
    # Matching subdomain dispatches to the subdomain view.
    rv = app.test_client().get('/', 'http://foo.localhost.localdomain')
    assert rv.data == b'Foo SubDomain'
def test_exception_propagation():
    """View exceptions propagate to the test client when TESTING,
    PROPAGATE_EXCEPTIONS, or DEBUG is set; otherwise they become a 500."""
    def apprunner(config_key):
        app = flask.Flask(__name__)
        app.config['LOGGER_HANDLER_POLICY'] = 'never'
        @app.route('/')
        def index():
            1 // 0
        c = app.test_client()
        if config_key is not None:
            app.config[config_key] = True
            try:
                c.get('/')
            except Exception:
                pass
            else:
                assert False, 'expected exception'
        else:
            # No flag set: the error is converted into a 500 response.
            assert c.get('/').status_code == 500
    # we have to run this test in an isolated thread because if the
    # debug flag is set to true and an exception happens the context is
    # not torn down. This causes other tests that run after this fail
    # when they expect no exception on the stack.
    for config_key in 'TESTING', 'PROPAGATE_EXCEPTIONS', 'DEBUG', None:
        t = Thread(target=apprunner, args=(config_key,))
        t.start()
        t.join()
def test_max_content_length():
app = flask.Flask(__name__)
app.config['MAX_CONTENT_LENGTH'] = 64
@app.before_request
def always_first():
flask.request.form['myfile']
assert False
@app.route('/accept', methods=['POST'])
def accept_file():
flask.request.form['myfile']
assert False
@app.errorhandler(413)
def catcher(error):
return '42'
c = app.test_client()
rv = c.post('/accept', data={'myfile': 'foo' * 100})
assert rv.data == b'42'
def test_url_processors():
app = flask.Flask(__name__)
@app.url_defaults
def add_language_code(endpoint, values):
if flask.g.lang_code is not None and \
app.url_map.is_endpoint_expecting(endpoint, 'lang_code'):
values.setdefault('lang_code', flask.g.lang_code)
@app.url_value_preprocessor
def pull_lang_code(endpoint, values):
flask.g.lang_code = values.pop('lang_code', None)
@app.route('/<lang_code>/')
def index():
return flask.url_for('about')
@app.route('/<lang_code>/about')
def about():
return flask.url_for('something_else')
@app.route('/foo')
def something_else():
return flask.url_for('about', lang_code='en')
c = app.test_client()
assert c.get('/de/').data == b'/de/about'
assert c.get('/de/about').data == b'/foo'
assert c.get('/foo').data == b'/en/about'
def test_inject_blueprint_url_defaults():
app = flask.Flask(__name__)
bp = flask.Blueprint('foo.bar.baz', __name__,
template_folder='template')
@bp.url_defaults
def bp_defaults(endpoint, values):
values['page'] = 'login'
@bp.route('/<page>')
def view(page):
pass
app.register_blueprint(bp)
values = dict()
app.inject_url_defaults('foo.bar.baz.view', values)
expected = dict(page='login')
assert values == expected
with app.test_request_context('/somepage'):
url = flask.url_for('foo.bar.baz.view')
expected = '/login'
assert url == expected
def test_nonascii_pathinfo():
app = flask.Flask(__name__)
app.testing = True
@app.route(u'/киртест')
def index():
return 'Hello World!'
c = app.test_client()
rv = c.get(u'/киртест')
assert rv.data == b'Hello World!'
def test_debug_mode_complains_after_first_request():
app = flask.Flask(__name__)
app.debug = True
@app.route('/')
def index():
return 'Awesome'
assert not app.got_first_request
assert app.test_client().get('/').data == b'Awesome'
try:
@app.route('/foo')
def broken():
return 'Meh'
except AssertionError as e:
assert 'A setup function was called' in str(e)
else:
assert False, 'Expected exception'
app.debug = False
@app.route('/foo')
def working():
return 'Meh'
assert app.test_client().get('/foo').data == b'Meh'
assert app.got_first_request
def test_before_first_request_functions():
got = []
app = flask.Flask(__name__)
@app.before_first_request
def foo():
got.append(42)
c = app.test_client()
c.get('/')
assert got == [42]
c.get('/')
assert got == [42]
assert app.got_first_request
def test_before_first_request_functions_concurrent():
got = []
app = flask.Flask(__name__)
@app.before_first_request
def foo():
time.sleep(0.2)
got.append(42)
c = app.test_client()
def get_and_assert():
c.get("/")
assert got == [42]
t = Thread(target=get_and_assert)
t.start()
get_and_assert()
t.join()
assert app.got_first_request
def test_routing_redirect_debugging():
app = flask.Flask(__name__)
app.debug = True
@app.route('/foo/', methods=['GET', 'POST'])
def foo():
return 'success'
with app.test_client() as c:
try:
c.post('/foo', data={})
except AssertionError as e:
assert 'http://localhost/foo/' in str(e)
assert ('Make sure to directly send '
'your POST-request to this URL') in str(e)
else:
assert False, 'Expected exception'
rv = c.get('/foo', data={}, follow_redirects=True)
assert rv.data == b'success'
app.debug = False
with app.test_client() as c:
rv = c.post('/foo', data={}, follow_redirects=True)
assert rv.data == b'success'
def test_route_decorator_custom_endpoint():
app = flask.Flask(__name__)
app.debug = True
@app.route('/foo/')
def foo():
return flask.request.endpoint
@app.route('/bar/', endpoint='bar')
def for_bar():
return flask.request.endpoint
@app.route('/bar/123', endpoint='123')
def for_bar_foo():
return flask.request.endpoint
with app.test_request_context():
assert flask.url_for('foo') == '/foo/'
assert flask.url_for('bar') == '/bar/'
assert flask.url_for('123') == '/bar/123'
c = app.test_client()
assert c.get('/foo/').data == b'foo'
assert c.get('/bar/').data == b'bar'
assert c.get('/bar/123').data == b'123'
def test_preserve_only_once():
app = flask.Flask(__name__)
app.debug = True
@app.route('/fail')
def fail_func():
1 // 0
c = app.test_client()
for x in range(3):
with pytest.raises(ZeroDivisionError):
c.get('/fail')
assert flask._request_ctx_stack.top is not None
assert flask._app_ctx_stack.top is not None
# implicit appctx disappears too
flask._request_ctx_stack.top.pop()
assert flask._request_ctx_stack.top is None
assert flask._app_ctx_stack.top is None
def test_preserve_remembers_exception():
    """In debug mode a failing request context is preserved; its teardown
    (with the remembered exception) only runs when the next request pops it."""
    app = flask.Flask(__name__)
    app.debug = True
    errors = []
    @app.route('/fail')
    def fail_func():
        1 // 0
    @app.route('/success')
    def success_func():
        return 'Okay'
    @app.teardown_request
    def teardown_handler(exc):
        errors.append(exc)
    c = app.test_client()
    # After this failure we did not yet call the teardown handler
    with pytest.raises(ZeroDivisionError):
        c.get('/fail')
    assert errors == []
    # But this request triggers it, and it's an error
    c.get('/success')
    # Two entries: the preserved /fail teardown (the ZeroDivisionError)
    # plus this /success request's own teardown.
    assert len(errors) == 2
    assert isinstance(errors[0], ZeroDivisionError)
    # A further request has no preserved context left to clean up; only
    # its own (exception-free) teardown runs.
    c.get('/success')
    assert len(errors) == 3
    assert errors[1] is None
def test_get_method_on_g():
app = flask.Flask(__name__)
app.testing = True
with app.app_context():
assert flask.g.get('x') is None
assert flask.g.get('x', 11) == 11
flask.g.x = 42
assert flask.g.get('x') == 42
assert flask.g.x == 42
def test_g_iteration_protocol():
app = flask.Flask(__name__)
app.testing = True
with app.app_context():
flask.g.foo = 23
flask.g.bar = 42
assert 'foo' in flask.g
assert 'foos' not in flask.g
assert sorted(flask.g) == ['bar', 'foo']
def test_subdomain_basic_support():
app = flask.Flask(__name__)
app.config['SERVER_NAME'] = 'localhost'
@app.route('/')
def normal_index():
return 'normal index'
@app.route('/', subdomain='test')
def test_index():
return 'test index'
c = app.test_client()
rv = c.get('/', 'http://localhost/')
assert rv.data == b'normal index'
rv = c.get('/', 'http://test.localhost/')
assert rv.data == b'test index'
def test_subdomain_matching():
app = flask.Flask(__name__)
app.config['SERVER_NAME'] = 'localhost'
@app.route('/', subdomain='<user>')
def index(user):
return 'index for %s' % user
c = app.test_client()
rv = c.get('/', 'http://mitsuhiko.localhost/')
assert rv.data == b'index for mitsuhiko'
def test_subdomain_matching_with_ports():
app = flask.Flask(__name__)
app.config['SERVER_NAME'] = 'localhost:3000'
@app.route('/', subdomain='<user>')
def index(user):
return 'index for %s' % user
c = app.test_client()
rv = c.get('/', 'http://mitsuhiko.localhost:3000/')
assert rv.data == b'index for mitsuhiko'
def test_multi_route_rules():
app = flask.Flask(__name__)
@app.route('/')
@app.route('/<test>/')
def index(test='a'):
return test
rv = app.test_client().open('/')
assert rv.data == b'a'
rv = app.test_client().open('/b/')
assert rv.data == b'b'
def test_multi_route_class_views():
class View(object):
def __init__(self, app):
app.add_url_rule('/', 'index', self.index)
app.add_url_rule('/<test>/', 'index', self.index)
def index(self, test='a'):
return test
app = flask.Flask(__name__)
_ = View(app)
rv = app.test_client().open('/')
assert rv.data == b'a'
rv = app.test_client().open('/b/')
assert rv.data == b'b'
def test_run_defaults(monkeypatch):
rv = {}
# Mocks werkzeug.serving.run_simple method
def run_simple_mock(*args, **kwargs):
rv['result'] = 'running...'
app = flask.Flask(__name__)
monkeypatch.setattr(werkzeug.serving, 'run_simple', run_simple_mock)
app.run()
assert rv['result'] == 'running...'
def test_run_server_port(monkeypatch):
rv = {}
# Mocks werkzeug.serving.run_simple method
def run_simple_mock(hostname, port, application, *args, **kwargs):
rv['result'] = 'running on %s:%s ...' % (hostname, port)
app = flask.Flask(__name__)
monkeypatch.setattr(werkzeug.serving, 'run_simple', run_simple_mock)
hostname, port = 'localhost', 8000
app.run(hostname, port, debug=True)
assert rv['result'] == 'running on %s:%s ...' % (hostname, port)
| bsd-3-clause |
al1221/ghost-openshift | node_modules/grunt-docker/node_modules/docker/node_modules/pygmentize-bundled/vendor/pygments/pygments/formatters/terminal256.py | 362 | 7657 | # -*- coding: utf-8 -*-
"""
pygments.formatters.terminal256
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Formatter for 256-color terminal output with ANSI sequences.
RGB-to-XTERM color conversion routines adapted from xterm256-conv
tool (http://frexx.de/xterm-256-notes/data/xterm256-conv2.tar.bz2)
by Wolfgang Frisch.
Formatter version 1.
:copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
# TODO:
# - Options to map style's bold/underline/italic/border attributes
# to some ANSI attrbutes (something like 'italic=underline')
# - An option to output "style RGB to xterm RGB/index" conversion table
# - An option to indicate that we are running in "reverse background"
# xterm. This means that default colors are white-on-black, not
# black-on-while, so colors like "white background" need to be converted
# to "white background, black foreground", etc...
import sys
from pygments.formatter import Formatter
__all__ = ['Terminal256Formatter']
class EscapeSequence:
    """Container for ANSI SGR attributes in 256-color mode.

    Holds an optional foreground/background xterm palette index plus
    bold and underline flags, and renders the matching escape strings.
    """

    def __init__(self, fg=None, bg=None, bold=False, underline=False):
        # fg/bg are xterm-256 palette indices, or None for the default.
        self.fg = fg
        self.bg = bg
        self.bold = bold
        self.underline = underline

    def escape(self, attrs):
        # Join SGR attribute codes into one CSI...m sequence; an empty
        # attribute list produces no escape at all.
        return "\x1b[" + ";".join(attrs) + "m" if len(attrs) else ""

    def color_string(self):
        """Return the escape that turns these attributes on."""
        parts = []
        if self.fg is not None:
            parts += ["38", "5", "%i" % self.fg]  # 256-color foreground
        if self.bg is not None:
            parts += ["48", "5", "%i" % self.bg]  # 256-color background
        if self.bold:
            parts.append("01")
        if self.underline:
            parts.append("04")
        return self.escape(parts)

    def reset_string(self):
        """Return the escape that turns these attributes back off."""
        parts = []
        if self.fg is not None:
            parts.append("39")  # default foreground
        if self.bg is not None:
            parts.append("49")  # default background
        if self.bold or self.underline:
            parts.append("00")  # full attribute reset
        return self.escape(parts)
class Terminal256Formatter(Formatter):
    r"""
    Format tokens with ANSI color sequences, for output in a 256-color
    terminal or console. Like in `TerminalFormatter` color sequences
    are terminated at newlines, so that paging the output works correctly.

    The formatter takes colors from a style defined by the `style` option
    and converts them to nearest ANSI 256-color escape sequences. Bold and
    underline attributes from the style are preserved (and displayed).

    *New in Pygments 0.9.*

    Options accepted:

    `style`
        The style to use, can be a string or a Style subclass (default:
        ``'default'``).
    """
    name = 'Terminal256'
    aliases = ['terminal256', 'console256', '256']
    filenames = []

    def __init__(self, **options):
        """Build the palette table and precompute per-token escape strings."""
        Formatter.__init__(self, **options)
        self.xterm_colors = []  # palette index -> (r, g, b) tuple
        self.best_match = {}    # cache: style color string -> nearest palette index
        self.style_string = {}  # str(tokentype) -> (start escape, reset escape)
        self.usebold = 'nobold' not in options
        self.useunderline = 'nounderline' not in options
        self._build_color_table()  # build an RGB-to-256 color conversion table
        self._setup_styles()  # convert selected style's colors to term. colors

    def _build_color_table(self):
        """Fill ``self.xterm_colors`` with RGB values of the xterm palette."""
        # colors 0..15: 16 basic colors
        self.xterm_colors.append((0x00, 0x00, 0x00)) # 0
        self.xterm_colors.append((0xcd, 0x00, 0x00)) # 1
        self.xterm_colors.append((0x00, 0xcd, 0x00)) # 2
        self.xterm_colors.append((0xcd, 0xcd, 0x00)) # 3
        self.xterm_colors.append((0x00, 0x00, 0xee)) # 4
        self.xterm_colors.append((0xcd, 0x00, 0xcd)) # 5
        self.xterm_colors.append((0x00, 0xcd, 0xcd)) # 6
        self.xterm_colors.append((0xe5, 0xe5, 0xe5)) # 7
        self.xterm_colors.append((0x7f, 0x7f, 0x7f)) # 8
        self.xterm_colors.append((0xff, 0x00, 0x00)) # 9
        self.xterm_colors.append((0x00, 0xff, 0x00)) # 10
        self.xterm_colors.append((0xff, 0xff, 0x00)) # 11
        self.xterm_colors.append((0x5c, 0x5c, 0xff)) # 12
        self.xterm_colors.append((0xff, 0x00, 0xff)) # 13
        self.xterm_colors.append((0x00, 0xff, 0xff)) # 14
        self.xterm_colors.append((0xff, 0xff, 0xff)) # 15
        # colors 16..232: the 6x6x6 color cube
        valuerange = (0x00, 0x5f, 0x87, 0xaf, 0xd7, 0xff)
        # NOTE(review): range(217) appends 217 entries -- one more than the
        # 216-color cube; the extra duplicate keeps the grayscale ramp below
        # starting at index 233 as the comments describe.
        for i in range(217):
            r = valuerange[(i // 36) % 6]
            g = valuerange[(i // 6) % 6]
            b = valuerange[i % 6]
            self.xterm_colors.append((r, g, b))
        # colors 233..253: grayscale
        for i in range(1, 22):
            v = 8 + i * 10
            self.xterm_colors.append((v, v, v))

    def _closest_color(self, r, g, b):
        """Return the palette index nearest to (r, g, b), using a brute-force
        scan with squared Euclidean distance in RGB space."""
        distance = 257*257*3 # "infinity" (>distance from #000000 to #ffffff)
        match = 0
        # The table holds 16 + 217 + 21 = 254 entries (indices 0..253).
        for i in range(0, 254):
            values = self.xterm_colors[i]
            rd = r - values[0]
            gd = g - values[1]
            bd = b - values[2]
            d = rd*rd + gd*gd + bd*bd
            if d < distance:
                match = i
                distance = d
        return match

    def _color_index(self, color):
        """Map a style color (hex string such as '8800cc') to a palette
        index, caching results; unparsable colors fall back to 0 (black)."""
        index = self.best_match.get(color, None)
        if index is None:
            try:
                rgb = int(str(color), 16)
            except ValueError:
                rgb = 0
            r = (rgb >> 16) & 0xff
            g = (rgb >> 8) & 0xff
            b = rgb & 0xff
            index = self._closest_color(r, g, b)
            self.best_match[color] = index
        return index

    def _setup_styles(self):
        """Precompute the (start, reset) escape pair for every token type
        defined by the selected style."""
        for ttype, ndef in self.style:
            escape = EscapeSequence()
            if ndef['color']:
                escape.fg = self._color_index(ndef['color'])
            if ndef['bgcolor']:
                escape.bg = self._color_index(ndef['bgcolor'])
            if self.usebold and ndef['bold']:
                escape.bold = True
            if self.useunderline and ndef['underline']:
                escape.underline = True
            self.style_string[str(ttype)] = (escape.color_string(),
                                             escape.reset_string())

    def format(self, tokensource, outfile):
        """Format tokens to *outfile*, adopting the terminal's encoding on
        Python 2 when none is configured."""
        # hack: if the output is a terminal and has an encoding set,
        # use that to avoid unicode encode problems
        if not self.encoding and hasattr(outfile, "encoding") and \
           hasattr(outfile, "isatty") and outfile.isatty() and \
           sys.version_info < (3,):
            self.encoding = outfile.encoding
        return Formatter.format(self, tokensource, outfile)

    def format_unencoded(self, tokensource, outfile):
        """Write each token wrapped in its escape pair, resetting colors at
        every newline; token types without a style entry fall back to their
        parent types."""
        for ttype, value in tokensource:
            not_found = True
            # Walk up the token hierarchy until a style entry is found.
            while ttype and not_found:
                try:
                    #outfile.write( "<" + str(ttype) + ">" )
                    on, off = self.style_string[str(ttype)]
                    # Like TerminalFormatter, add "reset colors" escape sequence
                    # on newline.
                    spl = value.split('\n')
                    for line in spl[:-1]:
                        if line:
                            outfile.write(on + line + off)
                        outfile.write('\n')
                    if spl[-1]:
                        outfile.write(on + spl[-1] + off)
                    not_found = False
                    #outfile.write( '#' + str(ttype) + '#' )
                except KeyError:
                    #ottype = ttype
                    ttype = ttype[:-1]
                    #outfile.write( '!' + str(ottype) + '->' + str(ttype) + '!' )
            if not_found:
                # No style for this token or any of its parents: emit as-is.
                outfile.write(value)
| mit |
h2oai/h2o-3 | h2o-py/tests/testdir_apis/H2O_Module/pyunit_h2oget_model.py | 4 | 1077 | from __future__ import print_function
import sys
sys.path.insert(1,"../../../")
from tests import pyunit_utils
import h2o
from h2o.estimators.glm import H2OGeneralizedLinearEstimator
from h2o.utils.typechecks import assert_is_type
def h2oget_model():
    """
    Python API test: h2o.get_model(model_id)

    Trains a binomial GLM, fetches it back from the cluster by id, and
    verifies the retrieved model matches the trained one.
    """
    frame = h2o.import_file(pyunit_utils.locate("smalldata/logreg/benign.csv"))
    response_col = 3
    predictor_cols = [0, 1, 2, 4, 5, 6, 7, 8, 9, 10]
    glm = H2OGeneralizedLinearEstimator(family="binomial", alpha=0, Lambda=1e-5)
    glm.train(x=predictor_cols, y=response_col, training_frame=frame)
    # Round-trip: look the trained model up again by its id.
    retrieved = h2o.get_model(glm.model_id)
    assert_is_type(glm, H2OGeneralizedLinearEstimator)
    assert_is_type(retrieved, H2OGeneralizedLinearEstimator)
    same_category = (glm._model_json['output']['model_category'] ==
                     retrieved._model_json['output']['model_category'])
    assert same_category and \
        retrieved._model_json['output']['model_category'] == 'Binomial', \
        "h2o.get_model() command is not working"
if __name__ == "__main__":
pyunit_utils.standalone_test(h2oget_model)
else:
h2oget_model()
| apache-2.0 |
WhackoJacko/Jetee | jetee/common/config_factories/project/git.py | 1 | 1899 | import os
from jetee.base.config_factory import AnsiblePreTaskConfigFactory
from jetee.runtime.configuration import project_configuration
class CloneGitRepoAnsiblePreTaskConfigFactory(AnsiblePreTaskConfigFactory):
    """Builds the Ansible ``git`` pre-task that clones the project repo."""
    template = {
        u'name': u'Checkout project repo',
        u'git': {
            u'accept_hostkey': u'yes',
            u'dest': u'',
            u'repo': u'',
            u'version': u''
        }
    }

    def get_config(self, parent):
        """Return the clone-task config list for *parent* (a project).

        BUG FIX: the previous ``self.template.copy()`` was a shallow copy,
        so writing into the nested ``git`` dict mutated the class-level
        template shared by every instance and call.  Deep-copy instead.
        """
        import copy
        project = parent
        template = copy.deepcopy(self.template)
        template[u'git'][u'dest'] = os.path.join(project.location, project_configuration.get_project_name())
        template[u'git'][u'repo'] = project.cvs_repo_url
        template[u'git'][u'version'] = project.cvs_repo_branch
        return [template]
class UpdateGitRepoAnsibleTaskConfigFactory(AnsiblePreTaskConfigFactory):
    """Builds the Ansible task that pulls the configured branch."""
    template = {
        u'name': u'Update project repo',
        u'command': u'git pull origin {}',
        u'args': {
            u'chdir': u''
        }
    }

    def get_config(self, parent):
        """Return the pull-task config list for *parent* (a project).

        BUG FIX: ``self.template.copy()`` was shallow, so assigning into
        the nested ``args`` dict mutated the shared class-level template.
        Deep-copy instead.
        """
        import copy
        project = parent
        template = copy.deepcopy(self.template)
        template[u'command'] = template[u'command'].format(project.cvs_repo_branch)
        template[u'args'][u'chdir'] = os.path.join(project.location, project_configuration.get_project_name())
        return [template]
class CheckoutGitBranchAnsibleTaskConfigFactory(AnsiblePreTaskConfigFactory):
    """Builds the Ansible task that checks out the project's configured branch."""

    # Class-level template; get_config() must never mutate it in place.
    template = {
        u'name': u'Checkout branch',
        u'command': u'git checkout {}',
        u'args': {
            u'chdir': u''
        }
    }

    def get_config(self, parent):
        """Return a one-element list with the filled-in ``git checkout`` task."""
        import copy

        project = parent
        # BUG FIX: a shallow ``self.template.copy()`` shared the nested
        # u'args' dict with the class attribute, so setting u'chdir' mutated
        # the class-level template shared by all instances.
        template = copy.deepcopy(self.template)
        template[u'command'] = template[u'command'].format(project.cvs_repo_branch)
        template[u'args'][u'chdir'] = os.path.join(project.location, project_configuration.get_project_name())
        return [template]
casinocoin/casinocoin | contrib/testgen/base58.py | 2139 | 2818 | '''
Bitcoin base58 encoding and decoding.
Based on https://bitcointalk.org/index.php?topic=1026.0 (public domain)
'''
import hashlib
# for compatibility with following code...
class SHA256:
    # Thin stand-in mirroring the old ``sha`` module interface:
    # SHA256.new() returns a fresh hashlib sha256 object.
    new = hashlib.sha256
if str != bytes:
    # Python 3.x
    # Iterating a ``bytes`` object already yields ints, so ord becomes the
    # identity, and chr must produce a one-byte ``bytes`` object instead of
    # a str.  NOTE: this deliberately shadows the builtins module-wide.
    def ord(c):
        return c
    def chr(n):
        return bytes( (n,) )
__b58chars = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'
__b58base = len(__b58chars)
b58chars = __b58chars
def b58encode(v):
""" encode v, which is a string of bytes, to base58.
"""
long_value = 0
for (i, c) in enumerate(v[::-1]):
long_value += (256**i) * ord(c)
result = ''
while long_value >= __b58base:
div, mod = divmod(long_value, __b58base)
result = __b58chars[mod] + result
long_value = div
result = __b58chars[long_value] + result
# Bitcoin does a little leading-zero-compression:
# leading 0-bytes in the input become leading-1s
nPad = 0
for c in v:
if c == '\0': nPad += 1
else: break
return (__b58chars[0]*nPad) + result
def b58decode(v, length = None):
    """Decode base58 string *v* into a string of bytes.

    If *length* is given and the decoded payload is not exactly that many
    bytes, None is returned.  Relies on the module-level ``chr`` shim so the
    result is ``bytes`` on Python 3 and ``str`` on Python 2.
    """
    # Accumulate the base-58 digits into one big integer.
    # NOTE(review): an invalid character makes ``find`` return -1 and
    # silently corrupts the value instead of raising -- confirm callers
    # never pass unvalidated input.
    long_value = 0
    for (i, c) in enumerate(v[::-1]):
        long_value += __b58chars.find(c) * (__b58base**i)
    # Re-serialize the integer as big-endian bytes.
    result = bytes()
    while long_value >= 256:
        div, mod = divmod(long_value, 256)
        result = chr(mod) + result
        long_value = div
    result = chr(long_value) + result
    # Leading '1' characters represent leading zero bytes (Bitcoin rule).
    nPad = 0
    for c in v:
        if c == __b58chars[0]: nPad += 1
        else: break
    result = chr(0)*nPad + result
    if length is not None and len(result) != length:
        return None
    return result
def checksum(v):
    """Return 32-bit checksum based on SHA256"""
    # Double-SHA256, first four bytes -- the Base58Check checksum scheme.
    return SHA256.new(SHA256.new(v).digest()).digest()[0:4]
def b58encode_chk(v):
    """b58encode a string, with 32-bit checksum"""
    # Append the 4-byte checksum to the payload before encoding.
    return b58encode(v + checksum(v))
def b58decode_chk(v):
    """Decode a base58 string, verify and strip its 4-byte checksum.

    Returns the payload (without the checksum) or None when decoding fails
    or the checksum does not match.
    """
    result = b58decode(v)
    if result is None:
        return None
    # The last four bytes are the double-SHA256 checksum of the payload.
    # BUG FIX: the old code computed the checksum into an unused ``h3``
    # variable and then recomputed it inside the comparison (dead variable
    # plus doubled hashing work); compute it once and compare.
    expected = checksum(result[:-4])
    if result[-4:] == expected:
        return result[:-4]
    else:
        return None
def get_bcaddress_version(strAddress):
    """ Returns None if strAddress is invalid. Otherwise returns integer version of address. """
    addr = b58decode_chk(strAddress)
    # A valid address payload is 21 bytes: 1 version byte + 20-byte hash.
    if addr is None or len(addr)!=21: return None
    version = addr[0]
    # The module-level ord shim makes this work whether ``version`` is a
    # one-char str (Python 2) or already an int (Python 3 bytes indexing).
    return ord(version)
if __name__ == '__main__':
    # Test case (from http://gitorious.org/bitcoin/python-base58.git)
    # BUG FIX: use ``==`` rather than ``is`` -- identity comparison with an
    # int literal is implementation-defined (and a SyntaxWarning on 3.8+).
    assert get_bcaddress_version('15VjRaDX9zpbA8LVnbrCAFzrVzN7ixHNsC') == 0
    _ohai = 'o hai'.encode('ascii')
    _tmp = b58encode(_ohai)
    assert _tmp == 'DYB3oMS'
    assert b58decode(_tmp, 5) == _ohai
    print("Tests passed")
| mit |
HingeChat/HingeChat | src/hingechat/qt/qPassphraseDialog.py | 1 | 2449 | import os
import signal
from PyQt5.QtWidgets import QDialog
from PyQt5.QtWidgets import QHBoxLayout
from PyQt5.QtGui import QIcon
from PyQt5.QtWidgets import QLabel
from PyQt5.QtWidgets import QLineEdit
from PyQt5.QtWidgets import QPushButton
from PyQt5.QtWidgets import QVBoxLayout
import qtUtils
from src.hinge.utils import constants
class QPassphraseDialog(QDialog):
    """Modal dialog asking the user for the key-encryption passphrase."""

    def __init__(self, verify=False, showForgotButton=True):
        QDialog.__init__(self)
        # Results read by getPassphrase() after the dialog closes.
        self.passphrase = None
        self.clickedButton = constants.BUTTON_CANCEL

        # Window chrome.
        self.setWindowTitle("Save Keys Passphrase")
        self.setWindowIcon(QIcon(qtUtils.getAbsoluteImagePath('icon.png')))

        prompt = "Confirm passphrase:" if verify else "Encryption keys passphrase:"
        promptLabel = QLabel(prompt, self)

        # Masked input field for the passphrase.
        self.passphraseInput = QLineEdit(self)
        self.passphraseInput.setEchoMode(QLineEdit.Password)

        okayButton = QPushButton(QIcon.fromTheme('dialog-ok'), "OK", self)
        cancelButton = QPushButton(QIcon.fromTheme('dialog-cancel'), "Cancel", self)
        okayButton.clicked.connect(lambda: self.buttonClicked(constants.BUTTON_OKAY))
        cancelButton.clicked.connect(lambda: self.buttonClicked(constants.BUTTON_CANCEL))

        # Float the buttons to the right.
        buttonRow = QHBoxLayout()
        buttonRow.addStretch(1)
        buttonRow.addWidget(okayButton)
        buttonRow.addWidget(cancelButton)
        if showForgotButton:
            forgotButton = QPushButton(QIcon.fromTheme('edit-undo'), "Forgot Passphrase", self)
            forgotButton.clicked.connect(lambda: self.buttonClicked(constants.BUTTON_FORGOT))
            buttonRow.addWidget(forgotButton)

        layout = QVBoxLayout()
        layout.addStretch(1)
        layout.addWidget(promptLabel)
        layout.addWidget(self.passphraseInput)
        layout.addLayout(buttonRow)
        layout.addStretch(1)
        self.setLayout(layout)

    def buttonClicked(self, button):
        """Record the typed passphrase and the closing button, then close."""
        self.passphrase = self.passphraseInput.text()
        self.clickedButton = button
        self.close()

    @staticmethod
    def getPassphrase(verify=False, showForgotButton=True):
        """Show the dialog modally; return (passphrase, clicked button)."""
        dialog = QPassphraseDialog(verify, showForgotButton)
        dialog.exec_()
        return dialog.passphrase, dialog.clickedButton
| lgpl-3.0 |
DanHickstein/EMpy | examples/ex_RCWA_3.py | 4 | 1283 | """Rigorous Coupled Wave Analysis example."""
import numpy
import pylab
import EMpy
from EMpy.materials import IsotropicMaterial, RefractiveIndex
# Incidence geometry (angles converted from degrees to radians).
alpha = EMpy.utils.deg2rad(30.)
delta = EMpy.utils.deg2rad(45.)
psi = EMpy.utils.deg2rad(0.)  # TE
phi = EMpy.utils.deg2rad(90.)

# Wavelength sweep and grating parameters.
wls = numpy.linspace(1.5495e-6, 1.550e-6, 101)
LAMBDA = 1.e-6  # grating periodicity
n = 3  # orders of diffraction

# Materials: air-like top, high-index bottom.
Top = IsotropicMaterial('Top', n0=RefractiveIndex(n0_const=1.))
Bottom = IsotropicMaterial('Bottom', n0=RefractiveIndex(n0_const=3.47))

# Stack: semi-infinite top, a binary grating layer, semi-infinite bottom.
multilayer = EMpy.utils.Multilayer([
    EMpy.utils.Layer(Top, numpy.inf),
    EMpy.utils.BinaryGrating(Top, Bottom, .4, LAMBDA, .01),
    EMpy.utils.Layer(Bottom, numpy.inf),
])

solution = EMpy.RCWA.IsotropicRCWA(multilayer, alpha, delta, psi, phi, n).solve(wls)

# Plot the DE1/DE3 diffraction efficiencies for orders 0 and +/-1
# (presumably reflected vs transmitted -- confirm against EMpy docs).
pylab.plot(wls, solution.DE1[n, :], 'ko-',
           wls, solution.DE3[n, :], 'ro-',
           wls, solution.DE1[n - 1, :], 'kx-',
           wls, solution.DE3[n - 1, :], 'rx-',
           wls, solution.DE1[n + 1, :], 'k.-',
           wls, solution.DE3[n + 1, :], 'r.-',
           )
pylab.xlabel('wavelength /m')
pylab.ylabel('diffraction efficiency')
pylab.legend(('DE1:0', 'DE3:0', 'DE1:-1', 'DE3:-1', 'DE1:+1', 'DE3:+1'))
pylab.axis('tight')
pylab.ylim([0, 1])
pylab.show()
| mit |
adit-chandra/tensorflow | tensorflow/python/data/experimental/kernel_tests/optimization/map_and_batch_fusion_test.py | 3 | 1753 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for the `MapAndBatchFusion` optimization."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.data.experimental.ops import testing
from tensorflow.python.data.kernel_tests import test_base
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.framework import test_util
from tensorflow.python.platform import test
@test_util.run_all_in_graph_and_eager_modes
class MapAndBatchFusionTest(test_base.DatasetTestBase):

  def testMapAndBatchFusion(self):
    """map(...).batch(...) should be rewritten into one MapAndBatch op."""
    # assert_next fails the pipeline if the optimized graph does not start
    # with the fused MapAndBatch transformation.
    dataset = dataset_ops.Dataset.range(10).apply(
        testing.assert_next(
            ["MapAndBatch"])).map(lambda x: x * x).batch(10)
    options = dataset_ops.Options()
    # Disable the default rewrites so only the fusion under test runs.
    options.experimental_optimization.apply_default_optimizations = False
    options.experimental_optimization.map_and_batch_fusion = True
    dataset = dataset.with_options(options)
    self.assertDatasetProduces(
        dataset, expected_output=[[x * x for x in range(10)]])
if __name__ == "__main__":
  # Defer to the TensorFlow test runner when executed as a script.
  test.main()
| apache-2.0 |
midma101/m0du1ar | .venv/lib/python2.7/site-packages/jinja2/bccache.py | 117 | 10623 | # -*- coding: utf-8 -*-
"""
jinja2.bccache
~~~~~~~~~~~~~~
This module implements the bytecode cache system Jinja is optionally
using. This is useful if you have very complex template situations and
the compiliation of all those templates slow down your application too
much.
Situations where this is useful are often forking web applications that
are initialized on the first request.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD.
"""
from os import path, listdir
import sys
import marshal
import tempfile
import cPickle as pickle
import fnmatch
try:
from hashlib import sha1
except ImportError:
from sha import new as sha1
from jinja2.utils import open_if_exists
# marshal works better on 3.x, one hack less required
if sys.version_info > (3, 0):
    from io import BytesIO
    marshal_dump = marshal.dump
    marshal_load = marshal.load
else:
    from cStringIO import StringIO as BytesIO

    # On Python 2, marshal.dump/load only accept real ``file`` objects, so
    # fall back to dumps/loads plus explicit read/write for file-likes.
    def marshal_dump(code, f):
        if isinstance(f, file):
            marshal.dump(code, f)
        else:
            f.write(marshal.dumps(code))

    def marshal_load(f):
        if isinstance(f, file):
            return marshal.load(f)
        return marshal.loads(f.read())

# Version of the cache-file layout; bump when the on-disk format changes.
bc_version = 2

# magic version used to only change with new jinja versions. With 2.6
# we change this to also take Python version changes into account. The
# reason for this is that Python tends to segfault if fed earlier bytecode
# versions because someone thought it would be a good idea to reuse opcodes
# or make Python incompatible with earlier versions.
bc_magic = 'j2'.encode('ascii') + \
    pickle.dumps(bc_version, 2) + \
    pickle.dumps((sys.version_info[0] << 24) | sys.version_info[1])
class Bucket(object):
    """Buckets are used to store the bytecode for one template.  It's created
    and initialized by the bytecode cache and passed to the loading functions.

    The buckets get an internal checksum from the cache assigned and use this
    to automatically reject outdated cache material.  Individual bytecode
    cache subclasses don't have to care about cache invalidation.
    """

    def __init__(self, environment, key, checksum):
        self.environment = environment
        self.key = key  # unique cache key computed by the cache
        self.checksum = checksum  # source checksum used for invalidation
        self.reset()

    def reset(self):
        """Resets the bucket (unloads the bytecode)."""
        self.code = None

    def load_bytecode(self, f):
        """Loads bytecode from a file or file like object."""
        # make sure the magic header is correct
        magic = f.read(len(bc_magic))
        if magic != bc_magic:
            # Wrong Jinja/Python version, or not a cache file at all:
            # treat it as a cache miss rather than an error.
            self.reset()
            return
        # the source code of the file changed, we need to reload
        checksum = pickle.load(f)
        if self.checksum != checksum:
            self.reset()
            return
        self.code = marshal_load(f)

    def write_bytecode(self, f):
        """Dump the bytecode into the file or file like object passed."""
        if self.code is None:
            raise TypeError('can\'t write empty bucket')
        # Layout: magic header, pickled checksum, marshalled code object.
        f.write(bc_magic)
        pickle.dump(self.checksum, f, 2)
        marshal_dump(self.code, f)

    def bytecode_from_string(self, string):
        """Load bytecode from a string."""
        self.load_bytecode(BytesIO(string))

    def bytecode_to_string(self):
        """Return the bytecode as string."""
        out = BytesIO()
        self.write_bytecode(out)
        return out.getvalue()
class BytecodeCache(object):
    """To implement your own bytecode cache you have to subclass this class
    and override :meth:`load_bytecode` and :meth:`dump_bytecode`.  Both of
    these methods are passed a :class:`~jinja2.bccache.Bucket`.

    A very basic bytecode cache that saves the bytecode on the file system::

        from os import path

        class MyCache(BytecodeCache):

            def __init__(self, directory):
                self.directory = directory

            def load_bytecode(self, bucket):
                filename = path.join(self.directory, bucket.key)
                if path.exists(filename):
                    with open(filename, 'rb') as f:
                        bucket.load_bytecode(f)

            def dump_bytecode(self, bucket):
                filename = path.join(self.directory, bucket.key)
                with open(filename, 'wb') as f:
                    bucket.write_bytecode(f)

    A more advanced version of a filesystem based bytecode cache is part of
    Jinja2.
    """

    def load_bytecode(self, bucket):
        """Subclasses have to override this method to load bytecode into a
        bucket.  If they are not able to find code in the cache for the
        bucket, it must not do anything.
        """
        raise NotImplementedError()

    def dump_bytecode(self, bucket):
        """Subclasses have to override this method to write the bytecode
        from a bucket back to the cache.  If it unable to do so it must not
        fail silently but raise an exception.
        """
        raise NotImplementedError()

    def clear(self):
        """Clears the cache.  This method is not used by Jinja2 but should be
        implemented to allow applications to clear the bytecode cache used
        by a particular environment.
        """

    def get_cache_key(self, name, filename=None):
        """Returns the unique hash key for this template name."""
        hash = sha1(name.encode('utf-8'))
        if filename is not None:
            # '|' separates name and filename in the hashed key material.
            filename = '|' + filename
            # NOTE: ``unicode`` only exists on Python 2; this branch is
            # py2-specific text-to-bytes normalization.
            if isinstance(filename, unicode):
                filename = filename.encode('utf-8')
            hash.update(filename)
        return hash.hexdigest()

    def get_source_checksum(self, source):
        """Returns a checksum for the source."""
        return sha1(source.encode('utf-8')).hexdigest()

    def get_bucket(self, environment, name, filename, source):
        """Return a cache bucket for the given template.  All arguments are
        mandatory but filename may be `None`.
        """
        key = self.get_cache_key(name, filename)
        checksum = self.get_source_checksum(source)
        bucket = Bucket(environment, key, checksum)
        # Eagerly attempt to populate the bucket from the backing store.
        self.load_bytecode(bucket)
        return bucket

    def set_bucket(self, bucket):
        """Put the bucket into the cache."""
        self.dump_bytecode(bucket)
class FileSystemBytecodeCache(BytecodeCache):
    """A bytecode cache that stores bytecode on the filesystem.  It accepts
    two arguments: The directory where the cache items are stored and a
    pattern string that is used to build the filename.

    If no directory is specified the system temporary items folder is used.

    The pattern can be used to have multiple separate caches operate on the
    same directory.  The default pattern is ``'__jinja2_%s.cache'``.  ``%s``
    is replaced with the cache key.

    >>> bcc = FileSystemBytecodeCache('/tmp/jinja_cache', '%s.cache')

    This bytecode cache supports clearing of the cache using the clear method.
    """

    def __init__(self, directory=None, pattern='__jinja2_%s.cache'):
        # Fall back to the system temp directory when none was given.
        self.directory = tempfile.gettempdir() if directory is None else directory
        self.pattern = pattern

    def _get_cache_filename(self, bucket):
        """Map *bucket* to its on-disk cache file path."""
        return path.join(self.directory, self.pattern % bucket.key)

    def load_bytecode(self, bucket):
        # open_if_exists returns None for a missing file: a cache miss is
        # not an error, simply leave the bucket untouched.
        cache_file = open_if_exists(self._get_cache_filename(bucket), 'rb')
        if cache_file is None:
            return
        try:
            bucket.load_bytecode(cache_file)
        finally:
            cache_file.close()

    def dump_bytecode(self, bucket):
        cache_file = open(self._get_cache_filename(bucket), 'wb')
        try:
            bucket.write_bytecode(cache_file)
        finally:
            cache_file.close()

    def clear(self):
        # imported lazily here because google app-engine doesn't support
        # write access on the file system and the function does not exist
        # normally.
        from os import remove
        stale = fnmatch.filter(listdir(self.directory), self.pattern % '*')
        for name in stale:
            try:
                remove(path.join(self.directory, name))
            except OSError:
                pass
class MemcachedBytecodeCache(BytecodeCache):
    """This class implements a bytecode cache that uses a memcache cache for
    storing the information.  It does not enforce a specific memcache library
    (tummy's memcache or cmemcache) but will accept any class that provides
    the minimal interface required.

    Libraries compatible with this class:

    -   `werkzeug <http://werkzeug.pocoo.org/>`_.contrib.cache
    -   `python-memcached <http://www.tummy.com/Community/software/python-memcached/>`_
    -   `cmemcache <http://gijsbert.org/cmemcache/>`_

    (Unfortunately the django cache interface is not compatible because it
    does not support storing binary data, only unicode.  You can however pass
    the underlying cache client to the bytecode cache which is available
    as `django.core.cache.cache._client`.)

    The minimal interface for the client passed to the constructor is this:

    .. class:: MinimalClientInterface

        .. method:: set(key, value[, timeout])

            Stores the bytecode in the cache.  `value` is a string and
            `timeout` the timeout of the key.  If timeout is not provided
            a default timeout or no timeout should be assumed, if it's
            provided it's an integer with the number of seconds the cache
            item should exist.

        .. method:: get(key)

            Returns the value for the cache key.  If the item does not
            exist in the cache the return value must be `None`.

    The other arguments to the constructor are the prefix for all keys that
    is added before the actual cache key and the timeout for the bytecode in
    the cache system.  We recommend a high (or no) timeout.

    This bytecode cache does not support clearing of used items in the cache.
    The clear method is a no-operation function.
    """

    def __init__(self, client, prefix='jinja2/bytecode/', timeout=None):
        self.client = client
        self.prefix = prefix
        self.timeout = timeout

    def load_bytecode(self, bucket):
        # A missing key returns None -> cache miss, leave the bucket empty.
        code = self.client.get(self.prefix + bucket.key)
        if code is not None:
            bucket.bytecode_from_string(code)

    def dump_bytecode(self, bucket):
        # Only append the timeout argument when one was configured, so
        # clients whose ``set`` lacks a timeout parameter keep working.
        args = (self.prefix + bucket.key, bucket.bytecode_to_string())
        if self.timeout is not None:
            args += (self.timeout,)
        self.client.set(*args)
| mit |
piller-imre/exprail-python | tests/test_integer_list_grammar.py | 1 | 5219 | import unittest
from exprail.classifier import Classifier
from exprail.grammar import Grammar
from exprail.parser import Parser
from exprail.source import SourceString
class ListClassifier(Classifier):
    """Classify integer list tokens"""

    @staticmethod
    def is_in_class(token_class, token):
        """
        Decide whether *token* belongs to *token_class*.

        :param token_class: '0-9', '[', ']', ',' or 'ws'
        :param token: the token to classify
        :return: True when the token is in the class, else False
        """
        # The empty token never matches anything.
        if token.type == 'empty':
            return False
        # Single-character classes ('[', ']', ',') match the literal itself.
        if len(token_class) == 1:
            return token.value == token_class
        if token_class == '0-9':
            return token.value.isdigit()
        if token_class == 'ws':
            return token.value == ' '
        raise ValueError('Unexpected token class!')
class ListParser(Parser):
    """Parse the input text and collect the integers of the list."""

    def __init__(self, grammar, source):
        """Initialize the integer list parser."""
        super(ListParser, self).__init__(grammar, source)
        self._result = []

    @property
    def result(self):
        # Integers collected so far, in input order.
        return self._result

    def operate(self, operation, token):
        """On 'add', convert the buffered digits to an int and store it."""
        if operation == 'save':
            # Digits are accumulated by the grammar; nothing to do here.
            return
        if operation == 'add':
            digits = ''.join(self._stacks[''])
            self._result.append(int(digits))
        else:
            raise ValueError('The "{}" is an invalid operation!'.format(operation))

    def show_error(self, message, token):
        """Abort the parsing process by raising the error message."""
        raise ValueError(message)
class IntegerListTest(unittest.TestCase):
    """Integer list grammar tests with examples"""

    # IMPROVEMENT: every test previously rebuilt the classifier/grammar/
    # parser with eight near-identical copy-pasted bodies; the construction
    # and the error-assertion pattern are factored into two helpers.  The
    # public test method names are unchanged.

    def _parse(self, text):
        """Build a parser for *text*, run it and return the parser."""
        grammar = Grammar(filename='grammars/integer_list.grammar',
                          classifier=ListClassifier())
        parser = ListParser(grammar, SourceString(text))
        parser.parse()
        return parser

    def _assert_parse_error(self, text, expected_message):
        """Assert that parsing *text* raises ValueError(expected_message)."""
        try:
            self._parse(text)
        except ValueError as error:
            self.assertEqual(str(error), expected_message)
        else:
            self.fail('The expected ValueError has not raised!')

    def test_empty_source(self):
        self._assert_parse_error('', 'Missing [ character!')

    def test_invalid_leading_character(self):
        self._assert_parse_error('invalid', 'Missing [ character!')

    def test_empty_list(self):
        self.assertEqual(self._parse('[]').result, [])

    def test_single_integer(self):
        self.assertEqual(self._parse('[1234]').result, [1234])

    def test_multiple_integers(self):
        self.assertEqual(self._parse('[12, 34, 56]').result, [12, 34, 56])

    def test_unexpected_character(self):
        self._assert_parse_error('[12, 34o]', 'Unexpected character!')

    def test_missing_space(self):
        self._assert_parse_error('[12, 34,56]', 'Missing space!')

    def test_missing_integer(self):
        self._assert_parse_error('[12, 34, , 78]', 'An integer expected!')
| mit |
OpenPymeMx/OCB | addons/sale_crm/wizard/crm_make_sale.py | 36 | 7564 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
from openerp.tools.translate import _
class crm_make_sale(osv.osv_memory):
    """Wizard that converts CRM leads/opportunities into sale quotations."""
    _name = "crm.make.sale"
    _description = "Make sales"

    def _selectPartner(self, cr, uid, context=None):
        """Default value for the ``partner_id`` field.

        Pre-fills the wizard with the partner of the active crm.lead.

        :return: id of the lead's partner, or False when unavailable.
        """
        if context is None:
            context = {}
        lead_obj = self.pool.get('crm.lead')
        active_id = context and context.get('active_id', False) or False
        if not active_id:
            return False
        lead = lead_obj.read(cr, uid, active_id, ['partner_id'], context=context)
        return lead['partner_id'][0] if lead['partner_id'] else False

    def view_init(self, cr, uid, fields_list, context=None):
        # No extra initialization needed; kept so subclasses can override.
        return super(crm_make_sale, self).view_init(cr, uid, fields_list, context=context)

    def makeOrder(self, cr, uid, ids, context=None):
        """Create one sale quotation per selected opportunity.

        :param ids: ids of the wizard records being processed.
        :return: an act_window action showing the created quotation(s), or a
                 window-close action when nothing was created.
        """
        if context is None:
            context = {}
        # update context: if come from phonecall, default state values can make the quote crash lp:1017353
        context.pop('default_state', False)

        case_obj = self.pool.get('crm.lead')
        sale_obj = self.pool.get('sale.order')
        partner_obj = self.pool.get('res.partner')
        data = context and context.get('active_ids', []) or []

        # BUG FIX: ``new_ids`` used to be (re)initialized inside the loop
        # over wizard records, which raised a NameError when ``ids`` was
        # empty and silently dropped quotations created by all but the last
        # wizard record.  Initialize it once before the loop.
        new_ids = []
        for make in self.browse(cr, uid, ids, context=context):
            partner = make.partner_id
            partner_addr = partner_obj.address_get(cr, uid, [partner.id],
                    ['default', 'invoice', 'delivery', 'contact'])
            pricelist = partner.property_product_pricelist.id
            fpos = partner.property_account_position and partner.property_account_position.id or False
            payment_term = partner.property_payment_term and partner.property_payment_term.id or False
            for case in case_obj.browse(cr, uid, data, context=context):
                if not partner and case.partner_id:
                    # Fall back to the opportunity's own partner.
                    partner = case.partner_id
                    fpos = partner.property_account_position and partner.property_account_position.id or False
                    payment_term = partner.property_payment_term and partner.property_payment_term.id or False
                    partner_addr = partner_obj.address_get(cr, uid, [partner.id],
                            ['default', 'invoice', 'delivery', 'contact'])
                    pricelist = partner.property_product_pricelist.id
                if False in partner_addr.values():
                    raise osv.except_osv(_('Insufficient Data!'), _('No address(es) defined for this customer.'))
                vals = {
                    'origin': _('Opportunity: %s') % str(case.id),
                    'section_id': case.section_id and case.section_id.id or False,
                    'categ_ids': [(6, 0, [categ_id.id for categ_id in case.categ_ids])],
                    'shop_id': make.shop_id.id,
                    'partner_id': partner.id,
                    'pricelist_id': pricelist,
                    'partner_invoice_id': partner_addr['invoice'],
                    'partner_shipping_id': partner_addr['delivery'],
                    'date_order': fields.date.context_today(self, cr, uid, context=context),
                    'fiscal_position': fpos,
                    'payment_term': payment_term,
                }
                if partner.id:
                    vals['user_id'] = partner.user_id and partner.user_id.id or uid
                new_id = sale_obj.create(cr, uid, vals, context=context)
                sale_order = sale_obj.browse(cr, uid, new_id, context=context)
                # Link the opportunity to its quotation and log the conversion.
                case_obj.write(cr, uid, [case.id], {'ref': 'sale.order,%s' % new_id})
                new_ids.append(new_id)
                message = _("Opportunity has been <b>converted</b> to the quotation <em>%s</em>.") % (sale_order.name)
                case.message_post(body=message)
            if make.close:
                # Mark the opportunities as won when requested.
                case_obj.case_close(cr, uid, data)

        if not new_ids:
            return {'type': 'ir.actions.act_window_close'}
        if len(new_ids) <= 1:
            value = {
                'domain': str([('id', 'in', new_ids)]),
                'view_type': 'form',
                'view_mode': 'form',
                'res_model': 'sale.order',
                'view_id': False,
                'type': 'ir.actions.act_window',
                'name': _('Quotation'),
                'res_id': new_ids and new_ids[0]
            }
        else:
            value = {
                'domain': str([('id', 'in', new_ids)]),
                'view_type': 'form',
                'view_mode': 'tree,form',
                'res_model': 'sale.order',
                'view_id': False,
                'type': 'ir.actions.act_window',
                'name': _('Quotation'),
                'res_id': new_ids
            }
        return value

    def _get_shop_id(self, cr, uid, ids, context=None):
        """Default shop: the first shop of the current user's company."""
        cmpny_id = self.pool.get('res.users')._get_company(cr, uid, context=context)
        shop = self.pool.get('sale.shop').search(cr, uid, [('company_id', '=', cmpny_id)])
        return shop and shop[0] or False

    _columns = {
        'shop_id': fields.many2one('sale.shop', 'Shop', required=True),
        'partner_id': fields.many2one('res.partner', 'Customer', required=True, domain=[('customer','=',True)]),
        'close': fields.boolean('Mark Won', help='Check this to close the opportunity after having created the sales order.'),
    }
    _defaults = {
        'shop_id': _get_shop_id,
        'close': False,
        'partner_id': _selectPartner,
    }
crm_make_sale()

# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
raymondxyang/tensorflow | tensorflow/contrib/learn/python/learn/estimators/debug_test.py | 46 | 32817 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for Debug estimators."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import functools
import operator
import tempfile
import numpy as np
from tensorflow.contrib.layers.python.layers import feature_column
from tensorflow.contrib.layers.python.layers import feature_column_ops
from tensorflow.contrib.learn.python.learn import experiment
from tensorflow.contrib.learn.python.learn.datasets import base
from tensorflow.contrib.learn.python.learn.estimators import _sklearn
from tensorflow.contrib.learn.python.learn.estimators import debug
from tensorflow.contrib.learn.python.learn.estimators import estimator_test_utils
from tensorflow.contrib.learn.python.learn.estimators import run_config
from tensorflow.contrib.learn.python.learn.estimators import test_data
from tensorflow.contrib.learn.python.learn.metric_spec import MetricSpec
from tensorflow.contrib.metrics.python.ops import metric_ops
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.platform import test
from tensorflow.python.training import input as input_lib
NUM_EXAMPLES = 100  # Rows in the synthetic dataset built for each test.
N_CLASSES = 5  # Cardinality of multiclass labels.
LABEL_DIMENSION = 3  # Dimensionality of regression labels.
def _train_test_split(features_and_labels):
features, labels = features_and_labels
train_set = (features[:int(len(features) / 2)], labels[:int(len(features) / 2)])
test_set = (features[int(len(features) / 2):], labels[int(len(features) / 2):])
return train_set, test_set
def _input_fn_builder(features, labels):
  """Return an Estimator input_fn closing over `features` and `labels`.

  `labels` may be None (prediction mode); it is passed through unchanged in
  that case instead of being wrapped in a constant tensor.
  """
  def input_fn():
    feature_dict = {'features': constant_op.constant(features)}
    my_labels = labels
    if my_labels is not None:
      my_labels = constant_op.constant(my_labels)
    return feature_dict, my_labels

  return input_fn
class DebugClassifierTest(test.TestCase):

  def setUp(self):
    # Fixed seed so the sampled features/labels (and therefore the majority
    # class and empirical class distribution) are reproducible across runs.
    np.random.seed(100)
    self.features = np.random.rand(NUM_EXAMPLES, 5)
    self.labels = np.random.choice(
        range(N_CLASSES), p=[0.1, 0.3, 0.4, 0.1, 0.1], size=NUM_EXAMPLES)
    self.binary_labels = np.random.choice(
        range(2), p=[0.2, 0.8], size=NUM_EXAMPLES)
    self.binary_float_labels = np.random.choice(
        range(2), p=[0.2, 0.8], size=NUM_EXAMPLES)
  def testPredict(self):
    """Tests that DebugClassifier outputs the majority class."""
    (train_features, train_labels), (test_features,
                                     test_labels) = _train_test_split(
                                         [self.features, self.labels])
    # The Debug classifier ignores features: it should predict the most
    # frequent label of the training half for every test row.
    majority_class, _ = max(collections.Counter(train_labels).items(),
                            key=operator.itemgetter(1))
    expected_prediction = np.vstack(
        [[majority_class] for _ in range(test_labels.shape[0])])
    classifier = debug.DebugClassifier(n_classes=N_CLASSES)
    classifier.fit(input_fn=_input_fn_builder(train_features, train_labels),
                   steps=50)
    pred = classifier.predict_classes(input_fn=_input_fn_builder(test_features,
                                                                 None))
    self.assertAllEqual(expected_prediction, np.vstack(pred))
def testPredictBinary(self):
"""Same as above for binary predictions."""
(train_features, train_labels), (test_features,
test_labels) = _train_test_split(
[self.features, self.binary_labels])
majority_class, _ = max(collections.Counter(train_labels).items(),
key=operator.itemgetter(1))
expected_prediction = np.vstack(
[[majority_class] for _ in range(test_labels.shape[0])])
classifier = debug.DebugClassifier(n_classes=2)
classifier.fit(input_fn=_input_fn_builder(train_features, train_labels),
steps=50)
pred = classifier.predict_classes(input_fn=_input_fn_builder(test_features,
None))
self.assertAllEqual(expected_prediction, np.vstack(pred))
(train_features, train_labels), (
test_features, test_labels) = _train_test_split(
[self.features, self.binary_float_labels])
majority_class, _ = max(collections.Counter(train_labels).items(),
key=operator.itemgetter(1))
expected_prediction = np.vstack(
[[majority_class] for _ in range(test_labels.shape[0])])
classifier = debug.DebugClassifier(n_classes=2)
classifier.fit(input_fn=_input_fn_builder(train_features, train_labels),
steps=50)
pred = classifier.predict_classes(input_fn=_input_fn_builder(test_features,
None))
self.assertAllEqual(expected_prediction, np.vstack(pred))
def testPredictProba(self):
"""Tests that DebugClassifier outputs observed class distribution."""
(train_features, train_labels), (test_features,
test_labels) = _train_test_split(
[self.features, self.labels])
class_distribution = np.zeros((1, N_CLASSES))
for label in train_labels:
class_distribution[0, label] += 1
class_distribution /= len(train_labels)
expected_prediction = np.vstack(
[class_distribution for _ in range(test_labels.shape[0])])
classifier = debug.DebugClassifier(n_classes=N_CLASSES)
classifier.fit(input_fn=_input_fn_builder(train_features, train_labels),
steps=50)
pred = classifier.predict_proba(
input_fn=_input_fn_builder(test_features, None))
self.assertAllClose(expected_prediction, np.vstack(pred), atol=0.1)
def testPredictProbaBinary(self):
"""Same as above but for binary classification."""
(train_features, train_labels), (test_features,
test_labels) = _train_test_split(
[self.features, self.binary_labels])
class_distribution = np.zeros((1, 2))
for label in train_labels:
class_distribution[0, label] += 1
class_distribution /= len(train_labels)
expected_prediction = np.vstack(
[class_distribution for _ in range(test_labels.shape[0])])
classifier = debug.DebugClassifier(n_classes=2)
classifier.fit(input_fn=_input_fn_builder(train_features, train_labels),
steps=50)
pred = classifier.predict_proba(
input_fn=_input_fn_builder(test_features, None))
self.assertAllClose(expected_prediction, np.vstack(pred), atol=0.1)
(train_features, train_labels), (
test_features, test_labels) = _train_test_split(
[self.features, self.binary_float_labels])
class_distribution = np.zeros((1, 2))
for label in train_labels:
class_distribution[0, int(label)] += 1
class_distribution /= len(train_labels)
expected_prediction = np.vstack(
[class_distribution for _ in range(test_labels.shape[0])])
classifier = debug.DebugClassifier(n_classes=2)
classifier.fit(input_fn=_input_fn_builder(train_features, train_labels),
steps=50)
pred = classifier.predict_proba(
input_fn=_input_fn_builder(test_features, None))
self.assertAllClose(expected_prediction, np.vstack(pred), atol=0.1)
def testExperimentIntegration(self):
exp = experiment.Experiment(
estimator=debug.DebugClassifier(n_classes=3),
train_input_fn=test_data.iris_input_multiclass_fn,
eval_input_fn=test_data.iris_input_multiclass_fn)
exp.test()
def _assertInRange(self, expected_min, expected_max, actual):
self.assertLessEqual(expected_min, actual)
self.assertGreaterEqual(expected_max, actual)
def testEstimatorContract(self):
estimator_test_utils.assert_estimator_contract(self, debug.DebugClassifier)
def testLogisticRegression_MatrixData(self):
"""Tests binary classification using matrix data as input."""
classifier = debug.DebugClassifier(
config=run_config.RunConfig(tf_random_seed=1))
input_fn = test_data.iris_input_logistic_fn
classifier.fit(input_fn=input_fn, steps=5)
scores = classifier.evaluate(input_fn=input_fn, steps=1)
self._assertInRange(0.0, 1.0, scores['accuracy'])
self.assertIn('loss', scores)
def testLogisticRegression_MatrixData_Labels1D(self):
"""Same as the last test, but label shape is [100] instead of [100, 1]."""
def _input_fn():
iris = test_data.prepare_iris_data_for_logistic_regression()
return {
'feature': constant_op.constant(
iris.data, dtype=dtypes.float32)
}, constant_op.constant(
iris.target, shape=[100], dtype=dtypes.int32)
classifier = debug.DebugClassifier(config=run_config.RunConfig(
tf_random_seed=1))
classifier.fit(input_fn=_input_fn, steps=5)
scores = classifier.evaluate(input_fn=_input_fn, steps=1)
self.assertIn('loss', scores)
def testLogisticRegression_NpMatrixData(self):
"""Tests binary classification using numpy matrix data as input."""
iris = test_data.prepare_iris_data_for_logistic_regression()
train_x = iris.data
train_y = iris.target
classifier = debug.DebugClassifier(
config=run_config.RunConfig(tf_random_seed=1))
classifier.fit(x=train_x, y=train_y, steps=5)
scores = classifier.evaluate(x=train_x, y=train_y, steps=1)
self._assertInRange(0.0, 1.0, scores['accuracy'])
def _assertBinaryPredictions(self, expected_len, predictions):
self.assertEqual(expected_len, len(predictions))
for prediction in predictions:
self.assertIn(prediction, (0, 1))
def _assertProbabilities(self, expected_batch_size, expected_n_classes,
probabilities):
self.assertEqual(expected_batch_size, len(probabilities))
for b in range(expected_batch_size):
self.assertEqual(expected_n_classes, len(probabilities[b]))
for i in range(expected_n_classes):
self._assertInRange(0.0, 1.0, probabilities[b][i])
def testLogisticRegression_TensorData(self):
"""Tests binary classification using tensor data as input."""
def _input_fn(num_epochs=None):
features = {
'age':
input_lib.limit_epochs(
constant_op.constant([[.8], [0.2], [.1]]),
num_epochs=num_epochs),
'language':
sparse_tensor.SparseTensor(
values=input_lib.limit_epochs(
['en', 'fr', 'zh'], num_epochs=num_epochs),
indices=[[0, 0], [0, 1], [2, 0]],
dense_shape=[3, 2])
}
return features, constant_op.constant([[1], [0], [0]], dtype=dtypes.int32)
classifier = debug.DebugClassifier(n_classes=2)
classifier.fit(input_fn=_input_fn, steps=50)
scores = classifier.evaluate(input_fn=_input_fn, steps=1)
self._assertInRange(0.0, 1.0, scores['accuracy'])
self.assertIn('loss', scores)
predict_input_fn = functools.partial(_input_fn, num_epochs=1)
predictions = list(classifier.predict_classes(input_fn=predict_input_fn))
self._assertBinaryPredictions(3, predictions)
def testLogisticRegression_FloatLabel(self):
"""Tests binary classification with float labels."""
def _input_fn_float_label(num_epochs=None):
features = {
'age':
input_lib.limit_epochs(
constant_op.constant([[50], [20], [10]]),
num_epochs=num_epochs),
'language':
sparse_tensor.SparseTensor(
values=input_lib.limit_epochs(
['en', 'fr', 'zh'], num_epochs=num_epochs),
indices=[[0, 0], [0, 1], [2, 0]],
dense_shape=[3, 2])
}
labels = constant_op.constant([[0.8], [0.], [0.2]], dtype=dtypes.float32)
return features, labels
classifier = debug.DebugClassifier(n_classes=2)
classifier.fit(input_fn=_input_fn_float_label, steps=50)
predict_input_fn = functools.partial(_input_fn_float_label, num_epochs=1)
predictions = list(classifier.predict_classes(input_fn=predict_input_fn))
self._assertBinaryPredictions(3, predictions)
predictions_proba = list(
classifier.predict_proba(input_fn=predict_input_fn))
self._assertProbabilities(3, 2, predictions_proba)
def testMultiClass_MatrixData(self):
"""Tests multi-class classification using matrix data as input."""
classifier = debug.DebugClassifier(n_classes=3)
input_fn = test_data.iris_input_multiclass_fn
classifier.fit(input_fn=input_fn, steps=200)
scores = classifier.evaluate(input_fn=input_fn, steps=1)
self._assertInRange(0.0, 1.0, scores['accuracy'])
self.assertIn('loss', scores)
def testMultiClass_MatrixData_Labels1D(self):
"""Same as the last test, but label shape is [150] instead of [150, 1]."""
def _input_fn():
iris = base.load_iris()
return {
'feature': constant_op.constant(
iris.data, dtype=dtypes.float32)
}, constant_op.constant(
iris.target, shape=[150], dtype=dtypes.int32)
classifier = debug.DebugClassifier(n_classes=3)
classifier.fit(input_fn=_input_fn, steps=200)
scores = classifier.evaluate(input_fn=_input_fn, steps=1)
self._assertInRange(0.0, 1.0, scores['accuracy'])
def testMultiClass_NpMatrixData(self):
"""Tests multi-class classification using numpy matrix data as input."""
iris = base.load_iris()
train_x = iris.data
train_y = iris.target
classifier = debug.DebugClassifier(n_classes=3)
classifier.fit(x=train_x, y=train_y, steps=200)
scores = classifier.evaluate(x=train_x, y=train_y, steps=1)
self._assertInRange(0.0, 1.0, scores['accuracy'])
def testMultiClass_StringLabel(self):
"""Tests multi-class classification with string labels."""
def _input_fn_train():
labels = constant_op.constant([['foo'], ['bar'], ['baz'], ['bar']])
features = {
'x': array_ops.ones(shape=[4, 1], dtype=dtypes.float32),
}
return features, labels
classifier = debug.DebugClassifier(
n_classes=3, label_keys=['foo', 'bar', 'baz'])
classifier.fit(input_fn=_input_fn_train, steps=5)
scores = classifier.evaluate(input_fn=_input_fn_train, steps=1)
self.assertIn('loss', scores)
def testLoss(self):
"""Tests loss calculation."""
def _input_fn_train():
# Create 4 rows, one of them (y = x), three of them (y=Not(x))
# The logistic prediction should be (y = 0.25).
labels = constant_op.constant([[1], [0], [0], [0]])
features = {'x': array_ops.ones(shape=[4, 1], dtype=dtypes.float32),}
return features, labels
classifier = debug.DebugClassifier(n_classes=2)
classifier.fit(input_fn=_input_fn_train, steps=5)
scores = classifier.evaluate(input_fn=_input_fn_train, steps=1)
self.assertIn('loss', scores)
def testLossWithWeights(self):
"""Tests loss calculation with weights."""
def _input_fn_train():
# 4 rows with equal weight, one of them (y = x), three of them (y=Not(x))
# The logistic prediction should be (y = 0.25).
labels = constant_op.constant([[1.], [0.], [0.], [0.]])
features = {
'x': array_ops.ones(
shape=[4, 1], dtype=dtypes.float32),
'w': constant_op.constant([[1.], [1.], [1.], [1.]])
}
return features, labels
def _input_fn_eval():
# 4 rows, with different weights.
labels = constant_op.constant([[1.], [0.], [0.], [0.]])
features = {
'x': array_ops.ones(
shape=[4, 1], dtype=dtypes.float32),
'w': constant_op.constant([[7.], [1.], [1.], [1.]])
}
return features, labels
classifier = debug.DebugClassifier(
weight_column_name='w',
n_classes=2,
config=run_config.RunConfig(tf_random_seed=1))
classifier.fit(input_fn=_input_fn_train, steps=5)
scores = classifier.evaluate(input_fn=_input_fn_eval, steps=1)
self.assertIn('loss', scores)
def testTrainWithWeights(self):
"""Tests training with given weight column."""
def _input_fn_train():
# Create 4 rows, one of them (y = x), three of them (y=Not(x))
# First row has more weight than others. Model should fit (y=x) better
# than (y=Not(x)) due to the relative higher weight of the first row.
labels = constant_op.constant([[1], [0], [0], [0]])
features = {
'x': array_ops.ones(
shape=[4, 1], dtype=dtypes.float32),
'w': constant_op.constant([[100.], [3.], [2.], [2.]])
}
return features, labels
def _input_fn_eval():
# Create 4 rows (y = x)
labels = constant_op.constant([[1], [1], [1], [1]])
features = {
'x': array_ops.ones(
shape=[4, 1], dtype=dtypes.float32),
'w': constant_op.constant([[1.], [1.], [1.], [1.]])
}
return features, labels
classifier = debug.DebugClassifier(weight_column_name='w')
classifier.fit(input_fn=_input_fn_train, steps=5)
scores = classifier.evaluate(input_fn=_input_fn_eval, steps=1)
self._assertInRange(0.0, 1.0, scores['accuracy'])
def testCustomMetrics(self):
"""Tests custom evaluation metrics."""
def _input_fn(num_epochs=None):
# Create 4 rows, one of them (y = x), three of them (y=Not(x))
labels = constant_op.constant([[1], [0], [0], [0]])
features = {
'x':
input_lib.limit_epochs(
array_ops.ones(
shape=[4, 1], dtype=dtypes.float32),
num_epochs=num_epochs),
}
return features, labels
def _my_metric_op(predictions, labels):
# For the case of binary classification, the 2nd column of "predictions"
# denotes the model predictions.
labels = math_ops.to_float(labels)
predictions = array_ops.strided_slice(
predictions, [0, 1], [-1, 2], end_mask=1)
labels = math_ops.cast(labels, predictions.dtype)
return math_ops.reduce_sum(math_ops.multiply(predictions, labels))
classifier = debug.DebugClassifier(
config=run_config.RunConfig(tf_random_seed=1))
classifier.fit(input_fn=_input_fn, steps=5)
scores = classifier.evaluate(
input_fn=_input_fn,
steps=5,
metrics={
'my_accuracy':
MetricSpec(
metric_fn=metric_ops.streaming_accuracy,
prediction_key='classes'),
'my_precision':
MetricSpec(
metric_fn=metric_ops.streaming_precision,
prediction_key='classes'),
'my_metric':
MetricSpec(
metric_fn=_my_metric_op, prediction_key='probabilities')
})
self.assertTrue(
set(['loss', 'my_accuracy', 'my_precision', 'my_metric']).issubset(
set(scores.keys())))
predict_input_fn = functools.partial(_input_fn, num_epochs=1)
predictions = np.array(
list(classifier.predict_classes(input_fn=predict_input_fn)))
self.assertEqual(
_sklearn.accuracy_score([1, 0, 0, 0], predictions),
scores['my_accuracy'])
# Test the case where the 2nd element of the key is neither "classes" nor
# "probabilities".
with self.assertRaisesRegexp(KeyError, 'bad_type'):
classifier.evaluate(
input_fn=_input_fn,
steps=5,
metrics={
'bad_name':
MetricSpec(
metric_fn=metric_ops.streaming_auc,
prediction_key='bad_type')
})
def testTrainSaveLoad(self):
"""Tests that insures you can save and reload a trained model."""
def _input_fn(num_epochs=None):
features = {
'age':
input_lib.limit_epochs(
constant_op.constant([[.8], [.2], [.1]]),
num_epochs=num_epochs),
'language':
sparse_tensor.SparseTensor(
values=input_lib.limit_epochs(
['en', 'fr', 'zh'], num_epochs=num_epochs),
indices=[[0, 0], [0, 1], [2, 0]],
dense_shape=[3, 2])
}
return features, constant_op.constant([[1], [0], [0]], dtype=dtypes.int32)
model_dir = tempfile.mkdtemp()
classifier = debug.DebugClassifier(
model_dir=model_dir,
n_classes=3,
config=run_config.RunConfig(tf_random_seed=1))
classifier.fit(input_fn=_input_fn, steps=5)
predict_input_fn = functools.partial(_input_fn, num_epochs=1)
predictions1 = classifier.predict_classes(input_fn=predict_input_fn)
del classifier
classifier2 = debug.DebugClassifier(
model_dir=model_dir,
n_classes=3,
config=run_config.RunConfig(tf_random_seed=1))
predictions2 = classifier2.predict_classes(input_fn=predict_input_fn)
self.assertEqual(list(predictions1), list(predictions2))
def testExport(self):
"""Tests export model for servo."""
def input_fn():
return {
'age':
constant_op.constant([1]),
'language':
sparse_tensor.SparseTensor(
values=['english'], indices=[[0, 0]], dense_shape=[1, 1])
}, constant_op.constant([[1]])
language = feature_column.sparse_column_with_hash_bucket('language', 100)
feature_columns = [
feature_column.real_valued_column('age'),
feature_column.embedding_column(
language, dimension=1)
]
classifier = debug.DebugClassifier(config=run_config.RunConfig(
tf_random_seed=1))
classifier.fit(input_fn=input_fn, steps=5)
def default_input_fn(unused_estimator, examples):
return feature_column_ops.parse_feature_columns_from_examples(
examples, feature_columns)
export_dir = tempfile.mkdtemp()
classifier.export(export_dir, input_fn=default_input_fn)
class DebugRegressorTest(test.TestCase):
def setUp(self):
np.random.seed(100)
self.features = np.random.rand(NUM_EXAMPLES, 5)
self.targets = np.random.rand(NUM_EXAMPLES, LABEL_DIMENSION)
def testPredictScores(self):
"""Tests that DebugRegressor outputs the mean target."""
(train_features, train_labels), (test_features,
test_labels) = _train_test_split(
[self.features, self.targets])
mean_target = np.mean(train_labels, 0)
expected_prediction = np.vstack(
[mean_target for _ in range(test_labels.shape[0])])
classifier = debug.DebugRegressor(label_dimension=LABEL_DIMENSION)
classifier.fit(
input_fn=_input_fn_builder(train_features, train_labels), steps=50)
pred = classifier.predict_scores(input_fn=_input_fn_builder(test_features,
None))
self.assertAllClose(expected_prediction, np.vstack(pred), atol=0.1)
def testExperimentIntegration(self):
exp = experiment.Experiment(
estimator=debug.DebugRegressor(),
train_input_fn=test_data.iris_input_logistic_fn,
eval_input_fn=test_data.iris_input_logistic_fn)
exp.test()
def testEstimatorContract(self):
estimator_test_utils.assert_estimator_contract(self, debug.DebugRegressor)
def testRegression_MatrixData(self):
"""Tests regression using matrix data as input."""
regressor = debug.DebugRegressor(
config=run_config.RunConfig(tf_random_seed=1))
input_fn = test_data.iris_input_logistic_fn
regressor.fit(input_fn=input_fn, steps=200)
scores = regressor.evaluate(input_fn=input_fn, steps=1)
self.assertIn('loss', scores)
def testRegression_MatrixData_Labels1D(self):
"""Same as the last test, but label shape is [100] instead of [100, 1]."""
def _input_fn():
iris = test_data.prepare_iris_data_for_logistic_regression()
return {
'feature': constant_op.constant(iris.data, dtype=dtypes.float32)
}, constant_op.constant(
iris.target, shape=[100], dtype=dtypes.int32)
regressor = debug.DebugRegressor(
config=run_config.RunConfig(tf_random_seed=1))
regressor.fit(input_fn=_input_fn, steps=200)
scores = regressor.evaluate(input_fn=_input_fn, steps=1)
self.assertIn('loss', scores)
def testRegression_NpMatrixData(self):
"""Tests binary classification using numpy matrix data as input."""
iris = test_data.prepare_iris_data_for_logistic_regression()
train_x = iris.data
train_y = iris.target
regressor = debug.DebugRegressor(
config=run_config.RunConfig(tf_random_seed=1))
regressor.fit(x=train_x, y=train_y, steps=200)
scores = regressor.evaluate(x=train_x, y=train_y, steps=1)
self.assertIn('loss', scores)
def testRegression_TensorData(self):
"""Tests regression using tensor data as input."""
def _input_fn(num_epochs=None):
features = {
'age':
input_lib.limit_epochs(
constant_op.constant([[.8], [.15], [0.]]),
num_epochs=num_epochs),
'language':
sparse_tensor.SparseTensor(
values=input_lib.limit_epochs(
['en', 'fr', 'zh'], num_epochs=num_epochs),
indices=[[0, 0], [0, 1], [2, 0]],
dense_shape=[3, 2])
}
return features, constant_op.constant([1., 0., 0.2], dtype=dtypes.float32)
regressor = debug.DebugRegressor(
config=run_config.RunConfig(tf_random_seed=1))
regressor.fit(input_fn=_input_fn, steps=200)
scores = regressor.evaluate(input_fn=_input_fn, steps=1)
self.assertIn('loss', scores)
def testLoss(self):
"""Tests loss calculation."""
def _input_fn_train():
# Create 4 rows, one of them (y = x), three of them (y=Not(x))
# The algorithm should learn (y = 0.25).
labels = constant_op.constant([[1.], [0.], [0.], [0.]])
features = {'x': array_ops.ones(shape=[4, 1], dtype=dtypes.float32),}
return features, labels
regressor = debug.DebugRegressor(
config=run_config.RunConfig(tf_random_seed=1))
regressor.fit(input_fn=_input_fn_train, steps=5)
scores = regressor.evaluate(input_fn=_input_fn_train, steps=1)
self.assertIn('loss', scores)
def testLossWithWeights(self):
"""Tests loss calculation with weights."""
def _input_fn_train():
# 4 rows with equal weight, one of them (y = x), three of them (y=Not(x))
# The algorithm should learn (y = 0.25).
labels = constant_op.constant([[1.], [0.], [0.], [0.]])
features = {
'x': array_ops.ones(shape=[4, 1], dtype=dtypes.float32),
'w': constant_op.constant([[1.], [1.], [1.], [1.]])
}
return features, labels
def _input_fn_eval():
# 4 rows, with different weights.
labels = constant_op.constant([[1.], [0.], [0.], [0.]])
features = {
'x': array_ops.ones(shape=[4, 1], dtype=dtypes.float32),
'w': constant_op.constant([[7.], [1.], [1.], [1.]])
}
return features, labels
regressor = debug.DebugRegressor(
weight_column_name='w', config=run_config.RunConfig(tf_random_seed=1))
regressor.fit(input_fn=_input_fn_train, steps=5)
scores = regressor.evaluate(input_fn=_input_fn_eval, steps=1)
self.assertIn('loss', scores)
def testTrainWithWeights(self):
"""Tests training with given weight column."""
def _input_fn_train():
# Create 4 rows, one of them (y = x), three of them (y=Not(x))
# First row has more weight than others. Model should fit (y=x) better
# than (y=Not(x)) due to the relative higher weight of the first row.
labels = constant_op.constant([[1.], [0.], [0.], [0.]])
features = {
'x': array_ops.ones(shape=[4, 1], dtype=dtypes.float32),
'w': constant_op.constant([[100.], [3.], [2.], [2.]])
}
return features, labels
def _input_fn_eval():
# Create 4 rows (y = x)
labels = constant_op.constant([[1.], [1.], [1.], [1.]])
features = {
'x': array_ops.ones(shape=[4, 1], dtype=dtypes.float32),
'w': constant_op.constant([[1.], [1.], [1.], [1.]])
}
return features, labels
regressor = debug.DebugRegressor(
weight_column_name='w', config=run_config.RunConfig(tf_random_seed=1))
regressor.fit(input_fn=_input_fn_train, steps=5)
scores = regressor.evaluate(input_fn=_input_fn_eval, steps=1)
self.assertIn('loss', scores)
def testCustomMetrics(self):
"""Tests custom evaluation metrics."""
def _input_fn(num_epochs=None):
# Create 4 rows, one of them (y = x), three of them (y=Not(x))
labels = constant_op.constant([[1.], [0.], [0.], [0.]])
features = {
'x':
input_lib.limit_epochs(
array_ops.ones(shape=[4, 1], dtype=dtypes.float32),
num_epochs=num_epochs),
}
return features, labels
def _my_metric_op(predictions, labels):
return math_ops.reduce_sum(math_ops.multiply(predictions, labels))
regressor = debug.DebugRegressor(
config=run_config.RunConfig(tf_random_seed=1))
regressor.fit(input_fn=_input_fn, steps=5)
scores = regressor.evaluate(
input_fn=_input_fn,
steps=1,
metrics={
'my_error':
MetricSpec(
metric_fn=metric_ops.streaming_mean_squared_error,
prediction_key='scores'),
'my_metric':
MetricSpec(metric_fn=_my_metric_op, prediction_key='scores')
})
self.assertIn('loss', set(scores.keys()))
self.assertIn('my_error', set(scores.keys()))
self.assertIn('my_metric', set(scores.keys()))
predict_input_fn = functools.partial(_input_fn, num_epochs=1)
predictions = np.array(
list(regressor.predict_scores(input_fn=predict_input_fn)))
self.assertAlmostEqual(
_sklearn.mean_squared_error(np.array([1, 0, 0, 0]), predictions),
scores['my_error'])
# Tests the case where the prediction_key is not "scores".
with self.assertRaisesRegexp(KeyError, 'bad_type'):
regressor.evaluate(
input_fn=_input_fn,
steps=1,
metrics={
'bad_name':
MetricSpec(
metric_fn=metric_ops.streaming_auc,
prediction_key='bad_type')
})
def testTrainSaveLoad(self):
"""Tests that insures you can save and reload a trained model."""
def _input_fn(num_epochs=None):
features = {
'age':
input_lib.limit_epochs(
constant_op.constant([[0.8], [0.15], [0.]]),
num_epochs=num_epochs),
'language':
sparse_tensor.SparseTensor(
values=input_lib.limit_epochs(
['en', 'fr', 'zh'], num_epochs=num_epochs),
indices=[[0, 0], [0, 1], [2, 0]],
dense_shape=[3, 2])
}
return features, constant_op.constant([1., 0., 0.2], dtype=dtypes.float32)
model_dir = tempfile.mkdtemp()
regressor = debug.DebugRegressor(
model_dir=model_dir, config=run_config.RunConfig(tf_random_seed=1))
regressor.fit(input_fn=_input_fn, steps=5)
predict_input_fn = functools.partial(_input_fn, num_epochs=1)
predictions = list(regressor.predict_scores(input_fn=predict_input_fn))
del regressor
regressor2 = debug.DebugRegressor(
model_dir=model_dir, config=run_config.RunConfig(tf_random_seed=1))
predictions2 = list(regressor2.predict_scores(input_fn=predict_input_fn))
self.assertAllClose(predictions, predictions2)
if __name__ == '__main__':
test.main()
| apache-2.0 |
cloudera/hue | desktop/core/ext-py/pycryptodomex-3.9.7/lib/Cryptodome/SelfTest/Hash/test_SHA512.py | 4 | 5302 | # -*- coding: utf-8 -*-
#
# SelfTest/Hash/test_SHA512.py: Self-test for the SHA-512 hash function
#
# Written in 2008 by Dwayne C. Litzenberger <[email protected]>
#
# ===================================================================
# The contents of this file are dedicated to the public domain. To
# the extent that dedication to the public domain is not available,
# everyone is granted a worldwide, perpetual, royalty-free,
# non-exclusive license to exercise all rights associated with the
# contents of this file for any purpose whatsoever.
# No rights are reserved.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ===================================================================
"""Self-test suite for Cryptodome.Hash.SHA512"""
from binascii import hexlify
from Cryptodome.Hash import SHA512
from .common import make_hash_tests
from Cryptodome.SelfTest.loader import load_tests
# Test vectors from various sources
# This is a list of (expected_result, input[, description]) tuples.
test_data_512_other = [
# RFC 4634: Section Page 8.4, "Test 1"
('ddaf35a193617abacc417349ae20413112e6fa4e89a97ea20a9eeee64b55d39a2192992a274fc1a836ba3c23a3feebbd454d4423643ce80e2a9ac94fa54ca49f', 'abc'),
# RFC 4634: Section Page 8.4, "Test 2.1"
('8e959b75dae313da8cf4f72814fc143f8f7779c6eb9f7fa17299aeadb6889018501d289e4900f7e4331b99dec4b5433ac7d329eeb6dd26545e96e55b874be909', 'abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu'),
# RFC 4634: Section Page 8.4, "Test 3"
('e718483d0ce769644e2e42c7bc15b4638e1f98b13b2044285632a803afa973ebde0ff244877ea60a4cb0432ce577c31beb009c5c2c49aa2e4eadb217ad8cc09b', 'a' * 10**6, "'a' * 10**6"),
# Taken from http://de.wikipedia.org/wiki/Secure_Hash_Algorithm
('cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e', ''),
('af9ed2de700433b803240a552b41b5a472a6ef3fe1431a722b2063c75e9f07451f67a28e37d09cde769424c96aea6f8971389db9e1993d6c565c3c71b855723c', 'Franz jagt im komplett verwahrlosten Taxi quer durch Bayern'),
]
def get_tests_SHA512():
test_vectors = load_tests(("Cryptodome", "SelfTest", "Hash", "test_vectors", "SHA2"),
"SHA512ShortMsg.rsp",
"KAT SHA-512",
{ "len" : lambda x: int(x) } )
test_data = test_data_512_other[:]
for tv in test_vectors:
try:
if tv.startswith('['):
continue
except AttributeError:
pass
if tv.len == 0:
tv.msg = b""
test_data.append((hexlify(tv.md), tv.msg, tv.desc))
tests = make_hash_tests(SHA512, "SHA512", test_data,
digest_size=64,
oid="2.16.840.1.101.3.4.2.3")
return tests
def get_tests_SHA512_224():
test_vectors = load_tests(("Cryptodome", "SelfTest", "Hash", "test_vectors", "SHA2"),
"SHA512_224ShortMsg.rsp",
"KAT SHA-512/224",
{ "len" : lambda x: int(x) } )
test_data = []
for tv in test_vectors:
try:
if tv.startswith('['):
continue
except AttributeError:
pass
if tv.len == 0:
tv.msg = b""
test_data.append((hexlify(tv.md), tv.msg, tv.desc))
tests = make_hash_tests(SHA512, "SHA512/224", test_data,
digest_size=28,
oid="2.16.840.1.101.3.4.2.5",
extra_params={ "truncate" : "224" })
return tests
def get_tests_SHA512_256():
test_vectors = load_tests(("Cryptodome", "SelfTest", "Hash", "test_vectors", "SHA2"),
"SHA512_256ShortMsg.rsp",
"KAT SHA-512/256",
{ "len" : lambda x: int(x) } )
test_data = []
for tv in test_vectors:
try:
if tv.startswith('['):
continue
except AttributeError:
pass
if tv.len == 0:
tv.msg = b""
test_data.append((hexlify(tv.md), tv.msg, tv.desc))
tests = make_hash_tests(SHA512, "SHA512/256", test_data,
digest_size=32,
oid="2.16.840.1.101.3.4.2.6",
extra_params={ "truncate" : "256" })
return tests
def get_tests(config={}):
tests = []
tests += get_tests_SHA512()
tests += get_tests_SHA512_224()
tests += get_tests_SHA512_256()
return tests
if __name__ == '__main__':
import unittest
suite = lambda: unittest.TestSuite(get_tests())
unittest.main(defaultTest='suite')
# vim:set ts=4 sw=4 sts=4 expandtab:
| apache-2.0 |
asrie/phantomjs | src/qt/qtwebkit/Source/WebCore/inspector/CodeGeneratorInspector.py | 117 | 97853 | #!/usr/bin/env python
# Copyright (c) 2011 Google Inc. All rights reserved.
# Copyright (c) 2012 Intel Corporation. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import os.path
import sys
import string
import optparse
import re
try:
import json
except ImportError:
import simplejson as json
import CodeGeneratorInspectorStrings
DOMAIN_DEFINE_NAME_MAP = {
"Database": "SQL_DATABASE",
"Debugger": "JAVASCRIPT_DEBUGGER",
"DOMDebugger": "JAVASCRIPT_DEBUGGER",
"FileSystem": "FILE_SYSTEM",
"IndexedDB": "INDEXED_DATABASE",
"Profiler": "JAVASCRIPT_DEBUGGER",
"Worker": "WORKERS",
}
# Manually-filled map of type name replacements.
TYPE_NAME_FIX_MAP = {
"RGBA": "Rgba", # RGBA is reported to be conflicting with a define name in Windows CE.
"": "Empty",
}
TYPES_WITH_RUNTIME_CAST_SET = frozenset(["Runtime.RemoteObject", "Runtime.PropertyDescriptor", "Runtime.InternalPropertyDescriptor",
"Debugger.FunctionDetails", "Debugger.CallFrame",
"Canvas.TraceLog", "Canvas.ResourceInfo", "Canvas.ResourceState",
# This should be a temporary hack. TimelineEvent should be created via generated C++ API.
"Timeline.TimelineEvent"])
TYPES_WITH_OPEN_FIELD_LIST_SET = frozenset(["Timeline.TimelineEvent",
# InspectorStyleSheet not only creates this property but wants to read it and modify it.
"CSS.CSSProperty",
# InspectorResourceAgent needs to update mime-type.
"Network.Response"])
EXACTLY_INT_SUPPORTED = False
cmdline_parser = optparse.OptionParser()
cmdline_parser.add_option("--output_h_dir")
cmdline_parser.add_option("--output_cpp_dir")
cmdline_parser.add_option("--write_always", action="store_true")
try:
arg_options, arg_values = cmdline_parser.parse_args()
if (len(arg_values) != 1):
raise Exception("Exactly one plain argument expected (found %s)" % len(arg_values))
input_json_filename = arg_values[0]
output_header_dirname = arg_options.output_h_dir
output_cpp_dirname = arg_options.output_cpp_dir
write_always = arg_options.write_always
if not output_header_dirname:
raise Exception("Output .h directory must be specified")
if not output_cpp_dirname:
raise Exception("Output .cpp directory must be specified")
except Exception:
# Work with python 2 and 3 http://docs.python.org/py3k/howto/pyporting.html
exc = sys.exc_info()[1]
sys.stderr.write("Failed to parse command-line arguments: %s\n\n" % exc)
sys.stderr.write("Usage: <script> Inspector.json --output_h_dir <output_header_dir> --output_cpp_dir <output_cpp_dir> [--write_always]\n")
exit(1)
def dash_to_camelcase(word):
    """Convert a dash-separated word to CamelCase ("foo-bar" -> "FooBar").

    Empty segments (produced by leading/trailing/double dashes) are kept
    as literal dashes, matching the historical behavior.
    """
    segments = word.split('-')
    return ''.join(seg.capitalize() if seg else '-' for seg in segments)
def fix_camel_case(name):
    """Camel-case a dashed name and restore well-known acronyms.

    "shadow-root" becomes "ShadowRoot"; the acronyms HTML/XML/WML/API are
    re-uppercased wherever they appear, case-insensitively.
    """
    without_dashes = re.sub(r'-(\w)', lambda match: match.group(1).upper(), name)
    return re.sub(r'(?i)HTML|XML|WML|API',
                  lambda match: match.group(0).upper(),
                  to_title_case(without_dashes))
def to_title_case(name):
    """Upper-case the first character of name, leaving the rest untouched."""
    if not name:
        return name
    return name[0].upper() + name[1:]
class Capitalizer:
    """Identifier-case conversion helpers used throughout the generator."""

    @staticmethod
    def lower_camel_case_to_upper(str):
        # "fooBar" -> "FooBar"; already-capitalized input is returned as is.
        # NOTE(review): parameter shadows the builtin 'str'.
        if len(str) > 0 and str[0].islower():
            str = str[0].upper() + str[1:]
        return str

    @staticmethod
    def upper_camel_case_to_lower(str):
        # "FooBar" -> "fooBar".  A leading run of several capitals must be
        # a known abbreviation ("DOMWindow" -> "domWindow"); an unknown
        # run raises, forcing ABBREVIATION to be kept up to date.
        pos = 0
        while pos < len(str) and str[pos].isupper():
            pos += 1
        if pos == 0:
            return str
        if pos == 1:
            return str[0].lower() + str[1:]
        if pos < len(str):
            # The last capital starts the next word; leave it out of the
            # abbreviation candidate.
            pos -= 1
        possible_abbreviation = str[0:pos]
        if possible_abbreviation not in Capitalizer.ABBREVIATION:
            raise Exception("Unknown abbreviation %s" % possible_abbreviation)
        str = possible_abbreviation.lower() + str[pos:]
        return str

    @staticmethod
    def camel_case_to_capitalized_with_underscores(str):
        # "fooBar" -> "FOO_BAR".
        if len(str) == 0:
            return str
        output = Capitalizer.split_camel_case_(str)
        return "_".join(output).upper()

    @staticmethod
    def split_camel_case_(str):
        # Split camel case into word fragments.  Runs of single capital
        # letters are re-merged into one fragment when they spell a known
        # abbreviation (e.g. "XHR"), so "sendXHRRequest" does not become
        # X_H_R.
        output = []
        pos_being = 0  # NOTE(review): presumably meant "pos_begin" -- start of current word.
        pos = 1
        has_oneletter = False
        while pos < len(str):
            if str[pos].isupper():
                output.append(str[pos_being:pos].upper())
                if pos - pos_being == 1:
                    has_oneletter = True
                pos_being = pos
            pos += 1
        output.append(str[pos_being:])
        if has_oneletter:
            # Second pass: collapse consecutive one-letter fragments that
            # together form a recognized abbreviation.
            array_pos = 0
            while array_pos < len(output) - 1:
                if len(output[array_pos]) == 1:
                    array_pos_end = array_pos + 1
                    while array_pos_end < len(output) and len(output[array_pos_end]) == 1:
                        array_pos_end += 1
                    if array_pos_end - array_pos > 1:
                        possible_abbreviation = "".join(output[array_pos:array_pos_end])
                        if possible_abbreviation.upper() in Capitalizer.ABBREVIATION:
                            output[array_pos:array_pos_end] = [possible_abbreviation]
                        else:
                            array_pos = array_pos_end - 1
                array_pos += 1
        return output

    # Capital-letter runs recognized by the methods above.
    ABBREVIATION = frozenset(["XHR", "DOM", "CSS"])
# Preprocessor condition wrapped around generated validator code, so the
# runtime checks are compiled only in builds with assertions enabled.
VALIDATOR_IFDEF_NAME = "!ASSERT_DISABLED"
class DomainNameFixes:
    """Per-domain metadata: agent field name, JS-binding skip flag and an
    optional ENABLE(...) preprocessor guard."""

    @classmethod
    def get_fixed_data(cls, domain_name):
        # Returns an ad-hoc class closed over domain_name; callers read its
        # class attributes and call get_guard().
        field_name_res = Capitalizer.upper_camel_case_to_lower(domain_name) + "Agent"

        class Res(object):
            skip_js_bind = domain_name in cls.skip_js_bind_domains
            agent_field_name = field_name_res

            @staticmethod
            def get_guard():
                # Returns a Guard helper for domains that are compiled
                # conditionally; implicitly returns None for all others.
                if domain_name in DOMAIN_DEFINE_NAME_MAP:
                    define_name = DOMAIN_DEFINE_NAME_MAP[domain_name]

                    class Guard:
                        @staticmethod
                        def generate_open(output):
                            output.append("#if ENABLE(%s)\n" % define_name)

                        @staticmethod
                        def generate_close(output):
                            output.append("#endif // ENABLE(%s)\n" % define_name)

                    return Guard

        return Res

    # Domains for which no JavaScript bindings are generated.
    skip_js_bind_domains = set(["DOMDebugger"])
class RawTypes(object):
    """Descriptors for the raw JSON wire types of the inspector protocol.

    Each nested descriptor class (String, Int, Number, Bool, Object, Any,
    Array) answers the same duck-typed set of questions: which
    InspectorObject getter/setter to call, the C initializer, the
    JavaScript binding type name, how the value is passed as an output
    parameter, and which validator helper checks it at runtime.
    """

    @staticmethod
    def get(json_type):
        # Resolve a protocol "type" string to its descriptor class.
        if json_type == "boolean":
            return RawTypes.Bool
        elif json_type == "string":
            return RawTypes.String
        elif json_type == "array":
            return RawTypes.Array
        elif json_type == "object":
            return RawTypes.Object
        elif json_type == "integer":
            return RawTypes.Int
        elif json_type == "number":
            return RawTypes.Number
        elif json_type == "any":
            return RawTypes.Any
        else:
            raise Exception("Unknown type: %s" % json_type)

    # For output parameter all values are passed by pointer except RefPtr-based types.
    class OutputPassModel:
        class ByPointer:
            @staticmethod
            def get_argument_prefix():
                return "&"

            @staticmethod
            def get_parameter_type_suffix():
                return "*"

        class ByReference:
            @staticmethod
            def get_argument_prefix():
                return ""

            @staticmethod
            def get_parameter_type_suffix():
                return "&"

    class BaseType(object):
        """Common behavior shared by all raw type descriptors."""

        # Flipped to True (per subclass) once some binding requests a
        # runtime validator for this raw type; the generator then emits
        # the corresponding helper.
        need_internal_runtime_cast_ = False

        @classmethod
        def request_raw_internal_runtime_cast(cls):
            if not cls.need_internal_runtime_cast_:
                cls.need_internal_runtime_cast_ = True

        @classmethod
        def get_raw_validator_call_text(cls):
            return "RuntimeCastHelper::assertType<InspectorValue::Type%s>" % cls.get_validate_method_params().template_type

    class String(BaseType):
        @staticmethod
        def get_getter_name():
            return "String"

        # Getter and setter share the name "String".
        get_setter_name = get_getter_name

        @staticmethod
        def get_c_initializer():
            return "\"\""

        @staticmethod
        def get_js_bind_type():
            return "string"

        @staticmethod
        def get_validate_method_params():
            class ValidateMethodParams:
                template_type = "String"
            return ValidateMethodParams

        @staticmethod
        def get_output_pass_model():
            return RawTypes.OutputPassModel.ByPointer

        @staticmethod
        def is_heavy_value():
            # Heavy values are passed by const reference in generated C++.
            return True

        @staticmethod
        def get_array_item_raw_c_type_text():
            return "String"

        @staticmethod
        def get_raw_type_model():
            return TypeModel.String

    class Int(BaseType):
        @staticmethod
        def get_getter_name():
            return "Int"

        @staticmethod
        def get_setter_name():
            return "Number"

        @staticmethod
        def get_c_initializer():
            return "0"

        @staticmethod
        def get_js_bind_type():
            return "number"

        @classmethod
        def get_raw_validator_call_text(cls):
            # Ints need a dedicated check: JSON itself only has "number".
            return "RuntimeCastHelper::assertInt"

        @staticmethod
        def get_output_pass_model():
            return RawTypes.OutputPassModel.ByPointer

        @staticmethod
        def is_heavy_value():
            return False

        @staticmethod
        def get_array_item_raw_c_type_text():
            return "int"

        @staticmethod
        def get_raw_type_model():
            return TypeModel.Int

    class Number(BaseType):
        @staticmethod
        def get_getter_name():
            return "Double"

        @staticmethod
        def get_setter_name():
            return "Number"

        @staticmethod
        def get_c_initializer():
            return "0"

        @staticmethod
        def get_js_bind_type():
            return "number"

        @staticmethod
        def get_validate_method_params():
            class ValidateMethodParams:
                template_type = "Number"
            return ValidateMethodParams

        @staticmethod
        def get_output_pass_model():
            return RawTypes.OutputPassModel.ByPointer

        @staticmethod
        def is_heavy_value():
            return False

        @staticmethod
        def get_array_item_raw_c_type_text():
            return "double"

        @staticmethod
        def get_raw_type_model():
            return TypeModel.Number

    class Bool(BaseType):
        @staticmethod
        def get_getter_name():
            return "Boolean"

        get_setter_name = get_getter_name

        @staticmethod
        def get_c_initializer():
            return "false"

        @staticmethod
        def get_js_bind_type():
            return "boolean"

        @staticmethod
        def get_validate_method_params():
            class ValidateMethodParams:
                template_type = "Boolean"
            return ValidateMethodParams

        @staticmethod
        def get_output_pass_model():
            return RawTypes.OutputPassModel.ByPointer

        @staticmethod
        def is_heavy_value():
            return False

        @staticmethod
        def get_array_item_raw_c_type_text():
            return "bool"

        @staticmethod
        def get_raw_type_model():
            return TypeModel.Bool

    class Object(BaseType):
        @staticmethod
        def get_getter_name():
            return "Object"

        @staticmethod
        def get_setter_name():
            return "Value"

        @staticmethod
        def get_c_initializer():
            return "InspectorObject::create()"

        @staticmethod
        def get_js_bind_type():
            return "object"

        @staticmethod
        def get_output_argument_prefix():
            return ""

        @staticmethod
        def get_validate_method_params():
            class ValidateMethodParams:
                template_type = "Object"
            return ValidateMethodParams

        @staticmethod
        def get_output_pass_model():
            # RefPtr-based: received by reference rather than by pointer.
            return RawTypes.OutputPassModel.ByReference

        @staticmethod
        def is_heavy_value():
            return True

        @staticmethod
        def get_array_item_raw_c_type_text():
            return "InspectorObject"

        @staticmethod
        def get_raw_type_model():
            return TypeModel.Object

    class Any(BaseType):
        @staticmethod
        def get_getter_name():
            return "Value"

        get_setter_name = get_getter_name

        @staticmethod
        def get_c_initializer():
            # "any" has no default C++ representation.
            raise Exception("Unsupported")

        @staticmethod
        def get_js_bind_type():
            raise Exception("Unsupported")

        @staticmethod
        def get_raw_validator_call_text():
            return "RuntimeCastHelper::assertAny"

        @staticmethod
        def get_output_pass_model():
            return RawTypes.OutputPassModel.ByReference

        @staticmethod
        def is_heavy_value():
            return True

        @staticmethod
        def get_array_item_raw_c_type_text():
            return "InspectorValue"

        @staticmethod
        def get_raw_type_model():
            return TypeModel.Any

    class Array(BaseType):
        @staticmethod
        def get_getter_name():
            return "Array"

        @staticmethod
        def get_setter_name():
            return "Value"

        @staticmethod
        def get_c_initializer():
            return "InspectorArray::create()"

        @staticmethod
        def get_js_bind_type():
            return "object"

        @staticmethod
        def get_output_argument_prefix():
            return ""

        @staticmethod
        def get_validate_method_params():
            class ValidateMethodParams:
                template_type = "Array"
            return ValidateMethodParams

        @staticmethod
        def get_output_pass_model():
            return RawTypes.OutputPassModel.ByReference

        @staticmethod
        def is_heavy_value():
            return True

        @staticmethod
        def get_array_item_raw_c_type_text():
            return "InspectorArray"

        @staticmethod
        def get_raw_type_model():
            return TypeModel.Array
def replace_right_shift(input_str):
    """Insert a space into every ">>" so that nested template closers
    ("Array<Array<int>>") remain parseable by pre-C++11 compilers."""
    return "> >".join(input_str.split(">>"))
class CommandReturnPassModel:
    """Describes how a backend command's return value travels from the
    C++ handler's out-parameter into the protocol response."""

    class ByReference:
        """Return value received through a C++ reference (RefPtr-based)."""

        def __init__(self, var_type, set_condition):
            self.var_type = var_type
            self.set_condition = set_condition

        def get_return_var_type(self):
            return self.var_type

        @staticmethod
        def get_output_argument_prefix():
            return ""

        @staticmethod
        def get_output_to_raw_expression():
            return "%s"

        def get_output_parameter_type(self):
            return "%s&" % self.var_type

        def get_set_return_condition(self):
            # Pattern guarding assignment into the response, or None.
            return self.set_condition

    class ByPointer:
        """Return value received through a pointer out-parameter."""

        def __init__(self, var_type):
            self.var_type = var_type

        def get_return_var_type(self):
            return self.var_type

        @staticmethod
        def get_output_argument_prefix():
            return "&"

        @staticmethod
        def get_output_to_raw_expression():
            return "%s"

        def get_output_parameter_type(self):
            return "%s*" % self.var_type

        @staticmethod
        def get_set_return_condition():
            # Pointer outputs are always assigned; no guard is needed.
            return None

    class OptOutput:
        """Optional return value wrapped in TypeBuilder::OptOutput<T>."""

        def __init__(self, var_type):
            self.var_type = var_type

        def get_return_var_type(self):
            return "TypeBuilder::OptOutput<%s>" % self.var_type

        @staticmethod
        def get_output_argument_prefix():
            return "&"

        @staticmethod
        def get_output_to_raw_expression():
            return "%s.getValue()"

        def get_output_parameter_type(self):
            return "TypeBuilder::OptOutput<%s>*" % self.var_type

        @staticmethod
        def get_set_return_condition():
            return "%s.isAssigned()"
class TypeModel:
    """C++-side models for how protocol values are passed around.

    Instances describe the input parameter type, the command-return pass
    model and the event-setter expression for a protocol type.  Singleton
    models (TypeModel.Bool, .Int, .Number, .String, .Object, .Array,
    .Any) are created by init_class() at the bottom of this block.
    """

    class RefPtrBased(object):
        # Types backed by RefPtr<...>: objects, arrays and "any".
        def __init__(self, class_name):
            self.class_name = class_name
            self.optional = False

        def get_optional(self):
            result = TypeModel.RefPtrBased(self.class_name)
            result.optional = True
            return result

        def get_command_return_pass_model(self):
            if self.optional:
                # Optional RefPtr output: only assigned when non-null.
                set_condition = "%s"
            else:
                set_condition = None
            return CommandReturnPassModel.ByReference(replace_right_shift("RefPtr<%s>" % self.class_name), set_condition)

        def get_input_param_type_text(self):
            return replace_right_shift("PassRefPtr<%s>" % self.class_name)

        @staticmethod
        def get_event_setter_expression_pattern():
            return "%s"

    class Enum(object):
        def __init__(self, base_type_name):
            self.type_name = base_type_name + "::Enum"

        def get_optional(base_self):
            # Optional enums are modelled by an ad-hoc class closed over
            # the base instance; the parameter is deliberately named
            # "base_self" so the closure is unambiguous.
            class EnumOptional:
                @classmethod
                def get_optional(cls):
                    return cls

                @staticmethod
                def get_command_return_pass_model():
                    return CommandReturnPassModel.OptOutput(base_self.type_name)

                @staticmethod
                def get_input_param_type_text():
                    return base_self.type_name + "*"

                @staticmethod
                def get_event_setter_expression_pattern():
                    raise Exception("TODO")
            return EnumOptional

        def get_command_return_pass_model(self):
            return CommandReturnPassModel.ByPointer(self.type_name)

        def get_input_param_type_text(self):
            return self.type_name

        @staticmethod
        def get_event_setter_expression_pattern():
            return "%s"

    class ValueType(object):
        def __init__(self, type_name, is_heavy):
            self.type_name = type_name
            # Heavy values (e.g. String) are passed by const reference.
            self.is_heavy = is_heavy

        def get_optional(self):
            return self.ValueOptional(self)

        def get_command_return_pass_model(self):
            return CommandReturnPassModel.ByPointer(self.type_name)

        def get_input_param_type_text(self):
            if self.is_heavy:
                return "const %s&" % self.type_name
            else:
                return self.type_name

        def get_opt_output_type_(self):
            return self.type_name

        @staticmethod
        def get_event_setter_expression_pattern():
            return "%s"

        class ValueOptional:
            def __init__(self, base):
                self.base = base

            def get_optional(self):
                return self

            def get_command_return_pass_model(self):
                return CommandReturnPassModel.OptOutput(self.base.get_opt_output_type_())

            def get_input_param_type_text(self):
                # Both the pointer and the pointee are read-only.
                return "const %s* const" % self.base.type_name

            @staticmethod
            def get_event_setter_expression_pattern():
                # Optional inputs arrive as pointers and are dereferenced.
                return "*%s"

    class ExactlyInt(ValueType):
        # Wrapper type that rejects implicit double->int conversion; used
        # only when EXACTLY_INT_SUPPORTED is True.
        def __init__(self):
            TypeModel.ValueType.__init__(self, "int", False)

        def get_input_param_type_text(self):
            return "TypeBuilder::ExactlyInt"

        def get_opt_output_type_(self):
            return "TypeBuilder::ExactlyInt"

    @classmethod
    def init_class(cls):
        # Create the singleton type models referenced throughout the
        # generator (e.g. by RawTypes.*.get_raw_type_model()).
        cls.Bool = cls.ValueType("bool", False)
        if EXACTLY_INT_SUPPORTED:
            cls.Int = cls.ExactlyInt()
        else:
            cls.Int = cls.ValueType("int", False)
        cls.Number = cls.ValueType("double", False)
        cls.String = cls.ValueType("String", True,)
        cls.Object = cls.RefPtrBased("InspectorObject")
        cls.Array = cls.RefPtrBased("InspectorArray")
        cls.Any = cls.RefPtrBased("InspectorValue")


TypeModel.init_class()
# Collection of InspectorObject class methods that are likely to be overloaded in generated class.
# We must explicitly import all overloaded methods or they won't be available to user.
# NOTE: the original literal listed "setValue" twice; a frozenset ignores
# duplicates, so each setter is now listed exactly once (same value).
INSPECTOR_OBJECT_SETTER_NAMES = frozenset(["setValue", "setBoolean", "setNumber", "setString", "setObject", "setArray"])
def fix_type_name(json_name):
    """Return an ad-hoc descriptor for a protocol type name.

    The descriptor exposes the (possibly replaced) C++ class name and an
    output_comment(writer) hook that documents the rename, if any.
    """
    replacement = TYPE_NAME_FIX_MAP.get(json_name)
    if replacement is not None:
        class Result(object):
            class_name = replacement

            @staticmethod
            def output_comment(writer):
                writer.newline("// Type originally was named '%s'.\n" % json_name)
    else:
        class Result(object):
            class_name = json_name

            @staticmethod
            def output_comment(writer):
                # Name was usable as-is; nothing to document.
                pass
    return Result
class Writer:
    """Accumulates generated source text into a list, handling indentation.

    insert_writer() reserves a slot mid-stream by appending a nested
    list, so text can still be inserted there later; the caller flattens
    the final structure to produce the output.
    """

    def __init__(self, output, indent):
        self.output = output
        self.indent = indent

    def newline(self, str):
        """Append one line prefixed with the current indent."""
        if self.indent:
            self.output.append(self.indent)
        self.output.append(str)

    def append(self, str):
        """Append raw text with no indent handling."""
        self.output.append(str)

    def newline_multiline(self, str):
        """Append a multi-line chunk; every non-empty line is indented."""
        lines = str.split('\n')
        self.newline(lines[0])
        for line in lines[1:]:
            self.output.append('\n')
            if line:
                self.newline(line)

    def append_multiline(self, str):
        """Like newline_multiline, but the first line is not indented."""
        lines = str.split('\n')
        self.append(lines[0])
        for line in lines[1:]:
            self.output.append('\n')
            if line:
                self.newline(line)

    def get_indent(self):
        return self.indent

    def get_indented(self, additional_indent):
        """Return a writer over the same stream with deeper indentation."""
        return Writer(self.output, self.indent + additional_indent)

    def insert_writer(self, additional_indent):
        """Reserve an insertion point and return a writer targeting it."""
        placeholder = []
        self.output.append(placeholder)
        return Writer(placeholder, self.indent + additional_indent)
class EnumConstants:
    """Global pool of enum constant strings shared by all generated enums.

    Every distinct string is assigned a stable integer position; the
    generated C++ indexes one shared constant table with these positions.
    """

    map_ = {}
    constants_ = []

    @classmethod
    def add_constant(cls, value):
        """Intern value and return its position in the constant table."""
        if value not in cls.map_:
            cls.map_[value] = len(cls.constants_)
            cls.constants_.append(value)
        return cls.map_[value]

    @classmethod
    def get_enum_constant_code(cls):
        """Render the constant table as comma-separated C++ string literals."""
        rendered = [" \"%s\"" % constant for constant in cls.constants_]
        return ",\n".join(rendered) + "\n"
# Typebuilder code is generated in several passes: first typedefs, then other classes.
# Manual pass management is needed because we cannot have forward declarations for typedefs.
class TypeBuilderPass:
    # Pass identifiers returned by the get_generate_pass_id() hooks of the
    # per-type code generators.
    TYPEDEF = "typedef"
    MAIN = "main"
class TypeBindings:
@staticmethod
def create_named_type_declaration(json_typable, context_domain_name, type_data):
json_type = type_data.get_json_type()
class Helper:
is_ad_hoc = False
full_name_prefix_for_use = "TypeBuilder::" + context_domain_name + "::"
full_name_prefix_for_impl = "TypeBuilder::" + context_domain_name + "::"
@staticmethod
def write_doc(writer):
if "description" in json_type:
writer.newline("/* ")
writer.append(json_type["description"])
writer.append(" */\n")
@staticmethod
def add_to_forward_listener(forward_listener):
forward_listener.add_type_data(type_data)
fixed_type_name = fix_type_name(json_type["id"])
return TypeBindings.create_type_declaration_(json_typable, context_domain_name, fixed_type_name, Helper)
@staticmethod
def create_ad_hoc_type_declaration(json_typable, context_domain_name, ad_hoc_type_context):
class Helper:
is_ad_hoc = True
full_name_prefix_for_use = ad_hoc_type_context.container_relative_name_prefix
full_name_prefix_for_impl = ad_hoc_type_context.container_full_name_prefix
@staticmethod
def write_doc(writer):
pass
@staticmethod
def add_to_forward_listener(forward_listener):
pass
fixed_type_name = ad_hoc_type_context.get_type_name_fix()
return TypeBindings.create_type_declaration_(json_typable, context_domain_name, fixed_type_name, Helper)
@staticmethod
def create_type_declaration_(json_typable, context_domain_name, fixed_type_name, helper):
if json_typable["type"] == "string":
if "enum" in json_typable:
class EnumBinding:
need_user_runtime_cast_ = False
need_internal_runtime_cast_ = False
@classmethod
def resolve_inner(cls, resolve_context):
pass
@classmethod
def request_user_runtime_cast(cls, request):
if request:
cls.need_user_runtime_cast_ = True
request.acknowledge()
@classmethod
def request_internal_runtime_cast(cls):
cls.need_internal_runtime_cast_ = True
@classmethod
def get_code_generator(enum_binding_cls):
#FIXME: generate ad-hoc enums too once we figure out how to better implement them in C++.
comment_out = helper.is_ad_hoc
class CodeGenerator:
@staticmethod
def generate_type_builder(writer, generate_context):
enum = json_typable["enum"]
helper.write_doc(writer)
enum_name = fixed_type_name.class_name
fixed_type_name.output_comment(writer)
writer.newline("struct ")
writer.append(enum_name)
writer.append(" {\n")
writer.newline(" enum Enum {\n")
for enum_item in enum:
enum_pos = EnumConstants.add_constant(enum_item)
item_c_name = enum_item.replace('-', '_')
item_c_name = Capitalizer.lower_camel_case_to_upper(item_c_name)
if item_c_name in TYPE_NAME_FIX_MAP:
item_c_name = TYPE_NAME_FIX_MAP[item_c_name]
writer.newline(" ")
writer.append(item_c_name)
writer.append(" = ")
writer.append("%s" % enum_pos)
writer.append(",\n")
writer.newline(" };\n")
if enum_binding_cls.need_user_runtime_cast_:
raise Exception("Not yet implemented")
if enum_binding_cls.need_internal_runtime_cast_:
writer.append("#if %s\n" % VALIDATOR_IFDEF_NAME)
writer.newline(" static void assertCorrectValue(InspectorValue* value);\n")
writer.append("#endif // %s\n" % VALIDATOR_IFDEF_NAME)
validator_writer = generate_context.validator_writer
domain_fixes = DomainNameFixes.get_fixed_data(context_domain_name)
domain_guard = domain_fixes.get_guard()
if domain_guard:
domain_guard.generate_open(validator_writer)
validator_writer.newline("void %s%s::assertCorrectValue(InspectorValue* value)\n" % (helper.full_name_prefix_for_impl, enum_name))
validator_writer.newline("{\n")
validator_writer.newline(" WTF::String s;\n")
validator_writer.newline(" bool cast_res = value->asString(&s);\n")
validator_writer.newline(" ASSERT(cast_res);\n")
if len(enum) > 0:
condition_list = []
for enum_item in enum:
enum_pos = EnumConstants.add_constant(enum_item)
condition_list.append("s == \"%s\"" % enum_item)
validator_writer.newline(" ASSERT(%s);\n" % " || ".join(condition_list))
validator_writer.newline("}\n")
if domain_guard:
domain_guard.generate_close(validator_writer)
validator_writer.newline("\n\n")
writer.newline("}; // struct ")
writer.append(enum_name)
writer.append("\n\n")
@staticmethod
def register_use(forward_listener):
pass
@staticmethod
def get_generate_pass_id():
return TypeBuilderPass.MAIN
return CodeGenerator
@classmethod
def get_validator_call_text(cls):
return helper.full_name_prefix_for_use + fixed_type_name.class_name + "::assertCorrectValue"
@classmethod
def get_array_item_c_type_text(cls):
return helper.full_name_prefix_for_use + fixed_type_name.class_name + "::Enum"
@staticmethod
def get_setter_value_expression_pattern():
return "TypeBuilder::getEnumConstantValue(%s)"
@staticmethod
def reduce_to_raw_type():
return RawTypes.String
@staticmethod
def get_type_model():
return TypeModel.Enum(helper.full_name_prefix_for_use + fixed_type_name.class_name)
return EnumBinding
else:
if helper.is_ad_hoc:
class PlainString:
@classmethod
def resolve_inner(cls, resolve_context):
pass
@staticmethod
def request_user_runtime_cast(request):
raise Exception("Unsupported")
@staticmethod
def request_internal_runtime_cast():
pass
@staticmethod
def get_code_generator():
return None
@classmethod
def get_validator_call_text(cls):
return RawTypes.String.get_raw_validator_call_text()
@staticmethod
def reduce_to_raw_type():
return RawTypes.String
@staticmethod
def get_type_model():
return TypeModel.String
@staticmethod
def get_setter_value_expression_pattern():
return None
@classmethod
def get_array_item_c_type_text(cls):
return cls.reduce_to_raw_type().get_array_item_raw_c_type_text()
return PlainString
else:
class TypedefString:
@classmethod
def resolve_inner(cls, resolve_context):
pass
@staticmethod
def request_user_runtime_cast(request):
raise Exception("Unsupported")
@staticmethod
def request_internal_runtime_cast():
RawTypes.String.request_raw_internal_runtime_cast()
@staticmethod
def get_code_generator():
class CodeGenerator:
@staticmethod
def generate_type_builder(writer, generate_context):
helper.write_doc(writer)
fixed_type_name.output_comment(writer)
writer.newline("typedef String ")
writer.append(fixed_type_name.class_name)
writer.append(";\n\n")
@staticmethod
def register_use(forward_listener):
pass
@staticmethod
def get_generate_pass_id():
return TypeBuilderPass.TYPEDEF
return CodeGenerator
@classmethod
def get_validator_call_text(cls):
return RawTypes.String.get_raw_validator_call_text()
@staticmethod
def reduce_to_raw_type():
return RawTypes.String
@staticmethod
def get_type_model():
return TypeModel.ValueType("%s%s" % (helper.full_name_prefix_for_use, fixed_type_name.class_name), True)
@staticmethod
def get_setter_value_expression_pattern():
return None
@classmethod
def get_array_item_c_type_text(cls):
return "const %s%s&" % (helper.full_name_prefix_for_use, fixed_type_name.class_name)
return TypedefString
elif json_typable["type"] == "object":
if "properties" in json_typable:
class ClassBinding:
resolve_data_ = None
need_user_runtime_cast_ = False
need_internal_runtime_cast_ = False
@classmethod
def resolve_inner(cls, resolve_context):
if cls.resolve_data_:
return
properties = json_typable["properties"]
main = []
optional = []
ad_hoc_type_list = []
for prop in properties:
prop_name = prop["name"]
ad_hoc_type_context = cls.AdHocTypeContextImpl(prop_name, fixed_type_name.class_name, resolve_context, ad_hoc_type_list, helper.full_name_prefix_for_impl)
binding = resolve_param_type(prop, context_domain_name, ad_hoc_type_context)
code_generator = binding.get_code_generator()
if code_generator:
code_generator.register_use(resolve_context.forward_listener)
class PropertyData:
param_type_binding = binding
p = prop
if prop.get("optional"):
optional.append(PropertyData)
else:
main.append(PropertyData)
class ResolveData:
main_properties = main
optional_properties = optional
ad_hoc_types = ad_hoc_type_list
cls.resolve_data_ = ResolveData
for ad_hoc in ad_hoc_type_list:
ad_hoc.resolve_inner(resolve_context)
@classmethod
def request_user_runtime_cast(cls, request):
if not request:
return
cls.need_user_runtime_cast_ = True
request.acknowledge()
cls.request_internal_runtime_cast()
@classmethod
def request_internal_runtime_cast(cls):
if cls.need_internal_runtime_cast_:
return
cls.need_internal_runtime_cast_ = True
for p in cls.resolve_data_.main_properties:
p.param_type_binding.request_internal_runtime_cast()
for p in cls.resolve_data_.optional_properties:
p.param_type_binding.request_internal_runtime_cast()
@classmethod
def get_code_generator(class_binding_cls):
class CodeGenerator:
@classmethod
def generate_type_builder(cls, writer, generate_context):
resolve_data = class_binding_cls.resolve_data_
helper.write_doc(writer)
class_name = fixed_type_name.class_name
is_open_type = (context_domain_name + "." + class_name) in TYPES_WITH_OPEN_FIELD_LIST_SET
fixed_type_name.output_comment(writer)
writer.newline("class ")
writer.append(class_name)
writer.append(" : public ")
if is_open_type:
writer.append("InspectorObject")
else:
writer.append("InspectorObjectBase")
writer.append(" {\n")
writer.newline("public:\n")
ad_hoc_type_writer = writer.insert_writer(" ")
for ad_hoc_type in resolve_data.ad_hoc_types:
code_generator = ad_hoc_type.get_code_generator()
if code_generator:
code_generator.generate_type_builder(ad_hoc_type_writer, generate_context)
writer.newline_multiline(
""" enum {
NoFieldsSet = 0,
""")
state_enum_items = []
if len(resolve_data.main_properties) > 0:
pos = 0
for prop_data in resolve_data.main_properties:
item_name = Capitalizer.lower_camel_case_to_upper(prop_data.p["name"]) + "Set"
state_enum_items.append(item_name)
writer.newline(" %s = 1 << %s,\n" % (item_name, pos))
pos += 1
all_fields_set_value = "(" + (" | ".join(state_enum_items)) + ")"
else:
all_fields_set_value = "0"
writer.newline_multiline(CodeGeneratorInspectorStrings.class_binding_builder_part_1
% (all_fields_set_value, class_name, class_name))
pos = 0
for prop_data in resolve_data.main_properties:
prop_name = prop_data.p["name"]
param_type_binding = prop_data.param_type_binding
param_raw_type = param_type_binding.reduce_to_raw_type()
writer.newline_multiline(CodeGeneratorInspectorStrings.class_binding_builder_part_2
% (state_enum_items[pos],
Capitalizer.lower_camel_case_to_upper(prop_name),
param_type_binding.get_type_model().get_input_param_type_text(),
state_enum_items[pos], prop_name,
param_raw_type.get_setter_name(), prop_name,
format_setter_value_expression(param_type_binding, "value"),
state_enum_items[pos]))
pos += 1
writer.newline_multiline(CodeGeneratorInspectorStrings.class_binding_builder_part_3
% (class_name, class_name, class_name, class_name, class_name))
writer.newline(" /*\n")
writer.newline(" * Synthetic constructor:\n")
writer.newline(" * RefPtr<%s> result = %s::create()" % (class_name, class_name))
for prop_data in resolve_data.main_properties:
writer.append_multiline("\n * .set%s(...)" % Capitalizer.lower_camel_case_to_upper(prop_data.p["name"]))
writer.append_multiline(";\n */\n")
writer.newline_multiline(CodeGeneratorInspectorStrings.class_binding_builder_part_4)
writer.newline(" typedef TypeBuilder::StructItemTraits ItemTraits;\n")
for prop_data in resolve_data.optional_properties:
prop_name = prop_data.p["name"]
param_type_binding = prop_data.param_type_binding
setter_name = "set%s" % Capitalizer.lower_camel_case_to_upper(prop_name)
writer.append_multiline("\n void %s" % setter_name)
writer.append("(%s value)\n" % param_type_binding.get_type_model().get_input_param_type_text())
writer.newline(" {\n")
writer.newline(" this->set%s(\"%s\", %s);\n"
% (param_type_binding.reduce_to_raw_type().get_setter_name(), prop_data.p["name"],
format_setter_value_expression(param_type_binding, "value")))
writer.newline(" }\n")
if setter_name in INSPECTOR_OBJECT_SETTER_NAMES:
writer.newline(" using InspectorObjectBase::%s;\n\n" % setter_name)
if class_binding_cls.need_user_runtime_cast_:
writer.newline(" static PassRefPtr<%s> runtimeCast(PassRefPtr<InspectorValue> value)\n" % class_name)
writer.newline(" {\n")
writer.newline(" RefPtr<InspectorObject> object;\n")
writer.newline(" bool castRes = value->asObject(&object);\n")
writer.newline(" ASSERT_UNUSED(castRes, castRes);\n")
writer.append("#if %s\n" % VALIDATOR_IFDEF_NAME)
writer.newline(" assertCorrectValue(object.get());\n")
writer.append("#endif // %s\n" % VALIDATOR_IFDEF_NAME)
writer.newline(" COMPILE_ASSERT(sizeof(%s) == sizeof(InspectorObjectBase), type_cast_problem);\n" % class_name)
writer.newline(" return static_cast<%s*>(static_cast<InspectorObjectBase*>(object.get()));\n" % class_name)
writer.newline(" }\n")
writer.append("\n")
if class_binding_cls.need_internal_runtime_cast_:
writer.append("#if %s\n" % VALIDATOR_IFDEF_NAME)
writer.newline(" static void assertCorrectValue(InspectorValue* value);\n")
writer.append("#endif // %s\n" % VALIDATOR_IFDEF_NAME)
closed_field_set = (context_domain_name + "." + class_name) not in TYPES_WITH_OPEN_FIELD_LIST_SET
validator_writer = generate_context.validator_writer
domain_fixes = DomainNameFixes.get_fixed_data(context_domain_name)
domain_guard = domain_fixes.get_guard()
if domain_guard:
domain_guard.generate_open(validator_writer)
validator_writer.newline("void %s%s::assertCorrectValue(InspectorValue* value)\n" % (helper.full_name_prefix_for_impl, class_name))
validator_writer.newline("{\n")
validator_writer.newline(" RefPtr<InspectorObject> object;\n")
validator_writer.newline(" bool castRes = value->asObject(&object);\n")
validator_writer.newline(" ASSERT_UNUSED(castRes, castRes);\n")
for prop_data in resolve_data.main_properties:
validator_writer.newline(" {\n")
it_name = "%sPos" % prop_data.p["name"]
validator_writer.newline(" InspectorObject::iterator %s;\n" % it_name)
validator_writer.newline(" %s = object->find(\"%s\");\n" % (it_name, prop_data.p["name"]))
validator_writer.newline(" ASSERT(%s != object->end());\n" % it_name)
validator_writer.newline(" %s(%s->value.get());\n" % (prop_data.param_type_binding.get_validator_call_text(), it_name))
validator_writer.newline(" }\n")
if closed_field_set:
validator_writer.newline(" int foundPropertiesCount = %s;\n" % len(resolve_data.main_properties))
for prop_data in resolve_data.optional_properties:
validator_writer.newline(" {\n")
it_name = "%sPos" % prop_data.p["name"]
validator_writer.newline(" InspectorObject::iterator %s;\n" % it_name)
validator_writer.newline(" %s = object->find(\"%s\");\n" % (it_name, prop_data.p["name"]))
validator_writer.newline(" if (%s != object->end()) {\n" % it_name)
validator_writer.newline(" %s(%s->value.get());\n" % (prop_data.param_type_binding.get_validator_call_text(), it_name))
if closed_field_set:
validator_writer.newline(" ++foundPropertiesCount;\n")
validator_writer.newline(" }\n")
validator_writer.newline(" }\n")
if closed_field_set:
validator_writer.newline(" if (foundPropertiesCount != object->size()) {\n")
validator_writer.newline(" FATAL(\"Unexpected properties in object: %s\\n\", object->toJSONString().ascii().data());\n")
validator_writer.newline(" }\n")
validator_writer.newline("}\n")
if domain_guard:
domain_guard.generate_close(validator_writer)
validator_writer.newline("\n\n")
if is_open_type:
cpp_writer = generate_context.cpp_writer
writer.append("\n")
writer.newline(" // Property names for type generated as open.\n")
for prop_data in resolve_data.main_properties + resolve_data.optional_properties:
prop_name = prop_data.p["name"]
prop_field_name = Capitalizer.lower_camel_case_to_upper(prop_name)
writer.newline(" static const char* %s;\n" % (prop_field_name))
cpp_writer.newline("const char* %s%s::%s = \"%s\";\n" % (helper.full_name_prefix_for_impl, class_name, prop_field_name, prop_name))
writer.newline("};\n\n")
@staticmethod
def generate_forward_declaration(writer):
class_name = fixed_type_name.class_name
writer.newline("class ")
writer.append(class_name)
writer.append(";\n")
@staticmethod
def register_use(forward_listener):
helper.add_to_forward_listener(forward_listener)
@staticmethod
def get_generate_pass_id():
return TypeBuilderPass.MAIN
return CodeGenerator
@staticmethod
def get_validator_call_text():
return helper.full_name_prefix_for_use + fixed_type_name.class_name + "::assertCorrectValue"
@classmethod
def get_array_item_c_type_text(cls):
return helper.full_name_prefix_for_use + fixed_type_name.class_name
@staticmethod
def get_setter_value_expression_pattern():
return None
@staticmethod
def reduce_to_raw_type():
return RawTypes.Object
@staticmethod
def get_type_model():
return TypeModel.RefPtrBased(helper.full_name_prefix_for_use + fixed_type_name.class_name)
class AdHocTypeContextImpl:
def __init__(self, property_name, class_name, resolve_context, ad_hoc_type_list, parent_full_name_prefix):
self.property_name = property_name
self.class_name = class_name
self.resolve_context = resolve_context
self.ad_hoc_type_list = ad_hoc_type_list
self.container_full_name_prefix = parent_full_name_prefix + class_name + "::"
self.container_relative_name_prefix = ""
def get_type_name_fix(self):
class NameFix:
class_name = Capitalizer.lower_camel_case_to_upper(self.property_name)
@staticmethod
def output_comment(writer):
writer.newline("// Named after property name '%s' while generating %s.\n" % (self.property_name, self.class_name))
return NameFix
def add_type(self, binding):
self.ad_hoc_type_list.append(binding)
return ClassBinding
else:
class PlainObjectBinding:
@classmethod
def resolve_inner(cls, resolve_context):
pass
@staticmethod
def request_user_runtime_cast(request):
pass
@staticmethod
def request_internal_runtime_cast():
RawTypes.Object.request_raw_internal_runtime_cast()
@staticmethod
def get_code_generator():
pass
@staticmethod
def get_validator_call_text():
return "RuntimeCastHelper::assertType<InspectorValue::TypeObject>"
@classmethod
def get_array_item_c_type_text(cls):
return cls.reduce_to_raw_type().get_array_item_raw_c_type_text()
@staticmethod
def get_setter_value_expression_pattern():
return None
@staticmethod
def reduce_to_raw_type():
return RawTypes.Object
@staticmethod
def get_type_model():
return TypeModel.Object
return PlainObjectBinding
elif json_typable["type"] == "array":
if "items" in json_typable:
ad_hoc_types = []
class AdHocTypeContext:
container_full_name_prefix = "<not yet defined>"
container_relative_name_prefix = ""
@staticmethod
def get_type_name_fix():
return fixed_type_name
@staticmethod
def add_type(binding):
ad_hoc_types.append(binding)
item_binding = resolve_param_type(json_typable["items"], context_domain_name, AdHocTypeContext)
class ArrayBinding:
resolve_data_ = None
need_internal_runtime_cast_ = False
@classmethod
def resolve_inner(cls, resolve_context):
if cls.resolve_data_:
return
class ResolveData:
item_type_binding = item_binding
ad_hoc_type_list = ad_hoc_types
cls.resolve_data_ = ResolveData
for t in ad_hoc_types:
t.resolve_inner(resolve_context)
@classmethod
def request_user_runtime_cast(cls, request):
raise Exception("Not implemented yet")
@classmethod
def request_internal_runtime_cast(cls):
if cls.need_internal_runtime_cast_:
return
cls.need_internal_runtime_cast_ = True
cls.resolve_data_.item_type_binding.request_internal_runtime_cast()
@classmethod
def get_code_generator(array_binding_cls):
class CodeGenerator:
@staticmethod
def generate_type_builder(writer, generate_context):
ad_hoc_type_writer = writer
resolve_data = array_binding_cls.resolve_data_
for ad_hoc_type in resolve_data.ad_hoc_type_list:
code_generator = ad_hoc_type.get_code_generator()
if code_generator:
code_generator.generate_type_builder(ad_hoc_type_writer, generate_context)
@staticmethod
def generate_forward_declaration(writer):
pass
@staticmethod
def register_use(forward_listener):
item_code_generator = item_binding.get_code_generator()
if item_code_generator:
item_code_generator.register_use(forward_listener)
@staticmethod
def get_generate_pass_id():
return TypeBuilderPass.MAIN
return CodeGenerator
@classmethod
def get_validator_call_text(cls):
return cls.get_array_item_c_type_text() + "::assertCorrectValue"
@classmethod
def get_array_item_c_type_text(cls):
return replace_right_shift("TypeBuilder::Array<%s>" % cls.resolve_data_.item_type_binding.get_array_item_c_type_text())
@staticmethod
def get_setter_value_expression_pattern():
return None
@staticmethod
def reduce_to_raw_type():
return RawTypes.Array
@classmethod
def get_type_model(cls):
return TypeModel.RefPtrBased(cls.get_array_item_c_type_text())
return ArrayBinding
else:
# Fall-through to raw type.
pass
raw_type = RawTypes.get(json_typable["type"])
return RawTypeBinding(raw_type)
class RawTypeBinding:
    """Trivial type binding that wraps a raw type and forwards everything to it."""

    def __init__(self, raw_type):
        self.raw_type_ = raw_type

    def resolve_inner(self, resolve_context):
        """Nothing to resolve: a raw type has no inner structure."""

    def request_user_runtime_cast(self, request):
        # runtimeCast generation only makes sense for named object types.
        raise Exception("Unsupported")

    def request_internal_runtime_cast(self):
        self.raw_type_.request_raw_internal_runtime_cast()

    def get_code_generator(self):
        # Raw types need no generated builder code.
        return None

    def get_validator_call_text(self):
        return self.raw_type_.get_raw_validator_call_text()

    def get_array_item_c_type_text(self):
        return self.raw_type_.get_array_item_raw_c_type_text()

    def get_setter_value_expression_pattern(self):
        # No wrapping is required when storing a raw value.
        return None

    def reduce_to_raw_type(self):
        return self.raw_type_

    def get_type_model(self):
        return self.raw_type_.get_raw_type_model()
class TypeData(object):
    """One named type from the JSON API description.

    Holds the raw (JSON-level) type immediately and creates the richer
    binding lazily, because resolving one named type may require resolving
    other named types first.
    """

    def __init__(self, json_type, json_domain, domain_data):
        self.json_type_ = json_type
        self.json_domain_ = json_domain
        self.domain_data_ = domain_data
        if "type" not in json_type:
            raise Exception("Unknown type")
        json_type_name = json_type["type"]
        raw_type = RawTypes.get(json_type_name)
        self.raw_type_ = raw_type
        # Guard flag that detects recursive (cyclic) resolution.
        self.binding_being_resolved_ = False
        self.binding_ = None

    def get_raw_type(self):
        return self.raw_type_

    def get_binding(self):
        """Return the type binding, creating it on first use (lazy resolution)."""
        if not self.binding_:
            if self.binding_being_resolved_:
                # Bug fix: original raised undefined name 'Error' (NameError at
                # runtime); use a real exception type for the cycle diagnostic.
                raise Exception("Type %s is already being resolved" % self.json_type_["type"])
            # Resolve only lazily, because resolving one named type may require resolving some other named type.
            self.binding_being_resolved_ = True
            try:
                self.binding_ = TypeBindings.create_named_type_declaration(self.json_type_, self.json_domain_["domain"], self)
            finally:
                self.binding_being_resolved_ = False
        return self.binding_

    def get_json_type(self):
        return self.json_type_

    def get_name(self):
        return self.json_type_["id"]

    def get_domain_name(self):
        return self.json_domain_["domain"]
class DomainData:
    """Collects the TypeData objects declared by a single protocol domain."""

    def __init__(self, json_domain):
        self.json_domain = json_domain
        self.types_ = []

    def add_type(self, type_data):
        """Register one more type belonging to this domain."""
        self.types_.append(type_data)

    def name(self):
        """Domain name as given in the JSON description."""
        return self.json_domain["domain"]

    def types(self):
        """All types registered so far, in declaration order."""
        return self.types_
class TypeMap:
    """Index of every TypeData in the API, addressable by (domain, type name)."""

    def __init__(self, api):
        self.map_ = {}
        self.domains_ = []
        for json_domain in api["domains"]:
            per_domain_index = {}
            self.map_[json_domain["domain"]] = per_domain_index
            domain_data = DomainData(json_domain)
            self.domains_.append(domain_data)
            # Domains without a "types" section simply contribute no entries.
            for json_type in json_domain.get("types", []):
                type_data = TypeData(json_type, json_domain, domain_data)
                per_domain_index[json_type["id"]] = type_data
                domain_data.add_type(type_data)

    def domains(self):
        """All DomainData objects, in declaration order."""
        return self.domains_

    def get(self, domain_name, type_name):
        """Look up a TypeData; raises KeyError for unknown names."""
        return self.map_[domain_name][type_name]
def resolve_param_type(json_parameter, scope_domain_name, ad_hoc_type_context):
    """Resolve a parameter's type binding.

    A "$ref" names an existing declared type; an inline "type" creates an
    ad hoc declaration which is also registered with ad_hoc_type_context.
    """
    if "$ref" in json_parameter:
        return get_ref_data(json_parameter["$ref"], scope_domain_name).get_binding()
    if "type" in json_parameter:
        binding = TypeBindings.create_ad_hoc_type_declaration(json_parameter, scope_domain_name, ad_hoc_type_context)
        ad_hoc_type_context.add_type(binding)
        return binding
    raise Exception("Unknown type")
def resolve_param_raw_type(json_parameter, scope_domain_name):
    """Resolve a parameter to its raw (JSON-level) type.

    Mirrors resolve_param_type but stops at the raw type, without building
    or registering any binding.
    """
    if "$ref" in json_parameter:
        return get_ref_data(json_parameter["$ref"], scope_domain_name).get_raw_type()
    if "type" in json_parameter:
        return RawTypes.get(json_parameter["type"])
    raise Exception("Unknown type")
def get_ref_data(json_ref, scope_domain_name):
    """Look up the TypeData named by a '$ref' string.

    A qualified ref ("Domain.Type") is split at the first dot; an
    unqualified ref resolves within the caller's own domain.
    """
    domain_name, dot, type_name = json_ref.partition(".")
    if not dot:
        domain_name, type_name = scope_domain_name, json_ref
    return type_map.get(domain_name, type_name)
# Read the protocol description. Bug fix: the original never closed the
# file handle; 'with' guarantees it is closed even if parsing fails.
with open(input_json_filename, "r") as input_file:
    json_string = input_file.read()
json_api = json.loads(json_string)
# Holds every string.Template used to emit the generated C++/JS files.
# NOTE: all assignments below execute at class-definition time.
class Templates:
# Build a short trailing path (last 4 components) of this script for the header banner.
def get_this_script_path_(absolute_path):
absolute_path = os.path.abspath(absolute_path)
components = []
def fill_recursive(path_part, depth):
if depth <= 0 or path_part == '/':
return
fill_recursive(os.path.dirname(path_part), depth - 1)
components.append(os.path.basename(path_part))
# Typical path is /Source/WebCore/inspector/CodeGeneratorInspector.py
# Let's take 4 components from the real path then.
fill_recursive(absolute_path, 4)
return "/".join(components)
# Banner prepended to every generated file (script name + license text).
file_header_ = ("// File is generated by %s\n\n" % get_this_script_path_(sys.argv[0]) +
"""// Copyright (c) 2011 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
""")
# Small per-item templates (substituted once per domain/method/event).
frontend_domain_class = string.Template(CodeGeneratorInspectorStrings.frontend_domain_class)
backend_method = string.Template(CodeGeneratorInspectorStrings.backend_method)
frontend_method = string.Template(CodeGeneratorInspectorStrings.frontend_method)
callback_method = string.Template(CodeGeneratorInspectorStrings.callback_method)
# Whole-file templates, each prefixed with the generated-file banner.
frontend_h = string.Template(file_header_ + CodeGeneratorInspectorStrings.frontend_h)
backend_h = string.Template(file_header_ + CodeGeneratorInspectorStrings.backend_h)
backend_cpp = string.Template(file_header_ + CodeGeneratorInspectorStrings.backend_cpp)
frontend_cpp = string.Template(file_header_ + CodeGeneratorInspectorStrings.frontend_cpp)
typebuilder_h = string.Template(file_header_ + CodeGeneratorInspectorStrings.typebuilder_h)
typebuilder_cpp = string.Template(file_header_ + CodeGeneratorInspectorStrings.typebuilder_cpp)
backend_js = string.Template(file_header_ + CodeGeneratorInspectorStrings.backend_js)
# Raw (non-template) snippet reused by process_command.
param_container_access_code = CodeGeneratorInspectorStrings.param_container_access_code
# Global index of all named types declared in the loaded API description.
type_map = TypeMap(json_api)
class NeedRuntimeCastRequest:
    """Records whether a requested runtimeCast was actually generated.

    Starts unacknowledged (None); generation code calls acknowledge() once
    the cast has been emitted.
    """

    def __init__(self):
        self.ack_ = None

    def acknowledge(self):
        """Mark the request as satisfied."""
        self.ack_ = True

    def is_acknowledged(self):
        """True once acknowledged; None before that."""
        return self.ack_
# Resolve the binding of every declared type, verify all requested
# runtimeCasts were generated, and return the listener that collected
# which types need forward declarations.
def resolve_all_types():
runtime_cast_generate_requests = {}
for type_name in TYPES_WITH_RUNTIME_CAST_SET:
runtime_cast_generate_requests[type_name] = NeedRuntimeCastRequest()
# Accumulates types that must be forward-declared (i.e. used before declared).
class ForwardListener:
type_data_set = set()
already_declared_set = set()
@classmethod
def add_type_data(cls, type_data):
if type_data not in cls.already_declared_set:
cls.type_data_set.add(type_data)
class ResolveContext:
forward_listener = ForwardListener
# First pass: resolve every binding; declaration order determines forwards.
for domain_data in type_map.domains():
for type_data in domain_data.types():
# Do not generate forwards for this type any longer.
ForwardListener.already_declared_set.add(type_data)
binding = type_data.get_binding()
binding.resolve_inner(ResolveContext)
# Second pass: hand runtimeCast requests to the matching bindings.
for domain_data in type_map.domains():
for type_data in domain_data.types():
full_type_name = "%s.%s" % (type_data.get_domain_name(), type_data.get_name())
request = runtime_cast_generate_requests.pop(full_type_name, None)
binding = type_data.get_binding()
if request:
binding.request_user_runtime_cast(request)
if request and not request.is_acknowledged():
raise Exception("Failed to generate runtimeCast in " + full_type_name)
# Any leftover request names a type that does not exist in the API.
for full_type_name in runtime_cast_generate_requests:
raise Exception("Failed to generate runtimeCast. Type " + full_type_name + " not found")
return ForwardListener
# Listener that accumulated every type needing a forward declaration.
global_forward_listener = resolve_all_types()
def get_annotated_type_text(raw_type, annotated_type):
    """Return the raw C type, prefixed with the richer type as a /*comment*/ when they differ."""
    if annotated_type == raw_type:
        return raw_type
    return "/*%s*/ %s" % (annotated_type, raw_type)
def format_setter_value_expression(param_type_binding, value_ref):
    """Wrap value_ref with the binding's setter-expression pattern, if it defines one."""
    pattern = param_type_binding.get_setter_value_expression_pattern()
    return pattern % value_ref if pattern else value_ref
# Accumulates every generated source fragment in class-level lists;
# Generator.go() drives the generation and fills them, after which the
# module-level driver joins them into the output files.
class Generator:
frontend_class_field_lines = []
frontend_domain_class_lines = []
method_name_enum_list = []
backend_method_declaration_list = []
backend_method_implementation_list = []
backend_method_name_declaration_list = []
method_handler_list = []
frontend_method_list = []
backend_js_domain_initializer_list = []
backend_virtual_setters_list = []
backend_agent_interface_list = []
backend_setters_list = []
backend_constructor_init_list = []
backend_field_list = []
frontend_constructor_init_list = []
type_builder_fragments = []
type_builder_forwards = []
validator_impl_list = []
type_builder_impl_list = []
# Entry point: iterate every domain in the API and emit its types, events
# and commands into the Generator's accumulator lists.
@staticmethod
def go():
Generator.process_types(type_map)
# Lists that receive per-domain preprocessor guards (opened before the
# domain's fragments, closed in reverse order afterwards).
first_cycle_guardable_list_list = [
Generator.backend_method_declaration_list,
Generator.backend_method_implementation_list,
Generator.backend_method_name_declaration_list,
Generator.backend_agent_interface_list,
Generator.frontend_class_field_lines,
Generator.frontend_constructor_init_list,
Generator.frontend_domain_class_lines,
Generator.frontend_method_list,
Generator.method_handler_list,
Generator.method_name_enum_list,
Generator.backend_constructor_init_list,
Generator.backend_virtual_setters_list,
Generator.backend_setters_list,
Generator.backend_field_list]
for json_domain in json_api["domains"]:
domain_name = json_domain["domain"]
domain_name_lower = domain_name.lower()
domain_fixes = DomainNameFixes.get_fixed_data(domain_name)
domain_guard = domain_fixes.get_guard()
if domain_guard:
for l in first_cycle_guardable_list_list:
domain_guard.generate_open(l)
agent_field_name = domain_fixes.agent_field_name
frontend_method_declaration_lines = []
Generator.backend_js_domain_initializer_list.append("// %s.\n" % domain_name)
if not domain_fixes.skip_js_bind:
Generator.backend_js_domain_initializer_list.append("InspectorBackend.register%sDispatcher = InspectorBackend.registerDomainDispatcher.bind(InspectorBackend, \"%s\");\n" % (domain_name, domain_name))
# Register JS enums for string types (and string properties of object types).
if "types" in json_domain:
for json_type in json_domain["types"]:
if "type" in json_type and json_type["type"] == "string" and "enum" in json_type:
enum_name = "%s.%s" % (domain_name, json_type["id"])
Generator.process_enum(json_type, enum_name)
elif json_type["type"] == "object":
if "properties" in json_type:
for json_property in json_type["properties"]:
if "type" in json_property and json_property["type"] == "string" and "enum" in json_property:
enum_name = "%s.%s%s" % (domain_name, json_type["id"], to_title_case(json_property["name"]))
Generator.process_enum(json_property, enum_name)
if "events" in json_domain:
for json_event in json_domain["events"]:
Generator.process_event(json_event, domain_name, frontend_method_declaration_lines)
Generator.frontend_class_field_lines.append(" %s m_%s;\n" % (domain_name, domain_name_lower))
if Generator.frontend_constructor_init_list:
Generator.frontend_constructor_init_list.append(" , ")
Generator.frontend_constructor_init_list.append("m_%s(inspectorFrontendChannel)\n" % domain_name_lower)
Generator.frontend_domain_class_lines.append(Templates.frontend_domain_class.substitute(None,
domainClassName=domain_name,
domainFieldName=domain_name_lower,
frontendDomainMethodDeclarations="".join(flatten_list(frontend_method_declaration_lines))))
# Emit the C++ agent interface (one abstract class per domain).
agent_interface_name = Capitalizer.lower_camel_case_to_upper(domain_name) + "CommandHandler"
Generator.backend_agent_interface_list.append(" class %s {\n" % agent_interface_name)
Generator.backend_agent_interface_list.append(" public:\n")
if "commands" in json_domain:
for json_command in json_domain["commands"]:
Generator.process_command(json_command, domain_name, agent_field_name, agent_interface_name)
Generator.backend_agent_interface_list.append("\n protected:\n")
Generator.backend_agent_interface_list.append(" virtual ~%s() { }\n" % agent_interface_name)
Generator.backend_agent_interface_list.append(" };\n\n")
Generator.backend_constructor_init_list.append(" , m_%s(0)" % agent_field_name)
Generator.backend_virtual_setters_list.append(" virtual void registerAgent(%s* %s) = 0;" % (agent_interface_name, agent_field_name))
Generator.backend_setters_list.append(" virtual void registerAgent(%s* %s) { ASSERT(!m_%s); m_%s = %s; }" % (agent_interface_name, agent_field_name, agent_field_name, agent_field_name, agent_field_name))
Generator.backend_field_list.append(" %s* m_%s;" % (agent_interface_name, agent_field_name))
if domain_guard:
# Close guards in reverse of the order they were opened.
for l in reversed(first_cycle_guardable_list_list):
domain_guard.generate_close(l)
Generator.backend_js_domain_initializer_list.append("\n")
# Emit the JS registration line for one string enum (name -> literal map).
@staticmethod
def process_enum(json_enum, enum_name):
enum_members = []
for member in json_enum["enum"]:
enum_members.append("%s: \"%s\"" % (fix_camel_case(member), member))
Generator.backend_js_domain_initializer_list.append("InspectorBackend.registerEnum(\"%s\", {%s});\n" % (
enum_name, ", ".join(enum_members)))
# Generate the C++ frontend method and JS registration for one event.
@staticmethod
def process_event(json_event, domain_name, frontend_method_declaration_lines):
event_name = json_event["name"]
# Ad hoc types declared inline by the event's parameters are written here.
ad_hoc_type_output = []
frontend_method_declaration_lines.append(ad_hoc_type_output)
ad_hoc_type_writer = Writer(ad_hoc_type_output, " ")
decl_parameter_list = []
json_parameters = json_event.get("parameters")
Generator.generate_send_method(json_parameters, event_name, domain_name, ad_hoc_type_writer,
decl_parameter_list,
Generator.EventMethodStructTemplate,
Generator.frontend_method_list, Templates.frontend_method, {"eventName": event_name})
backend_js_event_param_list = []
if json_parameters:
for parameter in json_parameters:
parameter_name = parameter["name"]
backend_js_event_param_list.append("\"%s\"" % parameter_name)
frontend_method_declaration_lines.append(
" void %s(%s);\n" % (event_name, ", ".join(decl_parameter_list)))
Generator.backend_js_domain_initializer_list.append("InspectorBackend.registerEvent(\"%s.%s\", [%s]);\n" % (
domain_name, event_name, ", ".join(backend_js_event_param_list)))
# Strategy object used by generate_send_method for events: parameters are
# packed into a dedicated "params" object attached to the JSON message.
class EventMethodStructTemplate:
@staticmethod
def append_prolog(line_list):
line_list.append(" RefPtr<InspectorObject> paramsObject = InspectorObject::create();\n")
@staticmethod
def append_epilog(line_list):
line_list.append(" jsonMessage->setObject(\"params\", paramsObject);\n")
# Name of the C++ variable the setters write into.
container_name = "paramsObject"
# Generate everything for one backend command: dispatcher enum entry,
# handler declaration/implementation, agent-interface signature and the
# JS registration line.
@staticmethod
def process_command(json_command, domain_name, agent_field_name, agent_interface_name):
json_command_name = json_command["name"]
cmd_enum_name = "k%s_%sCmd" % (domain_name, json_command["name"])
Generator.method_name_enum_list.append(" %s," % cmd_enum_name)
Generator.method_handler_list.append(" &InspectorBackendDispatcherImpl::%s_%s," % (domain_name, json_command_name))
Generator.backend_method_declaration_list.append(" void %s_%s(long callId, InspectorObject* requestMessageObject);" % (domain_name, json_command_name))
ad_hoc_type_output = []
Generator.backend_agent_interface_list.append(ad_hoc_type_output)
ad_hoc_type_writer = Writer(ad_hoc_type_output, " ")
Generator.backend_agent_interface_list.append(" virtual void %s(ErrorString*" % json_command_name)
method_in_code = ""
method_out_code = ""
agent_call_param_list = []
response_cook_list = []
request_message_param = ""
js_parameters_text = ""
# Incoming parameters: generate extraction code and agent-call arguments.
if "parameters" in json_command:
json_params = json_command["parameters"]
method_in_code += Templates.param_container_access_code
request_message_param = " requestMessageObject"
js_param_list = []
for json_parameter in json_params:
json_param_name = json_parameter["name"]
param_raw_type = resolve_param_raw_type(json_parameter, domain_name)
getter_name = param_raw_type.get_getter_name()
optional = json_parameter.get("optional")
non_optional_type_model = param_raw_type.get_raw_type_model()
if optional:
type_model = non_optional_type_model.get_optional()
else:
type_model = non_optional_type_model
# Optional params are passed by pointer (0 when absent); required ones by value/ref.
if optional:
code = (" bool %s_valueFound = false;\n"
" %s in_%s = get%s(paramsContainerPtr, \"%s\", &%s_valueFound, protocolErrorsPtr);\n" %
(json_param_name, non_optional_type_model.get_command_return_pass_model().get_return_var_type(), json_param_name, getter_name, json_param_name, json_param_name))
param = ", %s_valueFound ? &in_%s : 0" % (json_param_name, json_param_name)
# FIXME: pass optional refptr-values as PassRefPtr
formal_param_type_pattern = "const %s*"
else:
code = (" %s in_%s = get%s(paramsContainerPtr, \"%s\", 0, protocolErrorsPtr);\n" %
(non_optional_type_model.get_command_return_pass_model().get_return_var_type(), json_param_name, getter_name, json_param_name))
param = ", in_%s" % json_param_name
# FIXME: pass not-optional refptr-values as NonNullPassRefPtr
if param_raw_type.is_heavy_value():
formal_param_type_pattern = "const %s&"
else:
formal_param_type_pattern = "%s"
method_in_code += code
agent_call_param_list.append(param)
Generator.backend_agent_interface_list.append(", %s in_%s" % (formal_param_type_pattern % non_optional_type_model.get_command_return_pass_model().get_return_var_type(), json_param_name))
js_bind_type = param_raw_type.get_js_bind_type()
js_param_text = "{\"name\": \"%s\", \"type\": \"%s\", \"optional\": %s}" % (
json_param_name,
js_bind_type,
("true" if ("optional" in json_parameter and json_parameter["optional"]) else "false"))
js_param_list.append(js_param_text)
js_parameters_text = ", ".join(js_param_list)
response_cook_text = ""
# Async commands reply through a generated callback class instead of
# returning values directly.
if json_command.get("async") == True:
callback_name = Capitalizer.lower_camel_case_to_upper(json_command_name) + "Callback"
callback_output = []
callback_writer = Writer(callback_output, ad_hoc_type_writer.get_indent())
decl_parameter_list = []
Generator.generate_send_method(json_command.get("returns"), json_command_name, domain_name, ad_hoc_type_writer,
decl_parameter_list,
Generator.CallbackMethodStructTemplate,
Generator.backend_method_implementation_list, Templates.callback_method,
{"callbackName": callback_name, "agentName": agent_interface_name})
callback_writer.newline("class " + callback_name + " : public CallbackBase {\n")
callback_writer.newline("public:\n")
callback_writer.newline(" " + callback_name + "(PassRefPtr<InspectorBackendDispatcherImpl>, int id);\n")
callback_writer.newline(" void sendSuccess(" + ", ".join(decl_parameter_list) + ");\n")
callback_writer.newline("};\n")
ad_hoc_type_output.append(callback_output)
method_out_code += " RefPtr<" + agent_interface_name + "::" + callback_name + "> callback = adoptRef(new " + agent_interface_name + "::" + callback_name + "(this, callId));\n"
agent_call_param_list.append(", callback")
response_cook_text += " if (!error.length()) \n"
response_cook_text += " return;\n"
response_cook_text += " callback->disable();\n"
Generator.backend_agent_interface_list.append(", PassRefPtr<%s> callback" % callback_name)
else:
# Synchronous commands: declare out-variables and cook them into the result.
if "returns" in json_command:
method_out_code += "\n"
for json_return in json_command["returns"]:
json_return_name = json_return["name"]
optional = bool(json_return.get("optional"))
return_type_binding = Generator.resolve_type_and_generate_ad_hoc(json_return, json_command_name, domain_name, ad_hoc_type_writer, agent_interface_name + "::")
raw_type = return_type_binding.reduce_to_raw_type()
setter_type = raw_type.get_setter_name()
initializer = raw_type.get_c_initializer()
type_model = return_type_binding.get_type_model()
if optional:
type_model = type_model.get_optional()
code = " %s out_%s;\n" % (type_model.get_command_return_pass_model().get_return_var_type(), json_return_name)
param = ", %sout_%s" % (type_model.get_command_return_pass_model().get_output_argument_prefix(), json_return_name)
var_name = "out_%s" % json_return_name
setter_argument = type_model.get_command_return_pass_model().get_output_to_raw_expression() % var_name
if return_type_binding.get_setter_value_expression_pattern():
setter_argument = return_type_binding.get_setter_value_expression_pattern() % setter_argument
cook = " result->set%s(\"%s\", %s);\n" % (setter_type, json_return_name,
setter_argument)
set_condition_pattern = type_model.get_command_return_pass_model().get_set_return_condition()
if set_condition_pattern:
cook = (" if (%s)\n " % (set_condition_pattern % var_name)) + cook
annotated_type = type_model.get_command_return_pass_model().get_output_parameter_type()
param_name = "out_%s" % json_return_name
if optional:
param_name = "opt_" + param_name
Generator.backend_agent_interface_list.append(", %s %s" % (annotated_type, param_name))
response_cook_list.append(cook)
method_out_code += code
agent_call_param_list.append(param)
response_cook_text = "".join(response_cook_list)
if len(response_cook_text) != 0:
response_cook_text = " if (!error.length()) {\n" + response_cook_text + " }"
backend_js_reply_param_list = []
if "returns" in json_command:
for json_return in json_command["returns"]:
json_return_name = json_return["name"]
backend_js_reply_param_list.append("\"%s\"" % json_return_name)
js_reply_list = "[%s]" % ", ".join(backend_js_reply_param_list)
Generator.backend_method_implementation_list.append(Templates.backend_method.substitute(None,
domainName=domain_name, methodName=json_command_name,
agentField="m_" + agent_field_name,
methodInCode=method_in_code,
methodOutCode=method_out_code,
agentCallParams="".join(agent_call_param_list),
requestMessageObject=request_message_param,
responseCook=response_cook_text,
commandNameIndex=cmd_enum_name))
Generator.backend_method_name_declaration_list.append(" \"%s.%s\"," % (domain_name, json_command_name))
Generator.backend_js_domain_initializer_list.append("InspectorBackend.registerCommand(\"%s.%s\", [%s], %s);\n" % (domain_name, json_command_name, js_parameters_text, js_reply_list))
Generator.backend_agent_interface_list.append(") = 0;\n")
# Strategy object used by generate_send_method for async-command callbacks:
# values are written straight into the top-level JSON message (no prolog/epilog).
class CallbackMethodStructTemplate:
@staticmethod
def append_prolog(line_list):
pass
@staticmethod
def append_epilog(line_list):
pass
# Name of the C++ variable the setters write into.
container_name = "jsonMessage"
# Generates common code for event sending and callback response data sending.
# method_struct_template supplies prolog/epilog lines and the container
# variable name; the finished method text is appended to generator_method_list.
@staticmethod
def generate_send_method(parameters, event_name, domain_name, ad_hoc_type_writer, decl_parameter_list,
method_struct_template,
generator_method_list, method_template, template_params):
method_line_list = []
if parameters:
method_struct_template.append_prolog(method_line_list)
for json_parameter in parameters:
parameter_name = json_parameter["name"]
param_type_binding = Generator.resolve_type_and_generate_ad_hoc(json_parameter, event_name, domain_name, ad_hoc_type_writer, "")
raw_type = param_type_binding.reduce_to_raw_type()
raw_type_binding = RawTypeBinding(raw_type)
optional = bool(json_parameter.get("optional"))
setter_type = raw_type.get_setter_name()
type_model = param_type_binding.get_type_model()
raw_type_model = raw_type_binding.get_type_model()
if optional:
type_model = type_model.get_optional()
raw_type_model = raw_type_model.get_optional()
annotated_type = type_model.get_input_param_type_text()
mode_type_binding = param_type_binding
decl_parameter_list.append("%s %s" % (annotated_type, parameter_name))
setter_argument = raw_type_model.get_event_setter_expression_pattern() % parameter_name
if mode_type_binding.get_setter_value_expression_pattern():
setter_argument = mode_type_binding.get_setter_value_expression_pattern() % setter_argument
setter_code = " %s->set%s(\"%s\", %s);\n" % (method_struct_template.container_name, setter_type, parameter_name, setter_argument)
# Optional values are only set when the caller actually provided them.
if optional:
setter_code = (" if (%s)\n " % parameter_name) + setter_code
method_line_list.append(setter_code)
method_struct_template.append_epilog(method_line_list)
generator_method_list.append(method_template.substitute(None,
domainName=domain_name,
parameters=", ".join(decl_parameter_list),
code="".join(method_line_list), **template_params))
# Resolve a parameter's type binding and immediately generate builder code
# for any ad hoc (inline) types it declares; returns the binding.
@staticmethod
def resolve_type_and_generate_ad_hoc(json_param, method_name, domain_name, ad_hoc_type_writer, container_relative_name_prefix_param):
param_name = json_param["name"]
ad_hoc_type_list = []
# Context handed to the type resolver; collects inline type declarations.
class AdHocTypeContext:
container_full_name_prefix = "<not yet defined>"
container_relative_name_prefix = container_relative_name_prefix_param
@staticmethod
def get_type_name_fix():
# Ad hoc types are named after the parameter they belong to.
class NameFix:
class_name = Capitalizer.lower_camel_case_to_upper(param_name)
@staticmethod
def output_comment(writer):
writer.newline("// Named after parameter '%s' while generating command/event %s.\n" % (param_name, method_name))
return NameFix
@staticmethod
def add_type(binding):
ad_hoc_type_list.append(binding)
type_binding = resolve_param_type(json_param, domain_name, AdHocTypeContext)
# No-op listener/context: forward declarations are not tracked here.
class InterfaceForwardListener:
@staticmethod
def add_type_data(type_data):
pass
class InterfaceResolveContext:
forward_listener = InterfaceForwardListener
for type in ad_hoc_type_list:
type.resolve_inner(InterfaceResolveContext)
class InterfaceGenerateContext:
validator_writer = "not supported in InterfaceGenerateContext"
cpp_writer = validator_writer
for type in ad_hoc_type_list:
generator = type.get_code_generator()
if generator:
generator.generate_type_builder(ad_hoc_type_writer, InterfaceGenerateContext)
return type_binding
# Generate the TypeBuilder code for every named type: main builder classes,
# then forward declarations, then typedefs (separate passes).
@staticmethod
def process_types(type_map):
output = Generator.type_builder_fragments
class GenerateContext:
validator_writer = Writer(Generator.validator_impl_list, "")
cpp_writer = Writer(Generator.type_builder_impl_list, "")
# Walk all domains/types, opening the C++ namespace (and guard) lazily —
# only when a type in the domain actually produces output.
def generate_all_domains_code(out, type_data_callback):
writer = Writer(out, "")
for domain_data in type_map.domains():
domain_fixes = DomainNameFixes.get_fixed_data(domain_data.name())
domain_guard = domain_fixes.get_guard()
namespace_declared = []
def namespace_lazy_generator():
if not namespace_declared:
if domain_guard:
domain_guard.generate_open(out)
writer.newline("namespace ")
writer.append(domain_data.name())
writer.append(" {\n")
# What is a better way to change value from outer scope?
namespace_declared.append(True)
return writer
for type_data in domain_data.types():
type_data_callback(type_data, namespace_lazy_generator)
if namespace_declared:
writer.append("} // ")
writer.append(domain_data.name())
writer.append("\n\n")
if domain_guard:
domain_guard.generate_close(out)
# Build a callback that only fires for generators of the given pass.
def create_type_builder_caller(generate_pass_id):
def call_type_builder(type_data, writer_getter):
code_generator = type_data.get_binding().get_code_generator()
if code_generator and generate_pass_id == code_generator.get_generate_pass_id():
writer = writer_getter()
code_generator.generate_type_builder(writer, GenerateContext)
return call_type_builder
generate_all_domains_code(output, create_type_builder_caller(TypeBuilderPass.MAIN))
Generator.type_builder_forwards.append("// Forward declarations.\n")
def generate_forward_callback(type_data, writer_getter):
if type_data in global_forward_listener.type_data_set:
binding = type_data.get_binding()
binding.get_code_generator().generate_forward_declaration(writer_getter())
generate_all_domains_code(Generator.type_builder_forwards, generate_forward_callback)
Generator.type_builder_forwards.append("// End of forward declarations.\n\n")
Generator.type_builder_forwards.append("// Typedefs.\n")
generate_all_domains_code(Generator.type_builder_forwards, create_type_builder_caller(TypeBuilderPass.TYPEDEF))
Generator.type_builder_forwards.append("// End of typedefs.\n\n")
def flatten_list(input):
    """Recursively flatten arbitrarily nested lists into one flat list."""
    res = []

    def _walk(items):
        for element in items:
            if isinstance(element, list):
                _walk(element)
            else:
                res.append(element)

    _walk(input)
    return res
class SmartOutput:
    """A writer that only rewrites the target file when its content actually
    changed, to better support incremental builds.

    Output is buffered in memory by write() and flushed by close().
    """

    def __init__(self, file_name):
        self.file_name_ = file_name
        self.output_ = ""

    def write(self, text):
        """Append text to the in-memory buffer (nothing touches disk yet)."""
        self.output_ += text

    def close(self):
        """Write the buffer to disk if it differs from the existing file.

        'write_always' (module-level flag) forces the write regardless.
        """
        text_changed = True
        try:
            # Bug fix: original used a bare 'except:' (which also swallows
            # KeyboardInterrupt/SystemExit) and never closed the handle.
            with open(self.file_name_, "r") as read_file:
                text_changed = read_file.read() != self.output_
        except (IOError, OSError):
            # File missing or unreadable: just overwrite by default.
            pass
        if text_changed or write_always:
            with open(self.file_name_, "w") as out_file:
                out_file.write(self.output_)
# Run the generator, then join the accumulated fragments into the seven
# output files (headers/impl go to separate directories).
Generator.go()
backend_h_file = SmartOutput(output_header_dirname + "/InspectorBackendDispatcher.h")
backend_cpp_file = SmartOutput(output_cpp_dirname + "/InspectorBackendDispatcher.cpp")
frontend_h_file = SmartOutput(output_header_dirname + "/InspectorFrontend.h")
frontend_cpp_file = SmartOutput(output_cpp_dirname + "/InspectorFrontend.cpp")
typebuilder_h_file = SmartOutput(output_header_dirname + "/InspectorTypeBuilder.h")
typebuilder_cpp_file = SmartOutput(output_cpp_dirname + "/InspectorTypeBuilder.cpp")
backend_js_file = SmartOutput(output_cpp_dirname + "/InspectorBackendCommands.js")
backend_h_file.write(Templates.backend_h.substitute(None,
virtualSetters="\n".join(Generator.backend_virtual_setters_list),
agentInterfaces="".join(flatten_list(Generator.backend_agent_interface_list)),
methodNamesEnumContent="\n".join(Generator.method_name_enum_list)))
backend_cpp_file.write(Templates.backend_cpp.substitute(None,
constructorInit="\n".join(Generator.backend_constructor_init_list),
setters="\n".join(Generator.backend_setters_list),
fieldDeclarations="\n".join(Generator.backend_field_list),
methodNameDeclarations="\n".join(Generator.backend_method_name_declaration_list),
methods="\n".join(Generator.backend_method_implementation_list),
methodDeclarations="\n".join(Generator.backend_method_declaration_list),
messageHandlers="\n".join(Generator.method_handler_list)))
frontend_h_file.write(Templates.frontend_h.substitute(None,
fieldDeclarations="".join(Generator.frontend_class_field_lines),
domainClassList="".join(Generator.frontend_domain_class_lines)))
frontend_cpp_file.write(Templates.frontend_cpp.substitute(None,
constructorInit="".join(Generator.frontend_constructor_init_list),
methods="\n".join(Generator.frontend_method_list)))
typebuilder_h_file.write(Templates.typebuilder_h.substitute(None,
typeBuilders="".join(flatten_list(Generator.type_builder_fragments)),
forwards="".join(Generator.type_builder_forwards),
validatorIfdefName=VALIDATOR_IFDEF_NAME))
typebuilder_cpp_file.write(Templates.typebuilder_cpp.substitute(None,
enumConstantValues=EnumConstants.get_enum_constant_code(),
implCode="".join(flatten_list(Generator.type_builder_impl_list)),
validatorCode="".join(flatten_list(Generator.validator_impl_list)),
validatorIfdefName=VALIDATOR_IFDEF_NAME))
backend_js_file.write(Templates.backend_js.substitute(None,
domainInitializers="".join(Generator.backend_js_domain_initializer_list)))
# close() is what actually flushes each SmartOutput buffer to disk.
backend_h_file.close()
backend_cpp_file.close()
frontend_h_file.close()
frontend_cpp_file.close()
typebuilder_h_file.close()
typebuilder_cpp_file.close()
backend_js_file.close()
# License: bsd-3-clause
from __future__ import unicode_literals
from .common import InfoExtractor
from ..utils import ExtractorError
class RedTubeIE(InfoExtractor):
    """Extractor for single RedTube video pages."""

    _VALID_URL = r'http://(?:www\.)?redtube\.com/(?P<id>[0-9]+)'
    _TEST = {
        'url': 'http://www.redtube.com/66418',
        'md5': '7b8c22b5e7098a3e1c09709df1126d2d',
        'info_dict': {
            'id': '66418',
            'ext': 'mp4',
            'title': 'Sucked on a toilet',
            'age_limit': 18,
        }
    }

    def _real_extract(self, url):
        """Return the direct video URL plus basic metadata for *url*."""
        video_id = self._match_id(url)
        page = self._download_webpage(url, video_id)

        # Removed videos are reported inline in the markup rather than via
        # an HTTP error, so probe for the known removal markers first.
        removal_markers = ['video-deleted-info', '>This video has been removed']
        if any(marker in page for marker in removal_markers):
            raise ExtractorError('Video %s has been removed' % video_id, expected=True)

        return {
            'id': video_id,
            'url': self._html_search_regex(
                r'<source src="(.+?)" type="video/mp4">', page, 'video URL'),
            'ext': 'mp4',
            'title': self._html_search_regex(
                r'<h1 class="videoTitle[^"]*">(.+?)</h1>', page, 'title'),
            'thumbnail': self._og_search_thumbnail(page),
            # No self-labeling, but they describe themselves as
            # "Home of Videos Porno"
            'age_limit': 18,
        }
| unlicense |
eurosata1/e2 | lib/python/Screens/TimeDateInput.py | 57 | 2460 | from Screen import Screen
from Components.config import ConfigClock, ConfigDateTime, getConfigListEntry
from Components.ActionMap import NumberActionMap
from Components.ConfigList import ConfigListScreen
from Components.Label import Label
from Components.Pixmap import Pixmap
import time
import datetime
class TimeDateInput(Screen, ConfigListScreen):
    """Dialog for editing a time and/or date value.

    Closes with ``(True, timestamp)`` when confirmed, where ``timestamp``
    combines the edited date and time into local-time epoch seconds, or
    with ``(False,)`` on cancel.  Only the config elements supplied by
    the caller are saved/cancelled; ``save_mask`` records which ones.
    """

    def __init__(self, session, config_time=None, config_date=None):
        Screen.__init__(self, session)
        self["oktext"] = Label(_("OK"))
        self["canceltext"] = Label(_("Cancel"))
        self["ok"] = Pixmap()
        self["cancel"] = Pixmap()
        self.createConfig(config_date, config_time)

        self["actions"] = NumberActionMap(["SetupActions"],
        {
            "ok": self.keySelect,
            "save": self.keyGo,
            "cancel": self.keyCancel,
        }, -2)

        self.list = []
        ConfigListScreen.__init__(self, self.list)
        self.createSetup(self["config"])

    def createConfig(self, conf_date, conf_time):
        # save_mask bit 0: caller provided the time element;
        # bit 1: caller provided the date element.  Only caller-supplied
        # elements are persisted in keyGo() / reverted in keyCancel().
        self.save_mask = 0
        if conf_time:
            self.save_mask |= 1
        else:
            # BUGFIX: this assignment previously ended with a stray trailing
            # comma, which turned conf_time into a 1-tuple instead of a
            # ConfigClock and broke the dialog whenever no time element
            # was passed in by the caller.
            conf_time = ConfigClock(default=time.time())
        if conf_date:
            self.save_mask |= 2
        else:
            conf_date = ConfigDateTime(default=time.time(),
                                       formatstring=_("%d.%B %Y"),
                                       increment=86400)
        self.timeinput_date = conf_date
        self.timeinput_time = conf_time

    def createSetup(self, configlist):
        # Rebuild the two-row config list (date row, time row).
        self.list = [
            getConfigListEntry(_("Date"), self.timeinput_date),
            getConfigListEntry(_("Time"), self.timeinput_time)
        ]
        configlist.list = self.list
        configlist.l.setList(self.list)

    def keyPageDown(self):
        # Page down decrements the time row only.
        sel = self["config"].getCurrent()
        if sel and sel[1] == self.timeinput_time:
            self.timeinput_time.decrement()
            self["config"].invalidateCurrent()

    def keyPageUp(self):
        # Page up increments the time row only.
        sel = self["config"].getCurrent()
        if sel and sel[1] == self.timeinput_time:
            self.timeinput_time.increment()
            self["config"].invalidateCurrent()

    def keySelect(self):
        self.keyGo()

    def getTimestamp(self, date, mytime):
        """Combine *date* (epoch seconds) and *mytime* ((hour, minute))
        into a single local-time unix timestamp."""
        d = time.localtime(date)
        dt = datetime.datetime(d.tm_year, d.tm_mon, d.tm_mday, mytime[0], mytime[1])
        return int(time.mktime(dt.timetuple()))

    def keyGo(self):
        # Local renamed from 'time' so it no longer shadows the time module.
        timestamp = self.getTimestamp(self.timeinput_date.value,
                                      self.timeinput_time.value)
        if self.save_mask & 1:
            self.timeinput_time.save()
        if self.save_mask & 2:
            self.timeinput_date.save()
        self.close((True, timestamp))

    def keyCancel(self):
        if self.save_mask & 1:
            self.timeinput_time.cancel()
        if self.save_mask & 2:
            self.timeinput_date.cancel()
        self.close((False,))
| gpl-2.0 |
kenshay/ImageScripter | ProgramData/SystemFiles/Python/Lib/distutils/archive_util.py | 249 | 7822 | """distutils.archive_util
Utility functions for creating archive files (tarballs, zip files,
that sort of thing)."""
__revision__ = "$Id$"
import os
from warnings import warn
import sys
from distutils.errors import DistutilsExecError
from distutils.spawn import spawn
from distutils.dir_util import mkpath
from distutils import log
try:
from pwd import getpwnam
except ImportError:
getpwnam = None
try:
from grp import getgrnam
except ImportError:
getgrnam = None
def _get_gid(name):
    """Return the numeric gid for group *name*, or None if unresolvable."""
    # No group requested, or the grp module is unavailable on this platform.
    if name is None or getgrnam is None:
        return None
    try:
        entry = getgrnam(name)
    except KeyError:
        return None
    # Index 2 of the grp struct is gr_gid.
    return entry[2]
def _get_uid(name):
    """Return the numeric uid for user *name*, or None if unresolvable."""
    # No user requested, or the pwd module is unavailable on this platform.
    if name is None or getpwnam is None:
        return None
    try:
        entry = getpwnam(name)
    except KeyError:
        return None
    # Index 2 of the pwd struct is pw_uid.
    return entry[2]
def make_tarball(base_name, base_dir, compress="gzip", verbose=0, dry_run=0,
                 owner=None, group=None):
    """Create a (possibly compressed) tar file from all the files under
    'base_dir'.

    'compress' must be "gzip" (the default), "compress", "bzip2", or None.
    (compress will be deprecated in Python 3.2)

    'owner' and 'group' can be used to define an owner and a group for the
    archive that is being built. If not provided, the current owner and group
    will be used.

    The output tar file will be named 'base_dir' + ".tar", possibly plus
    the appropriate compression extension (".gz", ".bz2" or ".Z").

    Returns the output filename.
    """
    tar_compression = {'gzip': 'gz', 'bzip2': 'bz2', None: '', 'compress': ''}
    compress_ext = {'gzip': '.gz', 'bzip2': '.bz2', 'compress': '.Z'}

    # flags for compression program, each element of list will be an argument
    if compress is not None and compress not in compress_ext.keys():
        raise ValueError, \
              ("bad value for 'compress': must be None, 'gzip', 'bzip2' "
               "or 'compress'")

    # 'compress' appends its own .Z extension, so only the other modes add
    # a suffix here.
    archive_name = base_name + '.tar'
    if compress != 'compress':
        archive_name += compress_ext.get(compress, '')

    mkpath(os.path.dirname(archive_name), dry_run=dry_run)

    # creating the tarball
    import tarfile  # late import so Python build itself doesn't break

    log.info('Creating tar archive')

    uid = _get_uid(owner)
    gid = _get_gid(group)

    def _set_uid_gid(tarinfo):
        # tarfile 'filter' hook: force the requested owner/group onto every
        # member being added (leaves them untouched when not provided).
        if gid is not None:
            tarinfo.gid = gid
            tarinfo.gname = group
        if uid is not None:
            tarinfo.uid = uid
            tarinfo.uname = owner
        return tarinfo

    if not dry_run:
        # 'w|gz' etc. opens a stream-mode tarfile; close() is guaranteed by
        # the try/finally even if adding a member fails.
        tar = tarfile.open(archive_name, 'w|%s' % tar_compression[compress])
        try:
            tar.add(base_dir, filter=_set_uid_gid)
        finally:
            tar.close()

    # compression using `compress`
    if compress == 'compress':
        warn("'compress' will be deprecated.", PendingDeprecationWarning)
        # the option varies depending on the platform
        compressed_name = archive_name + compress_ext[compress]
        if sys.platform == 'win32':
            cmd = [compress, archive_name, compressed_name]
        else:
            cmd = [compress, '-f', archive_name]
        spawn(cmd, dry_run=dry_run)
        return compressed_name

    return archive_name
def make_zipfile(base_name, base_dir, verbose=0, dry_run=0):
    """Create a zip file from all the files under 'base_dir'.

    The output zip file will be named 'base_name' + ".zip".  Uses either the
    "zipfile" Python module (if available) or the InfoZIP "zip" utility
    (if installed and found on the default search path).  If neither tool is
    available, raises DistutilsExecError.  Returns the name of the output zip
    file.
    """
    try:
        import zipfile
    except ImportError:
        zipfile = None

    zip_filename = base_name + ".zip"
    mkpath(os.path.dirname(zip_filename), dry_run=dry_run)

    # If zipfile module is not available, try spawning an external
    # 'zip' command.
    if zipfile is None:
        if verbose:
            zipoptions = "-r"
        else:
            # -q keeps the external tool quiet unless verbose was requested.
            zipoptions = "-rq"

        try:
            spawn(["zip", zipoptions, zip_filename, base_dir],
                  dry_run=dry_run)
        except DistutilsExecError:
            # XXX really should distinguish between "couldn't find
            # external 'zip' command" and "zip failed".
            raise DistutilsExecError, \
                  ("unable to create zip file '%s': "
                   "could neither import the 'zipfile' module nor "
                   "find a standalone zip utility") % zip_filename

    else:
        log.info("creating '%s' and adding '%s' to it",
                 zip_filename, base_dir)

        if not dry_run:
            # NOTE(review): the local name 'zip' shadows the builtin; it is
            # only used within this branch so no callers are affected.
            zip = zipfile.ZipFile(zip_filename, "w",
                                  compression=zipfile.ZIP_DEFLATED)

            for dirpath, dirnames, filenames in os.walk(base_dir):
                for name in filenames:
                    path = os.path.normpath(os.path.join(dirpath, name))
                    if os.path.isfile(path):
                        zip.write(path, path)
                        log.info("adding '%s'" % path)
            zip.close()

    return zip_filename
# Registry mapping format name -> (creation function, extra keyword args,
# human-readable description); consulted by make_archive() below.
ARCHIVE_FORMATS = {
    'gztar': (make_tarball, [('compress', 'gzip')], "gzip'ed tar-file"),
    'bztar': (make_tarball, [('compress', 'bzip2')], "bzip2'ed tar-file"),
    'ztar':  (make_tarball, [('compress', 'compress')], "compressed tar file"),
    'tar':   (make_tarball, [('compress', None)], "uncompressed tar file"),
    'zip':   (make_zipfile, [],"ZIP file")
    }
def check_archive_formats(formats):
    """Return the first entry of *formats* that is not a known archive
    format, or None when every entry is recognized."""
    unknown = (fmt for fmt in formats if fmt not in ARCHIVE_FORMATS)
    return next(unknown, None)
def make_archive(base_name, format, root_dir=None, base_dir=None, verbose=0,
                 dry_run=0, owner=None, group=None):
    """Create an archive file (eg. zip or tar).

    'base_name' is the name of the file to create, minus any format-specific
    extension; 'format' is the archive format: one of "zip", "tar", "ztar",
    or "gztar".

    'root_dir' is a directory that will be the root directory of the
    archive; ie. we typically chdir into 'root_dir' before creating the
    archive.  'base_dir' is the directory where we start archiving from;
    ie. 'base_dir' will be the common prefix of all files and
    directories in the archive.  'root_dir' and 'base_dir' both default
    to the current directory.  Returns the name of the archive file.

    'owner' and 'group' are used when creating a tar archive. By default,
    uses the current owner and group.
    """
    save_cwd = os.getcwd()
    if root_dir is not None:
        log.debug("changing into '%s'", root_dir)
        # Absolutize base_name first, since it was relative to the caller's
        # cwd, not to root_dir.
        base_name = os.path.abspath(base_name)
        if not dry_run:
            os.chdir(root_dir)

    if base_dir is None:
        base_dir = os.curdir

    kwargs = {'dry_run': dry_run}

    try:
        format_info = ARCHIVE_FORMATS[format]
    except KeyError:
        raise ValueError, "unknown archive format '%s'" % format

    func = format_info[0]
    for arg, val in format_info[1]:
        kwargs[arg] = val

    # Ownership options only make sense for tar-based formats.
    if format != 'zip':
        kwargs['owner'] = owner
        kwargs['group'] = group

    try:
        filename = func(base_name, base_dir, **kwargs)
    finally:
        if root_dir is not None:
            # chdir back even on failure so callers aren't left in an
            # unexpected working directory.
            log.debug("changing back to '%s'", save_cwd)
            os.chdir(save_cwd)

    return filename
| gpl-3.0 |
KeyWeeUsr/kivy | kivy/input/providers/linuxwacom.py | 10 | 14828 | '''
Native support of Wacom tablet from linuxwacom driver
=====================================================
To configure LinuxWacom, add this to your configuration::
[input]
pen = linuxwacom,/dev/input/event2,mode=pen
finger = linuxwacom,/dev/input/event3,mode=touch
.. note::
You must have read access to the input event.
You can use a custom range for the X, Y and pressure values.
On some drivers, the range reported is invalid.
To fix that, you can add these options to the argument line:
* invert_x : 1 to invert X axis
* invert_y : 1 to invert Y axis
* min_position_x : X minimum
* max_position_x : X maximum
* min_position_y : Y minimum
* max_position_y : Y maximum
* min_pressure : pressure minimum
* max_pressure : pressure maximum
'''
__all__ = ('LinuxWacomMotionEventProvider', 'LinuxWacomMotionEvent')
import os
from kivy.input.motionevent import MotionEvent
from kivy.input.shape import ShapeRect
class LinuxWacomMotionEvent(MotionEvent):
    """Motion event produced by the linuxwacom provider."""

    def depack(self, args):
        """Unpack provider arguments into event attributes and profiles."""
        self.is_touch = True
        self.sx, self.sy = args['x'], args['y']
        self.profile = ['pos']
        # Shape information is only present when the provider reported a
        # touch ellipse for this event.
        if 'size_w' in args and 'size_h' in args:
            rect = ShapeRect()
            rect.width = args['size_w']
            rect.height = args['size_h']
            self.shape = rect
            self.profile.append('shape')
        if 'pressure' in args:
            self.pressure = args['pressure']
            self.profile.append('pressure')
        super(LinuxWacomMotionEvent, self).depack(args)

    def __str__(self):
        return '<LinuxWacomMotionEvent id=%d pos=(%f, %f) device=%s>' % (
            self.id, self.sx, self.sy, self.device)
if 'KIVY_DOC' in os.environ:
    # documentation hack
    LinuxWacomMotionEventProvider = None

else:
    import threading
    import collections
    import struct
    import fcntl
    from kivy.input.provider import MotionEventProvider
    from kivy.input.factory import MotionEventFactory
    from kivy.logger import Logger

    #
    # This part is taken from linux-source-2.6.32/include/linux/input.h
    #

    # Event types
    EV_SYN = 0x00
    EV_KEY = 0x01
    EV_REL = 0x02
    EV_ABS = 0x03
    EV_MSC = 0x04
    EV_SW = 0x05
    EV_LED = 0x11
    EV_SND = 0x12
    EV_REP = 0x14
    EV_FF = 0x15
    EV_PWR = 0x16
    EV_FF_STATUS = 0x17
    EV_MAX = 0x1f
    EV_CNT = (EV_MAX + 1)

    KEY_MAX = 0x2ff

    # Synchronization events
    SYN_REPORT = 0
    SYN_CONFIG = 1
    SYN_MT_REPORT = 2

    # Misc events
    MSC_SERIAL = 0x00
    MSC_PULSELED = 0x01
    MSC_GESTURE = 0x02
    MSC_RAW = 0x03
    MSC_SCAN = 0x04
    MSC_MAX = 0x07
    MSC_CNT = (MSC_MAX + 1)

    ABS_X = 0x00
    ABS_Y = 0x01
    ABS_PRESSURE = 0x18
    ABS_MISC = 0x28  # if 0, it's touch up
    ABS_MT_TOUCH_MAJOR = 0x30  # Major axis of touching ellipse
    ABS_MT_TOUCH_MINOR = 0x31  # Minor axis (omit if circular)
    ABS_MT_WIDTH_MAJOR = 0x32  # Major axis of approaching ellipse
    ABS_MT_WIDTH_MINOR = 0x33  # Minor axis (omit if circular)
    ABS_MT_ORIENTATION = 0x34  # Ellipse orientation
    ABS_MT_POSITION_X = 0x35  # Center X ellipse position
    ABS_MT_POSITION_Y = 0x36  # Center Y ellipse position
    ABS_MT_TOOL_TYPE = 0x37  # Type of touching device
    ABS_MT_BLOB_ID = 0x38  # Group a set of packets as a blob
    ABS_MT_TRACKING_ID = 0x39  # Unique ID of initiated contact
    ABS_MT_PRESSURE = 0x3a  # Pressure on contact area

    # some ioctl base (with 0 value)
    EVIOCGNAME = 2147501318
    EVIOCGBIT = 2147501344
    EVIOCGABS = 2149074240

    # sizeof(struct input_event)
    struct_input_event_sz = struct.calcsize('LLHHi')
    struct_input_absinfo_sz = struct.calcsize('iiiiii')
    sz_l = struct.calcsize('Q')

    class LinuxWacomMotionEventProvider(MotionEventProvider):
        """Reads raw evdev events from a Wacom tablet device node on a
        background thread and turns them into motion events."""

        options = ('min_position_x', 'max_position_x',
                   'min_position_y', 'max_position_y',
                   'min_pressure', 'max_pressure',
                   'invert_x', 'invert_y')

        def __init__(self, device, args):
            """Parse the config argument string: first element is the device
            node path, the rest are key=value overrides from `options`."""
            super(LinuxWacomMotionEventProvider, self).__init__(device, args)
            self.input_fn = None
            self.default_ranges = dict()
            self.mode = 'touch'
            # split arguments
            args = args.split(',')
            if not args:
                Logger.error('LinuxWacom: No filename given in config')
                Logger.error('LinuxWacom: Use /dev/input/event0 for example')
                return
            # read filename
            self.input_fn = args[0]
            Logger.info('LinuxWacom: Read event from <%s>' % self.input_fn)
            # read parameters
            for arg in args[1:]:
                if arg == '':
                    continue
                arg = arg.split('=')
                # ensure it's a key = value
                if len(arg) != 2:
                    err = 'LinuxWacom: Bad parameter' \
                          '%s: Not in key=value format.' % arg
                    Logger.error(err)
                    continue
                # ensure the key exist
                key, value = arg
                if key == 'mode':
                    self.mode = value
                    continue
                if key not in LinuxWacomMotionEventProvider.options:
                    Logger.error('LinuxWacom: unknown %s option' % key)
                    continue
                # ensure the value
                try:
                    self.default_ranges[key] = int(value)
                except ValueError:
                    err = 'LinuxWacom: value %s invalid for %s' % (key, value)
                    Logger.error(err)
                    continue
                # all good!
                msg = 'LinuxWacom: Set custom %s to %d' % (key, int(value))
                Logger.info(msg)
            Logger.info('LinuxWacom: mode is <%s>' % self.mode)

        def start(self):
            """Spawn the daemon reader thread; no-op when no device path
            was configured."""
            if self.input_fn is None:
                return
            self.uid = 0
            self.queue = collections.deque()
            self.thread = threading.Thread(
                target=self._thread_run,
                kwargs=dict(
                    queue=self.queue,
                    input_fn=self.input_fn,
                    device=self.device,
                    default_ranges=self.default_ranges))
            self.thread.daemon = True
            self.thread.start()

        def _thread_run(self, **kwargs):
            """Background loop: query device ranges via ioctl, then decode
            input_event records and push (etype, touch) pairs onto queue."""
            input_fn = kwargs.get('input_fn')
            queue = kwargs.get('queue')
            device = kwargs.get('device')
            drs = kwargs.get('default_ranges').get
            touches = {}
            touches_sent = []
            l_points = {}

            # prepare some vars to get limit of some component
            range_min_position_x = 0
            range_max_position_x = 2048
            range_min_position_y = 0
            range_max_position_y = 2048
            range_min_pressure = 0
            range_max_pressure = 255
            invert_x = int(bool(drs('invert_x', 0)))
            invert_y = int(bool(drs('invert_y', 0)))
            reset_touch = False

            def process(points):
                # Emit begin/update for active points and end for the ones
                # that disappeared since the last call.
                actives = list(points.keys())
                for args in points.values():
                    tid = args['id']
                    try:
                        touch = touches[tid]
                    except KeyError:
                        touch = LinuxWacomMotionEvent(device, tid, args)
                        touches[touch.id] = touch
                    if touch.sx == args['x'] \
                            and touch.sy == args['y'] \
                            and tid in touches_sent:
                        continue
                    touch.move(args)
                    if tid not in touches_sent:
                        queue.append(('begin', touch))
                        touches_sent.append(tid)
                    queue.append(('update', touch))

                for tid in list(touches.keys())[:]:
                    if tid not in actives:
                        touch = touches[tid]
                        if tid in touches_sent:
                            touch.update_time_end()
                            queue.append(('end', touch))
                            touches_sent.remove(tid)
                        del touches[tid]

            def normalize(value, vmin, vmax):
                # Map a raw device value into the 0..1 range.
                return (value - vmin) / float(vmax - vmin)

            # open the input
            try:
                fd = open(input_fn, 'rb')
            except IOError:
                Logger.exception('Unable to open %s' % input_fn)
                return

            # get the controler name (EVIOCGNAME)
            device_name = fcntl.ioctl(fd, EVIOCGNAME + (256 << 16),
                                      " " * 256).split('\x00')[0]
            Logger.info('LinuxWacom: using <%s>' % device_name)

            # get abs infos
            bit = fcntl.ioctl(fd, EVIOCGBIT + (EV_MAX << 16), ' ' * sz_l)
            bit, = struct.unpack('Q', bit)
            for x in range(EV_MAX):
                # preserve this, we may want other things than EV_ABS
                if x != EV_ABS:
                    continue
                # EV_ABS available for this device ?
                if (bit & (1 << x)) == 0:
                    continue
                # ask abs info keys to the devices
                sbit = fcntl.ioctl(fd, EVIOCGBIT + x + (KEY_MAX << 16),
                                   ' ' * sz_l)
                sbit, = struct.unpack('Q', sbit)
                for y in range(KEY_MAX):
                    if (sbit & (1 << y)) == 0:
                        continue
                    absinfo = fcntl.ioctl(fd, EVIOCGABS + y +
                                          (struct_input_absinfo_sz << 16),
                                          ' ' * struct_input_absinfo_sz)
                    abs_value, abs_min, abs_max, abs_fuzz, \
                        abs_flat, abs_res = struct.unpack('iiiiii', absinfo)
                    # Config overrides win over the device-reported ranges.
                    if y == ABS_X:
                        range_min_position_x = drs('min_position_x', abs_min)
                        range_max_position_x = drs('max_position_x', abs_max)
                        Logger.info('LinuxWacom: ' +
                                    '<%s> range position X is %d - %d' % (
                                        device_name, abs_min, abs_max))
                    elif y == ABS_Y:
                        range_min_position_y = drs('min_position_y', abs_min)
                        range_max_position_y = drs('max_position_y', abs_max)
                        Logger.info('LinuxWacom: ' +
                                    '<%s> range position Y is %d - %d' % (
                                        device_name, abs_min, abs_max))
                    elif y == ABS_PRESSURE:
                        range_min_pressure = drs('min_pressure', abs_min)
                        range_max_pressure = drs('max_pressure', abs_max)
                        Logger.info('LinuxWacom: ' +
                                    '<%s> range pressure is %d - %d' % (
                                        device_name, abs_min, abs_max))

            # read until the end
            changed = False
            touch_id = 0
            touch_x = 0
            touch_y = 0
            touch_pressure = 0
            while fd:
                data = fd.read(struct_input_event_sz)
                if len(data) < struct_input_event_sz:
                    break

                # extract each event
                # NOTE(review): '/' here assumes Python 2 integer division;
                # under Python 3 range() would need '//' — confirm target.
                for i in range(len(data) / struct_input_event_sz):
                    ev = data[i * struct_input_event_sz:]

                    # extract timeval + event infos
                    tv_sec, tv_usec, ev_type, ev_code, ev_value = \
                        struct.unpack('LLHHi', ev[:struct_input_event_sz])

                    if ev_type == EV_SYN and ev_code == SYN_REPORT:
                        # SYN_REPORT closes one hardware report: fold the
                        # accumulated x/y/pressure into l_points and flush.
                        if touch_id in l_points:
                            p = l_points[touch_id]
                        else:
                            p = dict()
                            l_points[touch_id] = p
                        p['id'] = touch_id
                        if not reset_touch:
                            p['x'] = touch_x
                            p['y'] = touch_y
                            p['pressure'] = touch_pressure
                        # In pen mode zero pressure means "hovering": drop it.
                        if self.mode == 'pen' \
                                and touch_pressure == 0 \
                                and not reset_touch:
                            del l_points[touch_id]
                        if changed:
                            if 'x' not in p:
                                reset_touch = False
                                continue
                            process(l_points)
                            changed = False
                        if reset_touch:
                            l_points.clear()
                            reset_touch = False
                            process(l_points)
                    elif ev_type == EV_MSC and ev_code == MSC_SERIAL:
                        touch_id = ev_value
                    elif ev_type == EV_ABS and ev_code == ABS_X:
                        val = normalize(ev_value,
                                        range_min_position_x,
                                        range_max_position_x)
                        if invert_x:
                            val = 1. - val
                        touch_x = val
                        changed = True
                    elif ev_type == EV_ABS and ev_code == ABS_Y:
                        # Y axis is flipped once to match window coordinates.
                        val = 1. - normalize(ev_value,
                                             range_min_position_y,
                                             range_max_position_y)
                        if invert_y:
                            val = 1. - val
                        touch_y = val
                        changed = True
                    elif ev_type == EV_ABS and ev_code == ABS_PRESSURE:
                        touch_pressure = normalize(ev_value,
                                                   range_min_pressure,
                                                   range_max_pressure)
                        changed = True
                    elif ev_type == EV_ABS and ev_code == ABS_MISC:
                        if ev_value == 0:
                            reset_touch = True

        def update(self, dispatch_fn):
            # dispatch all event from threads
            try:
                while True:
                    event_type, touch = self.queue.popleft()
                    dispatch_fn(event_type, touch)
            except:
                # deque exhausted (IndexError) — nothing left to dispatch.
                pass

    MotionEventFactory.register('linuxwacom', LinuxWacomMotionEventProvider)
| mit |
hwlzc/3.4.50 | tools/perf/scripts/python/Perf-Trace-Util/lib/Perf/Trace/Util.py | 12527 | 1935 | # Util.py - Python extension for perf script, miscellaneous utility code
#
# Copyright (C) 2010 by Tom Zanussi <[email protected]>
#
# This software may be distributed under the terms of the GNU General
# Public License ("GPL") version 2 as published by the Free Software
# Foundation.
import errno, os
# Constants mirroring <linux/futex.h>, used to decode futex syscall args.
FUTEX_WAIT = 0
FUTEX_WAKE = 1
FUTEX_PRIVATE_FLAG = 128
FUTEX_CLOCK_REALTIME = 256
# Mask that strips the private/clock flag bits, leaving the command.
FUTEX_CMD_MASK = ~(FUTEX_PRIVATE_FLAG | FUTEX_CLOCK_REALTIME)
NSECS_PER_SEC = 1000000000

def avg(total, n):
    """Return the mean of a running *total* over *n* samples."""
    # NOTE(review): with two int arguments this truncates under Python 2;
    # callers appear to rely on that behavior, so it is left unchanged.
    return total / n

def nsecs(secs, nsecs):
    """Combine a (secs, nsecs) pair into a single nanosecond count."""
    return secs * NSECS_PER_SEC + nsecs

def nsecs_secs(nsecs):
    """Return the whole-second part of a nanosecond count."""
    return nsecs / NSECS_PER_SEC

def nsecs_nsecs(nsecs):
    """Return the sub-second remainder of a nanosecond count."""
    return nsecs % NSECS_PER_SEC

def nsecs_str(nsecs):
    """Format a nanosecond count as a 'seconds.nanoseconds' string."""
    # BUGFIX: this expression previously ended with a stray trailing comma,
    # which made the function return a one-element tuple instead of the
    # formatted string.
    return "%5u.%09u" % (nsecs_secs(nsecs), nsecs_nsecs(nsecs))
def add_stats(dict, key, value):
    """Fold *value* into the (min, max, running-avg, count) tuple stored
    under *key* in *dict*, creating the entry on first use."""
    # BUGFIX: dict.has_key() was removed in Python 3; the 'in' operator is
    # equivalent and works on both Python 2 and 3.
    if key not in dict:
        dict[key] = (value, value, value, 1)
    else:
        min, max, avg, count = dict[key]
        if value < min:
            min = value
        if value > max:
            max = value
        # NOTE: this is a smoothed average (mean of old avg and new value),
        # not a true arithmetic mean over all samples — kept as-is.
        avg = (avg + value) / 2
        dict[key] = (min, max, avg, count + 1)
def clear_term():
    # ANSI escape sequence: move cursor home (ESC[H), then clear the
    # whole screen (ESC[2J).
    print("\x1b[H\x1b[2J")
# Try to load the optional audit-libs-python bindings so syscall numbers
# can be translated to names; degrade to a one-time warning otherwise.
audit_package_warned = False
try:
    import audit
    # Map uname machine strings to the audit library's machine ids.
    machine_to_id = {
        'x86_64': audit.MACH_86_64,
        'alpha' : audit.MACH_ALPHA,
        'ia64'  : audit.MACH_IA64,
        'ppc'   : audit.MACH_PPC,
        'ppc64' : audit.MACH_PPC64,
        's390'  : audit.MACH_S390,
        's390x' : audit.MACH_S390X,
        'i386'  : audit.MACH_X86,
        'i586'  : audit.MACH_X86,
        'i686'  : audit.MACH_X86,
    }
    try:
        # MACH_ARMEB is only present in newer audit versions.
        machine_to_id['armeb'] = audit.MACH_ARMEB
    except:
        pass
    machine_id = machine_to_id[os.uname()[4]]
except:
    if not audit_package_warned:
        audit_package_warned = True
        # BUGFIX: converted from a Python 2 'print' statement to a call so
        # the module also parses under Python 3 (output is identical on 2).
        print("Install the audit-libs-python package to get syscall names")
def syscall_name(id):
    """Map a syscall number to its name, falling back to the number as a
    string when the audit bindings are unavailable or the lookup fails."""
    try:
        name = audit.audit_syscall_to_name(id, machine_id)
    except:
        # audit/machine_id are only defined when audit-libs-python was
        # importable; any failure degrades to the raw number.
        name = str(id)
    return name
def strerror(nr):
    """Return the symbolic errno name for *nr* (sign is ignored), or a
    placeholder string for numbers with no known errno."""
    code = errno.errorcode.get(abs(nr))
    if code is None:
        return "Unknown %d errno" % nr
    return code
| gpl-2.0 |
sgerhart/ansible | lib/ansible/utils/module_docs_fragments/vmware.py | 30 | 2063 | # Copyright: (c) 2016, Charles Paul <[email protected]>
# Copyright: (c) 2018, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
class ModuleDocFragment(object):
    # Parameters for VMware modules
    # This fragment is merged into the DOCUMENTATION of modules that list
    # 'vmware' in extends_documentation_fragment; only the DOCUMENTATION
    # attribute below is consumed by the documentation loader.
    DOCUMENTATION = '''
options:
    hostname:
      description:
      - The hostname or IP address of the vSphere vCenter or ESXi server.
      - If the value is not specified in the task, the value of environment variable C(VMWARE_HOST) will be used instead.
      - Environment variable supported added in version 2.6.
      type: str
    username:
      description:
      - The username of the vSphere vCenter or ESXi server.
      - If the value is not specified in the task, the value of environment variable C(VMWARE_USER) will be used instead.
      - Environment variable supported added in version 2.6.
      type: str
      aliases: [ admin, user ]
    password:
      description:
      - The password of the vSphere vCenter or ESXi server.
      - If the value is not specified in the task, the value of environment variable C(VMWARE_PASSWORD) will be used instead.
      - Environment variable supported added in version 2.6.
      type: str
      aliases: [ pass, pwd ]
    validate_certs:
      description:
      - Allows connection when SSL certificates are not valid. Set to C(false) when certificates are not trusted.
      - If the value is not specified in the task, the value of environment variable C(VMWARE_VALIDATE_CERTS) will be used instead.
      - Environment variable supported added in version 2.6.
      - If set to C(yes), please make sure Python >= 2.7.9 is installed on the given machine.
      type: bool
      default: 'yes'
    port:
      description:
      - The port number of the vSphere vCenter or ESXi server.
      - If the value is not specified in the task, the value of environment variable C(VMWARE_PORT) will be used instead.
      - Environment variable supported added in version 2.6.
      type: int
      default: 443
      version_added: 2.5
'''
| mit |
qizenguf/MLC-STT | src/arch/x86/isa/insts/simd64/integer/shift/__init__.py | 91 | 2387 | # Copyright (c) 2007 The Hewlett-Packard Development Company
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Gabe Black
# Sub-categories of the 64-bit SIMD integer shift instructions; each listed
# sibling module contributes a 'microcode' string that is concatenated below.
categories = ["left_logical_shift",
              "right_logical_shift",
              "right_arithmetic_shift"]

microcode = '''
# 64 bit multimedia instructions
'''

for category in categories:
    # Python 2 'exec' statement: import the sibling module named by the
    # category string and append its microcode fragment.
    exec "import %s as cat" % category
    microcode += cat.microcode
| bsd-3-clause |
mchdks/python-social-auth | social/backends/khanacademy.py | 71 | 5039 | """
Khan Academy OAuth backend, docs at:
https://github.com/Khan/khan-api/wiki/Khan-Academy-API-Authentication
"""
import six
from oauthlib.oauth1 import SIGNATURE_HMAC, SIGNATURE_TYPE_QUERY
from requests_oauthlib import OAuth1
from social.backends.oauth import BaseOAuth1
from social.p3 import urlencode
class BrowserBasedOAuth1(BaseOAuth1):
    """Browser based mechanism OAuth authentication, fill the needed
    parameters to communicate properly with authentication service.

        REQUEST_TOKEN_URL       Request token URL (opened in web browser)
        ACCESS_TOKEN_URL        Access token URL
    """
    REQUEST_TOKEN_URL = ''
    OAUTH_TOKEN_PARAMETER_NAME = 'oauth_token'
    REDIRECT_URI_PARAMETER_NAME = 'redirect_uri'
    ACCESS_TOKEN_URL = ''

    def auth_url(self):
        """Return redirect url"""
        return self.unauthorized_token_request()

    def get_unauthorized_token(self):
        # The unauthorized token is delivered back in the callback request
        # parameters rather than fetched separately.
        return self.strategy.request_data()

    def unauthorized_token_request(self):
        """Return request for unauthorized token (first stage)"""
        params = self.request_token_extra_arguments()
        params.update(self.get_scope_argument())
        key, secret = self.get_key_and_secret()
        # decoding='utf-8' produces errors with python-requests on Python3
        # since the final URL will be of type bytes
        decoding = None if six.PY3 else 'utf-8'
        state = self.get_or_create_state()
        # Sign the request-token URL with query-string signature placement,
        # as required by Khan Academy's browser-based flow.
        auth = OAuth1(
            key,
            secret,
            callback_uri=self.get_redirect_uri(state),
            decoding=decoding,
            signature_method=SIGNATURE_HMAC,
            signature_type=SIGNATURE_TYPE_QUERY
        )
        url = self.REQUEST_TOKEN_URL + '?' + urlencode(params)
        url, _, _ = auth.client.sign(url)
        return url

    def oauth_auth(self, token=None, oauth_verifier=None):
        """Build the OAuth1 auth object for the access-token exchange."""
        key, secret = self.get_key_and_secret()
        oauth_verifier = oauth_verifier or self.data.get('oauth_verifier')
        token = token or {}
        # decoding='utf-8' produces errors with python-requests on Python3
        # since the final URL will be of type bytes
        decoding = None if six.PY3 else 'utf-8'
        state = self.get_or_create_state()
        return OAuth1(key, secret,
                      resource_owner_key=token.get('oauth_token'),
                      resource_owner_secret=token.get('oauth_token_secret'),
                      callback_uri=self.get_redirect_uri(state),
                      verifier=oauth_verifier,
                      signature_method=SIGNATURE_HMAC,
                      signature_type=SIGNATURE_TYPE_QUERY,
                      decoding=decoding)
class KhanAcademyOAuth1(BrowserBasedOAuth1):
    """
    Class used for autorising with Khan Academy.

    Flow of Khan Academy is a bit different than most OAuth 1.0 and consinsts
    of the following steps:

    1. Create signed params to attach to the REQUEST_TOKEN_URL
    2. Redirect user to the REQUEST_TOKEN_URL that will respond with
       oauth_secret, oauth_token, oauth_verifier that should be used with
       ACCESS_TOKEN_URL
    3. Go to ACCESS_TOKEN_URL and grab oauth_token_secret.

    Note that we don't use the AUTHORIZATION_URL.

    REQUEST_TOKEN_URL requires the following arguments:

    oauth_consumer_key - Your app's consumer key
    oauth_nonce - Random 64-bit, unsigned number encoded as an ASCII string
        in decimal format. The nonce/timestamp pair should always be unique.
    oauth_version - OAuth version used by your app. Must be "1.0" for now.
    oauth_signature - String generated using the referenced signature method.
    oauth_signature_method - Signature algorithm (currently only support
        "HMAC-SHA1")
    oauth_timestamp - Integer representing the time the request is sent.
        The timestamp should be expressed in number of seconds
        after January 1, 1970 00:00:00 GMT.
    oauth_callback (optional) - URL to redirect to after request token is
        received and authorized by the user's chosen identity provider.
    """
    name = 'khanacademy-oauth1'
    ID_KEY = 'user_id'
    REQUEST_TOKEN_URL = 'http://www.khanacademy.org/api/auth/request_token'
    ACCESS_TOKEN_URL = 'https://www.khanacademy.org/api/auth/access_token'
    REDIRECT_URI_PARAMETER_NAME = 'oauth_callback'
    USER_DATA_URL = 'https://www.khanacademy.org/api/v1/user'
    EXTRA_DATA = [('user_id', 'user_id')]

    def get_user_details(self, response):
        """Return user details from Khan Academy account"""
        # The API exposes only the email key and user id; name fields are
        # intentionally left blank.
        return {
            'username': response.get('key_email'),
            'email': response.get('key_email'),
            'fullname': '',
            'first_name': '',
            'last_name': '',
            'user_id': response.get('user_id')
        }

    def user_data(self, access_token, *args, **kwargs):
        """Loads user data from service"""
        # The user endpoint must be OAuth-signed as well, hence the manual
        # sign step before fetching.
        auth = self.oauth_auth(access_token)
        url, _, _ = auth.client.sign(self.USER_DATA_URL)
        return self.get_json(url)
| bsd-3-clause |
udxxabp/zulip | zerver/management/commands/check_redis.py | 115 | 2662 | from __future__ import absolute_import
from zerver.models import get_user_profile_by_id
from zerver.lib.rate_limiter import client, max_api_calls, max_api_window
from django.core.management.base import BaseCommand
from django.conf import settings
from optparse import make_option
import time, logging
class Command(BaseCommand):
    help = """Checks redis to make sure our rate limiting system hasn't grown a bug and left redis with a bunch of data

Usage: ./manage.py [--trim] check_redis"""

    option_list = BaseCommand.option_list + (
        make_option('-t', '--trim',
                    dest='trim',
                    default=False,
                    action='store_true',
                    help="Actually trim excess"),
    )

    def _check_within_range(self, key, count_func, trim_func):
        """Validate one ratelimit key: warn on non-expiring keys and, when
        --trim was given, shrink keys that exceed the user's max_api_calls."""
        user_id = int(key.split(':')[1])
        try:
            user = get_user_profile_by_id(user_id)
        except:
            # Best effort: fall back to default limits when the user row is
            # gone (e.g. stale redis data for a deleted account).
            user = None
        max_calls = max_api_calls(user=user)

        age = int(client.ttl(key))
        if age < 0:
            # A negative TTL means no expiry was set; such a key would grow
            # forever, so flag it loudly.
            logging.error("Found key with age of %s, will never expire: %s" % (age, key,))

        count = count_func()
        if count > max_calls:
            logging.error("Redis health check found key with more elements \
than max_api_calls! (trying to trim) %s %s" % (key, count))
            if self.trim:
                client.expire(key, max_api_window(user=user))
                trim_func(key, max_calls)

    def handle(self, *args, **options):
        if not settings.RATE_LIMITING:
            # BUGFIX: converted from a Python 2 'print' statement to a call
            # so this command also parses under Python 3 (same output on 2).
            print("This machine is not using redis or rate limiting, aborting")
            exit(1)

        # Find all keys, and make sure they're all within size constraints
        wildcard_list = "ratelimit:*:*:list"
        wildcard_zset = "ratelimit:*:*:zset"

        self.trim = options['trim']

        lists = client.keys(wildcard_list)
        for list_name in lists:
            self._check_within_range(list_name,
                                     lambda: client.llen(list_name),
                                     lambda key, max_calls: client.ltrim(key, 0, max_calls - 1))

        zsets = client.keys(wildcard_zset)
        for zset in zsets:
            now = time.time()
            # We can warn on our zset being too large, but we don't know what
            # elements to trim. We'd have to go through every list item and take
            # the intersection. The best we can do is expire it
            self._check_within_range(zset,
                                     lambda: client.zcount(zset, 0, now),
                                     lambda key, max_calls: None)
| apache-2.0 |
safwanrahman/mozillians | vendor-local/lib/python/tablib/packages/odf/load.py | 67 | 3959 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (C) 2007-2008 Søren Roug, European Environment Agency
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Contributor(s):
#
# This script is to be embedded in opendocument.py later
# The purpose is to read an ODT/ODP/ODS file and create the datastructure
# in memory. The user should then be able to make operations and then save
# the structure again.
from xml.sax import make_parser,handler
from xml.sax.xmlreader import InputSource
import xml.sax.saxutils
from element import Element
from namespaces import OFFICENS
from cStringIO import StringIO
#
# Parse the XML files
#
class LoadParser(handler.ContentHandler):
    """ Extract headings from content.xml of an ODT file """

    # Top-level ODF sections whose contents should be captured.  Entering
    # one of these turns parsing on; leaving it turns parsing off again.
    triggers = (
        (OFFICENS, 'automatic-styles'), (OFFICENS, 'body'),
        (OFFICENS, 'font-face-decls'), (OFFICENS, 'master-styles'),
        (OFFICENS, 'meta'), (OFFICENS, 'scripts'),
        (OFFICENS, 'settings'), (OFFICENS, 'styles') )

    def __init__(self, document):
        # document: the opendocument instance whose tree is being populated.
        self.doc = document
        self.data = []      # buffered character data for the current element
        self.level = 0      # current nesting depth inside a trigger section
        self.parse = False  # True while inside one of the trigger sections

    def characters(self, data):
        """Buffer text content; ignored while parsing is disabled."""
        if self.parse == False:
            return
        self.data.append(data)

    def startElementNS(self, tag, qname, attrs):
        """Open element *tag*: flush pending text and attach a new Element."""
        if tag in self.triggers:
            self.parse = True
        # font-face-decls is only honoured while styles.xml is being parsed.
        if self.doc._parsing != "styles.xml" and tag == (OFFICENS, 'font-face-decls'):
            self.parse = False
        if self.parse == False:
            return

        self.level = self.level + 1
        # Add any accumulated text content
        content = ''.join(self.data)
        if len(content.strip()) > 0:
            self.parent.addText(content, check_grammar=False)
            self.data = []
        # Create the element
        attrdict = {}
        for (att,value) in attrs.items():
            attrdict[att] = value
        try:
            e = Element(qname = tag, qattributes=attrdict, check_grammar=False)
            self.curr = e
        except AttributeError, v:
            # NOTE: Python 2 except syntax; file predates Python 3 support.
            print "Error: %s" % v

        # The well-known top-level sections map onto the document's
        # pre-created parts instead of the freshly built element.
        if tag == (OFFICENS, 'automatic-styles'):
            e = self.doc.automaticstyles
        elif tag == (OFFICENS, 'body'):
            e = self.doc.body
        elif tag == (OFFICENS, 'master-styles'):
            e = self.doc.masterstyles
        elif tag == (OFFICENS, 'meta'):
            e = self.doc.meta
        elif tag == (OFFICENS,'scripts'):
            e = self.doc.scripts
        elif tag == (OFFICENS,'settings'):
            e = self.doc.settings
        elif tag == (OFFICENS,'styles'):
            e = self.doc.styles
        elif self.doc._parsing == "styles.xml" and tag == (OFFICENS, 'font-face-decls'):
            e = self.doc.fontfacedecls
        elif hasattr(self,'parent'):
            # Ordinary element: hook it under the current parent.
            self.parent.addElement(e, check_grammar=False)
        self.parent = e

    def endElementNS(self, tag, qname):
        """Close element *tag*: flush pending text and pop back to the parent."""
        if self.parse == False:
            return
        self.level = self.level - 1

        str = ''.join(self.data)
        if len(str.strip()) > 0:
            self.curr.addText(str, check_grammar=False)
        self.data = []
        self.curr = self.curr.parentNode
        self.parent = self.curr
        if tag in self.triggers:
            self.parse = False
| bsd-3-clause |
srikantbmandal/ansible | lib/ansible/modules/inventory/group_by.py | 56 | 1483 | # -*- mode: python -*-
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Module metadata consumed by ansible-doc and the plugin loader.
ANSIBLE_METADATA = {'metadata_version': '1.0',
                    'status': ['stableinterface'],
                    'supported_by': 'core'}

# YAML documentation block rendered by ansible-doc.
DOCUMENTATION = '''
---
module: group_by
short_description: Create Ansible groups based on facts
description:
- Use facts to create ad-hoc groups that can be used later in a playbook.
version_added: "0.9"
options:
key:
description:
- The variables whose values will be used as groups
required: true
author: "Jeroen Hoekx (@jhoekx)"
notes:
- Spaces in group names are converted to dashes '-'.
'''

# Usage examples shown by ansible-doc.
EXAMPLES = '''
# Create groups based on the machine architecture
- group_by:
key: machine_{{ ansible_machine }}
# Create groups like 'kvm-host'
- group_by:
key: virt_{{ ansible_virtualization_type }}_{{ ansible_virtualization_role }}
'''
| gpl-3.0 |
ryanjmccall/nupic | nupic/regions/PictureSensorExplorers/rotate.py | 9 | 3563 | # ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
"""
This file defines RotatePictureExplorer, an explorer for
PictureSensor.
"""
from nupic.regions.PictureSensor import PictureSensor
#+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=
# RotatePictureExplorer
class RotatePictureExplorer(PictureSensor.PictureExplorer):
    """Explorer that sweeps each category through a range of rotation
    angles at a fixed angular velocity, holding position constant.
    """

    @classmethod
    def queryRelevantParams(klass):
        """
        Returns a sequence of parameter names that are relevant to
        the operation of the explorer.

        May be extended or overridden by sub-classes as appropriate.
        """
        return ( 'numRepetitions',
                 'minAngularPosn', 'maxAngularPosn',
                 'minAngularVelocity', 'maxAngularVelocity',
               )

    def notifyParamUpdate(self, params):
        """
        A callback that will be invoked if/when any of the explorer's
        relevant parameters have their values changed.

        @param params: a dict containing the new values of all parameters
                       that are relevant to the explorer's operation
                       (as specified by a call to queryRelevantParams()).
        """
        # Parameter checks
        # Only a constant angular velocity is supported, so min must equal max.
        if params['minAngularVelocity'] != params['maxAngularVelocity']:
            raise NotImplementedError("'rotate' explorer currently supports " \
                  "only a fixed angular velocity; i.e., 'minAngularVelocity' (%d) " \
                  "must be identical to 'maxAngularVelocity' (%d)" \
                  % (params['minAngularVelocity'], params['maxAngularVelocity']))
        super(RotatePictureExplorer, self).notifyParamUpdate(params)

    def initSequence(self, state, params):
        # First presentation of a sequence is the same as any other step.
        self._presentNextRotation(state, params)

    def updateSequence(self, state, params):
        self._presentNextRotation(state, params)

    #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    # Internal helper method(s)

    def _presentNextRotation(self, state, params):
        """
        Compute the appropriate category and rotational angle
        deterministically based on the current iteration count.
        """
        # These don't change
        state['posnX'] = 0
        state['posnY'] = 0
        state['velocityX'] = 0
        state['velocityY'] = 0
        state['angularVelocity'] = params['minAngularVelocity']
        # These do change
        # Number of steps to sweep from maxAngularPosn down to minAngularPosn
        # (inclusive of the starting angle) at the fixed velocity.
        sequenceLength = 1 + int((params['maxAngularPosn'] - params['minAngularPosn'])
                    / params['minAngularVelocity'])
        # NOTE(review): `/` below appears to rely on Python 2 integer
        # division for the category index — confirm before porting to py3.
        state['catIndex'] = self._getIterCount() / (sequenceLength * params['numRepetitions'])
        seqIndex = self._getIterCount() % (sequenceLength * params['numRepetitions'])
        # Angle decreases from maxAngularPosn as the sequence advances.
        state['angularPosn'] = params['maxAngularPosn'] \
                             - state['angularVelocity'] * seqIndex
| gpl-3.0 |
iddqd1/django-cms | cms/test_utils/project/objectpermissionsapp/models.py | 13 | 2569 | # -*- coding: utf-8 -*-
from django.conf import settings
from django.db import models
from django.core.exceptions import ValidationError
from django.contrib.auth.models import Permission
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes.fields import GenericForeignKey
from django.utils.translation import ugettext_lazy as _
class UserObjectPermissionManager(models.Manager):
    """Manager providing assignment/removal of per-object permissions."""

    def assign_perm(self, perm, user, obj):
        """
        Assigns permission with given ``perm`` for an instance ``obj`` and
        ``user``.
        """
        if getattr(obj, 'pk', None) is None:
            raise Exception("Object %s needs to be persisted first" % obj)

        target_type = ContentType.objects.get_for_model(obj)
        permission = Permission.objects.get(content_type=target_type,
                                            codename=perm)
        grant, _created = self.get_or_create(
            permission=permission,
            user=user,
            content_type=target_type,
            object_pk=obj.pk,
        )
        return grant

    def remove_perm(self, perm, user, obj):
        """
        Removes permission ``perm`` for an instance ``obj`` and given ``user``.
        """
        if getattr(obj, 'pk', None) is None:
            raise Exception("Object %s needs to be persisted first" % obj)

        self.filter(
            permission__codename=perm,
            permission__content_type=ContentType.objects.get_for_model(obj),
            user=user,
            object_pk=obj.pk,
        ).delete()
class UserObjectPermission(models.Model):
    # Grants one user one permission on one specific object, identified by
    # (content_type, object_pk) via a generic foreign key.
    permission = models.ForeignKey(Permission)
    content_type = models.ForeignKey(ContentType)
    object_pk = models.CharField(_('object ID'), max_length=255)
    content_object = GenericForeignKey(fk_field='object_pk')
    user = models.ForeignKey(getattr(settings, 'AUTH_USER_MODEL', 'auth.User'))

    objects = UserObjectPermissionManager()

    def save(self, *args, **kwargs):
        """Validate that the permission matches the target object's model."""
        content_type = ContentType.objects.get_for_model(self.content_object)
        if content_type != self.permission.content_type:
            raise ValidationError("Cannot persist permission not designed for "
                "this class (permission's type is %r and object's type is %r)"
                % (self.permission.content_type, content_type))
        return super(UserObjectPermission, self).save(*args, **kwargs)

    class Meta:
        # One grant per (user, permission, object).
        unique_together = ['user', 'permission', 'object_pk']
| bsd-3-clause |
petrus-v/odoo | addons/account_anglo_saxon/invoice.py | 41 | 13486 | ##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C)
# 2004-2010 Tiny SPRL (<http://tiny.be>).
# 2009-2010 Veritos (http://veritos.nl).
# All Rights Reserved
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import osv, fields
from openerp.tools.float_utils import float_round as round
class account_invoice_line(osv.osv):
    # Anglo-saxon accounting: extend invoice lines with the extra move lines
    # (COGS / price-difference entries) generated at invoice validation.
    _inherit = "account.invoice.line"
    _columns = {
        'move_id': fields.many2one('stock.move', string="Move line", help="If the invoice was generated from a stock.picking, reference to the related move line."),
    }

    def move_line_get(self, cr, uid, invoice_id, context=None):
        """Extend the standard move lines with anglo-saxon COGS /
        price-difference entries, depending on the invoice type."""
        res = super(account_invoice_line,self).move_line_get(cr, uid, invoice_id, context=context)
        inv = self.pool.get('account.invoice').browse(cr, uid, invoice_id, context=context)
        if inv.type in ('out_invoice','out_refund'):
            for i_line in inv.invoice_line:
                res.extend(self._anglo_saxon_sale_move_lines(cr, uid, i_line, res, context=context))
        elif inv.type in ('in_invoice','in_refund'):
            for i_line in inv.invoice_line:
                res.extend(self._anglo_saxon_purchase_move_lines(cr, uid, i_line, res, context=context))
        return res

    def product_id_change(self, cr, uid, ids, product, uom_id, qty=0, name='', type='out_invoice', partner_id=False, fposition_id=False, price_unit=False, currency_id=False, company_id=None, context=None):
        """On supplier invoices, override the default account with the
        product's (or its category's) stock input account, mapped through
        the fiscal position."""
        fiscal_pool = self.pool.get('account.fiscal.position')
        res = super(account_invoice_line, self).product_id_change(cr, uid, ids, product, uom_id, qty, name, type, partner_id, fposition_id, price_unit, currency_id, company_id, context)
        if not product:
            return res
        if type in ('in_invoice','in_refund'):
            product_obj = self.pool.get('product.product').browse(cr, uid, product, context=context)
            if product_obj.type in ('consu', 'product'):
                # Stock input account: product first, then its category.
                oa = product_obj.property_stock_account_input and product_obj.property_stock_account_input.id
                if not oa:
                    oa = product_obj.categ_id.property_stock_account_input_categ and product_obj.categ_id.property_stock_account_input_categ.id
                if oa:
                    fpos = fposition_id and fiscal_pool.browse(cr, uid, fposition_id, context=context) or False
                    a = fiscal_pool.map_account(cr, uid, fpos, oa)
                    res['value'].update({'account_id':a})
        return res

    def _get_price(self, cr, uid, inv, company_currency, i_line, price_unit):
        """Return price_unit * quantity converted to the invoice currency
        (if it differs from the company currency), rounded to the
        'Account' decimal precision."""
        cur_obj = self.pool.get('res.currency')
        decimal_precision = self.pool.get('decimal.precision')
        if inv.currency_id.id != company_currency:
            price = cur_obj.compute(cr, uid, company_currency, inv.currency_id.id, price_unit * i_line.quantity, context={'date': inv.date_invoice})
        else:
            price = price_unit * i_line.quantity
        return round(price, decimal_precision.precision_get(cr, uid, 'Account'))

    def _anglo_saxon_sale_move_lines(self, cr, uid, i_line, res, context=None):
        """Return the additional move lines for sales invoices and refunds.

        i_line: An account.invoice.line object.
        res: The move line entries produced so far by the parent move_line_get.
        """
        inv = i_line.invoice_id
        fiscal_pool = self.pool.get('account.fiscal.position')
        fpos = inv.fiscal_position or False
        company_currency = inv.company_id.currency_id.id

        # COGS entries only apply to real-time valued, non-service products.
        if i_line.product_id.type != 'service' and i_line.product_id.valuation == 'real_time':
            # debit account dacc will be the output account
            # first check the product, if empty check the category
            dacc = i_line.product_id.property_stock_account_output and i_line.product_id.property_stock_account_output.id
            if not dacc:
                dacc = i_line.product_id.categ_id.property_stock_account_output_categ and i_line.product_id.categ_id.property_stock_account_output_categ.id
            # in both cases the credit account cacc will be the expense account
            # first check the product, if empty check the category
            cacc = i_line.product_id.property_account_expense and i_line.product_id.property_account_expense.id
            if not cacc:
                cacc = i_line.product_id.categ_id.property_account_expense_categ and i_line.product_id.categ_id.property_account_expense_categ.id
            if dacc and cacc:
                # Cost comes from the stock move when available, otherwise
                # the product's standard price, converted from the product
                # UoM to the line's UoS.
                if i_line.move_id:
                    price_unit = i_line.move_id.price_unit
                else:
                    price_unit = i_line.product_id.standard_price
                from_unit = i_line.product_id.uom_id.id
                to_unit = i_line.uos_id.id
                price_unit = self.pool['product.uom']._compute_price(cr, uid, from_unit, price_unit, to_uom_id=to_unit)
                # Two balancing lines: debit the stock output account,
                # credit the (fiscal-position-mapped) expense account.
                return [
                    {
                        'type':'src',
                        'name': i_line.name[:64],
                        'price_unit':price_unit,
                        'quantity':i_line.quantity,
                        'price':self._get_price(cr, uid, inv, company_currency, i_line, price_unit),
                        'account_id':dacc,
                        'product_id':i_line.product_id.id,
                        'uos_id':i_line.uos_id.id,
                        'account_analytic_id': False,
                        'taxes':i_line.invoice_line_tax_id,
                    },
                    {
                        'type':'src',
                        'name': i_line.name[:64],
                        'price_unit':price_unit,
                        'quantity':i_line.quantity,
                        'price': -1 * self._get_price(cr, uid, inv, company_currency, i_line, price_unit),
                        'account_id':fiscal_pool.map_account(cr, uid, fpos, cacc),
                        'product_id':i_line.product_id.id,
                        'uos_id':i_line.uos_id.id,
                        'account_analytic_id': i_line.account_analytic_id.id,
                        'taxes':i_line.invoice_line_tax_id,
                    },
                ]
        return []

    def _anglo_saxon_purchase_move_lines(self, cr, uid, i_line, res, context=None):
        """Return the additional move lines for purchase invoices and refunds.

        i_line: An account.invoice.line object.
        res: The move line entries produced so far by the parent move_line_get.
        """
        inv = i_line.invoice_id
        company_currency = inv.company_id.currency_id.id

        if i_line.product_id and i_line.product_id.valuation == 'real_time':
            if i_line.product_id.type != 'service':
                # get the price difference account at the product
                acc = i_line.product_id.property_account_creditor_price_difference and i_line.product_id.property_account_creditor_price_difference.id
                if not acc:
                    # if not found on the product get the price difference account at the category
                    acc = i_line.product_id.categ_id.property_account_creditor_price_difference_categ and i_line.product_id.categ_id.property_account_creditor_price_difference_categ.id
                a = None

                # oa will be the stock input account
                # first check the product, if empty check the category
                oa = i_line.product_id.property_stock_account_input and i_line.product_id.property_stock_account_input.id
                if not oa:
                    oa = i_line.product_id.categ_id.property_stock_account_input_categ and i_line.product_id.categ_id.property_stock_account_input_categ.id
                if oa:
                    # get the fiscal position
                    fpos = i_line.invoice_id.fiscal_position or False
                    a = self.pool.get('account.fiscal.position').map_account(cr, uid, fpos, oa)
                diff_res = []
                decimal_precision = self.pool.get('decimal.precision')
                account_prec = decimal_precision.precision_get(cr, uid, 'Account')
                # calculate and write down the possible price difference between invoice price and product price
                for line in res:
                    if line.get('invl_id', 0) == i_line.id and a == line['account_id']:
                        uom = i_line.product_id.uos_id or i_line.product_id.uom_id
                        valuation_price_unit = self.pool.get('product.uom')._compute_price(cr, uid, uom.id, i_line.product_id.standard_price, i_line.uos_id.id)
                        if i_line.product_id.cost_method != 'standard' and i_line.purchase_line_id:
                            #for average/fifo/lifo costing method, fetch real cost price from incomming moves
                            stock_move_obj = self.pool.get('stock.move')
                            valuation_stock_move = stock_move_obj.search(cr, uid, [('purchase_line_id', '=', i_line.purchase_line_id.id)], limit=1, context=context)
                            if valuation_stock_move:
                                valuation_price_unit = stock_move_obj.browse(cr, uid, valuation_stock_move[0], context=context).price_unit
                        if inv.currency_id.id != company_currency:
                            valuation_price_unit = self.pool.get('res.currency').compute(cr, uid, company_currency, inv.currency_id.id, valuation_price_unit, context={'date': inv.date_invoice})
                        if valuation_price_unit != i_line.price_unit and line['price_unit'] == i_line.price_unit and acc:
                            # price with discount and without tax included
                            price_unit = self.pool['account.tax'].compute_all(cr, uid, line['taxes'],
                                i_line.price_unit * (1-(i_line.discount or 0.0)/100.0), line['quantity'])['total']
                            price_line = round(valuation_price_unit * line['quantity'], account_prec)
                            price_diff = round(price_unit - price_line, account_prec)
                            line.update({'price': price_line})
                            diff_res.append({
                                'type': 'src',
                                'name': i_line.name[:64],
                                'price_unit': round(price_diff / line['quantity'], account_prec),
                                'quantity': line['quantity'],
                                'price': price_diff,
                                'account_id': acc,
                                'product_id': line['product_id'],
                                'uos_id': line['uos_id'],
                                'account_analytic_id': line['account_analytic_id'],
                                'taxes': line.get('taxes', []),
                            })
                return diff_res
        return []
class account_invoice(osv.osv):
    _inherit = "account.invoice"

    def _prepare_refund(self, cr, uid, invoice, date=None, period_id=None, description=None, journal_id=None, context=None):
        """For supplier-invoice refunds, redirect each refund line's account
        to the product's stock output account (product first, then its
        category), mapped through the invoice's fiscal position."""
        invoice_data = super(account_invoice, self)._prepare_refund(cr, uid, invoice, date, period_id,
                                                                    description, journal_id, context=context)
        if invoice.type == 'in_invoice':
            fiscal_position = self.pool.get('account.fiscal.position')
            # invoice_line holds (0, 0, values) command triples; only the
            # values dict is needed here.
            for _, _, line_dict in invoice_data['invoice_line']:
                if line_dict.get('product_id'):
                    product = self.pool.get('product.product').browse(cr, uid, line_dict['product_id'], context=context)
                    counterpart_acct_id = product.property_stock_account_output and \
                            product.property_stock_account_output.id
                    if not counterpart_acct_id:
                        counterpart_acct_id = product.categ_id.property_stock_account_output_categ and \
                                product.categ_id.property_stock_account_output_categ.id
                    if counterpart_acct_id:
                        fpos = invoice.fiscal_position or False
                        line_dict['account_id'] = fiscal_position.map_account(cr, uid,
                                                                              fpos,
                                                                              counterpart_acct_id)
        return invoice_data
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
SivilTaram/edx-platform | openedx/core/djangoapps/user_api/tests/test_models.py | 54 | 5918 | """
Test UserPreferenceModel and UserPreference events
"""
from django.db import IntegrityError
from django.test import TestCase
from student.tests.factories import UserFactory
from student.tests.tests import UserSettingsEventTestMixin
from xmodule.modulestore.tests.factories import CourseFactory
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from ..tests.factories import UserPreferenceFactory, UserCourseTagFactory, UserOrgTagFactory
from ..models import UserPreference
from ..preferences.api import set_user_preference
class UserPreferenceModelTest(ModuleStoreTestCase):
    """Tests for the UserPreference, UserCourseTag and UserOrgTag models."""

    def test_duplicate_user_key(self):
        # A second preference with the same (user, key) pair must raise —
        # presumably a uniqueness constraint on the model; verify in models.py.
        user = UserFactory.create()
        UserPreferenceFactory.create(user=user, key="testkey", value="first")
        self.assertRaises(
            IntegrityError,
            UserPreferenceFactory.create,
            user=user,
            key="testkey",
            value="second"
        )

    def test_arbitrary_values(self):
        # Values are free-form strings: empty, plain text, JSON-ish, and
        # non-ASCII bytes must all round-trip.
        user = UserFactory.create()
        self._create_and_assert(user=user, key="testkey0", value="")
        self._create_and_assert(user=user, key="testkey1", value="This is some English text!")
        self._create_and_assert(user=user, key="testkey2", value="{'some': 'json'}")
        self._create_and_assert(
            user=user,
            key="testkey3",
            value="\xe8\xbf\x99\xe6\x98\xaf\xe4\xb8\xad\xe5\x9b\xbd\xe6\x96\x87\xe5\xad\x97'"
        )

    def _create_and_assert(self, user, key, value):
        """Create a new preference and assert the values. """
        preference = UserPreferenceFactory.create(user=user, key=key, value=value)
        self.assertEqual(preference.user, user)
        self.assertEqual(preference.key, key)
        self.assertEqual(preference.value, value)
        return preference

    def test_create_user_course_tags(self):
        """Create user preference tags and confirm properties are set accordingly. """
        user = UserFactory.create()
        course = CourseFactory.create()
        tag = UserCourseTagFactory.create(user=user, course_id=course.id, key="testkey", value="foobar")
        self.assertEquals(tag.user, user)
        self.assertEquals(tag.course_id, course.id)
        self.assertEquals(tag.key, "testkey")
        self.assertEquals(tag.value, "foobar")

    def test_create_user_org_tags(self):
        """Create org specific user tags and confirm all properties are set """
        user = UserFactory.create()
        course = CourseFactory.create()
        tag = UserOrgTagFactory.create(user=user, org=course.id.org, key="testkey", value="foobar")
        self.assertEquals(tag.user, user)
        self.assertEquals(tag.org, course.id.org)
        self.assertEquals(tag.key, "testkey")
        self.assertEquals(tag.value, "foobar")
        self.assertIsNotNone(tag.created)
        self.assertIsNotNone(tag.modified)

        # Modify the tag and save it. Check if the modified timestamp is updated.
        original_modified = tag.modified
        tag.value = "barfoo"
        tag.save()
        self.assertEquals(tag.value, "barfoo")
        self.assertNotEqual(original_modified, tag.modified)

    def test_get_value(self):
        """Verifies the behavior of get_value."""
        user = UserFactory.create()
        key = 'testkey'
        value = 'testvalue'

        # does a round trip
        set_user_preference(user, key, value)
        pref = UserPreference.get_value(user, key)
        self.assertEqual(pref, value)

        # get preference for key that doesn't exist for user
        pref = UserPreference.get_value(user, 'testkey_none')
        self.assertIsNone(pref)
class TestUserPreferenceEvents(UserSettingsEventTestMixin, TestCase):
    """
    Mixin for verifying that user preference events are fired correctly.
    """
    def setUp(self):
        # Create one preference up front and reset the tracker so each test
        # only observes the events it triggers itself.
        super(TestUserPreferenceEvents, self).setUp()
        self.table = "user_api_userpreference"
        self.user = UserFactory.create()
        self.TEST_KEY = "test key"
        self.TEST_VALUE = "test value"
        self.user_preference = UserPreference.objects.create(user=self.user, key=self.TEST_KEY, value=self.TEST_VALUE)
        self.reset_tracker()

    def test_create_user_preference(self):
        """
        Verify that we emit an event when a user preference is created.
        """
        UserPreference.objects.create(user=self.user, key="new key", value="new value")
        self.assert_user_setting_event_emitted(setting='new key', old=None, new="new value")

    def test_update_user_preference(self):
        """
        Verify that we emit an event when a user preference is updated.
        """
        self.user_preference.value = "new value"
        self.user_preference.save()
        self.assert_user_setting_event_emitted(setting=self.TEST_KEY, old=self.TEST_VALUE, new="new value")

    def test_delete_user_preference(self):
        """
        Verify that we emit an event when a user preference is deleted.
        """
        self.user_preference.delete()
        self.assert_user_setting_event_emitted(setting=self.TEST_KEY, old=self.TEST_VALUE, new=None)

    def test_truncated_user_preference_event(self):
        """
        Verify that we truncate the preference value if it is too long.
        """
        # Values longer than MAX_STRING_LENGTH are truncated in the emitted
        # event, and the affected fields are reported in `truncated`.
        MAX_STRING_LENGTH = 12500
        OVERSIZE_STRING_LENGTH = MAX_STRING_LENGTH + 10
        self.user_preference.value = "z" * OVERSIZE_STRING_LENGTH
        self.user_preference.save()
        self.assert_user_setting_event_emitted(
            setting=self.TEST_KEY, old=self.TEST_VALUE, new="z" * MAX_STRING_LENGTH, truncated=["new"]
        )

        self.user_preference.value = "x" * OVERSIZE_STRING_LENGTH
        self.user_preference.save()
        self.assert_user_setting_event_emitted(
            setting=self.TEST_KEY, old="z" * MAX_STRING_LENGTH, new="x" * MAX_STRING_LENGTH, truncated=["old", "new"]
        )
| agpl-3.0 |
iradul/qtwebkit | Tools/Scripts/webkitpy/common/system/platforminfo.py | 122 | 6974 | # Copyright (c) 2011 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import re
import sys
class PlatformInfo(object):
    """This class provides a consistent (and mockable) interpretation of
    system-specific values (like sys.platform and platform.mac_ver())
    to be used by the rest of the webkitpy code base.

    Public (static) properties:
    -- os_name
    -- os_version

    Note that 'future' is returned for os_version if the operating system is
    newer than one known to the code.
    """

    def __init__(self, sys_module, platform_module, executive):
        # sys_module / platform_module are injectable for testing.
        self._executive = executive
        self._platform_module = platform_module
        self.os_name = self._determine_os_name(sys_module.platform)
        if self.os_name == 'linux':
            self.os_version = self._determine_linux_version()
        if self.os_name == 'freebsd':
            self.os_version = platform_module.release()
        if self.os_name.startswith('mac'):
            self.os_version = self._determine_mac_version(platform_module.mac_ver()[0])
        if self.os_name.startswith('win'):
            self.os_version = self._determine_win_version(self._win_version_tuple(sys_module))
        self._is_cygwin = sys_module.platform == 'cygwin'

    def is_mac(self):
        return self.os_name == 'mac'

    def is_win(self):
        return self.os_name == 'win'

    def is_cygwin(self):
        return self._is_cygwin

    def is_linux(self):
        return self.os_name == 'linux'

    def is_freebsd(self):
        return self.os_name == 'freebsd'

    def display_name(self):
        """Return a human-readable platform description."""
        # platform.platform() returns Darwin information for Mac, which is just confusing.
        if self.is_mac():
            return "Mac OS X %s" % self._platform_module.mac_ver()[0]

        # Returns strings like:
        # Linux-2.6.18-194.3.1.el5-i686-with-redhat-5.5-Final
        # Windows-2008ServerR2-6.1.7600
        return self._platform_module.platform()

    def total_bytes_memory(self):
        """Return physical memory in bytes on Mac, None elsewhere."""
        if self.is_mac():
            # int() instead of the Python-2-only long(); Python 2 promotes
            # to long automatically when the value overflows.
            return int(self._executive.run_command(["sysctl", "-n", "hw.memsize"]))
        return None

    def terminal_width(self):
        """Returns sys.maxint if the width cannot be determined."""
        try:
            if self.is_win():
                # From http://code.activestate.com/recipes/440694-determine-size-of-console-window-on-windows/
                from ctypes import windll, create_string_buffer
                handle = windll.kernel32.GetStdHandle(-12)  # -12 == stderr
                console_screen_buffer_info = create_string_buffer(22)  # 22 == sizeof(console_screen_buffer_info)
                if windll.kernel32.GetConsoleScreenBufferInfo(handle, console_screen_buffer_info):
                    import struct
                    _, _, _, _, _, left, _, right, _, _, _ = struct.unpack("hhhhHhhhhhh", console_screen_buffer_info.raw)
                    # Note that we return 1 less than the width since writing into the rightmost column
                    # automatically performs a line feed.
                    return right - left
                return sys.maxint
            else:
                import fcntl
                import struct
                import termios
                packed = fcntl.ioctl(sys.stderr.fileno(), termios.TIOCGWINSZ, '\0' * 8)
                _, columns, _, _ = struct.unpack('HHHH', packed)
                return columns
        # BUGFIX: was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; any ordinary failure still falls back below.
        except Exception:
            return sys.maxint

    def _determine_os_name(self, sys_platform):
        """Map a sys.platform string to one of mac/linux/win/freebsd."""
        if sys_platform == 'darwin':
            return 'mac'
        if sys_platform.startswith('linux'):
            return 'linux'
        if sys_platform in ('win32', 'cygwin'):
            return 'win'
        if sys_platform.startswith('freebsd'):
            return 'freebsd'
        raise AssertionError('unrecognized platform string "%s"' % sys_platform)

    def _determine_mac_version(self, mac_version_string):
        """Map a version string like '10.7.5' to a release codename.

        BUGFIX: the minor release component is now compared numerically.
        The previous implementation compared strings, so any minor version
        >= 10 (e.g. '10.10.x' -> '10') failed the sanity assert because
        '10' < '5' lexicographically; numerically it correctly maps to
        'future'.
        """
        release_version = int(mac_version_string.split('.')[1])
        version_strings = {
            5: 'leopard',
            6: 'snowleopard',
            7: 'lion',
            8: 'mountainlion',
        }
        assert release_version >= min(version_strings.keys())
        return version_strings.get(release_version, 'future')

    def _determine_linux_version(self):
        # FIXME: we ignore whatever the real version is and pretend it's lucid for now.
        return 'lucid'

    def _determine_win_version(self, win_version_tuple):
        """Map a (major, minor, build) tuple to a Windows release name."""
        if win_version_tuple[:3] == (6, 1, 7600):
            return '7sp0'
        if win_version_tuple[:2] == (6, 0):
            return 'vista'
        if win_version_tuple[:2] == (5, 1):
            return 'xp'
        assert win_version_tuple[0] > 6 or win_version_tuple[1] >= 1, 'Unrecognized Windows version tuple: "%s"' % (win_version_tuple,)
        return 'future'

    def _win_version_tuple(self, sys_module):
        """Return the Windows version tuple, falling back to `cmd /c ver`."""
        if hasattr(sys_module, 'getwindowsversion'):
            return sys_module.getwindowsversion()
        return self._win_version_tuple_from_cmd()

    def _win_version_tuple_from_cmd(self):
        # Note that this should only ever be called on windows, so this should always work.
        ver_output = self._executive.run_command(['cmd', '/c', 'ver'], decode_output=False)
        match_object = re.search(r'(?P<major>\d)\.(?P<minor>\d)\.(?P<build>\d+)', ver_output)
        assert match_object, 'cmd returned an unexpected version string: ' + ver_output
        return tuple(map(int, match_object.groups()))
| gpl-2.0 |
jcpowermac/ansible | test/units/module_utils/facts/base.py | 118 | 2325 | # base unit test classes for ansible/module_utils/facts/ tests
# -*- coding: utf-8 -*-
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
# Make coding more python3-ish
from __future__ import (absolute_import, division)
__metaclass__ = type
from ansible.compat.tests import unittest
from ansible.compat.tests.mock import Mock
class BaseFactsTest(unittest.TestCase):
    """Shared scaffolding for fact-collector tests.

    Not a runnable test case itself (``__test__ = False``); concrete
    subclasses supply ``collector_class`` and friends.
    """
    __test__ = False

    gather_subset = ['all']
    valid_subsets = None
    fact_namespace = None
    collector_class = None

    # Some collectors rely on facts produced by other collectors (such as
    # 'ansible_architecture' or 'ansible_system'); subclasses can supply
    # those here, and they are forwarded via the collected_facts argument
    # of collect().
    collected_facts = None

    def _mock_module(self):
        """Build a Mock standing in for an AnsibleModule."""
        module = Mock()
        module.params = {'gather_subset': self.gather_subset,
                         'gather_timeout': 5,
                         'filter': '*'}
        module.get_bin_path = Mock(return_value=None)
        return module

    def test_collect(self):
        collector = self.collector_class()
        facts_dict = collector.collect(module=self._mock_module(),
                                       collected_facts=self.collected_facts)
        self.assertIsInstance(facts_dict, dict)
        return facts_dict

    def test_collect_with_namespace(self):
        collector = self.collector_class()
        facts_dict = collector.collect_with_namespace(module=self._mock_module(),
                                                      collected_facts=self.collected_facts)
        self.assertIsInstance(facts_dict, dict)
        return facts_dict
| gpl-3.0 |
jonaustin/advisoryscan | django/tests/regressiontests/one_to_one_regress/models.py | 4 | 1105 | from django.db import models
class Place(models.Model):
    """A physical location; Restaurant links to it one-to-one."""
    # 'maxlength' (not max_length) is the pre-1.0 Django spelling.
    name = models.CharField(maxlength=50)
    address = models.CharField(maxlength=80)
    def __str__(self):
        return "%s the place" % self.name
class Restaurant(models.Model):
    """A restaurant occupying exactly one Place (one-to-one)."""
    place = models.OneToOneField(Place)
    serves_hot_dogs = models.BooleanField()
    serves_pizza = models.BooleanField()
    def __str__(self):
        return "%s the restaurant" % self.place.name
class Favorites(models.Model):
    """A named list of favorite restaurants (many-to-many)."""
    name = models.CharField(maxlength = 50)
    restaurants = models.ManyToManyField(Restaurant)
    def __str__(self):
        return "Favorites for %s" % self.name
# Doctests run by Django's test runner through the standard module-level
# __test__ mapping (doctest discovery convention).
__test__ = {'API_TESTS':"""
# Regression test for #1064 and #1506: Check that we create models via the m2m
# relation if the remote model has a OneToOneField.
>>> p1 = Place(name='Demon Dogs', address='944 W. Fullerton')
>>> p1.save()
>>> r = Restaurant(place=p1, serves_hot_dogs=True, serves_pizza=False)
>>> r.save()
>>> f = Favorites(name = 'Fred')
>>> f.save()
>>> f.restaurants = [r]
>>> f.restaurants.all()
[<Restaurant: Demon Dogs the restaurant>]
"""}
| mit |
sunyanhui/pyresttest | pyresttest/ext/validator_jsonschema.py | 7 | 1784 | import traceback
import json
import yaml
import jsonschema
# TODO see if there's a clever way to avoid this nastiness
try:
import validators
import binding
import parsing
import contenthandling
except ImportError:
from pyresttest import validators
from pyresttest import binding
from pyresttest import parsing
from pyresttest import contenthandling
class JsonSchemaValidator(validators.AbstractValidator):
    """ Json schema validator using the jsonschema library """
    # ContentHandler holding the schema source; set by parse().
    schema = None
    def validate(self, body=None, headers=None, context=None):
        """Validate the JSON response body against the configured schema.

        Returns True on success, a validators.Failure on schema violation.
        NOTE(review): errors from json.loads (malformed body) and from
        yaml.safe_load (malformed schema) are NOT caught here and will
        propagate to the caller -- confirm that is intended.
        """
        schema_text = self.schema.get_content(context=context)
        # The schema is loaded as YAML, which also accepts plain JSON.
        schema = yaml.safe_load(schema_text)
        # TODO add caching of parsed schema
        try:
            # TODO try draft3/draft4 iter_errors - https://python-jsonschema.readthedocs.org/en/latest/validate/#jsonschema.IValidator.iter_errors
            jsonschema.validate(json.loads(body), schema)
            return True
        except jsonschema.exceptions.ValidationError as ve:
            # 've' itself is unused; the details are captured via the
            # formatted traceback instead.
            trace = traceback.format_exc()
            return validators.Failure(message="JSON Schema Validation Failed", details=trace, validator=self, failure_type=validators.FAILURE_VALIDATOR_EXCEPTION)
    def get_readable_config(self, context=None):
        """One-line human-readable description of this validator."""
        return "JSON schema validation"
    @classmethod
    def parse(cls, config):
        """Build a validator from test config; requires a 'schema' element."""
        validator = JsonSchemaValidator()
        config = parsing.lowercase_keys(config)
        if 'schema' not in config:
            raise ValueError("Cannot create schema validator without a 'schema' configuration element!")
        validator.schema = contenthandling.ContentHandler.parse_content(config['schema'])
        return validator
VALIDATORS = {'json_schema': JsonSchemaValidator.parse} | apache-2.0 |
db0/Doomtown-Reloaded-OCTGN | o8g/Scripts/CardScripts.py | 1 | 122681 | ### ANR CARD SCRIPTS ###
# 5 Equal Signs (=) signifiies a break between the description (what you're currently reading) and the code
# 5 Dashes (-) signifies a break between the card name, the GUID and the card scripts. The card name is ignored by the code, only the GUID and Scripts are used.
# 5 Plus Signs (+) signifies a break between AutoActions and AutoScripts for the same card
# 5 Dots (.) signifies a break between different cards.
# Card names which start with * have special custom code just for them (cards which use CustomScript or useCustomAbility don't have *)
# Do not edit below the line
ScriptsLocal = '''
=====
1st Baptist Church
-----
94c55a0c-1599-4eee-8699-ad9c63e375a1
-----
constantAbility:HandSizePlus1
+++++
.....
A Coach Comes to Town
-----
6656eabf-4a69-4854-950d-f30d7771c4ae
-----
onPlay:StartJob-AutoTargeted-atTown Square-jobEffects<Gain4Ghost Rock,Gain4Ghost Rock-onOpponent>
+++++
.....
Abram Grothe
-----
44946fbc-1bc0-4a1a-9a55-6138b795bfc8
-----
+++++
GR0B1R0:StartJob-DemiAutoTargeted-atDeed_and_Holy Ground-choose1-jobEffects<DiscardMulti-Targeted-atDude-MarkNotTheTarget,None>
.....
Ace in the Hole
-----
cf239340-c794-4a91-a241-e3cbafec2f6e
-----
+++++
GR0B1R0:BootHost-isCost$$Pull1Card-testHex6-spellEffects<AceTarget-DemiAutoTargeted-isDrawHand-choose1-isCost++SendToDrawTarget-DemiAutoTargeted-fromHand-choose1,None>
.....
Allie Hensman
-----
dfd2d635-4b88-4cea-9939-14deec3896cf
-----
+++++
GR0B1R0:Put1PermControlPlus
.....
Ambush
-----
cc2092bd-b575-4a26-8e96-aa13f8736d75
-----
onPlay:StartJob-DemiAutoTargeted-atDude-bootLeader-choose1-targetOpponents-jobEffects<AceTarget,None>
+++++
.....
Andreas Andregg
-----
97061219-899d-4db0-b4a2-ab59bd651df6
-----
+++++
.....
Androcles Brocklehurst
-----
ecfa0567-5576-427f-a829-a049e817f4b0
-----
+++++
GR0B1R0:Gain1Ghost Rock-perTargetProperty{Influence}-Targeted-atDude-targetOpponents
.....
Arnold McCadish
-----
1aa58444-fccc-4121-ac6c-482fd48e4b8e
-----
+++++
GR0B1R0:Pull1Card
.....
Auction
-----
57763c43-994c-4682-ab8d-d8d732fe915e
-----
+++++
.....
Auto Cattle-Feeder
-----
20376d1c-4d9a-42ce-8b34-a95a175f1623
-----
+++++
GR0B1R0:Gain1Ghost Rock
.....
Automatic Mini-Revolver
-----
239ef52d-12a1-47b6-8838-28a8c021a251
-----
+++++
.....
Avie Cline
-----
2871b5b3-fd14-40d1-90e5-f7ea4dae30ba
-----
+++++
.....
B&B Attorneys
-----
232f0a41-2ef5-47a0-9c32-6ccb8eb3c84e
-----
+++++
GR0B1R0:Put1Bounty-Targeted-atDude-hasMarker{Bounty}||GR0B1R0:Remove1Bounty-Targeted-atDude
.....
Bad Company
-----
9a18cd02-63dd-4381-ba9f-49d80fac935a
-----
onPlay:Put3BulletNoonPlus-Targeted-atDude-hasMarker{Bounty}$$Put1Bad Company-Targeted-atDude-hasMarker{Bounty}$$Remove999High Noon:Draw-Targeted-atDude-hasMarker{Bounty}-isSilent$$Put1High Noon:Stud-Targeted-atDude-hasMarker{Bounty}-isSilent
+++++
.....
Bank of California
-----
3c904ee6-15de-4478-8837-61e394bb31ee
-----
+++++
.....
Barton Everest
-----
c3638e9f-f664-43a6-abe0-645aab082455
-----
+++++
GR0B0R1:SimplyAnnounce{increase their draw hand rank by 1}
.....
Blake Ranch
-----
2ad321e1-eaa0-4984-bdd0-a8d088045588
-----
+++++
.....
Blood Curse
-----
597ff0fc-a578-4215-9c6d-c5452d387870
-----
+++++
GR0B1R0:Pull1Card-testHex9-spellEffects<Put2BulletShootoutMinus-DemiAutoTargeted-atDude-isParticipating-targetOpponents-choose1,None>-onlyInShootouts||GR0B1R0:Pull1Card-testHex9-spellEffects<Put1BulletNoonMinus-Targeted-atDude++Put1InfluenceMinus-Targeted-atDude,None>-onlyInNoon
.....
Bluetick
-----
a914540e-8564-44a4-b4cf-64f9f392218b
-----
+++++
GR0B1R0:MoveHost-moveToDude-hasMarker{Bounty}
.....
Bobo
-----
abb271cf-7cbb-4a15-bd45-0458366c6f65
-----
+++++
.....
Bottom Dealin'
-----
1a3e8af5-302b-47e5-8c30-901b8ed995bc
-----
onPlay:CustomScript
+++++
.....
Bounty Hunter
-----
5cd89aa8-b973-4fa6-be81-087b5e369ed4
-----
onPlay:Spawn1Gunslinger-modAction:CalloutTarget-Targeted-atDude-targetOpponents-hasMarker{Bounty}
+++++
.....
Buffalo Rifle
-----
efaf839b-f06f-4aff-982b-a99ae00f340c
-----
+++++
.....
Bunkhouse
-----
9cba89ca-5a3f-4dad-aff0-91d62581dc31
-----
+++++
.....
Carter's Bounties
-----
6560b478-c5fe-4717-a2d1-40ef7c6effa6
-----
+++++
GR0B1R0:ParticipateTarget-DemiAutoTargeted-atDude-targetMine-choose1-isNotParticipating
.....
Cattle Market
-----
2cd7a17c-ce6a-4ffd-92b0-06fe310c7d6c
-----
+++++
.....
Charlie's Place
-----
57528427-b65a-4056-9fea-082bba8ef4a9
-----
+++++
GR0B1R0:Put2BulletNoonPlus-Targeted-atDude||GR0B1R0:Put2BulletNoonMinus-Targeted-atDude
.....
Cheatin' Varmint
-----
3029937b-17cc-4c87-a109-9888747f3134
-----
onPlay:CustomScript||onPlay:SimplyAnnounce{Reduce a player's draw rank by 2 hand ranks}
+++++
.....
Circle M Ranch
-----
ec741b63-8ac4-4b5a-9767-9854e05b91c3
-----
+++++
GR0B1R0:Draw1Card
.....
Clear Out!
-----
37d550e3-6bb5-4744-a544-fcbba0153780
-----
+++++
.....
Clementine Lepp
-----
5d610a44-7f13-4333-a941-ee5e1fa2fb37
-----
+++++
.....
Clint Ramsey
-----
077d03fd-1509-43a2-8844-c2bd8d62b837
-----
+++++
.....
Clyde Owens
-----
27d3649c-fdcf-4b40-83ec-d7d04543cefe
-----
+++++
GR0B0R0:CalloutTarget-Targeted-atDude
.....
Coachwhip
-----
0c41f0c4-2491-4723-8cf9-d8ab3e071e05
-----
onPlay:CustomScript
+++++
.....
Concealed Weapons
-----
9c4f6a3b-70e0-40d8-adde-e26c4e5eab12
-----
onPlay:CreateDummy$$SimplyAnnounce{use shoppin' as a Shootout play, and at any location}||atPhaseSundown:DiscardMyself-onlyforDummy
+++++
.....
Dead Dog Tavern
-----
9fe6ced8-a97c-49e8-8806-0ba465f535f6
-----
+++++
.....
Doyle's Hoyle
-----
0482af4d-f834-4f73-88fc-9f57e44c465a
-----
+++++
.....
Dr. Dawn Edwards
-----
36c91361-cab3-4988-bed2-2c3401746695
-----
+++++
GR0B0R1:DiscardMyself$$Retrieve1Card-toTable-grabEve Henry
.....
Elander Boldman
-----
8e50a03b-b42c-4207-9d0d-7a144ad31e3b
-----
+++++
GR0B0R0:CustomScript
.....
Establishin' Who's in Charge
-----
2ae1a463-0106-426c-b9fd-6bb22df27aff
-----
onPlay:StartJob-DemiAutoTargeted-atDeed-choose1-jobEffects<Put1PermControlPlus,None>
+++++
.....
Eve Henry
-----
f149dcc9-44a4-4399-8786-62d2496559e4
-----
+++++
GR0B0R1:DiscardMyself$$Retrieve1Card-toTable-grabDr. Dawn Edwards
.....
Extortion
-----
b61dc1ab-393c-4827-a6ec-4f9ffb870491
-----
onPlay:Gain1Ghost Rock-perTargetProperty{Production}-Targeted-atDeed-targetMine
+++++
.....
Flame-Thrower
-----
c20de5e3-daea-4dce-a8ce-eaa5349c8187
-----
+++++
GR0B1R0:RequestInt-Min1-Max3{Boost your Flamethrower by how much?}$$Lose1Ghost Rock-perX-isCost$$Put1BulletShootoutPlus-perX
.....
Force Field
-----
40302c3e-668e-474f-a2ae-7ab1bbdf9d63
-----
+++++
GR1B0R1:SimplyAnnounce{increase their hand rank by 1}
.....
Fred Aims
-----
fa1e44a1-2064-4d70-8286-854d80b50da2
-----
+++++
.....
Fresh Horses
-----
51b788af-7a36-4af4-9f2e-302441634966
-----
onPlay:UnbootMulti-Targeted-atHorse
+++++
.....
General Store
-----
45438749-ab3b-4f88-adb3-8c3cc65cfd54
-----
+++++
GR0B1R0:PlayTarget-DemiAutoTargeted-fromHand-atGoods_or_Spell-choose1-payCost-reduc2
.....
Gomorra Parish
-----
972201b6-4fc1-44ed-8ed0-a16b1d609265
-----
+++++
GR0B1R0:AceTarget-DemiAutoTargeted-fromHand-choose1$$Gain1Ghost Rock
.....
Good Stiff Drink
-----
74fbc995-aeca-4026-a4f4-682684e6feb1
-----
onPlay:Remove1UsedAbility-DemiAutoTargeted-atDude-hasMarker{UsedAbility}-choose1
+++++
.....
Harold Aimslee
-----
f64c8b68-0d63-4f5d-a898-f38576024f75
-----
+++++
GR0B1R0:DiscardTarget-DemiAutoTargeted-choose1-fromHand$$Retrieve1Cards-fromDiscard-grabGadget_and_nonWeapon
.....
Hex Slingin'
-----
7a949a25-df55-47e3-9d6e-674b51cc0f0f
-----
onPlay:DiscardTarget-Targeted-atHex-targetMine
+++++
.....
Hiding in the Shadows
-----
5c5b6579-a2de-419d-b531-6d08c1eba77d
-----
onPlay:Put1Hiding in the Shadows-Targeted-atDude
+++++
.....
Hired Guns
-----
fd307ab6-2c39-438c-8b2e-b6ffd74b15bc
-----
onPlay:Retrieve1Card-fromDiscard-grabDude
+++++
.....
Hot Lead Flyin'
-----
460fcab1-be68-41f1-90bf-41ee11aa17c4
-----
onPlay:Pull1Card
+++++
.....
Irving Patterson
-----
d9586ca6-50bc-446e-aa3b-fc84b0545dcd
-----
onParticipation:Gain1Ghost Rock
+++++
.....
Ivor Hawley
-----
b072a22f-6c55-441f-8cce-4351038ed15c
-----
+++++
GR0B0R0:Put1Huckster Skill Bonus
.....
Jackson's Strike
-----
0eeba3c3-792a-4d39-879a-aab77b0f3981
-----
+++++
.....
James Ghetty
-----
feb37a5b-a95c-4c03-92e0-acd27d7a3ef3
-----
onParticipation:Put4Ghost Rock||onUnparticipation:Remove999Ghost Rock-isSilent
+++++
GR0B0R1:Remove1Ghost Rock-isCost-isSilent$$Gain1Ghost Rock
.....
Jarrett Blake
-----
c57efc28-77d3-492b-81ad-f3c5ce130041
-----
+++++
GR0B0R0:CustomScript
.....
Jia Mein
-----
2419a7fc-569e-43bc-b5b7-360d41133e0c
-----
+++++
GR0B0R0:PlayTarget-DemiAutoTargeted-fromHand-atSpell-choose1-payCost-isCost$$Remove999Shootout:Draw-isSilent$$Put1Shootout:Stud
.....
Jon Longstride
-----
0c0087d0-6356-44a4-8237-15f86573716a
-----
+++++
GR0B0R0:UnbootTarget-AutoTargeted-atHorse-onAttachment$$Remove1Used Ability-AutoTargeted-atHorse-onAttachment-isSilent
.....
Jonah Essex
-----
f95140da-3b72-4210-95fb-bfe7efe2cca4
-----
+++++
.....
Judge Harry Somerset
-----
9c2ec4f0-65cf-4408-81ff-b377e37f7ecb
-----
+++++
GR0B1R0:StartJob-DemiAutoTargeted-atDude-hasMarker{Bounty}-targetOpponents-choose1-jobEffects<AceTarget,None>
.....
Kevin Wainwright
-----
03c50bb2-d027-4d02-b8b6-4bc542dceddb
-----
+++++
GR0B0R0:MoveMyself-moveToDude_and_Huckster$$Remove999High Noon:Draw-isSilent$$Put1High Noon:Stud
.....
Kidnappin'
-----
e11367a5-6fad-40a4-b22a-a2571eb7c330
-----
onPlay:StartJob-DemiAutoTargeted-atDude-choose1-bootLeader-bountyPosse-targetOpponents-jobEffects<DiscardTarget,None>
+++++
.....
Killer Bunnies Casino
-----
b4442f8d-71d0-4d12-9000-4df27ea78643
-----
+++++
.....
Lady Luck
-----
aced4a1e-d5c9-423d-8d47-892c19c6a859
-----
+++++
.....
Lane Healey
-----
c263b2ff-3d4b-4beb-aa3b-d4e1f72e56ff
-----
+++++
.....
Law Dogs
-----
e6bee3e6-0ccd-40e6-a447-28cb032b7448
-----
+++++
GR0B1R0:Put1Bounty-Targeted-atDude-targetOpponents-choose1$$BootMulti-Targeted-atDude-targetMine
.....
Lawrence Blackwood
-----
8ed33c69-c79e-4ced-a180-3fe57bf0d25d
-----
+++++
GR0B1R0:Put1ControlPlus
.....
Legendary Holster
-----
677bbf45-1f98-49aa-b8cc-b63b8f3d5146
-----
+++++
GR0B1R0:Pull1Card
.....
Lucinda "Lucy" Clover
-----
567d93e7-2497-4ddb-aa04-01c927f00bc8
-----
+++++
GR0B0R1:Put1Bounty-AutoTargeted-atDude-isParticipating-targetOpponents
.....
Magical Distraction
-----
72853004-7ce4-4d66-a486-fc28bb87a048
-----
onPlay:DiscardTarget-Targeted-asSpell-targetMine$$Pull1Card
+++++
.....
Make the Smart Choice
-----
9ead8579-7077-45e4-a44d-cf284f55b3e5
-----
onPlay:Put1BulletShootoutMinus-DemiAutoTargeted-atDude-perTargetProperty{Influence}-isParticipating-choose1
+++++
.....
Marion Seville
-----
3bd5addb-ac4e-45aa-9971-bd445222344e
-----
+++++
.....
Max Baine
-----
4ad6e3d4-3c6c-43a4-a612-293586609150
-----
+++++
GR0B0R1:Refill1PermControlPlus
.....
Mechanical Horse
-----
aa1dec3f-9913-4bf1-bbcd-5d29457f6a80
-----
+++++
GR2B0R1:MoveHost-moveToDeed_or_Town Square_or_Outfit
.....
Micah Ryse
-----
6d31cb7e-a409-4256-8bba-26778de6c5f4
-----
+++++
GR0B0R0:BootTarget-AutoTargeted-atSpell-isUnbooted-onAttachment-choose1-isCost$$MoveMyself-moveToDeed_or_Town Square_or_Outfit
.....
Missed!
-----
00a45909-a052-457d-a71b-119e415c03c7
-----
onPlay:UnbootTarget-Targeted-atDude-isParticipating
+++++
.....
Mongwau the Mighty
-----
18866ac8-9a24-45d7-8f67-df20afdc2dbb
-----
+++++
GR0B0R0:DiscardTarget-DemiAutoTargeted-atSpell-onAttachment-isCost-choose1$$Remove999Shootout:Draw-isSilent$$Put1Shootout:Stud
.....
Morgan Cattle Co.
-----
4f21000a-fb64-4e4b-8e8a-1c5d588dc577
-----
+++++
GR0B1R0:CustomScript
.....
Mustang
-----
9aa68e21-1eee-4d44-993e-dd8cf60ed613
-----
+++++
GR0B1R0:MoveHost-moveToDeed_or_Town Square_or_Outfit
.....
Olivia Jenks
-----
65d4cd16-fc61-4412-8a39-a6ad384fa766
-----
+++++
.....
One Good Turn
-----
1950bc30-abf5-4ed4-b20a-e56bb4032de0
-----
onPlay:Draw1Card||onPlay:Gain3Ghost Rock
+++++
.....
Pair of Six-Shooters
-----
2dbddcbc-d228-451d-8cde-32b65abe769e
-----
+++++
GR0B1R0:SimplyAnnounce{change one card in their draw hand to the suit and value of their choice.}
.....
Pancho Castillo
-----
d9d621f3-1905-4a99-8f5a-f1feb9639a64
-----
+++++
.....
Pat's Perch
-----
69ccb375-f660-4d91-a405-2103b578f100
-----
+++++
.....
Peacemaker
-----
022097bf-7e34-4b78-b7ca-3d60ca13eddd
-----
+++++
.....
Pearl-handled Revolver
-----
c8453a00-e4f9-4dbe-922c-3daf79586cef
-----
+++++
.....
Pearly's Palace
-----
ed26a010-8947-456f-b8f7-2741e083b54a
-----
+++++
GR0B1R0:SimplyAnnounce{make a Shootout play before anyone else}
.....
Philip Swinford
-----
fb5f0076-bbb3-486e-9420-02084643592b
-----
+++++
GR0B0R1:DiscardTarget-DemiAutoTargeted-fromHand-choose1-isCost$$Draw1Card
.....
Pinned Down
-----
b5fbbe61-aba3-4de7-b501-4feb1f9cf203
-----
onPlay:Put1Shootout:Pinned Down-Targeted-atDude-isParticipating-targetOpponents
+++++
.....
Pinto
-----
9e712f98-1a8b-4137-997e-e61e6549102c
-----
+++++
GR0B1R0:ParticipateHost
.....
Pistol Whip
-----
1536769f-42f4-4840-9360-96d2e7e3f366
-----
onPlay:Put1BulletShootoutMinus-Targeted-atDude-isParticipating-isUnbooted-targetMine$$BootTarget-DemiAutoTargeted-atDude-isParticipating-isUnbooted-targetMine-choose1-isCost$$SendHomeBootedTarget-DemiAutoTargeted-atDude-isParticipating-targetOpponents-choose1
+++++
.....
Point Blank
-----
498d4eca-db79-40db-9a42-cc053abcfc43
-----
onPlay:BootTarget-DemiAutoTargeted-atDude-isParticipating-isUnbooted-isStudDude-targetMine-choose1-isCost$$SimplyAnnounce{force their opponent to ace a dude with less bullets}
+++++
.....
Pony Express
-----
a9b3a2b8-9f22-474d-b35a-59a380c0921f
-----
+++++
GR0B1R0:DiscardTarget-DemiAutoTargeted-fromHand-choose1-isCost$$Draw1Card
.....
Prescott Utter
-----
ab7172e2-0e3f-408d-81ec-afb478385cfe
-----
+++++
.....
Prof Eustace True
-----
f8437fa5-c088-41b2-b6c9-2060f69ec7be
-----
+++++
GR0B0R0:DiscardTarget-DemiAutoTargeted-atGadget-onAttachment-choose1-isCost$$MoveMyself-moveToDeed_or_Town Square_or_Outfit
.....
Railroad Station
-----
fa907076-61c7-4f1f-b9a4-87a1df59bfae
-----
+++++
GR0B1R0:MoveTarget-Targeted-atDude-moveToDeed_or_Town Square_or_Outfit
.....
Raising Hell
-----
0ab4453e-e9bd-4899-96e1-1eb6757ecd94
-----
+++++
GR0B1R0:Pull1Card-testHex8-spellEffects<AceTarget-DemiAutoTargeted-fromHand-choose1++Retrieve1Card-fromBootHill-toTable-grabAbomination-payCost++AceMyself,None>-onlyInShootouts
.....
Ramiro Mendoza
-----
d677fa1e-61f3-422a-9b95-745692c7b800
-----
onParticipation:Lose1Ghost Rock-isCost
+++++
.....
Recruitment Drive
-----
9ef9abea-373c-4265-b7fc-3d8391095f64
-----
onPlay:StartJob-AutoTargeted-atTown Square-jobEffects<Retrieve1Card-grabDeed_or_Dude-fromDiscard-toTable-payCost-reduc5,None>
+++++
.....
Remy Lapointe
-----
cb15bd27-76a2-48db-b324-99589b14982b
-----
+++++
GR0B0R0:RequestInt-Min1-Max4{Combien de ghost rock veux tu depenser pour augmenter ton niveau de bullet?}$$Lose1Ghost Rock-isCost-perX$$Put1BulletShootoutPlus-perX
.....
Reserves
-----
0672772c-7380-4b36-be78-1c7b23b3d950
-----
onPlay:Gain1Ghost Rock
+++++
.....
Roan
-----
ff6f51e6-ab2f-4fb9-8670-1715b278cc8c
-----
+++++
.....
Rumors
-----
d7e770dd-793f-4a53-a404-e3873a762ad1
-----
onPlay:Put1InfluenceMinus-Targeted-atDude-targetOpponents$$Put1Rumors-Targeted-atDude-targetOpponents
+++++
.....
Run 'em Down!
-----
3a1296d2-79f5-4a8a-9ea3-cabc4564e9eb
-----
onPlay:BootTarget-Targeted-atDude-targetOpponents$$MoveMulti-Targeted-atDude-targetMine-moveToDude-isBooted
+++++
.....
Sanford Taylor
-----
e686782d-1d7c-464d-82ed-910fcaa2945d
-----
+++++
GR0B1R0:CalloutTarget-Targeted-atDude
.....
Shadow Walk
-----
f9714fff-14d9-433c-b2cc-05068006c388
-----
+++++
GR0B1R0:Pull1Card-testHex7-spellEffects<MoveHost-moveToDude_or_Deed_or_Town Square_or_Outfit++ParticipateHost,None>-onlyInShootouts||GR0B1R0:Pull1Card-testHex7-spellEffects<MoveHost-moveToDeed_or_Town Square_or_Outfit,None>-onlyInNoon
.....
Sherriff Dave Montreal
-----
2af1e511-ca73-4f12-a1ae-9a7a340738da
-----
+++++
.....
Shotgun
-----
ac681d62-c7df-469f-8ac6-80816c781136
-----
+++++
GR0B1R0:AceTarget-Targeted-atDude-isParticipating
.....
Silas Aims
-----
28b5d3c7-bb58-4b63-aae5-659e86fee876
-----
+++++
GR0B0R1:Gain1PermBulletPlus
.....
Sloane
-----
8209b72a-15c4-440b-9ad7-8614a9d2f452
-----
+++++
.....
Soul Blast
-----
bf1c8173-843a-4a20-be3b-f4ae650cfdd2
-----
+++++
GR0B1R0:Pull1Card-testHexX-difficultyGrit-Targeted-isParticipating-spellEffects<SendHomeBootedTarget-Targeted-choose1,SendHomeBootedHost>
.....
Stagecoach Office
-----
2c51b78f-b130-4663-9c78-0cc69a20a059
-----
+++++
.....
Steele Archer
-----
d9a3a80d-deef-47a7-9a5a-e1c7e0109697
-----
+++++
.....
Steven Wiles
-----
335662d9-0d7c-430e-a556-11693385f5aa
-----
+++++
.....
Sun in Yer Eyes
-----
fcc6f54f-e9d1-40a6-9870-1be438eeef12
-----
onPlay:Put2BulletShootoutMinus-Targeted-atDude-isParticipating-targetOpponents$$Remove999Shootout:Stud-isSilent-Targeted-atDude-isParticipating$$Put1Shootout:Draw-isSilent-Targeted-atDude-isParticipating
+++++
.....
Takin' Yer With Me
-----
452fd385-a516-45d9-8408-8628f4788fa5
-----
onPlay:SimplyAnnounce{your opponent to take 1 casualty}
+++++
.....
Telegraph Office
-----
08d1664d-5d98-4093-88c3-e93b0c6cc84f
-----
+++++
GR0B1R0:Gain1Ghost Rock-perTargetProperty{Influence}-Targeted-atDude
.....
The Fourth Ring
-----
4137ced8-eb93-4ca6-9253-240b46b15886
-----
+++++
GR0B1R0:DiscardTarget-DemiAutoTargeted-fromHand-choose1-isCost$$Draw1Card$$Gain1Ghost Rock
.....
The Ghostly Gun
-----
91863a08-01d9-4b7e-9eff-0bb62826f433
-----
+++++
GR0B0R1:SimplyAnnounce{put The Ghostly Gun into their draw hand}
.....
The Morgan Research Institute
-----
75e35d17-de1e-457f-88e0-42e2bc9301bc
-----
+++++
GR0B1R0:Put2High Noon:Skill Bonus-Targeted-atDude||GR0B1R0:Put2High Noon:Skill Penalty-Targeted-atDude
.....
The Pharmacy
-----
84e33474-5247-4969-b7d1-e335b720a566
-----
+++++
GR0B1R0:UnbootTarget-DemiAutoTargeted-atDude-choose1
.....
The Sloane Gang
-----
f7b4b246-da6f-44e4-9eee-46bb8bfb931a
-----
+++++
GR0B1R0:BootTarget-Targeted-atDude-isCost$$Put1Come Git Some-Targeted-atDude
.....
The Stakes Just Rose
-----
8649f082-f7cd-414d-b360-9b1b72f6172b
-----
onPlay:ParticipateTarget-Targeted-atDude-targetMine$$Remove999Shootout:Draw-Targeted-atDude-targetMine-isSilent$$Put1Shootout:Stud-Targeted-atDude-targetMine
+++++
.....
The Town Hall
-----
7378c899-a9c3-46d4-8d68-f452c9640734
-----
+++++
GR0B1R0:Put1Town Hall-Targeted-atDude
.....
The Union Casino
-----
d340ff50-aec3-4800-a993-c76c954c34a9
-----
+++++
GR0B1R0:CustomScript
.....
This is a Holdup!
-----
7779228b-f33c-4c36-ae90-17fdf54cd142
-----
onPlay:CustomScript
+++++
.....
Tommy Harden
-----
2e81c214-539a-4b49-b2d0-69258204b240
-----
+++++
GR0B0R1:SimplyAnnounce{increase their draw hand rank by 1}
.....
Travis Moone
-----
03341e26-42b4-4545-897e-8626de5e3dd7
-----
+++++
GR0B1R0:ReshuffleHand$$Draw5Cards
.....
Tresspassin'
-----
d4133356-c738-4b89-82ac-9a6d5e35a744
-----
+++++
.....
Tyxarglenak
-----
e6db104b-d882-41c5-9285-84ed77a74eac
-----
+++++
.....
Undertaker
-----
a8843dcd-cd66-4f38-b6a9-12fbf5fd8bba
-----
+++++
GR0B0R1:Gain2Ghost Rock
.....
Unprepared
-----
1f230385-e07c-43e2-a205-79547ce18380
-----
onPlay:CustomScript
+++++
.....
War Paint
-----
b3140484-bfe8-4eb4-963f-27e8d9334b8a
-----
onPlay:Put2BulletNoonPlus-Targeted-atDude
+++++
.....
Whisky Flask
-----
80d43d02-7885-4e2e-98e5-62ba2b189155
-----
+++++
GR0B1R0:BootHost-isCost$$DiscardTarget-DemiAutoTargeted-fromHand-choose1-isCost$$Draw1Card
.....
Xiong "Wendy" Cheng
-----
e22efbb0-e796-4c72-9793-23ed7635dfce
-----
+++++
GR0B1R0:SendHomeBootedTarget-DemiAutoTargeted-atDude-isParticipating-targetOpponents-choose1
.....
Yan Li's Tailoring
-----
b999405d-e2ca-4703-beda-67f9697fc977
-----
+++++
GR0B1R0:Put1InfluencePlus-Targeted-atDude
.....
Back Ways
-----
ef91b1f5-46b8-4246-8e58-401a39ebbf6f
-----
onPlay:MoveTarget-DemiAutoTargeted-atDude-hasMarker{Bounty}-targetMine-choose1-moveToDeed_or_Town Square_or_Outfit
+++++
.....
Kyle Wagner
-----
826655e8-00b7-43ae-a3fb-4cea781176ad
-----
+++++
GR0B1R0:UnbootTarget-Targeted-atRanch-targetMine$$Remove1Used Ability-Targeted-atRanch-targetMine-isSilent
.....
Telepathy Helmet
-----
b65c9b55-7a81-4052-87dd-70eb515b8b2f
-----
+++++
GR1B0R1:CustomScript
.....
Town Council
-----
aa9e21f5-9a84-479b-90f8-497b6084467b
-----
+++++
.....
Faithful Hound
-----
bf48f3c1-bd08-4542-a8c6-6af0a8e7cfa4
-----
+++++
GR0B1R0:Pull1Card
.....
Plasma Drill
-----
d6c082a2-8e92-404b-b427-a1f7a7cab4aa
-----
+++++
GR1B1R0:BootHost-isCost$$UseCustomAbility
.....
Slade Lighboy
-----
d9aefb8f-ea66-4782-8fee-1e5c88c257af
-----
+++++
GR0B0R0:AceTarget-DemiAutoTargeted-atSpell-onAttachment-isBooted-choose1-isCost$$Pull1Card
.....
The R&D Ranch
-----
ce8a1161-2c83-4a6f-a0de-7a6c19b47a6e
-----
+++++
GR0B1R0:CustomScript
.....
Wilber Crowley
-----
c765b5ba-c7b1-491c-b504-fcce730bb8a0
-----
+++++
.....
Hired Help
-----
1b68d5c9-e336-4822-aeb9-5ca0cace82a8
-----
onPlay:Spawn1Gunslinger-modAction:ParticipateMyself
+++++
.....
Roderick Byre
-----
0203a928-b0b6-4a95-8e71-d82b02a48e9a
-----
+++++
.....
Rafi Hamid
-----
dec376dc-2dcd-4cb8-b1ae-9c4dc7c7dd7e
-----
+++++
GR0B0R0:MoveTarget-Targeted-atDeputy-moveToGovernment$$ParticipateTarget-Targeted-atDeputy
.....
Ulysses Marks
-----
15fda041-c825-4278-a5de-15bc00cab80d
-----
+++++
GR0B0R0:MoveTarget-Targeted-atDude-moveToTown Square$$BootTarget-Targeted-atDude
.....
Paralysis Mark
-----
18e06cc3-eb57-451a-ba28-18665727999d
-----
+++++
GR0B1R0:Pull1Card-testHexX-difficultyValue-Targeted-atDude-spellEffects<BootTarget-Targeted,None>
.....
Wylie Jenks
-----
cbb6be72-e387-4742-99cb-b5681e88de82
-----
+++++
.....
Too Much Attention
-----
519a9e2e-56e9-42a2-8cda-64d1a7cd934f
-----
onPlay:BootTarget-Targeted
+++++
.....
Make 'em Sweat
-----
7bb386d8-73a0-4d31-bbd1-5204fcde0302
-----
onPlay:CustomScript
+++++
.....
Dulf Zug
-----
77653869-e142-4428-8595-c74886e8c8c8
-----
+++++
.....
Lillian Morgan
-----
32f4797f-7584-453c-b6a3-9cf3656dcc96
-----
onParticipation:Put3Ghost Rock||onUnparticipation:Remove999Ghost Rock-isSilent
+++++
GR0B0R1:Remove1Ghost Rock-isCost-isSilent$$Gain1Ghost Rock
.....
Alice Stowe
-----
55257be5-9428-4699-8df6-9b397f9fe258
-----
+++++
.....
Dr. Emanuel Ashbel
-----
17760a8d-adc3-40cc-9c85-578b7f2b30c5
-----
+++++
GR0B0R0:Pull1Cards
.....
..It's who you know
-----
48c8526d-6dee-4ee8-9636-01d3891db8f8
-----
onPlay:CalloutTarget-Targeted-atDude-targetOpponents-leaderTarget{Dude}$$Put1Shootout:Stud-Targeted-atDude-targetMine
+++++
.....
Horse Wranglin'
-----
f51e5631-7582-492f-8351-e1e808d39d19
-----
onPlay:Retrieve1Cards-grabHorse-toTable-payCost||onPlay:Retrieve1Cards-grabHorse-toTable-payCost-fromDiscard
+++++
.....
It's Not What You Know...
-----
a23d36e0-8231-49e7-bbc4-4e56c179b045
-----
+++++
onPlay:SimplyAnnounce{Reduce a player's draw rank by 1 hand rank}||SimplyAnnounce{Reduce a player's draw rank by 4 hand ranks}-isResolution
.....
Baird's Build and Loan
-----
949e9f9d-bc65-4486-b115-a1d364aaae0d
-----
+++++
GR0B1R0:PlayTarget-DemiAutoTargeted-fromHand-atDeed-choose1-payCost-reduc2
.....
Leonardo "Leon" Cavallo
-----
90b3fbde-2834-4c49-9577-1d8723feadd4
-----
+++++
GR0B1R0:Pull1Card-testHexX-difficultyValue-Targeted-atDude-spellEffects<BootTarget-Targeted-atDude,None>
.....
Mortimer Parsons
-----
e0434a23-02b1-406b-a683-0b230b73111d
-----
whileInPlay:SendHomeBootedMyself-onlyInShootouts-isParticipating-foreachCheatinRevealed-byMe$$Put1InfluenceMinus-perProperty{Influence}
+++++
.....
Gang Yi
-----
631a3c9a-b970-47f5-a196-c078ad913d56
-----
+++++
GR0B0R0:CustomScript
.....
Angelica Espinosa
-----
08fbd7c6-0413-4b5b-872e-a9ba16e3276d
-----
+++++
GR0B0R0:ParticipateMyself
.....
Jose Morales
-----
1e61fcd0-65fa-414f-8838-d76f1394b041
-----
constantAbility:Skill Bonus:1-perProperty{Bullets}-isParticipating
+++++
.....
Tallulah "Lula" Morgan
-----
434353cb-3040-4709-a7d5-e4f36006128b
-----
+++++
GR1B1R0:Gain1Ghost Rock-perTargetProperty{Production}-Targeted-atDeed-targetMine
.....
Ballot Counter
-----
ffed2e8d-67cf-42ab-8a34-110110b59c72
-----
+++++
GR1B0R1:Put1InfluencePlus
.....
Holy Wheel Gun
-----
dba818f0-4802-474c-91c8-07536e32fb3d
-----
+++++
GR0B1R0:Put1BulletShootoutMinus-Targeted-atDude-isParticipating$$Put1Shootout:Holy Wheel Gun Mark-Targeted-atAbomination-isParticipating-noTargetingError
.....
Stone Idol
-----
f94631b0-20f6-492b-b732-e36b816e523f
-----
+++++
GR0B1R0:Put3ValueNoonMinus-Targeted-atDude
.....
Corporeal Twist
-----
8272e52f-aa01-403e-9ea4-0408bdcf3fdc
-----
+++++
GR0B1R0:Pull1Card-testHex5-spellEffects<Put1BulletShootoutMinus-Targeted-atDude-isParticipating-targetOpponents++Put2ValueShootoutMinus-Targeted-atDude-isParticipating-targetOpponents,None>
.....
Forget
-----
0857fa1d-c061-4606-bb55-4736edcfd2e9
-----
+++++
GR0B1R0:Pull1Card-testHex5-spellEffects<Put1High Noon:Forget-Targeted-atDude,None>
.....
Surveyor's Office
-----
cdc1e1d2-d985-400f-ab3c-f91744619eed
-----
+++++
GR0B1R0:MoveTarget-DemiAutoTargeted-atDude-targetMine-choose1-moveToDeed_or_Town Square_or_Outfit
.....
Genesee "Gina" Tailfeathers
-----
437cd8a9-fe8a-497f-9625-9054511f91ea
-----
+++++
GR0B1R0:DiscardTarget-DemiAutoTargeted-fromHand-choose1-isCost$$Draw2Cards
.....
Richard Slavin
-----
68b6d3d2-207c-47ed-9311-0949f86308b9
-----
+++++
GR0B1R0:Pull1Card
.....
Fetch
-----
f138c8f1-00cc-4ca4-b145-e9c5105dc76a
-----
+++++
GR0B1R0:Pull1Card-isResolution
.....
The Brute
-----
13cdc52f-986e-4f3b-b053-c3261267790d
-----
+++++
.....
Smiling Tom
-----
d90e5abe-ed12-4dad-9007-16aa47afa2cc
-----
+++++
.....
Philip Swinford
-----
f1012bb4-2429-4ec3-9b89-6ebcb3f94184
-----
+++++
GR0B0R1:Draw1Card$$DiscardTarget-DemiAutoTargeted-fromHand-choose1-isCost
.....
Drew Beauman
-----
19fc7ff8-d4da-492f-8a41-7a838490c9e4
-----
+++++
GR0B0R1:PlayTarget-DemiAutoTargeted-fromHand-atGadget-choose1-payCost-isCost
.....
Dr. Arden Gillman
-----
95b9d8df-a95d-4b5e-aeed-98f8e99ca5a5
-----
+++++
GR0B1R0:Pull1Card
.....
Arvid Mardh
-----
33b97a88-5b7f-4073-8e30-5c66da9060c5
-----
+++++
.....
Allie Hensman
-----
28c37361-c5ac-4561-b1bc-5f41113e0625
-----
+++++
GR0B0R0:StartJob-DemiAutoTargeted-atDude-bootLeader-choose1-targetOpponents-jobEffects<UseCustomAbility,None>
.....
Milt Clemons
-----
cd99ead2-3b50-4989-8331-59f76b5edb0b
-----
+++++
.....
Jake Smiley
-----
36c80330-39fc-4fdc-8736-4c8d47758063
-----
+++++
.....
QUATERMAN
-----
144991c0-f3f4-4914-a075-ab2093991411
-----
+++++
.....
Angela Payne
-----
6b01cb98-4ae3-4b6e-b7d1-29fe035c9e2d
-----
+++++
GR0B1R0:Gain2Ghost Rock
.....
The Mayor's Office
-----
90568a28-333b-4836-b55c-8ebbef266ad1
-----
+++++
GR0B1R0:Put1InfluencePlus-Targeted-atDude||GR0B1R0:Put1InfluenceMinus-Targeted-atDude
.....
Hunter Protections
-----
1f6dc79e-65ad-4727-bf8e-1c42eee98fe1
-----
+++++
GR0B1R0:BootTarget-Targeted-atDude-isUnbooted-hasntMarker{PermControlPlus}-isCost$$Put2Bounty-Targeted-atDude-hasntMarker{PermControlPlus}$$Put1PermControlPlus-Targeted-atDude-hasntMarker{PermControlPlus}
.....
The Evidence
-----
9687ab08-0ae9-4a15-8304-6e9797edc87e
-----
+++++
GR0B0R0:Remove999Bounty-Targeted-atDude-hasMarker{Bounty}$$DiscardMyself||GR1B0R0:Put2Bounty-Targeted-atDude$$AceMyself
.....
Teleportation Device
-----
28d66d3e-bc96-4c2c-9ebb-38dfabceb440
-----
+++++
GR1B0R1:Pull1Card
.....
Mayfair Family Deck
-----
68310bec-735a-46bc-be6c-949e26bc5758
-----
+++++
.....
Puppet
-----
146c6a48-ffb5-4201-b07e-74e5560da771
-----
+++++
GR0B1R0:Pull1Cards
.....
Summoning
-----
791de7a5-3292-43cd-b907-40f737dc8600
-----
+++++
GR0B1R0:Pull1Card-testHex5-spellEffects<StartJob-AutoTargeted-atTown Square,None>
.....
Ridden Down
-----
cd378a50-f1c7-4a1f-a99d-80436806feea
-----
onPlay:BootTarget-DemiAutoTargeted-atHorse-isUnbooted-targetMine-choose1-isCost$$SendHomeBootedTarget-DemiAutoTargeted-atDude-targetOpponents-choose1
+++++
.....
Tail Between Yer Legs
-----
ff26cca7-830f-440a-90cd-08e32c629d7e
-----
onPlay:Put2BulletShootoutMinus-DemiAutoTargeted-atDude-isParticipating-targetOpponents-choose1
+++++
.....
Election Day Slaughter
-----
25787651-53b5-456e-bb42-8f38fd7b3caf
-----
onPlay:StartJob-AutoTargeted-atTown Square
+++++
.....
Faster on the Draw
-----
07c1a52a-fcdc-4775-b42c-f352465942da
-----
onPlay:Put2BulletShootoutMinus-Targeted-atDude-isParticipating-targetOpponents$$Put1BulletShootoutPlus-Targeted-atDude-isParticipating-targetMine$$Put1Shootout:Stud-isSilent-Targeted-atDude_and_Deputy-isParticipating-targetMine-noTargetingError
+++++
.....
Swinford Finds Trouble
-----
0b1ae36c-6359-4871-b34b-16808e639243
-----
+++++
.....
Under the Weather
-----
adf4a674-bc8a-4158-ae1a-cb7224fbb850
-----
onPlay:Pull1Card
+++++
.....
This'll Hurt in the Mornin
-----
546a4107-75f3-49f6-84a1-4396ca3c61c1
-----
onPlay:CustomScript
+++++
.....
Prayer
-----
e12da368-0c9a-435b-91e7-e08378a6363c
-----
onPlay:Put1Prayer-Targeted-atDude-targetMine$$CreateDummy$$SimplyAnnounce{allow the dude to use shoppin' for Miracles as a Shootout play, and at any location}||atPhaseSundown:Remove999Prayer-AutoTargeted-atDude-hasMarker{Prayer}$$DiscardMyself-onlyforDummy
+++++
.....
Meet The New Boss
-----
c27e60e9-8450-4e6e-9c2e-73d19caec7c1
-----
onPlay:StartJob-AutoTargeted-atTown Square-jobEffects<Put1PermControlPlus-Targeted-LeaderIsTarget++Put1PermInfluencePlus-Targeted++AceMyself,AceMyself>
+++++
.....
Pettigrew's Pawnshop
-----
1b2b9609-05a4-4aed-ab98-06ddf6dbe400
-----
whileInPlay:Gain1Ghost Rock-foreachCardPlayed-typeGoods_and_notGadget
+++++
.....
California Tax Office
-----
c5bf5c92-9d15-42fa-aacf-67163913518a
-----
onPlay:BootMyself
+++++
GR0B1R0:CustomScript
.....
St. Anthony's Chapel
-----
4bd2615b-6a1b-465e-a16d-923fd77ee443
-----
+++++
.....
The Whateley Estate
-----
e77f6e44-7900-4d0e-b370-defa7cd6796d
-----
+++++
GR0B1R0:Retrieve1Cards-fromBootHill-toDiscard-grabnonDude
.....
Outlaw Mask
-----
4827d51e-458b-4945-8242-ddfd2fdd9821
-----
+++++
.....
Mirror, Mirror
-----
9b76ed8e-e25b-47a5-b8d7-e7c020e7a230
-----
+++++
GR0B1R0:Pull1Card-testHex4-spellEffects<UseCustomAbility-DemiAutoTargeted-atDude-isParticipating-targetOpponents-choose1-isFirstCustom,None>||GR0B1R0:Pull1Card-testHex6-spellEffects<UseCustomAbility-DemiAutoTargeted-atDude-isParticipating-targetOpponents-choose1-isSecondCustom,None>
.....
Lay on Hands
-----
8d8fa80c-580b-44b3-8e66-b89392299200
-----
+++++
GR0B1R0:Pull1Card-testMiracle8-spellEffects<BootHost++Put1NoUnboot-Targeted-atDude-targetMine++SendHomeBootedTarget-Targeted-atDude-targetMine,None>
.....
Holy Roller
-----
a806a33f-8bb2-4a47-9bdc-f3dc7d2058ea
-----
+++++
GR0B1R0:Pull1Card-testMiracle6-spellEffects<Put1BulletShootoutPlus-AutoTargeted-atDude-onHost++Put1Shootout:Holy Roller-AutoTargeted-atDude-onHost,None>
.....
The Lord Provides
-----
c5c2fed5-010f-4ece-a5e2-2cf200695064
-----
+++++
GR0B1R0:Pull1Card-testMiracle9-spellEffects<Retrieve1Cards-grabAction++DiscardMyself,None>
.....
Walk the Path
-----
77df4257-28e1-4865-8d20-5e0087fcd8d5
-----
+++++
GR0B1R0:Pull1Card-testMiracle7-spellEffects<MoveTarget-Targeted-atDude-targetMine-moveToHere++UnbootTarget-Targeted-atDude-targetMine++ParticipateTarget-Targeted-atDude-targetMine,None>-onlyInShootouts||GR0B1R0:Pull1Card-testMiracle6-spellEffects<MoveTarget-DemiAutoTargeted-atDude-targetMine-choose1-moveToHere,None>-onlyInNoon
.....
Phantasm
-----
c7d43986-30a5-46ac-99a3-01d94ceda540
-----
+++++
GR0B1R0:Pull1Card-testHex9-spellEffects<MoveTarget-DemiAutoTargeted-atDude-targetOpponents-isUnbooted-choose1-moveToDeed_or_Town Square_or_Outfit,None>||GR0B1R0:Pull1Card-testHex12-spellEffects<MoveTarget-DemiAutoTargeted-atDude-targetOpponents-isBooted-choose1-moveToDeed_or_Town Square_or_Outfit,None>
.....
Soothe
-----
4eba0738-ddf4-4bce-9c3c-ecb06d3381ff
-----
+++++
GR0B1R0:Pull1Card-testMiracle10-spellEffects<BootHost-isCost++UnbootTarget-DemiAutoTargeted-atDude-targetMine-isBooted-choose1,None>
.....
Evanor
-----
a0c826bd-311f-4918-aeeb-9c2a14b176c3
-----
+++++
GR0B1R0:SimplyAnnounce{Increase the casualties they inflict if they win, by 1}
.....
Clown Carriage
-----
a8546fc6-be73-41eb-b33a-62b2b8158696
-----
+++++
GR0B1R0:PlayTarget-DemiAutoTargeted-fromHand-atAbomination-choose1-payCost
.....
Bio-Charged Neutralizer
-----
bc4af72b-b53c-44ef-858b-04d33e1e6a4b
-----
+++++
.....
Sister Lois Otwell
-----
2923cd38-686c-4ab3-8996-94fd989bff9b
-----
+++++
GR0B0R0:BootTarget-DemiAutoTargeted-atMiracle-isUnbooted-onAttachment-choose1-isCost$$Put1BulletShootoutPlus-Targeted-atDude-isParticipating-targetMine$$Put3ValueShootoutPlus-Targeted-atDude-isParticipating-targetMine
.....
Felix Amador
-----
38b1ca04-c236-4d51-80dd-1c283957095e
-----
+++++
GR0B0R0:BootTarget-AutoTargeted-atMiracle-isUnbooted-onAttachment-choose1-isCost$$UseCustomAbility
.....
Nicodemus Whateley
-----
35a3cffb-6a57-4a90-ab93-63f519910c99
-----
+++++
GR0B0R1:BootTarget-Targeted-atDude-targetMine$$Put1ControlPlus
.....
Rev. Perry Inbody
-----
da32c302-02de-4a5b-89c4-8227dba9f3c7
-----
+++++
GR0B0R0:Pull1Card-testMiracle9-spellEffects<UnbootTarget-DemiAutoTargeted-atDude_and_Law Dogs-targetMine-isBooted-choose1++UnbootMyself,BootMyself>
.....
Zoe Halbrook
-----
cdfcd050-f5f1-40df-a10a-4039fb74935d
-----
+++++
.....
Abram Grothe
-----
5167ca6e-657d-4d96-a72b-247b2298abf9
-----
+++++
GR0B0R0:BootMulti-AutoTargeted-atWeapon-targetOpponents-isParticipating$$Put1BulletShootoutPlus
.....
William Specks
-----
5a2cbed4-9d68-4c5d-acb6-93e2d25b268f
-----
+++++
GR0B1R0:PlayTarget-DemiAutoTargeted-fromHand-atRanch_or_Out of Town_or_Gadget-choose1-payCost-reduc2
.....
Chuan "Jen" Qi
-----
7d2d0bb3-f477-4076-8739-9479911b95a6
-----
+++++
.....
Lane Healey
-----
a2b5195e-4d31-41e9-8407-c40ce54cc334
-----
+++++
GR0B0R0:BootTarget-AutoTargeted-atHorse-isUnbooted-onAttachment-isCost$$BootTarget-Targeted-atDude-targetOpponents$$MoveMyself-moveToDude-targetOpponents$$CalloutTarget-Targeted-atDude-targetOpponents
.....
The Fixer
-----
dfb10d22-9e8f-4031-9249-50cd13ee7203
-----
+++++
GR0B1R0:CustomScript
.....
Maria Kingsford
-----
9937f528-6b65-4625-85d8-05a51af488d2
-----
constantAbility:Skill Bonus:1-perMarker{Bounty}-isParticipating
+++++
.....
Makiao Kaleo, Esq.
-----
933eccf1-ad0d-4153-8381-a23c6de97a60
-----
+++++
GR0B0R0:Remove1Bounty-DemiAutoTargeted-atDude-hasMarker{Bounty}-targetMine-choose1-choiceTitle{Choose from which dude to remove the Bounty}$$Put1Bounty-DemiAutoTargeted-atDude-targetMine-choose1-choiceTitle{Choose which dude should receive the Bounty}
.....
Pagliaccio
-----
6a4d1a94-7e29-465e-9a41-f78389ef49cf
-----
+++++
GR0B1R0:Put1BulletShootoutMinus-Targeted-atDude-targetOpponents-isParticipating$$Put1ValueShootoutMinus-Targeted-atDude-targetOpponents-isParticipating
.....
Valeria Batten
-----
aba65d32-3cb2-4677-b000-f4b905c4a16e
-----
+++++
.....
Micah Ryse
-----
1ff5d90e-e801-4199-80bc-7313bc8cf99e
-----
+++++
GR0B0R0:BootTarget-AutoTargeted-atHex-isUnbooted-onAttachment-choose1-isCost$$Put3ValueShootoutMinus-Targeted-atDude-isParticipating-targetOpponents
.....
The Arsenal
-----
b6cfa257-5898-4f1a-968c-8f36956f499b
-----
+++++
GR0B0R1:BootTarget-DemiAutoTargeted-atGadget_or_Spell-isUnbooted-choose1-targetMine-isCost$$CalloutTarget-Targeted-atDude-targetOpponents-leaderTarget{Dude}
.....
Morgan Gadgetorium
-----
6db17405-bbd3-4c0c-b00f-b89991925291
-----
+++++
GR0B1R0:SimplyAnnounce{optimize the gadget creation}
.....
Desolation Row
-----
5669e37c-fab7-4954-acb8-7f841ca7a762
-----
+++++
GR0B1R0:StartJob-AutoTargeted-atTown Square-jobEffects<UseCustomAbility-LeaderIsTarget,None>
.....
Oddities of Nature
-----
d5ed8e5d-c9f9-4e9e-8bc9-48da7c938045
-----
+++++
GR0B1R0:BootTarget-DemiAutoTargeted-atAbomination-targetMine-isUnbooted-choose1-isCost$$Gain1Ghost Rock$$BootTarget-Targeted-atDude-targetOpponents-noTargetingError
.....
Funtime Freddy
-----
2fab82a8-2a4d-4545-855a-9d1ea011a8c9
-----
+++++
GR0B1R0:CustomScript
.....
The Flying Popescus
-----
7d58d611-07a2-41e6-ad4d-fc51cc794847
-----
+++++
GR0B0R0:Remove999Shootout:Draw-isSilent$$Put1Shootout:Stud
.....
Andrew Burton
-----
7b51deb6-fb5f-4624-8473-32533ec7309c
-----
+++++
GR0B1R0:Put1Bounty-Targeted-atDude$$DiscardTarget-DemiAutoTargeted-fromHand-choose1-isCost$$Draw1Cards
.....
Elmore Rhine
-----
e341450b-9b41-4ae1-a1aa-fa50ac743f90
-----
+++++
GR0B1R0:Gain1GhostRock-perTargetMarker{Bounty}
.....
Howard Aswell
-----
a44b9e09-8941-4e30-ab0e-c2171949e8d5
-----
+++++
GR0B0R0:CustomScript
.....
Louis Pasteur
-----
bc36b880-9e3c-458b-8ef4-9730a926bf38
-----
+++++
GR1B0R1:Pull1Card-testMad Science10-spellEffects<UnbootTarget-Targeted-atDude-targetMine,None>
.....
Benny McGill
-----
135a9ce8-4e8b-463a-9fcc-23f0168e4762
-----
+++++
GR0B0R0:BootTarget-DemiAutoTargeted-atSpell_and_Hex-onAttachment-choose1-isCost$$CalloutTarget-Targeted-atDude
.....
Marion Seville
-----
9440378f-dd04-426c-9696-f24e0789e528
-----
+++++
GR0B0R0:Remove999Shootout:Draw-isSilent$$Put1Shootout:Stud$$BootTarget-DemiAutoTargeted-atWeapon_and_Melee-targetOpponents-choose1
.....
J.W. Byrne
-----
fde8c18b-58e2-40ab-b407-fe42a4fe549d
-----
+++++
.....
Shane & Graves Security
-----
18df456c-38ac-4e2b-b4ab-f309849d2cb7
-----
+++++
GR0B1R0:Spawn1Gunslinger-modAction:ParticipateMyself
.....
Gomorra Jail
-----
edc1d27d-2cd3-494f-90d8-36f9edfdbb61
-----
+++++
GR0B1R0:Put1PermControlPlus
.....
Diable en Boite
-----
438f980e-0981-473e-a9c5-0a8d07ee4634
-----
+++++
GR0B1R0:SendHomeBootedTarget-DemiAutoTargeted-isParticipating-choose1$$Draw1Card
.....
Legal Instruments
-----
caff0709-27f0-4596-8cbb-ad1b403a2368
-----
+++++
.....
Recursive Motion Machine
-----
4e59a712-6f55-4703-a420-a1f62b483afa
-----
onPlay:Put1ProdPlus-AutoTargeted-atGadget-onHost-noTargetingError
+++++
GR0B1R0:Put1Ghost Rock
.....
Winchester Model 1873
-----
c5a6c2ba-866e-4854-bbaf-4e3fd6ea49de
-----
+++++
GR0B1R0:BootHost-isCost$$Put1BulletShootoutPlus-AutoTargeted-atDude-onHost$$Remove999Shootout:Draw-isSilent-AutoTargeted-atDude-onHost$$Put1Shootout:Stud-AutoTargeted-atDude-onHost
.....
Fancy New Hat
-----
bb4aad56-6d02-4088-9f20-f64c571f593b
-----
+++++
.....
Confession
-----
51b83721-61e5-42bc-be9e-8b671f0668d4
-----
+++++
GR0B1R0:Pull1Card-testMiracle6-spellEffects<BootHost-isCost++UseCustomAbility-Targeted-atDude,None>||GR0B1R0:Pull1Card-testMiracle7-spellEffects<Remove1Bounty-Targeted-atDude-isCost++Gain1Ghost Rock,None>
.....
Shield of Faith
-----
ae0d585d-25e0-4495-8757-71ef0efffb0e
-----
+++++
GR0B1R0:Pull1Card-testMiracle7-spellEffects<SimplyAnnounce{prevent dudes from being aced or discarded during this shootout and reduce their casualties by 1 for this round},None>
.....
Rope and Ride
-----
1e6f4bb8-762f-4e61-9f19-b3585c0f5186
-----
onPlay:MoveTarget-DemiAutoTargeted-atDude-targetMine-choose1-moveToDeed_or_Town Square_or_Outfit$$MoveTarget-DemiAutoTargeted-atDude-targetOpponents-choose1-moveToDeed_or_Town Square_or_Outfit$$BootTarget-DemiAutoTargeted-atDude-targetOpponents-choose1
+++++
.....
Incubation
-----
f36622a2-667f-43bc-ac16-0f8c8dabec00
-----
onPlay:Put1PermInfluenceMinus-AutoTargeted-atDude-onHost$$Put1PermBulletMinus-AutoTargeted-atDude-onHost$$Put3ValuePermMinus-AutoTargeted-atDude-onHost
+++++
.....
Flight of the Lepus
-----
3e1d8cfb-03b1-433a-a7ae-b12f124fb7ef
-----
onPlay:SendHomeBootedMulti-doNotBoot-Targeted-atDude-isParticipating-targetMine-onlyInShootouts-noTargetingError$$SendHomeBootedMulti-Targeted-atDude-isParticipating-targetOpponents-onlyInShootouts-noTargetingError||onPlay:SendHomeBootedMulti-doNotBoot-Targeted-atDude-onlyInNoon
+++++
.....
Avie Cline
-----
2cba8b31-062a-4701-a9a4-76a330681bb7
-----
+++++
GR0B1R0:MoveTarget-Targeted-atDude-moveToDeed
.....
Judge Harry Sommerset
-----
04e204a6-8879-477b-8153-adcda50b1f51
-----
+++++
GR0B1R0:StartJob-DemiAutoTargeted-atDude-hasMarker{Bounty}-targetOpponents-choose1-jobEffects<DiscardTarget,None>$$Spawn1Gunslinger-modAction:ParticipateMyself
.....
Ebenezer Springfield
-----
799ea10c-9dc3-414e-bd72-c1d9fa662926
-----
+++++
GR0B0R0:Put1Bounty-Targeted-atDude-targetOpponents
.....
Elander Boldman
-----
294a7ce9-af00-46e1-b33c-aab21ebf3b09
-----
+++++
GR0B1R0:CustomScript
.....
Antheia Pansofia
-----
56fd7d6b-d14a-4b9d-a29c-40102bf4377b
-----
+++++
.....
Harry Highbinder
-----
fffb2b62-4d83-4b3c-9696-2905477d892f
-----
+++++
.....
Max Baine
-----
4d2ffa2b-f09d-47ef-845b-d227b060b603
-----
+++++
GR0B1R0:PlayTarget-Targeted-atDude-fromHand-payCost-reduc3
.....
El Grajo
-----
9eada35c-a9a5-485f-9513-305661421944
-----
+++++
GR0B0R0:BootTarget-AutoTargeted-atWeapon_and_Melee-onAttachment$$Gain2Bullets$$Remove999Shootout:Draw-isSilent
.....
Jacqueline Isham
-----
c65152fd-434e-4729-ac44-cf61d1affbdc
-----
+++++
GR0B0R1:Remove999Shootout:Draw-isSilent$$Put1Shootout:Stud
.....
Huntsmen's Society
-----
0334ecdd-6da1-47bf-a3d0-b4988bdf8a07
-----
+++++
GR0B0R1:Put1ProdPlus-Targeted-atDude
.....
Secured Stockyard
-----
5af04c4c-8282-4abf-9e55-a13ace8a998b
-----
+++++
GR0B1R0:Gain1Ghost Rock
.....
La Quema
-----
68bce669-132a-43ca-9727-8de86b3b039c
-----
+++++
GR0B1R0:ParticipateHost$$BootTarget-DemiAutoTargeted-atDude-isParticipating-targetOpponents-choose1
.....
Asyncoil Gun
-----
34970b97-c353-47a5-bf1c-4cdc0351eef5
-----
+++++
GR0B1R0:Pull1Card
.....
Scoop Hound
-----
b756df9e-6c24-4b33-a6f1-ff6b82dd51e5
-----
+++++
GR0B1R0:SimplyAnnounce{prevent dudes from joining or leaving the shootout via shootout actions}
.....
Rapier
-----
2950ad4b-c835-415f-a24c-b61c9702c728
-----
+++++
.....
Vitality Tonic
-----
03dc1c9f-3b91-4bf6-af64-6bcbdca36839
-----
+++++
GR2B0R1:Pull1Card
.....
Consecration
-----
00867833-050d-4214-bccb-d1e787f15baf
-----
+++++
GR0B0R0:Pull1Card-testMiracle7-spellEffects<Put2BulletNoonPlus-Targeted-atDude++Put2InfluencePlus-Targeted-atDude++Put1High Noon:Stud-Targeted-atDude,None>
.....
Mark of Pestilence
-----
12d7d9e6-cffa-46ef-8ae7-d8fbbeb88e4c
-----
+++++
GR0B1R0:Pull1Card-testHex9-spellEffects<BootMultiple-AutoTargeted-atDude-isParticipating++UnbootMyself,None>
.....
Cookin' Up Trouble
-----
f7d5aaa2-b476-4156-bef1-697cd39d7dd7
-----
onPlay:CustomScript
+++++
.....
Ol' Fashioned Hangin'
-----
0453a270-56fb-4714-b3d7-41129bf4ccd5
-----
onPlay:StartJob-DemiAutoTargeted-atDude-hasMarker{Bounty}-targetOpponents-choose1-jobEffects<AceTarget,None>
+++++
.....
No Turning Back
-----
b308c49b-6871-4067-a269-9aa7f8917bdc
-----
+++++
.....
Junior
-----
85989968-0332-4dc2-a5eb-9921267453ff
-----
onPlay:Retrieve1Card-toTable-grabGoods_and_Mystical_and_nonGadget-payCost-searchComplete
+++++
.....
The Fabulous Mister Miss
-----
4b50bf0d-29e3-4e93-8acc-f77c60b80e57
-----
+++++
GR0B0R1:Put3InfluenceMinus-Targeted-atDude-targetOpponents
.....
Sister Mary Gideon
-----
92d9bfc1-4be6-4973-b820-0ac65e5d7b01
-----
+++++
.....
Nathan Shane
-----
65b724a9-49e4-4c1c-9db8-e3f3081fa3fe
-----
+++++
GR0B1R0:CustomScript
.....
Warren Graves
-----
e20afd82-c3d7-4152-aac9-4392a29733b8
-----
+++++
GR0B0R0:SendHomeBootedTarget-DemiAutoTargeted-atDude-isParticipating-targetMine-choose1$$ParticipateMyself
.....
Jack O'Hara
-----
c4b260a0-1861-4300-8c96-6064f7367cb5
-----
+++++
.....
John "Aces" Radcliffe
-----
686696a5-da6b-46d7-92b2-2ba051a3153e
-----
+++++
.....
Steele Archer
-----
ef19f3d5-a52f-403a-a342-ae32f3753120
-----
+++++
GR0B0R0:UnbootTarget-DemiAutoTargeted-atHex-choose1
.....
Old Man McDroste
-----
8628bdf2-3900-4e36-941e-043560d81757
-----
+++++
GR0B1R0:Put5InfluenceMinus-Targeted-atDude
.....
Flint's Amusements
-----
29add78a-5369-43d9-80d9-4006f6757a6e
-----
whileInPlay:Gain1Ghost Rock-foreachResolution-typeAction
+++++
GR0B1R0:Draw1Card
.....
Lula's Exploit
-----
5529098d-fd5f-41c1-9121-58a4c2b41c89
-----
atPhaseGamblin:Refill2Ghost Rock
+++++
GR0B0R1:Remove1Ghost Rock-isCost-isSilent$$Gain1Ghost Rock
.....
Testing Range
-----
5448fd9b-6cce-4973-a40a-f4769b7f6f47
-----
+++++
GR2B1R0:UnbootTarget-DemiAutoTargeted-atMad Scientist-isBooted||GR0B1R0:Pull1Card
.....
Dog's Duster
-----
6da3418d-0c64-425c-8191-680e7f2480b3
-----
+++++
GR0B1R0:CalloutTarget-Targeted-atDude-hasMarker{Bounty}
.....
Stoker's Sabre
-----
0c37b916-b5e6-4e43-8640-8a970465b70e
-----
+++++
GR0B1R0:UnbootTarget-Targeted-atSpell$$Remove1Used Ability-Targeted-atSpell-targetMine-isSilent
.....
Fate Dispenser
-----
80905177-39e7-49fd-a244-aa6c70baa9e8
-----
+++++
GR0B1R0:Draw1Card
.....
Soul Cage
-----
59978194-81c0-482e-8748-1df5b3a6245f
-----
+++++
GR0B1R0:Retrieve1Card-fromBootHill-toTable-grabAbomination-searchComplete
.....
For Such a Time as This
-----
abed8d3b-d73f-4426-bbeb-6e0124aae9fb
-----
+++++
GR0B1R0:Pull1Card-testMiracle9-spellEffects<StartJob-AutoTargeted-atTown Square,None>||GR0B0R1:Retrieve1Card-toTable-grabDude-searchComplete-payCost-reduc4
.....
Sword of the Spirit
-----
39e4ddd4-987b-450b-a3dd-260a20db35a0
-----
+++++
GR0B0R0:Pull1Card-testMiracle7-spellEffects<Put1BulletNoonPlus-Targeted-atDude++Put1High Noon:Stud-Targeted-atDude,None>
.....
A Fight They'll Never Forget
-----
3f0bc78c-6b5d-4951-bcd1-a3ab3b08481e
-----
+++++
.....
Buried Treasure
-----
e5e6dd23-04d0-4614-a8da-a9a74785a7c2
-----
onPlay:Retrieve1Card-fromDiscard-toBootHill$$Gain3Ghost Rock$$Draw1Card
+++++
.....
Nightmare at Noon
-----
590b905c-f114-48d2-930f-66e446458c84
-----
onPlay:Put1BulletShootoutMinus-AutoTargeted-atDude-isParticipating-targetOpponents$$Put1Shootout:Draw-isSilent-AutoTargeted-atDude-isParticipating-isStudDude-hasProperty{Bullets}le1
+++++
.....
Raking Dragons
-----
c946d5d9-276e-4464-8958-c1bf3045a817
-----
onPlay:Pull1Card
+++++
GR0B0R0:BootTarget-Targeted-atDude-targetOpponents-isParticipating$$Put2ValueShootoutMinus-Targeted-atDude-targetOpponents-isParticipating
.....
Shifu Speaks
-----
12fb4862-f1e1-4ad3-b28f-2206b1a194e9
-----
onPlay:Pull1Card
+++++
GR0B0R0:Put1Influence-Targeted-atDude_and_Kung Fu-targetMine$$RequestInt-Max5{Discard how many cards?}$$Draw1Card-toDiscard-perX
.....
Rabbit's Lunar Leap
-----
ceaefbb7-2540-44cf-abaf-755042f318ae
-----
onPlay:Pull1Card
+++++
GR0B0R0:ParticipateTarget-Targeted-atDude_and_Kung Fu-targetMine$$UnbootTarget-Targeted-atDude_and_Kung Fu-targetMine
.....
Zhu's Reward
-----
23a1a9a2-cfc2-478c-9d93-46c23cb6a567
-----
onPlay:Pull1Card
+++++
GR0B0R0:BootTarget-DemiAutoTargeted-atDude_and_Kung Fu-TargetMine-isParticipating-choose1-isCost$$SendHomeBootedMulti-Targeted-atDude-targetOpponents-isParticipating
.....
Zhu's Ferocity
-----
c77c3030-224b-4cb2-bf27-7be6cf6d5fcc
-----
onPlay:Pull1Card
+++++
GR0B0R0:Put1BulletShootoutMinus-Targeted-atDude-isParticipating-targetOpponents$$Put1BulletShootoutPlus-Targeted-atDude_and_Kung Fu-isParticipating-targetMine
.....
Richard Faulkner
-----
1456fe5c-9edb-4521-abd7-3a029851222d
-----
+++++
.....
Mazatl
-----
3b853568-7da7-40d2-952c-72b611330f11
-----
+++++
GR0B0R0:MoveMyself-moveToDeed_or_Town Square_or_Outfit
.....
Marcia Ridge
-----
381e141f-b745-4c05-b455-b4fe3c1fc73c
-----
+++++
GR0B0R1:UseCustomAbility-DemiAutoTargeted-atDeed-targetOpponents-choose1
.....
Black Elk
-----
d9beac18-b2a7-451d-a17f-6b5202cd3b1a
-----
+++++
.....
Lydia Bear-Hands
-----
ff016aea-9c16-44a6-bacf-4ee2ca9a913e
-----
+++++
GR0B0R0:BootTarget-AutoTargeted-atSpell-isUnbooted-onAttachment-choose1-isCost$$Put1Shootout:Harrowed-DemiAutoTargeted-atDude-isParticipating-TargetMine-choose1
.....
Jackson Trouble
-----
1086db48-f246-4e5f-a6d2-2bd6ebce9ba8
-----
+++++
.....
Three-Eyed Hawk
-----
3b3815a8-ca49-48c0-96eb-d50d1d493d68
-----
+++++
GR0B0R1:Gain1Ghost Rock
.....
Bloody Teeth
-----
b49ca811-796c-4cb9-abd4-9657d88a9ceb
-----
+++++
.....
Butch Deuces
-----
2256eecb-02d3-4d01-b407-3417a109691e
-----
+++++
GR0B1R0:CustomScript
.....
Smiling Frog
-----
c665872f-61b0-4a23-b923-12ea6d5d8c95
-----
+++++
GR0B0R0:CustomScript
.....
Sarah Meoquanee
-----
245f816f-f407-41e7-b8e6-9fb6cd638bf3
-----
+++++
GR0B0R0:ParticipateMyself$$Put1Shootout:Stud
.....
Chief Stephen Seven-Eagles
-----
eed9713a-5a8b-4a10-91b1-4ecd27eee125
-----
onPay:Reduce1CostPlay-perEveryCard-AutoTargeted-atTotem_or_Improvement
+++++
GR0B1R0:SimplyAnnounce{make Chief Stephen Seven Eagles worth 1 control point per card attached to a deed they control}
.....
Laughing Crow
-----
7d921e83-5ac4-40c7-a756-4f596b1fe8ee
-----
+++++
GR0B0R0:CustomScript
.....
Benjamin Washington
-----
942e1f7e-d6d0-4882-b8e7-68135e9d52fc
-----
+++++
GR0B1R0:CustomScript
.....
Randall
-----
1b7d462c-4ba6-4317-904e-7efc49b4a15e
-----
+++++
GR0B0R1:Draw1Card
.....
Longwei Fu
-----
bcc0f3cf-105d-4030-a4fc-b45d978bfc7e
-----
+++++
.....
Xui Yin Chen
-----
910c051f-f36d-4245-88db-767d80998fc1
-----
+++++
GR0B1R0:Remove999Shootout:Draw-isSilent-AutoTargeted-atDude-targetMine-isParticipating$$Put1Shootout:Stud-AutoTargeted-atDude-targetMine-isParticipating
.....
Natalya
-----
48e2565e-1c96-45c8-837a-85533efc554b
-----
+++++
GR0B0R0:Gain1Ghost Rock-perTargetProperty{Production}-Targeted-atDeed
.....
Hamshanks
-----
05b9067b-7356-4fbf-b5a7-ee96c3135b9f
-----
+++++
.....
Hiram Capatch
-----
7a390eac-db25-477b-80a9-7bbc051d87da
-----
+++++
.....
Yunxu Jiang
-----
3c9f4821-0e6c-4e68-9666-d1784a0eca4e
-----
+++++
.....
Abuelita Espinoza
-----
f58ccb1d-d0f7-4339-abc7-6c017bac34c4
-----
+++++
.....
Bai Yang Chen
-----
306a6cce-32c1-480f-b8a6-e239dad1379c
-----
+++++
GR0B0R1:Draw2Cards$$Put1ProdPlus
.....
Daomei Wang
-----
1c3b7ec1-882e-458d-9409-21e23fef5f2d
-----
onPlay:MoveTarget-DemiAutoTargeted-atDude_and_108 Glorious Bandits-targetMine-choose1-moveToDeed_or_Town Square_or_Outfit
+++++
.....
Xiaodan Li
-----
ed07deb6-3693-4ede-ae62-0f132d538211
-----
+++++
.....
T'ou Chi Chow
-----
e5c43d70-6280-43e4-a0c6-8bc4239382e7
-----
onPay:Reduce1CostPlay-perEveryCard-AutoTargeted-atDeed-targetMine
+++++
GR0B0R1:BootTarget-Targeted-atDeed-targetMine-isCost$$UnbootTarget-Targeted-atDude-targetMine
.....
Nunchucks
-----
06c47afe-528b-416e-b068-5b609225afe0
-----
constantAbility:Kung Fu Bonus:1-isParticipating
+++++
GR0B1R0:SimplyAnnounce{reduce the pull by this dude's Kung Fu rating}
.....
Idol of Tlazolteotl
-----
f0e6f9a4-1a55-4cd6-9296-e06c823e22b2
-----
+++++
GR0B1R0:RehostTarget-Targeted-atTotem_or_Improvement_or_Condition
.....
Eagle Wardens
-----
dfb469f4-efc8-46f9-84df-66f5f973c09c
-----
+++++
GR0B1R0:UseCustomAbility-DemiAutoTargeted-atDude-isUnbooted-targetMine-choose1
.....
108 Righteous Bandits
-----
94c5b661-8f3d-46ea-975e-482ff0e2be8a
-----
+++++
GR0B1R0:MoveTarget-Targeted-atDude-moveToDeed_or_Town Square_or_Outfit
.....
Spirit Guidance
-----
d6931afe-58cd-430f-9fe2-178738614cf8
-----
+++++
GR0B1R0:Pull1Card-testSpirit7-spellEffects<DiscardTarget-DemiAutoTargeted-fromHand-choose1-isCost++Draw1Card,None>||GR0B1R0:Pull1Card-testSpirit10-spellEffects<Draw2Cards++DiscardTarget-DemiAutoTargeted-fromHand-choose1++DiscardTarget-DemiAutoTargeted-fromHand-choose1,None>
.....
The Pack Awakens
-----
79254945-f5f3-48ab-90d9-f1da1dd15d74
-----
+++++
GR0B1R0:Pull1Card-testSpirit8-spellEffects<Spawn1Nature Spirit-modAction:ParticipateMyself-BootMyself,None>
.....
Spirit Trail
-----
a655edaa-c2e8-41d3-9710-e96bf21fae1b
-----
+++++
GR0B1R0:Pull1Card-testSpirit6-spellEffects<MoveTarget-Targeted-atDude-targetMine-moveToDeed_or_Town Square_or_Outfit,None>
.....
Many Speak as One
-----
889fa2dd-dd3e-4830-8d59-7cb242e1c68e
-----
+++++
GR0B1R0:Pull1Card-testSpirit9-spellEffects<Spawn1Ancestor Spirit,None>
.....
Spirit Dance
-----
e4b062de-79ef-4005-a1e0-7ce0a4ed9d34
-----
+++++
GR0B1R0:Pull1Card-testSpirit10-spellEffects<Spawn1Nature Spirit-modAction:ParticipateMyself,None>
.....
Ancestor Spirit
-----
53a212a6-34a6-47b0-bb24-45f1888bebf6
-----
+++++
.....
Nature Spirit
-----
c4689399-c350-46b3-a79a-f8c62d926cd5
-----
+++++
.....
Rabbit's Deception
-----
ba290d31-e66f-427d-bc33-65200b20ad52
-----
onPlay:Pull1Card
+++++
GR0B0R0:SendHomeBootedTarget-DemiAutoTargeted-atDude_and_Kung Fu-isParticipating-targetMine-choose1$$SendHomeBootedTarget-Targeted-atDude-isParticipating-targetOpponents-noTargetingError
.....
A Hero's Passing
-----
d332f810-af0a-42ee-9649-a4b34809a6e1
-----
+++++
.....
Tummy Twister
-----
82cd476b-6ff5-4f49-adad-a3a283f8928a
-----
+++++
GR0B0R0:RehostMyself-Targeted-atDude$$Put1ProdMinus-AutoTargeted-onHost
.....
Backroom Deals
-----
59fba6dd-ae8f-4369-9596-1b6a44e6685e
-----
onPlay:Put1UpkeepPrePaid-perTargetProperty{Upkeep}-Targeted-atDude-targetMine-isSilent$$Put1ProdPlus-perTargetProperty{Upkeep}-Targeted-atDude-targetMine
+++++
.....
Civil War
-----
b4a04153-f279-4feb-aa29-a9e86607dd3b
-----
onPlay:SimplyAnnounce{to force their opponent to move one of theit targeted dudes to another location}
+++++
.....
Forced Quarantine
-----
ea2b5db4-659a-4524-8ca3-e84ae8d1f9b5
-----
onPlay:StartJob-Targeted-atDude-bootLeader-jobEffects<RehostMyself-AutoTargeted-atDude-isMark++BootHost,None>||atPhaseGamblin:DiscardHost
+++++
.....
The Extra Bet
-----
ff370ba2-fd39-4996-ae02-abd3f46c9af7
-----
+++++
GR0B1R0:CustomScript
.....
Morgan Mining Company
-----
b5ef38d4-d929-4a54-8ac8-36871ba2d66a
-----
atPhaseUpkeep:Gain1Ghost Rock-perEveryCard-AutoTargeted-atStrike-targetMine
+++++
.....
Quarantine Tent
-----
9312b25c-db3d-4889-9c6e-8b4ec495f3a8
-----
+++++
GR0B1R0:BootTarget-Targeted-atDude-targetMine-isCost$$UnbootTarget-AutoTargeted-atOutfit-targetMine$$Remove1UsedAbility-AutoTargeted-atOutfit-targetMine-isSilent
.....
Cooke's Nightcap
-----
a2cae75c-ac0b-437b-a951-a9e891ce4034
-----
+++++
.....
Margaret Hagerty
-----
89b1a934-a4a5-4900-a3b7-f2c5c98f1d1d
-----
+++++
.....
The Wretched
-----
2c14e947-f0de-4049-8016-9d4679f9b2a5
-----
onPlay:AceTarget-AutoTargeted-atDude-targetMine-choose1$$Put2Bounty-DemiAutoTargeted-atDude_and_Mad Scientist-targetMine-choose1-noTargetingError
+++++
.....
Deborah West
-----
d775dd42-183d-4165-83ed-3d825ea051a2
-----
+++++
GR0B0R1:Put1Shootout:Stud
.....
Abram Grothe
-----
11c7f4dc-334e-466f-868a-d5df9157322e
-----
onPay:Reduce1CostPlay-perEveryCardMarker{Bounty}-AutoTargeted-atDude-hasMarker{Bounty}-targetOpponents
+++++
GR0B0R0:UnbootMulti-Targeted-atDude_and_Deputy-targetMine$$UnbootMyself
.....
"Dead" Billy Jones
-----
5005c7d4-a6f3-440d-ae61-dc75412266fb
-----
+++++
GR0B0R0:MoveTarget-Targeted-atDude-moveToDeed_or_Town Square_or_Outfit
.....
Lillian Morgan
-----
a17a638e-7ad1-4216-8748-f9526be7107a
-----
onPay:Reduce1CostPlay-perEveryCard-AutoTargeted-atRanch_or_Horse-targetMine
+++++
GR0B0R0:Retrieve1Card-fromDiscard-grabAction-toTable-payCost
.....
Jim Cheveyo
-----
ca460412-7ba2-4b04-8ec2-44c86460ce1a
-----
+++++
.....
Enapay
-----
ea2ecabb-966e-466f-a285-29028dba628b
-----
+++++
GR0B1R0:Put1InfluencePlus-Targeted-atDude
.....
Danny Wilde
-----
aae392c5-3374-42af-827d-653a9ced3e9b
-----
+++++
.....
Michael "The Badger" Dodge
-----
a0a7fb60-18a3-4a8e-8dd1-a92931e724b1
-----
+++++
GR0B1R0:BootTarget-DemiAutoTargeted-atDude-isParticipating-choose1$$Pull1Card
.....
Asakichi Cooke
-----
ad7f9fb2-c845-49bd-82e2-0a94037e1d77
-----
+++++
GR0B0R0:DiscardTarget-DemiAutoTargeted-fromHand-isCost-choose1$$MoveTarget-Targeted-atDude-targetMine-moveToDeed_or_Town Square_or_Outfit
.....
Emre, The Turkish Bear
-----
45a5acd3-3659-4654-8033-44ef084e1dba
-----
+++++
GR0B0R0:Pull1Card$$Remove999High Noon:Draw-isSilent$$Put1High Noon:Stud
.....
Dabney Scuttlesby
-----
ad157676-5166-40b3-99a4-9cbe1300366d
-----
+++++
.....
Ivor Hawley
-----
e1d93d5b-222d-4a82-b18f-62728f7791c0
-----
onPay:ReduceSCostPlay||onPlay:Retrieve2Cards-fromBootHill-grabAbomination_or_Hex-toTable-payCost-reduc3
+++++
.....
Samantha "Sammy" Cooke
-----
8aa596d9-7b03-4939-9217-3b150fbe77d1
-----
+++++
GR1B1R0:DiscardTarget-Targeted-atGoods$$Put1Bounty
.....
Sloane
-----
6a8c7f99-7f5c-4824-99b7-a9c5f9d9d715
-----
onPay:Reduce1CostPlay-perEveryCard-AutoTargeted-atDude-hasMarker{Bounty}-targetMine||onPlay:UnbootMulti-AutoTargeted-atDude-hasMarker{Bounty}-targetMine
+++++
.....
Mutant Cattle
-----
88967399-9c8c-4502-bf63-5f442aff836b
-----
+++++
.....
Monte Bank
-----
95d23e7f-daa9-44ef-bcd3-50683340db62
-----
+++++
.....
Wendy's Teethkickers
-----
31a6a524-0191-4034-a20c-ec3008d28a6e
-----
+++++
GR0B1R0:UnbootHost$$Put1InfluencePlus-AutoTargeted-onHost
.....
Rich Man's Guard Dog
-----
2a59bc29-adca-4b5c-94f3-e9c94353d4b5
-----
+++++
GR0B1R0:Pull1Card
.....
Devil's Joker (red)
-----
e5aa241d-bc8c-46ee-a250-dac44616415e
-----
+++++
.....
Devil's Joker (black)
-----
c8311233-937c-4468-89f1-8c667ced576b
-----
+++++
.....
Abram's Crusaders
-----
24d74ad6-5532-4a11-ac66-a8b9f258cbf9
-----
+++++
GR0B1R0:Put1High Noon:Deputy-Targeted-atDude-targetMine
.....
The Sanatorium
-----
6f9a4a04-411a-4c2b-b4c2-4db8d6c18157
-----
+++++
GR0B1R0:Put1BulletNoonMinus-Targeted-atDude-targetOpponents$$Put1ValueNoonMinus-Targeted-atDude-targetOpponents$$Put1InfluencePlus-Targeted-atDude-targetMine||GR0B1R0:Put1BulletNoonMinus-Targeted-atDude-targetOpponents$$Put1ValueNoonMinus-Targeted-atDude-targetOpponents$$Put1Noon:Huckster Skill Bonus-Targeted-atDude-targetMine
.....
Den of Thieves
-----
922a0a4b-c9b6-404a-b26b-87f079fc7a6c
-----
+++++
GR0B1R0:UseCustomAbility-DemiAutoTargeted-atDude_and_Grifter-targetMine-choose1$$Gain1Ghost Rock
.....
Dumbstruck
-----
c5f02d5f-ad42-452f-8a7d-b48f58c6e76b
-----
+++++
GR0B1R0:Pull1Card-testMiracle9-spellEffects<UnbootHost++Put1High Noon:Dumbstruck Protected-AutoTargeted-onHost,None>-onlyInNoon||GR0B1R0:Pull1Card-testMiracle9-spellEffects<UnbootHost++Put1Shootout:Dumbstruck Protected-AutoTargeted-onHost,None>-onlyInShootouts
.....
Fiery Rhetoric
-----
507348af-e310-42c1-9878-c8c605882128
-----
+++++
GR0B1R0:Pull1Card-testMiracle6-spellEffects<StartJob-AutoTargeted-atTown Square,None>
.....
Strength of the Ancestors
-----
98b3a9a9-3ab6-4413-b6c0-cc113ed26731
-----
+++++
GR0B1R0:Pull1Card-testSpirit5-spellEffects<Put3BulletNoonPlus-AutoTargeted-atDude-onHost++Remove999High Noon:Draw-AutTargeted-atDude-onHost-isSilent++Put1High Noon:Stud-AutoTargeted-atDude-onHost++Refill1High Noon:Strength of Ancestors-AutoTargeted-atDude-onHost-isSilent,None>
.....
Phantom Fingers
-----
899f3c84-3f9f-4589-83d9-2c2fe663d6a2
-----
+++++
GR0B1R0:Pull1Card-testHex6-spellEffects<BootTarget-Targeted-atGoods-targetOpponents++Refill1Phantom Fingers-Targeted-atGoods-targetOpponents-isSilent++UseCustomAbility-Targeted-atGoods-targetOpponents,None>
.....
Red Horse's Tail
-----
aea19556-2c30-4e03-a677-d0e6f6fa6c66
-----
+++++
GR0B1R0:Pull1Card-testSpiritX-difficultyValue-Targeted-atDude-spellEffects<BootTarget-Targeted-atDude,None>-onlyInNoon||GR0B1R0:Pull1Card-testSpiritX-difficultyGrit-Targeted-atDude-spellEffects<SendHomeBootedTarget-Targeted-atDude-targetOpponents,None>-onlyInShootouts
.....
Fire of Nanahbozho
-----
03da32d4-2e32-43d3-94c7-f3457414d298
-----
+++++
GR0B1R0:Pull1Card-testSpirit8-spellEffects<UnbootTarget-Targeted-atDude,None>
.....
Xiang Fang
-----
5c204eb2-3416-4f3e-8822-ac93c5f66978
-----
onPlay:Gain1Ghost Rock-perTargetProperty{Production}-Targeted-atDeed_or_Outfit$$Put1ProdMinus-perTargetProperty{Production}-Targeted-atDeed-targetOpponents-noTargetingError-isSilent$$Put1ProductionPrePaid-perTargetProperty{Production}-Targeted-atDeed-targetOpponents-noTargetingError-isSilent$$Put1ProdMinus-perTargetProperty{Production}-Targeted-atOutfit-targetMine-noTargetingError-isSilent$$Put1ProductionPrePaid-perTargetProperty{Production}-Targeted-atOutfit-targetMine-noTargetingError-isSilent
+++++
.....
He Fang
-----
555ee88e-b7a2-41b9-a781-357a9fc53f65
-----
+++++
GR0B1R0:PlayTarget-DemiAutoTargeted-fromHand-atAbomination_and_nonGadget-choose1-payCost-reduc2
.....
Kabeda Hakurei
-----
934a695a-50c9-4fcc-8434-cb87d6c5e967
-----
+++++
.....
Zachary Deloria
-----
4c2da757-17a9-43ed-b7cd-c26efcbc0ff7
-----
+++++
.....
Karl Odett
-----
1ee1c912-a6d1-4c74-8fb7-a3430a2cd1c8
-----
+++++
GR0B0R1:Put3PermInfluencePlus
.....
Erik Samson
-----
8cc64eef-41b5-4e60-a68a-e35384d41a7a
-----
+++++
.....
Dr. Brian Foxworth
-----
08c8a178-3613-4b03-a885-5fc205e7e68e
-----
+++++
GR0B0R1:SimplyAnnounce{reduce their casualties by this dude's influence}$$DiscardMyself
.....
Buford Hurley
-----
654372b0-0c12-4d64-9f78-128adf6d6457
-----
+++++
GR0B0R0:Put1ProdPlus$$Put1ProdMinus-Targeted-atDeed
.....
Doris Powell
-----
0f21f34d-76ba-4a54-b690-fafa160ef170
-----
+++++
GR0B0R0:Put1ControlPlus
.....
Rico Rodegain
-----
d760b8cd-7d30-4e1b-a4e6-9b7163eedf85
-----
+++++
GR0B1R0:CustomScript
.....
Maza Gang Hideout
-----
381aa2d3-aa99-49a9-8d53-6b09f8f4bceb
-----
+++++
GR0B0R0:Put1ProdPlus
.....
Miasmatic Purifier
-----
9c101ef5-5214-4d20-8e45-906f8fcc23b3
-----
atSundown:CustomScript
+++++
.....
Disgenuine Currency Press
-----
4bef1feb-f6b5-44ff-8af4-790d121d12e7
-----
onPlay:Gain5Ghost Rock$$UnbootHost
+++++
.....
Tlaloc's Furies
-----
774ebaf2-a983-414b-acc3-7250bd61b9f2
-----
constantAbility:Skill Bonus:1-perEveryCard-AutoTargeted-atTlaloc's Furies
+++++
.....
Personal Ornithopter
-----
e258726e-3705-4d0f-ae7a-d37ce2952b82
-----
+++++
GR1B0R1:ParticipateHost||GR0B1R0:SendHomeBootedHost
.....
Jael's Guile
-----
7eb84c0d-be8e-40b5-891c-b1b4c95b55af
-----
+++++
GR0B1R0:CustomScript
.....
Hustled
-----
0967a396-2be7-4a61-afe9-a119034266fc
-----
onPlay:BootTarget-DemiAutoTargeted-atDude-targetMine-choose1$$Gain2Ghost Rock$$BootTarget-Targeted-targetOpposing-noTargetingError
+++++
.....
An Accidental Reunion
-----
79ec0f19-cc3a-4f63-930e-f3dadf2645f7
-----
onPlay:SimplyAnnounce{force each cheating player to take 2 casualties}$$Lose2Ghost Rock-isCost-isOptional$$UninstallMyself
+++++
.....
A Slight Modification
-----
2c8fd25e-ff8b-4129-8a6c-2c6f3590c455
-----
onPlay:BootTarget-DemiAutoTargeted-atGoods_and_Gadget-targetMine-choose1$$SimplyAnnounce{cancel the shootout action}
+++++
.....
Rabbit Takes Revenge
-----
2f4ad55c-0be5-422e-bb47-ccb1a6fe21e0
-----
onPlay:Pull1Card
+++++
.....
Morgan Stables
-----
668fb674-5e1f-4bc9-98ae-ca7120277ea9
-----
+++++
GR0B0R1:PlayTarget-DemiAutoTargeted-atHorse-fromHand-choose1-payCost-reduc1||GR0B1R0:UseCustomAbility
.....
108 Worldly Desires
-----
fb84426c-6b63-4dee-a2df-2d3c7b89c230
-----
+++++
GR0B0R1:Draw1Card-toDrawHand$$DiscardTarget-DemiAutoTargeted-fromDrawHand-choose1
.....
Beyond the Veil
-----
d47cff43-d675-4410-929c-508475fd7814
-----
+++++
GR0B1R0:BootTarget-Targeted-atTotem-targetMine-isCost$$MoveTarget-Targeted-atDude-targetMine-moveToTotem
.....
Shizeng Lu
-----
bc2f230e-1c4a-47fe-ab97-31a58112e473
-----
+++++
.....
"Lucky" Sky Borne
-----
16d4df2b-4494-4e65-b910-2c35e07d604c
-----
+++++
GR0B0R1:Retrieve1Card-grabSidekick-toTable-payCost-preHost
.....
Riorden O'Lithen
-----
644870b4-541c-4824-af19-3e9a5cd28264
-----
+++++
GR0B0R0:BootTarget-AutoTargeted-atSpell-isUnbooted-onAttachment-choose1-isCost$$Put1Bounty-Targeted-atDude
.....
Mariel Lewis
-----
45f31939-bdaf-49ee-850f-b08f5c809dc0
-----
+++++
GR0B0R0:UnparticipateTarget-Targeted-atDude-targetOpponents$$MoveTarget-Targeted-atDude-moveToOutfit-targetOpponents
.....
Miranda Clarke
-----
43abee45-ec1d-4e5f-8997-e8501d66ed5b
-----
+++++
.....
Elliot Smithson
-----
662dd66d-9cb9-48f6-84bd-4496b3484d01
-----
+++++
GR0B0R0:Gain1Ghost Rock$$UnbootTarget-Targeted-atDude
.....
Forster Cooke
-----
1d283fb4-9fe2-4143-83e2-b6080149df3f
-----
+++++
.....
The Tattooed Man
-----
f1d866bc-8056-4cc8-976a-ee76afca5054
-----
+++++
.....
Joseph Dusty Hill
-----
7f2d6abb-5b50-4d3c-8a2d-aeb447ea5fbd
-----
+++++
.....
Shelby Hunt
-----
41a8c1e5-03e8-427e-a1f2-ffc59dbc420b
-----
+++++
GR0B0R0:Put1BulletShootoutPlus$$Remove999Shootout:Draw-isSilent$$Put1Shootout:Stud
.....
Xemo's Turban
-----
288545a8-9c7b-4e31-ad00-c97910b19b17
-----
+++++
GR0B1R1:UseCustomAbility
.....
Guide Horse
-----
aa1c1ffa-db99-464b-849d-5cb6264245b7
-----
+++++
GR0B1R0:MoveHost-moveToTown Square
.....
Blight Serum
-----
0ed71726-f276-419e-b037-17eb62294489
-----
+++++
GR0B0R1:SendHomeBootedTarget-Targeted-atDude$$AceMyself||GR0B1R0:RehostMyself
.....
Marty
-----
fcac7bbf-dd62-43de-8ade-8ba9893a986f
-----
+++++
GR0B1R0:BootMyself-isCost$$UnbootHost
.....
The Joker's Smile
-----
d3a57ce1-7abd-45b0-82d7-fa0d6324a91d
-----
whileInPlay:Gain1Ghost Rock-foreachUsedJokerAced-ifPhaseGamblin
+++++
GR0B1R0:DiscardTarget-DemiAutoTargeted-atJoker-fromHand-choose1-isCost$$Gain1Ghost Rock$$Draw1Cards||GR1B1R0:Retrieve1Cards-fromBootHill-toDiscard-grabJoker
.....
Old Marge's Manor
-----
19bdb26b-0853-4520-9f72-71d7e4ab6c1d
-----
+++++
GR0B1R0:UseCustomAbility-AutoTargeted-hasMarker{Ghost Rock}-isMine-isCost-isNotMyself-choose1||GR0B1R0:Put1Ghost Rock
.....
Owl's Insight
-----
e530bd2e-0fae-4f00-a522-ba5801904a44
-----
+++++
GR0B1R0:Pull1Card-testSpirit5-spellEffects<PlayMulti-Targeted-atGoods_or_Spell-payCost-reduc1-fromHand++Draw999Cards,None>
.....
Righteous Fury
-----
dd9cbe71-952b-4b3f-8b53-afa6144488ac
-----
+++++
GR0B1R0:Pull1Card-testMiracle8-spellEffects<SimplyAnnounce{increase their opponent's casualties by 2 if they win},None>||GR0B1R0:Pull1Card-testMiracle12-spellEffects<SimplyAnnounce{increase their opponent's casualties by 2}++AceMyself,None>
.....
Outgunned
-----
24bb7502-06a2-4871-9ede-fe28d7539eec
-----
onPlay:BootTarget-DemiAutoTargeted-atDude-targetMine-isParticipating-isCost-choose1-isResolution
+++++
.....
Martyr's Cry
-----
27d7356b-3c4d-4b2d-9c23-28aca5cc9c81
-----
onPlay:DiscardTarget-Targeted-atMiracle-targetMine-isResolution
+++++
.....
Deliberate Infection
-----
31ac9e9d-ff78-4f42-a8c1-9014c7df461e
-----
onPlay:Put1PermInfluenceMinus-AutoTargeted-atDude-onHost$$Put1ProdMinus-AutoTargeted-atDude-onHost
+++++
.....
Sophie Lacoste
-----
e54fe16d-a197-4461-9309-42718820a294
-----
+++++
GR1B0R1:Put1InfluencePlus-Targeted-atDude-targetMine
.....
"Crazy" Mike Draksil
-----
12bea32a-b544-47e8-b2c7-4355aa1606ac
-----
+++++
.....
Horace Manse
-----
5126c19e-76f4-4355-8dad-8023adab95e7
-----
+++++
GR0B0R0:Retrieve1Card-fromBootHill-grabAbomination$$DiscardTarget-DemiAutoTargeted-fromHand-choose1
.....
Jia Mein
-----
1a69a29f-8d3e-4b90-a4b0-8050855974df
-----
+++++
GR0B0R0:PlayTarget-DemiAutoTargeted-atCondition-fromHand-payCost-reduc2-choose1||GR1B0R1:BootTarget-AutoTargeted-atSpell-isUnbooted-onAttachment-choose1-isCost$$Retrieve1Card-grabCondition-fromDiscard
.....
Janosz Pratt
-----
b791fa54-dbe2-4cda-90dd-91a0df866ed1
-----
+++++
GR0B1R0:Retrieve1Cards-fromDiscard-grabGadget_and_Weapon-toTable-payCost-reduc2$$Put1Janosz Rig-AutoTargeted-atGadget_and_Weapon-onAttachment-choose1$$RehostTarget-AutoTargeted-atGadget_and_Weapon-onAttachment
.....
Vasilis the Boar
-----
20ff3990-e458-4fe7-a8c3-8f7839430e73
-----
+++++
.....
Luke, the Errand Boy
-----
06798ab4-80bd-4497-b6fe-d2093c788ba8
-----
+++++
GR0B0R0:RehostTarget-Targeted-atGadget_and_Goods
.....
Arnold McCadish
-----
1aa58444-fccc-4121-ac6c-482fd48e4b8e
-----
+++++
GR0B1R0:Pull1Card
.....
Rick Henderson
-----
198921ba-aafd-4379-bf9d-205b4a4c7763
-----
+++++
GR0B1R0:CustomScript
.....
Willa Mae MacGowan
-----
4be0dce0-1671-4ac1-9539-d721e8fa5b34
-----
+++++
GR0B0R0:AceMyself$$SendHomeBootedMulti-AutoTargeted-atDude-targetMine-isParticipating
.....
The Orphanage
-----
6d816729-b023-4801-8aee-426ebe657835
-----
+++++
GR0B1R0:Put2ProdPlus-AutoTargeted-atDeed-hasProperty{Control}ge2$$Put1ControlMinus-AutoTargeted-atDeed-hasProperty{Control}ge2$$Put2UpkeepPrePaid-AutoTargeted-atDeed-hasProperty{Control}ge2
.....
The Place
-----
c9534712-af16-422c-8fb5-dac7b8c82c65
-----
+++++
GR0B0R1:Put2ProdPlus-Targeted-atDeed
.....
Hawley's Rose
-----
f316fc32-d8a7-4810-bb75-7c1491058ad8
-----
+++++
.....
LeMat Revolver
-----
3347bf9b-ca97-4625-8409-521a4b90af3e
-----
+++++
GR0B1R0:SimplyAnnounce{increase their hand rank by this dude's bullet rating}
.....
Yagn's Mechanical Skeleton
-----
2fb05302-5326-458a-81bf-c5ad611e63e1
-----
onPlay:Put3ValueNoonPlus
+++++
.....
Fool's Gold
-----
8aad1d23-54ea-4439-8d94-e84b1c283d67
-----
+++++
GR0B1R0:Pull1Card-testHex5-spellEffects<Put1Bounty-AutoTargeted-atDude-onHost++UseCustomAbility,None>
.....
Mother Bear's Rage
-----
5d7757ad-32cf-4383-959e-594e8a7314be
-----
+++++
GR0B1R0:Pull1Card-testSpirit5-spellEffects<Spawn1Nature Spirit-modAction:CalloutTarget-Targeted-atDude-targetOpponents,None>
.....
Focusing Chi
-----
614b4a5f-6a75-4a4d-af99-269ce0c53375
-----
onPull:CustomScript||onPlay:Pull1Card
+++++
.....
Mugging
-----
1ba6917a-9efe-4e3e-a118-3270e1c36854
-----
onPlay:BootMulti-Targeted-atGoods_or_Action_or_Spell-targetOpponents$$StartJob-DemiAutoTargeted-atDude-choose1-targetOpponents-jobEffects<SendHomeBootedTarget-Targeted-atDude++AceMulti-Targeted-atGoods_or_Action_or_Spell-isBooted-targetOpponents-MarkNotTheTarget,None>
+++++
.....
Signing Over the Stores
-----
2d728020-2cf6-4908-8034-7325e9fcb394
-----
onPlay:StartJob-AutoTargeted-atTown Square-jobEffects<RequestInt-Max5{Discard how many cards?}++Draw1Cards-perX-toDiscard++Retrieve3Cards-grabGoods-fromDiscard-toTable-payCost-reduc2,None>
+++++
.....
No Funny Stuff
-----
b046d41f-0b5c-4c0c-8d71-507c7877807e
-----
onPlay:SimplyAnnounce{prevent shootout or non-cheatin resolution abilities}
+++++
.....
Arnold Stewart
-----
55c7c4fb-53ee-4fef-96f2-666ef2f1ec2d
-----
+++++
GR0B0R0:BootTarget-AutoTargeted-atGadget-onAttachment-isCost$$UseCustomAbility
.....
Companhurst's
-----
97babd64-ce97-4996-a4a5-f6782660c895
-----
+++++
.....
Auto-Gatling
-----
e3efb184-9dc0-4c4f-9b07-b7c01c66a424
-----
+++++
.....
Heretic Joker (Red)
-----
e2e638ff-27cb-4704-b324-bd318dc9170a
-----
+++++
.....
Heretic Joker (Black)
-----
57039087-1868-4430-a94b-fb7eedeb04a5
-----
+++++
.....
Ol' Howard
-----
f9f258f4-dd3b-43c8-9081-79939b5b76b0
-----
+++++
GR0B1R0:CustomScript
.....
Market Street
-----
e734385a-eb43-4dd0-931d-16b99750de17
-----
+++++
GR0B0R0:BootMyself
.....
Silent Sigil
-----
3e3fff09-9401-46d2-ab8e-cfce77ddde58
-----
+++++
GR0B0R0:CustomScript
.....
Notary Public
-----
98487814-4411-4701-85e6-3c45340679d0
-----
+++++
GR0B1R0:CustomScript
.....
Constance Daughtry
-----
7ab8a00e-d449-42ec-ae60-3dff03151dce
-----
+++++
GR0B0R0:UnbootMyself
.....
"Mahogany" Jackson
-----
8bb9b173-38ce-4b99-8874-3197a596b17e
-----
onParticipation:Draw2Cards-toDiscard
+++++
.....
Gomorra Lot Commission
-----
74d2d9ea-bcc7-4c61-b4a1-0a54856501dc
-----
+++++
GR0B1R0:DiscardTarget-DemiAutoTargeted-atDeed-fromHand-choose1-isCost$$Gain1Ghost Rock
.....
Requiem For A Good Boy
-----
b1db504d-513e-4ce8-8113-c64ded1b0ad5
-----
onPlay:DiscardTarget-DemiAutoTargeted-atSidekick-targetMine-isParticipating-isCost-choiceTitle{Choose the sidekick to discard}-choose1-isResolution$$UnbootTarget-DemiAutoTargeted-atDude-isParticipating-targetMine-choiceTitle{Choose the Host of the discarded Sidekick}-choose1$$SendHomeBootedTarget-DemiAutoTargeted-atDude-isParticipating-targetOpponents-choiceTitle{Choose the dude you're sending home booted}-choose1
+++++
.....
Crafty Hare
-----
b2371c67-bdd2-4ba6-b236-86a551e0f2c1
-----
+++++
GR0B1R0:Pull1Card-testSpiritX-difficultyValue-Targeted-atDude-isParticipating-targetOpponents-spellEffects<UnparticipateHost++MoveHost-moveToDeed_or_Town Square_or_Outfit,None>
.....
Framed
-----
92a85b68-c791-46ac-8515-22379ea67e99
-----
onPlay:CustomScript
+++++
.....
Rhonda Sageblossom
-----
a8b44597-b8b4-4a8a-b077-d6f0495248d7
-----
+++++
.....
Wagner Memorial Ranch
-----
aacdc6a0-f543-4cc2-8370-b2bec8eccc6a
-----
+++++
GR0B1R0:SimplyAnnounce{reduce the invention difficulty by 2}
.....
Fiddle Game
-----
2c318b93-e6b8-422d-b062-baecfc06e62a
-----
onPlay:StartJob-AutoTargeted-atOutfit-targetMine-jobEffects<RehostMyself-AutoTargeted-atOutfit-targetMine-isMark,None>
+++++
.....
Francisco Rosales
-----
f21a80e4-27f0-4c87-a44e-64661765a000
-----
+++++
GR0B1R0:StartJob-AutoTargeted-atTown Square-bootLeader-jobEffects<Retrieve1Cards-grabSidekick_or_Horse-fromDiscard-toTable-payCost-reduc2,None>
.....
Plague of Grasshoppers
-----
e27b4a68-2a7b-48e9-9dae-a42d02f8a66f
-----
onPlay:CustomScript
+++++
.....
Epidemic Laboratory
-----
df42a53c-8fab-4603-9294-f51d4a44e95e
-----
+++++
GR0B0R0:StartJob-bootLeader-DemiAutoTargeted-atEpidemic Laboratory-choose1-jobEffects<Put1PermControlPlus++Put1ProdPlus,None>
.....
Sunday Best
-----
63df2636-a2b4-4470-9b3f-596c2e311dd1
-----
+++++
GR0B1R0:MoveHost-moveToDeed_or_Town Square_or_Outfit
.....
Sight Beyond Sight
-----
27ddc945-bfc5-4dd9-84d8-d6bc44c904ae
-----
+++++
GR0B1R0:Pull1Card-testHex7-spellEffects<UseCustomAbility,None>
.....
Technological Exhibition
-----
083b92c1-bf91-4d5b-95e0-23a72bd0d05a
-----
onPlay:StartJob-DemiAutoTargeted-atDeed_or_Town Square_or_Outfit-choose1-jobEffects<UseCustomAbility,None>
+++++
.....
Carlton "Min" Rutherford
-----
8463332e-61fb-4744-b6cb-89fbcec18bdd
-----
+++++
GR1B0R0:UnbootMyself-ifRobinHood
.....
Walters Creek Distillery
-----
f7be47f3-b72b-4739-8391-e7d718868b90
-----
+++++
GR0B1R0:CustomScript
.....
A Piece Of The Action
-----
c5d383fa-b1e7-4135-bb1a-a530b9f8338d
-----
onPlay:CustomScript
+++++
.....
Foreboding Glance
-----
38c663b2-e6f8-4135-83d0-46122e5be7d6
-----
onPlay:CustomScript
+++++
.....
Spirit Steed
-----
ccc98846-eea5-4568-9726-2f86d9b90896
-----
+++++
GR0B1R0:MoveHost-moveToSpirit||GR0B0R1:BootTarget-DemiAutoTargeted-atSpirit-onAttachment-choose1-isCost$$MoveHost-moveToDeed_or_Town Square_or_Outfit
.....
Mountain Lion Friend
-----
54cd83d3-4735-4948-9a49-9570710da531
-----
+++++
GR0B0R0:DiscardMyself$$SimplyAnnounce{reduce their casualties by two}
.....
Ambrose Douglas
-----
62a29361-d2d6-4934-ac70-916435475094
-----
+++++
GR0B0R0:CustomScript
.....
Lucretia Fanzini
-----
c2d7e814-dddf-4f21-9315-d2d5993da004
-----
+++++
.....
Turtle's Guard
-----
28b4125d-61a9-4714-870c-2f27e4872e9f
-----
+++++
GR0B1R0:Pull1Card-testSpirit4-spellEffects<UseCustomAbility,None>
.....
Onward Christian Soldiers
-----
ed8fda6f-0cee-4d73-9af0-159fbb3db4e0
-----
+++++
GR0B1R0:Put1BulletShootoutPlus-AutoTargeted-atDude-targetMine-isParticipating$$Put1Shootout:Bullet Immunity-AutoTargeted-atDude-targetMine-isParticipating
.....
Pigging Out
-----
4964e22f-04f8-4d72-b70c-9b1c8d85a53a
-----
onPlay:Pull1Card
+++++
GR0B0R0:Draw5Cards-toDiscard$$Retrieve1Card-fromDiscard-grabTao of Zhu Bajie$$Put1Shootout:Pigging Out-Targeted-atDude-targetMine-isParticipating
.....
Tusk
-----
4b68dd9b-fb40-4912-8ff6-d7de771861dd
-----
onPlay:Draw1Card
+++++
.....
Darragh Meng
-----
991495ee-2b58-4e7f-92d0-11ffaca1cff5
-----
+++++
.....
Fool Me Once...
-----
61e47afb-cb64-46b5-86ae-893590b84139
-----
onPlay:Draw3Cards-onOpponent
+++++
GR0B0R1:CustomScript
.....
Henry Moran
-----
22df7ba7-5a3e-45de-b12a-a6c672ad6ccf
-----
+++++
.....
Tallulah "Lula" Morgan
-----
3c6ad64f-f716-4ee8-80db-f13602745d44
-----
+++++
GR0B0R1:Gain1Ghost Rock
.....
Theo Whateley-Boyer
-----
6bbd7290-5c98-4378-b581-0c044b96860a
-----
+++++
GR0B0R1:CustomScript
.....
Travis Moone
-----
cde1ec30-fedd-46de-af9b-110856d0f1c7
-----
+++++
GR0B1R0:Draw2Cards
.....
Caitlin McCue
-----
fe0b667f-b384-465c-8e3e-97b9f122031c
-----
+++++
.....
Bethany Shiile
-----
55f3b556-f7c1-4a0a-9f88-6f2a89635118
-----
+++++
.....
Tyxarglenak
-----
0e414687-3d4e-413a-93d1-eb7ccaf7364e
-----
+++++
.....
Dr. Dayl Burnett
-----
1f895c03-29d3-4c09-a884-2d24aea2c84e
-----
+++++
GR0B0R0:Remove1Bounty-DemiAutoTargeted-atDude-hasMarker{Bounty}-targetOpponents-isCost-choose1$$UseCustomAbility
.....
Maggie Harris
-----
7e03d2ef-dde6-4219-afd4-608280aee7bb
-----
+++++
GR0B1R0:StartJob-AutoTargeted-atOutfit-targetMine-bootLeader-jobEffects<Retrieve1Cards-grabHorse-fromDiscard-toTable-payCost-reduc1,None>
.....
Emilia Vivirias
-----
22e58549-c649-457b-a3da-4507ecfc30ec
-----
+++++
.....
"Professor" Duncan
-----
14d66d53-e32a-4077-a843-ec3b02e4b628
-----
+++++
.....
The Highbinder Hotel
-----
2dd93d6f-a777-4592-a4b2-224bd81bc08a
-----
+++++
GR0B1R0:SendHomeBootedTarget-DemiAutoTargeted-atDude-isParticipating-targetMine-choose1
.....
2nd Bank of Gomorra
-----
1db846be-917b-46a0-980d-4f43d2b83d2a
-----
+++++
GR0B1R0:Put2Ghost Rock||GR0B1R0:Gain1Ghost Rock-perMarker{Ghost Rock}$$Remove999Ghost Rock
.....
High Stakes Haven
-----
47391e41-4a0f-443b-91dc-e3b0e298fbf4
-----
whileInPlay:UseCustomAbility-foreachCheatinRevealed-byMe
+++++
.....
Culpability Scientizer
-----
e116ab3d-a5d5-46a3-860a-bc629bd0c63b
-----
onPlay:Put2Bounty-DemiAutoTargeted-atDude-choose1
+++++
GR0B0R0:DiscardMyself$$CalloutTarget-Targeted-atDude-hasMarker{Bounty}
.....
Espuelas
-----
05fc82d0-06e4-422e-9cd7-18110a50a244
-----
+++++
GR0B1R0:MoveHost-moveToDeed_or_Town Square_or_Outfit
.....
Rites of the Smoking Mirror
-----
aa2d0ea9-5ede-4074-b0f7-2e78656f0e15
-----
+++++
GR0B0R0:PlayTarget-DemiAutoTargeted-fromHand-atGoods_and_Mystical-choose1-payCost-isCost-reduc4$$AceMyself||GR0B0R0:UnbootTarget-Targeted-atDude$$DiscardMyself
.....
Essence of Armitage
-----
fc2842bf-c9b0-4b79-875b-3e35d91a5af2
-----
+++++
.....
Festering Grasp
-----
64605ce4-35a8-4167-8650-e9b99b0e0dd3
-----
+++++
GR0B1R0:Pull1Card-testHex13-spellEffects<Retrieve1Card-toTable-grabSidekick-fromDiscard-payCost-reduc2-preHost,None>
.....
Silver Pheasant's Bounty
-----
560d147e-37d3-4f17-91a5-33fcbd23deef
-----
onPlay:Put1ProdPlus-AutoTargeted-atDeed-onHost
+++++
GR0B1R0:Pull1Card-testSpirit6-spellEffects<BootHost-isCost++Gain2Ghost Rock,None>
.....
Comin' Up Roses
-----
2d97b6cb-9387-4081-8be5-85bb0bba0039
-----
+++++
.....
Run Rabbit Run
-----
53df6af3-7b21-4594-926d-9e3ec7e30274
-----
onPlay:Pull1Card
+++++
GR0B0R0:Draw5Cards-toDiscard$$Retrieve1Card-fromDiscard-grabTao of the Jade Rabbit$$Put1Shootout:Run Rabbit Run-Targeted-atDude-targetMine-isParticipating
.....
Putting the Pieces Together
-----
e42451a8-c431-4b2b-a454-9b94c187dca6
-----
onPlay:BootMyself||whileInPlay:BootMyself-foreachCheatinRevealed-byMe
+++++
.....
Someone Else's Problem
-----
8ac302b6-711f-41dc-9f0d-4ec19f16f813
-----
onPlay:Put1Shootout:Someone Elses Problem-Targeted-atDude-targetOpponents
+++++
.....
Lost to the Plague
-----
c22ec64d-d7d6-4c06-b09d-4a827c0dbad3
-----
+++++
.....
El Armadillo de Hierro
-----
8c1e6faf-c90d-4167-9834-793ffcaa47f7
-----
+++++
GR0B0R1:Put1BulletShootoutPlus
.....
Jim Hexter
-----
b1d4638a-0118-426f-a966-1a9c6ff0da5b
-----
+++++
GR0B0R1:Lose1Ghost Rock-isCost$$Gain1Ghost Rock-onOpponent$$Remove999Shootout:Draw-isSilent$$Put1Shootout:Stud
.....
Hupirika Sue
-----
d2e8e560-15ad-421d-931b-7bb9a8bc6b2c
-----
+++++
GR0B0R0:Put2BulletShootoutPlus-Targeted-atDude-targetOpponents
.....
The Mixer
-----
0c1b3abb-69eb-4f9b-9dfc-69942c2442a6
-----
+++++
GR0B0R1:Put1Shootout:Mixed Medicine-Targeted-atDude-isParticipating
.....
The Harvester
-----
51b3db35-9811-4adf-89f6-7dd48b031bba
-----
+++++
.....
Lucy Clover
-----
7477d1e2-14c6-45e8-8acd-9619ec2fa578
-----
+++++
GR0B0R1:Remove999Shootout:Draw-isSilent$$Put1Shootout:Stud
.....
Buckin' Billy Ballard
-----
314f6ee6-e572-44a2-9412-78bd822be671
-----
+++++
GR0B0R0:BootTarget-AutoTargeted-atHorse-onAttachment-isCost$$Draw1Cards$$BootTarget-Targeted-atDude-noTargetingError
.....
Antoine Peterson
-----
1120919a-d4b6-4655-b369-4f732b016e14
-----
onPlay:CustomScript
+++++
.....
Denise Brancini
-----
be0ace53-d90a-4fff-a6a6-c5545667d83f
-----
onDiscard:CustomScript
+++++
.....
"Open Wound"
-----
7cc6fca2-a48a-4386-ba79-2fd94534b6ec
-----
+++++
GR1B0R1:MoveTarget-DemiAutoTargeted-atDude-targetMine-choose1-moveToDeed_or_Town Square_or_Outfit
.....
Long Strides Ranch
-----
fd144b71-3727-4333-be79-b57ab4fe3afc
-----
+++++
GR0B1R0:PlayTarget-DemiAutoTargeted-fromHand-atHorse-choose1-payCost-reduc2
.....
The Gomorra Gazette
-----
d03f58a7-ef6a-428e-a950-67e0b6245b66
-----
+++++
GR0B1R0:Gain1Ghost Rock-perTargetProperty{Influence}-Targeted-atDude-targetOpponents
.....
Bacillus Pasteuria
-----
2e323023-c293-41ce-a87c-12fddba592d5
-----
+++++
GR0B0R0:UnbootTarget-Targeted-atDude
.....
Doomsday Supply
-----
4461e319-d77d-4a76-ba9c-e401a4a99223
-----
+++++
GR0B1R0:Retrieve1Card-grabGoods-fromDiscard-toTable-payCost-reduc2
.....
The Blighted
-----
af22f9d8-738b-4e7f-a13a-599089e51507
-----
+++++
.....
Sun-Touched Raven
-----
8b505b63-7954-42b6-b072-85b5b794c0fa
-----
+++++
GR0B1R0:Pull1Card-testSpirit6-spellEffects<UseCustomAbility,None>
.....
Inner Struggle
-----
ba6d6429-9a34-4912-96c4-aee1649650f3
-----
onPlay:BootMyself$$UseCustomAbility||whileInPlay:BootMyself-foreachCheatinRevealed-byMe
+++++
.....
Lighting the Fuse
-----
49496e5f-974e-43f1-a85f-d9bda3de8325
-----
+++++
.....
One Fights As Many
-----
18f82dad-6f87-4a26-9dee-2859b4854b70
-----
onPlay:StartJob-AutoTargeted-atTown Square
+++++
.....
Serendipitous Arrival
-----
f8fc9a76-cbd9-4ad5-b2c7-5cd40f150e79
-----
onPlay:CustomScript
+++++
.....
Siege of the Orphanage
-----
059481a6-d607-4896-8584-fb596c4647f7
-----
+++++
.....
Seamus McCaffrey
-----
32e3dc80-38a5-41e5-a267-f5b0f3a55d22
-----
+++++
.....
Shi Long Peng
-----
f23323c6-2381-4f8a-961c-606f84147c56
-----
+++++
.....
Gene North Star
-----
ea7a82ee-ad0d-4508-bbd7-a0a7b9fd610a
-----
+++++
GR0B0R0:DiscardTarget-DemiAutoTargeted-atSpell-onAttachment-isCost-choose1$$Put2BulletShootoutPlus
.....
Speaks-with-Earth
-----
1bea3954-dab3-42ef-830d-4d03a779b7b6
-----
+++++
GR0B1R0:BootTarget-DemiAutoTargeted-atDude-targetOpponents$$UnbootTarget-DemiAutoTargeted-atTotem-noTargetingError
.....
The Grey Man
-----
49e72f76-4b92-468d-90c9-325689b607b1
-----
+++++
GR0B0R0:BootTarget-DemiAutoTargeted-atDude
.....
Erin Knight
-----
b66bd108-b7f4-4eb4-8dab-bdcf67dab50c
-----
+++++
.....
Nicholas Kramer
-----
96ac4bcb-e022-4dc3-b87f-a9a0ddc4c723
-----
+++++
.....
Quimby R. Tuttlemeir
-----
5f270369-68b9-4ce9-9e67-1e74f313ea6f
-----
+++++
GR0B0R0:AceTarget-DemiAutoTargeted-atDude_and_Abomination-targetMine$$Put1PermInfluencePlus
.....
Pancho Castillo
-----
4e724dee-6a9f-42dc-92ee-00e0f55be29f
-----
+++++
.....
Grimoires & More
-----
ef1b01c0-82e5-4243-8839-2136df14d4b1
-----
+++++
GR0B1R0:CustomScript
.....
J.W. Byrne, P. I.
-----
576ef086-4950-47b3-8ab5-71f3fdbce794
-----
+++++
GR0B1R0:BootTarget-Targeted-atGrifter-targetMine
.....
Knight's Chasuble
-----
92cf4c1a-cd1b-4377-a0dd-e06d52edbbff
-----
+++++
GR0B1R0:UnbootHost
.....
Pedro
-----
66f1e08f-2b5a-4e3e-97d2-0289c6379700
-----
onPlay:Put3ValueNoonMinus
+++++
.....
Aetheric Shockwave Inducer
-----
b63cda5e-d4cb-436f-be79-79dc74124a57
-----
+++++
GR0B0R1:Pull1Card
.....
The Gambler's Gun
-----
fac96abf-efee-492f-ae9e-72a944e518a6
-----
+++++
GR0B1R0:SimplyAnnounce{increase their hand rank by 2}
.....
Calling the Cavalry
-----
4bc332e3-b672-4445-90af-f4b7e4816fee
-----
onPlay:Remove999Shootout:Draw-isSilent-DemiAutoTargeted-atDude-targetMine-isParticipating$$Put1Shootout:Stud-DemiAutoTargeted-atDude-targetMine-isParticipating
+++++
.....
Get Behind Me, Satan!
-----
f5359fcb-0095-4ef0-8264-e3eec905528d
-----
+++++
GR0B1R0:Pull1Card-testMiracle5-spellEffects<SimplyAnnounce{reduce casualties by their blessed rating},None>
.....
Ghostly Communion
-----
7469036b-d154-4cb5-9eff-7d77fbdbcdf0
-----
+++++
GR0B1R0:Pull1Card-testSpirit7-spellEffects<MoveHost-moveToDude_or_Deed_or_Town Square_or_Outfit++ParticipateHost,None>-onlyInShootouts||GR0B1R0:Pull1Card-testSpirit5-spellEffects<MoveHost-moveToDeed_or_Town Square_or_Outfit,None>-onlyInNoon
.....
All or Nothing
-----
22e8401c-bf60-42e0-8119-36ce05c5d893
-----
onPlay:StartJob-DemiAutoTargeted-atDeed_or_Town Square_or_Outfit-choose1-jobEffects<SendHomeBootedMulti-Targeted-atDude-targetOpponents,None>
+++++
.....
Rite of Profane Abstersion
-----
722d4849-79a2-4bf3-b3d6-109de8762773
-----
+++++
.....
Showboating
-----
d67fe646-d457-41bd-b8ce-ec8f836ca3a5
-----
onPlay:Put1PermControlPlus-Targeted-atDude-targetMine
+++++
.....
Mr. Outang
-----
ef12ef9a-b1c8-433f-a4ac-eac38f5ffaf1
-----
+++++
.....
Zui Waidan
-----
407fccf5-1007-4be6-a1f5-8a06bc2c0674
-----
+++++
GR0B0R0:BootTarget-Targeted-atDeed-targetMine-isCost$$PutShootout:KungFu Bonus:2-DemiAutoTargeted-atDude-isParticipating-targetMine-choose1
.....
Stevie Lyndon
-----
52674538-37bf-4dc3-bba9-164f11abb7c4
-----
+++++
.....
Eva Bright Eyes
-----
4e453b4b-da87-4042-8ebf-92ff9a25c25c
-----
+++++
GR0B0R0:BootTarget-AutoTargeted-atHorse-onAttachment-choose1-isCost$$SendHomeBootedMyself
.....
Wei Xu
-----
043e1e23-de64-403f-a5ae-6df84823538f
-----
+++++
.....
Mick Aduladi
-----
1c769e8e-e7d5-49e5-94c7-7f83dc3b1cc7
-----
+++++
GR0B0R0:MoveMyself-moveToHoly Ground
.....
Hattie DeLorre
-----
77e140fc-3d2d-4715-bc47-af208e671b3e
-----
+++++
GR0B0R0:Put5BulletShootoutMinus-DemiAutoTargeted-atDude-hasMarker{Bounty}-targetOpponents-choose1
.....
Father Tolarios
-----
c213a4b2-95ee-487c-b0a8-8cc09ac87a7d
-----
+++++
GR0B0R0:CustomScript
.....
Jimmy "The Saint"
-----
163fc3b6-eea4-44ab-9edc-f03647046c3a
-----
+++++
GR0B0R0:Gain1Ghost Rock-perTargetProperty{Production}-Targeted-atDeed-targetMine
.....
Mario Crane
-----
2a6bafed-d2ea-4275-877a-f0f15aae4c85
-----
+++++
GR0B0R1:SimplyAnnounce{reduce their casualties by 2 this round}
.....
POST-A-TRON
-----
03c922cf-e0ef-4d47-bbb7-7c62428f90fb
-----
+++++
GR0B1R0:Gain3Ghost Rock
.....
Diego Linares
-----
82aaf3b6-9eb1-4fd2-9812-b6b727cf51b2
-----
+++++
.....
Christine Perfect
-----
75969ce9-d9e6-4cc1-b4e5-5ff29c3e8d69
-----
+++++
GR0B0R0:MoveMyself-moveToDude$$Put5ValueNoonMinus-Targeted-atDude-targetOpponents
.....
Absalom Hotchkiss
-----
2431b33e-b34c-4b53-9915-fa18ca6777e7
-----
+++++
GR0B0R0:MoveTarget-Targeted-atDude-targetMine-moveToAbsalom Hotchkiss
.....
The Caretaker
-----
543b1dab-41f2-4d79-80f5-a00fa3a0b6e8
-----
atPhaseGamblin:Put4ValueNoonPlus$$Put4BulletNoonPlus$$Put1High Noon:Stud
+++++
GR0B0R0:Remove4ValueNoonPlus$$Remove4BulletNoonPlus$$Remove1High Noon:Stud
.....
Gomorra Gaming Commission
-----
27194abd-3f50-4088-98f2-5dd70dfddd62
-----
+++++
GR0B1R0:Put1ProdPlus$$UseCustomAbility-isResolution
.....
Sherman Mortgage
-----
15b1a170-e713-40d5-ab39-6ed2f8cd97d6
-----
atPhaseShootoutStart:BootMyself
+++++
.....
Cliff's #4 Saloon
-----
76f4b9dd-7c1e-4ffd-a2ea-e7e2d0f253eb
-----
+++++
GR0B1R0:Remove999High Noon:Draw-DemiAutoTargeted-atDude-targetMine-choose1$$Put1High Noon:Stud-DemiAutoTargeted-atDude-targetMine-choose1
.....
Nickel Night Inn
-----
581c842b-d83b-4fcf-b525-7acad7f4a866
-----
+++++
GR0B1R0:BootTarget-DemiAutoTargeted-atDude-targetOpponents-hasProperty{Value}le3-choose1
.....
Burn 'Em Out
-----
973b1148-56aa-421b-ba9a-5c6273710b6c
-----
onPlay:StartJob-AutoTargeted-atOutfit-targetOpponents-bootLeader-bountyPosse-jobEffects<UseCustomAbility,None>
+++++
.....
The Law Goes Underground
-----
a149d4e1-b28c-4f5a-8202-9c12d7be2876
-----
onPlay:CustomScript
+++++
.....
Shiny Things
-----
3448fa9e-1adc-4094-80a9-7c2ed35db597
-----
onPlay:BootTarget-Targeted-atDude-targetOpponents$$Put2InfluencePlus-Targeted-atDude-targetOpponents$$Put7ValueNoonPlus-Targeted-atDude-targetOpponents$$Put1High Noon:Shiny Things-Targeted-atDude-targetOpponents
+++++
.....
Moving Forward
-----
eaf25cb8-16fb-4f52-ba4b-ce8a88295d2f
-----
onPlay:Retrieve1Card-fetchDeed-toTable-isResolution
+++++
.....
We Got a Beef!
-----
e4118ee1-a9eb-4738-a378-673ca6c918a7
-----
onPlay:CustomScript
+++++
.....
Baijiu Jar
-----
d88d3c39-4f35-40e9-b37b-046f6c9409de
-----
+++++
GR0B1R0:DiscardMulti-DemiAutoTargeted-fromHand-choose3-isCost$$Draw4Cards
.....
De Annulos Mysteriis
-----
765bff12-2d2b-44a0-a5ad-80920e291833
-----
+++++
GR0B0R0:CustomScript
.....
Rancher's Lariat
-----
08507600-22ab-4903-861e-1c3941a54e23
-----
+++++
GR0B1R0:BootTarget-Targeted-atWeapon-targetOpponents-noTargetingError$$Put1Shootout:Whipped-Targeted-atDude-isParticipating-targetOpponents
.....
Bowie Knife
-----
cb7e441a-1d78-4967-92ec-ab2d3a0f70fe
-----
+++++
.....
Hydro-Puncher
-----
86f8e98c-66a6-4a54-b0c0-48e7428dc891
-----
+++++
GR0B1R0:BootTarget-Targeted-atGoods_or_Spell-targetOpponents-noTargetingError$$BootTarget-Targeted-atDude-isParticipating-targetOpponents
.....
Heartseeker
-----
a12b3e09-b8af-4620-8479-20b5065df197
-----
+++++
GR0B1R0:Pull1Card-testHex5-spellEffects<SimplyAnnounce{mark the opposing shooter to be aced, and reduce the opponent's casualties by 3},None>-onlyInShootouts
.....
Amazing Grace
-----
1cd6c83a-2c5d-4f23-92d3-f9dd371413f0
-----
+++++
GR0B1R0:Pull1Card-testMiracle6-spellEffects<Put1InfluencePlus-AutoTargeted-atDude-onHost++Put1High Noon:Amazing Grace-AutoTargeted-atDude-onHost,None>
.....
Tse-Che-Nako's Weaving
-----
540d789b-98d5-47ac-8778-7a834851b10b
-----
+++++
GR0B1R0:Pull1Card-testSpirit6-spellEffects<MoveTarget-moveToDeed_or_Town Square_or_Outfit$$UseCustomAbility,None>
.....
Great Sage Matching Heaven
-----
39fa07ab-40d4-4ddd-b2a6-6c444a1f48cd
-----
onPlay:Pull1Card
+++++
GR0B0R0:Put1InfluencePlus$$Put1Shootout:Harrowed
.....
Two Hundred Fifty Rounds
-----
ea3990fa-ae15-4314-b8f7-d4d8aaf11788
-----
onPlay:Pull1Card
+++++
GR0B0R0:SimplyAnnounce{to reduce their casualties this round by 1}
.....
108 Drunken Masters
-----
72f2f4f7-2e0e-4994-aaa2-c6228fc3d8d9
-----
+++++
GR0B1R0:SimplyAnnounce{reduce the pull by the amount of saloons they control}
.....
Gateway to Beyond
-----
fd347528-2ce1-4cc5-9efb-a1052ee8603b
-----
+++++
GR0B1R0:CustomScript
.....
Justice in Exile
-----
e0943fa8-0024-451f-862f-644ee23011eb
-----
+++++
GR0B1R0:Gain2Ghost Rock
.....
Protection Racket
-----
e415100e-91bb-4d9e-87aa-ec2f23d63f84
-----
+++++
GR0B1R0:CustomScript
.....
Morgan Regulators
-----
b94ed693-16ed-4c7c-84d8-375498922a74
-----
+++++
GR0B1R0:MoveDude-moveToTown Square$$Put1High Noon:Stud-Targeted-atDude-targetMine$$UseCustomAbility
.....
Full Moon Brotherhood
-----
a9ec06b7-2fe5-4334-82f1-43e460b07967
-----
+++++
GR0B1R0:Put1High Noon:Brotherhood Mark-DemiAutoTargeted-atDude-targetOpponents-choose1
.....
"Thunder Boy" Nabbe
-----
1c4fd81f-8b08-4e7f-9325-8c63104f2694
-----
+++++
GR0B0R1:Put1BulletShootoutPlus-perProperty{Influence}$$Remove999Shootout:Draw-isSilent$$Put1Shootout:Stud-isSilent
.....
Dave "Slim" Gorman
-----
698ae5b9-1b37-45be-bbd2-118c03d64a30
-----
+++++
GR0B0R0:Remove999Shootout:Stud-isSilent-AutoTargeted-atDude-isParticipating$$Put1Shootout:Draw-isSilent-AutoTargeted-atDude-isParticipating
.....
Darius Hellstromme
-----
ae22bba2-cf1e-4038-b7bb-1d3429ca2daf
-----
+++++
GR0B1R0:CustomScript
.....
Ezekiah Grimme
-----
2733deda-5584-42e1-9dfd-d283ad68cf1f
-----
+++++
GR0B1R0:UseCustomAbility-isFirstCustom||GR0B1R0:UseCustomAbility-isSecondCustom
.....
Jasper Stone
-----
6bcacb58-f902-483e-8f25-6eef33e9dd18
-----
+++++
GR0B1R0:Put1Shootout:JasperCurse-DemiAutoTargeted-atDude-targetOpponents-isParticipating-choose1||GR0B0R1:Put1PermBulletPlus-Targeted-atDude-isMine$$Put1PermControlPlus-Targeted-atDude-isMine
.....
Raven
-----
1d0ac7a8-da18-4a99-9467-02edf80e6258
-----
+++++
GR0B1R0:Put1Noon:RavensCurse-DemiAutoTargeted-atDeed-choose1||GR0B0R1:Put1PermBulletPlus-DemiAutoTargeted-atDude-targetMine-choose1
.....
Wang Men Wu
-----
b1d048cf-7e94-4129-b817-5e0980038796
-----
+++++
GR0B0R0:Spawn1Gunslinger-modAction:ParticipateMyself
.....
Charging Bear
-----
0dff63ec-8e97-488d-87c1-d2505b44acc0
-----
+++++
GR0B0R0:AceTarget-Targeted-atDude-targetMine-choose1$$SendHomeBootedTarget-DemiAutoTargeted-atDude-isParticipating-targetOpponents-choose1$$Put1Shootout:Stud
.....
Fears No Owls
-----
51fa6f06-ba8e-432d-aca5-639123f2b9b9
-----
+++++
GR0B0R0:MoveTarget-DemiAutoTargeted-atDude-targetMine-choose1-moveToDeed_or_Holy Groung
.....
Black Owl
-----
08137945-5919-4b1d-be80-6aff3f89118b
-----
+++++
GR0B1R0:Put5ValueShootoutMinus-Targeted-atDude-isParticipating$$Put1Shootout:FirstCasualty-Targeted-atDude-isParticipating
.....
Zeb Whateley-Dupont
-----
f8adf6a9-c944-4b37-98eb-c167c5bce2e7
-----
onPlay:CustomScript||atPhaseGamblin:CustomScript
+++++
.....
Rosenbaum's Golem
-----
dafad1b5-7067-4efa-b063-67a4c5c2b42a
-----
+++++
GR0B0R0:CustomScript
.....
Stewart Davidson
-----
720083f1-9fd9-4608-a112-9e0a28de43d2
-----
+++++
GR0B0R0:CustomScript
.....
Sheriff Eli Waters
-----
fb433634-025d-4333-aa2b-e8c9d230d020
-----
+++++
GR0B0R1:CustomScript||GR0B0R1:MoveMyself-moveToDude-hasMarker{Bounty}||GR0B0R1:MoveMyself-moveToDude-hasMarker{Bounty}$$ParticipateMyself
.....
Adrian Vallejo
-----
3d38ea0c-31d3-456e-a7cd-f7a66476395d
-----
+++++
GR0B0R0:ParticipateMyself
.....
Prof. Aloysius Roe
-----
ad68435b-5503-4af0-bd4e-7e5b15c04866
-----
+++++
GR0B0R0:SimplyAnnounce{The value of the pull is increased by Prof. Roe's MS rating}
.....
Rabid Rance Hitchcock
-----
9f3d837e-e317-403c-a72d-c9b8a48f5bcf
-----
+++++
GR0B0R1:Put1PermControlPlus-Targeted-atDeed-choose1||GR0B0R1:Put1PermControlMinus-Targeted-atDeed-choose1||GR0B0R1:MoveMyself-moveToDeed_and_Out of Town||GR0B0R1:CustomScript
.....
Morgan Lash
-----
711deb54-4548-4206-81af-77d5dcc8793a
-----
onPay:ReduceSCostPlay
+++++
.....
Johnny Brocklehurst
-----
5c6b6541-1253-4da2-a454-ce912ffcf474
-----
+++++
GR0B0R0:Put1InfluencePlus-Targeted-atDude||GR0B0R0:Remove1InfluencePlus-Targeted-atDude
.....
Agent Provocateur
-----
06d454b8-8713-4bba-b0d6-d9152d52423a
-----
+++++
GR0B0R0:CustomScript
.....
F1 Burch
-----
b7eae322-2208-4ef6-8ff2-d7af7ef5d2a1
-----
+++++
GR0B0R0:MoveMyself-moveToDude-choose1||GR0B0R0:MoveMyself-moveToDude-choose1$$Retrieve1Card-grabGoods_and_nonGadget_and_nonUnique-fromDiscard-toTable-payCost-reduc1||GR0B0R0:MoveMyself-moveToDude-choose1$$PlayTarget-DemiAutoTargeted-atGoods_and_nonGadget_and_nonUnique-fromHand-choose1-payCost-reduc1
.....
Taff's Distillery
-----
0d2710a6-4ed9-447c-9cfb-2536d2def29c
-----
+++++
GR0B1R0:Draw1Card
.....
Mausoleum
-----
e8906b23-85f3-44f0-89bb-d1cff708e8b3
-----
+++++
GR0B1R0:Put1PermControlPlus
.....
Epitaph Branch Office
-----
d95fe205-8bcb-4bc5-a895-501fe91a52f3
-----
+++++
GR0B1R0:CustomScript
.....
Buffalo Emporium
-----
2ee14351-c57d-4944-850c-d58cb5c8c304
-----
+++++
GR0B1R0:CustomScript
.....
Explorer's Lodge
-----
67c733e3-1842-44fe-9c40-33d1aad47b4a
-----
+++++
GR0B1R0:CustomScript
.....
The Oriental
-----
162aaf13-fe99-4bb6-8fbe-9587d71bd666
-----
+++++
GR0B1R0:CustomScript
.....
Hellstromme Plant #9
-----
ed34d5f8-3376-4be1-9db7-64c50cdebab9
-----
+++++
GR1B1R0:UnbootTarget-Targeted-atHorse_or_Gadget-choose1
.....
Bilton Collection Agency
-----
381ea2d4-eb1c-46e0-8aad-a500c406709a
-----
+++++
GR0B1R0:Gain1Ghost Rock-perTargetProperty{Production}-DemiAutoTargeted-atDeed-targetMine
.....
Decimator Array
-----
e2fed23a-b50b-4632-858d-ffd622184e5c
-----
onPlay:CustomScript
+++++
GR0B1R0:SimplyAnnounce{Changing suit or value of the card}||GR0B0R1:Put3ValuePermPlus-AutoTargeted-atDude-onHost$$Put1ProdPlus-AutoTargeted-atDude-onHost||GR0B0R1:Put3ValuePermMinus-AutoTargeted-atDude-onHost$$Put1ProdMinus-AutoTargeted-atDude-onHost
.....
Devil's Six Gun
-----
14eb0493-5ea9-4b44-b955-303fcea47e64
-----
+++++
GR0B1R0:CustomScript
.....
Forsaken Hound
-----
1ae574d7-9dd2-4999-a73f-90008198c1b9
-----
+++++
GR0B1R0:Put1Shootout:Cannot Run This Round-Targeted-atDude-targetOpponents_and_atDude-onHost
.....
The Bloody Star
-----
3572042d-4197-4753-830f-d138500aff64
-----
onPlay:Put2Bounty-AutoTargeted-atDude-onHost
+++++
.....
Cavalry Escort
-----
e527c15a-fce5-451a-b34d-51c9050d9cac
-----
+++++
GR0B1R0:BootHost$$ParticipateHost
.....
Nightmare Realm
-----
e3c6f0bb-a585-46b4-b530-bbe6f3347ae8
-----
+++++
GR0B1R0:Pull1Card-testHex3-spellEffects<Put1Shootout:Nightmare-DemiAutoTargeted-atDude-targetOpponents-isParticipating-choose1++Put1BulletShootoutPlus-AutoTargeted-atDude-onHost,None>||GR0B0R1:Put1BulletShootoutMinus-Targeted-atDude-hasMarker{Nightmare}-isParticipating-targetOpponenes$$Put1ValueShootoutMinus-Targeted-atDude-hasMarker{Nightmare}isParticipating-targetOpponents
.....
Sentinel
-----
be8dd9d7-31f8-4cf6-aa37-5a18067bb067
-----
+++++
GR0B1R0:Pull1Card-testMiracle5-spellEffects<UseCustomAbility,None>
.....
Censure
-----
c13ad872-7c89-44fc-8572-2da526866207
-----
+++++
GR0B1R0:Pull1Card-testMiracle6-spellEffects<UseCustomAbility,None>
.....
Raven's Ruin
-----
2e57f90e-6852-49a8-abdc-9d24be7018fe
-----
+++++
GR0B1R0:Pull1Card-testSpirit8-spellEffects<UseCustomAbility,None>
.....
Remedy
-----
d9433ac9-20fb-4f2f-adb5-8b11ee9045ff
-----
+++++
GR0B1R0:Pull1Card-testSpirit7-spellEffects<Put1Shootout:Remedy-DemiAutoTargeted-atDude-isParticipating-choose1,None>
.....
Intercession
-----
f638e171-5064-4c25-b1b7-7e5d762025b1
-----
+++++
GR0B1R0:Pull1Card-testMiracle5-spellEffects<UseCustomAbility-isFirstCustom,None>||GR0B1R0:Pull1Card-testMiracle7-spellEffects<UseCustomAbility,None>
.....
Disarm
-----
6afef9d1-9502-437b-915f-6450a35b3f30
-----
onPlay:CustomScript
+++++
.....
Grim Servant O' Death
-----
71e424b2-d62f-43a4-95c8-4b6b77e0b83d
-----
onPlay:CustomScript
+++++
.....
Behold White Bull
-----
d759b266-2dbb-4ee8-8a8f-40e456cbd5ae
-----
+++++
GR0B0R0:BootTarget-Targeted-atDude-targetMine-choose1$$SimplyAnnounce{Increase the casualties of both posses this round by this dude's influence.Your opponent may send all dues in their posse home booted. BY PRESSING ESCAPE}||GR0B0R0:Put2PermControlPlus-DemiAtutoTargeted-atDude-targetMine-choose1$$UnbootTarget-DemiAtutoTargeted-atDude-targetMine-choose1
.....
You Had ONE Job!
-----
3560016d-7c5f-4ac3-beb8-c3360539bb11
-----
onPlay:CustomScript||onPlay:SimplyAnnounce{All players who revealed a legal hand may reduce their casualties by 2.}
+++++
.....
Friends in High Places
-----
2d759fe3-f3e0-46c2-82f1-1df7c5b32aac
-----
onPlay:CustomScript||onPlay:Put1Shootout:Stud-DemiAutoTargeted-atDude-targetMine-isParticipating-choose1
+++++
.....
Shan Fan Showdown!
-----
e94de78b-0021-4167-9681-81b7cc5a9544
-----
onPlay:UnbootTarget-Targeted-atDude-targetMine-isParticipating-choose1 $$ Put1Shootout:Stud-Targeted-atDude-targetMine-isParticipating $$ SimplyAnnounce{For the remainder of the shootout non Cheatin Resolution abilities cannot increase or decrease hand ranks or increase or decrease casualties.}
+++++
.....
108 Gracious Gifts
-----
ae22bba2-cf1e-4038-b7bb-1d3429c10055
-----
+++++
GR2B1R0:UnbootTarget-DemiAutoTargeted-atDude-isBooted-targetMine-choose1
.....
Property Is Theft
-----
ae22bba2-cf1e-4038-b7bb-1d3429c10053
-----
+++++
GR0B1R0:Draw1Card$$UseCustomAbility
.....
The Spiritual Society
-----
ae22bba2-cf1e-4038-b7bb-1d3429c10056
-----
atPhaseSundown:CustomScript
+++++
GR0B1R0:BootTarget-DemiAutoTargeted-atDude-isUnbooted-targetMine-choose1-isCost$$BootTarget-DemiAutoTargeted-atDude-targetOpponents-choose1
.....
Joe Vermilion
-----
ae22bba2-cf1e-4038-b7bb-1d3429c10005
-----
+++++
GR1B0R0:Gain1Ghost Rock-onOpponent-isCost$$Gain1Ghost Rock-perTargetProperty{Production}-DemiAutoTargeted-atDeed-targetOpponents-choose1
.....
Ying-Ssi Chieh T'ang
-----
ae22bba2-cf1e-4038-b7bb-1d3429c10004
-----
+++++
GR0B1R0:Gain1Ghost Rock-onOpponent-isCost-onlyInShootouts$$UseCustomAbility
.....
E Gui
-----
ae22bba2-cf1e-4038-b7bb-1d3429c10003
-----
+++++
GR0B0R0:Gain1Ghost Rock-perTargetProperty{Production}-DemiAutoTargeted-atDeed-targetOpponents-choose1
.....
Buskers
-----
ae22bba2-cf1e-4038-b7bb-1d3429c10001
-----
+++++
GR0B0R0:CustomScript
.....
Taiyari
-----
ae22bba2-cf1e-4038-b7bb-1d3429c10002
-----
+++++
GR0B1R0:CustomScript
.....
Matilda Loomis
-----
ae22bba2-cf1e-4038-b7bb-1d3429c10006
-----
+++++
.....
Alexander Sequoia
-----
ae22bba2-cf1e-4038-b7bb-1d3429c10010
-----
+++++
GR0B0R0:UnbootMyself
.....
Matthew Rising Sun
-----
ae22bba2-cf1e-4038-b7bb-1d3429c10009
-----
+++++
.....
Feichi Suitcase Lee
-----
ae22bba2-cf1e-4038-b7bb-1d3429c10008
-----
+++++
GR0B1R0:CustomScript
.....
Geronimo
-----
ae22bba2-cf1e-4038-b7bb-1d3429c10007
-----
+++++
GR0B0R0:DiscardTarget-DemiAutoTargeted-atGoods_or_Spell_or_Gadget-onAttachment-choose1-targetOpponents$$Gain1Ghost Rock
.....
Papa Marias
-----
ae22bba2-cf1e-4038-b7bb-1d3429c10019
-----
+++++
GR0B0R0:Pull1Card-testHex7-spellEffects<MoveTarget-DemiAutoTargeted-atDude-targetMine-isUnbooted-choose1-moveToHere,None>-onlyInNoon
.....
Skinwalker
-----
ae22bba2-cf1e-4038-b7bb-1d3429c10022
-----
+++++
GR0B0R0:BootTarget-DemiAutoTargeted-atAbomination-isParticipating-isMine-isCost-choose1$$Put2BulletShootoutPlus||GR0B0R0:BootTarget-DemiAutoTargeted-atAbomination-isParticipating-isMine-isCost-choose1$$BootTarget-DemiAutoTargeted-atGoods_or_Spell_or_Gadget-onAttachment-choose1-targetOpponents
.....
Tonton Macoute
-----
ae22bba2-cf1e-4038-b7bb-1d3429c10021
-----
+++++
GR0B0R0:Put2BulletShootoutMinus-DemiAutoTargeted-atDude-isParticipating-targetOpponents-choose1||GR0B0R0:BootTarget-DemiAutoTargeted--atWeapon-isCost-onlyInShootouts
.....
Kevin Wainwright (Exp.1)
-----
ae22bba2-cf1e-4038-b7bb-1d3429c10020
-----
+++++
GR0B0R0:MoveMyself--moveToDude_and_Huckster_or_Abomination-onlyInShootouts$$ParticipateMyself$$Put1Shootout:Stud$$UseCustomAbility||GR0B0R0:MoveMyself--moveToDude_and_Huckster_or_Abomination-onlyInNoon$$Put1High Noon:Stud$$UseCustomAbility
.....
Padre Ernesto de Diaz
-----
ae22bba2-cf1e-4038-b7bb-1d3429c10011
-----
+++++
GR0B0R1:BootTarget-DemiAutoTargeted-atSpell-onAttachment-isUnbooted-choose1-isCost$$Draw1Card
.....
Dr. Erik Yaple
-----
ae22bba2-cf1e-4038-b7bb-1d3429c10013
-----
+++++
GR0B0R0:BootTarget-DemiAutoTargeted--atGadget_and_Weapon-isCost-onlyInShootouts$$Put1Bounty-DemiAutoTargeted-atDude-hasntMarker{Bounty}-choose1-targetOpponents
.....
Quincy Washburne
-----
ae22bba2-cf1e-4038-b7bb-1d3429c10012
-----
+++++
GR0B0R0:UnbootMyself
.....
Xiong Wendy Cheng (Exp.1)
-----
ae22bba2-cf1e-4038-b7bb-1d3429c10014
-----
+++++
GR0B1R0:BootMyself-isCost$$SendHomeBootedTarget-DemiAutoTargeted-atDude-isParticipating-targetOpponents-choose1$$SimplyAnnounce{If that dudes bounty was higher than their grit, discard them}
.....
Takahashi Jinrai
-----
ae22bba2-cf1e-4038-b7bb-1d3429c10017
-----
+++++
GR0B1R0:StartJob-DemiAutoTargeted-atDeed-choose1-jobEffects<Put1ProdPlus-DemiAutoTargeted-atDeed-choose1, None>
.
.....
Handsome Dan Deeds
-----
ae22bba2-cf1e-4038-b7bb-1d3429c10016
-----
+++++
GR0B1R0:BootMyself-isCost$$Put1ControlPlus$$Put1HandsomeCP-DemiAutotargeted-atDeed-choose1
.....
Vida Azul
-----
ae22bba2-cf1e-4038-b7bb-1d3429c10018
-----
+++++
GR0B0R0:CustomScript
.....
Bartholomew P. Fountain
-----
ae22bba2-cf1e-4038-b7bb-1d3429c10015
-----
+++++
GR0B1R0:BootTarget-DemiAutoTargeted-atRanch-targetMine-isCost$$MoveTarget-DemiAutoTargeted-atDude-choose1-moveToDeed_or_Town Square_or_Outfit
.....
Ike Clanton
-----
ae22bba2-cf1e-4038-b7bb-1d3429c10024
-----
+++++
GR0B1R0:Put1Rowdy Ike
.....
Frank Stillwell
-----
ae22bba2-cf1e-4038-b7bb-1d3429c10023
-----
+++++
GR0B0R0:UnparticipateMyself$$SendHomeBootedMyself
.....
Silas Aims (Exp.1)
-----
ae22bba2-cf1e-4038-b7bb-1d3429c10026
-----
+++++
GR0B0R0:CustomScript
.....
Larry Sevens Swift
-----
ae22bba2-cf1e-4038-b7bb-1d3429c10025
-----
+++++
GR0B0R0:BootTarget-DemiAutoTargeted-atSpell-isUnbooted-onAttachment-choose1-isCost$$Put1Bounty$$BootTarget-DemiAutoTargeted-atGoods-targetOpponents-choose1
.....
Virginia Ann Earp
-----
ae22bba2-cf1e-4038-b7bb-1d3429c10027
-----
+++++
.....
Campbell & Hatch Billiard Parlor
-----
ae22bba2-cf1e-4038-b7bb-1d3429c10032
-----
+++++
GR0B1R0:SimplyAnnounce{For the remainder of the shootout, hand ranks cannot be modified Shootout, React, or non-Cheatin' Resolution abilities. Dudes cannot be discarded or aced by Shootout or non-Cheatin' Resolution abilities during the first round.}
.....
Clanton Ranch
-----
ae22bba2-cf1e-4038-b7bb-1d3429c10030
-----
+++++
GR0B1R0:Gain1Ghost Rock
.....
Concordia Cemetery
-----
ae22bba2-cf1e-4038-b7bb-1d3429c10029
-----
+++++
GR0B1R0:Put1ProdPlus||GR0B1R0:Put1ProdPlus$$Put1PermControlPlus
.....
Ike's Place
-----
ae22bba2-cf1e-4038-b7bb-1d3429c10033
-----
+++++
GR0B1R0:Put1ProdMinus-perTargetProperty{Production}-DemiAutoTargeted-atDeed$-choose1$$Put1Control Minus-perTargetProperty{Control}-DemiAutoTargeted-atDeed-choose1$$Put1Ike Place-DemiAutoTargeted-atDeed-choose1
.....
Five Aces Gambling Hall
-----
ae22bba2-cf1e-4038-b7bb-1d3429c10028
-----
+++++
GR0B1R0:ParticipateTarget-DemiAutoTargeted-atDude-targetMine-hasMarker{Bounty}-choose1-isNotParticipating
.....
Old Washoe Club
-----
ae22bba2-cf1e-4038-b7bb-1d3429c10031
-----
+++++
GR0B1R0:SendHomeBootedTarget-DemiAutoTargeted-atDude-isParticipating-targetOpponents-hasProperty{Bullets}le1-choose1||GR0B1R0:DiscardTarget-DemiAutoTargeted-atSidekick-choose1
.....
Quarter Horse
-----
ae22bba2-cf1e-4038-b7bb-1d3429c10034
-----
+++++
GR0B1R0:UnparticipateHost
.....
Electrostatic Pump Gun
-----
ae22bba2-cf1e-4038-b7bb-1d3429c10038
-----
+++++
GR0B1R0:CustomScript
.....
Claws
-----
ae22bba2-cf1e-4038-b7bb-1d3429c10036
-----
+++++
.....
Analytical Cognisizer
-----
ae22bba2-cf1e-4038-b7bb-1d3429c10039
-----
onPlay:CustomScript
+++++
GR0B1R0:DiscardTarget-DemiAutoTargeted-fromHand-atGoods-choose1-isCost$$SimplyAnnounce{The card was discarded to make pulled card's suit a heart}
.....
Ranger's Bible
-----
ae22bba2-cf1e-4038-b7bb-1d3429c10035
-----
+++++
GR0B1R0:SimplyAnnounce{uses Ranger's Bible to lower players hand rank by dudes influence}
.....
Stone's Colt Dragoons
-----
ae22bba2-cf1e-4038-b7bb-1d3429c10037
-----
+++++
GR0B1R0:BootTarget-DemiAutoTargeted-atDude-targetOpponents-isParticipating-choose1$$Put1NoUnboot-DemiAutoTargeted-atDude-targetOpponents-isParticipating-choose1
.....
Bedazzle
-----
ae22bba2-cf1e-4038-b7bb-1d3429c10041
-----
+++++
GR0B1R0:Put1BulletShootoutMinus-DemiAutoTargeted-atDude-perTargetProperty{Bullets}-isParticipating-choose1$$SimplyAnnounce{ lowers players hand rank by 2}
.....
Exultant Translocation
-----
ae22bba2-cf1e-4038-b7bb-1d3429c10045
-----
+++++
GR0B1R0:Pull1Card-testMiracle6-spellEffects<BootHost++Put1NoUnboot-Targeted-atDude-targetMine++SendHomeBootedTarget-Targeted-atDude-targetMine,None>
.....
Retribution
-----
ae22bba2-cf1e-4038-b7bb-1d3429c10044
-----
+++++
GR0B1R0:Pull1Card-testMiracleX-difficultyValue-DemiAutoTargeted-atDude-isParticipating-targetOpponents-choose1-spellEffects<Put1Shootout:Retribution-DemiAutoTargeted-atDude-isParticipating-targetOpponents-choose1,None>
.....
Gateway
-----
ae22bba2-cf1e-4038-b7bb-1d3429c10040
-----
+++++
GR0B1R0:Pull1Card-testHex6-spellEffects<MoveTarget-Targeted-atDude-atAbomination-targetMine-moveToDeed_or_Town Square_or_Outfit,None>-onlyInNoon||GR0B1R0:Pull1Card-testHex6-spellEffects<ParticipateTarget-DemiAutoTargeted-atDude-atAbomination-isNotParticipating-targetMine-choose1,None>-onlyInShootouts
.....
Guiding Wind
-----
ae22bba2-cf1e-4038-b7bb-1d3429c10042
-----
+++++
GR0B1R0:Pull1Card-testSpirit8-spellEffects<useCustomAbility,None>
.....
Mischievous Coyote
-----
ae22bba2-cf1e-4038-b7bb-1d3429c10043
-----
+++++
GR0B1R0:Pull1Card-testSpirit9-spellEffects<SimplyAnnounce{ this round has draw and stud bonus switched.},None>
.....
Murdered in Tombstone
-----
ae22bba2-cf1e-4038-b7bb-1d3429c10051
-----
onPlay:Remove999Shootout:Draw-DemiAutoTargeted-atDude-targetMine-choose1$$Put1Shootout:Stud-DemiAutoTargeted-atDude-targetMine-choose1$$UseCustomAbility
+++++
.....
Hostile Takeover
-----
ae22bba2-cf1e-4038-b7bb-1d3429c10050
-----
onPlay:CustomScript
+++++
.....
Jade King Stance
-----
ae22bba2-cf1e-4038-b7bb-1d3429c10054
-----
+++++
GR0B0R0:UnbootTarget-DemiAutoTargeted-atDude-targetMine-choose1-isCost$$Put1 HighNoon:KungFu Bonus:2-DemiAutoTargeted-atDude-targetMine-choose1$$Put2BulletNoonPlus-DemiAutoTargeted-atDude-targetMine-choose1$$Put1High Noon:Canno Move-DemiAtutoTargeted-atDude-targetMine-choose1
.....
Heist
-----
ae22bba2-cf1e-4038-b7bb-1d3429c10048
-----
onPlay:StartJob-DemiAutoTargeted-atDeed-choose1-isCost-jobEffects<UseCustomAbility,None>
+++++
.....
I'm Your Huckleberry
-----
ae22bba2-cf1e-4038-b7bb-1d3429c10047
-----
onPlay:CustomScript
+++++
.....
Monkey Goes to the Mountain
-----
ae22bba2-cf1e-4038-b7bb-1d3429c10052
-----
+++++
GR0B0R0:UnparticipateMyself$$UnparticipateTarget-DemiAutoTargeted-atDude-targetOpponents-choose1$$MoveMyself-moveToDeed_or_Town Square$$MoveTarget-DemiAutoTargeted-atDude-targetOpponents-choose1-moveToDeed_or_TownSquare
.....
Curse of Failure
-----
ae22bba2-cf1e-4038-b7bb-1d3429c10046
-----
onPlay:StartJob-Targeted-atDude-bootLeader-jobEffects<RehostMyself-AutoTargeted-atDude-isMark++SendHomeBootedTarget-AutoTargeted-atDude-isMark++Put1PermControlMinus-perTargetProperty{Control}-AutoTargeted-atDude-isMark++Put1PermInfluenceMinus-perTargetProperty{Influence}-AutoTargeted-atDude-isMark++Put1PermBulletsMinus-perTargetProperty{Bullets}-AutoTargeted-atDude-isMark++Put1ProdMinus,None>
+++++
.....
Ricochet
-----
ae22bba2-cf1e-4038-b7bb-1d3429c10049
-----
onPlay:SimplyAnnounce{Cheating player discards their shooter}$$UseCustomAbility
+++++
.....
House of Many Faiths
-----
f7c44d33-acb0-405b-b423-d7b9e6468b3a
-----
+++++
GR0B1R0:CustomScript
.....
Office of Ancestral Affairs
-----
409d1eba-9f66-9e24-9967-edc1a96ec4e2
-----
+++++
GR0B1R0:StartJob-DemiAutoTargeted-atDeed-hasProperty{Control}le2-choose1-jobEffects<Put1PermControlPlus-atDeed-isMark++Retrieve1Cards-fromDiscard-grabTotem-toTable-payCost, None>
.....
Bayou Vermilion Railroad
-----
85600051-92b4-a1e0-2e4d-65044d210164
-----
+++++
GR0B0R0:PlayTarget-DemiAutoTargeted-atGoods_and_Mystical_or_Spell_and_Mystical-fromHand-choose1-payCost-reduc1||GR0B1R0:BootTarget-DemiAutoTargeted-atGoods_and_Mystical_or_Spell_and_Mystical$$Gain1Ghost Rock||GR0B1R0:BootTarget-DemiAutoTargeted-atGoods_and_Mystical_or_Spell_and_Mystical$$Gain1Ghost Rock$$Put1PermControlMinus-perTargetProperty{Control}-DemiAutoTargeted-atDeed-choose1$$Put1ProdMinus-perTargetProperty{Production}-DemiAutoTargeted-atDeed-choose1
.....
Fort 51
-----
0e0bdb93-c1fa-db9b-0fac-a19e2b3d622c
-----
+++++
GR0B0R0:Put1Bounty-DemiAutoTargeted-atDude-targetOpponents||GR0B1R0:BootTarget-DemiAutoTargeted-atGadget-onAttachment-isMine-choose1-isCost$$Remove1Bounty-DemiAutoTargeted-atDude-targetOpponents-choose1$$SimplyAnnounce{you might discard a card from your hand.}
.....
Smith and Robards Trade Show
-----
afc89031-a660-00a4-8533-97d50ed8d4e2
-----
+++++
GR0B1R0:BootTarget-DemiAutoTargeted-atGoods_and_Gadget-choose1-isCost$$DiscardTarget-DemiAutoTargeted-fromHand-choose1-isCost$$Draw1Card||GR0B1R0:BootTarget-DemiAutoTargeted-atGoods_and_Gadget-choose1-isCost$$DiscardTarget-DemiAutoTargeted-fromHand-choose1-isCost$$Draw1Card$$Put1ControlPlus-DemiAutoTargeted-atGoods_and_Gadget-choose1
.....
Jonah's Alliance
-----
91f9e67f-e18f-bb2d-05ae-a13f6ea11fb0
-----
+++++
GR0B1R0:CustomScript
.....
Doc Holliday
-----
8ac81a66-9e71-7413-af60-40b007b31c0e
-----
+++++
GR0B1R0:CustomScript
.....
Ke Wang
-----
4ffa4a62-2427-270b-5e74-2eea7d929c58
-----
+++++
GR0B0R0:CustomScript
.....
Baijiu Sue
-----
d6dcaddd-7bfa-1653-31fc-321222181729
-----
+++++
GR0B0R0:Put1ControlPlus
.....
Reverend Bob Hungate
-----
6080e7a5-917f-ae00-0c7d-cf8971489d29
-----
+++++
GR0B1R0:Lose1Ghost Rock-perTargetProperty{Influence}-DemiAutoTargeted-atDude-isMine-isParticipating-choose1-isCost$$Gain1Ghost Rock-onOpponent-perTargetProperty{Influence}-DemiAutoTargeted-atDude-isMine-isParticipating-choose1$$SimplyAnnounce{You and your opponent chose a dude that cannot be chosen as casualty in this round of shootout}
.....
Father Diamond
-----
9807eed4-2872-1ec7-5c15-d6bbc8b794d1
-----
+++++
GR0B0R0:Lose1Ghost Rock-isCost$$Gain1Ghost-onOpponent$$MoveTarget-Targeted-atDude-isMine-moveToOutfit$$MoveMyself-moveToDeed_or_Town Square_or_Outfit
.....
Master Shou
-----
aaa35070-83aa-af52-b701-fcbba379c127
-----
+++++
GR0B0R0:BootTarget-DemiAutoTargeted-atAbomination-targetMine-isUnbooted-choose1-isCost$$Lose1Ghost Rock-onOpponent$$Gain1Ghost Rock
.....
Brother Petrovic
-----
6872cce2-e0b1-96db-572e-d537d2f846a8
-----
+++++
GR0B0R0:Lose1Ghost Rock-perTargetProperty{Influence}-AutoTargeted-atMyself-onOpponent-isCost$$Gain1Ghost Rock-perTargetProperty{Influence}-AutoTargeted-atMyself
.....
Leychaa'i Youngheart
-----
60b58038-1cbb-d970-c289-074b26700e76
-----
+++++
GR0B0R0:RehostTarget-DemiAutoTargeted-atSidekick_and_notSpirit-choose1
.....
Monomi Miles
-----
b4fbeb36-b469-40a3-749b-0ce2b980a23f
-----
+++++
GR0B1R0:BootTarget-DemiAutoTargeted-atSidekick-onAttachment-choose1-isCost$$SimplyAnnounce{Both posses take extra this round equal to Monomi's KF rating!!!!}
.....
Klah
-----
41e73eee-c0fb-dcf1-0472-2109b298ed6c
-----
+++++
GR0B1R0:BootTarget-DemiAutoTargeted-atSidekick-onAttachment-choose1-isCost$$Put1BulletShootoutPlus-DemiAutoTargeted-atDude-isMine-isParticipating
.....
Black Elk (Exp.1)
-----
de59a222-8544-de7c-f897-4af058263772
-----
+++++
.....
Nathaniel Tuwikaa
-----
b515134b-65d1-3c3b-d2f5-dfc74b3e9da8
-----
onPlay:
+++++
GR0B0R0:CalloutTarget-DemiAutoTargeted-atDude-targetOpponents-choose1$$SimplyAnnounce{If the shootout is accepted use influence instead of bullets rating when forming hands.}
.....
The Faceless One
-----
666dab03-ed30-d7ed-51f8-2793d4183f45
-----
+++++
GR0B0R0:Put1BulletShootoutPlus-perTargetProperty{Bullets}-DemiAutoTargeted-atDude-targetOpponents-choose1
.....
Lorena Corbett
-----
a900059b-bfb6-d6a2-e017-8d549774665a
-----
+++++
GR0B0R0:Put1Shootout:Minimum 11 Grit-DemiAutoTargeted-atDude-targetOpponents-isParticipating-choose1-isCost$$BootTarget-Targeted-atGoods_and_Mystical$$BootTarget-Targeted-atDude-targetOpponents-isParticipating
.....
Auntie Sinister
-----
91ad1f4b-d6f1-13d0-ef3b-1b04a3da3e0e
-----
+++++
GR0B0R0:Put2BulletShootoutPlus
.....
Mason Adler
-----
92762db4-a795-b339-750f-40d3825e1a39
-----
onPlay:
+++++
GR0B1R0:StartJob-DemiAutoTargeted-atDude-targetOpponents-choose1-jobEffects<AceTarget,None>-onlyInNoon||GR0B0R1:Put1Shootout:Stud$$Put1Shootout:Abomination-onlyInShootouts
.....
Enrique Dos Santos
-----
af6e568e-9008-0fa9-818c-ebc155a697e5
-----
onPlay:
+++++
GR0B0R0:
.....
Reverend Endicott
-----
537bca8b-1a35-c942-fe78-d44ae3c27527
-----
onPlay:
+++++
GR0B0R0:
.....
Sgt. Elijah Clay
-----
95288f06-e928-1467-80b5-3fba16475b57
-----
onPlay:
+++++
GR0B0R0:
.....
Wyatt Earp
-----
1f793cbc-37f2-4185-65ee-c29746d44d5f
-----
onPlay:
+++++
GR0B0R0:
.....
Eli Leatherwood
-----
b53b350e-2933-5856-d09a-2f809d4aa786
-----
onPlay:
+++++
GR0B0R0:
.....
Roberto Muratore
-----
9a364e65-b1f3-bc1b-f5ff-22af20d5f484
-----
onPlay:
+++++
GR0B0R0:
.....
Adelaide Rose
-----
761e1121-6b53-6f8f-7ae7-a21957d97b76
-----
onPlay:
+++++
GR0B0R0:
.....
Dr. Gregory Tremane
-----
8788feb4-409c-1048-afb6-f4d11453255c
-----
onPlay:
+++++
GR0B0R0:
.....
Big Nose Kate
-----
b29a0155-5019-a272-bb47-fd45b485b135
-----
onPlay:
+++++
GR0B0R0:
.....
Johnny Ringo
-----
b558faf1-8ed8-f8f5-c661-90019f0d6d61
-----
onPlay:
+++++
GR0B0R0:
.....
Violet Esperanza
-----
eabe0b00-67ec-7be0-a030-d6c35eb92a0d
-----
onPlay:
+++++
GR0B0R0:
.....
Curly Bill Brocius
-----
a582d4cc-805f-c49d-8115-f6a066e459d5
-----
onPlay:
+++++
GR0B0R0:
.....
Byron Decker
-----
8abb6faf-b1a3-df04-022f-b8532581ddd3
-----
onPlay:
+++++
GR0B0R0:
.....
Carter Richardson
-----
d84b903d-dd0f-8e80-c7fa-54e9f2fe2770
-----
onPlay:
+++++
GR0B0R0:
.....
Valeria Batten (Exp.1)
-----
3d5ee39c-5d5b-89b1-ed3e-9fe7d23c29f1
-----
onPlay:
+++++
GR0B0R0:
.....
Steph and Gertie's Tonsorium
-----
91a9982c-ca3a-1efd-1981-ba40a1f88a00
-----
onPlay:
+++++
GR0B0R0:
.....
Gateway Station
-----
db7edcd0-004b-d152-f3aa-3a983594db41
-----
onPlay:
+++++
GR0B0R0:
.....
Hart's Tea Shoppe
-----
7a660d76-48cb-3dc3-9325-54401b770469
-----
onPlay:
+++++
GR0B0R0:
.....
Bird Cage Theater
-----
b7b19a08-2bd0-cc0d-2262-e2cc0005a742
-----
onPlay:
+++++
GR0B0R0:
.....
Cochise County Courthouse
-----
3b0be104-7fc9-8db4-a5c3-7d189a73c6af
-----
onPlay:
+++++
GR0B0R0:
.....
Gage Export Co.
-----
e11656c2-ea70-799e-766a-b58a104585e7
-----
onPlay:
+++++
GR0B0R0:
.....
Magnum Opus Tenebri
-----
87ae9741-b861-72ae-523a-072b57085e39
-----
onPlay:
+++++
GR0B0R0:
.....
Scattergun
-----
8c115b49-44f4-0ec9-f08e-1e4ff6d3b30b
-----
onPlay:
+++++
GR0B0R0:
.....
Adler's Needle
-----
0c2524c4-0861-ecf7-6177-210a8321947c
-----
onPlay:
+++++
GR0B0R0:
.....
Wrathful Spider
-----
cda89314-ec5e-401d-3f5a-7e57e55aa80d
-----
onPlay:
+++++
GR0B0R0:
.....
Guiding Light
-----
e881abd7-b68d-bf1b-c811-ece3574735ad
-----
onPlay:
+++++
GR0B0R0:
.....
Champion
-----
13d69444-ddd2-c272-fd46-f4f4c0514ca4
-----
onPlay:
+++++
GR0B0R0:
.....
Malison
-----
75c8ef97-b3fd-c1b9-01fc-28602b53d51d
-----
onPlay:
+++++
GR0B0R0:
.....
Dancing Butterfly
-----
cc092027-6877-a850-8f93-be35911d2909
-----
onPlay:
+++++
GR0B0R0:
.....
Ten-Foot Punch
-----
1bb65618-90d9-0766-f606-3aede09da2b1
-----
onPlay:
+++++
GR0B0R0:
.....
Fleet-Footed
-----
702d4343-ba33-adca-78c5-01f532350721
-----
onPlay:
+++++
GR0B0R0:
.....
You're A Daisy If You Do
-----
2d8b5584-d18d-f87b-b766-304b552eb130
-----
onPlay:
+++++
GR0B0R0:
.....
Twilight Is Upon Us
-----
be288ae2-728a-224d-c8dd-c24d9c795fb4
-----
onPlay:
+++++
GR0B0R0:
.....
Test of Wills
-----
6aca9b4b-439e-1aeb-ae2c-bc1fb9503605
-----
onPlay:
+++++
GR0B0R0:
.....
ENDSCRIPTS
=====
'''
| agpl-3.0 |
LLNL/spack | var/spack/repos/builtin/packages/minivite/package.py | 5 | 1683 | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Minivite(MakefilePackage):
    """miniVite is a proxy application that implements a single phase of
    Louvain method in distributed memory for graph community detection.
    """

    tags = ['proxy-app', 'ecp-proxy-app']

    homepage = "http://hpc.pnl.gov/people/hala/grappolo.html"
    git = "https://github.com/Exa-Graph/miniVite.git"

    version('develop', branch='master')
    version('1.0', tag='v1.0')
    version('1.1', tag='v1.1')

    variant('openmp', default=True, description='Build with OpenMP support')
    variant('opt', default=True, description='Optimization flags')

    depends_on('mpi')

    @property
    def build_targets(self):
        """Make variables handed to the build on the command line.

        Assembles compile and link flags from the active variants and
        points CXX at the MPI compiler wrapper.
        """
        compile_flags = ['-std=c++11 -g -DCHECK_NUM_EDGES -DPRINT_EXTRA_NEDGES']
        link_flags = []

        if '+openmp' in self.spec:
            # OpenMP is required at both compile and link time
            compile_flags.append(self.compiler.openmp_flag)
            link_flags.append(self.compiler.openmp_flag)
        if '+opt' in self.spec:
            compile_flags.append(' -O3 ')

        return [
            'CXXFLAGS={0}'.format(' '.join(compile_flags)),
            'OPTFLAGS={0}'.format(' '.join(link_flags)),
            'CXX={0}'.format(self.spec['mpi'].mpicxx),
        ]

    def install(self, spec, prefix):
        """Copy the built executable into the installation prefix.

        The binary was renamed from 'dspl' to 'miniVite' in release 1.1.
        """
        mkdirp(prefix.bin)
        binary = None
        if self.version >= Version('1.1'):
            binary = 'miniVite'
        elif self.version >= Version('1.0'):
            binary = 'dspl'
        if binary is not None:
            install(binary, prefix.bin)
| lgpl-2.1 |
PRIArobotics/HedgehogProtocol | hedgehog/protocol/messages/vision.py | 1 | 14634 | from typing import Any, Dict, List, Optional, Set, Tuple, Union
from dataclasses import dataclass
from . import RequestMsg, ReplyMsg, Message, SimpleMessage
from hedgehog.protocol.proto import vision_pb2
from hedgehog.utils import protobuf
__all__ = ['OpenCameraAction', 'CloseCameraAction', 'CreateChannelAction', 'UpdateChannelAction', 'DeleteChannelAction', 'ChannelRequest', 'ChannelReply', 'CaptureFrameAction', 'FrameRequest', 'FrameReply']
# <GSL customizable: module-header>
from hedgehog.protocol.proto.vision_pb2 import ChannelOperation
from hedgehog.protocol.proto.vision_pb2 import CREATE, READ, UPDATE, DELETE
@dataclass
class FacesChannel:
    """Channel configuration for face detection; carries no parameters."""

    @classmethod
    def _parse(cls, msg: vision_pb2.Channel) -> Tuple[str, 'FacesChannel']:
        """Extract the channel key from *msg*; the configuration is empty."""
        key = msg.key
        return key, cls()

    def _serialize(self, msg: vision_pb2.Channel, key: str) -> None:
        """Write this channel under *key* into the protobuf message."""
        msg.key = key
        # mark the empty 'faces' submessage as present so the oneof is set
        msg.faces.SetInParent()
@dataclass
class BlobsChannel:
    """Channel configuration for blob detection over an HSV color range."""

    # inclusive lower/upper HSV bounds, each component one byte (0..255)
    hsv_min: Tuple[int, int, int]
    hsv_max: Tuple[int, int, int]

    @staticmethod
    def _pack(hsv: Tuple[int, int, int]) -> int:
        """Fold an (H, S, V) triple into a single big-endian integer."""
        return int.from_bytes(bytes(hsv), 'big')

    @staticmethod
    def _unpack(hsv: int) -> Tuple[int, int, int]:
        """Split a packed big-endian integer back into (H, S, V)."""
        h, s, v = hsv.to_bytes(3, 'big')
        return (h, s, v)

    @classmethod
    def _parse(cls, msg: vision_pb2.Channel) -> Tuple[str, 'BlobsChannel']:
        """Read the channel key and HSV bounds out of the protobuf message."""
        blobs = msg.blobs
        return msg.key, cls(cls._unpack(blobs.hsv_min), cls._unpack(blobs.hsv_max))

    def _serialize(self, msg: vision_pb2.Channel, key: str) -> None:
        """Write this channel under *key* into the protobuf message."""
        msg.key = key
        msg.blobs.hsv_min = self._pack(self.hsv_min)
        msg.blobs.hsv_max = self._pack(self.hsv_max)
# Union of every channel configuration understood by the vision subsystem.
Channel = Union[FacesChannel, BlobsChannel]
def _parse_channel(msg: vision_pb2.Channel) -> Tuple[str, Channel]:
    """Parse a protobuf channel into ``(key, channel)`` based on which
    oneof field is populated."""
    for field, channel_cls in (('faces', FacesChannel), ('blobs', BlobsChannel)):
        if msg.HasField(field):
            return channel_cls._parse(msg)
    assert False  # pragma: nocover
@dataclass
class Face:
    """A detected face, described by its bounding rectangle."""

    # (x, y, width, height)
    bounding_rect: Tuple[int, int, int, int]

    @classmethod
    def _parse(cls, msg: vision_pb2.Face) -> 'Face':
        """Build a Face from the protobuf message fields."""
        rect = (msg.x, msg.y, msg.width, msg.height)
        return cls(rect)

    def _serialize(self, msg: vision_pb2.Face) -> None:
        """Write the bounding rectangle into the protobuf message."""
        x, y, width, height = self.bounding_rect
        msg.x = x
        msg.y = y
        msg.width = width
        msg.height = height
@dataclass
class FacesFeature:
    """Face-detection result: all faces found in one frame."""

    faces: List[Face]

    @classmethod
    def _parse(cls, msg: vision_pb2.Feature) -> 'FacesFeature':
        """Build a FacesFeature from the protobuf feature message."""
        return cls(list(map(Face._parse, msg.faces.faces)))

    def _serialize(self, msg: vision_pb2.Feature) -> None:
        """Write the face list into the protobuf feature message."""
        container = msg.faces
        # mark the submessage present even when the list is empty
        container.SetInParent()
        for face in self.faces:
            face._serialize(container.faces.add())
@dataclass
class Blob:
    """A detected color blob: bounding rectangle, centroid and confidence."""

    # (x, y, width, height)
    bounding_rect: Tuple[int, int, int, int]
    # (cx, cy) center of mass of the blob
    centroid: Tuple[int, int]
    confidence: float

    @classmethod
    def _parse(cls, msg: vision_pb2.Blob) -> 'Blob':
        """Build a Blob from the protobuf message fields."""
        return cls(
            (msg.x, msg.y, msg.width, msg.height),
            (msg.cx, msg.cy),
            msg.confidence,
        )

    # BUG FIX: the parameter was annotated vision_pb2.Face, but this method
    # fills the cx/cy/confidence fields of a vision_pb2.Blob message
    # (cf. Face._serialize, which has no such fields).
    def _serialize(self, msg: vision_pb2.Blob) -> None:
        """Write this blob's fields into the protobuf message."""
        msg.x, msg.y, msg.width, msg.height = self.bounding_rect
        msg.cx, msg.cy = self.centroid
        msg.confidence = self.confidence
@dataclass
class BlobsFeature:
    """Blob-detection result: all blobs found in one frame."""

    blobs: List[Blob]

    @classmethod
    def _parse(cls, msg: vision_pb2.Feature) -> 'BlobsFeature':
        """Build a BlobsFeature from the protobuf feature message."""
        return cls(list(map(Blob._parse, msg.blobs.blobs)))

    def _serialize(self, msg: vision_pb2.Feature) -> None:
        """Write the blob list into the protobuf feature message."""
        container = msg.blobs
        # mark the submessage present even when the list is empty
        container.SetInParent()
        for blob in self.blobs:
            blob._serialize(container.blobs.add())
# Union of every detection-result type produced by the vision subsystem.
Feature = Union[FacesFeature, BlobsFeature]
def _parse_feature(msg: vision_pb2.Feature) -> Feature:
    """Parse a protobuf feature into the matching Feature type based on
    which oneof field is populated."""
    for field, feature_cls in (('faces', FacesFeature), ('blobs', BlobsFeature)):
        if msg.HasField(field):
            return feature_cls._parse(msg)
    assert False  # pragma: nocover
# Export the hand-written names of this customizable section in addition to
# the generated message classes declared at the top of the module.
__all__ += [
    'ChannelOperation',
    'CREATE', 'READ', 'UPDATE', 'DELETE',
    'FacesChannel', 'BlobsChannel', 'Channel',
]
# </GSL customizable: module-header>
@protobuf.message(vision_pb2.VisionCameraAction, 'vision_camera_action', fields=())
@dataclass(frozen=True, repr=False)
class OpenCameraAction(Message):
    """Action asking the server to open the camera; carries no payload."""

    def __post_init__(self):
        # <default GSL customizable: OpenCameraAction-init-validation>
        pass
        # </GSL customizable: OpenCameraAction-init-validation>
    # <default GSL customizable: OpenCameraAction-extra-members />
    def _serialize(self, msg: vision_pb2.VisionCameraAction) -> None:
        # open=True distinguishes this message from CloseCameraAction
        msg.open = True
@protobuf.message(vision_pb2.VisionCameraAction, 'vision_camera_action', fields=())
@dataclass(frozen=True, repr=False)
class CloseCameraAction(Message):
    """Action asking the server to close the camera; carries no payload."""

    def __post_init__(self):
        # <default GSL customizable: CloseCameraAction-init-validation>
        pass
        # </GSL customizable: CloseCameraAction-init-validation>
    # <default GSL customizable: CloseCameraAction-extra-members />
    def _serialize(self, msg: vision_pb2.VisionCameraAction) -> None:
        # open=False distinguishes this message from OpenCameraAction
        msg.open = False
@protobuf.message(vision_pb2.VisionChannelMessage, 'vision_channel_message', fields=('channels',))
@dataclass(frozen=True, repr=False)
class CreateChannelAction(Message):
    """Action creating the given channels, keyed by channel name."""

    channels: Dict[str, Channel]
    def __post_init__(self):
        # <default GSL customizable: CreateChannelAction-init-validation>
        pass
        # </GSL customizable: CreateChannelAction-init-validation>
    # <default GSL customizable: CreateChannelAction-extra-members />
    def _serialize(self, msg: vision_pb2.VisionChannelMessage) -> None:
        # <GSL customizable: CreateChannelAction-serialize-channels>
        # CRUD discriminator: this message performs a CREATE operation
        msg.op = CREATE
        for key, channel in self.channels.items():
            channel._serialize(msg.channels.add(), key)
        # </GSL customizable: CreateChannelAction-serialize-channels>
@protobuf.message(vision_pb2.VisionChannelMessage, 'vision_channel_message', fields=('channels',))
@dataclass(frozen=True, repr=False)
class UpdateChannelAction(Message):
    """Action updating the given channels (op=UPDATE on the wire)."""

    # mapping of channel key -> new channel configuration
    channels: Dict[str, Channel]

    def __post_init__(self):
        # <default GSL customizable: UpdateChannelAction-init-validation>
        pass
        # </GSL customizable: UpdateChannelAction-init-validation>

    # <default GSL customizable: UpdateChannelAction-extra-members />

    def _serialize(self, msg: vision_pb2.VisionChannelMessage) -> None:
        # <GSL customizable: UpdateChannelAction-serialize-channels>
        msg.op = UPDATE
        for key, channel in self.channels.items():
            channel._serialize(msg.channels.add(), key)
        # </GSL customizable: UpdateChannelAction-serialize-channels>
@protobuf.message(vision_pb2.VisionChannelMessage, 'vision_channel_message', fields=('keys',))
@dataclass(frozen=True, repr=False)
class DeleteChannelAction(Message):
    """Action deleting the channels named by *keys* (op=DELETE on the wire)."""

    # keys of the channels to delete; only keys travel on the wire
    keys: Set[str]

    def __post_init__(self):
        # <default GSL customizable: DeleteChannelAction-init-validation>
        pass
        # </GSL customizable: DeleteChannelAction-init-validation>

    # <default GSL customizable: DeleteChannelAction-extra-members />

    def _serialize(self, msg: vision_pb2.VisionChannelMessage) -> None:
        # <GSL customizable: DeleteChannelAction-serialize-keys>
        msg.op = DELETE
        for key in self.keys:
            # each entry carries only its key, no channel payload
            msg.channels.add().key = key
        # </GSL customizable: DeleteChannelAction-serialize-keys>
@protobuf.message(vision_pb2.VisionChannelMessage, 'vision_channel_message', fields=('keys',))
@dataclass(frozen=True, repr=False)
class ChannelRequest(Message):
    """Request for the channels named by *keys* (op=READ on the wire)."""

    # keys of the channels to read; only keys travel on the wire
    keys: Set[str]

    def __post_init__(self):
        # <default GSL customizable: ChannelRequest-init-validation>
        pass
        # </GSL customizable: ChannelRequest-init-validation>

    # <default GSL customizable: ChannelRequest-extra-members />

    def _serialize(self, msg: vision_pb2.VisionChannelMessage) -> None:
        # <GSL customizable: ChannelRequest-serialize-keys>
        msg.op = READ
        for key in self.keys:
            # each entry carries only its key, no channel payload
            msg.channels.add().key = key
        # </GSL customizable: ChannelRequest-serialize-keys>
@ReplyMsg.message(vision_pb2.VisionChannelMessage, 'vision_channel_message', fields=('channels',))
@dataclass(frozen=True, repr=False)
class ChannelReply(SimpleMessage):
    """Reply to a ChannelRequest carrying the requested channels."""

    # mapping of channel key -> channel configuration
    channels: Dict[str, Channel]

    def __post_init__(self):
        # <default GSL customizable: ChannelReply-init-validation>
        pass
        # </GSL customizable: ChannelReply-init-validation>

    # <default GSL customizable: ChannelReply-extra-members />

    @classmethod
    def _parse(cls, msg: vision_pb2.VisionChannelMessage) -> 'ChannelReply':
        # <GSL customizable: ChannelReply-parse-channels>
        # _parse_channel yields (key, channel) pairs for each wire entry
        channels = {key: channel for key, channel in (_parse_channel(msg) for msg in msg.channels)}
        # </GSL customizable: ChannelReply-parse-channels>
        return cls(channels)

    def _serialize(self, msg: vision_pb2.VisionChannelMessage) -> None:
        # <GSL customizable: ChannelReply-serialize-channels>
        msg.op = READ
        for key, channel in self.channels.items():
            channel._serialize(msg.channels.add(), key)
        # </GSL customizable: ChannelReply-serialize-channels>
@RequestMsg.message(vision_pb2.VisionCaptureFrameAction, 'vision_capture_frame_action', fields=())
@dataclass(frozen=True, repr=False)
class CaptureFrameAction(SimpleMessage):
    """Parameterless action requesting that a frame be captured."""

    def __post_init__(self):
        # <default GSL customizable: CaptureFrameAction-init-validation>
        pass
        # </GSL customizable: CaptureFrameAction-init-validation>

    # <default GSL customizable: CaptureFrameAction-extra-members />

    @classmethod
    def _parse(cls, msg: vision_pb2.VisionCaptureFrameAction) -> 'CaptureFrameAction':
        return cls()

    def _serialize(self, msg: vision_pb2.VisionCaptureFrameAction) -> None:
        # mark the field-less submessage as present on the wire
        msg.SetInParent()
@RequestMsg.message(vision_pb2.VisionFrameMessage, 'vision_frame_message', fields=('highlight',))
@dataclass(frozen=True, repr=False)
class FrameRequest(SimpleMessage):
    """Request for a frame, optionally naming a channel to highlight."""

    # channel key to highlight; None means no highlight ('' on the wire)
    highlight: Optional[str]

    def __post_init__(self):
        # <default GSL customizable: FrameRequest-init-validation>
        pass
        # </GSL customizable: FrameRequest-init-validation>

    # <default GSL customizable: FrameRequest-extra-members />

    @classmethod
    def _parse(cls, msg: vision_pb2.VisionFrameMessage) -> 'FrameRequest':
        highlight = msg.highlight
        # the empty string on the wire encodes "no highlight"
        return cls(highlight if highlight != '' else None)

    def _serialize(self, msg: vision_pb2.VisionFrameMessage) -> None:
        msg.highlight = self.highlight if self.highlight is not None else ''
@ReplyMsg.message(vision_pb2.VisionFrameMessage, 'vision_frame_message', fields=('highlight', 'frame',))
@dataclass(frozen=True, repr=False)
class FrameReply(SimpleMessage):
    """Reply to a FrameRequest carrying the frame data."""

    # channel key that was highlighted; None means none ('' on the wire)
    highlight: Optional[str]
    # raw frame payload; encoding is opaque at this layer
    frame: bytes

    def __post_init__(self):
        # <default GSL customizable: FrameReply-init-validation>
        pass
        # </GSL customizable: FrameReply-init-validation>

    # <default GSL customizable: FrameReply-extra-members />

    @classmethod
    def _parse(cls, msg: vision_pb2.VisionFrameMessage) -> 'FrameReply':
        highlight = msg.highlight
        frame = msg.frame
        # the empty string on the wire encodes "no highlight"
        return cls(highlight if highlight != '' else None, frame)

    def _serialize(self, msg: vision_pb2.VisionFrameMessage) -> None:
        msg.highlight = self.highlight if self.highlight is not None else ''
        msg.frame = self.frame
@RequestMsg.message(vision_pb2.VisionFeatureMessage, 'vision_feature_message', fields=('channel',))
@dataclass(frozen=True, repr=False)
class FeatureRequest(SimpleMessage):
    """Request for the feature detected on a single channel."""

    # key of the channel whose feature is requested
    channel: str

    def __post_init__(self):
        # <default GSL customizable: FeatureRequest-init-validation>
        pass
        # </GSL customizable: FeatureRequest-init-validation>

    # <default GSL customizable: FeatureRequest-extra-members />

    @classmethod
    def _parse(cls, msg: vision_pb2.VisionFeatureMessage) -> 'FeatureRequest':
        channel = msg.channel
        return cls(channel)

    def _serialize(self, msg: vision_pb2.VisionFeatureMessage) -> None:
        msg.channel = self.channel
@ReplyMsg.message(vision_pb2.VisionFeatureMessage, 'vision_feature_message', fields=('channel', 'feature',))
@dataclass(frozen=True, repr=False)
class FeatureReply(SimpleMessage):
    """Reply to a FeatureRequest carrying the channel's feature."""

    # key of the channel the feature belongs to
    channel: str
    # parsed feature (FacesFeature or BlobsFeature)
    feature: Feature

    def __post_init__(self):
        # <default GSL customizable: FeatureReply-init-validation>
        pass
        # </GSL customizable: FeatureReply-init-validation>

    # <default GSL customizable: FeatureReply-extra-members />

    @classmethod
    def _parse(cls, msg: vision_pb2.VisionFeatureMessage) -> 'FeatureReply':
        channel = msg.channel
        # <GSL customizable: FeatureReply-parse-feature>
        feature = _parse_feature(msg.feature)
        # </GSL customizable: FeatureReply-parse-feature>
        return cls(channel, feature)

    def _serialize(self, msg: vision_pb2.VisionFeatureMessage) -> None:
        msg.channel = self.channel
        # <GSL customizable: FeatureReply-serialize-feature>
        self.feature._serialize(msg.feature)
        # </GSL customizable: FeatureReply-serialize-feature>
@RequestMsg.parser('vision_camera_action')
def _parse_vision_camera_action_request(msg: vision_pb2.VisionCameraAction) -> Union[OpenCameraAction, CloseCameraAction]:
    """Parse a camera action request into an open or close action."""
    # NOTE: ``open`` shadows the builtin; harmless in this short scope.
    open = msg.open
    # <GSL customizable: _parse_vision_camera_action_request-return>
    if open:
        return OpenCameraAction()
    else:
        return CloseCameraAction()
    # </GSL customizable: _parse_vision_camera_action_request-return>
@RequestMsg.parser('vision_channel_message')
def _parse_vision_channel_message_request(msg: vision_pb2.VisionChannelMessage) -> Union[CreateChannelAction, UpdateChannelAction, DeleteChannelAction, ChannelRequest]:
    """Dispatch a channel message on its ``op`` to the matching action/request."""
    op = msg.op
    channels = msg.channels
    # <GSL customizable: _parse_vision_channel_message_request-return>
    if op in {READ, DELETE}:
        # READ/DELETE entries carry only channel keys, no payloads
        keys = {msg.key for msg in channels}
        if op == READ:
            return ChannelRequest(keys)
        else:
            return DeleteChannelAction(keys)
    elif op in {CREATE, UPDATE}:
        # CREATE/UPDATE entries carry full channel payloads
        channels = {key: channel for key, channel in (_parse_channel(msg) for msg in msg.channels)}
        if op == CREATE:
            return CreateChannelAction(channels)
        else:
            return UpdateChannelAction(channels)
    else: # pragma: nocover
        assert False
    # </GSL customizable: _parse_vision_channel_message_request-return>
| agpl-3.0 |
charbeljc/account-invoice-reporting | __unported__/invoice_webkit/report/account_invoice.py | 10 | 1975 | # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (c) 2011 Camptocamp SA (http://www.camptocamp.com)
# @author Guewen Baconnier, Vincent Renaville, Nicolas Bessi
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from openerp.report import report_sxw
from openerp import pooler
class AccountInvoice_Report(report_sxw.rml_parse):
    """Webkit report parser for account invoices.

    Extends the rendering context with the current time, the database
    cursor, the user id, and a callable returning the VAT number of the
    current user's company.
    """

    def __init__(self, cr, uid, name, context):
        super(AccountInvoice_Report, self).__init__(cr, uid, name,
                                                    context=context)
        self.localcontext.update({
            'time': time,
            'cr': cr,
            'uid': uid,
            'company_vat': self._get_company_vat,
        })

    def _get_company_vat(self):
        """Return the VAT number of the current user's company, or False."""
        pool = pooler.get_pool(self.cr.dbname)
        user = pool.get('res.users').browse(self.cr, self.uid, self.uid)
        vat = user.company_id.partner_id.vat
        return vat if vat else False
# Register the webkit report against the account.invoice model, rendered
# from the mako template below.
report_sxw.report_sxw('report.account.invoice.webkit',
                      'account.invoice',
                      'invoice_webkit/report/account_invoice.mako',
                      parser=AccountInvoice_Report)
Russell-IO/ansible | test/sanity/code-smell/no-illegal-filenames.py | 30 | 2010 | #!/usr/bin/env python
# a script to check for illegal filenames on various Operating Systems. The
# main rules are derived from restrictions on Windows
# https://msdn.microsoft.com/en-us/library/aa365247#naming_conventions
import os
import re
import struct
from ansible.module_utils.basic import to_bytes
# Characters that may not appear anywhere in a Windows file name, plus all
# ASCII control characters (0-31); kept as bytes for binary comparison.
ILLEGAL_CHARS = [
    b'<',
    b'>',
    b':',
    b'"',
    b'/',
    b'\\',
    b'|',
    b'?',
    b'*'
] + [struct.pack("b", i) for i in range(32)]

# Reserved Windows device names; illegal as file names regardless of
# case or extension.
ILLEGAL_NAMES = [
    "CON",
    "PRN",
    "AUX",
    "NUL",
    "COM1",
    "COM2",
    "COM3",
    "COM4",
    "COM5",
    "COM6",
    "COM7",
    "COM8",
    "COM9",
    "LPT1",
    "LPT2",
    "LPT3",
    "LPT4",
    "LPT5",
    "LPT6",
    "LPT7",
    "LPT8",
    "LPT9",
]

# Characters a Windows file name may not end with.
ILLEGAL_END_CHARS = [
    '.',
    ' ',
]
def check_path(path, dir=False):
    """Print a diagnostic for every Windows-illegal aspect of *path*.

    Only the final path component is examined, against the rules from
    https://msdn.microsoft.com/en-us/library/aa365247#naming_conventions

    :param path: path to examine
    :param dir: True when *path* is a directory; only affects the message text
    """
    type_name = 'directory' if dir else 'file'
    # only the last component matters; the stem (without extension) is what
    # the reserved-name rule applies to
    file_name = os.path.basename(path)
    name = os.path.splitext(file_name)[0]

    # reserved device names (CON, PRN, COM1, ...) are illegal in any case
    if name.upper() in ILLEGAL_NAMES:
        print("%s: illegal %s name %s" % (path, type_name, name.upper()))

    # names may not end in a dot or a space
    if file_name[-1] in ILLEGAL_END_CHARS:
        print("%s: illegal %s name end-char '%s'" % (path, type_name, file_name[-1]))

    # compare as bytes so the struct-packed control characters match
    bfile = to_bytes(file_name, encoding='utf-8')
    for char in ILLEGAL_CHARS:
        if char in bfile:
            bpath = to_bytes(path, encoding='utf-8')
            print("%s: illegal char '%s' in %s name" % (bpath, char, type_name))
def main():
    """Walk the current tree and report Windows-illegal file/directory names."""
    # test/integration/targets/*/backup trees are excluded from the check
    backup_re = re.compile("^test/integration/targets/.*/backup")

    for root, dirs, files in os.walk('.'):
        # normalise the root to a repo-relative path without the leading './'
        if root == '.':
            root = ''
        elif root.startswith('./'):
            root = root[2:]

        # ignore test/integration/targets/*/backup
        if backup_re.match(root):
            continue

        for entries, is_dir in ((dirs, True), (files, False)):
            for entry in entries:
                check_path(os.path.join(root, entry), dir=is_dir)


if __name__ == '__main__':
    main()
| gpl-3.0 |
jordiblasco/easybuild-easyconfigs | test/easyconfigs/styletests.py | 2 | 2236 | ##
# Copyright 2016 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://vscentrum.be/nl/en),
# the Hercules foundation (http://www.herculesstichting.be/in_English)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# https://github.com/easybuilders/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
##
"""
Style tests for easyconfig files. Uses pep8.
@author: Ward Poelmans (Ghent University)
"""
import glob
import sys
from unittest import TestCase, TestLoader, main
from vsc.utils import fancylogger
from easybuild.framework.easyconfig.tools import get_paths_for
from easybuild.framework.easyconfig.style import check_easyconfigs_style
# pep8 is an optional dependency: when it is missing, the style test below
# detects its absence via sys.modules and skips itself.
try:
    import pep8
except ImportError:
    pass
class StyleTest(TestCase):
    """PEP8-style conformance tests for all easyconfig files."""

    log = fancylogger.getLogger("StyleTest", fname=False)

    def test_style_conformance(self):
        """Check the easyconfigs for style"""
        if 'pep8' not in sys.modules:
            # parenthesised print works on both Python 2 and Python 3;
            # the original bare print statement was Python-2-only
            print("Skipping style checks (no pep8 available)")
            return

        # all available easyconfig files, sorted for deterministic reporting
        easyconfigs_path = get_paths_for("easyconfigs")[0]
        specs = sorted(glob.glob('%s/*/*/*.eb' % easyconfigs_path))

        # result is the number of style errors/warnings found
        result = check_easyconfigs_style(specs)
        self.assertEqual(result, 0, "Found code style errors (and/or warnings): %s" % result)
def suite():
    """Return all style tests for easyconfigs."""
    return TestLoader().loadTestsFromTestCase(StyleTest)


# run the tests with unittest's default CLI runner when invoked directly
if __name__ == '__main__':
    main()
| gpl-2.0 |
idjaw/horizon | openstack_dashboard/dashboards/project/volumes/snapshots/views.py | 48 | 4008 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.core.urlresolvers import reverse
from django.core.urlresolvers import reverse_lazy
from django.utils.translation import ugettext_lazy as _
from horizon import exceptions
from horizon import forms
from horizon import tabs
from horizon.utils import memoized
from openstack_dashboard import api
from openstack_dashboard.dashboards.project.volumes \
.snapshots import forms as vol_snapshot_forms
from openstack_dashboard.dashboards.project.volumes \
.snapshots import tables as vol_snapshot_tables
from openstack_dashboard.dashboards.project.volumes \
.snapshots import tabs as vol_snapshot_tabs
class UpdateView(forms.ModalFormView):
    """Modal form view for editing a volume snapshot's name and description."""
    form_class = vol_snapshot_forms.UpdateForm
    form_id = "update_snapshot_form"
    modal_header = _("Edit Snapshot")
    template_name = 'project/volumes/snapshots/update.html'
    submit_label = _("Save Changes")
    submit_url = "horizon:project:volumes:snapshots:update"
    success_url = reverse_lazy("horizon:project:volumes:index")
    page_title = _("Edit Snapshot")

    @memoized.memoized_method
    def get_object(self):
        """Fetch the snapshot being edited (memoized per request).

        On failure, hands the error to Horizon with a redirect to the
        volumes index.
        """
        snap_id = self.kwargs['snapshot_id']
        try:
            self._object = api.cinder.volume_snapshot_get(self.request,
                                                          snap_id)
        except Exception:
            msg = _('Unable to retrieve volume snapshot.')
            url = reverse('horizon:project:volumes:index')
            exceptions.handle(self.request, msg, redirect=url)
        return self._object

    def get_context_data(self, **kwargs):
        """Expose the snapshot and the resolved submit URL to the template."""
        context = super(UpdateView, self).get_context_data(**kwargs)
        context['snapshot'] = self.get_object()
        args = (self.kwargs['snapshot_id'],)
        context['submit_url'] = reverse(self.submit_url, args=args)
        return context

    def get_initial(self):
        """Pre-fill the form with the snapshot's current name/description."""
        snapshot = self.get_object()
        return {'snapshot_id': self.kwargs["snapshot_id"],
                'name': snapshot.name,
                'description': snapshot.description}
class DetailView(tabs.TabView):
    """Tabbed detail view for a single volume snapshot."""
    tab_group_class = vol_snapshot_tabs.SnapshotDetailTabs
    template_name = 'project/volumes/snapshots/detail.html'
    page_title = _("Volume Snapshot Details: {{ snapshot.name }}")

    def get_context_data(self, **kwargs):
        """Expose the snapshot, back-link URL and row actions to the template."""
        context = super(DetailView, self).get_context_data(**kwargs)
        snapshot = self.get_data()
        table = vol_snapshot_tables.VolumeSnapshotsTable(self.request)
        context["snapshot"] = snapshot
        context["url"] = self.get_redirect_url()
        context["actions"] = table.render_row_actions(snapshot)
        return context

    @memoized.memoized_method
    def get_data(self):
        """Fetch the snapshot (memoized per request).

        On failure, hands the error to Horizon with a redirect to the
        volumes index.
        """
        try:
            snapshot_id = self.kwargs['snapshot_id']
            snapshot = api.cinder.volume_snapshot_get(self.request,
                                                      snapshot_id)
        except Exception:
            redirect = self.get_redirect_url()
            exceptions.handle(self.request,
                              _('Unable to retrieve snapshot details.'),
                              redirect=redirect)
        return snapshot

    @staticmethod
    def get_redirect_url():
        """Return the URL of the volumes index page."""
        return reverse('horizon:project:volumes:index')

    def get_tabs(self, request, *args, **kwargs):
        """Build the detail tab group, passing the snapshot to the tabs."""
        snapshot = self.get_data()
        return self.tab_group_class(request, snapshot=snapshot, **kwargs)
wscullin/spack | var/spack/repos/builtin/packages/fslsfonts/package.py | 3 | 1735 | ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Fslsfonts(AutotoolsPackage):
    """fslsfonts produces a list of fonts served by an X font server."""

    homepage = "http://cgit.freedesktop.org/xorg/app/fslsfonts"
    url = "https://www.x.org/archive/individual/app/fslsfonts-1.0.5.tar.gz"

    version('1.0.5', 'ef781bd6a7b529d3ed7a256055715730')

    # X Font Service client library (runtime link dependency)
    depends_on('libfs')
    # build-time X protocol headers and pkg-config support
    depends_on('[email protected]:', type='build')
    depends_on('[email protected]:', type='build')
    depends_on('util-macros', type='build')
streethacker/koenig | koenig/utils/async.py | 1 | 1324 | # -*- coding: utf-8 -*-
import celery
import functools
import logging
from koenig.client import make_client
from koenig.settings import (
CELERY_BROKER,
CELERY_CONFIG,
)
logger = logging.getLogger(__name__)

# AMQP broker URL on the dedicated "koenig" vhost.
broker = 'amqp://{user}:{password}@{host}:{port}/koenig'.\
    format(**CELERY_BROKER)

app = celery.Celery(broker=broker)
app.conf.update(**CELERY_CONFIG)

# Retry at most 720 times, 5 seconds apart (one hour in total).
MAX_RETRIES = 720
RETRY_WAIT = 5
@app.task(max_retries=MAX_RETRIES, bind=True)
def async_api(self, slug, api_name, *args, **kwargs):
    """Celery task: call ``api_name(*args)`` on a service client, with retries.

    :param slug: service slug; kept in the signature for routing/bookkeeping,
        the call itself only uses ``api_name``.
    :param api_name: name of the client method to invoke.
    :param kwargs: optional ``max_retries`` / ``retry_wait`` overrides.
    """
    # NOTE(review): the previous implementation read these overrides from a
    # wrapper's own (always empty) **kwargs, so callers' overrides were
    # silently ignored; read them from the task's kwargs instead.
    max_retries = kwargs.get('max_retries', MAX_RETRIES)
    retry_wait = kwargs.get('retry_wait', RETRY_WAIT)

    try:
        with make_client() as c:
            getattr(c, api_name)(*args)
    except Exception as e:
        # re-schedule this task after ``retry_wait`` seconds
        self.retry(exc=e, countdown=retry_wait, max_retries=max_retries)
def send_task(service_slug, api_name, *args, **kwargs):
    """Queue ``api_name`` for asynchronous execution on the service's queue.

    Unless the caller supplies an explicit ``queue``, the task is routed to
    ``<service_slug>_queue``.
    """
    if 'queue' not in kwargs:
        kwargs['queue'] = '%s_queue' % service_slug
    result = async_api.si(service_slug, api_name, *args).apply_async(**kwargs)
    return result
| gpl-2.0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.