from __future__ import unicode_literals
import os
import sys
import pbr.version
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__),
'..', '..')))
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.append(os.path.abspath('.'))
# -- General configuration ----------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc',
'sphinx.ext.todo',
'sphinx.ext.coverage',
'sphinx.ext.intersphinx',
'oslosphinx',
]
todo_include_todos = True
# Add any paths that contain templates here, relative to this directory.
# templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'keystoneauth1'
copyright = 'OpenStack Contributors'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
version_info = pbr.version.VersionInfo('keystoneauth1')
# The short X.Y version.
version = version_info.version_string()
# The full version, including alpha/beta/rc tags.
release = version_info.release_string()
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of documents that shouldn't be included in the build.
#unused_docs = []
# List of directories, relative to source directory, that shouldn't be searched
# for source files.
exclude_trees = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# Grouping the document tree for man pages.
# List of tuples (source start file, name, description, authors, manual section).
man_pages = [
('man/keystone', 'keystone', 'Client for OpenStack Identity API',
['OpenStack Contributors'], 1),
]
# -- Options for HTML output --------------------------------------------------
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
#html_theme_path = ["."]
#html_theme = '_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
#html_static_path = ['static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
git_cmd = "git log --pretty=format:'%ad, commit %h' --date=local -n1"
html_last_updated_fmt = os.popen(git_cmd).read()
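# e.g. the command above yields something like "Mon Jan  1 12:00:00 2000, commit abc1234"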
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_use_modindex = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = ''
# Output file base name for HTML help builder.
htmlhelp_basename = 'keystoneauthdoc'
# -- Options for LaTeX output -------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual])
# .
latex_documents = [
('index', 'keystoneauth1.tex',
'keystoneauth1 Documentation',
'Nebula Inc, based on work by Rackspace and Jacob Kaplan-Moss',
'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_use_modindex = True
intersphinx_mapping = {
'python': ('http://docs.python.org/', None),
'osloconfig': ('http://docs.openstack.org/developer/oslo.config/', None),
}
| {
"content_hash": "9de35ac16b41a3179afcb33bb16ba2bc",
"timestamp": "",
"source": "github",
"line_count": 216,
"max_line_length": 79,
"avg_line_length": 32.101851851851855,
"alnum_prop": 0.6988751081626766,
"repo_name": "citrix-openstack-build/keystoneauth",
"id": "bc6a5c42fcfc8b1e71be00c0a6bb04444ca8041e",
"size": "7360",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "doc/source/conf.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "393336"
}
],
"symlink_target": ""
} |
import re
import json
import os
from scrapy.spider import BaseSpider
from scrapy.selector import HtmlXPathSelector
from scrapy.http import Request, HtmlResponse
from scrapy.utils.response import get_base_url
from scrapy.utils.url import urljoin_rfc
from product_spiders.items import Product, ProductLoaderWithNameStrip as ProductLoader
from product_spiders.utils import extract_price
HERE = os.path.dirname(os.path.abspath(__file__))
class DrillSpotSpider(BaseSpider):
name = 'drillspot.com'
allowed_domains = ['drillspot.com']
#start_urls = ('http://www.drillspot.com',)
def start_requests(self):
with open(os.path.join(HERE, 'drillspotcats')) as f:
urls = f.read().split()
for url in urls:
yield Request(url)
def parse(self, response):
hxs = HtmlXPathSelector(response)
'''
if response.url == self.start_urls[0]:
cats = hxs.select('//div[@class="sub categories"]//a/@href').extract()
for cat in cats:
yield Request(urljoin_rfc(get_base_url(response), cat + '?ps=120'))
'''
next_page = hxs.select('//li[@class="next-on"]/a/@href').extract()
if next_page:
yield Request(urljoin_rfc(get_base_url(response), next_page[0]))
for product in self.parse_products(hxs, response):
yield product
def parse_products(self, hxs, response):
products = hxs.select('//div[contains(@class, "product-list")]//div[@class="g-list-node"]')
for product in products:
loader = ProductLoader(selector=product, item=Product())
url = urljoin_rfc(get_base_url(response), product.select('.//p[@class="name"]//a/@href').extract()[0])
loader.add_value('url', url)
loader.add_xpath('name', './/p[@class="name"]//a/text()')
loader.add_xpath('price', './/p[@class="price"]/text()')
yield loader.load_item()
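# Typical invocation from the Scrapy project this spider lives in (assuming the
# usual project layout and a 'drillspotcats' URL list next to this file):
#   scrapy crawl drillspot.com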
| {
"content_hash": "710335177b5d4f7be84e250957ba36e6",
"timestamp": "",
"source": "github",
"line_count": 53,
"max_line_length": 114,
"avg_line_length": 37.056603773584904,
"alnum_prop": 0.6242362525458248,
"repo_name": "0--key/lib",
"id": "02f3c81dcae4d7b861f800709a46de57c01548e6",
"size": "1964",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "portfolio/Python/scrapy/instrumart/drillspot.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "28210"
},
{
"name": "Emacs Lisp",
"bytes": "76390"
},
{
"name": "HTML",
"bytes": "1136671"
},
{
"name": "JavaScript",
"bytes": "27718"
},
{
"name": "PHP",
"bytes": "378537"
},
{
"name": "Python",
"bytes": "1892998"
},
{
"name": "Shell",
"bytes": "4030"
}
],
"symlink_target": ""
} |
import os
import csv
import random
import collections
from builtins import len
import datasets
from tflearn.data_utils import to_categorical
class Acner():
def __init__(self, train_validate_split=None, test_split=None,
use_defaults=False, shuffle=True):
self.construct()
self.load(use_defaults, train_validate_split, test_split, shuffle)
#super(Acner, self).__init__(train_validate_split, test_split,
# use_defaults, shuffle)
def construct(self):
self.dataset_name = 'ACNER: Annotated Corpus for Named Entity Recognition'
        self.dataset_description = 'A corpus of ~1M words (47,957 sentences) ' \
                                   'with NER annotations.'
self.dataset_path = os.path.join(datasets.data_root_directory, 'acner')
self.train_path = os.path.join(self.dataset_path, 'train.txt')
self.validate_path = os.path.join(self.dataset_path, 'validate.txt')
self.test_path = os.path.join(self.dataset_path, 'test.txt')
self.vocab_paths = [os.path.join(self.dataset_path, 'vocab.txt'),
os.path.join(self.dataset_path, 'pos_vocab.txt'),
os.path.join(self.dataset_path, 'ner_vocab.txt')]
self.metadata_paths = [os.path.join(self.dataset_path, 'metadata.txt'),
os.path.join(self.dataset_path, 'pos_metadata.txt'),
os.path.join(self.dataset_path, 'ner_metadata.txt')]
self.w2v_paths = [os.path.join(self.dataset_path, 'w2v.npy'),
os.path.join(self.dataset_path, 'pos_w2v.npy'),
os.path.join(self.dataset_path, 'ner_w2v.npy')]
self.w2i = [None, None, None]
self.i2w = [None, None, None]
self.w2v = [None, None, None]
def load(self, use_defaults, train_validate_split, test_split, shuffle):
if (use_defaults or
train_validate_split is None or
test_split is None) and \
(os.path.exists(self.train_path) and
os.path.exists(self.validate_path) and
os.path.exists(self.test_path) and
datasets.paths_exist(self.vocab_paths) and
datasets.paths_exist(self.metadata_paths) and
datasets.paths_exist(self.w2v_paths)):
self.initialize_defaults(shuffle)
else:
if test_split is None:
test_split = datasets.test_split_small
if train_validate_split is None:
train_validate_split = datasets.train_validate_split
self.load_anew(train_validate_split, test_split,
shuffle=shuffle)
def initialize_defaults(self, shuffle):
# For now, we are happy that this works =)
self.load_anew(train_validate_split=datasets.train_validate_split,
test_split=datasets.test_split_small, shuffle=shuffle)
#train_data = self.load_data(self.train_path)
#validate_data = self.load_data(self.validate_path)
#test_data = self.load_data(self.test_path)
#self.w2i, self.i2w = datasets.load_vocabulary(self.vocab_path)
#self.w2v = datasets.load_w2v(self.w2v_path)
#self.train = DataSet(train_data, (self.w2i, self.i2w), shuffle)
#self.validate = DataSet(validate_data, (self.w2i, self.i2w), shuffle)
#self.test = DataSet(test_data, (self.w2i, self.i2w), shuffle)
def load_anew(self, train_validate_split, test_split, shuffle=True):
all_data = self.load_all_data(self.dataset_path)
if shuffle:
random.shuffle(all_data)
# First we take the test data away
total_length = len(all_data)
test_length = int(total_length * test_split)
train_validate_data, test_data = all_data[:-test_length],\
all_data[-test_length:]
# Then we split the training/validation data
train_validate_length = len(train_validate_data)
train_length = int(train_validate_length * train_validate_split)
train_data, validate_data = train_validate_data[:train_length], \
train_validate_data[train_length:]
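        # Worked example (hypothetical sizes): 1000 sentences with
        # test_split=0.1 and train_validate_split=0.9 give
        # 810 train / 90 validate / 100 test.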
self.dump_all_data(train_data, validate_data, test_data)
self.initialize_vocabulary()
self.initialize_datasets(train_data, validate_data, test_data, shuffle)
def load_all_data(self, path):
file_name = 'acner.csv'
path_plus_file_name = os.path.join(path, file_name)
with open(path_plus_file_name, 'r', encoding='cp1252') as f:
csv_reader = csv.reader(f, delimiter=',')
# Skip one line
next(csv_reader)
all_lines = [i for i in csv_reader]
return self.group_words_into_sentences(all_lines)
def initialize_vocabulary(self):
self.initialize_vocabulary_ll(['texts', 'pos', 'ner'], [5,1,1],
[False, False, False], ['spacy', 'split', 'split'])
def initialize_vocabulary_ll(self, names, min_frequencies,
downcases, tokenizer):
for i in range(len(self.vocab_paths)):
self.vocab_paths[i], self.w2v_paths[i], self.metadata_paths[i] = \
datasets.new_vocabulary(
files=[self.train_path], dataset_path=self.dataset_path,
min_frequency=min_frequencies[i], tokenizer=tokenizer[i],
downcase=downcases[i], max_vocab_size=None,
name=names[i],
line_processor=lambda line: line.split('\t')[i], lang='de')
self.w2i[i], self.i2w[i] = datasets.load_vocabulary(self.vocab_paths[i])
self.w2v[i] = datasets.preload_w2v(self.w2i[i], lang='de')
datasets.save_w2v(self.w2v_paths[i], self.w2v[i])
def initialize_datasets(self, train_data, validate_data, test_data, shuffle=True):
self.train = DataSet(train_data, self.w2i, self.i2w, shuffle)
self.validation = DataSet(validate_data, self.w2i, self.i2w, shuffle)
self.test = DataSet(test_data, self.w2i, self.i2w, shuffle)
def get_sentence_index(self, s):
        # `s` should look like "Sentence: 1"; we want to take the "1" there.
return int(s.split(' ')[1])
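    # e.g. get_sentence_index("Sentence: 1") -> 1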
def group_words_into_sentences(self, lines):
words = []
parts_of_speech = []
ner_tags = []
ret = []
curr_sentence = 0
for i, l in enumerate(lines):
if l[0] != '':
if i != 0:
#ret.append("\t".join([" ".join(words),
# " ".join(parts_of_speech),
# " ".join(ner_tags),
# str(curr_sentence)])
# )
ret.append([" ".join(words),
" ".join(parts_of_speech),
" ".join(ner_tags),
str(curr_sentence)])
curr_sentence = self.get_sentence_index(l[0])
words = []
parts_of_speech = []
ner_tags = []
words.append(l[1])
parts_of_speech.append(l[2])
ner_tags.append(l[3])
# Add the last one
ret.append([" ".join(words),
" ".join(parts_of_speech),
" ".join(ner_tags),
curr_sentence])
return ret
def dump_all_data(self, train_data, validate_data, test_data):
self.dump_data(train_data, self.train_path)
self.dump_data(validate_data, self.validate_path)
self.dump_data(test_data, self.test_path)
def dump_data(self, data, path):
with open(path, 'w') as f:
for i in data:
f.write("{}\t{}\t{}\t{}\n".format(i[0], i[1], i[2], i[3]))
def __refresh(self, load_w2v):
# (Again)
# It doesn't seem to make sense to want to create a new vocabulary for
# the other two types of data (NER data or POS tags). So I'll only allow
# for new vocabularies on the text
self.w2i[0], self.i2w[0] = datasets.load_vocabulary(self.vocab_paths[0])
if load_w2v:
self.w2v[0] = datasets.preload_w2v(self.w2i[0], lang='de')
datasets.save_w2v(self.w2v_paths[0], self.w2v[0])
self.train.set_vocab(self.w2i, self.i2w, 0)
self.validation.set_vocab(self.w2i, self.i2w, 0)
self.test.set_vocab(self.w2i, self.i2w, 0)
def create_vocabulary(self, all_files,
min_frequency=5, tokenizer='spacy',
downcase=True, max_vocab_size=None,
name='new', load_w2v=True):
# It doesn't seem to make sense to want to create a new vocabulary for
# the other two types of data (NER data or POS tags). So I'll only allow
# for new vocabularies on the text
self.vocab_paths[0], self.w2v_paths[0], self.metadata_paths[0] = \
datasets.new_vocabulary(
files=all_files, dataset_path=self.dataset_path,
min_frequency=min_frequency,
tokenizer=tokenizer, downcase=downcase,
max_vocab_size=max_vocab_size, name=name,
line_processor=lambda line: line.split('\t')[0])
self.__refresh(load_w2v)
class DataSet():
def __init__(self, data, w2i, i2w, shuffle=True):
self._epochs_completed = 0
self._index_in_epoch = 0
self.datafile = None
self.set_vocab(w2i, i2w)
self.data = data
self.Batch = self.initialize_batch()
def initialize_batch(self):
return collections.namedtuple('Batch', ['sentences', 'pos', 'ner', 'lengths'])
# I got the number of parts of speech with:
# f = open('acner.csv', 'r', encoding='cp1252')
# csv_reader = csv.reader(f, delimiter=',')
# next(csv_reader)
# all_lines = [i for i in csv_reader]
# i, w, p, ner = zip(*all_lines)
# p = list(set(p))
# len(p)
def next_batch(self, batch_size=64, pad=0, raw=False,
tokenizer=['spacy', 'split', 'split'],
one_hot=False):
        # raw: if True, return tokenized sequences without mapping them to ids
        # one_hot: if True, the NER labels are returned one-hot encoded
samples = None
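        # Epoch wrap-around, sketched: with 100 samples, _index_in_epoch=90 and
        # batch_size=64, take the last 10 samples, reshuffle, then take 54 more
        # from the start; _epochs_completed increments and the index becomes 54.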
if self._index_in_epoch + batch_size > len(self.data):
samples = self.data[self._index_in_epoch: len(self.data)]
random.shuffle(self.data)
missing_samples = batch_size - (len(self.data) - self._index_in_epoch)
self._epochs_completed += 1
samples.extend(self.data[0 :missing_samples])
self._index_in_epoch = missing_samples
else:
samples = self.data[self._index_in_epoch :self._index_in_epoch + batch_size]
self._index_in_epoch += batch_size
data = list(zip(*samples))
sentences = data[0]
pos = data[1]
ner = data[2]
# Generate sequences
sentences = self.generate_sequences(sentences, tokenizer[0])
pos = self.generate_sequences(pos, tokenizer[1])
ner = self.generate_sequences(ner, tokenizer[2])
lengths = [len(s) if pad == 0 else min(pad, len(s)) for s in sentences]
        if raw:
return self.Batch(sentences=sentences, pos=pos, ner=ner, lengths=lengths)
sentences = datasets.padseq(datasets.seq2id(sentences,
self.vocab_w2i[0]), pad)
pos = datasets.padseq(datasets.seq2id(pos, self.vocab_w2i[1]), pad)
ner = datasets.padseq(datasets.seq2id(ner, self.vocab_w2i[2]), pad)
if one_hot:
ner = [to_categorical(n, nb_classes=len(self.vocab_w2i[2]))
for n in ner]
batch = self.Batch(sentences=sentences, pos=pos, ner=ner, lengths=lengths)
return batch
def generate_sequences(self, x, tokenizer):
new_x = []
for instance in x:
tokens = datasets.tokenize(instance, tokenizer)
new_x.append(tokens)
return new_x
@property
def epochs_completed(self):
return self._epochs_completed
def set_vocab(self, w2i, i2w, which=None):
if (which is not None):
print("Setting vocab_w2i[{}]".format(which))
self.vocab_w2i[which] = w2i[which]
self.vocab_i2w[which] = i2w[which]
else:
self.vocab_w2i = w2i
self.vocab_i2w = i2w
if __name__ == '__main__':
import timeit
t = timeit.timeit(Acner, number=100)
print(t)
a = Acner()
b = a.train.next_batch()
print(b)
| {
"content_hash": "70234b295585d0adbdbd15ad0203219e",
"timestamp": "",
"source": "github",
"line_count": 317,
"max_line_length": 89,
"avg_line_length": 40.74447949526814,
"alnum_prop": 0.5556673892846082,
"repo_name": "mindgarage/Ovation",
"id": "74ee3b9906d8a9c3bdb75af00e20c5981b5f4918",
"size": "12916",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "datasets/acner.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "5870"
},
{
"name": "Python",
"bytes": "264059"
},
{
"name": "Shell",
"bytes": "265"
}
],
"symlink_target": ""
} |
import subprocess
import os
def convertWavToFlac(filename='SendClip.wav'):
    # Remove any existing .flac output before converting
    lastindex = filename.rfind('.')
    newname = filename[0 : lastindex] + '.flac'
    if os.path.exists(newname):
        os.remove(newname)
    # Capture stdout so the function actually returns flac's output
    output = subprocess.Popen(["flac", "--keep-foreign-metadata", filename],
                              stdout=subprocess.PIPE).communicate()[0]
return output
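# Example usage (hypothetical file; requires the `flac` binary on PATH):
#   convertWavToFlac('SendClip.wav')  # writes SendClip.flac next to the input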
| {
"content_hash": "42bcaf625a1d9f9bfe738f7210377bf1",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 93,
"avg_line_length": 32.36363636363637,
"alnum_prop": 0.6966292134831461,
"repo_name": "decebel/gspeech-py",
"id": "12ea53bc17cd205187648305172017ff4f9709f4",
"size": "356",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scripts/new_test/convert.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "4882"
}
],
"symlink_target": ""
} |
import kivy
kivy.require('1.9.0')
from kivy.app import App
from kivy.uix.floatlayout import FloatLayout
from kivy.uix.behaviors import ToggleButtonBehavior
from kivy.uix.actionbar import ActionPrevious
from kivy.animation import Animation
from kivy.lang import Builder
import videocontroller
Builder.load_file('actiontextinput.kv')
class ActionListButton(ToggleButtonBehavior, ActionPrevious):
pass
class KivyPlayer(FloatLayout):
def hide_bars(self, instance, playing):
if playing:
self.list_button.state = 'normal'
self.animationAB = Animation(y=self.height)
self.action_bar.disabled = True
self.animationAB.start(self.action_bar)
else:
self.action_bar.disabled = False
self.action_bar.top = self.height
if hasattr(self, 'animationAB'):
self.animationAB.cancel(self.action_bar)
def toggle_mute(self, instance, state):
if state == 'down':
self.video_controller.video.volume = 0
else:
self.video_controller.video.volume = 1
def show_load_list(self):
pass
def search(self, text):
pass
class KivyPlayerApp(App):
def build(self):
return KivyPlayer()
if __name__=="__main__":
KivyPlayerApp().run()
| {
"content_hash": "4fd3937dae49fedab11b03c97dba3d4e",
"timestamp": "",
"source": "github",
"line_count": 51,
"max_line_length": 61,
"avg_line_length": 25.764705882352942,
"alnum_prop": 0.6552511415525114,
"repo_name": "pimier15/PyGUI",
"id": "54d9afc9858c32498819dd60bb6718565aba193b",
"size": "1341",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "Kivy/Kivy/Bk_Interractive/sample/Chapter_06_code/07 - ActionBar - a responsive bar/kivyplayer.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "636598"
}
],
"symlink_target": ""
} |
"""Tool for uploading diffs from a version control system to the codereview app.
Usage summary: upload.py [options] [-- diff_options] [path...]
Diff options are passed to the diff command of the underlying system.
Supported version control systems:
Git
Mercurial
Subversion
Perforce
CVS
It is important for Git/Mercurial users to specify a tree/node/branch to diff
against by using the '--rev' option.
"""
# This code is derived from appcfg.py in the App Engine SDK (open source),
# and from ASPN recipe #146306.
import ConfigParser
import cookielib
import errno
import fnmatch
import getpass
import logging
import marshal
import mimetypes
import optparse
import os
import re
import socket
import subprocess
import sys
import urllib
import urllib2
import urlparse
# The md5 module was deprecated in Python 2.5.
try:
from hashlib import md5
except ImportError:
from md5 import md5
try:
import readline
except ImportError:
pass
try:
import keyring
except ImportError:
keyring = None
# The logging verbosity:
# 0: Errors only.
# 1: Status messages.
# 2: Info logs.
# 3: Debug logs.
verbosity = 1
# The account type used for authentication.
# This line could be changed by the review server (see handler for
# upload.py).
AUTH_ACCOUNT_TYPE = "GOOGLE"
# URL of the default review server. As for AUTH_ACCOUNT_TYPE, this line could be
# changed by the review server (see handler for upload.py).
DEFAULT_REVIEW_SERVER = "codereview.appspot.com"
# Max size of patch or base file.
MAX_UPLOAD_SIZE = 900 * 1024
# Constants for version control names. Used by GuessVCSName.
VCS_GIT = "Git"
VCS_MERCURIAL = "Mercurial"
VCS_SUBVERSION = "Subversion"
VCS_PERFORCE = "Perforce"
VCS_CVS = "CVS"
VCS_UNKNOWN = "Unknown"
VCS_ABBREVIATIONS = {
VCS_MERCURIAL.lower(): VCS_MERCURIAL,
"hg": VCS_MERCURIAL,
VCS_SUBVERSION.lower(): VCS_SUBVERSION,
"svn": VCS_SUBVERSION,
VCS_PERFORCE.lower(): VCS_PERFORCE,
"p4": VCS_PERFORCE,
VCS_GIT.lower(): VCS_GIT,
VCS_CVS.lower(): VCS_CVS,
}
# The result of parsing Subversion's [auto-props] setting.
svn_auto_props_map = None
def GetEmail(prompt):
"""Prompts the user for their email address and returns it.
The last used email address is saved to a file and offered up as a suggestion
to the user. If the user presses enter without typing in anything the last
used email address is used. If the user enters a new address, it is saved
for next time we prompt.
"""
last_email_file_name = os.path.expanduser("~/.last_codereview_email_address")
last_email = ""
if os.path.exists(last_email_file_name):
try:
last_email_file = open(last_email_file_name, "r")
last_email = last_email_file.readline().strip("\n")
last_email_file.close()
prompt += " [%s]" % last_email
except IOError, e:
pass
email = raw_input(prompt + ": ").strip()
if email:
try:
last_email_file = open(last_email_file_name, "w")
last_email_file.write(email)
last_email_file.close()
except IOError, e:
pass
else:
email = last_email
return email
def StatusUpdate(msg):
"""Print a status message to stdout.
If 'verbosity' is greater than 0, print the message.
Args:
msg: The string to print.
"""
if verbosity > 0:
print msg
def ErrorExit(msg):
"""Print an error message to stderr and exit."""
print >>sys.stderr, msg
sys.exit(1)
class ClientLoginError(urllib2.HTTPError):
"""Raised to indicate there was an error authenticating with ClientLogin."""
def __init__(self, url, code, msg, headers, args):
urllib2.HTTPError.__init__(self, url, code, msg, headers, None)
self.args = args
self.reason = args["Error"]
self.info = args.get("Info", None)
class AbstractRpcServer(object):
"""Provides a common interface for a simple RPC server."""
def __init__(self, host, auth_function, host_override=None, extra_headers={},
save_cookies=False, account_type=AUTH_ACCOUNT_TYPE):
"""Creates a new HttpRpcServer.
Args:
host: The host to send requests to.
auth_function: A function that takes no arguments and returns an
(email, password) tuple when called. Will be called if authentication
is required.
host_override: The host header to send to the server (defaults to host).
extra_headers: A dict of extra headers to append to every request.
save_cookies: If True, save the authentication cookies to local disk.
If False, use an in-memory cookiejar instead. Subclasses must
implement this functionality. Defaults to False.
account_type: Account type used for authentication. Defaults to
AUTH_ACCOUNT_TYPE.
"""
self.host = host
if (not self.host.startswith("http://") and
not self.host.startswith("https://")):
self.host = "http://" + self.host
self.host_override = host_override
self.auth_function = auth_function
self.authenticated = False
self.extra_headers = extra_headers
self.save_cookies = save_cookies
self.account_type = account_type
self.opener = self._GetOpener()
if self.host_override:
logging.info("Server: %s; Host: %s", self.host, self.host_override)
else:
logging.info("Server: %s", self.host)
def _GetOpener(self):
"""Returns an OpenerDirector for making HTTP requests.
Returns:
A urllib2.OpenerDirector object.
"""
raise NotImplementedError()
def _CreateRequest(self, url, data=None):
"""Creates a new urllib request."""
logging.debug("Creating request for: '%s' with payload:\n%s", url, data)
req = urllib2.Request(url, data=data, headers={"Accept": "text/plain"})
if self.host_override:
req.add_header("Host", self.host_override)
for key, value in self.extra_headers.iteritems():
req.add_header(key, value)
return req
def _GetAuthToken(self, email, password):
"""Uses ClientLogin to authenticate the user, returning an auth token.
Args:
email: The user's email address
password: The user's password
Raises:
ClientLoginError: If there was an error authenticating with ClientLogin.
HTTPError: If there was some other form of HTTP error.
Returns:
The authentication token returned by ClientLogin.
"""
account_type = self.account_type
if self.host.endswith(".google.com"):
# Needed for use inside Google.
account_type = "HOSTED"
req = self._CreateRequest(
url="https://www.google.com/accounts/ClientLogin",
data=urllib.urlencode({
"Email": email,
"Passwd": password,
"service": "ah",
"source": "rietveld-codereview-upload",
"accountType": account_type,
}),
)
try:
response = self.opener.open(req)
response_body = response.read()
response_dict = dict(x.split("=")
for x in response_body.split("\n") if x)
return response_dict["Auth"]
except urllib2.HTTPError, e:
if e.code == 403:
body = e.read()
response_dict = dict(x.split("=", 1) for x in body.split("\n") if x)
raise ClientLoginError(req.get_full_url(), e.code, e.msg,
e.headers, response_dict)
else:
raise
def _GetAuthCookie(self, auth_token):
"""Fetches authentication cookies for an authentication token.
Args:
auth_token: The authentication token returned by ClientLogin.
Raises:
HTTPError: If there was an error fetching the authentication cookies.
"""
# This is a dummy value to allow us to identify when we're successful.
continue_location = "http://localhost/"
args = {"continue": continue_location, "auth": auth_token}
req = self._CreateRequest("%s/_ah/login?%s" %
(self.host, urllib.urlencode(args)))
try:
response = self.opener.open(req)
except urllib2.HTTPError, e:
response = e
if (response.code != 302 or
response.info()["location"] != continue_location):
raise urllib2.HTTPError(req.get_full_url(), response.code, response.msg,
response.headers, response.fp)
self.authenticated = True
def _Authenticate(self):
"""Authenticates the user.
The authentication process works as follows:
1) We get a username and password from the user
2) We use ClientLogin to obtain an AUTH token for the user
(see http://code.google.com/apis/accounts/AuthForInstalledApps.html).
3) We pass the auth token to /_ah/login on the server to obtain an
authentication cookie. If login was successful, it tries to redirect
us to the URL we provided.
If we attempt to access the upload API without first obtaining an
authentication cookie, it returns a 401 response (or a 302) and
directs us to authenticate ourselves with ClientLogin.
"""
for i in range(3):
credentials = self.auth_function()
try:
auth_token = self._GetAuthToken(credentials[0], credentials[1])
except ClientLoginError, e:
print >>sys.stderr, ''
if e.reason == "BadAuthentication":
if e.info == "InvalidSecondFactor":
print >>sys.stderr, (
"Use an application-specific password instead "
"of your regular account password.\n"
"See http://www.google.com/"
"support/accounts/bin/answer.py?answer=185833")
else:
print >>sys.stderr, "Invalid username or password."
elif e.reason == "CaptchaRequired":
print >>sys.stderr, (
"Please go to\n"
"https://www.google.com/accounts/DisplayUnlockCaptcha\n"
"and verify you are a human. Then try again.\n"
"If you are using a Google Apps account the URL is:\n"
"https://www.google.com/a/yourdomain.com/UnlockCaptcha")
elif e.reason == "NotVerified":
print >>sys.stderr, "Account not verified."
elif e.reason == "TermsNotAgreed":
print >>sys.stderr, "User has not agreed to TOS."
elif e.reason == "AccountDeleted":
print >>sys.stderr, "The user account has been deleted."
elif e.reason == "AccountDisabled":
print >>sys.stderr, "The user account has been disabled."
break
elif e.reason == "ServiceDisabled":
print >>sys.stderr, ("The user's access to the service has been "
"disabled.")
elif e.reason == "ServiceUnavailable":
print >>sys.stderr, "The service is not available; try again later."
else:
# Unknown error.
raise
print >>sys.stderr, ''
continue
self._GetAuthCookie(auth_token)
return
def Send(self, request_path, payload=None,
content_type="application/octet-stream",
timeout=None,
extra_headers=None,
**kwargs):
"""Sends an RPC and returns the response.
Args:
request_path: The path to send the request to, eg /api/appversion/create.
payload: The body of the request, or None to send an empty request.
content_type: The Content-Type header to use.
timeout: timeout in seconds; default None i.e. no timeout.
(Note: for large requests on OS X, the timeout doesn't work right.)
extra_headers: Dict containing additional HTTP headers that should be
included in the request (string header names mapped to their values),
or None to not include any additional headers.
kwargs: Any keyword arguments are converted into query string parameters.
Returns:
The response body, as a string.
"""
# TODO: Don't require authentication. Let the server say
# whether it is necessary.
if not self.authenticated:
self._Authenticate()
old_timeout = socket.getdefaulttimeout()
socket.setdefaulttimeout(timeout)
try:
tries = 0
while True:
tries += 1
args = dict(kwargs)
url = "%s%s" % (self.host, request_path)
if args:
url += "?" + urllib.urlencode(args)
req = self._CreateRequest(url=url, data=payload)
req.add_header("Content-Type", content_type)
if extra_headers:
for header, value in extra_headers.items():
req.add_header(header, value)
try:
f = self.opener.open(req)
response = f.read()
f.close()
return response
except urllib2.HTTPError, e:
if tries > 3:
raise
elif e.code == 401 or e.code == 302:
self._Authenticate()
elif e.code == 301:
# Handle permanent redirect manually.
url = e.info()["location"]
url_loc = urlparse.urlparse(url)
self.host = '%s://%s' % (url_loc[0], url_loc[1])
elif e.code >= 500:
ErrorExit(e.read())
else:
raise
finally:
socket.setdefaulttimeout(old_timeout)
class HttpRpcServer(AbstractRpcServer):
"""Provides a simplified RPC-style interface for HTTP requests."""
def _Authenticate(self):
"""Save the cookie jar after authentication."""
super(HttpRpcServer, self)._Authenticate()
if self.save_cookies:
StatusUpdate("Saving authentication cookies to %s" % self.cookie_file)
self.cookie_jar.save()
def _GetOpener(self):
"""Returns an OpenerDirector that supports cookies and ignores redirects.
Returns:
A urllib2.OpenerDirector object.
"""
opener = urllib2.OpenerDirector()
opener.add_handler(urllib2.ProxyHandler())
opener.add_handler(urllib2.UnknownHandler())
opener.add_handler(urllib2.HTTPHandler())
opener.add_handler(urllib2.HTTPDefaultErrorHandler())
opener.add_handler(urllib2.HTTPSHandler())
opener.add_handler(urllib2.HTTPErrorProcessor())
if self.save_cookies:
self.cookie_file = os.path.expanduser("~/.codereview_upload_cookies")
self.cookie_jar = cookielib.MozillaCookieJar(self.cookie_file)
if os.path.exists(self.cookie_file):
try:
self.cookie_jar.load()
self.authenticated = True
StatusUpdate("Loaded authentication cookies from %s" %
self.cookie_file)
except (cookielib.LoadError, IOError):
# Failed to load cookies - just ignore them.
pass
else:
# Create an empty cookie file with mode 600
fd = os.open(self.cookie_file, os.O_CREAT, 0600)
os.close(fd)
# Always chmod the cookie file
os.chmod(self.cookie_file, 0600)
else:
      # Don't save cookies across runs of upload.py.
self.cookie_jar = cookielib.CookieJar()
opener.add_handler(urllib2.HTTPCookieProcessor(self.cookie_jar))
return opener
class CondensedHelpFormatter(optparse.IndentedHelpFormatter):
"""Frees more horizontal space by removing indentation from group
options and collapsing arguments between short and long, e.g.
'-o ARG, --opt=ARG' to -o --opt ARG"""
def format_heading(self, heading):
return "%s:\n" % heading
def format_option(self, option):
self.dedent()
res = optparse.HelpFormatter.format_option(self, option)
self.indent()
return res
def format_option_strings(self, option):
self.set_long_opt_delimiter(" ")
optstr = optparse.HelpFormatter.format_option_strings(self, option)
optlist = optstr.split(", ")
if len(optlist) > 1:
if option.takes_value():
# strip METAVAR from all but the last option
optlist = [x.split()[0] for x in optlist[:-1]] + optlist[-1:]
optstr = " ".join(optlist)
return optstr
parser = optparse.OptionParser(
usage="%prog [options] [-- diff_options] [path...]",
add_help_option=False,
formatter=CondensedHelpFormatter()
)
parser.add_option("-h", "--help", action="store_true",
help="Show this help message and exit.")
parser.add_option("-y", "--assume_yes", action="store_true",
dest="assume_yes", default=False,
help="Assume that the answer to yes/no questions is 'yes'.")
# Logging
group = parser.add_option_group("Logging options")
group.add_option("-q", "--quiet", action="store_const", const=0,
dest="verbose", help="Print errors only.")
group.add_option("-v", "--verbose", action="store_const", const=2,
dest="verbose", default=1,
help="Print info level logs.")
group.add_option("--noisy", action="store_const", const=3,
dest="verbose", help="Print all logs.")
group.add_option("--print_diffs", dest="print_diffs", action="store_true",
help="Print full diffs.")
# Review server
group = parser.add_option_group("Review server options")
group.add_option("-s", "--server", action="store", dest="server",
default=DEFAULT_REVIEW_SERVER,
metavar="SERVER",
help=("The server to upload to. The format is host[:port]. "
"Defaults to '%default'."))
group.add_option("-e", "--email", action="store", dest="email",
metavar="EMAIL", default=None,
help="The username to use. Will prompt if omitted.")
group.add_option("-H", "--host", action="store", dest="host",
metavar="HOST", default=None,
help="Overrides the Host header sent with all RPCs.")
group.add_option("--no_cookies", action="store_false",
dest="save_cookies", default=True,
help="Do not save authentication cookies to local disk.")
group.add_option("--account_type", action="store", dest="account_type",
metavar="TYPE", default=AUTH_ACCOUNT_TYPE,
choices=["GOOGLE", "HOSTED"],
help=("Override the default account type "
"(defaults to '%default', "
"valid choices are 'GOOGLE' and 'HOSTED')."))
# Issue
group = parser.add_option_group("Issue options")
group.add_option("-t", "--title", action="store", dest="title",
help="New issue subject or new patch set title")
group.add_option("-m", "--message", action="store", dest="message",
default=None,
help="New issue description or new patch set message")
group.add_option("-F", "--file", action="store", dest="file",
default=None, help="Read the message above from file.")
group.add_option("-r", "--reviewers", action="store", dest="reviewers",
metavar="REVIEWERS", default=None,
help="Add reviewers (comma separated email addresses).")
group.add_option("--cc", action="store", dest="cc",
metavar="CC", default=None,
help="Add CC (comma separated email addresses).")
group.add_option("--private", action="store_true", dest="private",
default=False,
help="Make the issue restricted to reviewers and those CCed")
# Upload options
group = parser.add_option_group("Patch options")
group.add_option("-i", "--issue", type="int", action="store",
metavar="ISSUE", default=None,
help="Issue number to which to add. Defaults to new issue.")
group.add_option("--base_url", action="store", dest="base_url", default=None,
help="Base URL path for files (listed as \"Base URL\" when "
"viewing issue). If omitted, will be guessed automatically "
"for SVN repos and left blank for others.")
group.add_option("--download_base", action="store_true",
dest="download_base", default=False,
help="Base files will be downloaded by the server "
"(side-by-side diffs may not work on files with CRs).")
group.add_option("--rev", action="store", dest="revision",
metavar="REV", default=None,
help="Base revision/branch/tree to diff against. Use "
"rev1:rev2 range to review already committed changeset.")
group.add_option("--send_mail", action="store_true",
dest="send_mail", default=False,
help="Send notification email to reviewers.")
group.add_option("-p", "--send_patch", action="store_true",
dest="send_patch", default=False,
help="Same as --send_mail, but include diff as an "
"attachment, and prepend email subject with 'PATCH:'.")
group.add_option("--vcs", action="store", dest="vcs",
metavar="VCS", default=None,
help=("Version control system (optional, usually upload.py "
"already guesses the right VCS)."))
group.add_option("--emulate_svn_auto_props", action="store_true",
dest="emulate_svn_auto_props", default=False,
help=("Emulate Subversion's auto properties feature."))
# Perforce-specific
group = parser.add_option_group("Perforce-specific options "
"(overrides P4 environment variables)")
group.add_option("--p4_port", action="store", dest="p4_port",
metavar="P4_PORT", default=None,
help=("Perforce server and port (optional)"))
group.add_option("--p4_changelist", action="store", dest="p4_changelist",
metavar="P4_CHANGELIST", default=None,
help=("Perforce changelist id"))
group.add_option("--p4_client", action="store", dest="p4_client",
metavar="P4_CLIENT", default=None,
help=("Perforce client/workspace"))
group.add_option("--p4_user", action="store", dest="p4_user",
metavar="P4_USER", default=None,
help=("Perforce user"))
def GetRpcServer(server, email=None, host_override=None, save_cookies=True,
account_type=AUTH_ACCOUNT_TYPE):
"""Returns an instance of an AbstractRpcServer.
Args:
server: String containing the review server URL.
email: String containing user's email address.
host_override: If not None, string containing an alternate hostname to use
in the host header.
save_cookies: Whether authentication cookies should be saved to disk.
account_type: Account type for authentication, either 'GOOGLE'
or 'HOSTED'. Defaults to AUTH_ACCOUNT_TYPE.
Returns:
A new AbstractRpcServer, on which RPC calls can be made.
"""
rpc_server_class = HttpRpcServer
# If this is the dev_appserver, use fake authentication.
host = (host_override or server).lower()
if re.match(r'(http://)?localhost([:/]|$)', host):
if email is None:
email = "[email protected]"
logging.info("Using debug user %s. Override with --email" % email)
server = rpc_server_class(
server,
lambda: (email, "password"),
host_override=host_override,
extra_headers={"Cookie":
'dev_appserver_login="%s:False"' % email},
save_cookies=save_cookies,
account_type=account_type)
# Don't try to talk to ClientLogin.
server.authenticated = True
return server
def GetUserCredentials():
"""Prompts the user for a username and password."""
# Create a local alias to the email variable to avoid Python's crazy
# scoping rules.
global keyring
local_email = email
if local_email is None:
local_email = GetEmail("Email (login for uploading to %s)" % server)
password = None
if keyring:
try:
password = keyring.get_password(host, local_email)
except:
# Sadly, we have to trap all errors here as
# gnomekeyring.IOError inherits from object. :/
print "Failed to get password from keyring"
keyring = None
if password is not None:
print "Using password from system keyring."
else:
password = getpass.getpass("Password for %s: " % local_email)
if keyring:
answer = raw_input("Store password in system keyring?(y/N) ").strip()
if answer == "y":
keyring.set_password(host, local_email, password)
return (local_email, password)
  return rpc_server_class(server,
                          GetUserCredentials,
                          host_override=host_override,
                          save_cookies=save_cookies,
                          account_type=account_type)
def EncodeMultipartFormData(fields, files):
"""Encode form fields for multipart/form-data.
Args:
fields: A sequence of (name, value) elements for regular form fields.
files: A sequence of (name, filename, value) elements for data to be
uploaded as files.
Returns:
(content_type, body) ready for httplib.HTTP instance.
Source:
http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/146306
"""
BOUNDARY = '-M-A-G-I-C---B-O-U-N-D-A-R-Y-'
CRLF = '\r\n'
lines = []
for (key, value) in fields:
lines.append('--' + BOUNDARY)
lines.append('Content-Disposition: form-data; name="%s"' % key)
lines.append('')
if isinstance(value, unicode):
value = value.encode('utf-8')
lines.append(value)
for (key, filename, value) in files:
lines.append('--' + BOUNDARY)
lines.append('Content-Disposition: form-data; name="%s"; filename="%s"' %
(key, filename))
lines.append('Content-Type: %s' % GetContentType(filename))
lines.append('')
if isinstance(value, unicode):
value = value.encode('utf-8')
lines.append(value)
lines.append('--' + BOUNDARY + '--')
lines.append('')
body = CRLF.join(lines)
content_type = 'multipart/form-data; boundary=%s' % BOUNDARY
return content_type, body
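# Example (hypothetical values):
#   ctype, body = EncodeMultipartFormData([("subject", "demo")],
#                                         [("data", "notes.txt", "hello")])
#   # ctype == 'multipart/form-data; boundary=-M-A-G-I-C---B-O-U-N-D-A-R-Y-'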
def GetContentType(filename):
"""Helper to guess the content-type from the filename."""
return mimetypes.guess_type(filename)[0] or 'application/octet-stream'
# Use a shell for subcommands on Windows to get a PATH search.
use_shell = sys.platform.startswith("win")
def RunShellWithReturnCodeAndStderr(command, print_output=False,
universal_newlines=True,
env=os.environ):
"""Executes a command and returns the output from stdout, stderr and the return code.
Args:
command: Command to execute.
print_output: If True, the output is printed to stdout.
If False, both stdout and stderr are ignored.
universal_newlines: Use universal_newlines flag (default: True).
Returns:
Tuple (stdout, stderr, return code)
"""
logging.info("Running %s", command)
env = env.copy()
env['LC_MESSAGES'] = 'C'
p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
shell=use_shell, universal_newlines=universal_newlines,
env=env)
if print_output:
output_array = []
while True:
line = p.stdout.readline()
if not line:
break
print line.strip("\n")
output_array.append(line)
output = "".join(output_array)
else:
output = p.stdout.read()
p.wait()
errout = p.stderr.read()
if print_output and errout:
print >>sys.stderr, errout
p.stdout.close()
p.stderr.close()
return output, errout, p.returncode
def RunShellWithReturnCode(command, print_output=False,
universal_newlines=True,
env=os.environ):
"""Executes a command and returns the output from stdout and the return code."""
out, err, retcode = RunShellWithReturnCodeAndStderr(command, print_output,
universal_newlines, env)
return out, retcode
def RunShell(command, silent_ok=False, universal_newlines=True,
print_output=False, env=os.environ):
data, retcode = RunShellWithReturnCode(command, print_output,
universal_newlines, env)
if retcode:
ErrorExit("Got error status from %s:\n%s" % (command, data))
if not silent_ok and not data:
ErrorExit("No output from %s" % command)
return data
class VersionControlSystem(object):
"""Abstract base class providing an interface to the VCS."""
def __init__(self, options):
"""Constructor.
Args:
options: Command line options.
"""
self.options = options
def GetGUID(self):
"""Return string to distinguish the repository from others, for example to
query all opened review issues for it"""
raise NotImplementedError(
"abstract method -- subclass %s must override" % self.__class__)
def PostProcessDiff(self, diff):
"""Return the diff with any special post processing this VCS needs, e.g.
to include an svn-style "Index:"."""
return diff
def GenerateDiff(self, args):
"""Return the current diff as a string.
Args:
args: Extra arguments to pass to the diff command.
"""
raise NotImplementedError(
"abstract method -- subclass %s must override" % self.__class__)
def GetUnknownFiles(self):
"""Return a list of files unknown to the VCS."""
raise NotImplementedError(
"abstract method -- subclass %s must override" % self.__class__)
def CheckForUnknownFiles(self):
"""Show an "are you sure?" prompt if there are unknown files."""
unknown_files = self.GetUnknownFiles()
if unknown_files:
print "The following files are not added to version control:"
for line in unknown_files:
print line
prompt = "Are you sure to continue?(y/N) "
answer = raw_input(prompt).strip()
if answer != "y":
ErrorExit("User aborted")
def GetBaseFile(self, filename):
"""Get the content of the upstream version of a file.
Returns:
A tuple (base_content, new_content, is_binary, status)
base_content: The contents of the base file.
new_content: For text files, this is empty. For binary files, this is
the contents of the new file, since the diff output won't contain
information to reconstruct the current file.
is_binary: True iff the file is binary.
status: The status of the file.
"""
raise NotImplementedError(
"abstract method -- subclass %s must override" % self.__class__)
def GetBaseFiles(self, diff):
"""Helper that calls GetBase file for each file in the patch.
Returns:
A dictionary that maps from filename to GetBaseFile's tuple. Filenames
are retrieved based on lines that start with "Index:" or
"Property changes on:".
"""
files = {}
for line in diff.splitlines(True):
if line.startswith('Index:') or line.startswith('Property changes on:'):
unused, filename = line.split(':', 1)
# On Windows if a file has property changes its filename uses '\'
# instead of '/'.
filename = filename.strip().replace('\\', '/')
files[filename] = self.GetBaseFile(filename)
return files
def UploadBaseFiles(self, issue, rpc_server, patch_list, patchset, options,
files):
"""Uploads the base files (and if necessary, the current ones as well)."""
def UploadFile(filename, file_id, content, is_binary, status, is_base):
"""Uploads a file to the server."""
file_too_large = False
if is_base:
type = "base"
else:
type = "current"
if len(content) > MAX_UPLOAD_SIZE:
print ("Not uploading the %s file for %s because it's too large." %
(type, filename))
file_too_large = True
content = ""
checksum = md5(content).hexdigest()
if options.verbose > 0 and not file_too_large:
print "Uploading %s file for %s" % (type, filename)
url = "/%d/upload_content/%d/%d" % (int(issue), int(patchset), file_id)
form_fields = [("filename", filename),
("status", status),
("checksum", checksum),
("is_binary", str(is_binary)),
("is_current", str(not is_base)),
]
if file_too_large:
form_fields.append(("file_too_large", "1"))
if options.email:
form_fields.append(("user", options.email))
ctype, body = EncodeMultipartFormData(form_fields,
[("data", filename, content)])
response_body = rpc_server.Send(url, body,
content_type=ctype)
if not response_body.startswith("OK"):
StatusUpdate(" --> %s" % response_body)
sys.exit(1)
patches = dict()
[patches.setdefault(v, k) for k, v in patch_list]
for filename in patches.keys():
base_content, new_content, is_binary, status = files[filename]
file_id_str = patches.get(filename)
if file_id_str.find("nobase") != -1:
base_content = None
file_id_str = file_id_str[file_id_str.rfind("_") + 1:]
file_id = int(file_id_str)
if base_content != None:
UploadFile(filename, file_id, base_content, is_binary, status, True)
if new_content != None:
UploadFile(filename, file_id, new_content, is_binary, status, False)
def IsImage(self, filename):
"""Returns true if the filename has an image extension."""
mimetype = mimetypes.guess_type(filename)[0]
if not mimetype:
return False
return mimetype.startswith("image/")
def IsBinaryData(self, data):
"""Returns true if data contains a null byte."""
    # Derived from Mercurial's heuristic; see
    # http://selenic.com/hg/file/848a6658069e/mercurial/util.py#l229
return bool(data and "\0" in data)
class SubversionVCS(VersionControlSystem):
"""Implementation of the VersionControlSystem interface for Subversion."""
def __init__(self, options):
super(SubversionVCS, self).__init__(options)
if self.options.revision:
match = re.match(r"(\d+)(:(\d+))?", self.options.revision)
if not match:
ErrorExit("Invalid Subversion revision %s." % self.options.revision)
self.rev_start = match.group(1)
self.rev_end = match.group(3)
else:
self.rev_start = self.rev_end = None
# Cache output from "svn list -r REVNO dirname".
    # Keys: dirname, Values: 2-tuple (output for start rev and end rev).
self.svnls_cache = {}
# Base URL is required to fetch files deleted in an older revision.
# Result is cached to not guess it over and over again in GetBaseFile().
required = self.options.download_base or self.options.revision is not None
self.svn_base = self._GuessBase(required)
def GetGUID(self):
return self._GetInfo("Repository UUID")
def GuessBase(self, required):
"""Wrapper for _GuessBase."""
return self.svn_base
def _GuessBase(self, required):
"""Returns base URL for current diff.
Args:
required: If true, exits if the url can't be guessed, otherwise None is
returned.
"""
url = self._GetInfo("URL")
if url:
scheme, netloc, path, params, query, fragment = urlparse.urlparse(url)
guess = ""
# TODO(anatoli) - repository specific hacks should be handled by server
if netloc == "svn.python.org" and scheme == "svn+ssh":
path = "projects" + path
scheme = "http"
guess = "Python "
elif netloc.endswith(".googlecode.com"):
scheme = "http"
guess = "Google Code "
path = path + "/"
base = urlparse.urlunparse((scheme, netloc, path, params,
query, fragment))
logging.info("Guessed %sbase = %s", guess, base)
return base
if required:
ErrorExit("Can't find URL in output from svn info")
return None
def _GetInfo(self, key):
"""Parses 'svn info' for current dir. Returns value for key or None"""
for line in RunShell(["svn", "info"]).splitlines():
if line.startswith(key + ": "):
return line.split(":", 1)[1].strip()
def _EscapeFilename(self, filename):
"""Escapes filename for SVN commands."""
if "@" in filename and not filename.endswith("@"):
filename = "%s@" % filename
return filename
def GenerateDiff(self, args):
cmd = ["svn", "diff"]
if self.options.revision:
cmd += ["-r", self.options.revision]
cmd.extend(args)
data = RunShell(cmd)
count = 0
for line in data.splitlines():
if line.startswith("Index:") or line.startswith("Property changes on:"):
count += 1
logging.info(line)
if not count:
ErrorExit("No valid patches found in output from svn diff")
return data
def _CollapseKeywords(self, content, keyword_str):
"""Collapses SVN keywords."""
# svn cat translates keywords but svn diff doesn't. As a result of this
# behavior patching.PatchChunks() fails with a chunk mismatch error.
# This part was originally written by the Review Board development team
# who had the same problem (http://reviews.review-board.org/r/276/).
# Mapping of keywords to known aliases
svn_keywords = {
# Standard keywords
'Date': ['Date', 'LastChangedDate'],
'Revision': ['Revision', 'LastChangedRevision', 'Rev'],
'Author': ['Author', 'LastChangedBy'],
'HeadURL': ['HeadURL', 'URL'],
'Id': ['Id'],
# Aliases
'LastChangedDate': ['LastChangedDate', 'Date'],
'LastChangedRevision': ['LastChangedRevision', 'Rev', 'Revision'],
'LastChangedBy': ['LastChangedBy', 'Author'],
'URL': ['URL', 'HeadURL'],
}
def repl(m):
if m.group(2):
return "$%s::%s$" % (m.group(1), " " * len(m.group(3)))
return "$%s$" % m.group(1)
keywords = [keyword
for name in keyword_str.split(" ")
for keyword in svn_keywords.get(name, [])]
return re.sub(r"\$(%s):(:?)([^\$]+)\$" % '|'.join(keywords), repl, content)
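  # e.g. with keyword_str "Revision", "$Rev: 123 $" collapses to "$Rev$" so the
  # base content matches what "svn diff" was computed against.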
def GetUnknownFiles(self):
status = RunShell(["svn", "status", "--ignore-externals"], silent_ok=True)
unknown_files = []
for line in status.split("\n"):
if line and line[0] == "?":
unknown_files.append(line)
return unknown_files
def ReadFile(self, filename):
"""Returns the contents of a file."""
file = open(filename, 'rb')
result = ""
try:
result = file.read()
finally:
file.close()
return result
def GetStatus(self, filename):
"""Returns the status of a file."""
if not self.options.revision:
status = RunShell(["svn", "status", "--ignore-externals",
self._EscapeFilename(filename)])
if not status:
ErrorExit("svn status returned no output for %s" % filename)
status_lines = status.splitlines()
# If file is in a cl, the output will begin with
# "\n--- Changelist 'cl_name':\n". See
# http://svn.collab.net/repos/svn/trunk/notes/changelist-design.txt
if (len(status_lines) == 3 and
not status_lines[0] and
status_lines[1].startswith("--- Changelist")):
status = status_lines[2]
else:
status = status_lines[0]
# If we have a revision to diff against we need to run "svn list"
# for the old and the new revision and compare the results to get
# the correct status for a file.
else:
dirname, relfilename = os.path.split(filename)
if dirname not in self.svnls_cache:
cmd = ["svn", "list", "-r", self.rev_start,
self._EscapeFilename(dirname) or "."]
out, err, returncode = RunShellWithReturnCodeAndStderr(cmd)
if returncode:
          # Directory might not yet exist at start revision
          # svn: Unable to find repository location for 'abc' in revision nnn
          if re.match(r'^svn: Unable to find repository location for .+ in revision \d+', err):
old_files = ()
else:
ErrorExit("Failed to get status for %s:\n%s" % (filename, err))
else:
old_files = out.splitlines()
args = ["svn", "list"]
if self.rev_end:
args += ["-r", self.rev_end]
cmd = args + [self._EscapeFilename(dirname) or "."]
out, returncode = RunShellWithReturnCode(cmd)
if returncode:
ErrorExit("Failed to run command %s" % cmd)
self.svnls_cache[dirname] = (old_files, out.splitlines())
old_files, new_files = self.svnls_cache[dirname]
if relfilename in old_files and relfilename not in new_files:
status = "D "
elif relfilename in old_files and relfilename in new_files:
status = "M "
else:
status = "A "
return status
def GetBaseFile(self, filename):
status = self.GetStatus(filename)
base_content = None
new_content = None
# If a file is copied its status will be "A +", which signifies
# "addition-with-history". See "svn st" for more information. We need to
# upload the original file or else diff parsing will fail if the file was
# edited.
if status[0] == "A" and status[3] != "+":
# We'll need to upload the new content if we're adding a binary file
# since diff's output won't contain it.
mimetype = RunShell(["svn", "propget", "svn:mime-type",
self._EscapeFilename(filename)], silent_ok=True)
base_content = ""
is_binary = bool(mimetype) and not mimetype.startswith("text/")
if is_binary and self.IsImage(filename):
new_content = self.ReadFile(filename)
elif (status[0] in ("M", "D", "R") or
(status[0] == "A" and status[3] == "+") or # Copied file.
(status[0] == " " and status[1] == "M")): # Property change.
args = []
if self.options.revision:
        # filename must not be escaped. We already add an at sign ("@") here.
url = "%s/%s@%s" % (self.svn_base, filename, self.rev_start)
else:
# Don't change filename, it's needed later.
url = filename
args += ["-r", "BASE"]
cmd = ["svn"] + args + ["propget", "svn:mime-type", url]
mimetype, returncode = RunShellWithReturnCode(cmd)
if returncode:
# File does not exist in the requested revision.
# Reset mimetype, it contains an error message.
mimetype = ""
else:
mimetype = mimetype.strip()
get_base = False
# this test for binary is exactly the test prescribed by the
# official SVN docs at
# http://subversion.apache.org/faq.html#binary-files
is_binary = (bool(mimetype) and
not mimetype.startswith("text/") and
mimetype not in ("image/x-xbitmap", "image/x-xpixmap"))
if status[0] == " ":
# Empty base content just to force an upload.
base_content = ""
elif is_binary:
if self.IsImage(filename):
get_base = True
if status[0] == "M":
if not self.rev_end:
new_content = self.ReadFile(filename)
else:
url = "%s/%s@%s" % (self.svn_base, filename, self.rev_end)
new_content = RunShell(["svn", "cat", url],
universal_newlines=True, silent_ok=True)
else:
base_content = ""
else:
get_base = True
if get_base:
if is_binary:
universal_newlines = False
else:
universal_newlines = True
if self.rev_start:
# "svn cat -r REV delete_file.txt" doesn't work. cat requires
# the full URL with "@REV" appended instead of using "-r" option.
url = "%s/%s@%s" % (self.svn_base, filename, self.rev_start)
base_content = RunShell(["svn", "cat", url],
universal_newlines=universal_newlines,
silent_ok=True)
else:
base_content, ret_code = RunShellWithReturnCode(
["svn", "cat", self._EscapeFilename(filename)],
universal_newlines=universal_newlines)
if ret_code and status[0] == "R":
# It's a replaced file without local history (see issue208).
# The base file needs to be fetched from the server.
url = "%s/%s" % (self.svn_base, filename)
base_content = RunShell(["svn", "cat", url],
universal_newlines=universal_newlines,
silent_ok=True)
elif ret_code:
ErrorExit("Got error status from 'svn cat %s'" % filename)
if not is_binary:
args = []
if self.rev_start:
url = "%s/%s@%s" % (self.svn_base, filename, self.rev_start)
else:
url = filename
args += ["-r", "BASE"]
cmd = ["svn"] + args + ["propget", "svn:keywords", url]
keywords, returncode = RunShellWithReturnCode(cmd)
if keywords and not returncode:
base_content = self._CollapseKeywords(base_content, keywords)
else:
StatusUpdate("svn status returned unexpected output: %s" % status)
sys.exit(1)
return base_content, new_content, is_binary, status[0:5]
class GitVCS(VersionControlSystem):
"""Implementation of the VersionControlSystem interface for Git."""
def __init__(self, options):
super(GitVCS, self).__init__(options)
# Map of filename -> (hash before, hash after) of base file.
# Hashes for "no such file" are represented as None.
self.hashes = {}
# Map of new filename -> old filename for renames.
self.renames = {}
def GetGUID(self):
revlist = RunShell("git rev-list --parents HEAD".split()).splitlines()
# M-A: Return the 1st root hash, there could be multiple when a
# subtree is merged. In that case, more analysis would need to
# be done to figure out which HEAD is the 'most representative'.
for r in revlist:
if ' ' not in r:
return r
def PostProcessDiff(self, gitdiff):
"""Converts the diff output to include an svn-style "Index:" line as well
as record the hashes of the files, so we can upload them along with our
diff."""
    # Special hash used by git to indicate "no such content".
NULL_HASH = "0"*40
def IsFileNew(filename):
return filename in self.hashes and self.hashes[filename][0] is None
def AddSubversionPropertyChange(filename):
"""Add svn's property change information into the patch if given file is
new file.
We use Subversion's auto-props setting to retrieve its property.
See http://svnbook.red-bean.com/en/1.1/ch07.html#svn-ch-7-sect-1.3.2 for
Subversion's [auto-props] setting.
"""
if self.options.emulate_svn_auto_props and IsFileNew(filename):
svnprops = GetSubversionPropertyChanges(filename)
if svnprops:
svndiff.append("\n" + svnprops + "\n")
svndiff = []
filecount = 0
filename = None
for line in gitdiff.splitlines():
match = re.match(r"diff --git a/(.*) b/(.*)$", line)
if match:
# Add auto property here for previously seen file.
if filename is not None:
AddSubversionPropertyChange(filename)
filecount += 1
# Intentionally use the "after" filename so we can show renames.
filename = match.group(2)
svndiff.append("Index: %s\n" % filename)
if match.group(1) != match.group(2):
self.renames[match.group(2)] = match.group(1)
else:
# The "index" line in a git diff looks like this (long hashes elided):
# index 82c0d44..b2cee3f 100755
# We want to save the left hash, as that identifies the base file.
match = re.match(r"index (\w+)\.\.(\w+)", line)
if match:
before, after = (match.group(1), match.group(2))
if before == NULL_HASH:
before = None
if after == NULL_HASH:
after = None
self.hashes[filename] = (before, after)
svndiff.append(line + "\n")
if not filecount:
ErrorExit("No valid patches found in output from git diff")
# Add auto property for the last seen file.
assert filename is not None
AddSubversionPropertyChange(filename)
return "".join(svndiff)
def GenerateDiff(self, extra_args):
extra_args = extra_args[:]
if self.options.revision:
if ":" in self.options.revision:
extra_args = self.options.revision.split(":", 1) + extra_args
else:
extra_args = [self.options.revision] + extra_args
# --no-ext-diff is broken in some versions of Git, so try to work around
# this by overriding the environment (but there is still a problem if the
# git config key "diff.external" is used).
env = os.environ.copy()
if 'GIT_EXTERNAL_DIFF' in env: del env['GIT_EXTERNAL_DIFF']
return RunShell(["git", "diff", "--no-ext-diff", "--full-index",
"--ignore-submodules", "-M"] + extra_args, env=env)
def GetUnknownFiles(self):
status = RunShell(["git", "ls-files", "--exclude-standard", "--others"],
silent_ok=True)
return status.splitlines()
def GetFileContent(self, file_hash, is_binary):
"""Returns the content of a file identified by its git hash."""
data, retcode = RunShellWithReturnCode(["git", "show", file_hash],
universal_newlines=not is_binary)
if retcode:
ErrorExit("Got error status from 'git show %s'" % file_hash)
return data
def GetBaseFile(self, filename):
hash_before, hash_after = self.hashes.get(filename, (None,None))
base_content = None
new_content = None
status = None
if filename in self.renames:
status = "A +" # Match svn attribute name for renames.
if filename not in self.hashes:
# If a rename doesn't change the content, we never get a hash.
base_content = RunShell(
["git", "show", "HEAD:" + filename], silent_ok=True)
elif not hash_before:
status = "A"
base_content = ""
elif not hash_after:
status = "D"
else:
status = "M"
is_binary = self.IsBinaryData(base_content)
is_image = self.IsImage(filename)
# Grab the before/after content if we need it.
# We should include file contents if it's text or it's an image.
if not is_binary or is_image:
# Grab the base content if we don't have it already.
if base_content is None and hash_before:
base_content = self.GetFileContent(hash_before, is_binary)
# Only include the "after" file if it's an image; otherwise it
# it is reconstructed from the diff.
if is_image and hash_after:
new_content = self.GetFileContent(hash_after, is_binary)
return (base_content, new_content, is_binary, status)
class CVSVCS(VersionControlSystem):
"""Implementation of the VersionControlSystem interface for CVS."""
def __init__(self, options):
super(CVSVCS, self).__init__(options)
def GetGUID(self):
"""For now we don't know how to get repository ID for CVS"""
return
def GetOriginalContent_(self, filename):
RunShell(["cvs", "up", filename], silent_ok=True)
    # TODO: detect the file content encoding
content = open(filename).read()
return content.replace("\r\n", "\n")
def GetBaseFile(self, filename):
base_content = None
new_content = None
status = "A"
output, retcode = RunShellWithReturnCode(["cvs", "status", filename])
if retcode:
ErrorExit("Got error status from 'cvs status %s'" % filename)
if output.find("Status: Locally Modified") != -1:
status = "M"
temp_filename = "%s.tmp123" % filename
os.rename(filename, temp_filename)
base_content = self.GetOriginalContent_(filename)
os.rename(temp_filename, filename)
elif output.find("Status: Locally Added"):
status = "A"
base_content = ""
elif output.find("Status: Needs Checkout"):
status = "D"
base_content = self.GetOriginalContent_(filename)
return (base_content, new_content, self.IsBinaryData(base_content), status)
def GenerateDiff(self, extra_args):
cmd = ["cvs", "diff", "-u", "-N"]
if self.options.revision:
cmd += ["-r", self.options.revision]
cmd.extend(extra_args)
data, retcode = RunShellWithReturnCode(cmd)
count = 0
if retcode in [0, 1]:
for line in data.splitlines():
if line.startswith("Index:"):
count += 1
logging.info(line)
if not count:
ErrorExit("No valid patches found in output from cvs diff")
return data
def GetUnknownFiles(self):
data, retcode = RunShellWithReturnCode(["cvs", "diff"])
if retcode not in [0, 1]:
ErrorExit("Got error status from 'cvs diff':\n%s" % (data,))
unknown_files = []
for line in data.split("\n"):
if line and line[0] == "?":
unknown_files.append(line)
return unknown_files
class MercurialVCS(VersionControlSystem):
"""Implementation of the VersionControlSystem interface for Mercurial."""
def __init__(self, options, repo_dir):
super(MercurialVCS, self).__init__(options)
# Absolute path to repository (we can be in a subdir)
self.repo_dir = os.path.normpath(repo_dir)
# Compute the subdir
cwd = os.path.normpath(os.getcwd())
assert cwd.startswith(self.repo_dir)
self.subdir = cwd[len(self.repo_dir):].lstrip(r"\/")
if self.options.revision:
self.base_rev = self.options.revision
else:
self.base_rev = RunShell(["hg", "parent", "-q"]).split(':')[1].strip()
def GetGUID(self):
# See chapter "Uniquely identifying a repository"
# http://hgbook.red-bean.com/read/customizing-the-output-of-mercurial.html
info = RunShell("hg log -r0 --template {node}".split())
return info.strip()
def _GetRelPath(self, filename):
"""Get relative path of a file according to the current directory,
given its logical path in the repo."""
absname = os.path.join(self.repo_dir, filename)
return os.path.relpath(absname)
def GenerateDiff(self, extra_args):
cmd = ["hg", "diff", "--git", "-r", self.base_rev] + extra_args
data = RunShell(cmd, silent_ok=True)
svndiff = []
filecount = 0
for line in data.splitlines():
m = re.match("diff --git a/(\S+) b/(\S+)", line)
if m:
# Modify line to make it look like as it comes from svn diff.
# With this modification no changes on the server side are required
# to make upload.py work with Mercurial repos.
# NOTE: for proper handling of moved/copied files, we have to use
# the second filename.
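        # Example: "diff --git a/foo.py b/foo.py" becomes an "Index: foo.py"
        # line followed by a 67-character "=" separator, as svn diff emits.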
filename = m.group(2)
svndiff.append("Index: %s" % filename)
svndiff.append("=" * 67)
filecount += 1
logging.info(line)
else:
svndiff.append(line)
if not filecount:
ErrorExit("No valid patches found in output from hg diff")
return "\n".join(svndiff) + "\n"
def GetUnknownFiles(self):
"""Return a list of files unknown to the VCS."""
args = []
status = RunShell(["hg", "status", "--rev", self.base_rev, "-u", "."],
silent_ok=True)
unknown_files = []
for line in status.splitlines():
st, fn = line.split(" ", 1)
if st == "?":
unknown_files.append(fn)
return unknown_files
def GetBaseFile(self, filename):
# "hg status" and "hg cat" both take a path relative to the current subdir,
# but "hg diff" has given us the path relative to the repo root.
base_content = ""
new_content = None
is_binary = False
oldrelpath = relpath = self._GetRelPath(filename)
# "hg status -C" returns two lines for moved/copied files, one otherwise
out = RunShell(["hg", "status", "-C", "--rev", self.base_rev, relpath])
out = out.splitlines()
# HACK: strip error message about missing file/directory if it isn't in
# the working copy
if out[0].startswith('%s: ' % relpath):
out = out[1:]
status, _ = out[0].split(' ', 1)
if len(out) > 1 and status == "A":
# Moved/copied => considered as modified, use old filename to
# retrieve base contents
oldrelpath = out[1].strip()
status = "M"
if ":" in self.base_rev:
base_rev = self.base_rev.split(":", 1)[0]
else:
base_rev = self.base_rev
if status != "A":
base_content = RunShell(["hg", "cat", "-r", base_rev, oldrelpath],
silent_ok=True)
is_binary = self.IsBinaryData(base_content)
if status != "R":
new_content = open(relpath, "rb").read()
is_binary = is_binary or self.IsBinaryData(new_content)
if is_binary and base_content:
# Fetch again without converting newlines
base_content = RunShell(["hg", "cat", "-r", base_rev, oldrelpath],
silent_ok=True, universal_newlines=False)
if not is_binary or not self.IsImage(relpath):
new_content = None
return base_content, new_content, is_binary, status
class PerforceVCS(VersionControlSystem):
"""Implementation of the VersionControlSystem interface for Perforce."""
def __init__(self, options):
def ConfirmLogin():
# Make sure we have a valid perforce session
while True:
data, retcode = self.RunPerforceCommandWithReturnCode(
["login", "-s"], marshal_output=True)
if not data:
ErrorExit("Error checking perforce login")
if not retcode and (not "code" in data or data["code"] != "error"):
break
print "Enter perforce password: "
self.RunPerforceCommandWithReturnCode(["login"])
super(PerforceVCS, self).__init__(options)
self.p4_changelist = options.p4_changelist
if not self.p4_changelist:
ErrorExit("A changelist id is required")
    if options.revision:
ErrorExit("--rev is not supported for perforce")
self.p4_port = options.p4_port
self.p4_client = options.p4_client
self.p4_user = options.p4_user
ConfirmLogin()
if not options.title:
description = self.RunPerforceCommand(["describe", self.p4_changelist],
marshal_output=True)
if description and "desc" in description:
# Rietveld doesn't support multi-line descriptions
raw_title = description["desc"].strip()
lines = raw_title.splitlines()
        if lines:
options.title = lines[0]
def GetGUID(self):
"""For now we don't know how to get repository ID for Perforce"""
return
def RunPerforceCommandWithReturnCode(self, extra_args, marshal_output=False,
universal_newlines=True):
args = ["p4"]
if marshal_output:
# -G makes perforce format its output as marshalled python objects
args.extend(["-G"])
if self.p4_port:
args.extend(["-p", self.p4_port])
if self.p4_client:
args.extend(["-c", self.p4_client])
if self.p4_user:
args.extend(["-u", self.p4_user])
args.extend(extra_args)
data, retcode = RunShellWithReturnCode(
args, print_output=False, universal_newlines=universal_newlines)
if marshal_output and data:
data = marshal.loads(data)
return data, retcode
def RunPerforceCommand(self, extra_args, marshal_output=False,
universal_newlines=True):
# This might be a good place to cache call results, since things like
# describe or fstat might get called repeatedly.
data, retcode = self.RunPerforceCommandWithReturnCode(
extra_args, marshal_output, universal_newlines)
if retcode:
ErrorExit("Got error status from %s:\n%s" % (extra_args, data))
return data
  def GetFileProperties(self, property_key_prefix="", command="describe"):
description = self.RunPerforceCommand(["describe", self.p4_changelist],
marshal_output=True)
changed_files = {}
file_index = 0
# Try depotFile0, depotFile1, ... until we don't find a match
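    # (p4 -G describe returns parallel keys, e.g. depotFile0/action0/type0,
    # depotFile1/action1/type1, and so on.)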
while True:
file_key = "depotFile%d" % file_index
if file_key in description:
filename = description[file_key]
change_type = description[property_key_prefix + str(file_index)]
changed_files[filename] = change_type
file_index += 1
else:
break
return changed_files
def GetChangedFiles(self):
return self.GetFileProperties("action")
def GetUnknownFiles(self):
# Perforce doesn't detect new files, they have to be explicitly added
return []
def IsBaseBinary(self, filename):
base_filename = self.GetBaseFilename(filename)
return self.IsBinaryHelper(base_filename, "files")
def IsPendingBinary(self, filename):
return self.IsBinaryHelper(filename, "describe")
def IsBinaryHelper(self, filename, command):
file_types = self.GetFileProperties("type", command)
    if filename not in file_types:
ErrorExit("Trying to check binary status of unknown file %s." % filename)
# This treats symlinks, macintosh resource files, temporary objects, and
# unicode as binary. See the Perforce docs for more details:
# http://www.perforce.com/perforce/doc.current/manuals/cmdref/o.ftypes.html
return not file_types[filename].endswith("text")
def GetFileContent(self, filename, revision, is_binary):
file_arg = filename
if revision:
file_arg += "#" + revision
# -q suppresses the initial line that displays the filename and revision
return self.RunPerforceCommand(["print", "-q", file_arg],
universal_newlines=not is_binary)
def GetBaseFilename(self, filename):
actionsWithDifferentBases = [
"move/add", # p4 move
"branch", # p4 integrate (to a new file), similar to hg "add"
"add", # p4 integrate (to a new file), after modifying the new file
]
# We only see a different base for "add" if this is a downgraded branch
# after a file was branched (integrated), then edited.
if self.GetAction(filename) in actionsWithDifferentBases:
# -Or shows information about pending integrations/moves
fstat_result = self.RunPerforceCommand(["fstat", "-Or", filename],
marshal_output=True)
baseFileKey = "resolveFromFile0" # I think it's safe to use only file0
if baseFileKey in fstat_result:
return fstat_result[baseFileKey]
return filename
def GetBaseRevision(self, filename):
base_filename = self.GetBaseFilename(filename)
have_result = self.RunPerforceCommand(["have", base_filename],
marshal_output=True)
if "haveRev" in have_result:
return have_result["haveRev"]
def GetLocalFilename(self, filename):
where = self.RunPerforceCommand(["where", filename], marshal_output=True)
if "path" in where:
return where["path"]
def GenerateDiff(self, args):
class DiffData:
def __init__(self, perforceVCS, filename, action):
self.perforceVCS = perforceVCS
self.filename = filename
self.action = action
self.base_filename = perforceVCS.GetBaseFilename(filename)
self.file_body = None
self.base_rev = None
self.prefix = None
self.working_copy = True
self.change_summary = None
def GenerateDiffHeader(diffData):
header = []
header.append("Index: %s" % diffData.filename)
header.append("=" * 67)
if diffData.base_filename != diffData.filename:
if diffData.action.startswith("move"):
verb = "rename"
else:
verb = "copy"
header.append("%s from %s" % (verb, diffData.base_filename))
header.append("%s to %s" % (verb, diffData.filename))
suffix = "\t(revision %s)" % diffData.base_rev
header.append("--- " + diffData.base_filename + suffix)
if diffData.working_copy:
suffix = "\t(working copy)"
header.append("+++ " + diffData.filename + suffix)
if diffData.change_summary:
header.append(diffData.change_summary)
return header
def GenerateMergeDiff(diffData, args):
# -du generates a unified diff, which is nearly svn format
diffData.file_body = self.RunPerforceCommand(
["diff", "-du", diffData.filename] + args)
diffData.base_rev = self.GetBaseRevision(diffData.filename)
diffData.prefix = ""
      # We have to strip p4's header lines (everything before the first "@@"
      # hunk, including the +++/--- lines) to match svn's diff format
lines = diffData.file_body.splitlines()
first_good_line = 0
while (first_good_line < len(lines) and
not lines[first_good_line].startswith("@@")):
first_good_line += 1
diffData.file_body = "\n".join(lines[first_good_line:])
return diffData
def GenerateAddDiff(diffData):
fstat = self.RunPerforceCommand(["fstat", diffData.filename],
marshal_output=True)
if "headRev" in fstat:
diffData.base_rev = fstat["headRev"] # Re-adding a deleted file
else:
diffData.base_rev = "0" # Brand new file
diffData.working_copy = False
rel_path = self.GetLocalFilename(diffData.filename)
diffData.file_body = open(rel_path, 'r').read()
# Replicate svn's list of changed lines
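      # e.g. a brand-new 3-line file yields the hunk header "@@ -0,0 +1,3 @@"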
line_count = len(diffData.file_body.splitlines())
diffData.change_summary = "@@ -0,0 +1"
if line_count > 1:
diffData.change_summary += ",%d" % line_count
diffData.change_summary += " @@"
diffData.prefix = "+"
return diffData
def GenerateDeleteDiff(diffData):
diffData.base_rev = self.GetBaseRevision(diffData.filename)
is_base_binary = self.IsBaseBinary(diffData.filename)
# For deletes, base_filename == filename
diffData.file_body = self.GetFileContent(diffData.base_filename,
None,
is_base_binary)
# Replicate svn's list of changed lines
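      # e.g. deleting a 3-line file yields the hunk header "@@ -1,3 +0,0 @@"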
line_count = len(diffData.file_body.splitlines())
diffData.change_summary = "@@ -1"
if line_count > 1:
diffData.change_summary += ",%d" % line_count
diffData.change_summary += " +0,0 @@"
diffData.prefix = "-"
return diffData
changed_files = self.GetChangedFiles()
svndiff = []
filecount = 0
for (filename, action) in changed_files.items():
svn_status = self.PerforceActionToSvnStatus(action)
if svn_status == "SKIP":
continue
diffData = DiffData(self, filename, action)
# Is it possible to diff a branched file? Stackoverflow says no:
# http://stackoverflow.com/questions/1771314/in-perforce-command-line-how-to-diff-a-file-reopened-for-add
if svn_status == "M":
diffData = GenerateMergeDiff(diffData, args)
elif svn_status == "A":
diffData = GenerateAddDiff(diffData)
elif svn_status == "D":
diffData = GenerateDeleteDiff(diffData)
else:
ErrorExit("Unknown file action %s (svn action %s)." % \
(action, svn_status))
svndiff += GenerateDiffHeader(diffData)
for line in diffData.file_body.splitlines():
svndiff.append(diffData.prefix + line)
filecount += 1
if not filecount:
ErrorExit("No valid patches found in output from p4 diff")
return "\n".join(svndiff) + "\n"
def PerforceActionToSvnStatus(self, status):
# Mirroring the list at http://permalink.gmane.org/gmane.comp.version-control.mercurial.devel/28717
# Is there something more official?
return {
"add" : "A",
"branch" : "A",
"delete" : "D",
"edit" : "M", # Also includes changing file types.
"integrate" : "M",
"move/add" : "M",
"move/delete": "SKIP",
"purge" : "D", # How does a file's status become "purge"?
}[status]
def GetAction(self, filename):
changed_files = self.GetChangedFiles()
    if filename not in changed_files:
ErrorExit("Trying to get base version of unknown file %s." % filename)
return changed_files[filename]
def GetBaseFile(self, filename):
base_filename = self.GetBaseFilename(filename)
base_content = ""
new_content = None
status = self.PerforceActionToSvnStatus(self.GetAction(filename))
if status != "A":
revision = self.GetBaseRevision(base_filename)
if not revision:
ErrorExit("Couldn't find base revision for file %s" % filename)
is_base_binary = self.IsBaseBinary(base_filename)
base_content = self.GetFileContent(base_filename,
revision,
is_base_binary)
is_binary = self.IsPendingBinary(filename)
if status != "D" and status != "SKIP":
relpath = self.GetLocalFilename(filename)
if is_binary and self.IsImage(relpath):
new_content = open(relpath, "rb").read()
return base_content, new_content, is_binary, status
# NOTE: The SplitPatch function is duplicated in engine.py, keep them in sync.
def SplitPatch(data):
"""Splits a patch into separate pieces for each file.
Args:
data: A string containing the output of svn diff.
Returns:
A list of 2-tuple (filename, text) where text is the svn diff output
pertaining to filename.
"""
patches = []
filename = None
diff = []
for line in data.splitlines(True):
new_filename = None
if line.startswith('Index:'):
unused, new_filename = line.split(':', 1)
new_filename = new_filename.strip()
elif line.startswith('Property changes on:'):
unused, temp_filename = line.split(':', 1)
# When a file is modified, paths use '/' between directories, however
# when a property is modified '\' is used on Windows. Make them the same
# otherwise the file shows up twice.
temp_filename = temp_filename.strip().replace('\\', '/')
if temp_filename != filename:
# File has property changes but no modifications, create a new diff.
new_filename = temp_filename
if new_filename:
if filename and diff:
patches.append((filename, ''.join(diff)))
filename = new_filename
diff = [line]
continue
if diff is not None:
diff.append(line)
if filename and diff:
patches.append((filename, ''.join(diff)))
return patches
def UploadSeparatePatches(issue, rpc_server, patchset, data, options):
"""Uploads a separate patch for each file in the diff output.
Returns a list of [patch_key, filename] for each file.
"""
patches = SplitPatch(data)
rv = []
for patch in patches:
if len(patch[1]) > MAX_UPLOAD_SIZE:
print ("Not uploading the patch for " + patch[0] +
" because the file is too large.")
continue
form_fields = [("filename", patch[0])]
if not options.download_base:
form_fields.append(("content_upload", "1"))
files = [("data", "data.diff", patch[1])]
ctype, body = EncodeMultipartFormData(form_fields, files)
url = "/%d/upload_patch/%d" % (int(issue), int(patchset))
print "Uploading patch for " + patch[0]
response_body = rpc_server.Send(url, body, content_type=ctype)
lines = response_body.splitlines()
if not lines or lines[0] != "OK":
StatusUpdate(" --> %s" % response_body)
sys.exit(1)
rv.append([lines[1], patch[0]])
return rv
def GuessVCSName(options):
"""Helper to guess the version control system.
This examines the current directory, guesses which VersionControlSystem
  we're using, and returns a string indicating which VCS is detected.
Returns:
A pair (vcs, output). vcs is a string indicating which VCS was detected
and is one of VCS_GIT, VCS_MERCURIAL, VCS_SUBVERSION, VCS_PERFORCE,
VCS_CVS, or VCS_UNKNOWN.
Since local perforce repositories can't be easily detected, this method
will only guess VCS_PERFORCE if any perforce options have been specified.
output is a string containing any interesting output from the vcs
detection routine, or None if there is nothing interesting.
"""
for attribute, value in options.__dict__.iteritems():
if attribute.startswith("p4") and value != None:
return (VCS_PERFORCE, None)
def RunDetectCommand(vcs_type, command):
"""Helper to detect VCS by executing command.
Returns:
A pair (vcs, output) or None. Throws exception on error.
"""
try:
out, returncode = RunShellWithReturnCode(command)
if returncode == 0:
return (vcs_type, out.strip())
except OSError, (errcode, message):
if errcode != errno.ENOENT: # command not found code
raise
# Mercurial has a command to get the base directory of a repository
# Try running it, but don't die if we don't have hg installed.
# NOTE: we try Mercurial first as it can sit on top of an SVN working copy.
res = RunDetectCommand(VCS_MERCURIAL, ["hg", "root"])
if res != None:
return res
# Subversion from 1.7 has a single centralized .svn folder
# ( see http://subversion.apache.org/docs/release-notes/1.7.html#wc-ng )
# That's why we use 'svn info' instead of checking for .svn dir
res = RunDetectCommand(VCS_SUBVERSION, ["svn", "info"])
if res != None:
return res
# Git has a command to test if you're in a git tree.
# Try running it, but don't die if we don't have git installed.
res = RunDetectCommand(VCS_GIT, ["git", "rev-parse",
"--is-inside-work-tree"])
if res != None:
return res
  # CVS is detected by running "cvs status" and checking for a zero exit code.
res = RunDetectCommand(VCS_CVS, ["cvs", "status"])
if res != None:
return res
return (VCS_UNKNOWN, None)
def GuessVCS(options):
"""Helper to guess the version control system.
This verifies any user-specified VersionControlSystem (by command line
or environment variable). If the user didn't specify one, this examines
the current directory, guesses which VersionControlSystem we're using,
and returns an instance of the appropriate class. Exit with an error
if we can't figure it out.
Returns:
A VersionControlSystem instance. Exits if the VCS can't be guessed.
"""
vcs = options.vcs
if not vcs:
vcs = os.environ.get("CODEREVIEW_VCS")
if vcs:
v = VCS_ABBREVIATIONS.get(vcs.lower())
if v is None:
ErrorExit("Unknown version control system %r specified." % vcs)
(vcs, extra_output) = (v, None)
else:
(vcs, extra_output) = GuessVCSName(options)
if vcs == VCS_MERCURIAL:
if extra_output is None:
extra_output = RunShell(["hg", "root"]).strip()
return MercurialVCS(options, extra_output)
elif vcs == VCS_SUBVERSION:
return SubversionVCS(options)
elif vcs == VCS_PERFORCE:
return PerforceVCS(options)
elif vcs == VCS_GIT:
return GitVCS(options)
elif vcs == VCS_CVS:
return CVSVCS(options)
ErrorExit(("Could not guess version control system. "
"Are you in a working copy directory?"))
def CheckReviewer(reviewer):
"""Validate a reviewer -- either a nickname or an email addres.
Args:
reviewer: A nickname or an email address.
Calls ErrorExit() if it is an invalid email address.
"""
if "@" not in reviewer:
return # Assume nickname
parts = reviewer.split("@")
if len(parts) > 2:
ErrorExit("Invalid email address: %r" % reviewer)
assert len(parts) == 2
if "." not in parts[1]:
ErrorExit("Invalid email address: %r" % reviewer)
def LoadSubversionAutoProperties():
"""Returns the content of [auto-props] section of Subversion's config file as
a dictionary.
Returns:
    A dictionary whose key-value pairs correspond to the [auto-props]
    section's key-value pairs.
    An empty dictionary is returned when:
    - the config file doesn't exist, or
    - 'enable-auto-props' is not set to a true-like value in [miscellany].
"""
if os.name == 'nt':
subversion_config = os.environ.get("APPDATA") + "\\Subversion\\config"
else:
subversion_config = os.path.expanduser("~/.subversion/config")
if not os.path.exists(subversion_config):
return {}
config = ConfigParser.ConfigParser()
config.read(subversion_config)
if (config.has_section("miscellany") and
config.has_option("miscellany", "enable-auto-props") and
config.getboolean("miscellany", "enable-auto-props") and
config.has_section("auto-props")):
props = {}
for file_pattern in config.options("auto-props"):
props[file_pattern] = ParseSubversionPropertyValues(
config.get("auto-props", file_pattern))
return props
else:
return {}
def ParseSubversionPropertyValues(props):
"""Parse the given property value which comes from [auto-props] section and
returns a list whose element is a (svn_prop_key, svn_prop_value) pair.
See the following doctest for example.
>>> ParseSubversionPropertyValues('svn:eol-style=LF')
[('svn:eol-style', 'LF')]
>>> ParseSubversionPropertyValues('svn:mime-type=image/jpeg')
[('svn:mime-type', 'image/jpeg')]
>>> ParseSubversionPropertyValues('svn:eol-style=LF;svn:executable')
[('svn:eol-style', 'LF'), ('svn:executable', '*')]
"""
key_value_pairs = []
for prop in props.split(";"):
key_value = prop.split("=")
assert len(key_value) <= 2
if len(key_value) == 1:
      # If no value is given, use '*' per Subversion's convention.
key_value_pairs.append((key_value[0], "*"))
else:
key_value_pairs.append((key_value[0], key_value[1]))
return key_value_pairs
def GetSubversionPropertyChanges(filename):
"""Return a Subversion's 'Property changes on ...' string, which is used in
the patch file.
Args:
filename: filename whose property might be set by [auto-props] config.
Returns:
    A string like 'Property changes on |filename| ...' if the given |filename|
    matches any entry in the [auto-props] section; None otherwise.
"""
global svn_auto_props_map
if svn_auto_props_map is None:
svn_auto_props_map = LoadSubversionAutoProperties()
all_props = []
for file_pattern, props in svn_auto_props_map.items():
if fnmatch.fnmatch(filename, file_pattern):
all_props.extend(props)
if all_props:
return FormatSubversionPropertyChanges(filename, all_props)
return None
def FormatSubversionPropertyChanges(filename, props):
"""Returns Subversion's 'Property changes on ...' strings using given filename
and properties.
Args:
filename: filename
props: A list whose element is a (svn_prop_key, svn_prop_value) pair.
Returns:
A string which can be used in the patch file for Subversion.
See the following doctest for example.
>>> print FormatSubversionPropertyChanges('foo.cc', [('svn:eol-style', 'LF')])
Property changes on: foo.cc
___________________________________________________________________
Added: svn:eol-style
+ LF
<BLANKLINE>
"""
prop_changes_lines = [
"Property changes on: %s" % filename,
"___________________________________________________________________"]
for key, value in props:
prop_changes_lines.append("Added: " + key)
prop_changes_lines.append(" + " + value)
return "\n".join(prop_changes_lines) + "\n"
def RealMain(argv, data=None):
"""The real main function.
Args:
argv: Command line arguments.
data: Diff contents. If None (default) the diff is generated by
the VersionControlSystem implementation returned by GuessVCS().
Returns:
A 2-tuple (issue id, patchset id).
The patchset id is None if the base files are not uploaded by this
script (applies only to SVN checkouts).
"""
options, args = parser.parse_args(argv[1:])
if options.help:
if options.verbose < 2:
# hide Perforce options
parser.epilog = "Use '--help -v' to show additional Perforce options."
parser.option_groups.remove(parser.get_option_group('--p4_port'))
parser.print_help()
sys.exit(0)
global verbosity
verbosity = options.verbose
if verbosity >= 3:
logging.getLogger().setLevel(logging.DEBUG)
elif verbosity >= 2:
logging.getLogger().setLevel(logging.INFO)
vcs = GuessVCS(options)
base = options.base_url
if isinstance(vcs, SubversionVCS):
# Guessing the base field is only supported for Subversion.
# Note: Fetching base files may become deprecated in future releases.
guessed_base = vcs.GuessBase(options.download_base)
if base:
if guessed_base and base != guessed_base:
print "Using base URL \"%s\" from --base_url instead of \"%s\"" % \
(base, guessed_base)
else:
base = guessed_base
if not base and options.download_base:
options.download_base = True
logging.info("Enabled upload of base file")
if not options.assume_yes:
vcs.CheckForUnknownFiles()
if data is None:
data = vcs.GenerateDiff(args)
data = vcs.PostProcessDiff(data)
if options.print_diffs:
print "Rietveld diff start:*****"
print data
print "Rietveld diff end:*****"
files = vcs.GetBaseFiles(data)
if verbosity >= 1:
print "Upload server:", options.server, "(change with -s/--server)"
if options.issue:
prompt = "Title describing this patch set: "
else:
prompt = "New issue subject: "
title = options.title or raw_input(prompt).strip()
if not title:
ErrorExit("A non-empty title is required")
rpc_server = GetRpcServer(options.server,
options.email,
options.host,
options.save_cookies,
options.account_type)
form_fields = [("subject", title)]
repo_guid = vcs.GetGUID()
if repo_guid:
form_fields.append(("repo_guid", repo_guid))
if base:
b = urlparse.urlparse(base)
username, netloc = urllib.splituser(b.netloc)
if username:
logging.info("Removed username from base URL")
base = urlparse.urlunparse((b.scheme, netloc, b.path, b.params,
b.query, b.fragment))
form_fields.append(("base", base))
if options.issue:
form_fields.append(("issue", str(options.issue)))
if options.email:
form_fields.append(("user", options.email))
if options.reviewers:
for reviewer in options.reviewers.split(','):
CheckReviewer(reviewer)
form_fields.append(("reviewers", options.reviewers))
if options.cc:
for cc in options.cc.split(','):
CheckReviewer(cc)
form_fields.append(("cc", options.cc))
message = options.message
if options.file:
if options.message:
ErrorExit("Can't specify both message and message file options")
file = open(options.file, 'r')
message = file.read()
file.close()
if message:
if not options.issue:
form_fields.append(("description", message))
else:
# TODO: [ ] figure out how to send a comment from upload.py
pass
  # Send a hash of all the base files so the server can determine if a copy
# already exists in an earlier patchset.
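  # The resulting format is "hash:filename|hash:filename|...".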
base_hashes = ""
for file, info in files.iteritems():
    if info[0] is not None:
checksum = md5(info[0]).hexdigest()
if base_hashes:
base_hashes += "|"
base_hashes += checksum + ":" + file
form_fields.append(("base_hashes", base_hashes))
if options.private:
if options.issue:
print "Warning: Private flag ignored when updating an existing issue."
else:
form_fields.append(("private", "1"))
if options.send_patch:
options.send_mail = True
if not options.download_base:
form_fields.append(("content_upload", "1"))
if len(data) > MAX_UPLOAD_SIZE:
print "Patch is large, so uploading file patches separately."
uploaded_diff_file = []
form_fields.append(("separate_patches", "1"))
else:
uploaded_diff_file = [("data", "data.diff", data)]
ctype, body = EncodeMultipartFormData(form_fields, uploaded_diff_file)
response_body = rpc_server.Send("/upload", body, content_type=ctype)
patchset = None
if not options.download_base or not uploaded_diff_file:
lines = response_body.splitlines()
if len(lines) >= 2:
msg = lines[0]
patchset = lines[1].strip()
patches = [x.split(" ", 1) for x in lines[2:]]
else:
msg = response_body
else:
msg = response_body
StatusUpdate(msg)
if not response_body.startswith("Issue created.") and \
not response_body.startswith("Issue updated."):
sys.exit(0)
issue = msg[msg.rfind("/")+1:]
if not uploaded_diff_file:
result = UploadSeparatePatches(issue, rpc_server, patchset, data, options)
if not options.download_base:
patches = result
if not options.download_base:
vcs.UploadBaseFiles(issue, rpc_server, patches, patchset, options, files)
payload = {} # payload for final request
if options.send_mail:
payload["send_mail"] = "yes"
if options.send_patch:
payload["attach_patch"] = "yes"
payload = urllib.urlencode(payload)
rpc_server.Send("/" + issue + "/upload_complete/" + (patchset or ""),
payload=payload)
return issue, patchset
def main():
try:
logging.basicConfig(format=("%(asctime).19s %(levelname)s %(filename)s:"
"%(lineno)s %(message)s "))
os.environ['LC_ALL'] = 'C'
RealMain(sys.argv)
except KeyboardInterrupt:
print
StatusUpdate("Interrupted.")
sys.exit(1)
if __name__ == "__main__":
main()
| {
"content_hash": "1023fa8dc81ff19db6759db16883e76f",
"timestamp": "",
"source": "github",
"line_count": 2314,
"max_line_length": 111,
"avg_line_length": 36.9304235090752,
"alnum_prop": 0.624957581005652,
"repo_name": "nareshboddepalli/touchites-test",
"id": "c7478a09053c7eb7866e4438eff94d7032c35773",
"size": "86057",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "upload.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "18844"
},
{
"name": "Diff",
"bytes": "1199"
},
{
"name": "HTML",
"bytes": "77212"
},
{
"name": "JavaScript",
"bytes": "121613"
},
{
"name": "Makefile",
"bytes": "1320"
},
{
"name": "Python",
"bytes": "357390"
}
],
"symlink_target": ""
} |
"""
Copyright 2010-2012 Asidev s.r.l.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from . import AybuCPFunctionalTestsBase
import logging
log = logging.getLogger(__name__)
class SettingHandlerFunctionalTests(AybuCPFunctionalTestsBase):
def json_get(self, url, status):
return self.testapp.get(url, status=status).json
def base_assert(self, data):
self.assertIn('success', data)
self.assertIn('message', data)
self.assertIn('metaData', data)
def success_assert(self, data):
self.base_assert(data)
self.assertEqual(data['success'], True)
def test_list(self):
response = self.json_get(url='/admin/settings/list', status=200)
self.success_assert(response)
self.assertEqual(len(response['dataset']), response['dataset_len'])
# Save the length of dataset when no variables specified:
# it is needed to jump forward/backward in the collection.
# Example scenario: ExtJS paginator widget.
collection_length = response['dataset_len']
response = self.json_get('/admin/settings/list?start=0&limit=1',
status=200)
self.success_assert(response)
self.assertEqual(response['dataset_len'], collection_length)
self.assertEqual(len(response['dataset']), 1)
response = self.json_get('/admin/settings/list?sort=name&dir=desc',
status=200)
self.success_assert(response)
self.assertEqual(response['dataset_len'], collection_length)
self.assertEqual(len(response['dataset']), collection_length)
def test_update(self):
url = '/admin/settings/update?dataset={"name":"debug", "value":"on"}'
response = self.json_get(url=url, status=200)
self.assertIn('success', response)
self.assertIn('errors', response)
self.assertEqual(response['success'], True)
self.assertEqual(response['errors'], {})
url = '/admin/settings/update?name=debug&value=on'
response = self.json_get(url=url, status=200)
self.assertIn('success', response)
self.assertIn('errors', response)
self.assertEqual(response['success'], True)
self.assertEqual(response['errors'], {})
url = '/admin/settings/update'
response = self.json_get(url=url, status=400)
self.assertIn('success', response)
self.assertIn('errors', response)
self.assertEqual(response['success'], False)
self.assertNotEqual(response['errors'], {})
def test_info1(self):
url = '/admin/settings/info?name=debug'
response = self.json_get(url=url, status=200)
self.success_assert(response)
url = '/admin/settings/info'
response = self.json_get(url=url, status=400)
self.base_assert(response)
self.assertEqual(response['success'], False)
def test_info2(self):
url = '/admin/settings/types'
response = self.json_get(url=url, status=200)
self.success_assert(response)
| {
"content_hash": "8482cbeff14eea212af268b492afe5b8",
"timestamp": "",
"source": "github",
"line_count": 98,
"max_line_length": 77,
"avg_line_length": 36.275510204081634,
"alnum_prop": 0.6559774964838256,
"repo_name": "asidev/aybu-controlpanel",
"id": "590f16b8e96d8a2b2bcb8a488a9931bfa565f5e9",
"size": "3601",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_setting.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "149537"
}
],
"symlink_target": ""
} |
import json
from urllib import urlencode
from urlparse import urljoin
from error import build_api_error
from datetime import datetime
from http import http_request
# @todo Sandboxing?
# @todo "Single Request Call"
def require_access_token(func):
def inner_func(self, *args, **kwargs):
if not self.access_token:
raise Exception('`%s` method requires `access_token`' %
func.__name__)
return func(self, *args, **kwargs)
return inner_func
def as_dictionary(func):
def wrapper_func(*args, **kwargs):
retval = func(*args, **kwargs)
if retval.ok:
return json.loads(retval.content)
else:
build_api_error(retval)
return retval
return wrapper_func
def is_json(myjson):
    try:
        json.loads(myjson)
    except ValueError:
        return False
    return True
class Client(object):
"""
    Python Plaid API v2 client https://plaid.io/
See official documentation at: https://plaid.io/v2/docs
"""
ACCOUNT_TYPES = (
('amex', 'American Express',),
('bofa', 'Bank of America',),
('chase', 'Chase',),
('citi', 'Citi',),
('wells', 'Wells Fargo',),
)
CATEGORY_TYPES = [
'plaid',
'foursquare',
'factual',
'amex'
]
endpoints = {
'connect': '/connect',
'connect_step': '/connect/step',
'entity': '/entity',
'categories': '/categories',
'category': '/categories/id/%s',
'balance': '/balance',
'auth': '/auth',
'auth_step': '/auth/step',
'numbers': '/auth/get',
'get_info': '/info/get',
'institutions': '/institutions',
'search_institutions': '/institutions/search',
'upgrade': '/upgrade',
'transactions': '/connect/get'
}
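    # Full request URLs are built with urljoin, e.g. (illustrative host):
    #   urljoin('https://tartan.plaid.com', '/connect')
    #   -> 'https://tartan.plaid.com/connect'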
def __init__(self, client_id, secret, url, access_token=None):
"""
`client_id` str Your Plaid client ID
`secret` str Your Plaid secret
`access_token` str Access token if you already have one
`url` str Url of the plaid endpoint to hit
"""
self.client_id = client_id
self.secret = secret
self.access_token = None
self.url = url
if access_token:
self.set_access_token(access_token)
def set_access_token(self, access_token):
self.access_token = access_token
def get_access_token(self):
return self.access_token
def get_account_types(self):
return self.ACCOUNT_TYPES
@require_access_token
def sandboxed(self):
return self.access_token == 'test'
# Endpoints
@as_dictionary
def connect(self, account_type, login=None, username=None, password=None, pin=None, options=None, patch=False, login_only=False, webhook=None, start_date=None):
"""
Add a bank account user/login to Plaid and receive an access token
        unless a 2nd level of authentication is required, in which case one
        or more MFA (Multi Factor Authentication) questions are returned
`account_type` str The type of bank account you want to sign in
to, must be one of the keys in `ACCOUNT_TYPES`
`username` str The username for the bank account you want to
sign in to
`password` str The password for the bank account you want to
sign in to
`options` dict
`webhook` str URL to hit once the account's transactions
have been processed
`mfa_list` boolean List all available MFA (Multi Factor
Authentication) options
"""
if options is None:
options = {}
url = urljoin(self.url, self.endpoints['connect'])
if not login:
credentials = {
'username': username,
'password': password
}
else:
credentials = login
if pin:
credentials['pin'] = pin
if login_only:
options['login_only'] = login_only
if webhook:
options['webhook'] = webhook
if start_date:
options['start_date'] = start_date
data = {
'client_id': self.client_id,
'secret': self.secret,
'type': account_type,
'credentials': json.dumps(credentials)
}
if options:
data['options'] = json.dumps(options)
if patch:
data['access_token'] = self.access_token
response = http_request(url, 'PATCH', data)
else:
response = http_request(url, 'POST', data)
if response.ok:
json_data = json.loads(response.content)
            if 'access_token' in json_data:
self.access_token = json_data['access_token']
return response
@as_dictionary
def auth(self, account_type, login=None, username=None, password=None, pin=None, options=None, patch=False):
"""
Add a bank account user/login to Plaid and receive an access token
unless a 2nd level of authentication is required, in which case
an MFA (Multi Factor Authentication) question(s) is returned
`account_type` str The type of bank account you want to sign in
to, must be one of the keys in `ACCOUNT_TYPES`
`username` str The username for the bank account you want to
sign in to
`password` str The password for the bank account you want to
sign in to
`options` dict
`webhook` str URL to hit once the account's transactions
have been processed
`mfa_list` boolean List all available MFA (Multi Factor
Authentication) options
"""
if options is None:
options = {}
url = urljoin(self.url, self.endpoints['auth'])
if not login:
credentials = {
'username': username,
'password': password
}
else:
credentials = login
if pin:
credentials['pin'] = pin
data = {
'client_id': self.client_id,
'secret': self.secret,
'type': account_type,
'credentials': json.dumps(credentials)
}
if options:
data['options'] = json.dumps(options)
if patch:
data['access_token'] = self.access_token
response = http_request(url, 'PATCH', data)
else:
response = http_request(url, 'POST', data)
if response.ok:
json_data = json.loads(response.content)
            if 'access_token' in json_data:
self.access_token = json_data['access_token']
return response
@require_access_token
@as_dictionary
def connect_step(self, mfa, account_type=None, options=None, patch=False):
"""
Perform a MFA (Multi Factor Authentication) step, requires
`access_token`
`account_type` str The type of bank account you're performing MFA
on, must match what you used in the `connect`
call
        `mfa`           str     The MFA answer, e.g. an answer to a security
question or code sent to your phone, etc.
`options` dict
`send_method` dict The send method your MFA answer is for,
e.g. {'type': Phone'}, should come from
the list from the `mfa_list` option in
the `connect` call
"""
if options is None:
options = {}
url = urljoin(self.url, self.endpoints['connect_step'])
# Handle dictionary/list MFAs
        if isinstance(mfa, (dict, list)):
mfa = json.dumps(mfa)
data = {
'client_id': self.client_id,
'secret': self.secret,
'access_token': self.access_token,
'type': account_type,
'mfa': mfa
}
if options:
data['options'] = json.dumps(options)
if patch:
return http_request(url, 'PATCH', data)
else:
return http_request(url, 'POST', data)
@require_access_token
@as_dictionary
def auth_step(self, mfa, account_type=None, options=None, patch=False):
"""
Perform a MFA (Multi Factor Authentication) step, requires
`access_token`
`account_type` str The type of bank account you're performing MFA
on, must match what you used in the `connect`
call
        `mfa`           str     The MFA answer, e.g. an answer to a security
question or code sent to your phone, etc.
`options` dict
`send_method` dict The send method your MFA answer is for,
e.g. {'type': Phone'}, should come from
the list from the `mfa_list` option in
the `connect` call
"""
if options is None:
options = {}
url = urljoin(self.url, self.endpoints['auth_step'])
# Handle dictionary/list MFAs
        if isinstance(mfa, (dict, list)):
mfa = json.dumps(mfa)
data = {
'client_id': self.client_id,
'secret': self.secret,
'access_token': self.access_token,
'type': account_type,
'mfa': mfa
}
if options:
data['options'] = json.dumps(options)
if patch:
return http_request(url, 'PATCH', data)
else:
return http_request(url, 'POST', data)
@require_access_token
@as_dictionary
def upgrade(self, upgrade_to, account_type=None):
"""
Upgrade account to another plaid type
"""
url = urljoin(self.url, self.endpoints['upgrade'])
data = {
'client_id': self.client_id,
'secret': self.secret,
'access_token': self.access_token,
'upgrade_to': upgrade_to,
'type': account_type,
}
return http_request(url, 'POST', data)
@require_access_token
@as_dictionary
def delete_connect_user(self):
"""
Delete user from Plaid connect, requires `access_token`
"""
url = urljoin(self.url, self.endpoints['connect'])
data = {
'client_id': self.client_id,
'secret': self.secret,
'access_token': self.access_token
}
return http_request(url, 'DELETE', data)
@require_access_token
@as_dictionary
def delete_auth_user(self):
"""
Delete user from Plaid auth, requires `access_token`
"""
url = urljoin(self.url, self.endpoints['auth'])
data = {
'client_id': self.client_id,
'secret': self.secret,
'access_token': self.access_token
}
return http_request(url, 'DELETE', data)
@require_access_token
def transactions(self, options=None):
"""
Fetch a list of transactions, requires `access_token`
        NOTE: does not use the as_dictionary decorator because sandbox mode
        needs to post-process the response below.
`options` dict
"""
        if options is None:
            # Guard against "'account' in options" raising a TypeError in the
            # sandbox filtering code below.
            options = {}
        url = urljoin(self.url, self.endpoints['transactions'])
data = {
'client_id': self.client_id,
'secret': self.secret,
'access_token': self.access_token,
}
if options and not self.sandboxed():
# Options not supported in sandbox mode - handle manually below
data['options'] = json.dumps(options)
        # Make the request and raise an exception if it fails
transactions_request = http_request(url, 'POST', data)
if transactions_request.ok:
json_response = json.loads(transactions_request.content)
else:
build_api_error(transactions_request)
if self.sandboxed():
# We have to manually apply the specified options
filtered_transactions = []
transactions = json_response['transactions']
# Possible options:
# 1) filter by account_id ('account')
check_account = 'account' in options
# 2) filter by date greater than or equal to a given date ('gte')
check_date = 'gte' in options
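            # e.g. options = {'account': '<account_id>', 'gte': '2014-01-01'}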
correct_account = True
correct_date = True
for transaction in transactions:
if check_account:
correct_account = transaction['_account'] == options['account']
if check_date:
transaction_date = datetime.strptime(transaction['date'], "%Y-%m-%d").date()
threshold_date = datetime.strptime(options['gte'], "%Y-%m-%d").date()
correct_date = transaction_date >= threshold_date
if correct_date and correct_account:
filtered_transactions.append(transaction)
json_response['transactions'] = filtered_transactions
return json_response
@as_dictionary
def entity(self, entity_id, options=None):
"""
Fetch a specific entity's data
`entity_id` str Entity id to fetch
"""
url = urljoin(self.url, self.endpoints['entity'])
return http_request(url, 'GET', {'entity_id': entity_id})
@as_dictionary
def categories(self):
"""
Fetch all categories
"""
url = urljoin(self.url, self.endpoints['categories'])
return http_request(url, 'GET')
@as_dictionary
def category(self, category_id, options=None):
"""
Fetch a specific category
`category_id` str Category id to fetch
"""
url = urljoin(self.url, self.endpoints['category']) % category_id
return http_request(url, 'GET')
@as_dictionary
def categories_by_mapping(self, mapping, category_type, options=None):
"""
Fetch category data by category mapping and data source
`mapping` str The category mapping to explore,
e.g. "Food > Spanish Restaurant",
see all categories here:
https://github.com/plaid/Support/blob/master/categories.md
`category_type` str The category data source, must be a value from
`CATEGORY_TYPES`
`options` dict
`full_match` boolean Whether to try an exact match for
`mapping`. Setting to `False` will
return best match.
"""
if options is None:
options = {}
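        # NOTE: assumes a 'categories_by_mapping' entry exists in
        # self.endpoints; no such key is defined in the dict above, so this
        # lookup would raise a KeyError as written.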
url = urljoin(self.url, self.endpoints['categories_by_mapping'])
data = {
'mapping': mapping,
'type': category_type
}
if options:
data['options'] = json.dumps(options)
return http_request(url, 'GET', data)
@require_access_token
@as_dictionary
def balance(self, options=None):
"""
Fetch the real-time balance of the user's accounts
"""
if options is None:
options = {}
url = urljoin(self.url, self.endpoints['balance'])
data = {
'client_id': self.client_id,
'secret': self.secret,
'access_token': self.access_token
}
if options:
data['options'] = json.dumps(options)
return http_request(url, 'GET', data)
@require_access_token
@as_dictionary
def numbers(self, account_type=None):
"""
Fetch the account/routing numbers for this user
"""
url = urljoin(self.url, self.endpoints['numbers'])
data = {
'client_id': self.client_id,
'secret': self.secret,
'access_token': self.access_token,
'type': account_type
}
return http_request(url, 'POST', data)
@require_access_token
@as_dictionary
def get_info(self, account_type=None):
"""
Fetch info about account holder
"""
url = urljoin(self.url, self.endpoints['get_info'])
data = {
'client_id': self.client_id,
'secret': self.secret,
'access_token': self.access_token,
'type': account_type
}
return http_request(url, 'POST', data)
@as_dictionary
def institutions(self):
"""
Fetch the available institutions
"""
url = urljoin(self.url, self.endpoints['institutions'])
return http_request(url, 'GET')
@as_dictionary
def institution(self, institution_id):
"""
Get institution by id
"""
url = urljoin(self.url, self.endpoints['institutions'] + '/' + institution_id)
return http_request(url, 'GET')
@as_dictionary
def search_institutions(self, query=None, product=None, institution_id=None):
"""
Search the available institutions (incl. intuit)
"""
url = urljoin(self.url, self.endpoints['search_institutions'])
data = {}
if query:
data['q'] = str(query)
if product:
data['p'] = str(product)
if institution_id:
data['id'] = str(institution_id)
params = urlencode(data)
url = url + '?' + params
return http_request(url, 'GET')
| {
"content_hash": "8512b99fef4e8f1437d586e3291a64fe",
"timestamp": "",
"source": "github",
"line_count": 564,
"max_line_length": 164,
"avg_line_length": 32.4113475177305,
"alnum_prop": 0.5299234135667396,
"repo_name": "LawnmowerIO/plaid-python",
"id": "20128cf75a950aa939c4049d6fec661c266abc9f",
"size": "18280",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "plaid/client.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "44"
},
{
"name": "Python",
"bytes": "27786"
}
],
"symlink_target": ""
} |
import unittest
from datetime import datetime
from unittest import mock
from urllib.parse import parse_qs
from bs4 import BeautifulSoup
from parameterized import parameterized
from airflow.www import utils
from tests.test_utils.config import conf_vars
class TestUtils(unittest.TestCase):
def test_empty_variable_should_not_be_hidden(self):
self.assertFalse(utils.should_hide_value_for_key(""))
self.assertFalse(utils.should_hide_value_for_key(None))
def test_normal_variable_should_not_be_hidden(self):
self.assertFalse(utils.should_hide_value_for_key("key"))
def test_sensitive_variable_should_be_hidden(self):
self.assertTrue(utils.should_hide_value_for_key("google_api_key"))
def test_sensitive_variable_should_be_hidden_ic(self):
self.assertTrue(utils.should_hide_value_for_key("GOOGLE_API_KEY"))
def check_generate_pages_html(self, current_page, total_pages,
window=7, check_middle=False):
extra_links = 4 # first, prev, next, last
search = "'>\"/><img src=x onerror=alert(1)>"
html_str = utils.generate_pages(current_page, total_pages,
search=search)
self.assertNotIn(search, html_str,
"The raw search string shouldn't appear in the output")
self.assertIn('search=%27%3E%22%2F%3E%3Cimg+src%3Dx+onerror%3Dalert%281%29%3E',
html_str)
self.assertTrue(
callable(html_str.__html__),
"Should return something that is HTML-escaping aware"
)
dom = BeautifulSoup(html_str, 'html.parser')
self.assertIsNotNone(dom)
ulist = dom.ul
ulist_items = ulist.find_all('li')
self.assertEqual(min(window, total_pages) + extra_links, len(ulist_items))
page_items = ulist_items[2:-2]
mid = int(len(page_items) / 2)
for i, item in enumerate(page_items):
a_node = item.a
href_link = a_node['href']
node_text = a_node.string
if node_text == str(current_page + 1):
if check_middle:
self.assertEqual(mid, i)
self.assertEqual('javascript:void(0)', href_link)
self.assertIn('active', item['class'])
else:
self.assertRegex(href_link, r'^\?', 'Link is page-relative')
query = parse_qs(href_link[1:])
self.assertListEqual(query['page'], [str(int(node_text) - 1)])
self.assertListEqual(query['search'], [search])
def test_generate_pager_current_start(self):
self.check_generate_pages_html(current_page=0,
total_pages=6)
def test_generate_pager_current_middle(self):
self.check_generate_pages_html(current_page=10,
total_pages=20,
check_middle=True)
def test_generate_pager_current_end(self):
self.check_generate_pages_html(current_page=38,
total_pages=39)
def test_params_no_values(self):
"""Should return an empty string if no params are passed"""
self.assertEqual('', utils.get_params())
def test_params_search(self):
self.assertEqual('search=bash_',
utils.get_params(search='bash_'))
@parameterized.expand([
(True, False, ''),
(False, True, ''),
(True, True, 'showPaused=True'),
(False, False, 'showPaused=False'),
(None, True, ''),
(None, False, ''),
])
def test_params_show_paused(self, show_paused, hide_by_default, expected_result):
with conf_vars({('webserver', 'hide_paused_dags_by_default'): str(hide_by_default)}):
self.assertEqual(expected_result,
utils.get_params(showPaused=show_paused))
@parameterized.expand([
(True, False, True),
(False, True, True),
(True, True, False),
(False, False, False),
(None, True, True),
(None, False, True),
])
def test_should_remove_show_paused_from_url_params(self, show_paused,
hide_by_default, expected_result):
with conf_vars({('webserver', 'hide_paused_dags_by_default'): str(hide_by_default)}):
self.assertEqual(
expected_result,
utils._should_remove_show_paused_from_url_params(
show_paused,
hide_by_default
)
)
def test_params_none_and_zero(self):
query_str = utils.get_params(a=0, b=None)
        # The order won't be consistent, but that doesn't affect the behaviour of a browser
pairs = list(sorted(query_str.split('&')))
self.assertListEqual(['a=0', 'b='], pairs)
def test_params_all(self):
query = utils.get_params(showPaused=False, page=3, search='bash_')
self.assertEqual(
{'page': ['3'],
'search': ['bash_'],
'showPaused': ['False']},
parse_qs(query)
)
def test_params_escape(self):
self.assertEqual('search=%27%3E%22%2F%3E%3Cimg+src%3Dx+onerror%3Dalert%281%29%3E',
utils.get_params(search="'>\"/><img src=x onerror=alert(1)>"))
def test_open_maybe_zipped_normal_file(self):
with mock.patch(
'io.open', mock.mock_open(read_data="data")) as mock_file:
utils.open_maybe_zipped('/path/to/some/file.txt')
mock_file.assert_called_once_with('/path/to/some/file.txt', mode='r')
def test_open_maybe_zipped_normal_file_with_zip_in_name(self):
path = '/path/to/fakearchive.zip.other/file.txt'
with mock.patch(
'io.open', mock.mock_open(read_data="data")) as mock_file:
utils.open_maybe_zipped(path)
mock_file.assert_called_once_with(path, mode='r')
@mock.patch("zipfile.is_zipfile")
@mock.patch("zipfile.ZipFile")
def test_open_maybe_zipped_archive(self, mocked_zip_file, mocked_is_zipfile):
mocked_is_zipfile.return_value = True
instance = mocked_zip_file.return_value
instance.open.return_value = mock.mock_open(read_data="data")
utils.open_maybe_zipped('/path/to/archive.zip/deep/path/to/file.txt')
mocked_is_zipfile.assert_called_once_with('/path/to/archive.zip')
mocked_zip_file.assert_called_once_with('/path/to/archive.zip', mode='r')
instance.open.assert_called_once_with('deep/path/to/file.txt')
def test_state_token(self):
        # It shouldn't be possible to set these odd values anymore, but let's
        # ensure they are escaped!
html = str(utils.state_token('<script>alert(1)</script>'))
self.assertIn(
            '&lt;script&gt;alert(1)&lt;/script&gt;',
html,
)
self.assertNotIn(
'<script>alert(1)</script>',
html,
)
def test_task_instance_link(self):
from airflow.www.app import cached_appbuilder
with cached_appbuilder(testing=True).app.test_request_context():
html = str(utils.task_instance_link({
'dag_id': '<a&1>',
'task_id': '<b2>',
'execution_date': datetime.now()
}))
self.assertIn('%3Ca%261%3E', html)
self.assertIn('%3Cb2%3E', html)
self.assertNotIn('<a&1>', html)
self.assertNotIn('<b2>', html)
def test_dag_link(self):
from airflow.www.app import cached_appbuilder
with cached_appbuilder(testing=True).app.test_request_context():
html = str(utils.dag_link({
'dag_id': '<a&1>',
'execution_date': datetime.now()
}))
self.assertIn('%3Ca%261%3E', html)
self.assertNotIn('<a&1>', html)
def test_dag_run_link(self):
from airflow.www.app import cached_appbuilder
with cached_appbuilder(testing=True).app.test_request_context():
html = str(utils.dag_run_link({
'dag_id': '<a&1>',
'run_id': '<b2>',
'execution_date': datetime.now()
}))
self.assertIn('%3Ca%261%3E', html)
self.assertIn('%3Cb2%3E', html)
self.assertNotIn('<a&1>', html)
self.assertNotIn('<b2>', html)
class TestAttrRenderer(unittest.TestCase):
def setUp(self):
self.attr_renderer = utils.get_attr_renderer()
def test_python_callable(self):
def example_callable(unused_self):
print("example")
rendered = self.attr_renderer["python_callable"](example_callable)
self.assertIn('"example"', rendered)
def test_python_callable_none(self):
rendered = self.attr_renderer["python_callable"](None)
self.assertEqual("", rendered)
def test_markdown(self):
markdown = "* foo\n* bar"
rendered = self.attr_renderer["doc_md"](markdown)
self.assertIn("<li>foo</li>", rendered)
self.assertIn("<li>bar</li>", rendered)
def test_markdown_none(self):
rendered = self.attr_renderer["python_callable"](None)
self.assertEqual("", rendered)
| {
"content_hash": "5d26f658089b2fcddd9bc24dddef5a75",
"timestamp": "",
"source": "github",
"line_count": 245,
"max_line_length": 93,
"avg_line_length": 38.19591836734694,
"alnum_prop": 0.5727719598204745,
"repo_name": "Fokko/incubator-airflow",
"id": "3d196b33261d8db66fe3100812f1b5b14a24d654",
"size": "10170",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/www/test_utils.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "13715"
},
{
"name": "Dockerfile",
"bytes": "14170"
},
{
"name": "HTML",
"bytes": "145596"
},
{
"name": "JavaScript",
"bytes": "25233"
},
{
"name": "Mako",
"bytes": "1339"
},
{
"name": "Python",
"bytes": "8787104"
},
{
"name": "Shell",
"bytes": "187296"
},
{
"name": "TSQL",
"bytes": "879"
}
],
"symlink_target": ""
} |
"""The cookie plugins manager object."""
from __future__ import unicode_literals
# pylint: disable=redundant-returns-doc
class CookiePluginsManager(object):
"""Class that implements the cookie plugins manager."""
_plugin_classes = {}
@classmethod
def DeregisterPlugin(cls, plugin_class):
"""Deregisters a plugin class.
The plugin classes are identified based on their lower case name.
Args:
plugin_class (type): the class object of the plugin.
Raises:
KeyError: if plugin class is not set for the corresponding name.
"""
plugin_name = plugin_class.NAME.lower()
if plugin_name not in cls._plugin_classes:
raise KeyError('Plugin class not set for name: {0:s}.'.format(
plugin_class.NAME))
del cls._plugin_classes[plugin_name]
@classmethod
def GetPlugins(cls):
"""Retrieves the cookie plugins.
Returns:
list[type]: list of all cookie plugin objects.
"""
return [plugin_class() for plugin_class in cls._plugin_classes.values()]
@classmethod
def RegisterPlugin(cls, plugin_class):
"""Registers a plugin class.
The plugin classes are identified based on their lower case name.
Args:
plugin_class (type): the class object of the plugin.
Raises:
KeyError: if plugin class is already set for the corresponding name.
"""
plugin_name = plugin_class.NAME.lower()
if plugin_name in cls._plugin_classes:
raise KeyError(('Plugin class already set for name: {0:s}.').format(
plugin_class.NAME))
cls._plugin_classes[plugin_name] = plugin_class
@classmethod
def RegisterPlugins(cls, plugin_classes):
"""Registers plugin classes.
The plugin classes are identified based on their lower case name.
Args:
plugin_classes (list[type]): a list of class objects of the plugins.
Raises:
KeyError: if plugin class is already set for the corresponding name.
"""
for plugin_class in plugin_classes:
cls.RegisterPlugin(plugin_class)
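# --- Hedged usage sketch (not part of the original module) ------------------
# Registration keys on the lower-cased `NAME` attribute, so any class with a
# `NAME` can participate. `_SketchPlugin` is a hypothetical stand-in for a
# real cookie plugin class.
def _SketchRegistrationExample():
    class _SketchPlugin(object):
        NAME = 'SketchCookie'
    CookiePluginsManager.RegisterPlugin(_SketchPlugin)
    plugins = CookiePluginsManager.GetPlugins()  # contains a _SketchPlugin()
    CookiePluginsManager.DeregisterPlugin(_SketchPlugin)
    return plugins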
| {
"content_hash": "945ebf40c38e3ccb31bbcf535236b681",
"timestamp": "",
"source": "github",
"line_count": 73,
"max_line_length": 76,
"avg_line_length": 27.767123287671232,
"alnum_prop": 0.6822890971879625,
"repo_name": "rgayon/plaso",
"id": "bb6465f72fe5b80e6bb6f2c44539d0564ccc093c",
"size": "2051",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "plaso/parsers/cookie_plugins/manager.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "415"
},
{
"name": "Dockerfile",
"bytes": "1047"
},
{
"name": "Makefile",
"bytes": "712"
},
{
"name": "PowerShell",
"bytes": "17771"
},
{
"name": "Python",
"bytes": "4803191"
},
{
"name": "Ruby",
"bytes": "926"
},
{
"name": "Shell",
"bytes": "46225"
}
],
"symlink_target": ""
} |
import collections
class Solution:
def removeDuplicateLetters(self, s: str) -> str:
table = collections.Counter(s)
visited = set()
result = []
for c in s:
table[c] -= 1
if c in visited:
continue
while result and c < result[-1] and table[result[-1]] > 0:
visited.remove(result[-1])
result.pop()
result.append(c)
visited.add(c)
return ''.join(result)
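# --- Hedged usage sketch (not part of the original solution) ----------------
# The stack keeps the result lexicographically smallest: a letter is popped
# while a smaller letter arrives and the popped letter still occurs later on.
def _sketch_remove_duplicate_letters():
    solution = Solution()
    assert solution.removeDuplicateLetters('bcabc') == 'abc'
    assert solution.removeDuplicateLetters('cbacdcbc') == 'acdb'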
| {
"content_hash": "52819c7c674e5a3c23ff9abf6c734621",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 70,
"avg_line_length": 28.842105263157894,
"alnum_prop": 0.45072992700729925,
"repo_name": "jiadaizhao/LeetCode",
"id": "245a0e866a551d4a5ab7b1b20ad551fc6d7f73bb",
"size": "548",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "0301-0400/0316-Remove Duplicate Letters/0316-Remove Duplicate Letters.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "1140864"
},
{
"name": "Java",
"bytes": "34062"
},
{
"name": "Python",
"bytes": "758800"
},
{
"name": "Shell",
"bytes": "698"
},
{
"name": "TSQL",
"bytes": "774"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
import collections
import copy
import datetime
import decimal
import inspect
import re
import uuid
from collections import OrderedDict
from django.conf import settings
from django.core.exceptions import ValidationError as DjangoValidationError
from django.core.exceptions import ObjectDoesNotExist
from django.core.validators import (
EmailValidator, RegexValidator, URLValidator, ip_address_validators
)
from django.forms import FilePathField as DjangoFilePathField
from django.forms import ImageField as DjangoImageField
from django.utils import six, timezone
from django.utils.dateparse import (
parse_date, parse_datetime, parse_duration, parse_time
)
from django.utils.duration import duration_string
from django.utils.encoding import is_protected_type, smart_text
from django.utils.formats import localize_input, sanitize_separators
from django.utils.functional import lazy
from django.utils.ipv6 import clean_ipv6_address
from django.utils.timezone import utc
from django.utils.translation import ugettext_lazy as _
from rest_framework import ISO_8601
from rest_framework.compat import (
InvalidTimeError, MaxLengthValidator, MaxValueValidator,
MinLengthValidator, MinValueValidator, get_remote_field, unicode_repr,
unicode_to_repr, value_from_object
)
from rest_framework.exceptions import ErrorDetail, ValidationError
from rest_framework.settings import api_settings
from rest_framework.utils import html, humanize_datetime, json, representation
class empty:
"""
This class is used to represent no data being provided for a given input
or output value.
It is required because `None` may be a valid input or output value.
"""
pass
if six.PY3:
def is_simple_callable(obj):
"""
True if the object is a callable that takes no arguments.
"""
if not (inspect.isfunction(obj) or inspect.ismethod(obj)):
return False
sig = inspect.signature(obj)
params = sig.parameters.values()
return all(
param.kind == param.VAR_POSITIONAL or
param.kind == param.VAR_KEYWORD or
param.default != param.empty
for param in params
)
else:
def is_simple_callable(obj):
function = inspect.isfunction(obj)
method = inspect.ismethod(obj)
if not (function or method):
return False
if method:
is_unbound = obj.im_self is None
args, _, _, defaults = inspect.getargspec(obj)
len_args = len(args) if function or is_unbound else len(args) - 1
len_defaults = len(defaults) if defaults else 0
return len_args <= len_defaults
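# --- Hedged sketch (not part of the original module) ------------------------
# `is_simple_callable` answers "can this be called with no arguments?":
def _sketch_is_simple_callable():
    assert is_simple_callable(lambda: 42)         # no parameters
    assert not is_simple_callable(lambda x: x)    # required positional arg
    assert is_simple_callable(lambda x=1: x)      # every arg has a default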
def get_attribute(instance, attrs):
"""
Similar to Python's built in `getattr(instance, attr)`,
but takes a list of nested attributes, instead of a single attribute.
Also accepts either attribute lookup on objects or dictionary lookups.
"""
for attr in attrs:
try:
if isinstance(instance, collections.Mapping):
instance = instance[attr]
else:
instance = getattr(instance, attr)
except ObjectDoesNotExist:
return None
if is_simple_callable(instance):
try:
instance = instance()
except (AttributeError, KeyError) as exc:
# If we raised an Attribute or KeyError here it'd get treated
# as an omitted field in `Field.get_attribute()`. Instead we
# raise a ValueError to ensure the exception is not masked.
raise ValueError('Exception raised in callable attribute "{0}"; original exception was: {1}'.format(attr, exc))
return instance
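# --- Hedged sketch (not part of the original module) ------------------------
# `get_attribute` walks nested lookups, mixing mapping access and attribute
# access as needed (shown with plain dicts; `instance` is hypothetical data):
def _sketch_get_attribute():
    instance = {'user': {'profile': {'name': 'alice'}}}
    return get_attribute(instance, ['user', 'profile', 'name'])  # -> 'alice'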
def set_value(dictionary, keys, value):
"""
Similar to Python's built in `dictionary[key] = value`,
but takes a list of nested keys instead of a single key.
set_value({'a': 1}, [], {'b': 2}) -> {'a': 1, 'b': 2}
set_value({'a': 1}, ['x'], 2) -> {'a': 1, 'x': 2}
set_value({'a': 1}, ['x', 'y'], 2) -> {'a': 1, 'x': {'y': 2}}
"""
if not keys:
dictionary.update(value)
return
for key in keys[:-1]:
if key not in dictionary:
dictionary[key] = {}
dictionary = dictionary[key]
dictionary[keys[-1]] = value
def to_choices_dict(choices):
"""
Convert choices into key/value dicts.
to_choices_dict([1]) -> {1: 1}
to_choices_dict([(1, '1st'), (2, '2nd')]) -> {1: '1st', 2: '2nd'}
    to_choices_dict([('Group', ((1, '1st'), 2))]) -> {'Group': {1: '1st', 2: 2}}
"""
# Allow single, paired or grouped choices style:
# choices = [1, 2, 3]
# choices = [(1, 'First'), (2, 'Second'), (3, 'Third')]
# choices = [('Category', ((1, 'First'), (2, 'Second'))), (3, 'Third')]
ret = OrderedDict()
for choice in choices:
if not isinstance(choice, (list, tuple)):
# single choice
ret[choice] = choice
else:
key, value = choice
if isinstance(value, (list, tuple)):
# grouped choices (category, sub choices)
ret[key] = to_choices_dict(value)
else:
# paired choice (key, display value)
ret[key] = value
return ret
def flatten_choices_dict(choices):
"""
Convert a group choices dict into a flat dict of choices.
flatten_choices_dict({1: '1st', 2: '2nd'}) -> {1: '1st', 2: '2nd'}
flatten_choices_dict({'Group': {1: '1st', 2: '2nd'}}) -> {1: '1st', 2: '2nd'}
"""
ret = OrderedDict()
for key, value in choices.items():
if isinstance(value, dict):
# grouped choices (category, sub choices)
for sub_key, sub_value in value.items():
ret[sub_key] = sub_value
else:
# choice (key, display value)
ret[key] = value
return ret
def iter_options(grouped_choices, cutoff=None, cutoff_text=None):
"""
Helper function for options and option groups in templates.
"""
class StartOptionGroup(object):
start_option_group = True
end_option_group = False
def __init__(self, label):
self.label = label
class EndOptionGroup(object):
start_option_group = False
end_option_group = True
class Option(object):
start_option_group = False
end_option_group = False
def __init__(self, value, display_text, disabled=False):
self.value = value
self.display_text = display_text
self.disabled = disabled
count = 0
for key, value in grouped_choices.items():
if cutoff and count >= cutoff:
break
if isinstance(value, dict):
yield StartOptionGroup(label=key)
for sub_key, sub_value in value.items():
if cutoff and count >= cutoff:
break
yield Option(value=sub_key, display_text=sub_value)
count += 1
yield EndOptionGroup()
else:
yield Option(value=key, display_text=value)
count += 1
if cutoff and count >= cutoff and cutoff_text:
cutoff_text = cutoff_text.format(count=cutoff)
yield Option(value='n/a', display_text=cutoff_text, disabled=True)
def get_error_detail(exc_info):
"""
Given a Django ValidationError, return a list of ErrorDetail,
with the `code` populated.
"""
code = getattr(exc_info, 'code', None) or 'invalid'
return [
ErrorDetail(msg, code=code)
for msg in exc_info.messages
]
class CreateOnlyDefault(object):
"""
This class may be used to provide default values that are only used
for create operations, but that do not return any value for update
operations.
"""
def __init__(self, default):
self.default = default
def set_context(self, serializer_field):
self.is_update = serializer_field.parent.instance is not None
if callable(self.default) and hasattr(self.default, 'set_context') and not self.is_update:
self.default.set_context(serializer_field)
def __call__(self):
if self.is_update:
raise SkipField()
if callable(self.default):
return self.default()
return self.default
def __repr__(self):
return unicode_to_repr(
'%s(%s)' % (self.__class__.__name__, unicode_repr(self.default))
)
class CurrentUserDefault(object):
def set_context(self, serializer_field):
self.user = serializer_field.context['request'].user
def __call__(self):
return self.user
def __repr__(self):
return unicode_to_repr('%s()' % self.__class__.__name__)
class SkipField(Exception):
pass
REGEX_TYPE = type(re.compile(''))
NOT_READ_ONLY_WRITE_ONLY = 'May not set both `read_only` and `write_only`'
NOT_READ_ONLY_REQUIRED = 'May not set both `read_only` and `required`'
NOT_REQUIRED_DEFAULT = 'May not set both `required` and `default`'
USE_READONLYFIELD = 'Field(read_only=True) should be ReadOnlyField'
MISSING_ERROR_MESSAGE = (
'ValidationError raised by `{class_name}`, but error key `{key}` does '
'not exist in the `error_messages` dictionary.'
)
class Field(object):
_creation_counter = 0
default_error_messages = {
'required': _('This field is required.'),
'null': _('This field may not be null.')
}
default_validators = []
default_empty_html = empty
initial = None
def __init__(self, read_only=False, write_only=False,
required=None, default=empty, initial=empty, source=None,
label=None, help_text=None, style=None,
error_messages=None, validators=None, allow_null=False):
self._creation_counter = Field._creation_counter
Field._creation_counter += 1
# If `required` is unset, then use `True` unless a default is provided.
if required is None:
required = default is empty and not read_only
# Some combinations of keyword arguments do not make sense.
assert not (read_only and write_only), NOT_READ_ONLY_WRITE_ONLY
assert not (read_only and required), NOT_READ_ONLY_REQUIRED
assert not (required and default is not empty), NOT_REQUIRED_DEFAULT
assert not (read_only and self.__class__ == Field), USE_READONLYFIELD
self.read_only = read_only
self.write_only = write_only
self.required = required
self.default = default
self.source = source
self.initial = self.initial if (initial is empty) else initial
self.label = label
self.help_text = help_text
self.style = {} if style is None else style
self.allow_null = allow_null
if self.default_empty_html is not empty:
if default is not empty:
self.default_empty_html = default
if validators is not None:
self.validators = validators[:]
# These are set up by `.bind()` when the field is added to a serializer.
self.field_name = None
self.parent = None
# Collect default error message from self and parent classes
messages = {}
for cls in reversed(self.__class__.__mro__):
messages.update(getattr(cls, 'default_error_messages', {}))
messages.update(error_messages or {})
self.error_messages = messages
def bind(self, field_name, parent):
"""
Initializes the field name and parent for the field instance.
Called when a field is added to the parent serializer instance.
"""
# In order to enforce a consistent style, we error if a redundant
# 'source' argument has been used. For example:
        #     my_field = serializers.CharField(source='my_field')
assert self.source != field_name, (
"It is redundant to specify `source='%s'` on field '%s' in "
"serializer '%s', because it is the same as the field name. "
"Remove the `source` keyword argument." %
(field_name, self.__class__.__name__, parent.__class__.__name__)
)
self.field_name = field_name
self.parent = parent
# `self.label` should default to being based on the field name.
if self.label is None:
self.label = field_name.replace('_', ' ').capitalize()
# self.source should default to being the same as the field name.
if self.source is None:
self.source = field_name
# self.source_attrs is a list of attributes that need to be looked up
# when serializing the instance, or populating the validated data.
if self.source == '*':
self.source_attrs = []
else:
self.source_attrs = self.source.split('.')
# .validators is a lazily loaded property, that gets its default
# value from `get_validators`.
@property
def validators(self):
if not hasattr(self, '_validators'):
self._validators = self.get_validators()
return self._validators
@validators.setter
def validators(self, validators):
self._validators = validators
def get_validators(self):
return self.default_validators[:]
def get_initial(self):
"""
Return a value to use when the field is being returned as a primitive
value, without any object instance.
"""
if callable(self.initial):
return self.initial()
return self.initial
def get_value(self, dictionary):
"""
Given the *incoming* primitive data, return the value for this field
that should be validated and transformed to a native value.
"""
if html.is_html_input(dictionary):
# HTML forms will represent empty fields as '', and cannot
# represent None or False values directly.
if self.field_name not in dictionary:
if getattr(self.root, 'partial', False):
return empty
return self.default_empty_html
ret = dictionary[self.field_name]
if ret == '' and self.allow_null:
# If the field is blank, and null is a valid value then
# determine if we should use null instead.
return '' if getattr(self, 'allow_blank', False) else None
elif ret == '' and not self.required:
# If the field is blank, and emptiness is valid then
# determine if we should use emptiness instead.
return '' if getattr(self, 'allow_blank', False) else empty
return ret
return dictionary.get(self.field_name, empty)
def get_attribute(self, instance):
"""
Given the *outgoing* object instance, return the primitive value
that should be used for this field.
"""
try:
return get_attribute(instance, self.source_attrs)
except (KeyError, AttributeError) as exc:
if self.default is not empty:
return self.get_default()
if not self.required:
raise SkipField()
msg = (
'Got {exc_type} when attempting to get a value for field '
'`{field}` on serializer `{serializer}`.\nThe serializer '
'field might be named incorrectly and not match '
'any attribute or key on the `{instance}` instance.\n'
'Original exception text was: {exc}.'.format(
exc_type=type(exc).__name__,
field=self.field_name,
serializer=self.parent.__class__.__name__,
instance=instance.__class__.__name__,
exc=exc
)
)
raise type(exc)(msg)
def get_default(self):
"""
Return the default value to use when validating data if no input
is provided for this field.
If a default has not been set for this field then this will simply
raise `SkipField`, indicating that no value should be set in the
validated data for this field.
"""
if self.default is empty or getattr(self.root, 'partial', False):
# No default, or this is a partial update.
raise SkipField()
if callable(self.default):
if hasattr(self.default, 'set_context'):
self.default.set_context(self)
return self.default()
return self.default
def validate_empty_values(self, data):
"""
Validate empty values, and either:
* Raise `ValidationError`, indicating invalid data.
* Raise `SkipField`, indicating that the field should be ignored.
* Return (True, data), indicating an empty value that should be
returned without any further validation being applied.
* Return (False, data), indicating a non-empty value, that should
have validation applied as normal.
"""
if self.read_only:
return (True, self.get_default())
if data is empty:
if getattr(self.root, 'partial', False):
raise SkipField()
if self.required:
self.fail('required')
return (True, self.get_default())
if data is None:
if not self.allow_null:
self.fail('null')
return (True, None)
return (False, data)
def run_validation(self, data=empty):
"""
Validate a simple representation and return the internal value.
The provided data may be `empty` if no representation was included
in the input.
May raise `SkipField` if the field should not be included in the
validated data.
"""
(is_empty_value, data) = self.validate_empty_values(data)
if is_empty_value:
return data
value = self.to_internal_value(data)
self.run_validators(value)
return value
def run_validators(self, value):
"""
Test the given value against all the validators on the field,
and either raise a `ValidationError` or simply return.
"""
errors = []
for validator in self.validators:
if hasattr(validator, 'set_context'):
validator.set_context(self)
try:
validator(value)
except ValidationError as exc:
# If the validation error contains a mapping of fields to
# errors then simply raise it immediately rather than
# attempting to accumulate a list of errors.
if isinstance(exc.detail, dict):
raise
errors.extend(exc.detail)
except DjangoValidationError as exc:
errors.extend(get_error_detail(exc))
if errors:
raise ValidationError(errors)
def to_internal_value(self, data):
"""
Transform the *incoming* primitive data into a native value.
"""
raise NotImplementedError(
'{cls}.to_internal_value() must be implemented.'.format(
cls=self.__class__.__name__
)
)
def to_representation(self, value):
"""
Transform the *outgoing* native value into primitive data.
"""
raise NotImplementedError(
'{cls}.to_representation() must be implemented for field '
'{field_name}. If you do not need to support write operations '
'you probably want to subclass `ReadOnlyField` instead.'.format(
cls=self.__class__.__name__,
field_name=self.field_name,
)
)
def fail(self, key, **kwargs):
"""
A helper method that simply raises a validation error.
"""
try:
msg = self.error_messages[key]
except KeyError:
class_name = self.__class__.__name__
msg = MISSING_ERROR_MESSAGE.format(class_name=class_name, key=key)
raise AssertionError(msg)
message_string = msg.format(**kwargs)
raise ValidationError(message_string, code=key)
@property
def root(self):
"""
Returns the top-level serializer for this field.
"""
root = self
while root.parent is not None:
root = root.parent
return root
@property
def context(self):
"""
Returns the context as passed to the root serializer on initialization.
"""
return getattr(self.root, '_context', {})
def __new__(cls, *args, **kwargs):
"""
When a field is instantiated, we store the arguments that were used,
so that we can present a helpful representation of the object.
"""
instance = super(Field, cls).__new__(cls)
instance._args = args
instance._kwargs = kwargs
return instance
def __deepcopy__(self, memo):
"""
When cloning fields we instantiate using the arguments it was
originally created with, rather than copying the complete state.
"""
# Treat regexes and validators as immutable.
# See https://github.com/encode/django-rest-framework/issues/1954
# and https://github.com/encode/django-rest-framework/pull/4489
args = [
copy.deepcopy(item) if not isinstance(item, REGEX_TYPE) else item
for item in self._args
]
kwargs = {
key: (copy.deepcopy(value) if (key not in ('validators', 'regex')) else value)
for key, value in self._kwargs.items()
}
return self.__class__(*args, **kwargs)
def __repr__(self):
"""
Fields are represented using their initial calling arguments.
This allows us to create descriptive representations for serializer
instances that show all the declared fields on the serializer.
"""
return unicode_to_repr(representation.field_repr(self))
# Boolean types...
class BooleanField(Field):
default_error_messages = {
'invalid': _('"{input}" is not a valid boolean.')
}
default_empty_html = False
initial = False
TRUE_VALUES = {
't', 'T',
'y', 'Y', 'yes', 'YES',
'true', 'True', 'TRUE',
'on', 'On', 'ON',
'1', 1,
True
}
FALSE_VALUES = {
'f', 'F',
'n', 'N', 'no', 'NO',
'false', 'False', 'FALSE',
'off', 'Off', 'OFF',
'0', 0, 0.0,
False
}
def __init__(self, **kwargs):
assert 'allow_null' not in kwargs, '`allow_null` is not a valid option. Use `NullBooleanField` instead.'
super(BooleanField, self).__init__(**kwargs)
def to_internal_value(self, data):
try:
if data in self.TRUE_VALUES:
return True
elif data in self.FALSE_VALUES:
return False
except TypeError: # Input is an unhashable type
pass
self.fail('invalid', input=data)
def to_representation(self, value):
if value in self.TRUE_VALUES:
return True
elif value in self.FALSE_VALUES:
return False
return bool(value)
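# --- Hedged sketch (not part of the original module) ------------------------
# `BooleanField` coerces a fixed vocabulary of truthy/falsy spellings rather
# than relying on Python truthiness:
def _sketch_boolean_coercion():
    field = BooleanField()
    assert field.to_internal_value('yes') is True
    assert field.to_internal_value('Off') is False
    assert field.to_representation(1) is True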
class NullBooleanField(Field):
default_error_messages = {
'invalid': _('"{input}" is not a valid boolean.')
}
initial = None
TRUE_VALUES = {
't', 'T',
'y', 'Y', 'yes', 'YES',
'true', 'True', 'TRUE',
'on', 'On', 'ON',
'1', 1,
True
}
FALSE_VALUES = {
'f', 'F',
'n', 'N', 'no', 'NO',
'false', 'False', 'FALSE',
'off', 'Off', 'OFF',
'0', 0, 0.0,
False
}
NULL_VALUES = {'n', 'N', 'null', 'Null', 'NULL', '', None}
def __init__(self, **kwargs):
assert 'allow_null' not in kwargs, '`allow_null` is not a valid option.'
kwargs['allow_null'] = True
super(NullBooleanField, self).__init__(**kwargs)
def to_internal_value(self, data):
try:
if data in self.TRUE_VALUES:
return True
elif data in self.FALSE_VALUES:
return False
elif data in self.NULL_VALUES:
return None
except TypeError: # Input is an unhashable type
pass
self.fail('invalid', input=data)
def to_representation(self, value):
if value in self.NULL_VALUES:
return None
if value in self.TRUE_VALUES:
return True
elif value in self.FALSE_VALUES:
return False
return bool(value)
# String types...
class CharField(Field):
default_error_messages = {
'invalid': _('Not a valid string.'),
'blank': _('This field may not be blank.'),
'max_length': _('Ensure this field has no more than {max_length} characters.'),
'min_length': _('Ensure this field has at least {min_length} characters.')
}
initial = ''
def __init__(self, **kwargs):
self.allow_blank = kwargs.pop('allow_blank', False)
self.trim_whitespace = kwargs.pop('trim_whitespace', True)
self.max_length = kwargs.pop('max_length', None)
self.min_length = kwargs.pop('min_length', None)
super(CharField, self).__init__(**kwargs)
if self.max_length is not None:
message = lazy(
self.error_messages['max_length'].format,
six.text_type)(max_length=self.max_length)
self.validators.append(
MaxLengthValidator(self.max_length, message=message))
if self.min_length is not None:
message = lazy(
self.error_messages['min_length'].format,
six.text_type)(min_length=self.min_length)
self.validators.append(
MinLengthValidator(self.min_length, message=message))
def run_validation(self, data=empty):
# Test for the empty string here so that it does not get validated,
# and so that subclasses do not need to handle it explicitly
# inside the `to_internal_value()` method.
if data == '' or (self.trim_whitespace and six.text_type(data).strip() == ''):
if not self.allow_blank:
self.fail('blank')
return ''
return super(CharField, self).run_validation(data)
def to_internal_value(self, data):
        # We're lenient with allowing basic numerics to be coerced into strings,
        # but other types should fail. E.g. it's unclear whether booleans
        # should be represented as `true` or `True`, and composites such as
        # lists are likely user error.
if isinstance(data, bool) or not isinstance(data, six.string_types + six.integer_types + (float,)):
self.fail('invalid')
value = six.text_type(data)
return value.strip() if self.trim_whitespace else value
def to_representation(self, value):
return six.text_type(value)
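# --- Hedged sketch (not part of the original module) ------------------------
# `CharField` accepts strings and simple numerics but rejects booleans and
# composites; surrounding whitespace is trimmed by default:
def _sketch_char_coercion():
    field = CharField()
    assert field.to_internal_value(123) == '123'
    assert field.to_internal_value('  spam  ') == 'spam'
    # field.to_internal_value(True) would raise ValidationError ('invalid').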
class EmailField(CharField):
default_error_messages = {
'invalid': _('Enter a valid email address.')
}
def __init__(self, **kwargs):
super(EmailField, self).__init__(**kwargs)
validator = EmailValidator(message=self.error_messages['invalid'])
self.validators.append(validator)
class RegexField(CharField):
default_error_messages = {
'invalid': _('This value does not match the required pattern.')
}
def __init__(self, regex, **kwargs):
super(RegexField, self).__init__(**kwargs)
validator = RegexValidator(regex, message=self.error_messages['invalid'])
self.validators.append(validator)
class SlugField(CharField):
default_error_messages = {
'invalid': _('Enter a valid "slug" consisting of letters, numbers, underscores or hyphens.'),
'invalid_unicode': _('Enter a valid "slug" consisting of Unicode letters, numbers, underscores, or hyphens.')
}
def __init__(self, allow_unicode=False, **kwargs):
super(SlugField, self).__init__(**kwargs)
self.allow_unicode = allow_unicode
if self.allow_unicode:
validator = RegexValidator(re.compile(r'^[-\w]+\Z', re.UNICODE), message=self.error_messages['invalid_unicode'])
else:
validator = RegexValidator(re.compile(r'^[-a-zA-Z0-9_]+$'), message=self.error_messages['invalid'])
self.validators.append(validator)
class URLField(CharField):
default_error_messages = {
'invalid': _('Enter a valid URL.')
}
def __init__(self, **kwargs):
super(URLField, self).__init__(**kwargs)
validator = URLValidator(message=self.error_messages['invalid'])
self.validators.append(validator)
class UUIDField(Field):
valid_formats = ('hex_verbose', 'hex', 'int', 'urn')
default_error_messages = {
'invalid': _('"{value}" is not a valid UUID.'),
}
def __init__(self, **kwargs):
self.uuid_format = kwargs.pop('format', 'hex_verbose')
if self.uuid_format not in self.valid_formats:
raise ValueError(
                'Invalid format for UUID representation. '
'Must be one of "{0}"'.format('", "'.join(self.valid_formats))
)
super(UUIDField, self).__init__(**kwargs)
def to_internal_value(self, data):
if not isinstance(data, uuid.UUID):
try:
if isinstance(data, six.integer_types):
return uuid.UUID(int=data)
elif isinstance(data, six.string_types):
return uuid.UUID(hex=data)
else:
self.fail('invalid', value=data)
            except ValueError:
self.fail('invalid', value=data)
return data
def to_representation(self, value):
if self.uuid_format == 'hex_verbose':
return str(value)
else:
return getattr(value, self.uuid_format)
class IPAddressField(CharField):
"""Support both IPAddressField and GenericIPAddressField"""
default_error_messages = {
'invalid': _('Enter a valid IPv4 or IPv6 address.'),
}
def __init__(self, protocol='both', **kwargs):
self.protocol = protocol.lower()
self.unpack_ipv4 = (self.protocol == 'both')
super(IPAddressField, self).__init__(**kwargs)
validators, error_message = ip_address_validators(protocol, self.unpack_ipv4)
self.validators.extend(validators)
def to_internal_value(self, data):
if not isinstance(data, six.string_types):
self.fail('invalid', value=data)
if ':' in data:
try:
if self.protocol in ('both', 'ipv6'):
return clean_ipv6_address(data, self.unpack_ipv4)
except DjangoValidationError:
self.fail('invalid', value=data)
return super(IPAddressField, self).to_internal_value(data)
# Number types...
class IntegerField(Field):
default_error_messages = {
'invalid': _('A valid integer is required.'),
'max_value': _('Ensure this value is less than or equal to {max_value}.'),
'min_value': _('Ensure this value is greater than or equal to {min_value}.'),
'max_string_length': _('String value too large.')
}
MAX_STRING_LENGTH = 1000 # Guard against malicious string inputs.
re_decimal = re.compile(r'\.0*\s*$') # allow e.g. '1.0' as an int, but not '1.2'
def __init__(self, **kwargs):
self.max_value = kwargs.pop('max_value', None)
self.min_value = kwargs.pop('min_value', None)
super(IntegerField, self).__init__(**kwargs)
if self.max_value is not None:
message = lazy(
self.error_messages['max_value'].format,
six.text_type)(max_value=self.max_value)
self.validators.append(
MaxValueValidator(self.max_value, message=message))
if self.min_value is not None:
message = lazy(
self.error_messages['min_value'].format,
six.text_type)(min_value=self.min_value)
self.validators.append(
MinValueValidator(self.min_value, message=message))
def to_internal_value(self, data):
if isinstance(data, six.text_type) and len(data) > self.MAX_STRING_LENGTH:
self.fail('max_string_length')
try:
data = int(self.re_decimal.sub('', str(data)))
except (ValueError, TypeError):
self.fail('invalid')
return data
def to_representation(self, value):
return int(value)
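# --- Hedged sketch (not part of the original module) ------------------------
# `re_decimal` strips a trailing '.0...' so '1.0' coerces cleanly, while
# '1.2' still fails validation:
def _sketch_integer_coercion():
    field = IntegerField()
    assert field.to_internal_value('1.0') == 1
    assert field.to_internal_value('42') == 42
    # field.to_internal_value('1.2') would raise ValidationError ('invalid').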
class FloatField(Field):
default_error_messages = {
'invalid': _('A valid number is required.'),
'max_value': _('Ensure this value is less than or equal to {max_value}.'),
'min_value': _('Ensure this value is greater than or equal to {min_value}.'),
'max_string_length': _('String value too large.')
}
MAX_STRING_LENGTH = 1000 # Guard against malicious string inputs.
def __init__(self, **kwargs):
self.max_value = kwargs.pop('max_value', None)
self.min_value = kwargs.pop('min_value', None)
super(FloatField, self).__init__(**kwargs)
if self.max_value is not None:
message = lazy(
self.error_messages['max_value'].format,
six.text_type)(max_value=self.max_value)
self.validators.append(
MaxValueValidator(self.max_value, message=message))
if self.min_value is not None:
message = lazy(
self.error_messages['min_value'].format,
six.text_type)(min_value=self.min_value)
self.validators.append(
MinValueValidator(self.min_value, message=message))
def to_internal_value(self, data):
if isinstance(data, six.text_type) and len(data) > self.MAX_STRING_LENGTH:
self.fail('max_string_length')
try:
return float(data)
except (TypeError, ValueError):
self.fail('invalid')
def to_representation(self, value):
return float(value)
class DecimalField(Field):
default_error_messages = {
'invalid': _('A valid number is required.'),
'max_value': _('Ensure this value is less than or equal to {max_value}.'),
'min_value': _('Ensure this value is greater than or equal to {min_value}.'),
'max_digits': _('Ensure that there are no more than {max_digits} digits in total.'),
'max_decimal_places': _('Ensure that there are no more than {max_decimal_places} decimal places.'),
'max_whole_digits': _('Ensure that there are no more than {max_whole_digits} digits before the decimal point.'),
'max_string_length': _('String value too large.')
}
MAX_STRING_LENGTH = 1000 # Guard against malicious string inputs.
def __init__(self, max_digits, decimal_places, coerce_to_string=None, max_value=None, min_value=None,
localize=False, **kwargs):
self.max_digits = max_digits
self.decimal_places = decimal_places
self.localize = localize
if coerce_to_string is not None:
self.coerce_to_string = coerce_to_string
if self.localize:
self.coerce_to_string = True
self.max_value = max_value
self.min_value = min_value
if self.max_digits is not None and self.decimal_places is not None:
self.max_whole_digits = self.max_digits - self.decimal_places
else:
self.max_whole_digits = None
super(DecimalField, self).__init__(**kwargs)
if self.max_value is not None:
message = lazy(
self.error_messages['max_value'].format,
six.text_type)(max_value=self.max_value)
self.validators.append(
MaxValueValidator(self.max_value, message=message))
if self.min_value is not None:
message = lazy(
self.error_messages['min_value'].format,
six.text_type)(min_value=self.min_value)
self.validators.append(
MinValueValidator(self.min_value, message=message))
def to_internal_value(self, data):
"""
Validate that the input is a decimal number and return a Decimal
instance.
"""
data = smart_text(data).strip()
if self.localize:
data = sanitize_separators(data)
if len(data) > self.MAX_STRING_LENGTH:
self.fail('max_string_length')
try:
value = decimal.Decimal(data)
except decimal.DecimalException:
self.fail('invalid')
# Check for NaN. It is the only value that isn't equal to itself,
# so we can use this to identify NaN values.
if value != value:
self.fail('invalid')
# Check for infinity and negative infinity.
if value in (decimal.Decimal('Inf'), decimal.Decimal('-Inf')):
self.fail('invalid')
return self.quantize(self.validate_precision(value))
def validate_precision(self, value):
"""
Ensure that there are no more than max_digits in the number, and no
more than decimal_places digits after the decimal point.
Override this method to disable the precision validation for input
values or to enhance it in any way you need to.
"""
sign, digittuple, exponent = value.as_tuple()
if exponent >= 0:
# 1234500.0
total_digits = len(digittuple) + exponent
whole_digits = total_digits
decimal_places = 0
elif len(digittuple) > abs(exponent):
# 123.45
total_digits = len(digittuple)
whole_digits = total_digits - abs(exponent)
decimal_places = abs(exponent)
else:
# 0.001234
total_digits = abs(exponent)
whole_digits = 0
decimal_places = total_digits
if self.max_digits is not None and total_digits > self.max_digits:
self.fail('max_digits', max_digits=self.max_digits)
if self.decimal_places is not None and decimal_places > self.decimal_places:
self.fail('max_decimal_places', max_decimal_places=self.decimal_places)
if self.max_whole_digits is not None and whole_digits > self.max_whole_digits:
self.fail('max_whole_digits', max_whole_digits=self.max_whole_digits)
return value
def to_representation(self, value):
coerce_to_string = getattr(self, 'coerce_to_string', api_settings.COERCE_DECIMAL_TO_STRING)
if not isinstance(value, decimal.Decimal):
value = decimal.Decimal(six.text_type(value).strip())
quantized = self.quantize(value)
if not coerce_to_string:
return quantized
if self.localize:
return localize_input(quantized)
return '{0:f}'.format(quantized)
def quantize(self, value):
"""
Quantize the decimal value to the configured precision.
"""
if self.decimal_places is None:
return value
context = decimal.getcontext().copy()
if self.max_digits is not None:
context.prec = self.max_digits
return value.quantize(
decimal.Decimal('.1') ** self.decimal_places,
context=context
)
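# --- Hedged sketch (not part of the original module) ------------------------
# `to_internal_value` validates precision and then quantizes to the declared
# number of decimal places:
def _sketch_decimal_coercion():
    field = DecimalField(max_digits=5, decimal_places=2)
    assert field.to_internal_value('123.45') == decimal.Decimal('123.45')
    # '123.456' would fail: three decimal places exceed decimal_places=2.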
# Date & time fields...
class DateTimeField(Field):
default_error_messages = {
'invalid': _('Datetime has wrong format. Use one of these formats instead: {format}.'),
'date': _('Expected a datetime but got a date.'),
'make_aware': _('Invalid datetime for the timezone "{timezone}".')
}
datetime_parser = datetime.datetime.strptime
def __init__(self, format=empty, input_formats=None, default_timezone=None, *args, **kwargs):
if format is not empty:
self.format = format
if input_formats is not None:
self.input_formats = input_formats
if default_timezone is not None:
self.timezone = default_timezone
super(DateTimeField, self).__init__(*args, **kwargs)
def enforce_timezone(self, value):
"""
When `self.default_timezone` is `None`, always return naive datetimes.
When `self.default_timezone` is not `None`, always return aware datetimes.
"""
field_timezone = getattr(self, 'timezone', self.default_timezone())
if field_timezone is not None:
if timezone.is_aware(value):
return value.astimezone(field_timezone)
try:
return timezone.make_aware(value, field_timezone)
except InvalidTimeError:
self.fail('make_aware', timezone=field_timezone)
elif (field_timezone is None) and timezone.is_aware(value):
return timezone.make_naive(value, utc)
return value
def default_timezone(self):
return timezone.get_current_timezone() if settings.USE_TZ else None
def to_internal_value(self, value):
input_formats = getattr(self, 'input_formats', api_settings.DATETIME_INPUT_FORMATS)
if isinstance(value, datetime.date) and not isinstance(value, datetime.datetime):
self.fail('date')
if isinstance(value, datetime.datetime):
return self.enforce_timezone(value)
for input_format in input_formats:
if input_format.lower() == ISO_8601:
try:
parsed = parse_datetime(value)
if parsed is not None:
return self.enforce_timezone(parsed)
except (ValueError, TypeError):
pass
else:
try:
parsed = self.datetime_parser(value, input_format)
return self.enforce_timezone(parsed)
except (ValueError, TypeError):
pass
humanized_format = humanize_datetime.datetime_formats(input_formats)
self.fail('invalid', format=humanized_format)
def to_representation(self, value):
if not value:
return None
output_format = getattr(self, 'format', api_settings.DATETIME_FORMAT)
if output_format is None or isinstance(value, six.string_types):
return value
if output_format.lower() == ISO_8601:
value = self.enforce_timezone(value)
value = value.isoformat()
if value.endswith('+00:00'):
value = value[:-6] + 'Z'
return value
return value.strftime(output_format)
class DateField(Field):
default_error_messages = {
'invalid': _('Date has wrong format. Use one of these formats instead: {format}.'),
'datetime': _('Expected a date but got a datetime.'),
}
datetime_parser = datetime.datetime.strptime
def __init__(self, format=empty, input_formats=None, *args, **kwargs):
if format is not empty:
self.format = format
if input_formats is not None:
self.input_formats = input_formats
super(DateField, self).__init__(*args, **kwargs)
def to_internal_value(self, value):
input_formats = getattr(self, 'input_formats', api_settings.DATE_INPUT_FORMATS)
if isinstance(value, datetime.datetime):
self.fail('datetime')
if isinstance(value, datetime.date):
return value
for input_format in input_formats:
if input_format.lower() == ISO_8601:
try:
parsed = parse_date(value)
except (ValueError, TypeError):
pass
else:
if parsed is not None:
return parsed
else:
try:
parsed = self.datetime_parser(value, input_format)
except (ValueError, TypeError):
pass
else:
return parsed.date()
humanized_format = humanize_datetime.date_formats(input_formats)
self.fail('invalid', format=humanized_format)
def to_representation(self, value):
if not value:
return None
output_format = getattr(self, 'format', api_settings.DATE_FORMAT)
if output_format is None or isinstance(value, six.string_types):
return value
# Applying a `DateField` to a datetime value is almost always
# not a sensible thing to do, as it means naively dropping
# any explicit or implicit timezone info.
assert not isinstance(value, datetime.datetime), (
'Expected a `date`, but got a `datetime`. Refusing to coerce, '
'as this may mean losing timezone information. Use a custom '
'read-only field and deal with timezone issues explicitly.'
)
if output_format.lower() == ISO_8601:
return value.isoformat()
return value.strftime(output_format)
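# --- Hedged sketch (not part of the original module) ------------------------
# Passing `input_formats` and `format` explicitly keeps the sketch independent
# of the `api_settings` defaults (which require configured Django settings):
def _sketch_date_coercion():
    field = DateField(format='%Y/%m/%d', input_formats=['iso-8601'])
    parsed = field.to_internal_value('2001-12-31')  # -> datetime.date(2001, 12, 31)
    return field.to_representation(parsed)          # -> '2001/12/31'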
class TimeField(Field):
default_error_messages = {
'invalid': _('Time has wrong format. Use one of these formats instead: {format}.'),
}
datetime_parser = datetime.datetime.strptime
def __init__(self, format=empty, input_formats=None, *args, **kwargs):
if format is not empty:
self.format = format
if input_formats is not None:
self.input_formats = input_formats
super(TimeField, self).__init__(*args, **kwargs)
def to_internal_value(self, value):
input_formats = getattr(self, 'input_formats', api_settings.TIME_INPUT_FORMATS)
if isinstance(value, datetime.time):
return value
for input_format in input_formats:
if input_format.lower() == ISO_8601:
try:
parsed = parse_time(value)
except (ValueError, TypeError):
pass
else:
if parsed is not None:
return parsed
else:
try:
parsed = self.datetime_parser(value, input_format)
except (ValueError, TypeError):
pass
else:
return parsed.time()
humanized_format = humanize_datetime.time_formats(input_formats)
self.fail('invalid', format=humanized_format)
def to_representation(self, value):
if value in (None, ''):
return None
output_format = getattr(self, 'format', api_settings.TIME_FORMAT)
if output_format is None or isinstance(value, six.string_types):
return value
# Applying a `TimeField` to a datetime value is almost always
# not a sensible thing to do, as it means naively dropping
# any explicit or implicit timezone info.
assert not isinstance(value, datetime.datetime), (
'Expected a `time`, but got a `datetime`. Refusing to coerce, '
'as this may mean losing timezone information. Use a custom '
'read-only field and deal with timezone issues explicitly.'
)
if output_format.lower() == ISO_8601:
return value.isoformat()
return value.strftime(output_format)
class DurationField(Field):
default_error_messages = {
'invalid': _('Duration has wrong format. Use one of these formats instead: {format}.'),
}
def to_internal_value(self, value):
if isinstance(value, datetime.timedelta):
return value
parsed = parse_duration(six.text_type(value))
if parsed is not None:
return parsed
self.fail('invalid', format='[DD] [HH:[MM:]]ss[.uuuuuu]')
def to_representation(self, value):
return duration_string(value)
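# --- Hedged sketch (not part of the original module) ------------------------
# Durations round-trip through Django's parse/serialize helpers:
def _sketch_duration_coercion():
    field = DurationField()
    value = field.to_internal_value('01:02:03')  # -> timedelta(hours=1, minutes=2, seconds=3)
    return field.to_representation(value)        # -> '01:02:03'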
# Choice types...
class ChoiceField(Field):
default_error_messages = {
'invalid_choice': _('"{input}" is not a valid choice.')
}
html_cutoff = None
html_cutoff_text = _('More than {count} items...')
def __init__(self, choices, **kwargs):
self.choices = choices
self.html_cutoff = kwargs.pop('html_cutoff', self.html_cutoff)
self.html_cutoff_text = kwargs.pop('html_cutoff_text', self.html_cutoff_text)
self.allow_blank = kwargs.pop('allow_blank', False)
super(ChoiceField, self).__init__(**kwargs)
def to_internal_value(self, data):
if data == '' and self.allow_blank:
return ''
try:
return self.choice_strings_to_values[six.text_type(data)]
except KeyError:
self.fail('invalid_choice', input=data)
def to_representation(self, value):
if value in ('', None):
return value
return self.choice_strings_to_values.get(six.text_type(value), value)
def iter_options(self):
"""
Helper method for use with templates rendering select widgets.
"""
return iter_options(
self.grouped_choices,
cutoff=self.html_cutoff,
cutoff_text=self.html_cutoff_text
)
def _get_choices(self):
return self._choices
def _set_choices(self, choices):
self.grouped_choices = to_choices_dict(choices)
self._choices = flatten_choices_dict(self.grouped_choices)
# Map the string representation of choices to the underlying value.
        # Allows us to deal with e.g. integer choices while supporting either
# integer or string input, but still get the correct datatype out.
self.choice_strings_to_values = {
six.text_type(key): key for key in self.choices.keys()
}
choices = property(_get_choices, _set_choices)
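# --- Hedged sketch (not part of the original module) ------------------------
# `choice_strings_to_values` lets integer choices accept either integer or
# string input while still returning the native value:
def _sketch_choice_coercion():
    field = ChoiceField(choices=[1, 2, 3])
    assert field.to_internal_value('2') == 2
    assert field.to_internal_value(3) == 3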
class MultipleChoiceField(ChoiceField):
default_error_messages = {
'invalid_choice': _('"{input}" is not a valid choice.'),
'not_a_list': _('Expected a list of items but got type "{input_type}".'),
'empty': _('This selection may not be empty.')
}
default_empty_html = []
def __init__(self, *args, **kwargs):
self.allow_empty = kwargs.pop('allow_empty', True)
super(MultipleChoiceField, self).__init__(*args, **kwargs)
def get_value(self, dictionary):
if self.field_name not in dictionary:
if getattr(self.root, 'partial', False):
return empty
# We override the default field access in order to support
# lists in HTML forms.
if html.is_html_input(dictionary):
return dictionary.getlist(self.field_name)
return dictionary.get(self.field_name, empty)
def to_internal_value(self, data):
if isinstance(data, type('')) or not hasattr(data, '__iter__'):
self.fail('not_a_list', input_type=type(data).__name__)
if not self.allow_empty and len(data) == 0:
self.fail('empty')
return {
super(MultipleChoiceField, self).to_internal_value(item)
for item in data
}
def to_representation(self, value):
return {
self.choice_strings_to_values.get(six.text_type(item), item) for item in value
}
class FilePathField(ChoiceField):
default_error_messages = {
'invalid_choice': _('"{input}" is not a valid path choice.')
}
def __init__(self, path, match=None, recursive=False, allow_files=True,
allow_folders=False, required=None, **kwargs):
# Defer to Django's FilePathField implementation to get the
# valid set of choices.
field = DjangoFilePathField(
path, match=match, recursive=recursive, allow_files=allow_files,
allow_folders=allow_folders, required=required
)
kwargs['choices'] = field.choices
super(FilePathField, self).__init__(**kwargs)
# File types...
class FileField(Field):
default_error_messages = {
'required': _('No file was submitted.'),
'invalid': _('The submitted data was not a file. Check the encoding type on the form.'),
'no_name': _('No filename could be determined.'),
'empty': _('The submitted file is empty.'),
'max_length': _('Ensure this filename has at most {max_length} characters (it has {length}).'),
}
def __init__(self, *args, **kwargs):
self.max_length = kwargs.pop('max_length', None)
self.allow_empty_file = kwargs.pop('allow_empty_file', False)
if 'use_url' in kwargs:
self.use_url = kwargs.pop('use_url')
super(FileField, self).__init__(*args, **kwargs)
def to_internal_value(self, data):
try:
# `UploadedFile` objects should have name and size attributes.
file_name = data.name
file_size = data.size
except AttributeError:
self.fail('invalid')
if not file_name:
self.fail('no_name')
if not self.allow_empty_file and not file_size:
self.fail('empty')
if self.max_length and len(file_name) > self.max_length:
self.fail('max_length', max_length=self.max_length, length=len(file_name))
return data
def to_representation(self, value):
if not value:
return None
use_url = getattr(self, 'use_url', api_settings.UPLOADED_FILES_USE_URL)
if use_url:
if not getattr(value, 'url', None):
# If the file has not been saved it may not have a URL.
return None
url = value.url
request = self.context.get('request', None)
if request is not None:
return request.build_absolute_uri(url)
return url
return value.name
class ImageField(FileField):
default_error_messages = {
'invalid_image': _(
'Upload a valid image. The file you uploaded was either not an image or a corrupted image.'
),
}
def __init__(self, *args, **kwargs):
self._DjangoImageField = kwargs.pop('_DjangoImageField', DjangoImageField)
super(ImageField, self).__init__(*args, **kwargs)
def to_internal_value(self, data):
# Image validation is a bit grungy, so we'll just outright
# defer to Django's implementation so we don't need to
# consider it, or treat PIL as a test dependency.
file_object = super(ImageField, self).to_internal_value(data)
django_field = self._DjangoImageField()
django_field.error_messages = self.error_messages
django_field.to_python(file_object)
return file_object
# Composite field types...
class _UnvalidatedField(Field):
def __init__(self, *args, **kwargs):
super(_UnvalidatedField, self).__init__(*args, **kwargs)
self.allow_blank = True
self.allow_null = True
def to_internal_value(self, data):
return data
def to_representation(self, value):
return value
class ListField(Field):
child = _UnvalidatedField()
initial = []
default_error_messages = {
'not_a_list': _('Expected a list of items but got type "{input_type}".'),
'empty': _('This list may not be empty.'),
'min_length': _('Ensure this field has at least {min_length} elements.'),
'max_length': _('Ensure this field has no more than {max_length} elements.')
}
def __init__(self, *args, **kwargs):
self.child = kwargs.pop('child', copy.deepcopy(self.child))
self.allow_empty = kwargs.pop('allow_empty', True)
self.max_length = kwargs.pop('max_length', None)
self.min_length = kwargs.pop('min_length', None)
assert not inspect.isclass(self.child), '`child` has not been instantiated.'
assert self.child.source is None, (
"The `source` argument is not meaningful when applied to a `child=` field. "
"Remove `source=` from the field declaration."
)
super(ListField, self).__init__(*args, **kwargs)
self.child.bind(field_name='', parent=self)
if self.max_length is not None:
message = self.error_messages['max_length'].format(max_length=self.max_length)
self.validators.append(MaxLengthValidator(self.max_length, message=message))
if self.min_length is not None:
message = self.error_messages['min_length'].format(min_length=self.min_length)
self.validators.append(MinLengthValidator(self.min_length, message=message))
def get_value(self, dictionary):
if self.field_name not in dictionary:
if getattr(self.root, 'partial', False):
return empty
# We override the default field access in order to support
# lists in HTML forms.
if html.is_html_input(dictionary):
val = dictionary.getlist(self.field_name, [])
if len(val) > 0:
# Support QueryDict lists in HTML input.
return val
return html.parse_html_list(dictionary, prefix=self.field_name)
return dictionary.get(self.field_name, empty)
def to_internal_value(self, data):
"""
List of dicts of native values <- List of dicts of primitive datatypes.
"""
if html.is_html_input(data):
data = html.parse_html_list(data)
if isinstance(data, type('')) or isinstance(data, collections.Mapping) or not hasattr(data, '__iter__'):
self.fail('not_a_list', input_type=type(data).__name__)
if not self.allow_empty and len(data) == 0:
self.fail('empty')
return [self.child.run_validation(item) for item in data]
def to_representation(self, data):
"""
List of object instances -> List of dicts of primitive datatypes.
"""
return [self.child.to_representation(item) if item is not None else None for item in data]
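# A minimal usage sketch of how ListField composes a child field for
# per-item validation. `CharField` is the field class defined earlier in this
# module; the sample values are placeholders.
def _example_list_field():  # pragma: no cover - documentation sketch
    field = ListField(child=CharField(), min_length=1)
    return field.run_validation(['a', 'b'])  # -> ['a', 'b'], each item coerced by the child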
class DictField(Field):
child = _UnvalidatedField()
initial = {}
default_error_messages = {
'not_a_dict': _('Expected a dictionary of items but got type "{input_type}".')
}
def __init__(self, *args, **kwargs):
self.child = kwargs.pop('child', copy.deepcopy(self.child))
assert not inspect.isclass(self.child), '`child` has not been instantiated.'
assert self.child.source is None, (
"The `source` argument is not meaningful when applied to a `child=` field. "
"Remove `source=` from the field declaration."
)
super(DictField, self).__init__(*args, **kwargs)
self.child.bind(field_name='', parent=self)
def get_value(self, dictionary):
# We override the default field access in order to support
# dictionaries in HTML forms.
if html.is_html_input(dictionary):
return html.parse_html_dict(dictionary, prefix=self.field_name)
return dictionary.get(self.field_name, empty)
def to_internal_value(self, data):
"""
Dicts of native values <- Dicts of primitive datatypes.
"""
if html.is_html_input(data):
data = html.parse_html_dict(data)
if not isinstance(data, dict):
self.fail('not_a_dict', input_type=type(data).__name__)
return {
six.text_type(key): self.child.run_validation(value)
for key, value in data.items()
}
def to_representation(self, value):
"""
List of object instances -> List of dicts of primitive datatypes.
"""
return {
six.text_type(key): self.child.to_representation(val) if val is not None else None
for key, val in value.items()
}
class JSONField(Field):
default_error_messages = {
'invalid': _('Value must be valid JSON.')
}
def __init__(self, *args, **kwargs):
self.binary = kwargs.pop('binary', False)
super(JSONField, self).__init__(*args, **kwargs)
def get_value(self, dictionary):
if html.is_html_input(dictionary) and self.field_name in dictionary:
# When HTML form input is used, mark up the input
# as being a JSON string, rather than a JSON primitive.
class JSONString(six.text_type):
                def __new__(cls, value):
                    ret = six.text_type.__new__(cls, value)
ret.is_json_string = True
return ret
return JSONString(dictionary[self.field_name])
return dictionary.get(self.field_name, empty)
def to_internal_value(self, data):
try:
if self.binary or getattr(data, 'is_json_string', False):
if isinstance(data, six.binary_type):
data = data.decode('utf-8')
return json.loads(data)
else:
json.dumps(data)
except (TypeError, ValueError):
self.fail('invalid')
return data
def to_representation(self, value):
if self.binary:
value = json.dumps(value)
# On python 2.x the return type for json.dumps() is underspecified.
# On python 3.x json.dumps() returns unicode strings.
if isinstance(value, six.text_type):
value = bytes(value.encode('utf-8'))
return value
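# A minimal sketch of JSONField's `binary` mode: UTF-8 bytes are decoded on
# the way in and re-encoded on the way out. Sample values are placeholders.
def _example_json_field():  # pragma: no cover - documentation sketch
    field = JSONField(binary=True)
    native = field.to_internal_value(b'{"a": 1}')  # -> {'a': 1}
    return field.to_representation(native)         # -> b'{"a": 1}'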
# Miscellaneous field types...
class ReadOnlyField(Field):
"""
A read-only field that simply returns the field value.
If the field is a method with no parameters, the method will be called
and its return value used as the representation.
For example, the following would call `get_expiry_date()` on the object:
class ExampleSerializer(Serializer):
expiry_date = ReadOnlyField(source='get_expiry_date')
"""
def __init__(self, **kwargs):
kwargs['read_only'] = True
super(ReadOnlyField, self).__init__(**kwargs)
def to_representation(self, value):
return value
class HiddenField(Field):
"""
A hidden field does not take input from the user, or present any output,
but it does populate a field in `validated_data`, based on its default
value. This is particularly useful when we have a `unique_for_date`
constraint on a pair of fields, as we need some way to include the date in
the validated data.
"""
def __init__(self, **kwargs):
assert 'default' in kwargs, 'default is a required argument.'
kwargs['write_only'] = True
super(HiddenField, self).__init__(**kwargs)
def get_value(self, dictionary):
# We always use the default value for `HiddenField`.
# User input is never provided or accepted.
return empty
def to_internal_value(self, data):
return data
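# A minimal sketch of HiddenField's behaviour: get_value() always returns
# `empty`, so user input is ignored and the declared default is used instead.
def _example_hidden_field():  # pragma: no cover - documentation sketch
    field = HiddenField(default='fixed')
    assert field.get_value({'field_name': 'ignored'}) is empty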
class SerializerMethodField(Field):
"""
    A read-only field that gets its representation from calling a method on the
parent serializer class. The method called will be of the form
"get_{field_name}", and should take a single argument, which is the
object being serialized.
For example:
        class ExampleSerializer(Serializer):
extra_info = SerializerMethodField()
def get_extra_info(self, obj):
return ... # Calculate some data to return.
"""
def __init__(self, method_name=None, **kwargs):
self.method_name = method_name
kwargs['source'] = '*'
kwargs['read_only'] = True
super(SerializerMethodField, self).__init__(**kwargs)
def bind(self, field_name, parent):
# In order to enforce a consistent style, we error if a redundant
# 'method_name' argument has been used. For example:
# my_field = serializer.SerializerMethodField(method_name='get_my_field')
default_method_name = 'get_{field_name}'.format(field_name=field_name)
assert self.method_name != default_method_name, (
"It is redundant to specify `%s` on SerializerMethodField '%s' in "
"serializer '%s', because it is the same as the default method name. "
"Remove the `method_name` argument." %
(self.method_name, field_name, parent.__class__.__name__)
)
# The method name should default to `get_{field_name}`.
if self.method_name is None:
self.method_name = default_method_name
super(SerializerMethodField, self).bind(field_name, parent)
def to_representation(self, value):
method = getattr(self.parent, self.method_name)
return method(value)
class ModelField(Field):
"""
A generic field that can be used against an arbitrary model field.
This is used by `ModelSerializer` when dealing with custom model fields,
that do not have a serializer field to be mapped to.
"""
default_error_messages = {
'max_length': _('Ensure this field has no more than {max_length} characters.'),
}
def __init__(self, model_field, **kwargs):
self.model_field = model_field
# The `max_length` option is supported by Django's base `Field` class,
# so we'd better support it here.
max_length = kwargs.pop('max_length', None)
super(ModelField, self).__init__(**kwargs)
        if max_length is not None:
            message = lazy(
                self.error_messages['max_length'].format,
                six.text_type)(max_length=max_length)
            self.validators.append(
                MaxLengthValidator(max_length, message=message))
def to_internal_value(self, data):
rel = get_remote_field(self.model_field, default=None)
if rel is not None:
return rel.model._meta.get_field(rel.field_name).to_python(data)
return self.model_field.to_python(data)
def get_attribute(self, obj):
# We pass the object instance onto `to_representation`,
# not just the field attribute.
return obj
def to_representation(self, obj):
value = value_from_object(self.model_field, obj)
if is_protected_type(value):
return value
return self.model_field.value_to_string(obj)
| {
"content_hash": "a7678f354618628908026ff99621896d",
"timestamp": "",
"source": "github",
"line_count": 1846,
"max_line_length": 127,
"avg_line_length": 36.02329360780065,
"alnum_prop": 0.5919788267492744,
"repo_name": "ossanna16/django-rest-framework",
"id": "7a79ae93cbf978807d28d2461cb64f861242778b",
"size": "66499",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "rest_framework/fields.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "39327"
},
{
"name": "HTML",
"bytes": "81666"
},
{
"name": "JavaScript",
"bytes": "18007"
},
{
"name": "Python",
"bytes": "1140247"
}
],
"symlink_target": ""
} |
"""
Exception definitions.
"""
class UnsupportedVersion(Exception):
"""Indicates that the user is trying to use an unsupported
version of the API.
"""
pass
class InvalidAPIVersion(Exception):
pass
class CommandError(Exception):
pass
class AuthorizationFailure(Exception):
pass
class NoUniqueMatch(Exception):
pass
class AuthSystemNotFound(Exception):
"""When the user specifies an AuthSystem but not installed."""
def __init__(self, auth_system):
self.auth_system = auth_system
def __str__(self):
return "AuthSystemNotFound: %s" % repr(self.auth_system)
class NoTokenLookupException(Exception):
"""This form of authentication does not support looking up
endpoints from an existing token.
"""
pass
class EndpointNotFound(Exception):
"""Could not find Service or Region in Service Catalog."""
pass
class ConnectionError(Exception):
"""Could not open a connection to the API service."""
pass
class AmbiguousEndpoints(Exception):
"""Found more than one matching endpoint in Service Catalog."""
def __init__(self, endpoints=None):
self.endpoints = endpoints
def __str__(self):
return "AmbiguousEndpoints: %s" % repr(self.endpoints)
class ClientException(Exception):
"""
The base exception class for all exceptions this library raises.
"""
def __init__(self, code, message=None, details=None, request_id=None):
self.code = code
# NOTE(mriedem): Use getattr on self.__class__.message since
# BaseException.message was dropped in python 3, see PEP 0352.
self.message = message or getattr(self.__class__, 'message', None)
self.details = details
self.request_id = request_id
def __str__(self):
formatted_string = "%s (HTTP %s)" % (self.message, self.code)
if self.request_id:
formatted_string += " (Request-ID: %s)" % self.request_id
return formatted_string
class BadRequest(ClientException):
"""
HTTP 400 - Bad request: you sent some malformed data.
"""
http_status = 400
message = "Bad request"
class Unauthorized(ClientException):
"""
HTTP 401 - Unauthorized: bad credentials.
"""
http_status = 401
message = "Unauthorized"
class Forbidden(ClientException):
"""
HTTP 403 - Forbidden: your credentials don't give you access to this
resource.
"""
http_status = 403
message = "Forbidden"
class NotFound(ClientException):
"""
HTTP 404 - Not found
"""
http_status = 404
message = "Not found"
class NotAcceptable(ClientException):
"""
HTTP 406 - Not Acceptable
"""
http_status = 406
message = "Not Acceptable"
class Conflict(ClientException):
"""
HTTP 409 - Conflict
"""
http_status = 409
message = "Conflict"
class OverLimit(ClientException):
"""
HTTP 413 - Over limit: you're over the API limits for this time period.
"""
http_status = 413
message = "Over limit"
# NotImplemented is a python keyword.
class HTTPNotImplemented(ClientException):
"""
HTTP 501 - Not Implemented: the server does not support this operation.
"""
http_status = 501
message = "Not Implemented"
# In Python 2.4 Exception is old-style and thus doesn't have a __subclasses__()
# so we can't do this:
# _code_map = dict((c.http_status, c)
# for c in ClientException.__subclasses__())
#
# Instead, we have to hardcode it:
_code_map = dict((c.http_status, c) for c in [BadRequest, Unauthorized,
Forbidden, NotFound,
NotAcceptable, Conflict,
OverLimit, HTTPNotImplemented])
def from_response(response, body):
"""
Return an instance of a ClientException or subclass
based on a requests response.
Usage::
resp, body = requests.request(...)
if resp.status_code != 200:
raise exceptions.from_response(resp, resp.text)
"""
cls = _code_map.get(response.status_code, ClientException)
if response.headers:
request_id = response.headers.get('x-compute-request-id')
else:
request_id = None
if body:
message = "n/a"
details = "n/a"
if hasattr(body, 'keys'):
error = body[list(body)[0]]
message = error.get('message', message)
details = error.get('details', details)
return cls(code=response.status_code, message=message, details=details,
request_id=request_id)
else:
return cls(code=response.status_code, request_id=request_id,
message=response.reason)
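# A minimal sketch of the status-code dispatch above. FakeResponse stands in
# for a `requests.Response`; all values are placeholders.
def _example_from_response():  # pragma: no cover - documentation sketch
    class FakeResponse(object):
        status_code = 404
        headers = {'x-compute-request-id': 'req-123'}
        reason = 'Not Found'
    exc = from_response(FakeResponse(), body=None)
    assert isinstance(exc, NotFound)
    return str(exc)  # -> 'Not Found (HTTP 404) (Request-ID: req-123)'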
| {
"content_hash": "420e8d49f46c91873f6d43baed3ca221",
"timestamp": "",
"source": "github",
"line_count": 189,
"max_line_length": 79,
"avg_line_length": 25.386243386243386,
"alnum_prop": 0.6196331804918717,
"repo_name": "scottdangelo/cinderclient-api-microversions",
"id": "8399e607943f3c3f0df6df7531d6e92c9b6bcfc4",
"size": "5381",
"binary": false,
"copies": "1",
"ref": "refs/heads/cinderclient-api-microversions",
"path": "cinderclient/exceptions.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "829037"
},
{
"name": "Shell",
"bytes": "9081"
}
],
"symlink_target": ""
} |
"""
Support for getting the state of a Thermoworks Smoke Thermometer.
Requires Smoke Gateway Wifi with an internet connection.
"""
from __future__ import annotations
import logging
from requests import RequestException
from requests.exceptions import HTTPError
from stringcase import camelcase, snakecase
import thermoworks_smoke
import voluptuous as vol
from homeassistant.components.sensor import (
PLATFORM_SCHEMA,
SensorDeviceClass,
SensorEntity,
)
from homeassistant.const import (
ATTR_BATTERY_LEVEL,
CONF_EMAIL,
CONF_EXCLUDE,
CONF_MONITORED_CONDITIONS,
CONF_PASSWORD,
TEMP_FAHRENHEIT,
)
from homeassistant.core import HomeAssistant
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
_LOGGER = logging.getLogger(__name__)
PROBE_1 = "probe1"
PROBE_2 = "probe2"
PROBE_1_MIN = "probe1_min"
PROBE_1_MAX = "probe1_max"
PROBE_2_MIN = "probe2_min"
PROBE_2_MAX = "probe2_max"
BATTERY_LEVEL = "battery"
FIRMWARE = "firmware"
SERIAL_REGEX = "^(?:[0-9a-fA-F]{2}:){5}[0-9a-fA-F]{2}$"
# map types to labels
SENSOR_TYPES = {
PROBE_1: "Probe 1",
PROBE_2: "Probe 2",
PROBE_1_MIN: "Probe 1 Min",
PROBE_1_MAX: "Probe 1 Max",
PROBE_2_MIN: "Probe 2 Min",
PROBE_2_MAX: "Probe 2 Max",
}
# exclude these keys from thermoworks data
EXCLUDE_KEYS = [FIRMWARE]
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_EMAIL): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Optional(CONF_MONITORED_CONDITIONS, default=[PROBE_1, PROBE_2]): vol.All(
cv.ensure_list, [vol.In(SENSOR_TYPES)]
),
vol.Optional(CONF_EXCLUDE, default=[]): vol.All(
cv.ensure_list, [cv.matches_regex(SERIAL_REGEX)]
),
}
)
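# A minimal sketch of a configuration mapping that PLATFORM_SCHEMA above
# would accept; credentials and the excluded serial are placeholders.
_EXAMPLE_CONFIG = {
    "platform": "thermoworks_smoke",
    CONF_EMAIL: "user@example.com",
    CONF_PASSWORD: "placeholder-password",
    CONF_MONITORED_CONDITIONS: [PROBE_1, PROBE_1_MIN, PROBE_1_MAX],
    CONF_EXCLUDE: ["aa:bb:cc:dd:ee:ff"],
}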
def setup_platform(
hass: HomeAssistant,
config: ConfigType,
add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up the thermoworks sensor."""
email = config[CONF_EMAIL]
password = config[CONF_PASSWORD]
monitored_variables = config[CONF_MONITORED_CONDITIONS]
excluded = config[CONF_EXCLUDE]
try:
mgr = thermoworks_smoke.initialize_app(email, password, True, excluded)
# list of sensor devices
dev = []
# get list of registered devices
for serial in mgr.serials():
for variable in monitored_variables:
dev.append(ThermoworksSmokeSensor(variable, serial, mgr))
add_entities(dev, True)
except HTTPError as error:
msg = f"{error.strerror}"
if "EMAIL_NOT_FOUND" in msg or "INVALID_PASSWORD" in msg:
_LOGGER.error("Invalid email and password combination")
else:
_LOGGER.error(msg)
class ThermoworksSmokeSensor(SensorEntity):
"""Implementation of a thermoworks smoke sensor."""
def __init__(self, sensor_type, serial, mgr):
"""Initialize the sensor."""
self.type = sensor_type
self.serial = serial
self.mgr = mgr
self._attr_name = "{name} {sensor}".format(
name=mgr.name(serial), sensor=SENSOR_TYPES[sensor_type]
)
self._attr_native_unit_of_measurement = TEMP_FAHRENHEIT
self._attr_unique_id = f"{serial}-{sensor_type}"
self._attr_device_class = SensorDeviceClass.TEMPERATURE
self.update_unit()
def update_unit(self):
"""Set the units from the data."""
if PROBE_2 in self.type:
self._attr_native_unit_of_measurement = self.mgr.units(self.serial, PROBE_2)
else:
self._attr_native_unit_of_measurement = self.mgr.units(self.serial, PROBE_1)
def update(self):
"""Get the monitored data from firebase."""
try:
values = self.mgr.data(self.serial)
# set state from data based on type of sensor
self._attr_native_value = values.get(camelcase(self.type))
# set units
self.update_unit()
# set basic attributes for all sensors
self._attr_extra_state_attributes = {
"time": values["time"],
"localtime": values["localtime"],
}
# set extended attributes for main probe sensors
if self.type in (PROBE_1, PROBE_2):
for key, val in values.items():
# add all attributes that don't contain any probe name
# or contain a matching probe name
if (self.type == PROBE_1 and key.find(PROBE_2) == -1) or (
self.type == PROBE_2 and key.find(PROBE_1) == -1
):
if key == BATTERY_LEVEL:
key = ATTR_BATTERY_LEVEL
else:
# strip probe label and convert to snake_case
key = snakecase(key.replace(self.type, ""))
# add to attrs
if key and key not in EXCLUDE_KEYS:
self._attr_extra_state_attributes[key] = val
# store actual unit because attributes are not converted
self._attr_extra_state_attributes[
"unit_of_min_max"
] = self._attr_native_unit_of_measurement
except (RequestException, ValueError, KeyError):
_LOGGER.warning("Could not update status for %s", self.name)
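# A minimal sketch of the key translation update() relies on: the gateway
# reports camelCase keys, while the sensor types above are snake_case.
def _example_key_mapping():  # pragma: no cover - documentation sketch
    assert camelcase("probe1_min") == "probe1Min"  # state lookup direction
    assert snakecase("High") == "high"             # attribute key direction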
| {
"content_hash": "e2ab0fb96dfb53af3693f6cf0527c1de",
"timestamp": "",
"source": "github",
"line_count": 170,
"max_line_length": 88,
"avg_line_length": 32.84705882352941,
"alnum_prop": 0.6060171919770774,
"repo_name": "rohitranjan1991/home-assistant",
"id": "72be9a055192a2bc43de338385e5d5c9ceb00581",
"size": "5584",
"binary": false,
"copies": "3",
"ref": "refs/heads/dev",
"path": "homeassistant/components/thermoworks_smoke/sensor.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1017265"
},
{
"name": "Python",
"bytes": "1051086"
},
{
"name": "Shell",
"bytes": "3946"
}
],
"symlink_target": ""
} |
import abc
import json
import logging
from pathlib import Path
import networkx
logger = logging.getLogger(__name__)
class ProtocolException(Exception):
pass
class BaseRepository(abc.ABC):
def __init__(self, absolute_path: Path):
if not absolute_path.exists():
logger.error("repository path `%s` does not exist", absolute_path.name)
raise FileNotFoundError("repository path `%s` does not exist" % absolute_path.name)
self._absolute_path = absolute_path
if not self.info_path.exists():
logger.error("`%s` is not a valid BiReUS repository", absolute_path.name)
raise ValueError("`%s` is not a valid BiReUS repository" % absolute_path.name)
logger.debug("Initialize Repository @ %s ", absolute_path)
with self.info_path.open("r") as file:
self._metadata = json.load(file)
self.version_graph = networkx.read_gml(str(self.version_graph_path)) # type: networkx.Graph
@property
def absolute_path(self):
return self._absolute_path
@property
@abc.abstractmethod
def info_path(self) -> Path:
pass
@property
@abc.abstractmethod
def version_graph_path(self) -> Path:
pass
@property
def name(self) -> str:
return self._metadata['name']
@property
def first_version(self) -> str:
return self._metadata['first_version']
@property
def latest_version(self) -> str:
return self._metadata['latest_version']
def has_version(self, name: str):
return self.version_graph.has_node(name)
@property
def strategy(self) -> str:
return self._metadata['strategy']
@property
def protocol(self) -> int:
return self._metadata['protocol']
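# A minimal sketch of a concrete subclass: the two abstract path properties
# are all an implementation must provide. The file names are placeholders,
# not the actual BiReUS layout.
class _ExampleRepository(BaseRepository):  # pragma: no cover - documentation sketch
    @property
    def info_path(self) -> Path:
        return self._absolute_path / "info.json"

    @property
    def version_graph_path(self) -> Path:
        return self._absolute_path / "versions.gml"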
| {
"content_hash": "695c1a804da824c9ef9a48d5993cfeb0",
"timestamp": "",
"source": "github",
"line_count": 68,
"max_line_length": 100,
"avg_line_length": 26.279411764705884,
"alnum_prop": 0.634023503077784,
"repo_name": "Brutus5000/BiReUS",
"id": "cc0f9bf919215f0c1a911dfbccd339c604169d4b",
"size": "1802",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "bireus/shared/repository.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "219089"
}
],
"symlink_target": ""
} |
"""The Greplin root package."""
import pkg_resources
pkg_resources.declare_namespace('greplin')
| {
"content_hash": "9fa8228ff218d4c4b1657614cd021f32",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 42,
"avg_line_length": 24.25,
"alnum_prop": 0.7628865979381443,
"repo_name": "alex/scales",
"id": "f522ad0909205e2e6ecf3e060b770c20f70bdf50",
"size": "681",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "src/greplin/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
"""Base classes for our unit tests.
Allows overriding of config for use of fakes, and some black magic for
inline callbacks.
"""
import eventlet
eventlet.monkey_patch(os=False)
import os
import shutil
import sys
import fixtures
import mox
import stubout
import testtools
from oslo.config import cfg
from tuskar.db import migration
from tuskar.common import paths
from tuskar.openstack.common.db.sqlalchemy import session
from tuskar.openstack.common import log as logging
from tuskar.openstack.common import timeutils
from tuskar.tests import conf_fixture
from tuskar.tests import policy_fixture
test_opts = [
cfg.StrOpt('sqlite_clean_db',
default='clean.sqlite',
help='File name of clean sqlite db'),
]
CONF = cfg.CONF
CONF.register_opts(test_opts)
CONF.import_opt('connection',
'tuskar.openstack.common.db.sqlalchemy.session',
group='database')
CONF.import_opt('sqlite_db', 'tuskar.openstack.common.db.sqlalchemy.session')
CONF.set_override('use_stderr', False)
logging.setup('tuskar')
_DB_CACHE = None
class Database(fixtures.Fixture):
def __init__(self, db_session, db_migrate, sql_connection,
sqlite_db, sqlite_clean_db):
self.sql_connection = sql_connection
self.sqlite_db = sqlite_db
self.sqlite_clean_db = sqlite_clean_db
self.engine = db_session.get_engine()
self.engine.dispose()
conn = self.engine.connect()
if sql_connection == "sqlite://":
if db_migrate.db_version() > db_migrate.INIT_VERSION:
return
else:
testdb = paths.state_path_rel(sqlite_db)
if os.path.exists(testdb):
return
db_migrate.db_sync()
self.post_migrations()
if sql_connection == "sqlite://":
conn = self.engine.connect()
self._DB = "".join(line for line in conn.connection.iterdump())
self.engine.dispose()
else:
cleandb = paths.state_path_rel(sqlite_clean_db)
shutil.copyfile(testdb, cleandb)
def setUp(self):
super(Database, self).setUp()
if self.sql_connection == "sqlite://":
conn = self.engine.connect()
conn.connection.executescript(self._DB)
self.addCleanup(self.engine.dispose)
else:
shutil.copyfile(paths.state_path_rel(self.sqlite_clean_db),
paths.state_path_rel(self.sqlite_db))
def post_migrations(self):
"""Any addition steps that are needed outside of the migrations."""
class ReplaceModule(fixtures.Fixture):
"""Replace a module with a fake module."""
def __init__(self, name, new_value):
self.name = name
self.new_value = new_value
def _restore(self, old_value):
sys.modules[self.name] = old_value
def setUp(self):
super(ReplaceModule, self).setUp()
old_value = sys.modules.get(self.name)
sys.modules[self.name] = self.new_value
self.addCleanup(self._restore, old_value)
class MoxStubout(fixtures.Fixture):
"""Deal with code around mox and stubout as a fixture."""
def setUp(self):
super(MoxStubout, self).setUp()
# emulate some of the mox stuff, we can't use the metaclass
# because it screws with our generators
self.mox = mox.Mox()
self.stubs = stubout.StubOutForTesting()
self.addCleanup(self.mox.UnsetStubs)
self.addCleanup(self.stubs.UnsetAll)
self.addCleanup(self.stubs.SmartUnsetAll)
self.addCleanup(self.mox.VerifyAll)
class TestingException(Exception):
pass
class TestCase(testtools.TestCase):
"""Test case base class for all unit tests."""
def setUp(self):
"""Run before each test method to initialize test environment."""
super(TestCase, self).setUp()
test_timeout = os.environ.get('OS_TEST_TIMEOUT', 0)
try:
test_timeout = int(test_timeout)
except ValueError:
# If timeout value is invalid do not set a timeout.
test_timeout = 0
if test_timeout > 0:
self.useFixture(fixtures.Timeout(test_timeout, gentle=True))
self.useFixture(fixtures.NestedTempfile())
self.useFixture(fixtures.TempHomeDir())
if (os.environ.get('OS_STDOUT_CAPTURE') == 'True' or
os.environ.get('OS_STDOUT_CAPTURE') == '1'):
stdout = self.useFixture(fixtures.StringStream('stdout')).stream
self.useFixture(fixtures.MonkeyPatch('sys.stdout', stdout))
if (os.environ.get('OS_STDERR_CAPTURE') == 'True' or
os.environ.get('OS_STDERR_CAPTURE') == '1'):
stderr = self.useFixture(fixtures.StringStream('stderr')).stream
self.useFixture(fixtures.MonkeyPatch('sys.stderr', stderr))
self.log_fixture = self.useFixture(fixtures.FakeLogger())
self.useFixture(conf_fixture.ConfFixture(CONF))
global _DB_CACHE
if not _DB_CACHE:
_DB_CACHE = Database(session, migration,
sql_connection=CONF.database.connection,
sqlite_db=CONF.sqlite_db,
sqlite_clean_db=CONF.sqlite_clean_db)
self.useFixture(_DB_CACHE)
mox_fixture = self.useFixture(MoxStubout())
self.mox = mox_fixture.mox
self.stubs = mox_fixture.stubs
self.addCleanup(self._clear_attrs)
self.useFixture(fixtures.EnvironmentVariable('http_proxy'))
self.policy = self.useFixture(policy_fixture.PolicyFixture())
CONF.set_override('fatal_exception_format_errors', True)
def _clear_attrs(self):
# Delete attributes that don't start with _ so they don't pin
# memory around unnecessarily for the duration of the test
# suite
for key in [k for k in self.__dict__.keys() if k[0] != '_']:
del self.__dict__[key]
def config(self, **kw):
"""Override config options for a test."""
group = kw.pop('group', None)
for k, v in kw.iteritems():
CONF.set_override(k, v, group)
def path_get(self, project_file=None):
root = os.path.abspath(os.path.join(os.path.dirname(__file__),
'..',
'..',
)
)
if project_file:
return os.path.join(root, project_file)
else:
return root
class TimeOverride(fixtures.Fixture):
"""Fixture to start and remove time override."""
def setUp(self):
super(TimeOverride, self).setUp()
timeutils.set_time_override()
self.addCleanup(timeutils.clear_time_override)
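# A minimal sketch of a test built on the base class above; `sqlite_clean_db`
# is one of the options registered at the top of this module.
class _ExampleTest(TestCase):  # pragma: no cover - documentation sketch
    def test_config_override(self):
        # config() wraps CONF.set_override for the duration of the test.
        self.config(sqlite_clean_db='other.sqlite')
        self.assertEqual('other.sqlite', CONF.sqlite_clean_db)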
| {
"content_hash": "5005309abf652f85790cfdb7e0c0040a",
"timestamp": "",
"source": "github",
"line_count": 206,
"max_line_length": 77,
"avg_line_length": 32.83980582524272,
"alnum_prop": 0.6140428677014043,
"repo_name": "tuskar/tuskar",
"id": "092a116a811ffd96c5a659f63e152c16ea16a900",
"size": "7542",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tuskar/tests/base.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "578219"
}
],
"symlink_target": ""
} |
"""
"""
def parallel_process(array, function, n_jobs=16, use_kwargs=False, front_num=3):
"""
A parallel version of the map function with a progress bar.
    From: http://danshiebler.com/2016-09-14-parallel-progress-bar/
Args:
array (array-like): An array to iterate over.
function (function): A python function to apply to the elements of array
n_jobs (int, default=16): The number of cores to use
use_kwargs (boolean, default=False): Whether to consider the elements of array as dictionaries of
keyword arguments to function
front_num (int, default=3): The number of iterations to run serially before kicking off the parallel job.
Useful for catching bugs
Returns:
[function(array[0]), function(array[1]), ...]
"""
from tqdm.autonotebook import tqdm
from concurrent.futures import ProcessPoolExecutor, as_completed
# We run the first few iterations serially to catch bugs
if front_num > 0:
front = [function(**a) if use_kwargs else function(a) for a in array[:front_num]]
# If we set n_jobs to 1, just run a list comprehension. This is useful for benchmarking and debugging.
if n_jobs == 1:
return front + [function(**a) if use_kwargs else function(a) for a in tqdm(array[front_num:])]
# Assemble the workers
with ProcessPoolExecutor(max_workers=n_jobs) as pool:
# Pass the elements of array into function
if use_kwargs:
futures = [pool.submit(function, **a) for a in array[front_num:]]
else:
futures = [pool.submit(function, a) for a in array[front_num:]]
kwargs = {
'total': len(futures),
'unit': 'it',
'unit_scale': True,
'leave': True
}
# Print out the progress as tasks complete
progress = tqdm(as_completed(futures), **kwargs)
for f in progress:
pass
out = []
# Get the results from the futures.
for i, future in tqdm(enumerate(futures)):
try:
out.append(future.result())
except Exception as e:
out.append(e)
return front + out
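# A minimal usage sketch. The mapped function must be a picklable,
# module-level callable (a lambda would fail to pickle under
# ProcessPoolExecutor).
def _square(x):  # pragma: no cover - documentation sketch helper
    return x * x

def _example_parallel():  # pragma: no cover - documentation sketch
    # The first `front_num` items run serially; the rest run in worker processes.
    return parallel_process([1, 2, 3, 4], _square, n_jobs=2)  # -> [1, 4, 9, 16]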
| {
"content_hash": "4784e04d1d1210c642a14cea06c37b33",
"timestamp": "",
"source": "github",
"line_count": 66,
"max_line_length": 117,
"avg_line_length": 34,
"alnum_prop": 0.6042780748663101,
"repo_name": "sdrdl/sdipylib",
"id": "93eb06b386ae3c1cf11e51503e181488b5ce7e96",
"size": "2407",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sdipylib/parallel.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "4091"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from ..enums import IncrementalSearchDirection, InputMode
from ..keys import Keys
from ..line import ClipboardData, ClipboardDataType, SelectionType, indent, unindent
from ..selection import SelectionType
from .basic import basic_bindings
from .utils import create_handle_decorator
import codecs
__all__ = (
'vi_bindings',
)
class CursorRegion(object):
"""
Return struct for functions wrapped in ``change_delete_move_yank_handler``.
"""
def __init__(self, start, end=0):
self.start = start
self.end = end
def sorted(self):
"""
Return a (start, end) tuple where start <= end.
"""
if self.start < self.end:
return self.start, self.end
else:
return self.end, self.start
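# A minimal sketch of CursorRegion semantics: a region is expressed relative
# to the cursor, and sorted() normalises direction so delete/change handlers
# can always work with (start <= end).
def _example_cursor_region():  # pragma: no cover - documentation sketch
    backwards = CursorRegion(start=-4, end=0)  # e.g. a four-character move left
    assert backwards.sorted() == (-4, 0)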
def vi_bindings(registry, cli_ref):
"""
Vi extensions.
# Overview of Readline Vi commands:
# http://www.catonmat.net/download/bash-vi-editing-mode-cheat-sheet.pdf
"""
basic_bindings(registry, cli_ref)
line = cli_ref().line
search_line = cli_ref().lines['search']
handle = create_handle_decorator(registry, line)
_last_character_find = [None] # (char, backwards) tuple
_search_direction = [IncrementalSearchDirection.FORWARD]
vi_transform_functions = [
# Rot 13 transformation
(('g', '?'), lambda string: codecs.encode(string, 'rot_13')),
# To lowercase
(('g', 'u'), lambda string: string.lower()),
# To uppercase.
(('g', 'U'), lambda string: string.upper()),
# Swap case.
# (XXX: If we would implement 'tildeop', the 'g' prefix is not required.)
(('g', '~'), lambda string: string.swapcase()),
]
@registry.add_after_handler_callback
def check_cursor_position(event):
"""
After every command, make sure that if we are in navigation mode, we
never put the cursor after the last character of a line. (Unless it's
an empty line.)
"""
if (
event.input_processor.input_mode == InputMode.VI_NAVIGATION and
line.document.is_cursor_at_the_end_of_line and
len(line.document.current_line) > 0):
line.cursor_position -= 1
@handle(Keys.Escape)
def _(event):
"""
Escape goes to vi navigation mode.
"""
if event.input_processor.input_mode == InputMode.SELECTION:
line.exit_selection()
event.input_processor.pop_input_mode()
else:
event.input_processor.input_mode = InputMode.VI_NAVIGATION
@handle(Keys.Up, in_mode=InputMode.VI_NAVIGATION)
def _(event):
"""
Arrow up in navigation mode.
"""
line.auto_up(count=event.arg)
@handle(Keys.Down, in_mode=InputMode.VI_NAVIGATION)
def _(event):
"""
Arrow down in navigation mode.
"""
line.auto_down(count=event.arg)
@handle(Keys.Backspace, in_mode=InputMode.VI_NAVIGATION)
def _(event):
"""
In navigation-mode, move cursor.
"""
line.cursor_position += line.document.get_cursor_left_position(count=event.arg)
@handle(Keys.ControlV, Keys.Any, in_mode=InputMode.INSERT)
def _(event):
"""
Insert a character literally (quoted insert).
"""
line.insert_text(event.data, overwrite=False)
@handle(Keys.ControlN, in_mode=InputMode.INSERT)
def _(event):
line.complete_next()
@handle(Keys.ControlN, in_mode=InputMode.VI_NAVIGATION)
def _(event):
"""
Control-N: Next completion.
"""
line.auto_down()
@handle(Keys.ControlP, in_mode=InputMode.INSERT)
def _(event):
"""
Control-P: To previous completion.
"""
line.complete_previous()
@handle(Keys.ControlY, in_mode=InputMode.INSERT)
def _(event):
"""
Accept current completion.
"""
line.complete_state = None
@handle(Keys.ControlE, in_mode=InputMode.INSERT)
def _(event):
"""
Cancel completion. Go back to originally typed text.
"""
line.cancel_completion()
@handle(Keys.ControlP, in_mode=InputMode.VI_NAVIGATION)
def _(event):
"""
CtrlP in navigation mode goes up.
"""
line.auto_up()
@handle(Keys.ControlJ, in_mode=InputMode.VI_NAVIGATION)
@handle(Keys.ControlM, in_mode=InputMode.VI_NAVIGATION)
def _(event):
"""
In navigation mode, pressing enter will always return the input.
"""
if line.validate():
line.add_to_history()
cli_ref().set_return_value(line.document)
# ** In navigation mode **
# List of navigation commands: http://hea-www.harvard.edu/~fine/Tech/vi.html
@handle('a', in_mode=InputMode.VI_NAVIGATION)
def _(event):
line.cursor_position += line.document.get_cursor_right_position()
event.input_processor.input_mode = InputMode.INSERT
@handle('A', in_mode=InputMode.VI_NAVIGATION)
def _(event):
line.cursor_position += line.document.get_end_of_line_position()
event.input_processor.input_mode = InputMode.INSERT
@handle('C', in_mode=InputMode.VI_NAVIGATION)
def _(event):
"""
# Change to end of line.
# Same as 'c$' (which is implemented elsewhere.)
"""
deleted = line.delete(count=line.document.get_end_of_line_position())
if deleted:
data = ClipboardData(deleted)
line.set_clipboard(data)
event.input_processor.input_mode = InputMode.INSERT
@handle('c', 'c', in_mode=InputMode.VI_NAVIGATION)
@handle('S', in_mode=InputMode.VI_NAVIGATION)
def _(event): # TODO: implement 'arg'
"""
Change current line
"""
# We copy the whole line.
data = ClipboardData(line.document.current_line, ClipboardDataType.LINES)
line.set_clipboard(data)
# But we delete after the whitespace
line.cursor_position += line.document.get_start_of_line_position(after_whitespace=True)
line.delete(count=line.document.get_end_of_line_position())
event.input_processor.input_mode = InputMode.INSERT
@handle('D', in_mode=InputMode.VI_NAVIGATION)
def _(event):
deleted = line.delete(count=line.document.get_end_of_line_position())
line.set_clipboard(ClipboardData(deleted))
@handle('d', 'd', in_mode=InputMode.VI_NAVIGATION)
def _(event):
"""
Delete line. (Or the following 'n' lines.)
"""
# Split string in before/deleted/after text.
lines = line.document.lines
before = '\n'.join(lines[:line.document.cursor_position_row])
deleted = '\n'.join(lines[line.document.cursor_position_row: line.document.cursor_position_row + event.arg])
after = '\n'.join(lines[line.document.cursor_position_row + event.arg:])
# Set new text.
if before and after:
before = before + '\n'
line.text = before + after
# Set cursor position. (At the start of the first 'after' line, after the leading whitespace.)
line.cursor_position = len(before) + len(after) - len(after.lstrip(' '))
# Set clipboard data
line.set_clipboard(ClipboardData(deleted, ClipboardDataType.LINES))
@handle('G', in_mode=InputMode.VI_NAVIGATION)
def _(event):
"""
If an argument is given, move to this line in the history. (for
        example, 15G.) Otherwise, go to the last line of the current string.
        """
        # If an arg has been given explicitly.
if event._arg:
line.go_to_history(event.arg - 1)
# Otherwise this goes to the last line of the file.
else:
line.cursor_position = len(line.text)
@handle('i', in_mode=InputMode.VI_NAVIGATION)
def _(event):
event.input_processor.input_mode = InputMode.INSERT
@handle('I', in_mode=InputMode.VI_NAVIGATION)
def _(event):
event.input_processor.input_mode = InputMode.INSERT
line.cursor_position += line.document.get_start_of_line_position(after_whitespace=True)
@handle('J', in_mode=InputMode.VI_NAVIGATION)
def _(event):
for i in range(event.arg):
line.join_next_line()
@handle('n', in_mode=InputMode.VI_NAVIGATION)
def _(event): # XXX: use `change_delete_move_yank_handler` and implement 'arg'
"""
Search next.
"""
line.incremental_search(_search_direction[0])
@handle('N', in_mode=InputMode.VI_NAVIGATION)
def _(event): # TODO: use `change_delete_move_yank_handler` and implement 'arg'
"""
Search previous.
"""
if _search_direction[0] == IncrementalSearchDirection.BACKWARD:
line.incremental_search(IncrementalSearchDirection.FORWARD)
else:
line.incremental_search(IncrementalSearchDirection.BACKWARD)
@handle('p', in_mode=InputMode.VI_NAVIGATION)
def _(event):
"""
Paste after
"""
for i in range(event.arg):
line.paste_from_clipboard()
@handle('P', in_mode=InputMode.VI_NAVIGATION)
def _(event):
"""
Paste before
"""
for i in range(event.arg):
line.paste_from_clipboard(before=True)
@handle('r', Keys.Any, in_mode=InputMode.VI_NAVIGATION)
def _(event):
"""
Replace single character under cursor
"""
line.insert_text(event.data * event.arg, overwrite=True)
@handle('R', in_mode=InputMode.VI_NAVIGATION)
def _(event):
"""
Go to 'replace'-mode.
"""
event.input_processor.input_mode = InputMode.VI_REPLACE
@handle('s', in_mode=InputMode.VI_NAVIGATION)
def _(event):
"""
Substitute with new text
(Delete character(s) and go to insert mode.)
"""
data = ClipboardData(''.join(line.delete() for i in range(event.arg)))
line.set_clipboard(data)
event.input_processor.input_mode = InputMode.INSERT
@handle('u', in_mode=InputMode.VI_NAVIGATION)
def _(event):
for i in range(event.arg):
line.undo()
@handle('v', in_mode=InputMode.VI_NAVIGATION)
def _(event):
line.open_in_editor()
# @handle('v', in_mode=InputMode.VI_NAVIGATION)
# def _(event):
# """
# Start characters selection.
# """
# line.start_selection(selection_type=SelectionType.CHARACTERS)
# event.input_processor.push_input_mode(InputMode.SELECTION)
@handle('V', in_mode=InputMode.VI_NAVIGATION)
def _(event):
"""
Start lines selection.
"""
line.start_selection(selection_type=SelectionType.LINES)
event.input_processor.push_input_mode(InputMode.SELECTION)
@handle('x', in_mode=InputMode.VI_NAVIGATION)
def _(event):
"""
Delete character.
"""
data = ClipboardData(line.delete(count=event.arg))
line.set_clipboard(data)
@handle('x', in_mode=InputMode.SELECTION)
@handle('d', 'd', in_mode=InputMode.SELECTION)
def _(event):
"""
Cut selection.
"""
selection_type = line.selection_state.type
deleted = line.cut_selection()
line.set_clipboard(ClipboardData(deleted, selection_type))
event.input_processor.pop_input_mode()
@handle('c', in_mode=InputMode.SELECTION)
def _(event):
"""
Change selection (cut and go to insert mode).
"""
selection_type = line.selection_state.type
deleted = line.cut_selection()
line.set_clipboard(ClipboardData(deleted, selection_type))
event.input_processor.pop_input_mode()
event.input_processor.input_mode = InputMode.INSERT
@handle('y', in_mode=InputMode.SELECTION)
def _(event):
"""
Copy selection.
"""
selection_type = line.selection_state.type
deleted = line.copy_selection()
line.set_clipboard(ClipboardData(deleted, selection_type))
event.input_processor.pop_input_mode()
@handle('X', in_mode=InputMode.VI_NAVIGATION)
def _(event):
data = line.delete_before_cursor()
line.set_clipboard(data)
@handle('y', 'y', in_mode=InputMode.VI_NAVIGATION)
@handle('Y', in_mode=InputMode.VI_NAVIGATION)
def _(event):
"""
Yank the whole line.
"""
text = '\n'.join(line.document.lines_from_current[:event.arg])
data = ClipboardData(text, ClipboardDataType.LINES)
line.set_clipboard(data)
@handle('+', in_mode=InputMode.VI_NAVIGATION)
def _(event):
"""
        Move to the first non-whitespace character of the next line.
"""
line.cursor_position += line.document.get_cursor_down_position(count=event.arg)
line.cursor_position += line.document.get_start_of_line_position(after_whitespace=True)
@handle('-', in_mode=InputMode.VI_NAVIGATION)
def _(event):
"""
        Move to the first non-whitespace character of the previous line.
"""
line.cursor_position += line.document.get_cursor_up_position(count=event.arg)
line.cursor_position += line.document.get_start_of_line_position(after_whitespace=True)
@handle('>', '>', in_mode=InputMode.VI_NAVIGATION)
def _(event):
"""
Indent lines.
"""
current_row = line.document.cursor_position_row
indent(line, current_row, current_row + event.arg)
@handle('<', '<', in_mode=InputMode.VI_NAVIGATION)
def _(event):
"""
Unindent lines.
"""
current_row = line.document.cursor_position_row
unindent(line, current_row, current_row + event.arg)
@handle('>', in_mode=InputMode.SELECTION)
def _(event):
"""
Indent selection
"""
selection_type = line.selection_state.type
if selection_type == SelectionType.LINES:
from_, to = line.document.selection_range()
from_, _ = line.document.translate_index_to_position(from_)
to, _ = line.document.translate_index_to_position(to)
indent(line, from_ - 1, to, count=event.arg) # XXX: why does translate_index_to_position return 1-based indexing???
event.input_processor.pop_input_mode()
@handle('<', in_mode=InputMode.SELECTION)
def _(event):
"""
Unindent selection
"""
selection_type = line.selection_state.type
if selection_type == SelectionType.LINES:
from_, to = line.document.selection_range()
from_, _ = line.document.translate_index_to_position(from_)
to, _ = line.document.translate_index_to_position(to)
unindent(line, from_ - 1, to, count=event.arg)
event.input_processor.pop_input_mode()
@handle('O', in_mode=InputMode.VI_NAVIGATION)
def _(event):
"""
Open line above and enter insertion mode
"""
line.insert_line_above()
event.input_processor.input_mode = InputMode.INSERT
@handle('o', in_mode=InputMode.VI_NAVIGATION)
def _(event):
"""
Open line below and enter insertion mode
"""
line.insert_line_below()
event.input_processor.input_mode = InputMode.INSERT
@handle('~', in_mode=InputMode.VI_NAVIGATION)
def _(event):
"""
Reverse case of current character and move cursor forward.
"""
c = line.document.current_char
if c is not None and c != '\n':
c = (c.upper() if c.islower() else c.lower())
line.insert_text(c, overwrite=True)
@handle('/', in_mode=InputMode.VI_NAVIGATION)
@handle(Keys.ControlS, in_mode=InputMode.INSERT)
@handle(Keys.ControlS, in_mode=InputMode.VI_NAVIGATION)
@handle(Keys.ControlS, in_mode=InputMode.VI_SEARCH)
def _(event):
"""
Vi-style forward search.
"""
_search_direction[0] = direction = IncrementalSearchDirection.FORWARD
line.incremental_search(direction)
if event.input_processor.input_mode != InputMode.VI_SEARCH:
event.input_processor.push_input_mode(InputMode.VI_SEARCH)
@handle('?', in_mode=InputMode.VI_NAVIGATION)
@handle(Keys.ControlR, in_mode=InputMode.INSERT)
@handle(Keys.ControlR, in_mode=InputMode.VI_NAVIGATION)
@handle(Keys.ControlR, in_mode=InputMode.VI_SEARCH)
def _(event):
"""
Vi-style backward search.
"""
_search_direction[0] = direction = IncrementalSearchDirection.BACKWARD
line.incremental_search(direction)
if event.input_processor.input_mode != InputMode.VI_SEARCH:
event.input_processor.push_input_mode(InputMode.VI_SEARCH)
@handle('#', in_mode=InputMode.VI_NAVIGATION)
def _(event):
"""
        Go to the previous occurrence of this word.
"""
pass
@handle('*', in_mode=InputMode.VI_NAVIGATION)
def _(event):
"""
        Go to the next occurrence of this word.
"""
pass
@handle('(', in_mode=InputMode.VI_NAVIGATION)
def _(event):
# TODO: go to begin of sentence.
pass
@handle(')', in_mode=InputMode.VI_NAVIGATION)
def _(event):
# TODO: go to end of sentence.
pass
def change_delete_move_yank_handler(*keys, **kw):
"""
Register a change/delete/move/yank handlers. e.g. 'dw'/'cw'/'w'/'yw'
The decorated function should return a ``CursorRegion``.
This decorator will create both the 'change', 'delete' and move variants,
based on that ``CursorRegion``.
"""
no_move_handler = kw.pop('no_move_handler', False)
# TODO: Also do '>' and '<' indent/unindent operators.
# TODO: Also "gq": text formatting
# See: :help motion.txt
def decorator(func):
if not no_move_handler:
@handle(*keys, in_mode=InputMode.VI_NAVIGATION)
@handle(*keys, in_mode=InputMode.SELECTION)
def move(event):
""" Create move handler. """
region = func(event)
line.cursor_position += region.start
def create_transform_handler(transform_func, *a):
@handle(*(a + keys), in_mode=InputMode.VI_NAVIGATION)
def _(event):
""" Apply transformation (uppercase, lowercase, rot13, swap case). """
region = func(event)
start, end = region.sorted()
# Transform.
line.transform_region(
line.cursor_position + start,
line.cursor_position + end,
transform_func)
# Move cursor
line.cursor_position += (region.end or region.start)
for k, f in vi_transform_functions:
create_transform_handler(f, *k)
@handle('y', *keys, in_mode=InputMode.VI_NAVIGATION)
def yank_handler(event):
""" Create yank handler. """
region = func(event)
start, end = region.sorted()
substring = line.text[line.cursor_position + start: line.cursor_position + end]
if substring:
line.set_clipboard(ClipboardData(substring))
def create(delete_only):
""" Create delete and change handlers. """
            @handle('cd'[delete_only], *keys, in_mode=InputMode.VI_NAVIGATION)
def _(event):
region = func(event)
deleted = ''
if region:
start, end = region.sorted()
# Move to the start of the region.
line.cursor_position += start
# Delete until end of region.
deleted = line.delete(count=end-start)
# Set deleted/changed text to clipboard.
if deleted:
line.set_clipboard(ClipboardData(''.join(deleted)))
# Only go back to insert mode in case of 'change'.
if not delete_only:
event.input_processor.input_mode = InputMode.INSERT
create(True)
create(False)
return func
return decorator
@change_delete_move_yank_handler('b') # Move one word or token left.
    @change_delete_move_yank_handler('B') # Move one non-blank word left (TODO: difference between 'b' and 'B')
def key_b(event):
return CursorRegion(line.document.find_start_of_previous_word(count=event.arg) or 0)
@change_delete_move_yank_handler('$')
def key_dollar(event):
""" 'c$', 'd$' and '$': Delete/change/move until end of line. """
return CursorRegion(line.document.get_end_of_line_position())
@change_delete_move_yank_handler('w') # TODO: difference between 'w' and 'W'
def key_w(event):
""" 'cw', 'de', 'w': Delete/change/move one word. """
return CursorRegion(line.document.find_next_word_beginning(count=event.arg) or 0)
@change_delete_move_yank_handler('e') # TODO: difference between 'e' and 'E'
def key_e(event):
""" 'ce', 'de', 'e' """
end = line.document.find_next_word_ending(count=event.arg)
return CursorRegion(end - 1 if end else 0)
@change_delete_move_yank_handler('i', 'w', no_move_handler=True)
def key_iw(event):
""" ciw and diw """
# Change inner word: change word under cursor.
start, end = line.document.find_boundaries_of_current_word()
return CursorRegion(start, end)
@change_delete_move_yank_handler('^')
def key_circumflex(event):
""" 'c^', 'd^' and '^': Soft start of line, after whitespace. """
return CursorRegion(line.document.get_start_of_line_position(after_whitespace=True))
@change_delete_move_yank_handler('0', no_move_handler=True)
def key_zero(event):
"""
'c0', 'd0': Hard start of line, before whitespace.
(The move '0' key is implemented elsewhere, because a '0' could also change the `arg`.)
"""
return CursorRegion(line.document.get_start_of_line_position(after_whitespace=False))
def create_ci_ca_handles(ci_start, ci_end, inner):
# TODO: 'dab', 'dib', (brackets or block) 'daB', 'diB', Braces.
# TODO: 'dat', 'dit', (tags (like xml)
"""
Delete/Change string between this start and stop character. But keep these characters.
This implements all the ci", ci<, ci{, ci(, di", di<, ca", ca<, ... combinations.
"""
@change_delete_move_yank_handler('ai'[inner], ci_start, no_move_handler=True)
@change_delete_move_yank_handler('ai'[inner], ci_end, no_move_handler=True)
def _(event):
start = line.document.find_backwards(ci_start, in_current_line=True)
end = line.document.find(ci_end, in_current_line=True)
if start is not None and end is not None:
offset = 0 if inner else 1
return CursorRegion(start + 1 - offset, end + offset)
for inner in (False, True):
for ci_start, ci_end in [('"', '"'), ("'", "'"), ("`", "`"),
('[', ']'), ('<', '>'), ('{', '}'), ('(', ')')]:
create_ci_ca_handles(ci_start, ci_end, inner)
@change_delete_move_yank_handler('{') # TODO: implement 'arg'
def _(event):
"""
Move to previous blank-line separated section.
Implements '{', 'c{', 'd{', 'y{'
"""
line_index = line.document.find_previous_matching_line(
lambda text: not text or text.isspace())
if line_index:
index = line.document.get_cursor_up_position(count=-line_index)
else:
index = 0
return CursorRegion(index)
@change_delete_move_yank_handler('}') # TODO: implement 'arg'
def _(event):
"""
Move to next blank-line separated section.
Implements '}', 'c}', 'd}', 'y}'
"""
line_index = line.document.find_next_matching_line(
lambda text: not text or text.isspace())
if line_index:
index = line.document.get_cursor_down_position(count=line_index)
else:
index = 0
return CursorRegion(index)
@change_delete_move_yank_handler('f', Keys.Any)
def _(event):
"""
        Go to the next occurrence of a character. Typing 'fx' will move the
        cursor to the next occurrence of the character 'x'.
"""
_last_character_find[0] = (event.data, False)
match = line.document.find(event.data, in_current_line=True, count=event.arg)
return CursorRegion(match or 0)
@change_delete_move_yank_handler('F', Keys.Any)
def _(event):
"""
        Go to the previous occurrence of a character. Typing 'Fx' will move the
        cursor to the previous occurrence of the character 'x'.
"""
_last_character_find[0] = (event.data, True)
return CursorRegion(line.document.find_backwards(event.data, in_current_line=True, count=event.arg) or 0)
@change_delete_move_yank_handler('t', Keys.Any)
def _(event):
"""
        Move right to the next occurrence of c, then one char backward.
"""
_last_character_find[0] = (event.data, False)
match = line.document.find(event.data, in_current_line=True, count=event.arg)
return CursorRegion(match - 1 if match else 0)
@change_delete_move_yank_handler('T', Keys.Any)
def _(event):
"""
        Move left to the previous occurrence of c, then one char forward.
"""
_last_character_find[0] = (event.data, True)
match = line.document.find_backwards(event.data, in_current_line=True, count=event.arg)
return CursorRegion(match + 1 if match else 0)
def repeat(reverse):
"""
Create ',' and ';' commands.
"""
@change_delete_move_yank_handler(',' if reverse else ';')
def _(event):
# Repeat the last 'f'/'F'/'t'/'T' command.
pos = 0
if _last_character_find[0]:
char, backwards = _last_character_find[0]
if reverse:
backwards = not backwards
if backwards:
pos = line.document.find_backwards(char, in_current_line=True, count=event.arg)
else:
pos = line.document.find(char, in_current_line=True, count=event.arg)
return CursorRegion(pos or 0)
repeat(True)
repeat(False)
@change_delete_move_yank_handler('h')
@change_delete_move_yank_handler(Keys.Left)
def _(event):
""" Implements 'ch', 'dh', 'h': Cursor left. """
return CursorRegion(line.document.get_cursor_left_position(count=event.arg))
@change_delete_move_yank_handler('j')
def _(event):
""" Implements 'cj', 'dj', 'j', ... Cursor up. """
return CursorRegion(line.document.get_cursor_down_position(count=event.arg))
@change_delete_move_yank_handler('k')
def _(event):
""" Implements 'ck', 'dk', 'k', ... Cursor up. """
return CursorRegion(line.document.get_cursor_up_position(count=event.arg))
@change_delete_move_yank_handler('l')
@change_delete_move_yank_handler(' ')
@change_delete_move_yank_handler(Keys.Right)
def _(event):
""" Implements 'cl', 'dl', 'l', 'c ', 'd ', ' '. Cursor right. """
return CursorRegion(line.document.get_cursor_right_position(count=event.arg))
@change_delete_move_yank_handler('H')
def _(event):
""" Implements 'cH', 'dH', 'H'. """
# Vi moves to the start of the visible region.
# cursor position 0 is okay for us.
return CursorRegion(-len(line.document.text_before_cursor))
@change_delete_move_yank_handler('L')
def _(event):
# Vi moves to the end of the visible region.
# cursor position 0 is okay for us.
return CursorRegion(len(line.document.text_after_cursor))
@change_delete_move_yank_handler('%')
def _(event):
"""
Implements 'c%', 'd%', '%, 'y%' (Move to corresponding bracket.)
        If an 'arg' has been given, go to this % position in the file.
"""
if event._arg:
# If 'arg' has been given, the meaning of % is to go to the 'x%'
# row in the file.
if 0 < event.arg <= 100:
absolute_index = line.document.translate_row_col_to_index(
int(event.arg * line.document.line_count / 100), 0)
return CursorRegion(absolute_index - line.document.cursor_position)
else:
return CursorRegion(0) # Do nothing.
else:
# Move to the corresponding opening/closing bracket (()'s, []'s and {}'s).
return CursorRegion(line.document.matching_bracket_position)
@change_delete_move_yank_handler('|')
def _(event):
# Move to the n-th column (you may specify the argument n by typing
# it on number keys, for example, 20|).
return CursorRegion(line.document.get_column_cursor_position(event.arg))
@change_delete_move_yank_handler('g', 'g')
def _(event):
"""
Implements 'gg', 'cgg', 'ygg'
"""
# Move to the top of the input.
return CursorRegion(line.document.home_position)
@handle('!', in_mode=InputMode.VI_NAVIGATION)
def _(event):
"""
'!' opens the system prompt.
"""
event.input_processor.push_input_mode(InputMode.SYSTEM)
@handle(Keys.Any, in_mode=InputMode.VI_NAVIGATION)
@handle(Keys.Any, in_mode=InputMode.SELECTION)
def _(event):
"""
        Always handle numeric keys in navigation mode as arg.
"""
if event.data in '123456789' or (event._arg and event.data == '0'):
event.append_to_arg_count(event.data)
elif event.data == '0':
line.cursor_position += line.document.get_start_of_line_position(after_whitespace=False)
@handle(Keys.Any, in_mode=InputMode.VI_REPLACE)
def _(event):
"""
Insert data at cursor position.
"""
line.insert_text(event.data, overwrite=True)
@handle(Keys.Any, in_mode=InputMode.VI_SEARCH)
def _(event):
"""
Insert text after the / or ? prompt.
"""
search_line.insert_text(event.data)
line.set_search_text(search_line.text)
@handle(Keys.ControlJ, in_mode=InputMode.VI_SEARCH)
@handle(Keys.ControlM, in_mode=InputMode.VI_SEARCH)
def _(event):
"""
Enter at the / or ? prompt.
"""
        # Add query to the history of the search line.
search_line.add_to_history()
search_line.reset()
# Go back to navigation mode.
event.input_processor.pop_input_mode()
@handle(Keys.Backspace, in_mode=InputMode.VI_SEARCH)
def _(event):
"""
Backspace at the vi-search prompt.
"""
if search_line.text:
search_line.delete_before_cursor()
line.set_search_text(search_line.text)
else:
# If no text after the prompt, cancel search.
line.exit_isearch(restore_original_line=True)
search_line.reset()
event.input_processor.pop_input_mode()
@handle(Keys.Up, in_mode=InputMode.VI_SEARCH)
def _(event):
"""
Go to the previous history item at the search prompt.
"""
search_line.auto_up()
line.set_search_text(search_line.text)
@handle(Keys.Down, in_mode=InputMode.VI_SEARCH)
def _(event):
"""
Go to the next history item at the search prompt.
"""
search_line.auto_down()
search_line.cursor_position = len(search_line.text)
line.set_search_text(search_line.text)
@handle(Keys.Left, in_mode=InputMode.VI_SEARCH)
def _(event):
"""
Arrow left at the search prompt.
"""
search_line.cursor_left()
@handle(Keys.Right, in_mode=InputMode.VI_SEARCH)
def _(event):
"""
Arrow right at the search prompt.
"""
search_line.cursor_right()
@handle(Keys.ControlC, in_mode=InputMode.VI_SEARCH)
def _(event):
"""
Cancel search.
"""
line.exit_isearch(restore_original_line=True)
search_line.reset()
event.input_processor.pop_input_mode()
def create_selection_transform_handler(keys, transform_func):
"""
Apply transformation on selection (uppercase, lowercase, rot13, swap case).
"""
@handle(*keys, in_mode=InputMode.SELECTION)
def _(event):
range = line.document.selection_range()
if range:
line.transform_region(range[0], range[1], transform_func)
event.input_processor.pop_input_mode()
for k, f in vi_transform_functions:
create_selection_transform_handler(k, f)
@handle(Keys.ControlX, Keys.ControlL, in_mode=InputMode.INSERT)
def _(event):
"""
Pressing the ControlX - ControlL sequence in Vi mode does line
completion based on the other lines in the document and the history.
"""
line.start_history_lines_completion()
@handle(Keys.ControlX, Keys.ControlF, in_mode=InputMode.INSERT)
def _(event):
"""
Complete file names.
"""
# TODO
pass
| {
"content_hash": "e7bf69006ffc4f2389ca7af08d0d3157",
"timestamp": "",
"source": "github",
"line_count": 973,
"max_line_length": 128,
"avg_line_length": 34.87872559095581,
"alnum_prop": 0.5826384182455727,
"repo_name": "Carreau/python-prompt-toolkit",
"id": "05e9224ca03b38591665dd67902190c1fc82ad7e",
"size": "33937",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "prompt_toolkit/key_bindings/vi.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "354355"
},
{
"name": "Shell",
"bytes": "6699"
}
],
"symlink_target": ""
} |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# By: Reza (رضا, User:reza1615 on fa.wikipedia)
# Distributed under the terms of the CC-BY-SA 3.0.
import wikipedia
import pagegenerators,query,sys
import fa_cosmetic_changes
import re, os, codecs, catlib,login
wikipedia.config.put_throttle = 0
wikipedia.put_throttle.setDelay()
secondwiki='en'
faSite = wikipedia.getSite('fa')
enSite = wikipedia.getSite(secondwiki)
txtTmp=''
faChrs = u'ءاآأإئؤبپتثجچحخدذرزژسشصضطظعغفقکگلمنوهیًٌٍَُِّْٓيك'
msg = u'ربات: افزودن نگارخانهٔ آزاد به مقاله'  # "Bot: adding a free gallery to the article"
usernames=u'Fatranslator'
_cache={}
def login_fa(usernames):
try:
password_fa = open("/data/project/rezabot/pywikipedia/passfile2", 'r')
except:
password_fa = open("/home/reza/compat/passfile2", 'r')
password_fa=password_fa.read().replace('"','').strip()
passwords=password_fa.split('(')[1].split(',')[1].split(')')[0].strip()
#-------------------------------------------
botlog=login.LoginManager(password=passwords,username=usernames,site=faSite)
botlog.login()
def englishdictionry( enlink ,firstsite,secondsite):
try:
enlink=unicode(str(enlink),'UTF-8').replace(u'[[',u'').replace(u']]',u'').replace(u'en:',u'').replace(u'fa:',u'')
except:
enlink=enlink.replace(u'[[',u'').replace(u']]',u'').replace(u'en:',u'').replace(u'fa:',u'')
enlink=enlink.split(u'#')[0].strip()
enlink=enlink.replace(u' ',u'_')
if _cache.get(tuple([enlink, 'englishdictionry'])):
return _cache[tuple([enlink, 'englishdictionry'])]
if enlink==u'':
_cache[tuple([enlink, 'englishdictionry'])]=False
return False
site = wikipedia.getSite(firstsite)
sitesecond= wikipedia.getSite(secondsite)
params = {
'action': 'query',
'prop': 'langlinks',
'titles': enlink,
'redirects': 1,
'lllimit':500,
}
try:
categoryname = query.GetData(params,site)
for item in categoryname[u'query'][u'pages']:
case=categoryname[u'query'][u'pages'][item][u'langlinks']
for item in case:
if item[u'lang']==secondsite:
intersec=item[u'*']
break
result=intersec
_cache[tuple([enlink, 'englishdictionry'])]=result
return result
except:
_cache[tuple([enlink, 'englishdictionry'])]=False
return False
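# Illustrative sketch (hypothetical titles): englishdictionry(u'[[Iran]]',
# 'en', 'fa') queries the langlinks API once and returns the Persian title,
# or False when no interwiki exists; repeated calls for the same title are
# answered from the module-level _cache.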
def text_translator(text):
linken = re.findall(ur'\[\[.*?\]\]',text, re.S)
for item in linken:
itemmain=item
item=item.replace(u'en:',u'')
if item.find('user:')!=-1 or item.find('User:')!=-1 or item.find('template:')!=-1 or item.find('Template:')!=-1 or item.find('category:')!=-1 or item.find('Category:')!=-1 or item.find('Wikipedia:')!=-1 or item.find('wikipedia:')!=-1 or item.find('Talk:')!=-1 or item.find('talk:')!=-1 or item.find('Help:')!=-1 or item.find('help:')!=-1:
continue
itemen=item.split(u'|')[0].replace(u'[[',u'').replace(u']]',u'').strip()
if text.find(itemmain)!=-1:
itemfa=englishdictionry(itemen ,'en','fa')
wikipedia.output(itemen)
else:
continue
if itemfa==False:
itemen=item.replace(u'[[',u'').replace(u']]',u'').strip()
itemen=itemen.replace(u'[[',u'').replace(u']]',u'')
text=text.replace(u'[['+itemen+u']]',u'@1@'+itemen+u'@2@')
continue
else:
text=text.replace(itemmain,u'@1@'+itemfa+u'@2@')
linken = re.findall(ur'\[\[.*?\]\]',text, re.S)
text=text.replace(u'@1@',u'[[').replace(u'@2@',u']]')
return text
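# Illustrative sketch (hypothetical link): for text containing u'[[Iran]]',
# text_translator swaps in the Persian interwiki title when one exists, while
# links without a Persian equivalent keep their original English title. The
# @1@/@2@ placeholders only shield already-processed links from being matched
# again by the second re.findall pass before being turned back into [[...]].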
def checksite(gallery,text):
commons_images,image_texts=[],[]
for aa in range(0,30):
gallery=gallery.replace(u' |',u'|').replace(u'\t',u'')
gallery=gallery.replace(u'\n\n',u'\n').replace(u'\n\n',u'\n').replace(u'\n\n',u'\n').replace(u'\n\n',u'\n').replace(u'\n\n',u'\n')
images=gallery.replace(u'\r',u'').split(u'\n')
for image in images:
if image.strip()=="":
continue
imagefa=image
image=image.split(u'|')[0].strip()
imagecheck=image.replace(u'File:',u'').replace(u'file:',u'').replace(u'Image:',u'').replace(u'image:',u'')
imagecheck=imagecheck.replace(u'پرونده:',u'').replace(u'تصویر:',u'').replace(u'تصوير:',u'').replace(u'رسانه:',u'')
if image=="":
continue
if image.find(u'.ogg')!=-1 or image.find(u'>')!=-1 or image.find(u'.oga')!=-1 or image.find(u'.ogv')!=-1 or image.find(u'.mid')!=-1:
continue
params = {
'action': 'query',
'titles': image,#Image name
'prop': 'imageinfo'
}
try :
extend=imagefa.split(u'|')[1]
image_text=imagefa.replace(image.split(u'|')[0]+u'|',image.split(u'|')[0]+u'|<!--')+u'-->'
except:
image_text=imagefa.split(u'|')[0].strip()+u'|'
try:
queryresult = query.GetData(params)
items=queryresult['query']['pages']
for item in items:
if queryresult['query']['pages'][item]['imagerepository']=='shared':
if text.lower().find(imagecheck.lower())!=-1 or text.lower().find(imagecheck.replace(u'_',u' ').lower())!=-1:
continue
image_texts.append(image_text)
else:
continue
except:
continue
if image_texts !=[]:
gallery=u'<gallery>\n'
for item in image_texts:
gallery+=item+u'\n'
gallery+=u'</gallery>\n'
gallery=text_translator(gallery)
wikipedia.output(u'\03{lightgreen}'+gallery+u'\03{default}')
return gallery
else:
return False
def enwikiimagecheck(text_en2):
try:
im = re.search(ur'<(?:gallery)[^<>]*>[\S\s]*?</(?:gallery)>', text_en2)
imagename=im.group(0).strip()
return imagename.replace(u'<gallery>',u'<gallery>\n').replace(u'</gallery>',u'\n</gallery>')
except:
return False
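# Illustrative sketch (hypothetical markup): given text_en2 containing
# u'<gallery>File:A.jpg|caption</gallery>', enwikiimagecheck returns that
# gallery block with a newline forced after <gallery> and before </gallery>,
# so checksite() can split it line by line; it returns False when the English
# page has no gallery at all.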
def BotRun(page,text_fa,text_en):
try:
pagename=page.replace(u'Fa:',u'').replace(u'fa:',u'').replace(u'[[',u'').replace(u']]',u'').strip()
except:
pagename=unicode(str(page),'UTF-8').replace(u'Fa:',u'').replace(u'fa:',u'').replace(u'[[',u'').replace(u']]',u'').strip()
page=wikipedia.Page(faSite,pagename)
#--------------------------------------------------------------action that you want to do on pages-----------------
text_fa2=text_fa.replace(u'\r',u'')
text_en2=text_en.replace(u'\r',u'')
imagename=u''
try:
imagename=enwikiimagecheck(text_en2)
if imagename:
engallerry=checksite(imagename,text_fa)
if engallerry:
try:
imfa = re.search(ur'<(?:gallery)[^<>]*>[\S\s]*?</(?:gallery)>', text_fa2)
imagename=imfa.group(0).strip()
wikipedia.output( u"--fa wiki's page had gallery so it pass!--" )
return False
except:
text_fa2=text_fa2.replace(u'\r',u'')
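                    # Insert a u'== نگارخانه ==' ("Gallery") section before the
                    # first standard Persian heading found below -- "See also",
                    # "Notes", "References", "External links" -- else before
                    # the categories or interwikis, else append it at the end.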
if text_fa2.find(u'== جستارهای وابسته ==')!=-1 or text_fa2.find(u'==جستارهای وابسته==')!=-1 or text_fa2.find(u'== جُستارهای وابسته ==')!=-1 or text_fa2.find(u'==جُستارهای وابسته==')!=-1:
text_fa2=text_fa2.replace(u'==جستارهای وابسته==',u'== جستارهای وابسته ==').replace(u'== جُستارهای وابسته ==',u'== جستارهای وابسته ==').replace(u'==جُستارهای وابسته==',u'== جستارهای وابسته ==')
text_fa2=text_fa2.replace(u'== جستارهای وابسته ==',u'== نگارخانه ==\n'+engallerry+u'\n== جستارهای وابسته ==')
return text_fa2
if text_fa2.find(u'== پانویس ==')!=-1 or text_fa2.find(u'==پانویس==')!=-1 :
text_fa2=text_fa2.replace(u'== پانویس ==',u'== نگارخانه ==\n'+engallerry+u'\n== پانویس ==')
return text_fa2
if text_fa2.find(u'== منابع ==')!=-1 or text_fa2.find(u'==منابع==')!=-1 or text_fa2.find(u'==منبع==')!=-1 or text_fa2.find(u'==منبعها==')!=-1 or text_fa2.find(u'== منبع ==')!=-1 or text_fa2.find(u'== منبعها ==')!=-1:
text_fa2=text_fa2.replace(u'==منابع==',u'== منابع ==').replace(u'==منبع==',u'== منابع ==').replace(u'==منبعها==',u'== منابع ==').replace(u'== منبع ==',u'== منابع ==').replace(u'== منبعها ==',u'== منابع ==')
text_fa2=text_fa2.replace(u'== منابع ==',u'== نگارخانه ==\n'+engallerry+u'\n== منابع ==')
return text_fa2
if text_fa2.find(u'== پیوند به بیرون ==')!=-1 or text_fa2.find(u'==پیوند به بیرون==')!=-1 :
text_fa2=text_fa2.replace(u'== پیوند به بیرون ==',u'== نگارخانه ==\n'+engallerry+u'\n== پیوند به بیرون ==')
return text_fa2
if text_fa2.find(ur'رده:')!=-1:
num=text_fa2.find(ur'[[رده:')
text_fa2=text_fa2[:num]+u'== نگارخانه ==\n'+engallerry+'\n'+text_fa2[num:]
else:
m = re.search(ur'\[\[([a-z]{2,3}|[a-z]{2,3}\-[a-z\-]{2,}|simple):.*?\]\]', text_fa2)
if m:
if m.group(0)==u'[[en:Article]]':
try:
                                    if text_fa2.count(u'[[en:Article]] --->')==1:
                                        text_fa2=text_fa2.split(u'[[en:Article]] --->')[0]+u'[[en:Article]] --->\n'+u'== نگارخانه ==\n'+engallerry+'\n'+text_fa2.split(u'[[en:Article]] --->')[1]
else:
text_fa2+='\n== نگارخانه ==\n'+engallerry
except:
text_fa2+='\n== نگارخانه ==\n'+engallerry
else:
num=text_fa2.find(m.group(0))
text_fa2=text_fa2[:num]+u'== نگارخانه ==\n'+engallerry+'\n'+text_fa2[num:]
else:
text_fa2+='\n== نگارخانه ==\n'+engallerry
return text_fa2
else:
wikipedia.output( u"--en wiki's dosen't have gallery or images are locale!--" )
return False
except:
wikipedia.output( u"--en wiki's dosen't have gallery or images are locale!--" )
return False
#----------------------------------------------------------end of action that you want to do on pages---------------
def enpageget(interwiki):
text_en=u' '
for inter in interwiki:
inters=str(inter)
if inters.find(secondwiki+':')!=-1:
if inters.find('#')!=-1:
return u' '
enSite = wikipedia.getSite(secondwiki)
page=wikipedia.Page(enSite,inter.title())
try:
if not page.canBeEdited():
wikipedia.output( u'Skipping locked page %s' % page.title() )
return u' '
text_en = page.get()#------------------------------geting pages content
return text_en
except wikipedia.NoPage:
wikipedia.output( u'Page %s not found' % page.title() )
continue
except wikipedia.IsRedirectPage:#----------------------geting pages redirects contents
pageRedirect = page.getRedirectTarget()
try:
text_en = pageRedirect.get()
                wikipedia.output( u'Page %s was a redirect; using its target!' % pageRedirect )
return text_en
except:
continue
except:
continue
return u' '
def run(generator):
for pages in generator:
if englishdictionry( pages ,'fa','en')==False:
            wikipedia.output( pages.title()+u' without interwiki')
continue
try:
pagename=unicode(str(pages),'UTF-8').replace(u'Fa:',u'').replace(u'fa:',u'').replace(u'[[',u'').replace(u']]',u'').strip()
except:
pagename=str(pages).replace(u'Fa:',u'').replace(u'fa:',u'').replace(u'[[',u'').replace(u']]',u'').strip()
if pagename.find(u':')!=-1:
continue
pagefa=wikipedia.Page(faSite,pagename)
try:
if not pagefa.canBeEdited():
wikipedia.output( u'Skipping locked page %s' % pagefa.title() )
continue
text_fa = pagefa.get()#------------------------------geting pages content
interwikis= pagefa.interwiki()
except wikipedia.NoPage:
wikipedia.output( u'Page %s not found' % pagefa.title() )
continue
except wikipedia.IsRedirectPage:#----------------------geting pages redirects contents
pageRedirect = pagefa.getRedirectTarget()
text_fa = pageRedirect.get()
interwikis= pagefa.interwiki()
            wikipedia.output( u'Page %s was a redirect; using its target!' % pageRedirect )
except:
continue
if pagefa.namespace()!=0:
wikipedia.output( u'---------------------------')
wikipedia.output( pagename)
wikipedia.output( u"names space is not article's namespace")
continue
if interwikis==[]:
wikipedia.output( u'---------------------------')
wikipedia.output( pagename)
wikipedia.output( u"dosen't have english page!")
continue
text_en=enpageget(interwikis)
try:
test=text_en.replace(u'\n',u'')
except:
wikipedia.output( u'---------------------------')
wikipedia.output( pagename)
wikipedia.output( u"dosen't have english page!")
continue
if text_en==u' ' or text_en==u'':
wikipedia.output( u'---------------------------')
wikipedia.output( pagename)
wikipedia.output( u"dosen't have english page!")
continue
wikipedia.output( u'---------------------------')
wikipedia.output( pagename)
if text_fa.find(u'<gallery')!=-1 or text_fa.find(u'</gallery')!=-1:
wikipedia.output( u'---------------------------')
wikipedia.output( pagename)
wikipedia.output( u"--fa wiki's page has gallery so it is passed")
continue
pagesize=sys.getsizeof (text_fa)
        if pagesize>15000:#--------------------------------------------------- stub articles only ----------------
wikipedia.output( u'---------------------------')
wikipedia.output( pagename)
wikipedia.output( u"--fa wiki's page is'nt SubArticle" )
continue
new_text=BotRun(pagename,text_fa,text_en)
if new_text:
savepart(pagename,new_text )#---------------saving changes in page with new_text content-----------------------------------
else:
wikipedia.output( u'Skipping %s ' % pagename )
def savepart( page,new_text,msg=msg):
pagename=page.replace(u'Fa:',u'').replace(u'fa:',u'').replace(u'[[',u'').replace(u']]',u'').strip()
page=wikipedia.Page(faSite,pagename)
new_text,cleaning_version,msg_clean=fa_cosmetic_changes.fa_cosmetic_changes(new_text,page,msg_short=False)
if msg_clean:
msg+=u' + '+msg_clean
msg=msg.replace(u'+ +',u'+').strip()
try:
page.put( new_text,msg ,watchArticle = None)
except wikipedia.EditConflict:
wikipedia.output( u'Skipping %s because of edit conflict' % ( page.title() ) )
except wikipedia.SpamfilterError,url:
wikipedia.output( u'Cannot change %s because of blacklist entry %s' % ( page.title(),url ) )
def categorydown(listacategory):
wikipedia.config.put_throttle = 0
wikipedia.put_throttle.setDelay()
count=1
for catname in listacategory:
count+=1
if count==200:
break
gencat = pagegenerators.SubCategoriesPageGenerator(catname, recurse=False)
for subcat in gencat:
try:
wikipedia.output(subcat)
except:
wikipedia.output(str(subcat))
if subcat in listacategory:
continue
else:
listacategory.append(subcat)
return listacategory
def facatlist(facat):
wikipedia.config.put_throttle = 0
wikipedia.put_throttle.setDelay()
count=0
listenpageTitle=[]
PageTitle=facat.replace(u'[[',u'').replace(u']]',u'').strip()
language='fa'
PageTitles =[PageTitle]
for PageTitle in PageTitles:
cat = catlib.Category( wikipedia.getSite(language),PageTitle )
listacategory=[cat]
listacategory=categorydown(listacategory)
for enpageTitle in listacategory:
enpageTitle=str(enpageTitle).split(u'|')[0].split(u']]')[0].replace(u'[[',u'').strip()
cat = catlib.Category( wikipedia.getSite(language),enpageTitle )
gent = pagegenerators.CategorizedPageGenerator( cat )
for pagework in gent:
count+=1
try:
link=str(pagework).split(u'|')[0].split(u']]')[0].replace(u'[[',u'').strip()
except:
pagework=unicode(str(pagework),'UTF-8')
link=pagework.split(u'|')[0].split(u']]')[0].replace(u'[[',u'').strip()
wikipedia.output(link)
fapagetitle=link
wikipedia.output(u'adding '+fapagetitle+u' to fapage lists')
listenpageTitle.append(fapagetitle)
if listenpageTitle==[]:
return False
return listenpageTitle
def main():
summary_commandline,template,gen = None,None,None
exceptions,PageTitles,namespaces = [],[],[]
cat=''
autoText,autoTitle = False,False
genFactory = pagegenerators.GeneratorFactory()
arg=False#------if you dont want to work with arguments leave it False if you want change it to True---
if arg==False:
for arg in wikipedia.handleArgs():
if arg == '-autotitle':
autoTitle = True
elif arg == '-autotext':
autoText = True
elif arg.startswith( '-page:' ):
if len(arg) == 6:
                    PageTitles.append(wikipedia.input( u'Which page do you want to change?' ))
else:
PageTitles.append(arg[6:])
elif arg.startswith( '-cat:' ):
if len(arg) == 5:
                    cat=wikipedia.input( u'Which Category do you want to change?' )
else:
cat='Category:'+arg[5:]
elif arg.startswith( '-template:' ):
if len(arg) == 10:
                    template.append(wikipedia.input( u'Which Template do you want to change?' ))
else:
template.append('Template:'+arg[10:])
elif arg.startswith('-except:'):
exceptions.append(arg[8:])
elif arg.startswith( '-namespace:' ):
namespaces.append( int( arg[11:] ) )
elif arg.startswith( '-ns:' ):
namespaces.append( int( arg[4:] ) )
elif arg.startswith( '-summary:' ):
wikipedia.setAction( arg[9:] )
summary_commandline = True
else:
generator = genFactory.handleArg(arg)
if generator:
gen = generator
else:
PageTitles = [raw_input(u'Page:> ').decode('utf-8')]
if cat!='':
facatfalist=facatlist(cat)
if facatfalist!=False:
run(facatfalist)
if PageTitles:
pages = [wikipedia.Page(faSite,PageTitle) for PageTitle in PageTitles]
gen = iter( pages )
if not gen:
wikipedia.stopme()
sys.exit()
if namespaces != []:
gen = pagegenerators.NamespaceFilterPageGenerator( gen,namespaces )
preloadingGen = pagegenerators.PreloadingGenerator( gen,pageNumber = 60 )#---number of pages that you want load at same time
run(preloadingGen)
if __name__ == "__main__":
login_fa(usernames)
main() | {
"content_hash": "8487d106f42f1ef52cc33afc3189655e",
"timestamp": "",
"source": "github",
"line_count": 435,
"max_line_length": 354,
"avg_line_length": 48.836781609195405,
"alnum_prop": 0.49322161551496896,
"repo_name": "PersianWikipedia/fawikibot",
"id": "842c1d2f4a9399d3100d24842fad790f396bd6b0",
"size": "21786",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Compat codes/zzgallery.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "1432549"
},
{
"name": "Shell",
"bytes": "1107"
}
],
"symlink_target": ""
} |
import sys
import re
import os
from stat import *
import getopt
err = sys.stderr.write
dbg = err
rep = sys.stdout.write
new_interpreter = None
preserve_timestamps = False
create_backup = True
keep_flags = False
add_flags = b''
def main():
global new_interpreter
global preserve_timestamps
global create_backup
global keep_flags
global add_flags
usage = ('usage: %s -i /interpreter -p -n -k -a file-or-directory ...\n' %
sys.argv[0])
try:
opts, args = getopt.getopt(sys.argv[1:], 'i:a:kpn')
except getopt.error as msg:
err(str(msg) + '\n')
err(usage)
sys.exit(2)
for o, a in opts:
if o == '-i':
new_interpreter = a.encode()
if o == '-p':
preserve_timestamps = True
if o == '-n':
create_backup = False
if o == '-k':
keep_flags = True
if o == '-a':
add_flags = a.encode()
if b' ' in add_flags:
err("-a option doesn't support whitespaces")
sys.exit(2)
if not new_interpreter or not new_interpreter.startswith(b'/') or \
not args:
err('-i option or file-or-directory missing\n')
err(usage)
sys.exit(2)
bad = 0
for arg in args:
if os.path.isdir(arg):
if recursedown(arg): bad = 1
elif os.path.islink(arg):
err(arg + ': will not process symbolic links\n')
bad = 1
else:
if fix(arg): bad = 1
sys.exit(bad)
def ispython(name):
return name.endswith('.py')
def recursedown(dirname):
dbg('recursedown(%r)\n' % (dirname,))
bad = 0
try:
names = os.listdir(dirname)
except OSError as msg:
err('%s: cannot list directory: %r\n' % (dirname, msg))
return 1
names.sort()
subdirs = []
for name in names:
if name in (os.curdir, os.pardir): continue
fullname = os.path.join(dirname, name)
if os.path.islink(fullname): pass
elif os.path.isdir(fullname):
subdirs.append(fullname)
elif ispython(name):
if fix(fullname): bad = 1
for fullname in subdirs:
if recursedown(fullname): bad = 1
return bad
def fix(filename):
## dbg('fix(%r)\n' % (filename,))
try:
f = open(filename, 'rb')
except IOError as msg:
err('%s: cannot open: %r\n' % (filename, msg))
return 1
with f:
line = f.readline()
fixed = fixline(line)
if line == fixed:
rep(filename+': no change\n')
return
head, tail = os.path.split(filename)
tempname = os.path.join(head, '@' + tail)
try:
g = open(tempname, 'wb')
except IOError as msg:
err('%s: cannot create: %r\n' % (tempname, msg))
return 1
with g:
rep(filename + ': updating\n')
g.write(fixed)
BUFSIZE = 8*1024
while 1:
buf = f.read(BUFSIZE)
if not buf: break
g.write(buf)
# Finishing touch -- move files
mtime = None
atime = None
# First copy the file's mode to the temp file
try:
statbuf = os.stat(filename)
mtime = statbuf.st_mtime
atime = statbuf.st_atime
os.chmod(tempname, statbuf[ST_MODE] & 0o7777)
except OSError as msg:
err('%s: warning: chmod failed (%r)\n' % (tempname, msg))
# Then make a backup of the original file as filename~
if create_backup:
try:
os.rename(filename, filename + '~')
except OSError as msg:
err('%s: warning: backup failed (%r)\n' % (filename, msg))
else:
try:
os.remove(filename)
except OSError as msg:
err('%s: warning: removing failed (%r)\n' % (filename, msg))
# Now move the temp file to the original file
try:
os.rename(tempname, filename)
except OSError as msg:
err('%s: rename failed (%r)\n' % (filename, msg))
return 1
if preserve_timestamps:
if atime and mtime:
try:
os.utime(filename, (atime, mtime))
except OSError as msg:
err('%s: reset of timestamp failed (%r)\n' % (filename, msg))
return 1
# Return success
return 0
def parse_shebang(shebangline):
shebangline = shebangline.rstrip(b'\n')
start = shebangline.find(b' -')
if start == -1:
return b''
return shebangline[start:]
def populate_flags(shebangline):
old_flags = b''
if keep_flags:
old_flags = parse_shebang(shebangline)
if old_flags:
old_flags = old_flags[2:]
if not (old_flags or add_flags):
return b''
# On Linux, the entire string following the interpreter name
# is passed as a single argument to the interpreter.
# e.g. "#! /usr/bin/python3 -W Error -s" runs "/usr/bin/python3 "-W Error -s"
# so shebang should have single '-' where flags are given and
# flag might need argument for that reasons adding new flags is
# between '-' and original flags
# e.g. #! /usr/bin/python3 -sW Error
return b' -' + add_flags + old_flags
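# Illustrative sketch of the flag merging above (assuming the script was run
# with "-k -a s", i.e. keep_flags=True and add_flags=b's'):
#
#     populate_flags(b'#! /usr/bin/python3 -W Error\n')  # -> b' -sW Error'
#
# parse_shebang() recovers the old b'W Error', and the new b's' lands
# directly after the single '-'.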
def fixline(line):
if not line.startswith(b'#!'):
return line
if b"python" not in line:
return line
flags = populate_flags(line)
return b'#! ' + new_interpreter + flags + b'\n'
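# Illustrative sketch (assuming new_interpreter=b'/usr/bin/python3' and the
# default keep_flags/add_flags): fixline(b'#! /usr/bin/python -O\n') returns
# b'#! /usr/bin/python3\n' -- old flags are dropped unless -k or -a was
# given -- while a first line that is not a python shebang passes through
# unchanged.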
if __name__ == '__main__':
main()
| {
"content_hash": "886866bf397e36d5bf922ebb5589c417",
"timestamp": "",
"source": "github",
"line_count": 198,
"max_line_length": 81,
"avg_line_length": 27.8989898989899,
"alnum_prop": 0.5454380883417813,
"repo_name": "batermj/algorithm-challenger",
"id": "d252321a21a172a9cb6a483e3ad0573298f5c52a",
"size": "6787",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "code-analysis/programming_anguage/python/source_codes/Python3.8.0/Python-3.8.0/Tools/scripts/pathfix.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "655185"
},
{
"name": "Batchfile",
"bytes": "127416"
},
{
"name": "C",
"bytes": "33127630"
},
{
"name": "C++",
"bytes": "1364796"
},
{
"name": "CSS",
"bytes": "3163"
},
{
"name": "Common Lisp",
"bytes": "48962"
},
{
"name": "DIGITAL Command Language",
"bytes": "26402"
},
{
"name": "DTrace",
"bytes": "2196"
},
{
"name": "Go",
"bytes": "26248"
},
{
"name": "HTML",
"bytes": "385719"
},
{
"name": "Haskell",
"bytes": "33612"
},
{
"name": "Java",
"bytes": "1084"
},
{
"name": "JavaScript",
"bytes": "20754"
},
{
"name": "M4",
"bytes": "403992"
},
{
"name": "Makefile",
"bytes": "238185"
},
{
"name": "Objective-C",
"bytes": "4934684"
},
{
"name": "PHP",
"bytes": "3513"
},
{
"name": "PLSQL",
"bytes": "45772"
},
{
"name": "Perl",
"bytes": "649"
},
{
"name": "PostScript",
"bytes": "27606"
},
{
"name": "PowerShell",
"bytes": "21737"
},
{
"name": "Python",
"bytes": "55270625"
},
{
"name": "R",
"bytes": "29951"
},
{
"name": "Rich Text Format",
"bytes": "14551"
},
{
"name": "Roff",
"bytes": "292490"
},
{
"name": "Ruby",
"bytes": "519"
},
{
"name": "Scala",
"bytes": "846446"
},
{
"name": "Shell",
"bytes": "491113"
},
{
"name": "Swift",
"bytes": "881"
},
{
"name": "TeX",
"bytes": "337654"
},
{
"name": "VBScript",
"bytes": "140"
},
{
"name": "XSLT",
"bytes": "153"
}
],
"symlink_target": ""
} |
""" test parquet compat """
import pytest
import datetime
from distutils.version import LooseVersion
from warnings import catch_warnings
import numpy as np
import pandas as pd
from pandas.compat import PY3, is_platform_windows, is_platform_mac
from pandas.io.parquet import (to_parquet, read_parquet, get_engine,
PyArrowImpl, FastParquetImpl)
from pandas.util import testing as tm
try:
import pyarrow # noqa
_HAVE_PYARROW = True
except ImportError:
_HAVE_PYARROW = False
try:
import fastparquet # noqa
_HAVE_FASTPARQUET = True
except ImportError:
_HAVE_FASTPARQUET = False
# setup engines & skips
@pytest.fixture(params=[
pytest.param('fastparquet',
marks=pytest.mark.skipif(not _HAVE_FASTPARQUET,
reason='fastparquet is '
'not installed')),
pytest.param('pyarrow',
marks=pytest.mark.skipif(not _HAVE_PYARROW,
reason='pyarrow is '
'not installed'))])
def engine(request):
return request.param
@pytest.fixture
def pa():
if not _HAVE_PYARROW:
pytest.skip("pyarrow is not installed")
if LooseVersion(pyarrow.__version__) < LooseVersion('0.7.0'):
pytest.skip("pyarrow is < 0.7.0")
return 'pyarrow'
@pytest.fixture
def fp():
if not _HAVE_FASTPARQUET:
pytest.skip("fastparquet is not installed")
return 'fastparquet'
@pytest.fixture
def fp_lt_014():
if not _HAVE_FASTPARQUET:
pytest.skip("fastparquet is not installed")
if LooseVersion(fastparquet.__version__) >= LooseVersion('0.1.4'):
pytest.skip("fastparquet is >= 0.1.4")
return 'fastparquet'
@pytest.fixture
def df_compat():
return pd.DataFrame({'A': [1, 2, 3], 'B': 'foo'})
@pytest.fixture
def df_cross_compat():
df = pd.DataFrame({'a': list('abc'),
'b': list(range(1, 4)),
# 'c': np.arange(3, 6).astype('u1'),
'd': np.arange(4.0, 7.0, dtype='float64'),
'e': [True, False, True],
'f': pd.date_range('20130101', periods=3),
# 'g': pd.date_range('20130101', periods=3,
# tz='US/Eastern'),
# 'h': pd.date_range('20130101', periods=3, freq='ns')
})
return df
@pytest.fixture
def df_full():
return pd.DataFrame(
{'string': list('abc'),
'string_with_nan': ['a', np.nan, 'c'],
'string_with_none': ['a', None, 'c'],
'bytes': [b'foo', b'bar', b'baz'],
'unicode': [u'foo', u'bar', u'baz'],
'int': list(range(1, 4)),
'uint': np.arange(3, 6).astype('u1'),
'float': np.arange(4.0, 7.0, dtype='float64'),
'float_with_nan': [2., np.nan, 3.],
'bool': [True, False, True],
'datetime': pd.date_range('20130101', periods=3),
'datetime_with_nat': [pd.Timestamp('20130101'),
pd.NaT,
pd.Timestamp('20130103')]})
def check_round_trip(df, engine=None, path=None,
write_kwargs=None, read_kwargs=None,
expected=None, check_names=True,
repeat=2):
"""Verify parquet serializer and deserializer produce the same results.
Performs a pandas to disk and disk to pandas round trip,
then compares the 2 resulting DataFrames to verify equality.
Parameters
----------
    df: DataFrame
engine: str, optional
'pyarrow' or 'fastparquet'
path: str, optional
write_kwargs: dict of str:str, optional
read_kwargs: dict of str:str, optional
expected: DataFrame, optional
Expected deserialization result, otherwise will be equal to `df`
    check_names: bool, optional
        Whether to check the ``names`` attribute when comparing the frames
repeat: int, optional
How many times to repeat the test
"""
write_kwargs = write_kwargs or {'compression': None}
read_kwargs = read_kwargs or {}
if expected is None:
expected = df
if engine:
write_kwargs['engine'] = engine
read_kwargs['engine'] = engine
def compare(repeat):
for _ in range(repeat):
df.to_parquet(path, **write_kwargs)
with catch_warnings(record=True):
actual = read_parquet(path, **read_kwargs)
tm.assert_frame_equal(expected, actual,
check_names=check_names)
if path is None:
with tm.ensure_clean() as path:
compare(repeat)
else:
compare(repeat)
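# Minimal usage sketch (illustrative only; mirrors the tests below). The
# helper is not collected by pytest because its name lacks the test_ prefix.
def _round_trip_example():
    # Write a tiny frame with an explicit engine and check it reads back
    # unchanged; the temp path is handled inside check_round_trip.
    check_round_trip(pd.DataFrame({'A': [1, 2, 3]}), engine='pyarrow')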
def test_invalid_engine(df_compat):
with pytest.raises(ValueError):
check_round_trip(df_compat, 'foo', 'bar')
def test_options_py(df_compat, pa):
# use the set option
with pd.option_context('io.parquet.engine', 'pyarrow'):
check_round_trip(df_compat)
def test_options_fp(df_compat, fp):
# use the set option
with pd.option_context('io.parquet.engine', 'fastparquet'):
check_round_trip(df_compat)
def test_options_auto(df_compat, fp, pa):
# use the set option
with pd.option_context('io.parquet.engine', 'auto'):
check_round_trip(df_compat)
def test_options_get_engine(fp, pa):
assert isinstance(get_engine('pyarrow'), PyArrowImpl)
assert isinstance(get_engine('fastparquet'), FastParquetImpl)
with pd.option_context('io.parquet.engine', 'pyarrow'):
assert isinstance(get_engine('auto'), PyArrowImpl)
assert isinstance(get_engine('pyarrow'), PyArrowImpl)
assert isinstance(get_engine('fastparquet'), FastParquetImpl)
with pd.option_context('io.parquet.engine', 'fastparquet'):
assert isinstance(get_engine('auto'), FastParquetImpl)
assert isinstance(get_engine('pyarrow'), PyArrowImpl)
assert isinstance(get_engine('fastparquet'), FastParquetImpl)
with pd.option_context('io.parquet.engine', 'auto'):
assert isinstance(get_engine('auto'), PyArrowImpl)
assert isinstance(get_engine('pyarrow'), PyArrowImpl)
assert isinstance(get_engine('fastparquet'), FastParquetImpl)
@pytest.mark.xfail(is_platform_windows() or is_platform_mac(),
reason="reading pa metadata failing on Windows/mac",
strict=True)
def test_cross_engine_pa_fp(df_cross_compat, pa, fp):
# cross-compat with differing reading/writing engines
df = df_cross_compat
with tm.ensure_clean() as path:
df.to_parquet(path, engine=pa, compression=None)
result = read_parquet(path, engine=fp)
tm.assert_frame_equal(result, df)
result = read_parquet(path, engine=fp, columns=['a', 'd'])
tm.assert_frame_equal(result, df[['a', 'd']])
def test_cross_engine_fp_pa(df_cross_compat, pa, fp):
# cross-compat with differing reading/writing engines
df = df_cross_compat
with tm.ensure_clean() as path:
df.to_parquet(path, engine=fp, compression=None)
with catch_warnings(record=True):
result = read_parquet(path, engine=pa)
tm.assert_frame_equal(result, df)
result = read_parquet(path, engine=pa, columns=['a', 'd'])
tm.assert_frame_equal(result, df[['a', 'd']])
class Base(object):
def check_error_on_write(self, df, engine, exc):
# check that we are raising the exception on writing
with tm.ensure_clean() as path:
with pytest.raises(exc):
to_parquet(df, path, engine, compression=None)
class TestBasic(Base):
def test_error(self, engine):
for obj in [pd.Series([1, 2, 3]), 1, 'foo', pd.Timestamp('20130101'),
np.array([1, 2, 3])]:
self.check_error_on_write(obj, engine, ValueError)
def test_columns_dtypes(self, engine):
df = pd.DataFrame({'string': list('abc'),
'int': list(range(1, 4))})
# unicode
df.columns = [u'foo', u'bar']
check_round_trip(df, engine)
def test_columns_dtypes_invalid(self, engine):
df = pd.DataFrame({'string': list('abc'),
'int': list(range(1, 4))})
# numeric
df.columns = [0, 1]
self.check_error_on_write(df, engine, ValueError)
if PY3:
# bytes on PY3, on PY2 these are str
df.columns = [b'foo', b'bar']
self.check_error_on_write(df, engine, ValueError)
# python object
df.columns = [datetime.datetime(2011, 1, 1, 0, 0),
datetime.datetime(2011, 1, 1, 1, 1)]
self.check_error_on_write(df, engine, ValueError)
@pytest.mark.parametrize('compression', [None, 'gzip', 'snappy', 'brotli'])
def test_compression(self, engine, compression):
if compression == 'snappy':
pytest.importorskip('snappy')
elif compression == 'brotli':
pytest.importorskip('brotli')
df = pd.DataFrame({'A': [1, 2, 3]})
check_round_trip(df, engine, write_kwargs={'compression': compression})
def test_read_columns(self, engine):
# GH18154
df = pd.DataFrame({'string': list('abc'),
'int': list(range(1, 4))})
expected = pd.DataFrame({'string': list('abc')})
check_round_trip(df, engine, expected=expected,
read_kwargs={'columns': ['string']})
def test_write_index(self, engine):
check_names = engine != 'fastparquet'
if engine == 'pyarrow':
import pyarrow
if LooseVersion(pyarrow.__version__) < LooseVersion('0.7.0'):
pytest.skip("pyarrow is < 0.7.0")
df = pd.DataFrame({'A': [1, 2, 3]})
check_round_trip(df, engine)
indexes = [
[2, 3, 4],
pd.date_range('20130101', periods=3),
list('abc'),
[1, 3, 4],
]
# non-default index
for index in indexes:
df.index = index
check_round_trip(df, engine, check_names=check_names)
# index with meta-data
df.index = [0, 1, 2]
df.index.name = 'foo'
check_round_trip(df, engine)
def test_write_multiindex(self, pa):
        # Not supported in fastparquet as of 0.1.3 or in older pyarrow versions
engine = pa
df = pd.DataFrame({'A': [1, 2, 3]})
index = pd.MultiIndex.from_tuples([('a', 1), ('a', 2), ('b', 1)])
df.index = index
check_round_trip(df, engine)
def test_write_column_multiindex(self, engine):
# column multi-index
mi_columns = pd.MultiIndex.from_tuples([('a', 1), ('a', 2), ('b', 1)])
df = pd.DataFrame(np.random.randn(4, 3), columns=mi_columns)
self.check_error_on_write(df, engine, ValueError)
def test_multiindex_with_columns(self, pa):
engine = pa
dates = pd.date_range('01-Jan-2018', '01-Dec-2018', freq='MS')
df = pd.DataFrame(np.random.randn(2 * len(dates), 3),
columns=list('ABC'))
index1 = pd.MultiIndex.from_product(
[['Level1', 'Level2'], dates],
names=['level', 'date'])
index2 = index1.copy(names=None)
for index in [index1, index2]:
df.index = index
check_round_trip(df, engine)
check_round_trip(df, engine, read_kwargs={'columns': ['A', 'B']},
expected=df[['A', 'B']])
def test_write_ignoring_index(self, engine):
# ENH 20768
# Ensure index=False omits the index from the written Parquet file.
df = pd.DataFrame({'a': [1, 2, 3], 'b': ['q', 'r', 's']})
write_kwargs = {
'compression': None,
'index': False,
}
# Because we're dropping the index, we expect the loaded dataframe to
# have the default integer index.
expected = df.reset_index(drop=True)
check_round_trip(df, engine, write_kwargs=write_kwargs,
expected=expected)
# Ignore custom index
df = pd.DataFrame({'a': [1, 2, 3], 'b': ['q', 'r', 's']},
index=['zyx', 'wvu', 'tsr'])
check_round_trip(df, engine, write_kwargs=write_kwargs,
expected=expected)
# Ignore multi-indexes as well.
arrays = [['bar', 'bar', 'baz', 'baz', 'foo', 'foo', 'qux', 'qux'],
['one', 'two', 'one', 'two', 'one', 'two', 'one', 'two']]
df = pd.DataFrame({'one': [i for i in range(8)],
'two': [-i for i in range(8)]}, index=arrays)
expected = df.reset_index(drop=True)
check_round_trip(df, engine, write_kwargs=write_kwargs,
expected=expected)
class TestParquetPyArrow(Base):
def test_basic(self, pa, df_full):
df = df_full
# additional supported types for pyarrow
import pyarrow
if LooseVersion(pyarrow.__version__) >= LooseVersion('0.7.0'):
df['datetime_tz'] = pd.date_range('20130101', periods=3,
tz='Europe/Brussels')
df['bool_with_none'] = [True, None, True]
check_round_trip(df, pa)
# TODO: This doesn't fail on all systems; track down which
@pytest.mark.xfail(reason="pyarrow fails on this (ARROW-1883)")
def test_basic_subset_columns(self, pa, df_full):
# GH18628
df = df_full
# additional supported types for pyarrow
df['datetime_tz'] = pd.date_range('20130101', periods=3,
tz='Europe/Brussels')
check_round_trip(df, pa, expected=df[['string', 'int']],
read_kwargs={'columns': ['string', 'int']})
def test_duplicate_columns(self, pa):
# not currently able to handle duplicate columns
df = pd.DataFrame(np.arange(12).reshape(4, 3),
columns=list('aaa')).copy()
self.check_error_on_write(df, pa, ValueError)
@pytest.mark.xfail(reason="failing for pyarrow < 0.11.0")
def test_unsupported(self, pa):
# period
df = pd.DataFrame({'a': pd.period_range('2013', freq='M', periods=3)})
# pyarrow 0.11 raises ArrowTypeError
# older pyarrows raise ArrowInvalid
self.check_error_on_write(df, pa, Exception)
# timedelta
df = pd.DataFrame({'a': pd.timedelta_range('1 day',
periods=3)})
self.check_error_on_write(df, pa, NotImplementedError)
# mixed python objects
df = pd.DataFrame({'a': ['a', 1, 2.0]})
# pyarrow 0.11 raises ArrowTypeError
# older pyarrows raise ArrowInvalid
self.check_error_on_write(df, pa, Exception)
def test_categorical(self, pa):
# supported in >= 0.7.0
df = pd.DataFrame({'a': pd.Categorical(list('abc'))})
# de-serialized as object
expected = df.assign(a=df.a.astype(object))
check_round_trip(df, pa, expected=expected)
def test_s3_roundtrip(self, df_compat, s3_resource, pa):
# GH #19134
check_round_trip(df_compat, pa,
path='s3://pandas-test/pyarrow.parquet')
class TestParquetFastParquet(Base):
def test_basic(self, fp, df_full):
df = df_full
# additional supported types for fastparquet
if LooseVersion(fastparquet.__version__) >= LooseVersion('0.1.4'):
df['datetime_tz'] = pd.date_range('20130101', periods=3,
tz='US/Eastern')
df['timedelta'] = pd.timedelta_range('1 day', periods=3)
check_round_trip(df, fp)
@pytest.mark.skip(reason="not supported")
def test_duplicate_columns(self, fp):
# not currently able to handle duplicate columns
df = pd.DataFrame(np.arange(12).reshape(4, 3),
columns=list('aaa')).copy()
self.check_error_on_write(df, fp, ValueError)
def test_bool_with_none(self, fp):
df = pd.DataFrame({'a': [True, None, False]})
expected = pd.DataFrame({'a': [1.0, np.nan, 0.0]}, dtype='float16')
check_round_trip(df, fp, expected=expected)
def test_unsupported(self, fp):
# period
df = pd.DataFrame({'a': pd.period_range('2013', freq='M', periods=3)})
self.check_error_on_write(df, fp, ValueError)
# mixed
df = pd.DataFrame({'a': ['a', 1, 2.0]})
self.check_error_on_write(df, fp, ValueError)
def test_categorical(self, fp):
if LooseVersion(fastparquet.__version__) < LooseVersion("0.1.3"):
pytest.skip("CategoricalDtype not supported for older fp")
df = pd.DataFrame({'a': pd.Categorical(list('abc'))})
check_round_trip(df, fp)
def test_datetime_tz(self, fp_lt_014):
# fastparquet<0.1.4 doesn't preserve tz
df = pd.DataFrame({'a': pd.date_range('20130101', periods=3,
tz='US/Eastern')})
# warns on the coercion
with catch_warnings(record=True):
check_round_trip(df, fp_lt_014,
expected=df.astype('datetime64[ns]'))
def test_filter_row_groups(self, fp):
d = {'a': list(range(0, 3))}
df = pd.DataFrame(d)
with tm.ensure_clean() as path:
df.to_parquet(path, fp, compression=None,
row_group_offsets=1)
result = read_parquet(path, fp, filters=[('a', '==', 0)])
assert len(result) == 1
def test_s3_roundtrip(self, df_compat, s3_resource, fp):
# GH #19134
check_round_trip(df_compat, fp,
path='s3://pandas-test/fastparquet.parquet')
| {
"content_hash": "0bcb15284205bcd5d594e9bb41dadd52",
"timestamp": "",
"source": "github",
"line_count": 521,
"max_line_length": 79,
"avg_line_length": 34.53934740882917,
"alnum_prop": 0.5583773270352875,
"repo_name": "harisbal/pandas",
"id": "3b3e7f757bf60dc81cfd8ad34c95943aede5c30e",
"size": "17995",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pandas/tests/io/test_parquet.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "4907"
},
{
"name": "C",
"bytes": "404689"
},
{
"name": "C++",
"bytes": "17194"
},
{
"name": "HTML",
"bytes": "551714"
},
{
"name": "Makefile",
"bytes": "574"
},
{
"name": "Python",
"bytes": "14298777"
},
{
"name": "Shell",
"bytes": "28914"
},
{
"name": "Smarty",
"bytes": "2069"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import, print_function
# noinspection PyPackageRequirements,PyUnresolvedReferences
from six.moves.urllib.parse import urlsplit
__author__ = 'Tyler Butler <[email protected]>'
try:
# noinspection PyPackageRequirements,PyUnresolvedReferences
from flask import request, url_for
except ImportError:
print("Flask is required but cannot be imported.")
raise
def absolute_url_for(view, **kwargs):
absolute_url = url_for(view, _external=True, **kwargs)
return absolute_url
def is_local_url(url_to_check):
current_netloc = urlsplit(request.url_root).netloc
check_netloc = urlsplit(url_to_check).netloc
return current_netloc == check_netloc
| {
"content_hash": "54a3f334b342f2b247feeff3720d022d",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 63,
"avg_line_length": 29.583333333333332,
"alnum_prop": 0.7464788732394366,
"repo_name": "tylerbutler/propane",
"id": "17922b44f805283d58627957e74e65b1dec20c7c",
"size": "725",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "propane/flask/urls.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "52169"
}
],
"symlink_target": ""
} |
"""Tests for the PVOutput config flow."""
from unittest.mock import AsyncMock, MagicMock
from pvo import PVOutputAuthenticationError, PVOutputConnectionError
from homeassistant.components.pvoutput.const import CONF_SYSTEM_ID, DOMAIN
from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER
from homeassistant.const import CONF_API_KEY
from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import (
RESULT_TYPE_ABORT,
RESULT_TYPE_CREATE_ENTRY,
RESULT_TYPE_FORM,
)
from tests.common import MockConfigEntry
async def test_full_user_flow(
hass: HomeAssistant,
mock_pvoutput_config_flow: MagicMock,
mock_setup_entry: AsyncMock,
) -> None:
"""Test the full user configuration flow."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}
)
assert result.get("type") == RESULT_TYPE_FORM
assert result.get("step_id") == SOURCE_USER
assert "flow_id" in result
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
user_input={
CONF_SYSTEM_ID: 12345,
CONF_API_KEY: "tadaaa",
},
)
assert result2.get("type") == RESULT_TYPE_CREATE_ENTRY
assert result2.get("title") == "12345"
assert result2.get("data") == {
CONF_SYSTEM_ID: 12345,
CONF_API_KEY: "tadaaa",
}
assert len(mock_setup_entry.mock_calls) == 1
assert len(mock_pvoutput_config_flow.system.mock_calls) == 1
async def test_full_flow_with_authentication_error(
hass: HomeAssistant,
mock_pvoutput_config_flow: MagicMock,
mock_setup_entry: AsyncMock,
) -> None:
"""Test the full user configuration flow with incorrect API key.
This tests tests a full config flow, with a case the user enters an invalid
PVOutput API key, but recovers by entering the correct one.
"""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}
)
assert result.get("type") == RESULT_TYPE_FORM
assert result.get("step_id") == SOURCE_USER
assert "flow_id" in result
mock_pvoutput_config_flow.system.side_effect = PVOutputAuthenticationError
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
user_input={
CONF_SYSTEM_ID: 12345,
CONF_API_KEY: "invalid",
},
)
assert result2.get("type") == RESULT_TYPE_FORM
assert result2.get("step_id") == SOURCE_USER
assert result2.get("errors") == {"base": "invalid_auth"}
assert "flow_id" in result2
assert len(mock_setup_entry.mock_calls) == 0
assert len(mock_pvoutput_config_flow.system.mock_calls) == 1
mock_pvoutput_config_flow.system.side_effect = None
result3 = await hass.config_entries.flow.async_configure(
result2["flow_id"],
user_input={
CONF_SYSTEM_ID: 12345,
CONF_API_KEY: "tadaaa",
},
)
assert result3.get("type") == RESULT_TYPE_CREATE_ENTRY
assert result3.get("title") == "12345"
assert result3.get("data") == {
CONF_SYSTEM_ID: 12345,
CONF_API_KEY: "tadaaa",
}
assert len(mock_setup_entry.mock_calls) == 1
assert len(mock_pvoutput_config_flow.system.mock_calls) == 2
async def test_connection_error(
hass: HomeAssistant, mock_pvoutput_config_flow: MagicMock
) -> None:
"""Test API connection error."""
mock_pvoutput_config_flow.system.side_effect = PVOutputConnectionError
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_USER},
data={
CONF_SYSTEM_ID: 12345,
CONF_API_KEY: "tadaaa",
},
)
assert result.get("type") == RESULT_TYPE_FORM
assert result.get("errors") == {"base": "cannot_connect"}
assert len(mock_pvoutput_config_flow.system.mock_calls) == 1
async def test_already_configured(
hass: HomeAssistant,
mock_config_entry: MockConfigEntry,
mock_pvoutput_config_flow: MagicMock,
) -> None:
"""Test we abort if the PVOutput system is already configured."""
mock_config_entry.add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}
)
assert "flow_id" in result
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
user_input={
CONF_SYSTEM_ID: 12345,
CONF_API_KEY: "tadaaa",
},
)
assert result2.get("type") == RESULT_TYPE_ABORT
assert result2.get("reason") == "already_configured"
async def test_reauth_flow(
hass: HomeAssistant,
mock_config_entry: MockConfigEntry,
mock_pvoutput_config_flow: MagicMock,
mock_setup_entry: AsyncMock,
) -> None:
"""Test the reauthentication configuration flow."""
mock_config_entry.add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={
"source": SOURCE_REAUTH,
"unique_id": mock_config_entry.unique_id,
"entry_id": mock_config_entry.entry_id,
},
data=mock_config_entry.data,
)
assert result.get("type") == RESULT_TYPE_FORM
assert result.get("step_id") == "reauth_confirm"
assert "flow_id" in result
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{CONF_API_KEY: "some_new_key"},
)
await hass.async_block_till_done()
assert result2.get("type") == RESULT_TYPE_ABORT
assert result2.get("reason") == "reauth_successful"
assert mock_config_entry.data == {
CONF_SYSTEM_ID: 12345,
CONF_API_KEY: "some_new_key",
}
assert len(mock_setup_entry.mock_calls) == 1
assert len(mock_pvoutput_config_flow.system.mock_calls) == 1
async def test_reauth_with_authentication_error(
hass: HomeAssistant,
mock_config_entry: MockConfigEntry,
mock_pvoutput_config_flow: MagicMock,
mock_setup_entry: AsyncMock,
) -> None:
"""Test the reauthentication configuration flow with an authentication error.
This tests tests a reauth flow, with a case the user enters an invalid
API key, but recover by entering the correct one.
"""
mock_config_entry.add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={
"source": SOURCE_REAUTH,
"unique_id": mock_config_entry.unique_id,
"entry_id": mock_config_entry.entry_id,
},
data=mock_config_entry.data,
)
assert result.get("type") == RESULT_TYPE_FORM
assert result.get("step_id") == "reauth_confirm"
assert "flow_id" in result
mock_pvoutput_config_flow.system.side_effect = PVOutputAuthenticationError
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{CONF_API_KEY: "invalid_key"},
)
await hass.async_block_till_done()
assert result2.get("type") == RESULT_TYPE_FORM
assert result2.get("step_id") == "reauth_confirm"
assert result2.get("errors") == {"base": "invalid_auth"}
assert "flow_id" in result2
assert len(mock_setup_entry.mock_calls) == 0
assert len(mock_pvoutput_config_flow.system.mock_calls) == 1
mock_pvoutput_config_flow.system.side_effect = None
result3 = await hass.config_entries.flow.async_configure(
result2["flow_id"],
user_input={CONF_API_KEY: "valid_key"},
)
await hass.async_block_till_done()
assert result3.get("type") == RESULT_TYPE_ABORT
assert result3.get("reason") == "reauth_successful"
assert mock_config_entry.data == {
CONF_SYSTEM_ID: 12345,
CONF_API_KEY: "valid_key",
}
assert len(mock_setup_entry.mock_calls) == 1
assert len(mock_pvoutput_config_flow.system.mock_calls) == 2
async def test_reauth_api_error(
hass: HomeAssistant,
mock_pvoutput_config_flow: MagicMock,
mock_config_entry: MockConfigEntry,
) -> None:
"""Test API error during reauthentication."""
mock_config_entry.add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={
"source": SOURCE_REAUTH,
"unique_id": mock_config_entry.unique_id,
"entry_id": mock_config_entry.entry_id,
},
data=mock_config_entry.data,
)
assert result.get("type") == RESULT_TYPE_FORM
assert result.get("step_id") == "reauth_confirm"
assert "flow_id" in result
mock_pvoutput_config_flow.system.side_effect = PVOutputConnectionError
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{CONF_API_KEY: "some_new_key"},
)
await hass.async_block_till_done()
assert result2.get("type") == RESULT_TYPE_FORM
assert result2.get("step_id") == "reauth_confirm"
assert result2.get("errors") == {"base": "cannot_connect"}
| {
"content_hash": "2a798e71400c8c523f942acdd2edac36",
"timestamp": "",
"source": "github",
"line_count": 282,
"max_line_length": 81,
"avg_line_length": 31.77304964539007,
"alnum_prop": 0.6466517857142857,
"repo_name": "toddeye/home-assistant",
"id": "444a35565f69faaa525ed7267888ce7695d451fe",
"size": "8960",
"binary": false,
"copies": "3",
"ref": "refs/heads/dev",
"path": "tests/components/pvoutput/test_config_flow.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "3005"
},
{
"name": "PLSQL",
"bytes": "840"
},
{
"name": "Python",
"bytes": "47414832"
},
{
"name": "Shell",
"bytes": "6252"
}
],
"symlink_target": ""
} |
r"""HTTP cookie handling for web clients.
This module has (now fairly distant) origins in Gisle Aas' Perl module
HTTP::Cookies, from the libwww-perl library.
Docstrings, comments and debug strings in this code refer to the
attributes of the HTTP cookie system as cookie-attributes, to distinguish
them clearly from Python attributes.
Class diagram (note that BSDDBCookieJar and the MSIE* classes are not
distributed with the Python standard library, but are available from
http://wwwsearch.sf.net/):
                        CookieJar____
                        /     \      \
            FileCookieJar      \      \
             /    |   \         \      \
 MozillaCookieJar | LWPCookieJar \      \
                  |               |      \
                  |   ---MSIEBase |       \
                  |  /     |     |         \
                  | /   MSIEDBCookieJar BSDDBCookieJar
                  |/
               MSIECookieJar
"""
__all__ = ['Cookie', 'CookieJar', 'CookiePolicy', 'DefaultCookiePolicy',
'FileCookieJar', 'LWPCookieJar', 'LoadError', 'MozillaCookieJar']
import copy
import datetime
import re
import time
import urllib.parse, urllib.request
try:
import threading as _threading
except ImportError:
import dummy_threading as _threading
import http.client # only for the default HTTP port
from calendar import timegm
debug = False # set to True to enable debugging via the logging module
logger = None
def _debug(*args):
if not debug:
return
global logger
if not logger:
import logging
logger = logging.getLogger("http.cookiejar")
return logger.debug(*args)
DEFAULT_HTTP_PORT = str(http.client.HTTP_PORT)
MISSING_FILENAME_TEXT = ("a filename was not supplied (nor was the CookieJar "
"instance initialised with one)")
def _warn_unhandled_exception():
# There are a few catch-all except: statements in this module, for
# catching input that's bad in unexpected ways. Warn if any
# exceptions are caught there.
import io, warnings, traceback
f = io.StringIO()
traceback.print_exc(None, f)
msg = f.getvalue()
warnings.warn("http.cookiejar bug!\n%s" % msg, stacklevel=2)
# Date/time conversion
# -----------------------------------------------------------------------------
EPOCH_YEAR = 1970
def _timegm(tt):
year, month, mday, hour, min, sec = tt[:6]
if ((year >= EPOCH_YEAR) and (1 <= month <= 12) and (1 <= mday <= 31) and
(0 <= hour <= 24) and (0 <= min <= 59) and (0 <= sec <= 61)):
return timegm(tt)
else:
return None
DAYS = ["Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"]
MONTHS = ["Jan", "Feb", "Mar", "Apr", "May", "Jun",
"Jul", "Aug", "Sep", "Oct", "Nov", "Dec"]
MONTHS_LOWER = []
for month in MONTHS: MONTHS_LOWER.append(month.lower())
def time2isoz(t=None):
"""Return a string representing time in seconds since epoch, t.
If the function is called without an argument, it will use the current
time.
The format of the returned string is like "YYYY-MM-DD hh:mm:ssZ",
representing Universal Time (UTC, aka GMT). An example of this format is:
1994-11-24 08:49:37Z
"""
if t is None:
dt = datetime.datetime.utcnow()
else:
dt = datetime.datetime.utcfromtimestamp(t)
return "%04d-%02d-%02d %02d:%02d:%02dZ" % (
dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second)
def time2netscape(t=None):
"""Return a string representing time in seconds since epoch, t.
If the function is called without an argument, it will use the current
time.
The format of the returned string is like this:
Wed, DD-Mon-YYYY HH:MM:SS GMT
"""
if t is None:
dt = datetime.datetime.utcnow()
else:
dt = datetime.datetime.utcfromtimestamp(t)
return "%s %02d-%s-%04d %02d:%02d:%02d GMT" % (
DAYS[dt.weekday()], dt.day, MONTHS[dt.month-1],
dt.year, dt.hour, dt.minute, dt.second)
UTC_ZONES = {"GMT": None, "UTC": None, "UT": None, "Z": None}
TIMEZONE_RE = re.compile(r"^([-+])?(\d\d?):?(\d\d)?$", re.ASCII)
def offset_from_tz_string(tz):
offset = None
if tz in UTC_ZONES:
offset = 0
else:
m = TIMEZONE_RE.search(tz)
if m:
offset = 3600 * int(m.group(2))
if m.group(3):
offset = offset + 60 * int(m.group(3))
if m.group(1) == '-':
offset = -offset
return offset
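# Illustrative values: offset_from_tz_string("GMT") == 0,
# offset_from_tz_string("+0130") == 5400 (an hour and a half east of UTC),
# offset_from_tz_string("-05:00") == -18000, and an unrecognized zone name
# such as "EST" yields None.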
def _str2time(day, mon, yr, hr, min, sec, tz):
# translate month name to number
# month numbers start with 1 (January)
try:
mon = MONTHS_LOWER.index(mon.lower())+1
except ValueError:
# maybe it's already a number
try:
imon = int(mon)
except ValueError:
return None
if 1 <= imon <= 12:
mon = imon
else:
return None
# make sure clock elements are defined
if hr is None: hr = 0
if min is None: min = 0
if sec is None: sec = 0
yr = int(yr)
day = int(day)
hr = int(hr)
min = int(min)
sec = int(sec)
if yr < 1000:
# find "obvious" year
cur_yr = time.localtime(time.time())[0]
m = cur_yr % 100
tmp = yr
yr = yr + cur_yr - m
m = m - tmp
if abs(m) > 50:
if m > 0: yr = yr + 100
else: yr = yr - 100
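        # (Illustrative: with cur_yr == 2023, yr == 94 becomes 1994 and
        # yr == 3 becomes 2003, always landing within 50 years of today.)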
# convert UTC time tuple to seconds since epoch (not timezone-adjusted)
t = _timegm((yr, mon, day, hr, min, sec, tz))
if t is not None:
# adjust time using timezone string, to get absolute time since epoch
if tz is None:
tz = "UTC"
tz = tz.upper()
offset = offset_from_tz_string(tz)
if offset is None:
return None
t = t - offset
return t
STRICT_DATE_RE = re.compile(
r"^[SMTWF][a-z][a-z], (\d\d) ([JFMASOND][a-z][a-z]) "
"(\d\d\d\d) (\d\d):(\d\d):(\d\d) GMT$", re.ASCII)
WEEKDAY_RE = re.compile(
r"^(?:Sun|Mon|Tue|Wed|Thu|Fri|Sat)[a-z]*,?\s*", re.I | re.ASCII)
LOOSE_HTTP_DATE_RE = re.compile(
r"""^
(\d\d?) # day
(?:\s+|[-\/])
(\w+) # month
(?:\s+|[-\/])
(\d+) # year
(?:
(?:\s+|:) # separator before clock
(\d\d?):(\d\d) # hour:min
(?::(\d\d))? # optional seconds
)? # optional clock
\s*
([-+]?\d{2,4}|(?![APap][Mm]\b)[A-Za-z]+)? # timezone
\s*
(?:\(\w+\))? # ASCII representation of timezone in parens.
\s*$""", re.X | re.ASCII)
def http2time(text):
"""Returns time in seconds since epoch of time represented by a string.
Return value is an integer.
None is returned if the format of str is unrecognized, the time is outside
the representable range, or the timezone string is not recognized. If the
string contains no timezone, UTC is assumed.
The timezone in the string may be numerical (like "-0800" or "+0100") or a
string timezone (like "UTC", "GMT", "BST" or "EST"). Currently, only the
timezone strings equivalent to UTC (zero offset) are known to the function.
The function loosely parses the following formats:
Wed, 09 Feb 1994 22:23:32 GMT -- HTTP format
Tuesday, 08-Feb-94 14:15:29 GMT -- old rfc850 HTTP format
Tuesday, 08-Feb-1994 14:15:29 GMT -- broken rfc850 HTTP format
09 Feb 1994 22:23:32 GMT -- HTTP format (no weekday)
08-Feb-94 14:15:29 GMT -- rfc850 format (no weekday)
08-Feb-1994 14:15:29 GMT -- broken rfc850 format (no weekday)
The parser ignores leading and trailing whitespace. The time may be
absent.
If the year is given with only 2 digits, the function will select the
century that makes the year closest to the current date.
"""
# fast exit for strictly conforming string
m = STRICT_DATE_RE.search(text)
if m:
g = m.groups()
mon = MONTHS_LOWER.index(g[1].lower()) + 1
tt = (int(g[2]), mon, int(g[0]),
int(g[3]), int(g[4]), float(g[5]))
return _timegm(tt)
# No, we need some messy parsing...
# clean up
text = text.lstrip()
text = WEEKDAY_RE.sub("", text, 1) # Useless weekday
# tz is time zone specifier string
day, mon, yr, hr, min, sec, tz = [None]*7
# loose regexp parse
m = LOOSE_HTTP_DATE_RE.search(text)
if m is not None:
day, mon, yr, hr, min, sec, tz = m.groups()
else:
return None # bad format
return _str2time(day, mon, yr, hr, min, sec, tz)
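# Illustrative helper (not part of the original module): both spellings
# below parse to the same UTC instant.
def _http2time_example():
    assert http2time("Wed, 09 Feb 1994 22:23:32 GMT") == 760832612
    assert http2time("09 Feb 1994 22:23:32 GMT") == 760832612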
ISO_DATE_RE = re.compile(
    r"""^
    (\d{4})              # year
       [-\/]?
    (\d\d?)              # numerical month
       [-\/]?
    (\d\d?)              # day
    (?:
          (?:\s+|[-:Tt])  # separator before clock
       (\d\d?):?(\d\d)    # hour:min
       (?::?(\d\d(?:\.\d*)?))?  # optional seconds (and fractional)
    )?                    # optional clock
    \s*
    ([-+]?\d\d?:?(:?\d\d)?
     |Z|z)?               # timezone  (Z is "zero meridian", i.e. GMT)
    \s*$""", re.X | re.ASCII)
def iso2time(text):
"""
As for http2time, but parses the ISO 8601 formats:
1994-02-03 14:15:29 -0100 -- ISO 8601 format
1994-02-03 14:15:29 -- zone is optional
1994-02-03 -- only date
1994-02-03T14:15:29 -- Use T as separator
19940203T141529Z -- ISO 8601 compact format
19940203 -- only date
"""
# clean up
text = text.lstrip()
# tz is time zone specifier string
day, mon, yr, hr, min, sec, tz = [None]*7
# loose regexp parse
m = ISO_DATE_RE.search(text)
if m is not None:
# XXX there's an extra bit of the timezone I'm ignoring here: is
# this the right thing to do?
yr, mon, day, hr, min, sec, tz, _ = m.groups()
else:
return None # bad format
return _str2time(day, mon, yr, hr, min, sec, tz)
# Header parsing
# -----------------------------------------------------------------------------
def unmatched(match):
"""Return unmatched part of re.Match object."""
start, end = match.span(0)
return match.string[:start]+match.string[end:]
HEADER_TOKEN_RE = re.compile(r"^\s*([^=\s;,]+)")
HEADER_QUOTED_VALUE_RE = re.compile(r"^\s*=\s*\"([^\"\\]*(?:\\.[^\"\\]*)*)\"")
HEADER_VALUE_RE = re.compile(r"^\s*=\s*([^\s;,]*)")
HEADER_ESCAPE_RE = re.compile(r"\\(.)")
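# Illustrative trace of the token scanner below: HEADER_TOKEN_RE matched
# against 'foo=bar' captures 'foo', and unmatched() then returns the leftover
# '=bar' for the value regexes, so the parser consumes its input token by
# token.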
def split_header_words(header_values):
r"""Parse header values into a list of lists containing key,value pairs.
The function knows how to deal with ",", ";" and "=" as well as quoted
values after "=". A list of space separated tokens are parsed as if they
were separated by ";".
If the header_values passed as argument contains multiple values, then they
are treated as if they were a single value separated by comma ",".
This means that this function is useful for parsing header fields that
follow this syntax (BNF as from the HTTP/1.1 specification, but we relax
the requirement for tokens).
      headers           = #header
      header            = (token | parameter) *( [";"] (token | parameter))

      token             = 1*<any CHAR except CTLs or separators>
      separators        = "(" | ")" | "<" | ">" | "@"
                        | "," | ";" | ":" | "\" | <">
                        | "/" | "[" | "]" | "?" | "="
                        | "{" | "}" | SP | HT

      quoted-string     = ( <"> *(qdtext | quoted-pair ) <"> )
      qdtext            = <any TEXT except <">>
      quoted-pair       = "\" CHAR

      parameter         = attribute "=" value
      attribute         = token
      value             = token | quoted-string
Each header is represented by a list of key/value pairs. The value for a
simple token (not part of a parameter) is None. Syntactically incorrect
headers will not necessarily be parsed as you would want.
This is easier to describe with some examples:
>>> split_header_words(['foo="bar"; port="80,81"; discard, bar=baz'])
[[('foo', 'bar'), ('port', '80,81'), ('discard', None)], [('bar', 'baz')]]
>>> split_header_words(['text/html; charset="iso-8859-1"'])
[[('text/html', None), ('charset', 'iso-8859-1')]]
>>> split_header_words([r'Basic realm="\"foo\bar\""'])
[[('Basic', None), ('realm', '"foobar"')]]
"""
assert not isinstance(header_values, str)
result = []
for text in header_values:
orig_text = text
pairs = []
while text:
m = HEADER_TOKEN_RE.search(text)
if m:
text = unmatched(m)
name = m.group(1)
m = HEADER_QUOTED_VALUE_RE.search(text)
if m: # quoted value
text = unmatched(m)
value = m.group(1)
value = HEADER_ESCAPE_RE.sub(r"\1", value)
else:
m = HEADER_VALUE_RE.search(text)
if m: # unquoted value
text = unmatched(m)
value = m.group(1)
value = value.rstrip()
else:
# no value, a lone token
value = None
pairs.append((name, value))
elif text.lstrip().startswith(","):
# concatenated headers, as per RFC 2616 section 4.2
text = text.lstrip()[1:]
if pairs: result.append(pairs)
pairs = []
else:
# skip junk
                non_junk, nr_junk_chars = re.subn(r"^[=\s;]*", "", text)
assert nr_junk_chars > 0, (
"split_header_words bug: '%s', '%s', %s" %
(orig_text, text, pairs))
text = non_junk
if pairs: result.append(pairs)
return result
HEADER_JOIN_ESCAPE_RE = re.compile(r"([\"\\])")
def join_header_words(lists):
"""Do the inverse (almost) of the conversion done by split_header_words.
Takes a list of lists of (key, value) pairs and produces a single header
value. Attribute values are quoted if needed.
>>> join_header_words([[("text/plain", None), ("charset", "iso-8859/1")]])
'text/plain; charset="iso-8859/1"'
>>> join_header_words([[("text/plain", None)], [("charset", "iso-8859/1")]])
'text/plain, charset="iso-8859/1"'
"""
headers = []
for pairs in lists:
attr = []
for k, v in pairs:
if v is not None:
if not re.search(r"^\w+$", v):
v = HEADER_JOIN_ESCAPE_RE.sub(r"\\\1", v) # escape " and \
v = '"%s"' % v
k = "%s=%s" % (k, v)
attr.append(k)
if attr: headers.append("; ".join(attr))
return ", ".join(headers)
def strip_quotes(text):
if text.startswith('"'):
text = text[1:]
if text.endswith('"'):
text = text[:-1]
return text
def parse_ns_headers(ns_headers):
"""Ad-hoc parser for Netscape protocol cookie-attributes.
The old Netscape cookie format for Set-Cookie can for instance contain
an unquoted "," in the expires field, so we have to use this ad-hoc
parser instead of split_header_words.
XXX This may not make the best possible effort to parse all the crap
that Netscape Cookie headers contain. Ronald Tschalar's HTTPClient
    parser is probably better, so we could do worse than following that if
this ever gives any trouble.
Currently, this is also used for parsing RFC 2109 cookies.
"""
known_attrs = ("expires", "domain", "path", "secure",
# RFC 2109 attrs (may turn up in Netscape cookies, too)
"version", "port", "max-age")
result = []
for ns_header in ns_headers:
pairs = []
version_set = False
for ii, param in enumerate(re.split(r";\s*", ns_header)):
param = param.rstrip()
if param == "": continue
if "=" not in param:
k, v = param, None
else:
k, v = re.split(r"\s*=\s*", param, 1)
k = k.lstrip()
if ii != 0:
lc = k.lower()
if lc in known_attrs:
k = lc
if k == "version":
# This is an RFC 2109 cookie.
v = strip_quotes(v)
version_set = True
if k == "expires":
# convert expires date to seconds since epoch
v = http2time(strip_quotes(v)) # None if invalid
pairs.append((k, v))
if pairs:
if not version_set:
pairs.append(("version", "0"))
result.append(pairs)
return result
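# Illustrative usage (a sketch, not part of the original module): note the
# unquoted comma inside expires -- the reason this ad-hoc parser exists.
#   parse_ns_headers(['foo=bar; path=/; expires=Wed, 09 Feb 1994 22:23:32 GMT'])
# yields one list of pairs: ('foo', 'bar'), ('path', '/'), expires converted
# to seconds since the epoch, and a default ('version', '0') appended.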
IPV4_RE = re.compile(r"\.\d+$", re.ASCII)
def is_HDN(text):
"""Return True if text is a host domain name."""
# XXX
# This may well be wrong. Which RFC is HDN defined in, if any (for
# the purposes of RFC 2965)?
# For the current implementation, what about IPv6? Remember to look
    #  at other uses of IPV4_RE also, if you change this.
if IPV4_RE.search(text):
return False
if text == "":
return False
if text[0] == "." or text[-1] == ".":
return False
return True
def domain_match(A, B):
"""Return True if domain A domain-matches domain B, according to RFC 2965.
A and B may be host domain names or IP addresses.
RFC 2965, section 1:
Host names can be specified either as an IP address or a HDN string.
Sometimes we compare one host name with another. (Such comparisons SHALL
be case-insensitive.) Host A's name domain-matches host B's if
* their host name strings string-compare equal; or
* A is a HDN string and has the form NB, where N is a non-empty
name string, B has the form .B', and B' is a HDN string. (So,
x.y.com domain-matches .Y.com but not Y.com.)
Note that domain-match is not a commutative operation: a.b.c.com
domain-matches .c.com, but not the reverse.
"""
# Note that, if A or B are IP addresses, the only relevant part of the
# definition of the domain-match algorithm is the direct string-compare.
A = A.lower()
B = B.lower()
if A == B:
return True
if not is_HDN(A):
return False
i = A.rfind(B)
if i == -1 or i == 0:
# A does not have form NB, or N is the empty string
return False
if not B.startswith("."):
return False
if not is_HDN(B[1:]):
return False
return True
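# Illustrative examples (a sketch, not part of the original module):
#   domain_match("x.y.com", ".y.com")   # -> True
#   domain_match("x.y.com", "y.com")    # -> False (B lacks the leading dot)
#   domain_match(".y.com", "x.y.com")   # -> False (matching is not commutative)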
def liberal_is_HDN(text):
"""Return True if text is a sort-of-like a host domain name.
For accepting/blocking domains.
"""
if IPV4_RE.search(text):
return False
return True
def user_domain_match(A, B):
"""For blocking/accepting domains.
A and B may be host domain names or IP addresses.
"""
A = A.lower()
B = B.lower()
if not (liberal_is_HDN(A) and liberal_is_HDN(B)):
if A == B:
# equal IP addresses
return True
return False
initial_dot = B.startswith(".")
if initial_dot and A.endswith(B):
return True
if not initial_dot and A == B:
return True
return False
cut_port_re = re.compile(r":\d+$", re.ASCII)
def request_host(request):
"""Return request-host, as defined by RFC 2965.
Variation from RFC: returned value is lowercased, for convenient
comparison.
"""
url = request.get_full_url()
host = urllib.parse.urlparse(url)[1]
if host == "":
host = request.get_header("Host", "")
# remove port, if present
host = cut_port_re.sub("", host, 1)
return host.lower()
def eff_request_host(request):
"""Return a tuple (request-host, effective request-host name).
As defined by RFC 2965, except both are lowercased.
"""
erhn = req_host = request_host(request)
if req_host.find(".") == -1 and not IPV4_RE.search(req_host):
erhn = req_host + ".local"
return req_host, erhn
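# Illustrative example (a sketch; the host name is made up): for a request
# whose host is "intranet" -- no dots and not an IP address -- this returns
# ("intranet", "intranet.local"), the RFC 2965 effective-host rule relied on
# by the domain checks below.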
def request_path(request):
"""Path component of request-URI, as defined by RFC 2965."""
url = request.get_full_url()
parts = urllib.parse.urlsplit(url)
path = escape_path(parts.path)
if not path.startswith("/"):
# fix bad RFC 2396 absoluteURI
path = "/" + path
return path
def request_port(request):
host = request.host
i = host.find(':')
if i >= 0:
port = host[i+1:]
try:
int(port)
except ValueError:
_debug("nonnumeric port: '%s'", port)
return None
else:
port = DEFAULT_HTTP_PORT
return port
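# Note on the function above (an observation, not original commentary): the
# port comes back as a *string* such as "8080", or DEFAULT_HTTP_PORT; the
# policy methods below therefore compare ports by string equality.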
# Characters in addition to A-Z, a-z, 0-9, '_', '.', and '-' that don't
# need to be escaped to form a valid HTTP URL (RFCs 2396 and 1738).
HTTP_PATH_SAFE = "%/;:@&=+$,!~*'()"
ESCAPED_CHAR_RE = re.compile(r"%([0-9a-fA-F][0-9a-fA-F])")
def uppercase_escaped_char(match):
return "%%%s" % match.group(1).upper()
def escape_path(path):
"""Escape any invalid characters in HTTP URL, and uppercase all escapes."""
# There's no knowing what character encoding was used to create URLs
# containing %-escapes, but since we have to pick one to escape invalid
# path characters, we pick UTF-8, as recommended in the HTML 4.0
# specification:
# http://www.w3.org/TR/REC-html40/appendix/notes.html#h-B.2.1
# And here, kind of: draft-fielding-uri-rfc2396bis-03
# (And in draft IRI specification: draft-duerst-iri-05)
# (And here, for new URI schemes: RFC 2718)
path = urllib.parse.quote(path, HTTP_PATH_SAFE)
path = ESCAPED_CHAR_RE.sub(uppercase_escaped_char, path)
return path
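# Illustrative example (a sketch, not part of the original module):
#   escape_path("/foo bar/%aa")   # -> "/foo%20bar/%AA"
# The space is quoted and the pre-existing escape is normalised to upper case.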
def reach(h):
"""Return reach of host h, as defined by RFC 2965, section 1.
The reach R of a host name H is defined as follows:
* If
- H is the host domain name of a host; and,
- H has the form A.B; and
- A has no embedded (that is, interior) dots; and
- B has at least one embedded dot, or B is the string "local".
then the reach of H is .B.
* Otherwise, the reach of H is H.
>>> reach("www.acme.com")
'.acme.com'
>>> reach("acme.com")
'acme.com'
>>> reach("acme.local")
'.local'
"""
i = h.find(".")
if i >= 0:
#a = h[:i] # this line is only here to show what a is
b = h[i+1:]
i = b.find(".")
if is_HDN(h) and (i >= 0 or b == "local"):
return "."+b
return h
def is_third_party(request):
"""
RFC 2965, section 3.3.6:
An unverifiable transaction is to a third-party host if its request-
host U does not domain-match the reach R of the request-host O in the
origin transaction.
"""
req_host = request_host(request)
if not domain_match(req_host, reach(request.origin_req_host)):
return True
else:
return False
class Cookie:
"""HTTP Cookie.
This class represents both Netscape and RFC 2965 cookies.
This is deliberately a very simple class. It just holds attributes. It's
possible to construct Cookie instances that don't comply with the cookie
standards. CookieJar.make_cookies is the factory function for Cookie
objects -- it deals with cookie parsing, supplying defaults, and
normalising to the representation used in this class. CookiePolicy is
responsible for checking them to see whether they should be accepted from
and returned to the server.
    Note that the port may be present in the headers, but unspecified ("Port"
    rather than "Port=80", for example); if this is the case, port is None.
"""
def __init__(self, version, name, value,
port, port_specified,
domain, domain_specified, domain_initial_dot,
path, path_specified,
secure,
expires,
discard,
comment,
comment_url,
rest,
rfc2109=False,
):
if version is not None: version = int(version)
if expires is not None: expires = int(expires)
if port is None and port_specified is True:
raise ValueError("if port is None, port_specified must be false")
self.version = version
self.name = name
self.value = value
self.port = port
self.port_specified = port_specified
# normalise case, as per RFC 2965 section 3.3.3
self.domain = domain.lower()
self.domain_specified = domain_specified
# Sigh. We need to know whether the domain given in the
# cookie-attribute had an initial dot, in order to follow RFC 2965
# (as clarified in draft errata). Needed for the returned $Domain
# value.
self.domain_initial_dot = domain_initial_dot
self.path = path
self.path_specified = path_specified
self.secure = secure
self.expires = expires
self.discard = discard
self.comment = comment
self.comment_url = comment_url
self.rfc2109 = rfc2109
self._rest = copy.copy(rest)
def has_nonstandard_attr(self, name):
return name in self._rest
def get_nonstandard_attr(self, name, default=None):
return self._rest.get(name, default)
def set_nonstandard_attr(self, name, value):
self._rest[name] = value
def is_expired(self, now=None):
if now is None: now = time.time()
if (self.expires is not None) and (self.expires <= now):
return True
return False
def __str__(self):
if self.port is None: p = ""
else: p = ":"+self.port
limit = self.domain + p + self.path
if self.value is not None:
namevalue = "%s=%s" % (self.name, self.value)
else:
namevalue = self.name
return "<Cookie %s for %s>" % (namevalue, limit)
def __repr__(self):
args = []
for name in ("version", "name", "value",
"port", "port_specified",
"domain", "domain_specified", "domain_initial_dot",
"path", "path_specified",
"secure", "expires", "discard", "comment", "comment_url",
):
attr = getattr(self, name)
args.append("%s=%s" % (name, repr(attr)))
args.append("rest=%s" % repr(self._rest))
args.append("rfc2109=%s" % repr(self.rfc2109))
return "Cookie(%s)" % ", ".join(args)
class CookiePolicy:
"""Defines which cookies get accepted from and returned to server.
May also modify cookies, though this is probably a bad idea.
The subclass DefaultCookiePolicy defines the standard rules for Netscape
and RFC 2965 cookies -- override that if you want a customised policy.
"""
def set_ok(self, cookie, request):
"""Return true if (and only if) cookie should be accepted from server.
Currently, pre-expired cookies never get this far -- the CookieJar
class deletes such cookies itself.
"""
raise NotImplementedError()
def return_ok(self, cookie, request):
"""Return true if (and only if) cookie should be returned to server."""
raise NotImplementedError()
def domain_return_ok(self, domain, request):
"""Return false if cookies should not be returned, given cookie domain.
"""
return True
def path_return_ok(self, path, request):
"""Return false if cookies should not be returned, given cookie path.
"""
return True
class DefaultCookiePolicy(CookiePolicy):
"""Implements the standard rules for accepting and returning cookies."""
DomainStrictNoDots = 1
DomainStrictNonDomain = 2
DomainRFC2965Match = 4
DomainLiberal = 0
DomainStrict = DomainStrictNoDots|DomainStrictNonDomain
def __init__(self,
blocked_domains=None, allowed_domains=None,
netscape=True, rfc2965=False,
rfc2109_as_netscape=None,
hide_cookie2=False,
strict_domain=False,
strict_rfc2965_unverifiable=True,
strict_ns_unverifiable=False,
strict_ns_domain=DomainLiberal,
strict_ns_set_initial_dollar=False,
strict_ns_set_path=False,
):
"""Constructor arguments should be passed as keyword arguments only."""
self.netscape = netscape
self.rfc2965 = rfc2965
self.rfc2109_as_netscape = rfc2109_as_netscape
self.hide_cookie2 = hide_cookie2
self.strict_domain = strict_domain
self.strict_rfc2965_unverifiable = strict_rfc2965_unverifiable
self.strict_ns_unverifiable = strict_ns_unverifiable
self.strict_ns_domain = strict_ns_domain
self.strict_ns_set_initial_dollar = strict_ns_set_initial_dollar
self.strict_ns_set_path = strict_ns_set_path
if blocked_domains is not None:
self._blocked_domains = tuple(blocked_domains)
else:
self._blocked_domains = ()
if allowed_domains is not None:
allowed_domains = tuple(allowed_domains)
self._allowed_domains = allowed_domains
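    # Illustrative configuration (a sketch; the blocked domain is made up):
    #   policy = DefaultCookiePolicy(
    #       blocked_domains=[".ads.example.com"],
    #       strict_ns_domain=DefaultCookiePolicy.DomainStrict)
    # rejects cookies from the listed domains and enables both strict
    # Netscape domain checks defined above.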
def blocked_domains(self):
"""Return the sequence of blocked domains (as a tuple)."""
return self._blocked_domains
def set_blocked_domains(self, blocked_domains):
"""Set the sequence of blocked domains."""
self._blocked_domains = tuple(blocked_domains)
def is_blocked(self, domain):
for blocked_domain in self._blocked_domains:
if user_domain_match(domain, blocked_domain):
return True
return False
def allowed_domains(self):
"""Return None, or the sequence of allowed domains (as a tuple)."""
return self._allowed_domains
def set_allowed_domains(self, allowed_domains):
"""Set the sequence of allowed domains, or None."""
if allowed_domains is not None:
allowed_domains = tuple(allowed_domains)
self._allowed_domains = allowed_domains
def is_not_allowed(self, domain):
if self._allowed_domains is None:
return False
for allowed_domain in self._allowed_domains:
if user_domain_match(domain, allowed_domain):
return False
return True
def set_ok(self, cookie, request):
"""
If you override .set_ok(), be sure to call this method. If it returns
false, so should your subclass (assuming your subclass wants to be more
strict about which cookies to accept).
"""
_debug(" - checking cookie %s=%s", cookie.name, cookie.value)
assert cookie.name is not None
for n in "version", "verifiability", "name", "path", "domain", "port":
fn_name = "set_ok_"+n
fn = getattr(self, fn_name)
if not fn(cookie, request):
return False
return True
def set_ok_version(self, cookie, request):
if cookie.version is None:
# Version is always set to 0 by parse_ns_headers if it's a Netscape
# cookie, so this must be an invalid RFC 2965 cookie.
_debug(" Set-Cookie2 without version attribute (%s=%s)",
cookie.name, cookie.value)
return False
if cookie.version > 0 and not self.rfc2965:
_debug(" RFC 2965 cookies are switched off")
return False
elif cookie.version == 0 and not self.netscape:
_debug(" Netscape cookies are switched off")
return False
return True
def set_ok_verifiability(self, cookie, request):
if request.unverifiable and is_third_party(request):
if cookie.version > 0 and self.strict_rfc2965_unverifiable:
_debug(" third-party RFC 2965 cookie during "
"unverifiable transaction")
return False
elif cookie.version == 0 and self.strict_ns_unverifiable:
_debug(" third-party Netscape cookie during "
"unverifiable transaction")
return False
return True
def set_ok_name(self, cookie, request):
# Try and stop servers setting V0 cookies designed to hack other
# servers that know both V0 and V1 protocols.
if (cookie.version == 0 and self.strict_ns_set_initial_dollar and
cookie.name.startswith("$")):
_debug(" illegal name (starts with '$'): '%s'", cookie.name)
return False
return True
def set_ok_path(self, cookie, request):
if cookie.path_specified:
req_path = request_path(request)
if ((cookie.version > 0 or
(cookie.version == 0 and self.strict_ns_set_path)) and
not req_path.startswith(cookie.path)):
_debug(" path attribute %s is not a prefix of request "
"path %s", cookie.path, req_path)
return False
return True
def set_ok_domain(self, cookie, request):
if self.is_blocked(cookie.domain):
_debug(" domain %s is in user block-list", cookie.domain)
return False
if self.is_not_allowed(cookie.domain):
_debug(" domain %s is not in user allow-list", cookie.domain)
return False
if cookie.domain_specified:
req_host, erhn = eff_request_host(request)
domain = cookie.domain
if self.strict_domain and (domain.count(".") >= 2):
# XXX This should probably be compared with the Konqueror
# (kcookiejar.cpp) and Mozilla implementations, but it's a
# losing battle.
i = domain.rfind(".")
j = domain.rfind(".", 0, i)
if j == 0: # domain like .foo.bar
tld = domain[i+1:]
sld = domain[j+1:i]
if sld.lower() in ("co", "ac", "com", "edu", "org", "net",
"gov", "mil", "int", "aero", "biz", "cat", "coop",
"info", "jobs", "mobi", "museum", "name", "pro",
"travel", "eu") and len(tld) == 2:
# domain like .co.uk
_debug(" country-code second level domain %s", domain)
return False
if domain.startswith("."):
undotted_domain = domain[1:]
else:
undotted_domain = domain
embedded_dots = (undotted_domain.find(".") >= 0)
if not embedded_dots and domain != ".local":
_debug(" non-local domain %s contains no embedded dot",
domain)
return False
if cookie.version == 0:
if (not erhn.endswith(domain) and
(not erhn.startswith(".") and
not ("."+erhn).endswith(domain))):
_debug(" effective request-host %s (even with added "
"initial dot) does not end with %s",
erhn, domain)
return False
if (cookie.version > 0 or
(self.strict_ns_domain & self.DomainRFC2965Match)):
if not domain_match(erhn, domain):
_debug(" effective request-host %s does not domain-match "
"%s", erhn, domain)
return False
if (cookie.version > 0 or
(self.strict_ns_domain & self.DomainStrictNoDots)):
host_prefix = req_host[:-len(domain)]
if (host_prefix.find(".") >= 0 and
not IPV4_RE.search(req_host)):
_debug(" host prefix %s for domain %s contains a dot",
host_prefix, domain)
return False
return True
def set_ok_port(self, cookie, request):
if cookie.port_specified:
req_port = request_port(request)
if req_port is None:
req_port = "80"
else:
req_port = str(req_port)
for p in cookie.port.split(","):
try:
int(p)
except ValueError:
_debug(" bad port %s (not numeric)", p)
return False
if p == req_port:
break
else:
_debug(" request port (%s) not found in %s",
req_port, cookie.port)
return False
return True
def return_ok(self, cookie, request):
"""
If you override .return_ok(), be sure to call this method. If it
returns false, so should your subclass (assuming your subclass wants to
be more strict about which cookies to return).
"""
# Path has already been checked by .path_return_ok(), and domain
# blocking done by .domain_return_ok().
_debug(" - checking cookie %s=%s", cookie.name, cookie.value)
for n in "version", "verifiability", "secure", "expires", "port", "domain":
fn_name = "return_ok_"+n
fn = getattr(self, fn_name)
if not fn(cookie, request):
return False
return True
def return_ok_version(self, cookie, request):
if cookie.version > 0 and not self.rfc2965:
_debug(" RFC 2965 cookies are switched off")
return False
elif cookie.version == 0 and not self.netscape:
_debug(" Netscape cookies are switched off")
return False
return True
def return_ok_verifiability(self, cookie, request):
if request.unverifiable and is_third_party(request):
if cookie.version > 0 and self.strict_rfc2965_unverifiable:
_debug(" third-party RFC 2965 cookie during unverifiable "
"transaction")
return False
elif cookie.version == 0 and self.strict_ns_unverifiable:
_debug(" third-party Netscape cookie during unverifiable "
"transaction")
return False
return True
def return_ok_secure(self, cookie, request):
if cookie.secure and request.type != "https":
_debug(" secure cookie with non-secure request")
return False
return True
def return_ok_expires(self, cookie, request):
if cookie.is_expired(self._now):
_debug(" cookie expired")
return False
return True
def return_ok_port(self, cookie, request):
if cookie.port:
req_port = request_port(request)
if req_port is None:
req_port = "80"
for p in cookie.port.split(","):
if p == req_port:
break
else:
_debug(" request port %s does not match cookie port %s",
req_port, cookie.port)
return False
return True
def return_ok_domain(self, cookie, request):
req_host, erhn = eff_request_host(request)
domain = cookie.domain
# strict check of non-domain cookies: Mozilla does this, MSIE5 doesn't
if (cookie.version == 0 and
(self.strict_ns_domain & self.DomainStrictNonDomain) and
not cookie.domain_specified and domain != erhn):
_debug(" cookie with unspecified domain does not string-compare "
"equal to request domain")
return False
if cookie.version > 0 and not domain_match(erhn, domain):
_debug(" effective request-host name %s does not domain-match "
"RFC 2965 cookie domain %s", erhn, domain)
return False
if cookie.version == 0 and not ("."+erhn).endswith(domain):
_debug(" request-host %s does not match Netscape cookie domain "
"%s", req_host, domain)
return False
return True
def domain_return_ok(self, domain, request):
        # Liberal check of domain.  This is here as an optimization to avoid
# having to load lots of MSIE cookie files unless necessary.
req_host, erhn = eff_request_host(request)
if not req_host.startswith("."):
req_host = "."+req_host
if not erhn.startswith("."):
erhn = "."+erhn
if not (req_host.endswith(domain) or erhn.endswith(domain)):
#_debug(" request domain %s does not match cookie domain %s",
# req_host, domain)
return False
if self.is_blocked(domain):
_debug(" domain %s is in user block-list", domain)
return False
if self.is_not_allowed(domain):
_debug(" domain %s is not in user allow-list", domain)
return False
return True
def path_return_ok(self, path, request):
_debug("- checking cookie path=%s", path)
req_path = request_path(request)
if not req_path.startswith(path):
_debug(" %s does not path-match %s", req_path, path)
return False
return True
def vals_sorted_by_key(adict):
keys = sorted(adict.keys())
return map(adict.get, keys)
def deepvalues(mapping):
"""Iterates over nested mapping, depth-first, in sorted order by key."""
values = vals_sorted_by_key(mapping)
for obj in values:
mapping = False
try:
obj.items
except AttributeError:
pass
else:
mapping = True
for subobj in deepvalues(obj):
yield subobj
if not mapping:
yield obj
# Used as second parameter to dict.get() method, to distinguish absent
# dict key from one with a None value.
class Absent: pass
class CookieJar:
"""Collection of HTTP cookies.
You may not need to know about this class: try
urllib.request.build_opener(HTTPCookieProcessor).open(url).
"""
non_word_re = re.compile(r"\W")
quote_re = re.compile(r"([\"\\])")
strict_domain_re = re.compile(r"\.?[^.]*")
domain_re = re.compile(r"[^.]*")
dots_re = re.compile(r"^\.+")
magic_re = re.compile(r"^\#LWP-Cookies-(\d+\.\d+)", re.ASCII)
def __init__(self, policy=None):
if policy is None:
policy = DefaultCookiePolicy()
self._policy = policy
self._cookies_lock = _threading.RLock()
self._cookies = {}
def set_policy(self, policy):
self._policy = policy
def _cookies_for_domain(self, domain, request):
cookies = []
if not self._policy.domain_return_ok(domain, request):
return []
_debug("Checking %s for cookies to return", domain)
cookies_by_path = self._cookies[domain]
for path in cookies_by_path.keys():
if not self._policy.path_return_ok(path, request):
continue
cookies_by_name = cookies_by_path[path]
for cookie in cookies_by_name.values():
if not self._policy.return_ok(cookie, request):
_debug(" not returning cookie")
continue
_debug(" it's a match")
cookies.append(cookie)
return cookies
def _cookies_for_request(self, request):
"""Return a list of cookies to be returned to server."""
cookies = []
for domain in self._cookies.keys():
cookies.extend(self._cookies_for_domain(domain, request))
return cookies
def _cookie_attrs(self, cookies):
"""Return a list of cookie-attributes to be returned to server.
like ['foo="bar"; $Path="/"', ...]
The $Version attribute is also added when appropriate (currently only
once per request).
"""
# add cookies in order of most specific (ie. longest) path first
cookies.sort(key=lambda a: len(a.path), reverse=True)
version_set = False
attrs = []
for cookie in cookies:
# set version of Cookie header
# XXX
# What should it be if multiple matching Set-Cookie headers have
# different versions themselves?
# Answer: there is no answer; was supposed to be settled by
# RFC 2965 errata, but that may never appear...
version = cookie.version
if not version_set:
version_set = True
if version > 0:
attrs.append("$Version=%s" % version)
# quote cookie value if necessary
# (not for Netscape protocol, which already has any quotes
# intact, due to the poorly-specified Netscape Cookie: syntax)
if ((cookie.value is not None) and
self.non_word_re.search(cookie.value) and version > 0):
value = self.quote_re.sub(r"\\\1", cookie.value)
else:
value = cookie.value
# add cookie-attributes to be returned in Cookie header
if cookie.value is None:
attrs.append(cookie.name)
else:
attrs.append("%s=%s" % (cookie.name, value))
if version > 0:
if cookie.path_specified:
attrs.append('$Path="%s"' % cookie.path)
if cookie.domain.startswith("."):
domain = cookie.domain
if (not cookie.domain_initial_dot and
domain.startswith(".")):
domain = domain[1:]
attrs.append('$Domain="%s"' % domain)
if cookie.port is not None:
p = "$Port"
if cookie.port_specified:
p = p + ('="%s"' % cookie.port)
attrs.append(p)
return attrs
def add_cookie_header(self, request):
"""Add correct Cookie: header to request (urllib.request.Request object).
The Cookie2 header is also added unless policy.hide_cookie2 is true.
"""
_debug("add_cookie_header")
self._cookies_lock.acquire()
try:
self._policy._now = self._now = int(time.time())
cookies = self._cookies_for_request(request)
attrs = self._cookie_attrs(cookies)
if attrs:
if not request.has_header("Cookie"):
request.add_unredirected_header(
"Cookie", "; ".join(attrs))
# if necessary, advertise that we know RFC 2965
if (self._policy.rfc2965 and not self._policy.hide_cookie2 and
not request.has_header("Cookie2")):
for cookie in cookies:
if cookie.version != 1:
request.add_unredirected_header("Cookie2", '$Version="1"')
break
finally:
self._cookies_lock.release()
self.clear_expired_cookies()
def _normalized_cookie_tuples(self, attrs_set):
"""Return list of tuples containing normalised cookie information.
attrs_set is the list of lists of key,value pairs extracted from
the Set-Cookie or Set-Cookie2 headers.
Tuples are name, value, standard, rest, where name and value are the
cookie name and value, standard is a dictionary containing the standard
cookie-attributes (discard, secure, version, expires or max-age,
domain, path and port) and rest is a dictionary containing the rest of
the cookie-attributes.
"""
cookie_tuples = []
boolean_attrs = "discard", "secure"
value_attrs = ("version",
"expires", "max-age",
"domain", "path", "port",
"comment", "commenturl")
for cookie_attrs in attrs_set:
name, value = cookie_attrs[0]
# Build dictionary of standard cookie-attributes (standard) and
# dictionary of other cookie-attributes (rest).
# Note: expiry time is normalised to seconds since epoch. V0
# cookies should have the Expires cookie-attribute, and V1 cookies
# should have Max-Age, but since V1 includes RFC 2109 cookies (and
# since V0 cookies may be a mish-mash of Netscape and RFC 2109), we
# accept either (but prefer Max-Age).
max_age_set = False
bad_cookie = False
standard = {}
rest = {}
for k, v in cookie_attrs[1:]:
lc = k.lower()
# don't lose case distinction for unknown fields
if lc in value_attrs or lc in boolean_attrs:
k = lc
if k in boolean_attrs and v is None:
# boolean cookie-attribute is present, but has no value
# (like "discard", rather than "port=80")
v = True
if k in standard:
# only first value is significant
continue
if k == "domain":
if v is None:
_debug(" missing value for domain attribute")
bad_cookie = True
break
# RFC 2965 section 3.3.3
v = v.lower()
if k == "expires":
if max_age_set:
# Prefer max-age to expires (like Mozilla)
continue
if v is None:
_debug(" missing or invalid value for expires "
"attribute: treating as session cookie")
continue
if k == "max-age":
max_age_set = True
try:
v = int(v)
except ValueError:
_debug(" missing or invalid (non-numeric) value for "
"max-age attribute")
bad_cookie = True
break
# convert RFC 2965 Max-Age to seconds since epoch
# XXX Strictly you're supposed to follow RFC 2616
                    #   age-calculation rules.  Remember that zero Max-Age
                    #   is a request to discard (old and new) cookie, though.
k = "expires"
v = self._now + v
if (k in value_attrs) or (k in boolean_attrs):
if (v is None and
k not in ("port", "comment", "commenturl")):
_debug(" missing value for %s attribute" % k)
bad_cookie = True
break
standard[k] = v
else:
rest[k] = v
if bad_cookie:
continue
cookie_tuples.append((name, value, standard, rest))
return cookie_tuples
def _cookie_from_cookie_tuple(self, tup, request):
# standard is dict of standard cookie-attributes, rest is dict of the
# rest of them
name, value, standard, rest = tup
domain = standard.get("domain", Absent)
path = standard.get("path", Absent)
port = standard.get("port", Absent)
expires = standard.get("expires", Absent)
# set the easy defaults
version = standard.get("version", None)
if version is not None:
try:
version = int(version)
except ValueError:
return None # invalid version, ignore cookie
secure = standard.get("secure", False)
# (discard is also set if expires is Absent)
discard = standard.get("discard", False)
comment = standard.get("comment", None)
comment_url = standard.get("commenturl", None)
# set default path
if path is not Absent and path != "":
path_specified = True
path = escape_path(path)
else:
path_specified = False
path = request_path(request)
i = path.rfind("/")
if i != -1:
if version == 0:
# Netscape spec parts company from reality here
path = path[:i]
else:
path = path[:i+1]
if len(path) == 0: path = "/"
# set default domain
domain_specified = domain is not Absent
# but first we have to remember whether it starts with a dot
domain_initial_dot = False
if domain_specified:
domain_initial_dot = bool(domain.startswith("."))
if domain is Absent:
req_host, erhn = eff_request_host(request)
domain = erhn
elif not domain.startswith("."):
domain = "."+domain
# set default port
port_specified = False
if port is not Absent:
if port is None:
# Port attr present, but has no value: default to request port.
# Cookie should then only be sent back on that port.
port = request_port(request)
else:
port_specified = True
port = re.sub(r"\s+", "", port)
else:
# No port attr present. Cookie can be sent back on any port.
port = None
# set default expires and discard
if expires is Absent:
expires = None
discard = True
elif expires <= self._now:
# Expiry date in past is request to delete cookie. This can't be
# in DefaultCookiePolicy, because can't delete cookies there.
try:
self.clear(domain, path, name)
except KeyError:
pass
_debug("Expiring cookie, domain='%s', path='%s', name='%s'",
domain, path, name)
return None
return Cookie(version,
name, value,
port, port_specified,
domain, domain_specified, domain_initial_dot,
path, path_specified,
secure,
expires,
discard,
comment,
comment_url,
rest)
def _cookies_from_attrs_set(self, attrs_set, request):
cookie_tuples = self._normalized_cookie_tuples(attrs_set)
cookies = []
for tup in cookie_tuples:
cookie = self._cookie_from_cookie_tuple(tup, request)
if cookie: cookies.append(cookie)
return cookies
def _process_rfc2109_cookies(self, cookies):
rfc2109_as_ns = getattr(self._policy, 'rfc2109_as_netscape', None)
if rfc2109_as_ns is None:
rfc2109_as_ns = not self._policy.rfc2965
for cookie in cookies:
if cookie.version == 1:
cookie.rfc2109 = True
if rfc2109_as_ns:
# treat 2109 cookies as Netscape cookies rather than
# as RFC2965 cookies
cookie.version = 0
def make_cookies(self, response, request):
"""Return sequence of Cookie objects extracted from response object."""
# get cookie-attributes for RFC 2965 and Netscape protocols
headers = response.info()
rfc2965_hdrs = headers.get_all("Set-Cookie2", [])
ns_hdrs = headers.get_all("Set-Cookie", [])
rfc2965 = self._policy.rfc2965
netscape = self._policy.netscape
if ((not rfc2965_hdrs and not ns_hdrs) or
(not ns_hdrs and not rfc2965) or
(not rfc2965_hdrs and not netscape) or
(not netscape and not rfc2965)):
return [] # no relevant cookie headers: quick exit
try:
cookies = self._cookies_from_attrs_set(
split_header_words(rfc2965_hdrs), request)
except Exception:
_warn_unhandled_exception()
cookies = []
if ns_hdrs and netscape:
try:
# RFC 2109 and Netscape cookies
ns_cookies = self._cookies_from_attrs_set(
parse_ns_headers(ns_hdrs), request)
except Exception:
_warn_unhandled_exception()
ns_cookies = []
self._process_rfc2109_cookies(ns_cookies)
# Look for Netscape cookies (from Set-Cookie headers) that match
# corresponding RFC 2965 cookies (from Set-Cookie2 headers).
# For each match, keep the RFC 2965 cookie and ignore the Netscape
# cookie (RFC 2965 section 9.1). Actually, RFC 2109 cookies are
# bundled in with the Netscape cookies for this purpose, which is
# reasonable behaviour.
if rfc2965:
lookup = {}
for cookie in cookies:
lookup[(cookie.domain, cookie.path, cookie.name)] = None
def no_matching_rfc2965(ns_cookie, lookup=lookup):
key = ns_cookie.domain, ns_cookie.path, ns_cookie.name
return key not in lookup
ns_cookies = filter(no_matching_rfc2965, ns_cookies)
if ns_cookies:
cookies.extend(ns_cookies)
return cookies
def set_cookie_if_ok(self, cookie, request):
"""Set a cookie if policy says it's OK to do so."""
self._cookies_lock.acquire()
try:
self._policy._now = self._now = int(time.time())
if self._policy.set_ok(cookie, request):
self.set_cookie(cookie)
finally:
self._cookies_lock.release()
def set_cookie(self, cookie):
"""Set a cookie, without checking whether or not it should be set."""
c = self._cookies
self._cookies_lock.acquire()
try:
if cookie.domain not in c: c[cookie.domain] = {}
c2 = c[cookie.domain]
if cookie.path not in c2: c2[cookie.path] = {}
c3 = c2[cookie.path]
c3[cookie.name] = cookie
finally:
self._cookies_lock.release()
def extract_cookies(self, response, request):
"""Extract cookies from response, where allowable given the request."""
_debug("extract_cookies: %s", response.info())
self._cookies_lock.acquire()
try:
self._policy._now = self._now = int(time.time())
for cookie in self.make_cookies(response, request):
if self._policy.set_ok(cookie, request):
_debug(" setting cookie: %s", cookie)
self.set_cookie(cookie)
finally:
self._cookies_lock.release()
def clear(self, domain=None, path=None, name=None):
"""Clear some cookies.
Invoking this method without arguments will clear all cookies. If
given a single argument, only cookies belonging to that domain will be
removed. If given two arguments, cookies belonging to the specified
path within that domain are removed. If given three arguments, then
the cookie with the specified name, path and domain is removed.
Raises KeyError if no matching cookie exists.
"""
if name is not None:
if (domain is None) or (path is None):
raise ValueError(
"domain and path must be given to remove a cookie by name")
del self._cookies[domain][path][name]
elif path is not None:
if domain is None:
raise ValueError(
"domain must be given to remove cookies by path")
del self._cookies[domain][path]
elif domain is not None:
del self._cookies[domain]
else:
self._cookies = {}
def clear_session_cookies(self):
"""Discard all session cookies.
Note that the .save() method won't save session cookies anyway, unless
you ask otherwise by passing a true ignore_discard argument.
"""
self._cookies_lock.acquire()
try:
for cookie in self:
if cookie.discard:
self.clear(cookie.domain, cookie.path, cookie.name)
finally:
self._cookies_lock.release()
def clear_expired_cookies(self):
"""Discard all expired cookies.
You probably don't need to call this method: expired cookies are never
sent back to the server (provided you're using DefaultCookiePolicy),
this method is called by CookieJar itself every so often, and the
.save() method won't save expired cookies anyway (unless you ask
otherwise by passing a true ignore_expires argument).
"""
self._cookies_lock.acquire()
try:
now = time.time()
for cookie in self:
if cookie.is_expired(now):
self.clear(cookie.domain, cookie.path, cookie.name)
finally:
self._cookies_lock.release()
def __iter__(self):
return deepvalues(self._cookies)
def __len__(self):
"""Return number of contained cookies."""
i = 0
for cookie in self: i = i + 1
return i
def __repr__(self):
r = []
for cookie in self: r.append(repr(cookie))
return "<%s[%s]>" % (self.__class__, ", ".join(r))
def __str__(self):
r = []
for cookie in self: r.append(str(cookie))
return "<%s[%s]>" % (self.__class__, ", ".join(r))
# derives from IOError for backwards-compatibility with Python 2.4.0
class LoadError(IOError): pass
class FileCookieJar(CookieJar):
"""CookieJar that can be loaded from and saved to a file."""
def __init__(self, filename=None, delayload=False, policy=None):
"""
Cookies are NOT loaded from the named file until either the .load() or
.revert() method is called.
"""
CookieJar.__init__(self, policy)
if filename is not None:
try:
filename+""
            except TypeError:
raise ValueError("filename must be string-like")
self.filename = filename
self.delayload = bool(delayload)
def save(self, filename=None, ignore_discard=False, ignore_expires=False):
"""Save cookies to a file."""
raise NotImplementedError()
def load(self, filename=None, ignore_discard=False, ignore_expires=False):
"""Load cookies from a file."""
if filename is None:
if self.filename is not None: filename = self.filename
else: raise ValueError(MISSING_FILENAME_TEXT)
f = open(filename)
try:
self._really_load(f, filename, ignore_discard, ignore_expires)
finally:
f.close()
def revert(self, filename=None,
ignore_discard=False, ignore_expires=False):
"""Clear all cookies and reload cookies from a saved file.
Raises LoadError (or IOError) if reversion is not successful; the
object's state will not be altered if this happens.
"""
if filename is None:
if self.filename is not None: filename = self.filename
else: raise ValueError(MISSING_FILENAME_TEXT)
self._cookies_lock.acquire()
try:
old_state = copy.deepcopy(self._cookies)
self._cookies = {}
try:
self.load(filename, ignore_discard, ignore_expires)
except (LoadError, IOError):
self._cookies = old_state
raise
finally:
self._cookies_lock.release()
def lwp_cookie_str(cookie):
"""Return string representation of Cookie in an the LWP cookie file format.
Actually, the format is extended a bit -- see module docstring.
"""
h = [(cookie.name, cookie.value),
("path", cookie.path),
("domain", cookie.domain)]
if cookie.port is not None: h.append(("port", cookie.port))
if cookie.path_specified: h.append(("path_spec", None))
if cookie.port_specified: h.append(("port_spec", None))
if cookie.domain_initial_dot: h.append(("domain_dot", None))
if cookie.secure: h.append(("secure", None))
if cookie.expires: h.append(("expires",
time2isoz(float(cookie.expires))))
if cookie.discard: h.append(("discard", None))
if cookie.comment: h.append(("comment", cookie.comment))
if cookie.comment_url: h.append(("commenturl", cookie.comment_url))
keys = sorted(cookie._rest.keys())
for k in keys:
h.append((k, str(cookie._rest[k])))
h.append(("version", str(cookie.version)))
return join_header_words([h])
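# Illustrative output (a sketch; the domain and name are made up): for a
# secure, path-specified session cookie the function produces a single
# Set-Cookie3 value along the lines of
#   foo=bar; path="/"; domain=".example.com"; path_spec; secure; discard;
#   version=0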
class LWPCookieJar(FileCookieJar):
"""
The LWPCookieJar saves a sequence of "Set-Cookie3" lines.
"Set-Cookie3" is the format used by the libwww-perl libary, not known
to be compatible with any browser, but which is easy to read and
doesn't lose information about RFC 2965 cookies.
Additional methods
as_lwp_str(ignore_discard=True, ignore_expired=True)
"""
def as_lwp_str(self, ignore_discard=True, ignore_expires=True):
"""Return cookies as a string of "\\n"-separated "Set-Cookie3" headers.
ignore_discard and ignore_expires: see docstring for FileCookieJar.save
"""
now = time.time()
r = []
for cookie in self:
if not ignore_discard and cookie.discard:
continue
if not ignore_expires and cookie.is_expired(now):
continue
r.append("Set-Cookie3: %s" % lwp_cookie_str(cookie))
return "\n".join(r+[""])
def save(self, filename=None, ignore_discard=False, ignore_expires=False):
if filename is None:
if self.filename is not None: filename = self.filename
else: raise ValueError(MISSING_FILENAME_TEXT)
f = open(filename, "w")
try:
# There really isn't an LWP Cookies 2.0 format, but this indicates
# that there is extra information in here (domain_dot and
# port_spec) while still being compatible with libwww-perl, I hope.
f.write("#LWP-Cookies-2.0\n")
f.write(self.as_lwp_str(ignore_discard, ignore_expires))
finally:
f.close()
def _really_load(self, f, filename, ignore_discard, ignore_expires):
magic = f.readline()
if not self.magic_re.search(magic):
msg = ("%r does not look like a Set-Cookie3 (LWP) format "
"file" % filename)
raise LoadError(msg)
now = time.time()
header = "Set-Cookie3:"
boolean_attrs = ("port_spec", "path_spec", "domain_dot",
"secure", "discard")
value_attrs = ("version",
"port", "path", "domain",
"expires",
"comment", "commenturl")
try:
while 1:
line = f.readline()
if line == "": break
if not line.startswith(header):
continue
line = line[len(header):].strip()
for data in split_header_words([line]):
name, value = data[0]
standard = {}
rest = {}
for k in boolean_attrs:
standard[k] = False
for k, v in data[1:]:
if k is not None:
lc = k.lower()
else:
lc = None
# don't lose case distinction for unknown fields
if (lc in value_attrs) or (lc in boolean_attrs):
k = lc
if k in boolean_attrs:
if v is None: v = True
standard[k] = v
elif k in value_attrs:
standard[k] = v
else:
rest[k] = v
h = standard.get
expires = h("expires")
discard = h("discard")
if expires is not None:
expires = iso2time(expires)
if expires is None:
discard = True
domain = h("domain")
domain_specified = domain.startswith(".")
c = Cookie(h("version"), name, value,
h("port"), h("port_spec"),
domain, domain_specified, h("domain_dot"),
h("path"), h("path_spec"),
h("secure"),
expires,
discard,
h("comment"),
h("commenturl"),
rest)
if not ignore_discard and c.discard:
continue
if not ignore_expires and c.is_expired(now):
continue
self.set_cookie(c)
except IOError:
raise
except Exception:
_warn_unhandled_exception()
raise LoadError("invalid Set-Cookie3 format file %r: %r" %
(filename, line))
class MozillaCookieJar(FileCookieJar):
"""
WARNING: you may want to backup your browser's cookies file if you use
this class to save cookies. I *think* it works, but there have been
bugs in the past!
This class differs from CookieJar only in the format it uses to save and
load cookies to and from a file. This class uses the Mozilla/Netscape
`cookies.txt' format. lynx uses this file format, too.
Don't expect cookies saved while the browser is running to be noticed by
the browser (in fact, Mozilla on unix will overwrite your saved cookies if
you change them on disk while it's running; on Windows, you probably can't
save at all while the browser is running).
Note that the Mozilla/Netscape format will downgrade RFC2965 cookies to
Netscape cookies on saving.
In particular, the cookie version and port number information is lost,
together with information about whether or not Path, Port and Discard were
specified by the Set-Cookie2 (or Set-Cookie) header, and whether or not the
domain as set in the HTTP header started with a dot (yes, I'm aware some
domains in Netscape files start with a dot and some don't -- trust me, you
really don't want to know any more about this).
Note that though Mozilla and Netscape use the same format, they use
slightly different headers. The class saves cookies using the Netscape
header by default (Mozilla can cope with that).
"""
magic_re = re.compile("#( Netscape)? HTTP Cookie File")
header = """\
# Netscape HTTP Cookie File
# http://curl.haxx.se/rfc/cookie_spec.html
# This is a generated file! Do not edit.
"""
def _really_load(self, f, filename, ignore_discard, ignore_expires):
now = time.time()
magic = f.readline()
if not self.magic_re.search(magic):
f.close()
raise LoadError(
"%r does not look like a Netscape format cookies file" %
filename)
try:
while 1:
line = f.readline()
if line == "": break
# last field may be absent, so keep any trailing tab
if line.endswith("\n"): line = line[:-1]
# skip comments and blank lines XXX what is $ for?
if (line.strip().startswith(("#", "$")) or
line.strip() == ""):
continue
domain, domain_specified, path, secure, expires, name, value = \
line.split("\t")
secure = (secure == "TRUE")
domain_specified = (domain_specified == "TRUE")
if name == "":
# cookies.txt regards 'Set-Cookie: foo' as a cookie
# with no name, whereas http.cookiejar regards it as a
# cookie with no value.
name = value
value = None
initial_dot = domain.startswith(".")
assert domain_specified == initial_dot
discard = False
if expires == "":
expires = None
discard = True
# assume path_specified is false
c = Cookie(0, name, value,
None, False,
domain, domain_specified, initial_dot,
path, False,
secure,
expires,
discard,
None,
None,
{})
if not ignore_discard and c.discard:
continue
if not ignore_expires and c.is_expired(now):
continue
self.set_cookie(c)
except IOError:
raise
except Exception:
_warn_unhandled_exception()
raise LoadError("invalid Netscape format cookies file %r: %r" %
(filename, line))
def save(self, filename=None, ignore_discard=False, ignore_expires=False):
if filename is None:
if self.filename is not None: filename = self.filename
else: raise ValueError(MISSING_FILENAME_TEXT)
f = open(filename, "w")
try:
f.write(self.header)
now = time.time()
for cookie in self:
if not ignore_discard and cookie.discard:
continue
if not ignore_expires and cookie.is_expired(now):
continue
if cookie.secure: secure = "TRUE"
else: secure = "FALSE"
if cookie.domain.startswith("."): initial_dot = "TRUE"
else: initial_dot = "FALSE"
if cookie.expires is not None:
expires = str(cookie.expires)
else:
expires = ""
if cookie.value is None:
# cookies.txt regards 'Set-Cookie: foo' as a cookie
# with no name, whereas http.cookiejar regards it as a
# cookie with no value.
name = ""
value = cookie.name
else:
name = cookie.name
value = cookie.value
f.write(
"\t".join([cookie.domain, initial_dot, cookie.path,
secure, expires, name, value])+
"\n")
finally:
f.close()
| {
"content_hash": "559998a8add628988658911617f859c8",
"timestamp": "",
"source": "github",
"line_count": 2091,
"max_line_length": 83,
"avg_line_length": 36.324725011956005,
"alnum_prop": 0.5385425580936081,
"repo_name": "timm/timmnix",
"id": "9fcd4c6f462e47f869f7cf67d496da38d7faf527",
"size": "75955",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "pypy3-v5.5.0-linux64/lib-python/3/http/cookiejar.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "1641"
},
{
"name": "Batchfile",
"bytes": "1234"
},
{
"name": "C",
"bytes": "436685"
},
{
"name": "CSS",
"bytes": "96"
},
{
"name": "Common Lisp",
"bytes": "4"
},
{
"name": "Emacs Lisp",
"bytes": "290698"
},
{
"name": "HTML",
"bytes": "111577"
},
{
"name": "Makefile",
"bytes": "1681"
},
{
"name": "PLSQL",
"bytes": "22886"
},
{
"name": "PowerShell",
"bytes": "1540"
},
{
"name": "Prolog",
"bytes": "14301"
},
{
"name": "Python",
"bytes": "21267592"
},
{
"name": "Roff",
"bytes": "21080"
},
{
"name": "Shell",
"bytes": "27687"
},
{
"name": "TeX",
"bytes": "3052861"
},
{
"name": "VBScript",
"bytes": "481"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django_extensions.db.fields
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('events', '0004_auto_20151228_1634'),
]
operations = [
migrations.CreateModel(
name='EventComment',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created')),
('modified', django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified')),
('text', models.TextField()),
('event', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='comments', to='events.Event')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='comments', to=settings.AUTH_USER_MODEL)),
],
options={
'get_latest_by': 'modified',
'ordering': ('-modified', '-created'),
'abstract': False,
},
),
]
| {
"content_hash": "519abe63588a1415df2376931854d02d",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 143,
"avg_line_length": 41.333333333333336,
"alnum_prop": 0.6151026392961877,
"repo_name": "letsmeet-click/letsmeet.click",
"id": "346e06db9506f71dfb017939e7977f96ad6d3566",
"size": "1434",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "letsmeet/events/migrations/0005_eventcomment.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "425"
},
{
"name": "Dockerfile",
"bytes": "766"
},
{
"name": "HTML",
"bytes": "67033"
},
{
"name": "JavaScript",
"bytes": "3716"
},
{
"name": "Makefile",
"bytes": "74"
},
{
"name": "Python",
"bytes": "98881"
},
{
"name": "Shell",
"bytes": "1027"
}
],
"symlink_target": ""
} |
import time
from xserverpy.utils.porgress import Progress
from xserverpy.utils.config import *
from xserverpy.utils import config
class IntegrationWatcher():
def __init__(self, integrations_service, integration, interval):
self.integration = integration
self.integrations_service = integrations_service
self.progress = Progress()
self.interval = interval
def watch(self):
self.handle_pending()
previous_step = None
while not self.integration.is_complete():
self.integration = self.integrations_service.get_item(self.integration.id)
if self.integration.is_complete():
break
if previous_step != self.integration.step:
previous_step = self.integration.step
self.progress.increment("- Performing step: %s" % self.integration.step)
self.progress.step()
time.sleep(self.interval)
self.progress.done()
print ("")
return self.print_integration_result(self.integration)
def handle_pending(self):
if self.integration.is_pending():
self.progress.increment("- Pending for integration")
            self.previous_bot = None
while self.integration.is_pending():
self.print_running()
self.integration = self.integrations_service.get_item(self.integration.id)
time.sleep(self.interval * 3)
def print_running(self):
integrations = self.integrations_service.get_running_integration()
is_the_only_pending = len(integrations) == 0 or integrations[0].id == self.integration.id
if is_the_only_pending:
self.progress.step()
return
bot_name = integrations[0].bot.name
        if self.previous_bot == bot_name:
pass
else:
            self.previous_bot = bot_name
if config.tty:
sys.stdout.write("\r")
sys.stdout.write("\033[K")
sys.stdout.write("Waiting for bot '%s' to finish integrating" % bot_name)
sys.stdout.flush()
print("")
else:
self.progress.increment("Waiting for bot '%s' to finish integrating" %
bot_name, prefix="")
self.progress.step()
def print_integration_result(self, integration):
if integration.succeeded():
success("Integration number '%s' for bot '%s' completed successfully" %
(integration.number, integration.bot.name))
result = True
elif integration.completed_with_warnings():
warn("Integration number '%s' for bot '%s' completed with warnings" %
(integration.number, integration.bot.name))
result = True
else:
error("Integration number '%s' for bot '%s' failed with result '%s'" %
(integration.number, integration.bot.name, integration.result))
result = False
info("Integration ID '%s" % integration.id)
return result
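# Illustrative usage (a sketch; the service and integration objects come from
# the surrounding xserverpy code, and interval is in seconds):
#   watcher = IntegrationWatcher(integrations_service, integration, interval=5)
#   ok = watcher.watch()   # blocks, reporting progress, until completion;
#                          # True on success or warnings, False on failure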
| {
"content_hash": "00cb5edf14699bf74a5a62d05b612275",
"timestamp": "",
"source": "github",
"line_count": 88,
"max_line_length": 97,
"avg_line_length": 35.40909090909091,
"alnum_prop": 0.5882541720154044,
"repo_name": "oarrabi/xserverpy",
"id": "9bc14eb2651a45ee5df7b2b188fc2af55bfec71d",
"size": "3116",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "xserverpy/lib/integration_watcher.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "JavaScript",
"bytes": "181"
},
{
"name": "Makefile",
"bytes": "1977"
},
{
"name": "Python",
"bytes": "40651"
}
],
"symlink_target": ""
} |
import os
from perfkitbenchmarker import errors
from perfkitbenchmarker import flags
from perfkitbenchmarker import linux_packages
from perfkitbenchmarker import object_storage_service
from perfkitbenchmarker import providers
from perfkitbenchmarker import vm_util
flags.DEFINE_boolean('openstack_swift_insecure', False,
'Allow swiftclient to access Swift service without \n'
'having to verify the SSL certificate')
FLAGS = flags.FLAGS
SWIFTCLIENT_LIB_VERSION = 'python-swiftclient_lib_version'
class SwiftStorageService(object_storage_service.ObjectStorageService):
"""Interface to OpenStack Swift."""
STORAGE_NAME = providers.OPENSTACK
def __init__(self):
self.swift_command_prefix = ''
def PrepareService(self, location):
openstack_creds_set = ('OS_AUTH_URL' in os.environ,
'OS_TENANT_NAME' in os.environ,
'OS_USERNAME' in os.environ,
'OS_PASSWORD' in os.environ,)
if not all(openstack_creds_set):
raise errors.Benchmarks.MissingObjectCredentialException(
'OpenStack credentials not found in environment variables')
self.swift_command_parts = [
'--os-auth-url', os.environ['OS_AUTH_URL'],
'--os-tenant-name', os.environ['OS_TENANT_NAME'],
'--os-username', os.environ['OS_USERNAME'],
'--os-password', os.environ['OS_PASSWORD']]
if FLAGS.openstack_swift_insecure:
self.swift_command_parts.append('--insecure')
self.swift_command_prefix = ' '.join(self.swift_command_parts)
def MakeBucket(self, bucket):
vm_util.IssueCommand(
['swift'] + self.swift_command_parts + ['post', bucket])
def DeleteBucket(self, bucket):
self.EmptyBucket(bucket)
vm_util.IssueCommand(
['swift'] + self.swift_command_parts + ['delete', bucket])
def EmptyBucket(self, bucket):
vm_util.IssueCommand(
['swift'] + self.swift_command_parts + ['delete', bucket])
def PrepareVM(self, vm):
vm.Install('swift_client')
def CleanupVM(self, vm):
vm.Uninstall('swift_client')
vm.RemoteCommand('/usr/bin/yes | sudo pip uninstall python-gflags')
def CLIUploadDirectory(self, vm, directory, file_names, bucket):
return vm.RemoteCommand(
'time swift %s upload %s %s'
% (self.swift_command_prefix, bucket, directory))
def CLIDownloadBucket(self, vm, bucket, objects, dest):
return vm.RemoteCommand(
'time swift %s download %s -D %s'
% (self.swift_command_prefix, bucket, dest))
def Metadata(self, vm):
return {SWIFTCLIENT_LIB_VERSION:
linux_packages.GetPipPackageVersion(vm, 'python-swiftclient')}
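# Illustrative usage (a sketch; assumes OS_AUTH_URL, OS_TENANT_NAME,
# OS_USERNAME and OS_PASSWORD are exported, as PrepareService requires, and
# the bucket name is made up):
#   service = SwiftStorageService()
#   service.PrepareService(location=None)   # location is accepted but unused
#   service.MakeBucket('pkb-test-bucket')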
| {
"content_hash": "80b38fe9d242507fa26c645c3a6e718f",
"timestamp": "",
"source": "github",
"line_count": 79,
"max_line_length": 75,
"avg_line_length": 34.29113924050633,
"alnum_prop": 0.6670358065706903,
"repo_name": "meteorfox/PerfKitBenchmarker",
"id": "60e346c2e793bf69f23c497780e35c3affa8878b",
"size": "3320",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "perfkitbenchmarker/providers/openstack/swift.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Lua",
"bytes": "1547"
},
{
"name": "Python",
"bytes": "1843285"
},
{
"name": "Shell",
"bytes": "23474"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Bot',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=255, unique=True)),
('mu', models.FloatField(default=25.0)),
('sigma', models.FloatField(default=8.33333)),
('enabled', models.BooleanField(default=True)),
],
),
migrations.CreateModel(
name='Match',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('date', models.DateTimeField()),
('replay', models.FileField(upload_to='hlt/')),
('seed', models.CharField(max_length=255)),
('width', models.IntegerField()),
('height', models.IntegerField()),
],
options={
'verbose_name_plural': 'matches',
},
),
migrations.CreateModel(
name='MatchResult',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('rank', models.IntegerField()),
('mu', models.FloatField()),
('sigma', models.FloatField()),
('last_frame_alive', models.IntegerField()),
('bot', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='matches', to='tournament.Bot')),
('match', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='results', to='tournament.Match')),
],
),
]
| {
"content_hash": "efb1a81597af61dfac8688a685f3339b",
"timestamp": "",
"source": "github",
"line_count": 51,
"max_line_length": 137,
"avg_line_length": 38.98039215686274,
"alnum_prop": 0.5372233400402414,
"repo_name": "nmalaguti/mini-halite",
"id": "6e7437a067eee6118d60453e23813af3bd11ebf5",
"size": "2061",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tournament/migrations/0001_initial.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "848"
},
{
"name": "HTML",
"bytes": "11899"
},
{
"name": "JavaScript",
"bytes": "43179"
},
{
"name": "Python",
"bytes": "21587"
}
],
"symlink_target": ""
} |
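The mu and sigma defaults on Bot above (25.0 and 8.33333, i.e. 25/3) match the defaults of the trueskill rating library, which hints at how these fields are meant to be updated. A hedged sketch, assuming the trueskill package is installed; loading and saving the Bot rows is elided:

import trueskill

# Two bots' current ratings, as stored in the mu/sigma fields above.
winner = trueskill.Rating(mu=25.0, sigma=8.33333)
loser = trueskill.Rating(mu=25.0, sigma=8.33333)
# rate_1vs1 returns the updated (winner, loser) ratings after one match.
new_winner, new_loser = trueskill.rate_1vs1(winner, loser)
# Persist new_winner.mu / new_winner.sigma back onto the winning Bot row.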
from utils import *
from model import *
class Handler(BaseHandler):
repo_required = False
def get(self):
self.env.robots_ok = True
self.render('start.html', cache_seconds=0, get_vars=self.get_vars)
def get_vars(self):
# Round off the count so people don't expect it to change every time
# they add a record.
person_count = Counter.get_count(self.repo, 'person.all')
if person_count < 100:
num_people = 0 # No approximate count will be displayed.
else:
# 100, 200, 300, etc.
num_people = int(round(person_count, -2))
return {'num_people': num_people,
'seek_url': self.get_url('/query', role='seek'),
'provide_url': self.get_url('/query', role='provide')}
| {
"content_hash": "e783168a75b36145e4df7e843ef54f41",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 76,
"avg_line_length": 33.5,
"alnum_prop": 0.5808457711442786,
"repo_name": "kspviswa/personfinder",
"id": "cf19fc7b59b63f88ef2cf48449c1abcfe2137df7",
"size": "1402",
"binary": false,
"copies": "16",
"ref": "refs/heads/master",
"path": "app/start.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "877"
},
{
"name": "JavaScript",
"bytes": "15708"
},
{
"name": "Python",
"bytes": "1515547"
}
],
"symlink_target": ""
} |
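A small illustration, not from the repository, of the rounding rule in get_vars above: counts under 100 are suppressed, and anything larger is rounded to the nearest hundred via round(n, -2).

for person_count in (37, 99, 100, 149, 151, 23456):
    num_people = 0 if person_count < 100 else int(round(person_count, -2))
    print('%6d -> %d' % (person_count, num_people))
# 37 -> 0, 99 -> 0, 100 -> 100, 149 -> 100, 151 -> 200, 23456 -> 23500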
import copy
import json
import urlparse
import requests
from keystoneclient import exceptions
from keystoneclient.v2_0 import tenants
from tests import utils
class TenantTests(utils.TestCase):
def setUp(self):
super(TenantTests, self).setUp()
self.TEST_REQUEST_HEADERS = {
'X-Auth-Token': 'aToken',
'User-Agent': 'python-keystoneclient',
}
self.TEST_POST_HEADERS = {
'Content-Type': 'application/json',
'X-Auth-Token': 'aToken',
'User-Agent': 'python-keystoneclient',
}
self.TEST_TENANTS = {
"tenants": {
"values": [
{
"enabled": True,
"description": "A description change!",
"name": "invisible_to_admin",
"id": 3,
},
{
"enabled": True,
"description": "None",
"name": "demo",
"id": 2,
},
{
"enabled": True,
"description": "None",
"name": "admin",
"id": 1,
},
{
"extravalue01": "metadata01",
"enabled": True,
"description": "For testing extras",
"name": "test_extras",
"id": 4,
}
],
"links": [],
},
}
def test_create(self):
req_body = {
"tenant": {
"name": "tenantX",
"description": "Like tenant 9, but better.",
"enabled": True,
"extravalue01": "metadata01",
},
}
resp_body = {
"tenant": {
"name": "tenantX",
"enabled": True,
"id": 4,
"description": "Like tenant 9, but better.",
"extravalue01": "metadata01",
}
}
resp = utils.TestResponse({
"status_code": 200,
"text": json.dumps(resp_body),
})
kwargs = copy.copy(self.TEST_REQUEST_BASE)
kwargs['headers'] = self.TEST_POST_HEADERS
kwargs['data'] = json.dumps(req_body)
requests.request('POST',
urlparse.urljoin(self.TEST_URL, 'v2.0/tenants'),
**kwargs).AndReturn((resp))
self.mox.ReplayAll()
tenant = self.client.tenants.create(
req_body['tenant']['name'],
req_body['tenant']['description'],
req_body['tenant']['enabled'],
extravalue01=req_body['tenant']['extravalue01'],
name="dont overwrite priors")
self.assertTrue(isinstance(tenant, tenants.Tenant))
self.assertEqual(tenant.id, 4)
self.assertEqual(tenant.name, "tenantX")
self.assertEqual(tenant.description, "Like tenant 9, but better.")
self.assertEqual(tenant.extravalue01, "metadata01")
def test_duplicate_create(self):
req_body = {
"tenant": {
"name": "tenantX",
"description": "The duplicate tenant.",
"enabled": True
},
}
resp_body = {
"error": {
"message": "Conflict occurred attempting to store project.",
"code": 409,
"title": "Conflict",
}
}
resp = utils.TestResponse({
"status_code": 409,
"text": json.dumps(resp_body),
})
kwargs = copy.copy(self.TEST_REQUEST_BASE)
kwargs['headers'] = self.TEST_POST_HEADERS
kwargs['data'] = json.dumps(req_body)
requests.request('POST',
urlparse.urljoin(self.TEST_URL, 'v2.0/tenants'),
**kwargs).AndReturn((resp))
self.mox.ReplayAll()
def create_duplicate_tenant():
self.client.tenants.create(req_body['tenant']['name'],
req_body['tenant']['description'],
req_body['tenant']['enabled'])
self.assertRaises(exceptions.Conflict, create_duplicate_tenant)
def test_delete(self):
resp = utils.TestResponse({
"status_code": 204,
"text": "",
})
kwargs = copy.copy(self.TEST_REQUEST_BASE)
kwargs['headers'] = self.TEST_REQUEST_HEADERS
requests.request('DELETE',
urlparse.urljoin(self.TEST_URL, 'v2.0/tenants/1'),
**kwargs).AndReturn((resp))
self.mox.ReplayAll()
self.client.tenants.delete(1)
def test_get(self):
resp = utils.TestResponse({
"status_code": 200,
"text": json.dumps({
'tenant': self.TEST_TENANTS['tenants']['values'][2],
}),
})
kwargs = copy.copy(self.TEST_REQUEST_BASE)
kwargs['headers'] = self.TEST_REQUEST_HEADERS
requests.request('GET',
urlparse.urljoin(self.TEST_URL, 'v2.0/tenants/1'),
**kwargs).AndReturn((resp))
self.mox.ReplayAll()
t = self.client.tenants.get(1)
self.assertTrue(isinstance(t, tenants.Tenant))
self.assertEqual(t.id, 1)
self.assertEqual(t.name, 'admin')
def test_list(self):
resp = utils.TestResponse({
"status_code": 200,
"text": json.dumps(self.TEST_TENANTS),
})
kwargs = copy.copy(self.TEST_REQUEST_BASE)
kwargs['headers'] = self.TEST_REQUEST_HEADERS
requests.request('GET',
urlparse.urljoin(self.TEST_URL, 'v2.0/tenants'),
**kwargs).AndReturn((resp))
self.mox.ReplayAll()
tenant_list = self.client.tenants.list()
        for t in tenant_list:
            self.assertTrue(isinstance(t, tenants.Tenant))
def test_list_limit(self):
resp = utils.TestResponse({
"status_code": 200,
"text": json.dumps(self.TEST_TENANTS),
})
kwargs = copy.copy(self.TEST_REQUEST_BASE)
kwargs['headers'] = self.TEST_REQUEST_HEADERS
requests.request('GET',
urlparse.urljoin(self.TEST_URL,
'v2.0/tenants?limit=1'),
**kwargs).AndReturn((resp))
self.mox.ReplayAll()
tenant_list = self.client.tenants.list(limit=1)
        for t in tenant_list:
            self.assertTrue(isinstance(t, tenants.Tenant))
def test_list_marker(self):
resp = utils.TestResponse({
"status_code": 200,
"text": json.dumps(self.TEST_TENANTS),
})
kwargs = copy.copy(self.TEST_REQUEST_BASE)
kwargs['headers'] = self.TEST_REQUEST_HEADERS
requests.request('GET',
urlparse.urljoin(self.TEST_URL,
'v2.0/tenants?marker=1'),
**kwargs).AndReturn((resp))
self.mox.ReplayAll()
tenant_list = self.client.tenants.list(marker=1)
        for t in tenant_list:
            self.assertTrue(isinstance(t, tenants.Tenant))
def test_list_limit_marker(self):
resp = utils.TestResponse({
"status_code": 200,
"text": json.dumps(self.TEST_TENANTS),
})
kwargs = copy.copy(self.TEST_REQUEST_BASE)
kwargs['headers'] = self.TEST_REQUEST_HEADERS
requests.request('GET',
urlparse.urljoin(self.TEST_URL,
'v2.0/tenants?marker=1&limit=1'),
**kwargs).AndReturn((resp))
self.mox.ReplayAll()
tenant_list = self.client.tenants.list(limit=1, marker=1)
        for t in tenant_list:
            self.assertTrue(isinstance(t, tenants.Tenant))
def test_update(self):
req_body = {
"tenant": {
"id": 4,
"name": "tenantX",
"description": "I changed you!",
"enabled": False,
"extravalue01": "metadataChanged",
#"extraname": "dontoverwrite!",
},
}
resp_body = {
"tenant": {
"name": "tenantX",
"enabled": False,
"id": 4,
"description": "I changed you!",
"extravalue01": "metadataChanged",
},
}
resp = utils.TestResponse({
"status_code": 200,
"text": json.dumps(resp_body),
})
kwargs = copy.copy(self.TEST_REQUEST_BASE)
kwargs['headers'] = self.TEST_POST_HEADERS
kwargs['data'] = json.dumps(req_body)
requests.request('POST',
urlparse.urljoin(self.TEST_URL,
'v2.0/tenants/4'),
**kwargs).AndReturn((resp))
self.mox.ReplayAll()
tenant = self.client.tenants.update(
req_body['tenant']['id'],
req_body['tenant']['name'],
req_body['tenant']['description'],
req_body['tenant']['enabled'],
extravalue01=req_body['tenant']['extravalue01'],
name="dont overwrite priors")
self.assertTrue(isinstance(tenant, tenants.Tenant))
self.assertEqual(tenant.id, 4)
self.assertEqual(tenant.name, "tenantX")
self.assertEqual(tenant.description, "I changed you!")
self.assertFalse(tenant.enabled)
self.assertEqual(tenant.extravalue01, "metadataChanged")
def test_update_empty_description(self):
req_body = {
"tenant": {
"id": 4,
"name": "tenantX",
"description": "",
"enabled": False,
},
}
resp_body = {
"tenant": {
"name": "tenantX",
"enabled": False,
"id": 4,
"description": "",
},
}
resp = utils.TestResponse({
"status_code": 200,
"text": json.dumps(resp_body),
})
kwargs = copy.copy(self.TEST_REQUEST_BASE)
kwargs['headers'] = self.TEST_POST_HEADERS
kwargs['data'] = json.dumps(req_body)
requests.request('POST',
urlparse.urljoin(self.TEST_URL,
'v2.0/tenants/4'),
**kwargs).AndReturn((resp))
self.mox.ReplayAll()
tenant = self.client.tenants.update(req_body['tenant']['id'],
req_body['tenant']['name'],
req_body['tenant']['description'],
req_body['tenant']['enabled'])
self.assertTrue(isinstance(tenant, tenants.Tenant))
self.assertEqual(tenant.id, 4)
self.assertEqual(tenant.name, "tenantX")
self.assertEqual(tenant.description, "")
self.assertFalse(tenant.enabled)
def test_add_user(self):
resp = utils.TestResponse({
"status_code": 204,
"text": '',
})
kwargs = copy.copy(self.TEST_REQUEST_BASE)
kwargs['headers'] = self.TEST_REQUEST_HEADERS
requests.request('PUT',
urlparse.urljoin(self.TEST_URL,
'v2.0/tenants/4/users/foo/roles/OS-KSADM/barrr'),
**kwargs).AndReturn((resp))
self.mox.ReplayAll()
self.client.tenants.add_user('4', 'foo', 'barrr')
def test_remove_user(self):
resp = utils.TestResponse({
"status_code": 204,
"text": '',
})
kwargs = copy.copy(self.TEST_REQUEST_BASE)
kwargs['headers'] = self.TEST_REQUEST_HEADERS
requests.request('DELETE',
urlparse.urljoin(self.TEST_URL,
'v2.0/tenants/4/users/foo/roles/OS-KSADM/barrr'),
**kwargs).AndReturn((resp))
self.mox.ReplayAll()
self.client.tenants.remove_user('4', 'foo', 'barrr')
def test_tenant_add_user(self):
req_body = {
"tenant": {
"id": 4,
"name": "tenantX",
"description": "I changed you!",
"enabled": False,
},
}
resp = utils.TestResponse({
"status_code": 204,
"text": '',
})
kwargs = copy.copy(self.TEST_REQUEST_BASE)
kwargs['headers'] = self.TEST_REQUEST_HEADERS
requests.request('PUT',
urlparse.urljoin(self.TEST_URL,
'v2.0/tenants/4/users/foo/roles/OS-KSADM/barrr'),
**kwargs).AndReturn((resp))
self.mox.ReplayAll()
# make tenant object with manager
tenant = self.client.tenants.resource_class(self.client.tenants,
req_body['tenant'])
tenant.add_user('foo', 'barrr')
self.assertTrue(isinstance(tenant, tenants.Tenant))
def test_tenant_remove_user(self):
req_body = {
"tenant": {
"id": 4,
"name": "tenantX",
"description": "I changed you!",
"enabled": False,
},
}
resp = utils.TestResponse({
"status_code": 204,
"text": '',
})
kwargs = copy.copy(self.TEST_REQUEST_BASE)
kwargs['headers'] = self.TEST_REQUEST_HEADERS
requests.request('DELETE',
urlparse.urljoin(self.TEST_URL,
'v2.0/tenants/4/users/foo/roles/OS-KSADM/barrr'),
**kwargs).AndReturn((resp))
self.mox.ReplayAll()
# make tenant object with manager
tenant = self.client.tenants.resource_class(self.client.tenants,
req_body['tenant'])
tenant.remove_user('foo', 'barrr')
self.assertTrue(isinstance(tenant, tenants.Tenant))
| {
"content_hash": "1e8a8ff5c1b618c3336c560419ac8228",
"timestamp": "",
"source": "github",
"line_count": 411,
"max_line_length": 78,
"avg_line_length": 34.98053527980535,
"alnum_prop": 0.4771510050775544,
"repo_name": "tylertian/Openstack",
"id": "bf87e3509f2f762324265ad2966b077fc703787f",
"size": "14377",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "openstack F/python-keystoneclient/tests/v2_0/test_tenants.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "239919"
},
{
"name": "JavaScript",
"bytes": "156942"
},
{
"name": "Python",
"bytes": "16949418"
},
{
"name": "Shell",
"bytes": "96743"
}
],
"symlink_target": ""
} |
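For orientation, a hedged sketch of the v2.0 tenants API that the mox-based tests above drive; the endpoint URL, credentials, and the user/role IDs are placeholders, and a reachable Keystone v2.0 service is assumed.

from keystoneclient.v2_0 import client

keystone = client.Client(username='admin', password='secret',
                         tenant_name='admin',
                         auth_url='http://keystone:5000/v2.0')
tenant = keystone.tenants.create(tenant_name='tenantX',
                                 description='demo tenant', enabled=True)
keystone.tenants.update(tenant.id, description='I changed you!')
keystone.tenants.add_user(tenant.id, 'some-user-id', 'some-role-id')
keystone.tenants.delete(tenant.id)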
from setuptools import setup, find_packages
from sys import version_info
from pysendpulse import (
__author__,
__author_email__,
__version__
)
install_requires = ['python3-memcached', ]
if version_info.major == 2:
install_requires = ['python-memcached', 'requests', 'simplejson']
with open("README.md", "r") as fh:
long_description = fh.read()
setup(
name='pysendpulse',
version=__version__,
packages=find_packages(),
description='A simple SendPulse REST client library and example for Python',
long_description=long_description,
long_description_content_type="text/markdown",
author=__author__,
author_email=__author_email__,
url='https://github.com/sendpulse/sendpulse-rest-api-python',
install_requires=install_requires
)
| {
"content_hash": "39fb7764a7f48a1356a55e43cae0d02f",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 80,
"avg_line_length": 28.321428571428573,
"alnum_prop": 0.6847414880201765,
"repo_name": "sendpulse/sendpulse-rest-api-python",
"id": "55f4505417dbbf272afafbda48cb756685f643c6",
"size": "793",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "62671"
}
],
"symlink_target": ""
} |
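What this setup.py enables, as a hedged sketch: after pip install pysendpulse, the client class is importable; the ID and secret values below are placeholders.

from pysendpulse.pysendpulse import PySendPulse

# Token storage backend; 'memcached' matches the memcached dependency above.
api = PySendPulse('REST_API_ID', 'REST_API_SECRET', 'memcached')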
import os
import mock
import tempfile
from smtLayer import smt
from zvmsdk import config
from zvmsdk import database
from zvmsdk import exception
from zvmsdk import smtclient
from zvmsdk import utils as zvmutils
from zvmsdk.tests.unit import base
CONF = config.CONF
class SDKSMTClientTestCases(base.SDKTestCase):
"""Test cases for smt zvm client."""
def setUp(self):
self._smtclient = smtclient.SMTClient()
def _generate_results(self, overallrc=0, rc=0, rs=0, errno=0, strerror='',
logentries=[], response=[]):
return {'rc': rc,
'errno': errno,
'strError': strerror,
'overallRC': overallrc,
'logEntries': logentries,
'rs': rs,
'response': response}
@mock.patch.object(smt.SMT, 'request')
def test_private_request_success(self, request):
requestData = "fake request"
request.return_value = {'overallRC': 0}
self._smtclient._request(requestData)
request.assert_called_once_with(requestData)
@mock.patch.object(smt.SMT, 'request')
def test_private_request_failed(self, request):
requestData = "fake request"
request.return_value = {'overallRC': 1, 'logEntries': []}
self.assertRaises(exception.SDKSMTRequestFailed,
self._smtclient._request, requestData)
@mock.patch.object(smtclient.SMTClient, '_request')
def test_guest_start(self, request):
fake_userid = 'FakeID'
requestData = "PowerVM FakeID on"
request.return_value = {'overallRC': 0}
self._smtclient.guest_start(fake_userid)
request.assert_called_once_with(requestData)
@mock.patch.object(smtclient.SMTClient, '_request')
def test_guest_stop(self, request):
fake_userid = 'FakeID'
requestData = "PowerVM FakeID off"
request.return_value = {'overallRC': 0}
self._smtclient.guest_stop(fake_userid)
request.assert_called_once_with(requestData)
@mock.patch.object(smtclient.SMTClient, '_request')
def test_guest_stop_with_timeout(self, request):
fake_userid = 'FakeID'
requestData = "PowerVM FakeID off --maxwait 300"
request.return_value = {'overallRC': 0}
self._smtclient.guest_stop(fake_userid, timeout=300)
request.assert_called_once_with(requestData)
@mock.patch.object(smtclient.SMTClient, '_request')
def test_guest_stop_with_poll_interval(self, request):
fake_userid = 'FakeID'
rd = "PowerVM FakeID off --maxwait 300 --poll 10"
request.return_value = {'overallRC': 0}
self._smtclient.guest_stop(fake_userid, timeout=300,
poll_interval=10)
request.assert_called_once_with(rd)
@mock.patch.object(smtclient.SMTClient, '_request')
def test_guest_softstop(self, request):
fake_userid = 'FakeID'
requestData = "PowerVM FakeID softoff --wait --maxwait 300 --poll 10"
request.return_value = {'overallRC': 0}
self._smtclient.guest_softstop(fake_userid, timeout=300,
poll_interval=10)
request.assert_called_once_with(requestData)
@mock.patch.object(smtclient.SMTClient, 'get_power_state')
@mock.patch.object(smtclient.SMTClient, '_request')
def test_guest_pause(self, request, power_state):
power_state.return_value = 'on'
fake_userid = 'FakeID'
requestData = "PowerVM FakeID pause"
request.return_value = {'overallRC': 0}
self._smtclient.guest_pause(fake_userid)
request.assert_called_once_with(requestData)
@mock.patch.object(smtclient.SMTClient, 'get_power_state')
@mock.patch.object(smtclient.SMTClient, '_request')
def test_guest_unpause(self, request, power_state):
power_state.return_value = 'on'
fake_userid = 'FakeID'
requestData = "PowerVM FakeID unpause"
request.return_value = {'overallRC': 0}
self._smtclient.guest_unpause(fake_userid)
request.assert_called_once_with(requestData)
@mock.patch.object(smtclient.SMTClient, '_request')
def test_get_power_state(self, request):
fake_userid = 'FakeID'
requestData = "PowerVM FakeID status"
request.return_value = {'overallRC': 0,
'response': [fake_userid + ': on']}
status = self._smtclient.get_power_state(fake_userid)
request.assert_called_once_with(requestData)
self.assertEqual('on', status)
@mock.patch.object(smtclient.SMTClient, 'add_mdisks')
@mock.patch.object(smtclient.SMTClient, '_request')
@mock.patch.object(database.GuestDbOperator, 'add_guest')
def test_create_vm(self, add_guest, request, add_mdisks):
user_id = 'fakeuser'
cpu = 2
memory = 1024
disk_list = [{'size': '1g',
'is_boot_disk': True,
'disk_pool': 'ECKD:eckdpool1',
'format': 'ext3'}]
profile = 'osdflt'
max_cpu = 10
max_mem = '4G'
base.set_conf('zvm', 'default_admin_userid', 'lbyuser1 lbyuser2')
base.set_conf('zvm', 'user_root_vdev', '0100')
rd = ('makevm fakeuser directory LBYONLY 1024m G --cpus 2 '
'--profile osdflt --maxCPU 10 --maxMemSize 4G --setReservedMem '
'--logonby "lbyuser1 lbyuser2" --ipl 0100')
self._smtclient.create_vm(user_id, cpu, memory, disk_list, profile,
max_cpu, max_mem, '', '', '')
request.assert_called_with(rd)
add_mdisks.assert_called_with(user_id, disk_list)
add_guest.assert_called_with(user_id)
@mock.patch.object(smtclient.SMTClient, 'add_mdisks')
@mock.patch.object(smtclient.SMTClient, '_request')
@mock.patch.object(database.GuestDbOperator, 'add_guest')
def test_create_vm_cms(self, add_guest, request, add_mdisks):
user_id = 'fakeuser'
cpu = 2
memory = 1024
disk_list = [{'size': '1g',
'is_boot_disk': True,
'disk_pool': 'ECKD:eckdpool1',
'format': 'ext3'}]
profile = 'osdflt'
max_cpu = 10
max_mem = '4G'
base.set_conf('zvm', 'default_admin_userid', 'lbyuser1 lbyuser2')
base.set_conf('zvm', 'user_root_vdev', '0100')
rd = ('makevm fakeuser directory LBYONLY 1024m G --cpus 2 '
'--profile osdflt --maxCPU 10 --maxMemSize 4G --setReservedMem '
'--logonby "lbyuser1 lbyuser2" --ipl cms')
self._smtclient.create_vm(user_id, cpu, memory, disk_list, profile,
max_cpu, max_mem, 'cms', '', '')
request.assert_called_with(rd)
add_mdisks.assert_called_with(user_id, disk_list)
add_guest.assert_called_with(user_id)
@mock.patch.object(smtclient.SMTClient, 'add_mdisks')
@mock.patch.object(smtclient.SMTClient, '_request')
@mock.patch.object(database.GuestDbOperator, 'add_guest')
def test_create_vm_cms_with_param(self, add_guest, request, add_mdisks):
user_id = 'fakeuser'
cpu = 2
memory = 1024
disk_list = [{'size': '1g',
'is_boot_disk': True,
'disk_pool': 'ECKD:eckdpool1',
'format': 'ext3'}]
profile = 'osdflt'
max_cpu = 10
max_mem = '4G'
ipl_param = 'dummy'
ipl_loadparam = 'load=1'
base.set_conf('zvm', 'default_admin_userid', 'lbyuser1 lbyuser2')
base.set_conf('zvm', 'user_root_vdev', '0100')
rd = ('makevm fakeuser directory LBYONLY 1024m G --cpus 2 '
'--profile osdflt --maxCPU 10 --maxMemSize 4G --setReservedMem '
'--logonby "lbyuser1 lbyuser2" --ipl cms --iplParam dummy '
'--iplLoadparam load=1')
self._smtclient.create_vm(user_id, cpu, memory, disk_list, profile,
max_cpu, max_mem, 'cms', ipl_param,
ipl_loadparam)
request.assert_called_with(rd)
add_mdisks.assert_called_with(user_id, disk_list)
add_guest.assert_called_with(user_id)
@mock.patch.object(smtclient.SMTClient, '_request')
def test_add_mdisk(self, request):
userid = 'fakeuser'
disk = {'size': '1g',
'disk_pool': 'ECKD:eckdpool1',
'format': 'ext3'}
vdev = '0101'
rd = ('changevm fakeuser add3390 eckdpool1 0101 1g --mode MR '
'--filesystem ext3')
        self._smtclient._add_mdisk(userid, disk, vdev)
request.assert_called_once_with(rd)
@mock.patch.object(smtclient.SMTClient, '_request')
def test_add_mdisk_format_none(self, request):
userid = 'fakeuser'
disk = {'size': '1g',
'disk_pool': 'ECKD:eckdpool1',
'format': 'none'}
vdev = '0101'
rd = ('changevm fakeuser add3390 eckdpool1 0101 1g --mode MR')
        self._smtclient._add_mdisk(userid, disk, vdev)
request.assert_called_once_with(rd)
@mock.patch.object(smtclient.SMTClient, '_request')
def test_remove_mdisk(self, request):
userid = 'fakeuser'
vdev = '0102'
rd = 'changevm fakeuser removedisk 0102'
        self._smtclient._remove_mdisk(userid, vdev)
request.assert_called_once_with(rd)
@mock.patch.object(smtclient.SMTClient, '_request')
def test_guest_authorize_iucv_client(self, request):
fake_userid = 'FakeID'
client_userid = 'ClientID'
requestData = "ChangeVM FakeID punchfile /tmp/FakeID/iucvauth.sh" + \
" --class x"
request.return_value = {'overallRC': 0}
self._smtclient.guest_authorize_iucv_client(fake_userid,
client_userid)
request.assert_called_once_with(requestData)
self.assertIs(os.path.exists('/tmp/FakeID'), False)
@mock.patch.object(database.GuestDbOperator,
'update_guest_by_userid')
@mock.patch.object(database.ImageDbOperator,
'image_query_record')
@mock.patch.object(smtclient.SMTClient, 'guest_authorize_iucv_client')
@mock.patch.object(zvmutils.PathUtils, 'clean_temp_folder')
@mock.patch.object(tempfile, 'mkdtemp')
@mock.patch.object(zvmutils, 'execute')
@mock.patch.object(smtclient.SMTClient, '_request')
@mock.patch.object(smtclient.SMTClient, '_get_image_path_by_name')
def test_guest_deploy(self, get_image_path, request, execute, mkdtemp,
cleantemp, guestauth, image_query, guest_update):
base.set_conf("zvm", "user_root_vdev", "0100")
execute.side_effect = [(0, ""), (0, "")]
mkdtemp.return_value = '/tmp/tmpdir'
image_query.return_value = [{'imageosdistro': 'fakeos'}]
userid = 'fakeuser'
image_name = 'fakeimg'
get_image_path.return_value = \
'/var/lib/zvmsdk/images/netboot/rhel7/fakeimg'
transportfiles = '/faketran'
self._smtclient.guest_deploy(userid, image_name, transportfiles)
get_image_path.assert_called_once_with(image_name)
unpack_cmd = ['sudo', '/opt/zthin/bin/unpackdiskimage', 'fakeuser',
'0100',
'/var/lib/zvmsdk/images/netboot/rhel7/fakeimg/0100']
cp_cmd = ["/usr/bin/cp", '/faketran', '/tmp/tmpdir/faketran']
execute.assert_has_calls([mock.call(unpack_cmd), mock.call(cp_cmd)])
purge_rd = "changevm fakeuser purgerdr"
punch_rd = ("changevm fakeuser punchfile "
"/tmp/tmpdir/faketran --class X")
request.assert_has_calls([mock.call(purge_rd), mock.call(punch_rd)])
mkdtemp.assert_called_with()
cleantemp.assert_called_with('/tmp/tmpdir')
guestauth.assert_called_once_with(userid)
guest_update.assert_called_once_with(userid, meta='os_version=fakeos')
@mock.patch.object(zvmutils, 'execute')
@mock.patch.object(smtclient.SMTClient, '_request')
@mock.patch.object(smtclient.SMTClient, '_get_image_path_by_name')
def test_guest_deploy_unpackdiskimage_failed(self, get_image_path,
request, execute):
base.set_conf("zvm", "user_root_vdev", "0100")
userid = 'fakeuser'
image_name = 'fakeimg'
transportfiles = '/faketran'
get_image_path.return_value = \
'/var/lib/zvmsdk/images/netboot/rhel7/fakeimg'
unpack_error = ('unpackdiskimage fakeuser start time: '
'2017-08-16-01:29:59.453\nSOURCE USER ID: "fakeuser"\n'
'DISK CHANNEL: "0100"\n'
'IMAGE FILE: "/var/lib/zvmsdk/images/fakeimg"\n\n'
'Image file compression level: 6\n'
'Deploying image to fakeuser\'s disk at channel 100.\n'
'ERROR: Unable to link fakeuser 0100 disk. '
'HCPLNM053E FAKEUSER not in CP directory\n'
'HCPDTV040E Device 260C does not exist\n'
'ERROR: Failed to connect disk: fakeuser:0100\n\n'
'IMAGE DEPLOYMENT FAILED.\n'
'A detailed trace can be found at: /var/log/zthin/'
'unpackdiskimage_trace_2017-08-16-01:29:59.453.txt\n'
'unpackdiskimage end time: 2017-08-16-01:29:59.605\n')
execute.return_value = (3, unpack_error)
self.assertRaises(exception.SDKGuestOperationError,
self._smtclient.guest_deploy, userid, image_name,
transportfiles)
get_image_path.assert_called_once_with(image_name)
unpack_cmd = ['sudo', '/opt/zthin/bin/unpackdiskimage', 'fakeuser',
'0100',
'/var/lib/zvmsdk/images/netboot/rhel7/fakeimg/0100']
execute.assert_called_once_with(unpack_cmd)
@mock.patch.object(zvmutils.PathUtils, 'clean_temp_folder')
@mock.patch.object(tempfile, 'mkdtemp')
@mock.patch.object(zvmutils, 'execute')
@mock.patch.object(smtclient.SMTClient, '_request')
@mock.patch.object(smtclient.SMTClient, '_get_image_path_by_name')
def test_guest_deploy_cp_transport_failed(self, get_image_path, request,
execute, mkdtemp, cleantemp):
base.set_conf("zvm", "user_root_vdev", "0100")
cp_error = ("/usr/bin/cp: cannot stat '/faketran': "
"No such file or directory\n")
execute.side_effect = [(0, ""), (1, cp_error)]
mkdtemp.return_value = '/tmp/tmpdir'
userid = 'fakeuser'
image_name = 'fakeimg'
transportfiles = '/faketran'
get_image_path.return_value = \
'/var/lib/zvmsdk/images/netboot/rhel7/fakeimg'
self.assertRaises(exception.SDKGuestOperationError,
self._smtclient.guest_deploy, userid, image_name,
transportfiles)
get_image_path.assert_called_once_with(image_name)
unpack_cmd = ['sudo', '/opt/zthin/bin/unpackdiskimage', 'fakeuser',
'0100',
'/var/lib/zvmsdk/images/netboot/rhel7/fakeimg/0100']
cp_cmd = ["/usr/bin/cp", '/faketran', '/tmp/tmpdir/faketran']
execute.assert_has_calls([mock.call(unpack_cmd), mock.call(cp_cmd)])
purge_rd = "changevm fakeuser purgerdr"
request.assert_called_once_with(purge_rd)
mkdtemp.assert_called_with()
cleantemp.assert_called_with('/tmp/tmpdir')
@mock.patch.object(zvmutils.PathUtils, 'clean_temp_folder')
@mock.patch.object(tempfile, 'mkdtemp')
@mock.patch.object(zvmutils, 'execute')
@mock.patch.object(smtclient.SMTClient, '_request')
@mock.patch.object(smtclient.SMTClient, '_get_image_path_by_name')
def test_guest_deploy_smt_request_failed(self, get_image_path, request,
execute, mkdtemp, cleantemp):
base.set_conf("zvm", "user_root_vdev", "0100")
get_image_path.return_value = \
'/var/lib/zvmsdk/images/netboot/rhel7/fakeimg'
fake_smt_results = {'rs': 8, 'errno': 0, 'strError': 'Failed',
'overallRC': 3, 'rc': 400, 'logEntries': '',
'response': ['(Error) output and error info']}
execute.side_effect = [(0, ""), (0, "")]
request.side_effect = [None,
exception.SDKSMTRequestFailed(
fake_smt_results, 'fake error')]
mkdtemp.return_value = '/tmp/tmpdir'
userid = 'fakeuser'
image_name = 'fakeimg'
transportfiles = '/faketran'
        remote_host = "fakeuser@fakehost"
self.assertRaises(exception.SDKSMTRequestFailed,
self._smtclient.guest_deploy, userid, image_name,
transportfiles, remote_host)
get_image_path.assert_called_once_with(image_name)
unpack_cmd = ['sudo', '/opt/zthin/bin/unpackdiskimage', 'fakeuser',
'0100',
'/var/lib/zvmsdk/images/netboot/rhel7/fakeimg/0100']
scp_cmd = ["/usr/bin/scp", "-B", '-P', '22',
                   '-o StrictHostKeyChecking=no', 'fakeuser@fakehost:/faketran',
'/tmp/tmpdir/faketran']
execute.assert_has_calls([mock.call(unpack_cmd), mock.call(scp_cmd)])
purge_rd = "changevm fakeuser purgerdr"
punch_rd = ("changevm fakeuser punchfile "
"/tmp/tmpdir/faketran --class X")
request.assert_has_calls([mock.call(purge_rd), mock.call(punch_rd)])
mkdtemp.assert_called_with()
cleantemp.assert_called_with('/tmp/tmpdir')
@mock.patch.object(zvmutils, 'get_smt_userid')
@mock.patch.object(smtclient.SMTClient, '_request')
def test_grant_user_to_vswitch(self, request, userid):
userid.return_value = 'FakeHostID'
vswitch_name = 'FakeVs'
userid = 'FakeID'
requestData = ' '.join((
'SMAPI FakeHostID API Virtual_Network_Vswitch_Set_Extended',
"--operands",
"-k switch_name=FakeVs",
"-k grant_userid=FakeID",
"-k persist=YES"))
self._smtclient.grant_user_to_vswitch(vswitch_name, userid)
request.assert_called_once_with(requestData)
@mock.patch.object(zvmutils, 'get_smt_userid')
@mock.patch.object(smtclient.SMTClient, '_request')
def test_revoke_user_from_vswitch(self, request, userid):
userid.return_value = 'FakeHostID'
vswitch_name = 'FakeVs'
userid = 'FakeID'
requestData = ' '.join((
'SMAPI FakeHostID API Virtual_Network_Vswitch_Set_Extended',
"--operands",
"-k switch_name=FakeVs",
"-k revoke_userid=FakeID",
"-k persist=YES"))
self._smtclient.revoke_user_from_vswitch(vswitch_name, userid)
request.assert_called_once_with(requestData)
@mock.patch.object(zvmutils, 'get_smt_userid')
@mock.patch.object(smtclient.SMTClient, '_request')
def test_image_performance_query_single(self, smt_req, get_smt_userid):
get_smt_userid.return_value = "SMTUSER"
smt_req.return_value = {'rs': 0, 'errno': 0, 'strError': '',
'overallRC': 0, 'logEntries': [], 'rc': 0,
'response': [
'Virtual server ID: FAKEVM',
'Record version: "1"',
'Guest flags: "0"',
'Used CPU time: "646609178 uS"',
'Elapsed time: "596837441984 uS"',
'Minimum memory: "0 KB"',
'Max memory: "2097152 KB"',
'Shared memory: "302180 KB"',
'Used memory: "302180 KB"',
'Active CPUs in CEC: "44"',
'Logical CPUs in VM: "6"',
'Guest CPUs: "2"',
'Minimum CPU count: "2"',
'Max CPU limit: "10000"',
'Processor share: "100"',
'Samples CPU in use: "371"',
',Samples CPU delay: "116"',
'Samples page wait: "0"',
'Samples idle: "596331"',
'Samples other: "12"',
'Samples total: "596830"',
'Guest name: "FAKEVM "',
'']
}
pi_info = self._smtclient.image_performance_query('fakevm')
self.assertEqual(pi_info['FAKEVM']['used_memory'], "302180 KB")
self.assertEqual(pi_info['FAKEVM']['used_cpu_time'], "646609178 uS")
self.assertEqual(pi_info['FAKEVM']['elapsed_cpu_time'],
"596837441984 uS")
self.assertEqual(pi_info['FAKEVM']['min_cpu_count'], "2")
self.assertEqual(pi_info['FAKEVM']['max_cpu_limit'], "10000")
self.assertEqual(pi_info['FAKEVM']['samples_cpu_in_use'], "371")
self.assertEqual(pi_info['FAKEVM']['samples_cpu_delay'], "116")
self.assertEqual(pi_info['FAKEVM']['guest_cpus'], "2")
self.assertEqual(pi_info['FAKEVM']['userid'], "FAKEVM")
self.assertEqual(pi_info['FAKEVM']['max_memory'], "2097152 KB")
self.assertEqual(pi_info['FAKEVM']['min_memory'], "0 KB")
self.assertEqual(pi_info['FAKEVM']['shared_memory'], "302180 KB")
@mock.patch.object(zvmutils, 'get_smt_userid')
@mock.patch.object(smtclient.SMTClient, '_request')
def test_image_performance_query_single_off(self, smt_req,
get_smt_userid):
get_smt_userid.return_value = "SMTUSER"
smt_req.return_value = {'rs': 0, 'errno': 0, 'strError': '',
'overallRC': 0, 'logEntries': [], 'rc': 0,
'response': []
}
pi_info = self._smtclient.image_performance_query('fakevm')
self.assertDictEqual(pi_info, {})
@mock.patch.object(zvmutils, 'get_smt_userid')
@mock.patch.object(smtclient.SMTClient, '_request')
def test_image_performance_query_multiple(self, smt_req, get_smt_userid):
get_smt_userid.return_value = "SMTUSER"
response_list = ['Virtual server ID: fakevm',
'Record version: "1"',
'Guest flags: "0"',
'Used CPU time: "652337849 uS"',
'Elapsed time: "602181110336 uS"',
'Minimum memory: "0 KB"',
'Max memory: "2097152 KB"',
'Shared memory: "302336 KB"',
'Used memory: "302336 KB"',
'Active CPUs in CEC: "44"',
'Logical CPUs in VM: "6"',
'Guest CPUs: "2"',
'Minimum CPU count: "2"',
'Max CPU limit: "10000"',
'Processor share: "100"',
'Samples CPU in use: "375"',
',Samples CPU delay: "116"',
'Samples page wait: "0"',
'Samples idle: "601671"',
'Samples other: "12"',
'Samples total: "602174"',
'Guest name: "FAKEVM "',
'',
'Virtual server ID: fakevm2',
'Record version: "1"',
'Guest flags: "0"',
'Used CPU time: "3995650268844 uS"',
'Elapsed time: "3377790094595 uS"',
'Minimum memory: "0 KB"',
'Max memory: "8388608 KB"',
'Shared memory: "8383048 KB"',
'Used memory: "8383048 KB"',
'Active CPUs in CEC: "44"',
'Logical CPUs in VM: "6"',
'Guest CPUs: "4"',
'Minimum CPU count: "4"',
'Max CPU limit: "10000"',
'Processor share: "100"',
'Samples CPU in use: "1966323"',
',Samples CPU delay: "111704"',
'Samples page wait: "0"',
'Samples idle: "4001258"',
'Samples other: "8855"',
'Samples total: "6088140"',
'Guest name: "FAKEVM2 "',
'']
smt_req.return_value = {'rs': 0, 'errno': 0, 'strError': '',
'overallRC': 0, 'logEntries': [], 'rc': 0,
'response': response_list
}
pi_info = self._smtclient.image_performance_query(['fakevm',
'fakevm2'])
self.assertEqual(pi_info['FAKEVM']['used_memory'], "302336 KB")
self.assertEqual(pi_info['FAKEVM']['used_cpu_time'], "652337849 uS")
self.assertEqual(pi_info['FAKEVM']['elapsed_cpu_time'],
"602181110336 uS")
self.assertEqual(pi_info['FAKEVM']['min_cpu_count'], "2")
self.assertEqual(pi_info['FAKEVM']['max_cpu_limit'], "10000")
self.assertEqual(pi_info['FAKEVM']['samples_cpu_in_use'], "375")
self.assertEqual(pi_info['FAKEVM']['samples_cpu_delay'], "116")
self.assertEqual(pi_info['FAKEVM']['guest_cpus'], "2")
self.assertEqual(pi_info['FAKEVM']['userid'], "FAKEVM")
self.assertEqual(pi_info['FAKEVM']['max_memory'], "2097152 KB")
self.assertEqual(pi_info['FAKEVM']['min_memory'], "0 KB")
self.assertEqual(pi_info['FAKEVM']['shared_memory'], "302336 KB")
self.assertEqual(pi_info['FAKEVM2']['used_memory'], "8383048 KB")
self.assertEqual(pi_info['FAKEVM2']['used_cpu_time'],
"3995650268844 uS")
self.assertEqual(pi_info['FAKEVM2']['elapsed_cpu_time'],
"3377790094595 uS")
self.assertEqual(pi_info['FAKEVM2']['min_cpu_count'], "4")
self.assertEqual(pi_info['FAKEVM2']['max_cpu_limit'], "10000")
self.assertEqual(pi_info['FAKEVM2']['samples_cpu_in_use'], "1966323")
self.assertEqual(pi_info['FAKEVM2']['samples_cpu_delay'], "111704")
self.assertEqual(pi_info['FAKEVM2']['guest_cpus'], "4")
self.assertEqual(pi_info['FAKEVM2']['userid'], "FAKEVM2")
self.assertEqual(pi_info['FAKEVM2']['max_memory'], "8388608 KB")
self.assertEqual(pi_info['FAKEVM2']['min_memory'], "0 KB")
self.assertEqual(pi_info['FAKEVM2']['shared_memory'], "8383048 KB")
@mock.patch.object(zvmutils, 'get_smt_userid')
@mock.patch.object(smtclient.SMTClient, '_request')
def test_system_image_performance_query(self, smt_req, get_smt_userid):
get_smt_userid.return_value = "SMTUSER"
response_list = ['Virtual server ID: fakevm',
'Record version: "1"',
'Guest flags: "0"',
'Used CPU time: "652337849 uS"',
'Elapsed time: "602181110336 uS"',
'Minimum memory: "0 KB"',
'Max memory: "2097152 KB"',
'Shared memory: "302336 KB"',
'Used memory: "302336 KB"',
'Active CPUs in CEC: "44"',
'Logical CPUs in VM: "6"',
'Guest CPUs: "2"',
'Minimum CPU count: "2"',
'Max CPU limit: "10000"',
'Processor share: "100"',
'Samples CPU in use: "375"',
',Samples CPU delay: "116"',
'Samples page wait: "0"',
'Samples idle: "601671"',
'Samples other: "12"',
'Samples total: "602174"',
'Guest name: "FAKEVM "',
'',
'Virtual server ID: fakevm2',
'Record version: "1"',
'Guest flags: "0"',
'Used CPU time: "3995650268844 uS"',
'Elapsed time: "3377790094595 uS"',
'Minimum memory: "0 KB"',
'Max memory: "8388608 KB"',
'Shared memory: "8383048 KB"',
'Used memory: "8383048 KB"',
'Active CPUs in CEC: "44"',
'Logical CPUs in VM: "6"',
'Guest CPUs: "4"',
'Minimum CPU count: "4"',
'Max CPU limit: "10000"',
'Processor share: "100"',
'Samples CPU in use: "1966323"',
',Samples CPU delay: "111704"',
'Samples page wait: "0"',
'Samples idle: "4001258"',
'Samples other: "8855"',
'Samples total: "6088140"',
'Guest name: "FAKEVM2 "',
'']
smt_req.return_value = {'rs': 0, 'errno': 0, 'strError': '',
'overallRC': 0, 'logEntries': [], 'rc': 0,
'response': response_list
}
pi_info = self._smtclient.system_image_performance_query(['fakevm',
'fakevm2'])
self.assertEqual(pi_info['FAKEVM']['used_memory'], "302336 KB")
self.assertEqual(pi_info['FAKEVM']['used_cpu_time'], "652337849 uS")
self.assertEqual(pi_info['FAKEVM']['elapsed_cpu_time'],
"602181110336 uS")
self.assertEqual(pi_info['FAKEVM']['min_cpu_count'], "2")
self.assertEqual(pi_info['FAKEVM']['max_cpu_limit'], "10000")
self.assertEqual(pi_info['FAKEVM']['samples_cpu_in_use'], "375")
self.assertEqual(pi_info['FAKEVM']['samples_cpu_delay'], "116")
self.assertEqual(pi_info['FAKEVM']['guest_cpus'], "2")
self.assertEqual(pi_info['FAKEVM']['userid'], "FAKEVM")
self.assertEqual(pi_info['FAKEVM']['max_memory'], "2097152 KB")
self.assertEqual(pi_info['FAKEVM']['min_memory'], "0 KB")
self.assertEqual(pi_info['FAKEVM']['shared_memory'], "302336 KB")
self.assertEqual(pi_info['FAKEVM2']['used_memory'], "8383048 KB")
self.assertEqual(pi_info['FAKEVM2']['used_cpu_time'],
"3995650268844 uS")
self.assertEqual(pi_info['FAKEVM2']['elapsed_cpu_time'],
"3377790094595 uS")
self.assertEqual(pi_info['FAKEVM2']['min_cpu_count'], "4")
self.assertEqual(pi_info['FAKEVM2']['max_cpu_limit'], "10000")
self.assertEqual(pi_info['FAKEVM2']['samples_cpu_in_use'], "1966323")
self.assertEqual(pi_info['FAKEVM2']['samples_cpu_delay'], "111704")
self.assertEqual(pi_info['FAKEVM2']['guest_cpus'], "4")
self.assertEqual(pi_info['FAKEVM2']['userid'], "FAKEVM2")
self.assertEqual(pi_info['FAKEVM2']['max_memory'], "8388608 KB")
self.assertEqual(pi_info['FAKEVM2']['min_memory'], "0 KB")
self.assertEqual(pi_info['FAKEVM2']['shared_memory'], "8383048 KB")
@mock.patch.object(zvmutils, 'get_smt_userid')
@mock.patch.object(smtclient.SMTClient, '_request')
def test_virtual_network_vswitch_query_byte_stats(self, smt_req,
get_smt_userid):
get_smt_userid.return_value = "SMTUSER"
vsw_data = ['vswitch count: 2',
'',
'vswitch number: 1',
'vswitch name: XCATVSW1',
'uplink count: 1',
'uplink_conn: 6240',
'uplink_fr_rx: 3658251',
'uplink_fr_rx_dsc: 0',
'uplink_fr_rx_err: 0',
'uplink_fr_tx: 4209828',
'uplink_fr_tx_dsc: 0',
'uplink_fr_tx_err: 0',
'uplink_rx: 498914052',
'uplink_tx: 2615220898',
'bridge_fr_rx: 0',
'bridge_fr_rx_dsc: 0',
'bridge_fr_rx_err: 0',
'bridge_fr_tx: 0',
'bridge_fr_tx_dsc: 0',
'bridge_fr_tx_err: 0',
'bridge_rx: 0',
'bridge_tx: 0',
'nic count: 2',
'nic_id: INST1 0600',
'nic_fr_rx: 573952',
'nic_fr_rx_dsc: 0',
'nic_fr_rx_err: 0',
'nic_fr_tx: 548780',
'nic_fr_tx_dsc: 0',
'nic_fr_tx_err: 4',
'nic_rx: 103024058',
'nic_tx: 102030890',
'nic_id: INST2 0600',
'nic_fr_rx: 17493',
'nic_fr_rx_dsc: 0',
'nic_fr_rx_err: 0',
'nic_fr_tx: 16886',
'nic_fr_tx_dsc: 0',
'nic_fr_tx_err: 4',
'nic_rx: 3111714',
'nic_tx: 3172646',
'vlan count: 0',
'',
'vswitch number: 2',
'vswitch name: XCATVSW2',
'uplink count: 1',
'uplink_conn: 6200',
'uplink_fr_rx: 1608681',
'uplink_fr_rx_dsc: 0',
'uplink_fr_rx_err: 0',
'uplink_fr_tx: 2120075',
'uplink_fr_tx_dsc: 0',
'uplink_fr_tx_err: 0',
'uplink_rx: 314326223',
'uplink_tx: 1503721533',
'bridge_fr_rx: 0',
'bridge_fr_rx_dsc: 0',
'bridge_fr_rx_err: 0',
'bridge_fr_tx: 0',
'bridge_fr_tx_dsc: 0',
'bridge_fr_tx_err: 0',
'bridge_rx: 0',
'bridge_tx: 0',
'nic count: 2',
'nic_id: INST1 1000',
'nic_fr_rx: 34958',
'nic_fr_rx_dsc: 0',
'nic_fr_rx_err: 0',
'nic_fr_tx: 16211',
'nic_fr_tx_dsc: 0',
'nic_fr_tx_err: 0',
'nic_rx: 4684435',
'nic_tx: 3316601',
'nic_id: INST2 1000',
'nic_fr_rx: 27211',
'nic_fr_rx_dsc: 0',
'nic_fr_rx_err: 0',
'nic_fr_tx: 12344',
'nic_fr_tx_dsc: 0',
'nic_fr_tx_err: 0',
'nic_rx: 3577163',
'nic_tx: 2515045',
'vlan count: 0'
]
smt_req.return_value = {'rs': 0, 'errno': 0, 'strError': '',
'overallRC': 0, 'logEntries': [],
'rc': 0, 'response': vsw_data
}
vsw_dict = self._smtclient.virtual_network_vswitch_query_byte_stats()
self.assertEqual(2, len(vsw_dict['vswitches']))
self.assertEqual(2, len(vsw_dict['vswitches'][1]['nics']))
self.assertEqual('INST1',
vsw_dict['vswitches'][0]['nics'][0]['userid'])
self.assertEqual('3577163',
vsw_dict['vswitches'][1]['nics'][1]['nic_rx'])
@mock.patch.object(smtclient.SMTClient, '_request')
def test_get_host_info(self, smt_req):
resp = ['ZCC USERID: OPNCLOUD',
'z/VM Host: OPNSTK2',
'Architecture: s390x',
'CEC Vendor: IBM',
'CEC Model: 2817',
'Hypervisor OS: z/VM 6.4.0',
'Hypervisor Name: OPNSTK2',
'LPAR CPU Total: 6',
'LPAR CPU Used: 6',
'LPAR Memory Total: 50G',
'LPAR Memory Offline: 0',
'LPAR Memory Used: 36.5G',
'IPL Time: IPL at 07/12/17 22:37:47 EDT']
smt_req.return_value = {'rs': 0, 'errno': 0, 'strError': '',
'overallRC': 0, 'logEntries': [], 'rc': 0,
'response': resp}
expect = {'architecture': 's390x',
'cec_model': '2817',
'cec_vendor': 'IBM',
'hypervisor_name': 'OPNSTK2',
'hypervisor_os': 'z/VM 6.4.0',
'ipl_time': 'IPL at 07/12/17 22:37:47 EDT',
'lpar_cpu_total': '6',
'lpar_cpu_used': '6',
'lpar_memory_offline': '0',
'lpar_memory_total': '50G',
'lpar_memory_used': '36.5G',
'zcc_userid': 'OPNCLOUD',
'zvm_host': 'OPNSTK2'}
host_info = self._smtclient.get_host_info()
smt_req.assert_called_once_with('getHost general')
self.assertDictEqual(host_info, expect)
@mock.patch.object(smtclient.SMTClient, '_request')
def test_get_diskpool_info(self, smt_req):
resp = ['XCATECKD Total: 3623.0G',
'XCATECKD Used: 397.4G',
'XCATECKD Free: 3225.6G']
smt_req.return_value = {'rs': 0, 'errno': 0, 'strError': '',
'overallRC': 0, 'logEntries': [], 'rc': 0,
'response': resp}
expect = {'disk_available': '3225.6G',
'disk_total': '3623.0G',
'disk_used': '397.4G'}
dp_info = self._smtclient.get_diskpool_info('pool')
smt_req.assert_called_once_with('getHost diskpoolspace pool')
self.assertDictEqual(dp_info, expect)
@mock.patch.object(zvmutils, 'get_smt_userid')
@mock.patch.object(smtclient.SMTClient, '_request')
def test_get_vswitch_list(self, request, get_smt_userid):
get_smt_userid.return_value = "SMTUSER"
request.return_value = {'overallRC': 0,
'response': ['VSWITCH: Name: VSTEST1', 'VSWITCH: Name: VSTEST2',
'VSWITCH: Name: VSTEST3', 'VSWITCH: Name: VSTEST4']}
expect = ['VSTEST1', 'VSTEST2', 'VSTEST3', 'VSTEST4']
rd = ' '.join((
"SMAPI SMTUSER API Virtual_Network_Vswitch_Query",
"--operands",
"-s \'*\'"))
        vswitch_list = self._smtclient.get_vswitch_list()
        request.assert_called_once_with(rd)
        self.assertEqual(vswitch_list, expect)
@mock.patch.object(zvmutils, 'get_smt_userid')
@mock.patch.object(smtclient.SMTClient, '_request')
def test_set_vswitch_port_vlan_id(self, request, get_smt_userid):
get_smt_userid.return_value = "SMTUSER"
request.return_value = {'overallRC': 0}
userid = 'FakeID'
vswitch_name = 'FakeVS'
vlan_id = 'FakeVLAN'
rd = ' '.join((
"SMAPI SMTUSER API Virtual_Network_Vswitch_Set_Extended",
"--operands",
"-k grant_userid=FakeID",
"-k switch_name=FakeVS",
"-k user_vlan_id=FakeVLAN",
"-k persist=YES"))
self._smtclient.set_vswitch_port_vlan_id(vswitch_name,
userid, vlan_id)
request.assert_called_once_with(rd)
@mock.patch.object(zvmutils, 'get_smt_userid')
@mock.patch.object(smtclient.SMTClient, '_request')
def test_add_vswitch(self, request, get_smt_userid):
get_smt_userid.return_value = 'SMTUSER'
request.return_value = {'overallRC': 0}
rd = ' '.join((
"SMAPI SMTUSER API Virtual_Network_Vswitch_Create_Extended",
"--operands",
"-k switch_name=fakename",
"-k real_device_address='111 222'",
"-k connection_value=CONNECT",
"-k queue_memory_limit=5",
"-k transport_type=ETHERNET",
"-k vlan_id=10",
"-k persist=NO",
"-k port_type=ACCESS",
"-k gvrp_value=GVRP",
"-k native_vlanid=None",
"-k routing_value=NONROUTER"))
self._smtclient.add_vswitch("fakename", rdev="111 222",
controller='*', connection='CONNECT',
network_type='ETHERNET',
router="NONROUTER", vid='10',
port_type='ACCESS', gvrp='GVRP',
queue_mem=5, native_vid=None,
persist=False)
request.assert_called_with(rd)
@mock.patch.object(zvmutils, 'get_smt_userid')
@mock.patch.object(smtclient.SMTClient, '_request')
def test_set_vswitch(self, request, get_smt_userid):
get_smt_userid.return_value = "SMTUSER"
request.return_value = {'overallRC': 0}
rd = ' '.join((
"SMAPI SMTUSER API Virtual_Network_Vswitch_Set_Extended",
"--operands",
"-k switch_name=fake_vs",
"-k real_device_address='1000 1003'"))
self._smtclient.set_vswitch("fake_vs",
real_device_address='1000 1003')
request.assert_called_with(rd)
@mock.patch.object(zvmutils, 'get_smt_userid')
@mock.patch.object(smtclient.SMTClient, '_request')
def test_set_vswitch_with_errorcode(self, request, get_smt_userid):
get_smt_userid.return_value = "SMTUSER"
results = {'rs': 0, 'errno': 0, 'strError': '',
'overallRC': 1, 'logEntries': [], 'rc': 0,
'response': ['fake response']}
request.side_effect = exception.SDKSMTRequestFailed(
results, 'fake error')
self.assertRaises(exception.SDKSMTRequestFailed,
self._smtclient.set_vswitch,
"vswitch_name", grant_userid='fake_id')
@mock.patch.object(zvmutils, 'get_smt_userid')
@mock.patch.object(smtclient.SMTClient, '_request')
def test_delete_vswitch(self, request, get_smt_userid):
get_smt_userid.return_value = "SMTUSER"
request.return_value = {'rs': 0, 'errno': 0, 'strError': '',
'overallRC': 0, 'logEntries': [], 'rc': 0,
'response': ['fake response']}
switch_name = 'FakeVS'
rd = ' '.join((
"SMAPI SMTUSER API Virtual_Network_Vswitch_Delete_Extended",
"--operands",
"-k switch_name=FakeVS",
"-k persist=YES"))
self._smtclient.delete_vswitch(switch_name, True)
request.assert_called_once_with(rd)
@mock.patch.object(zvmutils, 'get_smt_userid')
@mock.patch.object(smtclient.SMTClient, '_request')
def test_delete_vswitch_with_errorcode(self, request, get_smt_userid):
get_smt_userid.return_value = "SMTUSER"
results = {'rs': 0, 'errno': 0, 'strError': '',
'overallRC': 1, 'logEntries': [], 'rc': 0,
'response': ['fake response']}
request.side_effect = exception.SDKSMTRequestFailed(
results, 'fake error')
self.assertRaises(exception.SDKSMTRequestFailed,
self._smtclient.delete_vswitch,
"vswitch_name", True)
@mock.patch.object(zvmutils, 'get_smt_userid')
@mock.patch.object(smtclient.SMTClient, '_request')
def test_delete_vswitch_not_exist(self, request, get_smt_userid):
get_smt_userid.return_value = "SMTUSER"
results = {'rs': 40, 'errno': 0, 'strError': '',
'overallRC': 1, 'logEntries': [], 'rc': 212,
'response': ['fake response']}
request.side_effect = exception.SDKSMTRequestFailed(
results, 'fake error')
switch_name = 'FakeVS'
rd = ' '.join((
"SMAPI SMTUSER API Virtual_Network_Vswitch_Delete_Extended",
"--operands",
"-k switch_name=FakeVS",
"-k persist=YES"))
self._smtclient.delete_vswitch(switch_name, True)
request.assert_called_once_with(rd)
@mock.patch.object(database.NetworkDbOperator, 'switch_select_table')
def test_get_available_vdev(self, switch_select_table):
switch_select_table.return_value = [
{'userid': 'fake_id', 'interface': '1003',
'switch': None, 'port': None, 'comments': None},
{'userid': 'fake_id', 'interface': '1006',
'switch': None, 'port': None, 'comments': None}]
result = self._smtclient._get_available_vdev('fake_id', vdev='1009')
switch_select_table.assert_called_with()
self.assertEqual(result, '1009')
@mock.patch.object(database.NetworkDbOperator, 'switch_select_table')
def test_get_available_vdev_without_vdev(self, switch_select_table):
switch_select_table.return_value = [
{'userid': 'FAKE_ID', 'interface': '1003',
'switch': None, 'port': None, 'comments': None},
{'userid': 'FAKE_ID', 'interface': '2003',
'switch': None, 'port': None, 'comments': None}]
result = self._smtclient._get_available_vdev('fake_id', vdev=None)
switch_select_table.assert_called_with()
self.assertEqual(result, '2006')
@mock.patch.object(database.NetworkDbOperator, 'switch_select_table')
def test_get_available_vdev_with_used_vdev(self, switch_select_table):
switch_select_table.return_value = [
{'userid': 'FAKE_ID', 'interface': '1003',
'switch': None, 'port': None, 'comments': None},
{'userid': 'FAKE_ID', 'interface': '1006',
'switch': None, 'port': None, 'comments': None}]
self.assertRaises(exception.SDKConflictError,
self._smtclient._get_available_vdev,
'fake_id', vdev='1004')
@mock.patch.object(smtclient.SMTClient, '_get_available_vdev')
@mock.patch.object(smtclient.SMTClient, '_create_nic')
def test_create_nic(self, create_nic, get_vdev):
userid = 'fake_id'
get_vdev.return_value = '1009'
self._smtclient.create_nic(userid, vdev='1009', nic_id='nic_id')
create_nic.assert_called_with(userid, '1009', nic_id="nic_id",
mac_addr=None, active=False)
get_vdev.assert_called_with(userid, vdev='1009')
@mock.patch.object(smtclient.SMTClient, '_get_available_vdev')
@mock.patch.object(smtclient.SMTClient, '_create_nic')
def test_create_nic_without_vdev(self, create_nic, get_vdev):
userid = 'fake_id'
get_vdev.return_value = '2006'
self._smtclient.create_nic(userid, nic_id='nic_id')
create_nic.assert_called_with(userid, '2006', nic_id='nic_id',
mac_addr=None, active=False)
get_vdev.assert_called_with(userid, vdev=None)
@mock.patch.object(smtclient.SMTClient, '_get_available_vdev')
def test_create_nic_with_used_vdev(self, get_vdev):
get_vdev.side_effect = exception.SDKConflictError('network', rs=6,
vdev='1004',
userid='fake_id',
msg="error")
self.assertRaises(exception.SDKConflictError,
self._smtclient.create_nic,
'fake_id', nic_id="nic_id", vdev='1004')
@mock.patch.object(database.NetworkDbOperator, 'switch_add_record')
@mock.patch.object(smtclient.SMTClient, '_request')
@mock.patch.object(smtclient.SMTClient, 'get_power_state')
def test_private_create_nic_active(self, power_state, request, add_record):
request.return_value = {'overallRC': 0}
power_state.return_value = 'on'
self._smtclient._create_nic("fakenode", "fake_vdev",
nic_id="fake_nic",
mac_addr='11:22:33:44:55:66',
active=True)
add_record.assert_called_once_with("fakenode", "fake_vdev",
port="fake_nic")
rd1 = ' '.join((
'SMAPI fakenode API Virtual_Network_Adapter_Create_Extended_DM',
"--operands",
"-k image_device_number=fake_vdev",
"-k adapter_type=QDIO",
"-k mac_id=445566"))
rd2 = ' '.join((
'SMAPI fakenode API Virtual_Network_Adapter_Create_Extended',
"--operands",
"-k image_device_number=fake_vdev",
"-k adapter_type=QDIO"))
request.assert_any_call(rd1)
request.assert_any_call(rd2)
@mock.patch.object(smtclient.SMTClient, '_request')
def test_get_user_direct(self, req):
req.return_value = {'response': 'OK'}
resp = self._smtclient.get_user_direct('user1')
req.assert_called_once_with('getvm user1 directory')
self.assertEqual(resp, 'OK')
@mock.patch.object(database.NetworkDbOperator,
'switch_delete_record_for_nic')
@mock.patch.object(smtclient.SMTClient, '_request')
@mock.patch.object(smtclient.SMTClient, 'get_power_state')
@mock.patch.object(database.NetworkDbOperator,
'switch_select_record_for_userid')
def test_delete_nic(self, select_rec, power_state, request, delete_nic):
select_rec.return_value = [{"interface": "1000",
"comments": None}]
power_state.return_value = 'on'
userid = 'FakeID'
vdev = '1000'
rd1 = ' '.join((
"SMAPI FakeID API Virtual_Network_Adapter_Delete_DM",
"--operands",
'-v 1000'))
rd2 = ' '.join((
"SMAPI FakeID API Virtual_Network_Adapter_Delete",
"--operands",
'-v 1000'))
self._smtclient.delete_nic(userid, vdev, True)
request.assert_any_call(rd1)
request.assert_any_call(rd2)
delete_nic.assert_called_with(userid, vdev)
@mock.patch.object(smtclient.SMTClient, '_undedicate_nic')
@mock.patch.object(smtclient.SMTClient, 'get_power_state')
@mock.patch.object(database.NetworkDbOperator,
'switch_select_record_for_userid')
def test_delete_nic_OSA(self, select_rec, power_state, undedicate_nic):
select_rec.return_value = [{"interface": "1000",
"comments": "OSA=F000"}]
power_state.return_value = 'on'
userid = 'FakeID'
vdev = '1000'
self._smtclient.delete_nic(userid, vdev, True)
undedicate_nic.assert_called_with(userid, vdev, active=True)
@mock.patch.object(smtclient.SMTClient, '_couple_nic')
def test_couple_nic_to_vswitch(self, couple_nic):
self._smtclient.couple_nic_to_vswitch("fake_userid",
"fakevdev",
"fake_VS_name",
True)
couple_nic.assert_called_with("fake_userid",
"fakevdev",
"fake_VS_name",
active=True)
@mock.patch.object(smtclient.SMTClient, '_uncouple_nic')
def test_uncouple_nic_from_vswitch(self, uncouple_nic):
self._smtclient.uncouple_nic_from_vswitch("fake_userid",
"fakevdev",
False)
uncouple_nic.assert_called_with("fake_userid",
"fakevdev", active=False)
@mock.patch.object(database.NetworkDbOperator,
'switch_update_record_with_switch')
@mock.patch.object(smtclient.SMTClient, '_request')
@mock.patch.object(smtclient.SMTClient, 'get_power_state')
def test_couple_nic(self, power_state, request, update_switch):
request.return_value = {'overallRC': 0}
power_state.return_value = 'on'
userid = 'FakeID'
vdev = 'FakeVdev'
vswitch_name = 'FakeVS'
requestData1 = ' '.join((
'SMAPI FakeID',
"API Virtual_Network_Adapter_Connect_Vswitch_DM",
"--operands",
"-v FakeVdev",
"-n FakeVS"))
requestData2 = ' '.join((
'SMAPI FakeID',
"API Virtual_Network_Adapter_Connect_Vswitch",
"--operands",
"-v FakeVdev",
"-n FakeVS"))
self._smtclient._couple_nic(userid, vdev, vswitch_name,
active=True)
update_switch.assert_called_with(userid, vdev, vswitch_name)
request.assert_any_call(requestData1)
request.assert_any_call(requestData2)
@mock.patch.object(database.NetworkDbOperator,
'switch_update_record_with_switch')
@mock.patch.object(smtclient.SMTClient, '_request')
@mock.patch.object(smtclient.SMTClient, 'get_power_state')
def test_uncouple_nic(self, power_state, request, update_switch):
request.return_value = {'overallRC': 0}
power_state.return_value = 'on'
userid = 'FakeID'
vdev = 'FakeVdev'
requestData1 = ' '.join((
'SMAPI FakeID',
"API Virtual_Network_Adapter_Disconnect_DM",
"--operands",
"-v FakeVdev"))
requestData2 = ' '.join((
'SMAPI FakeID',
"API Virtual_Network_Adapter_Disconnect",
"--operands",
"-v FakeVdev"))
self._smtclient._uncouple_nic(userid, vdev, active=True)
update_switch.assert_called_with(userid, vdev, None)
request.assert_any_call(requestData1)
request.assert_any_call(requestData2)
@mock.patch.object(database.GuestDbOperator,
'get_migrated_guest_info_list')
@mock.patch.object(database.GuestDbOperator, 'get_guest_list')
def test_get_vm_list(self, db_list, migrated_list):
db_list.return_value = [(u'9a5c9689-d099-46bb-865f-0c01c384f58c',
u'TEST0', u'', u''),
(u'3abe0ac8-90b5-4b00-b624-969c184b8158',
u'TEST1', u'comm1', u''),
(u'aa252ca5-03aa-4407-9c2e-d9737ddb8d24',
u'TEST2', u'comm2', u'meta2')]
migrated_list.return_value = []
userid_list = self._smtclient.get_vm_list()
db_list.assert_called_once()
migrated_list.assert_called_once()
self.assertListEqual(sorted(userid_list),
sorted(['TEST0', 'TEST1', 'TEST2']))
@mock.patch.object(database.GuestDbOperator,
'get_migrated_guest_info_list')
@mock.patch.object(database.GuestDbOperator, 'get_guest_list')
def test_get_vm_list_exclude_migrated(self, db_list, migrated_list):
db_list.return_value = [(u'9a5c9689-d099-46bb-865f-0c01c384f58c',
u'TEST0', u'', u''),
(u'3abe0ac8-90b5-4b00-b624-969c184b8158',
u'TEST1', u'comm1', u''),
(u'aa252ca5-03aa-4407-9c2e-d9737ddb8d24',
u'TEST2', u'{"migrated": 1}', u'meta2')]
migrated_list.return_value = [(u'aa252ca5-03aa-4407-9c2e-d9737ddb8d24',
u'TEST2', u'{"migrated": 1}', u'meta2')]
userid_list = self._smtclient.get_vm_list()
db_list.assert_called_once()
migrated_list.assert_called_once()
self.assertListEqual(sorted(userid_list), sorted(['TEST0', 'TEST1']))
@mock.patch.object(smtclient.SMTClient, '_request')
def test_delete_userid(self, request):
rd = 'deletevm fuser1 directory'
self._smtclient.delete_userid('fuser1')
request.assert_called_once_with(rd)
@mock.patch.object(smtclient.SMTClient, '_request')
def test_execute_cmd(self, request):
rd = 'cmdVM fuser1 CMD \'ls\''
self._smtclient.execute_cmd('fuser1', 'ls')
request.assert_called_once_with(rd)
@mock.patch.object(smtclient.SMTClient, '_request')
def test_delete_userid_not_exist(self, request):
rd = 'deletevm fuser1 directory'
results = {'rc': 400, 'rs': 4, 'logEntries': ''}
request.side_effect = exception.SDKSMTRequestFailed(results,
"fake error")
self._smtclient.delete_userid('fuser1')
request.assert_called_once_with(rd)
@mock.patch.object(smtclient.SMTClient, '_request')
def test_delete_userid_failed(self, request):
rd = 'deletevm fuser1 directory'
results = {'rc': 400, 'rs': 104, 'logEntries': ''}
request.side_effect = exception.SDKSMTRequestFailed(results,
"fake error")
self.assertRaises(exception.SDKSMTRequestFailed,
self._smtclient.delete_userid, 'fuser1')
request.assert_called_once_with(rd)
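
    # Image import: the file is pulled into the image repository, renamed
    # to the canonical '0100' file, its md5sum is checked against the
    # metadata, and a DB record is added with the probed size values.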
@mock.patch.object(os, 'rename')
@mock.patch.object(database.ImageDbOperator, 'image_add_record')
@mock.patch.object(smtclient.SMTClient, '_get_image_size')
@mock.patch.object(smtclient.SMTClient, '_get_disk_size_units')
@mock.patch.object(smtclient.SMTClient, '_get_md5sum')
@mock.patch.object(smtclient.FilesystemBackend, 'image_import')
@mock.patch.object(zvmutils.PathUtils,
'create_import_image_repository')
@mock.patch.object(database.ImageDbOperator, 'image_query_record')
def test_image_import(self, image_query, create_path, image_import,
get_md5sum, disk_size_units, image_size,
image_add_record, rename):
image_name = 'testimage'
url = 'file:///tmp/testdummyimg'
image_meta = {'os_version': 'rhel6.5',
'md5sum': 'c73ce117eef8077c3420bfc8f473ac2f'}
import_image_fpath = '/home/netboot/rhel6.5/testimage/testdummyimg'
final_image_fpath = '/home/netboot/rhel6.5/testimage/0100'
image_query.return_value = []
create_path.return_value = '/home/netboot/rhel6.5/testimage'
get_md5sum.return_value = 'c73ce117eef8077c3420bfc8f473ac2f'
disk_size_units.return_value = '3338:CYL'
image_size.return_value = '512000'
self._smtclient.image_import(image_name, url, image_meta)
image_query.assert_called_once_with(image_name)
image_import.assert_called_once_with(image_name, url,
import_image_fpath,
remote_host=None)
get_md5sum.assert_called_once_with(import_image_fpath)
disk_size_units.assert_called_once_with(final_image_fpath)
image_size.assert_called_once_with(final_image_fpath)
image_add_record.assert_called_once_with(image_name,
'rhel6.5',
'c73ce117eef8077c3420bfc8f473ac2f',
'3338:CYL',
'512000',
'rootonly')
@mock.patch.object(smtclient.SMTClient, '_get_image_path_by_name')
@mock.patch.object(database.ImageDbOperator, 'image_query_record')
def test_image_import_image_already_exist(self, image_query,
get_image_path):
image_name = 'testimage'
url = 'file:///tmp/testdummyimg'
image_meta = {'os_version': 'rhel6.5',
'md5sum': 'c73ce117eef8077c3420bfc8f473ac2f'}
image_query.return_value = [(u'testimage', u'rhel6.5',
u'c73ce117eef8077c3420bfc8f473ac2f',
u'3338:CYL', u'5120000', u'netboot', None)]
self.assertRaises(exception.SDKImageOperationError,
self._smtclient.image_import,
image_name, url, image_meta)
image_query.assert_called_once_with(image_name)
get_image_path.assert_not_called()
@mock.patch.object(smtclient.SMTClient, '_get_md5sum')
@mock.patch.object(smtclient.FilesystemBackend, 'image_import')
@mock.patch.object(database.ImageDbOperator, 'image_query_record')
def test_image_import_invalid_md5sum(self, image_query, image_import,
get_md5sum):
image_name = 'testimage'
url = 'file:///tmp/testdummyimg'
image_meta = {'os_version': 'rhel6.5',
'md5sum': 'c73ce117eef8077c3420bfc8f473ac2f'}
image_query.return_value = []
get_md5sum.return_value = 'c73ce117eef8077c3420bfc000000'
self.assertRaises(exception.SDKImageOperationError,
self._smtclient.image_import,
image_name, url, image_meta)
@mock.patch.object(database.ImageDbOperator, 'image_query_record')
def test_image_query(self, image_query):
image_name = "testimage"
self._smtclient.image_query(image_name)
image_query.assert_called_once_with(image_name)
@mock.patch.object(database.ImageDbOperator, 'image_delete_record')
@mock.patch.object(smtclient.SMTClient, '_delete_image_file')
def test_image_delete(self, delete_file, delete_db_record):
image_name = 'testimage'
self._smtclient.image_delete(image_name)
delete_file.assert_called_once_with(image_name)
delete_db_record.assert_called_once_with(image_name)
@mock.patch.object(smtclient.SMTClient, 'image_get_root_disk_size')
def test_image_get_root_disk_size(self, query_disk_size_units):
image_name = 'testimage'
self._smtclient.image_get_root_disk_size(image_name)
query_disk_size_units.assert_called_once_with(image_name)
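
    # image_export looks up the DB record and returns metadata (name,
    # path, os_version, md5sum) describing the exported copy.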
@mock.patch.object(database.ImageDbOperator, 'image_query_record')
@mock.patch.object(smtclient.FilesystemBackend, 'image_export')
def test_image_export(self, image_export, image_query):
image_name = u'testimage'
dest_url = 'file:///path/to/exported/image'
remote_host = '[email protected]'
image_query.return_value = [
{'imagename': u'testimage',
'imageosdistro': u'rhel6.5',
'md5sum': u'c73ce117eef8077c3420bfc8f473ac2f',
'disk_size_units': u'3338:CYL',
'image_size_in_bytes': u'5120000',
'type': u'rootonly',
'comments': None}]
expect_return = {
'image_name': u'testimage',
'image_path': u'file:///path/to/exported/image',
'os_version': u'rhel6.5',
'md5sum': u'c73ce117eef8077c3420bfc8f473ac2f'
}
real_return = self._smtclient.image_export(image_name, dest_url,
remote_host=remote_host)
image_query.assert_called_once_with(image_name)
self.assertDictEqual(real_return, expect_return)
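
    # Vdev assignment: disks without an explicit 'vdev' get addresses
    # generated from the 0100 base, caller-supplied vdevs are kept as-is.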
def test_generate_vdev(self):
base = '0100'
idx = 1
vdev = self._smtclient._generate_vdev(base, idx)
self.assertEqual(vdev, '0101')
@mock.patch.object(smtclient.SMTClient, '_add_mdisk')
def test_add_mdisks(self, add_mdisk):
userid = 'fakeuser'
disk_list = [{'size': '1g',
'is_boot_disk': True,
'disk_pool': 'ECKD:eckdpool1'},
{'size': '200000',
'disk_pool': 'FBA:fbapool1',
'format': 'ext3'}]
self._smtclient.add_mdisks(userid, disk_list)
add_mdisk.assert_any_call(userid, disk_list[0], '0100')
add_mdisk.assert_any_call(userid, disk_list[1], '0101')
@mock.patch.object(smtclient.SMTClient, '_add_mdisk')
def test_add_mdisks_with_1dev(self, add_mdisk):
userid = 'fakeuser'
disk_list = [{'size': '1g',
'is_boot_disk': True,
'disk_pool': 'ECKD:eckdpool1'},
{'size': '200000',
'disk_pool': 'FBA:fbapool1',
'format': 'ext3',
'vdev': '0200'}]
self._smtclient.add_mdisks(userid, disk_list)
add_mdisk.assert_any_call(userid, disk_list[0], '0100')
add_mdisk.assert_any_call(userid, disk_list[1], '0200')
@mock.patch.object(smtclient.SMTClient, '_add_mdisk')
def test_add_mdisks_with_2dev(self, add_mdisk):
userid = 'fakeuser'
disk_list = [{'size': '1g',
'is_boot_disk': True,
'disk_pool': 'ECKD:eckdpool1',
'vdev': '0200'},
{'size': '200000',
'disk_pool': 'FBA:fbapool1',
'format': 'ext3',
'vdev': '0300'}]
self._smtclient.add_mdisks(userid, disk_list)
add_mdisk.assert_any_call(userid, disk_list[0], '0200')
add_mdisk.assert_any_call(userid, disk_list[1], '0300')
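
    # Device (un)dedicate are thin wrappers around 'changevm' requests.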
@mock.patch.object(smtclient.SMTClient, '_request')
def test_dedicate_device(self, request):
fake_userid = 'FakeID'
vaddr = 'vaddr'
raddr = 'raddr'
mode = 1
requestData = "changevm FakeID dedicate vaddr raddr 1"
request.return_value = {'overallRC': 0}
self._smtclient.dedicate_device(fake_userid, vaddr,
raddr, mode)
request.assert_called_once_with(requestData)
@mock.patch.object(smtclient.SMTClient, '_request')
def test_undedicate_device(self, request):
fake_userid = 'FakeID'
vaddr = 'vaddr'
requestData = "changevm FakeID undedicate vaddr"
request.return_value = {'overallRC': 0}
self._smtclient.undedicate_device(fake_userid, vaddr)
request.assert_called_once_with(requestData)
@mock.patch.object(smtclient.SMTClient, '_remove_mdisk')
def test_remove_mdisks(self, remove_mdisk):
userid = 'fakeuser'
vdev_list = ['102', '103']
self._smtclient.remove_mdisks(userid, vdev_list)
remove_mdisk.assert_any_call(userid, vdev_list[0])
remove_mdisk.assert_any_call(userid, vdev_list[1])
@mock.patch.object(smtclient.SMTClient, 'image_performance_query')
def test_get_image_performance_info(self, ipq):
ipq.return_value = {
u'FAKEVM': {
'used_memory': u'5222192 KB',
'used_cpu_time': u'25640530229 uS',
'guest_cpus': u'2',
                'userid': u'FAKEVM',
'max_memory': u'8388608 KB'}}
info = self._smtclient.get_image_performance_info('FAKEVM')
self.assertEqual(info['used_memory'], '5222192 KB')
@mock.patch.object(smtclient.SMTClient, 'image_performance_query')
def test_get_image_performance_info_not_exist(self, ipq):
ipq.return_value = {}
info = self._smtclient.get_image_performance_info('fakevm')
self.assertEqual(info, None)
def test_is_vdev_valid_true(self):
vdev = '1009'
vdev_info = ['1003', '1006']
result = self._smtclient._is_vdev_valid(vdev, vdev_info)
self.assertEqual(result, True)
    def test_is_vdev_valid_false(self):
vdev = '2002'
vdev_info = ['2000', '2004']
result = self._smtclient._is_vdev_valid(vdev, vdev_info)
self.assertEqual(result, False)
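
    # Console output: 'getvm ... consoleoutput' returns the spool ids,
    # then each spool file is read back via vmur and concatenated.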
@mock.patch.object(zvmutils, 'execute')
@mock.patch.object(smtclient.SMTClient, '_request')
def test_get_user_console_output(self, req, execu):
req.return_value = self._generate_results(response=['cons: 0001 0002'])
execu.side_effect = [(0, 'first line\n'), (0, 'second line\n')]
cons_log = self._smtclient.get_user_console_output('fakeuser')
req.assert_called_once_with('getvm fakeuser consoleoutput')
execu.assert_any_call('sudo /usr/sbin/vmur re -t -O 0001')
execu.assert_any_call('sudo /usr/sbin/vmur re -t -O 0002')
self.assertEqual(cons_log, 'first line\nsecond line\n')
@mock.patch.object(smtclient.SMTClient, '_request')
def test_get_user_console_output_request_failed(self, req):
req.side_effect = exception.SDKSMTRequestFailed({}, 'err')
self.assertRaises(exception.SDKSMTRequestFailed,
self._smtclient.get_user_console_output, 'fakeuser')
@mock.patch.object(smtclient.SMTClient, '_request')
def test_guest_reboot(self, req):
req.return_value = self._generate_results()
self._smtclient.guest_reboot('fakeuser')
req.assert_called_once_with('PowerVM fakeuser reboot')
@mock.patch.object(smtclient.SMTClient, '_request')
def test_guest_reset(self, req):
req.return_value = self._generate_results()
self._smtclient.guest_reset('fakeuser')
req.assert_called_once_with('PowerVM fakeuser reset')
@mock.patch.object(smtclient.SMTClient, '_request')
def test_get_guest_connection_status(self, req):
result = self._generate_results(rs=1, response=['testuid: reachable'])
req.return_value = result
is_reachable = self._smtclient.get_guest_connection_status('testuid')
self.assertTrue(is_reachable)
@mock.patch.object(database.NetworkDbOperator, 'switch_select_record')
def test_get_nic_info(self, select):
self._smtclient.get_nic_info(userid='testid', nic_id='fake_nic')
select.assert_called_with(userid='testid', nic_id='fake_nic',
vswitch=None)
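
    # Guest capture helpers: the capture device is derived from the
    # guest's root device, and the OS version is parsed from whichever
    # release file is present on the guest.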
@mock.patch.object(smtclient.SMTClient, 'execute_cmd')
def test_guest_capture_get_capture_devices_rh7(self, execcmd):
userid = 'fakeid'
execcmd.side_effect = [['/dev/disk/by-path/ccw-0.0.0100-part1'],
['/dev/dasda1'],
['0.0.0100(ECKD) at ( 94: 0) is dasda'
' : active at blocksize: 4096,'
' 600840 blocks, 2347 MB']]
result = self._smtclient._get_capture_devices(userid)
self.assertEqual(result, ['0100'])
@mock.patch.object(smtclient.SMTClient, 'execute_cmd')
def test_guest_capture_get_capture_devices_ubuntu(self, execcmd):
userid = 'fakeid'
execcmd.side_effect = [['UUID=8320ec9d-c2b5-439f-b0a0-cede08afe957'
' allow_lun_scan=0 crashkernel=128M'
' BOOT_IMAGE=0'],
['/dev/dasda1'],
['0.0.0100(ECKD) at ( 94: 0) is dasda'
' : active at blocksize: 4096,'
' 600840 blocks, 2347 MB']]
result = self._smtclient._get_capture_devices(userid)
self.assertEqual(result, ['0100'])
@mock.patch.object(smtclient.SMTClient, 'execute_cmd')
def test_guest_capture_get_os_version_rh7(self, execcmd):
userid = 'fakeid'
execcmd.side_effect = [['/etc/os-release', '/etc/redhat-release',
'/etc/system-release'],
['NAME="Red Hat Enterprise Linux Server"',
'VERSION="7.0 (Maipo)"',
'ID="rhel"',
'ID_LIKE="fedora"',
'VERSION_ID="7.0"',
'PRETTY_NAME="Red Hat Enterprise Linux'
' Server 7.0 (Maipo)"',
'ANSI_COLOR="0;31"',
'CPE_NAME="cpe:/o:redhat:enterprise_linux:'
'7.0:GA:server"',
'HOME_URL="https://www.redhat.com/"']]
result = self._smtclient._guest_get_os_version(userid)
self.assertEqual(result, 'rhel7.0')
@mock.patch.object(smtclient.SMTClient, 'execute_cmd')
def test_guest_capture_get_os_version_rhel67_sles11(self, execcmd):
userid = 'fakeid'
execcmd.side_effect = [['/etc/redhat-release',
'/etc/system-release'],
['Red Hat Enterprise Linux Server release 6.7'
' (Santiago)']]
result = self._smtclient._guest_get_os_version(userid)
self.assertEqual(result, 'rhel6.7')
@mock.patch.object(smtclient.SMTClient, 'execute_cmd')
def test_guest_capture_get_os_version_ubuntu(self, execcmd):
userid = 'fakeid'
execcmd.side_effect = [['/etc/lsb-release',
'/etc/os-release'],
['NAME="Ubuntu"',
'VERSION="16.04 (Xenial Xerus)"',
'ID=ubuntu',
'ID_LIKE=debian',
'PRETTY_NAME="Ubuntu 16.04"',
'VERSION_ID="16.04"',
'HOME_URL="http://www.ubuntu.com/"',
'SUPPORT_URL="http://help.ubuntu.com/"',
'BUG_REPORT_URL="http://bugs.launchpad.net'
'/ubuntu/"',
'UBUNTU_CODENAME=xenial']]
result = self._smtclient._guest_get_os_version(userid)
self.assertEqual(result, 'ubuntu16.04')
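
    # End-to-end capture: softstop the guest, run creatediskimage into the
    # staging directory, move the result to the netboot repository, clean
    # up staging, and record the image in the DB.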
@mock.patch.object(database.ImageDbOperator, 'image_add_record')
@mock.patch.object(zvmutils.PathUtils, 'clean_temp_folder')
@mock.patch.object(smtclient.SMTClient, '_get_image_size')
@mock.patch.object(smtclient.SMTClient, '_get_disk_size_units')
@mock.patch.object(smtclient.SMTClient, '_get_md5sum')
@mock.patch.object(zvmutils, 'execute')
@mock.patch.object(zvmutils.PathUtils, 'mkdir_if_not_exist')
@mock.patch.object(smtclient.SMTClient, 'guest_softstop')
@mock.patch.object(smtclient.SMTClient, '_get_capture_devices')
@mock.patch.object(smtclient.SMTClient, '_guest_get_os_version')
@mock.patch.object(smtclient.SMTClient, 'execute_cmd')
@mock.patch.object(smtclient.SMTClient, 'get_power_state')
def test_guest_capture_good_path(self, get_power_state, execcmd,
get_os_version, get_capture_devices,
softstop, mkdir, execute, md5sum,
disk_size_units, imagesize, rm_folder,
image_add_record):
userid = 'fakeid'
image_name = 'fakeimage'
get_power_state.return_value = 'on'
execcmd.return_value = ['/']
get_os_version.return_value = 'rhel7.0'
get_capture_devices.return_value = ['0100']
image_temp_dir = '/'.join([CONF.image.sdk_image_repository,
'staging',
'rhel7.0',
image_name])
image_file_path = '/'.join((image_temp_dir, '0100'))
cmd1 = ['sudo', '/opt/zthin/bin/creatediskimage', userid, '0100',
image_file_path, '--compression', '6']
execute.side_effect = [(0, ''),
(0, '')]
image_final_dir = '/'.join((CONF.image.sdk_image_repository,
'netboot',
'rhel7.0',
image_name))
image_final_path = '/'.join((image_final_dir,
'0100'))
cmd2 = ['mv', image_file_path, image_final_path]
md5sum.return_value = '547396211b558490d31e0de8e15eef0c'
disk_size_units.return_value = '1000:CYL'
imagesize.return_value = '1024000'
self._smtclient.guest_capture(userid, image_name)
get_power_state.assert_called_with(userid)
execcmd.assert_called_once_with(userid, 'pwd')
get_os_version.assert_called_once_with(userid)
get_capture_devices.assert_called_once_with(userid, 'rootonly')
softstop.assert_called_once_with(userid)
execute.assert_has_calls([mock.call(cmd1), mock.call(cmd2)])
        mkdir.assert_has_calls([mock.call(image_temp_dir),
                                mock.call(image_final_dir)])
rm_folder.assert_called_once_with(image_temp_dir)
md5sum.assert_called_once_with(image_final_path)
disk_size_units.assert_called_once_with(image_final_path)
imagesize.assert_called_once_with(image_final_path)
image_add_record.assert_called_once_with(image_name, 'rhel7.0',
'547396211b558490d31e0de8e15eef0c', '1000:CYL', '1024000',
'rootonly')
@mock.patch.object(smtclient.SMTClient, '_guest_get_os_version')
@mock.patch.object(smtclient.SMTClient, 'execute_cmd')
@mock.patch.object(smtclient.SMTClient, 'get_power_state')
def test_guest_capture_error_path(self, get_power_state, execcmd,
get_os_version):
userid = 'fakeid'
image_name = 'fakeimage'
get_power_state.return_value = 'on'
result = {'rs': 101, 'errno': 0, 'strError': '',
'overallRC': 2,
'rc': 4,
'response': ['(Error) ULTVMU0315E IUCV socket error'
' sending command to FP1T0006. cmd: pwd, '
'rc: 4, rs: 101, out: ERROR: ERROR connecting'
' socket:', 'Network is unreachable', 'Return'
' code 4, Reason code 101.']}
execcmd.side_effect = exception.SDKSMTRequestFailed(result, 'err')
self.assertRaises(exception.SDKGuestOperationError,
self._smtclient.guest_capture, userid,
image_name)
get_power_state.assert_called_once_with(userid)
execcmd.assert_called_once_with(userid, 'pwd')
get_os_version.assert_not_called()
@mock.patch.object(database.GuestDbOperator,
'get_guest_by_userid')
def test_is_first_network_config_true(self, db_list):
db_list.return_value = [u'9a5c9689-d099-46bb-865f-0c01c384f58c',
u'TEST', u'', 0]
result = self._smtclient.is_first_network_config('TEST')
db_list.assert_called_once_with('TEST')
self.assertTrue(result)
@mock.patch.object(database.GuestDbOperator,
'get_guest_by_userid')
def test_is_first_network_config_false(self, db_list):
db_list.return_value = [u'9a5c9689-d099-46bb-865f-0c01c384f58c',
u'TEST', u'', 1]
result = self._smtclient.is_first_network_config('TEST')
db_list.assert_called_once_with('TEST')
self.assertFalse(result)
@mock.patch.object(database.GuestDbOperator,
'update_guest_by_userid')
def test_update_guestdb_with_net_set(self, update):
self._smtclient.update_guestdb_with_net_set('TEST')
update.assert_called_once_with('TEST', net_set='1')
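
    # SMAPI error mapping: rc=212/rs=40 means the vswitch does not exist
    # (SDKObjectNotExistError); other failures propagate unchanged.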
@mock.patch.object(zvmutils, 'get_smt_userid')
@mock.patch.object(smtclient.SMTClient, '_request')
def test_query_vswitch_NotExist(self, req, get_id):
get_id.return_value = "SMTUSER"
req.side_effect = exception.SDKSMTRequestFailed(
{'rc': 212, 'rs': 40}, 'err')
self.assertRaises(exception.SDKObjectNotExistError,
self._smtclient.query_vswitch, 'testvs')
@mock.patch.object(zvmutils, 'get_smt_userid')
@mock.patch.object(smtclient.SMTClient, '_request')
def test_query_vswitch_RequestFailed(self, req, get_id):
get_id.return_value = "SMTUSER"
req.side_effect = exception.SDKSMTRequestFailed(
{'rc': 1, 'rs': 1}, 'err')
self.assertRaises(exception.SDKSMTRequestFailed,
self._smtclient.query_vswitch, 'testvs')
@mock.patch.object(zvmutils, 'get_smt_userid')
@mock.patch.object(smtclient.SMTClient, '_request')
def test_query_OSA_RequestFailed(self, req, get_id):
get_id.return_value = "SMTUSER"
req.side_effect = exception.SDKSMTRequestFailed(
{'rc': 1, 'rs': 1}, 'err')
self.assertRaises(exception.SDKSMTRequestFailed,
self._smtclient._query_OSA)
@mock.patch.object(zvmutils, 'get_smt_userid')
@mock.patch.object(smtclient.SMTClient, '_request')
def test_query_OSA_NoOSA(self, req, get_id):
get_id.return_value = "SMTUSER"
req.side_effect = exception.SDKSMTRequestFailed(
{'rc': 4, 'rs': 4}, 'err')
result = self._smtclient._query_OSA()
get_id.assert_called_once_with()
self.assertEqual(result, {})
@mock.patch.object(zvmutils, 'get_smt_userid')
@mock.patch.object(smtclient.SMTClient, '_request')
def test_query_OSA(self, req, get_id):
get_id.return_value = "SMTUSER"
osa_info = [
"OSA Address: 0440",
"OSA Status: FREE",
"OSA Type: OSA",
"CHPID Address: 10",
"Agent Status: NO",
"OSA Address: 0441",
"OSA Status: FREE",
"OSA Type: OSA",
"CHPID Address: 10",
"Agent Status: NO",
"OSA Address: 4000",
"OSA Status: ATTACHED TCPIP",
"OSA Type: OSA",
"CHPID Address: 3B",
"Agent Status: NO",
"OSA Address: FB1D",
"OSA Status: FREE",
"OSA Type: HIPER",
"CHPID Address: FB",
"Agent Status: NO",
]
req.return_value = {'response': osa_info}
expected = {'OSA': {'FREE': ['0440', '0441'],
'BOXED': [],
'OFFLINE': [],
'ATTACHED': [('TCPIP', '4000')]},
'HIPER': {'FREE': ['FB1D'],
'BOXED': [],
'OFFLINE': [],
'ATTACHED': []}}
result = self._smtclient._query_OSA()
get_id.assert_called_once_with()
self.assertEqual(result.keys(), expected.keys())
self.assertEqual(result['OSA'], expected['OSA'])
self.assertEqual(result['HIPER'], expected['HIPER'])
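
    # _is_OSA_free needs the device and its next two addresses all in the
    # FREE list; addresses are normalized, so 'AA0' matches '0AA0'.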
@mock.patch.object(smtclient.SMTClient, '_query_OSA')
def test_is_OSA_free_noOSA(self, query_osa):
query_osa.return_value = {'HIPER': {}}
result = self._smtclient._is_OSA_free('0100')
query_osa.assert_called_once_with()
self.assertFalse(result)
@mock.patch.object(smtclient.SMTClient, '_query_OSA')
def test_is_OSA_free_noFree(self, query_osa):
query_osa.return_value = {'OSA': {'FREE': []}}
result = self._smtclient._is_OSA_free('0100')
query_osa.assert_called_once_with()
self.assertFalse(result)
@mock.patch.object(smtclient.SMTClient, '_query_OSA')
def test_is_OSA_free_notallFree(self, query_osa):
query_osa.return_value = {'OSA': {'FREE': ['0100', '0101']}}
result = self._smtclient._is_OSA_free('0100')
query_osa.assert_called_once_with()
self.assertFalse(result)
@mock.patch.object(smtclient.SMTClient, '_query_OSA')
def test_is_OSA_free_OK_num(self, query_osa):
query_osa.return_value = {'OSA': {'FREE': ['0100', '0101', '0102']}}
result = self._smtclient._is_OSA_free('0100')
query_osa.assert_called_once_with()
self.assertTrue(result)
@mock.patch.object(smtclient.SMTClient, '_query_OSA')
def test_is_OSA_free_OK_character(self, query_osa):
query_osa.return_value = {'OSA': {'FREE': ['0AA0', '0AA1', '0AA2']}}
result = self._smtclient._is_OSA_free('AA0')
query_osa.assert_called_once_with()
self.assertTrue(result)
@mock.patch.object(smtclient.SMTClient, '_get_available_vdev')
@mock.patch.object(smtclient.SMTClient, '_is_OSA_free')
@mock.patch.object(smtclient.SMTClient, '_dedicate_OSA')
def test_dedicate_OSA(self, attach_osa, OSA_free, get_vdev):
OSA_free.return_value = True
get_vdev.return_value = '1000'
result = self._smtclient.dedicate_OSA('userid', 'OSA_device',
vdev='nic_vdev', active=True)
get_vdev.assert_called_once_with('userid', vdev='nic_vdev')
OSA_free.assert_called_once_with('OSA_device')
attach_osa.assert_called_once_with('userid', 'OSA_device',
'1000', active=True)
self.assertEqual(result, '1000')
@mock.patch.object(smtclient.SMTClient, '_get_available_vdev')
@mock.patch.object(smtclient.SMTClient, '_is_OSA_free')
def test_dedicate_OSA_notFree(self, OSA_free, get_vdev):
OSA_free.return_value = False
get_vdev.return_value = '1000'
self.assertRaises(exception.SDKConflictError,
self._smtclient.dedicate_OSA,
'userid', 'OSA_device', 'nic_vdev', active=True)
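
    # _dedicate_OSA dedicates three consecutive real devices to three
    # consecutive vdevs; on failure, the devices already dedicated are
    # rolled back via matching Undedicate calls before the error is
    # re-raised.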
@mock.patch.object(database.NetworkDbOperator, 'switch_add_record')
@mock.patch.object(smtclient.SMTClient, '_request')
def test_private_dedicate_OSA_notActive(self, request, add_rec):
request_response = ['', '', '', '', '', '']
request.side_effect = request_response
self._smtclient._dedicate_OSA('userid', 'f000',
'1000', active=False)
request.assert_any_call('SMAPI userid API Image_Device_Dedicate_DM '
"--operands -v 1000 -r f000")
request.assert_any_call('SMAPI userid API Image_Device_Dedicate_DM '
"--operands -v 1001 -r f001")
request.assert_any_call('SMAPI userid API Image_Device_Dedicate_DM '
"--operands -v 1002 -r f002")
add_rec.assert_called_once_with('userid', '1000', comments='OSA=f000')
@mock.patch.object(smtclient.SMTClient, '_request')
def test_private_dedicate_OSA_notActive_Fail_Input(self, request):
request_response = ['', '']
request_response.append(exception.SDKSMTRequestFailed(
{'rc': 404, 'rs': 4}, 'err'))
request_response.append(exception.SDKSMTRequestFailed(
{'rc': 1, 'rs': 1}, 'err'))
request_response.append(exception.SDKSMTRequestFailed(
{'rc': 404, 'rs': 8}, 'err'))
request.side_effect = request_response
self.assertRaises(exception.SDKConflictError,
self._smtclient._dedicate_OSA,
'userid', 'f000', '1000', active=False)
request.assert_any_call('SMAPI userid API Image_Device_Dedicate_DM '
"--operands -v 1000 -r f000")
request.assert_any_call('SMAPI userid API Image_Device_Dedicate_DM '
"--operands -v 1001 -r f001")
request.assert_any_call('SMAPI userid API Image_Device_Dedicate_DM '
"--operands -v 1002 -r f002")
request.assert_any_call('SMAPI userid API Image_Device_Undedicate_DM '
"--operands -v 1001")
request.assert_any_call('SMAPI userid API Image_Device_Undedicate_DM '
"--operands -v 1000")
@mock.patch.object(smtclient.SMTClient, '_request')
def test_private_dedicate_OSA_notActive_Fail_Lock(self, request):
request_response = ['', '']
request_response.append(exception.SDKSMTRequestFailed(
{'rc': 404, 'rs': 12}, 'err'))
request_response.append(exception.SDKSMTRequestFailed(
{'rc': 1, 'rs': 1}, 'err'))
request_response.append('')
request.side_effect = request_response
self.assertRaises(exception.SDKConflictError,
self._smtclient._dedicate_OSA,
'userid', 'f000', '1000', active=False)
request.assert_any_call('SMAPI userid API Image_Device_Dedicate_DM '
"--operands -v 1000 -r f000")
request.assert_any_call('SMAPI userid API Image_Device_Dedicate_DM '
"--operands -v 1001 -r f001")
request.assert_any_call('SMAPI userid API Image_Device_Dedicate_DM '
"--operands -v 1002 -r f002")
request.assert_any_call('SMAPI userid API Image_Device_Undedicate_DM '
"--operands -v 1001")
request.assert_any_call('SMAPI userid API Image_Device_Undedicate_DM '
"--operands -v 1000")
@mock.patch.object(database.NetworkDbOperator, 'switch_add_record')
@mock.patch.object(smtclient.SMTClient, '_request')
@mock.patch.object(smtclient.SMTClient, 'get_power_state')
def test_private_dedicate_OSA_Active(self, power_state, request, add_rec):
power_state.return_value = 'on'
request_response = ['', '', '', '', '', '']
request.side_effect = request_response
self._smtclient._dedicate_OSA('userid', 'f000',
'1000', active=True)
request.assert_any_call('SMAPI userid API Image_Device_Dedicate_DM '
"--operands -v 1000 -r f000")
request.assert_any_call('SMAPI userid API Image_Device_Dedicate_DM '
"--operands -v 1001 -r f001")
request.assert_any_call('SMAPI userid API Image_Device_Dedicate_DM '
"--operands -v 1002 -r f002")
request.assert_any_call('SMAPI userid API Image_Device_Dedicate '
"--operands -v 1000 -r f000")
request.assert_any_call('SMAPI userid API Image_Device_Dedicate '
"--operands -v 1001 -r f001")
request.assert_any_call('SMAPI userid API Image_Device_Dedicate '
"--operands -v 1002 -r f002")
add_rec.assert_called_once_with('userid', '1000', comments='OSA=f000')
@mock.patch.object(smtclient.SMTClient, '_request')
@mock.patch.object(smtclient.SMTClient, 'get_power_state')
def test_private_dedicate_OSA_Active_Fail(self, power_state, request):
power_state.return_value = 'on'
request_response = ['', '', '', '', '']
request_response.append(exception.SDKSMTRequestFailed(
{'rc': 300, 'rs': 0}, 'err'))
request_response.append(exception.SDKSMTRequestFailed(
{'rc': 404, 'rs': 8}, 'err'))
request_response.append(exception.SDKSMTRequestFailed(
{'rc': 400, 'rs': 8}, 'err'))
request_response.append('')
request_response.append(exception.SDKSMTRequestFailed(
{'rc': 204, 'rs': 8}, 'err'))
request_response.append(exception.SDKSMTRequestFailed(
{'rc': 200, 'rs': 8}, 'err'))
request.side_effect = request_response
self.assertRaises(exception.SDKSMTRequestFailed,
self._smtclient._dedicate_OSA,
'userid', 'f000', '1000', active=True)
request.assert_any_call('SMAPI userid API Image_Device_Dedicate_DM '
"--operands -v 1000 -r f000")
request.assert_any_call('SMAPI userid API Image_Device_Dedicate_DM '
"--operands -v 1001 -r f001")
request.assert_any_call('SMAPI userid API Image_Device_Dedicate_DM '
"--operands -v 1002 -r f002")
request.assert_any_call('SMAPI userid API Image_Device_Undedicate_DM '
"--operands -v 1000")
request.assert_any_call('SMAPI userid API Image_Device_Undedicate_DM '
"--operands -v 1001")
request.assert_any_call('SMAPI userid API Image_Device_Undedicate_DM '
"--operands -v 1002")
request.assert_any_call('SMAPI userid API Image_Device_Dedicate '
"--operands -v 1000 -r f000")
request.assert_any_call('SMAPI userid API Image_Device_Dedicate '
"--operands -v 1001 -r f001")
request.assert_any_call('SMAPI userid API Image_Device_Dedicate '
"--operands -v 1002 -r f002")
request.assert_any_call('SMAPI userid API Image_Device_Undedicate '
"--operands -v 1000")
request.assert_any_call('SMAPI userid API Image_Device_Undedicate '
"--operands -v 1001")
@mock.patch.object(smtclient.SMTClient, '_request')
@mock.patch.object(smtclient.SMTClient, 'get_power_state')
def test_private_dedicate_OSA_Active_Fail_Input(self, power, request):
power.return_value = 'on'
request_response = ['', '', '', '', '']
request_response.append(exception.SDKSMTRequestFailed(
{'rc': 204, 'rs': 8}, 'err'))
request_response.append('')
request_response.append('')
request_response.append('')
request_response.append(exception.SDKSMTRequestFailed(
{'rc': 204, 'rs': 8}, 'err'))
request_response.append(exception.SDKSMTRequestFailed(
{'rc': 200, 'rs': 8}, 'err'))
request.side_effect = request_response
self.assertRaises(exception.SDKConflictError,
self._smtclient._dedicate_OSA,
'userid', 'f000', '1000', active=True)
request.assert_any_call('SMAPI userid API Image_Device_Dedicate_DM '
"--operands -v 1000 -r f000")
request.assert_any_call('SMAPI userid API Image_Device_Dedicate_DM '
"--operands -v 1001 -r f001")
request.assert_any_call('SMAPI userid API Image_Device_Dedicate_DM '
"--operands -v 1002 -r f002")
request.assert_any_call('SMAPI userid API Image_Device_Undedicate_DM '
"--operands -v 1000")
request.assert_any_call('SMAPI userid API Image_Device_Undedicate_DM '
"--operands -v 1001")
request.assert_any_call('SMAPI userid API Image_Device_Undedicate_DM '
"--operands -v 1002")
request.assert_any_call('SMAPI userid API Image_Device_Dedicate '
"--operands -v 1000 -r f000")
request.assert_any_call('SMAPI userid API Image_Device_Dedicate '
"--operands -v 1001 -r f001")
request.assert_any_call('SMAPI userid API Image_Device_Dedicate '
"--operands -v 1002 -r f002")
request.assert_any_call('SMAPI userid API Image_Device_Undedicate '
"--operands -v 1000")
request.assert_any_call('SMAPI userid API Image_Device_Undedicate '
"--operands -v 1001")
@mock.patch.object(smtclient.SMTClient, '_request_with_error_ignored')
def test_namelist_add(self, req):
self._smtclient.namelist_add('tnlist', 'testid')
rd = "SMAPI tnlist API Name_List_Add --operands -n testid"
req.assert_called_once_with(rd)
@mock.patch.object(smtclient.SMTClient, '_request_with_error_ignored')
def test_namelist_remove(self, req):
self._smtclient.namelist_remove('tnlist', 'testid')
rd = "SMAPI tnlist API Name_List_Remove --operands -n testid"
req.assert_called_once_with(rd)
@mock.patch.object(smtclient.SMTClient, '_request_with_error_ignored')
def test_namelist_query(self, req):
req.return_value = {'response': ['t1', 't2']}
resp = self._smtclient.namelist_query('tnlist')
rd = "SMAPI tnlist API Name_List_Query"
req.assert_called_once_with(rd)
self.assertEqual(['t1', 't2'], resp)
@mock.patch.object(smtclient.SMTClient, '_request')
def test_namelist_query_err(self, req):
req.side_effect = exception.SDKSMTRequestFailed({}, 'err')
resp = self._smtclient.namelist_query('tnlist')
rd = "SMAPI tnlist API Name_List_Query"
req.assert_called_once_with(rd)
self.assertEqual([], resp)
@mock.patch.object(smtclient.SMTClient, '_request_with_error_ignored')
def test_namelist_destroy(self, req):
self._smtclient.namelist_destroy('tnlist')
rd = "SMAPI tnlist API Name_List_Destroy"
req.assert_called_once_with(rd)
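
    # CPU bookkeeping: defined addresses come from the CPU cards in the
    # user directory, 'MACHINE ESA n' gives the max count (0 if missing),
    # and active addresses are parsed from 'lscpu --parse=ADDRESS'.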
@mock.patch.object(smtclient.SMTClient, 'get_user_direct')
def test_private_get_defined_cpu_addrs(self, get_user_direct):
get_user_direct.return_value = ['USER TESTUID LBYONLY 1024m 64G G',
'INCLUDE OSDFLT',
'CPU 00 BASE',
'CPU 0A',
'IPL 0100',
'MACHINE ESA 32',
'NICDEF 1000 TYPE QDIO LAN '
'SYSTEM XCATVSW2 DEVICES 3',
'MDISK 0100 3390 52509 1100 OMB1AB MR',
'']
(max_cpus, defined_addrs) = self._smtclient._get_defined_cpu_addrs(
'TESTUID')
get_user_direct.assert_called_once_with('TESTUID')
self.assertEqual(max_cpus, 32)
self.assertEqual(defined_addrs, ['00', '0A'])
@mock.patch.object(smtclient.SMTClient, 'get_user_direct')
def test_private_get_defined_cpu_addrs_no_max_cpu(self, get_user_direct):
get_user_direct.return_value = ['USER TESTUID LBYONLY 1024m 64G G',
'INCLUDE OSDFLT',
'CPU 00 BASE',
'CPU 0A',
'IPL 0100',
'NICDEF 1000 TYPE QDIO LAN '
'SYSTEM XCATVSW2 DEVICES 3',
'MDISK 0100 3390 52509 1100 OMB1AB MR',
'']
(max_cpus, defined_addrs) = self._smtclient._get_defined_cpu_addrs(
'TESTUID')
get_user_direct.assert_called_once_with('TESTUID')
self.assertEqual(max_cpus, 0)
self.assertEqual(defined_addrs, ['00', '0A'])
def test_private_get_available_cpu_addrs(self):
used = ['00', '01', '1A', '1F']
        max_cpus = 32
avail_expected = ['02', '03', '04', '05', '06', '07', '08', '09',
'0A', '0B', '0C', '0D', '0E', '0F', '10', '11',
'12', '13', '14', '15', '16', '17', '18', '19',
'1B', '1C', '1D', '1E']
        avail_addrs = self._smtclient._get_available_cpu_addrs(used, max_cpus)
avail_addrs.sort()
self.assertListEqual(avail_addrs, avail_expected)
@mock.patch.object(smtclient.SMTClient, 'execute_cmd')
def test_private_get_active_cpu_addrs(self, exec_cmd):
active_cpus = [('# The following is the parsable format, which can '
'be fed to other'),
('# programs. Each different item in every column has '
'an unique ID'),
'# starting from zero.',
'# Address',
'0', '3', '10', '19']
exec_cmd.return_value = active_cpus
addrs = self._smtclient._get_active_cpu_addrs('TESTUID')
exec_cmd.assert_called_once_with('TESTUID', "lscpu --parse=ADDRESS")
addrs.sort()
self.assertListEqual(addrs, ['00', '03', '0A', '13'])
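
    # Live CPU resize updates the directory via resize_cpus, then brings
    # the new CPUs online with 'vmcp def cpu' plus a 'chcpu -r' rescan;
    # if the live step fails, the directory change is reverted.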
@mock.patch.object(smtclient.SMTClient, '_request')
@mock.patch.object(smtclient.SMTClient, 'execute_cmd')
@mock.patch.object(smtclient.SMTClient, '_get_available_cpu_addrs')
@mock.patch.object(smtclient.SMTClient, 'resize_cpus')
@mock.patch.object(smtclient.SMTClient, '_get_active_cpu_addrs')
def test_live_resize_cpus(self, get_active, resize, get_avail,
exec_cmd, request):
userid = 'testuid'
count = 4
get_active.return_value = ['00', '01']
resize.return_value = (1, ['02', '03'], 32)
avail_lst = ['02', '03', '04', '05', '06', '07', '08', '09',
'0A', '0B', '0C', '0D', '0E', '0F', '10', '11',
'12', '13', '14', '15', '16', '17', '18', '19',
'1A', '1B', '1C', '1D', '1E', '1F']
get_avail.return_value = avail_lst
self._smtclient.live_resize_cpus(userid, count)
get_active.assert_called_once_with(userid)
resize.assert_called_once_with(userid, count)
get_avail.assert_called_once_with(['00', '01'], 32)
cmd_def_cpu = "vmcp def cpu 02 03"
cmd_rescan_cpu = "chcpu -r"
exec_cmd.assert_has_calls([mock.call(userid, cmd_def_cpu),
mock.call(userid, cmd_rescan_cpu)])
request.assert_not_called()
@mock.patch.object(smtclient.SMTClient, '_request')
@mock.patch.object(smtclient.SMTClient, 'execute_cmd')
@mock.patch.object(smtclient.SMTClient, '_get_available_cpu_addrs')
@mock.patch.object(smtclient.SMTClient, 'resize_cpus')
@mock.patch.object(smtclient.SMTClient, '_get_active_cpu_addrs')
def test_live_resize_cpus_equal_active(self, get_active, resize, get_avail,
exec_cmd, request):
userid = 'testuid'
count = 4
get_active.return_value = ['00', '01', '02', '03']
resize.return_value = (1, ['02', '03'], 32)
self._smtclient.live_resize_cpus(userid, count)
get_active.assert_called_once_with(userid)
resize.assert_called_once_with(userid, count)
get_avail.assert_not_called()
exec_cmd.assert_not_called()
request.assert_not_called()
@mock.patch.object(smtclient.SMTClient, '_request')
@mock.patch.object(smtclient.SMTClient, 'execute_cmd')
@mock.patch.object(smtclient.SMTClient, '_get_available_cpu_addrs')
@mock.patch.object(smtclient.SMTClient, 'resize_cpus')
@mock.patch.object(smtclient.SMTClient, '_get_active_cpu_addrs')
def test_live_resize_cpus_less_active(self, get_active, resize, get_avail,
exec_cmd, request):
userid = 'testuid'
count = 4
get_active.return_value = ['00', '01', '02', '03', '04']
self.assertRaises(exception.SDKConflictError,
self._smtclient.live_resize_cpus, userid, count)
get_active.assert_called_once_with(userid)
resize.assert_not_called()
get_avail.assert_not_called()
exec_cmd.assert_not_called()
request.assert_not_called()
@mock.patch.object(smtclient.SMTClient, '_request')
@mock.patch.object(smtclient.SMTClient, 'execute_cmd')
@mock.patch.object(smtclient.SMTClient, '_get_available_cpu_addrs')
@mock.patch.object(smtclient.SMTClient, 'resize_cpus')
@mock.patch.object(smtclient.SMTClient, '_get_active_cpu_addrs')
def test_live_resize_cpus_revert_definition_equal(self, get_active,
resize, get_avail,
exec_cmd, request):
# Test case: active update failed, definition not updated
userid = 'testuid'
count = 4
get_active.return_value = ['00', '01']
resize.return_value = (0, [], 32)
avail_lst = ['02', '03', '04', '05', '06', '07', '08', '09',
'0A', '0B', '0C', '0D', '0E', '0F', '10', '11',
'12', '13', '14', '15', '16', '17', '18', '19',
'1A', '1B', '1C', '1D', '1E', '1F']
get_avail.return_value = avail_lst
exec_cmd.side_effect = [exception.SDKSMTRequestFailed({}, 'err'), ""]
self.assertRaises(exception.SDKGuestOperationError,
self._smtclient.live_resize_cpus, userid, count)
get_active.assert_called_once_with(userid)
resize.assert_called_once_with(userid, count)
get_avail.assert_called_once_with(['00', '01'], 32)
exec_cmd.assert_called_once_with(userid, "vmcp def cpu 02 03")
request.assert_not_called()
@mock.patch.object(smtclient.SMTClient, '_request')
@mock.patch.object(smtclient.SMTClient, 'execute_cmd')
@mock.patch.object(smtclient.SMTClient, '_get_available_cpu_addrs')
@mock.patch.object(smtclient.SMTClient, 'resize_cpus')
@mock.patch.object(smtclient.SMTClient, '_get_active_cpu_addrs')
def test_live_resize_cpus_revert_added_cpus(self, get_active,
resize, get_avail,
exec_cmd, request):
userid = 'testuid'
count = 4
get_active.return_value = ['00', '01']
resize.return_value = (1, ['01', '02', '03'], 32)
avail_lst = ['02', '03', '04', '05', '06', '07', '08', '09',
'0A', '0B', '0C', '0D', '0E', '0F', '10', '11',
'12', '13', '14', '15', '16', '17', '18', '19',
'1A', '1B', '1C', '1D', '1E', '1F']
get_avail.return_value = avail_lst
exec_cmd.side_effect = [exception.SDKSMTRequestFailed({}, 'err'), ""]
self.assertRaises(exception.SDKGuestOperationError,
self._smtclient.live_resize_cpus, userid, count)
get_active.assert_called_once_with(userid)
resize.assert_called_once_with(userid, count)
get_avail.assert_called_once_with(['00', '01'], 32)
exec_cmd.assert_called_once_with(userid, "vmcp def cpu 02 03")
rd = ("SMAPI testuid API Image_Definition_Delete_DM --operands "
"-k CPU=CPUADDR=01 -k CPU=CPUADDR=02 -k CPU=CPUADDR=03")
request.assert_called_once_with(rd)
@mock.patch.object(smtclient.SMTClient, '_request')
@mock.patch.object(smtclient.SMTClient, 'execute_cmd')
@mock.patch.object(smtclient.SMTClient, '_get_available_cpu_addrs')
@mock.patch.object(smtclient.SMTClient, 'resize_cpus')
@mock.patch.object(smtclient.SMTClient, '_get_active_cpu_addrs')
def test_live_resize_cpus_revert_deleted_cpus(self, get_active,
resize, get_avail,
exec_cmd, request):
userid = 'testuid'
count = 4
get_active.return_value = ['00', '01']
resize.return_value = (2, ['04', '0A'], 32)
avail_lst = ['02', '03', '04', '05', '06', '07', '08', '09',
'0A', '0B', '0C', '0D', '0E', '0F', '10', '11',
'12', '13', '14', '15', '16', '17', '18', '19',
'1A', '1B', '1C', '1D', '1E', '1F']
get_avail.return_value = avail_lst
exec_cmd.side_effect = [exception.SDKSMTRequestFailed({}, 'err'), ""]
self.assertRaises(exception.SDKGuestOperationError,
self._smtclient.live_resize_cpus, userid, count)
get_active.assert_called_once_with(userid)
resize.assert_called_once_with(userid, count)
get_avail.assert_called_once_with(['00', '01'], 32)
exec_cmd.assert_called_once_with(userid, "vmcp def cpu 02 03")
rd = ("SMAPI testuid API Image_Definition_Create_DM --operands "
"-k CPU=CPUADDR=04 -k CPU=CPUADDR=0A")
request.assert_called_once_with(rd)
@mock.patch.object(smtclient.SMTClient, '_request')
@mock.patch.object(smtclient.SMTClient, 'execute_cmd')
@mock.patch.object(smtclient.SMTClient, '_get_available_cpu_addrs')
@mock.patch.object(smtclient.SMTClient, 'resize_cpus')
@mock.patch.object(smtclient.SMTClient, '_get_active_cpu_addrs')
def test_live_resize_cpus_revert_failed(self, get_active,
resize, get_avail,
exec_cmd, request):
userid = 'testuid'
count = 4
get_active.return_value = ['00', '01']
resize.return_value = (2, ['04', '0A'], 32)
avail_lst = ['02', '03', '04', '05', '06', '07', '08', '09',
'0A', '0B', '0C', '0D', '0E', '0F', '10', '11',
'12', '13', '14', '15', '16', '17', '18', '19',
'1A', '1B', '1C', '1D', '1E', '1F']
get_avail.return_value = avail_lst
exec_cmd.side_effect = [exception.SDKSMTRequestFailed({}, 'err'), ""]
request.side_effect = [exception.SDKSMTRequestFailed({}, 'err'), ""]
self.assertRaises(exception.SDKGuestOperationError,
self._smtclient.live_resize_cpus, userid, count)
get_active.assert_called_once_with(userid)
resize.assert_called_once_with(userid, count)
get_avail.assert_called_once_with(['00', '01'], 32)
exec_cmd.assert_called_once_with(userid, "vmcp def cpu 02 03")
rd = ("SMAPI testuid API Image_Definition_Create_DM --operands "
"-k CPU=CPUADDR=04 -k CPU=CPUADDR=0A")
request.assert_called_once_with(rd)
@mock.patch.object(smtclient.SMTClient, '_request')
@mock.patch.object(smtclient.SMTClient, 'execute_cmd')
@mock.patch.object(smtclient.SMTClient, '_get_available_cpu_addrs')
@mock.patch.object(smtclient.SMTClient, 'resize_cpus')
@mock.patch.object(smtclient.SMTClient, '_get_active_cpu_addrs')
def test_live_resize_cpus_rescan_failed(self, get_active,
resize, get_avail,
exec_cmd, request):
userid = 'testuid'
count = 4
get_active.return_value = ['00', '01']
resize.return_value = (2, ['04', '0A'], 32)
avail_lst = ['02', '03', '04', '05', '06', '07', '08', '09',
'0A', '0B', '0C', '0D', '0E', '0F', '10', '11',
'12', '13', '14', '15', '16', '17', '18', '19',
'1A', '1B', '1C', '1D', '1E', '1F']
get_avail.return_value = avail_lst
exec_cmd.side_effect = ["", exception.SDKSMTRequestFailed({}, 'err')]
self.assertRaises(exception.SDKGuestOperationError,
self._smtclient.live_resize_cpus, userid, count)
get_active.assert_called_once_with(userid)
resize.assert_called_once_with(userid, count)
get_avail.assert_called_once_with(['00', '01'], 32)
cmd_def_cpu = "vmcp def cpu 02 03"
cmd_rescan_cpu = "chcpu -r"
exec_cmd.assert_has_calls([mock.call(userid, cmd_def_cpu),
mock.call(userid, cmd_rescan_cpu)])
request.assert_not_called()
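
    # Static resize edits only the directory: CPU cards are added, or the
    # highest-numbered ones removed, via Image_Definition_*_DM requests.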
@mock.patch.object(smtclient.SMTClient, '_request')
@mock.patch.object(smtclient.SMTClient, '_get_available_cpu_addrs')
@mock.patch.object(smtclient.SMTClient, '_get_defined_cpu_addrs')
def test_resize_cpus_equal_count(self, get_defined,
get_avail, request):
userid = 'testuid'
count = 2
get_defined.return_value = (32, ['00', '01'])
return_data = self._smtclient.resize_cpus(userid, count)
self.assertTupleEqual(return_data, (0, [], 32))
get_defined.assert_called_once_with(userid)
get_avail.assert_not_called()
request.assert_not_called()
@mock.patch.object(smtclient.SMTClient, '_request')
@mock.patch.object(smtclient.SMTClient, '_get_available_cpu_addrs')
@mock.patch.object(smtclient.SMTClient, '_get_defined_cpu_addrs')
def test_resize_cpus_add(self, get_defined,
get_avail, request):
userid = 'testuid'
count = 4
get_defined.return_value = (32, ['00', '01'])
avail_lst = ['02', '03', '04', '05', '06', '07', '08', '09',
'0A', '0B', '0C', '0D', '0E', '0F', '10', '11',
'12', '13', '14', '15', '16', '17', '18', '19',
'1A', '1B', '1C', '1D', '1E', '1F']
get_avail.return_value = avail_lst
return_data = self._smtclient.resize_cpus(userid, count)
self.assertTupleEqual(return_data, (1, ['02', '03'], 32))
get_defined.assert_called_once_with(userid)
get_avail.assert_called_once_with(['00', '01'], 32)
rd = ("SMAPI testuid API Image_Definition_Update_DM --operands "
"-k CPU=CPUADDR=02 -k CPU=CPUADDR=03")
request.assert_called_once_with(rd)
@mock.patch.object(smtclient.SMTClient, '_request')
@mock.patch.object(smtclient.SMTClient, '_get_available_cpu_addrs')
@mock.patch.object(smtclient.SMTClient, '_get_defined_cpu_addrs')
def test_resize_cpus_delete(self, get_defined,
get_avail, request):
userid = 'testuid'
count = 4
get_defined.return_value = (32, ['00', '1A', '02', '01', '11', '10'])
return_data = self._smtclient.resize_cpus(userid, count)
self.assertTupleEqual(return_data, (2, ['11', '1A'], 32))
get_defined.assert_called_once_with(userid)
get_avail.assert_not_called()
rd = ("SMAPI testuid API Image_Definition_Delete_DM --operands "
"-k CPU=CPUADDR=11 -k CPU=CPUADDR=1A")
request.assert_called_once_with(rd)
@mock.patch.object(smtclient.SMTClient, '_request')
@mock.patch.object(smtclient.SMTClient, '_get_available_cpu_addrs')
@mock.patch.object(smtclient.SMTClient, '_get_defined_cpu_addrs')
def test_resize_cpus_max_not_defined(self, get_defined,
get_avail, request):
userid = 'testuid'
count = 4
get_defined.return_value = (0, ['00', '01'])
self.assertRaises(exception.SDKConflictError,
self._smtclient.resize_cpus, userid, count)
get_defined.assert_called_once_with(userid)
get_avail.assert_not_called()
request.assert_not_called()
@mock.patch.object(smtclient.SMTClient, '_request')
@mock.patch.object(smtclient.SMTClient, '_get_available_cpu_addrs')
@mock.patch.object(smtclient.SMTClient, '_get_defined_cpu_addrs')
def test_resize_cpus_req_exceeds_max(self, get_defined,
get_avail, request):
userid = 'testuid'
count = 40
get_defined.return_value = (32, ['00', '01'])
self.assertRaises(exception.SDKConflictError,
self._smtclient.resize_cpus, userid, count)
get_defined.assert_called_once_with(userid)
get_avail.assert_not_called()
request.assert_not_called()
@mock.patch.object(smtclient.SMTClient, '_request')
@mock.patch.object(smtclient.SMTClient, '_get_available_cpu_addrs')
@mock.patch.object(smtclient.SMTClient, '_get_defined_cpu_addrs')
def test_resize_cpus_add_failed(self, get_defined,
get_avail, request):
userid = 'testuid'
count = 4
get_defined.return_value = (32, ['00', '01'])
avail_lst = ['02', '03', '04', '05', '06', '07', '08', '09',
'0A', '0B', '0C', '0D', '0E', '0F', '10', '11',
'12', '13', '14', '15', '16', '17', '18', '19',
'1A', '1B', '1C', '1D', '1E', '1F']
get_avail.return_value = avail_lst
request.side_effect = exception.SDKSMTRequestFailed({}, 'err')
self.assertRaises(exception.SDKGuestOperationError,
self._smtclient.resize_cpus, userid, count)
get_defined.assert_called_once_with(userid)
get_avail.assert_called_once_with(['00', '01'], 32)
rd = ("SMAPI testuid API Image_Definition_Update_DM --operands "
"-k CPU=CPUADDR=02 -k CPU=CPUADDR=03")
request.assert_called_once_with(rd)
@mock.patch.object(smtclient.SMTClient, '_request')
@mock.patch.object(smtclient.SMTClient, '_get_available_cpu_addrs')
@mock.patch.object(smtclient.SMTClient, '_get_defined_cpu_addrs')
def test_resize_cpus_delete_failed(self, get_defined,
get_avail, request):
userid = 'testuid'
count = 4
get_defined.return_value = (32, ['00', '01', '02', '03', '04', '05'])
request.side_effect = exception.SDKSMTRequestFailed({}, 'err')
self.assertRaises(exception.SDKGuestOperationError,
self._smtclient.resize_cpus, userid, count)
get_defined.assert_called_once_with(userid)
get_avail.assert_not_called()
rd = ("SMAPI testuid API Image_Definition_Delete_DM --operands "
"-k CPU=CPUADDR=04 -k CPU=CPUADDR=05")
request.assert_called_once_with(rd)
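
    # Memory resize rewrites the USER entry: the defined size changes and
    # 'COMMAND DEF STOR RESERVED' is adjusted so defined plus reserved
    # still equals the maximum; the directory is locked before replacing.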
@mock.patch.object(smtclient.SMTClient, '_get_defined_memory')
@mock.patch.object(smtclient.SMTClient, '_replace_user_direct')
def test_resize_memory_reserved_not_defined(self, replace_def,
get_defined):
userid = 'testuid'
size = '2g'
sample_definition = [u'USER TESTUID LBYONLY 4096M 64G G',
u'INCLUDE OSDFLT',
u'CPU 00 BASE',
u'IPL 0100',
u'MDISK 0100 3390 5501 5500 OMB1BA MR',
u'']
get_defined.return_value = (4096, 65536, -1, sample_definition)
self.assertRaises(exception.SDKConflictError,
self._smtclient.resize_memory, userid, size)
replace_def.assert_not_called()
@mock.patch.object(smtclient.SMTClient, '_get_defined_memory')
@mock.patch.object(smtclient.SMTClient, '_replace_user_direct')
def test_resize_memory_exceed_max_size(self, replace_def, get_defined):
userid = 'testuid'
size = '65g'
sample_definition = [u'USER TESTUID LBYONLY 4096M 64G G',
u'INCLUDE OSDFLT',
u'COMMAND DEF STOR RESERVED 61440M',
u'CPU 00 BASE',
u'IPL 0100',
u'MDISK 0100 3390 5501 5500 OMB1BA MR',
u'']
get_defined.return_value = (4096, 65536, 61440, sample_definition)
self.assertRaises(exception.SDKConflictError,
self._smtclient.resize_memory, userid, size)
replace_def.assert_not_called()
@mock.patch.object(smtclient.SMTClient, '_get_defined_memory')
@mock.patch.object(smtclient.SMTClient, '_replace_user_direct')
def test_resize_memory_equal_size(self, replace_def, get_defined):
userid = 'testuid'
size = '4g'
sample_definition = [u'USER TESTUID LBYONLY 4096M 64G G',
u'INCLUDE OSDFLT',
u'COMMAND DEF STOR RESERVED 61440M',
u'CPU 00 BASE',
u'IPL 0100',
u'MDISK 0100 3390 5501 5500 OMB1BA MR',
u'']
get_defined.return_value = (4096, 65536, 61440, sample_definition)
(action, defined_mem, max_mem, user_direct) = \
self._smtclient.resize_memory(userid, size)
self.assertEqual(action, 0)
replace_def.assert_not_called()
@mock.patch.object(smtclient.SMTClient, '_get_defined_memory')
@mock.patch.object(smtclient.SMTClient, '_lock_user_direct')
@mock.patch.object(smtclient.SMTClient, '_replace_user_direct')
def test_resize_memory_increase(self, replace_def, lock_def, get_def):
userid = 'testuid'
size = '10240M'
sample_definition = [u'USER TESTUID LBYONLY 4096M 64G G',
u'INCLUDE OSDFLT',
u'COMMAND DEF STOR RESERVED 61440M',
u'CPU 00 BASE',
u'IPL 0100',
u'MDISK 0100 3390 5501 5500 OMB1BA MR',
u'']
get_def.return_value = (4096, 65536, 61440, sample_definition)
(action, defined_mem, max_mem, user_direct) = \
self._smtclient.resize_memory(userid, size)
self.assertEqual(action, 1)
get_def.assert_called_once_with(userid)
lock_def.assert_called_once_with(userid)
new_entry = ("USER TESTUID LBYONLY 10240M 64G G\n"
"INCLUDE OSDFLT\n"
"COMMAND DEF STOR RESERVED 55296M\n"
"CPU 00 BASE\n"
"IPL 0100\n"
"MDISK 0100 3390 5501 5500 OMB1BA MR\n")
replace_def.assert_called_once_with(userid, new_entry)
@mock.patch.object(smtclient.SMTClient, '_get_defined_memory')
@mock.patch.object(smtclient.SMTClient, '_lock_user_direct')
@mock.patch.object(smtclient.SMTClient, '_replace_user_direct')
def test_resize_memory_decrease(self, replace_def, lock_def, get_def):
userid = 'testuid'
size = '2g'
sample_definition = [u'USER TESTUID LBYONLY 4096M 64G G',
u'INCLUDE OSDFLT',
u'COMMAND DEF STOR RESERVED 61440M',
u'CPU 00 BASE',
u'IPL 0100',
u'MDISK 0100 3390 5501 5500 OMB1BA MR',
u'']
get_def.return_value = (4096, 65536, 61440, sample_definition)
(action, defined_mem, max_mem, user_direct) = \
self._smtclient.resize_memory(userid, size)
self.assertEqual(action, 1)
get_def.assert_called_once_with(userid)
lock_def.assert_called_once_with(userid)
new_entry = ("USER TESTUID LBYONLY 2048M 64G G\n"
"INCLUDE OSDFLT\n"
"COMMAND DEF STOR RESERVED 63488M\n"
"CPU 00 BASE\n"
"IPL 0100\n"
"MDISK 0100 3390 5501 5500 OMB1BA MR\n")
replace_def.assert_called_once_with(userid, new_entry)
@mock.patch.object(smtclient.SMTClient, '_get_defined_memory')
@mock.patch.object(smtclient.SMTClient, '_lock_user_direct')
@mock.patch.object(smtclient.SMTClient, '_replace_user_direct')
def test_resize_memory_lock_failed(self, replace_def, lock_def, get_def):
userid = 'testuid'
size = '2g'
sample_definition = [u'USER TESTUID LBYONLY 4096M 64G G',
u'INCLUDE OSDFLT',
u'COMMAND DEF STOR RESERVED 61440M',
u'CPU 00 BASE',
u'IPL 0100',
u'MDISK 0100 3390 5501 5500 OMB1BA MR',
u'']
get_def.return_value = (4096, 65536, 61440, sample_definition)
lock_def.side_effect = exception.SDKSMTRequestFailed({}, 'err')
self.assertRaises(exception.SDKGuestOperationError,
self._smtclient.resize_memory, userid, size)
get_def.assert_called_once_with(userid)
lock_def.assert_called_once_with(userid)
replace_def.assert_not_called()
@mock.patch.object(smtclient.SMTClient, '_get_defined_memory')
@mock.patch.object(smtclient.SMTClient, '_lock_user_direct')
@mock.patch.object(smtclient.SMTClient, '_replace_user_direct')
def test_resize_memory_replace_failed(self, replace_def, lock_def,
get_def):
userid = 'testuid'
size = '2g'
sample_definition = [u'USER TESTUID LBYONLY 4096M 64G G',
u'INCLUDE OSDFLT',
u'COMMAND DEF STOR RESERVED 61440M',
u'CPU 00 BASE',
u'IPL 0100',
u'MDISK 0100 3390 5501 5500 OMB1BA MR',
u'']
get_def.return_value = (4096, 65536, 61440, sample_definition)
replace_def.side_effect = exception.SDKSMTRequestFailed({}, 'err')
self.assertRaises(exception.SDKGuestOperationError,
self._smtclient.resize_memory, userid, size)
get_def.assert_called_once_with(userid)
lock_def.assert_called_once_with(userid)
new_entry = ("USER TESTUID LBYONLY 2048M 64G G\n"
"INCLUDE OSDFLT\n"
"COMMAND DEF STOR RESERVED 63488M\n"
"CPU 00 BASE\n"
"IPL 0100\n"
"MDISK 0100 3390 5501 5500 OMB1BA MR\n")
replace_def.assert_called_once_with(userid, new_entry)
@mock.patch.object(smtclient.SMTClient, 'get_user_direct')
def test_get_defined_memory(self, get_user_direct):
userid = 'testuid'
sample_definition = [u'USER TESTUID LBYONLY 4096M 64G G',
u'INCLUDE OSDFLT',
u'COMMAND DEF STOR RESERVED 61440M',
u'CPU 00 BASE',
u'IPL 0100',
u'MDISK 0100 3390 5501 5500 OMB1BA MR',
u'']
get_user_direct.return_value = sample_definition
(defined_mem, max_mem, reserved_mem, user_direct) = \
self._smtclient._get_defined_memory(userid)
self.assertEqual(defined_mem, 4096)
self.assertEqual(max_mem, 65536)
self.assertEqual(reserved_mem, 61440)
self.assertListEqual(user_direct, sample_definition)
@mock.patch.object(smtclient.SMTClient, 'get_user_direct')
def test_get_defined_memory_reserved_not_defined(self, get_user_direct):
userid = 'testuid'
sample_definition = [u'USER TESTUID LBYONLY 4096M 64G G',
u'INCLUDE OSDFLT',
u'CPU 00 BASE',
u'IPL 0100',
u'MDISK 0100 3390 5501 5500 OMB1BA MR',
u'']
get_user_direct.return_value = sample_definition
(defined_mem, max_mem, reserved_mem, user_direct) = \
self._smtclient._get_defined_memory(userid)
self.assertEqual(defined_mem, 4096)
self.assertEqual(max_mem, 65536)
self.assertEqual(reserved_mem, -1)
self.assertListEqual(user_direct, sample_definition)
@mock.patch.object(smtclient.SMTClient, '_request')
def test_replace_user_direct_err(self, req):
userid = 'testuid'
user_entry = [u'USER TESTUID LBYONLY 4096M 64G G',
u'INCLUDE OSDFLT',
u'COMMAND DEF STOR RESERVED 61440M',
u'CPU 00 BASE',
u'IPL 0100',
u'MDISK 0100 3390 5501 5500 OMB1BA MR',
u'']
req.side_effect = [exception.SDKSMTRequestFailed({}, 'err'), ""]
self.assertRaises(exception.SDKSMTRequestFailed,
self._smtclient._replace_user_direct, userid,
user_entry)
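
    # Active memory is the 'Total online memory' figure reported by lsmem.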
@mock.patch.object(smtclient.SMTClient, 'execute_cmd')
def test_get_active_memory(self, execute_cmd):
userid = 'testuid'
sample_lsmem = [u'Address Range Size (MB) \
State Removable Device',
u'==================================================\
=============================',
u'0x0000000000000000-0x000000000fffffff 256 \
online no 0-1',
u'0x0000000010000000-0x000000003fffffff 768 \
online yes 2-7',
u'0x0000000040000000-0x000000007fffffff 1024 \
online no 8-15',
u'0x0000000080000000-0x00000000ffffffff 2048 \
online yes 16-31',
u'0x0000000100000000-0x0000000fffffffff 61440 \
offline - 32-511',
u'',
u'Memory device size : 128 MB',
u'Memory block size : 256 MB',
u'Total online memory : 4096 MB',
u'Total offline memory: 61440 MB'
]
execute_cmd.return_value = sample_lsmem
active_mem = self._smtclient._get_active_memory(userid)
self.assertEqual(active_mem, 4096)
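
    # Live memory growth defines the delta as standby storage ('vmcp def
    # storage standby') and onlines it with 'chmem -e'; on failure the
    # standby definition and the saved user directory are reverted.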
@mock.patch.object(smtclient.SMTClient, '_get_active_memory')
@mock.patch.object(smtclient.SMTClient, 'resize_memory')
def test_live_resize_memory_less(self, resize_mem, get_active_mem):
userid = 'testuid'
req_mem = "1g"
get_active_mem.return_value = 2048
self.assertRaises(exception.SDKConflictError,
self._smtclient.live_resize_memory, userid,
req_mem)
resize_mem.assert_not_called()
@mock.patch.object(smtclient.SMTClient, '_get_active_memory')
@mock.patch.object(smtclient.SMTClient, 'resize_memory')
@mock.patch.object(smtclient.SMTClient, 'execute_cmd')
def test_live_resize_memory_equal(self, exec_cmd, resize_mem,
get_active_mem):
userid = 'testuid'
req_mem = "2g"
get_active_mem.return_value = 2048
resize_mem.return_value = (1, 2048, 65536, [])
self._smtclient.live_resize_memory(userid, req_mem)
resize_mem.assert_called_once_with(userid, req_mem)
exec_cmd.assert_not_called()
@mock.patch.object(smtclient.SMTClient, '_get_active_memory')
@mock.patch.object(smtclient.SMTClient, 'resize_memory')
@mock.patch.object(smtclient.SMTClient, 'execute_cmd')
@mock.patch.object(smtclient.SMTClient, '_revert_user_direct')
def test_live_resize_memory_more(self, revert, exec_cmd, resize_mem,
get_active_mem):
userid = 'testuid'
req_mem = "4096m"
get_active_mem.return_value = 2048
resize_mem.return_value = (1, 2048, 65536, [])
exec_cmd.side_effect = ['', '']
self._smtclient.live_resize_memory(userid, req_mem)
resize_mem.assert_called_once_with(userid, req_mem)
def_standby_cmd = "vmcp def storage standby 2048M"
online_mem_cmd = "chmem -e 2048M"
exec_cmd.assert_has_calls([mock.call(userid, def_standby_cmd),
mock.call(userid, online_mem_cmd)])
revert.assert_not_called()
@mock.patch.object(smtclient.SMTClient, '_get_active_memory')
@mock.patch.object(smtclient.SMTClient, 'resize_memory')
@mock.patch.object(smtclient.SMTClient, 'execute_cmd')
@mock.patch.object(smtclient.SMTClient, '_revert_user_direct')
def test_live_resize_memory_standby_failed(self, revert, exec_cmd,
resize_mem, get_active_mem):
userid = 'testuid'
req_mem = "4096m"
get_active_mem.return_value = 2048
sample_direct = [u'USER TESTUID LBYONLY 2048M 64G G',
u'INCLUDE OSDFLT',
u'COMMAND DEF STOR RESERVED 61440M',
u'CPU 00 BASE',
u'IPL 0100',
u'MDISK 0100 3390 5501 5500 OMB1BA MR',
u'']
resize_mem.return_value = (1, 2048, 65536, sample_direct)
exec_cmd.side_effect = exception.SDKSMTRequestFailed({}, 'fake err')
self.assertRaises(exception.SDKGuestOperationError,
self._smtclient.live_resize_memory, userid,
req_mem)
resize_mem.assert_called_once_with(userid, req_mem)
def_standby_cmd = "vmcp def storage standby 2048M"
exec_cmd.assert_called_with(userid, def_standby_cmd)
revert.assert_called_once_with(userid, sample_direct)
@mock.patch.object(smtclient.SMTClient, '_get_active_memory')
@mock.patch.object(smtclient.SMTClient, 'resize_memory')
@mock.patch.object(smtclient.SMTClient, 'execute_cmd')
@mock.patch.object(smtclient.SMTClient, '_revert_user_direct')
def test_live_resize_memory_standby_failed_no_revert(self, revert,
exec_cmd,
resize_mem,
get_active_mem):
userid = 'testuid'
req_mem = "4096m"
get_active_mem.return_value = 2048
sample_direct = [u'USER TESTUID LBYONLY 4096M 64G G',
u'INCLUDE OSDFLT',
u'COMMAND DEF STOR RESERVED 61440M',
u'CPU 00 BASE',
u'IPL 0100',
u'MDISK 0100 3390 5501 5500 OMB1BA MR',
u'']
resize_mem.return_value = (0, 4096, 65536, sample_direct)
exec_cmd.side_effect = [exception.SDKSMTRequestFailed({}, 'fake err'),
'']
self.assertRaises(exception.SDKGuestOperationError,
self._smtclient.live_resize_memory, userid,
req_mem)
resize_mem.assert_called_once_with(userid, req_mem)
def_standby_cmd = "vmcp def storage standby 2048M"
exec_cmd.assert_called_with(userid, def_standby_cmd)
revert.assert_not_called()
@mock.patch.object(smtclient.SMTClient, '_get_active_memory')
@mock.patch.object(smtclient.SMTClient, 'resize_memory')
@mock.patch.object(smtclient.SMTClient, 'execute_cmd')
@mock.patch.object(smtclient.SMTClient, '_revert_user_direct')
def test_live_resize_memory_online_failed(self, revert,
exec_cmd,
resize_mem,
get_active_mem):
userid = 'testuid'
req_mem = "4096m"
get_active_mem.return_value = 2048
sample_direct = [u'USER TESTUID LBYONLY 4096M 64G G',
u'INCLUDE OSDFLT',
u'COMMAND DEF STOR RESERVED 61440M',
u'CPU 00 BASE',
u'IPL 0100',
u'MDISK 0100 3390 5501 5500 OMB1BA MR',
u'']
resize_mem.return_value = (1, 4096, 65536, sample_direct)
exec_cmd.side_effect = ['',
exception.SDKSMTRequestFailed({}, 'fake err'),
'']
self.assertRaises(exception.SDKGuestOperationError,
self._smtclient.live_resize_memory, userid,
req_mem)
resize_mem.assert_called_once_with(userid, req_mem)
def_standby_cmd = "vmcp def storage standby 2048M"
online_mem_cmd = "chmem -e 2048M"
revert_standby_cmd = "vmcp def storage standby 0M"
exec_cmd.assert_has_calls([mock.call(userid, def_standby_cmd),
mock.call(userid, online_mem_cmd),
mock.call(userid, revert_standby_cmd)])
revert.assert_called_once_with(userid, sample_direct)
| {
"content_hash": "dda6c3126df98ca97f8b00402f3e4163",
"timestamp": "",
"source": "github",
"line_count": 2756,
"max_line_length": 79,
"avg_line_length": 50.16727140783745,
"alnum_prop": 0.5318925799755535,
"repo_name": "mfcloud/python-zvm-sdk",
"id": "011c0f994abb16d4d7f9577b4ed2183f8d5782d4",
"size": "138869",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "zvmsdk/tests/unit/test_smtclient.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "2538773"
},
{
"name": "C++",
"bytes": "952"
},
{
"name": "Makefile",
"bytes": "50059"
},
{
"name": "Python",
"bytes": "1590423"
},
{
"name": "Shell",
"bytes": "145392"
},
{
"name": "Smarty",
"bytes": "10342"
}
],
"symlink_target": ""
} |
import datetime
import rmc.models as m
import rmc.test.lib as testlib
class SimpleCourseAlert(m.BaseCourseAlert):
def send_alert(self, sections):
return True
class BaseCourseAlertTest(testlib.FixturesTestCase):
def tearDown(self):
# Clear DB for other tests
SimpleCourseAlert.objects.delete()
super(BaseCourseAlertTest, self).tearDown()
def test_send_eligible_alerts(self):
# This class is full. Should not alert anything.
alert = SimpleCourseAlert(
course_id='spcom223',
created_date=datetime.datetime.now(),
expiry_date=datetime.datetime.max,
term_id='2014_01',
section_type='LEC',
section_num='003',
)
alert.save()
alerts_sent = SimpleCourseAlert.send_eligible_alerts()
self.assertEqual(alerts_sent, 0)
self.assertEqual(SimpleCourseAlert.objects.count(), 1)
# Here's a non-full class to alert on.
alert = SimpleCourseAlert(
course_id='spcom223',
created_date=datetime.datetime.now(),
expiry_date=datetime.datetime.max,
term_id='2014_01',
section_type='LEC',
section_num='002',
)
alert.save()
self.assertEqual(SimpleCourseAlert.objects.count(), 2)
alerts_sent = SimpleCourseAlert.send_eligible_alerts()
self.assertEqual(alerts_sent, 1)
self.assertEqual(SimpleCourseAlert.objects.count(), 1)
# Here's a less restrictive query with multiple available sections
alert = SimpleCourseAlert(
course_id='spcom223',
created_date=datetime.datetime.now(),
expiry_date=datetime.datetime.max,
)
alert.save()
self.assertEqual(SimpleCourseAlert.objects.count(), 2)
alerts_sent = SimpleCourseAlert.send_eligible_alerts()
self.assertEqual(alerts_sent, 1)
self.assertEqual(SimpleCourseAlert.objects.count(), 1)
def test_delete_expired(self):
self.assertEqual(SimpleCourseAlert.objects.count(), 0)
SimpleCourseAlert(
course_id='spcom223',
created_date=datetime.datetime.now(),
expiry_date=datetime.datetime.min,
).save()
SimpleCourseAlert(
course_id='cs241',
created_date=datetime.datetime.now(),
expiry_date=datetime.datetime.max,
).save()
self.assertEqual(SimpleCourseAlert.objects.count(), 2)
SimpleCourseAlert.delete_expired()
self.assertEqual(SimpleCourseAlert.objects.count(), 1)
self.assertEqual(SimpleCourseAlert.objects[0].course_id, 'cs241')
| {
"content_hash": "61320ede57ffa19249a4009b8b53d4f2",
"timestamp": "",
"source": "github",
"line_count": 83,
"max_line_length": 74,
"avg_line_length": 32.626506024096386,
"alnum_prop": 0.6240768094534712,
"repo_name": "sachdevs/rmc",
"id": "ff9fe7092656f18110ef0af5dff3fb2e637fe671",
"size": "2708",
"binary": false,
"copies": "8",
"ref": "refs/heads/master",
"path": "models/course_alert_test.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "241561"
},
{
"name": "HTML",
"bytes": "101360"
},
{
"name": "JavaScript",
"bytes": "1894886"
},
{
"name": "Makefile",
"bytes": "3637"
},
{
"name": "Python",
"bytes": "382754"
},
{
"name": "Ruby",
"bytes": "1008"
},
{
"name": "Shell",
"bytes": "30562"
}
],
"symlink_target": ""
} |
import mechanize
import requests
import os
import argparse
import csv
# Sample for getting Spring Semester of 2015:
# python scraper.py -c true -ft 1 -fy 2015 -lt 1 -ly 2015
# 1 is Spring, 4 is Summer, 7 is Fall
parser = argparse.ArgumentParser()
# If we want to do conversion. Note: argparse's type=bool treats any
# non-empty string as True, so '-c false' still enables conversion.
parser.add_argument('-c', '--convert', nargs='+', type=bool)
#ft is the first term, in this case spring
parser.add_argument('-ft', '--firstterm', nargs='+', type=int)
#lt is the last term, begin with spring(1)
parser.add_argument('-lt', '--lastterm', nargs='+', type=int)
# First years are the years each download's term range starts with. For example:
# if fyears are 2013, 2014 and last years are 2014, 2015, we will download 2013-2014 and 2014-2015
parser.add_argument('-fy', '--firstyears', nargs='+', type=int)
parser.add_argument('-ly', '--lastyears', nargs='+', type=int)
arguments = parser.parse_args()
# This is the boilerplate for the browser
br = mechanize.Browser()
br.set_handle_robots(False) # ignore robots
br.set_handle_refresh(False) # can sometimes hang without this
br.addheaders = [('User-agent', 'Firefox')]
#this sets the department we want, in this case we use entire campus
fcqdpt = 'BD : Entire Campus ## BD'
# fcqdpt = 'AS : MATHEMATICS -- MATH'
# 1 is Spring, 4 is Summer, 7 is Fall
#fterm is the first term, in this case spring
#lterm is the last term, spring as well.
#ftrm = '1'
ftrm = str(arguments.firstterm[0])
#ltrm = '1'
ltrm = str(arguments.lastterm[0])
fileFrmt = 'XLS'
#years that we are interested in, from first years to last years
fyrs = map(str,arguments.firstyears)
lyrs = map(str,arguments.lastyears)
#fyrs = ['2013', '2012', '2011', '2010', '2009', '2008', '2007']
#lyrs = ['2014', '2013', '2012', '2011', '2010', '2009', '2008']
# The instructor group grp1=[*ALL, TTT, OTH, T_O, TA]
grp1 = 'ALL'
def convert_csv(input_file,output_file):
with open(input_file, 'rb') as f:
with open(output_file,'w') as f1:
# f.next() # skip header line
first = True
for line in f:
if first:
# This is the custom header line that our import task wants.
f1.write('yearterm,subject,crse,sec,onlinefcq,bdcontinedcrse,instructor_last,instructor_first,formsrequested,formsreturned,percentage_passed,courseoverall,courseoverall_sd,instructoroverall,instructoroverall_sd,hoursperwkinclclass,priorinterest,instreffective,availability,challenge,howmuchlearned,instrrespect,course_title,courseoverall_old,courseoverall_sd_old,instroverall_old,instroverall_sd_old,r_fair,r_access,workload,r_divstu,r_diviss,r_presnt,r_explan,r_assign,r_motiv,r_learn,r_complx,campus,college,asdiv,level,fcqdept,instr_group,i_num\n')
# f1.write('yearterm,subject,crse,sec,onlineFCQ,bd_continuing_education,instructor_last,instructor_first,formsrequested,formsreturned,percentage_passed,course_overall,course_overall_SD,instructoroverall,instructoroverall_SD,total_hours,prior_interest,effectiveness,availability,challenge,amount_learned,respect,course_title,courseOverall_old,courseOverall_SD_old,instrOverall_old,instrOverall_SD_old,r_Fair,r_Access,workload,r_Divstu,r_Diviss,r_Presnt,r_Explan,r_Assign,r_Motiv,r_Learn,r_Complx,campus,college,aSdiv,level,fcqdept,instr_group,i_Num\n')
first = False
else:
# Replace double quotes with nothing, replace spaced commas with normal commas, and replace the big comma chunk with a bigger one to match our header.
line = line.replace('"','').replace(',,,,,,',',,,,,,,,,,,,,,,',1).replace(', ',',',1).replace(', ',' ')
f1.write(line)
print("DONE")
for i in range (0,len(fyrs)):
#the url for the fcq site
url = 'https://fcq.colorado.edu/UCBdata.htm'
#we open the url
response = br.open(url)
control = br.select_form("frmFCQ")
#go through all of the form options so we can change them
for control in br.form.controls:
# This will show us all of the default values for the fields. See page options for the output of the FCQ page as of 1/3/15.
#print "DEFAULT: type=%s, name=%s value=%s" % (control.type, control.name, br[control.name])
if (control.name == 'fcqdpt'):
br[control.name] = [fcqdpt]
print "CHANGE fcqdpt type=%s, name=%s value=%s" % (control.type, control.name, br[control.name])
elif (control.name == 'ftrm'):
br[control.name] = [ftrm]
print "CHANGE first term type=%s, name=%s value=%s" % (control.type, control.name, br[control.name])
elif (control.name == 'ltrm'):
br[control.name] = [ltrm]
print "CHANGE last term type=%s, name=%s value=%s" % (control.type, control.name, br[control.name])
elif (control.name == 'fileFrmt'):
br[control.name] = [fileFrmt]
print "CHANGE fileFrmt type=%s, name=%s value=%s" % (control.type, control.name, br[control.name])
elif (control.name == 'fyr'):
br[control.name] = [fyrs[i]]
print "CHANGE first year type=%s, name=%s value=%s" % (control.type, control.name, br[control.name])
elif (control.name == 'lyr'):
br[control.name] = [lyrs[i]]
print "CHANGE last year type=%s, name=%s value=%s" % (control.type, control.name, br[control.name])
response = br.submit()
if 'Currently, Excel files with FCQ results are limited to 50,000 rows.' in response.read():
print('CAUTION REQUEST HAS MORE THAN 50,000 LINES!')
for link in br.links():
if (link.text == 'Click here'):
print(link.url)
original = link.url
#need to split because output link is: javascript:popup('/fcqtemp/BD010395426.xls','BD010395426','750','550','yes','yes')
split = original.replace("\'","").split(',')
#get this BD010395426
proper_link = "https://fcq.colorado.edu/fcqtemp/{xcel}.xls".format(xcel=split[1])
print proper_link
r = requests.get(proper_link)
file_name = "{ftrm}-{fyr}_{ltrm}-{lyr}".format(ftrm=ftrm,fyr=fyrs[i],ltrm=ltrm,lyr=lyrs[i])
xcel_path = "../raw/{file_name}.xls".format(file_name=file_name)
output = open(xcel_path,'wb')
output.write(r.content)
output.close()
csv_path = "../fcq/{file_name}.csv".format(file_name=file_name)
convert_command = "ssconvert -S {path} temp.csv".format(path=xcel_path)
rm_command = "rm temp.csv.*"
if arguments.convert:
os.system(convert_command)
convert_csv('temp.csv.1',csv_path)
os.system(rm_command)
print "Converted!"
else:
print "Did not convert!"
# convert_csv is deliberately not called when --convert is not set
print("SUCCESS!")
| {
"content_hash": "ba947933d7dbccef6d1497475f9fcccc",
"timestamp": "",
"source": "github",
"line_count": 150,
"max_line_length": 571,
"avg_line_length": 44.84,
"alnum_prop": 0.6504608980077312,
"repo_name": "antsankov/cufcq",
"id": "675818b22f4db483153c5d551b63a7a3844ef439",
"size": "6862",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "data/scraper/scraper.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "17426"
},
{
"name": "CoffeeScript",
"bytes": "1477"
},
{
"name": "HTML",
"bytes": "104750"
},
{
"name": "JavaScript",
"bytes": "17055"
},
{
"name": "Python",
"bytes": "6862"
},
{
"name": "Ruby",
"bytes": "117711"
},
{
"name": "Shell",
"bytes": "6130"
}
],
"symlink_target": ""
} |
import hashlib, inspect, os, random, sys
# Gets the secret token provided by OpenShift
# or generates one (this is slightly less secure, but good enough for now)
def get_openshift_secret_token():
token = os.getenv('OPENSHIFT_SECRET_TOKEN')
name = os.getenv('OPENSHIFT_APP_NAME')
uuid = os.getenv('OPENSHIFT_APP_UUID')
if token is not None:
return token
elif (name is not None and uuid is not None):
return hashlib.sha256(name + '-' + uuid).hexdigest()
return None
# Loop through all provided variables and generate secure versions
# If not running on OpenShift, returns defaults and logs an error message
#
# This function calls secure_function and passes an array of:
# {
# 'hash': generated sha hash,
# 'variable': name of variable,
# 'original': original value
# }
def openshift_secure(default_keys, secure_function = 'make_secure_key'):
# Attempts to get secret token
my_token = get_openshift_secret_token()
# Only generate random values if on OpenShift
my_list = default_keys
if my_token is not None:
# Loop over each default_key and set the new value
for key, value in default_keys.iteritems():
# Create hash out of token and this key's name
sha = hashlib.sha256(my_token + '-' + key).hexdigest()
# Pass a dictionary so we can add stuff without breaking existing calls
vals = { 'hash': sha, 'variable': key, 'original': value }
# Call user specified function or just return hash
my_list[key] = sha
if secure_function is not None:
# Pick through the global and local scopes to find the function.
possibles = globals().copy()
possibles.update(locals())
supplied_function = possibles.get(secure_function)
if not supplied_function:
raise Exception("Cannot find supplied security function")
else:
my_list[key] = supplied_function(vals)
else:
calling_file = inspect.stack()[1][1]
if os.getenv('OPENSHIFT_REPO_DIR'):
base = os.getenv('OPENSHIFT_REPO_DIR')
# str.replace returns a new string, so keep the result
calling_file = calling_file.replace(base, '')
sys.stderr.write("OPENSHIFT WARNING: Using default values for secure variables, please manually modify in " + calling_file + "\n")
return my_list
# This function transforms default keys into per-deployment random keys;
def make_secure_key(key_info):
hashcode = key_info['hash']
key = key_info['variable']
original = key_info['original']
# These are the legal password characters
# as per the Django source code
# (django/contrib/auth/models.py)
chars = 'abcdefghjkmnpqrstuvwxyz'
chars += 'ABCDEFGHJKLMNPQRSTUVWXYZ'
chars += '23456789'
# Use the hash to seed the RNG
random.seed(int("0x" + hashcode[:8], 0))
# Create a random string the same length as the default
rand_key = ''
for _ in range(len(original)):
# randint is inclusive at both ends; len(chars) would slice past the end
# and silently shorten the key, so cap the index at len(chars) - 1
rand_pos = random.randint(0, len(chars) - 1)
rand_key += chars[rand_pos]
# Reset the RNG
random.seed()
# Set the value
return rand_key
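# A minimal usage sketch (not part of the original module; the key name and
# default value below are hypothetical placeholders).
if __name__ == '__main__':
    defaults = {'SECRET_KEY': 'default-insecure-key-123456'}
    # Off OpenShift this logs a warning to stderr and returns the defaults
    # unchanged; on OpenShift each value becomes a deterministic per-app key
    # of the same length, derived from the secret token.
    print(openshift_secure(defaults))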
| {
"content_hash": "5c3c5dc42e1521fe4481269ea1e0472b",
"timestamp": "",
"source": "github",
"line_count": 85,
"max_line_length": 138,
"avg_line_length": 37.01176470588236,
"alnum_prop": 0.647488874761602,
"repo_name": "cansoftinc/bmprototype",
"id": "a75be1830a73c2d455cf43b014026eda6904a15f",
"size": "3168",
"binary": false,
"copies": "18",
"ref": "refs/heads/master",
"path": "wsgi/openshift/openshiftlibs.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "14692"
}
],
"symlink_target": ""
} |
import aggravator
import yaml
from click.testing import CliRunner
def test_list():
runner = CliRunner()
result = runner.invoke(aggravator.cli, [
'--vault-password-file=/dev/null',
'--uri=example/config.yml',
'--env=prod',
'--list'
])
assert result.exit_code == 0
data = yaml.safe_load(result.output)
assert type(data) is dict
assert type(data['all']) is dict
assert type(data['all']['vars']) is dict
assert data['all']['vars']['platform_name'] == 'prod'
| {
"content_hash": "b34482943e9248178649ee0cfdc63a80",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 57,
"avg_line_length": 28.77777777777778,
"alnum_prop": 0.61003861003861,
"repo_name": "petercb/aggravator",
"id": "5ea7feb2626e99c2829bfceafcaa91e573603528",
"size": "518",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_list.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "16521"
}
],
"symlink_target": ""
} |
import os.path
import sys
APP_PATH = os.path.dirname(os.path.abspath(__file__))
PROJECT_PATH = os.path.dirname(APP_PATH)
sys.path.append(APP_PATH)
for p in os.listdir(os.path.join(PROJECT_PATH, "env/lib/")):
python_lib = os.path.join(PROJECT_PATH, "env/lib/%s/site-packages" % p)
sys.path.append(python_lib)
| {
"content_hash": "fec3b0b5707763c0695fe8a05eca62e8",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 75,
"avg_line_length": 31.8,
"alnum_prop": 0.7012578616352201,
"repo_name": "nyodas/enjoliver",
"id": "a1d4e2d48f419145fca19e2acb482591d34b7c2e",
"size": "318",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "app/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "32"
},
{
"name": "Go",
"bytes": "32073"
},
{
"name": "HTML",
"bytes": "2485"
},
{
"name": "JavaScript",
"bytes": "2460"
},
{
"name": "Makefile",
"bytes": "41832"
},
{
"name": "Python",
"bytes": "462357"
},
{
"name": "Roff",
"bytes": "1036"
},
{
"name": "Shell",
"bytes": "52881"
}
],
"symlink_target": ""
} |
"Test replace, coverage 78%."
from idlelib.replace import ReplaceDialog
import unittest
from test.support import requires
requires('gui')
from tkinter import Tk, Text
from unittest.mock import Mock
from idlelib.idle_test.mock_tk import Mbox
import idlelib.searchengine as se
orig_mbox = se.tkMessageBox
showerror = Mbox.showerror
class ReplaceDialogTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.root = Tk()
cls.root.withdraw()
se.tkMessageBox = Mbox
cls.engine = se.SearchEngine(cls.root)
cls.dialog = ReplaceDialog(cls.root, cls.engine)
cls.dialog.bell = lambda: None
cls.dialog.ok = Mock()
cls.text = Text(cls.root)
cls.text.undo_block_start = Mock()
cls.text.undo_block_stop = Mock()
cls.dialog.text = cls.text
@classmethod
def tearDownClass(cls):
se.tkMessageBox = orig_mbox
del cls.text, cls.dialog, cls.engine
cls.root.destroy()
del cls.root
def setUp(self):
self.text.insert('insert', 'This is a sample sTring')
def tearDown(self):
self.engine.patvar.set('')
self.dialog.replvar.set('')
self.engine.wordvar.set(False)
self.engine.casevar.set(False)
self.engine.revar.set(False)
self.engine.wrapvar.set(True)
self.engine.backvar.set(False)
showerror.title = ''
showerror.message = ''
self.text.delete('1.0', 'end')
def test_replace_simple(self):
# Test replace function with all options at default setting.
# Wrap around - True
# Regular Expression - False
# Match case - False
# Match word - False
# Direction - Forwards
text = self.text
equal = self.assertEqual
pv = self.engine.patvar
rv = self.dialog.replvar
replace = self.dialog.replace_it
# test accessor method
self.engine.setpat('asdf')
equal(self.engine.getpat(), pv.get())
# text found and replaced
pv.set('a')
rv.set('asdf')
replace()
equal(text.get('1.8', '1.12'), 'asdf')
# don't "match word" case
text.mark_set('insert', '1.0')
pv.set('is')
rv.set('hello')
replace()
equal(text.get('1.2', '1.7'), 'hello')
# don't "match case" case
pv.set('string')
rv.set('world')
replace()
equal(text.get('1.23', '1.28'), 'world')
# without "regular expression" case
text.mark_set('insert', 'end')
text.insert('insert', '\nline42:')
before_text = text.get('1.0', 'end')
pv.set(r'[a-z][\d]+')
replace()
after_text = text.get('1.0', 'end')
equal(before_text, after_text)
# test with wrap around selected and complete a cycle
text.mark_set('insert', '1.9')
pv.set('i')
rv.set('j')
replace()
equal(text.get('1.8'), 'i')
equal(text.get('2.1'), 'j')
replace()
equal(text.get('2.1'), 'j')
equal(text.get('1.8'), 'j')
before_text = text.get('1.0', 'end')
replace()
after_text = text.get('1.0', 'end')
equal(before_text, after_text)
# text not found
before_text = text.get('1.0', 'end')
pv.set('foobar')
replace()
after_text = text.get('1.0', 'end')
equal(before_text, after_text)
# test access method
self.dialog.find_it(0)
def test_replace_wrap_around(self):
text = self.text
equal = self.assertEqual
pv = self.engine.patvar
rv = self.dialog.replvar
replace = self.dialog.replace_it
self.engine.wrapvar.set(False)
# replace candidate found both after and before 'insert'
text.mark_set('insert', '1.4')
pv.set('i')
rv.set('j')
replace()
equal(text.get('1.2'), 'i')
equal(text.get('1.5'), 'j')
replace()
equal(text.get('1.2'), 'i')
equal(text.get('1.20'), 'j')
replace()
equal(text.get('1.2'), 'i')
# replace candidate found only before 'insert'
text.mark_set('insert', '1.8')
pv.set('is')
before_text = text.get('1.0', 'end')
replace()
after_text = text.get('1.0', 'end')
equal(before_text, after_text)
def test_replace_whole_word(self):
text = self.text
equal = self.assertEqual
pv = self.engine.patvar
rv = self.dialog.replvar
replace = self.dialog.replace_it
self.engine.wordvar.set(True)
pv.set('is')
rv.set('hello')
replace()
equal(text.get('1.0', '1.4'), 'This')
equal(text.get('1.5', '1.10'), 'hello')
def test_replace_match_case(self):
equal = self.assertEqual
text = self.text
pv = self.engine.patvar
rv = self.dialog.replvar
replace = self.dialog.replace_it
self.engine.casevar.set(True)
before_text = self.text.get('1.0', 'end')
pv.set('this')
rv.set('that')
replace()
after_text = self.text.get('1.0', 'end')
equal(before_text, after_text)
pv.set('This')
replace()
equal(text.get('1.0', '1.4'), 'that')
def test_replace_regex(self):
equal = self.assertEqual
text = self.text
pv = self.engine.patvar
rv = self.dialog.replvar
replace = self.dialog.replace_it
self.engine.revar.set(True)
before_text = text.get('1.0', 'end')
pv.set(r'[a-z][\d]+')
rv.set('hello')
replace()
after_text = text.get('1.0', 'end')
equal(before_text, after_text)
text.insert('insert', '\nline42')
replace()
equal(text.get('2.0', '2.8'), 'linhello')
pv.set('')
replace()
self.assertIn('error', showerror.title)
self.assertIn('Empty', showerror.message)
pv.set(r'[\d')
replace()
self.assertIn('error', showerror.title)
self.assertIn('Pattern', showerror.message)
showerror.title = ''
showerror.message = ''
pv.set('[a]')
rv.set('test\\')
replace()
self.assertIn('error', showerror.title)
self.assertIn('Invalid Replace Expression', showerror.message)
# test access method
self.engine.setcookedpat("?")
equal(pv.get(), "\\?")
def test_replace_backwards(self):
equal = self.assertEqual
text = self.text
pv = self.engine.patvar
rv = self.dialog.replvar
replace = self.dialog.replace_it
self.engine.backvar.set(True)
text.insert('insert', '\nis as ')
pv.set('is')
rv.set('was')
replace()
equal(text.get('1.2', '1.4'), 'is')
equal(text.get('2.0', '2.3'), 'was')
replace()
equal(text.get('1.5', '1.8'), 'was')
replace()
equal(text.get('1.2', '1.5'), 'was')
def test_replace_all(self):
text = self.text
pv = self.engine.patvar
rv = self.dialog.replvar
replace_all = self.dialog.replace_all
text.insert('insert', '\n')
text.insert('insert', text.get('1.0', 'end')*100)
pv.set('is')
rv.set('was')
replace_all()
self.assertNotIn('is', text.get('1.0', 'end'))
self.engine.revar.set(True)
pv.set('')
replace_all()
self.assertIn('error', showerror.title)
self.assertIn('Empty', showerror.message)
pv.set('[s][T]')
rv.set('\\')
replace_all()
self.engine.revar.set(False)
pv.set('text which is not present')
rv.set('foobar')
replace_all()
def test_default_command(self):
text = self.text
pv = self.engine.patvar
rv = self.dialog.replvar
replace_find = self.dialog.default_command
equal = self.assertEqual
pv.set('This')
rv.set('was')
replace_find()
equal(text.get('sel.first', 'sel.last'), 'was')
self.engine.revar.set(True)
pv.set('')
replace_find()
if __name__ == '__main__':
unittest.main(verbosity=2)
| {
"content_hash": "7288ef06a40d06586a45c7b131bdeb97",
"timestamp": "",
"source": "github",
"line_count": 294,
"max_line_length": 70,
"avg_line_length": 28.24829931972789,
"alnum_prop": 0.5403973509933775,
"repo_name": "batermj/algorithm-challenger",
"id": "c3c5d2eeb94998bdfb643eaabdf8e8eec213ce98",
"size": "8305",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "code-analysis/programming_anguage/python/source_codes/Python3.8.0/Python-3.8.0/Lib/idlelib/idle_test/test_replace.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "655185"
},
{
"name": "Batchfile",
"bytes": "127416"
},
{
"name": "C",
"bytes": "33127630"
},
{
"name": "C++",
"bytes": "1364796"
},
{
"name": "CSS",
"bytes": "3163"
},
{
"name": "Common Lisp",
"bytes": "48962"
},
{
"name": "DIGITAL Command Language",
"bytes": "26402"
},
{
"name": "DTrace",
"bytes": "2196"
},
{
"name": "Go",
"bytes": "26248"
},
{
"name": "HTML",
"bytes": "385719"
},
{
"name": "Haskell",
"bytes": "33612"
},
{
"name": "Java",
"bytes": "1084"
},
{
"name": "JavaScript",
"bytes": "20754"
},
{
"name": "M4",
"bytes": "403992"
},
{
"name": "Makefile",
"bytes": "238185"
},
{
"name": "Objective-C",
"bytes": "4934684"
},
{
"name": "PHP",
"bytes": "3513"
},
{
"name": "PLSQL",
"bytes": "45772"
},
{
"name": "Perl",
"bytes": "649"
},
{
"name": "PostScript",
"bytes": "27606"
},
{
"name": "PowerShell",
"bytes": "21737"
},
{
"name": "Python",
"bytes": "55270625"
},
{
"name": "R",
"bytes": "29951"
},
{
"name": "Rich Text Format",
"bytes": "14551"
},
{
"name": "Roff",
"bytes": "292490"
},
{
"name": "Ruby",
"bytes": "519"
},
{
"name": "Scala",
"bytes": "846446"
},
{
"name": "Shell",
"bytes": "491113"
},
{
"name": "Swift",
"bytes": "881"
},
{
"name": "TeX",
"bytes": "337654"
},
{
"name": "VBScript",
"bytes": "140"
},
{
"name": "XSLT",
"bytes": "153"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# no unicode literals
import WatchmanTestCase
import tempfile
import os
import os.path
class TestSince(WatchmanTestCase.WatchmanTestCase):
def test_sinceIssue1(self):
root = self.mkdtemp()
self.touchRelative(root, '111')
self.touchRelative(root, '222')
self.watchmanCommand('watch', root)
self.assertFileList(root, ['111', '222'])
# Create a cursor for this state
self.watchmanCommand('since', root, 'n:foo')
bar_dir = os.path.join(root, 'bar')
os.mkdir(bar_dir)
self.touchRelative(bar_dir, '333')
self.waitForSync(root)
# We should not observe 111 or 222
self.assertFileList(root, cursor='n:foo', files=['bar', 'bar/333'])
def test_sinceIssue2(self):
root = self.mkdtemp()
watch = self.watchmanCommand('watch', root)
self.assertFileList(root, files=[])
foo_dir = os.path.join(root, 'foo')
os.mkdir(foo_dir)
self.touchRelative(foo_dir, '111')
self.waitForSync(root)
self.assertFileList(root, cursor='n:foo', files=['foo', 'foo/111'])
bar_dir = os.path.join(foo_dir, 'bar')
os.mkdir(bar_dir)
self.touchRelative(bar_dir, '222')
# wait until we observe all the files
self.assertFileList(root, files=[
'foo',
'foo/111',
'foo/bar',
'foo/bar/222'])
# now check the delta for the since
expected = ['foo/bar', 'foo/bar/222']
if watch['watcher'] in ('win32', 'portfs', 'kqueue'):
# These systems also show the containing dir as modified
expected.append('foo')
self.assertFileList(root, cursor='n:foo', files=expected)
def test_sinceRelativeRoot(self):
root = self.mkdtemp()
self.watchmanCommand('watch', root)
clock = self.watchmanCommand('clock', root)['clock']
self.touchRelative(root, 'a')
os.mkdir(os.path.join(root, 'subdir'))
self.touchRelative(os.path.join(root, 'subdir'), 'foo')
self.assertFileList(root, files=[
'a',
'subdir',
'subdir/foo'])
res = self.watchmanCommand('query', root, {
'since': clock,
'relative_root': 'subdir',
'fields': ['name']})
self.assertEqual(self.normWatchmanFileList(res['files']),
self.normFileList(['foo']))
# touch a file outside the relative root
self.touchRelative(root, 'b')
self.assertFileList(root, files=[
'a',
'b',
'subdir',
'subdir/foo'])
res = self.watchmanCommand('query', root, {
'since': res['clock'],
'relative_root': 'subdir',
'fields': ['name']})
expect = []
# Filter out 'foo' as some operating systems may report
# it and others may not. We're not interested in it here.
self.assertEqual(self.normFileList(
filter(lambda x: x != 'foo', res['files'])), expect)
# touching just the subdir shouldn't cause anything to show up
self.touchRelative(root, 'subdir')
self.waitForSync(root)
res = self.watchmanCommand('query', root, {
'since': res['clock'],
'relative_root': 'subdir',
'fields': ['name']})
self.assertEqual(self.normWatchmanFileList(res['files']), [])
# touching a new file inside the subdir should cause it to show up
dir2 = os.path.join(root, 'subdir', 'dir2')
os.mkdir(dir2)
self.touchRelative(dir2, 'bar')
self.waitForSync(root)
res = self.watchmanCommand('query', root, {
'since': res['clock'],
'relative_root': 'subdir',
'fields': ['name']})
self.assertEqual(self.normWatchmanFileList(res['files']),
self.normFileList(['dir2', 'dir2/bar']))
def assertFreshInstanceForSince(self, root, cursor, empty=False):
res = self.watchmanCommand('query', root, {
'since': cursor,
'fields': ['name'],
'empty_on_fresh_instance': empty})
self.assertTrue(res['is_fresh_instance'])
if empty:
self.assertEqual(self.normWatchmanFileList(res['files']), [])
else:
self.assertEqual(self.normWatchmanFileList(res['files']),
self.normFileList(['111']))
def test_sinceFreshInstance(self):
root = self.mkdtemp()
self.watchmanCommand('watch', root)
self.assertFileList(root, [])
self.touchRelative(root, '111')
res = self.watchmanCommand('query', root, {
'fields': ['name']})
self.assertTrue(res['is_fresh_instance'])
self.assertEqual(self.normWatchmanFileList(res['files']),
self.normFileList(['111']))
# relative clock value, fresh instance
self.assertFreshInstanceForSince(root, 'c:0:1:0:1', False)
# old-style clock value (implies fresh instance, even if the
# pid is the same)
pid = self.watchmanCommand('get-pid')['pid']
self.assertFreshInstanceForSince(root, 'c:%s:1' % pid, False)
# -- decompose clock and replace elements one by one
clock = self.decodeBSERUTF8(
self.watchmanCommand('clock', root)['clock'])
p = clock.split(':')
# ['c', startTime, pid, rootNum, ticks]
self.assertEqual(len(p), 5)
# replace start time
self.assertFreshInstanceForSince(root,
':'.join(
['c', '0', p[2], p[3], p[4]]),
False)
# replace pid
self.assertFreshInstanceForSince(root,
':'.join(
['c', p[1], '1', p[3], p[4]]),
False)
# replace root number (also try empty_on_fresh_instance)
self.assertFreshInstanceForSince(root,
':'.join(
['c', p[1], p[2], '0', p[4]]),
True)
# empty_on_fresh_instance, not a fresh instance
self.touchRelative(root, '222')
res = self.watchmanCommand('query', root, {
'since': clock,
'fields': ['name'],
'empty_on_fresh_instance': True})
self.assertFalse(res['is_fresh_instance'])
self.assertEqual(self.normWatchmanFileList(res['files']),
self.normFileList(['222']))
# fresh instance results should omit deleted files
os.unlink(os.path.join(root, '111'))
res = self.watchmanCommand('query', root, {
'since': 'c:0:1:0:1',
'fields': ['name']})
self.assertTrue(res['is_fresh_instance'])
self.assertEqual(self.normWatchmanFileList(res['files']),
self.normFileList(['222']))
def test_reAddWatchFreshInstance(self):
root = self.mkdtemp()
self.watchmanCommand('watch', root)
self.assertFileList(root, [])
self.touchRelative(root, '111')
res = self.watchmanCommand('query', root, {
'fields': ['name']})
self.assertTrue(res['is_fresh_instance'])
self.assertEqual(self.normWatchmanFileList(res['files']),
self.normFileList(['111']))
clock = res['clock']
os.unlink(os.path.join(root, '111'))
self.watchmanCommand('watch-del', root)
self.watchmanCommand('watch', root)
self.touchRelative(root, '222')
# wait for touch to be observed
self.assertFileList(root, ['222'])
# ensure that our since query is a fresh instance
res = self.watchmanCommand('query', root, {
'since': clock,
'fields': ['name']})
self.assertTrue(res['is_fresh_instance'])
self.assertEqual(self.normWatchmanFileList(res['files']),
self.normFileList(['222']))
def test_recrawlFreshInstance(self):
root = self.mkdtemp()
self.watchmanCommand('watch', root)
self.touchRelative(root, '111')
self.assertFileList(root, ['111'])
res = self.watchmanCommand('query', root, {
'fields': ['name']})
self.assertTrue(res['is_fresh_instance'])
clock = res['clock']
os.unlink(os.path.join(root, '111'))
self.watchmanCommand('debug-recrawl', root)
self.touchRelative(root, '222')
res = self.watchmanCommand('query', root, {
'since': clock,
'fields': ['name']})
self.assertTrue(res['is_fresh_instance'])
self.assertEqual(self.normWatchmanFileList(res['files']),
self.normFileList(['222']))
warning = self.decodeBSERUTF8(res['warning'])
self.assertRegexpMatches(warning, 'Recrawled this watch')
| {
"content_hash": "ea15b8c81993997105c8fc2162d36057",
"timestamp": "",
"source": "github",
"line_count": 250,
"max_line_length": 75,
"avg_line_length": 36.944,
"alnum_prop": 0.545257687310524,
"repo_name": "dhruvsinghal/watchman",
"id": "a5d8a75e201692d42f49b2c059af019bd3a91d49",
"size": "9346",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/integration/test_since.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "725"
},
{
"name": "C",
"bytes": "733960"
},
{
"name": "CSS",
"bytes": "40936"
},
{
"name": "HTML",
"bytes": "37547"
},
{
"name": "Java",
"bytes": "152559"
},
{
"name": "JavaScript",
"bytes": "38082"
},
{
"name": "M4",
"bytes": "9662"
},
{
"name": "Makefile",
"bytes": "12645"
},
{
"name": "PHP",
"bytes": "152384"
},
{
"name": "Python",
"bytes": "172353"
},
{
"name": "Ruby",
"bytes": "16447"
},
{
"name": "Shell",
"bytes": "3566"
}
],
"symlink_target": ""
} |
from time import strftime, gmtime, localtime
import sys
from os.path import dirname, abspath
from inspect import getfile, currentframe
currentdir = dirname(abspath(getfile(currentframe())))
parentdir = dirname(currentdir)
sys.path.insert(0, parentdir)
from sippy.Math.recfilter import recfilter
from sippy.Time.clock_dtime import clock_getdtime, CLOCK_REALTIME, CLOCK_MONOTONIC
sys.path.pop(0)
from threading import local
class MonoGlobals(local):
realt_flt = None
monot_max = None
def __init__(self):
realt = clock_getdtime(CLOCK_REALTIME)
self.monot_max = clock_getdtime(CLOCK_MONOTONIC)
self.realt_flt = recfilter(0.99, realt - self.monot_max)
class MonoTime(object):
monot = None
realt = None
globals = MonoGlobals()
def __init__(self, s = None, monot = None, realt = None, trust_realt = False):
if s != None:
parts = s.split('-', 1)
self.realt = float(parts[0])
if len(parts) == 1:
self.__initFromRealt()
else:
self.monot = float(parts[1])
return
if monot == None and realt == None:
if trust_realt:
raise TypeError('MonoTime.__init__: realt cannot be None when trust_realt is set')
realt = clock_getdtime(CLOCK_REALTIME)
self.monot = clock_getdtime(CLOCK_MONOTONIC)
diff_flt = self.globals.realt_flt.apply(realt - self.monot)
if self.globals.monot_max < self.monot:
self.globals.monot_max = self.monot
self.realt = self.monot + diff_flt
return
if monot != None:
self.monot = monot
if realt != None:
self.realt = realt
else:
self.realt = monot + self.globals.realt_flt.lastval
return
self.realt = realt
self.__initFromRealt(trust_realt)
def __initFromRealt(self, trust_realt = False):
self.monot = self.realt - self.globals.realt_flt.lastval
if not trust_realt and self.monot > self.globals.monot_max:
monot_now = clock_getdtime(CLOCK_MONOTONIC)
if monot_now > self.globals.monot_max:
self.globals.monot_max = monot_now
self.monot = self.globals.monot_max
def getdiff(self):
return (self.realt - self.monot)
def __str__(self):
rstr = '%.6f-%.6f' % (self.realt, self.monot)
return (rstr)
def ftime(self, base = None):
if base != None:
realt = base.realt - (base.monot - self.monot)
else:
realt = self.realt
return strftime('%Y-%m-%d %H:%M:%S+00', gmtime(round(realt)))
def fptime(self, base = None):
if base != None:
realt = base.realt - (base.monot - self.monot)
else:
realt = self.realt
return '%s.%.3d' % (strftime('%d %b %H:%M:%S', localtime(realt)), \
(realt % 1) * 1000)
def frtime(self, base = None):
if base != None:
realt = base.realt - (base.monot - self.monot)
else:
realt = self.realt
gt = gmtime(realt)
day = strftime('%d', gt)
if day[0] == '0':
day = day[1]
return strftime('%%H:%%M:%%S.000 GMT %%a %%b %s %%Y' % day, gt)
def __add__(self, x):
if isinstance(x, MonoTime):
return (self.monot + x.monot)
return (self.monot + x)
def __sub__(self, x):
if isinstance(x, MonoTime):
return (self.monot - x.monot)
return (self.monot - x)
def __radd__(self, x):
if isinstance(x, MonoTime):
return (self.monot + x.monot)
return (self.monot + x)
def __rsub__(self, x):
if isinstance(x, MonoTime):
return (x.monot - self.monot)
return (x - self.monot)
def __cmp__(self, other):
# Python 2 only: cmp() and __cmp__ are gone in Python 3, which uses the
# rich comparison methods defined below instead
if other == None:
return (1)
if isinstance(other, int):
otime = float(other)
elif isinstance(other, float):
otime = other
else:
otime = other.monot
return cmp(self.monot, otime)
def __lt__(self, other):
return (self.monot < other.monot)
def __le__(self, other):
return (self.monot <= other.monot)
def __eq__(self, other):
if other == None:
return (False)
return (self.monot == other.monot)
def __ne__(self, other):
if other == None:
return (True)
return (self.monot != other.monot)
def __gt__(self, other):
return (self.monot > other.monot)
def __ge__(self, other):
return (self.monot >= other.monot)
def offsetFromNow(self):
now = clock_getdtime(CLOCK_MONOTONIC)
return (now - self.monot)
def getOffsetCopy(self, offst):
return self.__class__(monot = self.monot + offst, realt = self.realt + offst)
def offset(self, offst):
self.monot += offst
self.realt += offst
def getCopy(self):
return self.__class__(monot = self.monot, realt = self.realt)
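# A minimal round-trip sketch (not part of the original module):
#
#   m1 = MonoTime()
#   m2 = MonoTime(s=str(m1))   # str() emits 'realt-monot'; the parser splits on '-'
#   # m2 carries m1's wall-clock and monotonic stamps, rounded to microseconds
#   offst = m1 - m2            # subtraction compares the monotonic parts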
class selftest(object):
mg1 = None
mg2 = None
def run_t1(self):
m = MonoTime()
self.mg1 = m.globals.realt_flt
def run_t2(self):
m = MonoTime()
self.mg2 = m.globals.realt_flt
def run(self):
for x in range (0, 100000):
m1 = MonoTime()
m2 = MonoTime()
if x == 0:
print(m1, m2)
print(m1.ftime(), m2.ftime())
#print (m1.getdiff() - m2.getdiff())
print(m1, m2)
print(m1 < m2, m1 > m2, m1 == m2, m1 <= m2, m1 >= m2, m1 != m2)
print(m1.ftime(), m2.ftime())
ms1 = str(m1)
ms2 = str(m2)
m3 = MonoTime(s = ms1)
m4 = MonoTime(s = ms2)
print(m3, m4)
print(m3.ftime(), m4.ftime())
m5 = MonoTime(realt = m3.realt)
m6 = MonoTime(monot = m4.monot)
print(m5.ftime(), m6.ftime())
print(m5.globals.realt_flt == m1.globals.realt_flt)
from threading import Thread
t1 = Thread(target = self.run_t1)
t2 = Thread(target = self.run_t2)
t1.start()
t2.start()
t1.join()
t2.join()
print(self.mg1 != self.mg2)
if __name__ == '__main__':
selftest().run()
| {
"content_hash": "f4e6452b4956560d3675e39dad60d869",
"timestamp": "",
"source": "github",
"line_count": 208,
"max_line_length": 101,
"avg_line_length": 30.677884615384617,
"alnum_prop": 0.5361228647547407,
"repo_name": "dsanders11/rtpproxy",
"id": "cfc5325cddf867840be394e87477555116b524a1",
"size": "7750",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "python/sippy_lite/sippy/Time/MonoTime.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "C",
"bytes": "1404701"
},
{
"name": "M4",
"bytes": "23052"
},
{
"name": "Makefile",
"bytes": "1448887"
},
{
"name": "Python",
"bytes": "130120"
},
{
"name": "Roff",
"bytes": "16599"
},
{
"name": "Shell",
"bytes": "439512"
}
],
"symlink_target": ""
} |
"""PRAW Integration test suite."""
import pytest
from betamax import Betamax
from praw import Reddit
class IntegrationTest(object):
"""Base class for PRAW integration tests."""
def setup(self):
"""Setup runs before all test cases."""
self.setup_reddit()
self.setup_betamax()
def setup_betamax(self):
"""Configure betamax instance based off of the reddit instance."""
http = self.reddit._core._requestor._http
self.recorder = Betamax(http)
# Disable response compression in order to see the response bodies in
# the betamax cassettes.
http.headers['Accept-Encoding'] = 'identity'
# Require tests to explicitly disable read_only mode.
self.reddit.read_only = True
def setup_reddit(self):
self.reddit = Reddit(client_id=pytest.placeholders.client_id,
client_secret=pytest.placeholders.client_secret,
password=pytest.placeholders.password,
user_agent=pytest.placeholders.user_agent,
username=pytest.placeholders.username)
| {
"content_hash": "8b3593be652c5c8ccb365c81899eca83",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 77,
"avg_line_length": 36.03125,
"alnum_prop": 0.6244579358196011,
"repo_name": "RGood/praw",
"id": "8ec5b389e62d3f903709b23d75c4691a0527752c",
"size": "1153",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "tests/integration/__init__.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "323946"
},
{
"name": "Shell",
"bytes": "189"
}
],
"symlink_target": ""
} |
from dotmailer import Base
from dotmailer.constants import constants
from dotmailer.connection import connection
from dotmailer.address_books import AddressBook
class ContactScore(Base):
"""
Scoring information about a specific contact.
"""
contact_id = None
email = None
date_modified = None
score_label = None
score = None
engagement = None
suitability = None
def _update_values(self, data):
# Attempt to convert strings to the appropriate data type for contact
# scores.
for key in ['contact_id', 'score', 'engagement', 'suitability']:
if key in data and data[key] is not None:
data[key] = int(data[key])
if 'date_modified' in data:
data['date_modified'] = self.strptime(data['date_modified'])
super(ContactScore, self)._update_values(data)
class Contact(Base):
"""
This class represents a DotMailer contact. To be able to create a
contact, you must specify the contact's email address.
``Required keyword arguments``
**email** - `A string containing the email address of the contact
that you wish to add.`
``Optional keyword arguments``
**opt_in_type** - `A string which represents the type of optin
that the contact performed. You can either specify these values
by hand or use the pre-defined constant values.`
* :class:`Constants`.CONTACT_OPTINTYPE_UNKNOWN
* :class:`Constants`.CONTACT_OPTINTYPE_SINGLE
* :class:`Constants`.CONTACT_OPTINTYPE_DOUBLE
* :class:`Constants`.CONTACT_OPTINTYPE_VERIFIEDDOUBLE
**email_type** - `A string representing the type of email that the
contact would prefer to receive. This can be either plain text or
HTML. Alternatively use the constant values.`
* :class:`Constants`.CONTACT_EMAILTYPE_HTML
* :class:`Constants`.CONTACT_EMAILTYPE_PLAIN
**data_fields** - `A dictionary of values which any data fields
and value that should be associated with the contact e.g`
.. code-block:: python
{
'FavouriteColour': 'Red',
'age': 23
}
"""
end_point = '/v2/contacts'
email = None
opt_in_type = constants.CONTACT_OPTINTYPE_UNKNOWN
email_type = constants.CONTACT_EMAILTYPE_HTML
data_fields = None
def __init__(self, **kwargs):
self.required_fields = ['email']
# Reassign `delete` to reference the instance method rather
# than the class method version.
self.delete = self._delete
self.unsubscribe = self._unsubscribe
# Setup the other optional fields to the default value if they have not
# been specified.
if 'opt_in_type' not in kwargs:
kwargs['opt_in_type'] = constants.CONTACT_OPTINTYPE_UNKNOWN
if 'email_type' not in kwargs:
kwargs['email_type'] = constants.CONTACT_EMAILTYPE_HTML
if 'data_fields' not in kwargs:
kwargs['data_fields'] = None
super(Contact, self).__init__(**kwargs)
def __repr__(self):
return "ID:{}, Email:{}, DataFields:{}".format(
self.id,
self.email,
self.data_fields
)
def _update_values(self, data):
if 'data_fields' in data:
# If the data fields is a list then this is likely to be
# coming back from the server as a list of dictionaries
# so we need to unpack them
if isinstance(data['data_fields'], list):
data['data_fields'] = {
entry['key']: entry['value']
for entry in data['data_fields']
}
super(Contact, self)._update_values(data)
def param_dict(self):
contact_data_fields = []
if self.data_fields is not None:
contact_data_fields = [
{'key': key, 'value': value}
for key, value in self.data_fields.items()
]
return {
'Email': self.email,
'OptInType': self.opt_in_type,
'EmailType': self.email_type,
'DataFields': contact_data_fields
}
def create(self):
"""
Creates a contact
:return:
"""
response = connection.post(
self.end_point,
self.param_dict()
)
self._update_values(response)
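# A minimal usage sketch (not part of the original module; the address and
# data field values are hypothetical):
#
#   contact = Contact(
#       email='jane@example.com',
#       opt_in_type=constants.CONTACT_OPTINTYPE_SINGLE,
#       data_fields={'FavouriteColour': 'Red'},
#   )
#   contact.create()   # POSTs to /v2/contacts and fills in contact.id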
def update(self):
"""
Updates an existing contact's data. Unlike the DotMailer API,
you currently can NOT create a contact by calling update with
an ID value of zero. If you need to create a contact
then please use the create method.
:return:
"""
self.validate_id('Sorry unable to update this contact as no ID value'
' has been defined.')
response = connection.put(
'{}/{}'.format(self.end_point, self.id),
self.param_dict()
)
self._update_values(response)
def _delete(self):
"""
Deletes an existing contact. When calling on an instance use
`instance.delete()`.
:return:
"""
self.validate_id('Sorry, unable to delete contact as no ID value is '
'defined for this contact.')
# Attempt to issue the delete request to DotMailer to remove the
# address book
type(self).delete(self.id)
# Clear the current ID value so we can't accidentally issue this
# delete call multiple times
self.id = None
@classmethod
def delete(cls, id):
connection.delete(
'{}/{}'.format(cls.end_point, id)
)
return True
def add_to_address_book(self, address_book):
"""
Adds a contact to a specific address book
:param address_book: This should be an instance of :class:`AddressBook`
:return:
"""
address_book.add_contact(self)
def delete_from_address_book(self, address_book):
"""
Deletes a contact from a given address book
:param address_book:
:return:
"""
address_book.delete_contact(self)
@staticmethod
def delete_multiple_from_address_book(id_list, address_book):
"""
Deletes multiple contacts from an address book
:param id_list:
:param address_book:
:return:
"""
address_book.delete_multiple_contacts(id_list)
@staticmethod
def delete_all_from_address_book(address_book):
"""
Deletes all contacts from a given address book
:param address_book:
:return:
"""
address_book.delete_all_contacts()
@classmethod
def get_by_email(cls, email):
"""
Gets a contact by email address
:param email:
:return:
"""
response = connection.get(
cls.end_point + '/' + email
)
return cls(**response)
@classmethod
def get_by_id(cls, id):
"""
Gets a contact by ID
:param id:
:return:
"""
# TODO: Add some type checking in to make sure that the value supplied is actually an int
response = connection.get(
'{}/{}'.format(cls.end_point, id)
)
return cls(**response)
def get_address_books(self, select=1000, skip=0):
"""
Gets any address books that a contact is in
:param select:
:param skip:
:return:
"""
self.validate_id('Sorry, unable to get the address books that this '
'contact is in, due to no ID value being associated '
'with the contact.')
response = connection.get(
'{}/{}/address-books'.format(self.end_point, self.id),
query_params={'Select': select, 'Skip': skip}
)
return [AddressBook(**entry) for entry in response]
def get_all_address_books(self):
"""
Automatically performs all requests needed to return every possible
address book that this contact is associated with.
:return:
"""
all_address_books = []
select = 1000
skip = 0
address_books = self.get_address_books(select, skip)
num_of_entries = len(address_books)
while num_of_entries > 0:
all_address_books.extend(address_books)
if num_of_entries < select:
break
skip += select
address_books = self.get_address_books(select, skip)
num_of_entries = len(address_books)
return all_address_books
@classmethod
def get_multiple(cls, select=1000, skip=0):
"""
Gets a list of all contacts in the account
:param select:
:param skip:
:return:
"""
# TODO: Add some validation in for the parameter data types
response = connection.get(
cls.end_point,
query_params={'Select': select, 'Skip': skip}
)
return [cls(**entry) for entry in response]
@classmethod
def get_all(cls):
all_contacts = []
select = 1000
skip = 0
contacts = cls.get_multiple(select, skip)
num_of_entries = len(contacts)
while num_of_entries > 0:
all_contacts.extend(contacts)
if num_of_entries < select:
break
skip += select
contacts = cls.get_multiple(select, skip)
num_of_entries = len(contacts)
return all_contacts
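# All of the get_multiple/get_all pairs in this class repeat the same
# fetch-until-short-page loop; a generic helper along these lines (a sketch,
# not part of the original module) could replace them:
#
#   @staticmethod
#   def _fetch_all(fetch_page, select=1000):
#       results, skip = [], 0
#       while True:
#           page = fetch_page(select, skip)
#           results.extend(page)
#           if len(page) < select:
#               return results
#           skip += select
#
#   e.g. all_contacts = Contact._fetch_all(Contact.get_multiple)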
@classmethod
def get_contacts_since(cls, date, with_full_data=True, select=1000, skip=0):
"""
Gets a list of created contacts after a specified date
:param date:
:param with_full_data:
:param select:
:param skip:
:return:
"""
response = connection.get(
'{}/created-since/{}'.format(
cls.end_point, date.strftime('%Y-%m-%d')
),
query_params={
'WithFullData': with_full_data, 'Select': select, 'Skip': skip
}
)
return [cls(**entry) for entry in response]
@classmethod
def get_all_contacts_since(cls, date, with_full_data=True):
"""
Get all the contacts that have been created since a specific
date.
This function will automatically handle making all the calls
required to get a complete list i.e. if there are more than
1000 contacts since the specified date.
:param date:
:param with_full_data:
:return:
"""
select = 1000
skip = 0
all_contacts = []
contacts = cls.get_contacts_since(date, with_full_data, select, skip)
num_of_entries = len(contacts)
while num_of_entries > 0:
all_contacts.extend(contacts)
if num_of_entries < select:
break
skip += select
contacts = cls.get_contacts_since(
date, with_full_data, select, skip)
num_of_entries = len(contacts)
return all_contacts
@classmethod
def bulk_create(cls, filedata):
"""
Bulk creates, or bulk updates, contacts.
This function allows you to upload a bulk number of contacts to
the server. The contact data must be in either a CSV or Excel
format, and it must include one column that is called 'Email' or
equivalent if your account is using a language other than
English. All other columns will be mapped to your custom contact
data fields.
Currently DotMailer places a file upload limit of 10MB. If your
data is larger than this then you will need to split it into
smaller chunks.
The API will return an ID for the import, and the current status.
You can re-query the import status later, by using the unique
ID value.
:param filedata: Either a file or filepath which can be read from
:return:
"""
url = '{}/import'.format(cls.end_point)
return connection.post(url, {}, files={'file': filedata})
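# A minimal usage sketch (not part of the original module; the file name is
# hypothetical, and the 'id' key is assumed to match the import GUID naming
# used by get_contact_import_status below):
#
#   with open('contacts.csv', 'rb') as fh:
#       result = Contact.bulk_create(fh)
#   # Poll the import later with the returned GUID:
#   status = Contact.get_contact_import_status(result['id'])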
# TODO: Since this uses a different end point, should we move this to the address-book class and just call into it from here?
@classmethod
def bulk_create_in_address_book(cls, address_book, filedata):
"""
Bulk creates, or bulk updates, contacts in an address book.
Similar to the bulk create version, this function can be used to
create a bulk number of contacts in one go. However, this
version will also automatically associate the contact with the
address book that has been specified. The contact data must be
in either a CSV or Excel format, and it must include one column
that is called 'Email' or equivalent if your account is using a
language other than English. All other columns will be mapped
to your custom contact data fields.
Currently DotMailer places a file upload limit of 10MB. If your
data is larger than this then you will need to split it into
smaller chunks.
The API will return an ID for the import, and the current status.
You can re-query the import status later, by using the unique
ID value.
:param address_book:
:param filedata:
:return:
"""
url = '/v2/address-books/{}/contacts/import'.format(address_book.id)
return connection.post(url, {}, files={'file': filedata})
@classmethod
def get_contact_import_status(cls, id):
"""
Gets the import status of a previously started contact import.
:param id: The bulk upload ID value returned when you submitted
a bulk upload request. The ID is a GUID and should look similar
to 842d81e8-c619-457f-bb77-ab6c4a17da39.
:return: A dictionary that contains the keys 'id' and 'status'.
"""
return connection.get(
'{}/imports/{}'.format(cls.end_point, id)
)
@classmethod
def get_contact_import_report(cls, id):
"""
Gets a report with statistics about what was successfully
imported, and what was unable to be imported.
:param id:
:return:
"""
return connection.get(
'{}/import/{}/report'.format(cls.end_point, id)
)
@classmethod
def get_contact_import_report_faults(cls, id):
"""
Gets a report with statistics about what was successfully
imported, and what was unable to be imported.
:param id:
:return:
"""
return connection.get(
'{}/import/{}/report-faults'.format(cls.end_point, id)
)
# TODO: Should this be a call into the address book object
@classmethod
def get_contacts_from_address_book(cls, address_book, with_full_data=True,
select=1000, skip=0):
response = connection.get(
'/v2/address-books/{}/contacts'.format(address_book.id),
query_params={
'withFullData': with_full_data, 'select': select, 'skip': skip
}
)
return [Contact(**entry) for entry in response]
# TODO: Should this be a call into the address book object
@classmethod
def get_all_contacts_from_address_book(cls, address_book,
with_full_data=True):
all_contacts = []
select = 1000
skip = 0
contacts = cls.get_contacts_from_address_book(
address_book, with_full_data, select, skip)
num_of_entries = len(contacts)
while num_of_entries > 0:
all_contacts.extend(contacts)
if num_of_entries < select:
break
skip += select
contacts = cls.get_contacts_from_address_book(
address_book, with_full_data, select, skip)
num_of_entries = len(contacts)
return all_contacts
# TODO: Should this be a call into the address book object
@classmethod
def get_modified_contacts_from_address_book_since(cls, address_book, date,
with_full_data=True,
select=1000, skip=0):
response = connection.get(
'/v2/address-books/{}/contacts/modified-since/{}'.format(
address_book.id, date.strftime('%Y-%m-%d')
),
query_params={
'withFullData': with_full_data, 'select': select, 'skip': skip
}
)
return [Contact(**entry) for entry in response]
# TODO: Should this be a call into the address book object
@classmethod
def get_all_modified_contacts_from_address_book_since(cls, address_book,
date,
with_full_data=True):
all_contacts = []
select = 1000
skip = 0
contacts = cls.get_modified_contacts_from_address_book_since(
address_book, date, with_full_data, select, skip
)
num_of_entries = len(contacts)
while num_of_entries > 0:
all_contacts.extend(contacts)
if num_of_entries < select:
break
skip += select
contacts = cls.get_modified_contacts_from_address_book_since(
address_book, date, with_full_data, select, skip
)
num_of_entries = len(contacts)
return all_contacts
@classmethod
def get_modified_contacts_since(cls, date, with_full_data=True, select=1000,
skip=0):
response = connection.get(
'{}/modified-since/{}'.format(
cls.end_point, date.strftime('%Y-%m-%d')
),
query_params={
'withFullData': with_full_data, 'select': select, 'skip': skip
}
)
return [Contact(**entry) for entry in response]
@classmethod
def get_all_modified_contacts_since(cls, date, with_full_data=True):
all_contacts = []
select = 1000
skip = 0
contacts = cls.get_modified_contacts_since(
date, with_full_data, select, skip
)
num_of_entries = len(contacts)
while num_of_entries > 0:
all_contacts.extend(contacts)
if num_of_entries < select:
break
skip += select
contacts = cls.get_modified_contacts_since(
date, with_full_data, select, skip
)
num_of_entries = len(contacts)
return all_contacts
# @classmethod
# def get_suppressed_contacts_since(cls, date, select=1000, skip=0):
# response = connection.get(
# '{}/suppressed-since/{}'.format(
# cls.end_point, date.strftime('%Y-%m-%d')
# ),
# query_params={
# 'select': select, 'skip': skip
# }
# )
# # TODO: Need to think how to handle these objects since they are nested with additional information
# return [Contact(**entry) for entry in response]
#
# @classmethod
# def get_all_suppressed_contacts_since(cls, date):
# all_contacts = []
# select = 1000
# skip = 0
# contacts = cls.get_suppressed_contacts_since(date, select, skip)
# num_of_entries = len(contacts)
# while num_of_entries > 0:
# all_contacts.extend(contacts)
# if num_of_entries < select:
# break
# skip += select
# contacts = cls.get_suppressed_contacts_since(date, select, skip)
# num_of_entries = len(contacts)
# return all_contacts
#
# @classmethod
# def get_unsubscribed_contacts_since(cls, date, select=1000, skip=0):
# response = connection.get(
# '{}/unsubscribed-since/{}'.format(
# cls.end_point, date.strftime('%Y-%m-%d')
# ),
# query_params={
# 'select': select, 'skip': skip
# }
# )
# # TODO: Need to think how to handle these objects since they are nested with additional information
# return [Contact(**entry) for entry in response]
#
# @classmethod
    # def get_all_unsubscribed_contacts_since(cls, date):
# all_contacts = []
# select = 1000
# skip = 0
# contacts = cls.get_unsubscribed_contacts_since(date, select, skip)
# num_of_entries = len(contacts)
# while num_of_entries > 0:
# all_contacts.extend(contacts)
    #         if num_of_entries < select:
# break
# skip += select
# contacts = cls.get_unsubscribed_contacts_since(date, select, skip)
# num_of_entries = len(contacts)
# return all_contacts
#
# @classmethod
# def get_unsubscribed_contacts_from_address_book_since(cls, address_book,
# date, select=1000,
# skip=0):
# response = connection.get(
# '/v2/address-books/{}/contacts/unsubscribed-since/{}'.format(
# address_book.id, date.strftime('%Y-%m-%d')
# ),
# query_params={
# 'select': select, 'skip': skip
# }
# )
# # TODO: Need to think how to handle these objects since they are nested with additional information
# return [Contact(**entry) for entry in response]
#
# @classmethod
# def get_all_unsubscribed_contacts_from_address_book_since(cls, address_book,
# date):
# all_contacts = []
# select = 1000
# skip = 0
# contacts = cls.get_unsubscribed_contacts_from_address_book_since(
# address_book, date, select, skip
# )
# num_of_entries = len(contacts)
# while num_of_entries > 0:
# all_contacts.extend(contacts)
# if num_of_entries < select:
# break
# skip += select
# contacts = cls.get_unsubscribed_contacts_from_address_book_since(
# address_book, date, select, skip
# )
# num_of_entries = len(contacts)
# return all_contacts
def _unsubscribe(self):
return type(self).unsubscribe(self.email)
@classmethod
def unsubscribe(cls, email):
"""
        Unsubscribes a contact from the account.
        :param email: email address of the contact to unsubscribe
        :return: the API response for the unsubscribe request
"""
return connection.post(
'{}/unsubscribe'.format(cls.end_point),
{
'Email': email
}
)
def _resubscribe(self, preferred_local=None,
return_url_to_use_if_challenged=None):
contact, status = type(self).resubscribe(
self.email, preferred_local, return_url_to_use_if_challenged
)
        # TODO: Consider a more dynamic approach, e.g. use __dict__ to pull out the attributes that have been defined (excluding the ID)
data = {
'email': contact.email,
'opt_in_type': contact.opt_in_type,
'email_type': contact.email_type,
            'data_fields': contact.data_fields,
'status': contact.status
}
self._update_values(data)
return status
@classmethod
def resubscribe(cls, email, preferred_local=None,
return_url_to_use_if_challenged=None):
payload = {
'UnsubscribedContact': {
'Email': email
}
}
if preferred_local is not None:
payload['PreferredLocale'] = preferred_local
if return_url_to_use_if_challenged is not None:
payload['ReturnUrlToUseIfChallenged'] = return_url_to_use_if_challenged
response = connection.post(
'{}/resubscribe'.format(cls.end_point),
payload
)
return Contact(**response['contact']), response['status']
# https://developer.dotmailer.com/docs/resubscribe-contact-to-address-book
@classmethod
def get_scoring(cls, select, skip):
"""
        Gets contact scoring for contacts across the whole account.
        :param select: maximum number of records to return
        :param skip: number of records to skip, for pagination
        :return: a list of ContactScore objects
"""
response = connection.get(
'{}/score/'.format(cls.end_point),
query_params={
'Select': select, 'Skip': skip
}
)
return [ContactScore(**entry) for entry in response]
@classmethod
def get_all_scoring(cls):
"""
        Gets contact scoring for all contacts, paging through the API in
        batches of 1000.
        :return: a list of ContactScore objects
"""
all_scoring = []
select = 1000
skip = 0
scorings = cls.get_scoring(select, skip)
num_of_entries = len(scorings)
while num_of_entries > 0:
all_scoring.extend(scorings)
if num_of_entries < select:
break
skip += select
scorings = cls.get_scoring(select, skip)
num_of_entries = len(scorings)
return all_scoring
@classmethod
def get_scoring_in_address_book(cls, address_book, select, skip):
"""
        Gets contact scoring for contacts within a specific address
        book or segment.
        :param address_book: the address book or segment to query
        :param select: maximum number of records to return
        :param skip: number of records to skip, for pagination
        :return: a list of ContactScore objects
"""
response = connection.get(
'/v2/address-books/{}/contacts/score/'.format(address_book.id),
query_params={
'Select': select, 'Skip': skip
}
)
return [ContactScore(**entry) for entry in response]
@classmethod
def get_all_scoring_in_address_book(cls, address_book):
all_scoring = []
select = 1000
skip = 0
scorings = cls.get_scoring_in_address_book(address_book, select, skip)
num_of_entries = len(scorings)
while num_of_entries > 0:
all_scoring.extend(scorings)
if num_of_entries < select:
break
skip += select
scorings = cls.get_scoring_in_address_book(
address_book, select, skip
)
num_of_entries = len(scorings)
return all_scoring
@classmethod
def get_scores_modified_since(cls, date, select, skip):
response = connection.get(
'{}/score/modified-since/{}'.format(
cls.end_point, date.strftime('%Y-%m-%d')
),
query_params={
'Select': select, 'Skip': skip
}
)
return [ContactScore(**entry) for entry in response]
@classmethod
def get_all_scores_modified_since(cls, date):
all_scoring = []
select = 1000
skip = 0
scorings = cls.get_scores_modified_since(date, select, skip)
num_of_entries = len(scorings)
while num_of_entries > 0:
all_scoring.extend(scorings)
if num_of_entries < select:
break
skip += select
scorings = cls.get_scores_modified_since(date, select, skip)
num_of_entries = len(scorings)
return all_scoring
@classmethod
def get_score_by_email(cls, email):
"""
        Gets contact scoring for a contact by email address.
        :param email: email address of the contact
        :return: a ContactScore object
"""
response = connection.get(
'{}/{}/score'.format(cls.end_point, email)
)
return ContactScore(**response)
@classmethod
def get_score_by_id(cls, id):
"""
        Gets contact scoring for a contact by ID.
        :param id: ID of the contact
        :return: a ContactScore object
"""
response = connection.get(
'{}/{}/score'.format(cls.end_point, id)
)
return ContactScore(**response)
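# Hypothetical usage sketch, not part of the package's public API. It assumes
# the enclosing class is ``Contact`` (as the constructors above suggest) and
# that ``connection`` has been configured; the date is an arbitrary example.
def _pagination_example():
    """Page through all recently modified contacts via the helpers above."""
    from datetime import datetime
    since = datetime(2020, 1, 1)
    for contact in Contact.get_all_modified_contacts_since(since):
        print(contact.email)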
| {
"content_hash": "bb9e2401e1bc8db14f92d2b4a2e8876a",
"timestamp": "",
"source": "github",
"line_count": 849,
"max_line_length": 129,
"avg_line_length": 33.4640753828033,
"alnum_prop": 0.548695927633663,
"repo_name": "Mr-F/dotmailer",
"id": "9e8188b6958b68c8c05a0a1caeafe35479752465",
"size": "28411",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dotmailer/contacts.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "158110"
}
],
"symlink_target": ""
} |
"""Deprecation helpers for Home Assistant."""
import functools
import inspect
import logging
from typing import Any, Callable, Dict, Optional
from ..helpers.frame import MissingIntegrationFrame, get_integration_frame
def deprecated_substitute(substitute_name: str) -> Callable[..., Callable]:
"""Help migrate properties to new names.
When a property is added to replace an older property, this decorator can
be added to the new property, listing the old property as the substitute.
If the old property is defined, its value will be used instead, and a log
warning will be issued alerting the user of the impending change.
"""
def decorator(func: Callable) -> Callable:
"""Decorate function as deprecated."""
def func_wrapper(self: Callable) -> Any:
"""Wrap for the original function."""
if hasattr(self, substitute_name):
# If this platform is still using the old property, issue
# a logger warning once with instructions on how to fix it.
warnings = getattr(func, "_deprecated_substitute_warnings", {})
module_name = self.__module__
if not warnings.get(module_name):
logger = logging.getLogger(module_name)
logger.warning(
"'%s' is deprecated. Please rename '%s' to "
"'%s' in '%s' to ensure future support.",
substitute_name,
substitute_name,
func.__name__,
inspect.getfile(self.__class__),
)
warnings[module_name] = True
setattr(func, "_deprecated_substitute_warnings", warnings)
# Return the old property
return getattr(self, substitute_name)
return func(self)
return func_wrapper
return decorator
def get_deprecated(
config: Dict[str, Any], new_name: str, old_name: str, default: Optional[Any] = None
) -> Optional[Any]:
"""Allow an old config name to be deprecated with a replacement.
If the new config isn't found, but the old one is, the old value is used
and a warning is issued to the user.
"""
if old_name in config:
module = inspect.getmodule(inspect.stack()[1][0])
if module is not None:
module_name = module.__name__
else:
# If Python is unable to access the sources files, the call stack frame
# will be missing information, so let's guard.
# https://github.com/home-assistant/core/issues/24982
module_name = __name__
logger = logging.getLogger(module_name)
logger.warning(
"'%s' is deprecated. Please rename '%s' to '%s' in your "
"configuration file.",
old_name,
old_name,
new_name,
)
return config.get(old_name)
return config.get(new_name, default)
def deprecated_function(replacement: str) -> Callable[..., Callable]:
"""Mark function as deprecated and provide a replacement function to be used instead."""
def deprecated_decorator(func: Callable) -> Callable:
"""Decorate function as deprecated."""
@functools.wraps(func)
def deprecated_func(*args: tuple, **kwargs: Dict[str, Any]) -> Any:
"""Wrap for the original function."""
logger = logging.getLogger(func.__module__)
try:
_, integration, path = get_integration_frame()
if path == "custom_components/":
logger.warning(
"%s was called from %s, this is a deprecated function. Use %s instead, please report this to the maintainer of %s",
func.__name__,
integration,
replacement,
integration,
)
else:
logger.warning(
"%s was called from %s, this is a deprecated function. Use %s instead",
func.__name__,
integration,
replacement,
)
except MissingIntegrationFrame:
logger.warning(
"%s is a deprecated function. Use %s instead",
func.__name__,
replacement,
)
return func(*args, **kwargs)
return deprecated_func
return deprecated_decorator
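# Hypothetical usage sketch, not part of Home Assistant itself; `Thermometer`
# and the config keys are made-up names for illustration only.
def _deprecation_usage_sketch() -> None:
    """Show how the helpers above are typically applied."""
    class Thermometer:
        # A platform still defining the old attribute triggers a one-time
        # logger warning, and the old value is returned by the new property.
        old_unit = "C"
        @property
        @deprecated_substitute("old_unit")
        def unit(self) -> str:
            return "F"
    assert Thermometer().unit == "C"
    # get_deprecated falls back to the deprecated key and logs a rename hint.
    config = {"temp_old": 21}
    assert get_deprecated(config, "temperature", "temp_old") == 21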
| {
"content_hash": "3db92df6ad2ab804524e826929c9a41d",
"timestamp": "",
"source": "github",
"line_count": 118,
"max_line_length": 139,
"avg_line_length": 39.13559322033898,
"alnum_prop": 0.5493720225205717,
"repo_name": "turbokongen/home-assistant",
"id": "7478a7fede98ede1a4e3c532505de7ed9b2173b3",
"size": "4618",
"binary": false,
"copies": "2",
"ref": "refs/heads/dev",
"path": "homeassistant/helpers/deprecation.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1720"
},
{
"name": "Python",
"bytes": "30405146"
},
{
"name": "Shell",
"bytes": "4832"
}
],
"symlink_target": ""
} |
import struct
import socket
import time
from trackpy.vendors.redis import redis
class Torrent(object):
def __init__(self, info_hash):
self.info = redis.hgetall(info_hash)
self.info_hash = info_hash
def can_announce(self, peer_id):
timestamp = int(redis.get("%s_%s" % (self.info_hash, peer_id)) or 0)
if not timestamp:
return True
        now = int(time.time())
        # A peer may announce again only after the 5 minute minimum interval.
        return now - timestamp > 5 * 60
def set_announce(self, peer_id):
redis.set("%s_%s" % (self.info_hash, peer_id), int(time.time()))
@property
def peers(self):
return redis.smembers('%s_peers' % self.info_hash)
@peers.setter
def peers(self, peer):
        # Note: assigning to ``peers`` adds a single peer to the set.
        redis.sadd('%s_peers' % self.info_hash, peer)
@property
def seeders(self):
        return self.info.get('seeders', 0)
@property
def leechers(self):
        return self.info.get('leechers', 0)
@property
def binary_peers(self):
        # Compact peer format: a 4-byte network-order IP followed by a 2-byte
        # network-order port per peer. '!IH' is unsigned; the signed '!ih'
        # would overflow for high IPs and for ports above 32767.
        binary_peers = ''
        for peer in self.peers:
            ip, port = peer.split(':')
            ip = struct.unpack("!I", socket.inet_aton(ip))[0]
            binary_peers += struct.pack('!IH', ip, int(port))
        return binary_peers
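# Hypothetical helper, not part of the tracker: decodes the compact peer
# string produced by ``Torrent.binary_peers``, e.g. for debugging or tests.
def _decode_compact_peers(blob):
    peers = []
    # Each entry is 6 bytes: a 4-byte network-order IP and a 2-byte port.
    for offset in range(0, len(blob), 6):
        ip_int, port = struct.unpack('!IH', blob[offset:offset + 6])
        peers.append((socket.inet_ntoa(struct.pack('!I', ip_int)), port))
    return peers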
| {
"content_hash": "7b5d293a122144847a08f24ef8c9a069",
"timestamp": "",
"source": "github",
"line_count": 50,
"max_line_length": 72,
"avg_line_length": 24.32,
"alnum_prop": 0.6332236842105263,
"repo_name": "vtemian/university_projects",
"id": "89380c6ec050f1f2d7f04948ca97c9c11bb3b2e2",
"size": "1216",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "data_structures/bitorrent/server/announce/torrent.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "15965"
},
{
"name": "C++",
"bytes": "9417"
},
{
"name": "Python",
"bytes": "69909"
}
],
"symlink_target": ""
} |
"""
"""
# from activations import *
from activations import LinearActivation, ReluActivation, SoftmaxActivation, \
IdentityActivation, TanhActivation, SequenceSoftmaxActivation
from attrs import ExtraAttr
from default_decorators import wrap_name_default, wrap_act_default, \
wrap_param_default, wrap_bias_attr_default, wrap_param_attr_default
from layers import * # There are too many layers used in network, so import *
from poolings import MaxPooling, SumPooling
from paddle.trainer.config_parser import *
__all__ = [
'sequence_conv_pool', 'simple_lstm', "simple_img_conv_pool",
"img_conv_bn_pool", 'dropout_layer', 'lstmemory_group', 'lstmemory_unit',
'small_vgg', 'img_conv_group', 'vgg_16_network', 'gru_unit', 'gru_group',
'simple_gru', 'simple_attention', 'simple_gru2', 'bidirectional_gru',
'text_conv_pool', 'bidirectional_lstm', 'inputs', 'outputs'
]
######################################################
# Text CNN #
######################################################
@wrap_name_default("sequence_conv_pooling")
def sequence_conv_pool(input,
context_len,
hidden_size,
name=None,
context_start=None,
pool_type=None,
context_proj_layer_name=None,
context_proj_param_attr=False,
fc_layer_name=None,
fc_param_attr=None,
fc_bias_attr=None,
fc_act=None,
pool_bias_attr=None,
fc_attr=None,
context_attr=None,
pool_attr=None):
"""
Text convolution pooling layers helper.
Text input => Context Projection => FC Layer => Pooling => Output.
    :param name: name of the output layer (the pooling layer)
:type name: basestring
:param input: name of input layer
:type input: LayerOutput
:param context_len: context projection length. See
context_projection's document.
:type context_len: int
:param hidden_size: FC Layer size.
:type hidden_size: int
    :param context_start: context start position. See
                           context_projection's context_start.
:type context_start: int or None
:param pool_type: pooling layer type. See pooling_layer's document.
:type pool_type: BasePoolingType.
    :param context_proj_layer_name: context projection layer name.
                                    None if the user doesn't care.
    :type context_proj_layer_name: basestring
    :param context_proj_param_attr: context projection parameter attribute.
                                    None if the user doesn't care.
    :type context_proj_param_attr: ParameterAttribute or None.
    :param fc_layer_name: fc layer name. None if the user doesn't care.
    :type fc_layer_name: basestring
    :param fc_param_attr: fc layer parameter attribute. None if the user
                          doesn't care.
    :type fc_param_attr: ParameterAttribute or None
    :param fc_bias_attr: fc bias parameter attribute. False if no bias,
                         None if the user doesn't care.
    :type fc_bias_attr: ParameterAttribute or None
    :param fc_act: fc layer activation type. None means tanh.
    :type fc_act: BaseActivation
    :param pool_bias_attr: pooling layer bias attribute. None if the user
                           doesn't care; False if no bias.
    :type pool_bias_attr: ParameterAttribute or None.
:param fc_attr: fc layer extra attribute.
:type fc_attr: ExtraLayerAttribute
:param context_attr: context projection layer extra attribute.
:type context_attr: ExtraLayerAttribute
:param pool_attr: pooling layer extra attribute.
:type pool_attr: ExtraLayerAttribute
    :return: the output layer.
:rtype: LayerOutput
"""
    # Set default values for parameters.
context_proj_layer_name = "%s_conv_proj" % name \
if context_proj_layer_name is None else context_proj_layer_name
with mixed_layer(
name=context_proj_layer_name,
size=input.size * context_len,
act=LinearActivation(),
layer_attr=context_attr) as m:
m += context_projection(
input,
context_len=context_len,
context_start=context_start,
padding_attr=context_proj_param_attr)
fc_layer_name = "%s_conv_fc" % name \
if fc_layer_name is None else fc_layer_name
fl = fc_layer(
name=fc_layer_name,
input=m,
size=hidden_size,
act=fc_act,
layer_attr=fc_attr,
param_attr=fc_param_attr,
bias_attr=fc_bias_attr)
return pooling_layer(
name=name,
input=fl,
pooling_type=pool_type,
bias_attr=pool_bias_attr,
layer_attr=pool_attr)
text_conv_pool = sequence_conv_pool
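# Hypothetical usage sketch (not part of this module's API): a minimal text
# CNN built from sequence_conv_pool. The vocabulary and layer sizes are
# illustrative assumptions only.
def _sequence_conv_pool_example():
    words = data_layer(name='word_ids', size=10000)
    emb = embedding_layer(input=words, size=128)
    return sequence_conv_pool(input=emb, context_len=3, hidden_size=256)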
############################################################################
# Images #
############################################################################
@wrap_name_default("conv_pool")
def simple_img_conv_pool(input,
filter_size,
num_filters,
pool_size,
name=None,
pool_type=None,
act=None,
groups=1,
conv_stride=1,
conv_padding=0,
bias_attr=None,
num_channel=None,
param_attr=None,
shared_bias=True,
conv_layer_attr=None,
pool_stride=1,
pool_padding=0,
pool_layer_attr=None):
"""
Simple image convolution and pooling group.
Input => conv => pooling
:param name: group name
:type name: basestring
:param input: input layer name.
:type input: LayerOutput
:param filter_size: see img_conv_layer for details
:type filter_size: int
:param num_filters: see img_conv_layer for details
:type num_filters: int
:param pool_size: see img_pool_layer for details
:type pool_size: int
:param pool_type: see img_pool_layer for details
:type pool_type: BasePoolingType
:param act: see img_conv_layer for details
:type act: BaseActivation
:param groups: see img_conv_layer for details
:type groups: int
:param conv_stride: see img_conv_layer for details
:type conv_stride: int
:param conv_padding: see img_conv_layer for details
:type conv_padding: int
:param bias_attr: see img_conv_layer for details
:type bias_attr: ParameterAttribute
:param num_channel: see img_conv_layer for details
:type num_channel: int
:param param_attr: see img_conv_layer for details
:type param_attr: ParameterAttribute
:param shared_bias: see img_conv_layer for details
:type shared_bias: bool
:param conv_layer_attr: see img_conv_layer for details
:type conv_layer_attr: ExtraLayerAttribute
:param pool_stride: see img_pool_layer for details
:type pool_stride: int
:param pool_padding: see img_pool_layer for details
:type pool_padding: int
:param pool_layer_attr: see img_pool_layer for details
:type pool_layer_attr: ExtraLayerAttribute
:return: Layer's output
:rtype: LayerOutput
"""
_conv_ = img_conv_layer(
name="%s_conv" % name,
input=input,
filter_size=filter_size,
num_filters=num_filters,
num_channels=num_channel,
act=act,
groups=groups,
stride=conv_stride,
padding=conv_padding,
bias_attr=bias_attr,
param_attr=param_attr,
shared_biases=shared_bias,
layer_attr=conv_layer_attr)
return img_pool_layer(
name="%s_pool" % name,
input=_conv_,
pool_size=pool_size,
pool_type=pool_type,
stride=pool_stride,
padding=pool_padding,
layer_attr=pool_layer_attr)
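# Hypothetical usage sketch: a LeNet-style conv+pool stage over 28x28
# grayscale input. All sizes are illustrative assumptions.
def _simple_img_conv_pool_example():
    img = data_layer(name='image', size=28 * 28)
    return simple_img_conv_pool(input=img, num_channel=1, filter_size=5,
                                num_filters=20, pool_size=2, pool_stride=2,
                                act=ReluActivation())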
@wrap_name_default("conv_bn_pool")
def img_conv_bn_pool(input,
filter_size,
num_filters,
pool_size,
name=None,
pool_type=None,
act=None,
groups=1,
conv_stride=1,
conv_padding=0,
conv_bias_attr=None,
num_channel=None,
conv_param_attr=None,
shared_bias=True,
conv_layer_attr=None,
bn_param_attr=None,
bn_bias_attr=None,
bn_layer_attr=None,
pool_stride=1,
pool_padding=0,
pool_layer_attr=None):
"""
Convolution, batch normalization, pooling group.
:param name: group name
:type name: basestring
:param input: layer's input
:type input: LayerOutput
:param filter_size: see img_conv_layer's document
:type filter_size: int
:param num_filters: see img_conv_layer's document
:type num_filters: int
:param pool_size: see img_pool_layer's document.
:type pool_size: int
:param pool_type: see img_pool_layer's document.
:type pool_type: BasePoolingType
:param act: see batch_norm_layer's document.
:type act: BaseActivation
:param groups: see img_conv_layer's document
:type groups: int
:param conv_stride: see img_conv_layer's document.
:type conv_stride: int
:param conv_padding: see img_conv_layer's document.
:type conv_padding: int
:param conv_bias_attr: see img_conv_layer's document.
:type conv_bias_attr: ParameterAttribute
:param num_channel: see img_conv_layer's document.
:type num_channel: int
:param conv_param_attr: see img_conv_layer's document.
:type conv_param_attr: ParameterAttribute
:param shared_bias: see img_conv_layer's document.
:type shared_bias: bool
:param conv_layer_attr: see img_conv_layer's document.
:type conv_layer_attr: ExtraLayerOutput
:param bn_param_attr: see batch_norm_layer's document.
:type bn_param_attr: ParameterAttribute.
    :param bn_bias_attr: see batch_norm_layer's document.
    :type bn_bias_attr: ParameterAttribute
    :param bn_layer_attr: see batch_norm_layer's document.
    :type bn_layer_attr: ExtraLayerAttribute
:param pool_stride: see img_pool_layer's document.
:type pool_stride: int
:param pool_padding: see img_pool_layer's document.
:type pool_padding: int
:param pool_layer_attr: see img_pool_layer's document.
:type pool_layer_attr: ExtraLayerAttribute
:return: Layer groups output
:rtype: LayerOutput
"""
__conv__ = img_conv_layer(
name="%s_conv" % name,
input=input,
filter_size=filter_size,
num_filters=num_filters,
num_channels=num_channel,
act=LinearActivation(),
groups=groups,
stride=conv_stride,
padding=conv_padding,
bias_attr=conv_bias_attr,
param_attr=conv_param_attr,
shared_biases=shared_bias,
layer_attr=conv_layer_attr)
__bn__ = batch_norm_layer(
name="%s_bn" % name,
input=__conv__,
act=act,
bias_attr=bn_bias_attr,
param_attr=bn_param_attr,
layer_attr=bn_layer_attr)
return img_pool_layer(
name="%s_pool" % name,
input=__bn__,
pool_type=pool_type,
pool_size=pool_size,
stride=pool_stride,
padding=pool_padding,
layer_attr=pool_layer_attr)
@wrap_act_default(param_names=['conv_act'], act=ReluActivation())
@wrap_param_default(
param_names=['pool_type'], default_factory=lambda _: MaxPooling())
def img_conv_group(input,
conv_num_filter,
pool_size,
num_channels=None,
conv_padding=1,
conv_filter_size=3,
conv_act=None,
conv_with_batchnorm=False,
conv_batchnorm_drop_rate=0,
pool_stride=1,
pool_type=None):
"""
    Image convolution group, used for the vgg net.
    :param input: input layer.
    :param conv_num_filter: list of filter counts, one entry per conv layer.
    :param pool_size: pooling window size of the final img_pool_layer.
    :param num_channels: number of input channels.
    :param conv_padding: padding of each conv layer (scalar or list).
    :param conv_filter_size: filter size of each conv layer (scalar or list).
    :param conv_act: activation of each conv layer (scalar or list).
    :param conv_with_batchnorm: whether each conv layer is followed by batch
                                normalization (scalar or list).
    :param conv_batchnorm_drop_rate: dropout rate of each batch norm layer
                                     (scalar or list).
    :param pool_stride: stride of the final img_pool_layer.
    :param pool_type: pooling type of the final img_pool_layer.
    :return: LayerOutput object.
"""
tmp = input
# Type checks
assert isinstance(tmp, LayerOutput)
assert isinstance(conv_num_filter, list) or isinstance(conv_num_filter,
tuple)
for each_num_filter in conv_num_filter:
assert isinstance(each_num_filter, int)
assert isinstance(pool_size, int)
def __extend_list__(obj):
if not hasattr(obj, '__len__'):
return [obj] * len(conv_num_filter)
else:
return obj
conv_padding = __extend_list__(conv_padding)
conv_filter_size = __extend_list__(conv_filter_size)
conv_act = __extend_list__(conv_act)
conv_with_batchnorm = __extend_list__(conv_with_batchnorm)
conv_batchnorm_drop_rate = __extend_list__(conv_batchnorm_drop_rate)
for i in xrange(len(conv_num_filter)):
extra_kwargs = dict()
if num_channels is not None:
extra_kwargs['num_channels'] = num_channels
num_channels = None
if conv_with_batchnorm[i]:
extra_kwargs['act'] = LinearActivation()
else:
extra_kwargs['act'] = conv_act[i]
tmp = img_conv_layer(
input=tmp,
padding=conv_padding[i],
filter_size=conv_filter_size[i],
num_filters=conv_num_filter[i],
**extra_kwargs)
# logger.debug("tmp.num_filters = %d" % tmp.num_filters)
if conv_with_batchnorm[i]:
dropout = conv_batchnorm_drop_rate[i]
if dropout == 0 or abs(dropout) < 1e-5: # dropout not set
tmp = batch_norm_layer(input=tmp, act=conv_act[i])
else:
tmp = batch_norm_layer(
input=tmp,
act=conv_act[i],
layer_attr=ExtraAttr(drop_rate=dropout))
return img_pool_layer(
input=tmp, stride=pool_stride, pool_size=pool_size, pool_type=pool_type)
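# Hypothetical usage sketch: one VGG-style block of two 3x3 convolutions
# followed by 2x2 max pooling, as small_vgg below builds repeatedly. Sizes
# are illustrative assumptions.
def _img_conv_group_example():
    img = data_layer(name='image', size=32 * 32 * 3)
    return img_conv_group(input=img, num_channels=3,
                          conv_num_filter=[64, 64], pool_size=2,
                          pool_stride=2, conv_with_batchnorm=True)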
def small_vgg(input_image, num_channels, num_classes):
def __vgg__(ipt, num_filter, times, dropouts, num_channels_=None):
return img_conv_group(
input=ipt,
num_channels=num_channels_,
pool_size=2,
pool_stride=2,
conv_num_filter=[num_filter] * times,
conv_filter_size=3,
conv_act=ReluActivation(),
conv_with_batchnorm=True,
conv_batchnorm_drop_rate=dropouts,
pool_type=MaxPooling())
tmp = __vgg__(input_image, 64, 2, [0.3, 0], num_channels)
tmp = __vgg__(tmp, 128, 2, [0.4, 0])
tmp = __vgg__(tmp, 256, 3, [0.4, 0.4, 0])
tmp = __vgg__(tmp, 512, 3, [0.4, 0.4, 0])
tmp = img_pool_layer(
input=tmp, stride=2, pool_size=2, pool_type=MaxPooling())
tmp = dropout_layer(input=tmp, dropout_rate=0.5)
tmp = fc_layer(
input=tmp,
size=512,
layer_attr=ExtraAttr(drop_rate=0.5),
act=LinearActivation())
tmp = batch_norm_layer(input=tmp, act=ReluActivation())
return fc_layer(input=tmp, size=num_classes, act=SoftmaxActivation())
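# Hypothetical usage sketch: small_vgg on CIFAR-10-shaped data (32x32 RGB
# images, 10 classes). The input size is an illustrative assumption.
def _small_vgg_example():
    img = data_layer(name='image', size=32 * 32 * 3)
    return small_vgg(input_image=img, num_channels=3, num_classes=10)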
def vgg_16_network(input_image, num_channels, num_classes=1000):
"""
Same model from https://gist.github.com/ksimonyan/211839e770f7b538e2d8
:param num_classes:
:param input_image:
:type input_image: LayerOutput
:param num_channels:
:type num_channels: int
:return:
"""
tmp = img_conv_group(
input=input_image,
num_channels=num_channels,
conv_padding=1,
conv_num_filter=[64, 64],
conv_filter_size=3,
conv_act=ReluActivation(),
pool_size=2,
pool_stride=2,
pool_type=MaxPooling())
tmp = img_conv_group(
input=tmp,
conv_num_filter=[128, 128],
conv_padding=1,
conv_filter_size=3,
conv_act=ReluActivation(),
pool_stride=2,
pool_type=MaxPooling(),
pool_size=2)
tmp = img_conv_group(
input=tmp,
conv_num_filter=[256, 256, 256],
conv_padding=1,
conv_filter_size=3,
conv_act=ReluActivation(),
pool_stride=2,
pool_type=MaxPooling(),
pool_size=2)
tmp = img_conv_group(
input=tmp,
conv_num_filter=[512, 512, 512],
conv_padding=1,
conv_filter_size=3,
conv_act=ReluActivation(),
pool_stride=2,
pool_type=MaxPooling(),
pool_size=2)
tmp = img_conv_group(
input=tmp,
conv_num_filter=[512, 512, 512],
conv_padding=1,
conv_filter_size=3,
conv_act=ReluActivation(),
pool_stride=2,
pool_type=MaxPooling(),
pool_size=2)
tmp = fc_layer(
input=tmp,
size=4096,
act=ReluActivation(),
layer_attr=ExtraAttr(drop_rate=0.5))
tmp = fc_layer(
input=tmp,
size=4096,
act=ReluActivation(),
layer_attr=ExtraAttr(drop_rate=0.5))
return fc_layer(input=tmp, size=num_classes, act=SoftmaxActivation())
############################################################################
# Recurrent #
############################################################################
@wrap_name_default("lstm")
def simple_lstm(input,
size,
name=None,
reverse=False,
mat_param_attr=None,
bias_param_attr=None,
inner_param_attr=None,
act=None,
gate_act=None,
state_act=None,
mixed_layer_attr=None,
lstm_cell_attr=None):
"""
Simple LSTM Cell.
    It simply combines a mixed layer with a full_matrix_projection and a
    lstmemory layer. The simple lstm cell is implemented by the following
    equations.
.. math::
i_t & = \\sigma(W_{xi}x_{t} + W_{hi}h_{t-1} + W_{ci}c_{t-1} + b_i)
f_t & = \\sigma(W_{xf}x_{t} + W_{hf}h_{t-1} + W_{cf}c_{t-1} + b_f)
c_t & = f_tc_{t-1} + i_t tanh (W_{xc}x_t+W_{hc}h_{t-1} + b_c)
o_t & = \\sigma(W_{xo}x_{t} + W_{ho}h_{t-1} + W_{co}c_t + b_o)
h_t & = o_t tanh(c_t)
    Please refer to **Generating Sequences With Recurrent Neural Networks** if
    you want to know what an lstm is. Link_ is here.
.. _Link: http://arxiv.org/abs/1308.0850
:param name: lstm layer name.
:type name: basestring
:param input: input layer name.
:type input: LayerOutput
:param size: lstm layer size.
:type size: int
:param reverse: whether to process the input data in a reverse order
:type reverse: bool
:param mat_param_attr: mixed layer's matrix projection parameter attribute.
:type mat_param_attr: ParameterAttribute
:param bias_param_attr: bias parameter attribute. False means no bias, None
means default bias.
:type bias_param_attr: ParameterAttribute|False
:param inner_param_attr: lstm cell parameter attribute.
:type inner_param_attr: ParameterAttribute
    :param act: lstm final activation type
    :type act: BaseActivation
    :param gate_act: lstm gate activation type
    :type gate_act: BaseActivation
    :param state_act: lstm state activation type.
    :type state_act: BaseActivation
:param mixed_layer_attr: mixed layer's extra attribute.
:type mixed_layer_attr: ExtraLayerAttribute
:param lstm_cell_attr: lstm layer's extra attribute.
:type lstm_cell_attr: ExtraLayerAttribute
:return: lstm layer name.
:rtype: LayerOutput
"""
fc_name = 'lstm_transform_%s' % name
with mixed_layer(
name=fc_name,
size=size * 4,
act=IdentityActivation(),
layer_attr=mixed_layer_attr,
bias_attr=False) as m:
m += full_matrix_projection(input, param_attr=mat_param_attr)
return lstmemory(
name=name,
input=m,
reverse=reverse,
bias_attr=bias_param_attr,
param_attr=inner_param_attr,
act=act,
gate_act=gate_act,
state_act=state_act,
layer_attr=lstm_cell_attr)
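# Hypothetical usage sketch: an LSTM encoder over an embedded word sequence.
# Vocabulary and layer sizes are illustrative assumptions.
def _simple_lstm_example():
    words = data_layer(name='words', size=10000)
    emb = embedding_layer(input=words, size=256)
    return simple_lstm(input=emb, size=512, act=TanhActivation())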
@wrap_name_default('lstm_unit')
def lstmemory_unit(input,
name=None,
size=None,
param_attr=None,
act=None,
gate_act=None,
state_act=None,
mixed_bias_attr=None,
lstm_bias_attr=None,
mixed_layer_attr=None,
lstm_layer_attr=None,
get_output_layer_attr=None):
"""
    Define calculations that an LSTM unit performs in a single time step.
    This function itself is not a recurrent layer, so it cannot be
    directly applied to sequence input. This function is always used in
    recurrent_group (see layers.py for more details) to implement attention
    mechanisms.
Please refer to **Generating Sequences With Recurrent Neural Networks**
for more details about LSTM. The link goes as follows:
.. _Link: https://arxiv.org/abs/1308.0850
.. math::
i_t & = \\sigma(W_{xi}x_{t} + W_{hi}h_{t-1} + W_{ci}c_{t-1} + b_i)
f_t & = \\sigma(W_{xf}x_{t} + W_{hf}h_{t-1} + W_{cf}c_{t-1} + b_f)
c_t & = f_tc_{t-1} + i_t tanh (W_{xc}x_t+W_{hc}h_{t-1} + b_c)
o_t & = \\sigma(W_{xo}x_{t} + W_{ho}h_{t-1} + W_{co}c_t + b_o)
h_t & = o_t tanh(c_t)
The example usage is:
.. code-block:: python
lstm_step = lstmemory_unit(input=[layer1],
size=256,
act=TanhActivation(),
gate_act=SigmoidActivation(),
state_act=TanhActivation())
:param input: input layer name.
:type input: LayerOutput
:param name: lstmemory unit name.
:type name: basestring
:param size: lstmemory unit size.
:type size: int
:param param_attr: Parameter config, None if use default.
:type param_attr: ParameterAttribute
    :param act: lstm final activation type
    :type act: BaseActivation
    :param gate_act: lstm gate activation type
    :type gate_act: BaseActivation
    :param state_act: lstm state activation type.
    :type state_act: BaseActivation
:param mixed_bias_attr: bias parameter attribute of mixed layer.
False means no bias, None means default bias.
:type mixed_bias_attr: ParameterAttribute|False
:param lstm_bias_attr: bias parameter attribute of lstm layer.
False means no bias, None means default bias.
:type lstm_bias_attr: ParameterAttribute|False
:param mixed_layer_attr: mixed layer's extra attribute.
:type mixed_layer_attr: ExtraLayerAttribute
:param lstm_layer_attr: lstm layer's extra attribute.
:type lstm_layer_attr: ExtraLayerAttribute
:param get_output_layer_attr: get output layer's extra attribute.
:type get_output_layer_attr: ExtraLayerAttribute
:return: lstmemory unit name.
:rtype: LayerOutput
"""
if size is None:
assert input.size % 4 == 0
size = input.size / 4
out_mem = memory(name=name, size=size)
state_mem = memory(name="%s_state" % name, size=size)
with mixed_layer(
name="%s_input_recurrent" % name,
size=size * 4,
bias_attr=mixed_bias_attr,
layer_attr=mixed_layer_attr,
act=IdentityActivation()) as m:
m += identity_projection(input=input)
m += full_matrix_projection(input=out_mem, param_attr=param_attr)
lstm_out = lstm_step_layer(
name=name,
input=m,
state=state_mem,
size=size,
bias_attr=lstm_bias_attr,
act=act,
gate_act=gate_act,
state_act=state_act,
layer_attr=lstm_layer_attr)
get_output_layer(
name='%s_state' % name,
input=lstm_out,
arg_name='state',
layer_attr=get_output_layer_attr)
return lstm_out
@wrap_name_default('lstm_group')
def lstmemory_group(input,
size=None,
name=None,
reverse=False,
param_attr=None,
act=None,
gate_act=None,
state_act=None,
mixed_bias_attr=None,
lstm_bias_attr=None,
mixed_layer_attr=None,
lstm_layer_attr=None,
get_output_layer_attr=None):
"""
    lstm_group is a recurrent layer group version of Long Short Term Memory. It
    does exactly the same calculation as the lstmemory layer (see lstmemory in
    layers.py for the maths). A promising benefit is that the LSTM memory
    cell states, or hidden states, in every time step are accessible to the
    user. This is especially useful in attention models. If you do not need to
    access the internal states of the lstm, but merely use its outputs,
    it is recommended to use lstmemory, which is relatively faster than
    lstmemory_group.
NOTE: In PaddlePaddle's implementation, the following input-to-hidden
multiplications:
:math:`W_{xi}x_{t}` , :math:`W_{xf}x_{t}`,
:math:`W_{xc}x_t`, :math:`W_{xo}x_{t}` are not done in lstmemory_unit to
speed up the calculations. Consequently, an additional mixed_layer with
full_matrix_projection must be included before lstmemory_unit is called.
The example usage is:
.. code-block:: python
lstm_step = lstmemory_group(input=[layer1],
size=256,
act=TanhActivation(),
gate_act=SigmoidActivation(),
state_act=TanhActivation())
:param input: input layer name.
:type input: LayerOutput
:param name: lstmemory group name.
:type name: basestring
:param size: lstmemory group size.
:type size: int
:param reverse: is lstm reversed
:type reverse: bool
:param param_attr: Parameter config, None if use default.
:type param_attr: ParameterAttribute
    :param act: lstm final activation type
    :type act: BaseActivation
    :param gate_act: lstm gate activation type
    :type gate_act: BaseActivation
    :param state_act: lstm state activation type.
    :type state_act: BaseActivation
:param mixed_bias_attr: bias parameter attribute of mixed layer.
False means no bias, None means default bias.
:type mixed_bias_attr: ParameterAttribute|False
:param lstm_bias_attr: bias parameter attribute of lstm layer.
False means no bias, None means default bias.
:type lstm_bias_attr: ParameterAttribute|False
:param mixed_layer_attr: mixed layer's extra attribute.
:type mixed_layer_attr: ExtraLayerAttribute
:param lstm_layer_attr: lstm layer's extra attribute.
:type lstm_layer_attr: ExtraLayerAttribute
:param get_output_layer_attr: get output layer's extra attribute.
:type get_output_layer_attr: ExtraLayerAttribute
:return: the lstmemory group.
:rtype: LayerOutput
"""
def __lstm_step__(ipt):
return lstmemory_unit(
input=ipt,
name=name,
size=size,
mixed_bias_attr=mixed_bias_attr,
mixed_layer_attr=mixed_layer_attr,
param_attr=param_attr,
lstm_bias_attr=lstm_bias_attr,
act=act,
gate_act=gate_act,
state_act=state_act,
lstm_layer_attr=lstm_layer_attr,
get_output_layer_attr=get_output_layer_attr)
return recurrent_group(
name='%s_recurrent_group' % name,
step=__lstm_step__,
reverse=reverse,
input=input)
@wrap_name_default('gru_unit')
def gru_unit(input,
size=None,
name=None,
gru_bias_attr=None,
gru_param_attr=None,
act=None,
gate_act=None,
gru_layer_attr=None,
naive=False):
"""
    Define calculations that a gated recurrent unit performs in a single time
    step. This function itself is not a recurrent layer, so it cannot be
    directly applied to sequence input. This function is almost always used in
    the recurrent_group (see layers.py for more details) to implement attention
    mechanisms.
Please see grumemory in layers.py for the details about the maths.
:param input: input layer name.
:type input: LayerOutput
:param name: name of the gru group.
:type name: basestring
:param size: hidden size of the gru.
:type size: int
:param act: type of the activation
:type act: BaseActivation
:param gate_act: type of the gate activation
:type gate_act: BaseActivation
    :param gru_layer_attr: extra attribute of the gru layer.
    :type gru_layer_attr: ExtraLayerAttribute
:return: the gru output layer.
:rtype: LayerOutput
"""
assert input.size % 3 == 0
if size is None:
size = input.size / 3
out_mem = memory(name=name, size=size)
if naive:
__step__ = gru_step_naive_layer
else:
__step__ = gru_step_layer
gru_out = __step__(
name=name,
input=input,
output_mem=out_mem,
size=size,
bias_attr=gru_bias_attr,
param_attr=gru_param_attr,
act=act,
gate_act=gate_act,
layer_attr=gru_layer_attr)
return gru_out
@wrap_name_default('gru_group')
def gru_group(input,
size=None,
name=None,
reverse=False,
gru_bias_attr=None,
gru_param_attr=None,
act=None,
gate_act=None,
gru_layer_attr=None,
naive=False):
"""
    gru_group is a recurrent layer group version of the Gated Recurrent Unit. It
    does exactly the same calculation as the grumemory layer. A promising
    benefit is that the gru hidden states are accessible to the user. This is
    especially useful in attention models. If you do not need to access
    any internal state, but merely use the outputs of a GRU, it is recommended
    to use grumemory, which is relatively faster.
Please see grumemory in layers.py for more detail about the maths.
The example usage is:
.. code-block:: python
        gru = gru_group(input=[layer1],
size=256,
act=TanhActivation(),
gate_act=SigmoidActivation())
:param input: input layer name.
:type input: LayerOutput
:param name: name of the gru group.
:type name: basestring
:param size: hidden size of the gru.
:type size: int
:param reverse: whether to process the input data in a reverse order
:type reverse: bool
    :param act: type of the activation
    :type act: BaseActivation
    :param gate_act: type of the gate activation
    :type gate_act: BaseActivation
    :param gru_bias_attr: bias. False means no bias, None means default bias.
    :type gru_bias_attr: ParameterAttribute|False
    :param gru_layer_attr: extra attribute of the gru layer.
    :type gru_layer_attr: ExtraLayerAttribute
:return: the gru group.
:rtype: LayerOutput
"""
def __gru_step__(ipt):
return gru_unit(
input=ipt,
name=name,
size=size,
gru_bias_attr=gru_bias_attr,
gru_param_attr=gru_param_attr,
act=act,
gate_act=gate_act,
gru_layer_attr=gru_layer_attr,
naive=naive)
return recurrent_group(
name='%s_recurrent_group' % name,
step=__gru_step__,
reverse=reverse,
input=input)
@wrap_name_default('simple_gru')
def simple_gru(input,
size,
name=None,
reverse=False,
mixed_param_attr=None,
mixed_bias_param_attr=None,
mixed_layer_attr=None,
gru_bias_attr=None,
gru_param_attr=None,
act=None,
gate_act=None,
gru_layer_attr=None,
naive=False):
"""
    You may see gru_step_layer and grumemory in layers.py, and gru_unit,
    gru_group and simple_gru in networks.py. The reason there are so many
    interfaces is that we have two ways to implement recurrent neural
    networks. One way is to use one complete layer to implement the rnn
    (including simple rnn, gru and lstm) over multiple time steps, such as
    recurrent_layer, lstmemory and grumemory. However, the multiplication
    operation :math:`W x_t` is not computed in these layers.
    See details in their interfaces in layers.py.
    The other implementation is to use a recurrent group, which can assemble a
    series of layers to compute the rnn step by step. This way is flexible for
    attention mechanisms or other complex connections.
    - gru_step_layer: computes the rnn for a single step. It needs a memory as
      input and can be used in a recurrent group.
    - gru_unit: a wrapper of gru_step_layer with memory.
    - gru_group: a GRU cell implemented by a combination of multiple layers in
      a recurrent group, but :math:`W x_t` is not done in the group.
    - grumemory: a GRU cell implemented by one layer, which does the same
      calculation as gru_group and is faster than gru_group.
    - simple_gru: a complete GRU implementation including :math:`W x_t` and
      gru_group. :math:`W` contains :math:`W_r`, :math:`W_z` and :math:`W`, see
      the formula in grumemory.
    In terms of computational speed, grumemory is faster than gru_group, and
    gru_group is faster than simple_gru.
The example usage is:
.. code-block:: python
gru = simple_gru(input=[layer1], size=256)
:param input: input layer name.
:type input: LayerOutput
:param name: name of the gru group.
:type name: basestring
:param size: hidden size of the gru.
:type size: int
:param reverse: whether to process the input data in a reverse order
:type reverse: bool
    :param act: type of the activation
    :type act: BaseActivation
    :param gate_act: type of the gate activation
    :type gate_act: BaseActivation
    :param gru_bias_attr: bias. False means no bias, None means default bias.
    :type gru_bias_attr: ParameterAttribute|False
    :param gru_layer_attr: extra attribute of the gru layer.
    :type gru_layer_attr: ExtraLayerAttribute
:return: the gru group.
:rtype: LayerOutput
"""
with mixed_layer(
name='%s_transform' % name,
size=size * 3,
bias_attr=mixed_bias_param_attr,
layer_attr=mixed_layer_attr) as m:
m += full_matrix_projection(input=input, param_attr=mixed_param_attr)
return gru_group(
name=name,
size=size,
input=m,
reverse=reverse,
gru_bias_attr=gru_bias_attr,
gru_param_attr=gru_param_attr,
act=act,
gate_act=gate_act,
gru_layer_attr=gru_layer_attr,
naive=naive)
@wrap_name_default('simple_gru2')
def simple_gru2(input,
size,
name=None,
reverse=False,
mixed_param_attr=None,
mixed_bias_attr=None,
gru_param_attr=None,
gru_bias_attr=None,
act=None,
gate_act=None,
mixed_layer_attr=None,
gru_cell_attr=None):
"""
    simple_gru2 is the same as simple_gru, but uses grumemory instead.
Please see grumemory in layers.py for more detail about the maths.
simple_gru2 is faster than simple_gru.
The example usage is:
.. code-block:: python
gru = simple_gru2(input=[layer1], size=256)
:param input: input layer name.
:type input: LayerOutput
:param name: name of the gru group.
:type name: basestring
:param size: hidden size of the gru.
:type size: int
:param reverse: whether to process the input data in a reverse order
:type reverse: bool
    :param act: type of the activation
    :type act: BaseActivation
    :param gate_act: type of the gate activation
    :type gate_act: BaseActivation
    :param gru_bias_attr: bias. False means no bias, None means default bias.
    :type gru_bias_attr: ParameterAttribute|False
    :param gru_cell_attr: extra attribute of the gru layer.
    :type gru_cell_attr: ExtraLayerAttribute
:return: the gru group.
:rtype: LayerOutput
"""
with mixed_layer(
name='%s_transform' % name,
size=size * 3,
bias_attr=mixed_bias_attr,
layer_attr=mixed_layer_attr) as m:
m += full_matrix_projection(input=input, param_attr=mixed_param_attr)
return grumemory(
name=name,
size=size,
input=m,
reverse=reverse,
bias_attr=gru_bias_attr,
param_attr=gru_param_attr,
act=act,
gate_act=gate_act,
layer_attr=gru_cell_attr)
@wrap_name_default("bidirectional_gru")
def bidirectional_gru(input,
size,
name=None,
return_seq=False,
fwd_mixed_param_attr=None,
fwd_mixed_bias_attr=None,
fwd_gru_param_attr=None,
fwd_gru_bias_attr=None,
fwd_act=None,
fwd_gate_act=None,
fwd_mixed_layer_attr=None,
fwd_gru_cell_attr=None,
bwd_mixed_param_attr=None,
bwd_mixed_bias_attr=None,
bwd_gru_param_attr=None,
bwd_gru_bias_attr=None,
bwd_act=None,
bwd_gate_act=None,
bwd_mixed_layer_attr=None,
bwd_gru_cell_attr=None,
last_seq_attr=None,
first_seq_attr=None,
concat_attr=None,
concat_act=None):
"""
    A bidirectional_gru is a recurrent unit that iterates over the input
    sequence both in forward and backward order, and then concatenates the two
    outputs to form a final output. However, concatenation of the two outputs
    is not the only way to form the final output; you can also, for example,
    just add them together.
The example usage is:
.. code-block:: python
bi_gru = bidirectional_gru(input=[input1], size=512)
:param name: bidirectional gru layer name.
:type name: basestring
:param input: input layer.
:type input: LayerOutput
:param size: gru layer size.
:type size: int
:param return_seq: If set False, outputs of the last time step are
concatenated and returned.
If set True, the entire output sequences that are
processed in forward and backward directions are
concatenated and returned.
:type return_seq: bool
:return: LayerOutput object.
:rtype: LayerOutput
"""
args = locals()
fw = simple_gru2(
name='%s_fw' % name,
input=input,
size=size,
**dict((k[len('fwd_'):], v) for k, v in args.iteritems()
if k.startswith('fwd_')))
bw = simple_gru2(
name="%s_bw" % name,
input=input,
size=size,
reverse=True,
**dict((k[len('bwd_'):], v) for k, v in args.iteritems()
if k.startswith('bwd_')))
if return_seq:
return concat_layer(
name=name, input=[fw, bw], layer_attr=concat_attr, act=concat_act)
else:
fw_seq = last_seq(
name="%s_fw_last" % name, input=fw, layer_attr=last_seq_attr)
bw_seq = first_seq(
name="%s_bw_last" % name, input=bw, layer_attr=first_seq_attr)
return concat_layer(
name=name,
input=[fw_seq, bw_seq],
layer_attr=concat_attr,
act=concat_act)
@wrap_name_default("bidirectional_lstm")
def bidirectional_lstm(input,
size,
name=None,
return_seq=False,
fwd_mat_param_attr=None,
fwd_bias_param_attr=None,
fwd_inner_param_attr=None,
fwd_act=None,
fwd_gate_act=None,
fwd_state_act=None,
fwd_mixed_layer_attr=None,
fwd_lstm_cell_attr=None,
bwd_mat_param_attr=None,
bwd_bias_param_attr=None,
bwd_inner_param_attr=None,
bwd_act=None,
bwd_gate_act=None,
bwd_state_act=None,
bwd_mixed_layer_attr=None,
bwd_lstm_cell_attr=None,
last_seq_attr=None,
first_seq_attr=None,
concat_attr=None,
concat_act=None):
"""
    A bidirectional_lstm is a recurrent unit that iterates over the input
    sequence both in forward and backward order, and then concatenates the two
    outputs to form a final output. However, concatenation of the two outputs
    is not the only way to form the final output; you can also, for example,
    just add them together.
Please refer to **Neural Machine Translation by Jointly Learning to Align
and Translate** for more details about the bidirectional lstm.
The link goes as follows:
.. _Link: https://arxiv.org/pdf/1409.0473v3.pdf
The example usage is:
.. code-block:: python
bi_lstm = bidirectional_lstm(input=[input1], size=512)
:param name: bidirectional lstm layer name.
:type name: basestring
:param input: input layer.
:type input: LayerOutput
:param size: lstm layer size.
:type size: int
:param return_seq: If set False, outputs of the last time step are
concatenated and returned.
If set True, the entire output sequences that are
processed in forward and backward directions are
concatenated and returned.
:type return_seq: bool
    :return: LayerOutput object according to return_seq.
:rtype: LayerOutput
"""
args = locals()
fw = simple_lstm(
name='%s_fw' % name,
input=input,
size=size,
**dict((k[len('fwd_'):], v) for k, v in args.iteritems()
if k.startswith('fwd_')))
bw = simple_lstm(
name="%s_bw" % name,
input=input,
size=size,
reverse=True,
**dict((k[len('bwd_'):], v) for k, v in args.iteritems()
if k.startswith('bwd_')))
if return_seq:
return concat_layer(
name=name, input=[fw, bw], layer_attr=concat_attr, act=concat_act)
else:
fw_seq = last_seq(
name="%s_fw_last" % name, input=fw, layer_attr=last_seq_attr)
bw_seq = first_seq(
name="%s_bw_last" % name, input=bw, layer_attr=first_seq_attr)
return concat_layer(
name=name,
input=[fw_seq, bw_seq],
layer_attr=concat_attr,
act=concat_act)
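# Hypothetical usage sketch: a sentence encoder built on bidirectional_lstm.
# With return_seq=False, the last forward state and the first backward state
# are concatenated into a single vector. Sizes are illustrative assumptions.
def _bidirectional_lstm_example():
    tokens = data_layer(name='tokens', size=30000)
    emb = embedding_layer(input=tokens, size=256)
    return bidirectional_lstm(input=emb, size=128, return_seq=False)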
@wrap_name_default()
@wrap_act_default(param_names=['weight_act'], act=TanhActivation())
def simple_attention(encoded_sequence,
encoded_proj,
decoder_state,
transform_param_attr=None,
softmax_param_attr=None,
weight_act=None,
name=None):
"""
    Calculate and then return a context vector using an attention mechanism.
    The size of the context vector equals the size of the encoded_sequence.
.. math::
a(s_{i-1},h_{j}) & = v_{a}f(W_{a}s_{t-1} + U_{a}h_{j})
e_{i,j} & = a(s_{i-1}, h_{j})
a_{i,j} & = \\frac{exp(e_{i,j})}{\\sum_{k=1}^{T_x}{exp(e_{i,k})}}
c_{i} & = \\sum_{j=1}^{T_{x}}a_{i,j}h_{j}
    where :math:`h_{j}` is the jth element of encoded_sequence,
    :math:`U_{a}h_{j}` is the jth element of encoded_proj,
    :math:`s_{i-1}` is decoder_state, and
    :math:`f` is weight_act, which is set to tanh by default.
Please refer to **Neural Machine Translation by Jointly Learning to
Align and Translate** for more details. The link is as follows:
https://arxiv.org/abs/1409.0473.
The example usage is:
.. code-block:: python
context = simple_attention(encoded_sequence=enc_seq,
encoded_proj=enc_proj,
decoder_state=decoder_prev,)
:param name: name of the attention model.
:type name: basestring
:param softmax_param_attr: parameter attribute of sequence softmax
that is used to produce attention weight
:type softmax_param_attr: ParameterAttribute
:param weight_act: activation of the attention model
:type weight_act: Activation
:param encoded_sequence: output of the encoder
:type encoded_sequence: LayerOutput
:param encoded_proj: attention weight is computed by a feed forward neural
network which has two inputs : decoder's hidden state
of previous time step and encoder's output.
encoded_proj is output of the feed-forward network for
encoder's output. Here we pre-compute it outside
simple_attention for speed consideration.
:type encoded_proj: LayerOutput
:param decoder_state: hidden state of decoder in previous time step
:type decoder_state: LayerOutput
:param transform_param_attr: parameter attribute of the feed-forward
network that takes decoder_state as inputs to
compute attention weight.
:type transform_param_attr: ParameterAttribute
:return: a context vector
"""
assert encoded_proj.size == decoder_state.size
proj_size = encoded_proj.size
with mixed_layer(size=proj_size, name="%s_transform" % name) as m:
m += full_matrix_projection(
decoder_state, param_attr=transform_param_attr)
expanded = expand_layer(
input=m, expand_as=encoded_sequence, name='%s_expand' % name)
with mixed_layer(
size=proj_size, act=weight_act, name="%s_combine" % name) as m:
m += identity_projection(expanded)
m += identity_projection(encoded_proj)
# sequence softmax is used to normalize similarities between decoder state
# and encoder outputs into a distribution
attention_weight = fc_layer(
input=m,
size=1,
act=SequenceSoftmaxActivation(),
param_attr=softmax_param_attr,
name="%s_softmax" % name,
bias_attr=False)
scaled = scaling_layer(
weight=attention_weight,
input=encoded_sequence,
name='%s_scaling' % name)
return pooling_layer(
input=scaled, pooling_type=SumPooling(), name="%s_pooling" % name)
############################################################################
# Miscs #
############################################################################
@wrap_name_default("dropout")
def dropout_layer(input, dropout_rate, name=None):
"""
    Dropout layer: randomly zeroes input units with probability dropout_rate
    during training. Implemented as an addto_layer with a drop_rate extra
    attribute and linear activation.
    :param name: layer name.
    :param input: input layer.
    :param dropout_rate: probability of dropping out each unit.
    :return: LayerOutput object.
"""
return addto_layer(
name=name,
input=input,
act=LinearActivation(),
bias_attr=False,
layer_attr=ExtraAttr(drop_rate=dropout_rate))
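# Hypothetical usage sketch: dropout applied on top of a hidden fc_layer.
# Layer sizes and the rate are illustrative assumptions.
def _dropout_layer_example():
    ipt = data_layer(name='x', size=100)
    hidden = fc_layer(input=ipt, size=64, act=ReluActivation())
    return dropout_layer(input=hidden, dropout_rate=0.5)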
def inputs(layers, *args):
"""
    Declare the inputs of the network. The order of the inputs should be the
    same as the data provider's return order.
:param layers: Input Layers.
:type layers: list|tuple|LayerOutput.
:return:
"""
if isinstance(layers, LayerOutput) or isinstance(layers, basestring):
layers = [layers]
if len(args) != 0:
layers.extend(args)
Inputs(*[l.name for l in layers])
def outputs(layers, *args):
"""
    Declare the outputs of the network. If the user has not defined the inputs
    of the network, this method will calculate the input order by DFS traversal.
:param layers: Output layers.
:type layers: list|tuple|LayerOutput
:return:
"""
def __dfs_travel__(layer,
predicate=lambda x: x.layer_type == LayerType.DATA):
"""
        DFS post-order (LRV) traversal from an output layer.
        The return order is the definition order of the data_layers reachable
        from this leaf node.
:param layer:
:type layer: LayerOutput
:return:
"""
assert isinstance(layer, LayerOutput), "layer is %s" % (layer)
retv = []
if layer.parents is not None:
for p in layer.parents:
retv.extend(__dfs_travel__(p, predicate))
if predicate(layer):
retv.append(layer)
return retv
if isinstance(layers, LayerOutput):
layers = [layers]
if len(args) != 0:
layers.extend(args)
assert len(layers) > 0
if HasInputsSet(): # input already set
Outputs(*[l.name for l in layers])
return # just return outputs.
if len(layers) != 1:
logger.warning("`outputs` routine try to calculate network's"
" inputs and outputs order. It might not work well."
"Please see follow log carefully.")
inputs = []
outputs_ = []
for each_layer in layers:
assert isinstance(each_layer, LayerOutput)
inputs.extend(__dfs_travel__(each_layer))
outputs_.extend(
__dfs_travel__(each_layer,
lambda x: x.layer_type == LayerType.COST))
    # At this point we have each leaf node's input order and output order;
    # merge them together.
final_inputs = []
final_outputs = []
for each_input in inputs:
assert isinstance(each_input, LayerOutput)
if each_input.name not in final_inputs:
final_inputs.append(each_input.name)
for each_output in outputs_:
assert isinstance(each_output, LayerOutput)
if each_output.name not in final_outputs:
final_outputs.append(each_output.name)
logger.info("".join(["The input order is [", ", ".join(final_inputs), "]"]))
if len(final_outputs) == 0:
final_outputs = map(lambda x: x.name, layers)
logger.info("".join(
["The output order is [", ", ".join(final_outputs), "]"]))
Inputs(*final_inputs)
Outputs(*final_outputs)
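# Hypothetical usage sketch: declaring the network boundary with the helpers
# above. Names and sizes are illustrative; calling inputs()/outputs()
# explicitly avoids the DFS fallback described in outputs().
def _inputs_outputs_example():
    x = data_layer(name='x', size=10)
    pred = fc_layer(input=x, size=1, act=LinearActivation())
    inputs(x)
    outputs(pred)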
| {
"content_hash": "bdc915f5482c6cbce7c2e85c01a38f8a",
"timestamp": "",
"source": "github",
"line_count": 1478,
"max_line_length": 82,
"avg_line_length": 34.971583220568334,
"alnum_prop": 0.5747368828354744,
"repo_name": "cxysteven/Paddle",
"id": "fb533a47e0b0585be6f0e019086993f8b3aa7f38",
"size": "52297",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "python/paddle/trainer_config_helpers/networks.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "226899"
},
{
"name": "C++",
"bytes": "3159636"
},
{
"name": "CMake",
"bytes": "123472"
},
{
"name": "CSS",
"bytes": "21730"
},
{
"name": "Cuda",
"bytes": "511529"
},
{
"name": "HTML",
"bytes": "8941"
},
{
"name": "JavaScript",
"bytes": "1025"
},
{
"name": "Perl",
"bytes": "11452"
},
{
"name": "Protocol Buffer",
"bytes": "43771"
},
{
"name": "Python",
"bytes": "1022473"
},
{
"name": "Shell",
"bytes": "109781"
}
],
"symlink_target": ""
} |
import unittest
import test_collective_api_base as test_base
class TestCollectiveReduceScatterAPI(test_base.TestDistBase):
def _setup_config(self):
pass
def test_reduce_scatter_nccl_dygraph(self):
dtypes_to_test = [
"float16",
"float32",
"float64",
"int32",
"int64",
"int8",
"uint8",
"bool",
]
if self._nccl_version >= 2100:
dtypes_to_test.append("bfloat16")
for dtype in dtypes_to_test:
self.check_with_place(
"collective_reduce_scatter_api_dygraph.py",
"reduce_scatter",
"nccl",
static_mode="0",
dtype=dtype,
)
if __name__ == "__main__":
unittest.main()
| {
"content_hash": "4c5497619ee49f4aef566532f3e5af7a",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 61,
"avg_line_length": 25,
"alnum_prop": 0.4860606060606061,
"repo_name": "luotao1/Paddle",
"id": "26dfbd2fe13bbc73ac193d708ef4263c0ad201d0",
"size": "1436",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "python/paddle/fluid/tests/unittests/collective/test_collective_reduce_scatter_api.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "58544"
},
{
"name": "C",
"bytes": "210300"
},
{
"name": "C++",
"bytes": "36771446"
},
{
"name": "CMake",
"bytes": "903079"
},
{
"name": "Cuda",
"bytes": "5200715"
},
{
"name": "Dockerfile",
"bytes": "4361"
},
{
"name": "Go",
"bytes": "49796"
},
{
"name": "Java",
"bytes": "16630"
},
{
"name": "Jinja",
"bytes": "23852"
},
{
"name": "MLIR",
"bytes": "39982"
},
{
"name": "Python",
"bytes": "36248258"
},
{
"name": "R",
"bytes": "1332"
},
{
"name": "Shell",
"bytes": "553175"
}
],
"symlink_target": ""
} |
"""Test client utility functions."""
import exceptions
import imp
import os
import sys
import tempfile
import time
import mox
from grr.client import client_utils_common
from grr.client import client_utils_linux
from grr.client import client_utils_osx
from grr.lib import flags
from grr.lib import rdfvalue
from grr.lib import test_lib
def GetVolumePathName(_):
return "C:\\"
def GetVolumeNameForVolumeMountPoint(_):
return "\\\\?\\Volume{11111}\\"
class ClientUtilsTest(test_lib.GRRBaseTest):
"""Test the client utils."""
def testLinGetRawDevice(self):
"""Test the parser for linux mounts."""
proc_mounts = """rootfs / rootfs rw 0 0
none /sys sysfs rw,nosuid,nodev,noexec,relatime 0 0
none /proc proc rw,nosuid,nodev,noexec,relatime 0 0
none /dev devtmpfs rw,relatime,size=4056920k,nr_inodes=1014230,mode=755 0 0
none /dev/pts devpts rw,nosuid,noexec,relatime,gid=5,mode=620,ptmxmode=000 0 0
/dev/mapper/root / ext4 rw,relatime,errors=remount-ro,barrier=1,data=ordered 0 0
none /sys/fs/fuse/connections fusectl rw,relatime 0 0
none /sys/kernel/debug debugfs rw,relatime 0 0
none /sys/kernel/security securityfs rw,relatime 0 0
none /dev/shm tmpfs rw,nosuid,nodev,relatime 0 0
none /var/run tmpfs rw,nosuid,relatime 0 0
none /var/lock tmpfs rw,nosuid,nodev,noexec,relatime 0 0
none /lib/init/rw tmpfs rw,nosuid,relatime,mode=755 0 0
/dev/sda1 /boot ext2 rw,relatime,errors=continue 0 0
/dev/mapper/usr /usr/local/ ext4 rw,relatime,barrier=1,data=writeback 0 0
binfmt_misc /proc/sys/fs/binfmt_misc binfmt_misc rw,nosuid,relatime 0 0
server.nfs:/vol/home /home/user nfs rw,nosuid,relatime 0 0
"""
mountpoints = client_utils_linux.GetMountpoints(proc_mounts)
def GetMountpointsMock():
return mountpoints
old_getmountpoints = client_utils_linux.GetMountpoints
client_utils_linux.GetMountpoints = GetMountpointsMock
for filename, expected_device, expected_path, device_type in [
("/etc/passwd", "/dev/mapper/root", "/etc/passwd",
rdfvalue.PathSpec.PathType.OS),
("/usr/local/bin/ls", "/dev/mapper/usr", "/bin/ls",
rdfvalue.PathSpec.PathType.OS),
("/proc/net/sys", "none", "/net/sys",
rdfvalue.PathSpec.PathType.UNSET),
("/home/user/test.txt", "server.nfs:/vol/home", "/test.txt",
rdfvalue.PathSpec.PathType.UNSET)]:
raw_pathspec, path = client_utils_linux.LinGetRawDevice(
filename)
self.assertEqual(expected_device, raw_pathspec.path)
self.assertEqual(device_type, raw_pathspec.pathtype)
self.assertEqual(expected_path, path)
client_utils_linux.GetMountpoints = old_getmountpoints
def testWinSplitPathspec(self):
"""Test windows split pathspec functionality."""
self.SetupWinEnvironment()
# We need to import after SetupWinEnvironment or this will fail
# pylint: disable=g-import-not-at-top
from grr.client import client_utils_windows
# pylint: enable=g-import-not-at-top
testdata = [(r"C:\Windows", "\\\\?\\Volume{11111}", "/Windows"),
(r"C:\\Windows\\", "\\\\?\\Volume{11111}", "/Windows"),
(r"C:\\", "\\\\?\\Volume{11111}", "/"),
]
for filename, expected_device, expected_path in testdata:
raw_pathspec, path = client_utils_windows.WinGetRawDevice(filename)
# Pathspec paths are always absolute and therefore must have a leading /.
self.assertEqual("/" + expected_device, raw_pathspec.path)
self.assertEqual(expected_path, path)
def SetupWinEnvironment(self):
"""Mock windows includes."""
winreg = imp.new_module("_winreg")
winreg.error = exceptions.Exception
sys.modules["_winreg"] = winreg
ntsecuritycon = imp.new_module("ntsecuritycon")
sys.modules["ntsecuritycon"] = ntsecuritycon
pywintypes = imp.new_module("pywintypes")
pywintypes.error = Exception
sys.modules["pywintypes"] = pywintypes
winfile = imp.new_module("win32file")
winfile.GetVolumeNameForVolumeMountPoint = GetVolumeNameForVolumeMountPoint
winfile.GetVolumePathName = GetVolumePathName
sys.modules["win32file"] = winfile
win32security = imp.new_module("win32security")
sys.modules["win32security"] = win32security
win32api = imp.new_module("win32api")
sys.modules["win32api"] = win32api
win32service = imp.new_module("win32service")
sys.modules["win32service"] = win32service
win32serviceutil = imp.new_module("win32serviceutil")
sys.modules["win32serviceutil"] = win32serviceutil
winerror = imp.new_module("winerror")
sys.modules["winerror"] = winerror
def testExecutionWhiteList(self):
"""Test if unknown commands are filtered correctly."""
# ls is not allowed
(stdout, stderr, status, _) = client_utils_common.Execute("ls",
["."])
self.assertEqual(status, -1)
self.assertEqual(stdout, "")
self.assertEqual(stderr, "Execution disallowed by whitelist.")
# "echo 1" is
(stdout, stderr, status, _) = client_utils_common.Execute("/bin/echo",
["1"])
self.assertEqual(status, 0)
self.assertEqual(stdout, "1\n")
self.assertEqual(stderr, "")
# but not "echo 11"
(stdout, stderr, status, _) = client_utils_common.Execute("/bin/echo",
["11"])
self.assertEqual(status, -1)
self.assertEqual(stdout, "")
self.assertEqual(stderr, "Execution disallowed by whitelist.")
def AppendTo(self, list_obj, element):
list_obj.append(element)
def testExecutionTimeLimit(self):
"""Test if the time limit works."""
(_, _, _, time_used) = client_utils_common.Execute("/bin/sleep", ["10"], 1)
# This should take just a bit longer than one second.
self.assertTrue(time_used < 2.0)
def testLinuxNanny(self):
"""Tests the linux nanny."""
self.exit_called = False
# Mock out the exit call.
old_exit = os._exit
try:
nanny_controller = client_utils_linux.NannyController()
nanny_controller.StartNanny(unresponsive_kill_period=0.5)
def MockExit(value):
self.exit_called = value
# Kill the nanny thread.
raise RuntimeError("Nannythread exiting.")
os._exit = MockExit
for _ in range(10):
        # Unfortunately we really need to sleep because we can't mock out
# time.time.
time.sleep(0.1)
nanny_controller.Heartbeat()
self.assertEqual(self.exit_called, False)
# Main thread sleeps for long enough for the nanny to fire.
time.sleep(1)
self.assertEqual(self.exit_called, -1)
nanny_controller.StopNanny()
finally:
os._exit = old_exit
def testLinuxNannyLog(self):
"""Tests the linux nanny transaction log."""
with tempfile.NamedTemporaryFile() as fd:
nanny_controller = client_utils_linux.NannyController()
nanny_controller.StartNanny(nanny_logfile=fd.name)
grr_message = rdfvalue.GrrMessage(session_id="W:test")
nanny_controller.WriteTransactionLog(grr_message)
self.assertProtoEqual(grr_message, nanny_controller.GetTransactionLog())
nanny_controller.CleanTransactionLog()
self.assert_(nanny_controller.GetTransactionLog() is None)
nanny_controller.StopNanny()
class OSXVersionTests(test_lib.GRRBaseTest):
def setUp(self):
super(OSXVersionTests, self).setUp()
self.mox = mox.Mox()
self.mac_ver = ("10.8.1", ("", "", ""), "x86_64")
self.mox.StubOutWithMock(client_utils_osx.platform, "mac_ver")
client_utils_osx.platform.mac_ver().AndReturn(self.mac_ver)
def testVersionAsIntArray(self):
self.mox.ReplayAll()
osversion = client_utils_osx.OSXVersion()
self.assertEqual(osversion.VersionAsMajorMinor(), [10, 8])
self.mox.VerifyAll()
def testVersionString(self):
self.mox.ReplayAll()
osversion = client_utils_osx.OSXVersion()
self.assertEqual(osversion.VersionString(), "10.8.1")
self.mox.VerifyAll()
def testVersionAsFloat(self):
self.mox.ReplayAll()
osversion = client_utils_osx.OSXVersion()
self.assertEqual(osversion.VersionAsFloat(), 10.8)
self.mox.VerifyAll()
def tearDown(self):
self.mox.UnsetStubs()
super(OSXVersionTests, self).tearDown()
def main(argv):
test_lib.main(argv)
if __name__ == "__main__":
flags.StartMain(main)
| {
"content_hash": "0168e60bb26de940bbd4076d696a2117",
"timestamp": "",
"source": "github",
"line_count": 252,
"max_line_length": 80,
"avg_line_length": 33.46031746031746,
"alnum_prop": 0.6731499051233396,
"repo_name": "simsong/grr-insider",
"id": "c63952b22a5f3d76b207590eb2e03dba68b84409",
"size": "8545",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "client/client_utils_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "227"
},
{
"name": "C++",
"bytes": "55149"
},
{
"name": "CSS",
"bytes": "36308"
},
{
"name": "JavaScript",
"bytes": "679269"
},
{
"name": "Python",
"bytes": "3553249"
},
{
"name": "Shell",
"bytes": "30813"
}
],
"symlink_target": ""
} |
__author__ = 'Sidath'
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
import Quiz
app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:////tmp/test.db'
db = SQLAlchemy(app)
class Quiz(db.Model):
id = db.Column(db.Integer, primary_key=True)
username = db.Column(db.String(80), unique=True)
email = db.Column(db.String(120), unique=True)
def __init__(self, username, email):
self.username = username
self.email = email
def __repr__(self):
return '<User %r>' % self.username
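# Illustrative sketch (hypothetical usage, not from the original file):
# exercises the model above, assuming the db.Model inheritance fixed above
# and a writable /tmp/test.db. Names and values here are made up.
if __name__ == '__main__':
    db.create_all()
    quiz = Quiz(username='sidath', email='sidath@example.com')
    db.session.add(quiz)
    db.session.commit()
    print(Quiz.query.filter_by(username='sidath').first())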
| {
"content_hash": "42a5ccca07d2fb09621814a874132528",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 64,
"avg_line_length": 29.842105263157894,
"alnum_prop": 0.6278659611992945,
"repo_name": "SGCreations/kdumooc",
"id": "264932d37aab69dde45356856ae72e87a7a33cea",
"size": "567",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Python app/database.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "779613"
},
{
"name": "HTML",
"bytes": "3090892"
},
{
"name": "JavaScript",
"bytes": "3070494"
},
{
"name": "PHP",
"bytes": "1017131"
},
{
"name": "PLpgSQL",
"bytes": "50617"
},
{
"name": "Python",
"bytes": "7127"
},
{
"name": "Shell",
"bytes": "4440"
}
],
"symlink_target": ""
} |
from flexmock import EXACTLY
from flexmock import AT_LEAST
from flexmock import AT_MOST
from flexmock import UPDATED_ATTRS
from flexmock import Mock
from flexmock import MockBuiltinError
from flexmock import FlexmockContainer
from flexmock import FlexmockError
from flexmock import MethodSignatureError
from flexmock import ExceptionClassError
from flexmock import ExceptionMessageError
from flexmock import StateError
from flexmock import MethodCallError
from flexmock import CallOrderError
from flexmock import ReturnValue
from flexmock import flexmock_teardown
from flexmock import _format_args
from flexmock import _isproperty
import flexmock
import re
import sys
import unittest
def module_level_function(some, args):
return "%s, %s" % (some, args)
module_level_attribute = 'test'
class OldStyleClass:
pass
class NewStyleClass(object):
pass
def assertRaises(exception, method, *kargs, **kwargs):
try:
method(*kargs, **kwargs)
except exception:
return
except:
instance = sys.exc_info()[1]
print('%s' % instance)
raise Exception('%s not raised' % exception.__name__)
def assertEqual(expected, received, msg=''):
if not msg:
msg = 'expected %s, received %s' % (expected, received)
if expected != received:
raise AssertionError('%s != %s : %s' % (expected, received, msg))
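# These module-level helpers mirror unittest's assertEqual/assertRaises so
# that the tests in RegularClass below stay runner-agnostic; RegularClass is
# only mixed into unittest at the bottom of the file via
# TestFlexmockUnittest(RegularClass, unittest.TestCase).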
class RegularClass(object):
def _tear_down(self):
return flexmock_teardown()
def test_flexmock_should_create_mock_object(self):
mock = flexmock()
assert isinstance(mock, Mock)
def test_flexmock_should_create_mock_object_from_dict(self):
mock = flexmock(foo='foo', bar='bar')
assertEqual('foo', mock.foo)
assertEqual('bar', mock.bar)
def test_flexmock_should_add_expectations(self):
mock = flexmock(name='temp')
mock.should_receive('method_foo')
assert ('method_foo' in
[x.name for x in FlexmockContainer.flexmock_objects[mock]])
def test_flexmock_should_return_value(self):
mock = flexmock(name='temp')
mock.should_receive('method_foo').and_return('value_bar')
mock.should_receive('method_bar').and_return('value_baz')
assertEqual('value_bar', mock.method_foo())
assertEqual('value_baz', mock.method_bar())
def test_type_flexmock_with_unicode_string_in_should_receive(self):
class Foo(object):
def bar(self): return 'bar'
flexmock(Foo).should_receive(u'bar').and_return('mocked_bar')
foo = Foo()
assertEqual('mocked_bar', foo.bar())
def test_flexmock_should_accept_shortcuts_for_creating_mock_object(self):
mock = flexmock(attr1='value 1', attr2=lambda: 'returning 2')
assertEqual('value 1', mock.attr1)
assertEqual('returning 2', mock.attr2())
def test_flexmock_should_accept_shortcuts_for_creating_expectations(self):
class Foo:
def method1(self): pass
def method2(self): pass
foo = Foo()
flexmock(foo, method1='returning 1', method2='returning 2')
assertEqual('returning 1', foo.method1())
assertEqual('returning 2', foo.method2())
assertEqual('returning 2', foo.method2())
def test_flexmock_expectations_returns_all(self):
mock = flexmock(name='temp')
assert mock not in FlexmockContainer.flexmock_objects
mock.should_receive('method_foo')
mock.should_receive('method_bar')
assertEqual(2, len(FlexmockContainer.flexmock_objects[mock]))
def test_flexmock_expectations_returns_named_expectation(self):
mock = flexmock(name='temp')
mock.should_receive('method_foo')
assertEqual('method_foo',
FlexmockContainer.get_flexmock_expectation(
mock, 'method_foo').name)
def test_flexmock_expectations_returns_none_if_not_found(self):
mock = flexmock(name='temp')
mock.should_receive('method_foo')
assert (FlexmockContainer.get_flexmock_expectation(
mock, 'method_bar') is None)
def test_flexmock_should_check_parameters(self):
mock = flexmock(name='temp')
mock.should_receive('method_foo').with_args('bar').and_return(1)
mock.should_receive('method_foo').with_args('baz').and_return(2)
assertEqual(1, mock.method_foo('bar'))
assertEqual(2, mock.method_foo('baz'))
def test_flexmock_should_keep_track_of_calls(self):
mock = flexmock(name='temp')
mock.should_receive('method_foo').with_args('foo').and_return(0)
mock.should_receive('method_foo').with_args('bar').and_return(1)
mock.should_receive('method_foo').with_args('baz').and_return(2)
mock.method_foo('bar')
mock.method_foo('bar')
mock.method_foo('baz')
expectation = FlexmockContainer.get_flexmock_expectation(
mock, 'method_foo', ('foo',))
assertEqual(0, expectation.times_called)
expectation = FlexmockContainer.get_flexmock_expectation(
mock, 'method_foo', ('bar',))
assertEqual(2, expectation.times_called)
expectation = FlexmockContainer.get_flexmock_expectation(
mock, 'method_foo', ('baz',))
assertEqual(1, expectation.times_called)
def test_flexmock_should_set_expectation_call_numbers(self):
mock = flexmock(name='temp')
mock.should_receive('method_foo').times(1)
expectation = FlexmockContainer.get_flexmock_expectation(mock, 'method_foo')
assertRaises(MethodCallError, expectation.verify)
mock.method_foo()
expectation.verify()
def test_flexmock_should_check_raised_exceptions(self):
mock = flexmock(name='temp')
class FakeException(Exception):
pass
mock.should_receive('method_foo').and_raise(FakeException)
assertRaises(FakeException, mock.method_foo)
assertEqual(1,
FlexmockContainer.get_flexmock_expectation(
mock, 'method_foo').times_called)
def test_flexmock_should_check_raised_exceptions_instance_with_args(self):
mock = flexmock(name='temp')
class FakeException(Exception):
def __init__(self, arg, arg2):
pass
mock.should_receive('method_foo').and_raise(FakeException(1, arg2=2))
assertRaises(FakeException, mock.method_foo)
assertEqual(1,
FlexmockContainer.get_flexmock_expectation(
mock, 'method_foo').times_called)
def test_flexmock_should_check_raised_exceptions_class_with_args(self):
mock = flexmock(name='temp')
class FakeException(Exception):
def __init__(self, arg, arg2):
pass
mock.should_receive('method_foo').and_raise(FakeException, 1, arg2=2)
assertRaises(FakeException, mock.method_foo)
assertEqual(1,
FlexmockContainer.get_flexmock_expectation(
mock, 'method_foo').times_called)
def test_flexmock_should_match_any_args_by_default(self):
mock = flexmock(name='temp')
mock.should_receive('method_foo').and_return('bar')
mock.should_receive('method_foo').with_args('baz').and_return('baz')
assertEqual('bar', mock.method_foo())
assertEqual('bar', mock.method_foo(1))
assertEqual('bar', mock.method_foo('foo', 'bar'))
assertEqual('baz', mock.method_foo('baz'))
def test_flexmock_should_fail_to_match_exactly_no_args_when_calling_with_args(self):
mock = flexmock()
mock.should_receive('method_foo').with_args()
assertRaises(MethodSignatureError, mock.method_foo, 'baz')
def test_flexmock_should_match_exactly_no_args(self):
class Foo:
def bar(self): pass
foo = Foo()
flexmock(foo).should_receive('bar').with_args().and_return('baz')
assertEqual('baz', foo.bar())
def test_expectation_dot_mock_should_return_mock(self):
mock = flexmock(name='temp')
assertEqual(mock, mock.should_receive('method_foo').mock)
def test_flexmock_should_create_partial_new_style_object_mock(self):
class User(object):
def __init__(self, name=None):
self.name = name
def get_name(self):
return self.name
def set_name(self, name):
self.name = name
user = User()
flexmock(user)
user.should_receive('get_name').and_return('john')
user.set_name('mike')
assertEqual('john', user.get_name())
def test_flexmock_should_create_partial_old_style_object_mock(self):
class User:
def __init__(self, name=None):
self.name = name
def get_name(self):
return self.name
def set_name(self, name):
self.name = name
user = User()
flexmock(user)
user.should_receive('get_name').and_return('john')
user.set_name('mike')
assertEqual('john', user.get_name())
def test_flexmock_should_create_partial_new_style_class_mock(self):
class User(object):
def __init__(self): pass
def get_name(self): pass
flexmock(User)
User.should_receive('get_name').and_return('mike')
user = User()
assertEqual('mike', user.get_name())
def test_flexmock_should_create_partial_old_style_class_mock(self):
class User:
def __init__(self): pass
def get_name(self): pass
flexmock(User)
User.should_receive('get_name').and_return('mike')
user = User()
assertEqual('mike', user.get_name())
def test_flexmock_should_match_expectations_against_builtin_classes(self):
mock = flexmock(name='temp')
mock.should_receive('method_foo').with_args(str).and_return('got a string')
mock.should_receive('method_foo').with_args(int).and_return('got an int')
assertEqual('got a string', mock.method_foo('string!'))
assertEqual('got an int', mock.method_foo(23))
assertRaises(MethodSignatureError, mock.method_foo, 2.0)
def test_flexmock_should_match_expectations_against_user_defined_classes(self):
mock = flexmock(name='temp')
class Foo:
pass
mock.should_receive('method_foo').with_args(Foo).and_return('got a Foo')
assertEqual('got a Foo', mock.method_foo(Foo()))
assertRaises(MethodSignatureError, mock.method_foo, 1)
def test_flexmock_configures_global_mocks_dict(self):
mock = flexmock(name='temp')
assert mock not in FlexmockContainer.flexmock_objects
mock.should_receive('method_foo')
assert mock in FlexmockContainer.flexmock_objects
assertEqual(len(FlexmockContainer.flexmock_objects[mock]), 1)
def test_flexmock_teardown_verifies_mocks(self):
mock = flexmock(name='temp')
mock.should_receive('verify_expectations').times(1)
assertRaises(MethodCallError, self._tear_down)
def test_flexmock_teardown_does_not_verify_stubs(self):
mock = flexmock(name='temp')
mock.should_receive('verify_expectations')
self._tear_down()
def test_flexmock_preserves_stubbed_object_methods_between_tests(self):
class User:
def get_name(self):
return 'mike'
user = User()
flexmock(user).should_receive('get_name').and_return('john')
assertEqual('john', user.get_name())
self._tear_down()
assertEqual('mike', user.get_name())
def test_flexmock_preserves_stubbed_class_methods_between_tests(self):
class User:
def get_name(self):
return 'mike'
user = User()
flexmock(User).should_receive('get_name').and_return('john')
assertEqual('john', user.get_name())
self._tear_down()
assertEqual('mike', user.get_name())
def test_flexmock_removes_new_stubs_from_objects_after_tests(self):
class User:
def get_name(self): pass
user = User()
saved = user.get_name
flexmock(user).should_receive('get_name').and_return('john')
assert saved != user.get_name
assertEqual('john', user.get_name())
self._tear_down()
assertEqual(saved, user.get_name)
def test_flexmock_removes_new_stubs_from_classes_after_tests(self):
class User:
def get_name(self): pass
user = User()
saved = user.get_name
flexmock(User).should_receive('get_name').and_return('john')
assert saved != user.get_name
assertEqual('john', user.get_name())
self._tear_down()
assertEqual(saved, user.get_name)
def test_flexmock_removes_stubs_from_multiple_objects_on_teardown(self):
class User:
def get_name(self): pass
class Group:
def get_name(self): pass
user = User()
    group = Group()
saved1 = user.get_name
saved2 = group.get_name
flexmock(user).should_receive('get_name').and_return('john').once()
flexmock(group).should_receive('get_name').and_return('john').once()
assert saved1 != user.get_name
assert saved2 != group.get_name
assertEqual('john', user.get_name())
assertEqual('john', group.get_name())
self._tear_down()
assertEqual(saved1, user.get_name)
assertEqual(saved2, group.get_name)
def test_flexmock_removes_stubs_from_multiple_classes_on_teardown(self):
class User:
def get_name(self): pass
class Group:
def get_name(self): pass
user = User()
    group = Group()
saved1 = user.get_name
saved2 = group.get_name
flexmock(User).should_receive('get_name').and_return('john')
flexmock(Group).should_receive('get_name').and_return('john')
assert saved1 != user.get_name
assert saved2 != group.get_name
assertEqual('john', user.get_name())
assertEqual('john', group.get_name())
self._tear_down()
assertEqual(saved1, user.get_name)
assertEqual(saved2, group.get_name)
def test_flexmock_respects_at_least_when_called_less_than_requested(self):
mock = flexmock(name='temp')
mock.should_receive('method_foo').and_return('bar').at_least().twice()
expectation = FlexmockContainer.get_flexmock_expectation(mock, 'method_foo')
assertEqual(AT_LEAST, expectation.modifier)
mock.method_foo()
assertRaises(MethodCallError, self._tear_down)
def test_flexmock_respects_at_least_when_called_requested_number(self):
mock = flexmock(name='temp')
mock.should_receive('method_foo').and_return('value_bar').at_least().once()
expectation = FlexmockContainer.get_flexmock_expectation(mock, 'method_foo')
assertEqual(AT_LEAST, expectation.modifier)
mock.method_foo()
self._tear_down()
def test_flexmock_respects_at_least_when_called_more_than_requested(self):
mock = flexmock(name='temp')
mock.should_receive('method_foo').and_return('value_bar').at_least().once()
expectation = FlexmockContainer.get_flexmock_expectation(mock, 'method_foo')
assertEqual(AT_LEAST, expectation.modifier)
mock.method_foo()
mock.method_foo()
self._tear_down()
def test_flexmock_respects_at_most_when_called_less_than_requested(self):
mock = flexmock(name='temp')
mock.should_receive('method_foo').and_return('bar').at_most().twice()
expectation = FlexmockContainer.get_flexmock_expectation(mock, 'method_foo')
assertEqual(AT_MOST, expectation.modifier)
mock.method_foo()
self._tear_down()
def test_flexmock_respects_at_most_when_called_requested_number(self):
mock = flexmock(name='temp')
mock.should_receive('method_foo').and_return('value_bar').at_most().once()
expectation = FlexmockContainer.get_flexmock_expectation(mock, 'method_foo')
assertEqual(AT_MOST, expectation.modifier)
mock.method_foo()
self._tear_down()
def test_flexmock_respects_at_most_when_called_more_than_requested(self):
mock = flexmock(name='temp')
mock.should_receive('method_foo').and_return('value_bar').at_most().once()
expectation = FlexmockContainer.get_flexmock_expectation(mock, 'method_foo')
assertEqual(AT_MOST, expectation.modifier)
mock.method_foo()
assertRaises(MethodCallError, mock.method_foo)
def test_flexmock_treats_once_as_times_one(self):
mock = flexmock(name='temp')
mock.should_receive('method_foo').and_return('value_bar').once()
expectation = FlexmockContainer.get_flexmock_expectation(mock, 'method_foo')
assertEqual(1, expectation.expected_calls[EXACTLY])
assertRaises(MethodCallError, self._tear_down)
def test_flexmock_treats_twice_as_times_two(self):
mock = flexmock(name='temp')
mock.should_receive('method_foo').twice().and_return('value_bar')
expectation = FlexmockContainer.get_flexmock_expectation(mock, 'method_foo')
assertEqual(2, expectation.expected_calls[EXACTLY])
assertRaises(MethodCallError, self._tear_down)
def test_flexmock_works_with_never_when_true(self):
mock = flexmock(name='temp')
mock.should_receive('method_foo').and_return('value_bar').never
expectation = FlexmockContainer.get_flexmock_expectation(mock, 'method_foo')
assertEqual(0, expectation.expected_calls[EXACTLY])
self._tear_down()
def test_flexmock_works_with_never_when_false(self):
mock = flexmock(name='temp')
mock.should_receive('method_foo').and_return('value_bar').never
assertRaises(MethodCallError, mock.method_foo)
def test_flexmock_get_flexmock_expectation_should_work_with_args(self):
mock = flexmock(name='temp')
mock.should_receive('method_foo').with_args('value_bar')
assert FlexmockContainer.get_flexmock_expectation(
mock, 'method_foo', 'value_bar')
def test_flexmock_function_should_return_previously_mocked_object(self):
class User(object): pass
user = User()
foo = flexmock(user)
assert foo == user
assert foo == flexmock(user)
def test_flexmock_should_not_return_class_object_if_mocking_instance(self):
class User:
def method(self): pass
user = User()
user2 = User()
class_mock = flexmock(User).should_receive(
'method').and_return('class').mock
user_mock = flexmock(user).should_receive(
'method').and_return('instance').mock
assert class_mock is not user_mock
assertEqual('instance', user.method())
assertEqual('class', user2.method())
def test_should_call_on_class_mock(self):
class User:
def foo(self): return 'class'
user1 = User()
user2 = User()
flexmock(User).should_call('foo').once()
assertRaises(MethodCallError, self._tear_down)
flexmock(User).should_call('foo').twice()
assertEqual('class', user1.foo())
assertEqual('class', user2.foo())
def test_flexmock_should_not_blow_up_on_should_call_for_class_methods(self):
class User:
@classmethod
def foo(self):
return 'class'
flexmock(User).should_call('foo')
assertEqual('class', User.foo())
def test_flexmock_should_not_blow_up_on_should_call_for_static_methods(self):
class User:
@staticmethod
def foo():
return 'static'
flexmock(User).should_call('foo')
assertEqual('static', User.foo())
def test_flexmock_should_mock_new_instances_with_multiple_params(self):
class User(object): pass
class Group(object):
def __init__(self, arg, arg2):
pass
user = User()
flexmock(Group).new_instances(user)
assert user is Group(1, 2)
def test_flexmock_should_revert_new_instances_on_teardown(self):
class User(object): pass
class Group(object): pass
user = User()
group = Group()
flexmock(Group).new_instances(user)
assert user is Group()
self._tear_down()
assertEqual(group.__class__, Group().__class__)
def test_flexmock_should_cleanup_added_methods_and_attributes(self):
class Group(object): pass
group = Group()
flexmock(Group)
assert 'should_receive' in Group.__dict__
assert 'should_receive' not in group.__dict__
flexmock(group)
assert 'should_receive' in group.__dict__
self._tear_down()
for method in UPDATED_ATTRS:
assert method not in Group.__dict__
assert method not in group.__dict__
def test_flexmock_should_cleanup_after_exception(self):
class User:
def method2(self): pass
class Group:
def method1(self): pass
flexmock(Group)
flexmock(User)
Group.should_receive('method1').once()
User.should_receive('method2').once()
assertRaises(MethodCallError, self._tear_down)
for method in UPDATED_ATTRS:
assert method not in dir(Group)
for method in UPDATED_ATTRS:
assert method not in dir(User)
def test_flexmock_should_call_respects_matched_expectations(self):
class Group(object):
def method1(self, arg1, arg2='b'):
return '%s:%s' % (arg1, arg2)
def method2(self, arg):
return arg
group = Group()
flexmock(group).should_call('method1').twice()
assertEqual('a:c', group.method1('a', arg2='c'))
assertEqual('a:b', group.method1('a'))
group.should_call('method2').once().with_args('c')
assertEqual('c', group.method2('c'))
self._tear_down()
def test_flexmock_should_call_respects_unmatched_expectations(self):
class Group(object):
def method1(self, arg1, arg2='b'):
return '%s:%s' % (arg1, arg2)
def method2(self, a): pass
group = Group()
flexmock(group).should_call('method1').at_least().once()
assertRaises(MethodCallError, self._tear_down)
flexmock(group)
group.should_call('method2').with_args('a').once()
group.should_receive('method2').with_args('not a')
group.method2('not a')
assertRaises(MethodCallError, self._tear_down)
def test_flexmock_doesnt_error_on_properly_ordered_expectations(self):
class Foo(object):
def foo(self): pass
def method1(self, a): pass
def bar(self): pass
def baz(self): pass
foo = Foo()
flexmock(foo).should_receive('foo')
flexmock(foo).should_receive('method1').with_args('a').ordered()
flexmock(foo).should_receive('bar')
flexmock(foo).should_receive('method1').with_args('b').ordered()
flexmock(foo).should_receive('baz')
foo.bar()
foo.method1('a')
foo.method1('b')
foo.baz()
foo.foo()
def test_flexmock_errors_on_improperly_ordered_expectations(self):
class Foo(object):
def method1(self, a): pass
foo = Foo()
flexmock(foo)
foo.should_receive('method1').with_args('a').ordered()
foo.should_receive('method1').with_args('b').ordered()
assertRaises(CallOrderError, foo.method1, 'b')
def test_flexmock_should_accept_multiple_return_values(self):
class Foo:
def method1(self): pass
foo = Foo()
flexmock(foo).should_receive('method1').and_return(1, 5).and_return(2)
assertEqual((1, 5), foo.method1())
assertEqual(2, foo.method1())
assertEqual((1, 5), foo.method1())
assertEqual(2, foo.method1())
def test_flexmock_should_accept_multiple_return_values_with_shortcut(self):
class Foo:
def method1(self): pass
foo = Foo()
flexmock(foo).should_receive('method1').and_return(1, 2).one_by_one()
assertEqual(1, foo.method1())
assertEqual(2, foo.method1())
assertEqual(1, foo.method1())
assertEqual(2, foo.method1())
def test_flexmock_should_mix_multiple_return_values_with_exceptions(self):
class Foo:
def method1(self): pass
foo = Foo()
flexmock(foo).should_receive('method1').and_return(1).and_raise(Exception)
assertEqual(1, foo.method1())
assertRaises(Exception, foo.method1)
assertEqual(1, foo.method1())
assertRaises(Exception, foo.method1)
def test_flexmock_should_match_types_on_multiple_arguments(self):
class Foo:
def method1(self, a, b): pass
foo = Foo()
flexmock(foo).should_receive('method1').with_args(str, int).and_return('ok')
assertEqual('ok', foo.method1('some string', 12))
assertRaises(MethodSignatureError, foo.method1, 12, 32)
flexmock(foo).should_receive('method1').with_args(str, int).and_return('ok')
assertRaises(MethodSignatureError, foo.method1, 12, 'some string')
flexmock(foo).should_receive('method1').with_args(str, int).and_return('ok')
assertRaises(MethodSignatureError, foo.method1, 'string', 12, 14)
def test_flexmock_should_match_types_on_multiple_arguments_generic(self):
class Foo:
def method1(self, a, b, c): pass
foo = Foo()
flexmock(foo).should_receive('method1').with_args(
object, object, object).and_return('ok')
assertEqual('ok', foo.method1('some string', None, 12))
assertEqual('ok', foo.method1((1,), None, 12))
assertEqual('ok', foo.method1(12, 14, []))
assertEqual('ok', foo.method1('some string', 'another one', False))
assertRaises(MethodSignatureError, foo.method1, 'string', 12)
flexmock(foo).should_receive('method1').with_args(
object, object, object).and_return('ok')
assertRaises(MethodSignatureError, foo.method1, 'string', 12, 13, 14)
def test_flexmock_should_match_types_on_multiple_arguments_classes(self):
class Foo:
def method1(self, a, b): pass
class Bar: pass
foo = Foo()
bar = Bar()
flexmock(foo).should_receive('method1').with_args(
object, Bar).and_return('ok')
assertEqual('ok', foo.method1('some string', bar))
assertRaises(MethodSignatureError, foo.method1, bar, 'some string')
flexmock(foo).should_receive('method1').with_args(
object, Bar).and_return('ok')
assertRaises(MethodSignatureError, foo.method1, 12, 'some string')
def test_flexmock_should_match_keyword_arguments(self):
class Foo:
def method1(self, a, **kwargs): pass
foo = Foo()
flexmock(foo).should_receive('method1').with_args(1, arg3=3, arg2=2).twice()
foo.method1(1, arg2=2, arg3=3)
foo.method1(1, arg3=3, arg2=2)
self._tear_down()
flexmock(foo).should_receive('method1').with_args(1, arg3=3, arg2=2)
assertRaises(MethodSignatureError, foo.method1, arg2=2, arg3=3)
flexmock(foo).should_receive('method1').with_args(1, arg3=3, arg2=2)
assertRaises(MethodSignatureError, foo.method1, 1, arg2=2, arg3=4)
flexmock(foo).should_receive('method1').with_args(1, arg3=3, arg2=2)
assertRaises(MethodSignatureError, foo.method1, 1)
def test_flexmock_should_call_should_match_keyword_arguments(self):
class Foo:
def method1(self, arg1, arg2=None, arg3=None):
return '%s%s%s' % (arg1, arg2, arg3)
foo = Foo()
flexmock(foo).should_call('method1').with_args(1, arg3=3, arg2=2).once()
assertEqual('123', foo.method1(1, arg2=2, arg3=3))
def test_flexmock_should_mock_private_methods(self):
class Foo:
def __private_method(self):
return 'foo'
def public_method(self):
return self.__private_method()
foo = Foo()
flexmock(foo).should_receive('__private_method').and_return('bar')
assertEqual('bar', foo.public_method())
def test_flexmock_should_mock_special_methods(self):
class Foo:
def __special_method__(self):
return 'foo'
def public_method(self):
return self.__special_method__()
foo = Foo()
flexmock(foo).should_receive('__special_method__').and_return('bar')
assertEqual('bar', foo.public_method())
def test_flexmock_should_mock_double_underscore_method(self):
class Foo:
def __(self):
return 'foo'
def public_method(self):
return self.__()
foo = Foo()
flexmock(foo).should_receive('__').and_return('bar')
assertEqual('bar', foo.public_method())
def test_flexmock_should_mock_private_class_methods(self):
class Foo:
def __iter__(self): pass
flexmock(Foo).should_receive('__iter__').and_yield(1, 2, 3)
assertEqual([1, 2, 3], [x for x in Foo()])
def test_flexmock_should_mock_iter_on_new_style_instances(self):
class Foo(object):
def __iter__(self):
yield None
old = Foo.__iter__
foo = Foo()
foo2 = Foo()
foo3 = Foo()
flexmock(foo, __iter__=iter([1, 2, 3]))
flexmock(foo2, __iter__=iter([3, 4, 5]))
assertEqual([1, 2, 3], [x for x in foo])
assertEqual([3, 4, 5], [x for x in foo2])
assertEqual([None], [x for x in foo3])
assertEqual(False, foo.__iter__ == old)
assertEqual(False, foo2.__iter__ == old)
assertEqual(False, foo3.__iter__ == old)
self._tear_down()
assertEqual([None], [x for x in foo])
assertEqual([None], [x for x in foo2])
assertEqual([None], [x for x in foo3])
assertEqual(True, Foo.__iter__ == old, '%s != %s' % (Foo.__iter__, old))
def test_flexmock_should_mock_private_methods_with_leading_underscores(self):
class _Foo:
def __stuff(self): pass
def public_method(self):
return self.__stuff()
foo = _Foo()
flexmock(foo).should_receive('__stuff').and_return('bar')
assertEqual('bar', foo.public_method())
def test_flexmock_should_mock_generators(self):
class Gen:
def foo(self): pass
gen = Gen()
flexmock(gen).should_receive('foo').and_yield(*range(1, 10))
output = [val for val in gen.foo()]
assertEqual([val for val in range(1, 10)], output)
def test_flexmock_should_verify_correct_spy_return_values(self):
class User:
def get_stuff(self): return 'real', 'stuff'
user = User()
flexmock(user).should_call('get_stuff').and_return('real', 'stuff')
assertEqual(('real', 'stuff'), user.get_stuff())
def test_flexmock_should_verify_correct_spy_regexp_return_values(self):
class User:
def get_stuff(self): return 'real', 'stuff'
user = User()
flexmock(user).should_call('get_stuff').and_return(
re.compile('ea.*'), re.compile('^stuff$'))
assertEqual(('real', 'stuff'), user.get_stuff())
def test_flexmock_should_verify_spy_raises_correct_exception_class(self):
class FakeException(Exception):
def __init__(self, param, param2):
self.message = '%s, %s' % (param, param2)
Exception.__init__(self)
class User:
def get_stuff(self): raise FakeException(1, 2)
user = User()
flexmock(user).should_call('get_stuff').and_raise(FakeException, 1, 2)
user.get_stuff()
def test_flexmock_should_verify_spy_matches_exception_message(self):
class FakeException(Exception):
def __init__(self, param, param2):
self.p1 = param
self.p2 = param2
Exception.__init__(self, param)
def __str__(self):
return '%s, %s' % (self.p1, self.p2)
class User:
def get_stuff(self): raise FakeException('1', '2')
user = User()
flexmock(user).should_call('get_stuff').and_raise(FakeException, '2', '1')
assertRaises(ExceptionMessageError, user.get_stuff)
def test_flexmock_should_verify_spy_matches_exception_regexp(self):
class User:
def get_stuff(self): raise Exception('123asdf345')
user = User()
flexmock(user).should_call(
'get_stuff').and_raise(Exception, re.compile('asdf'))
user.get_stuff()
self._tear_down()
def test_flexmock_should_verify_spy_matches_exception_regexp_mismatch(self):
class User:
def get_stuff(self): raise Exception('123asdf345')
user = User()
flexmock(user).should_call(
'get_stuff').and_raise(Exception, re.compile('^asdf'))
assertRaises(ExceptionMessageError, user.get_stuff)
def test_flexmock_should_blow_up_on_wrong_spy_exception_type(self):
class User:
def get_stuff(self): raise CallOrderError('foo')
user = User()
flexmock(user).should_call('get_stuff').and_raise(MethodCallError)
assertRaises(ExceptionClassError, user.get_stuff)
def test_flexmock_should_match_spy_exception_parent_type(self):
class User:
def get_stuff(self): raise CallOrderError('foo')
user = User()
flexmock(user).should_call('get_stuff').and_raise(FlexmockError)
user.get_stuff()
def test_flexmock_should_blow_up_on_wrong_spy_return_values(self):
class User:
def get_stuff(self): return 'real', 'stuff'
def get_more_stuff(self): return 'other', 'stuff'
user = User()
flexmock(user).should_call('get_stuff').and_return('other', 'stuff')
assertRaises(MethodSignatureError, user.get_stuff)
flexmock(user).should_call('get_more_stuff').and_return()
assertRaises(MethodSignatureError, user.get_more_stuff)
def test_flexmock_should_mock_same_class_twice(self):
class Foo: pass
flexmock(Foo)
flexmock(Foo)
def test_flexmock_spy_should_not_clobber_original_method(self):
class User:
def get_stuff(self): return 'real', 'stuff'
user = User()
flexmock(user).should_call('get_stuff')
flexmock(user).should_call('get_stuff')
assertEqual(('real', 'stuff'), user.get_stuff())
def test_flexmock_should_properly_restore_static_methods(self):
class User:
@staticmethod
def get_stuff(): return 'ok!'
assertEqual('ok!', User.get_stuff())
flexmock(User).should_receive('get_stuff')
assert User.get_stuff() is None
self._tear_down()
assertEqual('ok!', User.get_stuff())
def test_flexmock_should_properly_restore_undecorated_static_methods(self):
class User:
def get_stuff(): return 'ok!'
get_stuff = staticmethod(get_stuff)
assertEqual('ok!', User.get_stuff())
flexmock(User).should_receive('get_stuff')
assert User.get_stuff() is None
self._tear_down()
assertEqual('ok!', User.get_stuff())
def test_flexmock_should_properly_restore_module_level_functions(self):
if 'flexmock_test' in sys.modules:
mod = sys.modules['flexmock_test']
else:
mod = sys.modules['__main__']
flexmock(mod).should_receive('module_level_function').with_args(1, 2)
assertEqual(None, module_level_function(1, 2))
self._tear_down()
assertEqual('1, 2', module_level_function(1, 2))
def test_module_level_function_with_kwargs(self):
if 'flexmock_test' in sys.modules:
mod = sys.modules['flexmock_test']
else:
mod = sys.modules['__main__']
flexmock(mod).should_receive('module_level_function').with_args(
1, args="expected")
assertRaises(FlexmockError, module_level_function, 1, args="not expected")
def test_flexmock_should_support_mocking_old_style_classes_as_functions(self):
if 'flexmock_test' in sys.modules:
mod = sys.modules['flexmock_test']
else:
mod = sys.modules['__main__']
flexmock(mod).should_receive('OldStyleClass').and_return('yay')
assertEqual('yay', OldStyleClass())
def test_flexmock_should_support_mocking_new_style_classes_as_functions(self):
if 'flexmock_test' in sys.modules:
mod = sys.modules['flexmock_test']
else:
mod = sys.modules['__main__']
flexmock(mod).should_receive('NewStyleClass').and_return('yay')
assertEqual('yay', NewStyleClass())
def test_flexmock_should_properly_restore_class_methods(self):
class User:
@classmethod
def get_stuff(cls):
return cls.__name__
assertEqual('User', User.get_stuff())
flexmock(User).should_receive('get_stuff').and_return('foo')
assertEqual('foo', User.get_stuff())
self._tear_down()
assertEqual('User', User.get_stuff())
def test_spy_should_match_return_value_class(self):
class User: pass
user = User()
foo = flexmock(foo=lambda: ('bar', 'baz'),
bar=lambda: user,
baz=lambda: None,
bax=lambda: None)
foo.should_call('foo').and_return(str, str)
foo.should_call('bar').and_return(User)
foo.should_call('baz').and_return(object)
foo.should_call('bax').and_return(None)
assertEqual(('bar', 'baz'), foo.foo())
assertEqual(user, foo.bar())
assertEqual(None, foo.baz())
assertEqual(None, foo.bax())
def test_spy_should_not_match_falsy_stuff(self):
class Foo:
def foo(self): return None
def bar(self): return False
def baz(self): return []
def quux(self): return ''
foo = Foo()
flexmock(foo).should_call('foo').and_return('bar').once
flexmock(foo).should_call('bar').and_return('bar').once
flexmock(foo).should_call('baz').and_return('bar').once
flexmock(foo).should_call('quux').and_return('bar').once
assertRaises(FlexmockError, foo.foo)
assertRaises(FlexmockError, foo.bar)
assertRaises(FlexmockError, foo.baz)
assertRaises(FlexmockError, foo.quux)
def test_new_instances_should_blow_up_on_should_receive(self):
class User(object): pass
mock = flexmock(User).new_instances(None).mock
assertRaises(FlexmockError, mock.should_receive, 'foo')
def test_should_call_alias_should_create_a_spy(self):
class Foo:
def get_stuff(self):
return 'yay'
foo = Foo()
flexmock(foo).should_call('get_stuff').and_return('yay').once()
assertRaises(MethodCallError, self._tear_down)
def test_flexmock_should_fail_mocking_nonexistent_methods(self):
class User: pass
user = User()
assertRaises(FlexmockError,
flexmock(user).should_receive, 'nonexistent')
def test_flexmock_should_not_explode_on_unicode_formatting(self):
if sys.version_info >= (3, 0):
formatted = _format_args(
'method', {'kargs' : (chr(0x86C7),), 'kwargs' : {}})
assertEqual('method("蛇")', formatted)
else:
formatted = _format_args(
'method', {'kargs' : (unichr(0x86C7),), 'kwargs' : {}})
assertEqual('method("%s")' % unichr(0x86C7), formatted)
def test_return_value_should_not_explode_on_unicode_values(self):
class Foo:
def method(self): pass
if sys.version_info >= (3, 0):
return_value = ReturnValue(chr(0x86C7))
assertEqual('"蛇"', '%s' % return_value)
return_value = ReturnValue((chr(0x86C7), chr(0x86C7)))
assertEqual('("蛇", "蛇")', '%s' % return_value)
else:
return_value = ReturnValue(unichr(0x86C7))
assertEqual('"%s"' % unichr(0x86C7), unicode(return_value))
def test_pass_thru_should_call_original_method_only_once(self):
class Nyan(object):
def __init__(self):
self.n = 0
def method(self):
self.n += 1
obj = Nyan()
flexmock(obj)
obj.should_call('method')
obj.method()
assertEqual(obj.n, 1)
def test_should_call_works_for_same_method_with_different_args(self):
class Foo:
def method(self, arg):
pass
foo = Foo()
flexmock(foo).should_call('method').with_args('foo').once()
flexmock(foo).should_call('method').with_args('bar').once()
foo.method('foo')
foo.method('bar')
self._tear_down()
def test_should_call_fails_properly_for_same_method_with_different_args(self):
class Foo:
def method(self, arg):
pass
foo = Foo()
flexmock(foo).should_call('method').with_args('foo').once()
flexmock(foo).should_call('method').with_args('bar').once()
foo.method('foo')
assertRaises(MethodCallError, self._tear_down)
def test_should_give_reasonable_error_for_builtins(self):
assertRaises(MockBuiltinError, flexmock, object)
def test_should_give_reasonable_error_for_instances_of_builtins(self):
assertRaises(MockBuiltinError, flexmock, object())
def test_mock_chained_method_calls_works_with_one_level(self):
class Foo:
def method2(self):
return 'foo'
class Bar:
def method1(self):
return Foo()
foo = Bar()
assertEqual('foo', foo.method1().method2())
flexmock(foo).should_receive('method1.method2').and_return('bar')
assertEqual('bar', foo.method1().method2())
def test_mock_chained_method_supports_args_and_mocks(self):
class Foo:
def method2(self, arg):
return arg
class Bar:
def method1(self):
return Foo()
foo = Bar()
assertEqual('foo', foo.method1().method2('foo'))
flexmock(foo).should_receive('method1.method2').with_args(
'foo').and_return('bar').once()
assertEqual('bar', foo.method1().method2('foo'))
self._tear_down()
flexmock(foo).should_receive('method1.method2').with_args(
'foo').and_return('bar').once()
assertRaises(MethodCallError, self._tear_down)
def test_mock_chained_method_calls_works_with_more_than_one_level(self):
class Baz:
def method3(self):
return 'foo'
class Foo:
def method2(self):
return Baz()
class Bar:
def method1(self):
return Foo()
foo = Bar()
assertEqual('foo', foo.method1().method2().method3())
flexmock(foo).should_receive('method1.method2.method3').and_return('bar')
assertEqual('bar', foo.method1().method2().method3())
def test_flexmock_should_replace_method(self):
class Foo:
def method(self, arg):
return arg
foo = Foo()
flexmock(foo).should_receive('method').replace_with(lambda x: x == 5)
assertEqual(foo.method(5), True)
assertEqual(foo.method(4), False)
def test_flexmock_should_replace_cannot_be_specified_twice(self):
class Foo:
def method(self, arg):
return arg
foo = Foo()
expectation = flexmock(foo).should_receive(
'method').replace_with(lambda x: x == 5)
assertRaises(FlexmockError,
expectation.replace_with, lambda x: x == 3)
def test_flexmock_should_mock_the_same_method_multiple_times(self):
class Foo:
def method(self): pass
foo = Foo()
flexmock(foo).should_receive('method').and_return(1)
assertEqual(foo.method(), 1)
flexmock(foo).should_receive('method').and_return(2)
assertEqual(foo.method(), 2)
flexmock(foo).should_receive('method').and_return(3)
assertEqual(foo.method(), 3)
flexmock(foo).should_receive('method').and_return(4)
assertEqual(foo.method(), 4)
def test_new_instances_should_be_a_method(self):
class Foo(object): pass
flexmock(Foo).new_instances('bar')
assertEqual('bar', Foo())
self._tear_down()
assert 'bar' != Foo()
def test_new_instances_raises_error_when_not_a_class(self):
class Foo(object): pass
foo = Foo()
flexmock(foo)
assertRaises(FlexmockError, foo.new_instances, 'bar')
def test_new_instances_works_with_multiple_return_values(self):
class Foo(object): pass
flexmock(Foo).new_instances('foo', 'bar')
assertEqual('foo', Foo())
assertEqual('bar', Foo())
def test_should_receive_should_not_replace_flexmock_methods(self):
class Foo:
def bar(self): pass
foo = Foo()
flexmock(foo)
assertRaises(FlexmockError, foo.should_receive, 'should_receive')
def test_flexmock_should_not_add_methods_if_they_already_exist(self):
class Foo:
def should_receive(self):
return 'real'
def bar(self): pass
foo = Foo()
mock = flexmock(foo)
assertEqual(foo.should_receive(), 'real')
assert 'should_call' not in dir(foo)
assert 'new_instances' not in dir(foo)
mock.should_receive('bar').and_return('baz')
assertEqual(foo.bar(), 'baz')
self._tear_down()
assertEqual(foo.should_receive(), 'real')
def test_flexmock_should_not_add_class_methods_if_they_already_exist(self):
class Foo:
def should_receive(self):
return 'real'
def bar(self): pass
foo = Foo()
mock = flexmock(Foo)
assertEqual(foo.should_receive(), 'real')
assert 'should_call' not in dir(Foo)
assert 'new_instances' not in dir(Foo)
mock.should_receive('bar').and_return('baz')
assertEqual(foo.bar(), 'baz')
self._tear_down()
assertEqual(foo.should_receive(), 'real')
def test_expectation_properties_work_with_parens(self):
foo = flexmock().should_receive(
'bar').at_least().once().and_return('baz').mock()
assertEqual('baz', foo.bar())
def test_mocking_down_the_inheritance_chain_class_to_class(self):
class Parent(object):
def foo(self): pass
class Child(Parent):
def bar(self): pass
flexmock(Parent).should_receive('foo').and_return('outer')
flexmock(Child).should_receive('bar').and_return('inner')
    assertEqual('outer', Parent().foo())
    assertEqual('inner', Child().bar())
def test_arg_matching_works_with_regexp(self):
class Foo:
def foo(self, arg1, arg2): pass
foo = Foo()
flexmock(foo).should_receive('foo').with_args(
re.compile('^arg1.*asdf$'), arg2=re.compile('f')).and_return('mocked')
assertEqual('mocked', foo.foo('arg1somejunkasdf', arg2='aadsfdas'))
def test_arg_matching_with_regexp_fails_when_regexp_doesnt_match_karg(self):
class Foo:
def foo(self, arg1, arg2): pass
foo = Foo()
flexmock(foo).should_receive('foo').with_args(
re.compile('^arg1.*asdf$'), arg2=re.compile('a')).and_return('mocked')
assertRaises(MethodSignatureError, foo.foo, 'arg1somejunkasdfa', arg2='a')
def test_arg_matching_with_regexp_fails_when_regexp_doesnt_match_kwarg(self):
class Foo:
def foo(self, arg1, arg2): pass
foo = Foo()
flexmock(foo).should_receive('foo').with_args(
re.compile('^arg1.*asdf$'), arg2=re.compile('a')).and_return('mocked')
assertRaises(MethodSignatureError, foo.foo, 'arg1somejunkasdf', arg2='b')
def test_flexmock_class_returns_same_object_on_repeated_calls(self):
class Foo: pass
a = flexmock(Foo)
b = flexmock(Foo)
assertEqual(a, b)
def test_flexmock_object_returns_same_object_on_repeated_calls(self):
class Foo: pass
foo = Foo()
a = flexmock(foo)
b = flexmock(foo)
assertEqual(a, b)
def test_flexmock_ordered_worked_after_default_stub(self):
foo = flexmock()
foo.should_receive('bar')
foo.should_receive('bar').with_args('a').ordered()
foo.should_receive('bar').with_args('b').ordered()
assertRaises(CallOrderError, foo.bar, 'b')
def test_flexmock_ordered_works_with_same_args(self):
foo = flexmock()
foo.should_receive('bar').ordered().and_return(1)
foo.should_receive('bar').ordered().and_return(2)
a = foo.bar()
assertEqual(a, 1)
b = foo.bar()
assertEqual(b, 2)
def test_flexmock_ordered_works_with_same_args_after_default_stub(self):
foo = flexmock()
foo.should_receive('bar').and_return(9)
foo.should_receive('bar').ordered().and_return(1)
foo.should_receive('bar').ordered().and_return(2)
a = foo.bar()
assertEqual(a, 1)
b = foo.bar()
assertEqual(b, 2)
c = foo.bar()
assertEqual(c, 9)
def test_state_machine(self):
class Radio:
def __init__(self): self.is_on = False
def switch_on(self): self.is_on = True
def switch_off(self): self.is_on = False
def select_channel(self): return None
def adjust_volume(self, num): self.volume = num
radio = Radio()
flexmock(radio)
radio.should_receive('select_channel').once().when(
lambda: radio.is_on)
radio.should_call('adjust_volume').once().with_args(5).when(
lambda: radio.is_on)
assertRaises(StateError, radio.select_channel)
assertRaises(StateError, radio.adjust_volume, 5)
radio.is_on = True
radio.select_channel()
radio.adjust_volume(5)
def test_support_at_least_and_at_most_together(self):
class Foo:
def bar(self): pass
foo = Foo()
flexmock(foo).should_call('bar').at_least().once().at_most().twice()
assertRaises(MethodCallError, self._tear_down)
flexmock(foo).should_call('bar').at_least().once().at_most().twice()
foo.bar()
foo.bar()
assertRaises(MethodCallError, foo.bar)
flexmock(foo).should_call('bar').at_least().once().at_most().twice()
foo.bar()
self._tear_down()
flexmock(foo).should_call('bar').at_least().once().at_most().twice()
foo.bar()
foo.bar()
self._tear_down()
def test_at_least_cannot_be_used_twice(self):
class Foo:
def bar(self): pass
expectation = flexmock(Foo).should_receive('bar')
try:
expectation.at_least().at_least()
raise Exception('should not be able to specify at_least twice')
except FlexmockError:
pass
except Exception:
raise
def test_at_most_cannot_be_used_twice(self):
class Foo:
def bar(self): pass
expectation = flexmock(Foo).should_receive('bar')
try:
expectation.at_most().at_most()
raise Exception('should not be able to specify at_most twice')
except FlexmockError:
pass
except Exception:
raise
def test_at_least_cannot_be_specified_until_at_most_is_set(self):
class Foo:
def bar(self): pass
expectation = flexmock(Foo).should_receive('bar')
try:
expectation.at_least().at_most()
raise Exception('should not be able to specify at_most if at_least unset')
except FlexmockError:
pass
except Exception:
raise
def test_at_most_cannot_be_specified_until_at_least_is_set(self):
class Foo:
def bar(self): pass
expectation = flexmock(Foo).should_receive('bar')
try:
expectation.at_most().at_least()
raise Exception('should not be able to specify at_least if at_most unset')
except FlexmockError:
pass
except Exception:
raise
def test_proper_reset_of_subclass_methods(self):
class A:
def x(self):
return 'a'
class B(A):
def x(self):
return 'b'
flexmock(B).should_receive('x').and_return('1')
self._tear_down()
assertEqual('b', B().x())
def test_format_args_supports_tuples(self):
formatted = _format_args('method', {'kargs' : ((1, 2),), 'kwargs' : {}})
assertEqual('method((1, 2))', formatted)
def test_mocking_subclass_of_str(self):
class String(str): pass
s = String()
flexmock(s, endswith='fake')
assertEqual('fake', s.endswith('stuff'))
self._tear_down()
assertEqual(False, s.endswith('stuff'))
def test_ordered_on_different_methods(self):
class String(str): pass
s = String('abc')
flexmock(s)
s.should_call('startswith').with_args('asdf', 0, 4).ordered()
s.should_call('endswith').ordered()
assertRaises(CallOrderError, s.endswith, 'c')
def test_fake_object_takes_properties(self):
foo = flexmock(bar=property(lambda self: 'baz'))
bar = flexmock(foo=property(lambda self: 'baz'))
assertEqual('baz', foo.bar)
assertEqual('baz', bar.foo)
def test_replace_non_callable_class_attributes(self):
class Foo:
bar = 1
foo = Foo()
bar = Foo()
flexmock(foo, bar=2)
assertEqual(2, foo.bar)
assertEqual(1, bar.bar)
self._tear_down()
assertEqual(1, foo.bar)
def test_should_chain_attributes(self):
class Baz:
x = 1
class Bar:
baz = Baz()
class Foo:
bar = Bar()
foo = Foo()
foo = flexmock(foo)
foo.should_receive('bar.baz.x').and_return(2)
assertEqual(2, foo.bar.baz.x)
self._tear_down()
assertEqual(1, foo.bar.baz.x)
def test_replace_non_callable_instance_attributes(self):
class Foo:
def __init__(self):
self.bar = 1
foo = Foo()
bar = Foo()
flexmock(foo, bar=2)
flexmock(bar, bar=1)
assertEqual(2, foo.bar)
self._tear_down()
assertEqual(1, foo.bar)
def test_replace_non_callable_module_attributes(self):
if 'flexmock_test' in sys.modules:
mod = sys.modules['flexmock_test']
else:
mod = sys.modules['__main__']
flexmock(mod, module_level_attribute='yay')
assertEqual('yay', module_level_attribute)
self._tear_down()
assertEqual('test', module_level_attribute)
def test_non_callable_attributes_fail_to_set_expectations(self):
class Foo:
bar = 1
foo = Foo()
e = flexmock(foo).should_receive('bar').and_return(2)
assertRaises(FlexmockError, e.times, 1)
assertRaises(FlexmockError, e.with_args, ())
assertRaises(FlexmockError, e.replace_with, lambda x: x)
assertRaises(FlexmockError, e.and_raise, Exception)
assertRaises(FlexmockError, e.when, lambda x: x)
assertRaises(FlexmockError, e.and_yield, 1)
assertRaises(FlexmockError, object.__getattribute__(e, 'ordered'))
assertRaises(FlexmockError, object.__getattribute__(e, 'at_least'))
assertRaises(FlexmockError, object.__getattribute__(e, 'at_most'))
assertRaises(FlexmockError, object.__getattribute__(e, 'one_by_one'))
def test_and_return_defaults_to_none_with_no_arguments(self):
foo = flexmock()
foo.should_receive('bar').and_return()
assertEqual(None, foo.bar())
def test_should_replace_attributes_that_are_instances_of_classes(self):
class Foo(object):
pass
class Bar(object):
foo = Foo()
bar = Bar()
flexmock(bar, foo='test')
assertEqual('test', bar.foo)
def test_isproperty(self):
class Foo:
@property
def bar(self): pass
def baz(self): pass
class Bar(Foo): pass
foo = Foo()
bar = Bar()
assertEqual(True, _isproperty(foo, 'bar'))
assertEqual(False, _isproperty(foo, 'baz'))
assertEqual(True, _isproperty(Foo, 'bar'))
assertEqual(False, _isproperty(Foo, 'baz'))
assertEqual(True, _isproperty(bar, 'bar'))
assertEqual(False, _isproperty(bar, 'baz'))
assertEqual(True, _isproperty(Bar, 'bar'))
assertEqual(False, _isproperty(Bar, 'baz'))
assertEqual(False, _isproperty(Mock(), 'baz'))
def test_fake_object_supporting_iteration(self):
foo = flexmock()
foo.should_receive('__iter__').and_yield(1, 2, 3)
assertEqual([1, 2, 3], [i for i in foo])
def test_with_args_for_single_named_arg_with_optional_args(self):
class Foo(object):
def bar(self, one, two='optional'): pass
e = flexmock(Foo).should_receive('bar')
e.with_args(one=1)
def test_with_args_doesnt_set_max_when_using_varargs(self):
class Foo(object):
def bar(self, *kargs): pass
flexmock(Foo).should_receive('bar').with_args(1, 2, 3)
def test_with_args_doesnt_set_max_when_using_kwargs(self):
class Foo(object):
def bar(self, **kwargs): pass
flexmock(Foo).should_receive('bar').with_args(1, 2, 3)
def test_with_args_blows_up_on_too_few_args(self):
class Foo(object):
def bar(self, a, b, c=1): pass
e = flexmock(Foo).should_receive('bar')
assertRaises(MethodSignatureError, e.with_args, 1)
def test_with_args_blows_up_on_too_few_args_with_kwargs(self):
class Foo(object):
def bar(self, a, b, c=1): pass
e = flexmock(Foo).should_receive('bar')
assertRaises(MethodSignatureError, e.with_args, 1, c=2)
def test_with_args_blows_up_on_too_many_args(self):
class Foo(object):
def bar(self, a, b, c=1): pass
e = flexmock(Foo).should_receive('bar')
assertRaises(MethodSignatureError, e.with_args, 1, 2, 3, 4)
def test_with_args_blows_up_on_kwarg_overlapping_positional(self):
class Foo(object):
def bar(self, a, b, c=1, **kwargs): pass
e = flexmock(Foo).should_receive('bar')
assertRaises(MethodSignatureError, e.with_args, 1, 2, 3, c=2)
def test_with_args_blows_up_on_invalid_kwarg(self):
class Foo(object):
def bar(self, a, b, c=1): pass
e = flexmock(Foo).should_receive('bar')
assertRaises(MethodSignatureError, e.with_args, 1, 2, d=2)
def test_with_args_ignores_invalid_args_on_flexmock_instances(self):
foo = flexmock(bar=lambda x: x)
foo.should_receive('bar').with_args('stuff')
foo.bar('stuff')
def test_with_args_does_not_compensate_for_self_on_static_instance_methods(self):
class Foo(object):
@staticmethod
def bar(x): pass
foo = Foo()
flexmock(foo).should_receive('bar').with_args('stuff')
foo.bar('stuff')
def test_with_args_does_not_compensate_for_self_on_static_class_methods(self):
class Foo(object):
@staticmethod
def bar(x): pass
flexmock(Foo).should_receive('bar').with_args('stuff')
Foo.bar('stuff')
def test_with_args_does_compensate_for_cls_on_class_methods(self):
class Foo(object):
@classmethod
def bar(cls, x): pass
foo = Foo()
flexmock(foo).should_receive('bar').with_args('stuff')
foo.bar('stuff')
def test_calling_with_keyword_args_matches_mock_with_positional_args(self):
class Foo(object):
def bar(self, a, b, c): pass
foo = Foo()
    flexmock(foo).should_receive('bar').with_args(1, 2, 3).once()
foo.bar(a=1, b=2, c=3)
def test_calling_with_positional_args_matches_mock_with_kwargs(self):
class Foo(object):
def bar(self, a, b, c): pass
foo = Foo()
    flexmock(foo).should_receive('bar').with_args(a=1, b=2, c=3).once()
foo.bar(1, 2, c=3)
def test_use_replace_with_for_callable_shortcut_kwargs(self):
class Foo(object):
def bar(self): return 'bar'
foo = Foo()
flexmock(foo, bar=lambda: 'baz')
assertEqual('baz', foo.bar())
def test_mock_property_with_attribute_on_instance(self):
class Foo(object):
@property
def bar(self): return 'bar'
foo = Foo()
foo2 = Foo()
foo3 = Foo()
flexmock(foo, bar='baz')
flexmock(foo2, bar='baz2')
assertEqual('baz', foo.bar)
assertEqual('baz2', foo2.bar)
assertEqual('bar', foo3.bar)
self._tear_down()
assertEqual(False, hasattr(Foo, '_flexmock__bar'),
'Property bar not cleaned up')
assertEqual('bar', foo.bar)
assertEqual('bar', foo2.bar)
assertEqual('bar', foo3.bar)
def test_mock_property_with_attribute_on_class(self):
class Foo(object):
@property
def bar(self): return 'bar'
foo = Foo()
foo2 = Foo()
flexmock(Foo, bar='baz')
assertEqual('baz', foo.bar)
assertEqual('baz', foo2.bar)
self._tear_down()
assertEqual(False, hasattr(Foo, '_flexmock__bar'),
'Property bar not cleaned up')
assertEqual('bar', foo.bar)
assertEqual('bar', foo2.bar)
class TestFlexmockUnittest(RegularClass, unittest.TestCase):
def tearDown(self):
pass
def _tear_down(self):
return flexmock_teardown()
if sys.version_info >= (2, 6):
import flexmock_modern_test
class TestUnittestModern(flexmock_modern_test.TestFlexmockUnittestModern):
pass
if sys.version_info >= (3, 0):
import py3_only_features
class TestPy3Features(unittest.TestCase):
def test_mock_kwargs_only_func_mock_all(self):
flexmock(py3_only_features).should_receive(
'kwargs_only_func').with_args(1, bar=2, baz=3).and_return(123)
self.assertEqual(py3_only_features.kwargs_only_func(1, bar=2, baz=3),
123)
def test_mock_kwargs_only_func_mock_required(self):
flexmock(py3_only_features).should_receive(
'kwargs_only_func').with_args(1, bar=2).and_return(123)
self.assertEqual(py3_only_features.kwargs_only_func(1, bar=2), 123)
def test_mock_kwargs_only_func_fails_if_required_not_provided(self):
self.assertRaises(
MethodSignatureError,
flexmock(py3_only_features).should_receive(
'kwargs_only_func').with_args,
1)
if __name__ == '__main__':
unittest.main()
| {
"content_hash": "b41bdd342e2fd2770eb82cfcdca1e493",
"timestamp": "",
"source": "github",
"line_count": 1664,
"max_line_length": 86,
"avg_line_length": 34.85396634615385,
"alnum_prop": 0.6578098867182786,
"repo_name": "has207/flexmock",
"id": "cc58ed30cdbf242ddacbf13772a703c5f30e0a9d",
"size": "58028",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/flexmock_test.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "109173"
},
{
"name": "Shell",
"bytes": "1821"
}
],
"symlink_target": ""
} |
import functools
import os
import routes
import webob.dec
import webob.exc
import nova.api.openstack
from nova.api.openstack import wsgi
from nova.api.openstack import xmlutil
from nova import exception
from nova import flags
from nova import log as logging
import nova.policy
from nova import utils
from nova import wsgi as base_wsgi
LOG = logging.getLogger(__name__)
FLAGS = flags.FLAGS
class ExtensionDescriptor(object):
"""Base class that defines the contract for extensions.
Note that you don't have to derive from this class to have a valid
extension; it is purely a convenience.
"""
# The name of the extension, e.g., 'Fox In Socks'
name = None
# The alias for the extension, e.g., 'FOXNSOX'
alias = None
# Description comes from the docstring for the class
# The XML namespace for the extension, e.g.,
# 'http://www.fox.in.socks/api/ext/pie/v1.0'
namespace = None
# The timestamp when the extension was last updated, e.g.,
# '2011-01-22T13:25:27-06:00'
updated = None
def __init__(self, ext_mgr):
"""Register extension with the extension manager."""
ext_mgr.register(self)
def get_resources(self):
"""List of extensions.ResourceExtension extension objects.
Resources define new nouns, and are accessible through URLs.
"""
resources = []
return resources
def get_controller_extensions(self):
"""List of extensions.ControllerExtension extension objects.
Controller extensions are used to extend existing controllers.
"""
controller_exts = []
return controller_exts
@classmethod
def nsmap(cls):
"""Synthesize a namespace map from extension."""
# Start with a base nsmap
nsmap = ext_nsmap.copy()
# Add the namespace for the extension
nsmap[cls.alias] = cls.namespace
return nsmap
@classmethod
def xmlname(cls, name):
"""Synthesize element and attribute names."""
return '{%s}%s' % (cls.namespace, name)
def make_ext(elem):
elem.set('name')
elem.set('namespace')
elem.set('alias')
elem.set('updated')
desc = xmlutil.SubTemplateElement(elem, 'description')
desc.text = 'description'
xmlutil.make_links(elem, 'links')
ext_nsmap = {None: xmlutil.XMLNS_V11, 'atom': xmlutil.XMLNS_ATOM}
class ExtensionTemplate(xmlutil.TemplateBuilder):
def construct(self):
root = xmlutil.TemplateElement('extension', selector='extension')
make_ext(root)
return xmlutil.MasterTemplate(root, 1, nsmap=ext_nsmap)
class ExtensionsTemplate(xmlutil.TemplateBuilder):
def construct(self):
root = xmlutil.TemplateElement('extensions')
elem = xmlutil.SubTemplateElement(root, 'extension',
selector='extensions')
make_ext(elem)
return xmlutil.MasterTemplate(root, 1, nsmap=ext_nsmap)
class ExtensionsResource(wsgi.Resource):
def __init__(self, extension_manager):
self.extension_manager = extension_manager
super(ExtensionsResource, self).__init__(None)
def _translate(self, ext):
ext_data = {}
ext_data['name'] = ext.name
ext_data['alias'] = ext.alias
ext_data['description'] = ext.__doc__
ext_data['namespace'] = ext.namespace
ext_data['updated'] = ext.updated
ext_data['links'] = [] # TODO(dprince): implement extension links
return ext_data
@wsgi.serializers(xml=ExtensionsTemplate)
def index(self, req):
extensions = []
for _alias, ext in self.extension_manager.extensions.iteritems():
extensions.append(self._translate(ext))
return dict(extensions=extensions)
@wsgi.serializers(xml=ExtensionTemplate)
def show(self, req, id):
try:
# NOTE(dprince): the extensions alias is used as the 'id' for show
ext = self.extension_manager.extensions[id]
except KeyError:
raise webob.exc.HTTPNotFound()
return dict(extension=self._translate(ext))
def delete(self, req, id):
raise webob.exc.HTTPNotFound()
def create(self, req):
raise webob.exc.HTTPNotFound()
class ExtensionManager(object):
"""Load extensions from the configured extension path.
See nova/tests/api/openstack/extensions/foxinsocks/extension.py for an
example extension implementation.
"""
def register(self, ext):
# Do nothing if the extension doesn't check out
if not self._check_extension(ext):
return
alias = ext.alias
LOG.audit(_('Loaded extension: %s'), alias)
if alias in self.extensions:
raise exception.Error("Found duplicate extension: %s" % alias)
self.extensions[alias] = ext
def get_resources(self):
"""Returns a list of ResourceExtension objects."""
resources = []
resources.append(ResourceExtension('extensions',
ExtensionsResource(self)))
for ext in self.extensions.values():
try:
resources.extend(ext.get_resources())
except AttributeError:
                # NOTE(dprince): Extensions aren't required to have resource
# extensions
pass
return resources
def get_controller_extensions(self):
"""Returns a list of ControllerExtension objects."""
controller_exts = []
for ext in self.extensions.values():
try:
controller_exts.extend(ext.get_controller_extensions())
except AttributeError:
# NOTE(Vek): Extensions aren't required to have
# controller extensions
pass
return controller_exts
def _check_extension(self, extension):
"""Checks for required methods in extension objects."""
try:
LOG.debug(_('Ext name: %s'), extension.name)
LOG.debug(_('Ext alias: %s'), extension.alias)
LOG.debug(_('Ext description: %s'),
' '.join(extension.__doc__.strip().split()))
LOG.debug(_('Ext namespace: %s'), extension.namespace)
LOG.debug(_('Ext updated: %s'), extension.updated)
except AttributeError as ex:
LOG.exception(_("Exception loading extension: %s"), unicode(ex))
return False
return True
def load_extension(self, ext_factory):
"""Execute an extension factory.
Loads an extension. The 'ext_factory' is the name of a
callable that will be imported and called with one
argument--the extension manager. The factory callable is
expected to call the register() method at least once.
"""
LOG.debug(_("Loading extension %s"), ext_factory)
# Load the factory
factory = utils.import_class(ext_factory)
# Call it
LOG.debug(_("Calling extension factory %s"), ext_factory)
factory(self)
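    # Illustrative sketch (not part of nova): a factory satisfying the
    # contract above. ``MyExtension`` is a hypothetical ExtensionDescriptor
    # subclass; instantiating it is enough, since ExtensionDescriptor's
    # __init__ calls ext_mgr.register(self).
    #
    #     def my_extension_factory(ext_mgr):
    #         MyExtension(ext_mgr)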
def _load_extensions(self):
"""Load extensions specified on the command line."""
extensions = list(self.cls_list)
for ext_factory in extensions:
try:
self.load_extension(ext_factory)
except Exception as exc:
LOG.warn(_('Failed to load extension %(ext_factory)s: '
'%(exc)s') % locals())
class ControllerExtension(object):
"""Extend core controllers of nova OpenStack API.
Provide a way to extend existing nova OpenStack API core
controllers.
"""
def __init__(self, extension, collection, controller):
self.extension = extension
self.collection = collection
self.controller = controller
class ResourceExtension(object):
"""Add top level resources to the OpenStack API in nova."""
def __init__(self, collection, controller, parent=None,
collection_actions=None, member_actions=None,
custom_routes_fn=None):
if not collection_actions:
collection_actions = {}
if not member_actions:
member_actions = {}
self.collection = collection
self.controller = controller
self.parent = parent
self.collection_actions = collection_actions
self.member_actions = member_actions
self.custom_routes_fn = custom_routes_fn
def wrap_errors(fn):
"""Ensure errors are not passed along."""
def wrapped(*args, **kwargs):
try:
return fn(*args, **kwargs)
        except Exception:
raise webob.exc.HTTPInternalServerError()
return wrapped
def load_standard_extensions(ext_mgr, logger, path, package, ext_list=None):
"""Registers all standard API extensions."""
# Walk through all the modules in our directory...
our_dir = path[0]
for dirpath, dirnames, filenames in os.walk(our_dir):
# Compute the relative package name from the dirpath
relpath = os.path.relpath(dirpath, our_dir)
if relpath == '.':
relpkg = ''
else:
relpkg = '.%s' % '.'.join(relpath.split(os.sep))
# Now, consider each file in turn, only considering .py files
for fname in filenames:
root, ext = os.path.splitext(fname)
# Skip __init__ and anything that's not .py
if ext != '.py' or root == '__init__':
continue
# Try loading it
classname = "%s%s" % (root[0].upper(), root[1:])
classpath = ("%s%s.%s.%s" %
(package, relpkg, root, classname))
if ext_list is not None and classname not in ext_list:
logger.debug("Skipping extension: %s" % classpath)
continue
try:
ext_mgr.load_extension(classpath)
except Exception as exc:
logger.warn(_('Failed to load extension %(classpath)s: '
'%(exc)s') % locals())
# Now, let's consider any subdirectories we may have...
subdirs = []
for dname in dirnames:
# Skip it if it does not have __init__.py
if not os.path.exists(os.path.join(dirpath, dname,
'__init__.py')):
continue
# If it has extension(), delegate...
ext_name = ("%s%s.%s.extension" %
(package, relpkg, dname))
try:
ext = utils.import_class(ext_name)
except exception.ClassNotFound:
# extension() doesn't exist on it, so we'll explore
# the directory for ourselves
subdirs.append(dname)
else:
try:
ext(ext_mgr)
except Exception as exc:
logger.warn(_('Failed to load extension %(ext_name)s: '
'%(exc)s') % locals())
# Update the list of directories we'll explore...
dirnames[:] = subdirs
def extension_authorizer(api_name, extension_name):
def authorize(context, target=None):
if target is None:
target = {'project_id': context.project_id,
'user_id': context.user_id}
action = '%s_extension:%s' % (api_name, extension_name)
nova.policy.enforce(context, action, target)
return authorize
def soft_extension_authorizer(api_name, extension_name):
hard_authorize = extension_authorizer(api_name, extension_name)
def authorize(context):
try:
hard_authorize(context)
return True
except exception.NotAuthorized:
return False
return authorize
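# Usage sketch (illustrative; ``context`` is assumed to be a request context
# with project_id/user_id attributes). The soft variant returns a boolean
# instead of letting NotAuthorized propagate:
#
#     authorize = soft_extension_authorizer('compute', 'my_extension')
#     if authorize(context):
#         pass  # e.g. include the extension's data in the response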
| {
"content_hash": "b14fc62646e8a8f4ffa74f7328bf6fe2",
"timestamp": "",
"source": "github",
"line_count": 377,
"max_line_length": 78,
"avg_line_length": 31.50397877984085,
"alnum_prop": 0.5924896859476299,
"repo_name": "russellb/nova",
"id": "8d04b017f7da3de4ab4a16b4d8247ce5d34f18ec",
"size": "12590",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "nova/api/openstack/extensions.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "4974"
},
{
"name": "JavaScript",
"bytes": "7412"
},
{
"name": "Python",
"bytes": "5611148"
},
{
"name": "Shell",
"bytes": "25380"
}
],
"symlink_target": ""
} |
from datetime import datetime
now = datetime.now()
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
# The suffix of source filenames.
source_suffix = ".rst"
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = "index"
# General information about the project.
project = "Cookiecutter Django"
copyright = f"2013-{now.year}, Daniel Roy Greenfeld"
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = "{}.{}.{}".format(*now.isocalendar())
# The full version, including alpha/beta/rc tags.
release = "{}.{}.{}".format(*now.isocalendar())
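# Note: now.isocalendar() returns (ISO year, ISO week number, ISO weekday),
# so both strings are calendar-based versions, e.g. "2023.17.2"
# (an illustrative value).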
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
# language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ["_build"]
# The reST default role (used for this markup: `text`) to use for all documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = "sphinx"
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = "sphinx_rtd_theme"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ["_static"]
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = "cookiecutter-djangodoc"
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
# 'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
(
"index",
"cookiecutter-django.tex",
"cookiecutter-django Documentation",
"cookiecutter-django",
"manual",
)
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# If true, show page references after internal links.
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(
"index",
"Cookiecutter Django",
"Cookiecutter Django documentation",
["Daniel Roy Greenfeld"],
1,
)
]
# If true, show URL addresses after external links.
# man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(
"index",
"Cookiecutter Django",
"Cookiecutter Django documentation",
"Daniel Roy Greenfeld",
"Cookiecutter Django",
"A Cookiecutter template for creating production-ready "
"Django projects quickly.",
"Miscellaneous",
)
]
# Documents to append as an appendix to all manuals.
# texinfo_appendices = []
# If false, no module index is generated.
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
# texinfo_show_urls = 'footnote'
| {
"content_hash": "a03826181dfda4ba8222a848f032dd0c",
"timestamp": "",
"source": "github",
"line_count": 246,
"max_line_length": 80,
"avg_line_length": 31.80081300813008,
"alnum_prop": 0.6850313179087306,
"repo_name": "trungdong/cookiecutter-django",
"id": "b53e6a7e78c81bd69ae2e446bed80be46357bde2",
"size": "8166",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "docs/conf.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "1037"
},
{
"name": "CSS",
"bytes": "228"
},
{
"name": "Dockerfile",
"bytes": "8132"
},
{
"name": "HTML",
"bytes": "21569"
},
{
"name": "JavaScript",
"bytes": "5218"
},
{
"name": "Makefile",
"bytes": "1124"
},
{
"name": "Procfile",
"bytes": "420"
},
{
"name": "Python",
"bytes": "126082"
},
{
"name": "SCSS",
"bytes": "662"
},
{
"name": "Shell",
"bytes": "16009"
}
],
"symlink_target": ""
} |
from django.contrib import admin
from .models import Book
admin.site.register(Book)
| {
"content_hash": "fa71a799e4b54cce2f545c994c2d08c0",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 32,
"avg_line_length": 17.2,
"alnum_prop": 0.8023255813953488,
"repo_name": "dlab-projects/equal-read-interface",
"id": "af94c1dabfd1eb2300f844f584dfebfba014d91d",
"size": "86",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "equal_read/books/admin.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "962"
},
{
"name": "HTML",
"bytes": "46601"
},
{
"name": "Python",
"bytes": "27716"
}
],
"symlink_target": ""
} |
import pbr.version
__version__ = pbr.version.VersionInfo(
'osccloud').version_string()
| {
"content_hash": "8d2fe51501cce4e802be2da6869fc214",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 38,
"avg_line_length": 18.6,
"alnum_prop": 0.6989247311827957,
"repo_name": "dtroyer/osc-cloud",
"id": "276279850ff29599bffb6f63a5c3865e69d6bdd0",
"size": "664",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "osccloud/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "16999"
}
],
"symlink_target": ""
} |
from setuptools import find_packages
from distutils.core import setup
setup(
author=u'Matt Cowger & Magnus Nilson',
author_email='[email protected]',
name='PythonScale',
description='Python interface to ScaleIO 1.3 REST API',
version="0.1",
url='https://github.com/swevm/scaleio_sdk/',
license='Apache License',
packages=find_packages(),
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules',
],
    install_requires=[
        # read one requirement per line, dropping trailing newlines
        line.strip() for line in open("requirements.txt")
    ],
) | {
"content_hash": "b9a1fe05c985aa14dbc1ba3316b7995b",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 71,
"avg_line_length": 32.083333333333336,
"alnum_prop": 0.638961038961039,
"repo_name": "swevm/scaleio_sdk",
"id": "1c9d642e5fd0a10a53edc241fe7fdbbec1657a26",
"size": "816",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "17663"
}
],
"symlink_target": ""
} |
from pysys.constants import *
from xpybuild.xpybuild_basetest import XpybuildBaseTest
class PySysTest(XpybuildBaseTest):
def execute(self):
msg = self.xpybuild(shouldFail=False)
self.xpybuild(stdouterr='xpybuild-options', args=['--options'])
def validate(self):
self.assertDiff('build-output/defaults.txt', 'defaults.txt', abortOnError=False)
self.assertDiff('build-output/targetOverride.txt', 'targetOverride.txt', abortOnError=False)
self.assertDiff('build-output/legacyTargetOverride.txt', 'legacyTargetOverride.txt', abortOnError=False)
self.assertGrep(file='xpybuild.out', expr="targetOverrideBoth.txt mergeOptions testoption.targetOverride=expectedval")
self.assertGrep(file='xpybuild.out', expr="PathSet._resolveUnderlyingDependencies got options")
self.assertGrep(file='xpybuild.out', expr="Cannot read the value of basetarget.targetOptions during the initialization phase of the build", literal=True)
self.assertGrep(file='xpybuild.out', expr="ERROR .*", contains=False)
self.assertGrep(file='xpybuild-options.out', expr="testoption.default = expectedval")
self.assertGrep(file='xpybuild-options.out', expr="testoption.globalOverride = expectedval")
self.assertGrep(file='xpybuild-options.out', expr="testoption2.empty = $")
self.assertGrep(file='xpybuild-options.out', expr="Traceback", contains=False)
self.logFileContents('build-output/BUILD_WORK/targets/MyTarget/implicit-inputs/_OUTPUT_DIR_.defaults.txt.txt', tail=True)
		# default options shouldn't contain any objects with no nice repr representation,
		# at least in the implicit inputs file which is used to decide rebuilding
self.assertGrep(file='build-output/BUILD_WORK/targets/MyTarget/implicit-inputs/_OUTPUT_DIR_.defaults.txt.txt', expr="at 0x", contains=False)
# check we did include a wide range of options as a result of the addHashableImplicitInputOption call
self.assertGrep(file='build-output/BUILD_WORK/targets/MyTarget/implicit-inputs/_OUTPUT_DIR_.defaults.txt.txt', expr="native.")
self.assertGrep(file='build-output/BUILD_WORK/targets/MyTarget/implicit-inputs/_OUTPUT_DIR_.defaults.txt.txt', expr="^None$", contains=False) # none items should be filtered out
self.assertGrep(file='build-output/BUILD_WORK/targets/MyTarget/implicit-inputs/_OUTPUT_DIR_.defaults.txt.txt', expr="addHashableImplicitInput str expectedval")
self.assertGrep(file='build-output/BUILD_WORK/targets/MyTarget/implicit-inputs/_OUTPUT_DIR_.defaults.txt.txt', expr="testoption.default")
| {
"content_hash": "03d035690bd44fb043689af9d1ec62ec",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 180,
"avg_line_length": 64.15384615384616,
"alnum_prop": 0.781374900079936,
"repo_name": "xpybuild/xpybuild",
"id": "85465e7adc91a3ac31da986fada4c33cde2ab28e",
"size": "2502",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/correctness/framework/OptionOverriding/run.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "96"
},
{
"name": "C",
"bytes": "610"
},
{
"name": "C++",
"bytes": "872"
},
{
"name": "Dockerfile",
"bytes": "55"
},
{
"name": "Java",
"bytes": "423"
},
{
"name": "Python",
"bytes": "688439"
}
],
"symlink_target": ""
} |
class OpenStackEnvException(Exception):
pass
class UnsupportedResourceTypeException(OpenStackEnvException):
def __init__(self, resource_type):
super(UnsupportedResourceTypeException, self).__init__()
self.resource_type = resource_type
class ResourceAlreadyExistsException(OpenStackEnvException):
def __init__(self, resource):
super(ResourceAlreadyExistsException, self).__init__()
self.resource = resource
self.message = "Resource \"%s\" already exists!" % resource
class UnsupportedResourceDefinitionTypeException(OpenStackEnvException):
def __init__(self, path):
super(UnsupportedResourceDefinitionTypeException, self).__init__()
self.message = "Unsupported resource definition source \"%s\"" % path
self.path = path
class TimeoutException(OpenStackEnvException):
pass
| {
"content_hash": "7bd4abb526448a63f878726cfba240f8",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 77,
"avg_line_length": 32,
"alnum_prop": 0.7164351851851852,
"repo_name": "smithsane/openstack-env",
"id": "e77ae8954212c1bc6408252bd4857c24f223fb00",
"size": "1694",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "openstack_env/exceptions.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "1719"
},
{
"name": "Python",
"bytes": "26116"
}
],
"symlink_target": ""
} |
from setuptools import find_packages, setup
def get_install_required():
with open("./requirements.txt", "r") as reqs:
requirements = reqs.readlines()
return [r.rstrip() for r in requirements]
setup(
name='PyFolding',
version='1.0',
description='PyFolding is a simple Python based framework for fitting \
kinetic and thermodynamic models to protein folding data. It includes \
several basic models and is extensible to enable fitting of more \
interesting models.',
author='Alan R. Lowe',
author_email='[email protected]',
url='https://github.com/quantumjot/PyFolding',
packages=find_packages(),
install_requires=get_install_required(),
python_requires=">=3.6",
)
| {
"content_hash": "f3306b61fa726422579a5bc027862710",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 75,
"avg_line_length": 34.666666666666664,
"alnum_prop": 0.6909340659340659,
"repo_name": "quantumjot/PyFolding",
"id": "5f4b236ea46b1d560144ec5c0ccdd561afb1f92b",
"size": "750",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "129818"
}
],
"symlink_target": ""
} |
"""
General-purpose Part-related objects
"""
from pptx.opc.packuri import PackURI
from pptx.util import Collection
class PartCollection(Collection):
"""
Sequence of parts. Sensitive to partname index when ordering parts added
    via add_part(), e.g. ``/ppt/slide/slide2.xml`` appears before
``/ppt/slide/slide10.xml`` rather than after it as it does in a
lexicographical sort.
"""
def __init__(self):
super(PartCollection, self).__init__()
def add_part(self, part):
"""
        Insert a new part into the collection such that the list remains sorted
in logical partname order (e.g. slide10.xml comes after slide9.xml).
"""
new_partidx = part.partname.idx
for idx, seq_part in enumerate(self._values):
partidx = PackURI(seq_part.partname).idx
if partidx > new_partidx:
self._values.insert(idx, part)
return
self._values.append(part)
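    # Illustrative example of the ordering guarantee: adding parts named
    # .../slide10.xml and then .../slide2.xml leaves the collection sequenced
    # as slide2.xml, slide10.xml, because comparison is on the integer
    # partname index rather than the lexicographic partname string.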
| {
"content_hash": "3d621b0066392c705d56fdd43963a3d4",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 76,
"avg_line_length": 32.46666666666667,
"alnum_prop": 0.6293634496919918,
"repo_name": "Hitachi-Data-Systems/org-chart-builder",
"id": "3174795124604ec9c9a42e954ba50e868ce918a2",
"size": "993",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pptx/parts/part.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "1358145"
}
],
"symlink_target": ""
} |
from datetime import date
from django.contrib.gis.db import models as gis
from django.db import models
from django.utils import timezone
from myvoice.core.validators import validate_year
class Region(gis.Model):
"""Geographical regions"""
    TYPE_CHOICES = (
('country', 'Country'),
('state', 'State'),
('lga', 'LGA'),
)
name = models.CharField(max_length=255)
alternate_name = models.CharField(max_length=255, blank=True)
    type = models.CharField(max_length=16, choices=TYPE_CHOICES, default='lga')
external_id = models.IntegerField("External ID")
boundary = gis.MultiPolygonField()
objects = gis.GeoManager()
class Meta(object):
unique_together = ('external_id', 'type')
def __unicode__(self):
return u"{} - {}".format(self.get_type_display(), self.name)
class State(models.Model):
"""A State."""
name = models.CharField(max_length=255)
def __unicode__(self):
return self.name
class LGA(models.Model):
"""Local Government Area."""
name = models.CharField(max_length=255)
state = models.ForeignKey('State')
def __unicode__(self):
return self.name
class Clinic(models.Model):
"""A health clinic."""
    TYPE_CHOICES = (
('primary', 'Primary Healthcare Facility'),
('general', 'General Hospital'),
)
name = models.CharField(max_length=100, unique=True)
slug = models.SlugField(unique=True)
    type = models.CharField(max_length=16, null=True, choices=TYPE_CHOICES, default='primary')
# These might later become location-based models. LGA should be
# easily accessible (not multiple foreign keys away) since it is the
# designator that we are most interested in.
town = models.CharField(max_length=100)
ward = models.CharField(max_length=100)
lga = models.ForeignKey(LGA, null=True)
location = gis.PointField(null=True, blank=True)
lga_rank = models.IntegerField(
blank=True, null=True, verbose_name='LGA rank', editable=False)
pbf_rank = models.IntegerField(
blank=True, null=True, verbose_name='PBF rank', editable=False)
code = models.PositiveIntegerField(
verbose_name='SMS Code', unique=True,
help_text="Code of Clinic to be used in SMS registration.")
created = models.DateTimeField(auto_now_add=True)
updated = models.DateTimeField(auto_now=True)
objects = gis.GeoManager()
class Meta:
ordering = ['name']
def __unicode__(self):
return self.name
def managers(self):
"""The staff members who are in charge of this clinic."""
return self.clinicstaff_set.filter(is_manager=True)
class ClinicStaff(models.Model):
"""Represents a person who works at a Clinic."""
clinic = models.ForeignKey('Clinic')
user = models.ForeignKey(
'auth.User', blank=True, null=True,
help_text="If possible, this person should have a User account.")
name = models.CharField(
max_length=100, blank=True,
help_text="If given, the User account's name will be preferred to the "
"name given here with the assumption that it is more likely to be "
"current.")
contact = models.ForeignKey(
'rapidsms.Contact', verbose_name='Preferred contact', blank=True, null=True,
help_text="If not given but a User is associated with this person, "
"the User's first associated Contact may be used.")
# It would be nice to make this a choice field if we could get a list
# of all possible staff position types.
staff_type = models.CharField(max_length=100)
year_started = models.CharField(
max_length=4, blank=True, validators=[validate_year],
help_text="Please enter a four-digit year.")
is_manager = models.BooleanField(
default=False,
help_text="Whether this person is considered in charge of the clinic.")
created = models.DateTimeField(auto_now_add=True)
updated = models.DateTimeField(auto_now=True)
def __unicode__(self):
return self.get_name_display()
def get_name_display(self):
"""Prefer the associated User's name to the name specified here."""
return self.user.get_full_name() if self.user else self.name
class Service(models.Model):
"""A medical service offered by a Clinic."""
name = models.CharField(max_length=128)
slug = models.SlugField(unique=True)
code = models.PositiveIntegerField(
verbose_name='SMS Code', unique=True,
help_text="Code of Service to be used in SMS registration.")
def __unicode__(self):
return self.name
class Patient(models.Model):
"""Represents a patient at the Clinic."""
name = models.CharField(max_length=50, blank=True)
clinic = models.ForeignKey('Clinic', blank=True, null=True)
mobile = models.CharField(max_length=11, blank=True)
serial = models.CharField(max_length=14, blank=True)
class Meta:
unique_together = [('clinic', 'serial')]
def __unicode__(self):
return u'{0} at {1}'.format(self.serial, self.clinic.name)
class Visit(models.Model):
"""Represents a visit of a Patient to the Clinic.
As registered via myvoice system."""
patient = models.ForeignKey('Patient')
service = models.ForeignKey('Service', blank=True, null=True)
staff = models.ForeignKey('ClinicStaff', blank=True, null=True)
visit_time = models.DateTimeField(default=timezone.now)
# welcome_sent is used to signify that a message is new (value is null).
# Welcome messages are no longer sent.
# See issue: https://github.com/myvoice-nigeria/myvoice/issues/207
welcome_sent = models.DateTimeField(blank=True, null=True)
survey_sent = models.DateTimeField(blank=True, null=True)
mobile = models.CharField(max_length=11, blank=True)
sender = models.CharField(max_length=11, blank=True)
# The following fields denormalize to help reporting
# so questions are more flexible.
satisfied = models.NullBooleanField()
survey_started = models.BooleanField(default=False)
survey_completed = models.BooleanField(default=False)
def __unicode__(self):
return unicode(self.patient)
class Meta:
permissions = (
('readonly', 'Can Only Read Visits'),
)
class VisitRegistrationError(models.Model):
"""Keeps current state of errors in Visit registration SMS.
Right now, only "wrong clinic" is useful."""
WRONG_CLINIC = 0
WRONG_MOBILE = 1
WRONG_SERIAL = 2
WRONG_SERVICE = 3
ERROR_TYPES = enumerate(('Wrong Clinic', 'Wrong Mobile', 'Wrong Serial', 'Wrong Service'))
sender = models.CharField(max_length=20)
error_type = models.PositiveIntegerField(choices=ERROR_TYPES, default=WRONG_CLINIC)
def __unicode__(self):
return self.sender
class VisitRegistrationErrorLog(models.Model):
"""Keeps log of errors in Visit registration SMS."""
sender = models.CharField(max_length=20)
error_type = models.CharField(max_length=50)
message_date = models.DateTimeField(auto_now=True)
message = models.CharField(max_length=160)
def __unicode__(self):
return self.sender
class ManualRegistration(models.Model):
"""Count of Registrations of visits made at the clinic outside the myvoice system."""
entry_date = models.DateField(default=date.today)
visit_count = models.PositiveIntegerField()
clinic = models.ForeignKey('Clinic')
class Meta:
unique_together = ('entry_date', 'clinic')
def __unicode__(self):
return unicode(self.clinic)
class GenericFeedback(models.Model):
"""Keeps Feedback information sent by patients."""
sender = models.CharField(max_length=20)
clinic = models.ForeignKey('Clinic', null=True, blank=True)
message = models.TextField(blank=True)
message_date = models.DateTimeField(default=timezone.now)
display_on_dashboard = models.BooleanField(
default=True,
help_text="Whether or not this response is displayed on the dashboard.")
display_on_summary = models.BooleanField(
default=False,
help_text="Whether or not this response is displayed on summary pages.")
report_count = models.PositiveIntegerField(
default=1,
help_text="How many times this kind of feedback was received.")
class Meta:
verbose_name = 'General Feedback'
verbose_name_plural = 'General Feedback'
def __unicode__(self):
return self.sender
class ClinicScore(models.Model):
"""Keeps quality and quantity scores for clinics.
Changes every quarter."""
clinic = models.ForeignKey('Clinic')
quality = models.DecimalField(max_digits=5, decimal_places=2)
quantity = models.PositiveIntegerField()
start_date = models.DateField()
end_date = models.DateField()
def __unicode__(self):
return unicode(self.clinic)
| {
"content_hash": "7c5092f4ed1c5b0efc4a830ab1e0a9ff",
"timestamp": "",
"source": "github",
"line_count": 267,
"max_line_length": 94,
"avg_line_length": 33.49438202247191,
"alnum_prop": 0.6724812702672481,
"repo_name": "myvoice-nigeria/myvoice",
"id": "957e6e9c2208718f08de851c4202f24a76617c3b",
"size": "8943",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "myvoice/clinics/models.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "82690"
},
{
"name": "Elixir",
"bytes": "391"
},
{
"name": "HTML",
"bytes": "79449"
},
{
"name": "JavaScript",
"bytes": "683534"
},
{
"name": "Python",
"bytes": "1279764"
},
{
"name": "Scheme",
"bytes": "3876"
},
{
"name": "Shell",
"bytes": "1263"
}
],
"symlink_target": ""
} |
"""Implementation of image ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import tensor_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import check_ops
from tensorflow.python.ops import clip_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import gen_image_ops
from tensorflow.python.ops import gen_nn_ops
from tensorflow.python.ops import string_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import variables
ops.NotDifferentiable('RandomCrop')
# TODO(b/31222613): This op may be differentiable, and there may be
# latent bugs here.
ops.NotDifferentiable('RGBToHSV')
# TODO(b/31222613): This op may be differentiable, and there may be
# latent bugs here.
ops.NotDifferentiable('HSVToRGB')
ops.NotDifferentiable('DrawBoundingBoxes')
ops.NotDifferentiable('SampleDistortedBoundingBox')
# TODO(bsteiner): Implement the gradient function for extract_glimpse
# TODO(b/31222613): This op may be differentiable, and there may be
# latent bugs here.
ops.NotDifferentiable('ExtractGlimpse')
ops.NotDifferentiable('NonMaxSuppression')
def _assert(cond, ex_type, msg):
"""A polymorphic assert, works with tensors and boolean expressions.
If `cond` is not a tensor, behave like an ordinary assert statement, except
  that an empty list is returned. If `cond` is a tensor, return a list
containing a single TensorFlow assert op.
Args:
cond: Something evaluates to a boolean value. May be a tensor.
ex_type: The exception class to use.
msg: The error message.
Returns:
A list, containing at most one assert op.
"""
if _is_tensor(cond):
return [control_flow_ops.Assert(cond, [msg])]
else:
if not cond:
raise ex_type(msg)
else:
return []
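# Illustrative sketch: a plain python condition is checked eagerly (raising
# immediately on failure), while a tensor condition is deferred to graph
# execution:
#
#     checks = _assert(width > 0, ValueError, 'width must be > 0')
#     # -> [] for a truthy python value, or [Assert op] when `width` is a
#     #    tensor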
def _is_tensor(x):
"""Returns `True` if `x` is a symbolic tensor-like object.
Args:
x: A python object to check.
Returns:
`True` if `x` is a `tf.Tensor` or `tf.Variable`, otherwise `False`.
"""
return isinstance(x, (ops.Tensor, variables.Variable))
def _ImageDimensions(image):
"""Returns the dimensions of an image tensor.
Args:
image: A 3-D Tensor of shape `[height, width, channels]`.
Returns:
A list of `[height, width, channels]` corresponding to the dimensions of the
input image. Dimensions that are statically known are python integers,
otherwise they are integer scalar tensors.
"""
if image.get_shape().is_fully_defined():
return image.get_shape().as_list()
else:
static_shape = image.get_shape().with_rank(3).as_list()
dynamic_shape = array_ops.unstack(array_ops.shape(image), 3)
return [s if s is not None else d
for s, d in zip(static_shape, dynamic_shape)]
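# Illustrative sketch: for an image whose static shape is (None, 224, 3),
# _ImageDimensions returns [<int32 scalar tensor>, 224, 3] -- the unknown
# height comes back as a dynamic tensor, the known dims as python ints.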
def _Check3DImage(image, require_static=True):
"""Assert that we are working with properly shaped image.
Args:
image: 3-D Tensor of shape [height, width, channels]
require_static: If `True`, requires that all dimensions of `image` are
known and non-zero.
Raises:
ValueError: if `image.shape` is not a 3-vector.
Returns:
An empty list, if `image` has fully defined dimensions. Otherwise, a list
containing an assert op is returned.
"""
try:
image_shape = image.get_shape().with_rank(3)
except ValueError:
raise ValueError("'image' must be three-dimensional.")
if require_static and not image_shape.is_fully_defined():
raise ValueError("'image' must be fully defined.")
if any(x == 0 for x in image_shape):
raise ValueError("all dims of 'image.shape' must be > 0: %s" %
image_shape)
if not image_shape.is_fully_defined():
return [check_ops.assert_positive(array_ops.shape(image),
["all dims of 'image.shape' "
"must be > 0."])]
else:
return []
def _CheckAtLeast3DImage(image):
"""Assert that we are working with properly shaped image.
Args:
image: >= 3-D Tensor of size [*, height, width, depth]
Raises:
ValueError: if image.shape is not a [>= 3] vector.
"""
if not image.get_shape().is_fully_defined():
raise ValueError('\'image\' must be fully defined.')
if image.get_shape().ndims < 3:
raise ValueError('\'image\' must be at least three-dimensional.')
if not all(x > 0 for x in image.get_shape()):
raise ValueError('all dims of \'image.shape\' must be > 0: %s' %
image.get_shape())
def fix_image_flip_shape(image, result):
"""Set the shape to 3 dimensional if we don't know anything else.
Args:
image: original image size
result: flipped or transformed image
Returns:
An image whose shape is at least None,None,None.
"""
image_shape = image.get_shape()
if image_shape == tensor_shape.unknown_shape():
result.set_shape([None, None, None])
else:
result.set_shape(image_shape)
return result
def random_flip_up_down(image, seed=None):
"""Randomly flips an image vertically (upside down).
With a 1 in 2 chance, outputs the contents of `image` flipped along the first
dimension, which is `height`. Otherwise output the image as-is.
Args:
image: A 3-D tensor of shape `[height, width, channels].`
seed: A Python integer. Used to create a random seed. See
@{tf.set_random_seed}
for behavior.
Returns:
A 3-D tensor of the same type and shape as `image`.
Raises:
    ValueError: if the shape of `image` is not supported.
"""
image = ops.convert_to_tensor(image, name='image')
_Check3DImage(image, require_static=False)
uniform_random = random_ops.random_uniform([], 0, 1.0, seed=seed)
mirror_cond = math_ops.less(uniform_random, .5)
stride = array_ops.where(mirror_cond, -1, 1)
result = image[::stride, :, :]
return fix_image_flip_shape(image, result)
def random_flip_left_right(image, seed=None):
"""Randomly flip an image horizontally (left to right).
With a 1 in 2 chance, outputs the contents of `image` flipped along the
second dimension, which is `width`. Otherwise output the image as-is.
Args:
image: A 3-D tensor of shape `[height, width, channels].`
seed: A Python integer. Used to create a random seed. See
@{tf.set_random_seed}
for behavior.
Returns:
A 3-D tensor of the same type and shape as `image`.
Raises:
    ValueError: if the shape of `image` is not supported.
"""
image = ops.convert_to_tensor(image, name='image')
_Check3DImage(image, require_static=False)
uniform_random = random_ops.random_uniform([], 0, 1.0, seed=seed)
mirror_cond = math_ops.less(uniform_random, .5)
stride = array_ops.where(mirror_cond, -1, 1)
result = image[:, ::stride, :]
return fix_image_flip_shape(image, result)
def flip_left_right(image):
"""Flip an image horizontally (left to right).
Outputs the contents of `image` flipped along the second dimension, which is
`width`.
See also `reverse()`.
Args:
image: A 3-D tensor of shape `[height, width, channels].`
Returns:
A 3-D tensor of the same type and shape as `image`.
Raises:
    ValueError: if the shape of `image` is not supported.
"""
image = ops.convert_to_tensor(image, name='image')
_Check3DImage(image, require_static=False)
return fix_image_flip_shape(image, image[:, ::-1, :])
def flip_up_down(image):
"""Flip an image horizontally (upside down).
Outputs the contents of `image` flipped along the first dimension, which is
`height`.
See also `reverse()`.
Args:
image: A 3-D tensor of shape `[height, width, channels].`
Returns:
A 3-D tensor of the same type and shape as `image`.
Raises:
    ValueError: if the shape of `image` is not supported.
"""
image = ops.convert_to_tensor(image, name='image')
_Check3DImage(image, require_static=False)
return fix_image_flip_shape(image, array_ops.reverse_v2(image, [0]))
def rot90(image, k=1, name=None):
"""Rotate an image counter-clockwise by 90 degrees.
Args:
image: A 3-D tensor of shape `[height, width, channels]`.
k: A scalar integer. The number of times the image is rotated by 90 degrees.
name: A name for this operation (optional).
Returns:
A rotated 3-D tensor of the same type and shape as `image`.
"""
with ops.name_scope(name, 'rot90', [image, k]) as scope:
image = ops.convert_to_tensor(image, name='image')
_Check3DImage(image, require_static=False)
k = ops.convert_to_tensor(k, dtype=dtypes.int32, name='k')
k.get_shape().assert_has_rank(0)
k = math_ops.mod(k, 4)
def _rot90():
return array_ops.transpose(array_ops.reverse_v2(image, [1]),
[1, 0, 2])
def _rot180():
return array_ops.reverse_v2(image, [0, 1])
def _rot270():
return array_ops.reverse_v2(array_ops.transpose(image, [1, 0, 2]),
[1])
cases = [(math_ops.equal(k, 1), _rot90),
(math_ops.equal(k, 2), _rot180),
(math_ops.equal(k, 3), _rot270)]
ret = control_flow_ops.case(cases, default=lambda: image, exclusive=True,
name=scope)
ret.set_shape([None, None, image.get_shape()[2]])
return ret
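# Usage sketch (illustrative): `k` is taken modulo 4, so k=5 behaves like
# k=1, and k=2 yields an upside-down copy.
#
#     rotated = rot90(image, k=2)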
def transpose_image(image):
"""Transpose an image by swapping the first and second dimension.
See also `transpose()`.
Args:
image: 3-D tensor of shape `[height, width, channels]`
Returns:
A 3-D tensor of shape `[width, height, channels]`
Raises:
    ValueError: if the shape of `image` is not supported.
"""
image = ops.convert_to_tensor(image, name='image')
_Check3DImage(image, require_static=False)
return array_ops.transpose(image, [1, 0, 2], name='transpose_image')
def central_crop(image, central_fraction):
"""Crop the central region of the image.
Remove the outer parts of an image but retain the central region of the image
along each dimension. If we specify central_fraction = 0.5, this function
returns the region marked with "X" in the below diagram.
       --------
      |        |
      |  XXXX  |
      |  XXXX  |
      |        |   where "X" is the central 50% of the image.
       --------
Args:
image: 3-D float Tensor of shape [height, width, depth]
central_fraction: float (0, 1], fraction of size to crop
Raises:
    ValueError: if central_fraction is not within (0, 1].
Returns:
3-D float Tensor
"""
image = ops.convert_to_tensor(image, name='image')
_Check3DImage(image, require_static=False)
if central_fraction <= 0.0 or central_fraction > 1.0:
raise ValueError('central_fraction must be within (0, 1]')
if central_fraction == 1.0:
return image
img_shape = array_ops.shape(image)
depth = image.get_shape()[2]
fraction_offset = int(1 / ((1 - central_fraction) / 2.0))
bbox_h_start = math_ops.div(img_shape[0], fraction_offset)
bbox_w_start = math_ops.div(img_shape[1], fraction_offset)
bbox_h_size = img_shape[0] - bbox_h_start * 2
bbox_w_size = img_shape[1] - bbox_w_start * 2
bbox_begin = array_ops.stack([bbox_h_start, bbox_w_start, 0])
bbox_size = array_ops.stack([bbox_h_size, bbox_w_size, -1])
image = array_ops.slice(image, bbox_begin, bbox_size)
# The first two dimensions are dynamic and unknown.
image.set_shape([None, None, depth])
return image
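# Usage sketch (illustrative): keep the central half along each dimension,
# i.e. the "X" region in the diagram above.
#
#     cropped = central_crop(image, central_fraction=0.5)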
def pad_to_bounding_box(image, offset_height, offset_width, target_height,
target_width):
"""Pad `image` with zeros to the specified `height` and `width`.
Adds `offset_height` rows of zeros on top, `offset_width` columns of
zeros on the left, and then pads the image on the bottom and right
with zeros until it has dimensions `target_height`, `target_width`.
This op does nothing if `offset_*` is zero and the image already has size
`target_height` by `target_width`.
Args:
image: 3-D tensor with shape `[height, width, channels]`
offset_height: Number of rows of zeros to add on top.
offset_width: Number of columns of zeros to add on the left.
target_height: Height of output image.
target_width: Width of output image.
Returns:
3-D tensor of shape `[target_height, target_width, channels]`
Raises:
ValueError: If the shape of `image` is incompatible with the `offset_*` or
`target_*` arguments, or either `offset_height` or `offset_width` is
negative.
"""
image = ops.convert_to_tensor(image, name='image')
assert_ops = []
assert_ops += _Check3DImage(image, require_static=False)
height, width, depth = _ImageDimensions(image)
after_padding_width = target_width - offset_width - width
after_padding_height = target_height - offset_height - height
assert_ops += _assert(offset_height >= 0, ValueError,
'offset_height must be >= 0')
assert_ops += _assert(offset_width >= 0, ValueError,
'offset_width must be >= 0')
assert_ops += _assert(after_padding_width >= 0, ValueError,
'width must be <= target - offset')
assert_ops += _assert(after_padding_height >= 0, ValueError,
'height must be <= target - offset')
image = control_flow_ops.with_dependencies(assert_ops, image)
# Do not pad on the depth dimensions.
paddings = array_ops.reshape(
array_ops.stack([
offset_height, after_padding_height, offset_width,
after_padding_width, 0, 0
]), [3, 2])
padded = array_ops.pad(image, paddings)
padded_shape = [None if _is_tensor(i) else i
for i in [target_height, target_width, depth]]
padded.set_shape(padded_shape)
return padded
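# Usage sketch (illustrative): place `image` at the top-left corner of a
# zero-filled target_height x target_width canvas.
#
#     padded = pad_to_bounding_box(image, 0, 0, target_height, target_width)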
def crop_to_bounding_box(image, offset_height, offset_width, target_height,
target_width):
"""Crops an image to a specified bounding box.
This op cuts a rectangular part out of `image`. The top-left corner of the
returned image is at `offset_height, offset_width` in `image`, and its
lower-right corner is at
`offset_height + target_height, offset_width + target_width`.
Args:
image: 3-D tensor with shape `[height, width, channels]`
offset_height: Vertical coordinate of the top-left corner of the result in
the input.
offset_width: Horizontal coordinate of the top-left corner of the result in
the input.
target_height: Height of the result.
target_width: Width of the result.
Returns:
3-D tensor of image with shape `[target_height, target_width, channels]`
Raises:
ValueError: If the shape of `image` is incompatible with the `offset_*` or
`target_*` arguments, or either `offset_height` or `offset_width` is
negative, or either `target_height` or `target_width` is not positive.
"""
image = ops.convert_to_tensor(image, name='image')
assert_ops = []
assert_ops += _Check3DImage(image, require_static=False)
height, width, depth = _ImageDimensions(image)
assert_ops += _assert(offset_width >= 0, ValueError,
'offset_width must be >= 0.')
assert_ops += _assert(offset_height >= 0, ValueError,
'offset_height must be >= 0.')
assert_ops += _assert(target_width > 0, ValueError,
'target_width must be > 0.')
assert_ops += _assert(target_height > 0, ValueError,
'target_height must be > 0.')
assert_ops += _assert(width >= (target_width + offset_width), ValueError,
'width must be >= target + offset.')
assert_ops += _assert(height >= (target_height + offset_height), ValueError,
'height must be >= target + offset.')
image = control_flow_ops.with_dependencies(assert_ops, image)
cropped = array_ops.slice(image,
array_ops.stack([offset_height, offset_width, 0]),
array_ops.stack([target_height, target_width, -1]))
cropped_shape = [None if _is_tensor(i) else i
for i in [target_height, target_width, depth]]
cropped.set_shape(cropped_shape)
return cropped
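# Usage sketch (illustrative): cut a 100x100 patch whose top-left corner sits
# at row 10, column 20 of `image`.
#
#     patch = crop_to_bounding_box(image, 10, 20, 100, 100)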
def resize_image_with_crop_or_pad(image, target_height, target_width):
"""Crops and/or pads an image to a target width and height.
Resizes an image to a target width and height by either centrally
cropping the image or padding it evenly with zeros.
If `width` or `height` is greater than the specified `target_width` or
`target_height` respectively, this op centrally crops along that dimension.
If `width` or `height` is smaller than the specified `target_width` or
`target_height` respectively, this op centrally pads with 0 along that
dimension.
Args:
image: 3-D tensor of shape `[height, width, channels]`
target_height: Target height.
target_width: Target width.
Raises:
ValueError: if `target_height` or `target_width` are zero or negative.
Returns:
Cropped and/or padded image of shape
`[target_height, target_width, channels]`
"""
image = ops.convert_to_tensor(image, name='image')
assert_ops = []
assert_ops += _Check3DImage(image, require_static=False)
assert_ops += _assert(target_width > 0, ValueError,
'target_width must be > 0.')
assert_ops += _assert(target_height > 0, ValueError,
'target_height must be > 0.')
image = control_flow_ops.with_dependencies(assert_ops, image)
# `crop_to_bounding_box` and `pad_to_bounding_box` have their own checks.
# Make sure our checks come first, so that error messages are clearer.
if _is_tensor(target_height):
target_height = control_flow_ops.with_dependencies(
assert_ops, target_height)
if _is_tensor(target_width):
target_width = control_flow_ops.with_dependencies(assert_ops, target_width)
def max_(x, y):
if _is_tensor(x) or _is_tensor(y):
return math_ops.maximum(x, y)
else:
return max(x, y)
def min_(x, y):
if _is_tensor(x) or _is_tensor(y):
return math_ops.minimum(x, y)
else:
return min(x, y)
def equal_(x, y):
if _is_tensor(x) or _is_tensor(y):
return math_ops.equal(x, y)
else:
return x == y
height, width, _ = _ImageDimensions(image)
width_diff = target_width - width
offset_crop_width = max_(-width_diff // 2, 0)
offset_pad_width = max_(width_diff // 2, 0)
height_diff = target_height - height
offset_crop_height = max_(-height_diff // 2, 0)
offset_pad_height = max_(height_diff // 2, 0)
# Maybe crop if needed.
cropped = crop_to_bounding_box(image, offset_crop_height, offset_crop_width,
min_(target_height, height),
min_(target_width, width))
# Maybe pad if needed.
resized = pad_to_bounding_box(cropped, offset_pad_height, offset_pad_width,
target_height, target_width)
# In theory all the checks below are redundant.
if resized.get_shape().ndims is None:
raise ValueError('resized contains no shape.')
resized_height, resized_width, _ = _ImageDimensions(resized)
assert_ops = []
assert_ops += _assert(equal_(resized_height, target_height), ValueError,
'resized height is not correct.')
assert_ops += _assert(equal_(resized_width, target_width), ValueError,
'resized width is not correct.')
resized = control_flow_ops.with_dependencies(assert_ops, resized)
return resized
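# Usage sketch (illustrative): normalize inputs of varying size to 227x227;
# larger images are cropped centrally, smaller ones padded evenly with zeros.
#
#     square = resize_image_with_crop_or_pad(image, 227, 227)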
class ResizeMethod(object):
BILINEAR = 0
NEAREST_NEIGHBOR = 1
BICUBIC = 2
AREA = 3
def resize_images(images,
size,
method=ResizeMethod.BILINEAR,
align_corners=False):
"""Resize `images` to `size` using the specified `method`.
Resized images will be distorted if their original aspect ratio is not
the same as `size`. To avoid distortions see
@{tf.image.resize_image_with_crop_or_pad}.
`method` can be one of:
* <b>`ResizeMethod.BILINEAR`</b>: [Bilinear interpolation.](https://en.wikipedia.org/wiki/Bilinear_interpolation)
* <b>`ResizeMethod.NEAREST_NEIGHBOR`</b>: [Nearest neighbor interpolation.](https://en.wikipedia.org/wiki/Nearest-neighbor_interpolation)
* <b>`ResizeMethod.BICUBIC`</b>: [Bicubic interpolation.](https://en.wikipedia.org/wiki/Bicubic_interpolation)
* <b>`ResizeMethod.AREA`</b>: Area interpolation.
Args:
images: 4-D Tensor of shape `[batch, height, width, channels]` or
3-D Tensor of shape `[height, width, channels]`.
size: A 1-D int32 Tensor of 2 elements: `new_height, new_width`. The
new size for the images.
method: ResizeMethod. Defaults to `ResizeMethod.BILINEAR`.
align_corners: bool. If true, exactly align all 4 corners of the input and
output. Defaults to `false`.
Raises:
ValueError: if the shape of `images` is incompatible with the
shape arguments to this function
ValueError: if `size` has invalid shape or type.
ValueError: if an unsupported resize method is specified.
Returns:
If `images` was 4-D, a 4-D float Tensor of shape
`[batch, new_height, new_width, channels]`.
If `images` was 3-D, a 3-D float Tensor of shape
`[new_height, new_width, channels]`.
"""
images = ops.convert_to_tensor(images, name='images')
if images.get_shape().ndims is None:
raise ValueError('\'images\' contains no shape.')
  # TODO(shlens): Migrate this functionality to the underlying Ops.
is_batch = True
if images.get_shape().ndims == 3:
is_batch = False
images = array_ops.expand_dims(images, 0)
elif images.get_shape().ndims != 4:
raise ValueError('\'images\' must have either 3 or 4 dimensions.')
_, height, width, _ = images.get_shape().as_list()
try:
size = ops.convert_to_tensor(size, dtypes.int32, name='size')
except (TypeError, ValueError):
raise ValueError('\'size\' must be a 1-D int32 Tensor')
if not size.get_shape().is_compatible_with([2]):
raise ValueError('\'size\' must be a 1-D Tensor of 2 elements: '
'new_height, new_width')
size_const_as_shape = tensor_util.constant_value_as_shape(size)
new_height_const = size_const_as_shape[0].value
new_width_const = size_const_as_shape[1].value
# If we can determine that the height and width will be unmodified by this
# transformation, we avoid performing the resize.
if all(x is not None
for x in [new_width_const, width, new_height_const, height]) and (
width == new_width_const and height == new_height_const):
if not is_batch:
images = array_ops.squeeze(images, squeeze_dims=[0])
return images
if method == ResizeMethod.BILINEAR:
images = gen_image_ops.resize_bilinear(images,
size,
align_corners=align_corners)
elif method == ResizeMethod.NEAREST_NEIGHBOR:
images = gen_image_ops.resize_nearest_neighbor(images,
size,
align_corners=align_corners)
elif method == ResizeMethod.BICUBIC:
images = gen_image_ops.resize_bicubic(images,
size,
align_corners=align_corners)
elif method == ResizeMethod.AREA:
images = gen_image_ops.resize_area(images,
size,
align_corners=align_corners)
else:
raise ValueError('Resize method is not implemented.')
# NOTE(mrry): The shape functions for the resize ops cannot unpack
# the packed values in `new_size`, so set the shape here.
images.set_shape([None, new_height_const, new_width_const, None])
if not is_batch:
images = array_ops.squeeze(images, squeeze_dims=[0])
return images
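# Illustrative usage sketch (hypothetical tensor): bilinear is the default;
# pass another `ResizeMethod` constant to change the interpolation.
#
#   batch = array_ops.placeholder(dtypes.float32, [None, 480, 640, 3])
#   thumbs = resize_images(batch, [240, 320], method=ResizeMethod.AREA)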
def per_image_standardization(image):
"""Linearly scales `image` to have zero mean and unit norm.
This op computes `(x - mean) / adjusted_stddev`, where `mean` is the average
of all values in image, and
`adjusted_stddev = max(stddev, 1.0/sqrt(image.NumElements()))`.
`stddev` is the standard deviation of all values in `image`. It is capped
away from zero to protect against division by 0 when handling uniform images.
Args:
image: 3-D tensor of shape `[height, width, channels]`.
Returns:
The standardized image with same shape as `image`.
Raises:
ValueError: if the shape of 'image' is incompatible with this function.
"""
image = ops.convert_to_tensor(image, name='image')
_Check3DImage(image, require_static=False)
num_pixels = math_ops.reduce_prod(array_ops.shape(image))
image = math_ops.cast(image, dtype=dtypes.float32)
image_mean = math_ops.reduce_mean(image)
variance = (math_ops.reduce_mean(math_ops.square(image)) -
math_ops.square(image_mean))
variance = gen_nn_ops.relu(variance)
stddev = math_ops.sqrt(variance)
# Apply a minimum normalization that protects us against uniform images.
min_stddev = math_ops.rsqrt(math_ops.cast(num_pixels, dtypes.float32))
pixel_value_scale = math_ops.maximum(stddev, min_stddev)
pixel_value_offset = image_mean
image = math_ops.subtract(image, pixel_value_offset)
image = math_ops.div(image, pixel_value_scale)
return image
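# Illustrative note on the clamp above (a sketch of the arithmetic, not new
# behavior): for an N-pixel constant image, stddev == 0, so the divisor falls
# back to 1/sqrt(N) and the standardized output is exactly zero everywhere.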
def random_brightness(image, max_delta, seed=None):
"""Adjust the brightness of images by a random factor.
Equivalent to `adjust_brightness()` using a `delta` randomly picked in the
interval `[-max_delta, max_delta)`.
Args:
image: An image.
max_delta: float, must be non-negative.
seed: A Python integer. Used to create a random seed. See
@{tf.set_random_seed}
for behavior.
Returns:
The brightness-adjusted image.
Raises:
ValueError: if `max_delta` is negative.
"""
if max_delta < 0:
raise ValueError('max_delta must be non-negative.')
delta = random_ops.random_uniform([], -max_delta, max_delta, seed=seed)
return adjust_brightness(image, delta)
def random_contrast(image, lower, upper, seed=None):
"""Adjust the contrast of an image by a random factor.
Equivalent to `adjust_contrast()` but uses a `contrast_factor` randomly
picked in the interval `[lower, upper]`.
Args:
image: An image tensor with 3 or more dimensions.
lower: float. Lower bound for the random contrast factor.
upper: float. Upper bound for the random contrast factor.
seed: A Python integer. Used to create a random seed. See
@{tf.set_random_seed}
for behavior.
Returns:
The contrast-adjusted tensor.
Raises:
ValueError: if `upper <= lower` or if `lower < 0`.
"""
if upper <= lower:
raise ValueError('upper must be > lower.')
if lower < 0:
raise ValueError('lower must be non-negative.')
  # Generate a float in [lower, upper]
contrast_factor = random_ops.random_uniform([], lower, upper, seed=seed)
return adjust_contrast(image, contrast_factor)
def adjust_brightness(image, delta):
"""Adjust the brightness of RGB or Grayscale images.
This is a convenience method that converts an RGB image to float
representation, adjusts its brightness, and then converts it back to the
original data type. If several adjustments are chained it is advisable to
minimize the number of redundant conversions.
The value `delta` is added to all components of the tensor `image`. Both
`image` and `delta` are converted to `float` before adding (and `image` is
scaled appropriately if it is in fixed-point representation). For regular
images, `delta` should be in the range `[0,1)`, as it is added to the image in
floating point representation, where pixel values are in the `[0,1)` range.
Args:
image: A tensor.
delta: A scalar. Amount to add to the pixel values.
Returns:
A brightness-adjusted tensor of the same shape and type as `image`.
"""
with ops.name_scope(None, 'adjust_brightness', [image, delta]) as name:
image = ops.convert_to_tensor(image, name='image')
    # Remember original dtype so we can convert back if needed
orig_dtype = image.dtype
flt_image = convert_image_dtype(image, dtypes.float32)
adjusted = math_ops.add(flt_image,
math_ops.cast(delta, dtypes.float32),
name=name)
return convert_image_dtype(adjusted, orig_dtype, saturate=True)
def adjust_contrast(images, contrast_factor):
"""Adjust contrast of RGB or grayscale images.
This is a convenience method that converts an RGB image to float
representation, adjusts its contrast, and then converts it back to the
original data type. If several adjustments are chained it is advisable to
minimize the number of redundant conversions.
`images` is a tensor of at least 3 dimensions. The last 3 dimensions are
interpreted as `[height, width, channels]`. The other dimensions only
represent a collection of images, such as `[batch, height, width, channels].`
Contrast is adjusted independently for each channel of each image.
For each channel, this Op computes the mean of the image pixels in the
channel and then adjusts each component `x` of each pixel to
`(x - mean) * contrast_factor + mean`.
Args:
images: Images to adjust. At least 3-D.
contrast_factor: A float multiplier for adjusting contrast.
Returns:
The contrast-adjusted image or images.
"""
with ops.name_scope(None, 'adjust_contrast',
[images, contrast_factor]) as name:
images = ops.convert_to_tensor(images, name='images')
    # Remember original dtype so we can convert back if needed
orig_dtype = images.dtype
flt_images = convert_image_dtype(images, dtypes.float32)
# pylint: disable=protected-access
adjusted = gen_image_ops._adjust_contrastv2(flt_images,
contrast_factor=contrast_factor,
name=name)
# pylint: enable=protected-access
return convert_image_dtype(adjusted, orig_dtype, saturate=True)
def adjust_gamma(image, gamma=1, gain=1):
"""Performs Gamma Correction on the input image.
Also known as Power Law Transform. This function transforms the
input image pixelwise according to the equation Out = In**gamma
after scaling each pixel to the range 0 to 1.
Args:
image : A Tensor.
    gamma : A scalar. Non-negative real number.
gain : A scalar. The constant multiplier.
Returns:
A Tensor. Gamma corrected output image.
Notes:
    For gamma greater than 1, the histogram will shift towards the left and
    the output image will be darker than the input image.
    For gamma less than 1, the histogram will shift towards the right and
    the output image will be brighter than the input image.
References:
[1] http://en.wikipedia.org/wiki/Gamma_correction
"""
with ops.op_scope([image, gamma, gain], None, 'adjust_gamma') as name:
# Convert pixel value to DT_FLOAT for computing adjusted image
img = ops.convert_to_tensor(image, name='img', dtype=dtypes.float32)
# Keep image dtype for computing the scale of corresponding dtype
image = ops.convert_to_tensor(image, name='image')
if gamma < 0:
raise ValueError("Gamma should be a non-negative real number")
# scale = max(dtype) - min(dtype)
    scale = constant_op.constant(
        image.dtype.limits[1] - image.dtype.limits[0], dtype=dtypes.float32)
# According to the definition of gamma correction
adjusted_img = (img / scale) ** gamma * scale * gain
return adjusted_img
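# Illustrative usage sketch (hypothetical tensor named `img`): per the notes
# above, gamma > 1 darkens and gamma < 1 brightens.
#
#   darker = adjust_gamma(img, gamma=2.2)
#   brighter = adjust_gamma(img, gamma=0.5)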
def convert_image_dtype(image, dtype, saturate=False, name=None):
"""Convert `image` to `dtype`, scaling its values if needed.
Images that are represented using floating point values are expected to have
values in the range [0,1). Image data stored in integer data types are
expected to have values in the range `[0,MAX]`, where `MAX` is the largest
positive representable number for the data type.
This op converts between data types, scaling the values appropriately before
casting.
Note that converting from floating point inputs to integer types may lead to
over/underflow problems. Set saturate to `True` to avoid such problem in
problematic conversions. If enabled, saturation will clip the output into the
allowed range before performing a potentially dangerous cast (and only before
performing such a cast, i.e., when casting from a floating point to an integer
type, and when casting from a signed to an unsigned type; `saturate` has no
effect on casts between floats, or on casts that increase the type's range).
Args:
image: An image.
dtype: A `DType` to convert `image` to.
saturate: If `True`, clip the input before casting (if necessary).
name: A name for this operation (optional).
Returns:
`image`, converted to `dtype`.
"""
image = ops.convert_to_tensor(image, name='image')
if dtype == image.dtype:
return array_ops.identity(image, name=name)
with ops.name_scope(name, 'convert_image', [image]) as name:
# Both integer: use integer multiplication in the larger range
if image.dtype.is_integer and dtype.is_integer:
scale_in = image.dtype.max
scale_out = dtype.max
if scale_in > scale_out:
# Scaling down, scale first, then cast. The scaling factor will
# cause in.max to be mapped to above out.max but below out.max+1,
# so that the output is safely in the supported range.
scale = (scale_in + 1) // (scale_out + 1)
scaled = math_ops.div(image, scale)
if saturate:
return math_ops.saturate_cast(scaled, dtype, name=name)
else:
return math_ops.cast(scaled, dtype, name=name)
else:
# Scaling up, cast first, then scale. The scale will not map in.max to
# out.max, but converting back and forth should result in no change.
if saturate:
          cast = math_ops.saturate_cast(image, dtype)
else:
cast = math_ops.cast(image, dtype)
scale = (scale_out + 1) // (scale_in + 1)
return math_ops.multiply(cast, scale, name=name)
elif image.dtype.is_floating and dtype.is_floating:
# Both float: Just cast, no possible overflows in the allowed ranges.
      # Note: We're ignoring float overflows. If your image dynamic range
      # exceeds float range, you're on your own.
return math_ops.cast(image, dtype, name=name)
else:
if image.dtype.is_integer:
# Converting to float: first cast, then scale. No saturation possible.
cast = math_ops.cast(image, dtype)
scale = 1. / image.dtype.max
return math_ops.multiply(cast, scale, name=name)
else:
# Converting from float: first scale, then cast
scale = dtype.max + 0.5 # avoid rounding problems in the cast
scaled = math_ops.multiply(image, scale)
if saturate:
return math_ops.saturate_cast(scaled, dtype, name=name)
else:
return math_ops.cast(scaled, dtype, name=name)
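# Illustrative example of the scaling semantics above (hypothetical values):
# a uint8 pixel of 255 converts to float32 1.0 (multiplied by 1/255 after the
# cast), and converting back multiplies by 255.5 before the cast, so the
# round trip maps each integer back to itself despite truncation in the cast.
#
#   flt = convert_image_dtype(uint8_image, dtypes.float32)
#   u8 = convert_image_dtype(flt, dtypes.uint8, saturate=True)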
def rgb_to_grayscale(images, name=None):
"""Converts one or more images from RGB to Grayscale.
Outputs a tensor of the same `DType` and rank as `images`. The size of the
last dimension of the output is 1, containing the Grayscale value of the
pixels.
Args:
images: The RGB tensor to convert. Last dimension must have size 3 and
should contain RGB values.
name: A name for the operation (optional).
Returns:
The converted grayscale image(s).
"""
with ops.name_scope(name, 'rgb_to_grayscale', [images]) as name:
images = ops.convert_to_tensor(images, name='images')
    # Remember original dtype so we can convert back if needed
orig_dtype = images.dtype
flt_image = convert_image_dtype(images, dtypes.float32)
# Reference for converting between RGB and grayscale.
# https://en.wikipedia.org/wiki/Luma_%28video%29
rgb_weights = [0.2989, 0.5870, 0.1140]
rank_1 = array_ops.expand_dims(array_ops.rank(images) - 1, 0)
gray_float = math_ops.reduce_sum(flt_image * rgb_weights,
rank_1,
keep_dims=True)
gray_float.set_shape(images.get_shape()[:-1].concatenate([1]))
return convert_image_dtype(gray_float, orig_dtype, name=name)
def grayscale_to_rgb(images, name=None):
"""Converts one or more images from Grayscale to RGB.
Outputs a tensor of the same `DType` and rank as `images`. The size of the
last dimension of the output is 3, containing the RGB value of the pixels.
Args:
images: The Grayscale tensor to convert. Last dimension must be size 1.
name: A name for the operation (optional).
Returns:
The converted grayscale image(s).
"""
with ops.name_scope(name, 'grayscale_to_rgb', [images]) as name:
images = ops.convert_to_tensor(images, name='images')
rank_1 = array_ops.expand_dims(array_ops.rank(images) - 1, 0)
shape_list = (
[array_ops.ones(rank_1,
dtype=dtypes.int32)] + [array_ops.expand_dims(3, 0)])
multiples = array_ops.concat(shape_list, 0)
rgb = array_ops.tile(images, multiples, name=name)
rgb.set_shape(images.get_shape()[:-1].concatenate([3]))
return rgb
# pylint: disable=invalid-name
def random_hue(image, max_delta, seed=None):
"""Adjust the hue of an RGB image by a random factor.
Equivalent to `adjust_hue()` but uses a `delta` randomly
picked in the interval `[-max_delta, max_delta]`.
`max_delta` must be in the interval `[0, 0.5]`.
Args:
image: RGB image or images. Size of the last dimension must be 3.
max_delta: float. Maximum value for the random delta.
seed: An operation-specific seed. It will be used in conjunction
with the graph-level seed to determine the real seeds that will be
used in this operation. Please see the documentation of
set_random_seed for its interaction with the graph-level random seed.
Returns:
3-D float tensor of shape `[height, width, channels]`.
Raises:
ValueError: if `max_delta` is invalid.
"""
if max_delta > 0.5:
raise ValueError('max_delta must be <= 0.5.')
if max_delta < 0:
raise ValueError('max_delta must be non-negative.')
delta = random_ops.random_uniform([], -max_delta, max_delta, seed=seed)
return adjust_hue(image, delta)
def adjust_hue(image, delta, name=None):
"""Adjust hue of an RGB image.
  This is a convenience method that converts an RGB image to float
  representation, converts it to HSV, adds an offset to the hue channel,
  converts back to RGB, and then back to the original data type. If several
  adjustments are chained it is advisable to minimize the number of redundant
  conversions.
`image` is an RGB image. The image hue is adjusted by converting the
image to HSV and rotating the hue channel (H) by
`delta`. The image is then converted back to RGB.
`delta` must be in the interval `[-1, 1]`.
Args:
image: RGB image or images. Size of the last dimension must be 3.
delta: float. How much to add to the hue channel.
name: A name for this operation (optional).
Returns:
Adjusted image(s), same shape and DType as `image`.
"""
with ops.name_scope(name, 'adjust_hue', [image]) as name:
image = ops.convert_to_tensor(image, name='image')
    # Remember original dtype so we can convert back if needed
orig_dtype = image.dtype
flt_image = convert_image_dtype(image, dtypes.float32)
# TODO(zhengxq): we will switch to the fused version after we add a GPU
# kernel for that.
fused = os.environ.get('TF_ADJUST_HUE_FUSED', '')
fused = fused.lower() in ('true', 't', '1')
if not fused:
hsv = gen_image_ops.rgb_to_hsv(flt_image)
hue = array_ops.slice(hsv, [0, 0, 0], [-1, -1, 1])
saturation = array_ops.slice(hsv, [0, 0, 1], [-1, -1, 1])
value = array_ops.slice(hsv, [0, 0, 2], [-1, -1, 1])
      # Note that we add 1 (one full hue turn in this [0, 1) representation)
      # to keep the argument of mod positive, since delta is in [-0.5, 0.5].
hue = math_ops.mod(hue + (delta + 1.), 1.)
hsv_altered = array_ops.concat([hue, saturation, value], 2)
rgb_altered = gen_image_ops.hsv_to_rgb(hsv_altered)
else:
rgb_altered = gen_image_ops.adjust_hue(flt_image, delta)
return convert_image_dtype(rgb_altered, orig_dtype)
def random_saturation(image, lower, upper, seed=None):
"""Adjust the saturation of an RGB image by a random factor.
Equivalent to `adjust_saturation()` but uses a `saturation_factor` randomly
picked in the interval `[lower, upper]`.
Args:
image: RGB image or images. Size of the last dimension must be 3.
lower: float. Lower bound for the random saturation factor.
upper: float. Upper bound for the random saturation factor.
seed: An operation-specific seed. It will be used in conjunction
with the graph-level seed to determine the real seeds that will be
used in this operation. Please see the documentation of
set_random_seed for its interaction with the graph-level random seed.
Returns:
Adjusted image(s), same shape and DType as `image`.
Raises:
ValueError: if `upper <= lower` or if `lower < 0`.
"""
if upper <= lower:
raise ValueError('upper must be > lower.')
if lower < 0:
raise ValueError('lower must be non-negative.')
# Pick a float in [lower, upper]
saturation_factor = random_ops.random_uniform([], lower, upper, seed=seed)
return adjust_saturation(image, saturation_factor)
def adjust_saturation(image, saturation_factor, name=None):
"""Adjust saturation of an RGB image.
  This is a convenience method that converts an RGB image to float
  representation, converts it to HSV, adds an offset to the saturation
  channel, converts back to RGB, and then back to the original data type. If
  several adjustments are chained it is advisable to minimize the number of
  redundant conversions.
`image` is an RGB image. The image saturation is adjusted by converting the
image to HSV and multiplying the saturation (S) channel by
`saturation_factor` and clipping. The image is then converted back to RGB.
Args:
image: RGB image or images. Size of the last dimension must be 3.
saturation_factor: float. Factor to multiply the saturation by.
name: A name for this operation (optional).
Returns:
Adjusted image(s), same shape and DType as `image`.
"""
with ops.name_scope(name, 'adjust_saturation', [image]) as name:
image = ops.convert_to_tensor(image, name='image')
    # Remember original dtype so we can convert back if needed
orig_dtype = image.dtype
flt_image = convert_image_dtype(image, dtypes.float32)
# TODO(zhengxq): we will switch to the fused version after we add a GPU
# kernel for that.
fused = os.environ.get('TF_ADJUST_SATURATION_FUSED', '')
fused = fused.lower() in ('true', 't', '1')
if fused:
return convert_image_dtype(
gen_image_ops.adjust_saturation(flt_image, saturation_factor),
orig_dtype)
hsv = gen_image_ops.rgb_to_hsv(flt_image)
hue = array_ops.slice(hsv, [0, 0, 0], [-1, -1, 1])
saturation = array_ops.slice(hsv, [0, 0, 1], [-1, -1, 1])
value = array_ops.slice(hsv, [0, 0, 2], [-1, -1, 1])
saturation *= saturation_factor
saturation = clip_ops.clip_by_value(saturation, 0.0, 1.0)
hsv_altered = array_ops.concat([hue, saturation, value], 2)
rgb_altered = gen_image_ops.hsv_to_rgb(hsv_altered)
return convert_image_dtype(rgb_altered, orig_dtype)
def decode_image(contents, channels=None, name=None):
"""Convenience function for `decode_gif`, `decode_jpeg`, and `decode_png`.
Detects whether an image is a GIF, JPEG, or PNG, and performs the appropriate
operation to convert the input bytes `string` into a `Tensor` of type `uint8`.
Note: `decode_gif` returns a 4-D array `[num_frames, height, width, 3]`, as
opposed to `decode_jpeg` and `decode_png`, which return 3-D arrays
`[height, width, num_channels]`. Make sure to take this into account when
constructing your graph if you are intermixing GIF files with JPEG and/or PNG
files.
Args:
contents: 0-D `string`. The encoded image bytes.
channels: An optional `int`. Defaults to `0`. Number of color channels for
the decoded image.
name: A name for the operation (optional)
Returns:
`Tensor` with type `uint8` with shape `[height, width, num_channels]` for
JPEG and PNG images and shape `[num_frames, height, width, 3]` for GIF
images.
"""
with ops.name_scope(name, 'decode_image') as scope:
if channels not in (None, 0, 1, 3):
raise ValueError('channels must be in (None, 0, 1, 3)')
substr = string_ops.substr(contents, 0, 4)
def _gif():
# Create assert op to check that bytes are GIF decodable
is_gif = math_ops.equal(substr, b'\x47\x49\x46\x38', name='is_gif')
decode_msg = 'Unable to decode bytes as JPEG, PNG, or GIF'
assert_decode = control_flow_ops.Assert(is_gif, [decode_msg])
# Create assert to make sure that channels is not set to 1
# Already checked above that channels is in (None, 0, 1, 3)
gif_channels = 0 if channels is None else channels
good_channels = math_ops.not_equal(gif_channels, 1, name='check_channels')
channels_msg = 'Channels must be in (None, 0, 3) when decoding GIF images'
assert_channels = control_flow_ops.Assert(good_channels, [channels_msg])
with ops.control_dependencies([assert_decode, assert_channels]):
return gen_image_ops.decode_gif(contents)
def _png():
return gen_image_ops.decode_png(contents, channels)
def check_png():
is_png = math_ops.equal(substr, b'\211PNG', name='is_png')
return control_flow_ops.cond(is_png, _png, _gif, name='cond_png')
def _jpeg():
return gen_image_ops.decode_jpeg(contents, channels)
is_jpeg = math_ops.equal(substr, b'\xff\xd8\xff\xe0', name='is_jpeg')
return control_flow_ops.cond(is_jpeg, _jpeg, check_png, name='cond_jpeg')
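# Illustrative usage sketch (hypothetical; `read_file` lives in io_ops, which
# this module is not assumed to import):
#
#   contents = io_ops.read_file('photo.jpg')
#   image = decode_image(contents, channels=3)  # uint8, [height, width, 3]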
def total_variation(images, name=None):
"""Calculate and return the total variation for one or more images.
The total variation is the sum of the absolute differences for neighboring
pixel-values in the input images. This measures how much noise is in the
images.
This can be used as a loss-function during optimization so as to suppress
noise in images. If you have a batch of images, then you should calculate
the scalar loss-value as the sum:
`loss = tf.reduce_sum(tf.image.total_variation(images))`
This implements the anisotropic 2-D version of the formula described here:
https://en.wikipedia.org/wiki/Total_variation_denoising
Args:
images: 4-D Tensor of shape `[batch, height, width, channels]` or
3-D Tensor of shape `[height, width, channels]`.
name: A name for the operation (optional).
Raises:
ValueError: if images.shape is not a 3-D or 4-D vector.
Returns:
The total variation of `images`.
If `images` was 4-D, return a 1-D float Tensor of shape `[batch]` with the
total variation for each image in the batch.
If `images` was 3-D, return a scalar float with the total variation for
that image.
"""
with ops.name_scope(name, 'total_variation'):
ndims = images.get_shape().ndims
if ndims == 3:
# The input is a single image with shape [height, width, channels].
# Calculate the difference of neighboring pixel-values.
# The images are shifted one pixel along the height and width by slicing.
pixel_dif1 = images[1:, :, :] - images[:-1, :, :]
pixel_dif2 = images[:, 1:, :] - images[:, :-1, :]
      # Sum over all axes. (None is an alias for all axes.)
sum_axis = None
elif ndims == 4:
# The input is a batch of images with shape:
# [batch, height, width, channels].
# Calculate the difference of neighboring pixel-values.
# The images are shifted one pixel along the height and width by slicing.
pixel_dif1 = images[:, 1:, :, :] - images[:, :-1, :, :]
pixel_dif2 = images[:, :, 1:, :] - images[:, :, :-1, :]
      # Only sum over the last 3 axes.
# This results in a 1-D tensor with the total variation for each image.
sum_axis = [1, 2, 3]
else:
raise ValueError('\'images\' must be either 3 or 4-dimensional.')
# Calculate the total variation by taking the absolute value of the
# pixel-differences and summing over the appropriate axis.
tot_var = math_ops.reduce_sum(math_ops.abs(pixel_dif1), axis=sum_axis) + \
math_ops.reduce_sum(math_ops.abs(pixel_dif2), axis=sum_axis)
return tot_var
| {
"content_hash": "cb916c6e794acd100b47d0a6b95f0bb3",
"timestamp": "",
"source": "github",
"line_count": 1326,
"max_line_length": 141,
"avg_line_length": 36.984162895927604,
"alnum_prop": 0.6662384535388757,
"repo_name": "jjas0nn/solvem",
"id": "7d185a81376e4996ecdcc0e6a08d27bbe5490e47",
"size": "49731",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tensorflow/lib/python2.7/site-packages/tensorflow/python/ops/image_ops_impl.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "67"
},
{
"name": "C",
"bytes": "309086"
},
{
"name": "C++",
"bytes": "10234032"
},
{
"name": "CMake",
"bytes": "307"
},
{
"name": "CSS",
"bytes": "1891"
},
{
"name": "Fortran",
"bytes": "6361"
},
{
"name": "HTML",
"bytes": "2989"
},
{
"name": "Objective-C",
"bytes": "567"
},
{
"name": "Python",
"bytes": "18261384"
},
{
"name": "Shell",
"bytes": "3246"
}
],
"symlink_target": ""
} |
import time
import lib.stm32
import lib.stlinkex
# Stm32 L0 and L1 programming
class Flash():
PECR_OFFSET = 4
PEKEYR_OFFSET = 0x0c
PRGKEYR_OFFSET = 0x10
OPTKEYR_OFFSET = 0x14
SR_OFFSET = 0x18
STM32L0_NVM_PHY = 0x40022000
STM32L1_NVM_PHY = 0x40023c00
STM32_NVM_PEKEY1 = 0x89abcdef
STM32_NVM_PEKEY2 = 0x02030405
STM32_NVM_PRGKEY1 = 0x8c9daebf
STM32_NVM_PRGKEY2 = 0x13141516
PECR_PELOCK = 1 << 0
PECR_PRGLOCK = 1 << 1
PECR_PRG = 1 << 3
PECR_ERASE = 1 << 9
PECR_FPRG = 1 << 10
SR_BSY = 1 << 0
SR_EOP = 1 << 1
SR_WRPERR = 1 << 8
SR_PGAERR = 1 << 9
SR_SIZERR = 1 << 10
SR_ERROR_MASK = SR_WRPERR | SR_PGAERR | SR_SIZERR
def __init__(self, driver, stlink, dbg):
self._driver = driver
self._stlink = stlink
self._dbg = dbg
self._page_size = 2048
        # Use the core id to find out if this is an L0 or an L1.
if stlink._coreid == 0xbc11477:
self._nvm = Flash.STM32L0_NVM_PHY
self._page_size = 128
else:
self._nvm = Flash.STM32L1_NVM_PHY
self._page_size = 256
self.unlock()
def clear_sr(self):
# clear errors
sr = self._stlink.get_debugreg32(self._nvm + Flash.SR_OFFSET)
self._stlink.set_debugreg32(self._nvm + Flash.SR_OFFSET, sr)
def unlock(self):
self._dbg.debug('unlock')
self._driver.core_reset_halt()
self.wait_busy(0.01)
self.clear_sr()
# Lock first. Double unlock results in error!
self._stlink.set_debugreg32(self._nvm + Flash.PECR_OFFSET,
Flash.PECR_PELOCK)
pecr = self._stlink.get_debugreg32(self._nvm + Flash.PECR_OFFSET)
if pecr & Flash.PECR_PELOCK:
# unlock keys
self._stlink.set_debugreg32(self._nvm + Flash.PEKEYR_OFFSET,
Flash.STM32_NVM_PEKEY1)
self._stlink.set_debugreg32(self._nvm + Flash.PEKEYR_OFFSET,
Flash.STM32_NVM_PEKEY2)
pecr = self._stlink.get_debugreg32(self._nvm + Flash.PECR_OFFSET)
        else:
raise lib.stlinkex.StlinkException(
'Unexpected unlock behaviour! FLASH_CR 0x%08x' % pecr)
        # Check if programming was unlocked
if pecr & Flash.PECR_PELOCK:
raise lib.stlinkex.StlinkException(
                'Error unlocking FLASH_CR: 0x%08x. Reset!' % pecr)
def lock(self):
self._stlink.set_debugreg32(self._nvm + Flash.PECR_OFFSET,
Flash.PECR_PELOCK)
self._driver.core_reset_halt()
def prg_unlock(self):
pecr = self._stlink.get_debugreg32(self._nvm + Flash.PECR_OFFSET)
if not pecr & Flash.PECR_PRGLOCK:
return
if pecr & Flash.PECR_PELOCK:
raise lib.stlinkex.StlinkException('PELOCK still set: %08x' % pecr)
# unlock keys
self._stlink.set_debugreg32(self._nvm + Flash.PRGKEYR_OFFSET,
Flash.STM32_NVM_PRGKEY1)
self._stlink.set_debugreg32(self._nvm + Flash.PRGKEYR_OFFSET,
Flash.STM32_NVM_PRGKEY2)
pecr = self._stlink.get_debugreg32(self._nvm + Flash.PECR_OFFSET)
if pecr & Flash.PECR_PRGLOCK:
raise lib.stlinkex.StlinkException('PRGLOCK still set: %08x' % pecr)
def erase_pages(self, addr, size):
self._dbg.verbose('erase_pages from addr 0x%08x for %d byte' %
(addr, size))
erase_addr = addr & ~(self._page_size - 1)
last_addr = (addr + size + self._page_size - 1) &\
~(self._page_size - 1)
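        # Illustrative arithmetic (assuming a 128-byte page): for addr=...0x30
        # and size=0x90, erase_addr rounds down to ...0x00 and last_addr
        # rounds up to ...0x100, so two full pages are erased.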
self._dbg.bargraph_start('Erasing FLASH', value_min=erase_addr,
value_max=last_addr)
self.prg_unlock()
pecr = Flash.PECR_PRG | Flash.PECR_ERASE
self._stlink.set_debugreg32(self._nvm + Flash.PECR_OFFSET, pecr)
while erase_addr < last_addr:
self._stlink.set_debugreg32(erase_addr, 0)
self.wait_busy(0.01)
erase_addr += self._page_size
self._dbg.bargraph_update(value=erase_addr)
self._dbg.bargraph_done()
self._stlink.set_debugreg32(self._nvm + Flash.PECR_OFFSET, 0)
def wait_busy(self, wait_time, bargraph_msg=None, check_eop=False):
end_time = time.time() + wait_time * 1.5
if bargraph_msg:
self._dbg.bargraph_start(bargraph_msg, value_min=time.time(),
value_max=time.time() + wait_time)
while time.time() < end_time:
if bargraph_msg:
self._dbg.bargraph_update(value=time.time())
status = self._stlink.get_debugreg32(self._nvm + Flash.SR_OFFSET)
            if not status & (Flash.SR_BSY | (check_eop & Flash.SR_EOP)):
self.end_of_operation(status)
if bargraph_msg:
self._dbg.bargraph_done()
if check_eop:
self._stlink.set_debugreg32(self._nvm + Flash.SR_OFFSET,
Flash.SR_EOP)
return
time.sleep(wait_time / 20)
raise lib.stlinkex.StlinkException('Operation timeout')
def end_of_operation(self, status):
if status & Flash.SR_ERROR_MASK:
raise lib.stlinkex.StlinkException(
'Error writing FLASH with status (FLASH_SR) %08x' % status)
class Stm32L0(lib.stm32.Stm32):
def flash_erase_all(self, flash_size):
# Mass erase is only possible by setting and removing flash
# write protection. This will also erase EEPROM!
# Use page erase instead
self._dbg.debug('Stm32L0.flash_erase_all')
flash = Flash(self, self._stlink, self._dbg)
        flash.erase_pages(lib.stm32.Stm32.FLASH_START, flash_size)
flash.lock()
def flash_write(self, addr, data, erase=False, erase_sizes=None):
if addr is None:
addr = self.FLASH_START
self._dbg.debug(
            'Stm32L0.flash_write '
'(%s, [data:%dBytes], erase=%s, erase_sizes=%s)'
% (addr, len(data), erase, erase_sizes))
        if addr % 4:
            raise lib.stlinkex.StlinkException(
                'Start address is not aligned to word')
flash = Flash(self, self._stlink, self._dbg)
if erase:
if erase_sizes:
flash.erase_pages(addr, len(data))
else:
flash.erase_all()
self._dbg.bargraph_start('Writing FLASH', value_min=addr,
value_max=addr + len(data))
flash.unlock()
flash.prg_unlock()
datablock = data
data_addr = addr
block = datablock
while len(datablock):
size = 0
            if data_addr & ((flash._page_size >> 1) - 1):
                # not half page aligned
                size = data_addr & ((flash._page_size >> 1) - 1)
size = (flash._page_size >> 1) - size
if len(datablock) < (flash._page_size >> 1):
# remainder not full half page
size = len(datablock)
while size:
block = datablock[:4]
datablock = datablock[4:]
if max(block) != 0:
self._stlink.set_mem32(data_addr, block)
data_addr += 4
size -= 4
self._dbg.bargraph_update(value=data_addr)
flash.wait_busy(0.005, check_eop=True)
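            # Enable half-page programming (FPRG together with PRG) for the
            # remaining half-page-aligned data.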
pecr = Flash.PECR_FPRG | Flash.PECR_PRG
self._stlink.set_debugreg32(flash._nvm + Flash.PECR_OFFSET, pecr)
while len(datablock) >= (flash._page_size >> 1):
block = datablock[:(flash._page_size >> 1)]
datablock = datablock[(flash._page_size >> 1):]
if max(block) != 0:
self._stlink.set_mem32(data_addr, block)
data_addr += len(block)
self._dbg.bargraph_update(value=data_addr)
flash.wait_busy(0.005, check_eop=True)
self._stlink.set_debugreg32(flash._nvm + Flash.PECR_OFFSET, 0)
flash.lock()
self._dbg.bargraph_done()
| {
"content_hash": "2b33314c6c0a3d7013594b2ac6e8d8ed",
"timestamp": "",
"source": "github",
"line_count": 205,
"max_line_length": 80,
"avg_line_length": 41.12682926829268,
"alnum_prop": 0.5327956351559721,
"repo_name": "pavelrevak/pystlink",
"id": "093c5a6d39781efd8fe3f22c9640fee4ead7ff56",
"size": "8431",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "lib/stm32l0.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "186164"
},
{
"name": "Shell",
"bytes": "161"
}
],
"symlink_target": ""
} |
from sqlalchemy import Column, String, INTEGER, DATETIME, FLOAT, Text
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import UniqueConstraint
from sqlalchemy.orm import class_mapper
Base = declarative_base()
class ImageMeta(Base):
__tablename__ = 'tbl_images'
id = Column(INTEGER, primary_key=True, autoincrement=True)
uuid = Column(String(32))
folder = Column(String(255))
filename = Column(String(256))
file_size = Column(INTEGER)
md5 = Column(String(32), index=True)
file_createtime = Column(DATETIME)
image_width = Column(INTEGER)
image_height = Column(INTEGER)
origin_datetime = Column(DATETIME)
digit_datetime = Column(DATETIME)
camera_brand = Column(String(32))
camera_type = Column(String(32))
focal_length = Column(FLOAT)
flash = Column(FLOAT)
fnumber = Column(FLOAT)
aperture = Column(FLOAT)
exposure_time = Column(FLOAT)
exposure_bias = Column(FLOAT)
exposure_mode = Column(FLOAT)
iso_speed_rating = Column(FLOAT)
latitude = Column(String(64))
longitude = Column(String(64))
altitude = Column(String(32))
country = Column(String(32))
province = Column(String(32))
city = Column(String(32))
address = Column(String(512))
orientation = Column(INTEGER)
info = Column(String(1024))
__table_args__ = (UniqueConstraint('folder', 'filename', name='folder_filename'),)
def get_filename_with_folder(self):
if self.folder:
filename = "{folder}{sep}{basename}".format(folder=self.folder, sep="/",
basename=self.filename)
else:
filename = self.filename
return filename
@classmethod
def _get_fields(cls):
return class_mapper(cls).c.keys()
def get_dict_info(self):
d = {}
for field in self._get_fields():
d[field] = getattr(self, field)
return d
class Tag(Base):
__tablename__ = "tbl_tags"
id = Column(INTEGER, primary_key=True, autoincrement=True)
image_id = Column(INTEGER)
tag = Column(String(32))
value = Column(String(32))
class Option(Base):
__tablename__ = "tbl_options"
id = Column(INTEGER, primary_key=True, autoincrement=True)
name = Column(String(32), unique=True)
value = Column(String(32))
class Person(Base):
__tablename__ = "tbl_person"
id = Column(INTEGER, primary_key=True, autoincrement=True)
name = Column(String(32), unique=True)
sex = Column(String(8))
img_list = Column(Text)
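# Illustrative usage sketch (hypothetical database URL; the engine and
# session names below are assumptions, not part of this module):
#
#   from sqlalchemy import create_engine
#   from sqlalchemy.orm import sessionmaker
#
#   engine = create_engine('sqlite:///photos.db')
#   Base.metadata.create_all(engine)  # creates tbl_images, tbl_tags, ...
#   session = sessionmaker(bind=engine)()
#   first = session.query(ImageMeta).filter_by(folder='2020').first()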
| {
"content_hash": "adc8349a643f1431c014df957938f70a",
"timestamp": "",
"source": "github",
"line_count": 89,
"max_line_length": 86,
"avg_line_length": 29.0561797752809,
"alnum_prop": 0.6384377416860015,
"repo_name": "wrenchzc/photomanager",
"id": "e8ba4df28410d40ec3cd683758abc90741f449fe",
"size": "2586",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "photomanager/db/models.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "87987"
}
],
"symlink_target": ""
} |
"""Pretty print a byte stream suspected to be UTF-8 text.
This should work with most versions of Python on Posix systems. In particular
it works with CPython 2.6, CPython 2.7, and CPython 3.3+, on Linux and Darwin,
with narrow (16 bits per character) and wide (21 or more bits per character)
Unicode strings, and with the native unicodedata module or the icu module
provided by PyICU.
"""
from __future__ import unicode_literals
import io
import sys
try:
import icu # pylint: disable=g-import-not-at-top
INFO = 'Unicode %s (ICU %s) via module %s\n' % (icu.UNICODE_VERSION,
icu.ICU_VERSION,
icu.__name__)
_NFD = icu.Normalizer2.getNFDInstance()
def IsPrintable(uchr):
return icu.Char.isprint(uchr)
def IsFormat(uchr):
return icu.Char.charType(uchr) == icu.UCharCategory.FORMAT_CHAR
def CharName(uchr):
return icu.Char.charName(uchr)
def ToNFD(uchr):
return _NFD.normalize(uchr)
except ImportError:
import unicodedata # pylint: disable=g-import-not-at-top
INFO = 'Unicode %s via module %s\n' % (unicodedata.unidata_version,
unicodedata.__name__)
def IsPrintable(uchr):
return not unicodedata.category(uchr).startswith('C')
def IsFormat(uchr):
return unicodedata.category(uchr) == 'Cf'
def CharName(uchr):
return unicodedata.name(uchr, '')
def ToNFD(uchr):
return unicodedata.normalize('NFD', uchr)
_STDOUT = io.open(1, mode='wt', encoding='utf-8', closefd=False)
_STDERR = io.open(2, mode='wt', encoding='utf-8', closefd=False)
CHAR_ESCAPE = {
0x00: r'\0',
0x07: r'\a',
0x08: r'\b',
0x09: r'\t',
0x0A: r'\n',
0x0B: r'\v',
0x0C: r'\f',
0x0D: r'\r',
}
CHAR_NAME = {
# C0 controls
0x00: 'NULL',
0x01: 'START OF HEADING',
0x02: 'START OF TEXT',
0x03: 'END OF TEXT',
0x04: 'END OF TRANSMISSION',
0x05: 'ENQUIRY',
0x06: 'ACKNOWLEDGE',
0x07: 'BELL',
0x08: 'BACKSPACE',
0x09: 'CHARACTER TABULATION',
0x0A: 'LINE FEED (LF)',
0x0B: 'LINE TABULATION',
0x0C: 'FORM FEED (FF)',
0x0D: 'CARRIAGE RETURN (CR)',
0x0E: 'SHIFT OUT',
0x0F: 'SHIFT IN',
0x10: 'DATA LINK ESCAPE',
0x11: 'DEVICE CONTROL ONE',
0x12: 'DEVICE CONTROL TWO',
0x13: 'DEVICE CONTROL THREE',
0x14: 'DEVICE CONTROL FOUR',
0x15: 'NEGATIVE ACKNOWLEDGE',
0x16: 'SYNCHRONOUS IDLE',
0x17: 'END OF TRANSMISSION BLOCK',
0x18: 'CANCEL',
0x19: 'END OF MEDIUM',
0x1A: 'SUBSTITUTE',
0x1B: 'ESCAPE',
0x1C: 'INFORMATION SEPARATOR FOUR (FILE)',
0x1D: 'INFORMATION SEPARATOR THREE (GROUP)',
0x1E: 'INFORMATION SEPARATOR TWO (RECORD)',
0x1F: 'INFORMATION SEPARATOR ONE (UNIT)',
0x7F: 'DELETE',
# C1 controls
0x80: 'XXX',
0x81: 'XXX',
0x82: 'BREAK PERMITTED HERE',
0x83: 'NO BREAK HERE',
0x84: 'INDEX',
0x85: 'NEXT LINE (NEL)',
0x86: 'START OF SELECTED AREA',
0x87: 'END OF SELECTED AREA',
0x88: 'CHARACTER TABULATION SET',
0x89: 'CHARACTER TABULATION WITH JUSTIFICATION',
0x8A: 'LINE TABULATION SET',
0x8B: 'PARTIAL LINE FORWARD',
0x8C: 'PARTIAL LINE BACKWARD',
0x8D: 'REVERSE LINE FEED',
0x8E: 'SINGLE SHIFT TWO',
0x8F: 'SINGLE SHIFT THREE',
0x90: 'DEVICE CONTROL STRING',
0x91: 'PRIVATE USE ONE',
0x92: 'PRIVATE USE TWO',
0x93: 'SET TRANSMIT STATE',
0x94: 'CANCEL CHARACTER',
0x95: 'MESSAGE WAITING',
0x96: 'START OF GUARDED AREA',
0x97: 'END OF GUARDED AREA',
0x98: 'START OF STRING',
0x99: 'XXX',
0x9A: 'SINGLE CHARACTER INTRODUCER',
0x9B: 'CONTROL SEQUENCE INTRODUCER',
0x9C: 'STRING TERMINATOR',
0x9D: 'OPERATING SYSTEM COMMAND',
0x9E: 'PRIVACY MESSAGE',
0x9F: 'APPLICATION PROGRAM COMMAND',
}
def IsHighSurrogateCodepoint(cp):
return 0xD800 <= cp <= 0xDBFF
def IsLowSurrogateCodepoint(cp):
return 0xDC00 <= cp <= 0xDFFF
def SurrogatePayload(cp):
payload = cp & ((1 << 10) - 1)
if IsHighSurrogateCodepoint(cp):
return 0x10000 + (payload << 10)
elif IsLowSurrogateCodepoint(cp):
return payload
raise ValueError('SurrogatePayload() arg is not a surrogate: %X' % cp)
def CharToCodepoint(char):
if len(char) == 1:
return ord(char)
elif len(char) == 2:
hi = ord(char[0])
lo = ord(char[1])
if IsHighSurrogateCodepoint(hi) and IsLowSurrogateCodepoint(lo):
return SurrogatePayload(hi) | SurrogatePayload(lo)
raise TypeError('CharToCodepoint expected a character or surrogate pair')
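# Illustrative example of the surrogate arithmetic above: U+1F600 is stored
# on narrow builds as the pair D83D DE00; SurrogatePayload(0xD83D) == 0x1F400
# and SurrogatePayload(0xDE00) == 0x200, which OR together to 0x1F600.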
def CodepointToChar(codepoint):
return ('\\U%08X' % codepoint).encode('ascii').decode('unicode-escape')
def EscapeCodepoint(cp):
if cp in CHAR_ESCAPE:
return CHAR_ESCAPE[cp]
elif cp <= 0xFF:
return '\\x%02x' % cp
elif cp <= 0xFFFF:
return '\\u%04x' % cp
else:
return '\\U%08x' % cp
def CodepointName(cp):
"""Return a printable name for the given codepoint."""
name = CHAR_NAME.get(cp, '')
if name:
return name
if (0xE000 <= cp <= 0xF8FF or
0xF0000 <= cp <= 0xFFFFD or
0x100000 <= cp <= 0x10FFFD):
return '<Private Use>'
elif IsHighSurrogateCodepoint(cp):
return '<%sPrivate Use High Surrogate %X>' % (
'Non ' if cp <= 0xDB7F else '', SurrogatePayload(cp))
elif IsLowSurrogateCodepoint(cp):
return '<Low Surrogate %03X>' % SurrogatePayload(cp)
elif 0xFDD0 <= cp <= 0xFDEF or (cp & 0xFFFF) >= 0xFFFE:
return '<Noncharacter>'
return ''
def Decomposition(uchr):
nfd = ToNFD(uchr)
if nfd == uchr:
return ''
return ' '.join('%04X' % ord(c) for c in nfd)
def Write(writer, msg, *args):
writer.write(msg % tuple(args))
writer.flush()
return
def IsFollowByte(byte):
return 0b10000000 <= byte <= 0b10111111
def Utf8ChunkedStream(bstream):
"""Yield putative UTF-8 byte sequences of lead byte and follow bytes."""
utf8_bytes = []
while True:
bstring = bstream.read(1)
if not bstring:
# EOF reached.
if utf8_bytes:
yield utf8_bytes
return
assert len(bstring) == 1
byte = ord(bstring)
if IsFollowByte(byte):
utf8_bytes.append(byte)
else:
if utf8_bytes:
yield utf8_bytes
utf8_bytes = [byte]
return
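# Illustrative example (hypothetical input): the three bytes E2 82 AC (the
# UTF-8 encoding of U+20AC EURO SIGN) arrive as one chunk [0xE2, 0x82, 0xAC],
# since 0x82 and 0xAC are follow bytes attached to the lead byte 0xE2.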
def FormatByteSequence(byte_sequence):
return '%s (%s)' % (
' '.join('{:02X}'.format(b) for b in byte_sequence),
' '.join('{:08b}'.format(b) for b in byte_sequence),
)
def CodepointStream(bstream, writer):
"""Decoder for Thompson and Pike's FSS-UTF encoding that yields codepoints."""
for utf8_bytes in Utf8ChunkedStream(bstream):
assert utf8_bytes
byte = utf8_bytes[0]
if 0 <= byte <= 0b01111111:
lead_payload = byte
min_payload = 0
utf8_length = 1
elif 0b11000000 <= byte <= 0b11011111:
lead_payload = byte & 0b00011111
min_payload = 0x80
utf8_length = 2
elif 0b11100000 <= byte <= 0b11101111:
lead_payload = byte & 0b00001111
min_payload = 0x800
utf8_length = 3
elif 0b11110000 <= byte <= 0b11110111:
lead_payload = byte & 0b00000111
min_payload = 0x10000
utf8_length = 4
elif 0b11111000 <= byte <= 0b11111011:
lead_payload = byte & 0b00000011
min_payload = 0x200000
utf8_length = 5
elif 0b11111100 <= byte <= 0b11111101:
lead_payload = byte & 0b00000001
min_payload = 0x4000000
utf8_length = 6
elif 0b11111110 <= byte:
Write(writer, 'Error: Invalid UTF-8 sequence %s: '
'lead byte too large\n',
FormatByteSequence(utf8_bytes))
continue
else:
assert IsFollowByte(byte)
Write(writer, 'Error: Invalid UTF-8 sequence %s: '
'first byte is a follow byte\n',
FormatByteSequence(utf8_bytes))
continue
if len(utf8_bytes) != utf8_length:
Write(writer, 'Error: Invalid UTF-8 sequence %s: '
'expected %d bytes but found %d\n',
FormatByteSequence(utf8_bytes), utf8_length, len(utf8_bytes))
continue
if utf8_length > 4:
Write(writer, 'Warning: Unexpected UTF-8 sequence %s: '
'expected at most 4 bytes but found %d\n',
FormatByteSequence(utf8_bytes), utf8_length)
payload = lead_payload
for follow_byte in utf8_bytes[1:]:
payload <<= 6
payload |= follow_byte & 0b00111111
assert 0 <= payload <= 0x7FFFFFFF
if payload < min_payload:
Write(writer, 'Warning: Unexpected UTF-8 sequence %s: '
'overlong encoding of payload %X\n',
FormatByteSequence(utf8_bytes), payload)
if IsHighSurrogateCodepoint(payload) or IsLowSurrogateCodepoint(payload):
Write(writer, 'Warning: Unexpected UTF-8 sequence %s: '
'surrogate codepoint %X encoded as UTF-8\n',
FormatByteSequence(utf8_bytes), payload)
yield payload
return
def Dump(bstream):
"""Dump information about a byte stream suspected to be UTF-8."""
for codepoint in CodepointStream(bstream, _STDERR):
if 0 <= codepoint <= 0x10FFFF:
# First, work around platform/PyICU bugs in handling surrogates:
if (IsHighSurrogateCodepoint(codepoint) or
IsLowSurrogateCodepoint(codepoint)):
printable = EscapeCodepoint(codepoint)
name = CodepointName(codepoint)
deco = ''
else:
char = CodepointToChar(codepoint)
if IsPrintable(char):
printable = char
else:
printable = EscapeCodepoint(codepoint)
name = CharName(char)
if not name:
name = CodepointName(codepoint)
deco = Decomposition(char)
Write(_STDOUT, '%s\t%s\t%s\t%s\n',
printable, ('%04X' % codepoint).rjust(6), name, deco)
else:
Write(_STDERR, '\t%X\t<Invalid Codepoint>\n', codepoint)
return
def SanityCheck(writer):
Write(writer, INFO)
if CharName('\U00010300') != 'OLD ITALIC LETTER A':
Write(writer, 'Warning: Unicode data too old (< 3.1.0, 2001) or broken\n')
return
def main(argv):
SanityCheck(_STDERR)
if len(argv) == 1:
# For interactive use, read line by line from stdin:
with io.open(0, mode='rb', buffering=0, closefd=False) as bstream:
for line in bstream:
Dump(io.BytesIO(line))
else:
for path in argv[1:]:
with io.open(path, mode='rb') as bstream:
Dump(bstream)
return
if __name__ == '__main__':
main(sys.argv)
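# Illustrative invocations (assuming this file is saved as utf8_dump.py):
#
#   python utf8_dump.py suspect.txt               # dump one or more files
#   printf 'caf\xc3\xa9\n' | python utf8_dump.py  # or read stdin line by line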
| {
"content_hash": "ff1530d78cdafb2e538f6c197dd89a57",
"timestamp": "",
"source": "github",
"line_count": 363,
"max_line_length": 80,
"avg_line_length": 28.72176308539945,
"alnum_prop": 0.625071935545751,
"repo_name": "google/language-resources",
"id": "89d3d081a188d947ce387a7f777c492056ae6459",
"size": "10450",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "utils/utf8_dump.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "5561"
},
{
"name": "C++",
"bytes": "358465"
},
{
"name": "Dockerfile",
"bytes": "6989"
},
{
"name": "Java",
"bytes": "77129"
},
{
"name": "Makefile",
"bytes": "1621"
},
{
"name": "Python",
"bytes": "236246"
},
{
"name": "Ruby",
"bytes": "25"
},
{
"name": "Shell",
"bytes": "41139"
},
{
"name": "Starlark",
"bytes": "161379"
}
],
"symlink_target": ""
} |
"""Test Google Smart Home."""
from homeassistant.core import State
from homeassistant.const import (
ATTR_SUPPORTED_FEATURES, ATTR_UNIT_OF_MEASUREMENT, TEMP_CELSIUS)
from homeassistant.setup import async_setup_component
from homeassistant.components import climate
from homeassistant.components.google_assistant import (
const, trait, helpers, smart_home as sh)
from homeassistant.components.light.demo import DemoLight
BASIC_CONFIG = helpers.Config(
should_expose=lambda state: True,
agent_user_id='test-agent',
)
REQ_ID = 'ff36a3cc-ec34-11e6-b1a0-64510650abcf'
async def test_sync_message(hass):
"""Test a sync message."""
light = DemoLight(
None, 'Demo Light',
state=False,
hs_color=(180, 75),
)
light.hass = hass
light.entity_id = 'light.demo_light'
await light.async_update_ha_state()
# This should not show up in the sync request
hass.states.async_set('sensor.no_match', 'something')
# Excluded via config
hass.states.async_set('light.not_expose', 'on')
config = helpers.Config(
should_expose=lambda state: state.entity_id != 'light.not_expose',
agent_user_id='test-agent',
entity_config={
'light.demo_light': {
const.CONF_ROOM_HINT: 'Living Room',
const.CONF_ALIASES: ['Hello', 'World']
}
}
)
result = await sh.async_handle_message(hass, config, {
"requestId": REQ_ID,
"inputs": [{
"intent": "action.devices.SYNC"
}]
})
assert result == {
'requestId': REQ_ID,
'payload': {
'agentUserId': 'test-agent',
'devices': [{
'id': 'light.demo_light',
'name': {
'name': 'Demo Light',
'nicknames': [
'Hello',
'World',
]
},
'traits': [
trait.TRAIT_BRIGHTNESS,
trait.TRAIT_ONOFF,
trait.TRAIT_COLOR_SPECTRUM,
trait.TRAIT_COLOR_TEMP,
],
'type': sh.TYPE_LIGHT,
'willReportState': False,
'attributes': {
'colorModel': 'rgb',
'temperatureMinK': 2000,
'temperatureMaxK': 6535,
},
'roomHint': 'Living Room'
}]
}
}
async def test_query_message(hass):
"""Test a sync message."""
light = DemoLight(
None, 'Demo Light',
state=False,
hs_color=(180, 75),
)
light.hass = hass
light.entity_id = 'light.demo_light'
await light.async_update_ha_state()
light2 = DemoLight(
None, 'Another Light',
state=True,
hs_color=(180, 75),
ct=400,
brightness=78,
)
light2.hass = hass
light2.entity_id = 'light.another_light'
await light2.async_update_ha_state()
result = await sh.async_handle_message(hass, BASIC_CONFIG, {
"requestId": REQ_ID,
"inputs": [{
"intent": "action.devices.QUERY",
"payload": {
"devices": [{
"id": "light.demo_light",
}, {
"id": "light.another_light",
}, {
"id": "light.non_existing",
}]
}
}]
})
assert result == {
'requestId': REQ_ID,
'payload': {
'devices': {
'light.non_existing': {
'online': False,
},
'light.demo_light': {
'on': False,
'online': True,
},
'light.another_light': {
'on': True,
'online': True,
'brightness': 30,
'color': {
'spectrumRGB': 4194303,
'temperature': 2500,
}
},
}
}
}
async def test_execute(hass):
"""Test an execute command."""
await async_setup_component(hass, 'light', {
'light': {'platform': 'demo'}
})
await hass.services.async_call(
'light', 'turn_off', {'entity_id': 'light.ceiling_lights'},
blocking=True)
result = await sh.async_handle_message(hass, BASIC_CONFIG, {
"requestId": REQ_ID,
"inputs": [{
"intent": "action.devices.EXECUTE",
"payload": {
"commands": [{
"devices": [
{"id": "light.non_existing"},
{"id": "light.ceiling_lights"},
],
"execution": [{
"command": "action.devices.commands.OnOff",
"params": {
"on": True
}
}, {
"command":
"action.devices.commands.BrightnessAbsolute",
"params": {
"brightness": 20
}
}]
}]
}
}]
})
assert result == {
"requestId": REQ_ID,
"payload": {
"commands": [{
"ids": ['light.non_existing'],
"status": "ERROR",
"errorCode": "deviceOffline"
}, {
"ids": ['light.ceiling_lights'],
"status": "SUCCESS",
"states": {
"on": True,
"online": True,
'brightness': 20,
'color': {
'spectrumRGB': 16773155,
'temperature': 2631,
},
}
}]
}
}
async def test_raising_error_trait(hass):
"""Test raising an error while executing a trait command."""
hass.states.async_set('climate.bla', climate.STATE_HEAT, {
climate.ATTR_MIN_TEMP: 15,
climate.ATTR_MAX_TEMP: 30,
ATTR_SUPPORTED_FEATURES: climate.SUPPORT_OPERATION_MODE,
ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS,
})
result = await sh.async_handle_message(hass, BASIC_CONFIG, {
"requestId": REQ_ID,
"inputs": [{
"intent": "action.devices.EXECUTE",
"payload": {
"commands": [{
"devices": [
{"id": "climate.bla"},
],
"execution": [{
"command": "action.devices.commands."
"ThermostatTemperatureSetpoint",
"params": {
"thermostatTemperatureSetpoint": 10
}
}]
}]
}
}]
})
assert result == {
"requestId": REQ_ID,
"payload": {
"commands": [{
"ids": ['climate.bla'],
"status": "ERROR",
"errorCode": "valueOutOfRange"
}]
}
}
def test_serialize_input_boolean():
"""Test serializing an input boolean entity."""
state = State('input_boolean.bla', 'on')
entity = sh._GoogleEntity(None, BASIC_CONFIG, state)
assert entity.sync_serialize() == {
'id': 'input_boolean.bla',
'attributes': {},
'name': {'name': 'bla'},
'traits': ['action.devices.traits.OnOff'],
'type': 'action.devices.types.SWITCH',
'willReportState': False,
}
async def test_unavailable_state_doesnt_sync(hass):
"""Test that an unavailable entity does not sync over."""
light = DemoLight(
None, 'Demo Light',
state=False,
)
light.hass = hass
light.entity_id = 'light.demo_light'
light._available = False
await light.async_update_ha_state()
result = await sh.async_handle_message(hass, BASIC_CONFIG, {
"requestId": REQ_ID,
"inputs": [{
"intent": "action.devices.SYNC"
}]
})
assert result == {
'requestId': REQ_ID,
'payload': {
'agentUserId': 'test-agent',
'devices': []
}
}
async def test_empty_name_doesnt_sync(hass):
"""Test that an entity with empty name does not sync over."""
light = DemoLight(
None, ' ',
state=False,
)
light.hass = hass
light.entity_id = 'light.demo_light'
await light.async_update_ha_state()
result = await sh.async_handle_message(hass, BASIC_CONFIG, {
"requestId": REQ_ID,
"inputs": [{
"intent": "action.devices.SYNC"
}]
})
assert result == {
'requestId': REQ_ID,
'payload': {
'agentUserId': 'test-agent',
'devices': []
}
}
| {
"content_hash": "5afa0d3b17fcfd7991a27edf50e798df",
"timestamp": "",
"source": "github",
"line_count": 314,
"max_line_length": 74,
"avg_line_length": 28.863057324840764,
"alnum_prop": 0.4519474787597926,
"repo_name": "persandstrom/home-assistant",
"id": "66e7747e06a776498e34af548b63f4148d95ace4",
"size": "9063",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/components/google_assistant/test_smart_home.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1067"
},
{
"name": "Python",
"bytes": "11745210"
},
{
"name": "Ruby",
"bytes": "518"
},
{
"name": "Shell",
"bytes": "16652"
}
],
"symlink_target": ""
} |
import pytari2600.memory.cartridge as cartridge
import unittest
import pkg_resources
class TestCartridge(unittest.TestCase):
def test_cartridge(self):
cart = cartridge.GenericCartridge(pkg_resources.resource_filename(__name__, 'dummy_rom.bin'), 4, 0x1000, 0xFF9, 0x0)
# Write should do nothing
cart.write(0,7)
self.assertEqual(cart.read(0), 0)
self.assertEqual(cart.read(3), 3)
self.assertEqual(cart.read(2048+2), 2)
def test_ram_cartridge(self):
cart = cartridge.GenericCartridge(pkg_resources.resource_filename(__name__, 'dummy_rom.bin'), 4, 0x1000, 0xFF9, 0x080)
# Write should go to ram.
cart.write(0,7)
self.assertEqual(cart.read(0x80), 7)
cart.write(0,31)
self.assertEqual(cart.read(0x80), 31)
if __name__ == '__main__':
unittest.main()
| {
"content_hash": "6b06708ed93b197b43a7e65e7a86a4ff",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 126,
"avg_line_length": 35.625,
"alnum_prop": 0.6514619883040935,
"repo_name": "ajgrah2000/pytari2600",
"id": "422453266d1ce6d300e0d6223648ce2ac3457ec2",
"size": "855",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pytari2600/test/test_cartridge.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "260356"
}
],
"symlink_target": ""
} |
from django.http import Http404, HttpResponse
from django.core.exceptions import PermissionDenied
from corehq.apps.domain.decorators import login_and_domain_required, domain_specific_login_redirect
from functools import wraps
from corehq.apps.users.models import CouchUser, CommCareUser
from django.utils.translation import ugettext as _
from corehq.apps.users.dbaccessors.all_commcare_users import get_deleted_user_by_username
def require_permission_raw(permission_check, login_decorator=login_and_domain_required):
"""
A way to do more fine-grained permissions via decorator. The permission_check should be
a function that takes in a couch_user and a domain and returns True if that user can access
the page, otherwise false.
"""
def decorator(view_func):
@wraps(view_func)
def _inner(request, domain, *args, **kwargs):
if not hasattr(request, "couch_user"):
return domain_specific_login_redirect(request, domain)
elif request.user.is_superuser or permission_check(request.couch_user, domain):
return view_func(request, domain, *args, **kwargs)
else:
if request.is_ajax():
return HttpResponse(_("Sorry, you don't have permission to do this action!"), status=403)
raise PermissionDenied()
if login_decorator:
return login_decorator(_inner)
else:
return _inner
return decorator
def get_permission_name(permission):
try:
return permission.name
except AttributeError:
try:
return permission.__name__
except AttributeError:
return None
def require_permission(permission, data=None, login_decorator=login_and_domain_required):
try:
permission = permission.name
except AttributeError:
try:
permission = permission.__name__
except AttributeError:
pass
permission_check = lambda couch_user, domain: couch_user.has_permission(domain, permission, data=data)
return require_permission_raw(permission_check, login_decorator)
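# Illustrative usage sketch (hypothetical permission name and view function):
#
#   @require_permission('edit_data')
#   def my_view(request, domain):
#       ...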
require_can_edit_web_users = require_permission('edit_web_users')
require_can_edit_commcare_users = require_permission('edit_commcare_users')
def require_permission_to_edit_user(view_func):
@wraps(view_func)
def _inner(request, domain, couch_user_id, *args, **kwargs):
go_ahead = False
if hasattr(request, "couch_user"):
user = request.couch_user
if user.is_superuser or user.user_id == couch_user_id or (hasattr(user, "is_domain_admin") and user.is_domain_admin()):
go_ahead = True
else:
couch_user = CouchUser.get_by_user_id(couch_user_id)
if not couch_user:
raise Http404()
if couch_user.is_commcare_user() and request.couch_user.can_edit_commcare_users():
go_ahead = True
elif couch_user.is_web_user() and request.couch_user.can_edit_web_users():
go_ahead = True
if go_ahead:
return login_and_domain_required(view_func)(request, domain, couch_user_id, *args, **kwargs)
else:
return domain_specific_login_redirect(request, domain)
return _inner
def ensure_active_user_by_username(username):
"""
:param username: ex: [email protected]
    :return: tuple (valid, message, error_code) where
        valid: True by default; set to False for an inactive/deleted user
        message: English description of the error, used if error_code is missing
        error_code: key mapped in app_strings for the user
"""
ccu = CommCareUser.get_by_username(username)
valid, message, error_code = True, None, None
if ccu and not ccu.is_active:
valid, message, error_code = False, 'Your account has been deactivated, please contact your domain admin '\
'to reactivate', 'user.deactivated'
elif get_deleted_user_by_username(CommCareUser, username):
        valid, message, error_code = False, 'Your account has been deleted, please contact your domain admin to '\
                                            'request a restore', 'user.deleted'
return valid, message, error_code
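# Illustrative call (the username is hypothetical):
#
#     valid, message, error_code = ensure_active_user_by_username(
#         'someuser@example.commcarehq.org')
#     if not valid:
#         ...  # surface `message`, keyed by `error_code` when it is present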
| {
"content_hash": "2532c4d9e172ba83c1496805bc53ae92",
"timestamp": "",
"source": "github",
"line_count": 100,
"max_line_length": 131,
"avg_line_length": 43.33,
"alnum_prop": 0.6473574890376183,
"repo_name": "qedsoftware/commcare-hq",
"id": "1ba17692a35f696a776cc7be4dfb1c600ea2352c",
"size": "4333",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "corehq/apps/users/decorators.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ActionScript",
"bytes": "15950"
},
{
"name": "CSS",
"bytes": "508392"
},
{
"name": "HTML",
"bytes": "2869325"
},
{
"name": "JavaScript",
"bytes": "2395360"
},
{
"name": "PHP",
"bytes": "2232"
},
{
"name": "PLpgSQL",
"bytes": "125298"
},
{
"name": "Python",
"bytes": "14670713"
},
{
"name": "Shell",
"bytes": "37514"
}
],
"symlink_target": ""
} |
import heron.explorer.src.python.args as args
from heron.common.src.python.color import Log
from tabulate import tabulate
import heron.common.src.python.utils as utils
def create_parser(subparsers):
parser = subparsers.add_parser(
'topologies',
help='Display running topologies',
usage="%(prog)s cluster/[role]/[env] [options]",
add_help=True)
args.add_cluster_role_env(parser)
args.add_verbose(parser)
args.add_tracker_url(parser)
args.add_config(parser)
parser.set_defaults(subcommand='topologies')
return subparsers
def to_table(result):
max_count = 20
table, count = [], 0
for role, envs_topos in result.iteritems():
for env, topos in envs_topos.iteritems():
for topo in topos:
        count += 1
        if count <= max_count:
          table.append([role, env, topo])
header = ['role', 'env', 'topology']
rest_count = 0 if count <= max_count else count - max_count
return table, header, rest_count
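# Shape sketch for to_table (values are made up): given
#   result = {'role1': {'env1': ['topo-a', 'topo-b']}}
# it returns
#   ([['role1', 'env1', 'topo-a'], ['role1', 'env1', 'topo-b']],
#    ['role', 'env', 'topology'], 0)
# and rest_count only becomes non-zero once more than max_count (20)
# topologies have been seen.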
def show_cluster(cluster):
try:
result = utils.get_cluster_topologies(cluster)
if not result:
Log.error('Unknown cluster \'%s\'' % cluster)
return False
result = result[cluster]
except Exception:
return False
table, header, rest_count = to_table(result)
  print('Topologies running in cluster \'%s\':' % cluster)
if rest_count:
print(' with %d more...' % rest_count)
print(tabulate(table, headers=header))
return True
def show_cluster_role(cluster, role):
try:
result = utils.get_cluster_role_topologies(cluster, role)
if not result:
Log.error('Unknown cluster/role \'%s\'' % '/'.join([cluster, role]))
return False
result = result[cluster]
except Exception:
return False
table, header, rest_count = to_table(result)
print('Topologies running in cluster \'%s\' submitted by \'%s\':' % (cluster, role))
if rest_count:
print(' with %d more...' % rest_count)
print(tabulate(table, headers=header))
return True
def show_cluster_role_env(cluster, role, env):
try:
result = utils.get_cluster_role_env_topologies(cluster, role, env)
if not result:
Log.error('Unknown cluster/role/env \'%s\'' % '/'.join([cluster, role, env]))
return False
result = result[cluster]
except Exception:
return False
table, header, rest_count = to_table(result)
  print('Topologies running in cluster \'%s\', submitted by \'%s\', and '
        'under environment \'%s\':' % (cluster, role, env))
if rest_count:
print(' with %d more...' % rest_count)
print(tabulate(table, headers=header))
return True
def run(command, parser, cl_args, unknown_args):
location = cl_args['cluster/[role]/[env]'].split('/')
if len(location) == 1:
return show_cluster(*location)
elif len(location) == 2:
return show_cluster_role(*location)
elif len(location) == 3:
return show_cluster_role_env(*location)
else:
Log.error('Invalid topologies selection')
return False
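# Dispatch sketch (hypothetical location string):
#   'local/ads/default' -> ['local', 'ads', 'default']
#                       -> show_cluster_role_env('local', 'ads', 'default')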
| {
"content_hash": "1a450163a28b12d5e8274a4a65ae52bb",
"timestamp": "",
"source": "github",
"line_count": 99,
"max_line_length": 86,
"avg_line_length": 30.03030303030303,
"alnum_prop": 0.6619576185671039,
"repo_name": "cliffyg/heron",
"id": "703b7529308a07b1b613be5a1b74c89989a710fd",
"size": "3566",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "heron/explorer/src/python/topologies.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "8537"
},
{
"name": "C++",
"bytes": "1059998"
},
{
"name": "CSS",
"bytes": "106404"
},
{
"name": "HTML",
"bytes": "153565"
},
{
"name": "Java",
"bytes": "2135618"
},
{
"name": "JavaScript",
"bytes": "165310"
},
{
"name": "M4",
"bytes": "17941"
},
{
"name": "Makefile",
"bytes": "498"
},
{
"name": "Objective-C",
"bytes": "1445"
},
{
"name": "Perl",
"bytes": "9085"
},
{
"name": "Protocol Buffer",
"bytes": "18193"
},
{
"name": "Python",
"bytes": "658792"
},
{
"name": "Ruby",
"bytes": "1930"
},
{
"name": "Scala",
"bytes": "4640"
},
{
"name": "Shell",
"bytes": "128229"
},
{
"name": "Thrift",
"bytes": "915"
}
],
"symlink_target": ""
} |
"""
Demonstrate how to use major and minor tickers.
The two relevant userland classes are Locators and Formatters.
Locators determine where the ticks are and formatters control the
formatting of ticks.
Minor ticks are off by default (NullLocator and NullFormatter). You
can turn minor ticks on without labels by setting the minor locator. You
can also turn labeling on for the minor ticker by setting the minor
formatter.
Make a plot with major ticks that are multiples of 20 and minor ticks
that are multiples of 5. Label major ticks with %d formatting but
don't label minor ticks.
The MultipleLocator ticker class is used to place ticks on multiples of
some base. The FormatStrFormatter uses a format string (e.g. '%d',
'%1.2f', or '%1.1f cm') to format the tick.
The pylab interface grid command changes the grid settings of the
major ticks of the x and y axis together. If you want to control the
grid of the minor ticks for a given axis, use for example
ax.xaxis.grid(True, which='minor')
Note that you should not use the same locator between different Axis
objects, because the locator stores references to the Axis data and
view limits.
"""
import matplotlib.pyplot as plt
import numpy as np
from matplotlib.ticker import MultipleLocator, FormatStrFormatter
majorLocator = MultipleLocator(20)
majorFormatter = FormatStrFormatter('%d')
minorLocator = MultipleLocator(5)
t = np.arange(0.0, 100.0, 0.1)
s = np.sin(0.1*np.pi*t)*np.exp(-t*0.01)
fig, ax = plt.subplots()
plt.plot(t, s)
ax.xaxis.set_major_locator(majorLocator)
ax.xaxis.set_major_formatter(majorFormatter)
# for the minor ticks, use no labels; default NullFormatter
ax.xaxis.set_minor_locator(minorLocator)
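# The notes above also mention labeling minor ticks; a minimal sketch,
# left commented out so the demo's output is unchanged:
# ax.xaxis.set_minor_formatter(FormatStrFormatter('%d'))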
plt.show()
| {
"content_hash": "8b4e6bbc1e09fdf6ddbc5d241c0dcd28",
"timestamp": "",
"source": "github",
"line_count": 53,
"max_line_length": 71,
"avg_line_length": 32,
"alnum_prop": 0.7735849056603774,
"repo_name": "yavalvas/yav_com",
"id": "ec9db46734142324aabc6f1d22d68b2f70ce61e0",
"size": "1718",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "build/matplotlib/doc/mpl_examples/pylab_examples/major_minor_demo1.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "85377"
},
{
"name": "C++",
"bytes": "568744"
},
{
"name": "CSS",
"bytes": "47585"
},
{
"name": "Erlang",
"bytes": "7112"
},
{
"name": "HTML",
"bytes": "14865"
},
{
"name": "JavaScript",
"bytes": "359937"
},
{
"name": "Objective-C",
"bytes": "188937"
},
{
"name": "Perl",
"bytes": "229498"
},
{
"name": "Python",
"bytes": "7684946"
},
{
"name": "Shell",
"bytes": "1805"
}
],
"symlink_target": ""
} |
from __future__ import annotations
import typing as T
from .baseobjects import MesonInterpreterObject
if T.TYPE_CHECKING:
from .baseobjects import TYPE_var, TYPE_kwargs
class Disabler(MesonInterpreterObject):
def method_call(self, method_name: str, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> TYPE_var:
if method_name == 'found':
return False
return Disabler()
def _is_arg_disabled(arg: T.Any) -> bool:
if isinstance(arg, Disabler):
return True
if isinstance(arg, list):
for i in arg:
if _is_arg_disabled(i):
return True
return False
def is_disabled(args: T.Sequence[T.Any], kwargs: T.Dict[str, T.Any]) -> bool:
for i in args:
if _is_arg_disabled(i):
return True
for i in kwargs.values():
if _is_arg_disabled(i):
return True
return False
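# Behaviour sketch (illustrative values):
#   is_disabled([Disabler()], {})              -> True
#   is_disabled([1, ['a', Disabler()]], {})    -> True  (nested lists are walked)
#   is_disabled([1, 'a'], {'kw': Disabler()})  -> True
#   is_disabled([1, 'a'], {'kw': 'b'})         -> False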
| {
"content_hash": "3406aafa0bbf4813c55ec2362298d604",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 101,
"avg_line_length": 27.875,
"alnum_prop": 0.6188340807174888,
"repo_name": "pexip/meson",
"id": "182bb625ccc358c2dc7af9a478befb761241bec9",
"size": "1484",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "mesonbuild/interpreterbase/disabler.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "4913"
},
{
"name": "Batchfile",
"bytes": "1499"
},
{
"name": "C",
"bytes": "203464"
},
{
"name": "C#",
"bytes": "1130"
},
{
"name": "C++",
"bytes": "59032"
},
{
"name": "CMake",
"bytes": "38429"
},
{
"name": "Cuda",
"bytes": "10592"
},
{
"name": "Cython",
"bytes": "1921"
},
{
"name": "D",
"bytes": "7840"
},
{
"name": "Fortran",
"bytes": "12248"
},
{
"name": "Genie",
"bytes": "476"
},
{
"name": "HTML",
"bytes": "897"
},
{
"name": "Inno Setup",
"bytes": "354"
},
{
"name": "Java",
"bytes": "3768"
},
{
"name": "JavaScript",
"bytes": "150"
},
{
"name": "LLVM",
"bytes": "75"
},
{
"name": "Lex",
"bytes": "219"
},
{
"name": "Limbo",
"bytes": "28"
},
{
"name": "Meson",
"bytes": "595904"
},
{
"name": "Objective-C",
"bytes": "686"
},
{
"name": "Objective-C++",
"bytes": "378"
},
{
"name": "PowerShell",
"bytes": "4748"
},
{
"name": "Python",
"bytes": "4096804"
},
{
"name": "Roff",
"bytes": "625"
},
{
"name": "Rust",
"bytes": "4039"
},
{
"name": "Shell",
"bytes": "12539"
},
{
"name": "Swift",
"bytes": "1152"
},
{
"name": "Vala",
"bytes": "10033"
},
{
"name": "Verilog",
"bytes": "696"
},
{
"name": "Vim Script",
"bytes": "10684"
},
{
"name": "Yacc",
"bytes": "103"
}
],
"symlink_target": ""
} |
import urllib2
from urllib2 import Request
import ast
# get the bus departures for a stop from the website and parse them into a list
def get_list(start):
# url = 'http://widgets.vvo-online.de/abfahrtsmonitor/Abfahrten.do?ort=Dresden&hst=CasparDavidFriedrichStra%C3%9Fe'
url = 'http://widgets.vvo-online.de/abfahrtsmonitor/Abfahrten.do?ort=Dresden&hst=' + start
    req = Request(url)
response = urllib2.urlopen(req)
data_raw = response.read()
data_utf = data_raw.decode("utf-8")
data_list = ast.literal_eval(data_utf)
return data_list
# keep only the first (earliest) departure for each bus line
def get_first_buses(data_list):
next_buses = []
for ride in data_list:
if ride[0] not in [next_ride[0] for next_ride in next_buses]:
next_buses.append(ride)
return next_buses
# return the first departure time for each bus line
def get_buses(start):
return get_first_buses(get_list(start))
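# Rough shape of the widget response (illustrative; not verified against the
# live API): a list of [line, destination, minutes] entries, e.g.
#   [['85', 'Loebtau Sued', '3'], ['85', 'Loebtau Sued', '13'], ...]
# so get_first_buses keeps one entry per line number, presumably the soonest
# departure if the service returns entries in time order.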
| {
"content_hash": "5bea35e0b31ba90c6f133c7c525080c2",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 118,
"avg_line_length": 34.25925925925926,
"alnum_prop": 0.6994594594594594,
"repo_name": "NWuensche/TimetableBus",
"id": "00b8c2a7f460d5c61c1ac7ac6e935f72a5f7fdf0",
"size": "949",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Timetable.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "4573"
}
],
"symlink_target": ""
} |
import os
from optparse import make_option
from django.core.management.base import BaseCommand, CommandError
from database_files.models import File
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
# make_option('-w', '--overwrite', action='store_true',
# dest='overwrite', default=False,
# help='If given, overwrites any existing files.'),
)
help = 'Dumps all files in the database referenced by FileFields ' + \
'or ImageFields onto the filesystem in the directory specified by ' + \
'MEDIA_ROOT.'
def handle(self, *args, **options):
File.dump_files(verbose=True)
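# Typical invocation (command name derived from this module's filename):
#   python manage.py database_files_dump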
| {
"content_hash": "ae74b0ae1813a706916c8cc05560a92d",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 79,
"avg_line_length": 33.35,
"alnum_prop": 0.6611694152923538,
"repo_name": "rhunwicks/django-database-files",
"id": "36534c9b859c81109d67fe49652227fe7d50b37e",
"size": "667",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "database_files/management/commands/database_files_dump.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "42649"
}
],
"symlink_target": ""
} |
from babel import dates, core, Locale
from website import mails
from website import models as website_models
from website.notifications import constants
from website.notifications import utils
from website.notifications.model import NotificationDigest
from website.notifications.model import NotificationSubscription
from website.util import web_url_for
def notify(event, user, node, timestamp, **context):
"""Retrieve appropriate ***subscription*** and passe user list
:param event: event that triggered the notification
:param user: user who triggered notification
:param node: instance of Node
:param timestamp: time event happened
:param context: optional variables specific to templates
target_user: used with comment_replies
:return: List of user ids notifications were sent to
"""
event_type = utils.find_subscription_type(event)
subscriptions = compile_subscriptions(node, event_type, event)
sent_users = []
target_user = context.get('target_user', None)
if target_user:
target_user_id = target_user._id
for notification_type in subscriptions:
if notification_type != 'none' and subscriptions[notification_type]:
if user in subscriptions[notification_type]:
subscriptions[notification_type].pop(subscriptions[notification_type].index(user))
if target_user and target_user_id in subscriptions[notification_type]:
subscriptions[notification_type].pop(subscriptions[notification_type].index(target_user_id))
if target_user_id != user._id:
store_emails([target_user_id], notification_type, 'comment_replies', user, node,
timestamp, **context)
sent_users.append(target_user_id)
if subscriptions[notification_type]:
store_emails(subscriptions[notification_type], notification_type, event_type, user, node,
timestamp, **context)
sent_users.extend(subscriptions[notification_type])
return sent_users
def store_emails(recipient_ids, notification_type, event, user, node, timestamp, **context):
"""Store notification emails
Emails are sent via celery beat as digests
:param recipient_ids: List of user ids to send mail to.
:param notification_type: from constants.Notification_types
:param event: event that triggered notification
:param user: user who triggered the notification
:param node: instance of Node
:param timestamp: time event happened
:param context:
:return: --
"""
if notification_type == 'none':
return
template = event + '.html.mako'
context['user'] = user
node_lineage_ids = get_node_lineage(node) if node else []
for user_id in recipient_ids:
if user_id == user._id:
continue
recipient = website_models.User.load(user_id)
context['localized_timestamp'] = localize_timestamp(timestamp, recipient)
message = mails.render_message(template, **context)
digest = NotificationDigest(
timestamp=timestamp,
send_type=notification_type,
event=event,
user_id=user_id,
message=message,
node_lineage=node_lineage_ids
)
digest.save()
def compile_subscriptions(node, event_type, event=None, level=0):
"""Recurse through node and parents for subscriptions.
:param node: current node
:param event_type: Generally node_subscriptions_available
:param event: Particular event such a file_updated that has specific file subs
:param level: How deep the recursion is
:return: a dict of notification types with lists of users.
"""
subscriptions = check_node(node, event_type)
if event:
subscriptions = check_node(node, event) # Gets particular event subscriptions
parent_subscriptions = compile_subscriptions(node, event_type, level=level + 1) # get node and parent subs
elif node.parent_id:
parent_subscriptions = \
compile_subscriptions(website_models.Node.load(node.parent_id), event_type, level=level + 1)
else:
parent_subscriptions = check_node(None, event_type)
for notification_type in parent_subscriptions:
p_sub_n = parent_subscriptions[notification_type]
p_sub_n.extend(subscriptions[notification_type])
for nt in subscriptions:
if notification_type != nt:
p_sub_n = list(set(p_sub_n).difference(set(subscriptions[nt])))
if level == 0:
p_sub_n, removed = utils.separate_users(node, p_sub_n)
parent_subscriptions[notification_type] = p_sub_n
return parent_subscriptions
def check_node(node, event):
"""Return subscription for a particular node and event."""
node_subscriptions = {key: [] for key in constants.NOTIFICATION_TYPES}
if node:
subscription = NotificationSubscription.load(utils.to_subscription_key(node._id, event))
for notification_type in node_subscriptions:
users = getattr(subscription, notification_type, [])
for user in users:
if node.has_permission(user, 'read'):
node_subscriptions[notification_type].append(user._id)
return node_subscriptions
def get_node_lineage(node):
""" Get a list of node ids in order from the node to top most project
e.g. [parent._id, node._id]
"""
lineage = [node._id]
while node.parent_id:
node = website_models.Node.load(node.parent_id)
lineage = [node._id] + lineage
return lineage
def get_settings_url(uid, user):
if uid == user._id:
return web_url_for('user_notifications', _absolute=True)
node = website_models.Node.load(uid)
    assert node, 'get_settings_url received an invalid Node id'
return node.web_url_for('node_setting', _guid=True, _absolute=True)
def localize_timestamp(timestamp, user):
try:
user_timezone = dates.get_timezone(user.timezone)
except LookupError:
user_timezone = dates.get_timezone('Etc/UTC')
try:
user_locale = Locale(user.locale)
except core.UnknownLocaleError:
user_locale = 'en'
formatted_date = dates.format_date(timestamp, format='full', locale=user_locale)
formatted_time = dates.format_time(timestamp, format='short', tzinfo=user_timezone, locale=user_locale)
return u'{time} on {date}'.format(time=formatted_time, date=formatted_date)
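# Illustrative output (assuming an 'en' locale and a UTC timezone):
#   u'2:30 PM on Thursday, June 18, 2015'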
| {
"content_hash": "57041d9ae86882c8dcc2e2c3706d300c",
"timestamp": "",
"source": "github",
"line_count": 163,
"max_line_length": 115,
"avg_line_length": 40.239263803680984,
"alnum_prop": 0.6686994968745236,
"repo_name": "haoyuchen1992/osf.io",
"id": "f6d0e88869c521f828162f59dd22eb004bdbe833",
"size": "6559",
"binary": false,
"copies": "12",
"ref": "refs/heads/develop",
"path": "website/notifications/emails.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "119433"
},
{
"name": "HTML",
"bytes": "34310"
},
{
"name": "JavaScript",
"bytes": "1214045"
},
{
"name": "Mako",
"bytes": "542037"
},
{
"name": "Python",
"bytes": "3730523"
},
{
"name": "Shell",
"bytes": "1927"
}
],
"symlink_target": ""
} |
"""
rpc中间件,可以监控rpc的一些状态或者做一些tracelog记录
"""
import hashlib
import random
import gevent.local
class ClientMiddleware:
def __init__(self):
self.called = True
def client_before_request(self, event):
self.method = event.name
def client_after_request(self, req_event, rep_event, exception):
pass
# assert req_event is not None
# assert req_event.name == "crash" or req_event.name == "echoes_crash"
# self.called = True
# assert isinstance(exception, zerorpc.RemoteError)
# assert exception.name == 'RuntimeError'
# assert 'BrokenEchoModule' in exception.msg
# assert rep_event.name == 'ERR'
def client_handle_remote_error(self, event):
pass
def client_patterns_list(self, patterns):
pass
class ResolverMiddleware:
    def resolve_endpoint(self, endpoint):
        # pass-through resolver, kept as a hook for endpoint rewriting
        return endpoint
class TracerMiddleware:
'''Used by test_task_context_* tests'''
def __init__(self, identity):
self._identity = identity
self._locals = gevent.local.local()
self._log = []
@property
def trace_id(self):
return self._locals.__dict__.get('trace_id', None)
def load_task_context(self, event_header):
self._locals.trace_id = event_header.get('trace_id', None)
print self._identity, 'load_task_context', self.trace_id
self._log.append(('load', self.trace_id))
def get_task_context(self):
if self.trace_id is None:
            # just some ugly code to generate a beautiful little hash.
self._locals.trace_id = '<{0}>'.format(hashlib.md5(
str(random.random())[3:]
).hexdigest()[0:6].upper())
print self._identity, 'get_task_context! [make a new one]', self.trace_id
self._log.append(('new', self.trace_id))
else:
print self._identity, 'get_task_context! [reuse]', self.trace_id
self._log.append(('reuse', self.trace_id))
return {'trace_id': self.trace_id}
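# The trace id generated above looks like '<A1B2C3>': six uppercase hex
# characters taken from an md5 of a random float, wrapped in angle brackets.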
| {
"content_hash": "7e162063b86fb5ef44a1074e0c9a2518",
"timestamp": "",
"source": "github",
"line_count": 67,
"max_line_length": 85,
"avg_line_length": 31.08955223880597,
"alnum_prop": 0.600096015362458,
"repo_name": "openslack/openslack-api",
"id": "4561e4fca6fbd233c2f0cfc4f40785c32f7f1d65",
"size": "2147",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "openslack/utils/middleware.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "111"
},
{
"name": "HTML",
"bytes": "10735"
},
{
"name": "JavaScript",
"bytes": "3647"
},
{
"name": "Python",
"bytes": "25821"
}
],
"symlink_target": ""
} |
"""
Unit tests for Neutron base agent.
"""
import mock
from neutron.agent import rpc as agent_rpc
from networking_hyperv.neutron.agent import base as agent_base
from networking_hyperv.tests import base as test_base
class _BaseAgent(agent_base.BaseAgent):
def _get_agent_configurations(self):
pass
def _setup_rpc(self):
pass
def _work(self):
pass
class TestBaseAgent(test_base.HyperVBaseTestCase):
def setUp(self):
super(TestBaseAgent, self).setUp()
self._agent = _BaseAgent()
self._agent._agent_id = mock.sentinel.agent_id
self._agent._context = mock.sentinel.admin_context
self._agent._state_rpc = mock.MagicMock(
autospec=agent_rpc.PluginReportStateAPI)
def test_set_agent_state(self):
self._agent._agent_state = {}
self._agent._host = mock.sentinel.host
self._agent._set_agent_state()
expected_keys = ["binary", "host", "configurations", "agent_type",
"topic", "start_flag"]
self.assertEqual(sorted(expected_keys),
sorted(self._agent._agent_state.keys()))
self.assertEqual(mock.sentinel.host, self._agent._agent_state["host"])
@mock.patch('time.time')
@mock.patch('time.sleep')
@mock.patch.object(_BaseAgent, '_work')
@mock.patch.object(_BaseAgent, '_prologue')
def test_daemon_loop(self, mock_prologue, mock_work,
mock_sleep, mock_time):
mock_work.side_effect = [Exception()]
mock_time.side_effect = [1, 3, KeyboardInterrupt]
self.assertRaises(KeyboardInterrupt, self._agent.daemon_loop)
mock_prologue.assert_called_once_with()
def test_report_state(self):
self._agent._agent_state = {'start_flag': True}
self._agent._report_state()
self.assertNotIn('start_flag', self._agent._agent_state)
def test_report_state_exception(self):
self._agent._agent_state = {'start_flag': True}
self._agent._state_rpc.report_state.side_effect = Exception
self._agent._report_state()
self._agent._state_rpc.report_state.assert_called_once_with(
self._agent._context, {'start_flag': True})
self.assertTrue(self._agent._agent_state['start_flag'])
| {
"content_hash": "c31f7cc2e605003a2fb2f7aff2b8eb50",
"timestamp": "",
"source": "github",
"line_count": 76,
"max_line_length": 78,
"avg_line_length": 30.38157894736842,
"alnum_prop": 0.6271113035946297,
"repo_name": "stackforge/networking-hyperv",
"id": "9f4908c1d52f7d4220cf3c840a89994a85bc4655",
"size": "2948",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "networking_hyperv/tests/unit/neutron/agent/test_base.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "140104"
}
],
"symlink_target": ""
} |
from __future__ import print_function
import pytest
from datetime import datetime
from numpy import random
import numpy as np
from pandas.compat import lrange, lzip, u
from pandas import (compat, DataFrame, Series, Index, MultiIndex,
date_range, isnull)
import pandas as pd
from pandas.util.testing import assert_frame_equal
from pandas.errors import PerformanceWarning
import pandas.util.testing as tm
from pandas.tests.frame.common import TestData
class TestDataFrameSelectReindex(TestData):
# These are specific reindex-based tests; other indexing tests should go in
# test_indexing
def test_drop_names(self):
df = DataFrame([[1, 2, 3], [3, 4, 5], [5, 6, 7]],
index=['a', 'b', 'c'],
columns=['d', 'e', 'f'])
df.index.name, df.columns.name = 'first', 'second'
df_dropped_b = df.drop('b')
df_dropped_e = df.drop('e', axis=1)
df_inplace_b, df_inplace_e = df.copy(), df.copy()
df_inplace_b.drop('b', inplace=True)
df_inplace_e.drop('e', axis=1, inplace=True)
for obj in (df_dropped_b, df_dropped_e, df_inplace_b, df_inplace_e):
assert obj.index.name == 'first'
assert obj.columns.name == 'second'
assert list(df.columns) == ['d', 'e', 'f']
pytest.raises(ValueError, df.drop, ['g'])
pytest.raises(ValueError, df.drop, ['g'], 1)
# errors = 'ignore'
dropped = df.drop(['g'], errors='ignore')
expected = Index(['a', 'b', 'c'], name='first')
tm.assert_index_equal(dropped.index, expected)
dropped = df.drop(['b', 'g'], errors='ignore')
expected = Index(['a', 'c'], name='first')
tm.assert_index_equal(dropped.index, expected)
dropped = df.drop(['g'], axis=1, errors='ignore')
expected = Index(['d', 'e', 'f'], name='second')
tm.assert_index_equal(dropped.columns, expected)
dropped = df.drop(['d', 'g'], axis=1, errors='ignore')
expected = Index(['e', 'f'], name='second')
tm.assert_index_equal(dropped.columns, expected)
def test_drop_col_still_multiindex(self):
arrays = [['a', 'b', 'c', 'top'],
['', '', '', 'OD'],
['', '', '', 'wx']]
tuples = sorted(zip(*arrays))
index = MultiIndex.from_tuples(tuples)
df = DataFrame(np.random.randn(3, 4), columns=index)
del df[('a', '', '')]
assert(isinstance(df.columns, MultiIndex))
def test_drop(self):
simple = DataFrame({"A": [1, 2, 3, 4], "B": [0, 1, 2, 3]})
assert_frame_equal(simple.drop("A", axis=1), simple[['B']])
assert_frame_equal(simple.drop(["A", "B"], axis='columns'),
simple[[]])
assert_frame_equal(simple.drop([0, 1, 3], axis=0), simple.loc[[2], :])
assert_frame_equal(simple.drop(
[0, 3], axis='index'), simple.loc[[1, 2], :])
pytest.raises(ValueError, simple.drop, 5)
pytest.raises(ValueError, simple.drop, 'C', 1)
pytest.raises(ValueError, simple.drop, [1, 5])
pytest.raises(ValueError, simple.drop, ['A', 'C'], 1)
# errors = 'ignore'
assert_frame_equal(simple.drop(5, errors='ignore'), simple)
assert_frame_equal(simple.drop([0, 5], errors='ignore'),
simple.loc[[1, 2, 3], :])
assert_frame_equal(simple.drop('C', axis=1, errors='ignore'), simple)
assert_frame_equal(simple.drop(['A', 'C'], axis=1, errors='ignore'),
simple[['B']])
# non-unique - wheee!
nu_df = DataFrame(lzip(range(3), range(-3, 1), list('abc')),
columns=['a', 'a', 'b'])
assert_frame_equal(nu_df.drop('a', axis=1), nu_df[['b']])
assert_frame_equal(nu_df.drop('b', axis='columns'), nu_df['a'])
nu_df = nu_df.set_index(pd.Index(['X', 'Y', 'X']))
nu_df.columns = list('abc')
assert_frame_equal(nu_df.drop('X', axis='rows'), nu_df.loc[["Y"], :])
assert_frame_equal(nu_df.drop(['X', 'Y'], axis=0), nu_df.loc[[], :])
# inplace cache issue
# GH 5628
df = pd.DataFrame(np.random.randn(10, 3), columns=list('abc'))
expected = df[~(df.b > 0)]
df.drop(labels=df[df.b > 0].index, inplace=True)
assert_frame_equal(df, expected)
def test_drop_multiindex_not_lexsorted(self):
# GH 11640
# define the lexsorted version
lexsorted_mi = MultiIndex.from_tuples(
[('a', ''), ('b1', 'c1'), ('b2', 'c2')], names=['b', 'c'])
lexsorted_df = DataFrame([[1, 3, 4]], columns=lexsorted_mi)
assert lexsorted_df.columns.is_lexsorted()
# define the non-lexsorted version
not_lexsorted_df = DataFrame(columns=['a', 'b', 'c', 'd'],
data=[[1, 'b1', 'c1', 3],
[1, 'b2', 'c2', 4]])
not_lexsorted_df = not_lexsorted_df.pivot_table(
index='a', columns=['b', 'c'], values='d')
not_lexsorted_df = not_lexsorted_df.reset_index()
assert not not_lexsorted_df.columns.is_lexsorted()
# compare the results
tm.assert_frame_equal(lexsorted_df, not_lexsorted_df)
expected = lexsorted_df.drop('a', axis=1)
with tm.assert_produces_warning(PerformanceWarning):
result = not_lexsorted_df.drop('a', axis=1)
tm.assert_frame_equal(result, expected)
def test_merge_join_different_levels(self):
# GH 9455
# first dataframe
df1 = DataFrame(columns=['a', 'b'], data=[[1, 11], [0, 22]])
# second dataframe
columns = MultiIndex.from_tuples([('a', ''), ('c', 'c1')])
df2 = DataFrame(columns=columns, data=[[1, 33], [0, 44]])
# merge
columns = ['a', 'b', ('c', 'c1')]
expected = DataFrame(columns=columns, data=[[1, 11, 33], [0, 22, 44]])
with tm.assert_produces_warning(UserWarning):
result = pd.merge(df1, df2, on='a')
tm.assert_frame_equal(result, expected)
# join, see discussion in GH 12219
columns = ['a', 'b', ('a', ''), ('c', 'c1')]
expected = DataFrame(columns=columns,
data=[[1, 11, 0, 44], [0, 22, 1, 33]])
with tm.assert_produces_warning(UserWarning):
result = df1.join(df2, on='a')
tm.assert_frame_equal(result, expected)
def test_reindex(self):
newFrame = self.frame.reindex(self.ts1.index)
for col in newFrame.columns:
for idx, val in compat.iteritems(newFrame[col]):
if idx in self.frame.index:
if np.isnan(val):
assert np.isnan(self.frame[col][idx])
else:
assert val == self.frame[col][idx]
else:
assert np.isnan(val)
for col, series in compat.iteritems(newFrame):
assert tm.equalContents(series.index, newFrame.index)
emptyFrame = self.frame.reindex(Index([]))
assert len(emptyFrame.index) == 0
# Cython code should be unit-tested directly
nonContigFrame = self.frame.reindex(self.ts1.index[::2])
for col in nonContigFrame.columns:
for idx, val in compat.iteritems(nonContigFrame[col]):
if idx in self.frame.index:
if np.isnan(val):
assert np.isnan(self.frame[col][idx])
else:
assert val == self.frame[col][idx]
else:
assert np.isnan(val)
for col, series in compat.iteritems(nonContigFrame):
assert tm.equalContents(series.index, nonContigFrame.index)
# corner cases
# Same index, copies values but not index if copy=False
newFrame = self.frame.reindex(self.frame.index, copy=False)
assert newFrame.index is self.frame.index
# length zero
newFrame = self.frame.reindex([])
assert newFrame.empty
assert len(newFrame.columns) == len(self.frame.columns)
# length zero with columns reindexed with non-empty index
newFrame = self.frame.reindex([])
newFrame = newFrame.reindex(self.frame.index)
assert len(newFrame.index) == len(self.frame.index)
assert len(newFrame.columns) == len(self.frame.columns)
# pass non-Index
newFrame = self.frame.reindex(list(self.ts1.index))
tm.assert_index_equal(newFrame.index, self.ts1.index)
# copy with no axes
result = self.frame.reindex()
assert_frame_equal(result, self.frame)
assert result is not self.frame
def test_reindex_nan(self):
df = pd.DataFrame([[1, 2], [3, 5], [7, 11], [9, 23]],
index=[2, np.nan, 1, 5],
columns=['joe', 'jim'])
i, j = [np.nan, 5, 5, np.nan, 1, 2, np.nan], [1, 3, 3, 1, 2, 0, 1]
assert_frame_equal(df.reindex(i), df.iloc[j])
df.index = df.index.astype('object')
assert_frame_equal(df.reindex(i), df.iloc[j], check_index_type=False)
# GH10388
df = pd.DataFrame({'other': ['a', 'b', np.nan, 'c'],
'date': ['2015-03-22', np.nan,
'2012-01-08', np.nan],
'amount': [2, 3, 4, 5]})
df['date'] = pd.to_datetime(df.date)
df['delta'] = (pd.to_datetime('2015-06-18') - df['date']).shift(1)
left = df.set_index(['delta', 'other', 'date']).reset_index()
right = df.reindex(columns=['delta', 'other', 'date', 'amount'])
assert_frame_equal(left, right)
def test_reindex_name_remains(self):
s = Series(random.rand(10))
df = DataFrame(s, index=np.arange(len(s)))
i = Series(np.arange(10), name='iname')
df = df.reindex(i)
assert df.index.name == 'iname'
df = df.reindex(Index(np.arange(10), name='tmpname'))
assert df.index.name == 'tmpname'
s = Series(random.rand(10))
df = DataFrame(s.T, index=np.arange(len(s)))
i = Series(np.arange(10), name='iname')
df = df.reindex(columns=i)
assert df.columns.name == 'iname'
def test_reindex_int(self):
smaller = self.intframe.reindex(self.intframe.index[::2])
assert smaller['A'].dtype == np.int64
bigger = smaller.reindex(self.intframe.index)
assert bigger['A'].dtype == np.float64
smaller = self.intframe.reindex(columns=['A', 'B'])
assert smaller['A'].dtype == np.int64
def test_reindex_like(self):
other = self.frame.reindex(index=self.frame.index[:10],
columns=['C', 'B'])
assert_frame_equal(other, self.frame.reindex_like(other))
def test_reindex_columns(self):
new_frame = self.frame.reindex(columns=['A', 'B', 'E'])
tm.assert_series_equal(new_frame['B'], self.frame['B'])
assert np.isnan(new_frame['E']).all()
assert 'C' not in new_frame
# Length zero
new_frame = self.frame.reindex(columns=[])
assert new_frame.empty
def test_reindex_columns_method(self):
# GH 14992, reindexing over columns ignored method
df = DataFrame(data=[[11, 12, 13], [21, 22, 23], [31, 32, 33]],
index=[1, 2, 4],
columns=[1, 2, 4],
dtype=float)
# default method
result = df.reindex(columns=range(6))
expected = DataFrame(data=[[np.nan, 11, 12, np.nan, 13, np.nan],
[np.nan, 21, 22, np.nan, 23, np.nan],
[np.nan, 31, 32, np.nan, 33, np.nan]],
index=[1, 2, 4],
columns=range(6),
dtype=float)
assert_frame_equal(result, expected)
# method='ffill'
result = df.reindex(columns=range(6), method='ffill')
expected = DataFrame(data=[[np.nan, 11, 12, 12, 13, 13],
[np.nan, 21, 22, 22, 23, 23],
[np.nan, 31, 32, 32, 33, 33]],
index=[1, 2, 4],
columns=range(6),
dtype=float)
assert_frame_equal(result, expected)
# method='bfill'
result = df.reindex(columns=range(6), method='bfill')
expected = DataFrame(data=[[11, 11, 12, 13, 13, np.nan],
[21, 21, 22, 23, 23, np.nan],
[31, 31, 32, 33, 33, np.nan]],
index=[1, 2, 4],
columns=range(6),
dtype=float)
assert_frame_equal(result, expected)
def test_reindex_axes(self):
# GH 3317, reindexing by both axes loses freq of the index
df = DataFrame(np.ones((3, 3)),
index=[datetime(2012, 1, 1),
datetime(2012, 1, 2),
datetime(2012, 1, 3)],
columns=['a', 'b', 'c'])
time_freq = date_range('2012-01-01', '2012-01-03', freq='d')
some_cols = ['a', 'b']
index_freq = df.reindex(index=time_freq).index.freq
both_freq = df.reindex(index=time_freq, columns=some_cols).index.freq
seq_freq = df.reindex(index=time_freq).reindex(
columns=some_cols).index.freq
assert index_freq == both_freq
assert index_freq == seq_freq
def test_reindex_fill_value(self):
df = DataFrame(np.random.randn(10, 4))
# axis=0
result = df.reindex(lrange(15))
assert np.isnan(result.values[-5:]).all()
result = df.reindex(lrange(15), fill_value=0)
expected = df.reindex(lrange(15)).fillna(0)
assert_frame_equal(result, expected)
# axis=1
result = df.reindex(columns=lrange(5), fill_value=0.)
expected = df.copy()
expected[4] = 0.
assert_frame_equal(result, expected)
result = df.reindex(columns=lrange(5), fill_value=0)
expected = df.copy()
expected[4] = 0
assert_frame_equal(result, expected)
result = df.reindex(columns=lrange(5), fill_value='foo')
expected = df.copy()
expected[4] = 'foo'
assert_frame_equal(result, expected)
# reindex_axis
result = df.reindex_axis(lrange(15), fill_value=0., axis=0)
expected = df.reindex(lrange(15)).fillna(0)
assert_frame_equal(result, expected)
result = df.reindex_axis(lrange(5), fill_value=0., axis=1)
expected = df.reindex(columns=lrange(5)).fillna(0)
assert_frame_equal(result, expected)
# other dtypes
df['foo'] = 'foo'
result = df.reindex(lrange(15), fill_value=0)
expected = df.reindex(lrange(15)).fillna(0)
assert_frame_equal(result, expected)
def test_reindex_dups(self):
# GH4746, reindex on duplicate index error messages
arr = np.random.randn(10)
df = DataFrame(arr, index=[1, 2, 3, 4, 5, 1, 2, 3, 4, 5])
# set index is ok
result = df.copy()
result.index = list(range(len(df)))
expected = DataFrame(arr, index=list(range(len(df))))
assert_frame_equal(result, expected)
# reindex fails
pytest.raises(ValueError, df.reindex, index=list(range(len(df))))
def test_align(self):
af, bf = self.frame.align(self.frame)
assert af._data is not self.frame._data
af, bf = self.frame.align(self.frame, copy=False)
assert af._data is self.frame._data
# axis = 0
other = self.frame.iloc[:-5, :3]
af, bf = self.frame.align(other, axis=0, fill_value=-1)
tm.assert_index_equal(bf.columns, other.columns)
# test fill value
join_idx = self.frame.index.join(other.index)
diff_a = self.frame.index.difference(join_idx)
diff_b = other.index.difference(join_idx)
diff_a_vals = af.reindex(diff_a).values
diff_b_vals = bf.reindex(diff_b).values
assert (diff_a_vals == -1).all()
af, bf = self.frame.align(other, join='right', axis=0)
tm.assert_index_equal(bf.columns, other.columns)
tm.assert_index_equal(bf.index, other.index)
tm.assert_index_equal(af.index, other.index)
# axis = 1
other = self.frame.iloc[:-5, :3].copy()
af, bf = self.frame.align(other, axis=1)
tm.assert_index_equal(bf.columns, self.frame.columns)
tm.assert_index_equal(bf.index, other.index)
# test fill value
join_idx = self.frame.index.join(other.index)
diff_a = self.frame.index.difference(join_idx)
diff_b = other.index.difference(join_idx)
diff_a_vals = af.reindex(diff_a).values
# TODO(wesm): unused?
diff_b_vals = bf.reindex(diff_b).values # noqa
assert (diff_a_vals == -1).all()
af, bf = self.frame.align(other, join='inner', axis=1)
tm.assert_index_equal(bf.columns, other.columns)
af, bf = self.frame.align(other, join='inner', axis=1, method='pad')
tm.assert_index_equal(bf.columns, other.columns)
# test other non-float types
af, bf = self.intframe.align(other, join='inner', axis=1, method='pad')
tm.assert_index_equal(bf.columns, other.columns)
af, bf = self.mixed_frame.align(self.mixed_frame,
join='inner', axis=1, method='pad')
tm.assert_index_equal(bf.columns, self.mixed_frame.columns)
af, bf = self.frame.align(other.iloc[:, 0], join='inner', axis=1,
method=None, fill_value=None)
tm.assert_index_equal(bf.index, Index([]))
af, bf = self.frame.align(other.iloc[:, 0], join='inner', axis=1,
method=None, fill_value=0)
tm.assert_index_equal(bf.index, Index([]))
# mixed floats/ints
af, bf = self.mixed_float.align(other.iloc[:, 0], join='inner', axis=1,
method=None, fill_value=0)
tm.assert_index_equal(bf.index, Index([]))
af, bf = self.mixed_int.align(other.iloc[:, 0], join='inner', axis=1,
method=None, fill_value=0)
tm.assert_index_equal(bf.index, Index([]))
# Try to align DataFrame to Series along bad axis
with pytest.raises(ValueError):
self.frame.align(af.iloc[0, :3], join='inner', axis=2)
# align dataframe to series with broadcast or not
idx = self.frame.index
s = Series(range(len(idx)), index=idx)
left, right = self.frame.align(s, axis=0)
tm.assert_index_equal(left.index, self.frame.index)
tm.assert_index_equal(right.index, self.frame.index)
assert isinstance(right, Series)
left, right = self.frame.align(s, broadcast_axis=1)
tm.assert_index_equal(left.index, self.frame.index)
expected = {}
for c in self.frame.columns:
expected[c] = s
expected = DataFrame(expected, index=self.frame.index,
columns=self.frame.columns)
tm.assert_frame_equal(right, expected)
# see gh-9558
df = DataFrame({'a': [1, 2, 3], 'b': [4, 5, 6]})
result = df[df['a'] == 2]
expected = DataFrame([[2, 5]], index=[1], columns=['a', 'b'])
tm.assert_frame_equal(result, expected)
result = df.where(df['a'] == 2, 0)
expected = DataFrame({'a': [0, 2, 0], 'b': [0, 5, 0]})
tm.assert_frame_equal(result, expected)
def _check_align(self, a, b, axis, fill_axis, how, method, limit=None):
aa, ab = a.align(b, axis=axis, join=how, method=method, limit=limit,
fill_axis=fill_axis)
join_index, join_columns = None, None
ea, eb = a, b
if axis is None or axis == 0:
join_index = a.index.join(b.index, how=how)
ea = ea.reindex(index=join_index)
eb = eb.reindex(index=join_index)
if axis is None or axis == 1:
join_columns = a.columns.join(b.columns, how=how)
ea = ea.reindex(columns=join_columns)
eb = eb.reindex(columns=join_columns)
ea = ea.fillna(axis=fill_axis, method=method, limit=limit)
eb = eb.fillna(axis=fill_axis, method=method, limit=limit)
assert_frame_equal(aa, ea)
assert_frame_equal(ab, eb)
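    # In short: _check_align verifies that DataFrame.align is equivalent to
    # reindexing both frames onto the joined index/columns and then filling
    # along fill_axis with the given method/limit.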
def test_align_fill_method_inner(self):
for meth in ['pad', 'bfill']:
for ax in [0, 1, None]:
for fax in [0, 1]:
self._check_align_fill('inner', meth, ax, fax)
def test_align_fill_method_outer(self):
for meth in ['pad', 'bfill']:
for ax in [0, 1, None]:
for fax in [0, 1]:
self._check_align_fill('outer', meth, ax, fax)
def test_align_fill_method_left(self):
for meth in ['pad', 'bfill']:
for ax in [0, 1, None]:
for fax in [0, 1]:
self._check_align_fill('left', meth, ax, fax)
def test_align_fill_method_right(self):
for meth in ['pad', 'bfill']:
for ax in [0, 1, None]:
for fax in [0, 1]:
self._check_align_fill('right', meth, ax, fax)
def _check_align_fill(self, kind, meth, ax, fax):
left = self.frame.iloc[0:4, :10]
right = self.frame.iloc[2:, 6:]
empty = self.frame.iloc[:0, :0]
self._check_align(left, right, axis=ax, fill_axis=fax,
how=kind, method=meth)
self._check_align(left, right, axis=ax, fill_axis=fax,
how=kind, method=meth, limit=1)
# empty left
self._check_align(empty, right, axis=ax, fill_axis=fax,
how=kind, method=meth)
self._check_align(empty, right, axis=ax, fill_axis=fax,
how=kind, method=meth, limit=1)
# empty right
self._check_align(left, empty, axis=ax, fill_axis=fax,
how=kind, method=meth)
self._check_align(left, empty, axis=ax, fill_axis=fax,
how=kind, method=meth, limit=1)
# both empty
self._check_align(empty, empty, axis=ax, fill_axis=fax,
how=kind, method=meth)
self._check_align(empty, empty, axis=ax, fill_axis=fax,
how=kind, method=meth, limit=1)
def test_align_int_fill_bug(self):
# GH #910
X = np.arange(10 * 10, dtype='float64').reshape(10, 10)
Y = np.ones((10, 1), dtype=int)
df1 = DataFrame(X)
df1['0.X'] = Y.squeeze()
df2 = df1.astype(float)
result = df1 - df1.mean()
expected = df2 - df2.mean()
assert_frame_equal(result, expected)
def test_align_multiindex(self):
# GH 10665
# same test cases as test_align_multiindex in test_series.py
midx = pd.MultiIndex.from_product([range(2), range(3), range(2)],
names=('a', 'b', 'c'))
idx = pd.Index(range(2), name='b')
df1 = pd.DataFrame(np.arange(12, dtype='int64'), index=midx)
df2 = pd.DataFrame(np.arange(2, dtype='int64'), index=idx)
# these must be the same results (but flipped)
res1l, res1r = df1.align(df2, join='left')
res2l, res2r = df2.align(df1, join='right')
expl = df1
assert_frame_equal(expl, res1l)
assert_frame_equal(expl, res2r)
expr = pd.DataFrame([0, 0, 1, 1, np.nan, np.nan] * 2, index=midx)
assert_frame_equal(expr, res1r)
assert_frame_equal(expr, res2l)
res1l, res1r = df1.align(df2, join='right')
res2l, res2r = df2.align(df1, join='left')
exp_idx = pd.MultiIndex.from_product([range(2), range(2), range(2)],
names=('a', 'b', 'c'))
expl = pd.DataFrame([0, 1, 2, 3, 6, 7, 8, 9], index=exp_idx)
assert_frame_equal(expl, res1l)
assert_frame_equal(expl, res2r)
expr = pd.DataFrame([0, 0, 1, 1] * 2, index=exp_idx)
assert_frame_equal(expr, res1r)
assert_frame_equal(expr, res2l)
def test_align_series_combinations(self):
df = pd.DataFrame({'a': [1, 3, 5],
'b': [1, 3, 5]}, index=list('ACE'))
s = pd.Series([1, 2, 4], index=list('ABD'), name='x')
# frame + series
res1, res2 = df.align(s, axis=0)
exp1 = pd.DataFrame({'a': [1, np.nan, 3, np.nan, 5],
'b': [1, np.nan, 3, np.nan, 5]},
index=list('ABCDE'))
exp2 = pd.Series([1, 2, np.nan, 4, np.nan],
index=list('ABCDE'), name='x')
tm.assert_frame_equal(res1, exp1)
tm.assert_series_equal(res2, exp2)
# series + frame
res1, res2 = s.align(df)
tm.assert_series_equal(res1, exp2)
tm.assert_frame_equal(res2, exp1)
def test_filter(self):
# Items
filtered = self.frame.filter(['A', 'B', 'E'])
assert len(filtered.columns) == 2
assert 'E' not in filtered
filtered = self.frame.filter(['A', 'B', 'E'], axis='columns')
assert len(filtered.columns) == 2
assert 'E' not in filtered
# Other axis
idx = self.frame.index[0:4]
filtered = self.frame.filter(idx, axis='index')
expected = self.frame.reindex(index=idx)
tm.assert_frame_equal(filtered, expected)
# like
fcopy = self.frame.copy()
fcopy['AA'] = 1
filtered = fcopy.filter(like='A')
assert len(filtered.columns) == 2
assert 'AA' in filtered
# like with ints in column names
df = DataFrame(0., index=[0, 1, 2], columns=[0, 1, '_A', '_B'])
filtered = df.filter(like='_')
assert len(filtered.columns) == 2
# regex with ints in column names
# from PR #10384
df = DataFrame(0., index=[0, 1, 2], columns=['A1', 1, 'B', 2, 'C'])
expected = DataFrame(
0., index=[0, 1, 2], columns=pd.Index([1, 2], dtype=object))
filtered = df.filter(regex='^[0-9]+$')
tm.assert_frame_equal(filtered, expected)
expected = DataFrame(0., index=[0, 1, 2], columns=[0, '0', 1, '1'])
# shouldn't remove anything
filtered = expected.filter(regex='^[0-9]+$')
tm.assert_frame_equal(filtered, expected)
# pass in None
with tm.assert_raises_regex(TypeError, 'Must pass'):
self.frame.filter()
with tm.assert_raises_regex(TypeError, 'Must pass'):
self.frame.filter(items=None)
with tm.assert_raises_regex(TypeError, 'Must pass'):
self.frame.filter(axis=1)
# test mutually exclusive arguments
with tm.assert_raises_regex(TypeError, 'mutually exclusive'):
self.frame.filter(items=['one', 'three'], regex='e$', like='bbi')
with tm.assert_raises_regex(TypeError, 'mutually exclusive'):
self.frame.filter(items=['one', 'three'], regex='e$', axis=1)
with tm.assert_raises_regex(TypeError, 'mutually exclusive'):
self.frame.filter(items=['one', 'three'], regex='e$')
with tm.assert_raises_regex(TypeError, 'mutually exclusive'):
self.frame.filter(items=['one', 'three'], like='bbi', axis=0)
with tm.assert_raises_regex(TypeError, 'mutually exclusive'):
self.frame.filter(items=['one', 'three'], like='bbi')
# objects
filtered = self.mixed_frame.filter(like='foo')
assert 'foo' in filtered
# unicode columns, won't ascii-encode
df = self.frame.rename(columns={'B': u('\u2202')})
filtered = df.filter(like='C')
assert 'C' in filtered
def test_filter_regex_search(self):
fcopy = self.frame.copy()
fcopy['AA'] = 1
# regex
filtered = fcopy.filter(regex='[A]+')
assert len(filtered.columns) == 2
assert 'AA' in filtered
# doesn't have to be at beginning
df = DataFrame({'aBBa': [1, 2],
'BBaBB': [1, 2],
'aCCa': [1, 2],
'aCCaBB': [1, 2]})
result = df.filter(regex='BB')
exp = df[[x for x in df.columns if 'BB' in x]]
assert_frame_equal(result, exp)
def test_filter_corner(self):
empty = DataFrame()
result = empty.filter([])
assert_frame_equal(result, empty)
result = empty.filter(like='foo')
assert_frame_equal(result, empty)
def test_select(self):
f = lambda x: x.weekday() == 2
result = self.tsframe.select(f, axis=0)
expected = self.tsframe.reindex(
index=self.tsframe.index[[f(x) for x in self.tsframe.index]])
assert_frame_equal(result, expected)
result = self.frame.select(lambda x: x in ('B', 'D'), axis=1)
expected = self.frame.reindex(columns=['B', 'D'])
# TODO should reindex check_names?
assert_frame_equal(result, expected, check_names=False)
def test_take(self):
# homogeneous
order = [3, 1, 2, 0]
for df in [self.frame]:
result = df.take(order, axis=0)
expected = df.reindex(df.index.take(order))
assert_frame_equal(result, expected)
# axis = 1
result = df.take(order, axis=1)
expected = df.loc[:, ['D', 'B', 'C', 'A']]
assert_frame_equal(result, expected, check_names=False)
        # negative indices
order = [2, 1, -1]
for df in [self.frame]:
result = df.take(order, axis=0)
expected = df.reindex(df.index.take(order))
assert_frame_equal(result, expected)
# axis = 1
result = df.take(order, axis=1)
expected = df.loc[:, ['C', 'B', 'D']]
assert_frame_equal(result, expected, check_names=False)
# illegal indices
pytest.raises(IndexError, df.take, [3, 1, 2, 30], axis=0)
pytest.raises(IndexError, df.take, [3, 1, 2, -31], axis=0)
pytest.raises(IndexError, df.take, [3, 1, 2, 5], axis=1)
pytest.raises(IndexError, df.take, [3, 1, 2, -5], axis=1)
# mixed-dtype
order = [4, 1, 2, 0, 3]
for df in [self.mixed_frame]:
result = df.take(order, axis=0)
expected = df.reindex(df.index.take(order))
assert_frame_equal(result, expected)
# axis = 1
result = df.take(order, axis=1)
expected = df.loc[:, ['foo', 'B', 'C', 'A', 'D']]
assert_frame_equal(result, expected)
        # negative indices
order = [4, 1, -2]
for df in [self.mixed_frame]:
result = df.take(order, axis=0)
expected = df.reindex(df.index.take(order))
assert_frame_equal(result, expected)
# axis = 1
result = df.take(order, axis=1)
expected = df.loc[:, ['foo', 'B', 'D']]
assert_frame_equal(result, expected)
# by dtype
order = [1, 2, 0, 3]
for df in [self.mixed_float, self.mixed_int]:
result = df.take(order, axis=0)
expected = df.reindex(df.index.take(order))
assert_frame_equal(result, expected)
# axis = 1
result = df.take(order, axis=1)
expected = df.loc[:, ['B', 'C', 'A', 'D']]
assert_frame_equal(result, expected)
def test_reindex_boolean(self):
frame = DataFrame(np.ones((10, 2), dtype=bool),
index=np.arange(0, 20, 2),
columns=[0, 2])
reindexed = frame.reindex(np.arange(10))
assert reindexed.values.dtype == np.object_
assert isnull(reindexed[0][1])
reindexed = frame.reindex(columns=lrange(3))
assert reindexed.values.dtype == np.object_
assert isnull(reindexed[1]).all()
def test_reindex_objects(self):
reindexed = self.mixed_frame.reindex(columns=['foo', 'A', 'B'])
assert 'foo' in reindexed
reindexed = self.mixed_frame.reindex(columns=['A', 'B'])
assert 'foo' not in reindexed
def test_reindex_corner(self):
index = Index(['a', 'b', 'c'])
dm = self.empty.reindex(index=[1, 2, 3])
reindexed = dm.reindex(columns=index)
tm.assert_index_equal(reindexed.columns, index)
# ints are weird
smaller = self.intframe.reindex(columns=['A', 'B', 'E'])
assert smaller['E'].dtype == np.float64
def test_reindex_axis(self):
cols = ['A', 'B', 'E']
reindexed1 = self.intframe.reindex_axis(cols, axis=1)
reindexed2 = self.intframe.reindex(columns=cols)
assert_frame_equal(reindexed1, reindexed2)
rows = self.intframe.index[0:5]
reindexed1 = self.intframe.reindex_axis(rows, axis=0)
reindexed2 = self.intframe.reindex(index=rows)
assert_frame_equal(reindexed1, reindexed2)
pytest.raises(ValueError, self.intframe.reindex_axis, rows, axis=2)
# no-op case
cols = self.frame.columns.copy()
newFrame = self.frame.reindex_axis(cols, axis=1)
assert_frame_equal(newFrame, self.frame)
def test_reindex_with_nans(self):
df = DataFrame([[1, 2], [3, 4], [np.nan, np.nan], [7, 8], [9, 10]],
columns=['a', 'b'],
index=[100.0, 101.0, np.nan, 102.0, 103.0])
result = df.reindex(index=[101.0, 102.0, 103.0])
expected = df.iloc[[1, 3, 4]]
assert_frame_equal(result, expected)
result = df.reindex(index=[103.0])
expected = df.iloc[[4]]
assert_frame_equal(result, expected)
result = df.reindex(index=[101.0])
expected = df.iloc[[1]]
assert_frame_equal(result, expected)
def test_reindex_multi(self):
df = DataFrame(np.random.randn(3, 3))
result = df.reindex(lrange(4), lrange(4))
expected = df.reindex(lrange(4)).reindex(columns=lrange(4))
assert_frame_equal(result, expected)
df = DataFrame(np.random.randint(0, 10, (3, 3)))
result = df.reindex(lrange(4), lrange(4))
expected = df.reindex(lrange(4)).reindex(columns=lrange(4))
assert_frame_equal(result, expected)
df = DataFrame(np.random.randint(0, 10, (3, 3)))
result = df.reindex(lrange(2), lrange(2))
expected = df.reindex(lrange(2)).reindex(columns=lrange(2))
assert_frame_equal(result, expected)
df = DataFrame(np.random.randn(5, 3) + 1j, columns=['a', 'b', 'c'])
result = df.reindex(index=[0, 1], columns=['a', 'b'])
expected = df.reindex([0, 1]).reindex(columns=['a', 'b'])
assert_frame_equal(result, expected)
| {
"content_hash": "5cb240af7df13e3e1bd92c265260f173",
"timestamp": "",
"source": "github",
"line_count": 932,
"max_line_length": 79,
"avg_line_length": 37.87339055793991,
"alnum_prop": 0.5386424160009066,
"repo_name": "ammarkhann/FinalSeniorCode",
"id": "a6326083c1beeb36eeda14860e346b35c505945d",
"size": "35323",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "lib/python2.7/site-packages/pandas/tests/frame/test_axis_select_reindex.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "229289"
},
{
"name": "C++",
"bytes": "171536"
},
{
"name": "CSS",
"bytes": "928345"
},
{
"name": "Fortran",
"bytes": "14107"
},
{
"name": "HTML",
"bytes": "853239"
},
{
"name": "JavaScript",
"bytes": "4838516"
},
{
"name": "Jupyter Notebook",
"bytes": "518186"
},
{
"name": "Makefile",
"bytes": "214"
},
{
"name": "Matlab",
"bytes": "4346"
},
{
"name": "Python",
"bytes": "81804894"
},
{
"name": "Roff",
"bytes": "6673"
},
{
"name": "Shell",
"bytes": "3409"
},
{
"name": "Smarty",
"bytes": "28408"
},
{
"name": "TeX",
"bytes": "1527"
},
{
"name": "XSLT",
"bytes": "366202"
}
],
"symlink_target": ""
} |
while True:
s = raw_input('Enter something:')
if s == 'quit':
break
if len(s) < 3:
continue
print 'Input is of sufficient length'
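# A sample session (illustrative):
#   Enter something:hi
#   Enter something:hello
#   Input is of sufficient length
#   Enter something:quit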
| {
"content_hash": "6defc51caedb7c3f6b55fd7f582dceb7",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 41,
"avg_line_length": 23.142857142857142,
"alnum_prop": 0.5493827160493827,
"repo_name": "xingchaoma/github-python-study",
"id": "ff67bb7ce1c9af54477e3377636cbc68b390786b",
"size": "162",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "byteofpython/ch08_modules/continue.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "18668"
}
],
"symlink_target": ""
} |
"""
Datastore utility functions
"""
from oslo_log import log as logging
from oslo_vmware import exceptions as vexc
from oslo_vmware.objects import datastore as ds_obj
from oslo_vmware import pbm
from oslo_vmware import vim_util as vutil
from nova import exception
from nova.i18n import _, _LE, _LI
from nova.virt.vmwareapi import constants
from nova.virt.vmwareapi import vim_util
from nova.virt.vmwareapi import vm_util
LOG = logging.getLogger(__name__)
ALL_SUPPORTED_DS_TYPES = frozenset([constants.DATASTORE_TYPE_VMFS,
constants.DATASTORE_TYPE_NFS,
constants.DATASTORE_TYPE_NFS41,
constants.DATASTORE_TYPE_VSAN])
def _select_datastore(session, data_stores, best_match, datastore_regex=None,
storage_policy=None,
allowed_ds_types=ALL_SUPPORTED_DS_TYPES):
"""Find the most preferable datastore in a given RetrieveResult object.
:param session: vmwareapi session
:param data_stores: a RetrieveResult object from vSphere API call
:param best_match: the current best match for datastore
:param datastore_regex: an optional regular expression to match names
:param storage_policy: storage policy for the datastore
:param allowed_ds_types: a list of acceptable datastore type names
:return: datastore_ref, datastore_name, capacity, freespace
"""
if storage_policy:
matching_ds = _filter_datastores_matching_storage_policy(
session, data_stores, storage_policy)
if not matching_ds:
return best_match
else:
matching_ds = data_stores
# data_stores is actually a RetrieveResult object from vSphere API call
for obj_content in matching_ds.objects:
# the propset attribute "need not be set" by returning API
if not hasattr(obj_content, 'propSet'):
continue
propdict = vm_util.propset_dict(obj_content.propSet)
if _is_datastore_valid(propdict, datastore_regex, allowed_ds_types):
new_ds = ds_obj.Datastore(
ref=obj_content.obj,
name=propdict['summary.name'],
capacity=propdict['summary.capacity'],
freespace=propdict['summary.freeSpace'])
# favor datastores with more free space
if (best_match is None or
new_ds.freespace > best_match.freespace):
best_match = new_ds
return best_match
def _is_datastore_valid(propdict, datastore_regex, ds_types):
"""Checks if a datastore is valid based on the following criteria.
Criteria:
- Datastore is accessible
- Datastore is not in maintenance mode (optional)
- Datastore's type is one of the given ds_types
- Datastore matches the supplied regex (optional)
:param propdict: datastore summary dict
:param datastore_regex : Regex to match the name of a datastore.
"""
    # vSphere doesn't support CIFS or vfat for datastores, so those types
    # are filtered out by the ds_types membership check below.
return (propdict.get('summary.accessible') and
(propdict.get('summary.maintenanceMode') is None or
propdict.get('summary.maintenanceMode') == 'normal') and
propdict['summary.type'] in ds_types and
(datastore_regex is None or
datastore_regex.match(propdict['summary.name'])))
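# Illustrative propdict accepted by _is_datastore_valid above (keys match the
# property list requested in get_datastore below; the values are made up):
#   {'summary.accessible': True, 'summary.maintenanceMode': 'normal',
#    'summary.type': 'VMFS', 'summary.name': 'datastore1'}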
def get_datastore(session, cluster, datastore_regex=None,
storage_policy=None,
allowed_ds_types=ALL_SUPPORTED_DS_TYPES):
"""Get the datastore list and choose the most preferable one."""
datastore_ret = session._call_method(vutil,
"get_object_property",
cluster,
"datastore")
# If there are no hosts in the cluster then an empty string is
# returned
if not datastore_ret:
raise exception.DatastoreNotFound()
data_store_mors = datastore_ret.ManagedObjectReference
data_stores = session._call_method(vim_util,
"get_properties_for_a_collection_of_objects",
"Datastore", data_store_mors,
["summary.type", "summary.name",
"summary.capacity", "summary.freeSpace",
"summary.accessible",
"summary.maintenanceMode"])
best_match = None
while data_stores:
best_match = _select_datastore(session,
data_stores,
best_match,
datastore_regex,
storage_policy,
allowed_ds_types)
data_stores = session._call_method(vutil, 'continue_retrieval',
data_stores)
if best_match:
return best_match
if storage_policy:
raise exception.DatastoreNotFound(
_("Storage policy %s did not match any datastores")
% storage_policy)
elif datastore_regex:
raise exception.DatastoreNotFound(
_("Datastore regex %s did not match any datastores")
% datastore_regex.pattern)
else:
raise exception.DatastoreNotFound()
def _get_allowed_datastores(data_stores, datastore_regex):
allowed = []
for obj_content in data_stores.objects:
        # the propSet attribute "need not be set" by the returning API
if not hasattr(obj_content, 'propSet'):
continue
propdict = vm_util.propset_dict(obj_content.propSet)
if _is_datastore_valid(propdict,
datastore_regex,
ALL_SUPPORTED_DS_TYPES):
allowed.append(ds_obj.Datastore(ref=obj_content.obj,
name=propdict['summary.name']))
return allowed
def get_available_datastores(session, cluster=None, datastore_regex=None):
"""Get the datastore list and choose the first local storage."""
ds = session._call_method(vutil,
"get_object_property",
cluster,
"datastore")
if not ds:
return []
data_store_mors = ds.ManagedObjectReference
# NOTE(garyk): use utility method to retrieve remote objects
data_stores = session._call_method(vim_util,
"get_properties_for_a_collection_of_objects",
"Datastore", data_store_mors,
["summary.type", "summary.name", "summary.accessible",
"summary.maintenanceMode"])
allowed = []
while data_stores:
allowed.extend(_get_allowed_datastores(data_stores, datastore_regex))
data_stores = session._call_method(vutil, 'continue_retrieval',
data_stores)
return allowed
def get_allowed_datastore_types(disk_type):
if disk_type == constants.DISK_TYPE_STREAM_OPTIMIZED:
return ALL_SUPPORTED_DS_TYPES
return ALL_SUPPORTED_DS_TYPES - frozenset([constants.DATASTORE_TYPE_VSAN])
def file_delete(session, ds_path, dc_ref):
LOG.debug("Deleting the datastore file %s", ds_path)
vim = session.vim
file_delete_task = session._call_method(
vim,
"DeleteDatastoreFile_Task",
vim.service_content.fileManager,
name=str(ds_path),
datacenter=dc_ref)
session._wait_for_task(file_delete_task)
LOG.debug("Deleted the datastore file")
def file_copy(session, src_file, src_dc_ref, dst_file, dst_dc_ref):
LOG.debug("Copying the datastore file from %(src)s to %(dst)s",
{'src': src_file, 'dst': dst_file})
vim = session.vim
copy_task = session._call_method(
vim,
"CopyDatastoreFile_Task",
vim.service_content.fileManager,
sourceName=src_file,
sourceDatacenter=src_dc_ref,
destinationName=dst_file,
destinationDatacenter=dst_dc_ref)
session._wait_for_task(copy_task)
LOG.debug("Copied the datastore file")
def disk_move(session, dc_ref, src_file, dst_file):
"""Moves the source virtual disk to the destination.
The list of possible faults that the server can return on error
include:
* CannotAccessFile: Thrown if the source file or folder cannot be
moved because of insufficient permissions.
* FileAlreadyExists: Thrown if a file with the given name already
exists at the destination.
* FileFault: Thrown if there is a generic file error
* FileLocked: Thrown if the source file or folder is currently
locked or in use.
* FileNotFound: Thrown if the file or folder specified by sourceName
is not found.
* InvalidDatastore: Thrown if the operation cannot be performed on
the source or destination datastores.
* NoDiskSpace: Thrown if there is not enough space available on the
destination datastore.
* RuntimeFault: Thrown if any type of runtime fault is thrown that
is not covered by the other faults; for example,
a communication error.
"""
LOG.debug("Moving virtual disk from %(src)s to %(dst)s.",
{'src': src_file, 'dst': dst_file})
move_task = session._call_method(
session.vim,
"MoveVirtualDisk_Task",
session.vim.service_content.virtualDiskManager,
sourceName=str(src_file),
sourceDatacenter=dc_ref,
destName=str(dst_file),
destDatacenter=dc_ref,
force=False)
session._wait_for_task(move_task)
LOG.info(_LI("Moved virtual disk from %(src)s to %(dst)s."),
{'src': src_file, 'dst': dst_file})
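# Usage sketch (hypothetical paths; assumes an active vmwareapi session and a
# datacenter moref). DatastorePath is the oslo.vmware helper imported as ds_obj:
#   src = ds_obj.DatastorePath('ds1', 'foo', 'instance.vmdk')
#   dst = ds_obj.DatastorePath('ds1', 'bar', 'instance.vmdk')
#   disk_move(session, dc_ref, src, dst)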
def disk_copy(session, dc_ref, src_file, dst_file):
"""Copies the source virtual disk to the destination."""
LOG.debug("Copying virtual disk from %(src)s to %(dst)s.",
{'src': src_file, 'dst': dst_file})
copy_disk_task = session._call_method(
session.vim,
"CopyVirtualDisk_Task",
session.vim.service_content.virtualDiskManager,
sourceName=str(src_file),
sourceDatacenter=dc_ref,
destName=str(dst_file),
destDatacenter=dc_ref,
force=False)
session._wait_for_task(copy_disk_task)
LOG.info(_LI("Copied virtual disk from %(src)s to %(dst)s."),
{'src': src_file, 'dst': dst_file})
def disk_delete(session, dc_ref, file_path):
"""Deletes a virtual disk."""
LOG.debug("Deleting virtual disk %s", file_path)
delete_disk_task = session._call_method(
session.vim,
"DeleteVirtualDisk_Task",
session.vim.service_content.virtualDiskManager,
name=str(file_path),
datacenter=dc_ref)
session._wait_for_task(delete_disk_task)
LOG.info(_LI("Deleted virtual disk %s."), file_path)
def file_move(session, dc_ref, src_file, dst_file):
"""Moves the source file or folder to the destination.
The list of possible faults that the server can return on error
include:
* CannotAccessFile: Thrown if the source file or folder cannot be
moved because of insufficient permissions.
* FileAlreadyExists: Thrown if a file with the given name already
exists at the destination.
* FileFault: Thrown if there is a generic file error
* FileLocked: Thrown if the source file or folder is currently
locked or in use.
* FileNotFound: Thrown if the file or folder specified by sourceName
is not found.
* InvalidDatastore: Thrown if the operation cannot be performed on
the source or destination datastores.
* NoDiskSpace: Thrown if there is not enough space available on the
destination datastore.
* RuntimeFault: Thrown if any type of runtime fault is thrown that
is not covered by the other faults; for example,
a communication error.
"""
LOG.debug("Moving file from %(src)s to %(dst)s.",
{'src': src_file, 'dst': dst_file})
vim = session.vim
move_task = session._call_method(
vim,
"MoveDatastoreFile_Task",
vim.service_content.fileManager,
sourceName=str(src_file),
sourceDatacenter=dc_ref,
destinationName=str(dst_file),
destinationDatacenter=dc_ref)
session._wait_for_task(move_task)
LOG.debug("File moved")
def search_datastore_spec(client_factory, file_name):
"""Builds the datastore search spec."""
search_spec = client_factory.create('ns0:HostDatastoreBrowserSearchSpec')
search_spec.matchPattern = [file_name]
search_spec.details = client_factory.create('ns0:FileQueryFlags')
search_spec.details.fileOwner = False
search_spec.details.fileSize = True
search_spec.details.fileType = False
search_spec.details.modification = False
return search_spec
def file_exists(session, ds_browser, ds_path, file_name):
"""Check if the file exists on the datastore."""
client_factory = session.vim.client.factory
search_spec = search_datastore_spec(client_factory, file_name)
search_task = session._call_method(session.vim,
"SearchDatastore_Task",
ds_browser,
datastorePath=str(ds_path),
searchSpec=search_spec)
try:
task_info = session._wait_for_task(search_task)
except vexc.FileNotFoundException:
return False
file_exists = (getattr(task_info.result, 'file', False) and
task_info.result.file[0].path == file_name)
return file_exists
def file_size(session, ds_browser, ds_path, file_name):
"""Returns the size of the specified file."""
client_factory = session.vim.client.factory
search_spec = search_datastore_spec(client_factory, file_name)
search_task = session._call_method(session.vim,
"SearchDatastore_Task",
ds_browser,
datastorePath=str(ds_path),
searchSpec=search_spec)
task_info = session._wait_for_task(search_task)
if hasattr(task_info.result, 'file'):
return task_info.result.file[0].fileSize
def mkdir(session, ds_path, dc_ref):
"""Creates a directory at the path specified. If it is just "NAME",
then a directory with this name is created at the topmost level of the
DataStore.
"""
LOG.debug("Creating directory with path %s", ds_path)
session._call_method(session.vim, "MakeDirectory",
session.vim.service_content.fileManager,
name=str(ds_path), datacenter=dc_ref,
createParentDirectories=True)
LOG.debug("Created directory with path %s", ds_path)
def get_sub_folders(session, ds_browser, ds_path):
"""Return a set of subfolders for a path on a datastore.
If the path does not exist then an empty set is returned.
"""
search_task = session._call_method(
session.vim,
"SearchDatastore_Task",
ds_browser,
datastorePath=str(ds_path))
try:
task_info = session._wait_for_task(search_task)
except vexc.FileNotFoundException:
return set()
# populate the folder entries
if hasattr(task_info.result, 'file'):
return set([file.path for file in task_info.result.file])
return set()
def _filter_datastores_matching_storage_policy(session, data_stores,
storage_policy):
"""Get datastores matching the given storage policy.
:param data_stores: the list of retrieve result wrapped datastore objects
:param storage_policy: the storage policy name
:return the list of datastores conforming to the given storage policy
"""
profile_id = pbm.get_profile_id_by_name(session, storage_policy)
if profile_id:
factory = session.pbm.client.factory
ds_mors = [oc.obj for oc in data_stores.objects]
hubs = pbm.convert_datastores_to_hubs(factory, ds_mors)
matching_hubs = pbm.filter_hubs_by_profile(session, hubs,
profile_id)
if matching_hubs:
matching_ds = pbm.filter_datastores_by_hubs(matching_hubs,
ds_mors)
object_contents = [oc for oc in data_stores.objects
if oc.obj in matching_ds]
data_stores.objects = object_contents
return data_stores
LOG.error(_LE("Unable to retrieve storage policy with name %s"),
storage_policy)
| {
"content_hash": "566c6949d0f94ca0a6c0f9ef8001c025",
"timestamp": "",
"source": "github",
"line_count": 424,
"max_line_length": 78,
"avg_line_length": 40.176886792452834,
"alnum_prop": 0.608981508658644,
"repo_name": "j-carpentier/nova",
"id": "c8755d54ee86b79f4e162c4f4666f8328e746839",
"size": "17644",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "nova/virt/vmwareapi/ds_util.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "16740777"
},
{
"name": "Shell",
"bytes": "20716"
},
{
"name": "Smarty",
"bytes": "259647"
}
],
"symlink_target": ""
} |
""":"
work_dir=$(dirname $0)
base_name=$(basename $0)
cd $work_dir
if [ $HOD_PYTHON_HOME ]; then
exec $HOD_PYTHON_HOME -OO -u $base_name ${1+"$@"}
elif [ -e /usr/bin/python ]; then
exec /usr/bin/python -OO -u $base_name ${1+"$@"}
elif [ -e /usr/local/bin/python ]; then
exec /usr/local/bin/python -OO -u $base_name ${1+"$@"}
else
exec python -OO -u $base_name ${1+"$@"}
fi
":"""
from os import popen3
import os, sys
import re
import time
from datetime import datetime
from optparse import OptionParser
myName = os.path.basename(sys.argv[0])
myName = re.sub(".*/", "", myName)
reVersion = re.compile(".*(\d+_\d+).*")
VERSION = '$HeadURL: http://svn.apache.org/repos/asf/hadoop/common/tags/release-1.0.3/src/contrib/hod/support/logcondense.py $'
reMatch = reVersion.match(VERSION)
if reMatch:
VERSION = reMatch.group(1)
VERSION = re.sub("_", ".", VERSION)
else:
VERSION = 'DEV'
options = ( {'short' : "-p",
'long' : "--package",
'type' : "string",
'action' : "store",
'dest' : "package",
'metavar' : " ",
'default' : 'hadoop',
'help' : "Bin file for hadoop"},
{'short' : "-d",
'long' : "--days",
'type' : "int",
'action' : "store",
'dest' : "days",
'metavar' : " ",
'default' : 7,
'help' : "Number of days before logs are deleted"},
{'short' : "-c",
'long' : "--config",
'type' : "string",
'action' : "store",
'dest' : "config",
'metavar' : " ",
'default' : None,
'help' : "config directory for hadoop"},
{'short' : "-l",
'long' : "--logs",
'type' : "string",
'action' : "store",
'dest' : "log",
'metavar' : " ",
'default' : "/user",
'help' : "directory prefix under which logs are stored per user"},
{'short' : "-n",
'long' : "--dynamicdfs",
'type' : "string",
'action' : "store",
'dest' : "dynamicdfs",
'metavar' : " ",
'default' : "false",
'help' : "'true', if the cluster is used to bring up dynamic dfs clusters, 'false' otherwise"}
)
def getDfsCommand(options, args):
if (options.config == None):
cmd = options.package + " " + "dfs " + args
else:
cmd = options.package + " " + "--config " + options.config + " dfs " + args
return cmd
def runcondense():
import shutil
options = process_args()
# if the cluster is used to bring up dynamic dfs, we must leave NameNode and JobTracker logs,
# otherwise only JobTracker logs. Likewise, in case of dynamic dfs, we must also look for
# deleting datanode logs
filteredNames = ['jobtracker']
deletedNamePrefixes = ['*-tasktracker-*']
if options.dynamicdfs == 'true':
filteredNames.append('namenode')
deletedNamePrefixes.append('*-datanode-*')
filepath = '%s/\*/hod-logs/' % (options.log)
cmd = getDfsCommand(options, "-lsr " + filepath)
(stdin, stdout, stderr) = popen3(cmd)
lastjobid = 'none'
toPurge = { }
for line in stdout:
try:
m = re.match("^.*\s(.*)\n$", line)
filename = m.group(1)
# file name format: <prefix>/<user>/hod-logs/<jobid>/[0-9]*-[jobtracker|tasktracker|datanode|namenode|]-hostname-YYYYMMDDtime-random.tar.gz
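      # e.g. (hypothetical): /user/alice/hod-logs/job_0001/1-tasktracker-node7-201201011530-abc.tar.gz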
# first strip prefix:
if filename.startswith(options.log):
filename = filename[len(options.log):]
if not filename.startswith('/'):
filename = '/' + filename
else:
continue
# Now get other details from filename.
k = re.match("/(.*)/hod-logs/(.*)/.*-.*-([0-9][0-9][0-9][0-9])([0-9][0-9])([0-9][0-9]).*$", filename)
if k:
username = k.group(1)
jobid = k.group(2)
datetimefile = datetime(int(k.group(3)), int(k.group(4)), int(k.group(5)))
datetimenow = datetime.utcnow()
diff = datetimenow - datetimefile
filedate = k.group(3) + k.group(4) + k.group(5)
newdate = datetimenow.strftime("%Y%m%d")
print "%s %s %s %d" % (filename, filedate, newdate, diff.days)
# if the cluster is used to bring up dynamic dfs, we must also leave NameNode logs.
foundFilteredName = False
for name in filteredNames:
if filename.find(name) >= 0:
foundFilteredName = True
break
if foundFilteredName:
continue
if (diff.days > options.days):
desttodel = filename
if not toPurge.has_key(jobid):
toPurge[jobid] = options.log.rstrip("/") + "/" + username + "/hod-logs/" + jobid
except Exception, e:
print >> sys.stderr, e
for job in toPurge.keys():
try:
for prefix in deletedNamePrefixes:
cmd = getDfsCommand(options, "-rm " + toPurge[job] + '/' + prefix)
print cmd
ret = 0
ret = os.system(cmd)
if (ret != 0):
print >> sys.stderr, "Command failed to delete file " + cmd
except Exception, e:
print >> sys.stderr, e
def process_args():
global options, myName, VERSION
usage = "usage: %s <ARGS>" % (myName)
version = "%s %s" % (myName, VERSION)
argParser = OptionParser(usage=usage, version=VERSION)
for option_element in options:
argParser.add_option(option_element['short'], option_element['long'],
type=option_element['type'], action=option_element['action'],
dest=option_element['dest'], default=option_element['default'],
metavar=option_element['metavar'], help=option_element['help'])
(parsedOptions, args) = argParser.parse_args()
if not os.path.exists(parsedOptions.package):
argParser.error("Could not find path to hadoop binary: %s" % parsedOptions.package)
  if parsedOptions.config is not None and not os.path.exists(parsedOptions.config):
argParser.error("Could not find config: %s" % parsedOptions.config)
if parsedOptions.days <= 0:
argParser.error("Invalid number of days specified, must be > 0: %s" % parsedOptions.config)
if parsedOptions.dynamicdfs!='true' and parsedOptions.dynamicdfs!='false':
argParser.error("Invalid option for dynamicdfs, must be true or false: %s" % parsedOptions.dynamicdfs)
return parsedOptions
if __name__ == '__main__':
runcondense()
| {
"content_hash": "96b5d88ef89ff363ddc5b6c28939bb64",
"timestamp": "",
"source": "github",
"line_count": 195,
"max_line_length": 145,
"avg_line_length": 32.36923076923077,
"alnum_prop": 0.5741444866920152,
"repo_name": "CodingCat/LongTermFairScheduler",
"id": "5017203ce2ab360d09ada78f5bb36419fcea89ef",
"size": "7093",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/contrib/hod/support/logcondense.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "402884"
},
{
"name": "C++",
"bytes": "368086"
},
{
"name": "Java",
"bytes": "16257106"
},
{
"name": "JavaScript",
"bytes": "38376"
},
{
"name": "Objective-C",
"bytes": "119767"
},
{
"name": "PHP",
"bytes": "152555"
},
{
"name": "Perl",
"bytes": "152209"
},
{
"name": "Python",
"bytes": "621805"
},
{
"name": "Ruby",
"bytes": "28485"
},
{
"name": "Shell",
"bytes": "976307"
},
{
"name": "Smalltalk",
"bytes": "56562"
}
],
"symlink_target": ""
} |
import RPi.GPIO as GPIO, time, os
#Constants
NUM_TESTS = 3
NUM_TESTS_PER_VALUE = 5
def readResistance(RCpin):
'''
Read the resistance with the pin stated in the input, returns an integer with the number
of clock cycles passed
'''
#Discharge the pins and capacitor
GPIO.setup(RCpin, GPIO.OUT)
GPIO.output(RCpin, GPIO.LOW)
time.sleep(0.1)
    #Initialise the cycle counter
    reading = 0 #Set the initial reading to 0
    #Start counting: switch the pin to input and time the capacitor charging
GPIO.setup(RCpin, GPIO.IN)
# This takes about 1 millisecond per loop cycle
while (GPIO.input(RCpin) == GPIO.LOW):
reading += 1
#Return the results
return reading
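#Worked example (illustrative): a 10000 Ohm reference resistor with a 1 uF
#capacitor gives RC = 10000 * 0.000001 = 0.01 s. If readResistance() averages
#10 cycles for that resistor, the ratio is 0.001 s per cycle, so a later
#reading of 25 cycles predicts (25 * 0.001) / 0.000001 = 25000 Ohms.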
#Test code if the file is actually being run
if __name__ == "__main__":
try:
GPIO.setmode(GPIO.BCM)
#Get the user input
pin = input("Which GPIO pin are you using? ")
capValue = input("How many microFarads is the capacitor? ")
#Input was in microFarads so divide accordingly
capValue /= 1000000.0
print "\nWe will first run some resistor tests to have a more accurate reading. Connect a resistor with a known resistance to the circuit and follow the instructions"
print "Test atleast one value but then just press enter to quit at any time"
#Set the initial ratio, needs to be changed
ratio = 0
num = 0
for test in range(NUM_TESTS):
try:
resValue = input("\nTest " + str(test + 1) + ": resistor size (ohms): ")
except Exception:
if ratio == 0:
continue
break
values = []
average = 0.0
print "Calculating..."
#Read some values
for i in range(NUM_TESTS_PER_VALUE):
values.append(readResistance(pin))
average += values[i]
time.sleep(0.1)
#Take the average
average /= NUM_TESTS_PER_VALUE
print "Average No. of Clock Cycles: %f" % (average)
#This is the time it should take for the
#capacitor to charge in an RC circuit
exactTime = resValue * capValue
#Add the ratio of the time found and the clock cycles
ratio += (exactTime / average)
num += 1
#Take the average of the ratios
ratio /= num
print "\nTests completed\n"
#Get the sleep time limit
        timeLimit = min(max(0.2, input("How often to update resistance (seconds, 0.2 <= s <= 5): ")), 5)
#Loop while user is running
while True:
#Get the number of cycles
numCycles = readResistance(pin)
#Predict the resistance in ohms
resistance = (numCycles * ratio) / capValue
#Print the results
print "Number Of Clock Cycles: %d" % (numCycles)
print "Predicted Resistance: %f Ohms\n" % (resistance)
#Sleep for the desired time
time.sleep(timeLimit)
except KeyboardInterrupt:
GPIO.cleanup()
| {
"content_hash": "8176cff226c7c1912781ff39f308eb0f",
"timestamp": "",
"source": "github",
"line_count": 108,
"max_line_length": 174,
"avg_line_length": 30.685185185185187,
"alnum_prop": 0.5386240193120096,
"repo_name": "basimkhajwal/RaspberryPi",
"id": "548877ef3514c2118857fe74608adc8a35a04f75",
"size": "3314",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "utils/ReadResistance.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "7522"
},
{
"name": "Shell",
"bytes": "132"
}
],
"symlink_target": ""
} |
'''
@author: Sana Dev Team
Created on May 12, 2011
'''
from __future__ import with_statement
import logging
import urllib
try:
import json
except ImportError:
import simplejson as json
from django.conf import settings
from django.core.mail import send_mail
from piston.handler import BaseHandler
from piston.utils import rc
from piston import resource
from sana.api.middleware.sms.util import format_sms
from sana.api.util import fail, succeed, get_logger, validate
class NotificationHandler(BaseHandler):
''' Handles notification requests. The field names which will be recognized
        while handling the request: message, recipient_addr.
Allowed methods: GET, POST
'''
allowed_methods = ('GET','POST',)
def create(self, request):
request.full_clean()
form = request.data
result = False
try:
messages = format_sms(form['message'])
for message in messages:
params = urllib.urlencode({
'username': settings.KANNEL_USER,
'password': settings.KANNEL_PASSWORD,
'to': form['recipient_addr'],
'text': message
})
response = urllib.urlopen(settings.KANNEL_URI % params).read()
logging.info("Kannel response: %s" % response)
result = succeed('message sent to: %s' % form['recipient_addr'])
except Exception as e:
logging.error("Couldn't submit Kannel notification for %s: %s" % (form['recipient_addr'], e))
result = fail('message send fail: %s' % form['recipient_addr'])
return result
def read(self, request, notification_id=None):
''' Requests notifications cached and sent from this server '''
pass
notification_resource = resource.Resource(NotificationHandler)
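# Example request payload (illustrative): POSTing
#   {"message": "Patient record updated", "recipient_addr": "+15551234567"}
# pushes one SMS per chunk produced by format_sms() through the Kannel gateway.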
| {
"content_hash": "d0a67a341915889fb79b15a5578414b2",
"timestamp": "",
"source": "github",
"line_count": 57,
"max_line_length": 105,
"avg_line_length": 33.08771929824562,
"alnum_prop": 0.6166489925768823,
"repo_name": "satvikdhandhania/vit-11",
"id": "da541dba442c854c1351b5bc922009326c1bb137",
"size": "1886",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "sana/contrib/middleware/kannel/handlers.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
from twisted.trial import unittest
from axiom import store
from xmantissa import ixmantissa, endpoint
class MantissaQ2Q(unittest.TestCase):
def testInstallation(self):
d = self.mktemp()
s = store.Store(unicode(d))
q = endpoint.UniversalEndpointService(store=s)
q.installOn(s)
self.assertIdentical(ixmantissa.IQ2QService(s), q)
| {
"content_hash": "08a497cfb456b4a77f4ea7c01d008f61",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 58,
"avg_line_length": 28.76923076923077,
"alnum_prop": 0.7058823529411765,
"repo_name": "twisted/mantissa",
"id": "03161e10d3d974bfa2d6db87f4024e151c71efd9",
"size": "375",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "xmantissa/test/test_q2q.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "27264"
},
{
"name": "HTML",
"bytes": "57439"
},
{
"name": "JavaScript",
"bytes": "865621"
},
{
"name": "Python",
"bytes": "1631375"
}
],
"symlink_target": ""
} |
from django.contrib import admin
from dbparti import backend
from dbparti.backends.exceptions import PartitionColumnError, PartitionFilterError
class PartitionableAdmin(admin.ModelAdmin):
partition_show = 'all'
def __init__(self, *args, **kwargs):
super(PartitionableAdmin, self).__init__(*args, **kwargs)
if not self.opts.partition_column in self.opts.get_all_field_names():
raise PartitionColumnError(
model=self.opts.__dict__['object_name'],
current_value=self.opts.partition_column,
allowed_values=self.opts.get_all_field_names()
)
try:
self.filter = getattr(backend.filters, '{0}PartitionFilter'.format(
self.opts.partition_type.capitalize()))(self.partition_show, **self.opts.__dict__)
except AttributeError:
import re
raise PartitionFilterError(
model=self.opts.__dict__['object_name'],
current_value=self.opts.partition_type,
allowed_values=[c.replace('PartitionFilter', '').lower() for c in dir(
backend.filters) if re.match('\w+PartitionFilter', c) is not None and 'Base' not in c]
)
def queryset(self, request):
"""Determines data from what partitions should be shown in django admin"""
qs = super(PartitionableAdmin, self).queryset(request)
if self.partition_show != 'all':
qs = qs.extra(where=self.filter.apply())
return qs
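# Usage sketch (hypothetical model admin): limit the changelist to one set of
# partitions instead of scanning all of them ('all' is the default above; the
# other accepted values depend on the backend's partition filter):
#
#     class LogEntryAdmin(PartitionableAdmin):
#         partition_show = 'current'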
| {
"content_hash": "b76f9335600e30528a6d6b50ca17b82e",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 106,
"avg_line_length": 40.44736842105263,
"alnum_prop": 0.6148340923877684,
"repo_name": "maxtepkeev/django-db-parti",
"id": "b8bf867988cdfc7b2c180919e93859cbf1b45bf1",
"size": "1537",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "dbparti/admin.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "30956"
}
],
"symlink_target": ""
} |
from __future__ import print_function, division
import imgaug as ia
from imgaug import augmenters as iaa
from scipy import misc, ndimage
import numpy as np
from skimage import data
import cv2
TIME_PER_STEP = 10000
def main():
image = data.astronaut()
print("image shape:", image.shape)
print("Press any key or wait %d ms to proceed to the next image." % (TIME_PER_STEP,))
children_all = [
("hflip", iaa.Fliplr(1)),
("add", iaa.Add(50)),
("dropout", iaa.Dropout(0.2)),
("affine", iaa.Affine(rotate=35))
]
channels_all = [
None,
0,
[],
[0],
[0, 1],
[1, 2],
[0, 1, 2]
]
cv2.namedWindow("aug", cv2.WINDOW_NORMAL)
cv2.imshow("aug", image[..., ::-1])
cv2.waitKey(TIME_PER_STEP)
for children_title, children in children_all:
for channels in channels_all:
aug = iaa.WithChannels(channels=channels, children=children)
img_aug = aug.augment_image(image)
print("dtype", img_aug.dtype, "averages", np.average(img_aug, axis=tuple(range(0, img_aug.ndim-1))))
#print("dtype", img_aug.dtype, "averages", img_aug.mean(axis=range(1, img_aug.ndim)))
title = "children=%s | channels=%s" % (children_title, channels)
img_aug = ia.draw_text(img_aug, x=5, y=5, text=title)
cv2.imshow("aug", img_aug[..., ::-1]) # here with rgb2bgr
cv2.waitKey(TIME_PER_STEP)
if __name__ == "__main__":
main()
| {
"content_hash": "660deaf8160d252346b90247085ed68d",
"timestamp": "",
"source": "github",
"line_count": 51,
"max_line_length": 112,
"avg_line_length": 29.764705882352942,
"alnum_prop": 0.5718050065876152,
"repo_name": "nektor211/imgaug",
"id": "8b83e092c1a53371f52224644fce7bec2ba98865",
"size": "1518",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/check_withchannels.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "887186"
}
],
"symlink_target": ""
} |
from django.utils.translation import ugettext_lazy as _ # noqa
from horizon import exceptions
from horizon import forms
from horizon.utils import fields
from horizon.utils import validators
from horizon import workflows
from openstack_dashboard import api
port_validator = validators.validate_port_or_colon_separated_port_range
class AddRuleAction(workflows.Action):
name = forms.CharField(
max_length=80,
label=_("Name"),
required=False)
description = forms.CharField(
max_length=80,
label=_("Description"),
required=False)
protocol = forms.ChoiceField(
label=_("Protocol"),
choices=[('tcp', _('TCP')),
('udp', _('UDP')),
('icmp', _('ICMP')),
('any', _('ANY'))],)
action = forms.ChoiceField(
label=_("Action"),
choices=[('allow', _('ALLOW')),
('deny', _('DENY'))],)
source_ip_address = fields.IPField(
label=_("Source IP Address/Subnet"),
version=fields.IPv4 | fields.IPv6,
required=False, mask=True)
destination_ip_address = fields.IPField(
label=_("Destination IP Address/Subnet"),
version=fields.IPv4 | fields.IPv6,
required=False, mask=True)
source_port = forms.CharField(
max_length=80,
label=_("Source Port/Port Range"),
required=False,
validators=[port_validator])
destination_port = forms.CharField(
max_length=80,
label=_("Destination Port/Port Range"),
required=False,
validators=[port_validator])
shared = forms.BooleanField(
label=_("Shared"), initial=False, required=False)
enabled = forms.BooleanField(
label=_("Enabled"), initial=True, required=False)
def __init__(self, request, *args, **kwargs):
super(AddRuleAction, self).__init__(request, *args, **kwargs)
class Meta:
name = _("AddRule")
permissions = ('openstack.services.network',)
help_text = _("Create a firewall rule.\n\n"
"Protocol and action must be specified. "
"Other fields are optional.")
class AddRuleStep(workflows.Step):
action_class = AddRuleAction
contributes = ("name", "description", "protocol", "action",
"source_ip_address", "source_port",
"destination_ip_address", "destination_port",
"enabled", "shared")
def contribute(self, data, context):
context = super(AddRuleStep, self).contribute(data, context)
if data:
if context['protocol'] == 'any':
del context['protocol']
for field in ['source_port',
'destination_port',
'source_ip_address',
'destination_ip_address']:
if not context[field]:
del context[field]
return context
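# Example (illustrative): if the user picks protocol 'any' and leaves the
# ports and addresses blank, contribute() above drops those keys, so only
# e.g. {'name': 'allow-all', 'description': '', 'action': 'allow',
#       'shared': False, 'enabled': True} reaches the fwaas API call.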
class AddRule(workflows.Workflow):
slug = "addrule"
name = _("Add Rule")
finalize_button_name = _("Add")
success_message = _('Added Rule "%s".')
failure_message = _('Unable to add Rule "%s".')
success_url = "horizon:project:firewalls:index"
# fwaas is designed to support a wide range of vendor
# firewalls. Considering the multitude of vendor firewall
# features in place today, firewall_rule definition can
# involve more complex configuration over time. Hence,
# a workflow instead of a single form is used for
# firewall_rule add to be ready for future extension.
default_steps = (AddRuleStep,)
def format_status_message(self, message):
return message % self.context.get('name')
def handle(self, request, context):
try:
api.fwaas.rule_create(request, **context)
return True
except Exception as e:
msg = self.format_status_message(self.failure_message) + str(e)
exceptions.handle(request, msg)
return False
class SelectRulesAction(workflows.Action):
rule = forms.MultipleChoiceField(
label=_("Rules"),
required=False,
widget=forms.CheckboxSelectMultiple(),
help_text=_("Create a policy with selected rules."))
class Meta:
name = _("Rules")
permissions = ('openstack.services.network',)
help_text = _("Select rules for your policy.")
def populate_rule_choices(self, request, context):
try:
tenant_id = self.request.user.tenant_id
rules = api.fwaas.rule_list(request, tenant_id=tenant_id)
for r in rules:
r.set_id_as_name_if_empty()
rules = sorted(rules,
key=lambda rule: rule.name)
rule_list = [(rule.id, rule.name) for rule in rules
if not rule.firewall_policy_id]
except Exception as e:
rule_list = []
exceptions.handle(request,
_('Unable to retrieve rules (%(error)s).') % {
'error': str(e)})
return rule_list
class SelectRulesStep(workflows.Step):
action_class = SelectRulesAction
template_name = "project/firewalls/_update_rules.html"
contributes = ("firewall_rules",)
def contribute(self, data, context):
if data:
rules = self.workflow.request.POST.getlist("rule")
if rules:
rules = [r for r in rules if r != '']
context['firewall_rules'] = rules
return context
class AddPolicyAction(workflows.Action):
name = forms.CharField(max_length=80,
label=_("Name"),
required=True)
description = forms.CharField(max_length=80,
label=_("Description"),
required=False)
shared = forms.BooleanField(label=_("Shared"),
initial=False,
required=False)
audited = forms.BooleanField(label=_("Audited"),
initial=False,
required=False)
def __init__(self, request, *args, **kwargs):
super(AddPolicyAction, self).__init__(request, *args, **kwargs)
class Meta:
name = _("AddPolicy")
permissions = ('openstack.services.network',)
help_text = _("Create a firewall policy with an ordered list "
"of firewall rules.\n\n"
"A name must be given. Firewall rules are "
"added in the order placed under the Rules tab.")
class AddPolicyStep(workflows.Step):
action_class = AddPolicyAction
contributes = ("name", "description", "shared", "audited")
def contribute(self, data, context):
context = super(AddPolicyStep, self).contribute(data, context)
if data:
return context
class AddPolicy(workflows.Workflow):
slug = "addpolicy"
name = _("Add Policy")
finalize_button_name = _("Add")
success_message = _('Added Policy "%s".')
failure_message = _('Unable to add Policy "%s".')
success_url = "horizon:project:firewalls:index"
default_steps = (AddPolicyStep, SelectRulesStep)
def format_status_message(self, message):
return message % self.context.get('name')
def handle(self, request, context):
try:
api.fwaas.policy_create(request, **context)
return True
except Exception as e:
msg = self.format_status_message(self.failure_message) + str(e)
exceptions.handle(request, msg)
return False
class AddFirewallAction(workflows.Action):
name = forms.CharField(max_length=80,
label=_("Name"),
required=False)
description = forms.CharField(max_length=80,
label=_("Description"),
required=False)
firewall_policy_id = forms.ChoiceField(label=_("Policy"),
required=True)
shared = forms.BooleanField(label=_("Shared"),
initial=False,
required=False)
admin_state_up = forms.BooleanField(label=_("Admin State"),
initial=True,
required=False)
def __init__(self, request, *args, **kwargs):
super(AddFirewallAction, self).__init__(request, *args, **kwargs)
firewall_policy_id_choices = [('', _("Select a Policy"))]
try:
tenant_id = self.request.user.tenant_id
policies = api.fwaas.policy_list(request, tenant_id=tenant_id)
policies = sorted(policies, key=lambda policy: policy.name)
except Exception as e:
exceptions.handle(
request,
_('Unable to retrieve policy list (%(error)s).') % {
'error': str(e)})
policies = []
for p in policies:
p.set_id_as_name_if_empty()
firewall_policy_id_choices.append((p.id, p.name))
self.fields['firewall_policy_id'].choices = firewall_policy_id_choices
# only admin can set 'shared' attribute to True
if not request.user.is_superuser:
self.fields['shared'].widget.attrs['disabled'] = 'disabled'
class Meta:
name = _("AddFirewall")
permissions = ('openstack.services.network',)
help_text = _("Create a firewall based on a policy.\n\n"
"A policy must be selected. "
"Other fields are optional.")
class AddFirewallStep(workflows.Step):
action_class = AddFirewallAction
contributes = ("name", "firewall_policy_id", "description",
"shared", "admin_state_up")
def contribute(self, data, context):
context = super(AddFirewallStep, self).contribute(data, context)
return context
class AddFirewall(workflows.Workflow):
slug = "addfirewall"
name = _("Add Firewall")
finalize_button_name = _("Add")
success_message = _('Added Firewall "%s".')
failure_message = _('Unable to add Firewall "%s".')
success_url = "horizon:project:firewalls:index"
# fwaas is designed to support a wide range of vendor
# firewalls. Considering the multitude of vendor firewall
# features in place today, firewall definition can
# involve more complex configuration over time. Hence,
# a workflow instead of a single form is used for
# firewall_rule add to be ready for future extension.
default_steps = (AddFirewallStep,)
def format_status_message(self, message):
return message % self.context.get('name')
def handle(self, request, context):
try:
api.fwaas.firewall_create(request, **context)
return True
except Exception as e:
msg = self.format_status_message(self.failure_message) + str(e)
exceptions.handle(request, msg)
return False
| {
"content_hash": "92a34439ae42b15b07655003e021b63f",
"timestamp": "",
"source": "github",
"line_count": 299,
"max_line_length": 78,
"avg_line_length": 37.424749163879596,
"alnum_prop": 0.5699731903485254,
"repo_name": "ikargis/horizon_fod",
"id": "d54bec57c292153d982144876bcf967079a63811",
"size": "11898",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "openstack_dashboard/dashboards/project/firewalls/workflows.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "167455"
},
{
"name": "JavaScript",
"bytes": "1099746"
},
{
"name": "Python",
"bytes": "3023860"
},
{
"name": "Shell",
"bytes": "13740"
}
],
"symlink_target": ""
} |
import os
import shutil
from subprocess import Popen
from vex.tests import path_type, str_type
class TempVenv(object):
def __init__(self, parent, name, args):
assert isinstance(parent, path_type)
assert isinstance(name, str_type)
assert os.path.abspath(parent) == parent
self.parent = parent
self.name = name
self.args = args or []
self.path = os.path.join(parent, name.encode("utf-8"))
self.open()
def open(self):
assert isinstance(self.parent, path_type)
assert os.path.exists(self.parent)
args = ["virtualenv", "--quiet", self.path] + self.args
if not os.path.exists(self.path):
process = Popen(args)
process.wait()
assert process.returncode == 0
assert os.path.exists(self.path)
bin_path = os.path.join(self.path, b"bin")
assert os.path.exists(bin_path)
def close(self):
if os.path.exists(self.path):
shutil.rmtree(self.path)
assert not os.path.exists(self.path)
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
self.close()
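# Usage sketch: as a context manager, the virtualenv is created on entry and
# removed again on exit (the bytes parent path mirrors the path_type checks):
#
#     with TempVenv(b"/tmp", "scratch-venv", []) as venv:
#         pass  # venv.path points at the freshly built virtualenv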
| {
"content_hash": "de2f77c9a2854f4a2120687268d6eac0",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 63,
"avg_line_length": 30.53846153846154,
"alnum_prop": 0.5969773299748111,
"repo_name": "sashahart/vex",
"id": "70d2860dfc3263c6252364784f407b3a3af43513",
"size": "1191",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "vex/tests/tempvenv.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "74025"
}
],
"symlink_target": ""
} |
from matplotlib import pyplot as plt
from ..core.status import Status
class PlotBase(object):
def __init__(self, analysis):
self.analysis = analysis
def plot_by(self, by, variable, df, gridLines = False):
        ax = df.plot(kind='scatter', x=by, y=variable, title=variable + " By " + by, alpha=0.6, legend=None)
ax.set_xlim([df[by].min()-1,df[by].max()+1])
ax.set_xlabel(by)
ax.set_ylabel(variable)
if gridLines:
ax.grid(True)
plt.show() | {
"content_hash": "159001302c9de407d14af78ad9e47ba7",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 101,
"avg_line_length": 26.15,
"alnum_prop": 0.5812619502868069,
"repo_name": "peterdougstuart/PCWG",
"id": "f27787631f793d0cd426970efab262f90c7690f7",
"size": "524",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "pcwg/visualisation/plot_base.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "2475"
},
{
"name": "Python",
"bytes": "921759"
}
],
"symlink_target": ""
} |
import os
import itertools
from setuptools import setup, find_packages
with open(os.path.join(os.path.dirname(__file__), "sentinels", "__version__.py")) as version_file:
exec(version_file.read())
setup(name="sentinels",
classifiers = [
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
],
description="Various objects to denote special meanings in python",
license="BSD",
author="Rotem Yaari",
author_email="[email protected]",
version=__version__,
packages=find_packages(exclude=["tests"]),
install_requires=[],
scripts=[],
namespace_packages=[]
)
| {
"content_hash": "2334f2b447b9dafa39aa3f874388d580",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 98,
"avg_line_length": 34.2962962962963,
"alnum_prop": 0.5950323974082073,
"repo_name": "vmalloc/sentinels",
"id": "9abc46039f6fc78f5de2d4f7985b3c452cb82178",
"size": "927",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "2539"
}
],
"symlink_target": ""
} |
"""The test for the sql sensor platform."""
import unittest
import pytest
import voluptuous as vol
from homeassistant.components.sql.sensor import validate_sql_select
from homeassistant.const import STATE_UNKNOWN
from homeassistant.setup import setup_component
from tests.common import get_test_home_assistant
class TestSQLSensor(unittest.TestCase):
"""Test the SQL sensor."""
def setUp(self):
"""Set up things to be run when tests are started."""
self.hass = get_test_home_assistant()
def teardown_method(self, method):
"""Stop everything that was started."""
self.hass.stop()
def test_query(self):
"""Test the SQL sensor."""
config = {
"sensor": {
"platform": "sql",
"db_url": "sqlite://",
"queries": [
{
"name": "count_tables",
"query": "SELECT 5 as value",
"column": "value",
}
],
}
}
assert setup_component(self.hass, "sensor", config)
self.hass.block_till_done()
state = self.hass.states.get("sensor.count_tables")
assert state.state == "5"
assert state.attributes["value"] == 5
def test_invalid_query(self):
"""Test the SQL sensor for invalid queries."""
with pytest.raises(vol.Invalid):
validate_sql_select("DROP TABLE *")
config = {
"sensor": {
"platform": "sql",
"db_url": "sqlite://",
"queries": [
{
"name": "count_tables",
"query": "SELECT * value FROM sqlite_master;",
"column": "value",
}
],
}
}
assert setup_component(self.hass, "sensor", config)
self.hass.block_till_done()
state = self.hass.states.get("sensor.count_tables")
assert state.state == STATE_UNKNOWN
| {
"content_hash": "ae3315118f981e76bb92a20351f5b232",
"timestamp": "",
"source": "github",
"line_count": 71,
"max_line_length": 70,
"avg_line_length": 29.35211267605634,
"alnum_prop": 0.5038387715930902,
"repo_name": "mKeRix/home-assistant",
"id": "8b8bca5e37c9b994ffc29f26bb6f591052fadf13",
"size": "2084",
"binary": false,
"copies": "6",
"ref": "refs/heads/dev",
"path": "tests/components/sql/test_sensor.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1466026"
},
{
"name": "Python",
"bytes": "4770710"
},
{
"name": "Ruby",
"bytes": "379"
},
{
"name": "Shell",
"bytes": "12407"
}
],
"symlink_target": ""
} |
import time # For sleeping between sensoract transfers
import sys, os # For importing from project directory
from DataSink import DataSink
from SensorAct.SensorActUploader import SensorActUploader
from SensorAct.SensorActDeviceRegisterer import SensorActDeviceRegisterer
import json
class SensorActSink(DataSink):
""" Initializes SensorActSink with config, which is a
dictionary constructed from config.json and contains
info about SensorAct and the rest of the
sensors/sinks. """
def __init__(self, config, queue, interval):
super(SensorActSink, self).__init__(config, queue, interval)
self.sensorActUploader = SensorActUploader(config["IP"], config["PORT"])
self.registerer = SensorActDeviceRegisterer(config["IP"],
config["PORT"],
config["API_KEY"])
""" Functions child classes must implement """
def registerDevice(self, devicename, config):
""" Registers a device to the service with the device's name (i.e. Eaton). """
# This is a hack to make several names work
dev_name = ''.join(char for char in devicename if not char.isdigit())
self.registerer.registerDevice(dev_name, config)
#pass uncomment if you want to stop registering SensorAct devices
def getSensorName(self, channel_name):
sensor_name = ""
if "Voltage" in channel_name:
sensor_name = "Voltage"
elif "Current" in channel_name:
sensor_name = "Current"
elif "PowerFactor" in channel_name:
sensor_name = "PowerFactor"
elif "VARs" in channel_name:
sensor_name = "VARs"
elif "VAs" in channel_name:
sensor_name = "VAs"
elif "Power" in channel_name:
sensor_name = "Power"
else:
raise NotImplementedError("No such sensor name for channel " + channel_name)
return sensor_name
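    # Ordering note (illustrative): "PowerFactor" is tested before "Power"
    # above because every name containing "PowerFactor" also contains "Power";
    # e.g. "PowerFactorA" -> "PowerFactor" while "PowerA" -> "Power".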
def update(self, data):
""" Updates SensorAct with the data given """
messages = []
device_name = data["devicename"]
formatted_data_messages = []
for sensor_name, channels in data["channels"].iteritems():
message = {}
formatted_data = {}
formatted_data = {"dname": device_name,
"sname": sensor_name,
"timestamp": data["timestamp"],
}
channel_list = []
for channel in channels["measurements"]:
channel_data = {"cname": channel[0],
"unit": channels["units"],
"readings": [channel[1]]
}
channel_list.append(channel_data)
formatted_data["channels"] = channel_list
message = {"secretkey": self.config["API_KEY"], "data": formatted_data }
formatted_data_messages.append(json.dumps(message))
for message in formatted_data_messages:
self.sensorActUploader.send(message)
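# Example message (illustrative values) as assembled by update() above:
#   {"secretkey": "<API_KEY>",
#    "data": {"dname": "Eaton", "sname": "Voltage", "timestamp": 1300000000,
#             "channels": [{"cname": "VoltageA", "unit": "V",
#                           "readings": [120.1]}]}}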
| {
"content_hash": "8857a6a4737ebd109657738ac6cc94f6",
"timestamp": "",
"source": "github",
"line_count": 81,
"max_line_length": 88,
"avg_line_length": 39.74074074074074,
"alnum_prop": 0.5632183908045977,
"repo_name": "nesl/LabSense",
"id": "28cbce64afe32fb5e42fb80c327c793ae11b4626",
"size": "3219",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "DataSinks/SensorActSink.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "62769"
},
{
"name": "C++",
"bytes": "35543"
},
{
"name": "JavaScript",
"bytes": "290755"
},
{
"name": "Lua",
"bytes": "2066"
},
{
"name": "Python",
"bytes": "173208"
},
{
"name": "Shell",
"bytes": "4268"
}
],
"symlink_target": ""
} |
import itertools
from copy import deepcopy
from django.utils.hashcompat import md5_constructor
from django.utils.safestring import mark_safe
from mypage.widgets.models import Widget, RenderedWidget
from mypage.widgets.models import get_object
from mypage.widgets.templatetags.splitobjectlist import split_list
from mypage.pages.conf import settings
class Layout(dict):
class WidgetInLayoutDoesNotExist(Exception): pass
def __init__(self, page, *args, **kwargs):
super(Layout, self).__init__(*args, **kwargs)
self.changed = False
self.page = page
self['containers'] = [Container(self, c) for c in self.setdefault('containers', [])]
self['static_containers'] = [StaticContainer(self, c) for c in self.setdefault('static_containers', [])]
#self['template_config'] = TemplateConfig(self, self.setdefault('template_config', {}))
# TODO after page.skin mimgration raplace line below by the line above
self['template_config'] = TemplateConfig(self, self.setdefault('template_config', self.auto_migrate_tpl_config()))
def auto_migrate_tpl_config(self):
"""
Temporary automigration method migrates skin option from page.skin field
"""
# TODO remove this method def after pake.skin migration
if not 'template_config' in self:
self['template_config'] = dict(skin=self.page.skin or 'default')
@property
def containers(self):
return self['containers']
@property
def static_containers(self):
return self['static_containers']
@property
def template_config(self):
return self['template_config']
@property
def dynamic_widgets(self):
return list(itertools.chain(*self.containers))
@property
def static_widgets(self):
return list(itertools.chain(*self.static_containers))
@property
def widgets(self):
return self.dynamic_widgets + self.static_widgets
def save(self):
self.page.layout = self
self.changed = True
def render(self, context, settab=None):
"""
Returns layout containers filled with rendered widgets
"""
def checktab(settab, widget_ct_id, widget_id):
if settab and (int(settab['widget_ct_id']), int(settab['widget_id'])) == (widget_ct_id, widget_id):
return settab['tab']
return None
layout = {}
for k in ('static_containers', 'containers',):
layout[k] = [ [wil.render(context, tab=checktab(settab, wil.widget_ct_id, wil.widget_id)) for wil in c] for c in self[k] ]
return layout
def insert_widget(self, widget, container=0, position=None, config=None, state=None):
"""
Inserts widget to given position
If allready assigned, it does nothing.
"""
if not self.contains_widget_by_instance(widget):
container = int(container) # FIXME IMHO may be in view function somewhere :)
container = self.containers[container] # may raise Index/ValueError
container.insert_widget(widget, position, config, state)
def contains_widget(self, widget_ct_id, widget_id):
try:
self.get_widget(widget_ct_id, widget_id)
return True
except self.WidgetInLayoutDoesNotExist, e:
return False
def contains_widget_by_instance(self, widget):
return self.contains_widget(widget.content_type_id, widget.pk)
def remove_widget(self, widget):
"""
Removes all found widget's wils
"""
for container in self.containers:
container.remove_widget(widget)
def get_widget_by_instance(self, widget):
"""
Returns WIL by given widget instance
"""
return self.get_widget(widget.content_type_id, widget.pk)
def get_widget(self, widget_ct_id=None, widget_id=None):
"""
Returns WIL by given keys
"""
for wil in self.widgets:
if (wil.widget_ct_id, wil.widget_id) == (widget_ct_id, widget_id):
return wil
        raise self.WidgetInLayoutDoesNotExist("WidgetInLayout with given keys does not exist!")
def configure_widget(self, widget_ct_id, widget_id, data):
"""
Configures WIL found in containers
"""
wil = self.get_widget(widget_ct_id, widget_id)
return wil.configure(data)
def configure_widget_by_instance(self, widget):
return self.configure_widget(widget.content_type_id, widget.pk)
def arrange_containers(self, cols):
"""
Splits widgets to given number of containers
"""
self['containers'] = split_list(self.dynamic_widgets, cols)
self.save()
def arrange_widgets(self, containers):
"""
        Updates widget positions in containers
Widgets can be placed and removed via this method.
"""
new_containers = []
for container in containers:
new_container = Container(self, [])
for widget_ct_id, widget_id in container:
try:
wil = self.get_widget(widget_ct_id, widget_id)
except self.WidgetInLayoutDoesNotExist, e:
widget = get_object(widget_ct_id, widget_id) # existence check
wil = WidgetInLayout.factory(new_container, widget)
new_container.append(wil)
new_containers.append(new_container)
self['containers'] = new_containers
self.save()
return self.containers
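    # Example (illustrative): arrange_widgets([[(ct_a, id_a)], [(ct_b, id_b)]])
    # rebuilds two containers; (ct, id) pairs not already in the layout are
    # fetched and wrapped as new WILs, and widgets omitted from the nested
    # lists are dropped from the layout.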
def clone(self):
return deepcopy(self)
class Container(list):
def __init__(self, layout, widgets):
self.layout = layout
return super(Container, self).__init__([WidgetInLayout(self, w) for w in widgets])
def save(self):
self.layout.save()
def insert_widget(self, widget, position=None, config=None, state=None):
wil = WidgetInLayout.factory(self, widget, config, state)
if position is not None:
self.insert(position, wil)
else:
self.append(wil)
self.save()
def remove_widget(self, widget):
for wil in self:
if (wil.widget_ct_id, wil.widget_id) == (widget.content_type_id, widget.pk):
self.remove(wil)
self.save()
class StaticContainer(Container):
pass
class WidgetInLayout(dict):
STATE_NORMAL = 0
STATE_NEW = 2
STATE_MINIMIZED = 1
def __init__(self, container, *args, **kwargs):
self.container = container
return super(WidgetInLayout, self).__init__(*args, **kwargs)
@property
def widget_ct_id(self):
return self['widget_ct_id']
@property
def widget_id(self):
return self['widget_id']
def config_get(self):
return self['config']
def config_set(self, value):
self['config'] = value
self.save()
config = property(config_get, config_set)
def state_get(self):
return self['state']
def state_set(self, value):
self['state'] = value
self.save()
state = property(state_get, state_set)
@property
def widget(self):
return get_object(self.widget_ct_id, self.widget_id)
def render(self, context={}, allow_fetch=False, tab=None):
if self.state == self.STATE_NEW:
self.state = self.STATE_NORMAL
rendered_widget = self.widget.rendered_widget_class(self.widget, self.state)
return mark_safe(rendered_widget.render(self.config, context, allow_fetch, tab=tab))
def configure(self, data, widget_config_function=None):
if widget_config_function is None:
widget_config_function = self.widget.get_widget_in_page_configuration
if self.config in ('', None):
self.config = {}
self.config = widget_config_function(self.config or {}, data)
self.save()
def save(self):
self.container.save()
@classmethod
def factory(cls, container, widget, config=None, state=None):
if state is None:
state = cls.STATE_NEW
return cls(container, dict(
widget_ct_id = widget.content_type_id,
widget_id = widget.pk,
config = config or {},
state = state
))
class TemplateConfig(dict):
options = settings.PAGE_TEMPLATE_OPTIONS
def __init__(self, layout, *args, **kwargs):
self.layout = layout
super(TemplateConfig, self).__init__(*args, **kwargs)
def save(self):
self.layout.save()
def as_hash(self):
return md5_constructor(self.__str__()).hexdigest()
def __getitem__(self, key):
try:
return super(TemplateConfig, self).__getitem__(key)
except KeyError, e:
return self.get_default(key)
@classmethod
def get_default(cls, key):
try:
return cls.options[key][1]
except KeyError, e:
from django.core.exceptions import ImproperlyConfigured
raise ImproperlyConfigured("Template option \"%s\" is not defined. Check PAGE_TEMPLATE_OPTIONS in your settings (Format: PAGE_TEMPLATE_OPTIONS = {<option_name>: (<choice_list>, <default_value>)})" % key)
| {
"content_hash": "49fe3511ece3a19757a5f3a546067b56",
"timestamp": "",
"source": "github",
"line_count": 279,
"max_line_length": 215,
"avg_line_length": 33.27956989247312,
"alnum_prop": 0.6150780829294561,
"repo_name": "ella/mypage",
"id": "cc6d33da13e690666332dac12cc6908b0b7e160a",
"size": "9285",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mypage/pages/layout.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "232497"
},
{
"name": "Shell",
"bytes": "3912"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
('Movies', '0002_auto_20161108_0054'),
]
operations = [
migrations.AddField(
model_name='movie',
name='datetime_added',
field=models.DateTimeField(auto_now_add=True, default=django.utils.timezone.now),
preserve_default=False,
),
migrations.AddField(
model_name='movie',
name='format',
field=models.CharField(choices=[('BRD', 'BRD'), ('DVD', 'DVD'), ('VHS', 'VHS')], default='BRD', max_length=3),
),
migrations.AlterField(
model_name='movie',
name='runtime',
field=models.PositiveSmallIntegerField(),
),
migrations.AlterField(
model_name='movie',
name='title',
field=models.CharField(max_length=250, unique_for_year='release_date'),
),
]
| {
"content_hash": "25fd678bda8ec3b44d47146a6d32c489",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 122,
"avg_line_length": 30.057142857142857,
"alnum_prop": 0.564638783269962,
"repo_name": "odty101/MediaCollector",
"id": "27ba20f254db637cbff7a814d2cc7906e0dce1ac",
"size": "1125",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "MediaCollector/Movies/migrations/0003_auto_20161108_0523.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "155"
},
{
"name": "HTML",
"bytes": "7556"
},
{
"name": "JavaScript",
"bytes": "484"
},
{
"name": "Python",
"bytes": "9957"
}
],
"symlink_target": ""
} |
from django.core.exceptions import PermissionDenied
from django.utils.encoding import force_text
from django.utils.html import escape
class LookupChannel(object):
"""
Subclass this, setting the model and implementing methods to taste.
Attributes:
model (Model): The Django Model that this lookup channel will search for.
plugin_options (dict): Options passed to jQuery UI plugin that are specific to this channel.
min_length (int): Minimum number of characters user types before a search is initiated.
This is passed to the jQuery plugin_options.
It is used in jQuery's UI when filtering results from its own cache.
It is also used in the django view to prevent expensive database queries.
Large datasets can choke if they search too often with small queries.
Better to demand at least 2 or 3 characters.
"""
model = None
plugin_options = {}
min_length = 1
def get_query(self, q, request):
"""
Return a QuerySet searching for the query string `q`.
Note that you may return any iterable so you can return a list or even use yield and turn this
method into a generator.
Args:
q (str, unicode): The query string to search for.
request (Request): This can be used to customize the search by User or to use additional GET variables.
Returns:
(QuerySet, list, generator): iterable of related_models
"""
kwargs = {"%s__icontains" % self.search_field: q}
return self.model.objects.filter(**kwargs).order_by(self.search_field)
def get_result(self, obj):
"""The text result of autocompleting the entered query.
For a partial string that the user typed in, each matched result is here converted to the fully completed text.
This is currently displayed only for a moment in the text field after the user has selected the item.
Then the item is displayed in the item_display deck and the text field is cleared.
Args:
obj (Model):
Returns:
str: The object as string
"""
return escape(force_text(obj))
def format_match(self, obj):
"""(HTML) Format item for displaying in the dropdown.
Args:
obj (Model):
Returns:
str: formatted string, may contain HTML.
"""
return escape(force_text(obj))
def format_item_display(self, obj):
""" (HTML) format item for displaying item in the selected deck area.
Args:
obj (Model):
Returns:
str: formatted string, may contain HTML.
"""
return escape(force_text(obj))
def get_objects(self, ids):
"""This is used to retrieve the currently selected objects for either ManyToMany or ForeignKey.
Note that the order of the ids supplied for ManyToMany fields is dependent on how the
objects manager fetches it.
        i.e. what is returned by `YourModel.{fieldname}_set.all()`
        In most situations (especially Postgres) this order is indeterminate -- not the order that you originally
added them in the interface.
See :doc:`/Ordered-ManyToMany` for a solution to this.
Args:
ids (list): list of primary keys
Returns:
list: list of Model objects
"""
# return objects in the same order as passed in here
pk_type = self.model._meta.pk.to_python
ids = [pk_type(pk) for pk in ids]
things = self.model.objects.in_bulk(ids)
return [things[aid] for aid in ids if aid in things]
def can_add(self, user, other_model):
"""Check if the user has permission to add a ForeignKey or M2M model.
This enables the green popup + on the widget.
        The default implementation is the standard Django permission check.
Args:
user (User)
other_model (Model): the ForeignKey or M2M model to check if the User can add.
Returns:
bool
"""
from django.contrib.contenttypes.models import ContentType
ctype = ContentType.objects.get_for_model(other_model)
return user.has_perm("%s.add_%s" % (ctype.app_label, ctype.model))
def check_auth(self, request):
"""By default only request.user.is_staff have access.
This ensures that nobody can get your data by simply knowing the lookup URL.
This is called from the ajax_lookup view.
        Public-facing forms (outside of the Admin) should override this to allow
non-staff to use this LookupChannel.
Args:
request (Request)
Raises:
PermissionDenied
"""
if not request.user.is_staff:
raise PermissionDenied
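# Editor's sketch -- NOT part of the original module: a minimal concrete channel
# showing the attributes a subclass typically sets. The model reference, the
# 'name' search field and the relaxed check_auth() are illustrative assumptions.
class ExamplePersonLookup(LookupChannel):
    model = None  # hypothetical: set to a concrete Django model, e.g. myapp.models.Person
    search_field = 'name'  # model field consulted by the default get_query() above
    min_length = 2  # demand at least 2 characters before querying
    def check_auth(self, request):
        # Example for a public-facing form: allow any authenticated user
        # instead of staff only (an assumption, not the library default)
        if not request.user.is_authenticated():
            raise PermissionDenied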
| {
"content_hash": "8666d6e5d5b626d1147d3db8969ac69a",
"timestamp": "",
"source": "github",
"line_count": 134,
"max_line_length": 119,
"avg_line_length": 36.26119402985075,
"alnum_prop": 0.6342868903066474,
"repo_name": "roberzguerra/rover",
"id": "11adc72e65e57aa9533718d14f30df6a66449597",
"size": "4859",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "ajax_select/lookup_channel.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "716090"
},
{
"name": "HTML",
"bytes": "268370"
},
{
"name": "JavaScript",
"bytes": "34275"
},
{
"name": "Python",
"bytes": "294021"
},
{
"name": "Shell",
"bytes": "1417"
}
],
"symlink_target": ""
} |
import ast
import sys
import os
####################################################################################################################################################################################################################################
####################################################################################################### PRE-DEFINED CONSTANTS ######################################################################################################
####################################################################################################################################################################################################################################
# Possible characters to send to the maze application
# Any other will be ignored
# Do not edit this code
UP = 'U'
DOWN = 'D'
LEFT = 'L'
RIGHT = 'R'
####################################################################################################################################################################################################################################
# Name of your team
# It will be displayed in the maze
# You have to edit this code
TEAM_NAME = "closest"
####################################################################################################################################################################################################################################
########################################################################################################## YOUR VARIABLES ##########################################################################################################
####################################################################################################################################################################################################################################
# Stores all the moves in a list to restitute them one by one
allMoves = [UP, UP, UP, RIGHT, UP, UP, UP, RIGHT, RIGHT, RIGHT, RIGHT, RIGHT, RIGHT, UP, UP, RIGHT]
####################################################################################################################################################################################################################################
####################################################################################################### PRE-DEFINED FUNCTIONS ######################################################################################################
####################################################################################################################################################################################################################################
# Writes a message to the shell
# Use for debugging your program
# Channels stdout and stdin are captured to enable communication with the maze
# Do not edit this code
def debug (text) :
# Writes to the stderr channel
sys.stderr.write(str(text) + "\n")
sys.stderr.flush()
####################################################################################################################################################################################################################################
# Reads one line of information sent by the maze application
# This function is blocking, and will wait for a line to terminate
# The received information is automatically converted to the correct type
# Do not edit this code
def readFromPipe () :
# Reads from the stdin channel and returns the structure associated to the string
try :
text = sys.stdin.readline()
return ast.literal_eval(text.strip())
except :
os._exit(-1)
####################################################################################################################################################################################################################################
# Sends the text to the maze application
# Do not edit this code
def writeToPipe (text) :
# Writes to the stdout channel
sys.stdout.write(text)
sys.stdout.flush()
####################################################################################################################################################################################################################################
# Reads the initial maze information
# The function processes the text and returns the associated variables
# The dimensions of the maze are positive integers
# Maze map is a dictionary associating to a location its adjacent locations and the associated weights
# The preparation time gives the time during which 'initializationCode' can make computations before the game starts
# The turn time gives the time during which 'determineNextMove' can make computations before returning a decision
# Player locations are tuples (line, column)
# Coins are given as a list of locations where they appear
# A boolean indicates if the game is over
# Do not edit this code
def processInitialInformation () :
# We read from the pipe
data = readFromPipe()
return (data['mazeWidth'], data['mazeHeight'], data['mazeMap'], data['preparationTime'], data['turnTime'], data['playerLocation'], data['opponentLocation'], data['coins'], data['gameIsOver'])
####################################################################################################################################################################################################################################
# Reads the information after each player moved
# The maze map and allowed times are no longer provided since they do not change
# Do not edit this code
def processNextInformation () :
# We read from the pipe
data = readFromPipe()
return (data['playerLocation'], data['opponentLocation'], data['coins'], data['gameIsOver'])
####################################################################################################################################################################################################################################
########################################################################################################## YOUR FUNCTIONS ##########################################################################################################
####################################################################################################################################################################################################################################
# This is where you should write your code to do things during the initialization delay
# This function should not return anything, but should be used for a short preprocessing
# This function takes as parameters the dimensions and map of the maze, the time it is allowed for computing, the players locations in the maze and the remaining coins locations
# Make sure to have a safety margin for the time to include processing times (communication etc.)
def initializationCode (mazeWidth, mazeHeight, mazeMap, timeAllowed, playerLocation, opponentLocation, coins) :
# Nothing to do
pass
####################################################################################################################################################################################################################################
# This is where you should write your code to determine the next direction
# This function should return one of the directions defined in the CONSTANTS section
# This function takes as parameters the dimensions and map of the maze, the time it is allowed for computing, the players locations in the maze and the remaining coins locations
# Make sure to have a safety margin for the time to include processing times (communication etc.)
def determineNextMove (mazeWidth, mazeHeight, mazeMap, timeAllowed, playerLocation, opponentLocation, coins) :
# We return the next move as described by the list
global allMoves
nextMove = allMoves[0]
allMoves = allMoves[1:]
return nextMove
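####################################################################################################################################################################################################################################
# Editor's sketch -- NOT part of the original strategy
# Instead of replaying a fixed list of moves, the next move could be computed with a breadth-first search toward the closest coin
# Assumptions (not confirmed by this file): mazeMap maps each (line, column) location to a dict {adjacentLocation: weight},
# and moving UP decreases the line index
def bfsFirstMove (mazeMap, start, goals) :
    # Returns the first move (UP/DOWN/LEFT/RIGHT) along a shortest unweighted path
    # from 'start' to the nearest location in 'goals', or None if no goal is reachable
    import collections
    parents = {start: None}
    queue = collections.deque([start])
    target = None
    while queue :
        location = queue.popleft()
        if location in goals :
            target = location
            break
        for neighbor in mazeMap[location] :
            if neighbor not in parents :
                parents[neighbor] = location
                queue.append(neighbor)
    if target is None or target == start :
        return None
    # Walk back through the parents to recover the first step taken from 'start'
    while parents[target] != start :
        target = parents[target]
    delta = (target[0] - start[0], target[1] - start[1])
    return {(-1, 0): UP, (1, 0): DOWN, (0, -1): LEFT, (0, 1): RIGHT}[delta]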
####################################################################################################################################################################################################################################
############################################################################################################# MAIN LOOP ############################################################################################################
####################################################################################################################################################################################################################################
# This is the entry point when executing this file
# We first send the name of the team to the maze
# The first message we receive from the maze includes its dimensions and map, the times allowed to the various steps, and the players and coins locations
# Then, at every loop iteration, we get the maze status and determine a move
# Do not edit this code
if __name__ == "__main__" :
# We send the team name
writeToPipe(TEAM_NAME + "\n")
# We process the initial information and have a delay to compute things using it
(mazeWidth, mazeHeight, mazeMap, preparationTime, turnTime, playerLocation, opponentLocation, coins, gameIsOver) = processInitialInformation()
initializationCode(mazeWidth, mazeHeight, mazeMap, preparationTime, playerLocation, opponentLocation, coins)
# We decide how to move and wait for the next step
while not gameIsOver :
(playerLocation, opponentLocation, coins, gameIsOver) = processNextInformation()
if gameIsOver :
break
nextMove = determineNextMove(mazeWidth, mazeHeight, mazeMap, turnTime, playerLocation, opponentLocation, coins)
writeToPipe(nextMove)
####################################################################################################################################################################################################################################
#################################################################################################################################################################################################################################### | {
"content_hash": "804e02238c9ba072f44144c0caf8d086",
"timestamp": "",
"source": "github",
"line_count": 164,
"max_line_length": 228,
"avg_line_length": 63.451219512195124,
"alnum_prop": 0.37353449932731114,
"repo_name": "dimtion/jml",
"id": "81327bac3dc0c1c3e4b3afcda1cd593ef13e5982",
"size": "11241",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "outputFiles/statistics/archives/ourIA/closest.py/0.8/5/player1.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "1654391"
},
{
"name": "TeX",
"bytes": "1439179"
}
],
"symlink_target": ""
} |
"""Our models structure."""
from sqlalchemy import (
Column,
Index,
Integer,
Unicode,
Float,
ForeignKey
)
from .meta import Base
class User(Base):
"""Model for our users."""
__tablename__ = 'users'
id = Column(Integer, primary_key=True)
username = Column(Unicode, unique=True)
password = Column(Unicode)
class Sentiments(Base):
"""Mode for storing sentiments per user."""
__tablename__ = 'sentiments'
id = Column(Integer, primary_key=True)
body = Column(Unicode)
positive_sentiment = Column(Float)
negative_sentiment = Column(Float)
user_id = Column(Integer, ForeignKey('users.id'), nullable=False)
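# Editor's sketch -- NOT part of the original module: exercising the models
# against a throwaway in-memory SQLite engine; all values are illustrative.
def _demo_usage():
    from sqlalchemy import create_engine
    from sqlalchemy.orm import sessionmaker
    engine = create_engine('sqlite://')  # in-memory database for illustration
    Base.metadata.create_all(engine)
    session = sessionmaker(bind=engine)()
    user = User(username=u'demo', password=u'hashed-secret')
    session.add(user)
    session.flush()  # assigns user.id
    session.add(Sentiments(body=u'Great day!', positive_sentiment=0.9,
                           negative_sentiment=0.1, user_id=user.id))
    session.commit()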
| {
"content_hash": "281ef712dc6646485311651c9a408aed",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 69,
"avg_line_length": 21.21875,
"alnum_prop": 0.6480117820324006,
"repo_name": "Bonanashelby/MoodBot",
"id": "5d4cea6f8d307804df78d467e5b5ca4266f611e7",
"size": "679",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mood_bot/mood_bot/models/mymodel.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "135"
},
{
"name": "Python",
"bytes": "26105"
},
{
"name": "Shell",
"bytes": "99"
}
],
"symlink_target": ""
} |
from pandac.PandaModules import Vec4, BitMask32, Quat, Point3, NodePath
from pandac.PandaModules import OdePlaneGeom, OdeBody, OdeSphereGeom, OdeMass, OdeUtil, OdeBoxGeom
from direct.directnotify import DirectNotifyGlobal
from toontown.minigame import DistributedMinigamePhysicsWorld
from toontown.minigame import IceGameGlobals
from toontown.golf import BuildGeometry
MetersToFeet = 3.2808399
FeetToMeters = 1.0 / MetersToFeet
class DistributedIceWorld(DistributedMinigamePhysicsWorld.DistributedMinigamePhysicsWorld):
notify = DirectNotifyGlobal.directNotify.newCategory('DistributedMinigamePhysicsWorld')
floorCollideId = 1
floorMask = BitMask32(floorCollideId)
wallCollideId = 1 << 1
wallMask = BitMask32(wallCollideId)
obstacleCollideId = 1 << 2
obstacleMask = BitMask32(obstacleCollideId)
    tireCollideIds = [1 << 8,
                      1 << 9,
                      1 << 10,
                      1 << 11]
tire0Mask = BitMask32(tireCollideIds[0])
tire1Mask = BitMask32(tireCollideIds[1])
tire2Mask = BitMask32(tireCollideIds[2])
tire3Mask = BitMask32(tireCollideIds[3])
allTiresMask = tire0Mask | tire1Mask | tire2Mask | tire3Mask
    tireMasks = (tire0Mask,
                 tire1Mask,
                 tire2Mask,
                 tire3Mask)
tireDensity = 1
tireSurfaceType = 0
iceSurfaceType = 1
fenceSurfaceType = 2
def __init__(self, cr):
DistributedMinigamePhysicsWorld.DistributedMinigamePhysicsWorld.__init__(self, cr)
def delete(self):
DistributedMinigamePhysicsWorld.DistributedMinigamePhysicsWorld.delete(self)
if hasattr(self, 'floor'):
self.floor = None
return
def setupSimulation(self):
DistributedMinigamePhysicsWorld.DistributedMinigamePhysicsWorld.setupSimulation(self)
self.world.setGravity(0, 0, -32.174)
self.world.setAutoDisableFlag(1)
self.world.setAutoDisableLinearThreshold(0.5 * MetersToFeet)
self.world.setAutoDisableAngularThreshold(OdeUtil.getInfinity())
self.world.setAutoDisableSteps(10)
self.world.setCfm(1e-05 * MetersToFeet)
self.world.initSurfaceTable(3)
self.world.setSurfaceEntry(0, 1, 0.2, 0, 0, 0, 0, 0, 0.1)
self.world.setSurfaceEntry(0, 0, 0.1, 0.9, 0.1, 0, 0, 0, 0)
self.world.setSurfaceEntry(0, 2, 0.9, 0.9, 0.1, 0, 0, 0, 0)
self.floor = OdePlaneGeom(self.space, Vec4(0.0, 0.0, 1.0, -20.0))
self.floor.setCollideBits(self.allTiresMask)
self.floor.setCategoryBits(self.floorMask)
self.westWall = OdePlaneGeom(self.space, Vec4(1.0, 0.0, 0.0, IceGameGlobals.MinWall[0]))
self.westWall.setCollideBits(self.allTiresMask)
self.westWall.setCategoryBits(self.wallMask)
self.space.setSurfaceType(self.westWall, self.fenceSurfaceType)
self.space.setCollideId(self.westWall, self.wallCollideId)
self.eastWall = OdePlaneGeom(self.space, Vec4(-1.0, 0.0, 0.0, -IceGameGlobals.MaxWall[0]))
self.eastWall.setCollideBits(self.allTiresMask)
self.eastWall.setCategoryBits(self.wallMask)
self.space.setSurfaceType(self.eastWall, self.fenceSurfaceType)
self.space.setCollideId(self.eastWall, self.wallCollideId)
self.southWall = OdePlaneGeom(self.space, Vec4(0.0, 1.0, 0.0, IceGameGlobals.MinWall[1]))
self.southWall.setCollideBits(self.allTiresMask)
self.southWall.setCategoryBits(self.wallMask)
self.space.setSurfaceType(self.southWall, self.fenceSurfaceType)
self.space.setCollideId(self.southWall, self.wallCollideId)
self.northWall = OdePlaneGeom(self.space, Vec4(0.0, -1.0, 0.0, -IceGameGlobals.MaxWall[1]))
self.northWall.setCollideBits(self.allTiresMask)
self.northWall.setCategoryBits(self.wallMask)
self.space.setSurfaceType(self.northWall, self.fenceSurfaceType)
self.space.setCollideId(self.northWall, self.wallCollideId)
self.floorTemp = OdePlaneGeom(self.space, Vec4(0.0, 0.0, 1.0, 0.0))
self.floorTemp.setCollideBits(self.allTiresMask)
self.floorTemp.setCategoryBits(self.floorMask)
self.space.setSurfaceType(self.floorTemp, self.iceSurfaceType)
self.space.setCollideId(self.floorTemp, self.floorCollideId)
self.space.setAutoCollideWorld(self.world)
self.space.setAutoCollideJointGroup(self.contactgroup)
self.totalPhysicsSteps = 0
def createTire(self, tireIndex):
if tireIndex < 0 or tireIndex >= len(self.tireMasks):
self.notify.error('invalid tireIndex %s' % tireIndex)
self.notify.debug('create tireindex %s' % tireIndex)
zOffset = 0
body = OdeBody(self.world)
mass = OdeMass()
mass.setSphere(self.tireDensity, IceGameGlobals.TireRadius)
body.setMass(mass)
body.setPosition(IceGameGlobals.StartingPositions[tireIndex][0], IceGameGlobals.StartingPositions[tireIndex][1], IceGameGlobals.StartingPositions[tireIndex][2])
body.setAutoDisableDefaults()
geom = OdeSphereGeom(self.space, IceGameGlobals.TireRadius)
self.space.setSurfaceType(geom, self.tireSurfaceType)
self.space.setCollideId(geom, self.tireCollideIds[tireIndex])
self.massList.append(mass)
self.geomList.append(geom)
geom.setCollideBits(self.allTiresMask | self.wallMask | self.floorMask | self.obstacleMask)
geom.setCategoryBits(self.tireMasks[tireIndex])
geom.setBody(body)
if self.notify.getDebug():
self.notify.debug('tire geom id')
geom.write()
self.notify.debug(' -')
if self.canRender:
testTire = render.attachNewNode('tire holder %d' % tireIndex)
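            # NOTE: NodePath() below is an empty placeholder, so this debug
            # smiley block is skipped as written; it only runs if a model is
            # actually loaded into smileyModel.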
smileyModel = NodePath()
if not smileyModel.isEmpty():
smileyModel.setScale(IceGameGlobals.TireRadius)
smileyModel.reparentTo(testTire)
smileyModel.setAlphaScale(0.5)
smileyModel.setTransparency(1)
testTire.setPos(IceGameGlobals.StartingPositions[tireIndex])
tireModel = loader.loadModel('phase_4/models/minigames/ice_game_tire')
tireHeight = 1
tireModel.setZ(-IceGameGlobals.TireRadius + 0.01)
tireModel.reparentTo(testTire)
self.odePandaRelationList.append((testTire, body))
else:
testTire = None
self.bodyList.append((None, body))
return (testTire, body, geom)
def placeBodies(self):
for pair in self.odePandaRelationList:
pandaNodePathGeom = pair[0]
odeBody = pair[1]
if pandaNodePathGeom:
pandaNodePathGeom.setPos(odeBody.getPosition())
pandaNodePathGeom.setQuat(Quat(odeBody.getQuaternion()[0], odeBody.getQuaternion()[1], odeBody.getQuaternion()[2], odeBody.getQuaternion()[3]))
pandaNodePathGeom.setP(0)
pandaNodePathGeom.setR(0)
newQuat = pandaNodePathGeom.getQuat()
odeBody.setQuaternion(newQuat)
def postStep(self):
DistributedMinigamePhysicsWorld.DistributedMinigamePhysicsWorld.postStep(self)
self.placeBodies()
self.totalPhysicsSteps += 1
def createObstacle(self, pos, obstacleIndex, cubicObstacle):
if cubicObstacle:
return self.createCubicObstacle(pos, obstacleIndex)
else:
return self.createCircularObstacle(pos, obstacleIndex)
def createCircularObstacle(self, pos, obstacleIndex):
self.notify.debug('create obstacleindex %s' % obstacleIndex)
geom = OdeSphereGeom(self.space, IceGameGlobals.TireRadius)
geom.setCollideBits(self.allTiresMask)
geom.setCategoryBits(self.obstacleMask)
self.space.setCollideId(geom, self.obstacleCollideId)
tireModel = loader.loadModel('phase_4/models/minigames/ice_game_tirestack')
tireHeight = 1
tireModel.setPos(pos)
tireModel.reparentTo(render)
geom.setPosition(tireModel.getPos())
tireModel.setZ(0)
return tireModel
def createCubicObstacle(self, pos, obstacleIndex):
self.notify.debug('create obstacleindex %s' % obstacleIndex)
sideLength = IceGameGlobals.TireRadius * 2
geom = OdeBoxGeom(self.space, sideLength, sideLength, sideLength)
geom.setCollideBits(self.allTiresMask)
geom.setCategoryBits(self.obstacleMask)
self.space.setCollideId(geom, self.obstacleCollideId)
tireModel = loader.loadModel('phase_4/models/minigames/ice_game_crate')
tireModel.setPos(pos)
tireModel.reparentTo(render)
geom.setPosition(tireModel.getPos())
tireModel.setZ(0)
return tireModel
| {
"content_hash": "8884846985d32945500458d4d6596e32",
"timestamp": "",
"source": "github",
"line_count": 180,
"max_line_length": 168,
"avg_line_length": 48.294444444444444,
"alnum_prop": 0.6896353387783274,
"repo_name": "linktlh/Toontown-journey",
"id": "d48600369690eb7af3c37e207768f209e624499b",
"size": "8693",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "toontown/minigame/DistributedIceWorld.py",
"mode": "33261",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
'''
resource API wrapper
@author: Youyk
'''
import os
import sys
import traceback
import time
import apibinding.api_actions as api_actions
import apibinding.inventory as inventory
import account_operations
# Define the default method used to get resources. The default is the list API; the search API can also be used.
SEARCH_RESOURCE_METHOD = 'search'
LIST_RESOURCE_METHOD = 'list'
GET_RESOURCE_METHOD_BY_GET = 'get'
# GET_RESOURCE_METHOD = SEARCH_RESOURCE_METHOD
GET_RESOURCE_METHOD = LIST_RESOURCE_METHOD
BACKUP_STORAGE = 'BackupStorage'
SFTP_BACKUP_STORAGE = 'SftpBackupStorage'
ZONE = 'Zone'
CLUSTER = 'Cluster'
PRIMARY_STORAGE = 'PrimaryStorage'
L2_NETWORK = 'L2Network'
L2_VLAN_NETWORK = 'L2VlanNetwork'
L3_NETWORK = 'L3Network'
INSTANCE_OFFERING = 'InstanceOffering'
IMAGE = 'Image'
VOLUME = 'Volume'
VM_INSTANCE = 'VmInstance'
IP_RANGE = 'IpRange'
HOST = 'Host'
NETWORK_SERVICE_PROVIDER = 'NetworkServiceProvider'
NETWORK_SERVICE_PROVIDER_L3_REF = 'NetworkServiceProviderL3Ref'
APPLIANCE_VM = 'ApplianceVm'
DISK_OFFERING = 'DiskOffering'
ACCOUNT = 'Account'
PRIMARY_STORAGE = 'PrimaryStorage'
SECURITY_GROUP = 'SecurityGroup'
SECURITY_GROUP_RULE = 'SecurityGroupRule'
VM_SECURITY_GROUP = 'VmSecurityGroup'
VM_NIC = 'VmNic'
PORT_FORWARDING = 'PortForwarding'
MANAGEMENT_NODE = 'ManagementNode'
EIP = 'Eip'
VIP = 'Vip'
IP_CAPACITY = 'IpCapacity'
VR_OFFERING = 'VirtualRouterOffering'
SYSTEM_TAG = 'SystemTag'
USER_TAG = 'UserTag'
VOLUME_SNAPSHOT_TREE = 'VolumeSnapshotTree'
VOLUME_SNAPSHOT = 'VolumeSnapshot'
def find_item_by_uuid(inventories, uuid):
for item in inventories:
if item.uuid == uuid:
# test_util.test_logger("Item found by UUID: %s" % uuid)
return [item]
# test_util.test_logger("Not found item with UUID: %s" % uuid)
return None
def find_item_by_name(inventories, name):
for item in inventories:
if item.name == name:
# test_util.test_logger("Item found by name: %s" % name)
return [item]
# test_util.test_logger("Not found item with name: %s" % name)
return None
# Using List API
def list_resource(resource, session_uuid=None, uuid=None, name=None):
'''
    Return: a list fetched via the list API.
'''
if resource == BACKUP_STORAGE:
action = api_actions.ListBackupStorageAction()
elif resource == ZONE:
action = api_actions.ListZonesAction()
elif resource == PRIMARY_STORAGE:
action = api_actions.ListPrimaryStorageAction()
elif resource == L2_NETWORK:
action = api_actions.ListL2NetworkAction()
elif resource == L2_VLAN_NETWORK:
action = api_actions.ListL2VlanNetworkAction()
elif resource == CLUSTER:
action = api_actions.ListClusterAction()
elif resource == L3_NETWORK:
action = api_actions.ListL3NetworkAction()
elif resource == INSTANCE_OFFERING:
action = api_actions.ListInstanceOfferingAction()
elif resource == IMAGE:
action = api_actions.ListImageAction()
elif resource == VOLUME:
action = api_actions.ListVolumeAction()
elif resource == VM_INSTANCE:
action = api_actions.ListVmInstanceAction()
elif resource == IP_RANGE:
action = api_actions.ListIpRangeAction()
elif resource == HOST:
action = api_actions.ListHostAction()
elif resource == NETWORK_SERVICE_PROVIDER:
action = api_actions.ListNetworkServiceProviderAction()
elif resource == APPLIANCE_VM:
action = api_actions.ListApplianceVmAction()
elif resource == DISK_OFFERING:
action = api_actions.ListDiskOfferingAction()
elif resource == ACCOUNT:
action = api_actions.ListAccountAction()
elif resource == PRIMARY_STORAGE:
action = api_actions.ListPrimaryStorageAction()
elif resource == SECURITY_GROUP:
action = api_actions.ListSecurityGroupAction()
elif resource == VM_SECURITY_GROUP:
action = api_actions.ListVmNicInSecurityGroupAction()
elif resource == VM_NIC:
action = api_actions.ListVmNicAction()
elif resource == PORT_FORWARDING:
action = api_actions.ListPortForwardingRuleAction()
elif resource == MANAGEMENT_NODE:
action = api_actions.ListManagementNodeAction()
ret = account_operations.execute_action_with_session(action, session_uuid)
if uuid:
return find_item_by_uuid(ret, uuid)
if name:
return find_item_by_name(ret, name)
return ret
# Using Search API
def search_resource(resource, session_uuid, uuid=None, name=None):
'''
    Return: a list fetched via the search API.
    This API is deprecated.
'''
if resource == BACKUP_STORAGE:
action = api_actions.SearchBackupStorageAction()
elif resource == ZONE:
action = api_actions.SearchZoneAction()
elif resource == PRIMARY_STORAGE:
action = api_actions.SearchPrimaryStorageAction()
elif resource == L2_NETWORK:
action = api_actions.SearchL2NetworkAction()
elif resource == L2_VLAN_NETWORK:
action = api_actions.SearchL2VlanNetworkAction()
elif resource == CLUSTER:
action = api_actions.SearchClusterAction()
elif resource == L3_NETWORK:
action = api_actions.SearchL3NetworkAction()
elif resource == INSTANCE_OFFERING:
action = api_actions.SearchInstanceOfferingAction()
elif resource == IMAGE:
action = api_actions.SearchImageAction()
elif resource == VOLUME:
action = api_actions.SearchVolumeAction()
elif resource == VM_INSTANCE:
action = api_actions.SearchVmInstanceAction()
elif resource == IP_RANGE:
action = api_actions.SearchIpRangeAction()
elif resource == HOST:
action = api_actions.SearchHostAction()
elif resource == NETWORK_SERVICE_PROVIDER:
action = api_actions.SearchNetworkServiceProviderAction()
elif resource == APPLIANCE_VM:
action = api_actions.SearchApplianceVmAction()
elif resource == DISK_OFFERING:
action = api_actions.SearchDiskOfferingAction()
elif resource == ACCOUNT:
action = api_actions.SearchAccountAction()
elif resource == PRIMARY_STORAGE:
action = api_actions.SearchPrimaryStorageAction()
# elif resource == SECURITY_GROUP:
# action = api_actions.SearchSecurityGroupAction()
# elif resource == VM_SECURITY_GROUP:
# action = api_actions.SearchVmNicInSecurityGroupAction()
action.sessionUuid = session_uuid
action.nameOpValueTriples = []
if uuid:
t = inventory.NOVTriple()
t.name = 'uuid'
t.op = inventory.AND_EQ
t.val = uuid
action.nameOpValueTriples.append(t)
if name:
t = inventory.NOVTriple()
t.name = 'name'
t.op = inventory.AND_EQ
t.val = name
action.nameOpValueTriples.append(t)
    # The delay is needed because the elastic search inventory lags about 0.5s behind data created in the database.
time.sleep(0.3)
ret = action.run()
return ret
def get_resource_by_get(resource, session_uuid, uuid):
'''
    Return a list fetched via the get API.
'''
if resource == BACKUP_STORAGE:
action = api_actions.GetBackupStorageAction()
elif resource == ZONE:
action = api_actions.GetZoneAction()
elif resource == PRIMARY_STORAGE:
action = api_actions.GetPrimaryStorageAction()
elif resource == L2_NETWORK:
action = api_actions.GetL2NetworkAction()
elif resource == L2_VLAN_NETWORK:
action = api_actions.GetL2VlanNetworkAction()
elif resource == CLUSTER:
action = api_actions.GetClusterAction()
elif resource == L3_NETWORK:
action = api_actions.GetL3NetworkAction()
elif resource == INSTANCE_OFFERING:
action = api_actions.GetInstanceOfferingAction()
elif resource == IMAGE:
action = api_actions.GetImageAction()
elif resource == VOLUME:
action = api_actions.GetVolumeAction()
elif resource == VM_INSTANCE:
action = api_actions.GetVmInstanceAction()
elif resource == IP_RANGE:
action = api_actions.GetIpRangeAction()
elif resource == HOST:
action = api_actions.GetHostAction()
elif resource == NETWORK_SERVICE_PROVIDER:
action = api_actions.GetNetworkServiceProviderAction()
elif resource == APPLIANCE_VM:
action = api_actions.GetApplianceVmAction()
elif resource == DISK_OFFERING:
action = api_actions.GetDiskOfferingAction()
elif resource == ACCOUNT:
action = api_actions.GetAccountAction()
elif resource == PRIMARY_STORAGE:
action = api_actions.GetPrimaryStorageAction()
elif resource == VR_OFFERING:
action = api_actions.GetVirtualRouterOfferingAction()
# elif resource == SECURITY_GROUP:
# action = api_actions.GetSecurityGroupAction()
# elif resource == VM_SECURITY_GROUP:
# action = api_actions.GetVmNicInSecurityGroupAction()
action.uuid = uuid
ret = account_operations.execute_action_with_session(action, session_uuid)
return ret
def gen_query_conditions(name, op, value, conditions=[]):
new_conditions = [{'name': name, 'op': op, 'value': value}]
new_conditions.extend(conditions)
return new_conditions
def _gen_query_action(resource):
if resource == BACKUP_STORAGE:
action = api_actions.QueryBackupStorageAction()
elif resource == SFTP_BACKUP_STORAGE:
action = api_actions.QuerySftpBackupStorageAction()
elif resource == ZONE:
action = api_actions.QueryZoneAction()
elif resource == PRIMARY_STORAGE:
action = api_actions.QueryPrimaryStorageAction()
elif resource == L2_NETWORK:
action = api_actions.QueryL2NetworkAction()
elif resource == L2_VLAN_NETWORK:
action = api_actions.QueryL2VlanNetworkAction()
elif resource == CLUSTER:
action = api_actions.QueryClusterAction()
elif resource == L3_NETWORK:
action = api_actions.QueryL3NetworkAction()
elif resource == INSTANCE_OFFERING:
action = api_actions.QueryInstanceOfferingAction()
elif resource == IMAGE:
action = api_actions.QueryImageAction()
elif resource == VOLUME:
action = api_actions.QueryVolumeAction()
elif resource == VM_INSTANCE:
action = api_actions.QueryVmInstanceAction()
elif resource == IP_RANGE:
action = api_actions.QueryIpRangeAction()
elif resource == HOST:
action = api_actions.QueryHostAction()
elif resource == NETWORK_SERVICE_PROVIDER:
action = api_actions.QueryNetworkServiceProviderAction()
elif resource == NETWORK_SERVICE_PROVIDER_L3_REF:
action = api_actions.QueryNetworkServiceL3NetworkRefAction()
elif resource == APPLIANCE_VM:
action = api_actions.QueryApplianceVmAction()
elif resource == DISK_OFFERING:
action = api_actions.QueryDiskOfferingAction()
elif resource == ACCOUNT:
action = api_actions.QueryAccountAction()
elif resource == PRIMARY_STORAGE:
action = api_actions.QueryPrimaryStorageAction()
elif resource == SECURITY_GROUP:
action = api_actions.QuerySecurityGroupAction()
elif resource == SECURITY_GROUP_RULE:
action = api_actions.QuerySecurityGroupRuleAction()
elif resource == VM_SECURITY_GROUP:
action = api_actions.QueryVmNicInSecurityGroupAction()
elif resource == VM_NIC:
action = api_actions.QueryVmNicAction()
elif resource == PORT_FORWARDING:
action = api_actions.QueryPortForwardingRuleAction()
elif resource == MANAGEMENT_NODE:
action = api_actions.QueryManagementNodeAction()
elif resource == EIP:
action = api_actions.QueryEipAction()
elif resource == VIP:
action = api_actions.QueryVipAction()
elif resource == VR_OFFERING:
action = api_actions.QueryVirtualRouterOfferingAction()
elif resource == SYSTEM_TAG:
action = api_actions.QuerySystemTagAction()
elif resource == USER_TAG:
action = api_actions.QueryUserTagAction()
elif resource == VOLUME_SNAPSHOT_TREE:
action = api_actions.QueryVolumeSnapshotTreeAction()
elif resource == VOLUME_SNAPSHOT:
action = api_actions.QueryVolumeSnapshotAction()
return action
def query_resource(resource, conditions=[], session_uuid=None, count='false'):
'''
    Call the query API and return all matched resources.
    Conditions can be generated by gen_query_conditions().
    If session_uuid is missing, one will be created for you and will live only
    within this call.
'''
action = _gen_query_action(resource)
action.conditions = conditions
ret = account_operations.execute_action_with_session(action, session_uuid)
return ret
def query_resource_count(resource, conditions=[], session_uuid=None):
'''
    Call the query API to return the matched resource count.
    When count=true, only the number of matched resources is returned.
'''
action = _gen_query_action(resource)
action.conditions = conditions
action.count = 'true'
account_operations.execute_action_with_session(action, session_uuid)
return action.reply.total
def query_resource_with_num(resource, conditions=[], session_uuid=None,
fields=[], start=0, limit=1000):
'''
    Query matched resources and return at most the requested number of results.
'''
action = _gen_query_action(resource)
action.conditions = conditions
action.start = start
action.limit = limit
action.fields = fields
ret = account_operations.execute_action_with_session(action, session_uuid)
return ret
def query_resource_fields(resource, conditions=[], session_uuid=None,
fields=[], start=0, limit=1000):
'''
    Query matched resources, returning only the requested fields and at most the requested number of results.
'''
action = _gen_query_action(resource)
action.conditions = conditions
action.start = start
action.limit = limit
action.fields = fields
ret = account_operations.execute_action_with_session(action, session_uuid)
return ret
def get_resource(resource, session_uuid=None, uuid=None, name=None):
if uuid:
cond = gen_query_conditions('uuid', '=', uuid)
elif name:
cond = gen_query_conditions('name', '=', name)
else:
cond = gen_query_conditions('uuid', '!=', 'NULL')
return query_resource(resource, cond, session_uuid)
# if GET_RESOURCE_METHOD == LIST_RESOURCE_METHOD:
# return list_resource(resource, session_uuid, uuid=uuid, name=name)
# elif GET_RESOURCE_METHOD == GET_RESOURCE_METHOD_BY_GET:
# if not uuid:
# raise Exception('Get_Resource function error, uuid can not be None')
# return get_resource_by_get(resource, session_uuid, uuid=uuid)
# else:
# return search_resource(resource, session_uuid, uuid=uuid, name=name)
def safely_get_resource(res_name, cond=[], session_uuid=None,
fields=None, limit=100):
res_count = query_resource_count(res_name, cond, session_uuid)
res_list = []
if res_count <= limit:
res_list = query_resource_fields(res_name, cond, session_uuid, fields)
else:
curr_count = 0
while curr_count <= res_count:
curr_list = query_resource_with_num(res_name, cond, session_uuid, fields, start=curr_count, limit=limit)
res_list.extend(curr_list)
curr_count += limit
return res_list
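# Editor's sketch -- NOT part of the original module: typical use of the query
# helpers above. The 'state'/'Running' condition values are illustrative.
def _example_query_running_vms(session_uuid=None):
    conditions = gen_query_conditions('state', '=', 'Running')
    return query_resource(VM_INSTANCE, conditions, session_uuid)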
| {
"content_hash": "02b99ebc82138426c92f65d58b6ef304",
"timestamp": "",
"source": "github",
"line_count": 429,
"max_line_length": 120,
"avg_line_length": 35.97435897435897,
"alnum_prop": 0.676861271301756,
"repo_name": "live4thee/zstack-utility",
"id": "b135bdf7634e70b383548e8f3e6b3703656ae62f",
"size": "15433",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "zstackcli/zstackcli/resource_operations.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "AGS Script",
"bytes": "1147"
},
{
"name": "HTML",
"bytes": "4445"
},
{
"name": "Pascal",
"bytes": "187"
},
{
"name": "Puppet",
"bytes": "10417"
},
{
"name": "Python",
"bytes": "2346166"
},
{
"name": "Shell",
"bytes": "241290"
}
],
"symlink_target": ""
} |
import json
import scrapy
# Most AJAX-based websites can be scraped by reproducing the API calls made
# by the browser, as we do in this simple example that scrapes
# a website paginated via infinite scrolling (quotes.toscrape.com/scroll)
class ToScrapeInfiniteScrollingSpider(scrapy.Spider):
name = 'toscrape-infinite-scrolling'
base_url = 'http://quotes.toscrape.com/api/quotes?page=%d'
start_urls = [base_url % 1]
def parse(self, response):
json_data = json.loads(response.text)
for quote in json_data['quotes']:
yield quote
if json_data['has_next']:
yield scrapy.Request(self.base_url % (int(json_data['page']) + 1))
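# Editor's note: typical invocation from within a Scrapy project (illustrative):
#   scrapy crawl toscrape-infinite-scrolling -o quotes.json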
| {
"content_hash": "fae1a962dbf219500f504d23a99852e5",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 78,
"avg_line_length": 38.22222222222222,
"alnum_prop": 0.686046511627907,
"repo_name": "fpldataspiders/SPIDERS",
"id": "a5492a8ce0320056ba209e3c5a2bac0ee7e60866",
"size": "688",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "sample-projects-master/quotes_crawler/quotes_crawler/spiders/toscrape-infinite-scrolling.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1335"
},
{
"name": "Lua",
"bytes": "3740"
},
{
"name": "Python",
"bytes": "101913"
}
],
"symlink_target": ""
} |
import mock
from oslo_config import cfg
from neutron.common import config as neutron_config
from neutron.plugins.ml2 import config as ml2_config
from neutron.tests import base
UCSM_IP_ADDRESS_1 = '1.1.1.1'
UCSM_USERNAME_1 = 'username1'
UCSM_PASSWORD_1 = 'password1'
UCSM_IP_ADDRESS_2 = '2.2.2.2'
UCSM_USERNAME_2 = 'username2'
UCSM_PASSWORD_2 = 'password2'
UCSM_PHY_NETS = ['test_physnet']
class ConfigMixin(object):
"""Mock config for UCSM driver."""
mocked_parser = None
def set_up_mocks(self):
# Mock the configuration file
args = ['--config-file', base.etcdir('neutron.conf')]
neutron_config.init(args=args)
# Configure the ML2 mechanism drivers and network types
ml2_opts = {
'mechanism_drivers': ['cisco_ucsm'],
'tenant_network_types': ['vlan'],
}
for opt, val in ml2_opts.items():
ml2_config.cfg.CONF.set_override(opt, val, 'ml2')
# Configure the Cisco UCS Manager mechanism driver
ucsm_test_config = {
'ml2_cisco_ucsm_ip: 1.1.1.1': {
'ucsm_username': UCSM_USERNAME_1,
'ucsm_password': UCSM_PASSWORD_1,
},
'ml2_cisco_ucsm_ip: 2.2.2.2': {
'ucsm_username': UCSM_USERNAME_2,
'ucsm_password': UCSM_PASSWORD_2,
},
}
self.mocked_parser = mock.patch.object(cfg,
'MultiConfigParser').start()
self.mocked_parser.return_value.read.return_value = [ucsm_test_config]
self.mocked_parser.return_value.parsed = [ucsm_test_config]
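# Editor's sketch -- NOT part of the original module: how a test case would
# typically consume this mixin; the class name is illustrative.
class ExampleUcsmConfigTestCase(base.BaseTestCase, ConfigMixin):
    def setUp(self):
        super(ExampleUcsmConfigTestCase, self).setUp()
        self.set_up_mocks()  # installs the mocked neutron/ML2/UCSM configuration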
| {
"content_hash": "c2c81003da0948857af3214c9050aa9c",
"timestamp": "",
"source": "github",
"line_count": 53,
"max_line_length": 78,
"avg_line_length": 30.358490566037737,
"alnum_prop": 0.5997513983840895,
"repo_name": "CiscoSystems/networking-cisco",
"id": "6357e2de86c5c6f96bb2c219be4061ae0bb65235",
"size": "2212",
"binary": false,
"copies": "1",
"ref": "refs/heads/asr1k_liberty_master_wip",
"path": "networking_cisco/tests/unit/ml2/drivers/cisco/ucsm/test_cisco_ucsm_common.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Mako",
"bytes": "1043"
},
{
"name": "Python",
"bytes": "2082062"
},
{
"name": "Shell",
"bytes": "44368"
}
],
"symlink_target": ""
} |
import os
import tempfile
import vtk
import wx
# get the slice3dVWR and marchingCubes modules marked by the user
sv = devideApp.ModuleManager.getMarkedModule('slice3dVWR')
mc = devideApp.ModuleManager.getMarkedModule('marchingCubes')
if sv and mc:
# bring the window to the front
sv.view()
# make sure it actually happens
wx.Yield()
w2i = vtk.vtkWindowToImageFilter()
w2i.SetInput(sv._threedRenderer.GetRenderWindow())
pngWriter = vtk.vtkPNGWriter()
pngWriter.SetInput(w2i.GetOutput())
tempdir = tempfile.gettempdir()
fprefix = os.path.join(tempdir, 'devideFrame')
camera = sv._threedRenderer.GetActiveCamera()
for i in range(160):
print i
mc._contourFilter.SetValue(0,i / 2.0)
camera.Azimuth(5)
sv.render3D()
# make sure w2i knows it's a new image
w2i.Modified()
pngWriter.SetFileName('%s%03d.png' % (fprefix, i))
pngWriter.Write()
print "The frames have been written as %s*.png." % (fprefix,)
else:
print "You have to mark a slice3dVWR module and a marchingCubes module!"
| {
"content_hash": "b283735fc84e534773fef0e7aa047902",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 76,
"avg_line_length": 27.794871794871796,
"alnum_prop": 0.6688191881918819,
"repo_name": "chrisidefix/devide",
"id": "5058af8bbc840b83923e1ed54164e000c68631bf",
"size": "1655",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "snippets/rotateCameraIncreaseIsovalue.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Diff",
"bytes": "1373"
},
{
"name": "NSIS",
"bytes": "2786"
},
{
"name": "Python",
"bytes": "3104368"
},
{
"name": "Shell",
"bytes": "7369"
}
],
"symlink_target": ""
} |