#!/usr/bin/env python
#
# Electrum - lightweight Bitcoin client
# Copyright (C) 2012 thomasv@gitorious
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import sys, time, threading
import os, json, traceback
import shutil
import weakref
import webbrowser
import csv
from decimal import Decimal
import base64
from functools import partial
from PyQt5.QtGui import *
from PyQt5.QtCore import *
import PyQt5.QtCore as QtCore
from .exception_window import Exception_Hook
from PyQt5.QtWidgets import *
from electrum import keystore, simple_config, ecc
from electrum.bitcoin import COIN, is_address, TYPE_ADDRESS
from electrum import constants
from electrum.plugins import run_hook
from electrum.i18n import _
from electrum.util import (format_time, format_satoshis, format_fee_satoshis,
format_satoshis_plain, NotEnoughFunds, PrintError,
UserCancelled, NoDynamicFeeEstimates, profiler,
export_meta, import_meta, bh2u, bfh, InvalidPassword,
base_units, base_units_list, base_unit_name_to_decimal_point,
decimal_point_to_base_unit_name, quantize_feerate)
from electrum import Transaction
from electrum import util, bitcoin, commands, coinchooser
from electrum import paymentrequest
from electrum.wallet import Multisig_Wallet, AddTransactionException
from .amountedit import AmountEdit, BTCAmountEdit, MyLineEdit, FeerateEdit
from .qrcodewidget import QRCodeWidget, QRDialog
from .qrtextedit import ShowQRTextEdit, ScanQRTextEdit
from .transaction_dialog import show_transaction
from .fee_slider import FeeSlider
from .util import *
class StatusBarButton(QPushButton):
def __init__(self, icon, tooltip, func):
QPushButton.__init__(self, icon, '')
self.setToolTip(tooltip)
self.setFlat(True)
self.setMaximumWidth(25)
self.clicked.connect(self.onPress)
self.func = func
self.setIconSize(QSize(25,25))
def onPress(self, checked=False):
'''Drops the unwanted PyQt5 "checked" argument'''
self.func()
def keyPressEvent(self, e):
if e.key() == Qt.Key_Return:
self.func()
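# Example of how such a button is typically wired up (an illustrative
# sketch; the exact icon path and callback here are assumptions, not code
# from this file):
#
#     sb = self.statusBar()
#     sb.addPermanentWidget(StatusBarButton(
#         QIcon(":icons/lock.png"), _("Password"), self.change_password_dialog))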
from electrum.paymentrequest import PR_PAID
class ElectrumWindow(QMainWindow, MessageBoxMixin, PrintError):
payment_request_ok_signal = pyqtSignal()
payment_request_error_signal = pyqtSignal()
notify_transactions_signal = pyqtSignal()
new_fx_quotes_signal = pyqtSignal()
new_fx_history_signal = pyqtSignal()
network_signal = pyqtSignal(str, object)
alias_received_signal = pyqtSignal()
computing_privkeys_signal = pyqtSignal()
show_privkeys_signal = pyqtSignal()
def __init__(self, gui_object, wallet):
QMainWindow.__init__(self)
self.gui_object = gui_object
self.config = config = gui_object.config
self.setup_exception_hook()
self.network = gui_object.daemon.network
self.fx = gui_object.daemon.fx
self.invoices = wallet.invoices
self.contacts = wallet.contacts
self.tray = gui_object.tray
self.app = gui_object.app
self.cleaned_up = False
self.is_max = False
self.payment_request = None
self.checking_accounts = False
self.qr_window = None
self.not_enough_funds = False
self.pluginsdialog = None
self.require_fee_update = False
self.tx_notifications = []
self.tl_windows = []
self.tx_external_keypairs = {}
self.create_status_bar()
self.need_update = threading.Event()
self.decimal_point = config.get('decimal_point', 5)
self.num_zeros = int(config.get('num_zeros',0))
self.completions = QStringListModel()
self.tabs = tabs = QTabWidget(self)
self.send_tab = self.create_send_tab()
self.receive_tab = self.create_receive_tab()
self.addresses_tab = self.create_addresses_tab()
self.utxo_tab = self.create_utxo_tab()
self.console_tab = self.create_console_tab()
self.contacts_tab = self.create_contacts_tab()
tabs.addTab(self.create_history_tab(), QIcon(":icons/tab_history.png"), _('History'))
tabs.addTab(self.send_tab, QIcon(":icons/tab_send.png"), _('Send'))
tabs.addTab(self.receive_tab, QIcon(":icons/tab_receive.png"), _('Receive'))
def add_optional_tab(tabs, tab, icon, description, name):
tab.tab_icon = icon
tab.tab_description = description
tab.tab_pos = len(tabs)
tab.tab_name = name
if self.config.get('show_{}_tab'.format(name), False):
tabs.addTab(tab, icon, description.replace("&", ""))
add_optional_tab(tabs, self.addresses_tab, QIcon(":icons/tab_addresses.png"), _("&Addresses"), "addresses")
add_optional_tab(tabs, self.utxo_tab, QIcon(":icons/tab_coins.png"), _("Co&ins"), "utxo")
add_optional_tab(tabs, self.contacts_tab, QIcon(":icons/tab_contacts.png"), _("Con&tacts"), "contacts")
add_optional_tab(tabs, self.console_tab, QIcon(":icons/tab_console.png"), _("Con&sole"), "console")
tabs.setSizePolicy(QSizePolicy.Expanding, QSizePolicy.Expanding)
self.setCentralWidget(tabs)
if self.config.get("is_maximized"):
self.showMaximized()
self.setWindowIcon(QIcon(":icons/electrum.png"))
self.init_menubar()
wrtabs = weakref.proxy(tabs)
QShortcut(QKeySequence("Ctrl+W"), self, self.close)
QShortcut(QKeySequence("Ctrl+Q"), self, self.close)
QShortcut(QKeySequence("Ctrl+R"), self, self.update_wallet)
QShortcut(QKeySequence("Ctrl+PgUp"), self, lambda: wrtabs.setCurrentIndex((wrtabs.currentIndex() - 1)%wrtabs.count()))
QShortcut(QKeySequence("Ctrl+PgDown"), self, lambda: wrtabs.setCurrentIndex((wrtabs.currentIndex() + 1)%wrtabs.count()))
for i in range(wrtabs.count()):
QShortcut(QKeySequence("Alt+" + str(i + 1)), self, lambda i=i: wrtabs.setCurrentIndex(i))
self.payment_request_ok_signal.connect(self.payment_request_ok)
self.payment_request_error_signal.connect(self.payment_request_error)
self.notify_transactions_signal.connect(self.notify_transactions)
        self.history_list.setFocus()
# network callbacks
if self.network:
self.network_signal.connect(self.on_network_qt)
            interests = ['updated', 'new_transaction', 'status',
                         'banner', 'verified', 'fee', 'fee_histogram']
# To avoid leaking references to "self" that prevent the
# window from being GC-ed when closed, callbacks should be
# methods of this class only, and specifically not be
# partials, lambdas or methods of subobjects. Hence...
self.network.register_callback(self.on_network, interests)
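            # For example, registering
            #     lambda event, *a: self.update_status()
            # instead would create a closure that strongly references
            # `self` and leaves no equal-comparing handle with which to
            # unregister it later.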
# set initial message
self.console.showMessage(self.network.banner)
self.network.register_callback(self.on_quotes, ['on_quotes'])
self.network.register_callback(self.on_history, ['on_history'])
self.new_fx_quotes_signal.connect(self.on_fx_quotes)
self.new_fx_history_signal.connect(self.on_fx_history)
# update fee slider in case we missed the callback
self.fee_slider.update()
self.load_wallet(wallet)
self.connect_slots(gui_object.timer)
self.fetch_alias()
def on_history(self, b):
self.new_fx_history_signal.emit()
def setup_exception_hook(self):
Exception_Hook(self)
def on_fx_history(self):
self.history_list.refresh_headers()
self.history_list.update()
self.address_list.update()
def on_quotes(self, b):
self.new_fx_quotes_signal.emit()
def on_fx_quotes(self):
self.update_status()
# Refresh edits with the new rate
edit = self.fiat_send_e if self.fiat_send_e.is_last_edited else self.amount_e
edit.textEdited.emit(edit.text())
edit = self.fiat_receive_e if self.fiat_receive_e.is_last_edited else self.receive_amount_e
edit.textEdited.emit(edit.text())
# History tab needs updating if it used spot
if self.fx.history_used_spot:
self.history_list.update()
def toggle_tab(self, tab):
show = not self.config.get('show_{}_tab'.format(tab.tab_name), False)
self.config.set_key('show_{}_tab'.format(tab.tab_name), show)
item_text = (_("Hide") if show else _("Show")) + " " + tab.tab_description
tab.menu_action.setText(item_text)
if show:
# Find out where to place the tab
index = len(self.tabs)
for i in range(len(self.tabs)):
try:
if tab.tab_pos < self.tabs.widget(i).tab_pos:
index = i
break
except AttributeError:
pass
self.tabs.insertTab(index, tab, tab.tab_icon, tab.tab_description.replace("&", ""))
else:
i = self.tabs.indexOf(tab)
self.tabs.removeTab(i)
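    # For example, with only the three fixed tabs visible, re-enabling the
    # Coins tab scans the current tabs for the first one whose tab_pos is
    # greater than the Coins tab's and inserts before it, preserving the
    # canonical ordering set up in __init__.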
def push_top_level_window(self, window):
'''Used for e.g. tx dialog box to ensure new dialogs are appropriately
parented. This used to be done by explicitly providing the parent
window, but that isn't something hardware wallet prompts know.'''
self.tl_windows.append(window)
def pop_top_level_window(self, window):
self.tl_windows.remove(window)
def top_level_window(self, test_func=None):
'''Do the right thing in the presence of tx dialog windows'''
override = self.tl_windows[-1] if self.tl_windows else None
if override and test_func and not test_func(override):
override = None # only override if ok for test_func
return self.top_level_window_recurse(override, test_func)
def diagnostic_name(self):
return "%s/%s" % (PrintError.diagnostic_name(self),
self.wallet.basename() if self.wallet else "None")
def is_hidden(self):
return self.isMinimized() or self.isHidden()
def show_or_hide(self):
if self.is_hidden():
self.bring_to_top()
else:
self.hide()
def bring_to_top(self):
self.show()
self.raise_()
def on_error(self, exc_info):
if not isinstance(exc_info[1], UserCancelled):
traceback.print_exception(*exc_info)
self.show_error(str(exc_info[1]))
def on_network(self, event, *args):
if event == 'updated':
self.need_update.set()
self.gui_object.network_updated_signal_obj.network_updated_signal \
.emit(event, args)
elif event == 'new_transaction':
self.tx_notifications.append(args[0])
self.notify_transactions_signal.emit()
        elif event in ['status', 'banner', 'verified', 'fee', 'fee_histogram']:
# Handle in GUI thread
self.network_signal.emit(event, args)
else:
self.print_error("unexpected network message:", event, args)
def on_network_qt(self, event, args=None):
# Handle a network message in the GUI thread
if event == 'status':
self.update_status()
elif event == 'banner':
self.console.showMessage(args[0])
elif event == 'verified':
self.history_list.update_item(*args)
elif event == 'fee':
if self.config.is_dynfee():
self.fee_slider.update()
self.do_update_fee()
elif event == 'fee_histogram':
if self.config.is_dynfee():
self.fee_slider.update()
self.do_update_fee()
# todo: update only unconfirmed tx
self.history_list.update()
else:
self.print_error("unexpected network_qt signal:", event, args)
def fetch_alias(self):
self.alias_info = None
alias = self.config.get('alias')
if alias:
alias = str(alias)
def f():
self.alias_info = self.contacts.resolve_openalias(alias)
self.alias_received_signal.emit()
            t = threading.Thread(target=f)
            t.daemon = True
            t.start()
def close_wallet(self):
if self.wallet:
self.print_error('close_wallet', self.wallet.storage.path)
run_hook('close_wallet', self.wallet)
@profiler
def load_wallet(self, wallet):
wallet.thread = TaskThread(self, self.on_error)
self.wallet = wallet
self.update_recently_visited(wallet.storage.path)
# address used to create a dummy transaction and estimate transaction fee
self.history_list.update()
self.address_list.update()
self.utxo_list.update()
self.need_update.set()
        # Once the GUI has been initialized, check whether there is anything
        # to announce, since the new-transaction callback may already have
        # fired before the GUI existed.
self.notify_transactions()
# update menus
self.seed_menu.setEnabled(self.wallet.has_seed())
self.update_lock_icon()
self.update_buttons_on_seed()
self.update_console()
self.clear_receive_tab()
self.request_list.update()
self.tabs.show()
self.init_geometry()
if self.config.get('hide_gui') and self.gui_object.tray.isVisible():
self.hide()
else:
self.show()
self.watching_only_changed()
run_hook('load_wallet', wallet, self)
def init_geometry(self):
winpos = self.wallet.storage.get("winpos-qt")
try:
screen = self.app.desktop().screenGeometry()
assert screen.contains(QRect(*winpos))
self.setGeometry(*winpos)
        except Exception:
self.print_error("using default geometry")
self.setGeometry(100, 100, 840, 400)
def watching_only_changed(self):
name = "Electrum Testnet" if constants.net.TESTNET else "Electrum"
title = '%s %s - %s' % (name, self.wallet.electrum_version,
self.wallet.basename())
extra = [self.wallet.storage.get('wallet_type', '?')]
if self.wallet.is_watching_only():
self.warn_if_watching_only()
extra.append(_('watching only'))
title += ' [%s]'% ', '.join(extra)
self.setWindowTitle(title)
self.password_menu.setEnabled(self.wallet.may_have_password())
self.import_privkey_menu.setVisible(self.wallet.can_import_privkey())
self.import_address_menu.setVisible(self.wallet.can_import_address())
self.export_menu.setEnabled(self.wallet.can_export())
def warn_if_watching_only(self):
if self.wallet.is_watching_only():
msg = ' '.join([
_("This wallet is watching-only."),
_("This means you will not be able to spend Bitcoins with it."),
_("Make sure you own the seed phrase or the private keys, before you request Bitcoins to be sent to this wallet.")
])
self.show_warning(msg, title=_('Information'))
def open_wallet(self):
try:
wallet_folder = self.get_wallet_folder()
except FileNotFoundError as e:
self.show_error(str(e))
return
filename, __ = QFileDialog.getOpenFileName(self, "Select your wallet file", wallet_folder)
if not filename:
return
self.gui_object.new_window(filename)
def backup_wallet(self):
path = self.wallet.storage.path
wallet_folder = os.path.dirname(path)
filename, __ = QFileDialog.getSaveFileName(self, _('Enter a filename for the copy of your wallet'), wallet_folder)
if not filename:
return
new_path = os.path.join(wallet_folder, filename)
if new_path != path:
try:
shutil.copy2(path, new_path)
self.show_message(_("A copy of your wallet file was created in")+" '%s'" % str(new_path), title=_("Wallet backup created"))
except BaseException as reason:
self.show_critical(_("Electrum was unable to copy your wallet file to the specified location.") + "\n" + str(reason), title=_("Unable to create backup"))
def update_recently_visited(self, filename):
recent = self.config.get('recently_open', [])
        try:
            sorted(recent)  # sanity-check that the stored value is a sortable list
        except Exception:
            recent = []
if filename in recent:
recent.remove(filename)
recent.insert(0, filename)
recent = recent[:5]
self.config.set_key('recently_open', recent)
self.recently_visited_menu.clear()
for i, k in enumerate(sorted(recent)):
b = os.path.basename(k)
def loader(k):
return lambda: self.gui_object.new_window(k)
self.recently_visited_menu.addAction(b, loader(k)).setShortcut(QKeySequence("Ctrl+%d"%(i+1)))
self.recently_visited_menu.setEnabled(len(recent))
def get_wallet_folder(self):
return os.path.dirname(os.path.abspath(self.config.get_wallet_path()))
def new_wallet(self):
try:
wallet_folder = self.get_wallet_folder()
except FileNotFoundError as e:
self.show_error(str(e))
return
i = 1
while True:
filename = "wallet_%d" % i
if filename in os.listdir(wallet_folder):
i += 1
else:
break
full_path = os.path.join(wallet_folder, filename)
self.gui_object.start_new_window(full_path, None)
def init_menubar(self):
menubar = QMenuBar()
file_menu = menubar.addMenu(_("&File"))
self.recently_visited_menu = file_menu.addMenu(_("&Recently open"))
file_menu.addAction(_("&Open"), self.open_wallet).setShortcut(QKeySequence.Open)
file_menu.addAction(_("&New/Restore"), self.new_wallet).setShortcut(QKeySequence.New)
file_menu.addAction(_("&Save Copy"), self.backup_wallet).setShortcut(QKeySequence.SaveAs)
file_menu.addAction(_("Delete"), self.remove_wallet)
file_menu.addSeparator()
file_menu.addAction(_("&Quit"), self.close)
wallet_menu = menubar.addMenu(_("&Wallet"))
wallet_menu.addAction(_("&Information"), self.show_master_public_keys)
wallet_menu.addSeparator()
self.password_menu = wallet_menu.addAction(_("&Password"), self.change_password_dialog)
self.seed_menu = wallet_menu.addAction(_("&Seed"), self.show_seed_dialog)
self.private_keys_menu = wallet_menu.addMenu(_("&Private keys"))
self.private_keys_menu.addAction(_("&Sweep"), self.sweep_key_dialog)
self.import_privkey_menu = self.private_keys_menu.addAction(_("&Import"), self.do_import_privkey)
self.export_menu = self.private_keys_menu.addAction(_("&Export"), self.export_privkeys_dialog)
self.import_address_menu = wallet_menu.addAction(_("Import addresses"), self.import_addresses)
wallet_menu.addSeparator()
addresses_menu = wallet_menu.addMenu(_("&Addresses"))
addresses_menu.addAction(_("&Filter"), lambda: self.address_list.toggle_toolbar(self.config))
labels_menu = wallet_menu.addMenu(_("&Labels"))
labels_menu.addAction(_("&Import"), self.do_import_labels)
labels_menu.addAction(_("&Export"), self.do_export_labels)
history_menu = wallet_menu.addMenu(_("&History"))
history_menu.addAction(_("&Filter"), lambda: self.history_list.toggle_toolbar(self.config))
history_menu.addAction(_("&Summary"), self.history_list.show_summary)
history_menu.addAction(_("&Plot"), self.history_list.plot_history_dialog)
history_menu.addAction(_("&Export"), self.history_list.export_history_dialog)
contacts_menu = wallet_menu.addMenu(_("Contacts"))
contacts_menu.addAction(_("&New"), self.new_contact_dialog)
contacts_menu.addAction(_("Import"), lambda: self.contact_list.import_contacts())
contacts_menu.addAction(_("Export"), lambda: self.contact_list.export_contacts())
invoices_menu = wallet_menu.addMenu(_("Invoices"))
invoices_menu.addAction(_("Import"), lambda: self.invoice_list.import_invoices())
invoices_menu.addAction(_("Export"), lambda: self.invoice_list.export_invoices())
wallet_menu.addSeparator()
wallet_menu.addAction(_("Find"), self.toggle_search).setShortcut(QKeySequence("Ctrl+F"))
def add_toggle_action(view_menu, tab):
is_shown = self.config.get('show_{}_tab'.format(tab.tab_name), False)
item_name = (_("Hide") if is_shown else _("Show")) + " " + tab.tab_description
tab.menu_action = view_menu.addAction(item_name, lambda: self.toggle_tab(tab))
view_menu = menubar.addMenu(_("&View"))
add_toggle_action(view_menu, self.addresses_tab)
add_toggle_action(view_menu, self.utxo_tab)
add_toggle_action(view_menu, self.contacts_tab)
add_toggle_action(view_menu, self.console_tab)
tools_menu = menubar.addMenu(_("&Tools"))
        # "Settings"/"Preferences" are reserved menu names on macOS; use a
        # different label there as a workaround.
tools_menu.addAction(_("Electrum preferences") if sys.platform == 'darwin' else _("Preferences"), self.settings_dialog)
tools_menu.addAction(_("&Network"), lambda: self.gui_object.show_network_dialog(self))
tools_menu.addAction(_("&Plugins"), self.plugins_dialog)
tools_menu.addSeparator()
tools_menu.addAction(_("&Sign/verify message"), self.sign_verify_message)
tools_menu.addAction(_("&Encrypt/decrypt message"), self.encrypt_message)
tools_menu.addSeparator()
paytomany_menu = tools_menu.addAction(_("&Pay to many"), self.paytomany)
raw_transaction_menu = tools_menu.addMenu(_("&Load transaction"))
raw_transaction_menu.addAction(_("&From file"), self.do_process_from_file)
raw_transaction_menu.addAction(_("&From text"), self.do_process_from_text)
raw_transaction_menu.addAction(_("&From the blockchain"), self.do_process_from_txid)
raw_transaction_menu.addAction(_("&From QR code"), self.read_tx_from_qrcode)
self.raw_transaction_menu = raw_transaction_menu
run_hook('init_menubar_tools', self, tools_menu)
help_menu = menubar.addMenu(_("&Help"))
help_menu.addAction(_("&About"), self.show_about)
help_menu.addAction(_("&Official website"), lambda: webbrowser.open("https://electrum.org"))
help_menu.addSeparator()
help_menu.addAction(_("&Documentation"), lambda: webbrowser.open("http://docs.electrum.org/")).setShortcut(QKeySequence.HelpContents)
help_menu.addAction(_("&Report Bug"), self.show_report_bug)
help_menu.addSeparator()
help_menu.addAction(_("&Donate to server"), self.donate_to_server)
self.setMenuBar(menubar)
def donate_to_server(self):
d = self.network.get_donation_address()
if d:
host = self.network.get_parameters()[0]
self.pay_to_URI('bitcoin:%s?message=donation for %s'%(d, host))
else:
self.show_error(_('No donation address for this server'))
def show_about(self):
QMessageBox.about(self, "Electrum",
_("Version")+" %s" % (self.wallet.electrum_version) + "\n\n" +
_("Electrum's focus is speed, with low resource usage and simplifying Bitcoin. You do not need to perform regular backups, because your wallet can be recovered from a secret phrase that you can memorize or write on paper. Startup times are instant because it operates in conjunction with high-performance servers that handle the most complicated parts of the Bitcoin system." + "\n\n" +
_("Uses icons from the Icons8 icon pack (icons8.com).")))
def show_report_bug(self):
msg = ' '.join([
_("Please report any bugs as issues on github:<br/>"),
"<a href=\"https://github.com/spesmilo/electrum/issues\">https://github.com/spesmilo/electrum/issues</a><br/><br/>",
_("Before reporting a bug, upgrade to the most recent version of Electrum (latest release or git HEAD), and include the version number in your report."),
_("Try to explain not only what the bug is, but how it occurs.")
])
self.show_message(msg, title="Electrum - " + _("Reporting Bugs"))
def notify_transactions(self):
if not self.network or not self.network.is_connected():
return
self.print_error("Notifying GUI")
if len(self.tx_notifications) > 0:
# Combine the transactions if there are at least three
num_txns = len(self.tx_notifications)
if num_txns >= 3:
total_amount = 0
for tx in self.tx_notifications:
is_relevant, is_mine, v, fee = self.wallet.get_wallet_delta(tx)
if v > 0:
total_amount += v
self.notify(_("{} new transactions received: Total amount received in the new transactions {}")
.format(num_txns, self.format_amount_and_units(total_amount)))
self.tx_notifications = []
            else:
                for tx in list(self.tx_notifications):  # iterate over a copy; we mutate the list
                    if tx:
                        self.tx_notifications.remove(tx)
                        is_relevant, is_mine, v, fee = self.wallet.get_wallet_delta(tx)
                        if v > 0:
                            self.notify(_("New transaction received: {}").format(self.format_amount_and_units(v)))
def notify(self, message):
if self.tray:
try:
# this requires Qt 5.9
self.tray.showMessage("Electrum", message, QIcon(":icons/electrum_dark_icon"), 20000)
except TypeError:
self.tray.showMessage("Electrum", message, QSystemTrayIcon.Information, 20000)
# custom wrappers for getOpenFileName and getSaveFileName, that remember the path selected by the user
def getOpenFileName(self, title, filter = ""):
directory = self.config.get('io_dir', os.path.expanduser('~'))
fileName, __ = QFileDialog.getOpenFileName(self, title, directory, filter)
if fileName and directory != os.path.dirname(fileName):
self.config.set_key('io_dir', os.path.dirname(fileName), True)
return fileName
def getSaveFileName(self, title, filename, filter = ""):
directory = self.config.get('io_dir', os.path.expanduser('~'))
path = os.path.join( directory, filename )
fileName, __ = QFileDialog.getSaveFileName(self, title, path, filter)
if fileName and directory != os.path.dirname(fileName):
self.config.set_key('io_dir', os.path.dirname(fileName), True)
return fileName
def connect_slots(self, sender):
sender.timer_signal.connect(self.timer_actions)
def timer_actions(self):
# Note this runs in the GUI thread
if self.need_update.is_set():
self.need_update.clear()
self.update_wallet()
# resolve aliases
# FIXME this is a blocking network call that has a timeout of 5 sec
self.payto_e.resolve()
# update fee
if self.require_fee_update:
self.do_update_fee()
self.require_fee_update = False
def format_amount(self, x, is_diff=False, whitespaces=False):
return format_satoshis(x, self.num_zeros, self.decimal_point, is_diff=is_diff, whitespaces=whitespaces)
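    # For intuition (assuming the default decimal_point of 5, i.e. mBTC
    # units, and num_zeros of 0): an amount of 150000 satoshis corresponds
    # to 1.5 mBTC and is rendered roughly as "1.5"; num_zeros only pads
    # extra trailing decimal places.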
def format_amount_and_units(self, amount):
text = self.format_amount(amount) + ' '+ self.base_unit()
x = self.fx.format_amount_and_units(amount) if self.fx else None
if text and x:
text += ' (%s)'%x
return text
def format_fee_rate(self, fee_rate):
return format_fee_satoshis(fee_rate/1000, self.num_zeros) + ' sat/byte'
def get_decimal_point(self):
return self.decimal_point
def base_unit(self):
return decimal_point_to_base_unit_name(self.decimal_point)
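    # decimal_point encodes the display unit: in Electrum, 8 corresponds to
    # BTC, 5 to mBTC and 2 to bits (see base_units imported from
    # electrum.util).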
def connect_fields(self, window, btc_e, fiat_e, fee_e):
def edit_changed(edit):
if edit.follows:
return
edit.setStyleSheet(ColorScheme.DEFAULT.as_stylesheet())
fiat_e.is_last_edited = (edit == fiat_e)
amount = edit.get_amount()
rate = self.fx.exchange_rate() if self.fx else Decimal('NaN')
if rate.is_nan() or amount is None:
if edit is fiat_e:
btc_e.setText("")
if fee_e:
fee_e.setText("")
else:
fiat_e.setText("")
else:
if edit is fiat_e:
btc_e.follows = True
btc_e.setAmount(int(amount / Decimal(rate) * COIN))
btc_e.setStyleSheet(ColorScheme.BLUE.as_stylesheet())
btc_e.follows = False
if fee_e:
window.update_fee()
else:
fiat_e.follows = True
fiat_e.setText(self.fx.ccy_amount_str(
amount * Decimal(rate) / COIN, False))
fiat_e.setStyleSheet(ColorScheme.BLUE.as_stylesheet())
fiat_e.follows = False
btc_e.follows = False
fiat_e.follows = False
fiat_e.textChanged.connect(partial(edit_changed, fiat_e))
btc_e.textChanged.connect(partial(edit_changed, btc_e))
fiat_e.is_last_edited = False
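    # Note on the `follows` flag: programmatically updating the twin field
    # emits textChanged, which would re-enter edit_changed and bounce the
    # value back; setting `follows` before the update suppresses that echo.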
def update_status(self):
if not self.wallet:
return
if self.network is None or not self.network.is_running():
text = _("Offline")
icon = QIcon(":icons/status_disconnected.png")
elif self.network.is_connected():
server_height = self.network.get_server_height()
server_lag = self.network.get_local_height() - server_height
# Server height can be 0 after switching to a new server
# until we get a headers subscription request response.
# Display the synchronizing message in that case.
if not self.wallet.up_to_date or server_height == 0:
text = _("Synchronizing...")
icon = QIcon(":icons/status_waiting.png")
elif server_lag > 1:
text = _("Server is lagging ({} blocks)").format(server_lag)
icon = QIcon(":icons/status_lagging.png")
else:
c, u, x = self.wallet.get_balance()
text = _("Balance" ) + ": %s "%(self.format_amount_and_units(c))
if u:
text += " [%s unconfirmed]"%(self.format_amount(u, is_diff=True).strip())
if x:
text += " [%s unmatured]"%(self.format_amount(x, is_diff=True).strip())
# append fiat balance and price
if self.fx.is_enabled():
text += self.fx.get_fiat_status_text(c + u + x,
self.base_unit(), self.get_decimal_point()) or ''
if not self.network.proxy:
icon = QIcon(":icons/status_connected.png")
else:
icon = QIcon(":icons/status_connected_proxy.png")
else:
if self.network.proxy:
text = "{} ({})".format(_("Not connected"), _("proxy enabled"))
else:
text = _("Not connected")
icon = QIcon(":icons/status_disconnected.png")
self.tray.setToolTip("%s (%s)" % (text, self.wallet.basename()))
self.balance_label.setText(text)
        self.status_button.setIcon(icon)
def update_wallet(self):
self.update_status()
if self.wallet.up_to_date or not self.network or not self.network.is_connected():
self.update_tabs()
def update_tabs(self):
self.history_list.update()
self.request_list.update()
self.address_list.update()
self.utxo_list.update()
self.contact_list.update()
self.invoice_list.update()
self.update_completions()
def create_history_tab(self):
from .history_list import HistoryList
self.history_list = l = HistoryList(self)
l.searchable_list = l
toolbar = l.create_toolbar(self.config)
toolbar_shown = self.config.get('show_toolbar_history', False)
l.show_toolbar(toolbar_shown)
return self.create_list_tab(l, toolbar)
def show_address(self, addr):
from . import address_dialog
d = address_dialog.AddressDialog(self, addr)
d.exec_()
def show_transaction(self, tx, tx_desc = None):
'''tx_desc is set only for txs created in the Send tab'''
show_transaction(tx, self, tx_desc)
def create_receive_tab(self):
# A 4-column grid layout. All the stretch is in the last column.
# The exchange rate plugin adds a fiat widget in column 2
self.receive_grid = grid = QGridLayout()
grid.setSpacing(8)
grid.setColumnStretch(3, 1)
self.receive_address_e = ButtonsLineEdit()
self.receive_address_e.addCopyButton(self.app)
self.receive_address_e.setReadOnly(True)
msg = _('Bitcoin address where the payment should be received. Note that each payment request uses a different Bitcoin address.')
self.receive_address_label = HelpLabel(_('Receiving address'), msg)
self.receive_address_e.textChanged.connect(self.update_receive_qr)
self.receive_address_e.setFocusPolicy(Qt.ClickFocus)
grid.addWidget(self.receive_address_label, 0, 0)
grid.addWidget(self.receive_address_e, 0, 1, 1, -1)
self.receive_message_e = QLineEdit()
grid.addWidget(QLabel(_('Description')), 1, 0)
grid.addWidget(self.receive_message_e, 1, 1, 1, -1)
self.receive_message_e.textChanged.connect(self.update_receive_qr)
self.receive_amount_e = BTCAmountEdit(self.get_decimal_point)
grid.addWidget(QLabel(_('Requested amount')), 2, 0)
grid.addWidget(self.receive_amount_e, 2, 1)
self.receive_amount_e.textChanged.connect(self.update_receive_qr)
self.fiat_receive_e = AmountEdit(self.fx.get_currency if self.fx else '')
if not self.fx or not self.fx.is_enabled():
self.fiat_receive_e.setVisible(False)
grid.addWidget(self.fiat_receive_e, 2, 2, Qt.AlignLeft)
self.connect_fields(self, self.receive_amount_e, self.fiat_receive_e, None)
self.expires_combo = QComboBox()
self.expires_combo.addItems([i[0] for i in expiration_values])
self.expires_combo.setCurrentIndex(3)
self.expires_combo.setFixedWidth(self.receive_amount_e.width())
msg = ' '.join([
_('Expiration date of your request.'),
_('This information is seen by the recipient if you send them a signed payment request.'),
_('Expired requests have to be deleted manually from your list, in order to free the corresponding Bitcoin addresses.'),
_('The bitcoin address never expires and will always be part of this electrum wallet.'),
])
grid.addWidget(HelpLabel(_('Request expires'), msg), 3, 0)
grid.addWidget(self.expires_combo, 3, 1)
self.expires_label = QLineEdit('')
        self.expires_label.setReadOnly(True)
self.expires_label.setFocusPolicy(Qt.NoFocus)
self.expires_label.hide()
grid.addWidget(self.expires_label, 3, 1)
self.save_request_button = QPushButton(_('Save'))
self.save_request_button.clicked.connect(self.save_payment_request)
self.new_request_button = QPushButton(_('New'))
self.new_request_button.clicked.connect(self.new_payment_request)
self.receive_qr = QRCodeWidget(fixedSize=200)
self.receive_qr.mouseReleaseEvent = lambda x: self.toggle_qr_window()
self.receive_qr.enterEvent = lambda x: self.app.setOverrideCursor(QCursor(Qt.PointingHandCursor))
self.receive_qr.leaveEvent = lambda x: self.app.setOverrideCursor(QCursor(Qt.ArrowCursor))
self.receive_buttons = buttons = QHBoxLayout()
buttons.addStretch(1)
buttons.addWidget(self.save_request_button)
buttons.addWidget(self.new_request_button)
grid.addLayout(buttons, 4, 1, 1, 2)
self.receive_requests_label = QLabel(_('Requests'))
from .request_list import RequestList
self.request_list = RequestList(self)
# layout
vbox_g = QVBoxLayout()
vbox_g.addLayout(grid)
vbox_g.addStretch()
hbox = QHBoxLayout()
hbox.addLayout(vbox_g)
hbox.addWidget(self.receive_qr)
w = QWidget()
w.searchable_list = self.request_list
vbox = QVBoxLayout(w)
vbox.addLayout(hbox)
vbox.addStretch(1)
vbox.addWidget(self.receive_requests_label)
vbox.addWidget(self.request_list)
vbox.setStretchFactor(self.request_list, 1000)
return w
def delete_payment_request(self, addr):
self.wallet.remove_payment_request(addr, self.config)
self.request_list.update()
self.clear_receive_tab()
def get_request_URI(self, addr):
req = self.wallet.receive_requests[addr]
message = self.wallet.labels.get(addr, '')
amount = req['amount']
URI = util.create_URI(addr, amount, message)
if req.get('time'):
URI += "&time=%d"%req.get('time')
if req.get('exp'):
URI += "&exp=%d"%req.get('exp')
if req.get('name') and req.get('sig'):
sig = bfh(req.get('sig'))
sig = bitcoin.base_encode(sig, base=58)
URI += "&name=" + req['name'] + "&sig="+sig
return str(URI)
def sign_payment_request(self, addr):
alias = self.config.get('alias')
alias_privkey = None
if alias and self.alias_info:
alias_addr, alias_name, validated = self.alias_info
if alias_addr:
if self.wallet.is_mine(alias_addr):
msg = _('This payment request will be signed.') + '\n' + _('Please enter your password')
password = None
if self.wallet.has_keystore_encryption():
password = self.password_dialog(msg)
if not password:
return
try:
self.wallet.sign_payment_request(addr, alias, alias_addr, password)
except Exception as e:
self.show_error(str(e))
return
else:
return
def save_payment_request(self):
addr = str(self.receive_address_e.text())
amount = self.receive_amount_e.get_amount()
message = self.receive_message_e.text()
if not message and not amount:
self.show_error(_('No message or amount'))
return False
i = self.expires_combo.currentIndex()
        expiration = expiration_values[i][1]
req = self.wallet.make_payment_request(addr, amount, message, expiration)
try:
self.wallet.add_payment_request(req, self.config)
except Exception as e:
traceback.print_exc(file=sys.stderr)
self.show_error(_('Error adding payment request') + ':\n' + str(e))
else:
self.sign_payment_request(addr)
self.save_request_button.setEnabled(False)
finally:
self.request_list.update()
self.address_list.update()
def view_and_paste(self, title, msg, data):
dialog = WindowModalDialog(self, title)
vbox = QVBoxLayout()
label = QLabel(msg)
label.setWordWrap(True)
vbox.addWidget(label)
pr_e = ShowQRTextEdit(text=data)
vbox.addWidget(pr_e)
vbox.addLayout(Buttons(CopyCloseButton(pr_e.text, self.app, dialog)))
dialog.setLayout(vbox)
dialog.exec_()
def export_payment_request(self, addr):
r = self.wallet.receive_requests.get(addr)
pr = paymentrequest.serialize_request(r).SerializeToString()
name = r['id'] + '.bip70'
fileName = self.getSaveFileName(_("Select where to save your payment request"), name, "*.bip70")
if fileName:
with open(fileName, "wb+") as f:
f.write(util.to_bytes(pr))
self.show_message(_("Request saved successfully"))
self.saved = True
def new_payment_request(self):
addr = self.wallet.get_unused_address()
if addr is None:
if not self.wallet.is_deterministic():
msg = [
_('No more addresses in your wallet.'),
_('You are using a non-deterministic wallet, which cannot create new addresses.'),
_('If you want to create new addresses, use a deterministic wallet instead.')
]
self.show_message(' '.join(msg))
return
if not self.question(_("Warning: The next address will not be recovered automatically if you restore your wallet from seed; you may need to add it manually.\n\nThis occurs because you have too many unused addresses in your wallet. To avoid this situation, use the existing addresses first.\n\nCreate anyway?")):
return
addr = self.wallet.create_new_address(False)
self.set_receive_address(addr)
self.expires_label.hide()
self.expires_combo.show()
self.new_request_button.setEnabled(False)
        self.receive_message_e.setFocus()
def set_receive_address(self, addr):
self.receive_address_e.setText(addr)
self.receive_message_e.setText('')
self.receive_amount_e.setAmount(None)
def clear_receive_tab(self):
addr = self.wallet.get_receiving_address() or ''
self.receive_address_e.setText(addr)
self.receive_message_e.setText('')
self.receive_amount_e.setAmount(None)
self.expires_label.hide()
self.expires_combo.show()
def toggle_qr_window(self):
from . import qrwindow
if not self.qr_window:
self.qr_window = qrwindow.QR_Window(self)
self.qr_window.setVisible(True)
self.qr_window_geometry = self.qr_window.geometry()
else:
if not self.qr_window.isVisible():
self.qr_window.setVisible(True)
self.qr_window.setGeometry(self.qr_window_geometry)
else:
self.qr_window_geometry = self.qr_window.geometry()
self.qr_window.setVisible(False)
self.update_receive_qr()
def show_send_tab(self):
self.tabs.setCurrentIndex(self.tabs.indexOf(self.send_tab))
def show_receive_tab(self):
self.tabs.setCurrentIndex(self.tabs.indexOf(self.receive_tab))
def receive_at(self, addr):
if not bitcoin.is_address(addr):
return
self.show_receive_tab()
self.receive_address_e.setText(addr)
self.new_request_button.setEnabled(True)
def update_receive_qr(self):
addr = str(self.receive_address_e.text())
amount = self.receive_amount_e.get_amount()
message = self.receive_message_e.text()
self.save_request_button.setEnabled((amount is not None) or (message != ""))
uri = util.create_URI(addr, amount, message)
self.receive_qr.setData(uri)
if self.qr_window and self.qr_window.isVisible():
self.qr_window.set_content(addr, amount, message, uri)
def set_feerounding_text(self, num_satoshis_added):
self.feerounding_text = (_('Additional {} satoshis are going to be added.')
.format(num_satoshis_added))
def create_send_tab(self):
# A 4-column grid layout. All the stretch is in the last column.
# The exchange rate plugin adds a fiat widget in column 2
self.send_grid = grid = QGridLayout()
grid.setSpacing(8)
grid.setColumnStretch(3, 1)
from .paytoedit import PayToEdit
self.amount_e = BTCAmountEdit(self.get_decimal_point)
self.payto_e = PayToEdit(self)
msg = _('Recipient of the funds.') + '\n\n'\
+ _('You may enter a Bitcoin address, a label from your list of contacts (a list of completions will be proposed), or an alias (email-like address that forwards to a Bitcoin address)')
payto_label = HelpLabel(_('Pay to'), msg)
grid.addWidget(payto_label, 1, 0)
grid.addWidget(self.payto_e, 1, 1, 1, -1)
completer = QCompleter()
        completer.setCaseSensitivity(Qt.CaseInsensitive)
self.payto_e.set_completer(completer)
completer.setModel(self.completions)
msg = _('Description of the transaction (not mandatory).') + '\n\n'\
+ _('The description is not sent to the recipient of the funds. It is stored in your wallet file, and displayed in the \'History\' tab.')
description_label = HelpLabel(_('Description'), msg)
grid.addWidget(description_label, 2, 0)
self.message_e = MyLineEdit()
grid.addWidget(self.message_e, 2, 1, 1, -1)
self.from_label = QLabel(_('From'))
grid.addWidget(self.from_label, 3, 0)
self.from_list = MyTreeWidget(self, self.from_list_menu, ['',''])
self.from_list.setHeaderHidden(True)
self.from_list.setMaximumHeight(80)
grid.addWidget(self.from_list, 3, 1, 1, -1)
self.set_pay_from([])
msg = _('Amount to be sent.') + '\n\n' \
+ _('The amount will be displayed in red if you do not have enough funds in your wallet.') + ' ' \
+ _('Note that if you have frozen some of your addresses, the available funds will be lower than your total balance.') + '\n\n' \
+ _('Keyboard shortcut: type "!" to send all your coins.')
amount_label = HelpLabel(_('Amount'), msg)
grid.addWidget(amount_label, 4, 0)
grid.addWidget(self.amount_e, 4, 1)
self.fiat_send_e = AmountEdit(self.fx.get_currency if self.fx else '')
if not self.fx or not self.fx.is_enabled():
self.fiat_send_e.setVisible(False)
grid.addWidget(self.fiat_send_e, 4, 2)
self.amount_e.frozen.connect(
lambda: self.fiat_send_e.setFrozen(self.amount_e.isReadOnly()))
self.max_button = EnterButton(_("Max"), self.spend_max)
self.max_button.setFixedWidth(140)
grid.addWidget(self.max_button, 4, 3)
hbox = QHBoxLayout()
hbox.addStretch(1)
grid.addLayout(hbox, 4, 4)
msg = _('Bitcoin transactions are in general not free. A transaction fee is paid by the sender of the funds.') + '\n\n'\
+ _('The amount of fee can be decided freely by the sender. However, transactions with low fees take more time to be processed.') + '\n\n'\
+ _('A suggested fee is automatically added to this field. You may override it. The suggested fee increases with the size of the transaction.')
self.fee_e_label = HelpLabel(_('Fee'), msg)
def fee_cb(dyn, pos, fee_rate):
if dyn:
if self.config.use_mempool_fees():
self.config.set_key('depth_level', pos, False)
else:
self.config.set_key('fee_level', pos, False)
else:
self.config.set_key('fee_per_kb', fee_rate, False)
if fee_rate:
fee_rate = Decimal(fee_rate)
self.feerate_e.setAmount(quantize_feerate(fee_rate / 1000))
else:
self.feerate_e.setAmount(None)
self.fee_e.setModified(False)
self.fee_slider.activate()
            if self.is_max:
                self.spend_max()
            else:
                self.update_fee()
self.fee_slider = FeeSlider(self, self.config, fee_cb)
self.fee_slider.setFixedWidth(140)
def on_fee_or_feerate(edit_changed, editing_finished):
edit_other = self.feerate_e if edit_changed == self.fee_e else self.fee_e
if editing_finished:
if not edit_changed.get_amount():
# This is so that when the user blanks the fee and moves on,
# we go back to auto-calculate mode and put a fee back.
edit_changed.setModified(False)
else:
# edit_changed was edited just now, so make sure we will
# freeze the correct fee setting (this)
edit_other.setModified(False)
self.fee_slider.deactivate()
self.update_fee()
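        # Net effect: editing fee_e fixes the absolute fee and lets the
        # feerate float; editing feerate_e does the reverse; blanking
        # either field returns control to automatic fee estimation.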
class TxSizeLabel(QLabel):
def setAmount(self, byte_size):
self.setText(('x %s bytes =' % byte_size) if byte_size else '')
self.size_e = TxSizeLabel()
self.size_e.setAlignment(Qt.AlignCenter)
self.size_e.setAmount(0)
self.size_e.setFixedWidth(140)
self.size_e.setStyleSheet(ColorScheme.DEFAULT.as_stylesheet())
self.feerate_e = FeerateEdit(lambda: 0)
self.feerate_e.setAmount(self.config.fee_per_byte())
self.feerate_e.textEdited.connect(partial(on_fee_or_feerate, self.feerate_e, False))
self.feerate_e.editingFinished.connect(partial(on_fee_or_feerate, self.feerate_e, True))
self.fee_e = BTCAmountEdit(self.get_decimal_point)
self.fee_e.textEdited.connect(partial(on_fee_or_feerate, self.fee_e, False))
self.fee_e.editingFinished.connect(partial(on_fee_or_feerate, self.fee_e, True))
def feerounding_onclick():
text = (self.feerounding_text + '\n\n' +
_('To somewhat protect your privacy, Electrum tries to create change with similar precision to other outputs.') + ' ' +
_('At most 100 satoshis might be lost due to this rounding.') + ' ' +
_("You can disable this setting in '{}'.").format(_('Preferences')) + '\n' +
_('Also, dust is not kept as change, but added to the fee.'))
QMessageBox.information(self, 'Fee rounding', text)
self.feerounding_icon = QPushButton(QIcon(':icons/info.png'), '')
self.feerounding_icon.setFixedWidth(20)
self.feerounding_icon.setFlat(True)
self.feerounding_icon.clicked.connect(feerounding_onclick)
self.feerounding_icon.setVisible(False)
self.connect_fields(self, self.amount_e, self.fiat_send_e, self.fee_e)
vbox_feelabel = QVBoxLayout()
vbox_feelabel.addWidget(self.fee_e_label)
vbox_feelabel.addStretch(1)
grid.addLayout(vbox_feelabel, 5, 0)
self.fee_adv_controls = QWidget()
hbox = QHBoxLayout(self.fee_adv_controls)
hbox.setContentsMargins(0, 0, 0, 0)
hbox.addWidget(self.feerate_e)
hbox.addWidget(self.size_e)
hbox.addWidget(self.fee_e)
hbox.addWidget(self.feerounding_icon, Qt.AlignLeft)
hbox.addStretch(1)
vbox_feecontrol = QVBoxLayout()
vbox_feecontrol.addWidget(self.fee_adv_controls)
vbox_feecontrol.addWidget(self.fee_slider)
grid.addLayout(vbox_feecontrol, 5, 1, 1, -1)
if not self.config.get('show_fee', False):
self.fee_adv_controls.setVisible(False)
self.preview_button = EnterButton(_("Preview"), self.do_preview)
self.preview_button.setToolTip(_('Display the details of your transaction before signing it.'))
self.send_button = EnterButton(_("Send"), self.do_send)
self.clear_button = EnterButton(_("Clear"), self.do_clear)
buttons = QHBoxLayout()
buttons.addStretch(1)
buttons.addWidget(self.clear_button)
buttons.addWidget(self.preview_button)
buttons.addWidget(self.send_button)
grid.addLayout(buttons, 6, 1, 1, 3)
self.amount_e.shortcut.connect(self.spend_max)
self.payto_e.textChanged.connect(self.update_fee)
self.amount_e.textEdited.connect(self.update_fee)
def reset_max(t):
self.is_max = False
self.max_button.setEnabled(not bool(t))
self.amount_e.textEdited.connect(reset_max)
self.fiat_send_e.textEdited.connect(reset_max)
def entry_changed():
text = ""
amt_color = ColorScheme.DEFAULT
fee_color = ColorScheme.DEFAULT
feerate_color = ColorScheme.DEFAULT
if self.not_enough_funds:
amt_color, fee_color = ColorScheme.RED, ColorScheme.RED
feerate_color = ColorScheme.RED
text = _( "Not enough funds" )
c, u, x = self.wallet.get_frozen_balance()
if c+u+x:
text += ' (' + self.format_amount(c+u+x).strip() + ' ' + self.base_unit() + ' ' +_("are frozen") + ')'
# blue color denotes auto-filled values
elif self.fee_e.isModified():
feerate_color = ColorScheme.BLUE
elif self.feerate_e.isModified():
fee_color = ColorScheme.BLUE
elif self.amount_e.isModified():
fee_color = ColorScheme.BLUE
feerate_color = ColorScheme.BLUE
else:
amt_color = ColorScheme.BLUE
fee_color = ColorScheme.BLUE
feerate_color = ColorScheme.BLUE
self.statusBar().showMessage(text)
self.amount_e.setStyleSheet(amt_color.as_stylesheet())
self.fee_e.setStyleSheet(fee_color.as_stylesheet())
self.feerate_e.setStyleSheet(feerate_color.as_stylesheet())
self.amount_e.textChanged.connect(entry_changed)
self.fee_e.textChanged.connect(entry_changed)
self.feerate_e.textChanged.connect(entry_changed)
self.invoices_label = QLabel(_('Invoices'))
from .invoice_list import InvoiceList
self.invoice_list = InvoiceList(self)
vbox0 = QVBoxLayout()
vbox0.addLayout(grid)
hbox = QHBoxLayout()
hbox.addLayout(vbox0)
w = QWidget()
vbox = QVBoxLayout(w)
vbox.addLayout(hbox)
vbox.addStretch(1)
vbox.addWidget(self.invoices_label)
vbox.addWidget(self.invoice_list)
vbox.setStretchFactor(self.invoice_list, 1000)
w.searchable_list = self.invoice_list
run_hook('create_send_tab', grid)
return w
def spend_max(self):
self.is_max = True
self.do_update_fee()
def update_fee(self):
self.require_fee_update = True
def get_payto_or_dummy(self):
r = self.payto_e.get_recipient()
if r:
return r
return (TYPE_ADDRESS, self.wallet.dummy_address())
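    # The dummy address exists so that do_update_fee() can build and size a
    # throwaway transaction for fee estimation before the user has entered
    # a real recipient.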
def do_update_fee(self):
'''Recalculate the fee. If the fee was manually input, retain it, but
still build the TX to see if there are enough funds.
'''
freeze_fee = self.is_send_fee_frozen()
freeze_feerate = self.is_send_feerate_frozen()
amount = '!' if self.is_max else self.amount_e.get_amount()
if amount is None:
if not freeze_fee:
self.fee_e.setAmount(None)
self.not_enough_funds = False
self.statusBar().showMessage('')
else:
fee_estimator = self.get_send_fee_estimator()
outputs = self.payto_e.get_outputs(self.is_max)
if not outputs:
_type, addr = self.get_payto_or_dummy()
outputs = [(_type, addr, amount)]
is_sweep = bool(self.tx_external_keypairs)
            def make_tx(fee_est):
                return self.wallet.make_unsigned_transaction(
                    self.get_coins(), outputs, self.config,
                    fixed_fee=fee_est, is_sweep=is_sweep)
try:
tx = make_tx(fee_estimator)
self.not_enough_funds = False
except (NotEnoughFunds, NoDynamicFeeEstimates) as e:
if not freeze_fee:
self.fee_e.setAmount(None)
if not freeze_feerate:
self.feerate_e.setAmount(None)
self.feerounding_icon.setVisible(False)
if isinstance(e, NotEnoughFunds):
self.not_enough_funds = True
elif isinstance(e, NoDynamicFeeEstimates):
try:
tx = make_tx(0)
size = tx.estimated_size()
self.size_e.setAmount(size)
except BaseException:
pass
return
except BaseException:
traceback.print_exc(file=sys.stderr)
return
size = tx.estimated_size()
self.size_e.setAmount(size)
fee = tx.get_fee()
fee = None if self.not_enough_funds else fee
# Displayed fee/fee_rate values are set according to user input.
# Due to rounding or dropping dust in CoinChooser,
# actual fees often differ somewhat.
if freeze_feerate or self.fee_slider.is_active():
displayed_feerate = self.feerate_e.get_amount()
if displayed_feerate:
displayed_feerate = quantize_feerate(displayed_feerate / 1000)
else:
# fallback to actual fee
displayed_feerate = quantize_feerate(fee / size) if fee is not None else None
self.feerate_e.setAmount(displayed_feerate)
displayed_fee = round(displayed_feerate * size) if displayed_feerate is not None else None
self.fee_e.setAmount(displayed_fee)
else:
if freeze_fee:
displayed_fee = self.fee_e.get_amount()
else:
# fallback to actual fee if nothing is frozen
displayed_fee = fee
self.fee_e.setAmount(displayed_fee)
displayed_fee = displayed_fee if displayed_fee else 0
displayed_feerate = quantize_feerate(displayed_fee / size) if displayed_fee is not None else None
self.feerate_e.setAmount(displayed_feerate)
# show/hide fee rounding icon
feerounding = (fee - displayed_fee) if fee else 0
self.set_feerounding_text(int(feerounding))
self.feerounding_icon.setToolTip(self.feerounding_text)
self.feerounding_icon.setVisible(abs(feerounding) >= 1)
if self.is_max:
amount = tx.output_value()
self.amount_e.setAmount(amount)
def from_list_delete(self, item):
i = self.from_list.indexOfTopLevelItem(item)
self.pay_from.pop(i)
self.redraw_from_list()
self.update_fee()
def from_list_menu(self, position):
item = self.from_list.itemAt(position)
menu = QMenu()
menu.addAction(_("Remove"), lambda: self.from_list_delete(item))
menu.exec_(self.from_list.viewport().mapToGlobal(position))
def set_pay_from(self, coins):
self.pay_from = list(coins)
self.redraw_from_list()
def redraw_from_list(self):
self.from_list.clear()
self.from_label.setHidden(len(self.pay_from) == 0)
self.from_list.setHidden(len(self.pay_from) == 0)
        def format(x):
            h = x.get('prevout_hash')
            return h[0:10] + '...' + h[-10:] + ":%d" % x.get('prevout_n') + '\t' + "%s" % x.get('address')
        for item in self.pay_from:
            self.from_list.addTopLevelItem(QTreeWidgetItem([format(item), self.format_amount(item['value'])]))
def get_contact_payto(self, key):
_type, label = self.contacts.get(key)
return label + ' <' + key + '>' if _type == 'address' else key
def update_completions(self):
l = [self.get_contact_payto(key) for key in self.contacts.keys()]
self.completions.setStringList(l)
def protected(func):
'''Password request wrapper. The password is passed to the function
as the 'password' named argument. "None" indicates either an
unencrypted wallet, or the user cancelled the password request.
An empty input is passed as the empty string.'''
def request_password(self, *args, **kwargs):
parent = self.top_level_window()
password = None
while self.wallet.has_keystore_encryption():
password = self.password_dialog(parent=parent)
if password is None:
# User cancelled password input
return
try:
self.wallet.check_password(password)
break
except Exception as e:
self.show_error(str(e), parent=parent)
continue
kwargs['password'] = password
return func(self, *args, **kwargs)
return request_password
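    # Usage sketch: a method decorated with @protected receives the vetted
    # password through its `password` keyword argument, as sign_tx below
    # does:
    #
    #     @protected
    #     def sign_tx(self, tx, callback, password):
    #         self.sign_tx_with_password(tx, callback, password)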
def is_send_fee_frozen(self):
return self.fee_e.isVisible() and self.fee_e.isModified() \
and (self.fee_e.text() or self.fee_e.hasFocus())
def is_send_feerate_frozen(self):
return self.feerate_e.isVisible() and self.feerate_e.isModified() \
and (self.feerate_e.text() or self.feerate_e.hasFocus())
def get_send_fee_estimator(self):
if self.is_send_fee_frozen():
fee_estimator = self.fee_e.get_amount()
elif self.is_send_feerate_frozen():
amount = self.feerate_e.get_amount()
amount = 0 if amount is None else amount
fee_estimator = partial(
simple_config.SimpleConfig.estimate_fee_for_feerate, amount)
else:
fee_estimator = None
return fee_estimator
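    # The returned estimator is polymorphic: an int (absolute fee in
    # satoshis) when the fee field is frozen, a callable mapping a
    # transaction size to a fee when the feerate is frozen, or None to
    # defer entirely to the config's estimates.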
def read_send_tab(self):
if self.payment_request and self.payment_request.has_expired():
self.show_error(_('Payment request has expired'))
return
label = self.message_e.text()
if self.payment_request:
outputs = self.payment_request.get_outputs()
else:
errors = self.payto_e.get_errors()
if errors:
self.show_warning(_("Invalid Lines found:") + "\n\n" + '\n'.join([ _("Line #") + str(x[0]+1) + ": " + x[1] for x in errors]))
return
outputs = self.payto_e.get_outputs(self.is_max)
if self.payto_e.is_alias and self.payto_e.validated is False:
alias = self.payto_e.toPlainText()
msg = _('WARNING: the alias "{}" could not be validated via an additional '
'security check, DNSSEC, and thus may not be correct.').format(alias) + '\n'
msg += _('Do you wish to continue?')
if not self.question(msg):
return
if not outputs:
self.show_error(_('No outputs'))
return
for _type, addr, amount in outputs:
if addr is None:
self.show_error(_('Bitcoin Address is None'))
return
if _type == TYPE_ADDRESS and not bitcoin.is_address(addr):
self.show_error(_('Invalid Bitcoin Address'))
return
if amount is None:
self.show_error(_('Invalid Amount'))
return
fee_estimator = self.get_send_fee_estimator()
coins = self.get_coins()
return outputs, fee_estimator, label, coins
def do_preview(self):
self.do_send(preview = True)
def do_send(self, preview = False):
if run_hook('abort_send', self):
return
r = self.read_send_tab()
if not r:
return
outputs, fee_estimator, tx_desc, coins = r
try:
is_sweep = bool(self.tx_external_keypairs)
tx = self.wallet.make_unsigned_transaction(
coins, outputs, self.config, fixed_fee=fee_estimator,
is_sweep=is_sweep)
except NotEnoughFunds:
self.show_message(_("Insufficient funds"))
return
except BaseException as e:
traceback.print_exc(file=sys.stdout)
self.show_message(str(e))
return
amount = tx.output_value() if self.is_max else sum(map(lambda x:x[2], outputs))
fee = tx.get_fee()
use_rbf = self.config.get('use_rbf', True)
if use_rbf:
tx.set_rbf(True)
if fee < self.wallet.relayfee() * tx.estimated_size() / 1000:
self.show_error('\n'.join([
_("This transaction requires a higher fee, or it will not be propagated by your current server"),
_("Try to raise your transaction fee, or use a server with a lower relay fee.")
]))
return
if preview:
self.show_transaction(tx, tx_desc)
return
if not self.network:
self.show_error(_("You can't broadcast a transaction without a live network connection."))
return
# confirmation dialog
msg = [
_("Amount to be sent") + ": " + self.format_amount_and_units(amount),
_("Mining fee") + ": " + self.format_amount_and_units(fee),
]
x_fee = run_hook('get_tx_extra_fee', self.wallet, tx)
if x_fee:
x_fee_address, x_fee_amount = x_fee
msg.append( _("Additional fees") + ": " + self.format_amount_and_units(x_fee_amount) )
confirm_rate = simple_config.FEERATE_WARNING_HIGH_FEE
if fee > confirm_rate * tx.estimated_size() / 1000:
msg.append(_('Warning') + ': ' + _("The fee for this transaction seems unusually high."))
if self.wallet.has_keystore_encryption():
msg.append("")
msg.append(_("Enter your password to proceed"))
password = self.password_dialog('\n'.join(msg))
if not password:
return
else:
msg.append(_('Proceed?'))
password = None
if not self.question('\n'.join(msg)):
return
def sign_done(success):
if success:
if not tx.is_complete():
self.show_transaction(tx)
self.do_clear()
else:
self.broadcast_transaction(tx, tx_desc)
self.sign_tx_with_password(tx, sign_done, password)
@protected
def sign_tx(self, tx, callback, password):
self.sign_tx_with_password(tx, callback, password)
def sign_tx_with_password(self, tx, callback, password):
'''Sign the transaction in a separate thread. When done, calls
the callback with a success code of True or False.
'''
def on_signed(result):
callback(True)
def on_failed(exc_info):
self.on_error(exc_info)
callback(False)
if self.tx_external_keypairs:
# can sign directly
task = partial(Transaction.sign, tx, self.tx_external_keypairs)
else:
task = partial(self.wallet.sign_transaction, tx, password)
WaitingDialog(self, _('Signing transaction...'), task,
on_signed, on_failed)
def broadcast_transaction(self, tx, tx_desc):
def broadcast_thread():
# non-GUI thread
pr = self.payment_request
if pr and pr.has_expired():
self.payment_request = None
return False, _("Payment request has expired")
status, msg = self.network.broadcast(tx)
if pr and status is True:
self.invoices.set_paid(pr, tx.txid())
self.invoices.save()
self.payment_request = None
refund_address = self.wallet.get_receiving_addresses()[0]
ack_status, ack_msg = pr.send_ack(str(tx), refund_address)
if ack_status:
msg = ack_msg
return status, msg
# Capture current TL window; override might be removed on return
parent = self.top_level_window(lambda win: isinstance(win, MessageBoxMixin))
def broadcast_done(result):
# GUI thread
if result:
status, msg = result
if status:
if tx_desc is not None and tx.is_complete():
self.wallet.set_label(tx.txid(), tx_desc)
parent.show_message(_('Payment sent.') + '\n' + msg)
self.invoice_list.update()
self.do_clear()
else:
parent.show_error(msg)
WaitingDialog(self, _('Broadcasting transaction...'),
broadcast_thread, broadcast_done, self.on_error)
def query_choice(self, msg, choices):
# Needed by QtHandler for hardware wallets
dialog = WindowModalDialog(self.top_level_window())
clayout = ChoicesLayout(msg, choices)
vbox = QVBoxLayout(dialog)
vbox.addLayout(clayout.layout())
vbox.addLayout(Buttons(OkButton(dialog)))
if not dialog.exec_():
return None
return clayout.selected_index()
def lock_amount(self, b):
self.amount_e.setFrozen(b)
self.max_button.setEnabled(not b)
def prepare_for_payment_request(self):
self.show_send_tab()
self.payto_e.is_pr = True
for e in [self.payto_e, self.amount_e, self.message_e]:
e.setFrozen(True)
self.payto_e.setText(_("please wait..."))
return True
def delete_invoice(self, key):
self.invoices.remove(key)
self.invoice_list.update()
def payment_request_ok(self):
pr = self.payment_request
key = self.invoices.add(pr)
status = self.invoices.get_status(key)
self.invoice_list.update()
if status == PR_PAID:
            self.show_message(_('Invoice already paid'))
self.do_clear()
self.payment_request = None
return
self.payto_e.is_pr = True
if not pr.has_expired():
self.payto_e.setGreen()
else:
self.payto_e.setExpired()
self.payto_e.setText(pr.get_requestor())
self.amount_e.setText(format_satoshis_plain(pr.get_amount(), self.decimal_point))
self.message_e.setText(pr.get_memo())
# signal to set fee
self.amount_e.textEdited.emit("")
def payment_request_error(self):
self.show_message(self.payment_request.error)
self.payment_request = None
self.do_clear()
def on_pr(self, request):
self.payment_request = request
if self.payment_request.verify(self.contacts):
self.payment_request_ok_signal.emit()
else:
self.payment_request_error_signal.emit()
def pay_to_URI(self, URI):
if not URI:
return
try:
out = util.parse_URI(URI, self.on_pr)
except BaseException as e:
self.show_error(_('Invalid bitcoin URI:') + '\n' + str(e))
return
self.show_send_tab()
r = out.get('r')
sig = out.get('sig')
name = out.get('name')
if r or (name and sig):
self.prepare_for_payment_request()
return
address = out.get('address')
amount = out.get('amount')
label = out.get('label')
message = out.get('message')
# use label as description (not BIP21 compliant)
if label and not message:
message = label
if address:
self.payto_e.setText(address)
if message:
self.message_e.setText(message)
if amount:
self.amount_e.setAmount(amount)
self.amount_e.textEdited.emit("")
def do_clear(self):
self.is_max = False
self.not_enough_funds = False
self.payment_request = None
self.payto_e.is_pr = False
for e in [self.payto_e, self.message_e, self.amount_e, self.fiat_send_e,
self.fee_e, self.feerate_e]:
e.setText('')
e.setFrozen(False)
self.fee_slider.activate()
self.feerate_e.setAmount(self.config.fee_per_byte())
self.size_e.setAmount(0)
self.feerounding_icon.setVisible(False)
self.set_pay_from([])
self.tx_external_keypairs = {}
self.update_status()
run_hook('do_clear', self)
def set_frozen_state(self, addrs, freeze):
self.wallet.set_frozen_state(addrs, freeze)
self.address_list.update()
self.utxo_list.update()
self.update_fee()
def create_list_tab(self, l, toolbar=None):
w = QWidget()
w.searchable_list = l
vbox = QVBoxLayout()
w.setLayout(vbox)
vbox.setContentsMargins(0, 0, 0, 0)
vbox.setSpacing(0)
if toolbar:
vbox.addLayout(toolbar)
vbox.addWidget(l)
return w
def create_addresses_tab(self):
from .address_list import AddressList
self.address_list = l = AddressList(self)
toolbar = l.create_toolbar(self.config)
toolbar_shown = self.config.get('show_toolbar_addresses', False)
l.show_toolbar(toolbar_shown)
return self.create_list_tab(l, toolbar)
def create_utxo_tab(self):
from .utxo_list import UTXOList
self.utxo_list = l = UTXOList(self)
return self.create_list_tab(l)
def create_contacts_tab(self):
from .contact_list import ContactList
self.contact_list = l = ContactList(self)
return self.create_list_tab(l)
def remove_address(self, addr):
if self.question(_("Do you want to remove {} from your wallet?").format(addr)):
self.wallet.delete_address(addr)
self.need_update.set() # history, addresses, coins
self.clear_receive_tab()
def get_coins(self):
if self.pay_from:
return self.pay_from
else:
return self.wallet.get_spendable_coins(None, self.config)
def spend_coins(self, coins):
self.set_pay_from(coins)
self.show_send_tab()
self.update_fee()
def paytomany(self):
self.show_send_tab()
self.payto_e.paytomany()
msg = '\n'.join([
_('Enter a list of outputs in the \'Pay to\' field.'),
_('One output per line.'),
_('Format: address, amount'),
_('You may load a CSV file using the file icon.')
])
self.show_message(msg, title=_('Pay to many'))
def payto_contacts(self, labels):
paytos = [self.get_contact_payto(label) for label in labels]
self.show_send_tab()
if len(paytos) == 1:
self.payto_e.setText(paytos[0])
self.amount_e.setFocus()
else:
text = "\n".join([payto + ", 0" for payto in paytos])
self.payto_e.setText(text)
self.payto_e.setFocus()
def set_contact(self, label, address):
if not is_address(address):
self.show_error(_('Invalid Address'))
self.contact_list.update() # Displays original unchanged value
return False
self.contacts[address] = ('address', label)
self.contact_list.update()
self.history_list.update()
self.update_completions()
return True
def delete_contacts(self, labels):
if not self.question(_("Remove {} from your list of contacts?")
.format(" + ".join(labels))):
return
for label in labels:
self.contacts.pop(label)
self.history_list.update()
self.contact_list.update()
self.update_completions()
def show_invoice(self, key):
pr = self.invoices.get(key)
if pr is None:
            self.show_error(_('Cannot find payment request in wallet.'))
return
pr.verify(self.contacts)
self.show_pr_details(pr)
def show_pr_details(self, pr):
key = pr.get_id()
d = WindowModalDialog(self, _("Invoice"))
vbox = QVBoxLayout(d)
grid = QGridLayout()
grid.addWidget(QLabel(_("Requestor") + ':'), 0, 0)
grid.addWidget(QLabel(pr.get_requestor()), 0, 1)
grid.addWidget(QLabel(_("Amount") + ':'), 1, 0)
outputs_str = '\n'.join(map(lambda x: self.format_amount(x[2])+ self.base_unit() + ' @ ' + x[1], pr.get_outputs()))
grid.addWidget(QLabel(outputs_str), 1, 1)
expires = pr.get_expiration_date()
grid.addWidget(QLabel(_("Memo") + ':'), 2, 0)
grid.addWidget(QLabel(pr.get_memo()), 2, 1)
grid.addWidget(QLabel(_("Signature") + ':'), 3, 0)
grid.addWidget(QLabel(pr.get_verify_status()), 3, 1)
if expires:
grid.addWidget(QLabel(_("Expires") + ':'), 4, 0)
grid.addWidget(QLabel(format_time(expires)), 4, 1)
vbox.addLayout(grid)
def do_export():
fn = self.getSaveFileName(_("Save invoice to file"), "*.bip70")
if not fn:
return
with open(fn, 'wb') as f:
                f.write(pr.raw)
            self.show_message(_('Invoice saved as') + ' ' + fn)
exportButton = EnterButton(_('Save'), do_export)
def do_delete():
if self.question(_('Delete invoice?')):
self.invoices.remove(key)
self.history_list.update()
self.invoice_list.update()
d.close()
deleteButton = EnterButton(_('Delete'), do_delete)
vbox.addLayout(Buttons(exportButton, deleteButton, CloseButton(d)))
d.exec_()
def do_pay_invoice(self, key):
pr = self.invoices.get(key)
self.payment_request = pr
self.prepare_for_payment_request()
pr.error = None # this forces verify() to re-run
if pr.verify(self.contacts):
self.payment_request_ok()
else:
self.payment_request_error()
def create_console_tab(self):
from .console import Console
self.console = console = Console()
return console
def update_console(self):
console = self.console
console.history = self.config.get("console-history",[])
console.history_index = len(console.history)
console.updateNamespace({'wallet' : self.wallet,
'network' : self.network,
'plugins' : self.gui_object.plugins,
'window': self})
console.updateNamespace({'util' : util, 'bitcoin':bitcoin})
c = commands.Commands(self.config, self.wallet, self.network, lambda: self.console.set_json(True))
methods = {}
def mkfunc(f, method):
return lambda *args: f(method, args, self.password_dialog)
for m in dir(c):
if m[0]=='_' or m in ['network','wallet']: continue
methods[m] = mkfunc(c._run, m)
console.updateNamespace(methods)
def create_status_bar(self):
sb = QStatusBar()
sb.setFixedHeight(35)
qtVersion = qVersion()
self.balance_label = QLabel("")
self.balance_label.setTextInteractionFlags(Qt.TextSelectableByMouse)
self.balance_label.setStyleSheet("""QLabel { padding: 0 }""")
sb.addWidget(self.balance_label)
self.search_box = QLineEdit()
self.search_box.textChanged.connect(self.do_search)
self.search_box.hide()
sb.addPermanentWidget(self.search_box)
self.lock_icon = QIcon()
self.password_button = StatusBarButton(self.lock_icon, _("Password"), self.change_password_dialog )
sb.addPermanentWidget(self.password_button)
sb.addPermanentWidget(StatusBarButton(QIcon(":icons/preferences.png"), _("Preferences"), self.settings_dialog ) )
self.seed_button = StatusBarButton(QIcon(":icons/seed.png"), _("Seed"), self.show_seed_dialog )
sb.addPermanentWidget(self.seed_button)
self.status_button = StatusBarButton(QIcon(":icons/status_disconnected.png"), _("Network"), lambda: self.gui_object.show_network_dialog(self))
sb.addPermanentWidget(self.status_button)
run_hook('create_status_bar', sb)
self.setStatusBar(sb)
def update_lock_icon(self):
icon = QIcon(":icons/lock.png") if self.wallet.has_password() else QIcon(":icons/unlock.png")
self.password_button.setIcon(icon)
def update_buttons_on_seed(self):
self.seed_button.setVisible(self.wallet.has_seed())
self.password_button.setVisible(self.wallet.may_have_password())
self.send_button.setVisible(not self.wallet.is_watching_only())
def change_password_dialog(self):
from electrum.storage import STO_EV_XPUB_PW
if self.wallet.get_available_storage_encryption_version() == STO_EV_XPUB_PW:
from .password_dialog import ChangePasswordDialogForHW
d = ChangePasswordDialogForHW(self, self.wallet)
ok, encrypt_file = d.run()
if not ok:
return
try:
hw_dev_pw = self.wallet.keystore.get_password_for_storage_encryption()
except UserCancelled:
return
except BaseException as e:
traceback.print_exc(file=sys.stderr)
self.show_error(str(e))
return
old_password = hw_dev_pw if self.wallet.has_password() else None
new_password = hw_dev_pw if encrypt_file else None
else:
from .password_dialog import ChangePasswordDialogForSW
d = ChangePasswordDialogForSW(self, self.wallet)
ok, old_password, new_password, encrypt_file = d.run()
if not ok:
return
try:
self.wallet.update_password(old_password, new_password, encrypt_file)
except InvalidPassword as e:
self.show_error(str(e))
return
except BaseException:
traceback.print_exc(file=sys.stdout)
self.show_error(_('Failed to update password'))
return
msg = _('Password was updated successfully') if self.wallet.has_password() else _('Password is disabled, this wallet is not protected')
self.show_message(msg, title=_("Success"))
self.update_lock_icon()
def toggle_search(self):
tab = self.tabs.currentWidget()
#if hasattr(tab, 'searchable_list'):
# tab.searchable_list.toggle_toolbar()
#return
self.search_box.setHidden(not self.search_box.isHidden())
if not self.search_box.isHidden():
self.search_box.setFocus(1)
else:
self.do_search('')
def do_search(self, t):
tab = self.tabs.currentWidget()
if hasattr(tab, 'searchable_list'):
tab.searchable_list.filter(t)
def new_contact_dialog(self):
d = WindowModalDialog(self, _("New Contact"))
vbox = QVBoxLayout(d)
vbox.addWidget(QLabel(_('New Contact') + ':'))
grid = QGridLayout()
line1 = QLineEdit()
line1.setFixedWidth(280)
line2 = QLineEdit()
line2.setFixedWidth(280)
grid.addWidget(QLabel(_("Address")), 1, 0)
grid.addWidget(line1, 1, 1)
grid.addWidget(QLabel(_("Name")), 2, 0)
grid.addWidget(line2, 2, 1)
vbox.addLayout(grid)
vbox.addLayout(Buttons(CancelButton(d), OkButton(d)))
if d.exec_():
self.set_contact(line2.text(), line1.text())
def show_master_public_keys(self):
dialog = WindowModalDialog(self, _("Wallet Information"))
dialog.setMinimumSize(500, 100)
mpk_list = self.wallet.get_master_public_keys()
vbox = QVBoxLayout()
wallet_type = self.wallet.storage.get('wallet_type', '')
grid = QGridLayout()
basename = os.path.basename(self.wallet.storage.path)
grid.addWidget(QLabel(_("Wallet name")+ ':'), 0, 0)
grid.addWidget(QLabel(basename), 0, 1)
grid.addWidget(QLabel(_("Wallet type")+ ':'), 1, 0)
grid.addWidget(QLabel(wallet_type), 1, 1)
grid.addWidget(QLabel(_("Script type")+ ':'), 2, 0)
grid.addWidget(QLabel(self.wallet.txin_type), 2, 1)
vbox.addLayout(grid)
if self.wallet.is_deterministic():
mpk_text = ShowQRTextEdit()
mpk_text.setMaximumHeight(150)
mpk_text.addCopyButton(self.app)
def show_mpk(index):
mpk_text.setText(mpk_list[index])
# only show the combobox in case multiple accounts are available
if len(mpk_list) > 1:
def label(key):
if isinstance(self.wallet, Multisig_Wallet):
return _("cosigner") + ' ' + str(key+1)
return ''
labels = [label(i) for i in range(len(mpk_list))]
on_click = lambda clayout: show_mpk(clayout.selected_index())
labels_clayout = ChoicesLayout(_("Master Public Keys"), labels, on_click)
vbox.addLayout(labels_clayout.layout())
else:
vbox.addWidget(QLabel(_("Master Public Key")))
show_mpk(0)
vbox.addWidget(mpk_text)
vbox.addStretch(1)
vbox.addLayout(Buttons(CloseButton(dialog)))
dialog.setLayout(vbox)
dialog.exec_()
def remove_wallet(self):
if self.question('\n'.join([
_('Delete wallet file?'),
"%s"%self.wallet.storage.path,
_('If your wallet contains funds, make sure you have saved its seed.')])):
self._delete_wallet()
@protected
def _delete_wallet(self, password):
wallet_path = self.wallet.storage.path
basename = os.path.basename(wallet_path)
self.gui_object.daemon.stop_wallet(wallet_path)
self.close()
os.unlink(wallet_path)
self.show_error(_("Wallet removed: {}").format(basename))
@protected
def show_seed_dialog(self, password):
if not self.wallet.has_seed():
self.show_message(_('This wallet has no seed'))
return
keystore = self.wallet.get_keystore()
try:
seed = keystore.get_seed(password)
passphrase = keystore.get_passphrase(password)
except BaseException as e:
self.show_error(str(e))
return
from .seed_dialog import SeedDialog
d = SeedDialog(self, seed, passphrase)
d.exec_()
def show_qrcode(self, data, title = _("QR code"), parent=None):
if not data:
return
d = QRDialog(data, parent or self, title)
d.exec_()
@protected
def show_private_key(self, address, password):
if not address:
return
try:
pk, redeem_script = self.wallet.export_private_key(address, password)
except Exception as e:
traceback.print_exc(file=sys.stdout)
self.show_message(str(e))
return
xtype = bitcoin.deserialize_privkey(pk)[0]
d = WindowModalDialog(self, _("Private key"))
d.setMinimumSize(600, 150)
vbox = QVBoxLayout()
vbox.addWidget(QLabel(_("Address") + ': ' + address))
vbox.addWidget(QLabel(_("Script type") + ': ' + xtype))
vbox.addWidget(QLabel(_("Private key") + ':'))
keys_e = ShowQRTextEdit(text=pk)
keys_e.addCopyButton(self.app)
vbox.addWidget(keys_e)
if redeem_script:
vbox.addWidget(QLabel(_("Redeem Script") + ':'))
rds_e = ShowQRTextEdit(text=redeem_script)
rds_e.addCopyButton(self.app)
vbox.addWidget(rds_e)
vbox.addLayout(Buttons(CloseButton(d)))
d.setLayout(vbox)
d.exec_()
msg_sign = _("Signing with an address actually means signing with the corresponding "
"private key, and verifying with the corresponding public key. The "
"address you have entered does not have a unique public key, so these "
"operations cannot be performed.") + '\n\n' + \
_('The operation is undefined. Not just in Electrum, but in general.')
@protected
def do_sign(self, address, message, signature, password):
address = address.text().strip()
message = message.toPlainText().strip()
if not bitcoin.is_address(address):
self.show_message(_('Invalid Bitcoin address.'))
return
if self.wallet.is_watching_only():
self.show_message(_('This is a watching-only wallet.'))
return
if not self.wallet.is_mine(address):
self.show_message(_('Address not in wallet.'))
return
txin_type = self.wallet.get_txin_type(address)
if txin_type not in ['p2pkh', 'p2wpkh', 'p2wpkh-p2sh']:
self.show_message(_('Cannot sign messages with this type of address:') + \
' ' + txin_type + '\n\n' + self.msg_sign)
return
task = partial(self.wallet.sign_message, address, message, password)
def show_signed_message(sig):
try:
signature.setText(base64.b64encode(sig).decode('ascii'))
except RuntimeError:
# (signature) wrapped C/C++ object has been deleted
pass
self.wallet.thread.add(task, on_success=show_signed_message)
def do_verify(self, address, message, signature):
address = address.text().strip()
message = message.toPlainText().strip().encode('utf-8')
if not bitcoin.is_address(address):
self.show_message(_('Invalid Bitcoin address.'))
return
try:
# This can throw on invalid base64
sig = base64.b64decode(str(signature.toPlainText()))
verified = ecc.verify_message_with_address(address, sig, message)
except Exception as e:
verified = False
if verified:
self.show_message(_("Signature verified"))
else:
self.show_error(_("Wrong signature"))
def sign_verify_message(self, address=''):
d = WindowModalDialog(self, _('Sign/verify Message'))
d.setMinimumSize(610, 290)
layout = QGridLayout(d)
message_e = QTextEdit()
layout.addWidget(QLabel(_('Message')), 1, 0)
layout.addWidget(message_e, 1, 1)
layout.setRowStretch(2,3)
address_e = QLineEdit()
address_e.setText(address)
layout.addWidget(QLabel(_('Address')), 2, 0)
layout.addWidget(address_e, 2, 1)
signature_e = QTextEdit()
layout.addWidget(QLabel(_('Signature')), 3, 0)
layout.addWidget(signature_e, 3, 1)
layout.setRowStretch(3,1)
hbox = QHBoxLayout()
b = QPushButton(_("Sign"))
b.clicked.connect(lambda: self.do_sign(address_e, message_e, signature_e))
hbox.addWidget(b)
b = QPushButton(_("Verify"))
b.clicked.connect(lambda: self.do_verify(address_e, message_e, signature_e))
hbox.addWidget(b)
b = QPushButton(_("Close"))
b.clicked.connect(d.accept)
hbox.addWidget(b)
layout.addLayout(hbox, 4, 1)
d.exec_()
@protected
def do_decrypt(self, message_e, pubkey_e, encrypted_e, password):
if self.wallet.is_watching_only():
self.show_message(_('This is a watching-only wallet.'))
return
cyphertext = encrypted_e.toPlainText()
task = partial(self.wallet.decrypt_message, pubkey_e.text(), cyphertext, password)
def setText(text):
try:
message_e.setText(text.decode('utf-8'))
except RuntimeError:
# (message_e) wrapped C/C++ object has been deleted
pass
self.wallet.thread.add(task, on_success=setText)
def do_encrypt(self, message_e, pubkey_e, encrypted_e):
message = message_e.toPlainText()
message = message.encode('utf-8')
try:
public_key = ecc.ECPubkey(bfh(pubkey_e.text()))
encrypted = public_key.encrypt_message(message)
encrypted_e.setText(encrypted.decode('ascii'))
except BaseException as e:
traceback.print_exc(file=sys.stdout)
self.show_warning(str(e))
def encrypt_message(self, address=''):
d = WindowModalDialog(self, _('Encrypt/decrypt Message'))
d.setMinimumSize(610, 490)
layout = QGridLayout(d)
message_e = QTextEdit()
layout.addWidget(QLabel(_('Message')), 1, 0)
layout.addWidget(message_e, 1, 1)
layout.setRowStretch(2,3)
pubkey_e = QLineEdit()
if address:
pubkey = self.wallet.get_public_key(address)
pubkey_e.setText(pubkey)
layout.addWidget(QLabel(_('Public key')), 2, 0)
layout.addWidget(pubkey_e, 2, 1)
encrypted_e = QTextEdit()
layout.addWidget(QLabel(_('Encrypted')), 3, 0)
layout.addWidget(encrypted_e, 3, 1)
layout.setRowStretch(3,1)
hbox = QHBoxLayout()
b = QPushButton(_("Encrypt"))
b.clicked.connect(lambda: self.do_encrypt(message_e, pubkey_e, encrypted_e))
hbox.addWidget(b)
b = QPushButton(_("Decrypt"))
b.clicked.connect(lambda: self.do_decrypt(message_e, pubkey_e, encrypted_e))
hbox.addWidget(b)
b = QPushButton(_("Close"))
b.clicked.connect(d.accept)
hbox.addWidget(b)
layout.addLayout(hbox, 4, 1)
d.exec_()
def password_dialog(self, msg=None, parent=None):
from .password_dialog import PasswordDialog
parent = parent or self
d = PasswordDialog(parent, msg)
return d.run()
def tx_from_text(self, txt):
from electrum.transaction import tx_from_str
try:
tx = tx_from_str(txt)
return Transaction(tx)
except BaseException as e:
self.show_critical(_("Electrum was unable to parse your transaction") + ":\n" + str(e))
return
def read_tx_from_qrcode(self):
from electrum import qrscanner
try:
data = qrscanner.scan_barcode(self.config.get_video_device())
except BaseException as e:
self.show_error(str(e))
return
if not data:
return
# if the user scanned a bitcoin URI
if str(data).startswith("bitcoin:"):
self.pay_to_URI(data)
return
# else if the user scanned an offline signed tx
try:
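            # raw transactions are carried in base43 (compact in QR
            # alphanumeric mode); decode back to hex before parsing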
data = bh2u(bitcoin.base_decode(data, length=None, base=43))
except BaseException as e:
self.show_error((_('Could not decode QR code')+':\n{}').format(e))
return
tx = self.tx_from_text(data)
if not tx:
return
self.show_transaction(tx)
def read_tx_from_file(self):
fileName = self.getOpenFileName(_("Select your transaction file"), "*.txn")
if not fileName:
return
try:
with open(fileName, "r") as f:
file_content = f.read()
except (ValueError, IOError, os.error) as reason:
self.show_critical(_("Electrum was unable to open your transaction file") + "\n" + str(reason), title=_("Unable to read file or no transaction found"))
return
return self.tx_from_text(file_content)
def do_process_from_text(self):
text = text_dialog(self, _('Input raw transaction'), _("Transaction:"), _("Load transaction"))
if not text:
return
tx = self.tx_from_text(text)
if tx:
self.show_transaction(tx)
def do_process_from_file(self):
tx = self.read_tx_from_file()
if tx:
self.show_transaction(tx)
def do_process_from_txid(self):
from electrum import transaction
txid, ok = QInputDialog.getText(self, _('Lookup transaction'), _('Transaction ID') + ':')
if ok and txid:
txid = str(txid).strip()
try:
r = self.network.synchronous_get(('blockchain.transaction.get',[txid]))
except BaseException as e:
self.show_message(str(e))
return
tx = transaction.Transaction(r)
self.show_transaction(tx)
@protected
def export_privkeys_dialog(self, password):
if self.wallet.is_watching_only():
self.show_message(_("This is a watching-only wallet"))
return
if isinstance(self.wallet, Multisig_Wallet):
self.show_message(_('WARNING: This is a multi-signature wallet.') + '\n' +
_('It cannot be "backed up" by simply exporting these private keys.'))
d = WindowModalDialog(self, _('Private keys'))
d.setMinimumSize(980, 300)
vbox = QVBoxLayout(d)
msg = "%s\n%s\n%s" % (_("WARNING: ALL your private keys are secret."),
_("Exposing a single private key can compromise your entire wallet!"),
_("In particular, DO NOT use 'redeem private key' services proposed by third parties."))
vbox.addWidget(QLabel(msg))
e = QTextEdit()
e.setReadOnly(True)
vbox.addWidget(e)
defaultname = 'electrum-private-keys.csv'
select_msg = _('Select file to export your private keys to')
hbox, filename_e, csv_button = filename_field(self, self.config, defaultname, select_msg)
vbox.addLayout(hbox)
b = OkButton(d, _('Export'))
b.setEnabled(False)
vbox.addLayout(Buttons(CancelButton(d), b))
private_keys = {}
addresses = self.wallet.get_addresses()
done = False
cancelled = False
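        # Derive the keys on a background thread; progress and completion are
        # marshalled back to the GUI via the two signals below, while the
        # done/cancelled flags let the dialog stop the thread cleanly.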
def privkeys_thread():
for addr in addresses:
time.sleep(0.1)
if done or cancelled:
break
privkey = self.wallet.export_private_key(addr, password)[0]
private_keys[addr] = privkey
self.computing_privkeys_signal.emit()
if not cancelled:
self.computing_privkeys_signal.disconnect()
self.show_privkeys_signal.emit()
def show_privkeys():
s = "\n".join( map( lambda x: x[0] + "\t"+ x[1], private_keys.items()))
e.setText(s)
b.setEnabled(True)
self.show_privkeys_signal.disconnect()
nonlocal done
done = True
def on_dialog_closed(*args):
nonlocal done
nonlocal cancelled
if not done:
cancelled = True
self.computing_privkeys_signal.disconnect()
self.show_privkeys_signal.disconnect()
self.computing_privkeys_signal.connect(lambda: e.setText("Please wait... %d/%d"%(len(private_keys),len(addresses))))
self.show_privkeys_signal.connect(show_privkeys)
d.finished.connect(on_dialog_closed)
threading.Thread(target=privkeys_thread).start()
if not d.exec_():
done = True
return
filename = filename_e.text()
if not filename:
return
try:
self.do_export_privkeys(filename, private_keys, csv_button.isChecked())
except (IOError, os.error) as reason:
            txt = "\n".join([
                _("Electrum was unable to produce a private key export."),
                str(reason)
            ])
self.show_critical(txt, title=_("Unable to create csv"))
except Exception as e:
self.show_message(str(e))
return
self.show_message(_("Private keys exported."))
def do_export_privkeys(self, fileName, pklist, is_csv):
with open(fileName, "w+") as f:
if is_csv:
transaction = csv.writer(f)
transaction.writerow(["address", "private_key"])
for addr, pk in pklist.items():
transaction.writerow(["%34s"%addr,pk])
else:
import json
f.write(json.dumps(pklist, indent = 4))
def do_import_labels(self):
def import_labels(path):
def _validate(data):
return data # TODO
def import_labels_assign(data):
for key, value in data.items():
self.wallet.set_label(key, value)
import_meta(path, _validate, import_labels_assign)
def on_import():
self.need_update.set()
import_meta_gui(self, _('labels'), import_labels, on_import)
def do_export_labels(self):
def export_labels(filename):
export_meta(self.wallet.labels, filename)
export_meta_gui(self, _('labels'), export_labels)
def sweep_key_dialog(self):
d = WindowModalDialog(self, title=_('Sweep private keys'))
d.setMinimumSize(600, 300)
vbox = QVBoxLayout(d)
vbox.addWidget(QLabel(_("Enter private keys:")))
keys_e = ScanQRTextEdit(allow_multi=True)
keys_e.setTabChangesFocus(True)
vbox.addWidget(keys_e)
addresses = self.wallet.get_unused_addresses()
if not addresses:
try:
addresses = self.wallet.get_receiving_addresses()
except AttributeError:
addresses = self.wallet.get_addresses()
h, address_e = address_field(addresses)
vbox.addLayout(h)
vbox.addStretch(1)
button = OkButton(d, _('Sweep'))
vbox.addLayout(Buttons(CancelButton(d), button))
button.setEnabled(False)
def get_address():
addr = str(address_e.text()).strip()
if bitcoin.is_address(addr):
return addr
def get_pk():
text = str(keys_e.toPlainText())
return keystore.get_private_keys(text)
f = lambda: button.setEnabled(get_address() is not None and get_pk() is not None)
on_address = lambda text: address_e.setStyleSheet((ColorScheme.DEFAULT if get_address() else ColorScheme.RED).as_stylesheet())
keys_e.textChanged.connect(f)
address_e.textChanged.connect(f)
address_e.textChanged.connect(on_address)
if not d.exec_():
return
from electrum.wallet import sweep_preparations
try:
self.do_clear()
coins, keypairs = sweep_preparations(get_pk(), self.network)
self.tx_external_keypairs = keypairs
self.spend_coins(coins)
self.payto_e.setText(get_address())
self.spend_max()
self.payto_e.setFrozen(True)
self.amount_e.setFrozen(True)
except BaseException as e:
self.show_message(str(e))
return
self.warn_if_watching_only()
def _do_import(self, title, msg, func):
text = text_dialog(self, title, msg + ' :', _('Import'),
allow_multi=True)
if not text:
return
bad = []
good = []
for key in str(text).split():
try:
addr = func(key)
good.append(addr)
except BaseException as e:
bad.append(key)
continue
if good:
self.show_message(_("The following addresses were added") + ':\n' + '\n'.join(good))
if bad:
self.show_critical(_("The following inputs could not be imported") + ':\n'+ '\n'.join(bad))
self.address_list.update()
self.history_list.update()
def import_addresses(self):
if not self.wallet.can_import_address():
return
title, msg = _('Import addresses'), _("Enter addresses")
self._do_import(title, msg, self.wallet.import_address)
@protected
def do_import_privkey(self, password):
if not self.wallet.can_import_privkey():
return
title, msg = _('Import private keys'), _("Enter private keys")
self._do_import(title, msg, lambda x: self.wallet.import_private_key(x, password))
def update_fiat(self):
b = self.fx and self.fx.is_enabled()
self.fiat_send_e.setVisible(b)
self.fiat_receive_e.setVisible(b)
self.history_list.refresh_headers()
self.history_list.update()
self.address_list.refresh_headers()
self.address_list.update()
self.update_status()
def settings_dialog(self):
self.need_restart = False
d = WindowModalDialog(self, _('Preferences'))
vbox = QVBoxLayout()
tabs = QTabWidget()
gui_widgets = []
fee_widgets = []
tx_widgets = []
id_widgets = []
# language
lang_help = _('Select which language is used in the GUI (after restart).')
lang_label = HelpLabel(_('Language') + ':', lang_help)
lang_combo = QComboBox()
from electrum.i18n import languages
lang_combo.addItems(list(languages.values()))
lang_keys = list(languages.keys())
lang_cur_setting = self.config.get("language", '')
try:
index = lang_keys.index(lang_cur_setting)
except ValueError: # not in list
index = 0
lang_combo.setCurrentIndex(index)
if not self.config.is_modifiable('language'):
for w in [lang_combo, lang_label]: w.setEnabled(False)
def on_lang(x):
lang_request = list(languages.keys())[lang_combo.currentIndex()]
if lang_request != self.config.get('language'):
self.config.set_key("language", lang_request, True)
self.need_restart = True
lang_combo.currentIndexChanged.connect(on_lang)
gui_widgets.append((lang_label, lang_combo))
nz_help = _('Number of zeros displayed after the decimal point. For example, if this is set to 2, "1." will be displayed as "1.00"')
nz_label = HelpLabel(_('Zeros after decimal point') + ':', nz_help)
nz = QSpinBox()
nz.setMinimum(0)
nz.setMaximum(self.decimal_point)
nz.setValue(self.num_zeros)
if not self.config.is_modifiable('num_zeros'):
for w in [nz, nz_label]: w.setEnabled(False)
def on_nz():
value = nz.value()
if self.num_zeros != value:
self.num_zeros = value
self.config.set_key('num_zeros', value, True)
self.history_list.update()
self.address_list.update()
nz.valueChanged.connect(on_nz)
gui_widgets.append((nz_label, nz))
msg = '\n'.join([
_('Time based: fee rate is based on average confirmation time estimates'),
_('Mempool based: fee rate is targeting a depth in the memory pool')
        ])
fee_type_label = HelpLabel(_('Fee estimation') + ':', msg)
fee_type_combo = QComboBox()
fee_type_combo.addItems([_('Static'), _('ETA'), _('Mempool')])
fee_type_combo.setCurrentIndex((2 if self.config.use_mempool_fees() else 1) if self.config.is_dynfee() else 0)
def on_fee_type(x):
self.config.set_key('mempool_fees', x==2)
self.config.set_key('dynamic_fees', x>0)
self.fee_slider.update()
fee_type_combo.currentIndexChanged.connect(on_fee_type)
fee_widgets.append((fee_type_label, fee_type_combo))
feebox_cb = QCheckBox(_('Edit fees manually'))
feebox_cb.setChecked(self.config.get('show_fee', False))
feebox_cb.setToolTip(_("Show fee edit box in send tab."))
def on_feebox(x):
self.config.set_key('show_fee', x == Qt.Checked)
self.fee_adv_controls.setVisible(bool(x))
feebox_cb.stateChanged.connect(on_feebox)
fee_widgets.append((feebox_cb, None))
use_rbf_cb = QCheckBox(_('Use Replace-By-Fee'))
use_rbf_cb.setChecked(self.config.get('use_rbf', True))
use_rbf_cb.setToolTip(
_('If you check this box, your transactions will be marked as non-final,') + '\n' + \
_('and you will have the possibility, while they are unconfirmed, to replace them with transactions that pay higher fees.') + '\n' + \
_('Note that some merchants do not accept non-final transactions until they are confirmed.'))
def on_use_rbf(x):
self.config.set_key('use_rbf', x == Qt.Checked)
use_rbf_cb.stateChanged.connect(on_use_rbf)
fee_widgets.append((use_rbf_cb, None))
msg = _('OpenAlias record, used to receive coins and to sign payment requests.') + '\n\n'\
+ _('The following alias providers are available:') + '\n'\
+ '\n'.join(['https://cryptoname.co/', 'http://xmr.link']) + '\n\n'\
+ 'For more information, see https://openalias.org'
alias_label = HelpLabel(_('OpenAlias') + ':', msg)
alias = self.config.get('alias','')
alias_e = QLineEdit(alias)
def set_alias_color():
if not self.config.get('alias'):
alias_e.setStyleSheet("")
return
if self.alias_info:
alias_addr, alias_name, validated = self.alias_info
alias_e.setStyleSheet((ColorScheme.GREEN if validated else ColorScheme.RED).as_stylesheet(True))
else:
alias_e.setStyleSheet(ColorScheme.RED.as_stylesheet(True))
def on_alias_edit():
alias_e.setStyleSheet("")
alias = str(alias_e.text())
self.config.set_key('alias', alias, True)
if alias:
self.fetch_alias()
set_alias_color()
self.alias_received_signal.connect(set_alias_color)
alias_e.editingFinished.connect(on_alias_edit)
id_widgets.append((alias_label, alias_e))
# SSL certificate
msg = ' '.join([
_('SSL certificate used to sign payment requests.'),
_('Use setconfig to set ssl_chain and ssl_privkey.'),
])
if self.config.get('ssl_privkey') or self.config.get('ssl_chain'):
try:
SSL_identity = paymentrequest.check_ssl_config(self.config)
SSL_error = None
except BaseException as e:
SSL_identity = "error"
SSL_error = str(e)
else:
SSL_identity = ""
SSL_error = None
SSL_id_label = HelpLabel(_('SSL certificate') + ':', msg)
SSL_id_e = QLineEdit(SSL_identity)
SSL_id_e.setStyleSheet((ColorScheme.RED if SSL_error else ColorScheme.GREEN).as_stylesheet(True) if SSL_identity else '')
if SSL_error:
SSL_id_e.setToolTip(SSL_error)
SSL_id_e.setReadOnly(True)
id_widgets.append((SSL_id_label, SSL_id_e))
units = base_units_list
msg = (_('Base unit of your wallet.')
+ '\n1 BTC = 1000 mBTC. 1 mBTC = 1000 bits. 1 bit = 100 sat.\n'
+ _('This setting affects the Send tab, and all balance related fields.'))
unit_label = HelpLabel(_('Base unit') + ':', msg)
unit_combo = QComboBox()
unit_combo.addItems(units)
unit_combo.setCurrentIndex(units.index(self.base_unit()))
def on_unit(x, nz):
unit_result = units[unit_combo.currentIndex()]
if self.base_unit() == unit_result:
return
edits = self.amount_e, self.fee_e, self.receive_amount_e
amounts = [edit.get_amount() for edit in edits]
self.decimal_point = base_unit_name_to_decimal_point(unit_result)
self.config.set_key('decimal_point', self.decimal_point, True)
nz.setMaximum(self.decimal_point)
self.history_list.update()
self.request_list.update()
self.address_list.update()
for edit, amount in zip(edits, amounts):
edit.setAmount(amount)
self.update_status()
unit_combo.currentIndexChanged.connect(lambda x: on_unit(x, nz))
gui_widgets.append((unit_label, unit_combo))
block_explorers = sorted(util.block_explorer_info().keys())
msg = _('Choose which online block explorer to use for functions that open a web browser')
block_ex_label = HelpLabel(_('Online Block Explorer') + ':', msg)
block_ex_combo = QComboBox()
block_ex_combo.addItems(block_explorers)
block_ex_combo.setCurrentIndex(block_ex_combo.findText(util.block_explorer(self.config)))
def on_be(x):
be_result = block_explorers[block_ex_combo.currentIndex()]
self.config.set_key('block_explorer', be_result, True)
block_ex_combo.currentIndexChanged.connect(on_be)
gui_widgets.append((block_ex_label, block_ex_combo))
from electrum import qrscanner
system_cameras = qrscanner._find_system_cameras()
qr_combo = QComboBox()
qr_combo.addItem("Default","default")
for camera, device in system_cameras.items():
qr_combo.addItem(camera, device)
#combo.addItem("Manually specify a device", config.get("video_device"))
index = qr_combo.findData(self.config.get("video_device"))
qr_combo.setCurrentIndex(index)
msg = _("Install the zbar package to enable this.")
qr_label = HelpLabel(_('Video Device') + ':', msg)
qr_combo.setEnabled(qrscanner.libzbar is not None)
on_video_device = lambda x: self.config.set_key("video_device", qr_combo.itemData(x), True)
qr_combo.currentIndexChanged.connect(on_video_device)
gui_widgets.append((qr_label, qr_combo))
usechange_cb = QCheckBox(_('Use change addresses'))
usechange_cb.setChecked(self.wallet.use_change)
if not self.config.is_modifiable('use_change'): usechange_cb.setEnabled(False)
def on_usechange(x):
usechange_result = x == Qt.Checked
if self.wallet.use_change != usechange_result:
self.wallet.use_change = usechange_result
self.wallet.storage.put('use_change', self.wallet.use_change)
multiple_cb.setEnabled(self.wallet.use_change)
usechange_cb.stateChanged.connect(on_usechange)
usechange_cb.setToolTip(_('Using change addresses makes it more difficult for other people to track your transactions.'))
tx_widgets.append((usechange_cb, None))
def on_multiple(x):
multiple = x == Qt.Checked
if self.wallet.multiple_change != multiple:
self.wallet.multiple_change = multiple
self.wallet.storage.put('multiple_change', multiple)
multiple_change = self.wallet.multiple_change
multiple_cb = QCheckBox(_('Use multiple change addresses'))
multiple_cb.setEnabled(self.wallet.use_change)
multiple_cb.setToolTip('\n'.join([
_('In some cases, use up to 3 change addresses in order to break '
'up large coin amounts and obfuscate the recipient address.'),
            _('This may result in higher transaction fees.')
]))
multiple_cb.setChecked(multiple_change)
multiple_cb.stateChanged.connect(on_multiple)
tx_widgets.append((multiple_cb, None))
def fmt_docs(key, klass):
lines = [ln.lstrip(" ") for ln in klass.__doc__.split("\n")]
return '\n'.join([key, "", " ".join(lines)])
choosers = sorted(coinchooser.COIN_CHOOSERS.keys())
if len(choosers) > 1:
chooser_name = coinchooser.get_name(self.config)
msg = _('Choose coin (UTXO) selection method. The following are available:\n\n')
msg += '\n\n'.join(fmt_docs(*item) for item in coinchooser.COIN_CHOOSERS.items())
chooser_label = HelpLabel(_('Coin selection') + ':', msg)
chooser_combo = QComboBox()
chooser_combo.addItems(choosers)
i = choosers.index(chooser_name) if chooser_name in choosers else 0
chooser_combo.setCurrentIndex(i)
def on_chooser(x):
chooser_name = choosers[chooser_combo.currentIndex()]
self.config.set_key('coin_chooser', chooser_name)
chooser_combo.currentIndexChanged.connect(on_chooser)
tx_widgets.append((chooser_label, chooser_combo))
def on_unconf(x):
self.config.set_key('confirmed_only', bool(x))
conf_only = self.config.get('confirmed_only', False)
unconf_cb = QCheckBox(_('Spend only confirmed coins'))
unconf_cb.setToolTip(_('Spend only confirmed inputs.'))
unconf_cb.setChecked(conf_only)
unconf_cb.stateChanged.connect(on_unconf)
tx_widgets.append((unconf_cb, None))
def on_outrounding(x):
self.config.set_key('coin_chooser_output_rounding', bool(x))
enable_outrounding = self.config.get('coin_chooser_output_rounding', False)
outrounding_cb = QCheckBox(_('Enable output value rounding'))
outrounding_cb.setToolTip(
_('Set the value of the change output so that it has similar precision to the other outputs.') + '\n' +
_('This might improve your privacy somewhat.') + '\n' +
_('If enabled, at most 100 satoshis might be lost due to this, per transaction.'))
outrounding_cb.setChecked(enable_outrounding)
outrounding_cb.stateChanged.connect(on_outrounding)
tx_widgets.append((outrounding_cb, None))
# Fiat Currency
hist_checkbox = QCheckBox()
hist_capgains_checkbox = QCheckBox()
fiat_address_checkbox = QCheckBox()
ccy_combo = QComboBox()
ex_combo = QComboBox()
def update_currencies():
if not self.fx: return
currencies = sorted(self.fx.get_currencies(self.fx.get_history_config()))
ccy_combo.clear()
ccy_combo.addItems([_('None')] + currencies)
if self.fx.is_enabled():
ccy_combo.setCurrentIndex(ccy_combo.findText(self.fx.get_currency()))
def update_history_cb():
if not self.fx: return
hist_checkbox.setChecked(self.fx.get_history_config())
hist_checkbox.setEnabled(self.fx.is_enabled())
def update_fiat_address_cb():
if not self.fx: return
fiat_address_checkbox.setChecked(self.fx.get_fiat_address_config())
def update_history_capgains_cb():
if not self.fx: return
hist_capgains_checkbox.setChecked(self.fx.get_history_capital_gains_config())
hist_capgains_checkbox.setEnabled(hist_checkbox.isChecked())
def update_exchanges():
if not self.fx: return
b = self.fx.is_enabled()
ex_combo.setEnabled(b)
if b:
h = self.fx.get_history_config()
c = self.fx.get_currency()
exchanges = self.fx.get_exchanges_by_ccy(c, h)
else:
exchanges = self.fx.get_exchanges_by_ccy('USD', False)
ex_combo.clear()
ex_combo.addItems(sorted(exchanges))
ex_combo.setCurrentIndex(ex_combo.findText(self.fx.config_exchange()))
def on_currency(hh):
if not self.fx: return
b = bool(ccy_combo.currentIndex())
ccy = str(ccy_combo.currentText()) if b else None
self.fx.set_enabled(b)
if b and ccy != self.fx.ccy:
self.fx.set_currency(ccy)
update_history_cb()
update_exchanges()
self.update_fiat()
def on_exchange(idx):
exchange = str(ex_combo.currentText())
if self.fx and self.fx.is_enabled() and exchange and exchange != self.fx.exchange.name():
self.fx.set_exchange(exchange)
def on_history(checked):
if not self.fx: return
self.fx.set_history_config(checked)
update_exchanges()
self.history_list.refresh_headers()
if self.fx.is_enabled() and checked:
# reset timeout to get historical rates
self.fx.timeout = 0
update_history_capgains_cb()
def on_history_capgains(checked):
if not self.fx: return
self.fx.set_history_capital_gains_config(checked)
self.history_list.refresh_headers()
def on_fiat_address(checked):
if not self.fx: return
self.fx.set_fiat_address_config(checked)
self.address_list.refresh_headers()
self.address_list.update()
update_currencies()
update_history_cb()
update_history_capgains_cb()
update_fiat_address_cb()
update_exchanges()
ccy_combo.currentIndexChanged.connect(on_currency)
hist_checkbox.stateChanged.connect(on_history)
hist_capgains_checkbox.stateChanged.connect(on_history_capgains)
fiat_address_checkbox.stateChanged.connect(on_fiat_address)
ex_combo.currentIndexChanged.connect(on_exchange)
fiat_widgets = []
fiat_widgets.append((QLabel(_('Fiat currency')), ccy_combo))
fiat_widgets.append((QLabel(_('Show history rates')), hist_checkbox))
fiat_widgets.append((QLabel(_('Show capital gains in history')), hist_capgains_checkbox))
fiat_widgets.append((QLabel(_('Show Fiat balance for addresses')), fiat_address_checkbox))
fiat_widgets.append((QLabel(_('Source')), ex_combo))
tabs_info = [
(fee_widgets, _('Fees')),
(tx_widgets, _('Transactions')),
(gui_widgets, _('Appearance')),
(fiat_widgets, _('Fiat')),
(id_widgets, _('Identity')),
]
for widgets, name in tabs_info:
tab = QWidget()
grid = QGridLayout(tab)
grid.setColumnStretch(0,1)
for a,b in widgets:
i = grid.rowCount()
if b:
if a:
grid.addWidget(a, i, 0)
grid.addWidget(b, i, 1)
else:
grid.addWidget(a, i, 0, 1, 2)
tabs.addTab(tab, name)
vbox.addWidget(tabs)
vbox.addStretch(1)
vbox.addLayout(Buttons(CloseButton(d)))
d.setLayout(vbox)
# run the dialog
d.exec_()
if self.fx:
self.fx.timeout = 0
self.alias_received_signal.disconnect(set_alias_color)
run_hook('close_settings_dialog')
if self.need_restart:
self.show_warning(_('Please restart Electrum to activate the new GUI settings'), title=_('Success'))
def closeEvent(self, event):
# It seems in some rare cases this closeEvent() is called twice
if not self.cleaned_up:
self.cleaned_up = True
self.clean_up()
event.accept()
def clean_up(self):
self.wallet.thread.stop()
if self.network:
self.network.unregister_callback(self.on_network)
self.config.set_key("is_maximized", self.isMaximized())
if not self.isMaximized():
g = self.geometry()
self.wallet.storage.put("winpos-qt", [g.left(),g.top(),
g.width(),g.height()])
self.config.set_key("console-history", self.console.history[-50:],
True)
if self.qr_window:
self.qr_window.close()
self.close_wallet()
self.gui_object.close_window(self)
def plugins_dialog(self):
self.pluginsdialog = d = WindowModalDialog(self, _('Electrum Plugins'))
plugins = self.gui_object.plugins
vbox = QVBoxLayout(d)
# plugins
scroll = QScrollArea()
scroll.setEnabled(True)
scroll.setWidgetResizable(True)
scroll.setMinimumSize(400,250)
vbox.addWidget(scroll)
w = QWidget()
scroll.setWidget(w)
w.setMinimumHeight(plugins.count() * 35)
grid = QGridLayout()
grid.setColumnStretch(0,1)
w.setLayout(grid)
settings_widgets = {}
def enable_settings_widget(p, name, i):
widget = settings_widgets.get(name)
if not widget and p and p.requires_settings():
widget = settings_widgets[name] = p.settings_widget(d)
grid.addWidget(widget, i, 1)
if widget:
widget.setEnabled(bool(p and p.is_enabled()))
def do_toggle(cb, name, i):
p = plugins.toggle(name)
cb.setChecked(bool(p))
enable_settings_widget(p, name, i)
run_hook('init_qt', self.gui_object)
for i, descr in enumerate(plugins.descriptions.values()):
name = descr['__name__']
p = plugins.get(name)
if descr.get('registers_keystore'):
continue
try:
cb = QCheckBox(descr['fullname'])
plugin_is_loaded = p is not None
cb_enabled = (not plugin_is_loaded and plugins.is_available(name, self.wallet)
or plugin_is_loaded and p.can_user_disable())
cb.setEnabled(cb_enabled)
cb.setChecked(plugin_is_loaded and p.is_enabled())
grid.addWidget(cb, i, 0)
enable_settings_widget(p, name, i)
cb.clicked.connect(partial(do_toggle, cb, name, i))
msg = descr['description']
if descr.get('requires'):
msg += '\n\n' + _('Requires') + ':\n' + '\n'.join(map(lambda x: x[1], descr.get('requires')))
grid.addWidget(HelpButton(msg), i, 2)
except Exception:
self.print_msg("error: cannot display plugin", name)
traceback.print_exc(file=sys.stdout)
grid.setRowStretch(len(plugins.descriptions.values()), 1)
vbox.addLayout(Buttons(CloseButton(d)))
d.exec_()
def cpfp(self, parent_tx, new_tx):
total_size = parent_tx.estimated_size() + new_tx.estimated_size()
d = WindowModalDialog(self, _('Child Pays for Parent'))
vbox = QVBoxLayout(d)
msg = (
"A CPFP is a transaction that sends an unconfirmed output back to "
"yourself, with a high fee. The goal is to have miners confirm "
"the parent transaction in order to get the fee attached to the "
"child transaction.")
vbox.addWidget(WWLabel(_(msg)))
msg2 = ("The proposed fee is computed using your "
"fee/kB settings, applied to the total size of both child and "
"parent transactions. After you broadcast a CPFP transaction, "
"it is normal to see a new unconfirmed transaction in your history.")
vbox.addWidget(WWLabel(_(msg2)))
grid = QGridLayout()
grid.addWidget(QLabel(_('Total size') + ':'), 0, 0)
grid.addWidget(QLabel('%d bytes'% total_size), 0, 1)
max_fee = new_tx.output_value()
grid.addWidget(QLabel(_('Input amount') + ':'), 1, 0)
grid.addWidget(QLabel(self.format_amount(max_fee) + ' ' + self.base_unit()), 1, 1)
output_amount = QLabel('')
grid.addWidget(QLabel(_('Output amount') + ':'), 2, 0)
grid.addWidget(output_amount, 2, 1)
fee_e = BTCAmountEdit(self.get_decimal_point)
# FIXME with dyn fees, without estimates, there are all kinds of crashes here
def f(x):
a = max_fee - fee_e.get_amount()
output_amount.setText((self.format_amount(a) + ' ' + self.base_unit()) if a else '')
fee_e.textChanged.connect(f)
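        # Initial fee: the fee/kB setting applied to the combined size of
        # parent and child, e.g. (illustrative numbers only)
        # 5000 sat/kB * 400 bytes / 1000 = 2000 sat.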
fee = self.config.fee_per_kb() * total_size / 1000
fee_e.setAmount(fee)
        grid.addWidget(QLabel(_('Fee') + ':'), 3, 0)
grid.addWidget(fee_e, 3, 1)
def on_rate(dyn, pos, fee_rate):
fee = fee_rate * total_size / 1000
fee = min(max_fee, fee)
fee_e.setAmount(fee)
fee_slider = FeeSlider(self, self.config, on_rate)
fee_slider.update()
grid.addWidget(fee_slider, 4, 1)
vbox.addLayout(grid)
vbox.addLayout(Buttons(CancelButton(d), OkButton(d)))
if not d.exec_():
return
fee = fee_e.get_amount()
if fee > max_fee:
self.show_error(_('Max fee exceeded'))
return
new_tx = self.wallet.cpfp(parent_tx, fee)
new_tx.set_rbf(True)
self.show_transaction(new_tx)
def bump_fee_dialog(self, tx):
is_relevant, is_mine, v, fee = self.wallet.get_wallet_delta(tx)
if fee is None:
self.show_error(_("Can't bump fee: unknown fee for original transaction."))
return
tx_label = self.wallet.get_label(tx.txid())
tx_size = tx.estimated_size()
d = WindowModalDialog(self, _('Bump Fee'))
vbox = QVBoxLayout(d)
vbox.addWidget(QLabel(_('Current fee') + ': %s'% self.format_amount(fee) + ' ' + self.base_unit()))
        vbox.addWidget(QLabel(_('New fee') + ':'))
fee_e = BTCAmountEdit(self.get_decimal_point)
fee_e.setAmount(fee * 1.5)
vbox.addWidget(fee_e)
def on_rate(dyn, pos, fee_rate):
fee = fee_rate * tx_size / 1000
fee_e.setAmount(fee)
fee_slider = FeeSlider(self, self.config, on_rate)
vbox.addWidget(fee_slider)
cb = QCheckBox(_('Final'))
vbox.addWidget(cb)
vbox.addLayout(Buttons(CancelButton(d), OkButton(d)))
if not d.exec_():
return
is_final = cb.isChecked()
new_fee = fee_e.get_amount()
delta = new_fee - fee
if delta < 0:
            self.show_error(_('Fee too low'))
return
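        # wallet.bump_fee builds an RBF replacement of tx paying `delta`
        # satoshis more than the original.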
try:
new_tx = self.wallet.bump_fee(tx, delta)
except BaseException as e:
self.show_error(str(e))
return
if is_final:
new_tx.set_rbf(False)
self.show_transaction(new_tx, tx_label)
def save_transaction_into_wallet(self, tx):
win = self.top_level_window()
try:
if not self.wallet.add_transaction(tx.txid(), tx):
win.show_error(_("Transaction could not be saved.") + "\n" +
_("It conflicts with current history."))
return False
except AddTransactionException as e:
win.show_error(e)
return False
else:
self.wallet.save_transactions(write=True)
# need to update at least: history_list, utxo_list, address_list
self.need_update.set()
msg = (_("Transaction added to wallet history.") + '\n\n' +
_("Note: this is an offline transaction, if you want the network "
"to see it, you need to broadcast it."))
win.msg_box(QPixmap(":icons/offline_tx.png"), None, _('Success'), msg)
return True
|
mit
|
yahman72/robotframework
|
src/robot/__init__.py
|
4
|
2014
|
# Copyright 2008-2015 Nokia Solutions and Networks
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The root of the Robot Framework package.
The command line entry points provided by the framework are exposed for
programmatic usage as follows:
* :func:`~robot.run.run`: Function to run tests.
* :func:`~robot.run.run_cli`: Function to run tests
with command line argument processing.
* :func:`~robot.rebot.rebot`: Function to post-process outputs.
* :func:`~robot.rebot.rebot_cli`: Function to post-process outputs
with command line argument processing.
* :mod:`~robot.libdoc`: Module for library documentation generation.
* :mod:`~robot.testdoc`: Module for test case documentation generation.
* :mod:`~robot.tidy`: Module for test data clean-up and format change.
All the functions above can be imported like ``from robot import run``.
Functions and classes provided by the modules need to be imported like
``from robot.libdoc import libdoc_cli``.
The functions and modules listed above are considered stable. Other modules in
this package are for internal usage and may change without prior notice.
.. tip:: More public APIs are exposed by the :mod:`robot.api` package.
"""
import sys
if 'pythonpathsetter' not in sys.modules:
from robot import pythonpathsetter as _
from robot.rebot import rebot, rebot_cli
from robot.run import run, run_cli
from robot.version import get_version
__all__ = ['run', 'run_cli', 'rebot', 'rebot_cli']
__version__ = get_version()
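# A minimal usage sketch of the programmatic entry points documented above
# (file and directory names are hypothetical):
#
#     from robot import run, rebot
#     run('tests.robot', outputdir='results')            # execute tests
#     rebot('results/output.xml', report='report.html')  # post-process results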
|
apache-2.0
|
ryfeus/lambda-packs
|
Tensorflow_Pandas_Numpy/source3.6/tensorflow/contrib/layers/python/layers/feature_column_ops.py
|
60
|
36916
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Utilities related to FeatureColumn."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import functools
from tensorflow.contrib.framework.python.framework import experimental
from tensorflow.contrib.framework.python.ops import variables as contrib_variables
from tensorflow.contrib.layers.python.layers import embedding_ops
from tensorflow.contrib.layers.python.layers import feature_column as fc
from tensorflow.contrib.layers.python.layers import layers
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import sparse_tensor as sparse_tensor_py
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn_ops
from tensorflow.python.ops import parsing_ops
from tensorflow.python.ops import sparse_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util import nest
def _maybe_reshape_input_tensor(tensor, column_name, output_rank):
"""Reshape the input tensor by the following rule.
1. If `output_rank > input_rank + 1`, raise a `ValueError`.
2. If `output_rank == input_rank + 1`, expand the tensor by one dimension.
3. If `output_rank == input_rank`, do nothing.
4. If `output_rank < input_rank`, flatten the inner dimensions of the tensor.
Args:
tensor: A Tensor or SparseTensor to be reshaped.
column_name: A string name of the feature column for the tensor.
output_rank: the desired rank of the tensor.
Returns:
A reshaped Tensor or SparseTensor.
Raises:
ValueError: if `output_rank > input_rank + 1` for the input tensor.
"""
input_rank = tensor.get_shape().ndims
if input_rank is None and isinstance(tensor, sparse_tensor_py.SparseTensor):
# Try to get the rank of a sparse tensor by its dense_shape's shape.
input_rank = tensor.dense_shape.get_shape().as_list()[0]
if input_rank is None:
raise ValueError('Error while processing column {}. Rank of input Tensor '
'can not be None.'.format(column_name))
if output_rank > input_rank + 1:
raise ValueError('Error while processing column {}. Rank of input Tensor '
'({}) should be the same as output_rank ({}). For '
'example, sequence data should typically be 3 '
'dimensional (rank 3) while non-sequence data is '
'typically 2 dimensional (rank 2).'.format(
column_name, input_rank, output_rank))
elif output_rank == input_rank + 1:
# Expand the tensor's shape by 1 dimension.
if isinstance(tensor, sparse_tensor_py.SparseTensor):
output_shape = array_ops.concat([tensor.dense_shape, [1]], 0)
return sparse_ops.sparse_reshape(tensor, output_shape)
else:
reshaped = array_ops.expand_dims(tensor, -1)
# Try to calculate the new shape.
static_shape = tensor.get_shape()
if static_shape is not None and static_shape.dims is not None:
reshaped.set_shape(static_shape.as_list() + [1])
return reshaped
elif output_rank < input_rank:
return layers._inner_flatten(tensor, output_rank) # pylint: disable=protected-access
else:
return tensor
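# Illustrative shapes for the rules above (example dimensions only):
#   output_rank == input_rank + 1: [batch, d]   -> [batch, d, 1] (expand)
#   output_rank == input_rank:     tensor is returned unchanged
#   output_rank <  input_rank:     [b, x, y, z] -> [b, x*y*z] (inner flatten)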
def _input_from_feature_columns(columns_to_tensors,
feature_columns,
weight_collections,
trainable,
scope,
output_rank,
default_name):
"""Implementation of `input_from(_sequence)_feature_columns`."""
columns_to_tensors = columns_to_tensors.copy()
check_feature_columns(feature_columns)
with variable_scope.variable_scope(scope,
default_name=default_name,
values=columns_to_tensors.values()):
output_tensors = []
transformer = _Transformer(columns_to_tensors)
if weight_collections:
weight_collections = list(set(list(weight_collections) +
[ops.GraphKeys.GLOBAL_VARIABLES]))
for column in sorted(set(feature_columns), key=lambda x: x.key):
with variable_scope.variable_scope(None,
default_name=column.name,
values=columns_to_tensors.values()):
transformed_tensor = transformer.transform(column)
if output_rank == 3:
transformed_tensor = nest.map_structure(
functools.partial(
_maybe_reshape_input_tensor,
column_name=column.name,
output_rank=output_rank), transformed_tensor)
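        # Prefer the embedding-lookup path; columns that do not support it
        # raise NotImplementedError, and we fall back to _to_dnn_input_layer.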
try:
# pylint: disable=protected-access
arguments = column._deep_embedding_lookup_arguments(
transformed_tensor)
output_tensors.append(
fc._embeddings_from_arguments( # pylint: disable=protected-access
column,
arguments,
weight_collections,
trainable,
output_rank=output_rank))
except NotImplementedError as ee:
try:
# pylint: disable=protected-access
output_tensors.append(column._to_dnn_input_layer(
transformed_tensor,
weight_collections,
trainable,
output_rank=output_rank))
except ValueError as e:
raise ValueError('Error creating input layer for column: {}.\n'
'{}, {}'.format(column.name, e, ee))
return array_ops.concat(output_tensors, output_rank - 1)
def input_from_feature_columns(columns_to_tensors,
feature_columns,
weight_collections=None,
trainable=True,
scope=None):
"""A tf.contrib.layers style input layer builder based on FeatureColumns.
Generally a single example in training data is described with feature columns.
  At the first layer of the model, this column-oriented data should be converted
  to a single tensor. Each feature column needs a different kind of operation
  during this conversion. For example, sparse features need totally different
  handling than continuous features.
Example:
```python
# Building model for training
columns_to_tensor = tf.parse_example(...)
first_layer = input_from_feature_columns(
columns_to_tensors=columns_to_tensor,
feature_columns=feature_columns)
second_layer = fully_connected(inputs=first_layer, ...)
...
```
where feature_columns can be defined as follows:
```python
sparse_feature = sparse_column_with_hash_bucket(
column_name="sparse_col", ...)
sparse_feature_emb = embedding_column(sparse_id_column=sparse_feature, ...)
real_valued_feature = real_valued_column(...)
real_valued_buckets = bucketized_column(
source_column=real_valued_feature, ...)
feature_columns=[sparse_feature_emb, real_valued_buckets]
```
Args:
    columns_to_tensors: A mapping from feature column to tensors. A 'string'
      key means a base (untransformed) feature. A FeatureColumn key means that
      column has already been transformed by the input pipeline.
    feature_columns: A set containing all the feature columns. All items in the
      set should be instances of classes derived from FeatureColumn.
weight_collections: List of graph collections to which weights are added.
trainable: If `True` also add variables to the graph collection
`GraphKeys.TRAINABLE_VARIABLES` (see tf.Variable).
scope: Optional scope for variable_scope.
Returns:
A Tensor which can be consumed by hidden layers in the neural network.
Raises:
ValueError: if FeatureColumn cannot be consumed by a neural network.
"""
return _input_from_feature_columns(columns_to_tensors,
feature_columns,
weight_collections,
trainable,
scope,
output_rank=2,
default_name='input_from_feature_columns')
@experimental
def sequence_input_from_feature_columns(columns_to_tensors,
feature_columns,
weight_collections=None,
trainable=True,
scope=None):
"""Builds inputs for sequence models from `FeatureColumn`s.
  See documentation for `input_from_feature_columns`. The following types of
  `FeatureColumn` are permitted in `feature_columns`: `_OneHotColumn`,
  `_EmbeddingColumn`, `_RealValuedColumn`, `_RealValuedVarLenColumn`. In
  addition, columns in `feature_columns` may not be constructed using any of
  the following: `_ScatteredEmbeddingColumn`, `_BucketizedColumn`,
  `_CrossedColumn`.
Args:
    columns_to_tensors: A mapping from feature column to tensors. A string
      key means a base (not yet transformed) feature. The key can also be a
      FeatureColumn, which means the input pipeline has already transformed
      that column.
    feature_columns: A set containing all the feature columns. All items in
      the set should be instances of classes derived from FeatureColumn.
weight_collections: List of graph collections to which weights are added.
trainable: If `True` also add variables to the graph collection
`GraphKeys.TRAINABLE_VARIABLES` (see tf.Variable).
scope: Optional scope for variable_scope.
Returns:
A Tensor which can be consumed by hidden layers in the neural network.
Raises:
ValueError: if FeatureColumn cannot be consumed by a neural network.
"""
_check_supported_sequence_columns(feature_columns)
_check_forbidden_sequence_columns(feature_columns)
return _input_from_feature_columns(
columns_to_tensors,
feature_columns,
weight_collections,
trainable,
scope,
output_rank=3,
default_name='sequence_input_from_feature_columns')
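# Hedged usage sketch (not part of the library). The column and tensor names
# below ('token_ids', `parsed_sequence`) are hypothetical; the pattern
# assumes the TF 1.x contrib feature-column API defined in this module.
# Because output_rank=3 is used internally, the result keeps the time
# dimension intact and has shape [batch_size, padded_length, embedding_dim].
def _example_sequence_input_usage(parsed_sequence):
  """Builds a rank-3 input tensor for an RNN-style sequence model."""
  token_ids = fc.sparse_column_with_integerized_feature(
      'token_ids', bucket_size=10000)
  token_embedding = fc.embedding_column(token_ids, dimension=32)
  return sequence_input_from_feature_columns(
      columns_to_tensors=parsed_sequence,
      feature_columns=[token_embedding])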
def _create_embedding_lookup(column,
columns_to_tensors,
embedding_lookup_arguments,
num_outputs,
trainable,
weight_collections):
"""Creates variables and returns predictions for linear weights in a model.
Args:
column: the column we're working on.
columns_to_tensors: a map from column name to tensors.
embedding_lookup_arguments: arguments for embedding lookup.
num_outputs: how many outputs.
trainable: whether the variable we create is trainable.
weight_collections: weights will be placed here.
Returns:
variables: the created embeddings.
predictions: the computed predictions.
"""
with variable_scope.variable_scope(
None, default_name=column.name, values=columns_to_tensors.values()):
variable = contrib_variables.model_variable(
name='weights',
shape=[embedding_lookup_arguments.vocab_size, num_outputs],
dtype=dtypes.float32,
initializer=embedding_lookup_arguments.initializer,
trainable=trainable,
collections=weight_collections)
if fc._is_variable(variable): # pylint: disable=protected-access
variable = [variable]
else:
variable = variable._get_variable_list() # pylint: disable=protected-access
predictions = embedding_ops.safe_embedding_lookup_sparse(
variable,
embedding_lookup_arguments.input_tensor,
sparse_weights=embedding_lookup_arguments.weight_tensor,
combiner=embedding_lookup_arguments.combiner,
name=column.name + '_weights')
return variable, predictions
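# Hedged illustration (not part of the library): with combiner='sum', the
# safe_embedding_lookup_sparse call above turns the [vocab_size, num_outputs]
# weight matrix into per-example linear predictions. For an example whose
# sparse ids are {2, 5}, the prediction row is weights[2] + weights[5], i.e.
# a sparse dot product against an implicit multi-hot input.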
def _create_joint_embedding_lookup(columns_to_tensors,
embedding_lookup_arguments,
num_outputs,
trainable,
weight_collections):
"""Creates an embedding lookup for all columns sharing a single weight."""
for arg in embedding_lookup_arguments:
assert arg.weight_tensor is None, (
'Joint sums for weighted sparse columns are not supported. '
'Please use weighted_sum_from_feature_columns instead.')
assert arg.combiner == 'sum', (
'Combiners other than sum are not supported for joint sums. '
'Please use weighted_sum_from_feature_columns instead.')
assert len(embedding_lookup_arguments) >= 1, (
'At least one column must be in the model.')
prev_size = 0
sparse_tensors = []
for a in embedding_lookup_arguments:
t = a.input_tensor
values = t.values + prev_size
prev_size += a.vocab_size
sparse_tensors.append(
sparse_tensor_py.SparseTensor(t.indices,
values,
t.dense_shape))
sparse_tensor = sparse_ops.sparse_concat(1, sparse_tensors)
with variable_scope.variable_scope(
None, default_name='linear_weights', values=columns_to_tensors.values()):
variable = contrib_variables.model_variable(
name='weights',
shape=[prev_size, num_outputs],
dtype=dtypes.float32,
initializer=init_ops.zeros_initializer(),
trainable=trainable,
collections=weight_collections)
if fc._is_variable(variable): # pylint: disable=protected-access
variable = [variable]
else:
variable = variable._get_variable_list() # pylint: disable=protected-access
predictions = embedding_ops.safe_embedding_lookup_sparse(
variable,
sparse_tensor,
sparse_weights=None,
combiner='sum',
name='_weights')
return variable, predictions
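# Hedged sketch (not part of the library) of the id-offset trick used above:
# each column's sparse ids are shifted by the running vocabulary size so a
# single [total_vocab, num_outputs] matrix serves every column. Plain-Python
# version with made-up vocab sizes:
def _example_joint_id_offset():
  """Returns the joint ids ([0, 2, 4, 7]) and the total vocab size (8)."""
  columns = [('colA', 3, [0, 2]), ('colB', 5, [1, 4])]  # (name, vocab, ids)
  prev_size = 0
  joint_ids = []
  for _, vocab_size, ids in columns:
    joint_ids.extend(i + prev_size for i in ids)
    prev_size += vocab_size
  return joint_ids, prev_size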
def joint_weighted_sum_from_feature_columns(columns_to_tensors,
feature_columns,
num_outputs,
weight_collections=None,
trainable=True,
scope=None):
"""A restricted linear prediction builder based on FeatureColumns.
  As long as all feature columns are unweighted sparse columns, this computes
  the prediction of a linear model which stores all weights in a single
  variable.
Args:
    columns_to_tensors: A mapping from feature column to tensors. A string
      key means a base (not yet transformed) feature. The key can also be a
      FeatureColumn, which means the input pipeline has already transformed
      that column. For example, `inflow` may have handled transformations.
feature_columns: A set containing all the feature columns. All items in the
set should be instances of classes derived from FeatureColumn.
num_outputs: An integer specifying number of outputs. Default value is 1.
weight_collections: List of graph collections to which weights are added.
trainable: If `True` also add variables to the graph collection
`GraphKeys.TRAINABLE_VARIABLES` (see tf.Variable).
scope: Optional scope for variable_scope.
Returns:
A tuple containing:
* A Tensor which represents predictions of a linear model.
* A list of Variables storing the weights.
* A Variable which is used for bias.
Raises:
ValueError: if FeatureColumn cannot be used for linear predictions.
"""
columns_to_tensors = columns_to_tensors.copy()
check_feature_columns(feature_columns)
with variable_scope.variable_scope(
scope,
default_name='joint_weighted_sum_from_feature_columns',
values=columns_to_tensors.values()):
transformer = _Transformer(columns_to_tensors)
embedding_lookup_arguments = []
for column in sorted(set(feature_columns), key=lambda x: x.key):
transformed_tensor = transformer.transform(column)
try:
embedding_lookup_arguments.append(
column._wide_embedding_lookup_arguments(transformed_tensor)) # pylint: disable=protected-access
except NotImplementedError:
raise NotImplementedError('Real-valued columns are not supported. '
'Use weighted_sum_from_feature_columns '
'instead, or bucketize these columns.')
variable, predictions_no_bias = _create_joint_embedding_lookup(
columns_to_tensors,
embedding_lookup_arguments,
num_outputs,
trainable,
weight_collections)
bias = contrib_variables.model_variable(
'bias_weight',
shape=[num_outputs],
initializer=init_ops.zeros_initializer(),
trainable=trainable,
collections=_add_variable_collection(weight_collections))
_log_variable(bias)
predictions = nn_ops.bias_add(predictions_no_bias, bias)
return predictions, variable, bias
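# Hedged usage sketch (not part of the library). The column name ('country')
# and `columns_to_tensor` are hypothetical; only unweighted sparse columns
# with the default 'sum' combiner are accepted by this builder.
def _example_joint_weighted_sum(columns_to_tensor):
  """Linear model whose weights all live in one shared variable."""
  country = fc.sparse_column_with_hash_bucket('country',
                                              hash_bucket_size=100)
  # A real_valued_column here would raise NotImplementedError.
  return joint_weighted_sum_from_feature_columns(
      columns_to_tensors=columns_to_tensor,
      feature_columns=[country],
      num_outputs=1)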
def weighted_sum_from_feature_columns(columns_to_tensors,
feature_columns,
num_outputs,
weight_collections=None,
trainable=True,
scope=None):
"""A tf.contrib.layers style linear prediction builder based on FeatureColumn.
Generally a single example in training data is described with feature columns.
  This function generates a weighted sum for each of `num_outputs`. The
  weighted sum corresponds to logits in classification problems and to the
  prediction itself in linear regression problems.
Example:
```
# Building model for training
feature_columns = (
real_valued_column("my_feature1"),
...
)
columns_to_tensor = tf.parse_example(...)
logits = weighted_sum_from_feature_columns(
columns_to_tensors=columns_to_tensor,
feature_columns=feature_columns,
num_outputs=1)
loss = tf.nn.sigmoid_cross_entropy_with_logits(labels=labels,
logits=logits)
```
Args:
    columns_to_tensors: A mapping from feature column to tensors. A string
      key means a base (not yet transformed) feature. The key can also be a
      FeatureColumn, which means the input pipeline has already transformed
      that column. For example, `inflow` may have handled transformations.
feature_columns: A set containing all the feature columns. All items in the
set should be instances of classes derived from FeatureColumn.
num_outputs: An integer specifying number of outputs. Default value is 1.
weight_collections: List of graph collections to which weights are added.
trainable: If `True` also add variables to the graph collection
`GraphKeys.TRAINABLE_VARIABLES` (see tf.Variable).
scope: Optional scope for variable_scope.
Returns:
A tuple containing:
* A Tensor which represents predictions of a linear model.
* A dictionary which maps feature_column to corresponding Variable.
* A Variable which is used for bias.
Raises:
ValueError: if FeatureColumn cannot be used for linear predictions.
"""
columns_to_tensors = columns_to_tensors.copy()
check_feature_columns(feature_columns)
with variable_scope.variable_scope(
scope,
default_name='weighted_sum_from_feature_columns',
values=columns_to_tensors.values()):
output_tensors = []
column_to_variable = dict()
transformer = _Transformer(columns_to_tensors)
# pylint: disable=protected-access
for column in sorted(set(feature_columns), key=lambda x: x.key):
transformed_tensor = transformer.transform(column)
try:
embedding_lookup_arguments = column._wide_embedding_lookup_arguments(
transformed_tensor)
variable, predictions = _create_embedding_lookup(
column,
columns_to_tensors,
embedding_lookup_arguments,
num_outputs,
trainable,
weight_collections)
except NotImplementedError:
with variable_scope.variable_scope(
None,
default_name=column.name,
values=columns_to_tensors.values()):
tensor = column._to_dense_tensor(transformed_tensor)
tensor = _maybe_reshape_input_tensor(
tensor, column.name, output_rank=2)
variable = [
contrib_variables.model_variable(
name='weight',
shape=[tensor.get_shape()[1], num_outputs],
initializer=init_ops.zeros_initializer(),
trainable=trainable,
collections=weight_collections)
]
predictions = math_ops.matmul(tensor, variable[0], name='matmul')
except ValueError as ee:
raise ValueError('Error creating weighted sum for column: {}.\n'
'{}'.format(column.name, ee))
output_tensors.append(array_ops.reshape(
predictions, shape=(-1, num_outputs)))
column_to_variable[column] = variable
_log_variable(variable)
fc._maybe_restore_from_checkpoint(column._checkpoint_path(), variable) # pylint: disable=protected-access
# pylint: enable=protected-access
predictions_no_bias = math_ops.add_n(output_tensors)
bias = contrib_variables.model_variable(
'bias_weight',
shape=[num_outputs],
initializer=init_ops.zeros_initializer(),
trainable=trainable,
collections=_add_variable_collection(weight_collections))
_log_variable(bias)
predictions = nn_ops.bias_add(predictions_no_bias, bias)
return predictions, column_to_variable, bias
def parse_feature_columns_from_examples(serialized,
feature_columns,
name=None,
example_names=None):
"""Parses tf.Examples to extract tensors for given feature_columns.
  This is a wrapper around 'tf.parse_example'.
Example:
```python
columns_to_tensor = parse_feature_columns_from_examples(
serialized=my_data,
feature_columns=my_features)
# Where my_features are:
# Define features and transformations
sparse_feature_a = sparse_column_with_keys(
column_name="sparse_feature_a", keys=["AB", "CD", ...])
embedding_feature_a = embedding_column(
sparse_id_column=sparse_feature_a, dimension=3, combiner="sum")
sparse_feature_b = sparse_column_with_hash_bucket(
column_name="sparse_feature_b", hash_bucket_size=1000)
embedding_feature_b = embedding_column(
sparse_id_column=sparse_feature_b, dimension=16, combiner="sum")
crossed_feature_a_x_b = crossed_column(
columns=[sparse_feature_a, sparse_feature_b], hash_bucket_size=10000)
real_feature = real_valued_column("real_feature")
real_feature_buckets = bucketized_column(
source_column=real_feature, boundaries=[...])
my_features = [embedding_feature_b, real_feature_buckets, embedding_feature_a]
```
Args:
serialized: A vector (1-D Tensor) of strings, a batch of binary
serialized `Example` protos.
feature_columns: An iterable containing all the feature columns. All items
should be instances of classes derived from _FeatureColumn.
name: A name for this operation (optional).
example_names: A vector (1-D Tensor) of strings (optional), the names of
the serialized protos in the batch.
Returns:
A `dict` mapping FeatureColumn to `Tensor` and `SparseTensor` values.
"""
check_feature_columns(feature_columns)
columns_to_tensors = parsing_ops.parse_example(
serialized=serialized,
features=fc.create_feature_spec_for_parsing(feature_columns),
name=name,
example_names=example_names)
transformer = _Transformer(columns_to_tensors)
for column in sorted(set(feature_columns), key=lambda x: x.key):
transformer.transform(column)
return columns_to_tensors
def transform_features(features, feature_columns):
"""Returns transformed features based on features columns passed in.
Example:
```python
columns_to_tensor = transform_features(features=features,
feature_columns=feature_columns)
# Where my_features are:
# Define features and transformations
sparse_feature_a = sparse_column_with_keys(
column_name="sparse_feature_a", keys=["AB", "CD", ...])
embedding_feature_a = embedding_column(
sparse_id_column=sparse_feature_a, dimension=3, combiner="sum")
sparse_feature_b = sparse_column_with_hash_bucket(
column_name="sparse_feature_b", hash_bucket_size=1000)
embedding_feature_b = embedding_column(
sparse_id_column=sparse_feature_b, dimension=16, combiner="sum")
crossed_feature_a_x_b = crossed_column(
columns=[sparse_feature_a, sparse_feature_b], hash_bucket_size=10000)
real_feature = real_valued_column("real_feature")
real_feature_buckets = bucketized_column(
source_column=real_feature, boundaries=[...])
feature_columns = [embedding_feature_b,
real_feature_buckets,
embedding_feature_a]
```
Args:
features: A dictionary of features.
feature_columns: An iterable containing all the feature columns. All items
should be instances of classes derived from _FeatureColumn.
Returns:
A `dict` mapping FeatureColumn to `Tensor` and `SparseTensor` values.
"""
columns_to_tensor = features.copy()
check_feature_columns(feature_columns)
transformer = _Transformer(columns_to_tensor)
for column in sorted(set(feature_columns), key=lambda x: x.key):
transformer.transform(column)
keys = list(columns_to_tensor.keys())
for k in keys:
if k not in feature_columns:
columns_to_tensor.pop(k)
return columns_to_tensor
def parse_feature_columns_from_sequence_examples(
serialized,
context_feature_columns,
sequence_feature_columns,
name=None,
example_name=None):
"""Parses tf.SequenceExamples to extract tensors for given `FeatureColumn`s.
Args:
serialized: A scalar (0-D Tensor) of type string, a single serialized
`SequenceExample` proto.
context_feature_columns: An iterable containing the feature columns for
context features. All items should be instances of classes derived from
`_FeatureColumn`. Can be `None`.
sequence_feature_columns: An iterable containing the feature columns for
sequence features. All items should be instances of classes derived from
`_FeatureColumn`. Can be `None`.
name: A name for this operation (optional).
    example_name: A scalar (0-D Tensor) of type string (optional), the name
      of the serialized proto.
Returns:
A tuple consisting of:
context_features: a dict mapping `FeatureColumns` from
`context_feature_columns` to their parsed `Tensors`/`SparseTensor`s.
sequence_features: a dict mapping `FeatureColumns` from
`sequence_feature_columns` to their parsed `Tensors`/`SparseTensor`s.
"""
# Sequence example parsing requires a single (scalar) example.
try:
serialized = array_ops.reshape(serialized, [])
except ValueError as e:
raise ValueError(
        'serialized must contain a single sequence example. Batching must be '
'done after parsing for sequence examples. Error: {}'.format(e))
if context_feature_columns is None:
context_feature_columns = []
if sequence_feature_columns is None:
sequence_feature_columns = []
check_feature_columns(context_feature_columns)
context_feature_spec = fc.create_feature_spec_for_parsing(
context_feature_columns)
check_feature_columns(sequence_feature_columns)
sequence_feature_spec = fc._create_sequence_feature_spec_for_parsing( # pylint: disable=protected-access
sequence_feature_columns, allow_missing_by_default=False)
return parsing_ops.parse_single_sequence_example(serialized,
context_feature_spec,
sequence_feature_spec,
example_name,
name)
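# Hedged usage sketch (not part of the library). Column names are
# hypothetical. Note that `serialized` must be a single (scalar) proto:
# batching has to happen after parsing, which is why the function reshapes
# its input to rank 0.
def _example_parse_sequence(serialized_proto):
  """Parses one SequenceExample into context and sequence tensors."""
  query = fc.sparse_column_with_hash_bucket('query', hash_bucket_size=1000)
  tokens = fc.sparse_column_with_integerized_feature('tokens',
                                                     bucket_size=10000)
  return parse_feature_columns_from_sequence_examples(
      serialized=serialized_proto,
      context_feature_columns=[query],
      sequence_feature_columns=[tokens])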
def _log_variable(variable):
if isinstance(variable, list):
for var in variable:
      if fc._is_variable(var):  # pylint: disable=protected-access
logging.info('Created variable %s, with device=%s', var.name,
var.device)
elif fc._is_variable(variable): # pylint: disable=protected-access
logging.info('Created variable %s, with device=%s', variable.name,
variable.device)
def _infer_real_valued_column_for_tensor(name, tensor):
"""Creates a real_valued_column for given tensor and name."""
if isinstance(tensor, sparse_tensor_py.SparseTensor):
raise ValueError(
'SparseTensor is not supported for auto detection. Please define '
        'corresponding FeatureColumn for tensor {} {}.'.format(name, tensor))
if not (tensor.dtype.is_integer or tensor.dtype.is_floating):
raise ValueError(
        'Non-integer and non-floating types are not supported for auto '
        'detection. Please define a corresponding FeatureColumn for tensor '
        '{} {}.'.format(name, tensor))
shape = tensor.get_shape().as_list()
dimension = 1
for i in range(1, len(shape)):
dimension *= shape[i]
return fc.real_valued_column(name, dimension=dimension, dtype=tensor.dtype)
def infer_real_valued_columns(features):
if not isinstance(features, dict):
return [_infer_real_valued_column_for_tensor('', features)]
feature_columns = []
for key, value in features.items():
feature_columns.append(_infer_real_valued_column_for_tensor(key, value))
return feature_columns
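# Hedged illustration (not part of the library): auto detection multiplies
# all trailing dimensions together, so a float Tensor of shape [batch, 2, 3]
# yields a single real_valued_column with dimension 2 * 3 = 6. SparseTensor
# or string inputs raise ValueError and need explicit FeatureColumns.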
def check_feature_columns(feature_columns):
"""Checks the validity of the set of FeatureColumns.
Args:
feature_columns: An iterable of instances or subclasses of FeatureColumn.
Raises:
ValueError: If `feature_columns` is a dict.
ValueError: If there are duplicate feature column keys.
"""
if isinstance(feature_columns, dict):
raise ValueError('Expected feature_columns to be iterable, found dict.')
seen_keys = set()
for f in feature_columns:
key = f.key
if key in seen_keys:
raise ValueError('Duplicate feature column key found for column: {}. '
'This usually means that the column is almost identical '
'to another column, and one must be discarded.'.format(
f.name))
seen_keys.add(key)
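# Hedged illustration (not part of the library): the duplicate-key check
# fires even for the very same column object listed twice, e.g.
#   age = fc.real_valued_column('age')
#   check_feature_columns([age, age])  # raises ValueError
# The explicit dict rejection exists because iterating a dict would yield
# its string keys rather than the FeatureColumn values.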
class _Transformer(object):
"""Handles all the transformations defined by FeatureColumn if needed.
FeatureColumn specifies how to digest an input column to the network. Some
feature columns require data transformations. This class handles those
transformations if they are not handled already.
Some features may be used in more than one place. For example, one can use a
  bucketized feature by itself and in a cross with it. In that case the
  Transformer should create only one bucketization op instead of one per
  feature column. To handle re-use of transformed columns, the Transformer
  caches all previously transformed columns.
Example:
```python
sparse_feature = sparse_column_with_hash_bucket(...)
real_valued_feature = real_valued_column(...)
real_valued_buckets = bucketized_column(source_column=real_valued_feature,
...)
sparse_x_real = crossed_column(
columns=[sparse_feature, real_valued_buckets], hash_bucket_size=10000)
columns_to_tensor = tf.parse_example(...)
transformer = Transformer(columns_to_tensor)
sparse_x_real_tensor = transformer.transform(sparse_x_real)
sparse_tensor = transformer.transform(sparse_feature)
real_buckets_tensor = transformer.transform(real_valued_buckets)
```
"""
def __init__(self, columns_to_tensors):
"""Initializes transfomer.
Args:
      columns_to_tensors: A mapping from feature columns to tensors. A string
        key means a base (not yet transformed) feature. The key can also be a
        FeatureColumn, which means the input pipeline has already transformed
        that column. For example, `inflow` may have handled transformations.
        Transformed features are inserted into columns_to_tensors.
"""
self._columns_to_tensors = columns_to_tensors
def transform(self, feature_column):
"""Returns a Tensor which represents given feature_column.
Args:
feature_column: An instance of FeatureColumn.
Returns:
      A Tensor which represents the given feature_column. The transform may
      create a new Tensor or re-use an existing one.
Raises:
ValueError: if FeatureColumn cannot be handled by this Transformer.
"""
logging.debug('Transforming feature_column %s', feature_column)
if feature_column in self._columns_to_tensors:
# Feature_column is already transformed.
return self._columns_to_tensors[feature_column]
feature_column.insert_transformed_feature(self._columns_to_tensors)
if feature_column not in self._columns_to_tensors:
raise ValueError('Column {} is not supported.'.format(
feature_column.name))
return self._columns_to_tensors[feature_column]
def _add_variable_collection(weight_collections):
if weight_collections:
weight_collections = list(
set(list(weight_collections) + [ops.GraphKeys.GLOBAL_VARIABLES]))
return weight_collections
# TODO(jamieas): remove the following logic once all FeatureColumn types are
# supported for sequences.
# pylint: disable=protected-access
_SUPPORTED_SEQUENCE_COLUMNS = (fc._OneHotColumn,
fc._EmbeddingColumn,
fc._RealValuedColumn,
fc._RealValuedVarLenColumn)
_FORBIDDEN_SEQUENCE_COLUMNS = (fc._ScatteredEmbeddingColumn,
fc._BucketizedColumn,
fc._CrossedColumn)
def _check_supported_sequence_columns(feature_columns):
"""Asserts `feature_columns` are in `_SUPPORTED_SEQUENCE_COLUMNS`."""
for col in feature_columns:
if not isinstance(col, _SUPPORTED_SEQUENCE_COLUMNS):
raise ValueError(
'FeatureColumn type {} is not currently supported for sequence data.'.
format(type(col).__name__))
def _get_parent_columns(feature_column):
"""Returns the tuple of `FeatureColumn`s that `feature_column` depends on."""
if isinstance(feature_column, (fc._WeightedSparseColumn,
fc._OneHotColumn,
fc._EmbeddingColumn,)):
return (feature_column.sparse_id_column,)
if isinstance(feature_column, (fc._BucketizedColumn,)):
return (feature_column.source_column,)
  if isinstance(feature_column, fc._CrossedColumn):
return tuple(feature_column.columns)
return tuple()
def _gather_feature_columns(feature_columns):
"""Returns a list of all ancestor `FeatureColumns` of `feature_columns`."""
gathered = list(feature_columns)
i = 0
while i < len(gathered):
for column in _get_parent_columns(gathered[i]):
if column not in gathered:
gathered.append(column)
i += 1
return gathered
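# Hedged illustration (not part of the library) of the gathering order:
#   price = fc.real_valued_column('price')
#   buckets = fc.bucketized_column(price, boundaries=[0., 10., 100.])
#   _gather_feature_columns([buckets])  # -> [buckets, price]
# Because ancestors are walked transitively, a forbidden column (e.g. a
# _BucketizedColumn hidden inside a crossed column) is still detected by
# _check_forbidden_sequence_columns below.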
def _check_forbidden_sequence_columns(feature_columns):
"""Recursively cecks `feature_columns` for `_FORBIDDEN_SEQUENCE_COLUMNS`."""
all_feature_columns = _gather_feature_columns(feature_columns)
for feature_column in all_feature_columns:
if isinstance(feature_column, _FORBIDDEN_SEQUENCE_COLUMNS):
raise ValueError(
'Column {} is of type {}, which is not currently supported for '
'sequences.'.format(feature_column.name,
type(feature_column).__name__))
|
mit
|
eharney/cinder
|
cinder/tests/unit/test_db_api.py
|
1
|
149710
|
# Copyright 2014 IBM Corp.
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Unit tests for cinder.db.api."""
import datetime
import ddt
import enum
import mock
from mock import call
from oslo_config import cfg
from oslo_utils import timeutils
from oslo_utils import uuidutils
import six
from sqlalchemy.sql import operators
from cinder.api import common
from cinder import context
from cinder import db
from cinder.db.sqlalchemy import api as sqlalchemy_api
from cinder.db.sqlalchemy import models
from cinder import exception
from cinder import objects
from cinder.objects import fields
from cinder import quota
from cinder import test
from cinder.tests.unit import fake_constants as fake
from cinder.tests.unit import utils
CONF = cfg.CONF
THREE = 3
THREE_HUNDREDS = 300
ONE_HUNDREDS = 100
UTC_NOW = timeutils.utcnow()
def _quota_reserve(context, project_id):
"""Create sample Quota, QuotaUsage and Reservation objects.
There is no method db.quota_usage_create(), so we have to use
db.quota_reserve() for creating QuotaUsage objects.
    Returns reservation uuids.
"""
def get_sync(resource, usage):
def sync(elevated, project_id, session):
return {resource: usage}
return sync
quotas = {}
resources = {}
deltas = {}
for i, resource in enumerate(('volumes', 'gigabytes')):
quota_obj = db.quota_create(context, project_id, resource, i + 1)
quotas[resource] = quota_obj.hard_limit
resources[resource] = quota.ReservableResource(resource,
'_sync_%s' % resource)
deltas[resource] = i + 1
return db.quota_reserve(
context, resources, quotas, deltas,
datetime.datetime.utcnow(), datetime.datetime.utcnow(),
datetime.timedelta(days=1), project_id
)
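# Hedged usage sketch (not part of the test suite): a typical call reserves
# 1 volume and 2 gigabytes for the project and returns reservation uuids
# that a test can then commit or roll back, e.g.
#   reservations = _quota_reserve(ctxt, 'project1')
#   db.reservation_commit(ctxt, reservations, 'project1')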
class BaseTest(test.TestCase, test.ModelsObjectComparatorMixin):
def setUp(self):
super(BaseTest, self).setUp()
self.ctxt = context.get_admin_context()
@ddt.ddt
class DBCommonFilterTestCase(BaseTest):
def setUp(self):
super(DBCommonFilterTestCase, self).setUp()
self.fake_volume = db.volume_create(self.ctxt,
{'display_name': 'fake_name'})
self.fake_group = utils.create_group(
self.ctxt,
group_type_id=fake.GROUP_TYPE_ID,
volume_type_ids=[fake.VOLUME_TYPE_ID])
@mock.patch('sqlalchemy.orm.query.Query.filter')
def test__process_model_like_filter(self, mock_filter):
filters = {'display_name': 'fake_name',
'display_description': 'fake_description',
'host': 123,
'status': []}
session = sqlalchemy_api.get_session()
query = session.query(models.Volume)
mock_filter.return_value = query
with mock.patch.object(operators.Operators, 'op') as mock_op:
def fake_operator(value):
return value
mock_op.return_value = fake_operator
sqlalchemy_api._process_model_like_filter(models.Volume,
query, filters)
calls = [call('%fake_description%'),
call('%fake_name%'), call('%123%')]
mock_filter.assert_has_calls(calls, any_order=True)
@ddt.data({'handler': [db.volume_create, db.volume_get_all],
'column': 'display_name',
'resource': 'volume'},
{'handler': [db.snapshot_create, db.snapshot_get_all],
'column': 'display_name',
'resource': 'snapshot'},
{'handler': [db.message_create, db.message_get_all],
'column': 'message_level',
'resource': 'message'},
{'handler': [db.backup_create, db.backup_get_all],
'column': 'display_name',
'resource': 'backup'},
{'handler': [db.group_create, db.group_get_all],
'column': 'name',
'resource': 'group'},
{'handler': [utils.create_group_snapshot,
db.group_snapshot_get_all],
'column': 'name',
'resource': 'group_snapshot'})
@ddt.unpack
def test_resource_get_all_like_filter(self, handler, column, resource):
for index in ['001', '002']:
option = {column: "fake_%s_%s" % (column, index)}
if resource in ['snapshot', 'backup']:
option['volume_id'] = self.fake_volume.id
if resource in ['message']:
option['project_id'] = fake.PROJECT_ID
option['event_id'] = fake.UUID1
if resource in ['group_snapshot']:
handler[0](self.ctxt, self.fake_group.id,
name="fake_%s_%s" % (column, index))
else:
handler[0](self.ctxt, option)
# test exact match
exact_filter = {column: 'fake_%s' % column}
resources = handler[1](self.ctxt, filters=exact_filter)
self.assertEqual(0, len(resources))
# test inexact match
inexact_filter = {"%s~" % column: 'fake_%s' % column}
resources = handler[1](self.ctxt, filters=inexact_filter)
self.assertEqual(2, len(resources))
@ddt.ddt
class DBAPIServiceTestCase(BaseTest):
"""Unit tests for cinder.db.api.service_*."""
def test_service_create(self):
# Add a cluster value to the service
values = {'cluster_name': 'cluster'}
service = utils.create_service(self.ctxt, values)
self.assertIsNotNone(service['id'])
expected = utils.default_service_values()
expected.update(values)
for key, value in expected.items():
self.assertEqual(value, service[key])
def test_service_destroy(self):
service1 = utils.create_service(self.ctxt, {})
service2 = utils.create_service(self.ctxt, {'host': 'fake_host2'})
self.assertDictEqual(
{'deleted': True, 'deleted_at': mock.ANY},
db.service_destroy(self.ctxt, service1['id']))
self.assertRaises(exception.ServiceNotFound,
db.service_get, self.ctxt, service1['id'])
self._assertEqualObjects(
service2,
db.service_get(self.ctxt, service2['id']))
def test_service_update(self):
service = utils.create_service(self.ctxt, {})
new_values = {
'host': 'fake_host1',
'binary': 'fake_binary1',
'topic': 'fake_topic1',
'report_count': 4,
'disabled': True
}
db.service_update(self.ctxt, service['id'], new_values)
updated_service = db.service_get(self.ctxt, service['id'])
for key, value in new_values.items():
self.assertEqual(value, updated_service[key])
def test_service_update_not_found_exception(self):
self.assertRaises(exception.ServiceNotFound,
db.service_update, self.ctxt, 100500, {})
def test_service_get(self):
service1 = utils.create_service(self.ctxt, {})
real_service1 = db.service_get(self.ctxt, service1['id'])
self._assertEqualObjects(service1, real_service1)
def test_service_get_by_cluster(self):
service = utils.create_service(self.ctxt,
{'cluster_name': 'cluster@backend'})
# Search with an exact match
real_service = db.service_get(self.ctxt,
cluster_name='cluster@backend')
self._assertEqualObjects(service, real_service)
# Search without the backend
real_service = db.service_get(self.ctxt, cluster_name='cluster')
self._assertEqualObjects(service, real_service)
def test_service_get_not_found_exception(self):
self.assertRaises(exception.ServiceNotFound,
db.service_get, self.ctxt, 100500)
def test_service_get_by_host_and_topic(self):
service1 = utils.create_service(self.ctxt,
{'host': 'host1', 'topic': 'topic1'})
real_service1 = db.service_get(self.ctxt, host='host1', topic='topic1')
self._assertEqualObjects(service1, real_service1)
@ddt.data('disabled', 'frozen')
def test_service_get_all_boolean_by_cluster(self, field_name):
values = [
# Enabled/Unfrozen services
{'host': 'host1', 'binary': 'b1', field_name: False},
{'host': 'host2', 'binary': 'b1', field_name: False,
'cluster_name': 'enabled_unfrozen_cluster'},
{'host': 'host3', 'binary': 'b1', field_name: True,
'cluster_name': 'enabled_unfrozen_cluster'},
# Disabled/Frozen services
{'host': 'host4', 'binary': 'b1', field_name: True},
{'host': 'host5', 'binary': 'b1', field_name: False,
'cluster_name': 'disabled_frozen_cluster'},
{'host': 'host6', 'binary': 'b1', field_name: True,
'cluster_name': 'disabled_frozen_cluster'},
]
db.cluster_create(self.ctxt, {'name': 'enabled_unfrozen_cluster',
'binary': 'b1',
                                      field_name: False})
db.cluster_create(self.ctxt, {'name': 'disabled_frozen_cluster',
'binary': 'b1',
                                      field_name: True})
services = [utils.create_service(self.ctxt, vals) for vals in values]
false_services = db.service_get_all(self.ctxt, **{field_name: False})
true_services = db.service_get_all(self.ctxt, **{field_name: True})
self.assertSetEqual({s.host for s in services[:3]},
{s.host for s in false_services})
self.assertSetEqual({s.host for s in services[3:]},
{s.host for s in true_services})
def test_service_get_all(self):
expired = (datetime.datetime.utcnow()
- datetime.timedelta(seconds=CONF.service_down_time + 1))
db.cluster_create(self.ctxt, {'name': 'cluster_disabled',
'binary': 'fake_binary',
'disabled': True})
db.cluster_create(self.ctxt, {'name': 'cluster_enabled',
'binary': 'fake_binary',
'disabled': False})
values = [
            # updated_at is now also set at creation time, so this service
            # counts as up despite its expired created_at.
{'host': 'host1', 'binary': 'b1', 'created_at': expired},
{'host': 'host1@ceph', 'binary': 'b2'},
{'host': 'host2', 'binary': 'b2'},
{'disabled': False, 'cluster_name': 'cluster_enabled'},
{'disabled': True, 'cluster_name': 'cluster_enabled'},
{'disabled': False, 'cluster_name': 'cluster_disabled'},
{'disabled': True, 'cluster_name': 'cluster_disabled'},
{'disabled': True, 'created_at': expired, 'updated_at': expired},
]
services = [utils.create_service(self.ctxt, vals) for vals in values]
disabled_services = services[-3:]
non_disabled_services = services[:-3]
up_services = services[:7]
down_services = [services[7]]
expected = services[:2]
expected_bin = services[1:3]
compares = [
(services, db.service_get_all(self.ctxt)),
(expected, db.service_get_all(self.ctxt, host='host1')),
(expected_bin, db.service_get_all(self.ctxt, binary='b2')),
(disabled_services, db.service_get_all(self.ctxt, disabled=True)),
(non_disabled_services, db.service_get_all(self.ctxt,
disabled=False)),
(up_services, db.service_get_all(self.ctxt, is_up=True)),
(down_services, db.service_get_all(self.ctxt, is_up=False)),
]
for i, comp in enumerate(compares):
self._assertEqualListsOfObjects(*comp,
msg='Error comparing %s' % i)
def test_service_get_all_by_topic(self):
values = [
{'host': 'host1', 'topic': 't1'},
{'host': 'host2', 'topic': 't1'},
{'host': 'host4', 'disabled': True, 'topic': 't1'},
{'host': 'host3', 'topic': 't2'}
]
services = [utils.create_service(self.ctxt, vals) for vals in values]
expected = services[:3]
real = db.service_get_all(self.ctxt, topic='t1')
self._assertEqualListsOfObjects(expected, real)
def test_service_get_all_by_binary(self):
values = [
{'host': 'host1', 'binary': 'b1'},
{'host': 'host2', 'binary': 'b1'},
{'host': 'host4', 'disabled': True, 'binary': 'b1'},
{'host': 'host3', 'binary': 'b2'}
]
services = [utils.create_service(self.ctxt, vals) for vals in values]
expected = services[:3]
real = db.service_get_all(self.ctxt, binary='b1')
self._assertEqualListsOfObjects(expected, real)
def test_service_get_by_args(self):
values = [
{'host': 'host1', 'binary': 'a'},
{'host': 'host2', 'binary': 'b'}
]
services = [utils.create_service(self.ctxt, vals) for vals in values]
service1 = db.service_get(self.ctxt, host='host1', binary='a')
self._assertEqualObjects(services[0], service1)
service2 = db.service_get(self.ctxt, host='host2', binary='b')
self._assertEqualObjects(services[1], service2)
def test_service_get_all_by_cluster(self):
values = [
{'host': 'host1', 'cluster_name': 'cluster'},
{'host': 'host2', 'cluster_name': 'cluster'},
{'host': 'host3', 'cluster_name': 'cluster@backend'},
{'host': 'host4', 'cluster_name': 'cluster2'},
]
services = [utils.create_service(self.ctxt, vals) for vals in values]
expected = services[:3]
real = db.service_get_all(self.ctxt, cluster_name='cluster')
self._assertEqualListsOfObjects(expected, real)
def test_service_get_all_by_host_or_cluster(self):
values = [
{'host': 'host1', 'cluster_name': 'cluster'},
{'host': 'host2', 'cluster_name': 'host1'},
{'host': 'host3', 'cluster_name': 'cluster@backend'},
{'host': 'host4', 'cluster_name': 'cluster2'},
]
services = [utils.create_service(self.ctxt, vals) for vals in values]
expected = services[0:2]
real = db.service_get_all(self.ctxt, host_or_cluster='host1')
self._assertEqualListsOfObjects(expected, real)
def test_service_get_by_args_not_found_exception(self):
self.assertRaises(exception.ServiceNotFound,
db.service_get,
self.ctxt, host='non-exists-host', binary='a')
@mock.patch('sqlalchemy.orm.query.Query.filter_by')
def test_service_get_by_args_with_case_insensitive(self, filter_by):
CONF.set_default('connection', 'mysql://', 'database')
db.service_get(self.ctxt, host='host', binary='a')
self.assertNotEqual(0, filter_by.call_count)
self.assertEqual(1, filter_by.return_value.filter.call_count)
or_op = filter_by.return_value.filter.call_args[0][0].clauses[0]
self.assertIsInstance(or_op,
sqlalchemy_api.sql.elements.BinaryExpression)
binary_op = or_op.right
self.assertIsInstance(binary_op, sqlalchemy_api.sql.functions.Function)
self.assertEqual('binary', binary_op.name)
@ddt.ddt
class DBAPIVolumeTestCase(BaseTest):
"""Unit tests for cinder.db.api.volume_*."""
def test_volume_create(self):
volume = db.volume_create(self.ctxt, {'host': 'host1'})
self.assertTrue(uuidutils.is_uuid_like(volume['id']))
self.assertEqual('host1', volume.host)
def test_volume_attached_invalid_uuid(self):
self.assertRaises(exception.InvalidUUID, db.volume_attached, self.ctxt,
42, 'invalid-uuid', None, '/tmp')
def test_volume_attached_to_instance(self):
volume = db.volume_create(self.ctxt, {'host': 'host1'})
instance_uuid = fake.INSTANCE_ID
values = {'volume_id': volume['id'],
'instance_uuid': instance_uuid,
'attach_status': fields.VolumeAttachStatus.ATTACHING, }
attachment = db.volume_attach(self.ctxt, values)
volume_db, updated_values = db.volume_attached(
self.ctxt,
attachment['id'],
instance_uuid, None, '/tmp')
expected_updated_values = {
'mountpoint': '/tmp',
'attach_status': fields.VolumeAttachStatus.ATTACHED,
'instance_uuid': instance_uuid,
'attached_host': None,
'attach_time': mock.ANY,
'attach_mode': 'rw'}
self.assertDictEqual(expected_updated_values, updated_values)
volume = db.volume_get(self.ctxt, volume['id'])
attachment = db.volume_attachment_get(self.ctxt, attachment['id'])
self._assertEqualObjects(volume, volume_db,
ignored_keys='volume_attachment')
self._assertEqualListsOfObjects(volume.volume_attachment,
volume_db.volume_attachment, 'volume')
self.assertEqual('in-use', volume['status'])
self.assertEqual('/tmp', attachment['mountpoint'])
self.assertEqual(fields.VolumeAttachStatus.ATTACHED,
attachment['attach_status'])
self.assertEqual(instance_uuid, attachment['instance_uuid'])
self.assertIsNone(attachment['attached_host'])
self.assertEqual(volume.project_id, attachment['volume']['project_id'])
def test_volume_attached_to_host(self):
volume = db.volume_create(self.ctxt, {'host': 'host1'})
host_name = 'fake_host'
values = {'volume_id': volume['id'],
'attached_host': host_name,
'attach_status': fields.VolumeAttachStatus.ATTACHING, }
attachment = db.volume_attach(self.ctxt, values)
volume_db, updated_values = db.volume_attached(
self.ctxt, attachment['id'],
None, host_name, '/tmp')
expected_updated_values = {
'mountpoint': '/tmp',
'attach_status': fields.VolumeAttachStatus.ATTACHED,
'instance_uuid': None,
'attached_host': host_name,
'attach_time': mock.ANY,
'attach_mode': 'rw'}
self.assertDictEqual(expected_updated_values, updated_values)
volume = db.volume_get(self.ctxt, volume['id'])
self._assertEqualObjects(volume, volume_db,
ignored_keys='volume_attachment')
self._assertEqualListsOfObjects(volume.volume_attachment,
volume_db.volume_attachment, 'volume')
attachment = db.volume_attachment_get(self.ctxt, attachment['id'])
self.assertEqual('in-use', volume['status'])
self.assertEqual('/tmp', attachment['mountpoint'])
self.assertEqual(fields.VolumeAttachStatus.ATTACHED,
attachment['attach_status'])
self.assertIsNone(attachment['instance_uuid'])
self.assertEqual(attachment['attached_host'], host_name)
self.assertEqual(volume.project_id, attachment['volume']['project_id'])
def test_volume_data_get_for_host(self):
for i in range(THREE):
for j in range(THREE):
db.volume_create(self.ctxt, {'host': 'h%d' % i,
'size': ONE_HUNDREDS})
for i in range(THREE):
self.assertEqual((THREE, THREE_HUNDREDS),
db.volume_data_get_for_host(
self.ctxt, 'h%d' % i))
def test_volume_data_get_for_host_for_multi_backend(self):
for i in range(THREE):
for j in range(THREE):
db.volume_create(self.ctxt, {'host':
'h%d@lvmdriver-1#lvmdriver-1' % i,
'size': ONE_HUNDREDS})
for i in range(THREE):
self.assertEqual((THREE, THREE_HUNDREDS),
db.volume_data_get_for_host(
self.ctxt, 'h%d@lvmdriver-1' % i))
def test_volume_data_get_for_project(self):
for i in range(THREE):
for j in range(THREE):
db.volume_create(self.ctxt, {'project_id': 'p%d' % i,
'size': ONE_HUNDREDS,
'host': 'h-%d-%d' % (i, j),
})
for i in range(THREE):
self.assertEqual((THREE, THREE_HUNDREDS),
db.volume_data_get_for_project(
self.ctxt, 'p%d' % i))
def test_volume_detached_from_instance(self):
volume = db.volume_create(self.ctxt, {})
instance_uuid = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
values = {'volume_id': volume['id'],
'instance_uuid': instance_uuid,
'attach_status': fields.VolumeAttachStatus.ATTACHING, }
attachment = db.volume_attach(self.ctxt, values)
db.volume_attached(self.ctxt, attachment.id,
instance_uuid,
None, '/tmp')
volume_updates, attachment_updates = (
db.volume_detached(self.ctxt, volume.id, attachment.id))
expected_attachment = {
'attach_status': fields.VolumeAttachStatus.DETACHED,
'detach_time': mock.ANY,
'deleted': True,
'deleted_at': mock.ANY, }
self.assertDictEqual(expected_attachment, attachment_updates)
expected_volume = {
'status': 'available',
'attach_status': fields.VolumeAttachStatus.DETACHED, }
self.assertDictEqual(expected_volume, volume_updates)
volume = db.volume_get(self.ctxt, volume.id)
self.assertRaises(exception.VolumeAttachmentNotFound,
db.volume_attachment_get,
self.ctxt,
attachment.id)
self.assertEqual('available', volume.status)
def test_volume_detached_two_attachments(self):
volume = db.volume_create(self.ctxt, {})
instance_uuid = fake.INSTANCE_ID
values = {'volume_id': volume.id,
'instance_uuid': instance_uuid,
'attach_status': fields.VolumeAttachStatus.ATTACHING, }
attachment = db.volume_attach(self.ctxt, values)
values2 = {'volume_id': volume.id,
'instance_uuid': fake.OBJECT_ID,
'attach_status': fields.VolumeAttachStatus.ATTACHING, }
db.volume_attach(self.ctxt, values2)
db.volume_attached(self.ctxt, attachment.id,
instance_uuid,
None, '/tmp')
volume_updates, attachment_updates = (
db.volume_detached(self.ctxt, volume.id, attachment.id))
expected_attachment = {
'attach_status': fields.VolumeAttachStatus.DETACHED,
'detach_time': mock.ANY,
'deleted': True,
'deleted_at': mock.ANY, }
self.assertDictEqual(expected_attachment, attachment_updates)
expected_volume = {
'status': 'in-use',
'attach_status': fields.VolumeAttachStatus.ATTACHED, }
self.assertDictEqual(expected_volume, volume_updates)
volume = db.volume_get(self.ctxt, volume.id)
self.assertRaises(exception.VolumeAttachmentNotFound,
db.volume_attachment_get,
self.ctxt,
attachment.id)
self.assertEqual('in-use', volume.status)
def test_volume_detached_invalid_attachment(self):
volume = db.volume_create(self.ctxt, {})
# detach it again
volume_updates, attachment_updates = (
db.volume_detached(self.ctxt, volume.id, fake.ATTACHMENT_ID))
self.assertIsNone(attachment_updates)
expected_volume = {
'status': 'available',
'attach_status': fields.VolumeAttachStatus.DETACHED, }
self.assertDictEqual(expected_volume, volume_updates)
volume = db.volume_get(self.ctxt, volume.id)
self.assertEqual('available', volume.status)
def test_volume_detached_from_host(self):
volume = db.volume_create(self.ctxt, {})
host_name = 'fake_host'
values = {'volume_id': volume.id,
'attach_host': host_name,
'attach_status': fields.VolumeAttachStatus.ATTACHING, }
attachment = db.volume_attach(self.ctxt, values)
db.volume_attached(self.ctxt, attachment.id,
None, host_name, '/tmp')
volume_updates, attachment_updates = (
db.volume_detached(self.ctxt, volume.id, attachment.id))
expected_attachment = {
'attach_status': fields.VolumeAttachStatus.DETACHED,
'detach_time': mock.ANY,
'deleted': True,
'deleted_at': mock.ANY}
self.assertDictEqual(expected_attachment, attachment_updates)
expected_volume = {
'status': 'available',
'attach_status': fields.VolumeAttachStatus.DETACHED, }
self.assertDictEqual(expected_volume, volume_updates)
volume = db.volume_get(self.ctxt, volume.id)
self.assertRaises(exception.VolumeAttachmentNotFound,
db.volume_attachment_get,
self.ctxt,
attachment.id)
self.assertEqual('available', volume.status)
def test_volume_get(self):
volume = db.volume_create(self.ctxt, {})
self._assertEqualObjects(volume, db.volume_get(self.ctxt,
volume['id']))
@mock.patch('oslo_utils.timeutils.utcnow', return_value=UTC_NOW)
def test_volume_destroy(self, utcnow_mock):
volume = db.volume_create(self.ctxt, {})
self.assertDictEqual(
{'status': 'deleted', 'deleted': True, 'deleted_at': UTC_NOW,
'migration_status': None},
db.volume_destroy(self.ctxt, volume['id']))
self.assertRaises(exception.VolumeNotFound, db.volume_get,
self.ctxt, volume['id'])
def test_volume_get_all(self):
volumes = [db.volume_create(self.ctxt,
{'host': 'h%d' % i, 'size': i})
for i in range(3)]
self._assertEqualListsOfObjects(volumes, db.volume_get_all(
self.ctxt, None, None, ['host'], None))
@ddt.data('cluster_name', 'host')
def test_volume_get_all_filter_host_and_cluster(self, field):
volumes = []
for i in range(2):
for value in ('host%d@backend#pool', 'host%d@backend', 'host%d'):
kwargs = {field: value % i}
volumes.append(utils.create_volume(self.ctxt, **kwargs))
for i in range(3):
filters = {field: getattr(volumes[i], field)}
result = db.volume_get_all(self.ctxt, filters=filters)
self.assertEqual(i + 1, len(result))
self.assertSetEqual({v.id for v in volumes[:i + 1]},
{v.id for v in result})
def test_volume_get_all_marker_passed(self):
volumes = [
db.volume_create(self.ctxt, {'id': 1}),
db.volume_create(self.ctxt, {'id': 2}),
db.volume_create(self.ctxt, {'id': 3}),
db.volume_create(self.ctxt, {'id': 4}),
]
self._assertEqualListsOfObjects(volumes[2:], db.volume_get_all(
self.ctxt, 2, 2, ['id'], ['asc']))
def test_volume_get_all_by_host(self):
volumes = []
for i in range(3):
volumes.append([db.volume_create(self.ctxt, {'host': 'h%d' % i})
for j in range(3)])
for i in range(3):
self._assertEqualListsOfObjects(volumes[i],
db.volume_get_all_by_host(
self.ctxt, 'h%d' % i))
def test_volume_get_all_by_host_with_pools(self):
volumes = []
vol_on_host_wo_pool = [db.volume_create(self.ctxt, {'host': 'foo'})
for j in range(3)]
vol_on_host_w_pool = [db.volume_create(
self.ctxt, {'host': 'foo#pool0'})]
volumes.append((vol_on_host_wo_pool +
vol_on_host_w_pool))
        # insert an additional record that doesn't belong to the same
        # host as 'foo' and verify it is not included in the result
db.volume_create(self.ctxt, {'host': 'foobar'})
self._assertEqualListsOfObjects(volumes[0],
db.volume_get_all_by_host(
self.ctxt, 'foo'))
def test_volume_get_all_by_host_with_filters(self):
v1 = db.volume_create(self.ctxt, {'host': 'h1', 'display_name': 'v1',
'status': 'available'})
v2 = db.volume_create(self.ctxt, {'host': 'h1', 'display_name': 'v2',
'status': 'available'})
v3 = db.volume_create(self.ctxt, {'host': 'h2', 'display_name': 'v1',
'status': 'available'})
self._assertEqualListsOfObjects(
[v1],
db.volume_get_all_by_host(self.ctxt, 'h1',
filters={'display_name': 'v1'}))
self._assertEqualListsOfObjects(
[v1, v2],
db.volume_get_all_by_host(
self.ctxt, 'h1',
filters={'display_name': ['v1', 'v2', 'foo']}))
self._assertEqualListsOfObjects(
[v1, v2],
db.volume_get_all_by_host(self.ctxt, 'h1',
filters={'status': 'available'}))
self._assertEqualListsOfObjects(
[v3],
db.volume_get_all_by_host(self.ctxt, 'h2',
filters={'display_name': 'v1'}))
# No match
vols = db.volume_get_all_by_host(self.ctxt, 'h1',
filters={'status': 'foo'})
self.assertEqual([], vols)
# Bogus filter, should return empty list
vols = db.volume_get_all_by_host(self.ctxt, 'h1',
filters={'foo': 'bar'})
self.assertEqual([], vols)
def test_volume_get_all_by_group(self):
volumes = []
for i in range(3):
volumes.append([db.volume_create(self.ctxt, {
'consistencygroup_id': 'g%d' % i}) for j in range(3)])
for i in range(3):
self._assertEqualListsOfObjects(volumes[i],
db.volume_get_all_by_group(
self.ctxt, 'g%d' % i))
def test_volume_get_all_by_group_with_filters(self):
v1 = db.volume_create(self.ctxt, {'consistencygroup_id': 'g1',
'display_name': 'v1'})
v2 = db.volume_create(self.ctxt, {'consistencygroup_id': 'g1',
'display_name': 'v2'})
v3 = db.volume_create(self.ctxt, {'consistencygroup_id': 'g2',
'display_name': 'v1'})
self._assertEqualListsOfObjects(
[v1],
db.volume_get_all_by_group(self.ctxt, 'g1',
filters={'display_name': 'v1'}))
self._assertEqualListsOfObjects(
[v1, v2],
db.volume_get_all_by_group(self.ctxt, 'g1',
filters={'display_name': ['v1', 'v2']}))
self._assertEqualListsOfObjects(
[v3],
db.volume_get_all_by_group(self.ctxt, 'g2',
filters={'display_name': 'v1'}))
# No match
vols = db.volume_get_all_by_group(self.ctxt, 'g1',
filters={'display_name': 'foo'})
self.assertEqual([], vols)
# Bogus filter, should return empty list
vols = db.volume_get_all_by_group(self.ctxt, 'g1',
filters={'foo': 'bar'})
self.assertEqual([], vols)
def test_volume_get_all_by_project(self):
volumes = []
for i in range(3):
volumes.append([db.volume_create(self.ctxt, {
'project_id': 'p%d' % i}) for j in range(3)])
for i in range(3):
self._assertEqualListsOfObjects(volumes[i],
db.volume_get_all_by_project(
self.ctxt, 'p%d' % i, None,
None, ['host'], None))
def test_volume_get_by_name(self):
db.volume_create(self.ctxt, {'display_name': 'vol1'})
db.volume_create(self.ctxt, {'display_name': 'vol2'})
db.volume_create(self.ctxt, {'display_name': 'vol3'})
# no name filter
volumes = db.volume_get_all(self.ctxt, None, None, ['created_at'],
['asc'])
self.assertEqual(3, len(volumes))
# filter on name
volumes = db.volume_get_all(self.ctxt, None, None, ['created_at'],
['asc'], {'display_name': 'vol2'})
self.assertEqual(1, len(volumes))
self.assertEqual('vol2', volumes[0]['display_name'])
# filter no match
volumes = db.volume_get_all(self.ctxt, None, None, ['created_at'],
['asc'], {'display_name': 'vol4'})
self.assertEqual(0, len(volumes))
def test_volume_list_by_status(self):
db.volume_create(self.ctxt, {'display_name': 'vol1',
'status': 'available'})
db.volume_create(self.ctxt, {'display_name': 'vol2',
'status': 'available'})
db.volume_create(self.ctxt, {'display_name': 'vol3',
'status': 'in-use'})
# no status filter
volumes = db.volume_get_all(self.ctxt, None, None, ['created_at'],
['asc'])
self.assertEqual(3, len(volumes))
# single match
volumes = db.volume_get_all(self.ctxt, None, None, ['created_at'],
['asc'], {'status': 'in-use'})
self.assertEqual(1, len(volumes))
self.assertEqual('in-use', volumes[0]['status'])
# multiple match
volumes = db.volume_get_all(self.ctxt, None, None, ['created_at'],
['asc'], {'status': 'available'})
self.assertEqual(2, len(volumes))
for volume in volumes:
self.assertEqual('available', volume['status'])
# multiple filters
volumes = db.volume_get_all(self.ctxt, None, None, ['created_at'],
['asc'], {'status': 'available',
'display_name': 'vol1'})
self.assertEqual(1, len(volumes))
self.assertEqual('vol1', volumes[0]['display_name'])
self.assertEqual('available', volumes[0]['status'])
# no match
volumes = db.volume_get_all(self.ctxt, None, None, ['created_at'],
['asc'], {'status': 'in-use',
'display_name': 'vol1'})
self.assertEqual(0, len(volumes))
def _assertEqualsVolumeOrderResult(self, correct_order, limit=None,
sort_keys=None, sort_dirs=None,
filters=None, project_id=None,
marker=None,
match_keys=['id', 'display_name',
'volume_metadata',
'created_at']):
"""Verifies that volumes are returned in the correct order."""
if project_id:
result = db.volume_get_all_by_project(self.ctxt, project_id,
marker, limit,
sort_keys=sort_keys,
sort_dirs=sort_dirs,
filters=filters)
else:
result = db.volume_get_all(self.ctxt, marker, limit,
sort_keys=sort_keys,
sort_dirs=sort_dirs,
filters=filters)
self.assertEqual(len(correct_order), len(result))
for vol1, vol2 in zip(result, correct_order):
for key in match_keys:
val1 = vol1.get(key)
val2 = vol2.get(key)
# metadata is a dict, compare the 'key' and 'value' of each
if key == 'volume_metadata':
self.assertEqual(len(val1), len(val2))
val1_dict = {x.key: x.value for x in val1}
val2_dict = {x.key: x.value for x in val2}
self.assertDictEqual(val1_dict, val2_dict)
else:
self.assertEqual(val1, val2)
return result
def test_volume_get_by_filter(self):
"""Verifies that all filtering is done at the DB layer."""
vols = []
vols.extend([db.volume_create(self.ctxt,
{'project_id': 'g1',
'display_name': 'name_%d' % i,
'size': 1})
for i in range(2)])
vols.extend([db.volume_create(self.ctxt,
{'project_id': 'g1',
'display_name': 'name_%d' % i,
'size': 2})
for i in range(2)])
vols.extend([db.volume_create(self.ctxt,
{'project_id': 'g1',
'display_name': 'name_%d' % i})
for i in range(2)])
vols.extend([db.volume_create(self.ctxt,
{'project_id': 'g2',
'display_name': 'name_%d' % i,
'size': 1})
for i in range(2)])
# By project, filter on size and name
filters = {'size': '1'}
correct_order = [vols[1], vols[0]]
self._assertEqualsVolumeOrderResult(correct_order, filters=filters,
project_id='g1')
filters = {'size': '1', 'display_name': 'name_1'}
correct_order = [vols[1]]
self._assertEqualsVolumeOrderResult(correct_order, filters=filters,
project_id='g1')
# Remove project scope
filters = {'size': '1'}
correct_order = [vols[7], vols[6], vols[1], vols[0]]
self._assertEqualsVolumeOrderResult(correct_order, filters=filters)
filters = {'size': '1', 'display_name': 'name_1'}
correct_order = [vols[7], vols[1]]
self._assertEqualsVolumeOrderResult(correct_order, filters=filters)
# Remove size constraint
filters = {'display_name': 'name_1'}
correct_order = [vols[5], vols[3], vols[1]]
self._assertEqualsVolumeOrderResult(correct_order, filters=filters,
project_id='g1')
correct_order = [vols[7], vols[5], vols[3], vols[1]]
self._assertEqualsVolumeOrderResult(correct_order, filters=filters)
# Verify bogus values return nothing
filters = {'display_name': 'name_1', 'bogus_value': 'foo'}
self._assertEqualsVolumeOrderResult([], filters=filters,
project_id='g1')
self._assertEqualsVolumeOrderResult([], project_id='bogus')
self._assertEqualsVolumeOrderResult([], filters=filters)
self._assertEqualsVolumeOrderResult([], filters={'metadata':
'not valid'})
self._assertEqualsVolumeOrderResult([], filters={'metadata':
['not', 'valid']})
# Verify that relationship property keys return nothing, these
# exist on the Volumes model but are not columns
filters = {'volume_type': 'bogus_type'}
self._assertEqualsVolumeOrderResult([], filters=filters)
def test_volume_get_all_filters_limit(self):
vol1 = db.volume_create(self.ctxt, {'display_name': 'test1'})
vol2 = db.volume_create(self.ctxt, {'display_name': 'test2'})
vol3 = db.volume_create(self.ctxt, {'display_name': 'test2',
'metadata': {'key1': 'val1'}})
vol4 = db.volume_create(self.ctxt, {'display_name': 'test3',
'metadata': {'key1': 'val1',
'key2': 'val2'}})
vol5 = db.volume_create(self.ctxt, {'display_name': 'test3',
'metadata': {'key2': 'val2',
'key3': 'val3'},
'host': 'host5'})
db.volume_admin_metadata_update(self.ctxt, vol5.id,
{"readonly": "True"}, False)
vols = [vol5, vol4, vol3, vol2, vol1]
        # Ensure we have 5 volumes in total
self._assertEqualsVolumeOrderResult(vols)
# No filters, test limit
self._assertEqualsVolumeOrderResult(vols[:1], limit=1)
self._assertEqualsVolumeOrderResult(vols[:4], limit=4)
# Just the test2 volumes
filters = {'display_name': 'test2'}
self._assertEqualsVolumeOrderResult([vol3, vol2], filters=filters)
self._assertEqualsVolumeOrderResult([vol3], limit=1,
filters=filters)
self._assertEqualsVolumeOrderResult([vol3, vol2], limit=2,
filters=filters)
self._assertEqualsVolumeOrderResult([vol3, vol2], limit=100,
filters=filters)
# metadata filters
filters = {'metadata': {'key1': 'val1'}}
self._assertEqualsVolumeOrderResult([vol4, vol3], filters=filters)
self._assertEqualsVolumeOrderResult([vol4], limit=1,
filters=filters)
self._assertEqualsVolumeOrderResult([vol4, vol3], limit=10,
filters=filters)
filters = {'metadata': {'readonly': 'True'}}
self._assertEqualsVolumeOrderResult([vol5], filters=filters)
filters = {'metadata': {'key1': 'val1',
'key2': 'val2'}}
self._assertEqualsVolumeOrderResult([vol4], filters=filters)
self._assertEqualsVolumeOrderResult([vol4], limit=1,
filters=filters)
# No match
filters = {'metadata': {'key1': 'val1',
'key2': 'val2',
'key3': 'val3'}}
self._assertEqualsVolumeOrderResult([], filters=filters)
filters = {'metadata': {'key1': 'val1',
'key2': 'bogus'}}
self._assertEqualsVolumeOrderResult([], filters=filters)
filters = {'metadata': {'key1': 'val1',
'key2': 'val1'}}
self._assertEqualsVolumeOrderResult([], filters=filters)
# Combination
filters = {'display_name': 'test2',
'metadata': {'key1': 'val1'}}
self._assertEqualsVolumeOrderResult([vol3], filters=filters)
self._assertEqualsVolumeOrderResult([vol3], limit=1,
filters=filters)
self._assertEqualsVolumeOrderResult([vol3], limit=100,
filters=filters)
filters = {'display_name': 'test3',
'metadata': {'key2': 'val2',
'key3': 'val3'},
'host': 'host5'}
self._assertEqualsVolumeOrderResult([vol5], filters=filters)
self._assertEqualsVolumeOrderResult([vol5], limit=1,
filters=filters)
def test_volume_get_no_migration_targets(self):
"""Verifies the unique 'no_migration_targets'=True filter.
This filter returns volumes with either a NULL 'migration_status'
or a non-NULL value that does not start with 'target:'.
"""
vol1 = db.volume_create(self.ctxt, {'display_name': 'test1'})
vol2 = db.volume_create(self.ctxt, {'display_name': 'test2',
'migration_status': 'bogus'})
vol3 = db.volume_create(self.ctxt, {'display_name': 'test3',
'migration_status': 'btarget:'})
vol4 = db.volume_create(self.ctxt, {'display_name': 'test4',
'migration_status': 'target:'})
        # Ensure we have 4 volumes in total, default sort of created_at (desc)
self._assertEqualsVolumeOrderResult([vol4, vol3, vol2, vol1])
# Apply the unique filter
filters = {'no_migration_targets': True}
self._assertEqualsVolumeOrderResult([vol3, vol2, vol1],
filters=filters)
self._assertEqualsVolumeOrderResult([vol3, vol2], limit=2,
filters=filters)
filters = {'no_migration_targets': True,
'display_name': 'test4'}
self._assertEqualsVolumeOrderResult([], filters=filters)
def test_volume_get_all_by_filters_sort_keys(self):
        # Volumes that will match the query
test_h1_avail = db.volume_create(self.ctxt, {'display_name': 'test',
'status': 'available',
'host': 'h1'})
test_h1_error = db.volume_create(self.ctxt, {'display_name': 'test',
'status': 'error',
'host': 'h1'})
test_h1_error2 = db.volume_create(self.ctxt, {'display_name': 'test',
'status': 'error',
'host': 'h1'})
test_h2_avail = db.volume_create(self.ctxt, {'display_name': 'test',
'status': 'available',
'host': 'h2'})
test_h2_error = db.volume_create(self.ctxt, {'display_name': 'test',
'status': 'error',
'host': 'h2'})
test_h2_error2 = db.volume_create(self.ctxt, {'display_name': 'test',
'status': 'error',
'host': 'h2'})
        # Other volumes in the DB that will not match the name filter
other_error = db.volume_create(self.ctxt, {'display_name': 'other',
'status': 'error',
'host': 'a'})
other_active = db.volume_create(self.ctxt, {'display_name': 'other',
'status': 'available',
'host': 'a'})
filters = {'display_name': 'test'}
# Verify different sort key/direction combinations
sort_keys = ['host', 'status', 'created_at']
sort_dirs = ['asc', 'asc', 'asc']
correct_order = [test_h1_avail, test_h1_error, test_h1_error2,
test_h2_avail, test_h2_error, test_h2_error2]
self._assertEqualsVolumeOrderResult(correct_order, filters=filters,
sort_keys=sort_keys,
sort_dirs=sort_dirs)
sort_dirs = ['asc', 'desc', 'asc']
correct_order = [test_h1_error, test_h1_error2, test_h1_avail,
test_h2_error, test_h2_error2, test_h2_avail]
self._assertEqualsVolumeOrderResult(correct_order, filters=filters,
sort_keys=sort_keys,
sort_dirs=sort_dirs)
sort_dirs = ['desc', 'desc', 'asc']
correct_order = [test_h2_error, test_h2_error2, test_h2_avail,
test_h1_error, test_h1_error2, test_h1_avail]
self._assertEqualsVolumeOrderResult(correct_order, filters=filters,
sort_keys=sort_keys,
sort_dirs=sort_dirs)
        # created_at is added by default if not supplied; here it uses
        # descending order
sort_keys = ['host', 'status']
sort_dirs = ['desc', 'desc']
correct_order = [test_h2_error2, test_h2_error, test_h2_avail,
test_h1_error2, test_h1_error, test_h1_avail]
self._assertEqualsVolumeOrderResult(correct_order, filters=filters,
sort_keys=sort_keys,
sort_dirs=sort_dirs)
sort_dirs = ['asc', 'asc']
correct_order = [test_h1_avail, test_h1_error, test_h1_error2,
test_h2_avail, test_h2_error, test_h2_error2]
self._assertEqualsVolumeOrderResult(correct_order, filters=filters,
sort_keys=sort_keys,
sort_dirs=sort_dirs)
# Remove name filter
correct_order = [other_active, other_error,
test_h1_avail, test_h1_error, test_h1_error2,
test_h2_avail, test_h2_error, test_h2_error2]
self._assertEqualsVolumeOrderResult(correct_order, sort_keys=sort_keys,
sort_dirs=sort_dirs)
# No sort data, default sort of created_at, id (desc)
correct_order = [other_active, other_error,
test_h2_error2, test_h2_error, test_h2_avail,
test_h1_error2, test_h1_error, test_h1_avail]
self._assertEqualsVolumeOrderResult(correct_order)
def test_volume_get_all_by_filters_sort_keys_paginate(self):
"""Verifies sort order with pagination."""
        # Volumes that will match the query
test1_avail = db.volume_create(self.ctxt, {'display_name': 'test',
'size': 1,
'status': 'available'})
test1_error = db.volume_create(self.ctxt, {'display_name': 'test',
'size': 1,
'status': 'error'})
test1_error2 = db.volume_create(self.ctxt, {'display_name': 'test',
'size': 1,
'status': 'error'})
test2_avail = db.volume_create(self.ctxt, {'display_name': 'test',
'size': 2,
'status': 'available'})
test2_error = db.volume_create(self.ctxt, {'display_name': 'test',
'size': 2,
'status': 'error'})
test2_error2 = db.volume_create(self.ctxt, {'display_name': 'test',
'size': 2,
'status': 'error'})
        # Other volumes in the DB that will not match the name filter
db.volume_create(self.ctxt, {'display_name': 'other'})
db.volume_create(self.ctxt, {'display_name': 'other'})
filters = {'display_name': 'test'}
# Common sort information for every query
sort_keys = ['size', 'status', 'created_at']
sort_dirs = ['asc', 'desc', 'asc']
# Overall correct volume order based on the sort keys
correct_order = [test1_error, test1_error2, test1_avail,
test2_error, test2_error2, test2_avail]
        # For limits of 1, 2, and 3, verify that the volumes returned are
        # in the correct sorted order, updating the marker to fetch each
        # successive page
for limit in range(1, 4):
marker = None
            # Iterate past the maximum number of volumes (i.e. 6) to
            # ensure that the last query (with the marker pointing at the
            # last volume) returns zero volumes
for i in range(0, 7, limit):
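                # range(0, 7, limit) deliberately overshoots the six
                # matching volumes so the final iteration exercises an
                # empty page.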
if i == len(correct_order):
correct = []
else:
correct = correct_order[i:i + limit]
vols = self._assertEqualsVolumeOrderResult(
correct, filters=filters,
sort_keys=sort_keys, sort_dirs=sort_dirs,
limit=limit, marker=marker)
if correct:
marker = vols[-1]['id']
self.assertEqual(correct[-1]['id'], marker)
def test_volume_get_all_invalid_sort_key(self):
for keys in (['foo'], ['display_name', 'foo']):
self.assertRaises(exception.InvalidInput, db.volume_get_all,
self.ctxt, None, None, sort_keys=keys)
def test_volume_update(self):
volume = db.volume_create(self.ctxt, {'host': 'h1'})
db.volume_update(self.ctxt, volume.id,
{'host': 'h2',
'metadata': {'m1': 'v1'}})
volume = db.volume_get(self.ctxt, volume.id)
self.assertEqual('h2', volume.host)
self.assertEqual(1, len(volume.volume_metadata))
db_metadata = volume.volume_metadata[0]
self.assertEqual('m1', db_metadata.key)
self.assertEqual('v1', db_metadata.value)
def test_volume_update_nonexistent(self):
self.assertRaises(exception.VolumeNotFound, db.volume_update,
self.ctxt, 42, {})
def test_volume_metadata_get(self):
metadata = {'a': 'b', 'c': 'd'}
db.volume_create(self.ctxt, {'id': 1, 'metadata': metadata})
self.assertEqual(metadata, db.volume_metadata_get(self.ctxt, 1))
def test_volume_metadata_update(self):
metadata1 = {'a': '1', 'c': '2'}
metadata2 = {'a': '3', 'd': '5'}
should_be = {'a': '3', 'c': '2', 'd': '5'}
db.volume_create(self.ctxt, {'id': 1, 'metadata': metadata1})
db_meta = db.volume_metadata_update(self.ctxt, 1, metadata2, False)
self.assertEqual(should_be, db_meta)
@mock.patch.object(db.sqlalchemy.api,
'_volume_glance_metadata_key_to_id',
                       return_value='1')
def test_volume_glance_metadata_key_to_id_called(self,
metadata_key_to_id_mock):
image_metadata = {'abc': '123'}
# create volume with metadata.
db.volume_create(self.ctxt, {'id': 1,
'metadata': image_metadata})
# delete metadata associated with the volume.
db.volume_metadata_delete(self.ctxt,
1,
'abc',
meta_type=common.METADATA_TYPES.image)
# assert _volume_glance_metadata_key_to_id() was called exactly once
metadata_key_to_id_mock.assert_called_once_with(self.ctxt, 1, 'abc')
def test_case_sensitive_glance_metadata_delete(self):
user_metadata = {'a': '1', 'c': '2'}
image_metadata = {'abc': '123', 'ABC': '123'}
# create volume with metadata.
db.volume_create(self.ctxt, {'id': 1,
'metadata': user_metadata})
# delete user metadata associated with the volume.
db.volume_metadata_delete(self.ctxt, 1, 'c',
meta_type=common.METADATA_TYPES.user)
user_metadata.pop('c')
self.assertEqual(user_metadata,
db.volume_metadata_get(self.ctxt, 1))
# create image metadata associated with the volume.
db.volume_metadata_update(
self.ctxt,
1,
image_metadata,
False,
meta_type=common.METADATA_TYPES.image)
# delete image metadata associated with the volume.
db.volume_metadata_delete(
self.ctxt,
1,
'abc',
meta_type=common.METADATA_TYPES.image)
image_metadata.pop('abc')
# parse the result to build the dict.
rows = db.volume_glance_metadata_get(self.ctxt, 1)
result = {}
for row in rows:
result[row['key']] = row['value']
self.assertEqual(image_metadata, result)
def test_volume_metadata_update_with_metatype(self):
user_metadata1 = {'a': '1', 'c': '2'}
user_metadata2 = {'a': '3', 'd': '5'}
expected1 = {'a': '3', 'c': '2', 'd': '5'}
image_metadata1 = {'e': '1', 'f': '2'}
image_metadata2 = {'e': '3', 'g': '5'}
expected2 = {'e': '3', 'f': '2', 'g': '5'}
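        # An enum value outside common.METADATA_TYPES, used below to
        # trigger InvalidMetadataType.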
FAKE_METADATA_TYPE = enum.Enum('METADATA_TYPES', 'fake_type')
db.volume_create(self.ctxt, {'id': 1, 'metadata': user_metadata1})
        # update user metadata associated with volume.
db_meta = db.volume_metadata_update(
self.ctxt,
1,
user_metadata2,
False,
meta_type=common.METADATA_TYPES.user)
self.assertEqual(expected1, db_meta)
        # create image metadata associated with volume.
db_meta = db.volume_metadata_update(
self.ctxt,
1,
image_metadata1,
False,
meta_type=common.METADATA_TYPES.image)
self.assertEqual(image_metadata1, db_meta)
        # update image metadata associated with volume.
db_meta = db.volume_metadata_update(
self.ctxt,
1,
image_metadata2,
False,
meta_type=common.METADATA_TYPES.image)
self.assertEqual(expected2, db_meta)
        # updating metadata with an invalid metadata type must raise.
self.assertRaises(exception.InvalidMetadataType,
db.volume_metadata_update,
self.ctxt,
1,
image_metadata1,
False,
FAKE_METADATA_TYPE.fake_type)
@ddt.data(common.METADATA_TYPES.user, common.METADATA_TYPES.image)
@mock.patch.object(timeutils, 'utcnow')
@mock.patch.object(sqlalchemy_api, 'resource_exists')
@mock.patch.object(sqlalchemy_api, 'conditional_update')
@mock.patch.object(sqlalchemy_api, '_volume_x_metadata_get_query')
def test_volume_metadata_delete_deleted_at_updated(self,
meta_type,
mock_query,
mock_update,
mock_resource,
mock_utc):
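        # Everything DB-side is mocked out; the test only verifies that
        # the soft delete stamps 'deleted' and 'deleted_at' through
        # conditional_update.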
mock_query.all.return_value = {}
mock_utc.return_value = 'fake_time'
db.volume_metadata_update(self.ctxt, 1, {}, True, meta_type=meta_type)
mock_update.assert_called_once_with(mock.ANY, mock.ANY,
{'deleted': True,
'deleted_at': 'fake_time'},
mock.ANY)
def test_volume_metadata_update_delete(self):
metadata1 = {'a': '1', 'c': '2'}
metadata2 = {'a': '3', 'd': '4'}
should_be = metadata2
db.volume_create(self.ctxt, {'id': 1, 'metadata': metadata1})
db_meta = db.volume_metadata_update(self.ctxt, 1, metadata2, True)
self.assertEqual(should_be, db_meta)
def test_volume_metadata_delete(self):
metadata = {'a': 'b', 'c': 'd'}
db.volume_create(self.ctxt, {'id': 1, 'metadata': metadata})
db.volume_metadata_delete(self.ctxt, 1, 'c')
metadata.pop('c')
self.assertEqual(metadata, db.volume_metadata_get(self.ctxt, 1))
def test_volume_metadata_delete_with_metatype(self):
user_metadata = {'a': '1', 'c': '2'}
image_metadata = {'e': '1', 'f': '2'}
FAKE_METADATA_TYPE = enum.Enum('METADATA_TYPES', 'fake_type')
        # test that user metadata is deleted when meta_type is specified.
db.volume_create(self.ctxt, {'id': 1, 'metadata': user_metadata})
db.volume_metadata_delete(self.ctxt, 1, 'c',
meta_type=common.METADATA_TYPES.user)
user_metadata.pop('c')
self.assertEqual(user_metadata, db.volume_metadata_get(self.ctxt, 1))
# update the image metadata associated with the volume.
db.volume_metadata_update(
self.ctxt,
1,
image_metadata,
False,
meta_type=common.METADATA_TYPES.image)
        # test that image metadata is deleted when meta_type is specified.
db.volume_metadata_delete(self.ctxt, 1, 'e',
meta_type=common.METADATA_TYPES.image)
image_metadata.pop('e')
# parse the result to build the dict.
rows = db.volume_glance_metadata_get(self.ctxt, 1)
result = {}
for row in rows:
result[row['key']] = row['value']
self.assertEqual(image_metadata, result)
        # deleting metadata with an invalid metadata type must raise.
self.assertRaises(exception.InvalidMetadataType,
db.volume_metadata_delete,
self.ctxt,
1,
'f',
FAKE_METADATA_TYPE.fake_type)
def test_volume_glance_metadata_create(self):
volume = db.volume_create(self.ctxt, {'host': 'h1'})
db.volume_glance_metadata_create(self.ctxt, volume['id'],
'image_name',
u'\xe4\xbd\xa0\xe5\xa5\xbd')
glance_meta = db.volume_glance_metadata_get(self.ctxt, volume['id'])
for meta_entry in glance_meta:
if meta_entry.key == 'image_name':
image_name = meta_entry.value
self.assertEqual(u'\xe4\xbd\xa0\xe5\xa5\xbd', image_name)
def test_volume_glance_metadata_list_get(self):
"""Test volume_glance_metadata_list_get in DB API."""
db.volume_create(self.ctxt, {'id': 'fake1', 'status': 'available',
'host': 'test', 'provider_location': '',
'size': 1})
db.volume_glance_metadata_create(self.ctxt, 'fake1', 'key1', 'value1')
db.volume_glance_metadata_create(self.ctxt, 'fake1', 'key2', 'value2')
db.volume_create(self.ctxt, {'id': 'fake2', 'status': 'available',
'host': 'test', 'provider_location': '',
'size': 1})
db.volume_glance_metadata_create(self.ctxt, 'fake2', 'key3', 'value3')
db.volume_glance_metadata_create(self.ctxt, 'fake2', 'key4', 'value4')
expect_result = [{'volume_id': 'fake1', 'key': 'key1',
'value': 'value1'},
{'volume_id': 'fake1', 'key': 'key2',
'value': 'value2'},
{'volume_id': 'fake2', 'key': 'key3',
'value': 'value3'},
{'volume_id': 'fake2', 'key': 'key4',
'value': 'value4'}]
self._assertEqualListsOfObjects(expect_result,
db.volume_glance_metadata_list_get(
self.ctxt, ['fake1', 'fake2']),
ignored_keys=['id',
'snapshot_id',
'created_at',
'deleted', 'deleted_at',
'updated_at'])
def _create_volume_with_image_metadata(self):
vol1 = db.volume_create(self.ctxt, {'display_name': 'test1'})
db.volume_glance_metadata_create(self.ctxt, vol1.id, 'image_name',
'imageTestOne')
db.volume_glance_metadata_create(self.ctxt, vol1.id, 'test_image_key',
'test_image_value')
vol2 = db.volume_create(self.ctxt, {'display_name': 'test2'})
db.volume_glance_metadata_create(self.ctxt, vol2.id, 'image_name',
'imageTestTwo')
db.volume_glance_metadata_create(self.ctxt, vol2.id, 'disk_format',
'qcow2')
return [vol1, vol2]
def test_volume_get_all_by_image_name_and_key(self):
vols = self._create_volume_with_image_metadata()
filters = {'glance_metadata': {'image_name': 'imageTestOne',
'test_image_key': 'test_image_value'}}
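        # marker and limit are passed positionally as None; only sorting
        # and the glance_metadata filter are exercised here.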
volumes = db.volume_get_all(self.ctxt, None, None, ['created_at'],
['desc'], filters=filters)
self._assertEqualListsOfObjects([vols[0]], volumes)
def test_volume_get_all_by_image_name_and_disk_format(self):
vols = self._create_volume_with_image_metadata()
filters = {'glance_metadata': {'image_name': 'imageTestTwo',
'disk_format': 'qcow2'}}
volumes = db.volume_get_all(self.ctxt, None, None, ['created_at'],
['desc'], filters=filters)
self._assertEqualListsOfObjects([vols[1]], volumes)
def test_volume_get_all_by_invalid_image_metadata(self):
# Test with invalid image metadata
self._create_volume_with_image_metadata()
filters = {'glance_metadata': {'invalid_key': 'invalid_value',
'test_image_key': 'test_image_value'}}
volumes = db.volume_get_all(self.ctxt, None, None, ['created_at'],
['desc'], filters=filters)
self._assertEqualListsOfObjects([], volumes)
def _create_volumes_to_test_include_in(self):
"""Helper method for test_volume_include_in_* tests."""
return [
db.volume_create(self.ctxt,
{'host': 'host1@backend1#pool1',
'cluster_name': 'cluster1@backend1#pool1'}),
db.volume_create(self.ctxt,
{'host': 'host1@backend2#pool2',
'cluster_name': 'cluster1@backend2#pool2'}),
db.volume_create(self.ctxt,
{'host': 'host2@backend#poo1',
'cluster_name': 'cluster2@backend#pool'}),
]
@ddt.data('host1@backend1#pool1', 'host1@backend1')
def test_volume_include_in_cluster_by_host(self, host):
"""Basic volume include test filtering by host and with full rename."""
vol = self._create_volumes_to_test_include_in()[0]
cluster_name = 'my_cluster'
result = db.volume_include_in_cluster(self.ctxt, cluster_name,
partial_rename=False,
host=host)
self.assertEqual(1, result)
db_vol = db.volume_get(self.ctxt, vol.id)
self.assertEqual(cluster_name, db_vol.cluster_name)
def test_volume_include_in_cluster_by_host_multiple(self):
"""Partial cluster rename filtering with host level info."""
vols = self._create_volumes_to_test_include_in()[0:2]
host = 'host1'
cluster_name = 'my_cluster'
result = db.volume_include_in_cluster(self.ctxt, cluster_name,
partial_rename=True,
host=host)
self.assertEqual(2, result)
db_vols = [db.volume_get(self.ctxt, vols[0].id),
db.volume_get(self.ctxt, vols[1].id)]
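        # A partial rename preserves everything after the matched host
        # prefix, so the new cluster_name keeps the '@backend#pool' tail.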
for i in range(2):
self.assertEqual(cluster_name + vols[i].host[len(host):],
db_vols[i].cluster_name)
@ddt.data('cluster1@backend1#pool1', 'cluster1@backend1')
def test_volume_include_in_cluster_by_cluster_name(self, cluster_name):
"""Basic volume include test filtering by cluster with full rename."""
vol = self._create_volumes_to_test_include_in()[0]
new_cluster_name = 'cluster_new@backend1#pool'
result = db.volume_include_in_cluster(self.ctxt, new_cluster_name,
partial_rename=False,
cluster_name=cluster_name)
self.assertEqual(1, result)
db_vol = db.volume_get(self.ctxt, vol.id)
self.assertEqual(new_cluster_name, db_vol.cluster_name)
def test_volume_include_in_cluster_by_cluster_multiple(self):
"""Partial rename filtering with cluster with host level info."""
vols = self._create_volumes_to_test_include_in()[0:2]
cluster_name = 'cluster1'
new_cluster_name = 'my_cluster'
result = db.volume_include_in_cluster(self.ctxt, new_cluster_name,
partial_rename=True,
cluster_name=cluster_name)
self.assertEqual(2, result)
db_vols = [db.volume_get(self.ctxt, vols[0].id),
db.volume_get(self.ctxt, vols[1].id)]
for i in range(2):
self.assertEqual(
new_cluster_name + vols[i].cluster_name[len(cluster_name):],
db_vols[i].cluster_name)
@ddt.ddt
class DBAPISnapshotTestCase(BaseTest):
"""Tests for cinder.db.api.snapshot_*."""
def test_snapshot_data_get_for_project(self):
actual = db.snapshot_data_get_for_project(self.ctxt, 'project1')
self.assertEqual((0, 0), actual)
db.volume_create(self.ctxt, {'id': 1,
'project_id': 'project1',
'size': 42})
db.snapshot_create(self.ctxt, {'id': 1, 'volume_id': 1,
'project_id': 'project1',
'volume_size': 42})
actual = db.snapshot_data_get_for_project(self.ctxt, 'project1')
self.assertEqual((1, 42), actual)
@ddt.data({'time_collection': [1, 2, 3],
'latest': 1},
{'time_collection': [4, 2, 6],
'latest': 2},
{'time_collection': [8, 2, 1],
'latest': 1})
@ddt.unpack
def test_snapshot_get_latest_for_volume(self, time_collection, latest):
def hours_ago(hour):
return timeutils.utcnow() - datetime.timedelta(
hours=hour)
db.volume_create(self.ctxt, {'id': 1})
for snapshot in time_collection:
db.snapshot_create(self.ctxt,
{'id': snapshot, 'volume_id': 1,
'display_name': 'one',
'created_at': hours_ago(snapshot),
'status': fields.SnapshotStatus.AVAILABLE})
snapshot = db.snapshot_get_latest_for_volume(self.ctxt, 1)
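        # Snapshot IDs come back as strings, hence the six.text_type cast
        # on the expected integer id.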
self.assertEqual(six.text_type(latest), snapshot['id'])
def test_snapshot_get_latest_for_volume_not_found(self):
db.volume_create(self.ctxt, {'id': 1})
for t_id in [2, 3]:
db.snapshot_create(self.ctxt,
{'id': t_id, 'volume_id': t_id,
'display_name': 'one',
'status': fields.SnapshotStatus.AVAILABLE})
self.assertRaises(exception.VolumeSnapshotNotFound,
db.snapshot_get_latest_for_volume, self.ctxt, 1)
def test_snapshot_get_all_by_filter(self):
db.volume_create(self.ctxt, {'id': 1})
db.volume_create(self.ctxt, {'id': 2})
snapshot1 = db.snapshot_create(self.ctxt,
{'id': 1, 'volume_id': 1,
'display_name': 'one',
'status':
fields.SnapshotStatus.AVAILABLE})
snapshot2 = db.snapshot_create(self.ctxt,
{'id': 2, 'volume_id': 1,
'display_name': 'two',
'status':
fields.SnapshotStatus.CREATING})
snapshot3 = db.snapshot_create(self.ctxt,
{'id': 3, 'volume_id': 2,
'display_name': 'three',
'status':
fields.SnapshotStatus.AVAILABLE})
# no filter
filters = {}
snapshots = db.snapshot_get_all(self.ctxt, filters=filters)
self.assertEqual(3, len(snapshots))
# single match
filters = {'display_name': 'two'}
self._assertEqualListsOfObjects([snapshot2],
db.snapshot_get_all(
self.ctxt,
filters),
ignored_keys=['metadata', 'volume'])
filters = {'volume_id': 2}
self._assertEqualListsOfObjects([snapshot3],
db.snapshot_get_all(
self.ctxt,
filters),
ignored_keys=['metadata', 'volume'])
# filter no match
filters = {'volume_id': 5}
self._assertEqualListsOfObjects([],
db.snapshot_get_all(
self.ctxt,
filters),
ignored_keys=['metadata', 'volume'])
filters = {'status': fields.SnapshotStatus.ERROR}
self._assertEqualListsOfObjects([],
db.snapshot_get_all(
self.ctxt,
filters),
ignored_keys=['metadata', 'volume'])
# multiple match
filters = {'volume_id': 1}
self._assertEqualListsOfObjects([snapshot1, snapshot2],
db.snapshot_get_all(
self.ctxt,
filters),
ignored_keys=['metadata', 'volume'])
filters = {'status': fields.SnapshotStatus.AVAILABLE}
self._assertEqualListsOfObjects([snapshot1, snapshot3],
db.snapshot_get_all(
self.ctxt,
filters),
ignored_keys=['metadata', 'volume'])
filters = {'volume_id': 1, 'status': fields.SnapshotStatus.AVAILABLE}
self._assertEqualListsOfObjects([snapshot1],
db.snapshot_get_all(
self.ctxt,
filters),
ignored_keys=['metadata', 'volume'])
filters = {'fake_key': 'fake'}
self._assertEqualListsOfObjects([],
db.snapshot_get_all(
self.ctxt,
filters),
ignored_keys=['metadata', 'volume'])
@ddt.data('cluster_name', 'host')
def test_snapshot_get_all_filter_host_and_cluster(self, field):
volumes = []
snapshots = []
for i in range(2):
for value in ('host%d@backend#pool', 'host%d@backend', 'host%d'):
kwargs = {field: value % i}
vol = utils.create_volume(self.ctxt, **kwargs)
volumes.append(vol)
snapshots.append(utils.create_snapshot(self.ctxt, vol.id))
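        # Filtering on volumes[i]'s value must match that row plus every
        # finer-grained one, e.g. 'host0' also matches 'host0@backend'
        # and 'host0@backend#pool'.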
for i in range(3):
filters = {field: getattr(volumes[i], field)}
result = db.snapshot_get_all(self.ctxt, filters=filters)
self.assertEqual(i + 1, len(result))
self.assertSetEqual({s.id for s in snapshots[:i + 1]},
{s.id for s in result})
def test_snapshot_get_all_by_host(self):
db.volume_create(self.ctxt, {'id': 1, 'host': 'host1'})
db.volume_create(self.ctxt, {'id': 2, 'host': 'host2'})
snapshot1 = db.snapshot_create(self.ctxt, {'id': 1, 'volume_id': 1})
snapshot2 = db.snapshot_create(self.ctxt,
{'id': 2,
'volume_id': 2,
'status':
fields.SnapshotStatus.ERROR})
self._assertEqualListsOfObjects([snapshot1],
db.snapshot_get_all_by_host(
self.ctxt,
'host1'),
ignored_keys='volume')
self._assertEqualListsOfObjects([snapshot2],
db.snapshot_get_all_by_host(
self.ctxt,
'host2'),
ignored_keys='volume')
self._assertEqualListsOfObjects(
[], db.snapshot_get_all_by_host(self.ctxt, 'host2', {
'status': fields.SnapshotStatus.AVAILABLE}),
ignored_keys='volume')
self._assertEqualListsOfObjects(
[snapshot2], db.snapshot_get_all_by_host(self.ctxt, 'host2', {
'status': fields.SnapshotStatus.ERROR}),
ignored_keys='volume')
self._assertEqualListsOfObjects([],
db.snapshot_get_all_by_host(
self.ctxt,
'host2', {'fake_key': 'fake'}),
ignored_keys='volume')
# If host is None or empty string, empty list should be returned.
self.assertEqual([], db.snapshot_get_all_by_host(self.ctxt, None))
self.assertEqual([], db.snapshot_get_all_by_host(self.ctxt, ''))
def test_snapshot_get_all_by_host_with_pools(self):
db.volume_create(self.ctxt, {'id': 1, 'host': 'host1#pool1'})
db.volume_create(self.ctxt, {'id': 2, 'host': 'host1#pool2'})
snapshot1 = db.snapshot_create(self.ctxt, {'id': 1, 'volume_id': 1})
snapshot2 = db.snapshot_create(self.ctxt, {'id': 2, 'volume_id': 2})
self._assertEqualListsOfObjects([snapshot1, snapshot2],
db.snapshot_get_all_by_host(
self.ctxt,
'host1'),
ignored_keys='volume')
self._assertEqualListsOfObjects([snapshot1],
db.snapshot_get_all_by_host(
self.ctxt,
'host1#pool1'),
ignored_keys='volume')
self._assertEqualListsOfObjects([],
db.snapshot_get_all_by_host(
self.ctxt,
'host1#pool0'),
ignored_keys='volume')
def test_snapshot_get_all_by_project(self):
db.volume_create(self.ctxt, {'id': 1})
db.volume_create(self.ctxt, {'id': 2})
snapshot1 = db.snapshot_create(self.ctxt, {'id': 1, 'volume_id': 1,
'project_id': 'project1'})
snapshot2 = db.snapshot_create(
self.ctxt, {'id': 2, 'volume_id': 2, 'status':
fields.SnapshotStatus.ERROR, 'project_id': 'project2'})
self._assertEqualListsOfObjects([snapshot1],
db.snapshot_get_all_by_project(
self.ctxt,
'project1'),
ignored_keys='volume')
self._assertEqualListsOfObjects([snapshot2],
db.snapshot_get_all_by_project(
self.ctxt,
'project2'),
ignored_keys='volume')
self._assertEqualListsOfObjects(
[], db.snapshot_get_all_by_project(
self.ctxt,
'project2',
{'status': fields.SnapshotStatus.AVAILABLE}),
ignored_keys='volume')
self._assertEqualListsOfObjects(
[snapshot2], db.snapshot_get_all_by_project(
self.ctxt, 'project2', {
'status': fields.SnapshotStatus.ERROR}),
ignored_keys='volume')
self._assertEqualListsOfObjects([],
db.snapshot_get_all_by_project(
self.ctxt,
'project2',
{'fake_key': 'fake'}),
ignored_keys='volume')
def test_snapshot_metadata_get(self):
metadata = {'a': 'b', 'c': 'd'}
db.volume_create(self.ctxt, {'id': 1})
db.snapshot_create(self.ctxt,
{'id': 1, 'volume_id': 1, 'metadata': metadata})
self.assertEqual(metadata, db.snapshot_metadata_get(self.ctxt, 1))
def test_snapshot_metadata_update(self):
metadata1 = {'a': '1', 'c': '2'}
metadata2 = {'a': '3', 'd': '5'}
should_be = {'a': '3', 'c': '2', 'd': '5'}
db.volume_create(self.ctxt, {'id': 1})
db.snapshot_create(self.ctxt,
{'id': 1, 'volume_id': 1, 'metadata': metadata1})
db_meta = db.snapshot_metadata_update(self.ctxt, 1, metadata2, False)
self.assertEqual(should_be, db_meta)
def test_snapshot_metadata_update_delete(self):
metadata1 = {'a': '1', 'c': '2'}
metadata2 = {'a': '3', 'd': '5'}
should_be = metadata2
db.volume_create(self.ctxt, {'id': 1})
db.snapshot_create(self.ctxt,
{'id': 1, 'volume_id': 1, 'metadata': metadata1})
db_meta = db.snapshot_metadata_update(self.ctxt, 1, metadata2, True)
self.assertEqual(should_be, db_meta)
@mock.patch.object(timeutils, 'utcnow')
@mock.patch.object(sqlalchemy_api, 'resource_exists')
@mock.patch.object(sqlalchemy_api, '_snapshot_metadata_get')
@mock.patch.object(sqlalchemy_api, '_snapshot_metadata_get_item')
def test_snapshot_metadata_delete_deleted_at_updated(self,
mock_metadata_item,
mock_metadata,
mock_resource,
mock_utc):
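        # Snapshot metadata rows are soft deleted item by item; the
        # mocked item's update() must receive the 'deleted' flag and the
        # mocked timestamp.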
fake_metadata = {'fake_key1': 'fake_value1'}
mock_item = mock.Mock()
mock_metadata.return_value = fake_metadata
mock_utc.return_value = 'fake_time'
mock_metadata_item.side_effect = [mock_item]
db.snapshot_metadata_update(self.ctxt, 1, {}, True)
mock_item.update.assert_called_once_with({'deleted': True,
'deleted_at': 'fake_time'})
def test_snapshot_metadata_delete(self):
metadata = {'a': '1', 'c': '2'}
should_be = {'a': '1'}
db.volume_create(self.ctxt, {'id': 1})
db.snapshot_create(self.ctxt,
{'id': 1, 'volume_id': 1, 'metadata': metadata})
db.snapshot_metadata_delete(self.ctxt, 1, 'c')
self.assertEqual(should_be, db.snapshot_metadata_get(self.ctxt, 1))
@ddt.ddt
class DBAPIConsistencygroupTestCase(BaseTest):
def _create_cgs_to_test_include_in(self):
"""Helper method for test_consistencygroup_include_in_* tests."""
return [
db.consistencygroup_create(
self.ctxt, {'host': 'host1@backend1#pool1',
'cluster_name': 'cluster1@backend1#pool1'}),
db.consistencygroup_create(
self.ctxt, {'host': 'host1@backend2#pool2',
'cluster_name': 'cluster1@backend2#pool1'}),
db.consistencygroup_create(
self.ctxt, {'host': 'host2@backend#poo1',
'cluster_name': 'cluster2@backend#pool'}),
]
@ddt.data('host1@backend1#pool1', 'host1@backend1')
def test_consistencygroup_include_in_cluster_by_host(self, host):
"""Basic CG include test filtering by host and with full rename."""
cg = self._create_cgs_to_test_include_in()[0]
cluster_name = 'my_cluster'
result = db.consistencygroup_include_in_cluster(self.ctxt,
cluster_name,
partial_rename=False,
host=host)
self.assertEqual(1, result)
db_cg = db.consistencygroup_get(self.ctxt, cg.id)
self.assertEqual(cluster_name, db_cg.cluster_name)
def test_consistencygroup_include_in_cluster_by_host_multiple(self):
"""Partial cluster rename filtering with host level info."""
cgs = self._create_cgs_to_test_include_in()[0:2]
host = 'host1'
cluster_name = 'my_cluster'
result = db.consistencygroup_include_in_cluster(self.ctxt,
cluster_name,
partial_rename=True,
host=host)
self.assertEqual(2, result)
db_cgs = [db.consistencygroup_get(self.ctxt, cgs[0].id),
db.consistencygroup_get(self.ctxt, cgs[1].id)]
for i in range(2):
self.assertEqual(cluster_name + cgs[i].host[len(host):],
db_cgs[i].cluster_name)
@ddt.data('cluster1@backend1#pool1', 'cluster1@backend1')
def test_consistencygroup_include_in_cluster_by_cluster_name(self,
cluster_name):
"""Basic CG include test filtering by cluster with full rename."""
cg = self._create_cgs_to_test_include_in()[0]
new_cluster_name = 'cluster_new@backend1#pool'
result = db.consistencygroup_include_in_cluster(
self.ctxt, new_cluster_name, partial_rename=False,
cluster_name=cluster_name)
self.assertEqual(1, result)
db_cg = db.consistencygroup_get(self.ctxt, cg.id)
self.assertEqual(new_cluster_name, db_cg.cluster_name)
def test_consistencygroup_include_in_cluster_by_cluster_multiple(self):
"""Partial rename filtering with cluster with host level info."""
cgs = self._create_cgs_to_test_include_in()[0:2]
cluster_name = 'cluster1'
new_cluster_name = 'my_cluster'
result = db.consistencygroup_include_in_cluster(
self.ctxt, new_cluster_name, partial_rename=True,
cluster_name=cluster_name)
self.assertEqual(2, result)
db_cgs = [db.consistencygroup_get(self.ctxt, cgs[0].id),
db.consistencygroup_get(self.ctxt, cgs[1].id)]
for i in range(2):
self.assertEqual(
new_cluster_name + cgs[i].cluster_name[len(cluster_name):],
db_cgs[i].cluster_name)
class DBAPICgsnapshotTestCase(BaseTest):
"""Tests for cinder.db.api.cgsnapshot_*."""
def _cgsnapshot_create(self, values):
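        # return_vo=False asks the helper for the raw DB record rather
        # than a versioned object, which is what the list assertions in
        # these tests compare against.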
return utils.create_cgsnapshot(self.ctxt, return_vo=False, **values)
def test_cgsnapshot_get_all_by_filter(self):
cgsnapshot1 = self._cgsnapshot_create(
{'id': fake.CGSNAPSHOT_ID,
'consistencygroup_id': fake.CONSISTENCY_GROUP_ID})
cgsnapshot2 = self._cgsnapshot_create(
{'id': fake.CGSNAPSHOT2_ID,
'consistencygroup_id': fake.CONSISTENCY_GROUP_ID})
cgsnapshot3 = self._cgsnapshot_create(
{'id': fake.CGSNAPSHOT3_ID,
'consistencygroup_id': fake.CONSISTENCY_GROUP2_ID})
tests = [
({'consistencygroup_id': fake.CONSISTENCY_GROUP_ID},
[cgsnapshot1, cgsnapshot2]),
({'id': fake.CGSNAPSHOT3_ID}, [cgsnapshot3]),
({'fake_key': 'fake'}, [])
]
# no filter
filters = None
cgsnapshots = db.cgsnapshot_get_all(self.ctxt, filters=filters)
self.assertEqual(3, len(cgsnapshots))
for filters, expected in tests:
self._assertEqualListsOfObjects(expected,
db.cgsnapshot_get_all(
self.ctxt,
filters))
def test_cgsnapshot_get_all_by_group(self):
cgsnapshot1 = self._cgsnapshot_create(
{'id': fake.CGSNAPSHOT_ID,
'consistencygroup_id': fake.CONSISTENCY_GROUP_ID})
cgsnapshot2 = self._cgsnapshot_create(
{'id': fake.CGSNAPSHOT2_ID,
'consistencygroup_id': fake.CONSISTENCY_GROUP_ID})
self._cgsnapshot_create(
{'id': fake.CGSNAPSHOT3_ID,
'consistencygroup_id': fake.CONSISTENCY_GROUP2_ID})
tests = [
({'consistencygroup_id': fake.CONSISTENCY_GROUP_ID},
[cgsnapshot1, cgsnapshot2]),
({'id': fake.CGSNAPSHOT3_ID}, []),
({'consistencygroup_id': fake.CONSISTENCY_GROUP2_ID}, []),
(None, [cgsnapshot1, cgsnapshot2]),
]
for filters, expected in tests:
self._assertEqualListsOfObjects(expected,
db.cgsnapshot_get_all_by_group(
self.ctxt,
fake.CONSISTENCY_GROUP_ID,
filters))
        db.cgsnapshot_destroy(self.ctxt, fake.CGSNAPSHOT_ID)
        db.cgsnapshot_destroy(self.ctxt, fake.CGSNAPSHOT2_ID)
        db.cgsnapshot_destroy(self.ctxt, fake.CGSNAPSHOT3_ID)
def test_cgsnapshot_get_all_by_project(self):
cgsnapshot1 = self._cgsnapshot_create(
{'id': fake.CGSNAPSHOT_ID,
'consistencygroup_id': fake.CONSISTENCY_GROUP_ID,
'project_id': fake.PROJECT_ID})
cgsnapshot2 = self._cgsnapshot_create(
{'id': fake.CGSNAPSHOT2_ID,
'consistencygroup_id': fake.CONSISTENCY_GROUP_ID,
'project_id': fake.PROJECT_ID})
tests = [
({'id': fake.CGSNAPSHOT_ID}, [cgsnapshot1]),
({'consistencygroup_id': fake.CONSISTENCY_GROUP_ID},
[cgsnapshot1, cgsnapshot2]),
({'fake_key': 'fake'}, [])
]
for filters, expected in tests:
self._assertEqualListsOfObjects(expected,
db.cgsnapshot_get_all_by_project(
self.ctxt,
fake.PROJECT_ID,
filters))
class DBAPIVolumeTypeTestCase(BaseTest):
"""Tests for the db.api.volume_type_* methods."""
def setUp(self):
self.ctxt = context.get_admin_context()
super(DBAPIVolumeTypeTestCase, self).setUp()
def test_volume_type_create_exists(self):
vt = db.volume_type_create(self.ctxt, {'name': 'n1'})
self.assertRaises(exception.VolumeTypeExists,
db.volume_type_create,
self.ctxt,
{'name': 'n1'})
self.assertRaises(exception.VolumeTypeExists,
db.volume_type_create,
self.ctxt,
{'name': 'n2', 'id': vt['id']})
def test_volume_type_access_remove(self):
vt = db.volume_type_create(self.ctxt, {'name': 'n1'})
db.volume_type_access_add(self.ctxt, vt['id'], 'fake_project')
vtas = db.volume_type_access_get_all(self.ctxt, vt['id'])
self.assertEqual(1, len(vtas))
db.volume_type_access_remove(self.ctxt, vt['id'], 'fake_project')
vtas = db.volume_type_access_get_all(self.ctxt, vt['id'])
self.assertEqual(0, len(vtas))
def test_volume_type_access_remove_high_id(self):
vt = db.volume_type_create(self.ctxt, {'name': 'n1'})
vta = db.volume_type_access_add(self.ctxt, vt['id'], 'fake_project')
vtas = db.volume_type_access_get_all(self.ctxt, vt['id'])
self.assertEqual(1, len(vtas))
        # NOTE(dulek): Bug 1496747 uncovered problems when deleting
        # accesses with an id column higher than 128. This is a
        # regression test for that case.
session = sqlalchemy_api.get_session()
vta.id = 150
vta.save(session=session)
session.close()
db.volume_type_access_remove(self.ctxt, vt['id'], 'fake_project')
vtas = db.volume_type_access_get_all(self.ctxt, vt['id'])
self.assertEqual(0, len(vtas))
def test_get_volume_type_extra_specs(self):
# Ensure that volume type extra specs can be accessed after
# the DB session is closed.
vt_extra_specs = {'mock_key': 'mock_value'}
vt = db.volume_type_create(self.ctxt,
{'name': 'n1',
'extra_specs': vt_extra_specs})
volume_ref = db.volume_create(self.ctxt, {'volume_type_id': vt.id})
session = sqlalchemy_api.get_session()
volume = sqlalchemy_api._volume_get(self.ctxt, volume_ref.id,
session=session)
session.close()
actual_specs = {}
for spec in volume.volume_type.extra_specs:
actual_specs[spec.key] = spec.value
self.assertEqual(vt_extra_specs, actual_specs)
class DBAPIEncryptionTestCase(BaseTest):
"""Tests for the db.api.volume_(type_)?encryption_* methods."""
_ignored_keys = [
'deleted',
'deleted_at',
'created_at',
'updated_at',
'encryption_id',
]
def setUp(self):
super(DBAPIEncryptionTestCase, self).setUp()
self.created = \
[db.volume_type_encryption_create(self.ctxt,
values['volume_type_id'], values)
for values in self._get_values()]
def _get_values(self, one=False, updated=False):
base_values = {
'cipher': 'fake_cipher',
'key_size': 256,
'provider': 'fake_provider',
'volume_type_id': 'fake_type',
'control_location': 'front-end',
}
updated_values = {
'cipher': 'fake_updated_cipher',
'key_size': 512,
'provider': 'fake_updated_provider',
'volume_type_id': 'fake_type',
'control_location': 'front-end',
}
if one:
return base_values
if updated:
values = updated_values
else:
values = base_values
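        # compose() makes each generated record unique by appending the
        # loop index to strings and adding it to integers.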
def compose(val, step):
if isinstance(val, str):
step = str(step)
return val + step
return [{k: compose(v, i) for k, v in values.items()}
for i in range(1, 4)]
def test_volume_type_encryption_create(self):
values = self._get_values()
for i, encryption in enumerate(self.created):
self._assertEqualObjects(values[i], encryption, self._ignored_keys)
def test_volume_type_encryption_update(self):
for values in self._get_values(updated=True):
db.volume_type_encryption_update(self.ctxt,
values['volume_type_id'], values)
db_enc = db.volume_type_encryption_get(self.ctxt,
values['volume_type_id'])
self._assertEqualObjects(values, db_enc, self._ignored_keys)
def test_volume_type_encryption_get(self):
for encryption in self.created:
encryption_get = \
db.volume_type_encryption_get(self.ctxt,
encryption['volume_type_id'])
self._assertEqualObjects(encryption, encryption_get,
self._ignored_keys)
def test_volume_type_encryption_update_with_no_create(self):
self.assertRaises(exception.VolumeTypeEncryptionNotFound,
db.volume_type_encryption_update,
self.ctxt,
'fake_no_create_type',
{'cipher': 'fake_updated_cipher'})
def test_volume_type_encryption_delete(self):
values = {
'cipher': 'fake_cipher',
'key_size': 256,
'provider': 'fake_provider',
'volume_type_id': 'fake_type',
'control_location': 'front-end',
}
encryption = db.volume_type_encryption_create(self.ctxt, 'fake_type',
values)
self._assertEqualObjects(values, encryption, self._ignored_keys)
db.volume_type_encryption_delete(self.ctxt,
encryption['volume_type_id'])
encryption_get = \
db.volume_type_encryption_get(self.ctxt,
encryption['volume_type_id'])
self.assertIsNone(encryption_get)
def test_volume_type_encryption_delete_no_create(self):
self.assertRaises(exception.VolumeTypeEncryptionNotFound,
db.volume_type_encryption_delete,
self.ctxt,
'fake_no_create_type')
def test_volume_encryption_get(self):
# normal volume -- metadata should be None
volume = db.volume_create(self.ctxt, {})
values = db.volume_encryption_metadata_get(self.ctxt, volume.id)
self.assertEqual({'encryption_key_id': None}, values)
# encrypted volume -- metadata should match volume type
volume_type = self.created[0]
volume = db.volume_create(self.ctxt, {'volume_type_id':
volume_type['volume_type_id']})
values = db.volume_encryption_metadata_get(self.ctxt, volume.id)
expected = {
'encryption_key_id': volume.encryption_key_id,
'control_location': volume_type['control_location'],
'cipher': volume_type['cipher'],
'key_size': volume_type['key_size'],
'provider': volume_type['provider'],
}
self.assertEqual(expected, values)
class DBAPIReservationTestCase(BaseTest):
"""Tests for db.api.reservation_* methods."""
def setUp(self):
super(DBAPIReservationTestCase, self).setUp()
self.values = {
'uuid': 'sample-uuid',
'project_id': 'project1',
'resource': 'resource',
'delta': 42,
'expire': (datetime.datetime.utcnow() +
datetime.timedelta(days=1)),
'usage': {'id': 1}
}
def test__get_reservation_resources(self):
reservations = _quota_reserve(self.ctxt, 'project1')
expected = ['gigabytes', 'volumes']
resources = sqlalchemy_api._get_reservation_resources(
sqlalchemy_api.get_session(), self.ctxt, reservations)
self.assertEqual(expected, sorted(resources))
def test_reservation_commit(self):
reservations = _quota_reserve(self.ctxt, 'project1')
expected = {'project_id': 'project1',
'volumes': {'reserved': 1, 'in_use': 0},
'gigabytes': {'reserved': 2, 'in_use': 0},
}
self.assertEqual(expected,
db.quota_usage_get_all_by_project(
self.ctxt, 'project1'))
db.reservation_commit(self.ctxt, reservations, 'project1')
expected = {'project_id': 'project1',
'volumes': {'reserved': 0, 'in_use': 1},
'gigabytes': {'reserved': 0, 'in_use': 2},
}
self.assertEqual(expected,
db.quota_usage_get_all_by_project(
self.ctxt,
'project1'))
def test_reservation_rollback(self):
reservations = _quota_reserve(self.ctxt, 'project1')
expected = {'project_id': 'project1',
'volumes': {'reserved': 1, 'in_use': 0},
'gigabytes': {'reserved': 2, 'in_use': 0},
}
self.assertEqual(expected,
db.quota_usage_get_all_by_project(
self.ctxt,
'project1'))
db.reservation_rollback(self.ctxt, reservations, 'project1')
expected = {'project_id': 'project1',
'volumes': {'reserved': 0, 'in_use': 0},
'gigabytes': {'reserved': 0, 'in_use': 0},
}
self.assertEqual(expected,
db.quota_usage_get_all_by_project(
self.ctxt,
'project1'))
def test_reservation_expire(self):
self.values['expire'] = datetime.datetime.utcnow() + \
datetime.timedelta(days=1)
_quota_reserve(self.ctxt, 'project1')
db.reservation_expire(self.ctxt)
expected = {'project_id': 'project1',
'gigabytes': {'reserved': 0, 'in_use': 0},
'volumes': {'reserved': 0, 'in_use': 0}}
self.assertEqual(expected,
db.quota_usage_get_all_by_project(
self.ctxt,
'project1'))
class DBAPIMessageTestCase(BaseTest):
"""Tests for message operations"""
def setUp(self):
super(DBAPIMessageTestCase, self).setUp()
self.context = context.get_admin_context()
def _create_fake_messages(self, m_id, time):
db.message_create(self.context,
{'id': m_id,
'event_id': m_id,
'message_level': 'error',
'project_id': 'fake_id',
'expires_at': time})
def test_cleanup_expired_messages(self):
now = timeutils.utcnow()
# message expired 1 day ago
self._create_fake_messages(
uuidutils.generate_uuid(), now - datetime.timedelta(days=1))
# message expired now
self._create_fake_messages(
uuidutils.generate_uuid(), now)
# message expired 1 day after
self._create_fake_messages(
uuidutils.generate_uuid(), now + datetime.timedelta(days=1))
with mock.patch.object(timeutils, 'utcnow') as mock_time_now:
mock_time_now.return_value = now
db.cleanup_expired_messages(self.context)
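        # Given the asserted count below, only strictly expired messages
        # are removed; the one expiring exactly at 'now' survives.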
messages = db.message_get_all(self.context)
self.assertEqual(2, len(messages))
class DBAPIQuotaClassTestCase(BaseTest):
"""Tests for db.api.quota_class_* methods."""
def setUp(self):
super(DBAPIQuotaClassTestCase, self).setUp()
self.sample_qc = db.quota_class_create(self.ctxt, 'test_qc',
'test_resource', 42)
def test_quota_class_get(self):
qc = db.quota_class_get(self.ctxt, 'test_qc', 'test_resource')
self._assertEqualObjects(self.sample_qc, qc)
@mock.patch('oslo_utils.timeutils.utcnow', return_value=UTC_NOW)
def test_quota_class_destroy(self, utcnow_mock):
self.assertDictEqual(
{'deleted': True, 'deleted_at': UTC_NOW},
db.quota_class_destroy(self.ctxt, 'test_qc', 'test_resource'))
self.assertRaises(exception.QuotaClassNotFound,
db.quota_class_get, self.ctxt,
'test_qc', 'test_resource')
def test_quota_class_get_not_found(self):
self.assertRaises(exception.QuotaClassNotFound,
db.quota_class_get, self.ctxt, 'nonexistent',
'nonexistent')
def test_quota_class_get_all_by_name(self):
db.quota_class_create(self.ctxt, 'test2', 'res1', 43)
db.quota_class_create(self.ctxt, 'test2', 'res2', 44)
self.assertEqual({'class_name': 'test_qc', 'test_resource': 42},
db.quota_class_get_all_by_name(self.ctxt, 'test_qc'))
self.assertEqual({'class_name': 'test2', 'res1': 43, 'res2': 44},
db.quota_class_get_all_by_name(self.ctxt, 'test2'))
def test_quota_class_update(self):
db.quota_class_update(self.ctxt, 'test_qc', 'test_resource', 43)
updated = db.quota_class_get(self.ctxt, 'test_qc', 'test_resource')
self.assertEqual(43, updated['hard_limit'])
def test_quota_class_update_resource(self):
old = db.quota_class_get(self.ctxt, 'test_qc', 'test_resource')
db.quota_class_update_resource(self.ctxt,
'test_resource',
'test_resource1')
new = db.quota_class_get(self.ctxt, 'test_qc', 'test_resource1')
self.assertEqual(old.id, new.id)
self.assertEqual('test_resource1', new.resource)
def test_quota_class_destroy_all_by_name(self):
db.quota_class_create(self.ctxt, 'test2', 'res1', 43)
db.quota_class_create(self.ctxt, 'test2', 'res2', 44)
db.quota_class_destroy_all_by_name(self.ctxt, 'test2')
self.assertEqual({'class_name': 'test2'},
db.quota_class_get_all_by_name(self.ctxt, 'test2'))
class DBAPIQuotaTestCase(BaseTest):
"""Tests for db.api.reservation_* methods."""
def test_quota_create(self):
quota = db.quota_create(self.ctxt, 'project1', 'resource', 99)
self.assertEqual('resource', quota.resource)
self.assertEqual(99, quota.hard_limit)
self.assertEqual('project1', quota.project_id)
def test_quota_get(self):
quota = db.quota_create(self.ctxt, 'project1', 'resource', 99)
quota_db = db.quota_get(self.ctxt, 'project1', 'resource')
self._assertEqualObjects(quota, quota_db)
def test_quota_get_all_by_project(self):
for i in range(3):
for j in range(3):
db.quota_create(self.ctxt, 'proj%d' % i, 'res%d' % j, j)
for i in range(3):
quotas_db = db.quota_get_all_by_project(self.ctxt, 'proj%d' % i)
self.assertEqual({'project_id': 'proj%d' % i,
'res0': 0,
'res1': 1,
'res2': 2}, quotas_db)
def test_quota_update(self):
db.quota_create(self.ctxt, 'project1', 'resource1', 41)
db.quota_update(self.ctxt, 'project1', 'resource1', 42)
quota = db.quota_get(self.ctxt, 'project1', 'resource1')
self.assertEqual(42, quota.hard_limit)
self.assertEqual('resource1', quota.resource)
self.assertEqual('project1', quota.project_id)
def test_quota_update_resource(self):
old = db.quota_create(self.ctxt, 'project1', 'resource1', 41)
db.quota_update_resource(self.ctxt, 'resource1', 'resource2')
new = db.quota_get(self.ctxt, 'project1', 'resource2')
self.assertEqual(old.id, new.id)
self.assertEqual('resource2', new.resource)
def test_quota_update_nonexistent(self):
self.assertRaises(exception.ProjectQuotaNotFound,
db.quota_update,
self.ctxt,
'project1',
'resource1',
42)
def test_quota_get_nonexistent(self):
self.assertRaises(exception.ProjectQuotaNotFound,
db.quota_get,
self.ctxt,
'project1',
'resource1')
def test_quota_reserve(self):
reservations = _quota_reserve(self.ctxt, 'project1')
self.assertEqual(2, len(reservations))
quota_usage = db.quota_usage_get_all_by_project(self.ctxt, 'project1')
self.assertEqual({'project_id': 'project1',
'gigabytes': {'reserved': 2, 'in_use': 0},
'volumes': {'reserved': 1, 'in_use': 0}},
quota_usage)
def test__get_quota_usages(self):
_quota_reserve(self.ctxt, 'project1')
session = sqlalchemy_api.get_session()
quota_usage = sqlalchemy_api._get_quota_usages(
self.ctxt, session, 'project1')
self.assertEqual(['gigabytes', 'volumes'],
sorted(quota_usage.keys()))
def test__get_quota_usages_with_resources(self):
_quota_reserve(self.ctxt, 'project1')
session = sqlalchemy_api.get_session()
quota_usage = sqlalchemy_api._get_quota_usages(
self.ctxt, session, 'project1', resources=['volumes'])
self.assertEqual(['volumes'], list(quota_usage.keys()))
@mock.patch('oslo_utils.timeutils.utcnow', return_value=UTC_NOW)
def test_quota_destroy(self, utcnow_mock):
db.quota_create(self.ctxt, 'project1', 'resource1', 41)
self.assertDictEqual(
{'deleted': True, 'deleted_at': UTC_NOW},
db.quota_destroy(self.ctxt, 'project1', 'resource1'))
self.assertRaises(exception.ProjectQuotaNotFound, db.quota_get,
self.ctxt, 'project1', 'resource1')
def test_quota_destroy_by_project(self):
# Create limits, reservations and usage for project
project = 'project1'
_quota_reserve(self.ctxt, project)
expected_usage = {'project_id': project,
'volumes': {'reserved': 1, 'in_use': 0},
'gigabytes': {'reserved': 2, 'in_use': 0}}
expected = {'project_id': project, 'gigabytes': 2, 'volumes': 1}
# Check that quotas are there
self.assertEqual(expected,
db.quota_get_all_by_project(self.ctxt, project))
self.assertEqual(expected_usage,
db.quota_usage_get_all_by_project(self.ctxt, project))
# Destroy only the limits
db.quota_destroy_by_project(self.ctxt, project)
# Confirm that limits have been removed
self.assertEqual({'project_id': project},
db.quota_get_all_by_project(self.ctxt, project))
# But that usage and reservations are the same
self.assertEqual(expected_usage,
db.quota_usage_get_all_by_project(self.ctxt, project))
    def test_quota_destroy_sqlalchemy_all_by_project(self):
# Create limits, reservations and usage for project
project = 'project1'
_quota_reserve(self.ctxt, project)
expected_usage = {'project_id': project,
'volumes': {'reserved': 1, 'in_use': 0},
'gigabytes': {'reserved': 2, 'in_use': 0}}
expected = {'project_id': project, 'gigabytes': 2, 'volumes': 1}
expected_result = {'project_id': project}
# Check that quotas are there
self.assertEqual(expected,
db.quota_get_all_by_project(self.ctxt, project))
self.assertEqual(expected_usage,
db.quota_usage_get_all_by_project(self.ctxt, project))
# Destroy all quotas using SQLAlchemy Implementation
sqlalchemy_api.quota_destroy_all_by_project(self.ctxt, project,
only_quotas=False)
# Check that all quotas have been deleted
self.assertEqual(expected_result,
db.quota_get_all_by_project(self.ctxt, project))
self.assertEqual(expected_result,
db.quota_usage_get_all_by_project(self.ctxt, project))
def test_quota_usage_get_nonexistent(self):
self.assertRaises(exception.QuotaUsageNotFound,
db.quota_usage_get,
self.ctxt,
'p1',
                          'nonexistent_resource')
def test_quota_usage_get(self):
_quota_reserve(self.ctxt, 'p1')
quota_usage = db.quota_usage_get(self.ctxt, 'p1', 'gigabytes')
expected = {'resource': 'gigabytes', 'project_id': 'p1',
'in_use': 0, 'reserved': 2, 'total': 2}
for key, value in expected.items():
self.assertEqual(value, quota_usage[key], key)
def test_quota_usage_get_all_by_project(self):
_quota_reserve(self.ctxt, 'p1')
expected = {'project_id': 'p1',
'volumes': {'in_use': 0, 'reserved': 1},
'gigabytes': {'in_use': 0, 'reserved': 2}}
self.assertEqual(expected, db.quota_usage_get_all_by_project(
self.ctxt, 'p1'))
class DBAPIBackupTestCase(BaseTest):
"""Tests for db.api.backup_* methods."""
_ignored_keys = ['id', 'deleted', 'deleted_at', 'created_at',
'updated_at', 'data_timestamp', 'backup_metadata']
def setUp(self):
super(DBAPIBackupTestCase, self).setUp()
self.created = [db.backup_create(self.ctxt, values)
for values in self._get_values()]
def _get_values(self, one=False):
base_values = {
'user_id': fake.USER_ID,
'project_id': fake.PROJECT_ID,
'volume_id': 'volume',
'host': 'host',
'availability_zone': 'zone',
'display_name': 'display',
'display_description': 'description',
'container': 'container',
'status': 'status',
'fail_reason': 'test',
'service_metadata': 'metadata',
'service': 'service',
'parent_id': "parent_id",
'size': 1000,
'object_count': 100,
'temp_volume_id': 'temp_volume_id',
'temp_snapshot_id': 'temp_snapshot_id',
'num_dependent_backups': 0,
'snapshot_id': 'snapshot_id',
'restore_volume_id': 'restore_volume_id'}
if one:
return base_values
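        # Unlike the encryption variant, compose() here passes booleans
        # through untouched before suffixing strings and adding to ints.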
def compose(val, step):
if isinstance(val, bool):
return val
if isinstance(val, str):
step = str(step)
return val + step
return [{k: compose(v, i) for k, v in base_values.items()}
for i in range(1, 4)]
def test_backup_create(self):
values = self._get_values()
for i, backup in enumerate(self.created):
self.assertEqual(36, len(backup['id'])) # dynamic UUID
self._assertEqualObjects(values[i], backup, self._ignored_keys)
def test_backup_get(self):
for backup in self.created:
backup_get = db.backup_get(self.ctxt, backup['id'])
self._assertEqualObjects(backup, backup_get)
def test_backup_get_deleted(self):
backup_dic = {'user_id': fake.USER_ID,
'project_id': fake.PROJECT_ID,
'volume_id': fake.VOLUME_ID,
'size': 1,
'object_count': 1}
backup = objects.Backup(self.ctxt, **backup_dic)
backup.create()
backup.destroy()
backup_get = db.backup_get(self.ctxt, backup.id, read_deleted='yes')
self.assertEqual(backup.id, backup_get.id)
    def test_backup_get_all(self):
all_backups = db.backup_get_all(self.ctxt)
self._assertEqualListsOfObjects(self.created, all_backups)
    def test_backup_get_all_by_filter(self):
filters = {'status': self.created[1]['status']}
filtered_backups = db.backup_get_all(self.ctxt, filters=filters)
self._assertEqualListsOfObjects([self.created[1]], filtered_backups)
filters = {'display_name': self.created[1]['display_name']}
filtered_backups = db.backup_get_all(self.ctxt, filters=filters)
self._assertEqualListsOfObjects([self.created[1]], filtered_backups)
filters = {'volume_id': self.created[1]['volume_id']}
filtered_backups = db.backup_get_all(self.ctxt, filters=filters)
self._assertEqualListsOfObjects([self.created[1]], filtered_backups)
filters = {'fake_key': 'fake'}
filtered_backups = db.backup_get_all(self.ctxt, filters=filters)
self._assertEqualListsOfObjects([], filtered_backups)
def test_backup_get_all_by_host(self):
byhost = db.backup_get_all_by_host(self.ctxt,
self.created[1]['host'])
self._assertEqualObjects(self.created[1], byhost[0])
def test_backup_get_all_by_project(self):
byproj = db.backup_get_all_by_project(self.ctxt,
self.created[1]['project_id'])
self._assertEqualObjects(self.created[1], byproj[0])
byproj = db.backup_get_all_by_project(self.ctxt,
self.created[1]['project_id'],
{'fake_key': 'fake'})
self._assertEqualListsOfObjects([], byproj)
def test_backup_get_all_by_volume(self):
byvol = db.backup_get_all_by_volume(self.ctxt,
self.created[1]['volume_id'])
self._assertEqualObjects(self.created[1], byvol[0])
byvol = db.backup_get_all_by_volume(self.ctxt,
self.created[1]['volume_id'],
{'fake_key': 'fake'})
self._assertEqualListsOfObjects([], byvol)
def test_backup_update_nonexistent(self):
self.assertRaises(exception.BackupNotFound,
db.backup_update,
self.ctxt, 'nonexistent', {})
def test_backup_update(self):
updated_values = self._get_values(one=True)
update_id = self.created[1]['id']
db.backup_update(self.ctxt, update_id, updated_values)
updated_backup = db.backup_get(self.ctxt, update_id)
self._assertEqualObjects(updated_values, updated_backup,
self._ignored_keys)
def test_backup_update_with_fail_reason_truncation(self):
updated_values = self._get_values(one=True)
fail_reason = '0' * 512
updated_values['fail_reason'] = fail_reason
update_id = self.created[1]['id']
db.backup_update(self.ctxt, update_id, updated_values)
updated_backup = db.backup_get(self.ctxt, update_id)
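        # fail_reason is truncated to the column width of 255 characters,
        # so only the prefix should be stored.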
updated_values['fail_reason'] = fail_reason[:255]
self._assertEqualObjects(updated_values, updated_backup,
self._ignored_keys)
@mock.patch('oslo_utils.timeutils.utcnow', return_value=UTC_NOW)
def test_backup_destroy(self, utcnow_mock):
for backup in self.created:
self.assertDictEqual(
{'status': fields.BackupStatus.DELETED, 'deleted': True,
'deleted_at': UTC_NOW},
db.backup_destroy(self.ctxt, backup['id']))
self.assertFalse(db.backup_get_all(self.ctxt))
def test_backup_not_found(self):
self.assertRaises(exception.BackupNotFound, db.backup_get, self.ctxt,
'notinbase')
class DBAPIProcessSortParamTestCase(test.TestCase):
def test_process_sort_params_defaults(self):
"""Verifies default sort parameters."""
sort_keys, sort_dirs = sqlalchemy_api.process_sort_params([], [])
self.assertEqual(['created_at', 'id'], sort_keys)
self.assertEqual(['asc', 'asc'], sort_dirs)
sort_keys, sort_dirs = sqlalchemy_api.process_sort_params(None, None)
self.assertEqual(['created_at', 'id'], sort_keys)
self.assertEqual(['asc', 'asc'], sort_dirs)
def test_process_sort_params_override_default_keys(self):
"""Verifies that the default keys can be overridden."""
sort_keys, sort_dirs = sqlalchemy_api.process_sort_params(
[], [], default_keys=['key1', 'key2', 'key3'])
self.assertEqual(['key1', 'key2', 'key3'], sort_keys)
self.assertEqual(['asc', 'asc', 'asc'], sort_dirs)
def test_process_sort_params_override_default_dir(self):
"""Verifies that the default direction can be overridden."""
sort_keys, sort_dirs = sqlalchemy_api.process_sort_params(
[], [], default_dir='dir1')
self.assertEqual(['created_at', 'id'], sort_keys)
self.assertEqual(['dir1', 'dir1'], sort_dirs)
def test_process_sort_params_override_default_key_and_dir(self):
"""Verifies that the default key and dir can be overridden."""
sort_keys, sort_dirs = sqlalchemy_api.process_sort_params(
[], [], default_keys=['key1', 'key2', 'key3'],
default_dir='dir1')
self.assertEqual(['key1', 'key2', 'key3'], sort_keys)
self.assertEqual(['dir1', 'dir1', 'dir1'], sort_dirs)
sort_keys, sort_dirs = sqlalchemy_api.process_sort_params(
[], [], default_keys=[], default_dir='dir1')
self.assertEqual([], sort_keys)
self.assertEqual([], sort_dirs)
def test_process_sort_params_non_default(self):
"""Verifies that non-default keys are added correctly."""
sort_keys, sort_dirs = sqlalchemy_api.process_sort_params(
['key1', 'key2'], ['asc', 'desc'])
self.assertEqual(['key1', 'key2', 'created_at', 'id'], sort_keys)
# First sort_dir in list is used when adding the default keys
self.assertEqual(['asc', 'desc', 'asc', 'asc'], sort_dirs)
def test_process_sort_params_default(self):
"""Verifies that default keys are added correctly."""
sort_keys, sort_dirs = sqlalchemy_api.process_sort_params(
['id', 'key2'], ['asc', 'desc'])
self.assertEqual(['id', 'key2', 'created_at'], sort_keys)
self.assertEqual(['asc', 'desc', 'asc'], sort_dirs)
# Include default key value, rely on default direction
sort_keys, sort_dirs = sqlalchemy_api.process_sort_params(
['id', 'key2'], [])
self.assertEqual(['id', 'key2', 'created_at'], sort_keys)
self.assertEqual(['asc', 'asc', 'asc'], sort_dirs)
def test_process_sort_params_default_dir(self):
"""Verifies that the default dir is applied to all keys."""
# Direction is set, ignore default dir
sort_keys, sort_dirs = sqlalchemy_api.process_sort_params(
['id', 'key2'], ['desc'], default_dir='dir')
self.assertEqual(['id', 'key2', 'created_at'], sort_keys)
self.assertEqual(['desc', 'desc', 'desc'], sort_dirs)
# But should be used if no direction is set
sort_keys, sort_dirs = sqlalchemy_api.process_sort_params(
['id', 'key2'], [], default_dir='dir')
self.assertEqual(['id', 'key2', 'created_at'], sort_keys)
self.assertEqual(['dir', 'dir', 'dir'], sort_dirs)
def test_process_sort_params_unequal_length(self):
"""Verifies that a sort direction list is applied correctly."""
sort_keys, sort_dirs = sqlalchemy_api.process_sort_params(
['id', 'key2', 'key3'], ['desc'])
self.assertEqual(['id', 'key2', 'key3', 'created_at'], sort_keys)
self.assertEqual(['desc', 'desc', 'desc', 'desc'], sort_dirs)
        # The fill direction is the first sort_dir in the list
sort_keys, sort_dirs = sqlalchemy_api.process_sort_params(
['id', 'key2', 'key3'], ['desc', 'asc'])
self.assertEqual(['id', 'key2', 'key3', 'created_at'], sort_keys)
self.assertEqual(['desc', 'asc', 'desc', 'desc'], sort_dirs)
sort_keys, sort_dirs = sqlalchemy_api.process_sort_params(
['id', 'key2', 'key3'], ['desc', 'asc', 'asc'])
self.assertEqual(['id', 'key2', 'key3', 'created_at'], sort_keys)
self.assertEqual(['desc', 'asc', 'asc', 'desc'], sort_dirs)
def test_process_sort_params_extra_dirs_lengths(self):
"""InvalidInput raised if more directions are given."""
self.assertRaises(exception.InvalidInput,
sqlalchemy_api.process_sort_params,
['key1', 'key2'],
['asc', 'desc', 'desc'])
def test_process_sort_params_invalid_sort_dir(self):
"""InvalidInput raised if invalid directions are given."""
for dirs in [['foo'], ['asc', 'foo'], ['asc', 'desc', 'foo']]:
self.assertRaises(exception.InvalidInput,
sqlalchemy_api.process_sort_params,
['key'],
dirs)
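# Illustrative sketch only: the assertions above pin down the contract of
# sqlalchemy_api.process_sort_params. The helper below is an assumption
# reconstructed from those assertions, not the actual implementation.
def _process_sort_params_sketch(sort_keys, sort_dirs,
                                default_keys=('created_at', 'id'),
                                default_dir='asc'):
    keys = list(sort_keys or [])
    dirs = list(sort_dirs or [])
    # More directions than keys is rejected outright.
    if len(dirs) > len(keys):
        raise exception.InvalidInput(reason='too many sort directions')
    # Only explicitly supplied directions are validated; default_dir may be
    # any string (see test_process_sort_params_override_default_dir).
    if any(d not in ('asc', 'desc') for d in dirs):
        raise exception.InvalidInput(reason='invalid sort direction')
    # Missing directions are filled with the first supplied direction, or
    # with default_dir when no directions were supplied at all.
    fill = dirs[0] if dirs else default_dir
    dirs += [fill] * (len(keys) - len(dirs))
    # Default keys not already present are appended using the fill direction.
    for key in default_keys:
        if key not in keys:
            keys.append(key)
            dirs.append(fill)
    return keys, dirs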
class DBAPIDriverInitiatorDataTestCase(BaseTest):
initiator = 'iqn.1993-08.org.debian:01:222'
namespace = 'test_ns'
def _test_insert(self, key, value, expected_result=True):
result = db.driver_initiator_data_insert_by_key(
self.ctxt, self.initiator, self.namespace, key, value)
self.assertEqual(expected_result, result)
data = db.driver_initiator_data_get(self.ctxt, self.initiator,
self.namespace)
self.assertEqual(data[0].key, key)
self.assertEqual(data[0].value, value)
def test_insert(self):
self._test_insert('key1', 'foo')
def test_insert_already_exists(self):
self._test_insert('key2', 'bar')
self._test_insert('key2', 'bar', expected_result=False)
@ddt.ddt
class DBAPIImageVolumeCacheEntryTestCase(BaseTest):
def _validate_entry(self, entry, host, cluster_name, image_id,
image_updated_at, volume_id, size):
self.assertIsNotNone(entry)
self.assertIsNotNone(entry['id'])
self.assertEqual(host, entry['host'])
self.assertEqual(cluster_name, entry['cluster_name'])
self.assertEqual(image_id, entry['image_id'])
self.assertEqual(image_updated_at, entry['image_updated_at'])
self.assertEqual(volume_id, entry['volume_id'])
self.assertEqual(size, entry['size'])
self.assertIsNotNone(entry['last_used'])
def test_create_delete_query_cache_entry(self):
host = 'abc@123#poolz'
cluster_name = 'def@123#poolz'
image_id = 'c06764d7-54b0-4471-acce-62e79452a38b'
image_updated_at = datetime.datetime.utcnow()
volume_id = 'e0e4f819-24bb-49e6-af1e-67fb77fc07d1'
size = 6
entry = db.image_volume_cache_create(self.ctxt, host, cluster_name,
image_id, image_updated_at,
volume_id, size)
self._validate_entry(entry, host, cluster_name, image_id,
image_updated_at, volume_id, size)
entry = db.image_volume_cache_get_and_update_last_used(self.ctxt,
image_id,
host=host)
self._validate_entry(entry, host, cluster_name, image_id,
image_updated_at, volume_id, size)
entry = db.image_volume_cache_get_by_volume_id(self.ctxt, volume_id)
self._validate_entry(entry, host, cluster_name, image_id,
image_updated_at, volume_id, size)
db.image_volume_cache_delete(self.ctxt, entry['volume_id'])
entry = db.image_volume_cache_get_and_update_last_used(self.ctxt,
image_id,
host=host)
self.assertIsNone(entry)
def test_cache_entry_get_multiple(self):
host = 'abc@123#poolz'
cluster_name = 'def@123#poolz'
image_id = 'c06764d7-54b0-4471-acce-62e79452a38b'
image_updated_at = datetime.datetime.utcnow()
volume_id = 'e0e4f819-24bb-49e6-af1e-67fb77fc07d1'
size = 6
entries = []
for i in range(0, 3):
entries.append(db.image_volume_cache_create(self.ctxt,
host,
cluster_name,
image_id,
image_updated_at,
volume_id,
size))
        # It is considered OK for the cache to contain multiple identical
        # entries. Expect only a single one back from the query.
entry = db.image_volume_cache_get_and_update_last_used(self.ctxt,
image_id,
host=host)
self._validate_entry(entry, host, cluster_name, image_id,
image_updated_at, volume_id, size)
# We expect to get the same one on subsequent queries due to the
# last_used field being updated each time and ordering by it.
entry_id = entry['id']
entry = db.image_volume_cache_get_and_update_last_used(self.ctxt,
image_id,
host=host)
self._validate_entry(entry, host, cluster_name, image_id,
image_updated_at, volume_id, size)
self.assertEqual(entry_id, entry['id'])
# Cleanup
for entry in entries:
db.image_volume_cache_delete(self.ctxt, entry['volume_id'])
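    # A sketch of the ordering the two tests above rely on (an assumption
    # about the DB API, not the actual Cinder query): presumably
    # get_and_update_last_used bumps last_used on the returned row and
    # always selects the most recently used entry, e.g.
    #
    #     session.query(models.ImageVolumeCacheEntry).\
    #         filter_by(image_id=image_id, host=host).\
    #         order_by(models.ImageVolumeCacheEntry.last_used.desc()).\
    #         first()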
def test_cache_entry_get_none(self):
host = 'abc@123#poolz'
image_id = 'c06764d7-54b0-4471-acce-62e79452a38b'
entry = db.image_volume_cache_get_and_update_last_used(self.ctxt,
image_id,
host=host)
self.assertIsNone(entry)
def test_cache_entry_get_by_volume_id_none(self):
volume_id = 'e0e4f819-24bb-49e6-af1e-67fb77fc07d1'
entry = db.image_volume_cache_get_by_volume_id(self.ctxt, volume_id)
self.assertIsNone(entry)
def test_cache_entry_get_all_for_host(self):
host = 'abc@123#poolz'
image_updated_at = datetime.datetime.utcnow()
size = 6
entries = []
for i in range(0, 3):
entries.append(db.image_volume_cache_create(self.ctxt,
host,
'cluster-%s' % i,
'image-' + str(i),
image_updated_at,
'vol-' + str(i),
size))
other_entry = db.image_volume_cache_create(self.ctxt,
'someOtherHost',
'someOtherCluster',
'image-12345',
image_updated_at,
'vol-1234',
size)
found_entries = db.image_volume_cache_get_all(self.ctxt, host=host)
self.assertIsNotNone(found_entries)
self.assertEqual(len(entries), len(found_entries))
for found_entry in found_entries:
for entry in entries:
if found_entry['id'] == entry['id']:
self._validate_entry(found_entry,
entry['host'],
entry['cluster_name'],
entry['image_id'],
entry['image_updated_at'],
entry['volume_id'],
entry['size'])
# Cleanup
db.image_volume_cache_delete(self.ctxt, other_entry['volume_id'])
for entry in entries:
db.image_volume_cache_delete(self.ctxt, entry['volume_id'])
def test_cache_entry_get_all_for_host_none(self):
host = 'abc@123#poolz'
entries = db.image_volume_cache_get_all(self.ctxt, host=host)
self.assertEqual([], entries)
@ddt.data('host1@backend1#pool1', 'host1@backend1')
def test_cache_entry_include_in_cluster_by_host(self, host):
"""Basic cache include test filtering by host and with full rename."""
image_updated_at = datetime.datetime.utcnow()
image_cache = (
db.image_volume_cache_create(
self.ctxt, 'host1@backend1#pool1', 'cluster1@backend1#pool1',
'image-1', image_updated_at, 'vol-1', 6),
db.image_volume_cache_create(
self.ctxt, 'host1@backend2#pool2', 'cluster1@backend2#pool2',
'image-2', image_updated_at, 'vol-2', 6),
db.image_volume_cache_create(
self.ctxt, 'host2@backend#pool', 'cluster2@backend#pool',
'image-3', image_updated_at, 'vol-3', 6),
)
cluster_name = 'my_cluster'
result = db.image_volume_cache_include_in_cluster(self.ctxt,
cluster_name,
partial_rename=False,
host=host)
self.assertEqual(1, result)
db_image_cache = db.image_volume_cache_get_by_volume_id(
self.ctxt, image_cache[0].volume_id)
self.assertEqual(cluster_name, db_image_cache.cluster_name)
class DBAPIGenericTestCase(BaseTest):
def test_resource_exists_volume(self):
# NOTE(geguileo): We create 2 volumes in this test (even if the second
# one is not being used) to confirm that the DB exists subquery is
        # properly formulated and doesn't result in multiple rows, as such
        # a case would raise an exception when converting the result to a
        # scalar. This would happen if, for example, the query wasn't generated
# directly using get_session but using model_query like this:
# query = model_query(context, model,
# sql.exists().where(and_(*conditions)))
# Instead of what we do:
# query = get_session().query(sql.exists().where(and_(*conditions)))
db.volume_create(self.ctxt, {'id': fake.VOLUME_ID})
db.volume_create(self.ctxt, {'id': fake.VOLUME2_ID})
model = db.get_model_for_versioned_object(objects.Volume)
res = sqlalchemy_api.resource_exists(self.ctxt, model, fake.VOLUME_ID)
self.assertTrue(res, msg="Couldn't find existing Volume")
def test_resource_exists_volume_fails(self):
db.volume_create(self.ctxt, {'id': fake.VOLUME_ID})
model = db.get_model_for_versioned_object(objects.Volume)
res = sqlalchemy_api.resource_exists(self.ctxt, model, fake.VOLUME2_ID)
self.assertFalse(res, msg='Found nonexistent Volume')
def test_resource_exists_snapshot(self):
# Read NOTE in test_resource_exists_volume on why we create 2 snapshots
vol = db.volume_create(self.ctxt, {'id': fake.VOLUME_ID})
db.snapshot_create(self.ctxt, {'id': fake.SNAPSHOT_ID,
'volume_id': vol.id})
db.snapshot_create(self.ctxt, {'id': fake.SNAPSHOT2_ID,
'volume_id': vol.id})
model = db.get_model_for_versioned_object(objects.Snapshot)
res = sqlalchemy_api.resource_exists(self.ctxt, model,
fake.SNAPSHOT_ID)
self.assertTrue(res, msg="Couldn't find existing Snapshot")
def test_resource_exists_snapshot_fails(self):
vol = db.volume_create(self.ctxt, {'id': fake.VOLUME_ID})
db.snapshot_create(self.ctxt, {'id': fake.SNAPSHOT_ID,
'volume_id': vol.id})
model = db.get_model_for_versioned_object(objects.Snapshot)
res = sqlalchemy_api.resource_exists(self.ctxt, model,
fake.SNAPSHOT2_ID)
self.assertFalse(res, msg='Found nonexistent Snapshot')
def test_resource_exists_volume_project_separation(self):
user_context = context.RequestContext(fake.USER_ID, fake.PROJECT_ID,
is_admin=False)
user2_context = context.RequestContext(fake.USER2_ID, fake.PROJECT2_ID,
is_admin=False)
volume = db.volume_create(user_context,
{'project_id': fake.PROJECT_ID})
model = db.get_model_for_versioned_object(objects.Volume)
# Owner can find it
res = sqlalchemy_api.resource_exists(user_context, model, volume.id)
self.assertTrue(res, msg='Owner cannot find its own Volume')
# Non admin user that is not the owner cannot find it
res = sqlalchemy_api.resource_exists(user2_context, model, volume.id)
self.assertFalse(res, msg="Non admin user can find somebody else's "
"volume")
# Admin can find it
res = sqlalchemy_api.resource_exists(self.ctxt, model, volume.id)
self.assertTrue(res, msg="Admin cannot find the volume")
def test_resource_exists_snapshot_project_separation(self):
user_context = context.RequestContext(fake.USER_ID, fake.PROJECT_ID,
is_admin=False)
user2_context = context.RequestContext(fake.USER2_ID, fake.PROJECT2_ID,
is_admin=False)
vol = db.volume_create(user_context, {'project_id': fake.PROJECT_ID})
snap = db.snapshot_create(self.ctxt, {'project_id': fake.PROJECT_ID,
'volume_id': vol.id})
model = db.get_model_for_versioned_object(objects.Snapshot)
# Owner can find it
res = sqlalchemy_api.resource_exists(user_context, model, snap.id)
self.assertTrue(res, msg='Owner cannot find its own Snapshot')
# Non admin user that is not the owner cannot find it
res = sqlalchemy_api.resource_exists(user2_context, model, snap.id)
self.assertFalse(res, msg="Non admin user can find somebody else's "
"Snapshot")
# Admin can find it
res = sqlalchemy_api.resource_exists(self.ctxt, model, snap.id)
self.assertTrue(res, msg="Admin cannot find the Snapshot")
@ddt.ddt
class DBAPIBackendTestCase(BaseTest):
@ddt.data((True, True), (True, False), (False, True), (False, False))
@ddt.unpack
def test_is_backend_frozen_service(self, frozen, pool):
service = utils.create_service(self.ctxt, {'frozen': frozen})
utils.create_service(self.ctxt, {'host': service.host + '2',
'frozen': not frozen})
host = service.host
if pool:
host += '#poolname'
self.assertEqual(frozen, db.is_backend_frozen(self.ctxt, host,
service.cluster_name))
@ddt.data((True, True), (True, False), (False, True), (False, False))
@ddt.unpack
def test_is_backend_frozen_cluster(self, frozen, pool):
cluster = utils.create_cluster(self.ctxt, frozen=frozen)
utils.create_service(self.ctxt, {'frozen': frozen, 'host': 'hostA',
'cluster_name': cluster.name})
service = utils.create_service(self.ctxt,
{'frozen': not frozen,
'host': 'hostB',
'cluster_name': cluster.name})
utils.create_populated_cluster(self.ctxt, 3, 0, frozen=not frozen,
name=cluster.name + '2')
host = service.host
cluster = service.cluster_name
if pool:
host += '#poolname'
cluster += '#poolname'
self.assertEqual(frozen,
db.is_backend_frozen(self.ctxt, host, cluster))
@ddt.ddt
class DBAPIGroupTestCase(BaseTest):
def test_group_get_all_by_host(self):
grp_type = db.group_type_create(self.ctxt, {'name': 'my_group_type'})
groups = []
backend = 'host1@lvm'
for i in range(3):
groups.append([db.group_create(
self.ctxt,
{'host': '%(b)s%(n)d' % {'b': backend, 'n': i},
'group_type_id': grp_type['id']})
for j in range(3)])
for i in range(3):
host = '%(b)s%(n)d' % {'b': backend, 'n': i}
filters = {'host': host, 'backend_match_level': 'backend'}
grps = db.group_get_all(
self.ctxt, filters=filters)
self._assertEqualListsOfObjects(groups[i], grps)
for grp in grps:
db.group_destroy(self.ctxt, grp['id'])
db.group_type_destroy(self.ctxt, grp_type['id'])
def test_group_get_all_by_host_with_pools(self):
grp_type = db.group_type_create(self.ctxt, {'name': 'my_group_type'})
groups = []
backend = 'host1@lvm'
pool = '%s#pool1' % backend
grp_on_host_wo_pool = [db.group_create(
self.ctxt,
{'host': backend,
'group_type_id': grp_type['id']})
for j in range(3)]
grp_on_host_w_pool = [db.group_create(
self.ctxt,
{'host': pool,
'group_type_id': grp_type['id']})]
groups.append(grp_on_host_wo_pool + grp_on_host_w_pool)
        # insert an additional record whose host doesn't belong to the same
        # backend and check that it is not included in the result
grp_foobar = db.group_create(self.ctxt,
{'host': '%sfoo' % backend,
'group_type_id': grp_type['id']})
filters = {'host': backend, 'backend_match_level': 'backend'}
grps = db.group_get_all(self.ctxt, filters=filters)
self._assertEqualListsOfObjects(groups[0], grps)
for grp in grps:
db.group_destroy(self.ctxt, grp['id'])
db.group_destroy(self.ctxt, grp_foobar['id'])
db.group_type_destroy(self.ctxt, grp_type['id'])
def _create_gs_to_test_include_in(self):
"""Helper method for test_group_include_in_* tests."""
return [
db.group_create(
self.ctxt, {'host': 'host1@backend1#pool1',
'cluster_name': 'cluster1@backend1#pool1'}),
db.group_create(
self.ctxt, {'host': 'host1@backend2#pool2',
'cluster_name': 'cluster1@backend2#pool1'}),
db.group_create(
self.ctxt, {'host': 'host2@backend#poo1',
'cluster_name': 'cluster2@backend#pool'}),
]
@ddt.data('host1@backend1#pool1', 'host1@backend1')
def test_group_include_in_cluster_by_host(self, host):
group = self._create_gs_to_test_include_in()[0]
cluster_name = 'my_cluster'
result = db.group_include_in_cluster(self.ctxt, cluster_name,
partial_rename=False, host=host)
self.assertEqual(1, result)
db_group = db.group_get(self.ctxt, group.id)
self.assertEqual(cluster_name, db_group.cluster_name)
def test_group_include_in_cluster_by_host_multiple(self):
groups = self._create_gs_to_test_include_in()[0:2]
host = 'host1'
cluster_name = 'my_cluster'
result = db.group_include_in_cluster(self.ctxt, cluster_name,
partial_rename=True, host=host)
self.assertEqual(2, result)
db_group = [db.group_get(self.ctxt, groups[0].id),
db.group_get(self.ctxt, groups[1].id)]
for i in range(2):
self.assertEqual(cluster_name + groups[i].host[len(host):],
db_group[i].cluster_name)
@ddt.data('cluster1@backend1#pool1', 'cluster1@backend1')
def test_group_include_in_cluster_by_cluster_name(self, cluster_name):
group = self._create_gs_to_test_include_in()[0]
new_cluster_name = 'cluster_new@backend1#pool'
result = db.group_include_in_cluster(self.ctxt, new_cluster_name,
partial_rename=False,
cluster_name=cluster_name)
self.assertEqual(1, result)
db_group = db.group_get(self.ctxt, group.id)
self.assertEqual(new_cluster_name, db_group.cluster_name)
def test_group_include_in_cluster_by_cluster_multiple(self):
groups = self._create_gs_to_test_include_in()[0:2]
cluster_name = 'cluster1'
new_cluster_name = 'my_cluster'
result = db.group_include_in_cluster(self.ctxt, new_cluster_name,
partial_rename=True,
cluster_name=cluster_name)
self.assertEqual(2, result)
db_groups = [db.group_get(self.ctxt, groups[0].id),
db.group_get(self.ctxt, groups[1].id)]
for i in range(2):
self.assertEqual(
new_cluster_name + groups[i].cluster_name[len(cluster_name):],
db_groups[i].cluster_name)
|
apache-2.0
|
QGuLL/samba
|
source3/stf/pythoncheck.py
|
82
|
1720
|
#! /usr/bin/python
# Comfychair test cases for Samba python extensions
# Copyright (C) 2003 by Tim Potter <[email protected]>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
"""These tests are run by Samba's "make check"."""
import sys, comfychair
class ImportTest(comfychair.TestCase):
"""Check that all modules can be imported without error."""
def runtest(self):
python_modules = ['spoolss', 'lsa', 'samr', 'winbind', 'winreg',
'srvsvc', 'tdb', 'smb', 'tdbpack']
for m in python_modules:
try:
__import__('samba.%s' % m)
except ImportError, msg:
self.log(str(msg))
self.fail('error importing %s module' % m)
tests = [ImportTest]
if __name__ == '__main__':
    # Some magic to prepend the build directory to the python path so we see
    # the objects we have built and not previously installed stuff.
from distutils.util import get_platform
from os import getcwd
sys.path.insert(0, '%s/build/lib.%s-%s' %
(getcwd(), get_platform(), sys.version[0:3]))
comfychair.main(tests)
|
gpl-3.0
|
mrrrgn/olympia
|
apps/users/tests/test_views.py
|
12
|
50379
|
import collections
import json
from urlparse import urlparse
from django.conf import settings
from django.core import mail
from django.core.cache import cache
from django.contrib.auth.tokens import default_token_generator
from django.forms.models import model_to_dict
from django.utils.http import urlsafe_base64_encode
from mock import Mock, patch
from nose.tools import eq_
# Unused, but needed so that we can patch jingo.
from waffle import helpers # NOQA
import amo
import amo.tests
from abuse.models import AbuseReport
from access.models import Group, GroupUser
from addons.models import Addon, AddonUser, Category
from amo.helpers import urlparams
from amo.pyquery_wrapper import PyQuery as pq
from amo.urlresolvers import reverse
from bandwagon.models import Collection, CollectionWatcher
from devhub.models import ActivityLog
from reviews.models import Review
from users.models import BlacklistedPassword, UserProfile, UserNotification
import users.notifications as email
from users.utils import EmailResetCode, UnsubscribeCode
from users.views import tshirt_eligible
def fake_request():
request = Mock()
request.LANG = 'foo'
request.GET = request.META = {}
# Fake out host/scheme for Persona login.
request.get_host.return_value = urlparse(settings.SITE_URL).netloc
request.is_secure.return_value = False
return request
def check_sidebar_links(self, expected):
r = self.client.get(self.url)
eq_(r.status_code, 200)
links = pq(r.content)('#secondary-nav ul a')
amo.tests.check_links(expected, links)
eq_(links.filter('.selected').attr('href'), self.url)
class TestTShirtOrder(amo.tests.TestCase):
fixtures = ['base/users', 'base/addon_3615']
def test_normal_user(self):
user = UserProfile.objects.get(email='[email protected]')
assert not tshirt_eligible(user)
def test_listed_dev(self):
addon = Addon.objects.get(pk=3615)
user = addon.authors.get()
assert tshirt_eligible(user)
def test_unlisted_dev(self):
addon = Addon.objects.get(pk=3615)
user = addon.authors.get()
addon.update(is_listed=False)
assert not tshirt_eligible(user)
addon.versions.get().files.get().update(is_signed=True)
assert tshirt_eligible(user)
def test_persona_dev(self):
addon = Addon.objects.get(pk=3615)
user = addon.authors.get()
addon.update(type=amo.ADDON_PERSONA,
average_daily_users=1)
assert not tshirt_eligible(user)
addon.update(average_daily_users=10000)
assert tshirt_eligible(user)
class UserViewBase(amo.tests.TestCase):
fixtures = ['users/test_backends']
def setUp(self):
super(UserViewBase, self).setUp()
self.client = amo.tests.TestClient()
self.client.get('/')
self.user = UserProfile.objects.get(id='4043307')
def get_profile(self):
return UserProfile.objects.get(id=self.user.id)
class TestAjax(UserViewBase):
def setUp(self):
super(TestAjax, self).setUp()
self.client.login(username='[email protected]', password='password')
def test_ajax_404(self):
r = self.client.get(reverse('users.ajax'), follow=True)
eq_(r.status_code, 404)
def test_ajax_success(self):
r = self.client.get(reverse('users.ajax'), {'q': '[email protected]'},
follow=True)
data = json.loads(r.content)
eq_(data, {'status': 1, 'message': '', 'id': 9945,
'name': u'Justin Scott \u0627\u0644\u062a\u0637\u0628'})
def test_ajax_xss(self):
self.user.display_name = '<script>alert("xss")</script>'
self.user.save()
assert '<script>' in self.user.display_name, (
'Expected <script> to be in display name')
r = self.client.get(reverse('users.ajax'),
{'q': self.user.email, 'dev': 0})
assert '<script>' not in r.content
        assert '&lt;script&gt;' in r.content
def test_ajax_failure_incorrect_email(self):
r = self.client.get(reverse('users.ajax'), {'q': 'incorrect'},
follow=True)
data = json.loads(r.content)
eq_(data,
{'status': 0,
'message': 'A user with that email address does not exist.'})
def test_ajax_failure_no_email(self):
r = self.client.get(reverse('users.ajax'), {'q': ''}, follow=True)
data = json.loads(r.content)
eq_(data,
{'status': 0,
'message': 'An email address is required.'})
def test_forbidden(self):
self.client.logout()
r = self.client.get(reverse('users.ajax'))
eq_(r.status_code, 401)
class TestEdit(UserViewBase):
def setUp(self):
super(TestEdit, self).setUp()
self.client.login(username='[email protected]', password='password')
self.user = UserProfile.objects.get(username='jbalogh')
self.url = reverse('users.edit')
self.data = {'username': 'jbalogh', 'email': '[email protected]',
'oldpassword': 'password', 'password': 'longenough',
'password2': 'longenough', 'lang': 'en-US'}
def test_password_logs(self):
res = self.client.post(self.url, self.data)
eq_(res.status_code, 302)
eq_(self.user.userlog_set
.filter(activity_log__action=amo.LOG.CHANGE_PASSWORD.id)
.count(), 1)
def test_password_empty(self):
admingroup = Group(rules='Users:Edit')
admingroup.save()
GroupUser.objects.create(group=admingroup, user=self.user)
homepage = {'username': 'jbalogh', 'email': '[email protected]',
'homepage': 'http://cbc.ca', 'lang': 'en-US'}
res = self.client.post(self.url, homepage)
eq_(res.status_code, 302)
def test_password_blacklisted(self):
BlacklistedPassword.objects.create(password='password')
bad = self.data.copy()
bad['password'] = 'password'
res = self.client.post(self.url, bad)
eq_(res.status_code, 200)
eq_(res.context['form'].is_valid(), False)
eq_(res.context['form'].errors['password'],
[u'That password is not allowed.'])
def test_password_short(self):
bad = self.data.copy()
bad['password'] = 'short'
res = self.client.post(self.url, bad)
eq_(res.status_code, 200)
eq_(res.context['form'].is_valid(), False)
eq_(res.context['form'].errors['password'],
[u'Must be 8 characters or more.'])
def test_email_change_mail_sent(self):
data = {'username': 'jbalogh',
'email': '[email protected]',
'display_name': 'DJ SurfNTurf',
'lang': 'en-US'}
r = self.client.post(self.url, data, follow=True)
self.assertRedirects(r, self.url)
self.assertContains(r, 'An email has been sent to %s' % data['email'])
# The email shouldn't change until they confirm, but the name should
u = UserProfile.objects.get(id='4043307')
self.assertEquals(u.name, 'DJ SurfNTurf')
self.assertEquals(u.email, '[email protected]')
eq_(len(mail.outbox), 1)
eq_(mail.outbox[0].subject.find('Please confirm your email'), 0)
assert mail.outbox[0].body.find('%s/emailchange/' % self.user.id) > 0
@patch.object(settings, 'SEND_REAL_EMAIL', False)
def test_email_change_mail_send_even_with_fake_email(self):
data = {'username': 'jbalogh',
'email': '[email protected]',
'display_name': 'DJ SurfNTurf',
'lang': 'en-US'}
self.client.post(self.url, data, follow=True)
eq_(len(mail.outbox), 1)
eq_(mail.outbox[0].subject.find('Please confirm your email'), 0)
def test_edit_bio(self):
eq_(self.get_profile().bio, None)
data = {'username': 'jbalogh',
'email': '[email protected]',
'bio': 'xxx unst unst',
'lang': 'en-US'}
r = self.client.post(self.url, data, follow=True)
self.assertRedirects(r, self.url)
self.assertContains(r, data['bio'])
eq_(unicode(self.get_profile().bio), data['bio'])
data['bio'] = 'yyy unst unst'
r = self.client.post(self.url, data, follow=True)
self.assertRedirects(r, self.url)
self.assertContains(r, data['bio'])
eq_(unicode(self.get_profile().bio), data['bio'])
def check_default_choices(self, choices, checked=True):
doc = pq(self.client.get(self.url).content)
eq_(doc('input[name=notifications]:checkbox').length, len(choices))
for id, label in choices:
box = doc('input[name=notifications][value=%s]' % id)
if checked:
eq_(box.filter(':checked').length, 1)
else:
eq_(box.length, 1)
parent = box.parent('label')
if checked:
eq_(parent.find('.msg').length, 1) # Check for "NEW" message.
eq_(parent.remove('.msg, .req').text(), label)
def post_notifications(self, choices):
self.check_default_choices(choices)
self.data['notifications'] = []
r = self.client.post(self.url, self.data)
self.assertRedirects(r, self.url, 302)
eq_(UserNotification.objects.count(), len(email.NOTIFICATIONS))
eq_(UserNotification.objects.filter(enabled=True).count(),
len(filter(lambda x: x.mandatory, email.NOTIFICATIONS)))
self.check_default_choices(choices, checked=False)
def test_edit_notifications(self):
# Make jbalogh a developer.
AddonUser.objects.create(
user=self.user,
addon=Addon.objects.create(type=amo.ADDON_EXTENSION))
choices = email.NOTIFICATIONS_CHOICES
self.check_default_choices(choices)
self.data['notifications'] = [2, 4, 6]
r = self.client.post(self.url, self.data)
self.assertRedirects(r, self.url, 302)
mandatory = [n.id for n in email.NOTIFICATIONS if n.mandatory]
total = len(self.data['notifications'] + mandatory)
eq_(UserNotification.objects.count(), len(email.NOTIFICATIONS))
eq_(UserNotification.objects.filter(enabled=True).count(), total)
doc = pq(self.client.get(self.url).content)
eq_(doc('input[name=notifications]:checked').length, total)
eq_(doc('.more-none').length, len(email.NOTIFICATION_GROUPS))
eq_(doc('.more-all').length, len(email.NOTIFICATION_GROUPS))
def test_edit_notifications_non_dev(self):
self.post_notifications(email.NOTIFICATIONS_CHOICES_NOT_DEV)
def test_edit_notifications_non_dev_error(self):
self.data['notifications'] = [2, 4, 6]
r = self.client.post(self.url, self.data)
assert r.context['form'].errors['notifications']
def test_collections_toggles(self):
r = self.client.get(self.url)
eq_(r.status_code, 200)
doc = pq(r.content)
eq_(doc('#profile-misc').length, 1,
'Collections options should be visible.')
def test_remove_locale_bad_request(self):
r = self.client.post(self.user.get_user_url('remove-locale'))
eq_(r.status_code, 400)
@patch.object(UserProfile, 'remove_locale')
def test_remove_locale(self, remove_locale_mock):
r = self.client.post(self.user.get_user_url('remove-locale'),
{'locale': 'el'})
eq_(r.status_code, 200)
remove_locale_mock.assert_called_with('el')
def test_remove_locale_default_locale(self):
r = self.client.post(self.user.get_user_url('remove-locale'),
{'locale': settings.LANGUAGE_CODE})
eq_(r.status_code, 400)
class TestEditAdmin(UserViewBase):
fixtures = ['base/users']
def setUp(self):
super(TestEditAdmin, self).setUp()
self.client.login(username='[email protected]', password='password')
self.regular = self.get_user()
self.url = reverse('users.admin_edit', args=[self.regular.pk])
def get_data(self):
data = model_to_dict(self.regular)
data['admin_log'] = 'test'
del data['password']
return data
def get_user(self):
# Using pk so that we can still get the user after anonymize.
return UserProfile.objects.get(pk=10482)
def test_edit(self):
res = self.client.get(self.url)
eq_(res.status_code, 200)
def test_edit_without_user_lang(self):
self.regular.lang = None
self.regular.save()
res = self.client.get(self.url)
eq_(res.status_code, 200)
def test_edit_forbidden(self):
self.client.logout()
self.client.login(username='[email protected]', password='password')
res = self.client.get(self.url)
eq_(res.status_code, 403)
def test_edit_forbidden_anon(self):
self.client.logout()
res = self.client.get(self.url)
eq_(res.status_code, 302)
def test_anonymize(self):
data = self.get_data()
data['anonymize'] = True
res = self.client.post(self.url, data)
eq_(res.status_code, 302)
eq_(self.get_user().password, "sha512$Anonymous$Password")
def test_anonymize_fails(self):
data = self.get_data()
data['anonymize'] = True
data['email'] = '[email protected]'
res = self.client.post(self.url, data)
eq_(res.status_code, 200)
eq_(self.get_user().password, self.regular.password) # Hasn't changed.
def test_admin_logs_edit(self):
data = self.get_data()
data['email'] = '[email protected]'
self.client.post(self.url, data)
res = ActivityLog.objects.filter(action=amo.LOG.ADMIN_USER_EDITED.id)
eq_(res.count(), 1)
assert self.get_data()['admin_log'] in res[0]._arguments
def test_admin_logs_anonymize(self):
data = self.get_data()
data['anonymize'] = True
self.client.post(self.url, data)
res = (ActivityLog.objects
.filter(action=amo.LOG.ADMIN_USER_ANONYMIZED.id))
eq_(res.count(), 1)
assert self.get_data()['admin_log'] in res[0]._arguments
def test_admin_no_password(self):
data = self.get_data()
data.update({'password': 'pass1234',
'password2': 'pass1234',
'oldpassword': 'password'})
self.client.post(self.url, data)
logs = ActivityLog.objects.filter
eq_(logs(action=amo.LOG.CHANGE_PASSWORD.id).count(), 0)
res = logs(action=amo.LOG.ADMIN_USER_EDITED.id)
eq_(res.count(), 1)
eq_(res[0].details['password'][0], u'****')
def test_delete_user_display_name_xss(self):
# This is to test for bug 835827.
self.regular.display_name = '"><img src=a onerror=alert(1)><a a="'
self.regular.save()
delete_url = reverse('admin:users_userprofile_delete',
args=(self.regular.pk,))
res = self.client.post(delete_url, {'post': 'yes'}, follow=True)
assert self.regular.display_name not in res.content
FakeResponse = collections.namedtuple("FakeResponse", "status_code content")
class TestPasswordAdmin(UserViewBase):
fixtures = ['base/users']
def setUp(self):
super(TestPasswordAdmin, self).setUp()
self.client.login(username='[email protected]', password='password')
self.url = reverse('users.edit')
self.correct = {'username': 'editor',
'email': '[email protected]',
'oldpassword': 'password', 'password': 'longenough',
'password2': 'longenough', 'lang': 'en-US'}
def test_password_admin(self):
res = self.client.post(self.url, self.correct, follow=False)
eq_(res.status_code, 200)
eq_(res.context['form'].is_valid(), False)
eq_(res.context['form'].errors['password'],
[u'Letters and numbers required.'])
def test_password(self):
UserProfile.objects.get(username='editor').groups.all().delete()
res = self.client.post(self.url, self.correct, follow=False)
eq_(res.status_code, 302)
class TestEmailChange(UserViewBase):
def setUp(self):
super(TestEmailChange, self).setUp()
self.token, self.hash = EmailResetCode.create(self.user.id,
'[email protected]')
def test_fail(self):
# Completely invalid user, valid code
url = reverse('users.emailchange', args=[1234, self.token, self.hash])
r = self.client.get(url, follow=True)
eq_(r.status_code, 404)
# User is in the system, but not attached to this code, valid code
url = reverse('users.emailchange', args=[9945, self.token, self.hash])
r = self.client.get(url, follow=True)
eq_(r.status_code, 400)
# Valid user, invalid code
url = reverse('users.emailchange', args=[self.user.id, self.token,
self.hash[:-3]])
r = self.client.get(url, follow=True)
eq_(r.status_code, 400)
def test_success(self):
self.assertEqual(self.user.email, '[email protected]')
url = reverse('users.emailchange', args=[self.user.id, self.token,
self.hash])
r = self.client.get(url, follow=True)
eq_(r.status_code, 200)
u = UserProfile.objects.get(id=self.user.id)
self.assertEqual(u.email, '[email protected]')
def test_email_change_to_an_existing_user_email(self):
token, hash_ = EmailResetCode.create(self.user.id, '[email protected]')
url = reverse('users.emailchange', args=[self.user.id, token, hash_])
r = self.client.get(url, follow=True)
eq_(r.status_code, 400)
class TestLogin(UserViewBase):
fixtures = ['users/test_backends', 'base/addon_3615']
def setUp(self):
super(TestLogin, self).setUp()
self.url = reverse('users.login')
self.data = {'username': '[email protected]', 'password': 'password'}
def test_client_login(self):
"""
This is just here to make sure Test Client's login() works with
our custom code.
"""
assert not self.client.login(username='[email protected]',
password='wrongpassword')
assert self.client.login(**self.data)
def test_double_login(self):
r = self.client.post(self.url, self.data, follow=True)
self.assertRedirects(r, '/en-US/firefox/')
        # If you go to the login page when you're already logged in, we
        # bounce you.
r = self.client.get(self.url, follow=True)
self.assertRedirects(r, '/en-US/firefox/')
def test_ok_redirects(self):
r = self.client.post(self.url, self.data, follow=True)
self.assertRedirects(r, '/en-US/firefox/')
r = self.client.get(self.url + '?to=/de/firefox/', follow=True)
self.assertRedirects(r, '/de/firefox/')
def test_bad_redirects(self):
r = self.client.post(self.url, self.data, follow=True)
self.assertRedirects(r, '/en-US/firefox/')
        for redirect in ['http://xx.com',
                         'data:text/html,<script>window.alert("xss")</script>',
                         'mailto:[email protected]',
                         'file:///etc/passwd',
                         'javascript:window.alert("xss");']:
            # Each disallowed target must be ignored; we bounce back home.
            r = self.client.get(self.url + '?to=' + redirect, follow=True)
            self.assertRedirects(r, '/en-US/firefox/')
def test_login_link(self):
r = self.client.get(self.url)
eq_(r.status_code, 200)
eq_(pq(r.content)('#aux-nav li.login').length, 1)
def test_logout_link(self):
self.test_client_login()
r = self.client.get(reverse('home'))
eq_(r.status_code, 200)
eq_(pq(r.content)('#aux-nav li.logout').length, 1)
@amo.tests.mobile_test
def test_mobile_login(self):
r = self.client.get(self.url)
eq_(r.status_code, 200)
doc = pq(r.content)('header')
eq_(doc('nav').length, 1)
eq_(doc('#home').length, 1)
eq_(doc('#auth-nav li.login').length, 0)
def test_login_ajax(self):
url = reverse('users.login_modal')
r = self.client.get(url)
eq_(r.status_code, 200)
res = self.client.post(url, data=self.data)
eq_(res.status_code, 302)
def test_login_ajax_error(self):
url = reverse('users.login_modal')
        data = self.data.copy()
        data['username'] = ''
        res = self.client.post(url, data=data)
eq_(res.context['form'].errors['username'][0],
'This field is required.')
def test_login_ajax_wrong(self):
url = reverse('users.login_modal')
        data = self.data.copy()
        data['username'] = '[email protected]'
        res = self.client.post(url, data=data)
text = 'Please enter a correct username and password.'
assert res.context['form'].errors['__all__'][0].startswith(text)
def test_login_no_recaptcha(self):
res = self.client.post(self.url, data=self.data)
eq_(res.status_code, 302)
@patch.object(settings, 'RECAPTCHA_PRIVATE_KEY', 'something')
@patch.object(settings, 'LOGIN_RATELIMIT_USER', 2)
def test_login_attempts_recaptcha(self):
res = self.client.post(self.url, data=self.data)
eq_(res.status_code, 200)
assert res.context['form'].fields.get('recaptcha')
@patch.object(settings, 'RECAPTCHA_PRIVATE_KEY', 'something')
def test_login_shown_recaptcha(self):
data = self.data.copy()
data['recaptcha_shown'] = ''
res = self.client.post(self.url, data=data)
eq_(res.status_code, 200)
assert res.context['form'].fields.get('recaptcha')
@patch.object(settings, 'RECAPTCHA_PRIVATE_KEY', 'something')
@patch.object(settings, 'LOGIN_RATELIMIT_USER', 2)
@patch('captcha.fields.ReCaptchaField.clean')
def test_login_with_recaptcha(self, clean):
clean.return_value = ''
data = self.data.copy()
data.update({'recaptcha': '', 'recaptcha_shown': ''})
res = self.client.post(self.url, data=data)
eq_(res.status_code, 302)
def test_login_fails_increment(self):
# It increments even when the form is wrong.
user = UserProfile.objects.filter(email=self.data['username'])
eq_(user.get().failed_login_attempts, 3)
self.client.post(self.url, data={'username': self.data['username']})
eq_(user.get().failed_login_attempts, 4)
def test_doubled_account(self):
"""
Logging in to an account that shares a User object with another
account works properly.
"""
profile = UserProfile.objects.create(username='login_test',
email='[email protected]')
profile.set_password('bazpassword')
profile.email = '[email protected]'
profile.save()
profile2 = UserProfile.objects.create(username='login_test2',
email='[email protected]')
profile2.set_password('foopassword')
profile2.save()
res = self.client.post(self.url,
data={'username': '[email protected]',
'password': 'wrongpassword'})
eq_(res.status_code, 200)
eq_(UserProfile.objects.get(email='[email protected]')
.failed_login_attempts, 1)
res2 = self.client.post(self.url,
data={'username': '[email protected]',
'password': 'bazpassword'})
eq_(res2.status_code, 302)
res3 = self.client.post(self.url, data={'username': '[email protected]',
'password': 'foopassword'})
eq_(res3.status_code, 302)
def test_changed_account(self):
"""
Logging in to an account that had its email changed succeeds.
"""
profile = UserProfile.objects.create(username='login_test',
email='[email protected]')
profile.set_password('bazpassword')
profile.email = '[email protected]'
profile.save()
res = self.client.post(self.url,
data={'username': '[email protected]',
'password': 'wrongpassword'})
eq_(res.status_code, 200)
eq_(UserProfile.objects.get(email='[email protected]')
.failed_login_attempts, 1)
res2 = self.client.post(self.url,
data={'username': '[email protected]',
'password': 'bazpassword'})
eq_(res2.status_code, 302)
@patch.object(settings, 'RECAPTCHA_PRIVATE_KEY', '')
@patch('users.models.UserProfile.log_login_attempt')
class TestFailedCount(UserViewBase):
fixtures = ['users/test_backends', 'base/addon_3615']
def setUp(self):
super(TestFailedCount, self).setUp()
self.url = reverse('users.login')
self.data = {'username': '[email protected]', 'password': 'password'}
def log_calls(self, obj):
return [call[0][0] for call in obj.call_args_list]
def test_login_passes(self, log_login_attempt):
self.client.post(self.url, data=self.data)
eq_(self.log_calls(log_login_attempt), [True])
def test_login_fails(self, log_login_attempt):
self.client.post(self.url, data={'username': self.data['username']})
eq_(self.log_calls(log_login_attempt), [False])
def test_login_deleted(self, log_login_attempt):
(UserProfile.objects.get(email=self.data['username'])
.update(deleted=True))
self.client.post(self.url, data={'username': self.data['username']})
eq_(self.log_calls(log_login_attempt), [False])
def test_login_confirmation(self, log_login_attempt):
(UserProfile.objects.get(email=self.data['username'])
.update(confirmationcode='123'))
self.client.post(self.url, data={'username': self.data['username']})
eq_(self.log_calls(log_login_attempt), [False])
def test_login_get(self, log_login_attempt):
self.client.get(self.url, data={'username': self.data['username']})
eq_(log_login_attempt.called, False)
def test_login_get_no_data(self, log_login_attempt):
self.client.get(self.url)
eq_(log_login_attempt.called, False)
class TestUnsubscribe(UserViewBase):
fixtures = ['base/users']
def setUp(self):
super(TestUnsubscribe, self).setUp()
self.user = UserProfile.objects.get(email='[email protected]')
def test_correct_url_update_notification(self):
# Make sure the user is subscribed
perm_setting = email.NOTIFICATIONS[0]
un = UserNotification.objects.create(notification_id=perm_setting.id,
user=self.user,
enabled=True)
# Create a URL
token, hash = UnsubscribeCode.create(self.user.email)
url = reverse('users.unsubscribe', args=[token, hash,
perm_setting.short])
# Load the URL
r = self.client.get(url)
doc = pq(r.content)
# Check that it was successful
assert doc('#unsubscribe-success').length
assert doc('#standalone').length
eq_(doc('#standalone ul li').length, 1)
# Make sure the user is unsubscribed
un = UserNotification.objects.filter(notification_id=perm_setting.id,
user=self.user)
eq_(un.count(), 1)
eq_(un.all()[0].enabled, False)
def test_correct_url_new_notification(self):
        # Make sure the user has no notification rows yet
assert not UserNotification.objects.count()
# Create a URL
perm_setting = email.NOTIFICATIONS[0]
token, hash = UnsubscribeCode.create(self.user.email)
url = reverse('users.unsubscribe', args=[token, hash,
perm_setting.short])
# Load the URL
r = self.client.get(url)
doc = pq(r.content)
# Check that it was successful
assert doc('#unsubscribe-success').length
assert doc('#standalone').length
eq_(doc('#standalone ul li').length, 1)
# Make sure the user is unsubscribed
un = UserNotification.objects.filter(notification_id=perm_setting.id,
user=self.user)
eq_(un.count(), 1)
eq_(un.all()[0].enabled, False)
def test_wrong_url(self):
perm_setting = email.NOTIFICATIONS[0]
token, hash = UnsubscribeCode.create(self.user.email)
hash = hash[::-1] # Reverse the hash, so it's wrong
url = reverse('users.unsubscribe', args=[token, hash,
perm_setting.short])
r = self.client.get(url)
doc = pq(r.content)
eq_(doc('#unsubscribe-fail').length, 1)
class TestReset(UserViewBase):
fixtures = ['base/users']
def setUp(self):
super(TestReset, self).setUp()
user = UserProfile.objects.get(email='[email protected]')
self.token = [urlsafe_base64_encode(str(user.id)),
default_token_generator.make_token(user)]
def test_reset_msg(self):
res = self.client.get(reverse('users.pwreset_confirm',
args=self.token))
assert 'For your account' in res.content
def test_csrf_token_presence(self):
res = self.client.get(reverse('users.pwreset_confirm',
args=self.token))
assert 'csrfmiddlewaretoken' in res.content
def test_reset_fails(self):
res = self.client.post(reverse('users.pwreset_confirm',
args=self.token),
data={'new_password1': 'spassword',
'new_password2': 'spassword'})
eq_(res.context['form'].errors['new_password1'][0],
'Letters and numbers required.')
def test_reset_succeeds(self):
res = self.client.post(reverse('users.pwreset_confirm',
args=self.token),
data={'new_password1': 'password1',
'new_password2': 'password1'})
eq_(res.status_code, 302)
class TestLogout(UserViewBase):
def test_success(self):
user = UserProfile.objects.get(email='[email protected]')
self.client.login(username=user.email, password='password')
r = self.client.get('/', follow=True)
eq_(pq(r.content.decode('utf-8'))('.account .user').text(),
user.display_name)
eq_(pq(r.content)('.account .user').attr('title'), user.email)
r = self.client.get('/users/logout', follow=True)
assert not pq(r.content)('.account .user')
def test_redirect(self):
self.client.login(username='[email protected]', password='password')
self.client.get('/', follow=True)
url = '/en-US/about'
r = self.client.get(urlparams(reverse('users.logout'), to=url),
follow=True)
self.assertRedirects(r, url, status_code=302)
# Test a valid domain. Note that assertRedirects doesn't work on
# external domains
url = urlparams(reverse('users.logout'), to='/addon/new',
domain='builder')
r = self.client.get(url, follow=True)
to, code = r.redirect_chain[0]
self.assertEqual(to, 'https://builder.addons.mozilla.org/addon/new')
self.assertEqual(code, 302)
# Test an invalid domain
url = urlparams(reverse('users.logout'), to='/en-US/about',
domain='http://evil.com')
r = self.client.get(url, follow=True)
self.assertRedirects(r, '/en-US/about', status_code=302)
def test_session_cookie_should_be_http_only(self):
self.client.login(username='[email protected]', password='password')
r = self.client.get(reverse('users.logout'))
self.assertIn('httponly', str(r.cookies[settings.SESSION_COOKIE_NAME]))
class TestRegistration(UserViewBase):
def test_new_confirm(self):
# User doesn't have a confirmation code.
url = reverse('users.confirm', args=[self.user.id, 'code'])
r = self.client.get(url, follow=True)
is_anonymous = pq(r.content)('body').attr('data-anonymous')
eq_(json.loads(is_anonymous), True)
self.user.update(confirmationcode='code')
# URL has the wrong confirmation code.
url = reverse('users.confirm', args=[self.user.id, 'blah'])
r = self.client.get(url, follow=True)
self.assertContains(r, 'Invalid confirmation code!')
# URL has the right confirmation code.
url = reverse('users.confirm', args=[self.user.id, 'code'])
r = self.client.get(url, follow=True)
self.assertContains(r, 'Successfully verified!')
def test_new_confirm_resend(self):
# User doesn't have a confirmation code.
url = reverse('users.confirm.resend', args=[self.user.id])
r = self.client.get(url, follow=True)
self.user.update(confirmationcode='code')
# URL has the right confirmation code now.
r = self.client.get(url, follow=True)
self.assertContains(r, 'An email has been sent to your address')
def test_default_lang(self):
"""When a user registers, set its lang to the current locale."""
with self.activate('fr'):
url = reverse('users.register')
self.client.post(url, data={'email': '[email protected]',
'username': 'new',
'password': 'foobarbaz',
'password2': 'foobarbaz'})
user = UserProfile.objects.get(email='[email protected]')
eq_(user.lang, 'fr')
class TestProfileView(UserViewBase):
def setUp(self):
super(TestProfileView, self).setUp()
self.user = UserProfile.objects.create(homepage='http://example.com')
self.url = reverse('users.profile', args=[self.user.id])
def test_non_developer_homepage_url(self):
"""Don't display homepage url if the user is not a developer."""
r = self.client.get(self.url)
self.assertNotContains(r, self.user.homepage)
@patch.object(UserProfile, 'is_developer', True)
def test_developer_homepage_url(self):
"""Display homepage url for a developer user."""
r = self.client.get(self.url)
self.assertContains(r, self.user.homepage)
class TestProfileLinks(UserViewBase):
fixtures = ['base/appversion', 'base/featured', 'users/test_backends']
def test_edit_buttons(self):
"""Ensure admin/user edit buttons are shown."""
def get_links(id):
"""Grab profile, return edit links."""
url = reverse('users.profile', args=[id])
r = self.client.get(url)
return pq(r.content)('#profile-actions a')
# Anonymous user.
links = get_links(self.user.id)
eq_(links.length, 1)
eq_(links.eq(0).attr('href'), reverse('users.abuse',
args=[self.user.id]))
# Non-admin, someone else's profile.
self.client.login(username='[email protected]', password='password')
links = get_links(9945)
eq_(links.length, 1)
eq_(links.eq(0).attr('href'), reverse('users.abuse', args=[9945]))
# Non-admin, own profile.
links = get_links(self.user.id)
eq_(links.length, 1)
eq_(links.eq(0).attr('href'), reverse('users.edit'))
# Admin, someone else's profile.
admingroup = Group(rules='Users:Edit')
admingroup.save()
GroupUser.objects.create(group=admingroup, user=self.user)
cache.clear()
# Admin, own profile.
links = get_links(self.user.id)
eq_(links.length, 2)
eq_(links.eq(0).attr('href'), reverse('users.edit'))
# TODO XXX Uncomment when we have real user editing pages
#eq_(links.eq(1).attr('href') + "/",
#reverse('admin:users_userprofile_change', args=[self.user.id]))
def test_amouser(self):
# request.amo_user should be a special guy.
self.client.login(username='[email protected]', password='password')
response = self.client.get(reverse('home'))
request = response.context['request']
assert hasattr(request.amo_user, 'mobile_addons')
assert hasattr(request.user, 'mobile_addons')
assert hasattr(request.amo_user, 'favorite_addons')
assert hasattr(request.user, 'favorite_addons')
class TestProfileSections(amo.tests.TestCase):
fixtures = ['base/users', 'base/addon_3615',
'base/addon_5299_gcal', 'base/collections',
'reviews/dev-reply']
def setUp(self):
super(TestProfileSections, self).setUp()
self.user = UserProfile.objects.get(id=10482)
self.url = reverse('users.profile', args=[self.user.id])
def test_mine_anonymous(self):
res = self.client.get('/user/me/', follow=True)
eq_(res.status_code, 404)
def test_mine_authenticated(self):
self.login(self.user)
res = self.client.get('/user/me/', follow=True)
eq_(res.status_code, 200)
eq_(res.context['user'].id, self.user.id)
def test_my_last_login_anonymous(self):
res = self.client.get(self.url)
eq_(res.status_code, 200)
doc = pq(res.content)
eq_(doc('.last-login-time').length, 0)
eq_(doc('.last-login-ip').length, 0)
def test_my_last_login_authenticated(self):
self.user.update(last_login_ip='255.255.255.255')
self.login(self.user)
res = self.client.get(self.url)
eq_(res.status_code, 200)
doc = pq(res.content)
assert doc('.last-login-time td').text()
eq_(doc('.last-login-ip td').text(), '255.255.255.255')
def test_not_my_last_login(self):
res = self.client.get('/user/999/', follow=True)
eq_(res.status_code, 200)
doc = pq(res.content)
eq_(doc('.last-login-time').length, 0)
eq_(doc('.last-login-ip').length, 0)
def test_my_addons(self):
eq_(pq(self.client.get(self.url).content)('.num-addons a').length, 0)
AddonUser.objects.create(user=self.user, addon_id=3615)
AddonUser.objects.create(user=self.user, addon_id=5299)
r = self.client.get(self.url)
a = r.context['addons'].object_list
eq_(list(a), sorted(a, key=lambda x: x.weekly_downloads, reverse=True))
doc = pq(r.content)
eq_(doc('.num-addons a[href="#my-submissions"]').length, 1)
items = doc('#my-addons .item')
eq_(items.length, 2)
eq_(items('.install[data-addon=3615]').length, 1)
eq_(items('.install[data-addon=5299]').length, 1)
def test_my_unlisted_addons(self):
"""I can't see my own unlisted addons on my profile page."""
eq_(pq(self.client.get(self.url).content)('.num-addons a').length, 0)
AddonUser.objects.create(user=self.user, addon_id=3615)
Addon.objects.get(pk=5299).update(is_listed=False)
AddonUser.objects.create(user=self.user, addon_id=5299)
r = self.client.get(self.url)
assert list(r.context['addons'].object_list) == [
Addon.objects.get(pk=3615)]
doc = pq(r.content)
items = doc('#my-addons .item')
eq_(items.length, 1)
eq_(items('.install[data-addon=3615]').length, 1)
def test_not_my_unlisted_addons(self):
"""I can't see others' unlisted addons on their profile pages."""
res = self.client.get('/user/999/', follow=True)
eq_(pq(res.content)('.num-addons a').length, 0)
user = UserProfile.objects.get(pk=999)
AddonUser.objects.create(user=user, addon_id=3615)
Addon.objects.get(pk=5299).update(is_listed=False)
AddonUser.objects.create(user=user, addon_id=5299)
r = self.client.get('/user/999/', follow=True)
assert list(r.context['addons'].object_list) == [
Addon.objects.get(pk=3615)]
doc = pq(r.content)
items = doc('#my-addons .item')
eq_(items.length, 1)
eq_(items('.install[data-addon=3615]').length, 1)
def test_my_personas(self):
eq_(pq(self.client.get(self.url).content)('.num-addons a').length, 0)
a = amo.tests.addon_factory(type=amo.ADDON_PERSONA)
AddonUser.objects.create(user=self.user, addon=a)
r = self.client.get(self.url)
doc = pq(r.content)
items = doc('#my-themes .persona')
eq_(items.length, 1)
eq_(items('a[href="%s"]' % a.get_url_path()).length, 1)
def test_my_reviews(self):
r = Review.objects.filter(reply_to=None)[0]
r.update(user=self.user)
cache.clear()
self.assertSetEqual(self.user.reviews, [r])
r = self.client.get(self.url)
doc = pq(r.content)('#reviews')
assert not doc.hasClass('full'), (
'reviews should not have "full" class when there are collections')
eq_(doc('.item').length, 1)
eq_(doc('#review-218207').length, 1)
# Edit Review form should be present.
self.assertTemplateUsed(r, 'reviews/edit_review.html')
def test_my_reviews_delete_link(self):
review = Review.objects.filter(reply_to=None)[0]
review.user_id = 999
review.save()
cache.clear()
slug = Addon.objects.get(id=review.addon_id).slug
delete_url = reverse('addons.reviews.delete', args=[slug, review.pk])
def _get_reviews(username, password):
self.client.login(username=username, password=password)
r = self.client.get(reverse('users.profile', args=[999]))
doc = pq(r.content)('#reviews')
return doc('#review-218207 .item-actions a.delete-review')
# Admins get the Delete Review link.
r = _get_reviews(username='[email protected]', password='password')
eq_(r.length, 1)
eq_(r.attr('href'), delete_url)
# Editors get the Delete Review link.
r = _get_reviews(username='[email protected]', password='password')
eq_(r.length, 1)
eq_(r.attr('href'), delete_url)
# Author gets the Delete Review link.
r = _get_reviews(username='[email protected]', password='password')
eq_(r.length, 1)
eq_(r.attr('href'), delete_url)
# Other user does not get the Delete Review link.
r = _get_reviews(username='[email protected]', password='password')
eq_(r.length, 0)
def test_my_reviews_no_pagination(self):
r = self.client.get(self.url)
assert len(self.user.addons_listed) <= 10, (
'This user should have fewer than 10 add-ons.')
eq_(pq(r.content)('#my-addons .paginator').length, 0)
def test_my_reviews_pagination(self):
for i in xrange(20):
AddonUser.objects.create(user=self.user, addon_id=3615)
assert len(self.user.addons_listed) > 10, (
'This user should have way more than 10 add-ons.')
r = self.client.get(self.url)
eq_(pq(r.content)('#my-addons .paginator').length, 1)
def test_my_collections_followed(self):
coll = Collection.objects.all()[0]
CollectionWatcher.objects.create(collection=coll, user=self.user)
mine = Collection.objects.listed().filter(following__user=self.user)
eq_(list(mine), [coll])
r = self.client.get(self.url)
self.assertTemplateUsed(r, 'bandwagon/users/collection_list.html')
eq_(list(r.context['fav_coll']), [coll])
doc = pq(r.content)
eq_(doc('#reviews.full').length, 0)
ul = doc('#my-collections #my-favorite')
eq_(ul.length, 1)
li = ul.find('li')
eq_(li.length, 1)
a = li.find('a')
eq_(a.attr('href'), coll.get_url_path())
eq_(a.text(), unicode(coll.name))
def test_my_collections_created(self):
coll = Collection.objects.listed().filter(author=self.user)
eq_(len(coll), 1)
r = self.client.get(self.url)
self.assertTemplateUsed(r, 'bandwagon/users/collection_list.html')
self.assertSetEqual(r.context['own_coll'], coll)
doc = pq(r.content)
eq_(doc('#reviews.full').length, 0)
ul = doc('#my-collections #my-created')
eq_(ul.length, 1)
li = ul.find('li')
eq_(li.length, 1)
a = li.find('a')
eq_(a.attr('href'), coll[0].get_url_path())
eq_(a.text(), unicode(coll[0].name))
def test_no_my_collections(self):
Collection.objects.filter(author=self.user).delete()
r = self.client.get(self.url)
self.assertTemplateNotUsed(r, 'bandwagon/users/collection_list.html')
doc = pq(r.content)
eq_(doc('#my-collections').length, 0)
eq_(doc('#reviews.full').length, 1)
def test_review_abuse_form(self):
r = self.client.get(self.url)
self.assertTemplateUsed(r, 'reviews/report_review.html')
def test_user_abuse_form(self):
abuse_url = reverse('users.abuse', args=[self.user.id])
r = self.client.get(self.url)
doc = pq(r.content)
button = doc('#profile-actions #report-user-abuse')
eq_(button.length, 1)
eq_(button.attr('href'), abuse_url)
modal = doc('#popup-staging #report-user-modal.modal')
eq_(modal.length, 1)
eq_(modal('form').attr('action'), abuse_url)
eq_(modal('textarea[name=text]').length, 1)
self.assertTemplateUsed(r, 'users/report_abuse.html')
def test_no_self_abuse(self):
self.client.login(username='[email protected]', password='password')
r = self.client.get(self.url)
doc = pq(r.content)
eq_(doc('#profile-actions #report-user-abuse').length, 0)
eq_(doc('#popup-staging #report-user-modal.modal').length, 0)
self.assertTemplateNotUsed(r, 'users/report_abuse.html')
class TestThemesProfile(amo.tests.TestCase):
fixtures = ['base/user_2519']
def setUp(self):
super(TestThemesProfile, self).setUp()
self.user = UserProfile.objects.get(pk=2519)
self.url = self.user.get_user_url('themes')
def _test_good(self, res):
eq_(res.status_code, 200)
ids = res.context['addons'].object_list.values_list('id', flat=True)
self.assertSetEqual(ids, [self.theme.id])
doc = pq(res.content)
eq_(doc('.no-results').length, 0)
results = doc('.personas-grid .persona-preview')
eq_(results.length, 1)
eq_(results.find('h6').text(), unicode(self.theme.name))
def test_bad_user(self):
res = self.client.get(reverse('users.themes', args=['yolo']))
eq_(res.status_code, 404)
def test_no_themes(self):
res = self.client.get(self.url)
eq_(res.status_code, 200)
eq_(pq(res.content)('.no-results').length, 1)
def test_themes(self):
self.theme = amo.tests.addon_factory(type=amo.ADDON_PERSONA)
self.theme.addonuser_set.create(user=self.user, listed=True)
res = self.client.get(self.url)
self._test_good(res)
def test_bad_category(self):
res = self.client.get(reverse('users.themes', args=['yolo', 'swag']))
eq_(res.status_code, 404)
def test_empty_category(self):
self.theme = amo.tests.addon_factory(type=amo.ADDON_PERSONA)
self.theme.addonuser_set.create(user=self.user, listed=True)
cat = Category.objects.create(type=amo.ADDON_PERSONA, slug='swag')
res = self.client.get(
self.user.get_user_url('themes', args=[cat.slug]))
eq_(res.status_code, 200)
def test_themes_category(self):
self.theme = amo.tests.addon_factory(type=amo.ADDON_PERSONA)
self.theme.addonuser_set.create(user=self.user, listed=True)
cat = Category.objects.create(type=amo.ADDON_PERSONA, slug='swag')
self.theme.addoncategory_set.create(category=cat)
res = self.client.get(
self.user.get_user_url('themes', args=[cat.slug]))
self._test_good(res)
@patch.object(settings, 'RECAPTCHA_PRIVATE_KEY', 'something')
class TestReportAbuse(amo.tests.TestCase):
fixtures = ['base/users']
def setUp(self):
super(TestReportAbuse, self).setUp()
self.full_page = reverse('users.abuse', args=[10482])
@patch('captcha.fields.ReCaptchaField.clean')
def test_abuse_anonymous(self, clean):
clean.return_value = ""
self.client.post(self.full_page, {'text': 'spammy'})
eq_(len(mail.outbox), 1)
assert 'spammy' in mail.outbox[0].body
report = AbuseReport.objects.get(user=10482)
eq_(report.message, 'spammy')
eq_(report.reporter, None)
def test_abuse_anonymous_fails(self):
r = self.client.post(self.full_page, {'text': 'spammy'})
assert 'recaptcha' in r.context['abuse_form'].errors
def test_abuse_logged_in(self):
self.client.login(username='[email protected]', password='password')
self.client.post(self.full_page, {'text': 'spammy'})
eq_(len(mail.outbox), 1)
assert 'spammy' in mail.outbox[0].body
report = AbuseReport.objects.get(user=10482)
eq_(report.message, 'spammy')
eq_(report.reporter.email, '[email protected]')
r = self.client.get(self.full_page)
eq_(pq(r.content)('.notification-box h2').length, 1)
|
bsd-3-clause
|
mitya57/debian-buildbot
|
buildbot/changes/base.py
|
2
|
3499
|
# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from twisted.application import service
from twisted.internet import defer
from twisted.internet import reactor
from twisted.internet import task
from twisted.python import log
from zope.interface import implements
from buildbot import util
from buildbot.interfaces import IChangeSource
class ChangeSource(service.Service, util.ComparableMixin):
implements(IChangeSource)
master = None
"if C{self.running} is true, then C{cs.master} points to the buildmaster."
def describe(self):
pass
class PollingChangeSource(ChangeSource):
"""
Utility subclass for ChangeSources that use some kind of periodic polling
operation. Subclasses should define C{poll} and set C{self.pollInterval}.
The rest is taken care of.
Any subclass will be available via the "poller" webhook.
"""
pollInterval = 60
"time (in seconds) between calls to C{poll}"
pollAtLaunch = False
"determines when the first poll occurs. True = immediately on launch, False = wait for one pollInterval."
_loop = None
def __init__(self, name=None, pollInterval=60 * 10, pollAtLaunch=False):
if name:
self.setName(name)
self.pollInterval = pollInterval
self.pollAtLaunch = pollAtLaunch
self.doPoll = util.misc.SerializedInvocation(self.doPoll)
def doPoll(self):
"""
This is the method that is called by LoopingCall to actually poll.
It may also be called by change hooks to request a poll.
        It is serialized - if you call it while a poll is in progress,
        then the second invocation won't start until the first has finished.
"""
d = defer.maybeDeferred(self.poll)
d.addErrback(log.err, 'while polling for changes')
return d
def poll(self):
"""
Perform the polling operation, and return a deferred that will fire
when the operation is complete. Failures will be logged, but the
method will be called again after C{pollInterval} seconds.
"""
def startLoop(self):
self._loop = task.LoopingCall(self.doPoll)
self._loop.start(self.pollInterval, now=self.pollAtLaunch)
def stopLoop(self):
if self._loop and self._loop.running:
self._loop.stop()
self._loop = None
def startService(self):
ChangeSource.startService(self)
# delay starting doing anything until the reactor is running - if
# services are still starting up, they may miss an initial flood of
# changes
if self.pollInterval:
reactor.callWhenRunning(self.startLoop)
else:
reactor.callWhenRunning(self.doPoll)
def stopService(self):
self.stopLoop()
return ChangeSource.stopService(self)
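# A minimal, hypothetical sketch (not part of Buildbot itself) of the intended
# subclassing pattern: define poll() to return a Deferred and let the inherited
# startLoop/doPoll machinery drive it. All names below are illustrative.
class ExamplePoller(PollingChangeSource):
    def __init__(self):
        PollingChangeSource.__init__(self, name='example',
                                     pollInterval=30, pollAtLaunch=True)
    def poll(self):
        # Failures raised here are logged by doPoll's errback.
        return defer.succeed(None)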
|
gpl-2.0
|
lavvy/xbmc
|
lib/gtest/scripts/upload_gtest.py
|
1963
|
2851
|
#!/usr/bin/env python
#
# Copyright 2009, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""upload_gtest.py v0.1.0 -- uploads a Google Test patch for review.
This simple wrapper passes all command line flags and
[email protected] to upload.py.
USAGE: upload_gtest.py [options for upload.py]
"""
__author__ = '[email protected] (Zhanyong Wan)'
import os
import sys
CC_FLAG = '--cc='
GTEST_GROUP = '[email protected]'
def main():
# Finds the path to upload.py, assuming it is in the same directory
# as this file.
my_dir = os.path.dirname(os.path.abspath(__file__))
upload_py_path = os.path.join(my_dir, 'upload.py')
# Adds Google Test discussion group to the cc line if it's not there
# already.
upload_py_argv = [upload_py_path]
found_cc_flag = False
for arg in sys.argv[1:]:
if arg.startswith(CC_FLAG):
found_cc_flag = True
cc_line = arg[len(CC_FLAG):]
cc_list = [addr for addr in cc_line.split(',') if addr]
if GTEST_GROUP not in cc_list:
cc_list.append(GTEST_GROUP)
upload_py_argv.append(CC_FLAG + ','.join(cc_list))
else:
upload_py_argv.append(arg)
if not found_cc_flag:
upload_py_argv.append(CC_FLAG + GTEST_GROUP)
# Invokes upload.py with the modified command line flags.
os.execv(upload_py_path, upload_py_argv)
if __name__ == '__main__':
main()
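# A small worked illustration (addresses assumed) of the cc-merging rule in
# main(): an existing --cc list gains GTEST_GROUP exactly once, and a missing
# --cc flag is synthesized from scratch.
#   ['--cc=dev@example.com']   -> ['--cc=dev@example.com,' + GTEST_GROUP]
#   ['--cc=' + GTEST_GROUP]    -> unchanged
#   []                         -> ['--cc=' + GTEST_GROUP]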
|
gpl-2.0
|
mano3m/CouchPotatoServer
|
libs/subliminal/subtitles.py
|
136
|
5252
|
# -*- coding: utf-8 -*-
# Copyright 2011-2012 Antoine Bertin <[email protected]>
#
# This file is part of subliminal.
#
# subliminal is free software; you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# subliminal is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with subliminal. If not, see <http://www.gnu.org/licenses/>.
from .language import Language
from .utils import to_unicode
import os.path
__all__ = ['Subtitle', 'EmbeddedSubtitle', 'ExternalSubtitle', 'ResultSubtitle', 'get_subtitle_path']
#: Subtitles extensions
EXTENSIONS = ['.srt', '.sub', '.txt', '.ass']
class Subtitle(object):
"""Base class for subtitles
:param string path: path to the subtitle
:param language: language of the subtitle
:type language: :class:`~subliminal.language.Language`
"""
def __init__(self, path, language):
if not isinstance(language, Language):
            raise TypeError('%r is not an instance of Language' % language)
self.path = path
self.language = language
@property
def exists(self):
"""Whether the subtitle exists or not"""
if self.path:
return os.path.exists(self.path)
return False
def __unicode__(self):
return to_unicode(self.path)
def __str__(self):
return unicode(self).encode('utf-8')
def __repr__(self):
return '%s(%s, %s)' % (self.__class__.__name__, self, self.language)
class EmbeddedSubtitle(Subtitle):
"""Subtitle embedded in a container
:param string path: path to the subtitle
:param language: language of the subtitle
:type language: :class:`~subliminal.language.Language`
:param int track_id: id of the subtitle track in the container
"""
def __init__(self, path, language, track_id):
super(EmbeddedSubtitle, self).__init__(path, language)
self.track_id = track_id
@classmethod
def from_enzyme(cls, path, subtitle):
language = Language(subtitle.language, strict=False)
return cls(path, language, subtitle.trackno)
class ExternalSubtitle(Subtitle):
"""Subtitle in a file next to the video file"""
@classmethod
def from_path(cls, path):
"""Create an :class:`ExternalSubtitle` from path"""
extension = None
for e in EXTENSIONS:
if path.endswith(e):
extension = e
break
if extension is None:
raise ValueError('Not a supported subtitle extension')
language = Language(os.path.splitext(path[:len(path) - len(extension)])[1][1:], strict=False)
return cls(path, language)
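    # Worked example (filename assumed): from_path('movie.en.srt') strips the
    # '.srt' extension, then takes the extension of the remainder 'movie.en',
    # giving 'en' and hence Language('en', strict=False).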
class ResultSubtitle(ExternalSubtitle):
"""Subtitle found using :mod:`~subliminal.services`
:param string path: path to the subtitle
:param language: language of the subtitle
:type language: :class:`~subliminal.language.Language`
:param string service: name of the service
:param string link: download link for the subtitle
:param string release: release name of the video
:param float confidence: confidence that the subtitle matches the video according to the service
:param set keywords: keywords that describe the subtitle
"""
def __init__(self, path, language, service, link, release=None, confidence=1, keywords=None):
super(ResultSubtitle, self).__init__(path, language)
self.service = service
self.link = link
self.release = release
self.confidence = confidence
self.keywords = keywords or set()
@property
def single(self):
"""Whether this is a single subtitle or not. A single subtitle does not have
a language indicator in its file name
:rtype: bool
"""
return self.language == Language('Undetermined')
def __repr__(self):
if not self.release:
return 'ResultSubtitle(%s, %s, %s, %.2f)' % (self.path, self.language, self.service, self.confidence)
return 'ResultSubtitle(%s, %s, %s, %.2f, release=%s)' % (self.path, self.language, self.service, self.confidence, self.release.encode('ascii', 'ignore'))
def get_subtitle_path(video_path, language, multi):
"""Create the subtitle path from the given video path using language if multi
:param string video_path: path to the video
:param language: language of the subtitle
:type language: :class:`~subliminal.language.Language`
:param bool multi: whether to use multi language naming or not
:return: path of the subtitle
:rtype: string
"""
if not os.path.exists(video_path):
path = os.path.splitext(os.path.basename(video_path))[0]
else:
path = os.path.splitext(video_path)[0]
if multi and language:
return path + '.%s%s' % (language.alpha2, EXTENSIONS[0])
return path + '%s' % EXTENSIONS[0]
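# Hedged usage sketch (paths assumed) for get_subtitle_path. For an existing
# video '/films/movie.mkv' and an English Language object:
#   get_subtitle_path('/films/movie.mkv', language, True)  -> '/films/movie.en.srt'
#   get_subtitle_path('/films/movie.mkv', language, False) -> '/films/movie.srt'
# If the video path does not exist, only its basename is used to build the path.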
|
gpl-3.0
|
noblisnsp/volatility
|
volatility/plugins/dumpfiles.py
|
44
|
52895
|
# Volatility
# Copyright (C) 2012-13 Volatility Foundation
#
# This file is part of Volatility.
#
# Volatility is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License Version 2 as
# published by the Free Software Foundation. You may not use, modify or
# distribute this program under any other version of the GNU General
# Public License.
#
# Volatility is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Volatility. If not, see <http://www.gnu.org/licenses/>.
#
#pylint: disable-msg=C0111
import os
import re
import math
import volatility.obj as obj
import volatility.utils as utils
import volatility.debug as debug
import volatility.win32.tasks as tasks_mod
import volatility.win32.modules as modules
import volatility.plugins.common as common
import volatility.plugins.taskmods as taskmods
import json
#--------------------------------------------------------------------------------
# Constants
#--------------------------------------------------------------------------------
PAGE_SIZE = 0x1000
PAGE_MASK = PAGE_SIZE - 1
IMAGE_EXT = "img"
DATA_EXT = "dat"
FILEOFFSET_MASK = 0xFFFFFFFFFFFF0000
VACB_BLOCK = 0x40000
VACB_ARRAY = 0x80
VACB_OFFSET_SHIFT = 18
VACB_LEVEL_SHIFT = 7
VACB_SIZE_OF_FIRST_LEVEL = 1 << (VACB_OFFSET_SHIFT + VACB_LEVEL_SHIFT)
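# Sanity check of the constant above: 1 << (VACB_OFFSET_SHIFT + VACB_LEVEL_SHIFT)
# == 1 << 25 == 32 MB, the largest section a single-level VACB index array can
# cover. This matches the 32 MB threshold referenced in the comments below.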
class _CONTROL_AREA(obj.CType):
def extract_ca_file(self, unsafe = False):
""" Extracts a file from a specified CONTROL_AREA
Attempts to extract the memory resident pages pertaining to a
particular CONTROL_AREA object.
Args:
control_area: Instance of a CONTROL_AREA object
unsafe: Relax safety constraints for more data
Returns:
mdata: List of pages, (physoffset, fileoffset, size) tuples, that are memory resident
            zpad: List of pages, (offset, size) tuples, that are not memory resident
Raises:
"""
zpad = []
mdata = []
# Depending on the particular address space being used we need to
# determine if the MMPTE will be either 4 or 8 bytes. The x64
# and IA32_PAE both use 8 byte PTEs. Whereas, IA32 uses 4 byte
# PTE entries.
memory_model = self.obj_vm.profile.metadata.get('memory_model', '32bit')
pae = self.obj_vm.pae
if pae:
mmpte_size = self.obj_vm.profile.get_obj_size("_MMPTEPA")
else:
mmpte_size = self.obj_vm.profile.get_obj_size("_MMPTE")
# Calculate the size of the _CONTROL_AREA object. It is used to find
# the correct offset for the SUBSECTION object and the size of the
# CONTROL_AREA can differ between versions of Windows.
control_area_size = self.size()
# The segment is used to describe the physical view of the
# file. We also use this as a semantic check to see if
# the processing should continue. If the Segment address
# is invalid, then we return.
Segment = self.Segment
if not Segment.is_valid():
return mdata, zpad
# The next semantic check validates that the _SEGMENT object
# points back to the appropriate _CONTROL_AREA object. If the
# check is invalid, then we return.
if (self.obj_offset != Segment.ControlArea):
return mdata, zpad
# This is a semantic check added to make sure the Segment.SizeOfSegment value
        # is consistent with the Segment.TotalNumberOfPtes. This occurs frequently
# when traversing through CONTROL_AREA Objects (~5%), often leading to
# impossible values. Thus, to be conservative we do not proceed if the
# Segment does not seem sound.
if Segment.SizeOfSegment != (Segment.TotalNumberOfPtes * PAGE_SIZE):
return mdata, zpad
# The _SUBSECTION object is typically found immediately following
# the CONTROL_AREA object. For Image Section Objects, the SUBSECTIONS
        # typically correspond with the sections found in the PE. On the other hand,
# for Data Section Objects, there is typically only a single valid SUBSECTION.
subsection_offset = self.obj_offset + control_area_size
#subsection = obj.Object("_SUBSECTION", subsection_offset, self.kaddr_space)
subsection = obj.Object("_SUBSECTION", subsection_offset, self.obj_vm)
# This was another check which was inspired by Ruud's code. It
        # verifies that the first SubsectionBase (Mmst) never starts
# at the beginning of a page. The UNSAFE option allows us to
# ignore this constraint. This was necessary for dumping file data
# for file objects found with filescan (ie $Mft)
SubsectionBase = subsection.SubsectionBase
if (SubsectionBase & PAGE_MASK == 0x0) and not unsafe:
return mdata, zpad
# We obtain the Subsections associated with this file
# by traversing the singly linked list. Ideally, this
        # list should be null (0) terminated. Upon occasion we
        # have seen instances where the link pointers are
        # undefined (XXX). If we hit an invalid pointer, then we
        # exit the traversal.
while subsection.is_valid() and subsection.v() != 0x0:
if not subsection:
break
# This constraint makes sure that the _SUBSECTION object
# points back to the associated CONTROL_AREA object. Otherwise,
# we exit the traversal.
if (self.obj_offset != subsection.ControlArea):
break
# Extract subsection meta-data into local variables
# this helps with performance and not having to do
# repetitive lookups.
PtesInSubsection = subsection.PtesInSubsection
SubsectionBase = subsection.SubsectionBase
NextSubsection = subsection.NextSubsection
            # The offset into the file is stored implicitly
# based on the PTE's location within the Subsection.
StartingSector = subsection.StartingSector
SubsectionOffset = StartingSector * 0x200
# This was another check based on something Ruud
            # had done. We also saw instances where DataSectionObjects
            # would hit a SubsectionBase that was page aligned
# and hit strange data. In those instances, the
# MMPTE SubsectionAddress would not point to the associated
# Subsection. (XXX)
if (SubsectionBase & PAGE_MASK == 0x0) and not unsafe:
break
ptecount = 0
while (ptecount < PtesInSubsection):
pteoffset = SubsectionBase + (mmpte_size * ptecount)
FileOffset = SubsectionOffset + ptecount * 0x1000
# The size of MMPTE changes depending on if it is IA32 (4 bytes)
# or IA32_PAE/AMD64 (8 bytes).
objname = "_MMPTE"
if pae:
objname = "_MMPTEPA"
mmpte = obj.Object(objname, offset = pteoffset, vm = \
subsection.obj_vm)
if not mmpte:
ptecount += 1
continue
# First we check if the entry is valid. If the entry is valid
# then we get the physical offset. The valid entries are actually
# handled by the hardware.
if mmpte.u.Hard.Valid == 0x1:
# There are some valid Page Table entries where bit 63
# is used to specify if the page is executable. This is
# maintained by the processor. If it is not executable,
# then the bit is set. Within the Intel documentation,
# this is known as the Execute-disable (XD) flag. Regardless,
# we will use the get_phys_addr method from the address space
# to obtain the physical address.
### Should we check the size of the PAGE? Haven't seen
# a hit for LargePage.
#if mmpte.u.Hard.LargePage == 0x1:
# print "LargePage"
physoffset = mmpte.u.Hard.PageFrameNumber << 12
mdata.append([physoffset, FileOffset, PAGE_SIZE])
ptecount += 1
continue
elif mmpte.u.Soft.Prototype == 0x1:
# If the entry is not a valid physical address then
# we check if it contains a pointer back to the SUBSECTION
# object. If so, the page is in the backing file and we will
# need to pad to maintain spacial integrity of the file. This
# check needs to be performed for looking for the transition flag.
# The prototype PTEs are initialized as MMPTE_SUBSECTION with the
# SubsectionAddress.
# On x86 systems that use 4 byte MMPTE , the MMPTE_SUBSECTION
# stores an "encoded" version of the SUBSECTION object address.
# The data is relative to global variable (MmSubsectionBase or
# MmNonPagedPoolEnd) depending on the WhichPool member of
# _SUBSECTION. This applies to x86 systems running ntoskrnl.exe.
# If bit 10 is set then it is prototype/subsection
if (memory_model == "32bit") and not pae:
SubsectionOffset = \
((mmpte.u.Subsect.SubsectionAddressHigh << 7) |
(mmpte.u.Subsect.SubsectionAddressLow << 3))
#WhichPool = mmpte.u.Subsect.WhichPool
#print "mmpte 0x%x ptecount 0x%x sub-32 0x%x pteoffset 0x%x which 0x%x subdelta 0x%x"%(mmpte.u.Long,ptecount,subsection_offset,pteoffset,WhichPool,SubsectionOffset)
zpad.append([FileOffset, PAGE_SIZE])
ptecount += 1
continue
if memory_model == "64bit" or pae:
SubsectionAddress = mmpte.u.Subsect.SubsectionAddress
else:
SubsectionAddress = mmpte.u.Long
if SubsectionAddress == subsection.obj_offset:
# sub proto/prot 4c0 420
#print "mmpte 0x%x ptecount 0x%x sub 0x%x offset 0x%x"%(mmpte.u.Long,ptecount,SubsectionAddress,pteoffset)
zpad.append([FileOffset, PAGE_SIZE])
ptecount += 1
continue
elif (SubsectionAddress == (subsection.obj_offset + 4)):
# This was a special case seen on IA32_PAE systems where
# the SubsectionAddress pointed to subsection.obj_offset+4
# (0x420, 0x460, 0x4a0)
#print "mmpte 0x%x ptecount 0x%x sub+4 0x%x offset 0x%x"%(mmpte.u.Long,ptecount,SubsectionAddress,pteoffset)
zpad.append([FileOffset, PAGE_SIZE])
ptecount += 1
continue
else:
#print "mmpte 0x%x ptecount 0x%x sub_unk 0x%x offset 0x%x suboffset 0x%x"%(mmpte.u.Long,ptecount,SubsectionAddress,pteoffset,subsection.obj_offset)
zpad.append([FileOffset, PAGE_SIZE])
ptecount += 1
continue
# Check if the entry is a DemandZero entry.
elif (mmpte.u.Soft.Transition == 0x0):
if ((mmpte.u.Soft.PageFileLow == 0x0) and
(mmpte.u.Soft.PageFileHigh == 0x0)):
# Example entries include: a0,e0
#print "mmpte 0x%x ptecount 0x%x zero offset 0x%x subsec 0x%x"%(mmpte.u.Long,ptecount,pteoffset,subsection.obj_offset)
zpad.append([FileOffset, PAGE_SIZE])
ptecount += 1
else:
#print "mmpte 0x%x ptecount 0x%x paged offset 0x%x subsec 0x%x file 0x%x offset 0x%x"%(mmpte.u.Long,ptecount,pteoffset,subsection.obj_offset,mmpte.u.Soft.PageFileLow,mmpte.u.Soft.PageFileHigh)
zpad.append([FileOffset, PAGE_SIZE])
ptecount += 1
# If the entry is not a valid physical address then
# we also check to see if it is in transition.
elif mmpte.u.Trans.Transition == 0x1:
physoffset = mmpte.u.Trans.PageFrameNumber << 12
#print "mmpte 0x%x ptecount 0x%x transition 0x%x offset 0x%x"%(mmpte.u.Long,ptecount,physoffset,pteoffset)
mdata.append([physoffset, FileOffset, PAGE_SIZE])
ptecount += 1
continue
else:
# This is a catch all for all the other entry types.
# sub proto/pro 420,4e0,460,4a0 (x64 +0x28)(x32 +4)
# other a0,e0,0, (20,60)
# 0x80000000
#print "mmpte 0x%x ptecount 0x%x other offset 0x%x subsec 0x%x"%(mmpte.u.Long,ptecount,pteoffset,subsection.obj_offset)
zpad.append([FileOffset, PAGE_SIZE])
ptecount += 1
# Traverse the singly linked list to its next member.
subsection = NextSubsection
return (mdata, zpad)
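    # Illustration (offsets assumed) of the return shape of extract_ca_file():
    #   mdata -> [[0x1a2b000, 0x0, 0x1000], ...]  (physical offset, file offset, size)
    #   zpad  -> [[0x1000, 0x1000], ...]          (file offset, size)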
class _SHARED_CACHE_MAP(obj.CType):
def is_valid(self):
if not obj.CType.is_valid(self):
return False
# Added a semantic check to make sure the data is in a sound state. It's better
# to catch it early.
FileSize = self.FileSize.QuadPart
ValidDataLength = self.ValidDataLength.QuadPart
SectionSize = self.SectionSize.QuadPart
#print "SectionSize 0x%x < 0 or FileSize < 0x%x ValidDataLength 0x%x"%(SectionSize,FileSize,ValidDataLength)
#if SectionSize < 0 or (FileSize < ValidDataLength):
if SectionSize < 0 or ((FileSize < ValidDataLength) and (ValidDataLength != 0x7fffffffffffffff)):
return False
return True
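    # Illustrative values (assumed): FileSize = 0x1000, ValidDataLength =
    # 0x7fffffffffffffff, SectionSize = 0x40000 still passes is_valid(),
    # because that sentinel ValidDataLength is explicitly tolerated above.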
def process_index_array(self, array_pointer, level, limit, vacbary = None):
""" Recursively process the sparse multilevel VACB index array
Args:
array_pointer: The address of a possible index array
shared_cache_map: The associated SHARED_CACHE_MAP object
level: The current level
limit: The level where we abandon all hope. Ideally this is 7
vacbary: An array of collected VACBs
Returns:
vacbary: Collected VACBs
"""
if vacbary is None:
vacbary = []
if level > limit:
return []
# Create an array of VACB entries
VacbArray = obj.Object("Array", offset = array_pointer, \
vm = self.obj_vm, count = VACB_ARRAY, \
targetType = "address", parent = self)
# Iterate through the entries
for _i in range(0, VACB_ARRAY):
# Check if the VACB entry is in use
if VacbArray[_i] == 0x0:
continue
Vacbs = obj.Object("_VACB", offset = int(VacbArray[_i]), vm = self.obj_vm)
# Check if this is a valid VACB entry by verifying
# the SharedCacheMap member.
if Vacbs.SharedCacheMap == self.obj_offset:
# This is a VACB associated with this cache map
vacbinfo = self.extract_vacb(Vacbs, VACB_BLOCK)
if vacbinfo:
vacbary.append(vacbinfo)
else:
#Process the next level of the multi-level array
vacbary = self.process_index_array(VacbArray[_i], level + 1, limit, vacbary)
#vacbary = vacbary + _vacbary
return vacbary
def extract_vacb(self, vacbs, size):
""" Extracts data from a specified VACB
Attempts to extract the memory resident data from a specified
VACB.
Args:
vacbs: The VACB object
size: How much data should be read from the VACB
shared_cache_map: The associated SHARED_CACHE_MAP object
Returns:
vacbinfo: Extracted VACB meta-information
"""
# This is used to collect summary information. We will eventually leverage this
# when creating the externally exposed APIs.
vacbinfo = {}
# Check if the Overlay member of _VACB is resident
# The Overlay member stores information about the FileOffset
# and the ActiveCount. This is just another proactive check
# to make sure the objects are seemingly sound.
if not vacbs.Overlay:
return vacbinfo
# We should add another check to make sure that
# the SharedCacheMap member of the VACB points back
# to the corresponding SHARED_CACHE_MAP
if vacbs.SharedCacheMap != self.v():
return vacbinfo
# The FileOffset member of VACB is used to denote the
# offset within the file where the view begins. Since all
# views are 256 KB in size, the bottom 16 bits are used to
# store the number of references to the view.
FileOffset = vacbs.Overlay.FileOffset.QuadPart
if not FileOffset:
return vacbinfo
ActiveCount = vacbs.Overlay.ActiveCount
FileOffset = FileOffset & FILEOFFSET_MASK
BaseAddress = vacbs.BaseAddress.v()
vacbinfo['foffset'] = int(FileOffset)
vacbinfo['acount'] = int(ActiveCount)
vacbinfo['voffset'] = int(vacbs.obj_offset)
vacbinfo['baseaddr'] = int(BaseAddress)
vacbinfo['size'] = int(size)
return vacbinfo
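    # Worked masking example (value assumed): each view spans 256 KB, so for a
    # raw Overlay.FileOffset of 0x4000f,
    #   0x4000f & FILEOFFSET_MASK -> 0x40000 (the view at file offset 256 KB);
    # the low 16 reference-count bits (0xf) are discarded.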
def extract_scm_file(self):
""" Extracts a file from a specified _SHARED_CACHE_MAP
Attempts to extract the memory resident pages pertaining to a
particular _SHARED_CACHE_MAP object.
Args:
shared_cache_map: Instance of a _SHARED_CACHE_MAP object
Returns:
vacbary: List of collected VACB meta information.
Raises:
"""
vacbary = []
if self.obj_offset == 0x0:
return
# Added a semantic check to make sure the data is in a sound state.
#FileSize = shared_cache_map.FileSize.QuadPart
#ValidDataLength = shared_cache_map.ValidDataLength.QuadPart
SectionSize = self.SectionSize.QuadPart
# Let's begin by determining the number of Virtual Address Control
# Blocks (VACB) that are stored within the cache (nonpaged). A VACB
        # represents one 256-KB view in the system cache. There are a couple
# options to use for the data size: ValidDataLength, FileSize,
# and SectionSize.
full_blocks = SectionSize / VACB_BLOCK
left_over = SectionSize % VACB_BLOCK
# As an optimization, the shared cache map object contains a VACB index
# array of four entries. The VACB index arrays are arrays of pointers
# to VACBs, that track which views of a given file are mapped in the cache.
# For example, the first entry in the VACB index array refers to the first
# 256 KB of the file. The InitialVacbs can describe a file up to 1 MB (4xVACB).
iterval = 0
while (iterval < full_blocks) and (full_blocks <= 4):
Vacbs = self.InitialVacbs[iterval]
vacbinfo = self.extract_vacb(Vacbs, VACB_BLOCK)
if vacbinfo: vacbary.append(vacbinfo)
iterval += 1
# We also have to account for the spill over data
# that is not found in the full blocks. The first case to
# consider is when the spill over is still in InitialVacbs.
if (left_over > 0) and (full_blocks < 4):
Vacbs = self.InitialVacbs[iterval]
vacbinfo = self.extract_vacb(Vacbs, left_over)
if vacbinfo: vacbary.append(vacbinfo)
        # If the file is larger than 1 MB, a separate VACB index array
# needs to be allocated. This is based on how many 256 KB blocks
# would be required for the size of the file. This newly allocated
# VACB index array is found through the Vacbs member of
# SHARED_CACHE_MAP.
Vacbs = self.Vacbs
if not Vacbs or (Vacbs.v() == 0):
return vacbary
# There are a number of instances where the initial value in
        # InitialVacb will also be the first entry in Vacbs. Thus we
        # ignore it, since it was already processed. It is possible to just
# process again as the file offset is specified for each VACB.
if self.InitialVacbs[0].obj_offset == Vacbs.v():
return vacbary
        # If the file is less than 32 MB, then it can be found in
# a single level VACB index array.
size_of_pointer = self.obj_vm.profile.get_obj_size("address")
if not SectionSize > VACB_SIZE_OF_FIRST_LEVEL:
ArrayHead = Vacbs.v()
_i = 0
for _i in range(0, full_blocks):
vacb_addr = ArrayHead + (_i * size_of_pointer)
vacb_entry = obj.Object("address", offset = vacb_addr, vm = Vacbs.obj_vm)
# If we find a zero entry, then we proceed to the next one.
# If the entry is zero, then the view is not mapped and we
# skip. We do not pad because we use the FileOffset to seek
# to the correct offset in the file.
if not vacb_entry or (vacb_entry.v() == 0x0):
continue
Vacb = obj.Object("_VACB", offset = vacb_entry.v(), vm = self.obj_vm)
vacbinfo = self.extract_vacb(Vacb, VACB_BLOCK)
if vacbinfo:
vacbary.append(vacbinfo)
if left_over > 0:
vacb_addr = ArrayHead + ((_i + 1) * size_of_pointer)
vacb_entry = obj.Object("address", offset = vacb_addr, vm = Vacbs.obj_vm)
if not vacb_entry or (vacb_entry.v() == 0x0):
return vacbary
Vacb = obj.Object("_VACB", offset = vacb_entry.v(), vm = self.obj_vm)
vacbinfo = self.extract_vacb(Vacb, left_over)
if vacbinfo:
vacbary.append(vacbinfo)
# The file is less than 32 MB, so we can
# stop processing.
return vacbary
        # If we get to this point, then we know that the SectionSize is greater than
        # VACB_SIZE_OF_FIRST_LEVEL (32 MB). Then we have a "sparse multilevel index
        # array" where each VACB index array is made up of 128 entries. We no
# longer assume the data is sequential. (Log2 (32 MB) - 18)/7
#tree_depth = math.ceil((math.ceil(math.log(file_size, 2)) - 18)/7)
level_depth = math.ceil(math.log(SectionSize, 2))
level_depth = (level_depth - VACB_OFFSET_SHIFT) / VACB_LEVEL_SHIFT
level_depth = math.ceil(level_depth)
limit_depth = level_depth
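        # Worked example (size assumed): for a 64 MB section,
        # ceil(log2(64 MB)) = 26, so level_depth = ceil((26 - 18) / 7.0) = 2,
        # i.e. a two-level index array.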
if SectionSize > VACB_SIZE_OF_FIRST_LEVEL:
# Create an array of 128 entries for the VACB index array
VacbArray = obj.Object("Array", offset = Vacbs.v(), \
vm = self.obj_vm, count = VACB_ARRAY, \
targetType = "address", parent = self)
# We use a bit of a brute force method. We walk the
# array and if any entry points to the shared cache map
# object then we extract it. Otherwise, if it is non-zero
# we attempt to traverse to the next level.
for _i in range(0, VACB_ARRAY):
if VacbArray[_i] == 0x0:
continue
Vacb = obj.Object("_VACB", offset = int(VacbArray[_i]), vm = self.obj_vm)
if Vacb.SharedCacheMap == self.obj_offset:
vacbinfo = self.extract_vacb(Vacb, VACB_BLOCK)
if vacbinfo:
vacbary.append(vacbinfo)
else:
# The Index is a pointer
#Process the next level of the multi-level array
# We set the limit_depth to be the depth of the tree
# as determined from the size and we initialize the
# current level to 2.
vacbary = self.process_index_array(VacbArray[_i], 2, limit_depth, vacbary)
#vacbary = vacbary + _vacbary
return vacbary
class ControlAreaModification(obj.ProfileModification):
conditions = {'os': lambda x: x == 'windows'}
def modification(self, profile):
profile.object_classes.update({
'_CONTROL_AREA': _CONTROL_AREA,
'_SHARED_CACHE_MAP': _SHARED_CACHE_MAP,
})
#--------------------------------------------------------------------------------
# VTypes
#--------------------------------------------------------------------------------
# Windows x86 symbols for ntkrnlpa
ntkrnlpa_types_x86 = {
'__ntkrnlpa' : [ 0x8, {
'Long' : [ 0x0, ['unsigned long long']],
'VolatileLong' : [ 0x0, ['unsigned long long']],
'Hard' : [ 0x0, ['_MMPTE_HARDWARE_64']],
'Flush' : [ 0x0, ['_HARDWARE_PTE']],
'Proto' : [ 0x0, ['_MMPTE_PROTOTYPE']],
'Soft' : [ 0x0, ['_MMPTE_SOFTWARE_64']],
'TimeStamp' : [ 0x0, ['_MMPTE_TIMESTAMP']],
'Trans' : [ 0x0, ['_MMPTE_TRANSITION_64']],
'Subsect' : [ 0x0, ['_MMPTE_SUBSECTION_64']],
'List' : [ 0x0, ['_MMPTE_LIST']],
} ],
'_MMPTEPA' : [ 0x8, {
'u' : [ 0x0, ['__ntkrnlpa']],
} ],
'_MMPTE_SUBSECTION_64' : [ 0x8, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type = 'unsigned long long')]],
'Unused0' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 5, native_type = 'unsigned long long')]],
'Protection' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 10, native_type = 'unsigned long long')]],
'Prototype' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type = 'unsigned long long')]],
'Unused1' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 32, native_type = 'unsigned long long')]],
'SubsectionAddress' : [ 0x0, ['BitField', dict(start_bit = 32, end_bit = 64, native_type = 'long long')]],
} ],
'_MMPTE_TRANSITION_64' : [ 0x8, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type = 'unsigned long long')]],
'Write' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type = 'unsigned long long')]],
'Owner' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type = 'unsigned long long')]],
'WriteThrough' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type = 'unsigned long long')]],
'CacheDisable' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type = 'unsigned long long')]],
'Protection' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 10, native_type = 'unsigned long long')]],
'Prototype' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type = 'unsigned long long')]],
'Transition' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type = 'unsigned long long')]],
'PageFrameNumber' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 48, native_type = 'unsigned long long')]],
'Unused' : [ 0x0, ['BitField', dict(start_bit = 48, end_bit = 64, native_type = 'unsigned long long')]],
}],
'_MMPTE_HARDWARE_64' : [ 0x8, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type = 'unsigned long long')]],
'Dirty1' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type = 'unsigned long long')]],
'Owner' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type = 'unsigned long long')]],
'WriteThrough' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type = 'unsigned long long')]],
'CacheDisable' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type = 'unsigned long long')]],
'Accessed' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type = 'unsigned long long')]],
'Dirty' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type = 'unsigned long long')]],
'LargePage' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type = 'unsigned long long')]],
'Global' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type = 'unsigned long long')]],
'CopyOnWrite' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type = 'unsigned long long')]],
'Unused' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type = 'unsigned long long')]],
'Write' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type = 'unsigned long long')]],
'PageFrameNumber' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 48, native_type = 'unsigned long long')]],
'reserved1' : [ 0x0, ['BitField', dict(start_bit = 48, end_bit = 52, native_type = 'unsigned long long')]],
'SoftwareWsIndex' : [ 0x0, ['BitField', dict(start_bit = 52, end_bit = 63, native_type = 'unsigned long long')]],
'NoExecute' : [ 0x0, ['BitField', dict(start_bit = 63, end_bit = 64, native_type = 'unsigned long long')]],
} ],
'_MMPTE_SOFTWARE_64' : [ 0x8, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type = 'unsigned long long')]],
'PageFileLow' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 5, native_type = 'unsigned long long')]],
'Protection' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 10, native_type = 'unsigned long long')]],
'Prototype' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type = 'unsigned long long')]],
'Transition' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type = 'unsigned long long')]],
'UsedPageTableEntries' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 22, native_type = 'unsigned long long')]],
'InStore' : [ 0x0, ['BitField', dict(start_bit = 22, end_bit = 23, native_type = 'unsigned long long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 23, end_bit = 32, native_type = 'unsigned long long')]],
'PageFileHigh' : [ 0x0, ['BitField', dict(start_bit = 32, end_bit = 64, native_type = 'unsigned long long')]],
} ],
}
class DumpFilesVTypesx86(obj.ProfileModification):
"""This modification applies the vtypes for all
versions of 32bit Windows."""
before = ['WindowsObjectClasses']
conditions = {'os': lambda x: x == 'windows',
'memory_model': lambda x : x == '32bit'}
def modification(self, profile):
profile.vtypes.update(ntkrnlpa_types_x86)
class DumpFiles(common.AbstractWindowsCommand):
"""Extract memory mapped and cached files"""
def __init__(self, config, *args, **kwargs):
common.AbstractWindowsCommand.__init__(self, config, *args, **kwargs)
self.kaddr_space = None
self.filters = []
config.add_option('REGEX', short_option = 'r',
help = 'Dump files matching REGEX',
action = 'store', type = 'string')
config.add_option('IGNORE-CASE', short_option = 'i',
help = 'Ignore case in pattern match',
action = 'store_true', default = False)
config.add_option('OFFSET', short_option = 'o', default = None,
help = 'Dump files for Process with physical address OFFSET',
action = 'store', type = 'int')
config.add_option('PHYSOFFSET', short_option = 'Q', default = None,
help = 'Dump File Object at physical address PHYSOFFSET',
action = 'store', type = 'int')
config.add_option('DUMP-DIR', short_option = 'D', default = None,
cache_invalidator = False,
help = 'Directory in which to dump extracted files')
config.add_option('SUMMARY-FILE', short_option = 'S', default = None,
cache_invalidator = False,
help = 'File where to store summary information')
config.add_option('PID', short_option = 'p', default = None,
help = 'Operate on these Process IDs (comma-separated)',
action = 'store', type = 'str')
config.add_option('NAME', short_option = 'n',
help = 'Include extracted filename in output file path',
action = 'store_true', default = False)
config.add_option('UNSAFE', short_option = 'u',
help = 'Relax safety constraints for more data',
action = 'store_true', default = False)
# Possible filters include:
# SharedCacheMap,DataSectionObject,ImageSectionObject,HandleTable,VAD
config.add_option("FILTER", short_option = 'F', default = None,
help = 'Filters to apply (comma-separated)')
def filter_tasks(self, tasks):
""" Reduce the tasks based on the user selectable PIDS parameter.
Returns a reduced list or the full list if config.PIDS not specified.
"""
if self._config.PID is None:
return tasks
try:
pidlist = [int(p) for p in self._config.PID.split(',')]
except ValueError:
debug.error("Invalid PID {0}".format(self._config.PID))
return [t for t in tasks if t.UniqueProcessId in pidlist]
def audited_read_bytes(self, vm, vaddr, length, pad):
""" This function provides an audited zread capability
It performs a similar function to zread, in that it will
pad "invalid" pages. The main difference is that it allows
us to collect auditing information about which pages were actually
present and which ones were padded.
Args:
vm: The address space to read the data from.
vaddr: The virtual address to start reading the data from.
length: How many bytes to read
pad: This argument controls if the unavailable bytes are padded.
Returns:
ret: Data that was read
mdata: List of pages that are memory resident
            zpad: List of pages that are not memory resident
Raises:
"""
zpad = []
mdata = []
vaddr, length = int(vaddr), int(length)
ret = ''
while length > 0:
chunk_len = min(length, PAGE_SIZE - (vaddr % PAGE_SIZE))
buf = vm.read(vaddr, chunk_len)
if vm.vtop(vaddr) is None:
zpad.append([vaddr, chunk_len])
if pad:
buf = '\x00' * chunk_len
else:
buf = ''
else:
mdata.append([vaddr, chunk_len])
ret += buf
vaddr += chunk_len
length -= chunk_len
return ret, mdata, zpad
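    # Hedged sketch (addresses assumed): reading two pages where the second is
    # swapped out, with pad = True, yields
    #   ret   -> the first page's bytes followed by 0x1000 zero bytes
    #   mdata -> [[vaddr, 0x1000]]
    #   zpad  -> [[vaddr + 0x1000, 0x1000]]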
def calculate(self):
""" Finds all the requested FILE_OBJECTS
Traverses the VAD and HandleTable to find all requested
FILE_OBJECTS
"""
# Initialize containers for collecting artifacts.
control_area_list = []
shared_maps = []
procfiles = []
        # These lists are used for collecting file objects from
        # both the VAD and handle tables.
vadfiles = []
handlefiles = []
# Determine which filters the user wants to see
self.filters = []
if self._config.FILTER:
self.filters = self._config.FILTER.split(',')
# Instantiate the kernel address space
self.kaddr_space = utils.load_as(self._config)
# Check to see if the physical address offset was passed for a
# particular process. Otherwise, use the whole task list.
if self._config.OFFSET != None:
tasks_list = [taskmods.DllList.virtual_process_from_physical_offset(
self.kaddr_space, self._config.OFFSET)]
else:
# Filter for the specified processes
tasks_list = self.filter_tasks(tasks_mod.pslist(self.kaddr_space))
# If a regex is specified, build it.
if self._config.REGEX:
try:
if self._config.IGNORE_CASE:
file_re = re.compile(self._config.REGEX, re.I)
else:
file_re = re.compile(self._config.REGEX)
except re.error, e:
debug.error('Error parsing regular expression: {0:s}'.format(e))
# Check to see if a specific physical address was specified for a
# FILE_OBJECT. In particular, this is useful for FILE_OBJECTS that
        # are found with filescan that are not associated with a process.
        # For example, $Mft.
if self._config.PHYSOFFSET:
file_obj = obj.Object("_FILE_OBJECT", self._config.PHYSOFFSET, self.kaddr_space.base, native_vm = self.kaddr_space)
procfiles.append((None, [file_obj]))
#return
# Iterate through the process list and collect all references to
# FILE_OBJECTS from both the VAD and HandleTable. Each open handle to a file
# has a corresponding FILE_OBJECT.
if not self._config.PHYSOFFSET:
for task in tasks_list:
pid = task.UniqueProcessId
# Extract FILE_OBJECTS from the VAD
if not self.filters or "VAD" in self.filters:
for vad in task.VadRoot.traverse():
if vad != None:
try:
control_area = vad.ControlArea
if not control_area:
continue
file_object = vad.FileObject
if file_object:
# Filter for specific FILE_OBJECTS based on user defined
# regular expression. (Performance optimization)
if self._config.REGEX:
name = None
if file_object.FileName:
name = str(file_object.file_name_with_device())
if not name:
continue
if not file_re.search(name):
continue
vadfiles.append(file_object)
except AttributeError:
pass
if not self.filters or "HandleTable" in self.filters:
# Extract the FILE_OBJECTS from the handle table
if task.ObjectTable.HandleTableList:
for handle in task.ObjectTable.handles():
otype = handle.get_object_type()
if otype == "File":
file_obj = handle.dereference_as("_FILE_OBJECT")
if file_obj:
# Filter for specific FILE_OBJECTS based on user defined
# regular expression. (Performance Optimization)
if self._config.REGEX:
name = None
if file_obj.FileName:
name = str(file_obj.file_name_with_device())
if not name:
continue
if not file_re.search(name):
continue
handlefiles.append(file_obj)
# Append the lists of file objects
#allfiles = handlefiles + vadfiles
procfiles.append((pid, handlefiles + vadfiles))
for pid, allfiles in procfiles:
for file_obj in allfiles:
if not self._config.PHYSOFFSET:
offset = file_obj.obj_offset
else:
offset = self._config.PHYSOFFSET
name = None
if file_obj.FileName:
name = str(file_obj.file_name_with_device())
# The SECTION_OBJECT_POINTERS structure is used by the memory
# manager and cache manager to store file-mapping and cache information
# for a particular file stream. We will use it to determine what type
# of FILE_OBJECT we have and how it should be parsed.
if file_obj.SectionObjectPointer:
DataSectionObject = \
file_obj.SectionObjectPointer.DataSectionObject
SharedCacheMap = \
file_obj.SectionObjectPointer.SharedCacheMap
ImageSectionObject = \
file_obj.SectionObjectPointer.ImageSectionObject
# The ImageSectionObject is used to track state information for
# an executable file stream. We will use it to extract memory
# mapped binaries.
if not self.filters or "ImageSectionObject" in self.filters:
if ImageSectionObject and ImageSectionObject != 0:
summaryinfo = {}
                            # It points to an image section object (CONTROL_AREA)
control_area = \
ImageSectionObject.dereference_as('_CONTROL_AREA')
if not control_area in control_area_list:
control_area_list.append(control_area)
# The format of the filenames: file.<pid>.<control_area>.[img|dat]
ca_offset_string = "0x{0:x}".format(control_area.obj_offset)
if self._config.NAME and name != None:
fname = name.split("\\")
ca_offset_string += "." + fname[-1]
file_string = ".".join(["file", str(pid), ca_offset_string, IMAGE_EXT])
of_path = os.path.join(self._config.DUMP_DIR, file_string)
(mdata, zpad) = control_area.extract_ca_file(self._config.UNSAFE)
summaryinfo['name'] = name
summaryinfo['type'] = "ImageSectionObject"
if pid:
summaryinfo['pid'] = int(pid)
else:
summaryinfo['pid'] = None
summaryinfo['present'] = mdata
summaryinfo['pad'] = zpad
summaryinfo['fobj'] = int(offset)
summaryinfo['ofpath'] = of_path
yield summaryinfo
# The DataSectionObject is used to track state information for
# a data file stream. We will use it to extract artifacts of
# memory mapped data files.
if not self.filters or "DataSectionObject" in self.filters:
if DataSectionObject and DataSectionObject != 0:
summaryinfo = {}
# It points to a data section object (CONTROL_AREA)
control_area = DataSectionObject.dereference_as('_CONTROL_AREA')
if not control_area in control_area_list:
control_area_list.append(control_area)
# The format of the filenames: file.<pid>.<control_area>.[img|dat]
ca_offset_string = "0x{0:x}".format(control_area.obj_offset)
if self._config.NAME and name != None:
fname = name.split("\\")
ca_offset_string += "." + fname[-1]
file_string = ".".join(["file", str(pid), ca_offset_string, DATA_EXT])
of_path = os.path.join(self._config.DUMP_DIR, file_string)
(mdata, zpad) = control_area.extract_ca_file(self._config.UNSAFE)
summaryinfo['name'] = name
summaryinfo['type'] = "DataSectionObject"
if pid:
summaryinfo['pid'] = int(pid)
else:
summaryinfo['pid'] = None
summaryinfo['present'] = mdata
summaryinfo['pad'] = zpad
summaryinfo['fobj'] = int(offset)
summaryinfo['ofpath'] = of_path
yield summaryinfo
# The SharedCacheMap is used to track views that are mapped to the
# data file stream. Each cached file has a single SHARED_CACHE_MAP object,
# which has pointers to slots in the system cache which contain views of the file.
# The shared cache map is used to describe the state of the cached file.
if self.filters and "SharedCacheMap" not in self.filters:
continue
if SharedCacheMap:
vacbary = []
summaryinfo = {}
#The SharedCacheMap member points to a SHARED_CACHE_MAP object.
shared_cache_map = SharedCacheMap.dereference_as('_SHARED_CACHE_MAP')
if shared_cache_map.obj_offset == 0x0:
continue
# Added a semantic check to make sure the data is in a sound state. It's better
# to catch it early.
if not shared_cache_map.is_valid():
continue
if not shared_cache_map.obj_offset in shared_maps:
shared_maps.append(shared_cache_map.obj_offset)
else:
continue
shared_cache_map_string = ".0x{0:x}".format(shared_cache_map.obj_offset)
if self._config.NAME and name != None:
fname = name.split("\\")
shared_cache_map_string = shared_cache_map_string + "." + fname[-1]
of_path = os.path.join(self._config.DUMP_DIR, "file." + str(pid) + shared_cache_map_string + ".vacb")
vacbary = shared_cache_map.extract_scm_file()
summaryinfo['name'] = name
summaryinfo['type'] = "SharedCacheMap"
if pid:
summaryinfo['pid'] = int(pid)
else:
summaryinfo['pid'] = None
summaryinfo['fobj'] = int(offset)
summaryinfo['ofpath'] = of_path
summaryinfo['vacbary'] = vacbary
yield summaryinfo
def render_text(self, outfd, data):
"""Renders output for the dumpfiles plugin.
This includes extracting the file artifacts from memory
to the specified dump directory.
Args:
outfd: The file descriptor to write the text to.
data: (summaryinfo)
"""
# Summary file object
summaryfo = None
summaryinfo = data
if self._config.DUMP_DIR == None:
debug.error("Please specify a dump directory (--dump-dir)")
if not os.path.isdir(self._config.DUMP_DIR):
debug.error(self._config.DUMP_DIR + " is not a directory")
if self._config.SUMMARY_FILE:
summaryfo = open(self._config.SUMMARY_FILE, 'wb')
for summaryinfo in data:
if summaryinfo['type'] == "DataSectionObject":
outfd.write("DataSectionObject {0:#010x} {1:<6} {2}\n".format(summaryinfo['fobj'], summaryinfo['pid'], summaryinfo['name']))
if len(summaryinfo['present']) == 0:
continue
of = open(summaryinfo['ofpath'], 'wb')
for mdata in summaryinfo['present']:
rdata = None
if not mdata[0]:
continue
try:
rdata = self.kaddr_space.base.read(mdata[0], mdata[2])
except (IOError, OverflowError):
debug.debug("IOError: Pid: {0} File: {1} PhysAddr: {2} Size: {3}".format(summaryinfo['pid'], summaryinfo['name'], mdata[0], mdata[2]))
if not rdata:
continue
of.seek(mdata[1])
of.write(rdata)
continue
# XXX Verify FileOffsets
#for zpad in summaryinfo['pad']:
# of.seek(zpad[0])
# of.write("\0" * zpad[1])
if self._config.SUMMARY_FILE:
json.dump(summaryinfo, summaryfo)
of.close()
elif summaryinfo['type'] == "ImageSectionObject":
outfd.write("ImageSectionObject {0:#010x} {1:<6} {2}\n".format(summaryinfo['fobj'], summaryinfo['pid'], summaryinfo['name']))
if len(summaryinfo['present']) == 0:
continue
of = open(summaryinfo['ofpath'], 'wb')
for mdata in summaryinfo['present']:
rdata = None
if not mdata[0]:
continue
try:
rdata = self.kaddr_space.base.read(mdata[0], mdata[2])
except (IOError, OverflowError):
debug.debug("IOError: Pid: {0} File: {1} PhysAddr: {2} Size: {3}".format(summaryinfo['pid'], summaryinfo['name'], mdata[0], mdata[2]))
if not rdata:
continue
of.seek(mdata[1])
of.write(rdata)
continue
# XXX Verify FileOffsets
#for zpad in summaryinfo['pad']:
# print "ZPAD 0x%x"%(zpad[0])
# of.seek(zpad[0])
# of.write("\0" * zpad[1])
if self._config.SUMMARY_FILE:
json.dump(summaryinfo, summaryfo)
of.close()
elif summaryinfo['type'] == "SharedCacheMap":
outfd.write("SharedCacheMap {0:#010x} {1:<6} {2}\n".format(summaryinfo['fobj'], summaryinfo['pid'], summaryinfo['name']))
of = open(summaryinfo['ofpath'], 'wb')
for vacb in summaryinfo['vacbary']:
if not vacb:
continue
(rdata, mdata, zpad) = self.audited_read_bytes(self.kaddr_space, vacb['baseaddr'], vacb['size'], True)
### We need to update the mdata,zpad
if rdata:
try:
of.seek(vacb['foffset'])
of.write(rdata)
except IOError:
# TODO: Handle things like write errors (not enough disk space, etc)
continue
vacb['present'] = mdata
vacb['pad'] = zpad
if self._config.SUMMARY_FILE:
json.dump(summaryinfo, summaryfo)
of.close()
else:
return
if self._config.SUMMARY_FILE:
summaryfo.close()
|
gpl-2.0
|
BaladiDogGames/baladidoggames.github.io
|
mingw/bin/lib/inspect.py
|
40
|
42462
|
# -*- coding: iso-8859-1 -*-
"""Get useful information from live Python objects.
This module encapsulates the interface provided by the internal special
attributes (func_*, co_*, im_*, tb_*, etc.) in a friendlier fashion.
It also provides some help for examining source code and class layout.
Here are some of the useful functions provided by this module:
ismodule(), isclass(), ismethod(), isfunction(), isgeneratorfunction(),
isgenerator(), istraceback(), isframe(), iscode(), isbuiltin(),
isroutine() - check object types
getmembers() - get members of an object that satisfy a given condition
getfile(), getsourcefile(), getsource() - find an object's source code
getdoc(), getcomments() - get documentation on an object
getmodule() - determine the module that an object came from
getclasstree() - arrange classes so as to represent their hierarchy
getargspec(), getargvalues(), getcallargs() - get info about function arguments
formatargspec(), formatargvalues() - format an argument spec
getouterframes(), getinnerframes() - get info about frames
currentframe() - get the current stack frame
stack(), trace() - get info about frames on the stack or in a traceback
"""
# This module is in the public domain. No warranties.
__author__ = 'Ka-Ping Yee <[email protected]>'
__date__ = '1 Jan 2001'
import sys
import os
import types
import string
import re
import dis
import imp
import tokenize
import linecache
from operator import attrgetter
from collections import namedtuple
# These constants are from Include/code.h.
CO_OPTIMIZED, CO_NEWLOCALS, CO_VARARGS, CO_VARKEYWORDS = 0x1, 0x2, 0x4, 0x8
CO_NESTED, CO_GENERATOR, CO_NOFREE = 0x10, 0x20, 0x40
# See Include/object.h
TPFLAGS_IS_ABSTRACT = 1 << 20
# ----------------------------------------------------------- type-checking
def ismodule(object):
"""Return true if the object is a module.
Module objects provide these attributes:
__doc__ documentation string
__file__ filename (missing for built-in modules)"""
return isinstance(object, types.ModuleType)
def isclass(object):
"""Return true if the object is a class.
Class objects provide these attributes:
__doc__ documentation string
__module__ name of module in which this class was defined"""
return isinstance(object, (type, types.ClassType))
def ismethod(object):
"""Return true if the object is an instance method.
Instance method objects provide these attributes:
__doc__ documentation string
__name__ name with which this method was defined
im_class class object in which this method belongs
im_func function object containing implementation of method
im_self instance to which this method is bound, or None"""
return isinstance(object, types.MethodType)
def ismethoddescriptor(object):
"""Return true if the object is a method descriptor.
But not if ismethod() or isclass() or isfunction() are true.
This is new in Python 2.2, and, for example, is true of int.__add__.
An object passing this test has a __get__ attribute but not a __set__
attribute, but beyond that the set of attributes varies. __name__ is
usually sensible, and __doc__ often is.
Methods implemented via descriptors that also pass one of the other
tests return false from the ismethoddescriptor() test, simply because
the other tests promise more -- you can, e.g., count on having the
im_func attribute (etc) when an object passes ismethod()."""
return (hasattr(object, "__get__")
and not hasattr(object, "__set__") # else it's a data descriptor
and not ismethod(object) # mutual exclusion
and not isfunction(object)
and not isclass(object))
def isdatadescriptor(object):
"""Return true if the object is a data descriptor.
Data descriptors have both a __get__ and a __set__ attribute. Examples are
properties (defined in Python) and getsets and members (defined in C).
Typically, data descriptors will also have __name__ and __doc__ attributes
(properties, getsets, and members have both of these attributes), but this
is not guaranteed."""
return (hasattr(object, "__set__") and hasattr(object, "__get__"))
if hasattr(types, 'MemberDescriptorType'):
# CPython and equivalent
def ismemberdescriptor(object):
"""Return true if the object is a member descriptor.
Member descriptors are specialized descriptors defined in extension
modules."""
return isinstance(object, types.MemberDescriptorType)
else:
# Other implementations
def ismemberdescriptor(object):
"""Return true if the object is a member descriptor.
Member descriptors are specialized descriptors defined in extension
modules."""
return False
if hasattr(types, 'GetSetDescriptorType'):
# CPython and equivalent
def isgetsetdescriptor(object):
"""Return true if the object is a getset descriptor.
getset descriptors are specialized descriptors defined in extension
modules."""
return isinstance(object, types.GetSetDescriptorType)
else:
# Other implementations
def isgetsetdescriptor(object):
"""Return true if the object is a getset descriptor.
getset descriptors are specialized descriptors defined in extension
modules."""
return False
def isfunction(object):
"""Return true if the object is a user-defined function.
Function objects provide these attributes:
__doc__ documentation string
__name__ name with which this function was defined
func_code code object containing compiled function bytecode
func_defaults tuple of any default values for arguments
func_doc (same as __doc__)
func_globals global namespace in which this function was defined
func_name (same as __name__)"""
return isinstance(object, types.FunctionType)
def isgeneratorfunction(object):
"""Return true if the object is a user-defined generator function.
    Generator function objects provide the same attributes as functions.
See help(isfunction) for attributes listing."""
return bool((isfunction(object) or ismethod(object)) and
object.func_code.co_flags & CO_GENERATOR)
def isgenerator(object):
"""Return true if the object is a generator.
Generator objects provide these attributes:
    __iter__ defined to support iteration over container
close raises a new GeneratorExit exception inside the
generator to terminate the iteration
gi_code code object
gi_frame frame object or possibly None once the generator has
been exhausted
gi_running set to 1 when generator is executing, 0 otherwise
next return the next item from the container
send resumes the generator and "sends" a value that becomes
the result of the current yield-expression
throw used to raise an exception inside the generator"""
return isinstance(object, types.GeneratorType)
def istraceback(object):
"""Return true if the object is a traceback.
Traceback objects provide these attributes:
tb_frame frame object at this level
tb_lasti index of last attempted instruction in bytecode
tb_lineno current line number in Python source code
tb_next next inner traceback object (called by this level)"""
return isinstance(object, types.TracebackType)
def isframe(object):
"""Return true if the object is a frame object.
Frame objects provide these attributes:
f_back next outer frame object (this frame's caller)
f_builtins built-in namespace seen by this frame
f_code code object being executed in this frame
f_exc_traceback traceback if raised in this frame, or None
f_exc_type exception type if raised in this frame, or None
f_exc_value exception value if raised in this frame, or None
f_globals global namespace seen by this frame
f_lasti index of last attempted instruction in bytecode
f_lineno current line number in Python source code
f_locals local namespace seen by this frame
f_restricted 0 or 1 if frame is in restricted execution mode
f_trace tracing function for this frame, or None"""
return isinstance(object, types.FrameType)
def iscode(object):
"""Return true if the object is a code object.
Code objects provide these attributes:
co_argcount number of arguments (not including * or ** args)
co_code string of raw compiled bytecode
co_consts tuple of constants used in the bytecode
co_filename name of file in which this code object was created
co_firstlineno number of first line in Python source code
co_flags bitmap: 1=optimized | 2=newlocals | 4=*arg | 8=**arg
co_lnotab encoded mapping of line numbers to bytecode indices
co_name name with which this code object was defined
    co_names tuple of names other than arguments and function locals
co_nlocals number of local variables
co_stacksize virtual machine stack space required
co_varnames tuple of names of arguments and local variables"""
return isinstance(object, types.CodeType)
def isbuiltin(object):
"""Return true if the object is a built-in function or method.
Built-in functions and methods provide these attributes:
__doc__ documentation string
__name__ original name of this function or method
__self__ instance to which a method is bound, or None"""
return isinstance(object, types.BuiltinFunctionType)
def isroutine(object):
"""Return true if the object is any kind of function or method."""
return (isbuiltin(object)
or isfunction(object)
or ismethod(object)
or ismethoddescriptor(object))
def isabstract(object):
"""Return true if the object is an abstract base class (ABC)."""
return bool(isinstance(object, type) and object.__flags__ & TPFLAGS_IS_ABSTRACT)
def getmembers(object, predicate=None):
"""Return all members of an object as (name, value) pairs sorted by name.
Optionally, only return members that satisfy a given predicate."""
results = []
for key in dir(object):
try:
value = getattr(object, key)
except AttributeError:
continue
if not predicate or predicate(value):
results.append((key, value))
results.sort()
return results
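# Illustrative sketch (not part of the original module): getmembers() pairs
# naturally with the is*() predicates above, e.g.
#     >>> import os.path
#     >>> pairs = getmembers(os.path, isfunction)
# yields the (name, function) pairs for the plain functions defined on
# os.path, sorted by name.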
Attribute = namedtuple('Attribute', 'name kind defining_class object')
def classify_class_attrs(cls):
"""Return list of attribute-descriptor tuples.
For each name in dir(cls), the return list contains a 4-tuple
with these elements:
0. The name (a string).
1. The kind of attribute this is, one of these strings:
'class method' created via classmethod()
'static method' created via staticmethod()
'property' created via property()
'method' any other flavor of method
'data' not a method
2. The class which defined this attribute (a class).
3. The object as obtained directly from the defining class's
__dict__, not via getattr. This is especially important for
data attributes: C.data is just a data object, but
C.__dict__['data'] may be a data descriptor with additional
info, like a __doc__ string.
"""
mro = getmro(cls)
names = dir(cls)
result = []
for name in names:
# Get the object associated with the name, and where it was defined.
# Getting an obj from the __dict__ sometimes reveals more than
# using getattr. Static and class methods are dramatic examples.
# Furthermore, some objects may raise an Exception when fetched with
# getattr(). This is the case with some descriptors (bug #1785).
# Thus, we only use getattr() as a last resort.
homecls = None
for base in (cls,) + mro:
if name in base.__dict__:
obj = base.__dict__[name]
homecls = base
break
else:
obj = getattr(cls, name)
homecls = getattr(obj, "__objclass__", homecls)
# Classify the object.
if isinstance(obj, staticmethod):
kind = "static method"
elif isinstance(obj, classmethod):
kind = "class method"
elif isinstance(obj, property):
kind = "property"
elif ismethoddescriptor(obj):
kind = "method"
elif isdatadescriptor(obj):
kind = "data"
else:
obj_via_getattr = getattr(cls, name)
if (ismethod(obj_via_getattr) or
ismethoddescriptor(obj_via_getattr)):
kind = "method"
else:
kind = "data"
obj = obj_via_getattr
result.append(Attribute(name, kind, homecls, obj))
return result
# ----------------------------------------------------------- class helpers
def _searchbases(cls, accum):
# Simulate the "classic class" search order.
if cls in accum:
return
accum.append(cls)
for base in cls.__bases__:
_searchbases(base, accum)
def getmro(cls):
"Return tuple of base classes (including cls) in method resolution order."
if hasattr(cls, "__mro__"):
return cls.__mro__
else:
result = []
_searchbases(cls, result)
return tuple(result)
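# Sketch of getmro() on new-style classes, which expose __mro__ directly
# (classic classes fall back to the depth-first search above):
#     >>> class A(object): pass
#     >>> class B(A): pass
#     >>> getmro(B) == (B, A, object)
#     True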
# -------------------------------------------------- source code extraction
def indentsize(line):
"""Return the indent size, in spaces, at the start of a line of text."""
expline = string.expandtabs(line)
return len(expline) - len(string.lstrip(expline))
def getdoc(object):
"""Get the documentation string for an object.
All tabs are expanded to spaces. To clean up docstrings that are
    indented to line up with blocks of code, any whitespace that can be
uniformly removed from the second line onwards is removed."""
try:
doc = object.__doc__
except AttributeError:
return None
if not isinstance(doc, types.StringTypes):
return None
return cleandoc(doc)
def cleandoc(doc):
"""Clean up indentation from docstrings.
Any whitespace that can be uniformly removed from the second line
onwards is removed."""
try:
lines = string.split(string.expandtabs(doc), '\n')
except UnicodeError:
return None
else:
# Find minimum indentation of any non-blank lines after first line.
margin = sys.maxint
for line in lines[1:]:
content = len(string.lstrip(line))
if content:
indent = len(line) - content
margin = min(margin, indent)
# Remove indentation.
if lines:
lines[0] = lines[0].lstrip()
if margin < sys.maxint:
for i in range(1, len(lines)): lines[i] = lines[i][margin:]
# Remove any trailing or leading blank lines.
while lines and not lines[-1]:
lines.pop()
while lines and not lines[0]:
lines.pop(0)
return string.join(lines, '\n')
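# Sketch: cleandoc() strips the uniform indentation that lines up a docstring
# body with the surrounding code:
#     >>> cleandoc("Title\n    body line one\n    body line two")
#     'Title\nbody line one\nbody line two'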
def getfile(object):
"""Work out which source or compiled file an object was defined in."""
if ismodule(object):
if hasattr(object, '__file__'):
return object.__file__
raise TypeError('{!r} is a built-in module'.format(object))
if isclass(object):
object = sys.modules.get(object.__module__)
if hasattr(object, '__file__'):
return object.__file__
raise TypeError('{!r} is a built-in class'.format(object))
if ismethod(object):
object = object.im_func
if isfunction(object):
object = object.func_code
if istraceback(object):
object = object.tb_frame
if isframe(object):
object = object.f_code
if iscode(object):
return object.co_filename
raise TypeError('{!r} is not a module, class, method, '
'function, traceback, frame, or code object'.format(object))
ModuleInfo = namedtuple('ModuleInfo', 'name suffix mode module_type')
def getmoduleinfo(path):
"""Get the module name, suffix, mode, and module type for a given file."""
filename = os.path.basename(path)
suffixes = map(lambda info:
(-len(info[0]), info[0], info[1], info[2]),
imp.get_suffixes())
suffixes.sort() # try longest suffixes first, in case they overlap
for neglen, suffix, mode, mtype in suffixes:
if filename[neglen:] == suffix:
return ModuleInfo(filename[:neglen], suffix, mode, mtype)
def getmodulename(path):
"""Return the module name for a given file, or None."""
info = getmoduleinfo(path)
if info: return info[0]
def getsourcefile(object):
"""Return the filename that can be used to locate an object's source.
Return None if no way can be identified to get the source.
"""
filename = getfile(object)
if string.lower(filename[-4:]) in ('.pyc', '.pyo'):
filename = filename[:-4] + '.py'
for suffix, mode, kind in imp.get_suffixes():
if 'b' in mode and string.lower(filename[-len(suffix):]) == suffix:
# Looks like a binary file. We want to only return a text file.
return None
if os.path.exists(filename):
return filename
# only return a non-existent filename if the module has a PEP 302 loader
if hasattr(getmodule(object, filename), '__loader__'):
return filename
# or it is in the linecache
if filename in linecache.cache:
return filename
def getabsfile(object, _filename=None):
"""Return an absolute path to the source or compiled file for an object.
The idea is for each object to have a unique origin, so this routine
normalizes the result as much as possible."""
if _filename is None:
_filename = getsourcefile(object) or getfile(object)
return os.path.normcase(os.path.abspath(_filename))
modulesbyfile = {}
_filesbymodname = {}
def getmodule(object, _filename=None):
"""Return the module an object was defined in, or None if not found."""
if ismodule(object):
return object
if hasattr(object, '__module__'):
return sys.modules.get(object.__module__)
# Try the filename to modulename cache
if _filename is not None and _filename in modulesbyfile:
return sys.modules.get(modulesbyfile[_filename])
# Try the cache again with the absolute file name
try:
file = getabsfile(object, _filename)
except TypeError:
return None
if file in modulesbyfile:
return sys.modules.get(modulesbyfile[file])
# Update the filename to module name cache and check yet again
# Copy sys.modules in order to cope with changes while iterating
for modname, module in sys.modules.items():
if ismodule(module) and hasattr(module, '__file__'):
f = module.__file__
if f == _filesbymodname.get(modname, None):
# Have already mapped this module, so skip it
continue
_filesbymodname[modname] = f
f = getabsfile(module)
# Always map to the name the module knows itself by
modulesbyfile[f] = modulesbyfile[
os.path.realpath(f)] = module.__name__
if file in modulesbyfile:
return sys.modules.get(modulesbyfile[file])
# Check the main module
main = sys.modules['__main__']
if not hasattr(object, '__name__'):
return None
if hasattr(main, object.__name__):
mainobject = getattr(main, object.__name__)
if mainobject is object:
return main
# Check builtins
builtin = sys.modules['__builtin__']
if hasattr(builtin, object.__name__):
builtinobject = getattr(builtin, object.__name__)
if builtinobject is object:
return builtin
def findsource(object):
"""Return the entire source file and starting line number for an object.
The argument may be a module, class, method, function, traceback, frame,
or code object. The source code is returned as a list of all the lines
in the file and the line number indexes a line in that list. An IOError
is raised if the source code cannot be retrieved."""
file = getfile(object)
sourcefile = getsourcefile(object)
if not sourcefile and file[0] + file[-1] != '<>':
raise IOError('source code not available')
file = sourcefile if sourcefile else file
module = getmodule(object, file)
if module:
lines = linecache.getlines(file, module.__dict__)
else:
lines = linecache.getlines(file)
if not lines:
raise IOError('could not get source code')
if ismodule(object):
return lines, 0
if isclass(object):
name = object.__name__
pat = re.compile(r'^(\s*)class\s*' + name + r'\b')
# make some effort to find the best matching class definition:
# use the one with the least indentation, which is the one
# that's most probably not inside a function definition.
candidates = []
for i in range(len(lines)):
match = pat.match(lines[i])
if match:
# if it's at toplevel, it's already the best one
if lines[i][0] == 'c':
return lines, i
# else add whitespace to candidate list
candidates.append((match.group(1), i))
if candidates:
# this will sort by whitespace, and by line number,
# less whitespace first
candidates.sort()
return lines, candidates[0][1]
else:
raise IOError('could not find class definition')
if ismethod(object):
object = object.im_func
if isfunction(object):
object = object.func_code
if istraceback(object):
object = object.tb_frame
if isframe(object):
object = object.f_code
if iscode(object):
if not hasattr(object, 'co_firstlineno'):
raise IOError('could not find function definition')
lnum = object.co_firstlineno - 1
pat = re.compile(r'^(\s*def\s)|(.*(?<!\w)lambda(:|\s))|^(\s*@)')
while lnum > 0:
if pat.match(lines[lnum]): break
lnum = lnum - 1
return lines, lnum
raise IOError('could not find code object')
def getcomments(object):
"""Get lines of comments immediately preceding an object's source code.
Returns None when source can't be found.
"""
try:
lines, lnum = findsource(object)
except (IOError, TypeError):
return None
if ismodule(object):
# Look for a comment block at the top of the file.
start = 0
if lines and lines[0][:2] == '#!': start = 1
while start < len(lines) and string.strip(lines[start]) in ('', '#'):
start = start + 1
if start < len(lines) and lines[start][:1] == '#':
comments = []
end = start
while end < len(lines) and lines[end][:1] == '#':
comments.append(string.expandtabs(lines[end]))
end = end + 1
return string.join(comments, '')
# Look for a preceding block of comments at the same indentation.
elif lnum > 0:
indent = indentsize(lines[lnum])
end = lnum - 1
if end >= 0 and string.lstrip(lines[end])[:1] == '#' and \
indentsize(lines[end]) == indent:
comments = [string.lstrip(string.expandtabs(lines[end]))]
if end > 0:
end = end - 1
comment = string.lstrip(string.expandtabs(lines[end]))
while comment[:1] == '#' and indentsize(lines[end]) == indent:
comments[:0] = [comment]
end = end - 1
if end < 0: break
comment = string.lstrip(string.expandtabs(lines[end]))
while comments and string.strip(comments[0]) == '#':
comments[:1] = []
while comments and string.strip(comments[-1]) == '#':
comments[-1:] = []
return string.join(comments, '')
class EndOfBlock(Exception): pass
class BlockFinder:
"""Provide a tokeneater() method to detect the end of a code block."""
def __init__(self):
self.indent = 0
self.islambda = False
self.started = False
self.passline = False
self.last = 1
def tokeneater(self, type, token, srow_scol, erow_ecol, line):
srow, scol = srow_scol
erow, ecol = erow_ecol
if not self.started:
# look for the first "def", "class" or "lambda"
if token in ("def", "class", "lambda"):
if token == "lambda":
self.islambda = True
self.started = True
self.passline = True # skip to the end of the line
elif type == tokenize.NEWLINE:
self.passline = False # stop skipping when a NEWLINE is seen
self.last = srow
if self.islambda: # lambdas always end at the first NEWLINE
raise EndOfBlock
elif self.passline:
pass
elif type == tokenize.INDENT:
self.indent = self.indent + 1
self.passline = True
elif type == tokenize.DEDENT:
self.indent = self.indent - 1
# the end of matching indent/dedent pairs end a block
# (note that this only works for "def"/"class" blocks,
# not e.g. for "if: else:" or "try: finally:" blocks)
if self.indent <= 0:
raise EndOfBlock
elif self.indent == 0 and type not in (tokenize.COMMENT, tokenize.NL):
            # any other token on the same indentation level ends the previous
# block as well, except the pseudo-tokens COMMENT and NL.
raise EndOfBlock
def getblock(lines):
"""Extract the block of code at the top of the given list of lines."""
blockfinder = BlockFinder()
try:
tokenize.tokenize(iter(lines).next, blockfinder.tokeneater)
except (EndOfBlock, IndentationError):
pass
return lines[:blockfinder.last]
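# Sketch: getblock() trims a source listing to the first complete definition,
# relying on BlockFinder's INDENT/DEDENT bookkeeping:
#     >>> src = ['def f():\n', '    return 1\n', 'def g():\n', '    pass\n']
#     >>> getblock(src)
#     ['def f():\n', '    return 1\n']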
def getsourcelines(object):
"""Return a list of source lines and starting line number for an object.
The argument may be a module, class, method, function, traceback, frame,
or code object. The source code is returned as a list of the lines
corresponding to the object and the line number indicates where in the
original source file the first line of code was found. An IOError is
raised if the source code cannot be retrieved."""
lines, lnum = findsource(object)
if ismodule(object): return lines, 0
else: return getblock(lines[lnum:]), lnum + 1
def getsource(object):
"""Return the text of the source code for an object.
The argument may be a module, class, method, function, traceback, frame,
or code object. The source code is returned as a single string. An
IOError is raised if the source code cannot be retrieved."""
lines, lnum = getsourcelines(object)
return string.join(lines, '')
# --------------------------------------------------- class tree extraction
def walktree(classes, children, parent):
"""Recursive helper function for getclasstree()."""
results = []
classes.sort(key=attrgetter('__module__', '__name__'))
for c in classes:
results.append((c, c.__bases__))
if c in children:
results.append(walktree(children[c], children, c))
return results
def getclasstree(classes, unique=0):
"""Arrange the given list of classes into a hierarchy of nested lists.
Where a nested list appears, it contains classes derived from the class
whose entry immediately precedes the list. Each entry is a 2-tuple
containing a class and a tuple of its base classes. If the 'unique'
argument is true, exactly one entry appears in the returned structure
for each class in the given list. Otherwise, classes using multiple
inheritance and their descendants will appear multiple times."""
children = {}
roots = []
for c in classes:
if c.__bases__:
for parent in c.__bases__:
if not parent in children:
children[parent] = []
children[parent].append(c)
if unique and parent in classes: break
elif c not in roots:
roots.append(c)
for parent in children:
if parent not in classes:
roots.append(parent)
return walktree(roots, children, None)
# ------------------------------------------------ argument list extraction
Arguments = namedtuple('Arguments', 'args varargs keywords')
def getargs(co):
"""Get information about the arguments accepted by a code object.
Three things are returned: (args, varargs, varkw), where 'args' is
a list of argument names (possibly containing nested lists), and
'varargs' and 'varkw' are the names of the * and ** arguments or None."""
if not iscode(co):
raise TypeError('{!r} is not a code object'.format(co))
nargs = co.co_argcount
names = co.co_varnames
args = list(names[:nargs])
step = 0
# The following acrobatics are for anonymous (tuple) arguments.
for i in range(nargs):
if args[i][:1] in ('', '.'):
stack, remain, count = [], [], []
while step < len(co.co_code):
op = ord(co.co_code[step])
step = step + 1
if op >= dis.HAVE_ARGUMENT:
opname = dis.opname[op]
value = ord(co.co_code[step]) + ord(co.co_code[step+1])*256
step = step + 2
if opname in ('UNPACK_TUPLE', 'UNPACK_SEQUENCE'):
remain.append(value)
count.append(value)
elif opname == 'STORE_FAST':
stack.append(names[value])
# Special case for sublists of length 1: def foo((bar))
# doesn't generate the UNPACK_TUPLE bytecode, so if
# `remain` is empty here, we have such a sublist.
if not remain:
stack[0] = [stack[0]]
break
else:
remain[-1] = remain[-1] - 1
while remain[-1] == 0:
remain.pop()
size = count.pop()
stack[-size:] = [stack[-size:]]
if not remain: break
remain[-1] = remain[-1] - 1
if not remain: break
args[i] = stack[0]
varargs = None
if co.co_flags & CO_VARARGS:
varargs = co.co_varnames[nargs]
nargs = nargs + 1
varkw = None
if co.co_flags & CO_VARKEYWORDS:
varkw = co.co_varnames[nargs]
return Arguments(args, varargs, varkw)
ArgSpec = namedtuple('ArgSpec', 'args varargs keywords defaults')
def getargspec(func):
"""Get the names and default values of a function's arguments.
A tuple of four things is returned: (args, varargs, varkw, defaults).
'args' is a list of the argument names (it may contain nested lists).
'varargs' and 'varkw' are the names of the * and ** arguments or None.
'defaults' is an n-tuple of the default values of the last n arguments.
"""
if ismethod(func):
func = func.im_func
if not isfunction(func):
raise TypeError('{!r} is not a Python function'.format(func))
args, varargs, varkw = getargs(func.func_code)
return ArgSpec(args, varargs, varkw, func.func_defaults)
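# Sketch of getargspec() on an ordinary function:
#     >>> def f(a, b=1, *args, **kw): pass
#     >>> getargspec(f)
#     ArgSpec(args=['a', 'b'], varargs='args', keywords='kw', defaults=(1,))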
ArgInfo = namedtuple('ArgInfo', 'args varargs keywords locals')
def getargvalues(frame):
"""Get information about arguments passed into a particular frame.
A tuple of four things is returned: (args, varargs, varkw, locals).
'args' is a list of the argument names (it may contain nested lists).
'varargs' and 'varkw' are the names of the * and ** arguments or None.
'locals' is the locals dictionary of the given frame."""
args, varargs, varkw = getargs(frame.f_code)
return ArgInfo(args, varargs, varkw, frame.f_locals)
def joinseq(seq):
if len(seq) == 1:
return '(' + seq[0] + ',)'
else:
return '(' + string.join(seq, ', ') + ')'
def strseq(object, convert, join=joinseq):
"""Recursively walk a sequence, stringifying each element."""
if type(object) in (list, tuple):
return join(map(lambda o, c=convert, j=join: strseq(o, c, j), object))
else:
return convert(object)
def formatargspec(args, varargs=None, varkw=None, defaults=None,
formatarg=str,
formatvarargs=lambda name: '*' + name,
formatvarkw=lambda name: '**' + name,
formatvalue=lambda value: '=' + repr(value),
join=joinseq):
"""Format an argument spec from the 4 values returned by getargspec.
The first four arguments are (args, varargs, varkw, defaults). The
other four arguments are the corresponding optional formatting functions
that are called to turn names and values into strings. The ninth
argument is an optional function to format the sequence of arguments."""
specs = []
if defaults:
firstdefault = len(args) - len(defaults)
for i, arg in enumerate(args):
spec = strseq(arg, formatarg, join)
if defaults and i >= firstdefault:
spec = spec + formatvalue(defaults[i - firstdefault])
specs.append(spec)
if varargs is not None:
specs.append(formatvarargs(varargs))
if varkw is not None:
specs.append(formatvarkw(varkw))
return '(' + string.join(specs, ', ') + ')'
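# Sketch: formatargspec() round-trips an argspec back into source-like form:
#     >>> def f(a, b=1, *args, **kw): pass
#     >>> formatargspec(*getargspec(f))
#     '(a, b=1, *args, **kw)'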
def formatargvalues(args, varargs, varkw, locals,
formatarg=str,
formatvarargs=lambda name: '*' + name,
formatvarkw=lambda name: '**' + name,
formatvalue=lambda value: '=' + repr(value),
join=joinseq):
"""Format an argument spec from the 4 values returned by getargvalues.
The first four arguments are (args, varargs, varkw, locals). The
next four arguments are the corresponding optional formatting functions
that are called to turn names and values into strings. The ninth
argument is an optional function to format the sequence of arguments."""
def convert(name, locals=locals,
formatarg=formatarg, formatvalue=formatvalue):
return formatarg(name) + formatvalue(locals[name])
specs = []
for i in range(len(args)):
specs.append(strseq(args[i], convert, join))
if varargs:
specs.append(formatvarargs(varargs) + formatvalue(locals[varargs]))
if varkw:
specs.append(formatvarkw(varkw) + formatvalue(locals[varkw]))
return '(' + string.join(specs, ', ') + ')'
def getcallargs(func, *positional, **named):
"""Get the mapping of arguments to values.
A dict is returned, with keys the function argument names (including the
names of the * and ** arguments, if any), and values the respective bound
values from 'positional' and 'named'."""
args, varargs, varkw, defaults = getargspec(func)
f_name = func.__name__
arg2value = {}
# The following closures are basically because of tuple parameter unpacking.
assigned_tuple_params = []
def assign(arg, value):
if isinstance(arg, str):
arg2value[arg] = value
else:
assigned_tuple_params.append(arg)
value = iter(value)
for i, subarg in enumerate(arg):
try:
subvalue = next(value)
except StopIteration:
raise ValueError('need more than %d %s to unpack' %
(i, 'values' if i > 1 else 'value'))
assign(subarg,subvalue)
try:
next(value)
except StopIteration:
pass
else:
raise ValueError('too many values to unpack')
def is_assigned(arg):
if isinstance(arg,str):
return arg in arg2value
return arg in assigned_tuple_params
if ismethod(func) and func.im_self is not None:
# implicit 'self' (or 'cls' for classmethods) argument
positional = (func.im_self,) + positional
num_pos = len(positional)
num_total = num_pos + len(named)
num_args = len(args)
num_defaults = len(defaults) if defaults else 0
for arg, value in zip(args, positional):
assign(arg, value)
if varargs:
if num_pos > num_args:
assign(varargs, positional[-(num_pos-num_args):])
else:
assign(varargs, ())
elif 0 < num_args < num_pos:
raise TypeError('%s() takes %s %d %s (%d given)' % (
f_name, 'at most' if defaults else 'exactly', num_args,
'arguments' if num_args > 1 else 'argument', num_total))
elif num_args == 0 and num_total:
if varkw:
if num_pos:
# XXX: We should use num_pos, but Python also uses num_total:
raise TypeError('%s() takes exactly 0 arguments '
'(%d given)' % (f_name, num_total))
else:
raise TypeError('%s() takes no arguments (%d given)' %
(f_name, num_total))
for arg in args:
if isinstance(arg, str) and arg in named:
if is_assigned(arg):
raise TypeError("%s() got multiple values for keyword "
"argument '%s'" % (f_name, arg))
else:
assign(arg, named.pop(arg))
if defaults: # fill in any missing values with the defaults
for arg, value in zip(args[-num_defaults:], defaults):
if not is_assigned(arg):
assign(arg, value)
if varkw:
assign(varkw, named)
elif named:
unexpected = next(iter(named))
if isinstance(unexpected, unicode):
unexpected = unexpected.encode(sys.getdefaultencoding(), 'replace')
raise TypeError("%s() got an unexpected keyword argument '%s'" %
(f_name, unexpected))
unassigned = num_args - len([arg for arg in args if is_assigned(arg)])
if unassigned:
num_required = num_args - num_defaults
raise TypeError('%s() takes %s %d %s (%d given)' % (
f_name, 'at least' if defaults else 'exactly', num_required,
'arguments' if num_required > 1 else 'argument', num_total))
return arg2value
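# Sketch: getcallargs() mirrors the interpreter's own binding rules:
#     >>> def g(a, b=2, *rest): pass
#     >>> getcallargs(g, 1, 3, 4) == {'a': 1, 'b': 3, 'rest': (4,)}
#     True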
# -------------------------------------------------- stack frame extraction
Traceback = namedtuple('Traceback', 'filename lineno function code_context index')
def getframeinfo(frame, context=1):
"""Get information about a frame or traceback object.
A tuple of five things is returned: the filename, the line number of
the current line, the function name, a list of lines of context from
the source code, and the index of the current line within that list.
The optional second argument specifies the number of lines of context
to return, which are centered around the current line."""
if istraceback(frame):
lineno = frame.tb_lineno
frame = frame.tb_frame
else:
lineno = frame.f_lineno
if not isframe(frame):
raise TypeError('{!r} is not a frame or traceback object'.format(frame))
filename = getsourcefile(frame) or getfile(frame)
if context > 0:
start = lineno - 1 - context//2
try:
lines, lnum = findsource(frame)
except IOError:
lines = index = None
else:
start = max(start, 1)
start = max(0, min(start, len(lines) - context))
lines = lines[start:start+context]
index = lineno - 1 - start
else:
lines = index = None
return Traceback(filename, lineno, frame.f_code.co_name, lines, index)
def getlineno(frame):
"""Get the line number from a frame object, allowing for optimization."""
# FrameType.f_lineno is now a descriptor that grovels co_lnotab
return frame.f_lineno
def getouterframes(frame, context=1):
"""Get a list of records for a frame and all higher (calling) frames.
Each record contains a frame object, filename, line number, function
name, a list of lines of context, and index within the context."""
framelist = []
while frame:
framelist.append((frame,) + getframeinfo(frame, context))
frame = frame.f_back
return framelist
def getinnerframes(tb, context=1):
"""Get a list of records for a traceback's frame and all lower frames.
Each record contains a frame object, filename, line number, function
name, a list of lines of context, and index within the context."""
framelist = []
while tb:
framelist.append((tb.tb_frame,) + getframeinfo(tb, context))
tb = tb.tb_next
return framelist
if hasattr(sys, '_getframe'):
currentframe = sys._getframe
else:
currentframe = lambda _=None: None
def stack(context=1):
"""Return a list of records for the stack above the caller's frame."""
return getouterframes(sys._getframe(1), context)
def trace(context=1):
"""Return a list of records for the stack below the current exception."""
return getinnerframes(sys.exc_info()[2], context)
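# Sketch: each record from stack() is (frame, filename, lineno, function,
# code_context, index), so a function can read its caller's name as:
#     def whoami():
#         return stack()[1][3]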
|
mit
|
EmprendedoresLA/emprendevs-equipo-1
|
api/node_modules/yamlish/yamlish-py/test/test_reader.py
|
147
|
9379
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function, unicode_literals
import yaml
import yamlish
import test
import unittest
test_data_list = [
{
"name": 'Hello World',
"in": [ '--- Hello, World', '...', ],
"out": "Hello, World",
},
{
"name": 'Hello World 2',
"in": [ '--- \'Hello, \'\'World\'', '...', ],
"out": "Hello, 'World",
},
{
"name": 'Hello World 3',
"in": [ '--- "Hello, World"', '...', ],
"out": "Hello, World",
},
{
"name": 'Hello World 4',
"in": [ '--- "Hello, World"', '...', ],
"out": "Hello, World",
},
{
"name": 'Hello World 5',
"in": [ '--- >', ' Hello,', ' World', '...', ],
"out": "Hello, World\n",
},
{
"name": 'Hello World 6',
"in": [ '--- >', ' Hello,', ' World', '...', ],
"error": yaml.parser.ParserError,
},
{
"name": 'Simple array',
"in": [ '---', '- 1', '- 2', '- 3', '...', ],
"out": [ 1, 2, 3 ],
},
{
"name": 'Mixed array',
"in": [ '---', '- 1', "- 'two'", r'- "three\n"', '...', ],
"out": [ 1, 'two', "three\n" ],
},
{
"name": 'Hash in array',
"in": [ '---', ' - 1', ' - two: 2', ' - 3', '...', ],
"out": [ 1, { "two": 2 }, 3 ],
},
{
"name": 'Hash in array 2',
"in": [ '---', '- 1', '- two: 2', ' three: 3', '- 4', '...', ],
"out": [ 1, { "two": 2, "three": 3 }, 4 ],
},
{
"name": 'Nested array',
"in": [
'---',
'- one',
'- ',
' - two',
' - ',
' - three',
' - four',
'- five',
'...',
],
"out": [ 'one', [ 'two', ['three'], 'four' ], 'five' ],
},
{
"name": 'Nested hash',
"in": [
'---',
'one:',
' five: 5',
' two:',
' four: 4',
' three: 3',
'six: 6',
'...',
],
"out": {
"one": { "two": { "three": 3, "four": 4 }, "five": 5 },
"six": 6
},
},
{
"name": 'Original YAML::Tiny test',
"in": [
'---',
'invoice: 34843',
'date : 2001-01-23',
'bill-to:',
' given : Chris',
' family : Dumars',
' address:',
' lines: | ',
' 458 Walkman Dr.',
' Suite #292',
' city : Royal Oak',
' state : MI',
' postal : 48046',
'product:',
' - sku : BL394D',
' quantity : 4',
' description : Basketball',
' price : 450.00',
' - sku : BL4438H',
' quantity : 1',
' description : Super Hoop',
' price : 2392.00',
'tax : 251.42',
'total: 4443.52',
'comments: >',
' Late afternoon is best.',
' Backup contact is Nancy',
' Billsmer @ 338-4338',
'...',
],
"out": {
"bill-to": {
"given": 'Chris',
"address": {
"city": 'Royal Oak',
"postal": 48046,
"lines": "458 Walkman Dr.\nSuite #292\n",
"state": 'MI'
},
"family": 'Dumars'
},
"invoice": 34843,
"date": '2001-01-23',
"tax": 251.42,
"product": [
{
"sku": 'BL394D',
"quantity": 4,
"price": 450.00,
"description": 'Basketball'
},
{
"sku": 'BL4438H',
"quantity": 1,
"price": 2392.00,
"description": 'Super Hoop'
}
],
'comments':
"Late afternoon is best. Backup contact is Nancy Billsmer @ 338-4338\n",
"total": 4443.52
}
},
# Tests harvested from YAML::Tiny
{
"in": ['...'],
"name": 'Regression: empty',
"error": yaml.parser.ParserError,
},
{
"in": [ '# comment', '...' ],
"name": 'Regression: only_comment',
"error": yaml.parser.ParserError,
},
{
"skip": True, # A corner case, which is apparently not
# clear even from the spec file
"out": None,
"in": [ '---', '...' ],
"name": 'Regression: only_header',
"x-error": yaml.parser.ParserError,
},
{
"in": [ '---', '---', '...' ],
"name": 'Regression: two_header',
"error": yaml.composer.ComposerError,
},
{
"out": None,
"in": [ '--- ~', '...' ],
"name": 'Regression: one_undef'
},
{
"out": None,
"in": [ '--- ~', '...' ],
"name": 'Regression: one_undef2'
},
{
"in": [ '--- ~', '---', '...' ],
"name": 'Regression: two_undef',
"error": yaml.composer.ComposerError,
},
{
"out": 'foo',
"in": [ '--- foo', '...' ],
"name": 'Regression: one_scalar',
},
{
"out": 'foo',
"in": [ '--- foo', '...' ],
"name": 'Regression: one_scalar2',
},
{
"in": [ '--- foo', '--- bar', '...' ],
"name": 'Regression: two_scalar',
"error": yaml.composer.ComposerError,
},
{
"out": ['foo'],
"in": [ '---', '- foo', '...' ],
"name": 'Regression: one_list1'
},
{
"out": [ 'foo', 'bar' ],
"in": [ '---', '- foo', '- bar', '...' ],
"name": 'Regression: one_list2'
},
{
"out": [ None, 'bar' ],
"in": [ '---', '- ~', '- bar', '...' ],
"name": 'Regression: one_listundef'
},
{
"out": { 'foo': 'bar' },
"in": [ '---', 'foo: bar', '...' ],
"name": 'Regression: one_hash1'
},
{
"out": {
"foo": 'bar',
"this": None
},
"in": [ '---', 'foo: bar', 'this: ~', '...' ],
"name": 'Regression: one_hash2'
},
{
"out": { 'foo': [ 'bar', None, 'baz' ] },
"in": [ '---', 'foo:', ' - bar', ' - ~', ' - baz', '...' ],
"name": 'Regression: array_in_hash'
},
{
"out": {
"bar": { 'foo': 'bar' },
"foo": None
},
"in": [ '---', 'foo: ~', 'bar:', ' foo: bar', '...' ],
"name": 'Regression: hash_in_hash'
},
{
"out": [
{
"foo": None,
"this": 'that'
},
'foo', None,
{
"foo": 'bar',
"this": 'that'
}
],
"in": [
'---',
'-',
' foo: ~',
' this: that',
'- foo',
'- ~',
'-',
' foo: bar',
' this: that',
'...'
],
"name": 'Regression: hash_in_array'
},
{
"out": ['foo'],
"in": [ '---', '- \'foo\'', '...' ],
"name": 'Regression: single_quote1'
},
{
"out": [' '],
"in": [ '---', '- \' \'', '...' ],
"name": 'Regression: single_spaces'
},
{
"out": [''],
"in": [ '---', '- \'\'', '...' ],
"name": 'Regression: single_null'
},
{
"out": ' ',
"in": [ '--- " "', '...' ],
"name": 'Regression: only_spaces'
},
{
"out": [
None,
{
"foo": 'bar',
"this": 'that'
},
'baz'
],
"in":
[ '---', '- ~', '- foo: bar', ' this: that', '- baz', '...' ],
"name": 'Regression: inline_nested_hash'
},
{
"name": "Unprintables",
"in": [
"- \"\\x00\\x01\\x02\\x03\\x04\\x05\\x06\\a\\x08\\t\\n\\v\\f\\r\\x0e\\x0f\"",
"- \"\\x10\\x11\\x12\\x13\\x14\\x15\\x16\\x17\\x18\\x19\\x1a\\e\\x1c\\x1d\\x1e\\x1f\"",
"- \" !\\\"#$%&'()*+,-./\"",
"- 0123456789:;<=>?",
"- '\@ABCDEFGHIJKLMNO'",
"- 'PQRSTUVWXYZ[\\]^_'",
"- '`abcdefghijklmno'",
r"- 'pqrstuvwxyz{|}~\177'",
"- \\200\\201\\202\\203\\204\\205\\206\\207\\210\\211\\212\\213\\214\\215\\216\\217",
"- \\220\\221\\222\\223\\224\\225\\226\\227\\230\\231\\232\\233\\234\\235\\236\\237",
"- \\240\\241\\242\\243\\244\\245\\246\\247\\250\\251\\252\\253\\254\\255\\256\\257",
"- \\260\\261\\262\\263\\264\\265\\266\\267\\270\\271\\272\\273\\274\\275\\276\\277",
"- \\300\\301\\302\\303\\304\\305\\306\\307\\310\\311\\312\\313\\314\\315\\316\\317",
"- \\320\\321\\322\\323\\324\\325\\326\\327\\330\\331\\332\\333\\334\\335\\336\\337",
"- \\340\\341\\342\\343\\344\\345\\346\\347\\350\\351\\352\\353\\354\\355\\356\\357",
"- \\360\\361\\362\\363\\364\\365\\366\\367\\370\\371\\372\\373\\374\\375\\376\\377",
"..."
],
"out": [
"\0\1\2\3\4\5\6\a\b\t\n\13\f\r\16\17",
"\20\21\22\23\24\25\26\27\30\31\32\33\34\35\36\37",
" !\"#$%&'()*+,-./",
"0123456789:;<=>?",
"\@ABCDEFGHIJKLMNO",
"PQRSTUVWXYZ[\\]^_",
"`abcdefghijklmno",
r"pqrstuvwxyz{|}~\177",
r"\200\201\202\203\204\205\206\207\210\211\212\213\214\215\216\217",
r"\220\221\222\223\224\225\226\227\230\231\232\233\234\235\236\237",
r"\240\241\242\243\244\245\246\247\250\251\252\253\254\255\256\257",
r"\260\261\262\263\264\265\266\267\270\271\272\273\274\275\276\277",
r"\300\301\302\303\304\305\306\307\310\311\312\313\314\315\316\317",
r"\320\321\322\323\324\325\326\327\330\331\332\333\334\335\336\337",
r"\340\341\342\343\344\345\346\347\350\351\352\353\354\355\356\357",
r"\360\361\362\363\364\365\366\367\370\371\372\373\374\375\376\377"
]
},
{
"name": 'Quoted hash keys',
"in": [
'---', ' "quoted": Magic!', ' "\\n\\t": newline, tab', '...',
],
"out": {
"quoted": 'Magic!',
"\n\t": 'newline, tab',
},
},
]
class TestReader(unittest.TestCase): # IGNORE:C0111
pass
test.generate_testsuite(test_data_list, TestReader, yamlish.load)
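# Reading of the call above (generate_testsuite's contract is not documented
# here): each entry in test_data_list becomes one test method on TestReader,
# feeding the joined "in" lines to yamlish.load and comparing the result to
# "out", or expecting the exception named by "error"; "skip" entries are
# left out.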
if __name__ == "__main__":
unittest.main()
|
mit
|
dimroc/tensorflow-mnist-tutorial
|
lib/python3.6/site-packages/tensorflow/core/protobuf/tensorflow_server_pb2.py
|
6
|
8845
|
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: tensorflow/core/protobuf/tensorflow_server.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from tensorflow.core.protobuf import config_pb2 as tensorflow_dot_core_dot_protobuf_dot_config__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='tensorflow/core/protobuf/tensorflow_server.proto',
package='tensorflow',
syntax='proto3',
serialized_pb=_b('\n0tensorflow/core/protobuf/tensorflow_server.proto\x12\ntensorflow\x1a%tensorflow/core/protobuf/config.proto\"r\n\x06JobDef\x12\x0c\n\x04name\x18\x01 \x01(\t\x12,\n\x05tasks\x18\x02 \x03(\x0b\x32\x1d.tensorflow.JobDef.TasksEntry\x1a,\n\nTasksEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"-\n\nClusterDef\x12\x1f\n\x03job\x18\x01 \x03(\x0b\x32\x12.tensorflow.JobDef\"\xa5\x01\n\tServerDef\x12\'\n\x07\x63luster\x18\x01 \x01(\x0b\x32\x16.tensorflow.ClusterDef\x12\x10\n\x08job_name\x18\x02 \x01(\t\x12\x12\n\ntask_index\x18\x03 \x01(\x05\x12\x37\n\x16\x64\x65\x66\x61ult_session_config\x18\x04 \x01(\x0b\x32\x17.tensorflow.ConfigProto\x12\x10\n\x08protocol\x18\x05 \x01(\tB/\n\x1aorg.tensorflow.distruntimeB\x0cServerProtosP\x01\xf8\x01\x01\x62\x06proto3')
,
dependencies=[tensorflow_dot_core_dot_protobuf_dot_config__pb2.DESCRIPTOR,])
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
_JOBDEF_TASKSENTRY = _descriptor.Descriptor(
name='TasksEntry',
full_name='tensorflow.JobDef.TasksEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='tensorflow.JobDef.TasksEntry.key', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='value', full_name='tensorflow.JobDef.TasksEntry.value', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=173,
serialized_end=217,
)
_JOBDEF = _descriptor.Descriptor(
name='JobDef',
full_name='tensorflow.JobDef',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='tensorflow.JobDef.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='tasks', full_name='tensorflow.JobDef.tasks', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[_JOBDEF_TASKSENTRY, ],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=103,
serialized_end=217,
)
_CLUSTERDEF = _descriptor.Descriptor(
name='ClusterDef',
full_name='tensorflow.ClusterDef',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='job', full_name='tensorflow.ClusterDef.job', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=219,
serialized_end=264,
)
_SERVERDEF = _descriptor.Descriptor(
name='ServerDef',
full_name='tensorflow.ServerDef',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='cluster', full_name='tensorflow.ServerDef.cluster', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='job_name', full_name='tensorflow.ServerDef.job_name', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='task_index', full_name='tensorflow.ServerDef.task_index', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='default_session_config', full_name='tensorflow.ServerDef.default_session_config', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='protocol', full_name='tensorflow.ServerDef.protocol', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=267,
serialized_end=432,
)
_JOBDEF_TASKSENTRY.containing_type = _JOBDEF
_JOBDEF.fields_by_name['tasks'].message_type = _JOBDEF_TASKSENTRY
_CLUSTERDEF.fields_by_name['job'].message_type = _JOBDEF
_SERVERDEF.fields_by_name['cluster'].message_type = _CLUSTERDEF
_SERVERDEF.fields_by_name['default_session_config'].message_type = tensorflow_dot_core_dot_protobuf_dot_config__pb2._CONFIGPROTO
DESCRIPTOR.message_types_by_name['JobDef'] = _JOBDEF
DESCRIPTOR.message_types_by_name['ClusterDef'] = _CLUSTERDEF
DESCRIPTOR.message_types_by_name['ServerDef'] = _SERVERDEF
JobDef = _reflection.GeneratedProtocolMessageType('JobDef', (_message.Message,), dict(
TasksEntry = _reflection.GeneratedProtocolMessageType('TasksEntry', (_message.Message,), dict(
DESCRIPTOR = _JOBDEF_TASKSENTRY,
__module__ = 'tensorflow.core.protobuf.tensorflow_server_pb2'
# @@protoc_insertion_point(class_scope:tensorflow.JobDef.TasksEntry)
))
,
DESCRIPTOR = _JOBDEF,
__module__ = 'tensorflow.core.protobuf.tensorflow_server_pb2'
# @@protoc_insertion_point(class_scope:tensorflow.JobDef)
))
_sym_db.RegisterMessage(JobDef)
_sym_db.RegisterMessage(JobDef.TasksEntry)
ClusterDef = _reflection.GeneratedProtocolMessageType('ClusterDef', (_message.Message,), dict(
DESCRIPTOR = _CLUSTERDEF,
__module__ = 'tensorflow.core.protobuf.tensorflow_server_pb2'
# @@protoc_insertion_point(class_scope:tensorflow.ClusterDef)
))
_sym_db.RegisterMessage(ClusterDef)
ServerDef = _reflection.GeneratedProtocolMessageType('ServerDef', (_message.Message,), dict(
DESCRIPTOR = _SERVERDEF,
__module__ = 'tensorflow.core.protobuf.tensorflow_server_pb2'
# @@protoc_insertion_point(class_scope:tensorflow.ServerDef)
))
_sym_db.RegisterMessage(ServerDef)
DESCRIPTOR.has_options = True
DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\032org.tensorflow.distruntimeB\014ServerProtosP\001\370\001\001'))
_JOBDEF_TASKSENTRY.has_options = True
_JOBDEF_TASKSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
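# Usage sketch (values are hypothetical); the generated classes behave like
# ordinary protobuf messages:
#     server_def = ServerDef(job_name='worker', task_index=0)
#     blob = server_def.SerializeToString()
#     restored = ServerDef.FromString(blob)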
# @@protoc_insertion_point(module_scope)
|
apache-2.0
|
flavour/porto
|
modules/savage/graph/axes.py
|
24
|
3672
|
from ..utils.struct import Vector as V
from ..graphics.group import Group
from ..graphics.shapes import Text
from copy import deepcopy
class Axis (Group):
def __init__ (self, **attr):
Group.__init__ (self, **attr)
self.ticks = []
self.labels = []
self.incr = None
if attr.has_key ('inf'):
self.inf = attr['inf']
else:
self.inf = 0
if attr.has_key ('sup'):
self.sup = attr['sup']
else:
self.sup = 0
if attr.has_key ('lower'):
self.lower = attr['lower']
else:
self.lower = 0
if attr.has_key ('upper'):
self.upper = attr['upper']
else:
self.upper = 0
if attr.has_key ('textProperties'):
self.textProperties = attr['textProperties']
else:
            self.textProperties = {}
def bounds (self, lower, upper):
self.lower = lower
self.upper = upper
def increment (self, incr=None):
self.incr = incr
def findIncrement (self):
numberRange = self.upper - self.lower
if numberRange == 0:
raise RuntimeError ('upper == lower')
incr = 0
div = 1.0
if numberRange < 1:
while numberRange / pow (10, incr) < 1:
incr -= 1
#incr += 1
elif numberRange > 1:
while numberRange / pow (10, incr) > 1:
incr += 1
incr -= 1
ticks = self.tickPositions (pow (10, incr) / div)
if len (ticks) < 2:
incr -= 1
elif len (ticks) < 5:
div = 2
return float (pow (10, incr)) / div
def setText (self, text=None):
if text:
self.labels = text
def tickPositions (self, incr):
current = 0
ticks = []
while current > self.lower:
current -= incr
while current < self.lower:
current += incr
while current <= self.upper:
ticks.append (current)
current += incr
return ticks
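    # Sketch: with lower=0.3, upper=2.0 and incr=0.5, tickPositions() steps
    # up to the first multiple of incr at or above lower and returns
    # [0.5, 1.0, 1.5, 2.0].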
def createTicks (self, tickPos = None):
if not tickPos:
if not self.incr:
self.incr = self.findIncrement ()
ticks = self.tickPositions (self.incr)
else:
ticks = tickPos
for tick in ticks:
per = ((tick - self.lower) / (self.upper - self.lower))
val = ((1 - per) * self.inf) + (per * self.sup)
self.ticks.append (val)
self.labels.append (str (tick))
return deepcopy (self.ticks)
def drawTicks (self):
raise RuntimeError ("Abstract base class does not have method")
def move (self, dx, dy):
for child in self:
child.move (dx, dy)
class XAxis (Axis):
def __init__ (self, **attr):
Axis.__init__ (self, **attr)
if attr.has_key ('y'):
self.y = attr['y']
else:
self.y = 0
def drawTicks (self):
for pos, label in zip (self.ticks, self.labels):
t = Text(text = str(label), x = pos, y = self.y, **self.textProperties)
self.draw (t)
class YAxis (Axis):
def __init__ (self, **attr):
Axis.__init__ (self, **attr)
if attr.has_key ('x'):
self.x = attr['x']
else:
self.x = 0
def drawTicks (self):
width = []
for pos, label in zip (self.ticks, self.labels):
t = Text(text = str(label), y = pos, x = self.x, **self.textProperties)
width.append (t.width)
self.draw (t)
self.width = max (width)
|
mit
|
hergin/DelTa
|
mt/ptcal/pytcore/util/infinity.py
|
1
|
3006
|
# -*- coding: Latin-1 -*-
## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ##
## ##
# infinity.py
# --------------------------------
# Copyright (c) 2005
# Jean-Sébastien BOLDUC
# Hans Vangheluwe
# McGill University (Montréal)
# --------------------------------
#
# - Singleton class "Inf" and unique instance "INFINITY" ---
# stands for infinity (to use in time advance function)
#
## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ##
## ##
## INFINITY OBJECT --- ADDED 04/04/2005
## more comparison operators -- HV 12/11/2006
##
## mul and rmul added -- Eugene 14/11/2006
## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ##
## ##
class Infty(object):
"""Singleton class: the single instance "INFINITY" stands for infinity."""
__instantiated = False
def __init__(self):
if self.__instantiated:
raise NotImplementedError, "singleton class already instantiated"
        self.__instantiated = True
def __deepcopy__(self, memo):
return self
def __add__(self, other):
""" INFINITY + x = INFINITY """
return self
def __sub__(self, other):
""" INFINITY - x = INFINITY (if x != INF), or NaN (if x == INFINITY) """
if other == self:
raise ValueError, "INFINITY - INFINITY gives NaN (not defined)"
return self
def __mul__(self, other):
""" INFINITY * x = INFINITY """
return self
def __radd__(self, other):
""" x + INFINITY = INFINITY """
return self
    def __rsub__(self, other):
        """ x - INFINITY always raises: the result would be MINUS_INFINITY (not representable here), or NaN (if x == INFINITY) """
        if other == self:
            raise ValueError, "INFINITY - INFINITY gives NaN (not defined)"
        raise ValueError, "x - INFINITY gives MINUS_INFINITY (not defined)"
def __rmul__(self, other):
""" x * INFINITY = INFINITY """
return self
def __abs__(self):
""" abs(INFINITY) = INFINITY -- absolute value """
return self
# def __cmp__(self, other):
# if other is self:
# return 0
# else:
# return 1
def __eq__(self, other):
if other is self:
return True
else:
return False
def __ne__(self, other):
if other is self:
return False
else:
return True
def __lt__(self, other):
return False
def __le__(self, other):
if other is self:
return True
else:
return False
def __gt__(self, other):
if other is self:
return False
else:
return True
def __ge__(self, other):
return True
def __str__(self):
return "+INFINITY"
# Instantiate singleton:
INFINITY = Infty()
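# Usage sketch:
#     INFINITY + 5, 5 + INFINITY and 2 * INFINITY all return INFINITY;
#     5 < INFINITY and INFINITY >= 5 are both True;
#     INFINITY - INFINITY raises ValueError, since NaN is not representable here.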
|
gpl-3.0
|
eb0t/jac
|
contrib/spendfrom/spendfrom.py
|
792
|
10053
|
#!/usr/bin/env python
#
# Use the raw transactions API to spend bitcoins received on particular addresses,
# and send any change back to that same address.
#
# Example usage:
# spendfrom.py # Lists available funds
# spendfrom.py --from=ADDRESS --to=ADDRESS --amount=11.00
#
# Assumes it will talk to a bitcoind or Bitcoin-Qt running
# on localhost.
#
# Depends on jsonrpc
#
from decimal import *
import getpass
import math
import os
import os.path
import platform
import sys
import time
from jsonrpc import ServiceProxy, json
BASE_FEE=Decimal("0.001")
def check_json_precision():
"""Make sure json library being used does not lose precision converting BTC values"""
n = Decimal("20000000.00000003")
satoshis = int(json.loads(json.dumps(float(n)))*1.0e8)
if satoshis != 2000000000000003:
raise RuntimeError("JSON encode/decode loses precision")
def determine_db_dir():
"""Return the default location of the bitcoin data directory"""
if platform.system() == "Darwin":
return os.path.expanduser("~/Library/Application Support/Bitcoin/")
elif platform.system() == "Windows":
return os.path.join(os.environ['APPDATA'], "Bitcoin")
return os.path.expanduser("~/.bitcoin")
def read_bitcoin_config(dbdir):
"""Read the bitcoin.conf file from dbdir, returns dictionary of settings"""
from ConfigParser import SafeConfigParser
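    # bitcoin.conf has no [section] headers, but SafeConfigParser requires one;
    # FakeSecHead below injects a dummy "[all]" header and strips inline "#"
    # comments as the file is read.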
class FakeSecHead(object):
def __init__(self, fp):
self.fp = fp
self.sechead = '[all]\n'
def readline(self):
if self.sechead:
try: return self.sechead
finally: self.sechead = None
else:
s = self.fp.readline()
if s.find('#') != -1:
s = s[0:s.find('#')].strip() +"\n"
return s
config_parser = SafeConfigParser()
config_parser.readfp(FakeSecHead(open(os.path.join(dbdir, "bitcoin.conf"))))
return dict(config_parser.items("all"))
def connect_JSON(config):
"""Connect to a bitcoin JSON-RPC server"""
testnet = config.get('testnet', '0')
testnet = (int(testnet) > 0) # 0/1 in config file, convert to True/False
if not 'rpcport' in config:
config['rpcport'] = 19332 if testnet else 9332
    connect = "http://%s:%s@127.0.0.1:%s"%(config['rpcuser'], config['rpcpassword'], config['rpcport'])
try:
result = ServiceProxy(connect)
# ServiceProxy is lazy-connect, so send an RPC command mostly to catch connection errors,
# but also make sure the bitcoind we're talking to is/isn't testnet:
if result.getmininginfo()['testnet'] != testnet:
sys.stderr.write("RPC server at "+connect+" testnet setting mismatch\n")
sys.exit(1)
return result
except:
sys.stderr.write("Error connecting to RPC server at "+connect+"\n")
sys.exit(1)
def unlock_wallet(bitcoind):
info = bitcoind.getinfo()
if 'unlocked_until' not in info:
return True # wallet is not encrypted
t = int(info['unlocked_until'])
if t <= time.time():
try:
passphrase = getpass.getpass("Wallet is locked; enter passphrase: ")
bitcoind.walletpassphrase(passphrase, 5)
except:
sys.stderr.write("Wrong passphrase\n")
info = bitcoind.getinfo()
return int(info['unlocked_until']) > time.time()
def list_available(bitcoind):
address_summary = dict()
address_to_account = dict()
for info in bitcoind.listreceivedbyaddress(0):
address_to_account[info["address"]] = info["account"]
unspent = bitcoind.listunspent(0)
for output in unspent:
# listunspent doesn't give addresses, so:
rawtx = bitcoind.getrawtransaction(output['txid'], 1)
vout = rawtx["vout"][output['vout']]
pk = vout["scriptPubKey"]
# This code only deals with ordinary pay-to-bitcoin-address
# or pay-to-script-hash outputs right now; anything exotic is ignored.
if pk["type"] != "pubkeyhash" and pk["type"] != "scripthash":
continue
address = pk["addresses"][0]
if address in address_summary:
address_summary[address]["total"] += vout["value"]
address_summary[address]["outputs"].append(output)
else:
address_summary[address] = {
"total" : vout["value"],
"outputs" : [output],
"account" : address_to_account.get(address, "")
}
return address_summary
def select_coins(needed, inputs):
# Feel free to improve this, this is good enough for my simple needs:
outputs = []
have = Decimal("0.0")
n = 0
while have < needed and n < len(inputs):
outputs.append({ "txid":inputs[n]["txid"], "vout":inputs[n]["vout"]})
have += inputs[n]["amount"]
n += 1
return (outputs, have-needed)
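# Sketch: given two candidate inputs worth Decimal("0.6") and Decimal("0.5"),
# select_coins(Decimal("1.0"), inputs) greedily takes both outpoints and
# reports Decimal("0.1") of change.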
def create_tx(bitcoind, fromaddresses, toaddress, amount, fee):
all_coins = list_available(bitcoind)
total_available = Decimal("0.0")
needed = amount+fee
potential_inputs = []
for addr in fromaddresses:
if addr not in all_coins:
continue
potential_inputs.extend(all_coins[addr]["outputs"])
total_available += all_coins[addr]["total"]
if total_available < needed:
sys.stderr.write("Error, only %f BTC available, need %f\n"%(total_available, needed));
sys.exit(1)
#
# Note:
# Python's json/jsonrpc modules have inconsistent support for Decimal numbers.
# Instead of wrestling with getting json.dumps() (used by jsonrpc) to encode
# Decimals, I'm casting amounts to float before sending them to bitcoind.
#
outputs = { toaddress : float(amount) }
(inputs, change_amount) = select_coins(needed, potential_inputs)
if change_amount > BASE_FEE: # don't bother with zero or tiny change
change_address = fromaddresses[-1]
if change_address in outputs:
outputs[change_address] += float(change_amount)
else:
outputs[change_address] = float(change_amount)
rawtx = bitcoind.createrawtransaction(inputs, outputs)
signed_rawtx = bitcoind.signrawtransaction(rawtx)
if not signed_rawtx["complete"]:
sys.stderr.write("signrawtransaction failed\n")
sys.exit(1)
txdata = signed_rawtx["hex"]
return txdata
def compute_amount_in(bitcoind, txinfo):
result = Decimal("0.0")
for vin in txinfo['vin']:
in_info = bitcoind.getrawtransaction(vin['txid'], 1)
vout = in_info['vout'][vin['vout']]
result = result + vout['value']
return result
def compute_amount_out(txinfo):
result = Decimal("0.0")
for vout in txinfo['vout']:
result = result + vout['value']
return result
def sanity_test_fee(bitcoind, txdata_hex, max_fee):
class FeeError(RuntimeError):
pass
try:
txinfo = bitcoind.decoderawtransaction(txdata_hex)
total_in = compute_amount_in(bitcoind, txinfo)
        total_out = compute_amount_out(txinfo)
        fee = total_in - total_out
        if fee > max_fee:
            raise FeeError("Rejecting transaction, unreasonable fee of "+str(fee))
        tx_size = len(txdata_hex)//2
        kb = tx_size//1000  # integer division rounds down
if kb > 1 and fee < BASE_FEE:
raise FeeError("Rejecting no-fee transaction, larger than 1000 bytes")
if total_in < 0.01 and fee < BASE_FEE:
raise FeeError("Rejecting no-fee, tiny-amount transaction")
# Exercise for the reader: compute transaction priority, and
# warn if this is a very-low-priority transaction
except FeeError as err:
        sys.stderr.write(str(err)+"\n")
sys.exit(1)
def main():
import optparse
parser = optparse.OptionParser(usage="%prog [options]")
parser.add_option("--from", dest="fromaddresses", default=None,
help="addresses to get bitcoins from")
parser.add_option("--to", dest="to", default=None,
help="address to get send bitcoins to")
parser.add_option("--amount", dest="amount", default=None,
help="amount to send")
parser.add_option("--fee", dest="fee", default="0.0",
help="fee to include")
parser.add_option("--datadir", dest="datadir", default=determine_db_dir(),
help="location of bitcoin.conf file with RPC username/password (default: %default)")
parser.add_option("--testnet", dest="testnet", default=False, action="store_true",
help="Use the test network")
parser.add_option("--dry_run", dest="dry_run", default=False, action="store_true",
help="Don't broadcast the transaction, just create and print the transaction data")
(options, args) = parser.parse_args()
check_json_precision()
config = read_bitcoin_config(options.datadir)
if options.testnet: config['testnet'] = True
bitcoind = connect_JSON(config)
if options.amount is None:
address_summary = list_available(bitcoind)
        for address,info in address_summary.items():
n_transactions = len(info['outputs'])
if n_transactions > 1:
print("%s %.8f %s (%d transactions)"%(address, info['total'], info['account'], n_transactions))
else:
print("%s %.8f %s"%(address, info['total'], info['account']))
else:
fee = Decimal(options.fee)
amount = Decimal(options.amount)
while unlock_wallet(bitcoind) == False:
pass # Keep asking for passphrase until they get it right
txdata = create_tx(bitcoind, options.fromaddresses.split(","), options.to, amount, fee)
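        # refuse to broadcast if the implied fee exceeds 1% of the amount being sent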
sanity_test_fee(bitcoind, txdata, amount*Decimal("0.01"))
if options.dry_run:
print(txdata)
else:
txid = bitcoind.sendrawtransaction(txdata)
print(txid)
if __name__ == '__main__':
main()
|
mit
|
fta2012/three.js
|
utils/converters/obj/split_obj.py
|
369
|
12687
|
"""Split single OBJ model into mutliple OBJ files by materials
-------------------------------------
How to use
-------------------------------------
python split_obj.py -i infile.obj -o outfile
Will generate:
outfile_000.obj
outfile_001.obj
...
outfile_XXX.obj
-------------------------------------
Parser based on format description
-------------------------------------
http://en.wikipedia.org/wiki/Obj
------
Author
------
AlteredQualia http://alteredqualia.com
"""
import fileinput
import operator
import random
import os.path
import getopt
import sys
import struct
import math
import glob
# #####################################################
# Configuration
# #####################################################
TRUNCATE = False
SCALE = 1.0
# #####################################################
# Templates
# #####################################################
TEMPLATE_OBJ = u"""\
################################
# OBJ generated by split_obj.py
################################
# Faces: %(nfaces)d
# Vertices: %(nvertices)d
# Normals: %(nnormals)d
# UVs: %(nuvs)d
################################
# vertices
%(vertices)s
# normals
%(normals)s
# uvs
%(uvs)s
# faces
%(faces)s
"""
TEMPLATE_VERTEX = "v %f %f %f"
TEMPLATE_VERTEX_TRUNCATE = "v %d %d %d"
TEMPLATE_NORMAL = "vn %.5g %.5g %.5g"
TEMPLATE_UV = "vt %.5g %.5g"
TEMPLATE_FACE3_V = "f %d %d %d"
TEMPLATE_FACE4_V = "f %d %d %d %d"
TEMPLATE_FACE3_VT = "f %d/%d %d/%d %d/%d"
TEMPLATE_FACE4_VT = "f %d/%d %d/%d %d/%d %d/%d"
TEMPLATE_FACE3_VN = "f %d//%d %d//%d %d//%d"
TEMPLATE_FACE4_VN = "f %d//%d %d//%d %d//%d %d//%d"
TEMPLATE_FACE3_VTN = "f %d/%d/%d %d/%d/%d %d/%d/%d"
TEMPLATE_FACE4_VTN = "f %d/%d/%d %d/%d/%d %d/%d/%d %d/%d/%d"
# #####################################################
# Utils
# #####################################################
def file_exists(filename):
"""Return true if file exists and is accessible for reading.
Should be safer than just testing for existence due to links and
permissions magic on Unix filesystems.
@rtype: boolean
"""
try:
f = open(filename, 'r')
f.close()
return True
except IOError:
return False
# #####################################################
# OBJ parser
# #####################################################
def parse_vertex(text):
"""Parse text chunk specifying single vertex.
Possible formats:
vertex index
vertex index / texture index
vertex index / texture index / normal index
vertex index / / normal index
"""
v = 0
t = 0
n = 0
chunks = text.split("/")
v = int(chunks[0])
if len(chunks) > 1:
if chunks[1]:
t = int(chunks[1])
if len(chunks) > 2:
if chunks[2]:
n = int(chunks[2])
return { 'v': v, 't': t, 'n': n }
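# Examples (illustrative): parse_vertex("5/7/3") -> {'v': 5, 't': 7, 'n': 3};
# parse_vertex("5//3") -> {'v': 5, 't': 0, 'n': 3} (absent indices stay 0).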
def parse_obj(fname):
"""Parse OBJ file.
"""
vertices = []
normals = []
uvs = []
faces = []
materials = {}
mcounter = 0
mcurrent = 0
mtllib = ""
# current face state
group = 0
object = 0
smooth = 0
for line in fileinput.input(fname):
chunks = line.split()
if len(chunks) > 0:
# Vertices as (x,y,z) coordinates
# v 0.123 0.234 0.345
if chunks[0] == "v" and len(chunks) == 4:
x = float(chunks[1])
y = float(chunks[2])
z = float(chunks[3])
vertices.append([x,y,z])
# Normals in (x,y,z) form; normals might not be unit
# vn 0.707 0.000 0.707
if chunks[0] == "vn" and len(chunks) == 4:
x = float(chunks[1])
y = float(chunks[2])
z = float(chunks[3])
normals.append([x,y,z])
# Texture coordinates in (u,v[,w]) coordinates, w is optional
# vt 0.500 -1.352 [0.234]
if chunks[0] == "vt" and len(chunks) >= 3:
u = float(chunks[1])
v = float(chunks[2])
w = 0
if len(chunks)>3:
w = float(chunks[3])
uvs.append([u,v,w])
# Face
if chunks[0] == "f" and len(chunks) >= 4:
vertex_index = []
uv_index = []
normal_index = []
for v in chunks[1:]:
vertex = parse_vertex(v)
if vertex['v']:
vertex_index.append(vertex['v'])
if vertex['t']:
uv_index.append(vertex['t'])
if vertex['n']:
normal_index.append(vertex['n'])
faces.append({
'vertex':vertex_index,
'uv':uv_index,
'normal':normal_index,
'material':mcurrent,
'group':group,
'object':object,
'smooth':smooth,
})
# Group
if chunks[0] == "g" and len(chunks) == 2:
group = chunks[1]
# Object
if chunks[0] == "o" and len(chunks) == 2:
object = chunks[1]
# Materials definition
if chunks[0] == "mtllib" and len(chunks) == 2:
mtllib = chunks[1]
# Material
if chunks[0] == "usemtl" and len(chunks) == 2:
material = chunks[1]
                if material not in materials:
mcurrent = mcounter
materials[material] = mcounter
mcounter += 1
else:
mcurrent = materials[material]
# Smooth shading
if chunks[0] == "s" and len(chunks) == 2:
smooth = chunks[1]
return faces, vertices, uvs, normals, materials, mtllib
# #############################################################################
# API - Breaker
# #############################################################################
def break_obj(infile, outfile):
"""Break infile.obj to outfile.obj
"""
if not file_exists(infile):
print "Couldn't find [%s]" % infile
return
faces, vertices, uvs, normals, materials, mtllib = parse_obj(infile)
# sort faces by materials
chunks = {}
for face in faces:
material = face["material"]
        if material not in chunks:
chunks[material] = {"faces": [], "vertices": set(), "normals": set(), "uvs": set()}
chunks[material]["faces"].append(face)
# extract unique vertex / normal / uv indices used per chunk
for material in chunks:
chunk = chunks[material]
for face in chunk["faces"]:
for i in face["vertex"]:
chunk["vertices"].add(i)
for i in face["normal"]:
chunk["normals"].add(i)
for i in face["uv"]:
chunk["uvs"].add(i)
# generate new OBJs
for mi, material in enumerate(chunks):
chunk = chunks[material]
# generate separate vertex / normal / uv index lists for each chunk
# (including mapping from original to new indices)
# get well defined order
new_vertices = list(chunk["vertices"])
new_normals = list(chunk["normals"])
new_uvs = list(chunk["uvs"])
# map original => new indices
vmap = {}
for i, v in enumerate(new_vertices):
vmap[v] = i + 1
nmap = {}
for i, n in enumerate(new_normals):
nmap[n] = i + 1
tmap = {}
for i, t in enumerate(new_uvs):
tmap[t] = i + 1
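        # e.g. (illustrative): if this chunk references original vertices {12, 40, 7},
        # vmap could be {12: 1, 40: 2, 7: 3} -- OBJ face indices are 1-based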
# vertices
pieces = []
for i in new_vertices:
vertex = vertices[i-1]
txt = TEMPLATE_VERTEX % (vertex[0], vertex[1], vertex[2])
pieces.append(txt)
str_vertices = "\n".join(pieces)
# normals
pieces = []
for i in new_normals:
normal = normals[i-1]
txt = TEMPLATE_NORMAL % (normal[0], normal[1], normal[2])
pieces.append(txt)
str_normals = "\n".join(pieces)
# uvs
pieces = []
for i in new_uvs:
uv = uvs[i-1]
txt = TEMPLATE_UV % (uv[0], uv[1])
pieces.append(txt)
str_uvs = "\n".join(pieces)
# faces
pieces = []
for face in chunk["faces"]:
txt = ""
fv = face["vertex"]
fn = face["normal"]
ft = face["uv"]
if len(fv) == 3:
va = vmap[fv[0]]
vb = vmap[fv[1]]
vc = vmap[fv[2]]
if len(fn) == 3 and len(ft) == 3:
na = nmap[fn[0]]
nb = nmap[fn[1]]
nc = nmap[fn[2]]
ta = tmap[ft[0]]
tb = tmap[ft[1]]
tc = tmap[ft[2]]
txt = TEMPLATE_FACE3_VTN % (va, ta, na, vb, tb, nb, vc, tc, nc)
elif len(fn) == 3:
na = nmap[fn[0]]
nb = nmap[fn[1]]
nc = nmap[fn[2]]
txt = TEMPLATE_FACE3_VN % (va, na, vb, nb, vc, nc)
elif len(ft) == 3:
ta = tmap[ft[0]]
tb = tmap[ft[1]]
tc = tmap[ft[2]]
txt = TEMPLATE_FACE3_VT % (va, ta, vb, tb, vc, tc)
else:
txt = TEMPLATE_FACE3_V % (va, vb, vc)
elif len(fv) == 4:
va = vmap[fv[0]]
vb = vmap[fv[1]]
vc = vmap[fv[2]]
vd = vmap[fv[3]]
if len(fn) == 4 and len(ft) == 4:
na = nmap[fn[0]]
nb = nmap[fn[1]]
nc = nmap[fn[2]]
nd = nmap[fn[3]]
ta = tmap[ft[0]]
tb = tmap[ft[1]]
tc = tmap[ft[2]]
td = tmap[ft[3]]
txt = TEMPLATE_FACE4_VTN % (va, ta, na, vb, tb, nb, vc, tc, nc, vd, td, nd)
elif len(fn) == 4:
na = nmap[fn[0]]
nb = nmap[fn[1]]
nc = nmap[fn[2]]
nd = nmap[fn[3]]
txt = TEMPLATE_FACE4_VN % (va, na, vb, nb, vc, nc, vd, nd)
elif len(ft) == 4:
ta = tmap[ft[0]]
tb = tmap[ft[1]]
tc = tmap[ft[2]]
td = tmap[ft[3]]
txt = TEMPLATE_FACE4_VT % (va, ta, vb, tb, vc, tc, vd, td)
else:
txt = TEMPLATE_FACE4_V % (va, vb, vc, vd)
pieces.append(txt)
str_faces = "\n".join(pieces)
# generate OBJ string
content = TEMPLATE_OBJ % {
"nfaces" : len(chunk["faces"]),
"nvertices" : len(new_vertices),
"nnormals" : len(new_normals),
"nuvs" : len(new_uvs),
"vertices" : str_vertices,
"normals" : str_normals,
"uvs" : str_uvs,
"faces" : str_faces
}
# write OBJ file
outname = "%s_%03d.obj" % (outfile, mi)
f = open(outname, "w")
f.write(content)
f.close()
# #############################################################################
# Helpers
# #############################################################################
def usage():
print "Usage: %s -i filename.obj -o prefix" % os.path.basename(sys.argv[0])
# #####################################################
# Main
# #####################################################
if __name__ == "__main__":
# get parameters from the command line
try:
opts, args = getopt.getopt(sys.argv[1:], "hi:o:x:", ["help", "input=", "output=", "truncatescale="])
except getopt.GetoptError:
usage()
sys.exit(2)
infile = outfile = ""
for o, a in opts:
if o in ("-h", "--help"):
usage()
sys.exit()
elif o in ("-i", "--input"):
infile = a
elif o in ("-o", "--output"):
outfile = a
elif o in ("-x", "--truncatescale"):
TRUNCATE = True
SCALE = float(a)
if infile == "" or outfile == "":
usage()
sys.exit(2)
print "Splitting [%s] into [%s_XXX.obj] ..." % (infile, outfile)
break_obj(infile, outfile)
|
mit
|
pdellaert/ansible
|
test/units/modules/network/fortios/test_fortios_dlp_sensor.py
|
21
|
10503
|
# Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <https://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
import pytest
from mock import ANY
from ansible.module_utils.network.fortios.fortios import FortiOSHandler
try:
from ansible.modules.network.fortios import fortios_dlp_sensor
except ImportError:
pytest.skip("Could not load required modules for testing", allow_module_level=True)
@pytest.fixture(autouse=True)
def connection_mock(mocker):
connection_class_mock = mocker.patch('ansible.modules.network.fortios.fortios_dlp_sensor.Connection')
return connection_class_mock
fos_instance = FortiOSHandler(connection_mock)
def test_dlp_sensor_creation(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
set_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
input_data = {
'username': 'admin',
'state': 'present',
'dlp_sensor': {
'comment': 'Comment.',
'dlp_log': 'enable',
'extended_log': 'enable',
'flow_based': 'enable',
'full_archive_proto': 'smtp',
'nac_quar_log': 'enable',
'name': 'default_name_9',
'options': 'test_value_10,',
'replacemsg_group': 'test_value_11',
'summary_proto': 'smtp'
},
'vdom': 'root'}
is_error, changed, response = fortios_dlp_sensor.fortios_dlp(input_data, fos_instance)
expected_data = {
'comment': 'Comment.',
'dlp-log': 'enable',
'extended-log': 'enable',
'flow-based': 'enable',
'full-archive-proto': 'smtp',
'nac-quar-log': 'enable',
'name': 'default_name_9',
'options': 'test_value_10,',
'replacemsg-group': 'test_value_11',
'summary-proto': 'smtp'
}
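    # the module under test is expected to rename the underscored Ansible arguments
    # (e.g. dlp_log) to the hyphenated FortiOS API field names (dlp-log) seen above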
set_method_mock.assert_called_with('dlp', 'sensor', data=expected_data, vdom='root')
schema_method_mock.assert_not_called()
assert not is_error
assert changed
assert response['status'] == 'success'
assert response['http_status'] == 200
def test_dlp_sensor_creation_fails(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
set_method_result = {'status': 'error', 'http_method': 'POST', 'http_status': 500}
set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
input_data = {
'username': 'admin',
'state': 'present',
'dlp_sensor': {
'comment': 'Comment.',
'dlp_log': 'enable',
'extended_log': 'enable',
'flow_based': 'enable',
'full_archive_proto': 'smtp',
'nac_quar_log': 'enable',
'name': 'default_name_9',
'options': 'test_value_10,',
'replacemsg_group': 'test_value_11',
'summary_proto': 'smtp'
},
'vdom': 'root'}
is_error, changed, response = fortios_dlp_sensor.fortios_dlp(input_data, fos_instance)
expected_data = {
'comment': 'Comment.',
'dlp-log': 'enable',
'extended-log': 'enable',
'flow-based': 'enable',
'full-archive-proto': 'smtp',
'nac-quar-log': 'enable',
'name': 'default_name_9',
'options': 'test_value_10,',
'replacemsg-group': 'test_value_11',
'summary-proto': 'smtp'
}
set_method_mock.assert_called_with('dlp', 'sensor', data=expected_data, vdom='root')
schema_method_mock.assert_not_called()
assert is_error
assert not changed
assert response['status'] == 'error'
assert response['http_status'] == 500
def test_dlp_sensor_removal(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
delete_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
delete_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.delete', return_value=delete_method_result)
input_data = {
'username': 'admin',
'state': 'absent',
'dlp_sensor': {
'comment': 'Comment.',
'dlp_log': 'enable',
'extended_log': 'enable',
'flow_based': 'enable',
'full_archive_proto': 'smtp',
'nac_quar_log': 'enable',
'name': 'default_name_9',
'options': 'test_value_10,',
'replacemsg_group': 'test_value_11',
'summary_proto': 'smtp'
},
'vdom': 'root'}
is_error, changed, response = fortios_dlp_sensor.fortios_dlp(input_data, fos_instance)
delete_method_mock.assert_called_with('dlp', 'sensor', mkey=ANY, vdom='root')
schema_method_mock.assert_not_called()
assert not is_error
assert changed
assert response['status'] == 'success'
assert response['http_status'] == 200
def test_dlp_sensor_deletion_fails(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
delete_method_result = {'status': 'error', 'http_method': 'POST', 'http_status': 500}
delete_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.delete', return_value=delete_method_result)
input_data = {
'username': 'admin',
'state': 'absent',
'dlp_sensor': {
'comment': 'Comment.',
'dlp_log': 'enable',
'extended_log': 'enable',
'flow_based': 'enable',
'full_archive_proto': 'smtp',
'nac_quar_log': 'enable',
'name': 'default_name_9',
'options': 'test_value_10,',
'replacemsg_group': 'test_value_11',
'summary_proto': 'smtp'
},
'vdom': 'root'}
is_error, changed, response = fortios_dlp_sensor.fortios_dlp(input_data, fos_instance)
delete_method_mock.assert_called_with('dlp', 'sensor', mkey=ANY, vdom='root')
schema_method_mock.assert_not_called()
assert is_error
assert not changed
assert response['status'] == 'error'
assert response['http_status'] == 500
def test_dlp_sensor_idempotent(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
set_method_result = {'status': 'error', 'http_method': 'DELETE', 'http_status': 404}
set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
input_data = {
'username': 'admin',
'state': 'present',
'dlp_sensor': {
'comment': 'Comment.',
'dlp_log': 'enable',
'extended_log': 'enable',
'flow_based': 'enable',
'full_archive_proto': 'smtp',
'nac_quar_log': 'enable',
'name': 'default_name_9',
'options': 'test_value_10,',
'replacemsg_group': 'test_value_11',
'summary_proto': 'smtp'
},
'vdom': 'root'}
is_error, changed, response = fortios_dlp_sensor.fortios_dlp(input_data, fos_instance)
expected_data = {
'comment': 'Comment.',
'dlp-log': 'enable',
'extended-log': 'enable',
'flow-based': 'enable',
'full-archive-proto': 'smtp',
'nac-quar-log': 'enable',
'name': 'default_name_9',
'options': 'test_value_10,',
'replacemsg-group': 'test_value_11',
'summary-proto': 'smtp'
}
set_method_mock.assert_called_with('dlp', 'sensor', data=expected_data, vdom='root')
schema_method_mock.assert_not_called()
assert not is_error
assert not changed
assert response['status'] == 'error'
assert response['http_status'] == 404
def test_dlp_sensor_filter_foreign_attributes(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
set_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
input_data = {
'username': 'admin',
'state': 'present',
'dlp_sensor': {
'random_attribute_not_valid': 'tag',
'comment': 'Comment.',
'dlp_log': 'enable',
'extended_log': 'enable',
'flow_based': 'enable',
'full_archive_proto': 'smtp',
'nac_quar_log': 'enable',
'name': 'default_name_9',
'options': 'test_value_10,',
'replacemsg_group': 'test_value_11',
'summary_proto': 'smtp'
},
'vdom': 'root'}
is_error, changed, response = fortios_dlp_sensor.fortios_dlp(input_data, fos_instance)
expected_data = {
'comment': 'Comment.',
'dlp-log': 'enable',
'extended-log': 'enable',
'flow-based': 'enable',
'full-archive-proto': 'smtp',
'nac-quar-log': 'enable',
'name': 'default_name_9',
'options': 'test_value_10,',
'replacemsg-group': 'test_value_11',
'summary-proto': 'smtp'
}
set_method_mock.assert_called_with('dlp', 'sensor', data=expected_data, vdom='root')
schema_method_mock.assert_not_called()
assert not is_error
assert changed
assert response['status'] == 'success'
assert response['http_status'] == 200
|
gpl-3.0
|
pengli09/Paddle
|
python/paddle/trainer_config_helpers/tests/configs/test_cost_layers.py
|
4
|
1402
|
from paddle.trainer_config_helpers import *
settings(learning_rate=1e-4, batch_size=1000)
seq_in = data_layer(name='input', size=200)
labels = data_layer(name='labels', size=5000)
probs = data_layer(name='probs', size=10)
xe_label = data_layer(name='xe-label', size=10)
hidden = fc_layer(input=seq_in, size=4)
outputs(
ctc_layer(
input=seq_in, label=labels),
warp_ctc_layer(
input=seq_in, label=labels, blank=0),
crf_layer(
input=hidden, label=data_layer(
name='crf_label', size=4)),
rank_cost(
left=data_layer(
name='left', size=1),
right=data_layer(
name='right', size=1),
label=data_layer(
name='label', size=1)),
lambda_cost(
input=data_layer(
name='list_feature', size=100),
score=data_layer(
name='list_scores', size=1)),
cross_entropy(
input=probs, label=xe_label),
cross_entropy_with_selfnorm(
input=probs, label=xe_label),
huber_regression_cost(
input=seq_in, label=labels),
huber_classification_cost(
input=data_layer(
name='huber_probs', size=1),
label=data_layer(
name='huber_label', size=1)),
multi_binary_label_cross_entropy(
input=probs, label=xe_label),
sum_cost(input=hidden),
nce_layer(
input=hidden, label=labels))
|
apache-2.0
|
clbarnes/bctpy
|
test/load_samples.py
|
2
|
2646
|
import numpy as np
import bct
import os
TEST_DIR = os.path.dirname(os.path.realpath(__file__))
MAT_DIR = os.path.join(TEST_DIR, 'mats')
def mat_path(fname):
return os.path.join(MAT_DIR, fname)
def load_sample(thres=1.):
return bct.threshold_proportional(np.load(mat_path('sample_data.npy')),
thres, copy=False)
def load_signed_sample(thres=1):
return bct.threshold_proportional(np.around(
np.load(mat_path('sample_signed.npy')), 8), thres, copy=False)
def load_sparse_sample(thres=.02):
return load_sample(thres=thres)
def load_binary_sample(thres=.35):
return bct.binarize(load_sample(thres=thres), copy=False)
def load_directed_sample(thres=1.):
return bct.threshold_proportional(np.load(mat_path('sample_directed.npy')),
thres, copy=False)
def load_binary_directed_sample(thres=.35):
return bct.binarize(load_directed_sample(thres=thres))
def load_directed_low_modularity_sample(thres=1.):
return bct.threshold_proportional(np.load(
mat_path('sample_directed_gc.npy')), thres, copy=False)
def load_binary_directed_low_modularity_sample(thres=.35):
return bct.binarize(load_directed_low_modularity_sample(thres=thres))
# unimplemented samples
def load_binary_sparse_sample(thres=.35):
raise NotImplementedError()
def load_binary_directed_sparse_sample(thres=.02):
raise NotImplementedError()
def load_directed_sparse_sample(thres=.02):
raise NotImplementedError()
def load_directed_signed_sample(thres=.61):
raise NotImplementedError()
def load_directed_signed_sparse_sample(thres=.03):
raise NotImplementedError()
def load_signed_sparse_sample(thres=.06):
raise NotImplementedError()
# NBS samples
def load_sample_group_qball():
q = np.load(mat_path('sample_group_qball.npy'))
return np.transpose(
list(map(bct.normalize, (q[:, :, i] for i in range(q.shape[2])))),
(1, 2, 0))
def load_sample_group_dsi():
d = np.load(mat_path('sample_group_dsi.npy'))
return np.transpose(
list(map(bct.normalize, (d[:, :, i] for i in range(d.shape[2])))),
(1, 2, 0))
def load_sample_group_fmri():
f = np.load(mat_path('sample_group_fmri.npy'))
import functools
def compose(*functions):
return functools.reduce(lambda f, g: lambda x: f(g(x)), functions)
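    # compose(f, g)(x) == f(g(x)): each slice is thresholded first, then normalized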
thresh_fun = functools.partial(bct.threshold_proportional, p=.5)
return np.transpose(list(map(compose(bct.normalize, thresh_fun),
(f[:, :, i] for i in range(f.shape[2])))),
(1, 2, 0))
|
gpl-3.0
|
titom1986/CouchPotatoServer
|
couchpotato/core/media/movie/providers/trailer/youtube_dl/extractor/southparkstudios.py
|
27
|
1372
|
from __future__ import unicode_literals
from .mtv import MTVServicesInfoExtractor
class SouthParkStudiosIE(MTVServicesInfoExtractor):
IE_NAME = 'southparkstudios.com'
_VALID_URL = r'https?://(www\.)?(?P<url>southparkstudios\.com/(clips|full-episodes)/(?P<id>.+?)(\?|#|$))'
_FEED_URL = 'http://www.southparkstudios.com/feeds/video-player/mrss'
_TESTS = [{
'url': 'http://www.southparkstudios.com/clips/104437/bat-daded#tab=featured',
'info_dict': {
'id': 'a7bff6c2-ed00-11e0-aca6-0026b9414f30',
'ext': 'mp4',
'title': 'Bat Daded',
'description': 'Randy disqualifies South Park by getting into a fight with Bat Dad.',
},
}]
class SouthparkDeIE(SouthParkStudiosIE):
IE_NAME = 'southpark.de'
_VALID_URL = r'https?://(www\.)?(?P<url>southpark\.de/(clips|alle-episoden)/(?P<id>.+?)(\?|#|$))'
_FEED_URL = 'http://www.southpark.de/feeds/video-player/mrss/'
_TESTS = [{
'url': 'http://www.southpark.de/clips/uygssh/the-government-wont-respect-my-privacy#tab=featured',
'info_dict': {
'id': '85487c96-b3b9-4e39-9127-ad88583d9bf2',
'ext': 'mp4',
'title': 'The Government Won\'t Respect My Privacy',
'description': 'Cartman explains the benefits of "Shitter" to Stan, Kyle and Craig.',
},
}]
|
gpl-3.0
|
Tuxemon/Tuxemon
|
scripts/snap_map.py
|
1
|
2014
|
"""
Snap collisions and events to the tile grid
EXAMPLES:
preview changes:
python3 scripts/snap_map.py mods/tuxemon/maps/taba_town.tmx
write changes
python3 -w scripts/snap_map.py mods/tuxemon/maps/taba_town.tmx
many files:
python3 scripts/snap_map.py mods/tuxemon/maps/*tmx
"""
import xml.etree.ElementTree as ET
import click
def snap(attrib, name, interval):
"""Snap value, return True if changed"""
try:
original = attrib[name]
modified = int(round(float(attrib[name]) / interval) * interval)
modified = str(modified)
if modified != original:
attrib[name] = modified
return True
return False
except KeyError:
pass
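# Example (illustrative): with interval == 16, an "x" of "17" snaps to "16" and an
# "x" of "25" snaps to "32" (rounded to the nearest multiple of the tile size).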
def snap_objects(tree):
root = tree.getroot()
tw = int(root.attrib["tilewidth"])
th = int(root.attrib["tileheight"])
values = (("x", th), ("y", th), ("width", tw), ("height", th))
changed = False
for obj in tree.findall("./objectgroup/object"):
attrib = obj.attrib
for name, interval in values:
if snap(attrib, name, interval):
changed = True
return changed
@click.command()
@click.option("--write", "-w", is_flag=True, help="write the changes back to the file")
@click.argument("filename", nargs=-1)
def click_shim(filename, write):
"""
Move all events and collisions in a file to align with the tile grid
Can accept multiple filenames
"""
for filepath in filename:
tree = ET.parse(filepath)
changed = snap_objects(tree)
if changed:
print(f"{filepath} will be changed")
if write:
print(f"writing changes to {filepath}...")
tree.write(
filepath,
encoding="UTF-8",
default_namespace=None,
xml_declaration=True,
short_empty_elements=True,
)
if __name__ == "__main__":
click_shim()
|
gpl-3.0
|
ardi69/pyload-0.4.10
|
pyload/plugin/account/FastixRu.py
|
1
|
1215
|
# -*- coding: utf-8 -*-
from pyload.plugin.Account import Account
from pyload.utils import json_loads
class FastixRu(Account):
__name = "FastixRu"
__type = "account"
__version = "0.03"
__description = """Fastix account plugin"""
__license = "GPLv3"
__authors = [("Massimo Rosamilia", "[email protected]")]
def loadAccountInfo(self, user, req):
data = self.getAccountData(user)
html = json_loads(req.load("http://fastix.ru/api_v2/", get={'apikey': data['api'], 'sub': "getaccountdetails"}))
points = html['points']
trafficleft = float(points) * 1024 ** 2 / 1000
if points > 0:
account_info = {"validuntil": -1, "trafficleft": trafficleft}
else:
account_info = {"validuntil": None, "trafficleft": None, "premium": False}
return account_info
def login(self, user, data, req):
html = req.load("http://fastix.ru/api_v2/",
get={'sub': "get_apikey", 'email': user, 'password': data['password']})
        api = json_loads(html)
        if "error_code" in api:
            self.wrongPassword()
        data['api'] = api['apikey']
|
gpl-3.0
|
mblachford/conductor
|
lib/python2.7/site-packages/suds/builder.py
|
197
|
4220
|
# This program is free software; you can redistribute it and/or modify
# it under the terms of the (LGPL) GNU Lesser General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library Lesser General Public License for more details at
# ( http://www.gnu.org/licenses/lgpl.html ).
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# written by: Jeff Ortel ( [email protected] )
"""
The I{builder} module provides a factory for WSDL/XSD defined types.
"""
from logging import getLogger
from suds import *
from suds.sudsobject import Factory
log = getLogger(__name__)
class Builder:
""" Builder used to construct an object for types defined in the schema """
def __init__(self, resolver):
"""
@param resolver: A schema object name resolver.
@type resolver: L{resolver.Resolver}
"""
self.resolver = resolver
def build(self, name):
""" build a an object for the specified typename as defined in the schema """
if isinstance(name, basestring):
type = self.resolver.find(name)
if type is None:
raise TypeNotFound(name)
else:
type = name
cls = type.name
if type.mixed():
data = Factory.property(cls)
else:
data = Factory.object(cls)
resolved = type.resolve()
md = data.__metadata__
md.sxtype = resolved
md.ordering = self.ordering(resolved)
history = []
self.add_attributes(data, resolved)
for child, ancestry in type.children():
if self.skip_child(child, ancestry):
continue
self.process(data, child, history[:])
return data
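    # Typical use (illustrative): Builder(resolver).build('Person') returns a suds
    # object whose attributes mirror the schema-defined children of the Person type.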
def process(self, data, type, history):
""" process the specified type then process its children """
if type in history:
return
if type.enum():
return
history.append(type)
resolved = type.resolve()
value = None
if type.unbounded():
value = []
else:
if len(resolved) > 0:
if resolved.mixed():
value = Factory.property(resolved.name)
md = value.__metadata__
md.sxtype = resolved
else:
value = Factory.object(resolved.name)
md = value.__metadata__
md.sxtype = resolved
md.ordering = self.ordering(resolved)
setattr(data, type.name, value)
if value is not None:
data = value
if not isinstance(data, list):
self.add_attributes(data, resolved)
for child, ancestry in resolved.children():
if self.skip_child(child, ancestry):
continue
self.process(data, child, history[:])
def add_attributes(self, data, type):
""" add required attributes """
for attr, ancestry in type.attributes():
name = '_%s' % attr.name
value = attr.get_default()
setattr(data, name, value)
def skip_child(self, child, ancestry):
""" get whether or not to skip the specified child """
if child.any(): return True
for x in ancestry:
if x.choice():
return True
return False
def ordering(self, type):
""" get the ordering """
result = []
for child, ancestry in type.resolve():
name = child.name
if child.name is None:
continue
if child.isattr():
name = '_%s' % child.name
result.append(name)
return result
|
gpl-2.0
|
phasis/phasis
|
phasis/finc/rmovcon.py
|
1
|
2359
|
# -*- coding: iso-8859-1 -*-
#
# Copyright (C) 2001 - 2020 Massimo Gerardi all rights reserved.
#
# Author: Massimo Gerardi [email protected]
#
# Copyright (c) 2020 Qsistemi.com. All rights reserved.
#
# Viale Giorgio Ribotta, 11 (Roma)
# 00144 Roma (RM) - Italy
# Phone: (+39) 06.87.163
#
#
# See the COPYING file for the software licensing terms.
#
# www.qsistemi.com - [email protected]
from reportlab.lib.pagesizes import *
def layout():
return landscape(A4)
def corpo (c,row,Y):
c.setLineWidth(1)
c.setFont('Helvetica',10)
c.drawRightString(94,Y,str(row['NUM_MOV']))
if str(row['DATADOC'])=="":
c.drawString(111,Y,str(row['DATAMOV']))
else:
c.drawString(111,Y,str(row['DATADOC']))
c.drawString(188,Y,str(row['CPART'] + ' - ' + row['RAGSOC']))
c.drawString(368,Y,str(row['DESCRIZ'] + ' - ' + row['D_CONTO']))
c.setFont('Times-Roman',12)
c.setFont('Helvetica',10)
c.drawString(695,Y,str(row['SEGNO']))
c.drawRightString(804,Y,str(row['IMPORVAL']))
def struttura (c):
c.rect(36,510,782,-494,1,0)
c.setFont('Helvetica-Bold',10)
c.drawString(46,485,_("Num. Doc."))
c.drawString(111,485,_("Data Scad."))
c.drawString(188,485,_("Ragione Sociale"))
c.drawString(368,485,_("Causale - Operazione"))
c.drawString(670,485,_("Dare/Avere"))
c.drawString(774,485,_("Totale"))
c.rect(36,470,783,0,1,0)
c.rect(104.57,509.967,0,-493.75,1,0)
c.rect(178,510,0,-493,1,0)
c.rect(363,510,0,-493,1,0)
c.rect(665,510,0,-493,1,0)
c.rect(730,510,0,-493,1,0)
c.setFont('Helvetica-Bold',10)
c.setFont('Helvetica-Bold',12)
c.drawString(676.91,531.567,_("Al "))
c.setFont('Helvetica-Bold',14)
c.drawString(234,531,_("LISTA MOVIMENTI AGGIORNATO "))
def testata (c,row):
c.setFont('Helvetica',10)
c.drawString(42.8833,571,str(row['rsaz']))
c.rect(36,559,782,-40,1,0)
c.setFont('Helvetica',12)
c.drawString(709,531,str(row['datacon']))
def querycorpo ():
return '''SELECT *,anag.rag_soc1 as RAGSOC FROM movcon,anag WHERE movcon.T_CPART = "%s" AND movcon.DATA_INT>="%s"
AND movcon.DATA_INT<= "%s" AND anag.T_CPART=movcon.T_CPART AND anag.COD=movcon.CPART
ORDER BY movcon.DATA_INT ASC'''
def fontcorpo ():
return 10
def Ycorpo ():
return 452
def fineSeq ():
return 25
|
gpl-2.0
|
vv1133/home_web
|
django/core/management/commands/dbshell.py
|
329
|
1243
|
from optparse import make_option
from django.core.management.base import BaseCommand, CommandError
from django.db import connections, DEFAULT_DB_ALIAS
class Command(BaseCommand):
help = ("Runs the command-line client for specified database, or the "
"default database if none is provided.")
option_list = BaseCommand.option_list + (
make_option('--database', action='store', dest='database',
default=DEFAULT_DB_ALIAS, help='Nominates a database onto which to '
'open a shell. Defaults to the "default" database.'),
)
requires_model_validation = False
def handle(self, **options):
connection = connections[options.get('database')]
try:
connection.client.runshell()
except OSError:
# Note that we're assuming OSError means that the client program
# isn't installed. There's a possibility OSError would be raised
# for some other reason, in which case this error message would be
# inaccurate. Still, this message catches the common case.
raise CommandError('You appear not to have the %r program installed or on your path.' % \
connection.client.executable_name)
|
bsd-3-clause
|
baylee/django
|
tests/mail/tests.py
|
6
|
56176
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import asyncore
import mimetypes
import os
import shutil
import smtpd
import sys
import tempfile
import threading
from email.header import Header
from email.mime.text import MIMEText
from smtplib import SMTP, SMTPException
from ssl import SSLError
from django.core import mail
from django.core.mail import (
EmailMessage, EmailMultiAlternatives, mail_admins, mail_managers,
send_mail, send_mass_mail,
)
from django.core.mail.backends import console, dummy, filebased, locmem, smtp
from django.core.mail.message import BadHeaderError, sanitize_address
from django.test import SimpleTestCase, override_settings
from django.utils._os import upath
from django.utils.encoding import force_bytes, force_text
from django.utils.six import PY3, StringIO, binary_type
from django.utils.translation import ugettext_lazy
if PY3:
from email.utils import parseaddr
from email import message_from_bytes, message_from_binary_file
else:
from email.Utils import parseaddr
from email import (
message_from_string as message_from_bytes,
message_from_file as message_from_binary_file,
)
class HeadersCheckMixin(object):
def assertMessageHasHeaders(self, message, headers):
"""
Check that :param message: has all :param headers: headers.
:param message: can be an instance of an email.Message subclass or a
string with the contents of an email message.
:param headers: should be a set of (header-name, header-value) tuples.
"""
if isinstance(message, binary_type):
message = message_from_bytes(message)
msg_headers = set(message.items())
self.assertTrue(headers.issubset(msg_headers), msg='Message is missing '
'the following headers: %s' % (headers - msg_headers),)
class MailTests(HeadersCheckMixin, SimpleTestCase):
"""
Non-backend specific tests.
"""
def test_ascii(self):
email = EmailMessage('Subject', 'Content', '[email protected]', ['[email protected]'])
message = email.message()
self.assertEqual(message['Subject'], 'Subject')
self.assertEqual(message.get_payload(), 'Content')
self.assertEqual(message['From'], '[email protected]')
self.assertEqual(message['To'], '[email protected]')
def test_multiple_recipients(self):
email = EmailMessage('Subject', 'Content', '[email protected]', ['[email protected]', '[email protected]'])
message = email.message()
self.assertEqual(message['Subject'], 'Subject')
self.assertEqual(message.get_payload(), 'Content')
self.assertEqual(message['From'], '[email protected]')
self.assertEqual(message['To'], '[email protected], [email protected]')
def test_cc(self):
"""Regression test for #7722"""
email = EmailMessage('Subject', 'Content', '[email protected]', ['[email protected]'], cc=['[email protected]'])
message = email.message()
self.assertEqual(message['Cc'], '[email protected]')
self.assertEqual(email.recipients(), ['[email protected]', '[email protected]'])
# Test multiple CC with multiple To
email = EmailMessage(
'Subject', 'Content', '[email protected]', ['[email protected]', '[email protected]'],
cc=['[email protected]', '[email protected]']
)
message = email.message()
self.assertEqual(message['Cc'], '[email protected], [email protected]')
self.assertEqual(
email.recipients(),
['[email protected]', '[email protected]', '[email protected]', '[email protected]']
)
# Testing with Bcc
email = EmailMessage(
'Subject', 'Content', '[email protected]', ['[email protected]', '[email protected]'],
cc=['[email protected]', '[email protected]'], bcc=['[email protected]']
)
message = email.message()
self.assertEqual(message['Cc'], '[email protected], [email protected]')
self.assertEqual(
email.recipients(),
['[email protected]', '[email protected]', '[email protected]', '[email protected]', '[email protected]']
)
def test_reply_to(self):
email = EmailMessage(
'Subject', 'Content', '[email protected]', ['[email protected]'],
reply_to=['[email protected]'],
)
message = email.message()
self.assertEqual(message['Reply-To'], '[email protected]')
email = EmailMessage(
'Subject', 'Content', '[email protected]', ['[email protected]'],
reply_to=['[email protected]', '[email protected]']
)
message = email.message()
self.assertEqual(message['Reply-To'], '[email protected], [email protected]')
def test_recipients_as_tuple(self):
email = EmailMessage(
'Subject', 'Content', '[email protected]', ('[email protected]', '[email protected]'),
cc=('[email protected]', '[email protected]'), bcc=('[email protected]',)
)
message = email.message()
self.assertEqual(message['Cc'], '[email protected], [email protected]')
self.assertEqual(
email.recipients(),
['[email protected]', '[email protected]', '[email protected]', '[email protected]', '[email protected]']
)
def test_recipients_as_string(self):
with self.assertRaisesMessage(TypeError, '"to" argument must be a list or tuple'):
EmailMessage(to='[email protected]')
with self.assertRaisesMessage(TypeError, '"cc" argument must be a list or tuple'):
EmailMessage(cc='[email protected]')
with self.assertRaisesMessage(TypeError, '"bcc" argument must be a list or tuple'):
EmailMessage(bcc='[email protected]')
with self.assertRaisesMessage(TypeError, '"reply_to" argument must be a list or tuple'):
EmailMessage(reply_to='[email protected]')
def test_header_injection(self):
email = EmailMessage('Subject\nInjection Test', 'Content', '[email protected]', ['[email protected]'])
with self.assertRaises(BadHeaderError):
email.message()
email = EmailMessage(
ugettext_lazy('Subject\nInjection Test'), 'Content', '[email protected]', ['[email protected]']
)
with self.assertRaises(BadHeaderError):
email.message()
def test_space_continuation(self):
"""
Test for space continuation character in long (ASCII) subject headers (#7747)
"""
email = EmailMessage(
'Long subject lines that get wrapped should contain a space '
'continuation character to get expected behavior in Outlook and Thunderbird',
'Content', '[email protected]', ['[email protected]']
)
message = email.message()
# Note that in Python 3, maximum line length has increased from 76 to 78
self.assertEqual(
message['Subject'].encode(),
b'Long subject lines that get wrapped should contain a space continuation\n'
b' character to get expected behavior in Outlook and Thunderbird'
)
def test_message_header_overrides(self):
"""
Specifying dates or message-ids in the extra headers overrides the
default values (#9233)
"""
headers = {"date": "Fri, 09 Nov 2001 01:08:47 -0000", "Message-ID": "foo"}
email = EmailMessage('subject', 'content', '[email protected]', ['[email protected]'], headers=headers)
self.assertMessageHasHeaders(email.message(), {
('Content-Transfer-Encoding', '7bit'),
('Content-Type', 'text/plain; charset="utf-8"'),
('From', '[email protected]'),
('MIME-Version', '1.0'),
('Message-ID', 'foo'),
('Subject', 'subject'),
('To', '[email protected]'),
('date', 'Fri, 09 Nov 2001 01:08:47 -0000'),
})
def test_from_header(self):
"""
Make sure we can manually set the From header (#9214)
"""
email = EmailMessage(
'Subject', 'Content', '[email protected]', ['[email protected]'],
headers={'From': '[email protected]'},
)
message = email.message()
self.assertEqual(message['From'], '[email protected]')
def test_to_header(self):
"""
Make sure we can manually set the To header (#17444)
"""
email = EmailMessage('Subject', 'Content', '[email protected]',
['[email protected]', '[email protected]'],
headers={'To': '[email protected]'})
message = email.message()
self.assertEqual(message['To'], '[email protected]')
self.assertEqual(email.to, ['[email protected]', '[email protected]'])
# If we don't set the To header manually, it should default to the `to` argument to the constructor
email = EmailMessage('Subject', 'Content', '[email protected]',
['[email protected]', '[email protected]'])
message = email.message()
self.assertEqual(message['To'], '[email protected], [email protected]')
self.assertEqual(email.to, ['[email protected]', '[email protected]'])
def test_reply_to_header(self):
"""
Specifying 'Reply-To' in headers should override reply_to.
"""
email = EmailMessage(
'Subject', 'Content', '[email protected]', ['[email protected]'],
reply_to=['[email protected]'], headers={'Reply-To': '[email protected]'},
)
message = email.message()
self.assertEqual(message['Reply-To'], '[email protected]')
def test_multiple_message_call(self):
"""
Regression for #13259 - Make sure that headers are not changed when
calling EmailMessage.message()
"""
email = EmailMessage(
'Subject', 'Content', '[email protected]', ['[email protected]'],
headers={'From': '[email protected]'},
)
message = email.message()
self.assertEqual(message['From'], '[email protected]')
message = email.message()
self.assertEqual(message['From'], '[email protected]')
def test_unicode_address_header(self):
"""
Regression for #11144 - When a to/from/cc header contains unicode,
make sure the email addresses are parsed correctly (especially with
regards to commas)
"""
email = EmailMessage(
'Subject', 'Content', '[email protected]',
['"Firstname Sürname" <[email protected]>', '[email protected]'],
)
self.assertEqual(
email.message()['To'],
'=?utf-8?q?Firstname_S=C3=BCrname?= <[email protected]>, [email protected]'
)
email = EmailMessage(
'Subject', 'Content', '[email protected]',
['"Sürname, Firstname" <[email protected]>', '[email protected]'],
)
self.assertEqual(
email.message()['To'],
'=?utf-8?q?S=C3=BCrname=2C_Firstname?= <[email protected]>, [email protected]'
)
def test_unicode_headers(self):
email = EmailMessage("Gżegżółka", "Content", "[email protected]", ["[email protected]"],
headers={"Sender": '"Firstname Sürname" <[email protected]>',
"Comments": 'My Sürname is non-ASCII'})
message = email.message()
self.assertEqual(message['Subject'], '=?utf-8?b?R8W8ZWfFvMOzxYJrYQ==?=')
self.assertEqual(message['Sender'], '=?utf-8?q?Firstname_S=C3=BCrname?= <[email protected]>')
self.assertEqual(message['Comments'], '=?utf-8?q?My_S=C3=BCrname_is_non-ASCII?=')
def test_safe_mime_multipart(self):
"""
Make sure headers can be set with a different encoding than utf-8 in
SafeMIMEMultipart as well
"""
headers = {"Date": "Fri, 09 Nov 2001 01:08:47 -0000", "Message-ID": "foo"}
from_email, to = '[email protected]', '"Sürname, Firstname" <[email protected]>'
text_content = 'This is an important message.'
html_content = '<p>This is an <strong>important</strong> message.</p>'
msg = EmailMultiAlternatives('Message from Firstname Sürname', text_content, from_email, [to], headers=headers)
msg.attach_alternative(html_content, "text/html")
msg.encoding = 'iso-8859-1'
self.assertEqual(msg.message()['To'], '=?iso-8859-1?q?S=FCrname=2C_Firstname?= <[email protected]>')
self.assertEqual(msg.message()['Subject'], '=?iso-8859-1?q?Message_from_Firstname_S=FCrname?=')
def test_encoding(self):
"""
Regression for #12791 - Encode body correctly with other encodings
than utf-8
"""
email = EmailMessage('Subject', 'Firstname Sürname is a great guy.', '[email protected]', ['[email protected]'])
email.encoding = 'iso-8859-1'
message = email.message()
self.assertMessageHasHeaders(message, {
('MIME-Version', '1.0'),
('Content-Type', 'text/plain; charset="iso-8859-1"'),
('Content-Transfer-Encoding', 'quoted-printable'),
('Subject', 'Subject'),
('From', '[email protected]'),
('To', '[email protected]')})
self.assertEqual(message.get_payload(), 'Firstname S=FCrname is a great guy.')
# Make sure MIME attachments also works correctly with other encodings than utf-8
text_content = 'Firstname Sürname is a great guy.'
html_content = '<p>Firstname Sürname is a <strong>great</strong> guy.</p>'
msg = EmailMultiAlternatives('Subject', text_content, '[email protected]', ['[email protected]'])
msg.encoding = 'iso-8859-1'
msg.attach_alternative(html_content, "text/html")
payload0 = msg.message().get_payload(0)
self.assertMessageHasHeaders(payload0, {
('MIME-Version', '1.0'),
('Content-Type', 'text/plain; charset="iso-8859-1"'),
('Content-Transfer-Encoding', 'quoted-printable')})
self.assertTrue(payload0.as_bytes().endswith(b'\n\nFirstname S=FCrname is a great guy.'))
payload1 = msg.message().get_payload(1)
self.assertMessageHasHeaders(payload1, {
('MIME-Version', '1.0'),
('Content-Type', 'text/html; charset="iso-8859-1"'),
('Content-Transfer-Encoding', 'quoted-printable')})
self.assertTrue(
payload1.as_bytes().endswith(b'\n\n<p>Firstname S=FCrname is a <strong>great</strong> guy.</p>')
)
def test_attachments(self):
"""Regression test for #9367"""
headers = {"Date": "Fri, 09 Nov 2001 01:08:47 -0000", "Message-ID": "foo"}
subject, from_email, to = 'hello', '[email protected]', '[email protected]'
text_content = 'This is an important message.'
html_content = '<p>This is an <strong>important</strong> message.</p>'
msg = EmailMultiAlternatives(subject, text_content, from_email, [to], headers=headers)
msg.attach_alternative(html_content, "text/html")
msg.attach("an attachment.pdf", b"%PDF-1.4.%...", mimetype="application/pdf")
msg_bytes = msg.message().as_bytes()
message = message_from_bytes(msg_bytes)
self.assertTrue(message.is_multipart())
self.assertEqual(message.get_content_type(), 'multipart/mixed')
self.assertEqual(message.get_default_type(), 'text/plain')
payload = message.get_payload()
self.assertEqual(payload[0].get_content_type(), 'multipart/alternative')
self.assertEqual(payload[1].get_content_type(), 'application/pdf')
def test_non_ascii_attachment_filename(self):
"""Regression test for #14964"""
headers = {"Date": "Fri, 09 Nov 2001 01:08:47 -0000", "Message-ID": "foo"}
subject, from_email, to = 'hello', '[email protected]', '[email protected]'
content = 'This is the message.'
msg = EmailMessage(subject, content, from_email, [to], headers=headers)
# Unicode in file name
msg.attach("une pièce jointe.pdf", b"%PDF-1.4.%...", mimetype="application/pdf")
msg_bytes = msg.message().as_bytes()
message = message_from_bytes(msg_bytes)
payload = message.get_payload()
self.assertEqual(payload[1].get_filename(), 'une pièce jointe.pdf')
def test_attach_file(self):
"""
Test attaching a file against different mimetypes and make sure that
a file will be attached and sent properly even if an invalid mimetype
is specified.
"""
files = (
# filename, actual mimetype
('file.txt', 'text/plain'),
('file.png', 'image/png'),
('file_txt', None),
('file_png', None),
('file_txt.png', 'image/png'),
('file_png.txt', 'text/plain'),
)
test_mimetypes = ['text/plain', 'image/png', None]
for basename, real_mimetype in files:
for mimetype in test_mimetypes:
email = EmailMessage('subject', 'body', '[email protected]', ['[email protected]'])
self.assertEqual(mimetypes.guess_type(basename)[0], real_mimetype)
self.assertEqual(email.attachments, [])
file_path = os.path.join(os.path.dirname(upath(__file__)), 'attachments', basename)
email.attach_file(file_path, mimetype=mimetype)
self.assertEqual(len(email.attachments), 1)
self.assertIn(basename, email.attachments[0])
msgs_sent_num = email.send()
self.assertEqual(msgs_sent_num, 1)
def test_dummy_backend(self):
"""
Make sure that dummy backends returns correct number of sent messages
"""
connection = dummy.EmailBackend()
email = EmailMessage(
'Subject', 'Content', '[email protected]', ['[email protected]'],
headers={'From': '[email protected]'},
)
self.assertEqual(connection.send_messages([email, email, email]), 3)
def test_arbitrary_keyword(self):
"""
Make sure that get_connection() accepts arbitrary keyword that might be
used with custom backends.
"""
c = mail.get_connection(fail_silently=True, foo='bar')
self.assertTrue(c.fail_silently)
def test_custom_backend(self):
"""Test custom backend defined in this suite."""
conn = mail.get_connection('mail.custombackend.EmailBackend')
self.assertTrue(hasattr(conn, 'test_outbox'))
email = EmailMessage(
'Subject', 'Content', '[email protected]', ['[email protected]'],
headers={'From': '[email protected]'},
)
conn.send_messages([email])
self.assertEqual(len(conn.test_outbox), 1)
def test_backend_arg(self):
"""Test backend argument of mail.get_connection()"""
self.assertIsInstance(mail.get_connection('django.core.mail.backends.smtp.EmailBackend'), smtp.EmailBackend)
self.assertIsInstance(
mail.get_connection('django.core.mail.backends.locmem.EmailBackend'),
locmem.EmailBackend
)
self.assertIsInstance(mail.get_connection('django.core.mail.backends.dummy.EmailBackend'), dummy.EmailBackend)
self.assertIsInstance(
mail.get_connection('django.core.mail.backends.console.EmailBackend'),
console.EmailBackend
)
tmp_dir = tempfile.mkdtemp()
try:
self.assertIsInstance(
mail.get_connection('django.core.mail.backends.filebased.EmailBackend', file_path=tmp_dir),
filebased.EmailBackend
)
finally:
shutil.rmtree(tmp_dir)
self.assertIsInstance(mail.get_connection(), locmem.EmailBackend)
@override_settings(
EMAIL_BACKEND='django.core.mail.backends.locmem.EmailBackend',
ADMINS=[('nobody', '[email protected]')],
MANAGERS=[('nobody', '[email protected]')])
def test_connection_arg(self):
"""Test connection argument to send_mail(), et. al."""
mail.outbox = []
# Send using non-default connection
connection = mail.get_connection('mail.custombackend.EmailBackend')
send_mail('Subject', 'Content', '[email protected]', ['[email protected]'], connection=connection)
self.assertEqual(mail.outbox, [])
self.assertEqual(len(connection.test_outbox), 1)
self.assertEqual(connection.test_outbox[0].subject, 'Subject')
connection = mail.get_connection('mail.custombackend.EmailBackend')
send_mass_mail([
('Subject1', 'Content1', '[email protected]', ['[email protected]']),
('Subject2', 'Content2', '[email protected]', ['[email protected]']),
], connection=connection)
self.assertEqual(mail.outbox, [])
self.assertEqual(len(connection.test_outbox), 2)
self.assertEqual(connection.test_outbox[0].subject, 'Subject1')
self.assertEqual(connection.test_outbox[1].subject, 'Subject2')
connection = mail.get_connection('mail.custombackend.EmailBackend')
mail_admins('Admin message', 'Content', connection=connection)
self.assertEqual(mail.outbox, [])
self.assertEqual(len(connection.test_outbox), 1)
self.assertEqual(connection.test_outbox[0].subject, '[Django] Admin message')
connection = mail.get_connection('mail.custombackend.EmailBackend')
mail_managers('Manager message', 'Content', connection=connection)
self.assertEqual(mail.outbox, [])
self.assertEqual(len(connection.test_outbox), 1)
self.assertEqual(connection.test_outbox[0].subject, '[Django] Manager message')
def test_dont_mangle_from_in_body(self):
# Regression for #13433 - Make sure that EmailMessage doesn't mangle
# 'From ' in message body.
email = EmailMessage(
'Subject', 'From the future', '[email protected]', ['[email protected]'],
headers={'From': '[email protected]'},
)
self.assertNotIn(b'>From the future', email.message().as_bytes())
def test_dont_base64_encode(self):
# Ticket #3472
# Shouldn't use Base64 encoding at all
msg = EmailMessage(
'Subject', 'UTF-8 encoded body', '[email protected]', ['[email protected]'],
headers={'From': '[email protected]'},
)
self.assertNotIn(b'Content-Transfer-Encoding: base64', msg.message().as_bytes())
# Ticket #11212
# Shouldn't use quoted printable, should detect it can represent content with 7 bit data
msg = EmailMessage(
'Subject', 'Body with only ASCII characters.', '[email protected]', ['[email protected]'],
headers={'From': '[email protected]'},
)
s = msg.message().as_bytes()
self.assertNotIn(b'Content-Transfer-Encoding: quoted-printable', s)
self.assertIn(b'Content-Transfer-Encoding: 7bit', s)
# Shouldn't use quoted printable, should detect it can represent content with 8 bit data
msg = EmailMessage(
'Subject', 'Body with latin characters: àáä.', '[email protected]', ['[email protected]'],
headers={'From': '[email protected]'},
)
s = msg.message().as_bytes()
self.assertNotIn(b'Content-Transfer-Encoding: quoted-printable', s)
self.assertIn(b'Content-Transfer-Encoding: 8bit', s)
msg = EmailMessage(
'Subject', 'Body with non latin characters: А Б В Г Д Е Ж Ѕ З И І К Л М Н О П.', '[email protected]',
['[email protected]'], headers={'From': '[email protected]'},
)
s = msg.message().as_bytes()
self.assertNotIn(b'Content-Transfer-Encoding: quoted-printable', s)
self.assertIn(b'Content-Transfer-Encoding: 8bit', s)
def test_dont_base64_encode_message_rfc822(self):
# Ticket #18967
# Shouldn't use base64 encoding for a child EmailMessage attachment.
# Create a child message first
child_msg = EmailMessage(
'Child Subject', 'Some body of child message', '[email protected]', ['[email protected]'],
headers={'From': '[email protected]'},
)
child_s = child_msg.message().as_string()
# Now create a parent
parent_msg = EmailMessage(
'Parent Subject', 'Some parent body', '[email protected]', ['[email protected]'],
headers={'From': '[email protected]'},
)
# Attach to parent as a string
parent_msg.attach(content=child_s, mimetype='message/rfc822')
parent_s = parent_msg.message().as_string()
# Verify that the child message header is not base64 encoded
self.assertIn(str('Child Subject'), parent_s)
# Feature test: try attaching email.Message object directly to the mail.
parent_msg = EmailMessage(
'Parent Subject', 'Some parent body', '[email protected]', ['[email protected]'],
headers={'From': '[email protected]'},
)
parent_msg.attach(content=child_msg.message(), mimetype='message/rfc822')
parent_s = parent_msg.message().as_string()
# Verify that the child message header is not base64 encoded
self.assertIn(str('Child Subject'), parent_s)
# Feature test: try attaching Django's EmailMessage object directly to the mail.
parent_msg = EmailMessage(
'Parent Subject', 'Some parent body', '[email protected]', ['[email protected]'],
headers={'From': '[email protected]'},
)
parent_msg.attach(content=child_msg, mimetype='message/rfc822')
parent_s = parent_msg.message().as_string()
# Verify that the child message header is not base64 encoded
self.assertIn(str('Child Subject'), parent_s)
def test_sanitize_address(self):
"""
Email addresses are properly sanitized.
"""
# Simple ASCII address - string form
self.assertEqual(sanitize_address('[email protected]', 'ascii'), '[email protected]')
self.assertEqual(sanitize_address('[email protected]', 'utf-8'), '[email protected]')
# Bytestrings are transformed to normal strings.
self.assertEqual(sanitize_address(b'[email protected]', 'utf-8'), '[email protected]')
# Simple ASCII address - tuple form
self.assertEqual(
sanitize_address(('A name', '[email protected]'), 'ascii'),
'A name <[email protected]>'
)
if PY3:
self.assertEqual(
sanitize_address(('A name', '[email protected]'), 'utf-8'),
'=?utf-8?q?A_name?= <[email protected]>'
)
else:
self.assertEqual(
sanitize_address(('A name', '[email protected]'), 'utf-8'),
'A name <[email protected]>'
)
# Unicode characters are supported in RFC-6532.
self.assertEqual(
sanitize_address('tó@example.com', 'utf-8'),
'[email protected]'
)
self.assertEqual(
sanitize_address(('Tó Example', 'tó@example.com'), 'utf-8'),
'=?utf-8?q?T=C3=B3_Example?= <[email protected]>'
)
class PythonGlobalState(SimpleTestCase):
"""
Tests for #12422 -- Django's charset handling for utf-8 text parts
(#2472/#11212) shouldn't pollute the global email Python package charset
registry when django.mail.message is imported.
"""
def test_utf8(self):
txt = MIMEText('UTF-8 encoded body', 'plain', 'utf-8')
self.assertIn('Content-Transfer-Encoding: base64', txt.as_string())
def test_7bit(self):
txt = MIMEText('Body with only ASCII characters.', 'plain', 'utf-8')
self.assertIn('Content-Transfer-Encoding: base64', txt.as_string())
def test_8bit_latin(self):
txt = MIMEText('Body with latin characters: àáä.', 'plain', 'utf-8')
self.assertIn(str('Content-Transfer-Encoding: base64'), txt.as_string())
def test_8bit_non_latin(self):
txt = MIMEText('Body with non latin characters: А Б В Г Д Е Ж Ѕ З И І К Л М Н О П.', 'plain', 'utf-8')
self.assertIn(str('Content-Transfer-Encoding: base64'), txt.as_string())
class BaseEmailBackendTests(HeadersCheckMixin, object):
email_backend = None
def setUp(self):
self.settings_override = override_settings(EMAIL_BACKEND=self.email_backend)
self.settings_override.enable()
def tearDown(self):
self.settings_override.disable()
def assertStartsWith(self, first, second):
if not first.startswith(second):
self.longMessage = True
self.assertEqual(first[:len(second)], second, "First string doesn't start with the second.")
def get_mailbox_content(self):
raise NotImplementedError('subclasses of BaseEmailBackendTests must provide a get_mailbox_content() method')
def flush_mailbox(self):
raise NotImplementedError('subclasses of BaseEmailBackendTests may require a flush_mailbox() method')
def get_the_message(self):
mailbox = self.get_mailbox_content()
self.assertEqual(
len(mailbox), 1,
"Expected exactly one message, got %d.\n%r" % (len(mailbox), [m.as_string() for m in mailbox])
)
return mailbox[0]
def test_send(self):
email = EmailMessage('Subject', 'Content', '[email protected]', ['[email protected]'])
num_sent = mail.get_connection().send_messages([email])
self.assertEqual(num_sent, 1)
message = self.get_the_message()
self.assertEqual(message["subject"], "Subject")
self.assertEqual(message.get_payload(), "Content")
self.assertEqual(message["from"], "[email protected]")
self.assertEqual(message.get_all("to"), ["[email protected]"])
def test_send_unicode(self):
email = EmailMessage('Chère maman', 'Je t\'aime très fort', '[email protected]', ['[email protected]'])
num_sent = mail.get_connection().send_messages([email])
self.assertEqual(num_sent, 1)
message = self.get_the_message()
self.assertEqual(message["subject"], '=?utf-8?q?Ch=C3=A8re_maman?=')
self.assertEqual(force_text(message.get_payload(decode=True)), 'Je t\'aime très fort')
def test_send_long_lines(self):
"""
Email line length is limited to 998 chars by the RFC:
https://tools.ietf.org/html/rfc5322#section-2.1.1
Message bodies containing longer lines are converted to Quoted-Printable
to avoid having to insert newlines, which could be hairy to do properly.
"""
email = EmailMessage('Subject', "Comment ça va? " * 100, '[email protected]', ['[email protected]'])
email.send()
message = self.get_the_message()
self.assertMessageHasHeaders(message, {
('MIME-Version', '1.0'),
('Content-Type', 'text/plain; charset="utf-8"'),
('Content-Transfer-Encoding', 'quoted-printable'),
})
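# Hedged sketch (not from the original suite): the quoted-printable wrapping
# this test relies on can be observed directly with the stdlib, e.g.:
#   import quopri
#   encoded = quopri.encodestring(b'x' * 1200)
#   assert all(len(line) <= 76 for line in encoded.split(b'\n'))
# Every encoded line stays within the 76-char limit via soft line breaks ('=').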
def test_send_many(self):
email1 = EmailMessage('Subject', 'Content1', '[email protected]', ['[email protected]'])
email2 = EmailMessage('Subject', 'Content2', '[email protected]', ['[email protected]'])
num_sent = mail.get_connection().send_messages([email1, email2])
self.assertEqual(num_sent, 2)
messages = self.get_mailbox_content()
self.assertEqual(len(messages), 2)
self.assertEqual(messages[0].get_payload(), "Content1")
self.assertEqual(messages[1].get_payload(), "Content2")
def test_send_verbose_name(self):
email = EmailMessage("Subject", "Content", '"Firstname Sürname" <[email protected]>',
["[email protected]"])
email.send()
message = self.get_the_message()
self.assertEqual(message["subject"], "Subject")
self.assertEqual(message.get_payload(), "Content")
self.assertEqual(message["from"], "=?utf-8?q?Firstname_S=C3=BCrname?= <[email protected]>")
def test_plaintext_send_mail(self):
"""
Test send_mail without the html_message argument.
Regression test for adding the html_message parameter to send_mail().
"""
send_mail('Subject', 'Content', '[email protected]', ['[email protected]'])
message = self.get_the_message()
self.assertEqual(message.get('subject'), 'Subject')
self.assertEqual(message.get_all('to'), ['[email protected]'])
self.assertFalse(message.is_multipart())
self.assertEqual(message.get_payload(), 'Content')
self.assertEqual(message.get_content_type(), 'text/plain')
def test_html_send_mail(self):
"""Test html_message argument to send_mail"""
send_mail('Subject', 'Content', '[email protected]', ['[email protected]'], html_message='HTML Content')
message = self.get_the_message()
self.assertEqual(message.get('subject'), 'Subject')
self.assertEqual(message.get_all('to'), ['[email protected]'])
self.assertTrue(message.is_multipart())
self.assertEqual(len(message.get_payload()), 2)
self.assertEqual(message.get_payload(0).get_payload(), 'Content')
self.assertEqual(message.get_payload(0).get_content_type(), 'text/plain')
self.assertEqual(message.get_payload(1).get_payload(), 'HTML Content')
self.assertEqual(message.get_payload(1).get_content_type(), 'text/html')
@override_settings(MANAGERS=[('nobody', '[email protected]')])
def test_html_mail_managers(self):
"""Test html_message argument to mail_managers"""
mail_managers('Subject', 'Content', html_message='HTML Content')
message = self.get_the_message()
self.assertEqual(message.get('subject'), '[Django] Subject')
self.assertEqual(message.get_all('to'), ['[email protected]'])
self.assertTrue(message.is_multipart())
self.assertEqual(len(message.get_payload()), 2)
self.assertEqual(message.get_payload(0).get_payload(), 'Content')
self.assertEqual(message.get_payload(0).get_content_type(), 'text/plain')
self.assertEqual(message.get_payload(1).get_payload(), 'HTML Content')
self.assertEqual(message.get_payload(1).get_content_type(), 'text/html')
@override_settings(ADMINS=[('nobody', '[email protected]')])
def test_html_mail_admins(self):
"""Test html_message argument to mail_admins """
mail_admins('Subject', 'Content', html_message='HTML Content')
message = self.get_the_message()
self.assertEqual(message.get('subject'), '[Django] Subject')
self.assertEqual(message.get_all('to'), ['[email protected]'])
self.assertTrue(message.is_multipart())
self.assertEqual(len(message.get_payload()), 2)
self.assertEqual(message.get_payload(0).get_payload(), 'Content')
self.assertEqual(message.get_payload(0).get_content_type(), 'text/plain')
self.assertEqual(message.get_payload(1).get_payload(), 'HTML Content')
self.assertEqual(message.get_payload(1).get_content_type(), 'text/html')
@override_settings(
ADMINS=[('nobody', '[email protected]')],
MANAGERS=[('nobody', '[email protected]')])
def test_manager_and_admin_mail_prefix(self):
"""
String prefix + lazy translated subject = bad output
Regression for #13494
"""
mail_managers(ugettext_lazy('Subject'), 'Content')
message = self.get_the_message()
self.assertEqual(message.get('subject'), '[Django] Subject')
self.flush_mailbox()
mail_admins(ugettext_lazy('Subject'), 'Content')
message = self.get_the_message()
self.assertEqual(message.get('subject'), '[Django] Subject')
@override_settings(ADMINS=[], MANAGERS=[])
def test_empty_admins(self):
"""
Test that mail_admins/mail_managers doesn't connect to the mail server
if there are no recipients (#9383)
"""
mail_admins('hi', 'there')
self.assertEqual(self.get_mailbox_content(), [])
mail_managers('hi', 'there')
self.assertEqual(self.get_mailbox_content(), [])
def test_message_cc_header(self):
"""
Regression test for #7722
"""
email = EmailMessage('Subject', 'Content', '[email protected]', ['[email protected]'], cc=['[email protected]'])
mail.get_connection().send_messages([email])
message = self.get_the_message()
self.assertMessageHasHeaders(message, {
('MIME-Version', '1.0'),
('Content-Type', 'text/plain; charset="utf-8"'),
('Content-Transfer-Encoding', '7bit'),
('Subject', 'Subject'),
('From', '[email protected]'),
('To', '[email protected]'),
('Cc', '[email protected]')})
self.assertIn('\nDate: ', message.as_string())
def test_idn_send(self):
"""
Regression test for #14301
"""
self.assertTrue(send_mail('Subject', 'Content', 'from@öäü.com', ['to@öäü.com']))
message = self.get_the_message()
self.assertEqual(message.get('subject'), 'Subject')
self.assertEqual(message.get('from'), '[email protected]')
self.assertEqual(message.get('to'), '[email protected]')
self.flush_mailbox()
m = EmailMessage('Subject', 'Content', 'from@öäü.com', ['to@öäü.com'], cc=['cc@öäü.com'])
m.send()
message = self.get_the_message()
self.assertEqual(message.get('subject'), 'Subject')
self.assertEqual(message.get('from'), '[email protected]')
self.assertEqual(message.get('to'), '[email protected]')
self.assertEqual(message.get('cc'), '[email protected]')
def test_recipient_without_domain(self):
"""
Regression test for #15042
"""
self.assertTrue(send_mail("Subject", "Content", "tester", ["django"]))
message = self.get_the_message()
self.assertEqual(message.get('subject'), 'Subject')
self.assertEqual(message.get('from'), "tester")
self.assertEqual(message.get('to'), "django")
def test_lazy_addresses(self):
"""
Email sending should support lazy email addresses (#24416).
"""
_ = ugettext_lazy
self.assertTrue(send_mail('Subject', 'Content', _('tester'), [_('django')]))
message = self.get_the_message()
self.assertEqual(message.get('from'), 'tester')
self.assertEqual(message.get('to'), 'django')
self.flush_mailbox()
m = EmailMessage(
'Subject', 'Content', _('tester'), [_('to1'), _('to2')],
cc=[_('cc1'), _('cc2')],
bcc=[_('bcc')],
reply_to=[_('reply')],
)
self.assertEqual(m.recipients(), ['to1', 'to2', 'cc1', 'cc2', 'bcc'])
m.send()
message = self.get_the_message()
self.assertEqual(message.get('from'), 'tester')
self.assertEqual(message.get('to'), 'to1, to2')
self.assertEqual(message.get('cc'), 'cc1, cc2')
self.assertEqual(message.get('Reply-To'), 'reply')
def test_close_connection(self):
"""
Test that connection can be closed (even when not explicitly opened)
"""
conn = mail.get_connection(username='', password='')
conn.close()
def test_use_as_contextmanager(self):
"""
Test that the connection can be used as a contextmanager.
"""
opened = [False]
closed = [False]
conn = mail.get_connection(username='', password='')
def open():
opened[0] = True
conn.open = open
def close():
closed[0] = True
conn.close = close
with conn as same_conn:
self.assertTrue(opened[0])
self.assertIs(same_conn, conn)
self.assertFalse(closed[0])
self.assertTrue(closed[0])
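# The pattern exercised above mirrors typical calling code (sketch, assuming
# the default backend and placeholder addresses):
#   with mail.get_connection() as connection:
#       connection.send_messages(
#           [EmailMessage('Subject', 'Body', '[email protected]', ['[email protected]'])])
#   # __exit__ calls close() even if send_messages() raises.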
class LocmemBackendTests(BaseEmailBackendTests, SimpleTestCase):
email_backend = 'django.core.mail.backends.locmem.EmailBackend'
def get_mailbox_content(self):
return [m.message() for m in mail.outbox]
def flush_mailbox(self):
mail.outbox = []
def tearDown(self):
super(LocmemBackendTests, self).tearDown()
mail.outbox = []
def test_locmem_shared_messages(self):
"""
Make sure that the locmem backend populates the outbox.
"""
connection = locmem.EmailBackend()
connection2 = locmem.EmailBackend()
email = EmailMessage(
'Subject', 'Content', '[email protected]', ['[email protected]'],
headers={'From': '[email protected]'},
)
connection.send_messages([email])
connection2.send_messages([email])
self.assertEqual(len(mail.outbox), 2)
def test_validate_multiline_headers(self):
# Ticket #18861 - Validate emails when using the locmem backend
with self.assertRaises(BadHeaderError):
send_mail('Subject\nMultiline', 'Content', '[email protected]', ['[email protected]'])
class FileBackendTests(BaseEmailBackendTests, SimpleTestCase):
email_backend = 'django.core.mail.backends.filebased.EmailBackend'
def setUp(self):
super(FileBackendTests, self).setUp()
self.tmp_dir = tempfile.mkdtemp()
self.addCleanup(shutil.rmtree, self.tmp_dir)
self._settings_override = override_settings(EMAIL_FILE_PATH=self.tmp_dir)
self._settings_override.enable()
def tearDown(self):
self._settings_override.disable()
super(FileBackendTests, self).tearDown()
def flush_mailbox(self):
for filename in os.listdir(self.tmp_dir):
os.unlink(os.path.join(self.tmp_dir, filename))
def get_mailbox_content(self):
messages = []
for filename in os.listdir(self.tmp_dir):
with open(os.path.join(self.tmp_dir, filename), 'rb') as fp:
session = fp.read().split(force_bytes('\n' + ('-' * 79) + '\n', encoding='ascii'))
messages.extend(message_from_bytes(m) for m in session if m)
return messages
def test_file_sessions(self):
"""Make sure opening a connection creates a new file"""
msg = EmailMessage(
'Subject', 'Content', '[email protected]', ['[email protected]'],
headers={'From': '[email protected]'},
)
connection = mail.get_connection()
connection.send_messages([msg])
self.assertEqual(len(os.listdir(self.tmp_dir)), 1)
with open(os.path.join(self.tmp_dir, os.listdir(self.tmp_dir)[0]), 'rb') as fp:
message = message_from_binary_file(fp)
self.assertEqual(message.get_content_type(), 'text/plain')
self.assertEqual(message.get('subject'), 'Subject')
self.assertEqual(message.get('from'), '[email protected]')
self.assertEqual(message.get('to'), '[email protected]')
connection2 = mail.get_connection()
connection2.send_messages([msg])
self.assertEqual(len(os.listdir(self.tmp_dir)), 2)
connection.send_messages([msg])
self.assertEqual(len(os.listdir(self.tmp_dir)), 2)
msg.connection = mail.get_connection()
self.assertTrue(connection.open())
msg.send()
self.assertEqual(len(os.listdir(self.tmp_dir)), 3)
msg.send()
self.assertEqual(len(os.listdir(self.tmp_dir)), 3)
connection.close()
class ConsoleBackendTests(BaseEmailBackendTests, SimpleTestCase):
email_backend = 'django.core.mail.backends.console.EmailBackend'
def setUp(self):
super(ConsoleBackendTests, self).setUp()
self.__stdout = sys.stdout
self.stream = sys.stdout = StringIO()
def tearDown(self):
del self.stream
sys.stdout = self.__stdout
del self.__stdout
super(ConsoleBackendTests, self).tearDown()
def flush_mailbox(self):
self.stream = sys.stdout = StringIO()
def get_mailbox_content(self):
messages = self.stream.getvalue().split(str('\n' + ('-' * 79) + '\n'))
return [message_from_bytes(force_bytes(m)) for m in messages if m]
def test_console_stream_kwarg(self):
"""
Test that the console backend can be pointed at an arbitrary stream.
"""
s = StringIO()
connection = mail.get_connection('django.core.mail.backends.console.EmailBackend', stream=s)
send_mail('Subject', 'Content', '[email protected]', ['[email protected]'], connection=connection)
message = force_bytes(s.getvalue().split('\n' + ('-' * 79) + '\n')[0])
self.assertMessageHasHeaders(message, {
('MIME-Version', '1.0'),
('Content-Type', 'text/plain; charset="utf-8"'),
('Content-Transfer-Encoding', '7bit'),
('Subject', 'Subject'),
('From', '[email protected]'),
('To', '[email protected]')})
self.assertIn(b'\nDate: ', message)
class FakeSMTPChannel(smtpd.SMTPChannel):
def collect_incoming_data(self, data):
try:
super(FakeSMTPChannel, self).collect_incoming_data(data)
except UnicodeDecodeError:
# ignore decode error in SSL/TLS connection tests as we only care
# whether the connection attempt was made
pass
class FakeSMTPServer(smtpd.SMTPServer, threading.Thread):
"""
Asyncore SMTP server wrapped into a thread. Based on DummyFTPServer from:
http://svn.python.org/view/python/branches/py3k/Lib/test/test_ftplib.py?revision=86061&view=markup
"""
channel_class = FakeSMTPChannel
def __init__(self, *args, **kwargs):
threading.Thread.__init__(self)
# New kwarg added in Python 3.5; default switching to False in 3.6.
if sys.version_info >= (3, 5):
kwargs['decode_data'] = True
smtpd.SMTPServer.__init__(self, *args, **kwargs)
self._sink = []
self.active = False
self.active_lock = threading.Lock()
self.sink_lock = threading.Lock()
def process_message(self, peer, mailfrom, rcpttos, data):
if PY3:
data = data.encode('utf-8')
m = message_from_bytes(data)
maddr = parseaddr(m.get('from'))[1]
if mailfrom != maddr:
# According to the spec, mailfrom does not necessarily match the
# From header - on Python 3 this is the case where the local part
# isn't encoded, so try to correct that.
lp, domain = mailfrom.split('@', 1)
lp = Header(lp, 'utf-8').encode()
mailfrom = '@'.join([lp, domain])
if mailfrom != maddr:
return "553 '%s' != '%s'" % (mailfrom, maddr)
with self.sink_lock:
self._sink.append(m)
def get_sink(self):
with self.sink_lock:
return self._sink[:]
def flush_sink(self):
with self.sink_lock:
self._sink[:] = []
def start(self):
assert not self.active
self.__flag = threading.Event()
threading.Thread.start(self)
self.__flag.wait()
def run(self):
self.active = True
self.__flag.set()
while self.active and asyncore.socket_map:
with self.active_lock:
asyncore.loop(timeout=0.1, count=1)
asyncore.close_all()
def stop(self):
if self.active:
self.active = False
self.join()
class SMTPBackendTestsBase(SimpleTestCase):
@classmethod
def setUpClass(cls):
super(SMTPBackendTestsBase, cls).setUpClass()
cls.server = FakeSMTPServer(('127.0.0.1', 0), None)
cls._settings_override = override_settings(
EMAIL_HOST="127.0.0.1",
EMAIL_PORT=cls.server.socket.getsockname()[1])
cls._settings_override.enable()
cls.server.start()
@classmethod
def tearDownClass(cls):
cls._settings_override.disable()
cls.server.stop()
super(SMTPBackendTestsBase, cls).tearDownClass()
class SMTPBackendTests(BaseEmailBackendTests, SMTPBackendTestsBase):
email_backend = 'django.core.mail.backends.smtp.EmailBackend'
def setUp(self):
super(SMTPBackendTests, self).setUp()
self.server.flush_sink()
def tearDown(self):
self.server.flush_sink()
super(SMTPBackendTests, self).tearDown()
def flush_mailbox(self):
self.server.flush_sink()
def get_mailbox_content(self):
return self.server.get_sink()
@override_settings(
EMAIL_HOST_USER="not empty username",
EMAIL_HOST_PASSWORD="not empty password")
def test_email_authentication_use_settings(self):
backend = smtp.EmailBackend()
self.assertEqual(backend.username, 'not empty username')
self.assertEqual(backend.password, 'not empty password')
@override_settings(
EMAIL_HOST_USER="not empty username",
EMAIL_HOST_PASSWORD="not empty password")
def test_email_authentication_override_settings(self):
backend = smtp.EmailBackend(username='username', password='password')
self.assertEqual(backend.username, 'username')
self.assertEqual(backend.password, 'password')
@override_settings(
EMAIL_HOST_USER="not empty username",
EMAIL_HOST_PASSWORD="not empty password")
def test_email_disabled_authentication(self):
backend = smtp.EmailBackend(username='', password='')
self.assertEqual(backend.username, '')
self.assertEqual(backend.password, '')
def test_auth_attempted(self):
"""
Test that opening the backend with non empty username/password tries
to authenticate against the SMTP server.
"""
backend = smtp.EmailBackend(
username='not empty username', password='not empty password')
try:
with self.assertRaisesMessage(SMTPException, 'SMTP AUTH extension not supported by server.'):
backend.open()
finally:
backend.close()
def test_server_open(self):
"""
Test that open() tells us whether it opened a connection.
"""
backend = smtp.EmailBackend(username='', password='')
self.assertFalse(backend.connection)
opened = backend.open()
backend.close()
self.assertTrue(opened)
@override_settings(EMAIL_USE_TLS=True)
def test_email_tls_use_settings(self):
backend = smtp.EmailBackend()
self.assertTrue(backend.use_tls)
@override_settings(EMAIL_USE_TLS=True)
def test_email_tls_override_settings(self):
backend = smtp.EmailBackend(use_tls=False)
self.assertFalse(backend.use_tls)
def test_email_tls_default_disabled(self):
backend = smtp.EmailBackend()
self.assertFalse(backend.use_tls)
@override_settings(EMAIL_USE_SSL=True)
def test_email_ssl_use_settings(self):
backend = smtp.EmailBackend()
self.assertTrue(backend.use_ssl)
@override_settings(EMAIL_USE_SSL=True)
def test_email_ssl_override_settings(self):
backend = smtp.EmailBackend(use_ssl=False)
self.assertFalse(backend.use_ssl)
def test_email_ssl_default_disabled(self):
backend = smtp.EmailBackend()
self.assertFalse(backend.use_ssl)
@override_settings(EMAIL_SSL_CERTFILE='foo')
def test_email_ssl_certfile_use_settings(self):
backend = smtp.EmailBackend()
self.assertEqual(backend.ssl_certfile, 'foo')
@override_settings(EMAIL_SSL_CERTFILE='foo')
def test_email_ssl_certfile_override_settings(self):
backend = smtp.EmailBackend(ssl_certfile='bar')
self.assertEqual(backend.ssl_certfile, 'bar')
def test_email_ssl_certfile_default_disabled(self):
backend = smtp.EmailBackend()
self.assertEqual(backend.ssl_certfile, None)
@override_settings(EMAIL_SSL_KEYFILE='foo')
def test_email_ssl_keyfile_use_settings(self):
backend = smtp.EmailBackend()
self.assertEqual(backend.ssl_keyfile, 'foo')
@override_settings(EMAIL_SSL_KEYFILE='foo')
def test_email_ssl_keyfile_override_settings(self):
backend = smtp.EmailBackend(ssl_keyfile='bar')
self.assertEqual(backend.ssl_keyfile, 'bar')
def test_email_ssl_keyfile_default_disabled(self):
backend = smtp.EmailBackend()
self.assertEqual(backend.ssl_keyfile, None)
@override_settings(EMAIL_USE_TLS=True)
def test_email_tls_attempts_starttls(self):
backend = smtp.EmailBackend()
self.assertTrue(backend.use_tls)
try:
with self.assertRaisesMessage(SMTPException, 'STARTTLS extension not supported by server.'):
backend.open()
finally:
backend.close()
@override_settings(EMAIL_USE_SSL=True)
def test_email_ssl_attempts_ssl_connection(self):
backend = smtp.EmailBackend()
self.assertTrue(backend.use_ssl)
try:
with self.assertRaises(SSLError):
backend.open()
finally:
backend.close()
def test_connection_timeout_default(self):
"""Test that the connection's timeout value is None by default."""
connection = mail.get_connection('django.core.mail.backends.smtp.EmailBackend')
self.assertEqual(connection.timeout, None)
def test_connection_timeout_custom(self):
"""Test that the timeout parameter can be customized."""
class MyEmailBackend(smtp.EmailBackend):
def __init__(self, *args, **kwargs):
kwargs.setdefault('timeout', 42)
super(MyEmailBackend, self).__init__(*args, **kwargs)
myemailbackend = MyEmailBackend()
myemailbackend.open()
self.assertEqual(myemailbackend.timeout, 42)
self.assertEqual(myemailbackend.connection.timeout, 42)
myemailbackend.close()
@override_settings(EMAIL_TIMEOUT=10)
def test_email_timeout_override_settings(self):
backend = smtp.EmailBackend()
self.assertEqual(backend.timeout, 10)
def test_email_msg_uses_crlf(self):
"""#23063 -- Test that RFC-compliant messages are sent over SMTP."""
send = SMTP.send
try:
smtp_messages = []
def mock_send(self, s):
smtp_messages.append(s)
return send(self, s)
SMTP.send = mock_send
email = EmailMessage('Subject', 'Content', '[email protected]', ['[email protected]'])
mail.get_connection().send_messages([email])
# Find the actual message
msg = None
for i, m in enumerate(smtp_messages):
if m[:4] == 'data':
msg = smtp_messages[i + 1]
break
self.assertTrue(msg)
if PY3:
msg = msg.decode('utf-8')
# Ensure that the message only contains CRLF and not combinations of CRLF, LF, and CR.
msg = msg.replace('\r\n', '')
self.assertNotIn('\r', msg)
self.assertNotIn('\n', msg)
finally:
SMTP.send = send
class SMTPBackendStoppedServerTest(SMTPBackendTestsBase):
"""
This test requires a separate class, because it shuts down the
FakeSMTPServer started in setUpClass(). It cannot be restarted
("RuntimeError: threads can only be started once").
"""
def test_server_stopped(self):
"""
Test that closing the backend while the SMTP server is stopped doesn't
raise an exception.
"""
backend = smtp.EmailBackend(username='', password='')
backend.open()
self.server.stop()
backend.close()
|
bsd-3-clause
|
drandykass/fatiando
|
gallery/vis/seismic-wiggle.py
|
6
|
1210
|
"""
Plotting seismic data with wiggles
-----------------------------------
One way to plot seismic data is to use black and white wiggles.
Function :func:`fatiando.vis.mpl.seismic_wiggle` does exactly this.
"""
import numpy as np
import matplotlib.pyplot as plt
from fatiando.seismic import conv
from fatiando.vis.mpl import seismic_wiggle
# We need some data to plot, so let's generate some using the convolution model
# in fatiando.seismic.conv
n_samples, n_traces = 400, 20
dt = 2e-3 # the sampling interval
velocity = 1500*np.ones((n_samples, n_traces))
# Our model will have a different velocity layer in the middle. This will cause
# a reflection on the top and one on the bottom (with reversed polarity).
velocity[150:300, :] = 2500
# For simplicity, we'll assume constant density when calculating the
# reflectivity.
rc = conv.reflectivity(velocity, 2000*np.ones_like(velocity))
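# Note (added for clarity): with constant density, the reflection coefficient
# reduces to the velocity contrast (v2 - v1)/(v2 + v1), so the top of the layer
# gives (2500 - 1500)/(2500 + 1500) = +0.25 and the bottom gives -0.25 -- the
# reversed-polarity reflection mentioned above.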
data = conv.convolutional_model(rc, f=30, wavelet=conv.rickerwave, dt=dt)
# Plot the data using wiggles
plt.figure(figsize=(6, 5))
plt.title("Seismic wiggles")
# The scale parameter makes the wiggles larger or smaller
seismic_wiggle(data, dt, scale=3, color='k')
plt.ylabel('time (s)')
plt.xlabel('trace')
plt.show()
|
bsd-3-clause
|
lizardsystem/lizard5-apps
|
lizard_map/test_templatetags.py
|
2
|
1866
|
from django.contrib.auth.models import User
from django.test import TestCase
from lizard_map.templatetags import workspaces
from lizard_map.models import WorkspaceEdit
from lizard_map.models import CollageEdit
class WorkspacesTest(TestCase):
"""Only smoke tests"""
class MockRequest(object):
session = None
def setUp(self):
pass
def test_workspace_edit(self):
user = User(username='my_little_pony')
workspace_edit = WorkspaceEdit.get_or_create('fake_key', user)
mock_request = self.MockRequest()
context = {'request': mock_request, 'user': user}
workspaces.workspace_edit(context, workspace_edit)
def test_collage_edit(self):
user = User(username='my_little_pony')
collage_edit = CollageEdit.get_or_create('fake_key', user)
mock_request = self.MockRequest()
context = {'request': mock_request, 'user': user}
workspaces.collage_edit(context, collage_edit)
def test_collage_item_statistics(self):
user = User(username='my_little_pony')
collage_edit = CollageEdit.get_or_create('fake_key', user)
collage_edit.collage_items.create(
name='naam1',
adapter_class='fake adapter',
adapter_layer_json='',
identifier='{"id":"id"}')
mock_request = self.MockRequest()
workspaces.collage_edit(mock_request, collage_edit.collage_items.all())
def test_collage_items_html(self):
user = User(username='my_little_pony')
collage_edit = CollageEdit.get_or_create('fake_key', user)
collage_edit.collage_items.create(
name='naam1',
adapter_class='fake adapter',
adapter_layer_json='',
identifier='{"id":"id"}')
workspaces.collage_items_html(
collage_edit.collage_items.all())
|
lgpl-3.0
|
phonnz/azure-storage-python
|
tests/storage_testcase.py
|
1
|
2611
|
# coding: utf-8
#-------------------------------------------------------------------------
# Copyright (c) Microsoft. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#--------------------------------------------------------------------------
import os.path
from tests.common_recordingtestcase import (
RecordingTestCase,
TestMode,
)
import tests.storage_settings_fake as fake_settings
class StorageTestCase(RecordingTestCase):
def setUp(self):
self.working_folder = os.path.dirname(__file__)
super(StorageTestCase, self).setUp()
self.fake_settings = fake_settings
if TestMode.is_playback(self.test_mode):
self.settings = self.fake_settings
else:
import tests.storage_settings_real as real_settings
self.settings = real_settings
def _scrub(self, val):
val = super(StorageTestCase, self)._scrub(val)
real_to_fake_dict = {
self.settings.STORAGE_ACCOUNT_NAME: self.fake_settings.STORAGE_ACCOUNT_NAME,
self.settings.STORAGE_ACCOUNT_KEY: self.fake_settings.STORAGE_ACCOUNT_KEY,
self.settings.REMOTE_STORAGE_ACCOUNT_KEY: self.fake_settings.REMOTE_STORAGE_ACCOUNT_KEY,
self.settings.REMOTE_STORAGE_ACCOUNT_NAME: self.fake_settings.REMOTE_STORAGE_ACCOUNT_NAME,
}
val = self._scrub_using_dict(val, real_to_fake_dict)
return val
def _create_storage_service(self, service_class, settings, account_name=None, account_key=None):
account_name = account_name or settings.STORAGE_ACCOUNT_NAME
account_key = account_key or settings.STORAGE_ACCOUNT_KEY
service = service_class(
account_name,
account_key,
)
self._set_service_options(service, settings)
return service
def _set_service_options(self, service, settings):
if settings.USE_PROXY:
service.set_proxy(
settings.PROXY_HOST,
settings.PROXY_PORT,
settings.PROXY_USER,
settings.PROXY_PASSWORD,
)
|
apache-2.0
|
Azure/azure-sdk-for-python
|
sdk/cognitiveservices/azure-cognitiveservices-knowledge-qnamaker/azure/cognitiveservices/knowledge/qnamaker/models/knowledgebases_dto_py3.py
|
1
|
1061
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class KnowledgebasesDTO(Model):
"""Collection of knowledgebases owned by a user.
:param knowledgebases: Collection of knowledgebase records.
:type knowledgebases:
list[~azure.cognitiveservices.knowledge.qnamaker.models.KnowledgebaseDTO]
"""
_attribute_map = {
'knowledgebases': {'key': 'knowledgebases', 'type': '[KnowledgebaseDTO]'},
}
def __init__(self, *, knowledgebases=None, **kwargs) -> None:
super(KnowledgebasesDTO, self).__init__(**kwargs)
self.knowledgebases = knowledgebases
|
mit
|
bobobox/ansible
|
lib/ansible/vars/__init__.py
|
17
|
30049
|
# (c) 2012-2014, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import sys
from collections import defaultdict, MutableMapping
from ansible.compat.six import iteritems
from jinja2.exceptions import UndefinedError
try:
from hashlib import sha1
except ImportError:
from sha import sha as sha1
from ansible import constants as C
from ansible.cli import CLI
from ansible.compat.six import string_types, text_type
from ansible.errors import AnsibleError, AnsibleParserError, AnsibleUndefinedVariable, AnsibleFileNotFound
from ansible.inventory.host import Host
from ansible.plugins import lookup_loader
from ansible.plugins.cache import FactCache
from ansible.template import Templar
from ansible.utils.listify import listify_lookup_plugin_terms
from ansible.utils.vars import combine_vars
from ansible.vars.unsafe_proxy import wrap_var
from ansible.module_utils._text import to_native
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
VARIABLE_CACHE = dict()
HOSTVARS_CACHE = dict()
class AnsibleInventoryVarsData(dict):
def __init__(self, *args, **kwargs):
super(AnsibleInventoryVarsData, self).__init__(*args, **kwargs)
self.path = None
def preprocess_vars(a):
'''
Ensures that vars contained in the parameter passed in are
returned as a list of dictionaries, to ensure for instance
that vars loaded from a file conform to an expected state.
'''
if a is None:
return None
elif not isinstance(a, list):
data = [ a ]
else:
data = a
for item in data:
if not isinstance(item, MutableMapping):
raise AnsibleError("variable files must contain either a dictionary of variables, or a list of dictionaries. Got: %s (%s)" % (a, type(a)))
return data
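# Illustrative examples (hypothetical inputs):
#   preprocess_vars({'a': 1}) -> [{'a': 1}]
#   preprocess_vars([{'a': 1}, {'b': 2}]) -> [{'a': 1}, {'b': 2}]
#   preprocess_vars(None) -> None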
def strip_internal_keys(dirty):
'''
All keys starting with _ansible_ are internal, so create a copy of the 'dirty' dict
and remove them from the clean one before returning it
'''
clean = dirty.copy()
for k in dirty.keys():
if isinstance(k, string_types) and k.startswith('_ansible_'):
del clean[k]
elif isinstance(dirty[k], dict):
clean[k] = strip_internal_keys(dirty[k])
return clean
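# Illustrative example (hypothetical input):
#   strip_internal_keys({'_ansible_foo': 1, 'x': {'_ansible_bar': 2, 'y': 3}})
#   -> {'x': {'y': 3}}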
class VariableManager:
def __init__(self):
self._nonpersistent_fact_cache = defaultdict(dict)
self._vars_cache = defaultdict(dict)
self._extra_vars = defaultdict(dict)
self._host_vars_files = defaultdict(dict)
self._group_vars_files = defaultdict(dict)
self._inventory = None
self._hostvars = None
self._omit_token = '__omit_place_holder__%s' % sha1(os.urandom(64)).hexdigest()
self._options_vars = defaultdict(dict)
# bad cache plugin is not fatal error
try:
self._fact_cache = FactCache()
except AnsibleError as e:
display.warning(to_native(e))
# fallback to a dict as in memory cache
self._fact_cache = {}
def __getstate__(self):
data = dict(
fact_cache = self._fact_cache,
np_fact_cache = self._nonpersistent_fact_cache,
vars_cache = self._vars_cache,
extra_vars = self._extra_vars,
host_vars_files = self._host_vars_files,
group_vars_files = self._group_vars_files,
omit_token = self._omit_token,
options_vars = self._options_vars,
#inventory = self._inventory,
)
return data
def __setstate__(self, data):
self._fact_cache = data.get('fact_cache', defaultdict(dict))
self._nonpersistent_fact_cache = data.get('np_fact_cache', defaultdict(dict))
self._vars_cache = data.get('vars_cache', defaultdict(dict))
self._extra_vars = data.get('extra_vars', dict())
self._host_vars_files = data.get('host_vars_files', defaultdict(dict))
self._group_vars_files = data.get('group_vars_files', defaultdict(dict))
self._omit_token = data.get('omit_token', '__omit_place_holder__%s' % sha1(os.urandom(64)).hexdigest())
self._inventory = data.get('inventory', None)
self._options_vars = data.get('options_vars', dict())
def _get_cache_entry(self, play=None, host=None, task=None):
play_id = "NONE"
if play:
play_id = play._uuid
host_id = "NONE"
if host:
host_id = host.get_name()
task_id = "NONE"
if task:
task_id = task._uuid
return "PLAY:%s;HOST:%s;TASK:%s" % (play_id, host_id, task_id)
@property
def extra_vars(self):
''' ensures a clean copy of the extra_vars are made '''
return self._extra_vars.copy()
@extra_vars.setter
def extra_vars(self, value):
''' ensures a clean copy of the extra_vars are used to set the value '''
assert isinstance(value, MutableMapping)
self._extra_vars = value.copy()
def set_inventory(self, inventory):
self._inventory = inventory
@property
def options_vars(self):
''' ensures a clean copy of the options_vars are made '''
return self._options_vars.copy()
@options_vars.setter
def options_vars(self, value):
''' ensures a clean copy of the options_vars are used to set the value '''
assert isinstance(value, dict)
self._options_vars = value.copy()
def _preprocess_vars(self, a):
'''
Ensures that vars contained in the parameter passed in are
returned as a list of dictionaries, to ensure for instance
that vars loaded from a file conform to an expected state.
'''
if a is None:
return None
elif not isinstance(a, list):
data = [ a ]
else:
data = a
for item in data:
if not isinstance(item, MutableMapping):
raise AnsibleError("variable files must contain either a dictionary of variables, or a list of dictionaries. Got: %s (%s)" % (a, type(a)))
return data
def get_vars(self, loader, play=None, host=None, task=None, include_hostvars=True, include_delegate_to=True, use_cache=True):
'''
Returns the variables, with optional "context" given via the parameters
for the play, host, and task (which could possibly result in different
sets of variables being returned due to the additional context).
The order of precedence is:
- play->roles->get_default_vars (if there is a play context)
- group_vars_files[host] (if there is a host context)
- host_vars_files[host] (if there is a host context)
- host->get_vars (if there is a host context)
- fact_cache[host] (if there is a host context)
- play vars (if there is a play context)
- play vars_files (if there's no host context, ignore
file names that cannot be templated)
- task->get_vars (if there is a task context)
- vars_cache[host] (if there is a host context)
- extra vars
'''
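# A minimal sketch of how this precedence plays out (assuming the default
# 'replace' hash behaviour, where later sources win on key conflicts):
#   combine_vars({'port': 80, 'name': 'web'}, {'port': 8080})
#   -> {'port': 8080, 'name': 'web'}
# so extra vars, merged last below, override everything else.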
display.debug("in VariableManager get_vars()")
cache_entry = self._get_cache_entry(play=play, host=host, task=task)
if cache_entry in VARIABLE_CACHE and use_cache:
display.debug("vars are cached, returning them now")
return VARIABLE_CACHE[cache_entry]
all_vars = dict()
magic_variables = self._get_magic_variables(
loader=loader,
play=play,
host=host,
task=task,
include_hostvars=include_hostvars,
include_delegate_to=include_delegate_to,
)
if play:
# first we compile any vars specified in defaults/main.yml
# for all roles within the specified play
for role in play.get_roles():
all_vars = combine_vars(all_vars, role.get_default_vars())
# if we have a task in this context, and that task has a role, make
# sure it sees its defaults above any other roles, as we previously
# (v1) made sure each task had a copy of its roles default vars
if task and task._role is not None and (play or task.action == 'include_role'):
all_vars = combine_vars(all_vars, task._role.get_default_vars(dep_chain=task.get_dep_chain()))
if host:
# next, if a host is specified, we load any vars from group_vars
# files and then any vars from host_vars files which may apply to
# this host or the groups it belongs to
# we merge in the special 'all' group_vars first, if they exist
if 'all' in self._group_vars_files:
data = preprocess_vars(self._group_vars_files['all'])
for item in data:
all_vars = combine_vars(all_vars, item)
# we merge in vars from groups specified in the inventory (INI or script)
all_vars = combine_vars(all_vars, host.get_group_vars())
for group in sorted(host.get_groups(), key=lambda g: (g.depth, g.name)):
if group.name in self._group_vars_files and group.name != 'all':
for data in self._group_vars_files[group.name]:
data = preprocess_vars(data)
for item in data:
all_vars = combine_vars(all_vars, item)
# then we merge in vars from the host specified in the inventory (INI or script)
all_vars = combine_vars(all_vars, host.get_vars())
# then we merge in the host_vars/<hostname> file, if it exists
host_name = host.get_name()
if host_name in self._host_vars_files:
for data in self._host_vars_files[host_name]:
data = preprocess_vars(data)
for item in data:
all_vars = combine_vars(all_vars, item)
# finally, the facts caches for this host, if it exists
try:
host_facts = wrap_var(self._fact_cache.get(host.name, dict()))
all_vars = combine_vars(all_vars, host_facts)
except KeyError:
pass
if play:
all_vars = combine_vars(all_vars, play.get_vars())
for vars_file_item in play.get_vars_files():
# create a set of temporary vars here, which incorporate the extra
# and magic vars so we can properly template the vars_files entries
temp_vars = combine_vars(all_vars, self._extra_vars)
temp_vars = combine_vars(temp_vars, magic_variables)
templar = Templar(loader=loader, variables=temp_vars)
# we assume each item in the list is itself a list, as we
# support "conditional includes" for vars_files, which mimics
# the with_first_found mechanism.
vars_file_list = vars_file_item
if not isinstance(vars_file_list, list):
vars_file_list = [ vars_file_list ]
# now we iterate through the (potential) files, and break out
# as soon as we read one from the list. If none are found, we
# raise an error, which is silently ignored at this point.
try:
for vars_file in vars_file_list:
vars_file = templar.template(vars_file)
try:
data = preprocess_vars(loader.load_from_file(vars_file))
if data is not None:
for item in data:
all_vars = combine_vars(all_vars, item)
break
except AnsibleFileNotFound:
# we continue on loader failures
continue
except AnsibleParserError:
raise
else:
# if include_delegate_to is set to False, we ignore the missing
# vars file here because we're working on a delegated host
if include_delegate_to:
raise AnsibleFileNotFound("vars file %s was not found" % vars_file_item)
except (UndefinedError, AnsibleUndefinedVariable):
if host is not None and self._fact_cache.get(host.name, dict()).get('module_setup') and task is not None:
raise AnsibleUndefinedVariable("an undefined variable was found when attempting to template the vars_files item '%s'" % vars_file_item, obj=vars_file_item)
else:
# we do not have a full context here, and the missing variable could be
# because of that, so just show a warning and continue
display.vvv("skipping vars_file '%s' due to an undefined variable" % vars_file_item)
continue
# By default, we now merge in all vars from all roles in the play,
# unless the user has disabled this via a config option
if not C.DEFAULT_PRIVATE_ROLE_VARS:
for role in play.get_roles():
all_vars = combine_vars(all_vars, role.get_vars(include_params=False))
# next, we merge in the vars from the role, which will specifically
# follow the role dependency chain, and then we merge in the tasks
# vars (which will look at parent blocks/task includes)
if task:
if task._role:
all_vars = combine_vars(all_vars, task._role.get_vars(task.get_dep_chain(), include_params=False))
all_vars = combine_vars(all_vars, task.get_vars())
# next, we merge in the vars cache (include vars) and nonpersistent
# facts cache (set_fact/register), in that order
if host:
all_vars = combine_vars(all_vars, self._vars_cache.get(host.get_name(), dict()))
all_vars = combine_vars(all_vars, self._nonpersistent_fact_cache.get(host.name, dict()))
# next, we merge in role params and task include params
if task:
if task._role:
all_vars = combine_vars(all_vars, task._role.get_role_params(task.get_dep_chain()))
# special case for include tasks, where the include params
# may be specified in the vars field for the task, which should
# have higher precedence than the vars/np facts above
all_vars = combine_vars(all_vars, task.get_include_params())
# finally, we merge in extra vars and the magic variables
all_vars = combine_vars(all_vars, self._extra_vars)
all_vars = combine_vars(all_vars, magic_variables)
# special case for the 'environment' magic variable, as someone
# may have set it as a variable and we don't want to stomp on it
if task:
if 'environment' not in all_vars:
all_vars['environment'] = task.environment
else:
display.warning("The variable 'environment' appears to be used already, which is also used internally for environment variables set on the task/block/play. You should use a different variable name to avoid conflicts with this internal variable")
# if we have a task and we're delegating to another host, figure out the
# variables for that host now so we don't have to rely on hostvars later
if task and task.delegate_to is not None and include_delegate_to:
all_vars['ansible_delegated_vars'] = self._get_delegated_vars(loader, play, task, all_vars)
#VARIABLE_CACHE[cache_entry] = all_vars
if task or play:
all_vars['vars'] = all_vars.copy()
display.debug("done with get_vars()")
return all_vars
def invalidate_hostvars_cache(self, play):
hostvars_cache_entry = self._get_cache_entry(play=play)
if hostvars_cache_entry in HOSTVARS_CACHE:
del HOSTVARS_CACHE[hostvars_cache_entry]
def _get_magic_variables(self, loader, play, host, task, include_hostvars, include_delegate_to):
'''
Returns a dictionary of so-called "magic" variables in Ansible,
which are special variables we set internally for use.
'''
variables = dict()
variables['playbook_dir'] = loader.get_basedir()
variables['ansible_playbook_python'] = sys.executable
if host:
variables['group_names'] = sorted([group.name for group in host.get_groups() if group.name != 'all'])
if self._inventory:
variables['groups'] = self._inventory.get_group_dict()
if play:
variables['role_names'] = [r._role_name for r in play.roles]
if task:
if task._role:
variables['role_name'] = task._role.get_name()
variables['role_path'] = task._role._role_path
variables['role_uuid'] = text_type(task._role._uuid)
if self._inventory is not None:
variables['inventory_dir'] = self._inventory.basedir()
variables['inventory_file'] = self._inventory.src()
if play:
# add the list of hosts in the play, as adjusted for limit/filters
variables['ansible_play_hosts_all'] = [x.name for x in self._inventory.get_hosts(pattern=play.hosts or 'all', ignore_restrictions=True)]
variables['ansible_play_hosts'] = [x for x in variables['ansible_play_hosts_all'] if x not in play._removed_hosts]
variables['ansible_play_batch'] = [x.name for x in self._inventory.get_hosts() if x.name not in play._removed_hosts]
#DEPRECATED: play_hosts should be deprecated in favor of ansible_play_batch,
# however this would take work in the templating engine, so for now we'll add both
variables['play_hosts'] = variables['ansible_play_batch']
# the 'omit' value allows params to be left out if the variable they are based on is undefined
variables['omit'] = self._omit_token
variables['ansible_version'] = CLI.version_info(gitinfo=False)
# Set options vars
for option, option_value in iteritems(self._options_vars):
variables[option] = option_value
if self._hostvars is not None and include_hostvars:
variables['hostvars'] = self._hostvars
return variables
def _get_delegated_vars(self, loader, play, task, existing_variables):
# we unfortunately need to template the delegate_to field here,
# as we're fetching vars before post_validate has been called on
# the task that has been passed in
vars_copy = existing_variables.copy()
templar = Templar(loader=loader, variables=vars_copy)
items = []
if task.loop is not None:
if task.loop in lookup_loader:
try:
loop_terms = listify_lookup_plugin_terms(terms=task.loop_args, templar=templar, loader=loader, fail_on_undefined=True, convert_bare=False)
items = lookup_loader.get(task.loop, loader=loader, templar=templar).run(terms=loop_terms, variables=vars_copy)
except AnsibleUndefinedVariable:
# This task will be skipped later due to this, so we just setup
# a dummy array for the later code so it doesn't fail
items = [None]
else:
raise AnsibleError("Unexpected failure in finding the lookup named '%s' in the available lookup plugins" % task.loop)
else:
items = [None]
delegated_host_vars = dict()
for item in items:
# update the variables with the item value for templating, in case we need it
if item is not None:
vars_copy['item'] = item
templar.set_available_variables(vars_copy)
delegated_host_name = templar.template(task.delegate_to, fail_on_undefined=False)
if delegated_host_name is None:
raise AnsibleError(message="Undefined delegate_to host for task:", obj=task._ds)
if delegated_host_name in delegated_host_vars:
# no need to repeat ourselves, as the delegate_to value
# does not appear to be tied to the loop item variable
continue
# a dictionary of variables to use if we have to create a new host below
# we set the default port based on the default transport here, to make sure
# we use the proper default for windows
new_port = C.DEFAULT_REMOTE_PORT
if C.DEFAULT_TRANSPORT == 'winrm':
new_port = 5986
new_delegated_host_vars = dict(
ansible_host=delegated_host_name,
ansible_port=new_port,
ansible_user=C.DEFAULT_REMOTE_USER,
ansible_connection=C.DEFAULT_TRANSPORT,
)
# now try to find the delegated-to host in inventory, or failing that,
# create a new host on the fly so we can fetch variables for it
delegated_host = None
if self._inventory is not None:
delegated_host = self._inventory.get_host(delegated_host_name)
# try looking it up based on the address field, and finally
# fall back to creating a host on the fly to use for the var lookup
if delegated_host is None:
if delegated_host_name in C.LOCALHOST:
delegated_host = self._inventory.localhost
else:
for h in self._inventory.get_hosts(ignore_limits=True, ignore_restrictions=True):
# check if the address matches, or if both the delegated_to host
# and the current host are in the list of localhost aliases
if h.address == delegated_host_name:
delegated_host = h
break
else:
delegated_host = Host(name=delegated_host_name)
delegated_host.vars.update(new_delegated_host_vars)
else:
delegated_host = Host(name=delegated_host_name)
delegated_host.vars.update(new_delegated_host_vars)
# now we go fetch the vars for the delegated-to host and save them in our
# master dictionary of variables to be used later in the TaskExecutor/PlayContext
delegated_host_vars[delegated_host_name] = self.get_vars(
loader=loader,
play=play,
host=delegated_host,
task=task,
include_delegate_to=False,
include_hostvars=False,
)
return delegated_host_vars
def _get_inventory_basename(self, path):
'''
Returns the basename minus the extension of the given path, so the
bare filename can be matched against host/group names later
'''
(name, ext) = os.path.splitext(os.path.basename(path))
if ext not in ('.yml', '.yaml'):
return os.path.basename(path)
else:
return name
def _load_inventory_file(self, path, loader):
'''
helper function, which loads the file and gets the
basename of the file without the extension
'''
if loader.is_directory(path):
data = dict()
try:
names = loader.list_directory(path)
except os.error as err:
raise AnsibleError("This folder cannot be listed: %s: %s." % (path, err.strerror))
# evaluate files in a stable order rather than whatever
# order the filesystem lists them.
names.sort()
# do not parse hidden files or dirs, e.g. .svn/
paths = [os.path.join(path, name) for name in names if not name.startswith('.')]
for p in paths:
results = self._load_inventory_file(path=p, loader=loader)
if results is not None:
data = combine_vars(data, results)
else:
file_name, ext = os.path.splitext(path)
data = None
if not ext or ext not in C.YAML_FILENAME_EXTENSIONS:
for test_ext in C.YAML_FILENAME_EXTENSIONS:
new_path = path + test_ext
if loader.path_exists(new_path):
data = loader.load_from_file(new_path)
break
else:
if loader.path_exists(path):
data = loader.load_from_file(path)
rval = AnsibleInventoryVarsData()
rval.path = path
if data is not None:
if not isinstance(data, dict):
raise AnsibleError("Problem parsing file '%s': line %d, column %d" % data.ansible_pos)
else:
rval.update(data)
return rval
def add_host_vars_file(self, path, loader):
'''
Loads and caches a host_vars file in the _host_vars_files dict,
where the key to that dictionary is the basename of the file, minus
the extension, for matching against a given inventory host name
'''
name = self._get_inventory_basename(path)
if name not in self._host_vars_files:
self._host_vars_files[name] = []
for entry in self._host_vars_files[name]:
if entry.path == path:
data = entry
break
else:
data = self._load_inventory_file(path, loader)
if data:
self._host_vars_files[name].append(data)
return data
def add_group_vars_file(self, path, loader):
'''
Loads and caches a group_vars file in the _group_vars_files dict,
where the key to that dictionary is the basename of the file, minus
the extension, for matching against a given inventory group name
'''
name = self._get_inventory_basename(path)
if name not in self._group_vars_files:
self._group_vars_files[name] = []
for entry in self._group_vars_files[name]:
if entry.path == path:
data = entry
break
else:
data = self._load_inventory_file(path, loader)
if data:
self._group_vars_files[name].append(data)
return data
def clear_playbook_hostgroup_vars_files(self, path):
for f in self._host_vars_files.keys():
keepers = []
for entry in self._host_vars_files[f]:
if os.path.dirname(entry.path) != os.path.join(path, 'host_vars'):
keepers.append(entry)
self._host_vars_files[f] = keepers
for f in self._group_vars_files.keys():
keepers = []
for entry in self._group_vars_files[f]:
if os.path.dirname(entry.path) != os.path.join(path, 'group_vars'):
keepers.append(entry)
self._group_vars_files[f] = keepers
def clear_facts(self, hostname):
'''
Clears the facts for a host
'''
if hostname in self._fact_cache:
del self._fact_cache[hostname]
def set_host_facts(self, host, facts):
'''
Sets or updates the given facts for a host in the fact cache.
'''
assert isinstance(facts, dict)
if host.name not in self._fact_cache:
self._fact_cache[host.name] = facts
else:
try:
self._fact_cache.update(host.name, facts)
except KeyError:
self._fact_cache[host.name] = facts
def set_nonpersistent_facts(self, host, facts):
'''
Sets or updates the given facts for a host in the fact cache.
'''
assert isinstance(facts, dict)
if host.name not in self._nonpersistent_fact_cache:
self._nonpersistent_fact_cache[host.name] = facts
else:
try:
self._nonpersistent_fact_cache[host.name].update(facts)
except KeyError:
self._nonpersistent_fact_cache[host.name] = facts
def set_host_variable(self, host, varname, value):
'''
Sets a value in the vars_cache for a host.
'''
host_name = host.get_name()
if host_name not in self._vars_cache:
self._vars_cache[host_name] = dict()
if varname in self._vars_cache[host_name] and isinstance(self._vars_cache[host_name][varname], MutableMapping) and isinstance(value, MutableMapping):
self._vars_cache[host_name] = combine_vars(self._vars_cache[host_name], {varname: value})
else:
self._vars_cache[host_name][varname] = value
|
gpl-3.0
|
markYoungH/chromium.src
|
net/tools/testserver/echo_message.py
|
187
|
13195
|
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Provides utility functions for TCP/UDP echo servers and clients.
This program has classes and functions to encode, decode, calculate checksum
and verify the "echo request" and "echo response" messages. "echo request"
message is an echo message sent from the client to the server. "echo response"
message is a response from the server to the "echo request" message from the
client.
The format of "echo request" message is
<version><checksum><payload_size><payload>. <version> is the version number
of the "echo request" protocol. <checksum> is the checksum of the <payload>.
<payload_size> is the size of the <payload>. <payload> is the echo message.
The format of "echo response" message is
<version><checksum><payload_size><key><encoded_payload>.<version>,
<checksum> and <payload_size> are same as what is in the "echo request" message.
<encoded_payload> is encoded version of the <payload>. <key> is a randomly
generated key that is used to encode/decode the <payload>.
"""
__author__ = '[email protected] (Raman Tenneti)'
from itertools import cycle
from itertools import izip
import random
class EchoHeader(object):
"""Class to keep header info of the EchoRequest and EchoResponse messages.
This class knows how to parse the checksum, payload_size from the
"echo request" and "echo response" messages. It holds the checksum,
payload_size of the "echo request" and "echo response" messages.
"""
# This specifies the version.
VERSION_STRING = '01'
# This specifies the starting position of the checksum and length of the
# checksum. Maximum value for the checksum is less than (2 ** 31 - 1).
CHECKSUM_START = 2
CHECKSUM_LENGTH = 10
CHECKSUM_FORMAT = '%010d'
CHECKSUM_END = CHECKSUM_START + CHECKSUM_LENGTH
# This specifies the starting position of the <payload_size> and length of the
# <payload_size>. Maximum number of bytes that can be sent in the <payload> is
# 9,999,999.
PAYLOAD_SIZE_START = CHECKSUM_END
PAYLOAD_SIZE_LENGTH = 7
PAYLOAD_SIZE_FORMAT = '%07d'
PAYLOAD_SIZE_END = PAYLOAD_SIZE_START + PAYLOAD_SIZE_LENGTH
def __init__(self, checksum=0, payload_size=0):
"""Initializes the checksum and payload_size of self (EchoHeader).
Args:
checksum: (int)
The checksum of the payload.
payload_size: (int)
The size of the payload.
"""
self.checksum = checksum
self.payload_size = payload_size
def ParseAndInitialize(self, echo_message):
"""Parses the echo_message and initializes self with the parsed data.
This method extracts the checksum and payload_size from the echo_message
(echo_message could be either echo_request or echo_response messages) and
initializes self (EchoHeader) with checksum and payload_size.
Args:
echo_message: (string)
The string representation of EchoRequest or EchoResponse objects.
Raises:
ValueError: Invalid data
"""
if not echo_message or len(echo_message) < EchoHeader.PAYLOAD_SIZE_END:
raise ValueError('Invalid data:%s' % echo_message)
self.checksum = int(echo_message[
EchoHeader.CHECKSUM_START:EchoHeader.CHECKSUM_END])
self.payload_size = int(echo_message[
EchoHeader.PAYLOAD_SIZE_START:EchoHeader.PAYLOAD_SIZE_END])
def InitializeFromPayload(self, payload):
"""Initializes the EchoHeader object with the payload.
It calculates checksum for the payload and initializes self (EchoHeader)
with the calculated checksum and size of the payload.
This method is used by the client code during testing.
Args:
payload: (string)
The payload is the echo string (like 'hello').
Raises:
ValueError: Invalid data
"""
if not payload:
raise ValueError('Invalid data:%s' % payload)
self.payload_size = len(payload)
self.checksum = Checksum(payload, self.payload_size)
def __str__(self):
"""String representation of the self (EchoHeader).
Returns:
A string representation of self (EchoHeader).
"""
checksum_string = EchoHeader.CHECKSUM_FORMAT % self.checksum
payload_size_string = EchoHeader.PAYLOAD_SIZE_FORMAT % self.payload_size
return EchoHeader.VERSION_STRING + checksum_string + payload_size_string
class EchoRequest(EchoHeader):
"""Class holds data specific to the "echo request" message.
This class holds the payload extracted from the "echo request" message.
"""
# This specifies the starting position of the <payload>.
PAYLOAD_START = EchoHeader.PAYLOAD_SIZE_END
def __init__(self):
"""Initializes EchoRequest object."""
EchoHeader.__init__(self)
self.payload = ''
def ParseAndInitialize(self, echo_request_data):
"""Parses and Initializes the EchoRequest object from the echo_request_data.
This method extracts the header information (checksum and payload_size) and
payload from echo_request_data.
Args:
echo_request_data: (string)
The string representation of EchoRequest object.
Raises:
ValueError: Invalid data
"""
EchoHeader.ParseAndInitialize(self, echo_request_data)
if len(echo_request_data) <= EchoRequest.PAYLOAD_START:
raise ValueError('Invalid data:%s' % echo_request_data)
self.payload = echo_request_data[EchoRequest.PAYLOAD_START:]
def InitializeFromPayload(self, payload):
"""Initializes the EchoRequest object with payload.
It calculates checksum for the payload and initializes self (EchoRequest)
object.
Args:
payload: (string)
The payload string for which "echo request" needs to be constructed.
"""
EchoHeader.InitializeFromPayload(self, payload)
self.payload = payload
def __str__(self):
"""String representation of the self (EchoRequest).
Returns:
A string representation of self (EchoRequest).
"""
return EchoHeader.__str__(self) + self.payload
class EchoResponse(EchoHeader):
"""Class holds data specific to the "echo response" message.
This class knows how to parse the "echo response" message. This class holds
key, encoded_payload and decoded_payload of the "echo response" message.
"""
# This specifies the starting position of the |key_| and length of the |key_|.
# Minimum and maximum values for the |key_| are 0 and 999,999 (zero-padded to six digits).
KEY_START = EchoHeader.PAYLOAD_SIZE_END
KEY_LENGTH = 6
KEY_FORMAT = '%06d'
KEY_END = KEY_START + KEY_LENGTH
KEY_MIN_VALUE = 0
KEY_MAX_VALUE = 999999
# This specifies the starting position of the <encoded_payload> and length
# of the <encoded_payload>.
ENCODED_PAYLOAD_START = KEY_END
def __init__(self, key='', encoded_payload='', decoded_payload=''):
"""Initializes the EchoResponse object."""
EchoHeader.__init__(self)
self.key = key
self.encoded_payload = encoded_payload
self.decoded_payload = decoded_payload
def ParseAndInitialize(self, echo_response_data=None):
"""Parses and Initializes the EchoResponse object from echo_response_data.
This method calls EchoHeader to extract header information from the
echo_response_data and then extracts the key and encoded_payload from the
echo_response_data. It also stores the decoded form of the encoded_payload.
Args:
echo_response_data: (string)
The string representation of EchoResponse object.
Raises:
ValueError: Invalid echo_response_data
"""
EchoHeader.ParseAndInitialize(self, echo_response_data)
if len(echo_response_data) <= EchoResponse.ENCODED_PAYLOAD_START:
raise ValueError('Invalid echo_response_data:%s' % echo_response_data)
self.key = echo_response_data[EchoResponse.KEY_START:EchoResponse.KEY_END]
self.encoded_payload = echo_response_data[
EchoResponse.ENCODED_PAYLOAD_START:]
self.decoded_payload = Crypt(self.encoded_payload, self.key)
def InitializeFromEchoRequest(self, echo_request):
"""Initializes EchoResponse with the data from the echo_request object.
It gets the checksum, payload_size and payload from the echo_request object
and then encodes the payload with a random key. It also saves the payload
as decoded_payload.
Args:
echo_request: (EchoRequest)
The EchoRequest object which has "echo request" message.
"""
self.checksum = echo_request.checksum
self.payload_size = echo_request.payload_size
self.key = (EchoResponse.KEY_FORMAT %
random.randrange(EchoResponse.KEY_MIN_VALUE,
EchoResponse.KEY_MAX_VALUE))
self.encoded_payload = Crypt(echo_request.payload, self.key)
self.decoded_payload = echo_request.payload
def __str__(self):
"""String representation of the self (EchoResponse).
Returns:
A string representation of self (EchoResponse).
"""
return EchoHeader.__str__(self) + self.key + self.encoded_payload
def Crypt(payload, key):
"""Encodes/decodes the payload with the key and returns encoded payload.
This method loops through the payload and XORs each byte with the key.
Args:
payload: (string)
The string to be encoded/decoded.
key: (string)
The key used to encode/decode the payload.
Returns:
An encoded/decoded string.
"""
return ''.join(chr(ord(x) ^ ord(y)) for (x, y) in izip(payload, cycle(key)))
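# Because Crypt() is a plain XOR against the cycled key, applying it twice with
# the same key recovers the input (illustrative, not part of the original module):
#   Crypt('hello', '123456')  -> 'YW_XZ'
#   Crypt('YW_XZ', '123456')  -> 'hello'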
def Checksum(payload, payload_size):
"""Calculates the checksum of the payload.
Args:
payload: (string)
The payload string for which checksum needs to be calculated.
payload_size: (int)
The number of bytes in the payload.
Returns:
The checksum of the payload.
"""
checksum = 0
length = min(payload_size, len(payload))
for i in range (0, length):
checksum += ord(payload[i])
return checksum
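# Worked example (illustrative): Checksum('hello', 5) sums the byte values
# 104 + 101 + 108 + 108 + 111 and returns 532.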
def GetEchoRequestData(payload):
"""Constructs an "echo request" message from the payload.
It builds an EchoRequest object from the payload and then returns a string
representation of the EchoRequest object.
This is used by the TCP/UDP echo clients to build the "echo request" message.
Args:
payload: (string)
The payload string for which "echo request" needs to be constructed.
Returns:
A string representation of the EchoRequest object.
Raises:
ValueError: Invalid payload
"""
try:
echo_request = EchoRequest()
echo_request.InitializeFromPayload(payload)
return str(echo_request)
except (IndexError, ValueError):
raise ValueError('Invalid payload:%s' % payload)
def GetEchoResponseData(echo_request_data):
"""Verifies the echo_request_data and returns "echo response" message.
It builds the EchoRequest object from the echo_request_data and then verifies
that the checksum of the EchoRequest is the same as the calculated checksum of
the payload. If the checksums don't match, it returns None. If the checksums
match, it builds the EchoResponse object from the EchoRequest object and returns
a string representation of the EchoResponse object.
This is used by the TCP/UDP echo servers.
Args:
echo_request_data: (string)
The string that echo servers send to the clients.
Returns:
A string representation of the EchoResponse object. It returns None if the
echo_request_data is not valid.
Raises:
ValueError: Invalid echo_request_data
"""
try:
if not echo_request_data:
raise ValueError('Invalid payload:%s' % echo_request_data)
echo_request = EchoRequest()
echo_request.ParseAndInitialize(echo_request_data)
if Checksum(echo_request.payload,
echo_request.payload_size) != echo_request.checksum:
return None
echo_response = EchoResponse()
echo_response.InitializeFromEchoRequest(echo_request)
return str(echo_response)
except (IndexError, ValueError):
raise ValueError('Invalid payload:%s' % echo_request_data)
def DecodeAndVerify(echo_request_data, echo_response_data):
"""Decodes and verifies the echo_response_data.
It builds EchoRequest and EchoResponse objects from the echo_request_data and
echo_response_data. It returns True if the EchoResponse's payload and
checksum match EchoRequest's.
This is used by the TCP/UDP echo clients for testing purposes.
Args:
echo_request_data: (string)
The request clients sent to echo servers.
echo_response_data: (string)
The response clients received from the echo servers.
Returns:
True if echo_request_data and echo_response_data match.
Raises:
ValueError: Invalid echo_request_data or Invalid echo_response
"""
try:
echo_request = EchoRequest()
echo_request.ParseAndInitialize(echo_request_data)
except (IndexError, ValueError):
raise ValueError('Invalid echo_request:%s' % echo_request_data)
try:
echo_response = EchoResponse()
echo_response.ParseAndInitialize(echo_response_data)
except (IndexError, ValueError):
raise ValueError('Invalid echo_response:%s' % echo_response_data)
return (echo_request.checksum == echo_response.checksum and
echo_request.payload == echo_response.decoded_payload)
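# End-to-end usage sketch (illustrative, not part of the original module):
#   request = GetEchoRequestData('hello')    # '0100000005320000005hello'
#   response = GetEchoResponseData(request)  # same header, then a 6-digit key
#                                            # and the XOR-encoded payload
#   DecodeAndVerify(request, response)       # -> True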
|
bsd-3-clause
|
cslzchen/osf.io
|
addons/dataverse/apps.py
|
11
|
1922
|
import os
from addons.base.apps import BaseAddonAppConfig
from addons.dataverse.settings import MAX_UPLOAD_SIZE
HERE = os.path.dirname(os.path.abspath(__file__))
TEMPLATE_PATH = os.path.join(
HERE,
'templates'
)
class DataverseAddonAppConfig(BaseAddonAppConfig):
name = 'addons.dataverse'
label = 'addons_dataverse'
full_name = 'Dataverse'
short_name = 'dataverse'
owners = ['user', 'node']
configs = ['accounts', 'node']
views = ['widget']
categories = ['storage']
include_css = {
'widget': ['dataverse.css'],
'page': [],
}
has_hgrid_files = True
node_settings_template = os.path.join(TEMPLATE_PATH, 'dataverse_node_settings.mako')
user_settings_template = os.path.join(TEMPLATE_PATH, 'dataverse_user_settings.mako')
max_file_size = MAX_UPLOAD_SIZE
@property
def get_hgrid_data(self):
# Avoid circular import
from addons.dataverse.views import _dataverse_root_folder
return _dataverse_root_folder
FILE_ADDED = 'dataverse_file_added'
FILE_REMOVED = 'dataverse_file_removed'
DATASET_LINKED = 'dataverse_dataset_linked'
DATASET_PUBLISHED = 'dataverse_dataset_published'
STUDY_LINKED = 'dataverse_study_linked'
STUDY_RELEASED = 'dataverse_study_released'
NODE_AUTHORIZED = 'dataverse_node_authorized'
NODE_DEAUTHORIZED = 'dataverse_node_deauthorized'
NODE_DEAUTHORIZED_NO_USER = 'dataverse_node_deauthorized_no_user'
actions = (FILE_ADDED, FILE_REMOVED, DATASET_LINKED, DATASET_PUBLISHED, STUDY_LINKED, STUDY_RELEASED, NODE_AUTHORIZED, NODE_DEAUTHORIZED, NODE_DEAUTHORIZED_NO_USER)
@property
def routes(self):
from .routes import api_routes
return [api_routes]
@property
def user_settings(self):
return self.get_model('UserSettings')
@property
def node_settings(self):
return self.get_model('NodeSettings')
|
apache-2.0
|
IronLanguages/ironpython3
|
Src/StdLib/Lib/test/test_listcomps.py
|
186
|
3851
|
doctests = """
########### Tests borrowed from or inspired by test_genexps.py ############
Test simple loop with conditional
>>> sum([i*i for i in range(100) if i&1 == 1])
166650
Test simple nesting
>>> [(i,j) for i in range(3) for j in range(4)]
[(0, 0), (0, 1), (0, 2), (0, 3), (1, 0), (1, 1), (1, 2), (1, 3), (2, 0), (2, 1), (2, 2), (2, 3)]
Test nesting with the inner expression dependent on the outer
>>> [(i,j) for i in range(4) for j in range(i)]
[(1, 0), (2, 0), (2, 1), (3, 0), (3, 1), (3, 2)]
Make sure the induction variable is not exposed
>>> i = 20
>>> sum([i*i for i in range(100)])
328350
>>> i
20
Verify that syntax errors are raised for listcomps used as lvalues
>>> [y for y in (1,2)] = 10 # doctest: +IGNORE_EXCEPTION_DETAIL
Traceback (most recent call last):
...
SyntaxError: ...
>>> [y for y in (1,2)] += 10 # doctest: +IGNORE_EXCEPTION_DETAIL
Traceback (most recent call last):
...
SyntaxError: ...
########### Tests borrowed from or inspired by test_generators.py ############
Make a nested list comprehension that acts like range()
>>> def frange(n):
... return [i for i in range(n)]
>>> frange(10)
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
Same again, only as a lambda expression instead of a function definition
>>> lrange = lambda n: [i for i in range(n)]
>>> lrange(10)
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
Generators can call other generators:
>>> def grange(n):
... for x in [i for i in range(n)]:
... yield x
>>> list(grange(5))
[0, 1, 2, 3, 4]
Make sure that None is a valid return value
>>> [None for i in range(10)]
[None, None, None, None, None, None, None, None, None, None]
########### Tests for various scoping corner cases ############
Return lambdas that use the iteration variable as a default argument
>>> items = [(lambda i=i: i) for i in range(5)]
>>> [x() for x in items]
[0, 1, 2, 3, 4]
Same again, only this time as a closure variable
>>> items = [(lambda: i) for i in range(5)]
>>> [x() for x in items]
[4, 4, 4, 4, 4]
Another way to test that the iteration variable is local to the list comp
>>> items = [(lambda: i) for i in range(5)]
>>> i = 20
>>> [x() for x in items]
[4, 4, 4, 4, 4]
And confirm that a closure can jump over the list comp scope
>>> items = [(lambda: y) for i in range(5)]
>>> y = 2
>>> [x() for x in items]
[2, 2, 2, 2, 2]
We also repeat each of the above scoping tests inside a function
>>> def test_func():
... items = [(lambda i=i: i) for i in range(5)]
... return [x() for x in items]
>>> test_func()
[0, 1, 2, 3, 4]
>>> def test_func():
... items = [(lambda: i) for i in range(5)]
... return [x() for x in items]
>>> test_func()
[4, 4, 4, 4, 4]
>>> def test_func():
... items = [(lambda: i) for i in range(5)]
... i = 20
... return [x() for x in items]
>>> test_func()
[4, 4, 4, 4, 4]
>>> def test_func():
... items = [(lambda: y) for i in range(5)]
... y = 2
... return [x() for x in items]
>>> test_func()
[2, 2, 2, 2, 2]
"""
__test__ = {'doctests' : doctests}
def test_main(verbose=None):
import sys
from test import support
from test import test_listcomps
support.run_doctest(test_listcomps, verbose)
# verify reference counting
if verbose and hasattr(sys, "gettotalrefcount"):
import gc
counts = [None] * 5
for i in range(len(counts)):
support.run_doctest(test_listcomps, verbose)
gc.collect()
counts[i] = sys.gettotalrefcount()
print(counts)
if __name__ == "__main__":
test_main(verbose=True)
|
apache-2.0
|
Pan0ram1x/pycoin
|
tests/sighash_single_test.py
|
19
|
6726
|
#!/usr/bin/env python
import unittest
from pycoin.ecdsa import (
generator_secp256k1,
sign as ecdsa_sign,
verify as ecdsa_verify,
)
from pycoin.encoding import (
bytes_from_int,
to_bytes_32,
)
from pycoin.key import Key
from pycoin.serialize import (
b2h,
b2h_rev,
)
from pycoin.tx import (
Tx,
TxIn,
TxOut,
)
from pycoin.tx.Tx import (
SIGHASH_ALL,
SIGHASH_ANYONECANPAY,
SIGHASH_SINGLE,
)
from pycoin.tx.TxOut import standard_tx_out_script
from pycoin.tx.script.der import (
sigdecode_der,
sigencode_der,
)
from pycoin.tx.script.tools import compile as pycoin_compile
PRIV_KEYS = (
2330949616242593315303241053456316633827293588958882755297900732239663851861,
4437411780076344925846479906614060621668407514498402815534040340772719979673,
14311886404724799688521454580288220586308410691395501373612453626821267193196,
16404731722033649474165521611800542240555275746052963990137782680023514762282,
92715304942310420502826004911529506622922082818576946681102234225452853924813,
103235678552410630318322729483874198805317322052500844759252733409163632402845,
)
#=========================================================================
def sigcheck(a_key, a_hash_for_sig, a_sig):
"""
Returns True if a_key was used to generate a_sig from a_hash_for_sig;
False otherwise.
"""
r, s = sigdecode_der(a_sig)
return ecdsa_verify(generator_secp256k1, a_key.public_pair(), a_hash_for_sig, ( r, s ))
#=========================================================================
def sigmake(a_key, a_hash_for_sig, a_sig_type=SIGHASH_ALL):
"""
Signs a_hash_for_sig with a_key and returns a DER-encoded signature
with a_sig_type appended.
"""
order = generator_secp256k1.order()
r, s = ecdsa_sign(generator_secp256k1, a_key.secret_exponent(), a_hash_for_sig)
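# Normalize to the low-S form (s <= order/2) so the signature is canonical under
# Bitcoin's malleability rules; `s + s > order` is just `2*s > order`.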
if s + s > order:
s = order - s
return sigencode_der(r, s) + bytes_from_int(a_sig_type)
#=========================================================================
class SighashSingleTest(unittest.TestCase):
#=====================================================================
def test_sighash_single_mainnet(self):
self._test_sighash_single('BTC')
#=====================================================================
def test_sighash_single_testnet3(self):
self._test_sighash_single('XTN')
#=====================================================================
def _test_sighash_single(self, netcode):
k0 = Key(secret_exponent=PRIV_KEYS[0], is_compressed=True, netcode=netcode)
k1 = Key(secret_exponent=PRIV_KEYS[1], is_compressed=True, netcode=netcode)
k2 = Key(secret_exponent=PRIV_KEYS[2], is_compressed=True, netcode=netcode)
k3 = Key(secret_exponent=PRIV_KEYS[3], is_compressed=True, netcode=netcode)
k4 = Key(secret_exponent=PRIV_KEYS[4], is_compressed=True, netcode=netcode)
k5 = Key(secret_exponent=PRIV_KEYS[5], is_compressed=True, netcode=netcode)
# Fake a coinbase transaction
coinbase_tx = Tx.coinbase_tx(k0.sec(), 500000000)
coinbase_tx.txs_out.append(TxOut(1000000000, pycoin_compile('%s OP_CHECKSIG' % b2h(k1.sec()))))
coinbase_tx.txs_out.append(TxOut(1000000000, pycoin_compile('%s OP_CHECKSIG' % b2h(k2.sec()))))
self.assertEqual('2acbe1006f7168bad538b477f7844e53de3a31ffddfcfc4c6625276dd714155a',
b2h_rev(coinbase_tx.hash()))
# Make the test transaction
txs_in = [
TxIn(coinbase_tx.hash(), 0),
TxIn(coinbase_tx.hash(), 1),
TxIn(coinbase_tx.hash(), 2),
]
txs_out = [
TxOut(900000000, standard_tx_out_script(k3.address())),
TxOut(800000000, standard_tx_out_script(k4.address())),
TxOut(800000000, standard_tx_out_script(k5.address())),
]
tx = Tx(1, txs_in, txs_out)
tx.set_unspents(coinbase_tx.txs_out)
self.assertEqual('791b98ef0a3ac87584fe273bc65abd89821569fd7c83538ac0625a8ca85ba587', b2h_rev(tx.hash()))
sig_type = SIGHASH_SINGLE
sig_hash = tx.signature_hash(coinbase_tx.txs_out[0].script, 0, sig_type)
self.assertEqual('cc52d785a3b4133504d1af9e60cd71ca422609cb41df3a08bbb466b2a98a885e', b2h(to_bytes_32(sig_hash)))
sig = sigmake(k0, sig_hash, sig_type)
self.assertTrue(sigcheck(k0, sig_hash, sig[:-1]))
tx.txs_in[0].script = pycoin_compile(b2h(sig))
self.assertTrue(tx.is_signature_ok(0))
sig_hash = tx.signature_hash(coinbase_tx.txs_out[1].script, 1, sig_type)
self.assertEqual('93bb883d70fccfba9b8aa2028567aca8357937c65af7f6f5ccc6993fd7735fb7', b2h(to_bytes_32(sig_hash)))
sig = sigmake(k1, sig_hash, sig_type)
self.assertTrue(sigcheck(k1, sig_hash, sig[:-1]))
tx.txs_in[1].script = pycoin_compile(b2h(sig))
self.assertTrue(tx.is_signature_ok(1))
sig_hash = tx.signature_hash(coinbase_tx.txs_out[2].script, 2, sig_type)
self.assertEqual('53ef7f67c3541bffcf4e0d06c003c6014e2aa1fb38ff33240b3e1c1f3f8e2a35', b2h(to_bytes_32(sig_hash)))
sig = sigmake(k2, sig_hash, sig_type)
self.assertTrue(sigcheck(k2, sig_hash, sig[:-1]))
tx.txs_in[2].script = pycoin_compile(b2h(sig))
self.assertTrue(tx.is_signature_ok(2))
sig_type = SIGHASH_SINGLE | SIGHASH_ANYONECANPAY
sig_hash = tx.signature_hash(coinbase_tx.txs_out[0].script, 0, sig_type)
self.assertEqual('2003393d246a7f136692ce7ab819c6eadc54ffea38eb4377ac75d7d461144e75', b2h(to_bytes_32(sig_hash)))
sig = sigmake(k0, sig_hash, sig_type)
self.assertTrue(sigcheck(k0, sig_hash, sig[:-1]))
tx.txs_in[0].script = pycoin_compile(b2h(sig))
self.assertTrue(tx.is_signature_ok(0))
sig_hash = tx.signature_hash(coinbase_tx.txs_out[1].script, 1, sig_type)
self.assertEqual('e3f469ac88e9f35e8eff0bd8ad4ad3bf899c80eb7645947d60860de4a08a35df', b2h(to_bytes_32(sig_hash)))
sig = sigmake(k1, sig_hash, sig_type)
self.assertTrue(sigcheck(k1, sig_hash, sig[:-1]))
tx.txs_in[1].script = pycoin_compile(b2h(sig))
self.assertTrue(tx.is_signature_ok(1))
sig_hash = tx.signature_hash(coinbase_tx.txs_out[2].script, 2, sig_type)
self.assertEqual('bacd7c3ab79cad71807312677c1788ad9565bf3c00ab9a153d206494fb8b7e6a', b2h(to_bytes_32(sig_hash)))
sig = sigmake(k2, sig_hash, sig_type)
self.assertTrue(sigcheck(k2, sig_hash, sig[:-1]))
tx.txs_in[2].script = pycoin_compile(b2h(sig))
self.assertTrue(tx.is_signature_ok(2))
if __name__ == "__main__":
unittest.main()
|
mit
|
wangyum/tensorflow
|
tensorflow/contrib/framework/python/ops/ops_test.py
|
118
|
2882
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""tensor_util tests."""
# pylint: disable=unused-import
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.framework.python.ops import ops as ops_lib
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import ops
from tensorflow.python.platform import test
class OpsTest(test.TestCase):
def testGetGraphFromEmptyInputs(self):
with ops.Graph().as_default() as g0:
self.assertIs(g0, ops_lib.get_graph_from_inputs([]))
def testGetGraphFromValidInputs(self):
g0 = ops.Graph()
with g0.as_default():
values = [constant_op.constant(0.0), constant_op.constant(1.0)]
self.assertIs(g0, ops_lib.get_graph_from_inputs(values))
self.assertIs(g0, ops_lib.get_graph_from_inputs(values, g0))
with ops.Graph().as_default():
self.assertIs(g0, ops_lib.get_graph_from_inputs(values))
self.assertIs(g0, ops_lib.get_graph_from_inputs(values, g0))
def testGetGraphFromInvalidInputs(self):
g0 = ops.Graph()
with g0.as_default():
values = [constant_op.constant(0.0), constant_op.constant(1.0)]
g1 = ops.Graph()
with self.assertRaisesRegexp(ValueError, "not from the passed-in graph"):
ops_lib.get_graph_from_inputs(values, g1)
with g1.as_default():
values.append(constant_op.constant(2.0))
with self.assertRaisesRegexp(ValueError, "must be from the same graph"):
ops_lib.get_graph_from_inputs(values)
with self.assertRaisesRegexp(ValueError, "not from the passed-in graph"):
ops_lib.get_graph_from_inputs(values, g0)
with self.assertRaisesRegexp(ValueError, "not from the passed-in graph"):
ops_lib.get_graph_from_inputs(values, g1)
def testGetNameScope(self):
with ops.name_scope("scope1"):
with ops.name_scope("scope2"):
with ops.name_scope("scope3"):
self.assertEqual("scope1/scope2/scope3", ops_lib.get_name_scope())
self.assertEqual("scope1/scope2", ops_lib.get_name_scope())
self.assertEqual("scope1", ops_lib.get_name_scope())
self.assertEqual("", ops_lib.get_name_scope())
if __name__ == "__main__":
test.main()
|
apache-2.0
|
lauri-codes/GameShop
|
gameshop/social/apps/django_app/default/models.py
|
9
|
3601
|
"""Django ORM models for Social Auth"""
import six
from django.db import models
from django.conf import settings
from django.db.utils import IntegrityError
from social.utils import setting_name
from social.storage.django_orm import DjangoUserMixin, \
DjangoAssociationMixin, \
DjangoNonceMixin, \
DjangoCodeMixin, \
BaseDjangoStorage
from social.apps.django_app.default.fields import JSONField
USER_MODEL = getattr(settings, setting_name('USER_MODEL'), None) or \
getattr(settings, 'AUTH_USER_MODEL', None) or \
'auth.User'
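# Resolution sketch (assumption): setting_name() prefixes names with
# 'SOCIAL_AUTH', so this looks up settings.SOCIAL_AUTH_USER_MODEL first, then
# AUTH_USER_MODEL, and finally falls back to the stock 'auth.User' model.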
UID_LENGTH = getattr(settings, setting_name('UID_LENGTH'), 255)
NONCE_SERVER_URL_LENGTH = getattr(
settings, setting_name('NONCE_SERVER_URL_LENGTH'), 255)
ASSOCIATION_SERVER_URL_LENGTH = getattr(
settings, setting_name('ASSOCIATION_SERVER_URL_LENGTH'), 255)
ASSOCIATION_HANDLE_LENGTH = getattr(
settings, setting_name('ASSOCIATION_HANDLE_LENGTH'), 255)
class UserSocialAuth(models.Model, DjangoUserMixin):
"""Social Auth association model"""
user = models.ForeignKey(USER_MODEL, related_name='social_auth')
provider = models.CharField(max_length=32)
uid = models.CharField(max_length=UID_LENGTH)
extra_data = JSONField()
class Meta:
"""Meta data"""
unique_together = ('provider', 'uid')
db_table = 'social_auth_usersocialauth'
@classmethod
def get_social_auth(cls, provider, uid):
try:
return cls.objects.select_related('user').get(provider=provider,
uid=uid)
except UserSocialAuth.DoesNotExist:
return None
@classmethod
def username_max_length(cls):
username_field = cls.username_field()
field = UserSocialAuth.user_model()._meta.get_field(username_field)
return field.max_length
@classmethod
def user_model(cls):
user_model = UserSocialAuth._meta.get_field('user').rel.to
if isinstance(user_model, six.string_types):
app_label, model_name = user_model.split('.')
return models.get_model(app_label, model_name)
return user_model
class Nonce(models.Model, DjangoNonceMixin):
"""One use numbers"""
server_url = models.CharField(max_length=NONCE_SERVER_URL_LENGTH)
timestamp = models.IntegerField()
salt = models.CharField(max_length=65)
class Meta:
db_table = 'social_auth_nonce'
class Association(models.Model, DjangoAssociationMixin):
"""OpenId account association"""
server_url = models.CharField(max_length=ASSOCIATION_SERVER_URL_LENGTH)
handle = models.CharField(max_length=ASSOCIATION_HANDLE_LENGTH)
secret = models.CharField(max_length=255) # Stored base64 encoded
issued = models.IntegerField()
lifetime = models.IntegerField()
assoc_type = models.CharField(max_length=64)
class Meta:
db_table = 'social_auth_association'
class Code(models.Model, DjangoCodeMixin):
email = models.EmailField()
code = models.CharField(max_length=32, db_index=True)
verified = models.BooleanField(default=False)
class Meta:
db_table = 'social_auth_code'
unique_together = ('email', 'code')
class DjangoStorage(BaseDjangoStorage):
user = UserSocialAuth
nonce = Nonce
association = Association
code = Code
@classmethod
def is_integrity_error(cls, exception):
return exception.__class__ is IntegrityError
|
gpl-2.0
|
mdn/webalyzer
|
webalyzer/analyzer/urls.py
|
1
|
1333
|
from django.conf.urls import patterns, url
from webalyzer.analyzer import views
urlpatterns = patterns(
'',
url(r'^$', views.index, name='index'),
url(r'^submit/$', views.submit, name='submit'),
url(
r'^recent-submissions$',
views.recent_submissions,
name='recent_submissions'
),
url(r'^(?P<domain>[\w\.]+)$', views.index, name='analyzed'),
url(r'^(?P<domain>[\w\.]+)/data$', views.analyzed, name='analyzed_data'),
url(
r'^(?P<domain>[\w\.]+)/source/(?P<id>\d+)/$',
views.index,
name='source_view'
),
url(
r'^(?P<domain>[\w\.]+)/source/(?P<id>\d+)/data$',
views.source_view,
name='source_view_data'
),
url(
r'^(?P<domain>[\w\.]+)/download/(?P<id>\d+)/'
'(?P<which>before|after)/(?P<filename>.*?\.css)$',
views.download,
name='download'
),
# url(r'^$', views.analyze, name='analyze'),
# url(r'^start/$', views.start, name='start'),
# url(
# r'^(?P<domain>[\w\.]+)/source/(?P<id>\d+)/$',
# views.source_view,
# name='source_view'
# ),
# url(
# r'^(?P<domain>[\w\.]+)/diff/(?P<id>\d+)/$',
# views.diff_view,
# name='diff_view'
# ),
# url(r'^(?P<domain>[\w\.]+)$', views.analyzed, name='analyzed'),
)
|
bsd-3-clause
|
saulgray/nemio-flask-old
|
lib/python2.7/site-packages/pip/_vendor/html5lib/treebuilders/etree.py
|
915
|
12621
|
from __future__ import absolute_import, division, unicode_literals
from pip._vendor.six import text_type
import re
from . import _base
from .. import ihatexml
from .. import constants
from ..constants import namespaces
from ..utils import moduleFactoryFactory
tag_regexp = re.compile("{([^}]*)}(.*)")
def getETreeBuilder(ElementTreeImplementation, fullTree=False):
ElementTree = ElementTreeImplementation
ElementTreeCommentType = ElementTree.Comment("asd").tag
class Element(_base.Node):
def __init__(self, name, namespace=None):
self._name = name
self._namespace = namespace
self._element = ElementTree.Element(self._getETreeTag(name,
namespace))
if namespace is None:
self.nameTuple = namespaces["html"], self._name
else:
self.nameTuple = self._namespace, self._name
self.parent = None
self._childNodes = []
self._flags = []
def _getETreeTag(self, name, namespace):
if namespace is None:
etree_tag = name
else:
etree_tag = "{%s}%s" % (namespace, name)
return etree_tag
def _setName(self, name):
self._name = name
self._element.tag = self._getETreeTag(self._name, self._namespace)
def _getName(self):
return self._name
name = property(_getName, _setName)
def _setNamespace(self, namespace):
self._namespace = namespace
self._element.tag = self._getETreeTag(self._name, self._namespace)
def _getNamespace(self):
return self._namespace
namespace = property(_getNamespace, _setNamespace)
def _getAttributes(self):
return self._element.attrib
def _setAttributes(self, attributes):
# Delete existing attributes first
# XXX - there may be a better way to do this...
for key in list(self._element.attrib.keys()):
del self._element.attrib[key]
for key, value in attributes.items():
if isinstance(key, tuple):
name = "{%s}%s" % (key[2], key[1])
else:
name = key
self._element.set(name, value)
attributes = property(_getAttributes, _setAttributes)
def _getChildNodes(self):
return self._childNodes
def _setChildNodes(self, value):
del self._element[:]
self._childNodes = []
for element in value:
self.insertChild(element)
childNodes = property(_getChildNodes, _setChildNodes)
def hasContent(self):
"""Return true if the node has children or text"""
return bool(self._element.text or len(self._element))
def appendChild(self, node):
self._childNodes.append(node)
self._element.append(node._element)
node.parent = self
def insertBefore(self, node, refNode):
index = list(self._element).index(refNode._element)
self._element.insert(index, node._element)
node.parent = self
def removeChild(self, node):
self._element.remove(node._element)
node.parent = None
def insertText(self, data, insertBefore=None):
if not(len(self._element)):
if not self._element.text:
self._element.text = ""
self._element.text += data
elif insertBefore is None:
# Insert the text as the tail of the last child element
if not self._element[-1].tail:
self._element[-1].tail = ""
self._element[-1].tail += data
else:
# Insert the text before the specified node
children = list(self._element)
index = children.index(insertBefore._element)
if index > 0:
if not self._element[index - 1].tail:
self._element[index - 1].tail = ""
self._element[index - 1].tail += data
else:
if not self._element.text:
self._element.text = ""
self._element.text += data
def cloneNode(self):
element = type(self)(self.name, self.namespace)
for name, value in self.attributes.items():
element.attributes[name] = value
return element
def reparentChildren(self, newParent):
if newParent.childNodes:
newParent.childNodes[-1]._element.tail += self._element.text
else:
if not newParent._element.text:
newParent._element.text = ""
if self._element.text is not None:
newParent._element.text += self._element.text
self._element.text = ""
_base.Node.reparentChildren(self, newParent)
class Comment(Element):
def __init__(self, data):
# Use the superclass constructor to set all properties on the
# wrapper element
self._element = ElementTree.Comment(data)
self.parent = None
self._childNodes = []
self._flags = []
def _getData(self):
return self._element.text
def _setData(self, value):
self._element.text = value
data = property(_getData, _setData)
class DocumentType(Element):
def __init__(self, name, publicId, systemId):
Element.__init__(self, "<!DOCTYPE>")
self._element.text = name
self.publicId = publicId
self.systemId = systemId
def _getPublicId(self):
return self._element.get("publicId", "")
def _setPublicId(self, value):
if value is not None:
self._element.set("publicId", value)
publicId = property(_getPublicId, _setPublicId)
def _getSystemId(self):
return self._element.get("systemId", "")
def _setSystemId(self, value):
if value is not None:
self._element.set("systemId", value)
systemId = property(_getSystemId, _setSystemId)
class Document(Element):
def __init__(self):
Element.__init__(self, "DOCUMENT_ROOT")
class DocumentFragment(Element):
def __init__(self):
Element.__init__(self, "DOCUMENT_FRAGMENT")
def testSerializer(element):
rv = []
def serializeElement(element, indent=0):
if not(hasattr(element, "tag")):
element = element.getroot()
if element.tag == "<!DOCTYPE>":
if element.get("publicId") or element.get("systemId"):
publicId = element.get("publicId") or ""
systemId = element.get("systemId") or ""
rv.append("""<!DOCTYPE %s "%s" "%s">""" %
(element.text, publicId, systemId))
else:
rv.append("<!DOCTYPE %s>" % (element.text,))
elif element.tag == "DOCUMENT_ROOT":
rv.append("#document")
if element.text is not None:
rv.append("|%s\"%s\"" % (' ' * (indent + 2), element.text))
if element.tail is not None:
raise TypeError("Document node cannot have tail")
if hasattr(element, "attrib") and len(element.attrib):
raise TypeError("Document node cannot have attributes")
elif element.tag == ElementTreeCommentType:
rv.append("|%s<!-- %s -->" % (' ' * indent, element.text))
else:
assert isinstance(element.tag, text_type), \
"Expected unicode, got %s, %s" % (type(element.tag), element.tag)
nsmatch = tag_regexp.match(element.tag)
if nsmatch is None:
name = element.tag
else:
ns, name = nsmatch.groups()
prefix = constants.prefixes[ns]
name = "%s %s" % (prefix, name)
rv.append("|%s<%s>" % (' ' * indent, name))
if hasattr(element, "attrib"):
attributes = []
for name, value in element.attrib.items():
nsmatch = tag_regexp.match(name)
if nsmatch is not None:
ns, name = nsmatch.groups()
prefix = constants.prefixes[ns]
attr_string = "%s %s" % (prefix, name)
else:
attr_string = name
attributes.append((attr_string, value))
for name, value in sorted(attributes):
rv.append('|%s%s="%s"' % (' ' * (indent + 2), name, value))
if element.text:
rv.append("|%s\"%s\"" % (' ' * (indent + 2), element.text))
indent += 2
for child in element:
serializeElement(child, indent)
if element.tail:
rv.append("|%s\"%s\"" % (' ' * (indent - 2), element.tail))
serializeElement(element, 0)
return "\n".join(rv)
def tostring(element):
"""Serialize an element and its child nodes to a string"""
rv = []
filter = ihatexml.InfosetFilter()
def serializeElement(element):
if isinstance(element, ElementTree.ElementTree):
element = element.getroot()
if element.tag == "<!DOCTYPE>":
if element.get("publicId") or element.get("systemId"):
publicId = element.get("publicId") or ""
systemId = element.get("systemId") or ""
rv.append("""<!DOCTYPE %s PUBLIC "%s" "%s">""" %
(element.text, publicId, systemId))
else:
rv.append("<!DOCTYPE %s>" % (element.text,))
elif element.tag == "DOCUMENT_ROOT":
if element.text is not None:
rv.append(element.text)
if element.tail is not None:
raise TypeError("Document node cannot have tail")
if hasattr(element, "attrib") and len(element.attrib):
raise TypeError("Document node cannot have attributes")
for child in element:
serializeElement(child)
elif element.tag == ElementTreeCommentType:
rv.append("<!--%s-->" % (element.text,))
else:
# This is assumed to be an ordinary element
if not element.attrib:
rv.append("<%s>" % (filter.fromXmlName(element.tag),))
else:
attr = " ".join(["%s=\"%s\"" % (
filter.fromXmlName(name), value)
for name, value in element.attrib.items()])
rv.append("<%s %s>" % (element.tag, attr))
if element.text:
rv.append(element.text)
for child in element:
serializeElement(child)
rv.append("</%s>" % (element.tag,))
if element.tail:
rv.append(element.tail)
serializeElement(element)
return "".join(rv)
class TreeBuilder(_base.TreeBuilder):
documentClass = Document
doctypeClass = DocumentType
elementClass = Element
commentClass = Comment
fragmentClass = DocumentFragment
implementation = ElementTreeImplementation
def testSerializer(self, element):
return testSerializer(element)
def getDocument(self):
if fullTree:
return self.document._element
else:
if self.defaultNamespace is not None:
return self.document._element.find(
"{%s}html" % self.defaultNamespace)
else:
return self.document._element.find("html")
def getFragment(self):
return _base.TreeBuilder.getFragment(self)._element
return locals()
getETreeModule = moduleFactoryFactory(getETreeBuilder)
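# Usage sketch (assumption, not part of this module): callers normally reach this
# via html5lib's treebuilders.getTreeBuilder("etree"), which reduces to roughly:
#   import xml.etree.ElementTree as ElementTree
#   TreeBuilder = getETreeModule(ElementTree, fullTree=False).TreeBuilder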
|
mit
|
litchfield/django
|
tests/gis_tests/gis_migrations/migrations/0001_initial.py
|
269
|
2465
|
from django.db import connection, migrations, models
from ...models import models as gis_models
ops = [
migrations.CreateModel(
name='Neighborhood',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=100, unique=True)),
('geom', gis_models.MultiPolygonField(srid=4326)),
],
options={
'required_db_features': ['gis_enabled'],
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Household',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('neighborhood', models.ForeignKey(
'gis_migrations.Neighborhood',
models.SET_NULL,
to_field='id',
null=True,
)),
('address', models.CharField(max_length=100)),
('zip_code', models.IntegerField(null=True, blank=True)),
('geom', gis_models.PointField(srid=4326, geography=True)),
],
options={
'required_db_features': ['gis_enabled'],
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Family',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=100, unique=True)),
],
options={
},
bases=(models.Model,),
),
migrations.AddField(
model_name='household',
name='family',
field=models.ForeignKey('gis_migrations.Family', models.SET_NULL, blank=True, null=True),
preserve_default=True,
)
]
if connection.features.gis_enabled and connection.features.supports_raster:
ops += [
migrations.CreateModel(
name='Heatmap',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=100, unique=True)),
('rast', gis_models.fields.RasterField(srid=4326)),
],
options={
},
bases=(models.Model,),
),
]
class Migration(migrations.Migration):
"""
Used for gis-specific migration tests.
"""
operations = ops
|
bsd-3-clause
|
rebost/django
|
django/core/management/base.py
|
8
|
14083
|
"""
Base classes for writing management commands (named commands which can
be executed through ``django-admin.py`` or ``manage.py``).
"""
import os
import sys
from io import BytesIO
from optparse import make_option, OptionParser
import traceback
import django
from django.core.exceptions import ImproperlyConfigured
from django.core.management.color import color_style
from django.utils.encoding import smart_str
class CommandError(Exception):
"""
Exception class indicating a problem while executing a management
command.
If this exception is raised during the execution of a management
command, it will be caught and turned into a nicely-printed error
message to the appropriate output stream (i.e., stderr); as a
result, raising this exception (with a sensible description of the
error) is the preferred way to indicate that something has gone
wrong in the execution of a command.
"""
pass
def handle_default_options(options):
"""
Include any default options that all commands should accept here
so that ManagementUtility can handle them before searching for
user commands.
"""
if options.settings:
os.environ['DJANGO_SETTINGS_MODULE'] = options.settings
if options.pythonpath:
sys.path.insert(0, options.pythonpath)
class OutputWrapper(object):
"""
Wrapper around stdout/stderr
"""
def __init__(self, out, style_func=None, ending='\n'):
self._out = out
self.style_func = None
if hasattr(out, 'isatty') and out.isatty():
self.style_func = style_func
self.ending = ending
def __getattr__(self, name):
return getattr(self._out, name)
def write(self, msg, style_func=None, ending=None):
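# `ending is None and self.ending or ending` is an old-style conditional
# expression: fall back to self.ending only when no explicit ending was passed.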
ending = ending is None and self.ending or ending
if ending and not msg.endswith(ending):
msg += ending
style_func = [f for f in (style_func, self.style_func, lambda x:x)
if f is not None][0]
self._out.write(smart_str(style_func(msg)))
class BaseCommand(object):
"""
The base class from which all management commands ultimately
derive.
Use this class if you want access to all of the mechanisms which
parse the command-line arguments and work out what code to call in
response; if you don't need to change any of that behavior,
consider using one of the subclasses defined in this file.
If you are interested in overriding/customizing various aspects of
the command-parsing and -execution behavior, the normal flow works
as follows:
1. ``django-admin.py`` or ``manage.py`` loads the command class
and calls its ``run_from_argv()`` method.
2. The ``run_from_argv()`` method calls ``create_parser()`` to get
an ``OptionParser`` for the arguments, parses them, performs
any environment changes requested by options like
``pythonpath``, and then calls the ``execute()`` method,
passing the parsed arguments.
3. The ``execute()`` method attempts to carry out the command by
calling the ``handle()`` method with the parsed arguments; any
output produced by ``handle()`` will be printed to standard
output and, if the command is intended to produce a block of
SQL statements, will be wrapped in ``BEGIN`` and ``COMMIT``.
4. If ``handle()`` or ``execute()`` raised any exception (e.g.
``CommandError``), ``run_from_argv()`` will instead print an error
message to ``stderr``.
Thus, the ``handle()`` method is typically the starting point for
subclasses; many built-in commands and command types either place
all of their logic in ``handle()``, or perform some additional
parsing work in ``handle()`` and then delegate from it to more
specialized methods as needed.
Several attributes affect behavior at various steps along the way:
``args``
A string listing the arguments accepted by the command,
suitable for use in help messages; e.g., a command which takes
a list of application names might set this to '<appname
appname ...>'.
``can_import_settings``
A boolean indicating whether the command needs to be able to
import Django settings; if ``True``, ``execute()`` will verify
that this is possible before proceeding. Default value is
``True``.
``help``
A short description of the command, which will be printed in
help messages.
``option_list``
This is the list of ``optparse`` options which will be fed
into the command's ``OptionParser`` for parsing arguments.
``output_transaction``
A boolean indicating whether the command outputs SQL
statements; if ``True``, the output will automatically be
wrapped with ``BEGIN;`` and ``COMMIT;``. Default value is
``False``.
``requires_model_validation``
A boolean; if ``True``, validation of installed models will be
performed prior to executing the command. Default value is
``True``. To validate an individual application's models
rather than all applications' models, call
``self.validate(app)`` from ``handle()``, where ``app`` is the
application's Python module.
"""
# Metadata about this command.
option_list = (
make_option('-v', '--verbosity', action='store', dest='verbosity', default='1',
type='choice', choices=['0', '1', '2', '3'],
help='Verbosity level; 0=minimal output, 1=normal output, 2=verbose output, 3=very verbose output'),
make_option('--settings',
help='The Python path to a settings module, e.g. "myproject.settings.main". If this isn\'t provided, the DJANGO_SETTINGS_MODULE environment variable will be used.'),
make_option('--pythonpath',
help='A directory to add to the Python path, e.g. "/home/djangoprojects/myproject".'),
make_option('--traceback', action='store_true',
help='Print traceback on exception'),
)
help = ''
args = ''
# Configuration shortcuts that alter various logic.
can_import_settings = True
requires_model_validation = True
output_transaction = False # Whether to wrap the output in a "BEGIN; COMMIT;"
def __init__(self):
self.style = color_style()
def get_version(self):
"""
Return the Django version, which should be correct for all
built-in Django commands. User-supplied commands should
override this method.
"""
return django.get_version()
def usage(self, subcommand):
"""
Return a brief description of how to use this command, by
default from the attribute ``self.help``.
"""
usage = '%%prog %s [options] %s' % (subcommand, self.args)
if self.help:
return '%s\n\n%s' % (usage, self.help)
else:
return usage
def create_parser(self, prog_name, subcommand):
"""
Create and return the ``OptionParser`` which will be used to
parse the arguments to this command.
"""
return OptionParser(prog=prog_name,
usage=self.usage(subcommand),
version=self.get_version(),
option_list=self.option_list)
def print_help(self, prog_name, subcommand):
"""
Print the help message for this command, derived from
``self.usage()``.
"""
parser = self.create_parser(prog_name, subcommand)
parser.print_help()
def run_from_argv(self, argv):
"""
Set up any environment changes requested (e.g., Python path
and Django settings), then run this command. If the
command raises a ``CommandError``, intercept it and print it sensibly
to stderr.
"""
parser = self.create_parser(argv[0], argv[1])
options, args = parser.parse_args(argv[2:])
handle_default_options(options)
try:
self.execute(*args, **options.__dict__)
except Exception as e:
if options.traceback:
self.stderr.write(traceback.format_exc())
self.stderr.write('%s: %s' % (e.__class__.__name__, e))
sys.exit(1)
def execute(self, *args, **options):
"""
Try to execute this command, performing model validation if
needed (as controlled by the attribute
``self.requires_model_validation``, except if force-skipped).
"""
# Switch to English, because django-admin.py creates database content
# like permissions, and those shouldn't contain any translations.
# But only do this if we can assume we have a working settings file,
# because django.utils.translation requires settings.
saved_lang = None
self.stdout = OutputWrapper(options.get('stdout', sys.stdout))
self.stderr = OutputWrapper(options.get('stderr', sys.stderr), self.style.ERROR)
if self.can_import_settings:
from django.utils import translation
saved_lang = translation.get_language()
translation.activate('en-us')
try:
if self.requires_model_validation and not options.get('skip_validation'):
self.validate()
output = self.handle(*args, **options)
if output:
if self.output_transaction:
# This needs to be imported here, because it relies on
# settings.
from django.db import connections, DEFAULT_DB_ALIAS
connection = connections[options.get('database', DEFAULT_DB_ALIAS)]
if connection.ops.start_transaction_sql():
self.stdout.write(self.style.SQL_KEYWORD(connection.ops.start_transaction_sql()))
self.stdout.write(output)
if self.output_transaction:
self.stdout.write('\n' + self.style.SQL_KEYWORD("COMMIT;"))
finally:
if saved_lang is not None:
translation.activate(saved_lang)
def validate(self, app=None, display_num_errors=False):
"""
Validates the given app, raising CommandError for any errors.
If app is None, then this will validate all installed apps.
"""
from django.core.management.validation import get_validation_errors
s = BytesIO()
num_errors = get_validation_errors(s, app)
if num_errors:
s.seek(0)
error_text = s.read()
raise CommandError("One or more models did not validate:\n%s" % error_text)
if display_num_errors:
self.stdout.write("%s error%s found" % (num_errors, num_errors != 1 and 's' or ''))
def handle(self, *args, **options):
"""
The actual logic of the command. Subclasses must implement
this method.
"""
raise NotImplementedError()
class AppCommand(BaseCommand):
"""
A management command which takes one or more installed application
names as arguments, and does something with each of them.
Rather than implementing ``handle()``, subclasses must implement
``handle_app()``, which will be called once for each application.
"""
args = '<appname appname ...>'
def handle(self, *app_labels, **options):
from django.db import models
if not app_labels:
raise CommandError('Enter at least one appname.')
try:
app_list = [models.get_app(app_label) for app_label in app_labels]
except (ImproperlyConfigured, ImportError) as e:
raise CommandError("%s. Are you sure your INSTALLED_APPS setting is correct?" % e)
output = []
for app in app_list:
app_output = self.handle_app(app, **options)
if app_output:
output.append(app_output)
return '\n'.join(output)
def handle_app(self, app, **options):
"""
Perform the command's actions for ``app``, which will be the
Python module corresponding to an application name given on
the command line.
"""
raise NotImplementedError()
class LabelCommand(BaseCommand):
"""
A management command which takes one or more arbitrary arguments
(labels) on the command line, and does something with each of
them.
Rather than implementing ``handle()``, subclasses must implement
``handle_label()``, which will be called once for each label.
If the arguments should be names of installed applications, use
``AppCommand`` instead.
"""
args = '<label label ...>'
label = 'label'
def handle(self, *labels, **options):
if not labels:
raise CommandError('Enter at least one %s.' % self.label)
output = []
for label in labels:
label_output = self.handle_label(label, **options)
if label_output:
output.append(label_output)
return '\n'.join(output)
def handle_label(self, label, **options):
"""
Perform the command's actions for ``label``, which will be the
string as given on the command line.
"""
raise NotImplementedError()
class NoArgsCommand(BaseCommand):
"""
A command which takes no arguments on the command line.
Rather than implementing ``handle()``, subclasses must implement
``handle_noargs()``; ``handle()`` itself is overridden to ensure
no arguments are passed to the command.
Attempting to pass arguments will raise ``CommandError``.
"""
args = ''
def handle(self, *args, **options):
if args:
raise CommandError("Command doesn't accept any arguments")
return self.handle_noargs(**options)
def handle_noargs(self, **options):
"""
Perform this command's actions.
"""
raise NotImplementedError()
|
bsd-3-clause
|
silenci/neutron
|
neutron/tests/unit/plugins/ml2/drivers/mech_sriov/mech_driver/test_mech_sriov_nic_switch.py
|
15
|
12687
|
# Copyright 2014 Mellanox Technologies, Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from oslo_config import cfg
import testtools
from neutron.common import constants
from neutron.extensions import portbindings
from neutron.plugins.common import constants as p_const
from neutron.plugins.ml2 import config # noqa
from neutron.plugins.ml2 import driver_api as api
from neutron.plugins.ml2.drivers.mech_sriov.mech_driver \
import exceptions as exc
from neutron.plugins.ml2.drivers.mech_sriov.mech_driver import mech_driver
from neutron.tests.unit.plugins.ml2 import _test_mech_agent as base
MELLANOX_CONNECTX3_PCI_INFO = '15b3:1004'
DEFAULT_PCI_INFO = ['15b3:1004', '8086:10ca']
class TestFakePortContext(base.FakePortContext):
def __init__(self, agent_type, agents, segments,
vnic_type=portbindings.VNIC_NORMAL,
profile={'pci_vendor_info':
MELLANOX_CONNECTX3_PCI_INFO}):
super(TestFakePortContext, self).__init__(agent_type,
agents,
segments,
vnic_type)
self._bound_profile = profile
@property
def current(self):
return {'id': base.PORT_ID,
'binding:vnic_type': self._bound_vnic_type,
'binding:profile': self._bound_profile}
def set_binding(self, segment_id, vif_type, vif_details, state):
self._bound_segment_id = segment_id
self._bound_vif_type = vif_type
self._bound_vif_details = vif_details
self._bound_state = state
class SriovNicSwitchMechanismBaseTestCase(base.AgentMechanismBaseTestCase):
VIF_TYPE = mech_driver.VIF_TYPE_HW_VEB
CAP_PORT_FILTER = False
AGENT_TYPE = constants.AGENT_TYPE_NIC_SWITCH
VLAN_SEGMENTS = base.AgentMechanismVlanTestCase.VLAN_SEGMENTS
GOOD_MAPPINGS = {'fake_physical_network': 'fake_device'}
GOOD_CONFIGS = {'device_mappings': GOOD_MAPPINGS}
BAD_MAPPINGS = {'wrong_physical_network': 'wrong_device'}
BAD_CONFIGS = {'device_mappings': BAD_MAPPINGS}
AGENTS = [{'alive': True,
'configurations': GOOD_CONFIGS}]
AGENTS_DEAD = [{'alive': False,
'configurations': GOOD_CONFIGS}]
AGENTS_BAD = [{'alive': False,
'configurations': GOOD_CONFIGS},
{'alive': True,
'configurations': BAD_CONFIGS}]
def setUp(self):
cfg.CONF.set_override('supported_pci_vendor_devs',
DEFAULT_PCI_INFO,
'ml2_sriov')
cfg.CONF.set_override('agent_required', True, 'ml2_sriov')
super(SriovNicSwitchMechanismBaseTestCase, self).setUp()
self.driver = mech_driver.SriovNicSwitchMechanismDriver()
self.driver.initialize()
class SriovSwitchMechGenericTestCase(SriovNicSwitchMechanismBaseTestCase,
base.AgentMechanismGenericTestCase):
def test_check_segment(self):
"""Validate the check_segment call."""
segment = {api.NETWORK_TYPE: ""}
segment[api.NETWORK_TYPE] = p_const.TYPE_VLAN
self.assertTrue(self.driver.check_segment(segment))
# Validate a network type not currently supported
segment[api.NETWORK_TYPE] = p_const.TYPE_GRE
self.assertFalse(self.driver.check_segment(segment))
def test_check_segment_allows_supported_network_types(self):
for network_type in self.driver.supported_network_types:
segment = {api.NETWORK_TYPE: network_type}
self.assertTrue(self.driver.check_segment(segment))
class SriovMechVlanTestCase(SriovNicSwitchMechanismBaseTestCase,
base.AgentMechanismBaseTestCase):
VLAN_SEGMENTS = [{api.ID: 'unknown_segment_id',
api.NETWORK_TYPE: 'no_such_type'},
{api.ID: 'vlan_segment_id',
api.NETWORK_TYPE: 'vlan',
api.PHYSICAL_NETWORK: 'fake_physical_network',
api.SEGMENTATION_ID: 1234}]
def test_type_vlan(self):
context = TestFakePortContext(self.AGENT_TYPE,
self.AGENTS,
self.VLAN_SEGMENTS,
portbindings.VNIC_DIRECT)
self.driver.bind_port(context)
self._check_bound(context, self.VLAN_SEGMENTS[1])
def test_type_vlan_bad(self):
context = TestFakePortContext(self.AGENT_TYPE,
self.AGENTS_BAD,
self.VLAN_SEGMENTS,
portbindings.VNIC_DIRECT)
self.driver.bind_port(context)
self._check_unbound(context)
class SriovSwitchMechVnicTypeTestCase(SriovNicSwitchMechanismBaseTestCase):
def _check_vif_type_for_vnic_type(self, vnic_type,
expected_vif_type):
context = TestFakePortContext(self.AGENT_TYPE,
self.AGENTS,
self.VLAN_SEGMENTS,
vnic_type)
self.driver.bind_port(context)
self.assertEqual(expected_vif_type, context._bound_vif_type)
vlan = int(context._bound_vif_details[portbindings.VIF_DETAILS_VLAN])
self.assertEqual(1234, vlan)
def test_vnic_type_direct(self):
self._check_vif_type_for_vnic_type(portbindings.VNIC_DIRECT,
mech_driver.VIF_TYPE_HW_VEB)
def test_vnic_type_macvtap(self):
self._check_vif_type_for_vnic_type(portbindings.VNIC_MACVTAP,
mech_driver.VIF_TYPE_HW_VEB)
class SriovSwitchMechProfileTestCase(SriovNicSwitchMechanismBaseTestCase):
def _check_vif_for_pci_info(self, pci_vendor_info, expected_vif_type):
context = TestFakePortContext(self.AGENT_TYPE,
self.AGENTS,
self.VLAN_SEGMENTS,
portbindings.VNIC_DIRECT,
{'pci_vendor_info': pci_vendor_info})
self.driver.bind_port(context)
self.assertEqual(expected_vif_type, context._bound_vif_type)
def test_profile_supported_pci_info(self):
self._check_vif_for_pci_info(MELLANOX_CONNECTX3_PCI_INFO,
mech_driver.VIF_TYPE_HW_VEB)
def test_profile_unsupported_pci_info(self):
with mock.patch('neutron.plugins.ml2.drivers.mech_sriov.'
'mech_driver.mech_driver.LOG') as log_mock:
self._check_vif_for_pci_info('xxxx:yyyy', None)
log_mock.debug.assert_called_with('Refusing to bind due to '
'unsupported pci_vendor device')
class SriovSwitchMechProfileFailTestCase(SriovNicSwitchMechanismBaseTestCase):
def _check_for_pci_vendor_info(self, pci_vendor_info):
context = TestFakePortContext(self.AGENT_TYPE,
self.AGENTS,
self.VLAN_SEGMENTS,
portbindings.VNIC_DIRECT,
pci_vendor_info)
self.driver._check_supported_pci_vendor_device(context)
def test_profile_missing_profile(self):
with mock.patch('neutron.plugins.ml2.drivers.mech_sriov.'
'mech_driver.mech_driver.LOG') as log_mock:
self._check_for_pci_vendor_info({})
log_mock.debug.assert_called_with("Missing profile in port"
" binding")
def test_profile_missing_pci_vendor_info(self):
with mock.patch('neutron.plugins.ml2.drivers.mech_sriov.'
'mech_driver.mech_driver.LOG') as log_mock:
self._check_for_pci_vendor_info({'aa': 'bb'})
log_mock.debug.assert_called_with("Missing pci vendor"
" info in profile")
class SriovSwitchMechVifDetailsTestCase(SriovNicSwitchMechanismBaseTestCase):
VLAN_SEGMENTS = [{api.ID: 'vlan_segment_id',
api.NETWORK_TYPE: 'vlan',
api.PHYSICAL_NETWORK: 'fake_physical_network',
api.SEGMENTATION_ID: 1234}]
def test_vif_details_contains_vlan_id(self):
context = TestFakePortContext(self.AGENT_TYPE,
self.AGENTS,
self.VLAN_SEGMENTS,
portbindings.VNIC_DIRECT)
self.driver.bind_port(context)
vif_details = context._bound_vif_details
self.assertIsNotNone(vif_details)
vlan_id = int(vif_details.get(portbindings.VIF_DETAILS_VLAN))
self.assertEqual(1234, vlan_id)
def test_get_vif_details_for_flat_network(self):
segment = {api.NETWORK_TYPE: p_const.TYPE_FLAT}
vif_details = self.driver._get_vif_details(segment)
vlan_id = vif_details[portbindings.VIF_DETAILS_VLAN]
self.assertEqual('0', vlan_id)
def test_get_vif_details_unsupported_net(self):
segment = {api.NETWORK_TYPE: 'foo'}
with testtools.ExpectedException(exc.SriovUnsupportedNetworkType):
self.driver._get_vif_details(segment)
def test_get_vif_details_without_agent(self):
cfg.CONF.set_override('agent_required', False, 'ml2_sriov')
self.driver = mech_driver.SriovNicSwitchMechanismDriver()
self.driver.initialize()
context = TestFakePortContext(self.AGENT_TYPE,
self.AGENTS,
self.VLAN_SEGMENTS,
portbindings.VNIC_DIRECT)
self.driver.bind_port(context)
self.assertEqual(constants.PORT_STATUS_ACTIVE, context._bound_state)
def test_get_vif_details_with_agent(self):
context = TestFakePortContext(self.AGENT_TYPE,
self.AGENTS,
self.VLAN_SEGMENTS,
portbindings.VNIC_DIRECT)
self.driver.bind_port(context)
self.assertEqual(constants.PORT_STATUS_DOWN, context._bound_state)
class SriovSwitchMechConfigTestCase(SriovNicSwitchMechanismBaseTestCase):
    def _set_config(self, pci_devs=None):
        # Avoid a mutable default argument; fall back to the single entry.
        if pci_devs is None:
            pci_devs = ['aa:bb']
cfg.CONF.set_override('mechanism_drivers',
['logger', 'sriovnicswitch'], 'ml2')
cfg.CONF.set_override('supported_pci_vendor_devs', pci_devs,
'ml2_sriov')
def test_pci_vendor_config_single_entry(self):
self._set_config()
self.driver.initialize()
self.assertEqual(['aa:bb'], self.driver.pci_vendor_info)
def test_pci_vendor_config_multiple_entry(self):
self._set_config(['x:y', 'a:b'])
self.driver.initialize()
self.assertEqual(['x:y', 'a:b'], self.driver.pci_vendor_info)
def test_pci_vendor_config_default_entry(self):
self.driver.initialize()
self.assertEqual(DEFAULT_PCI_INFO,
self.driver.pci_vendor_info)
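    # Valid entries take the form 'vendor_id:product_id'; the malformed
    # entries exercised below should make initialize() raise cfg.Error.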
def test_pci_vendor_config_wrong_entry(self):
self._set_config(['wrong_entry'])
self.assertRaises(cfg.Error, self.driver.initialize)
def test_initialize_missing_product_id(self):
self._set_config(['vendor_id:'])
self.assertRaises(cfg.Error, self.driver.initialize)
def test_initialize_missing_vendor_id(self):
self._set_config([':product_id'])
self.assertRaises(cfg.Error, self.driver.initialize)
def test_initialize_multiple_colons(self):
self._set_config(['foo:bar:baz'])
self.assertRaises(cfg.Error, self.driver.initialize)
def test_initialize_empty_string(self):
self._set_config([''])
self.assertRaises(cfg.Error, self.driver.initialize)
|
apache-2.0
|
WalnutATiie/google_search
|
gtaskpool/gtaskpool_helper.py
|
1
|
1815
|
#!/usr/bin/env python
# encoding: utf-8
import gtaskpool
import random
from datetime import datetime
import json
class Interface(object):
def __init__(self, get_proxy, proxy_feedback, get_useragent):
self.get_proxy = get_proxy
self.proxy_feedback = proxy_feedback
self.get_useragent = get_useragent
def get_useragent_wrapper(useragents):
def get_useragent():
if len(useragents) == 0:
return None
        return random.choice(useragents)
return get_useragent
def get_proxy_wrapper(next_proxy):
def get_proxy(url):
return next_proxy(url).proxy
return get_proxy
def get_interfaces(proxymgr, useragents):
return Interface(
get_proxy = get_proxy_wrapper(proxymgr.next_proxy),
proxy_feedback = proxymgr.feedback,
get_useragent = get_useragent_wrapper(useragents))
def retry_task(task, task_log, max_try):
trycnt = 0
while trycnt != max_try:
res = task()
res['try_idx'] = trycnt + 1
if trycnt+1 == max_try or res['finish']:
res['last_try'] = True
else:
res['last_try'] = False
log_task_result(res, task_log)
if res['finish']:
return
trycnt += 1
def log_task_result(result, filehandle):
result['ts'] = str(datetime.now())
jstr = json.dumps(result, ensure_ascii=False).encode('utf-8')
filehandle.write(jstr + "\n")
def runtasks(task_generator, task_log, max_try=10):
def gen_task():
while True:
try:
                task = next(task_generator)
            except StopIteration:
return
yield gtaskpool.Task(retry_task, [task, task_log, max_try])
gtaskpool.runtasks(gen_task())
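# --- A minimal usage sketch (not part of the original module) ---
# It assumes only what is defined above: a task is a callable returning a
# dict with a 'finish' flag, and the log handle only needs a write() method.
def _example_run():
    def make_task(n):
        def task():
            # Pretend every task succeeds on its first try.
            return {'finish': True, 'task_id': n}
        return task

    def generate():
        for n in range(3):
            yield make_task(n)

    with open('task.log', 'w') as task_log:
        runtasks(generate(), task_log, max_try=3)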
|
mit
|
TRESCLOUD/odoo
|
addons/email_template/tests/__init__.py
|
56
|
1104
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Business Applications
# Copyright (c) 2012-TODAY OpenERP S.A. <http://openerp.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import test_mail
checks = [
test_mail,
]
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
Neuromancer2701/OpenRoverRosSerial
|
src/open_rover_serial/src/vesc.py
|
1
|
3713
|
"""
Sends commands to, and receives telemetry from, VESC 6 motor controllers over serial.
"""
import serial, sys, glob
from time import sleep
from packets import Packet
from commands import Commands
from datatypes import Scale, PacketID
class Vesc:
def __init__(self):
self.left_back = None
self.right_back = None
self.commands = Commands()
self.generalpacket = Packet()
self.get_data = Packet(8, PacketID.GET_VALUES, Scale.NONE)
self.alive = Packet(8, PacketID.ALIVE, Scale.NONE)
def print_mc_data(self):
print "PWM:", self.commands.mcData.duty_now
print "RPM:", self.commands.mcData.rpm
def setandmonitorPWM(self, leftduty, rightduty):
Packet(8, PacketID.SET_DUTY, Scale.NONE, 32, leftduty, Scale.E5).send(self.left_back)
Packet(8, PacketID.SET_DUTY, Scale.NONE, 32, -rightduty, Scale.E5).send(self.right_back)
'''
sleep(0.05)
self.get_data.send(self.left_back)
sleep(0.05)
if self.left_back.in_waiting > 5:
converted = [int(elem.encode("hex"), 16) for elem in self.left_back.read_all()]
if self.generalpacket.process_buffer(converted):
self.commands.process_packet(self.generalpacket.goodpacket)
self.print_mc_data()
self.get_data.send(self.right_back)
sleep(0.05)
if self.right_back.in_waiting > 5:
converted = [int(elem.encode("hex"), 16) for elem in self.right_back.read_all()]
if self.generalpacket.process_buffer(converted):
self.commands.process_packet(self.generalpacket.goodpacket)
self.print_mc_data()
'''
def findandmapcontrollers(self):
left_back_port = ""
right_back_port = ""
get_data = Packet(8, PacketID.GET_VALUES, Scale.NONE)
        if sys.platform.startswith('linux'):
temp_list = glob.glob('/dev/tty[A]*')
for a_port in temp_list:
try:
vesc_usb = serial.Serial(a_port, 115200, timeout=0.1)
vesc_usb.flush()
sleep(2)
get_data.send(vesc_usb)
sleep(0.5)
if vesc_usb.in_waiting > 5:
buffer = Packet()
converted = [int(elem.encode("hex"), 16) for elem in vesc_usb.read_all()]
if buffer.process_buffer(converted):
self.commands.process_packet(buffer.goodpacket)
if self.commands.mcData.vesc_id == 100:
left_back_port = a_port
print "Found left wheel.\n"
elif self.commands.mcData.vesc_id == 200:
print "Found right wheel.\n"
right_back_port = a_port
vesc_usb.close()
except serial.SerialException:
pass
if len(left_back_port) > 0 and len(right_back_port) > 0:
self.left_back = serial.Serial(left_back_port, 115200, timeout=0.1)
self.right_back = serial.Serial(right_back_port, 115200, timeout=0.1)
self.left_back.flush()
self.right_back.flush()
#data = [2, 59, 4, 1, 100, 252, 188, 255, 255, 255, 238, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 1, 244, 0, 0, 7, 128, 1, 109, 0, 0, 0, 156, 0, 0, 0, 0, 0, 0, 22, 44, 0, 0, 0, 24, 0, 1, 14, 20, 0, 1, 48, 94, 0, 0, 0, 0, 0, 100, 38, 197, 3]
'''
if __name__ == '__main__':
print "Test!!"
vesc = Vesc()
vesc.findandmapcontrollers()
while True:
vesc.setandmonitorPWM(0.5, 0.5)
'''
|
mit
|
cisco-sas/katnip
|
unit_tests/test_model_low_level_radamsa_field.py
|
1
|
1913
|
# Copyright (C) 2016 Cisco Systems, Inc. and/or its affiliates. All rights reserved.
#
# This file is part of Katnip.
#
# Katnip is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# Katnip is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Katnip. If not, see <http://www.gnu.org/licenses/>.
'''
Tests for RadamsaField:
'''
from common import metaTest
from test_model_low_level_field import ValueTestCase
from bitstring import Bits
from katnip.model.low_level.radamsa import RadamsaField
class RadamsaFieldTests(ValueTestCase):
__meta__ = False
def setUp(self, cls=RadamsaField):
super(RadamsaFieldTests, self).setUp(cls)
self._fuzz_count = 500
self.seed = 123111
self.default_value = 'RadamsaField test'
self.default_value_rendered = Bits(bytes=self.default_value)
self.uut_name = 'RadamsaFieldTest'
def get_default_field(self, fuzzable=True):
return self.cls(value=self.default_value, fuzzable=fuzzable, name=self.uut_name, fuzz_count=self._fuzz_count, seed=self.seed)
def _base_check(self, field):
num_mutations = field.num_mutations()
mutations = self._get_all_mutations(field)
self.assertEqual(num_mutations, len(mutations))
mutations = self._get_all_mutations(field)
self.assertEqual(num_mutations, len(mutations))
@metaTest
def testMutateAllDifferent(self):
        # Radamsa can occasionally produce identical mutations, so skip this check.
pass
|
gpl-2.0
|
odootr/odoo
|
addons/edi/models/__init__.py
|
442
|
1116
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Business Applications
# Copyright (c) 2011 OpenERP S.A. <http://openerp.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import edi
import res_partner
import res_company
import res_currency
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
shybovycha/buck
|
third-party/py/argparse/doc/source/conf.py
|
84
|
7470
|
# -*- coding: utf-8 -*-
#
# argparse documentation build configuration file, created by
# sphinx-quickstart on Sun Mar 27 01:27:16 2011.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'argparse'
copyright = u'2011, Steven J. Bethard'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.2'
# The full version, including alpha/beta/rc tags.
release = '1.2'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'argparsedoc'
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'argparse.tex', u'argparse Documentation',
u'Steven J. Bethard', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'argparse', u'argparse Documentation',
[u'Steven J. Bethard'], 1)
]
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'argparse', u'argparse Documentation', u'Steven J. Bethard',
'argparse', 'One line description of project.', 'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
texinfo_appendices = []
|
apache-2.0
|
trishnaguha/ansible
|
lib/ansible/modules/cloud/cloudstack/cs_pod.py
|
14
|
7215
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2016, René Moser <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['stableinterface'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: cs_pod
short_description: Manages pods on Apache CloudStack based clouds.
description:
- Create, update, delete pods.
version_added: "2.1"
author: "René Moser (@resmo)"
options:
name:
description:
- Name of the pod.
required: true
id:
description:
      - UUID of the existing pod.
start_ip:
description:
- Starting IP address for the Pod.
- Required on C(state=present)
end_ip:
description:
- Ending IP address for the Pod.
netmask:
description:
- Netmask for the Pod.
- Required on C(state=present)
gateway:
description:
- Gateway for the Pod.
- Required on C(state=present)
zone:
description:
      - Name of the zone to which the pod belongs.
- If not set, default zone is used.
state:
description:
- State of the pod.
default: 'present'
choices: [ 'present', 'enabled', 'disabled', 'absent' ]
extends_documentation_fragment: cloudstack
'''
EXAMPLES = '''
- name: Ensure a pod is present
local_action:
module: cs_pod
name: pod1
zone: ch-zrh-ix-01
start_ip: 10.100.10.101
gateway: 10.100.10.1
netmask: 255.255.255.0
- name: Ensure a pod is disabled
local_action:
module: cs_pod
name: pod1
zone: ch-zrh-ix-01
state: disabled
- name: Ensure a pod is enabled
local_action:
module: cs_pod
name: pod1
zone: ch-zrh-ix-01
state: enabled
- name: Ensure a pod is absent
local_action:
module: cs_pod
name: pod1
zone: ch-zrh-ix-01
state: absent
'''
RETURN = '''
---
id:
description: UUID of the pod.
returned: success
type: str
sample: 04589590-ac63-4ffc-93f5-b698b8ac38b6
name:
description: Name of the pod.
returned: success
type: str
sample: pod01
start_ip:
description: Starting IP of the pod.
returned: success
type: str
sample: 10.100.1.101
end_ip:
description: Ending IP of the pod.
returned: success
type: str
sample: 10.100.1.254
netmask:
description: Netmask of the pod.
returned: success
type: str
sample: 255.255.255.0
gateway:
description: Gateway of the pod.
returned: success
type: str
sample: 10.100.1.1
allocation_state:
description: State of the pod.
returned: success
type: str
sample: Enabled
zone:
description: Name of zone the pod is in.
returned: success
type: str
sample: ch-gva-2
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.cloudstack import (
AnsibleCloudStack,
cs_argument_spec,
cs_required_together
)
class AnsibleCloudStackPod(AnsibleCloudStack):
def __init__(self, module):
super(AnsibleCloudStackPod, self).__init__(module)
self.returns = {
'endip': 'end_ip',
'startip': 'start_ip',
'gateway': 'gateway',
'netmask': 'netmask',
'allocationstate': 'allocation_state',
}
self.pod = None
def _get_common_pod_args(self):
args = {
'name': self.module.params.get('name'),
'zoneid': self.get_zone(key='id'),
'startip': self.module.params.get('start_ip'),
'endip': self.module.params.get('end_ip'),
'netmask': self.module.params.get('netmask'),
'gateway': self.module.params.get('gateway')
}
state = self.module.params.get('state')
if state in ['enabled', 'disabled']:
args['allocationstate'] = state.capitalize()
return args
def get_pod(self):
if not self.pod:
args = {
'zoneid': self.get_zone(key='id')
}
uuid = self.module.params.get('id')
if uuid:
args['id'] = uuid
else:
args['name'] = self.module.params.get('name')
pods = self.query_api('listPods', **args)
if pods:
for pod in pods['pod']:
                    if not args.get('name'):
self.pod = self._transform_ip_list(pod)
break
elif args['name'] == pod['name']:
self.pod = self._transform_ip_list(pod)
break
return self.pod
def present_pod(self):
pod = self.get_pod()
if pod:
pod = self._update_pod()
else:
pod = self._create_pod()
return pod
def _create_pod(self):
required_params = [
'start_ip',
'netmask',
'gateway',
]
self.module.fail_on_missing_params(required_params=required_params)
pod = None
self.result['changed'] = True
args = self._get_common_pod_args()
if not self.module.check_mode:
res = self.query_api('createPod', **args)
pod = res['pod']
return pod
def _update_pod(self):
pod = self.get_pod()
args = self._get_common_pod_args()
args['id'] = pod['id']
if self.has_changed(args, pod):
self.result['changed'] = True
if not self.module.check_mode:
res = self.query_api('updatePod', **args)
pod = res['pod']
return pod
def absent_pod(self):
pod = self.get_pod()
if pod:
self.result['changed'] = True
args = {
'id': pod['id']
}
if not self.module.check_mode:
self.query_api('deletePod', **args)
return pod
def _transform_ip_list(self, resource):
""" Workaround for 4.11 return API break """
keys = ['endip', 'startip']
if resource:
for key in keys:
if key in resource and isinstance(resource[key], list):
resource[key] = resource[key][0]
return resource
def get_result(self, pod):
pod = self._transform_ip_list(pod)
super(AnsibleCloudStackPod, self).get_result(pod)
return self.result
def main():
argument_spec = cs_argument_spec()
argument_spec.update(dict(
id=dict(),
name=dict(required=True),
gateway=dict(),
netmask=dict(),
start_ip=dict(),
end_ip=dict(),
zone=dict(),
state=dict(choices=['present', 'enabled', 'disabled', 'absent'], default='present'),
))
module = AnsibleModule(
argument_spec=argument_spec,
required_together=cs_required_together(),
supports_check_mode=True
)
acs_pod = AnsibleCloudStackPod(module)
state = module.params.get('state')
if state in ['absent']:
pod = acs_pod.absent_pod()
else:
pod = acs_pod.present_pod()
result = acs_pod.get_result(pod)
module.exit_json(**result)
if __name__ == '__main__':
main()
|
gpl-3.0
|
simartin/servo
|
tests/wpt/web-platform-tests/webdriver/tests/element_clear/user_prompts.py
|
16
|
4374
|
# META: timeout=long
import pytest
from tests.support.asserts import assert_dialog_handled, assert_error, assert_success
def element_clear(session, element):
return session.transport.send(
"POST", "/session/{session_id}/element/{element_id}/clear".format(
session_id=session.session_id,
element_id=element.id))
@pytest.fixture
def check_user_prompt_closed_without_exception(session, create_dialog, inline):
def check_user_prompt_closed_without_exception(dialog_type, retval):
session.url = inline("<input type=text>")
element = session.find.css("input", all=False)
element.send_keys("foo")
assert element.property("value") == "foo"
create_dialog(dialog_type, text=dialog_type)
response = element_clear(session, element)
assert_success(response)
assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
assert element.property("value") == ""
return check_user_prompt_closed_without_exception
@pytest.fixture
def check_user_prompt_closed_with_exception(session, create_dialog, inline):
def check_user_prompt_closed_with_exception(dialog_type, retval):
session.url = inline("<input type=text>")
element = session.find.css("input", all=False)
element.send_keys("foo")
assert element.property("value") == "foo"
create_dialog(dialog_type, text=dialog_type)
response = element_clear(session, element)
assert_error(response, "unexpected alert open")
assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
assert element.property("value") == "foo"
return check_user_prompt_closed_with_exception
@pytest.fixture
def check_user_prompt_not_closed_but_exception(session, create_dialog, inline):
def check_user_prompt_not_closed_but_exception(dialog_type):
session.url = inline("<input type=text>")
element = session.find.css("input", all=False)
element.send_keys("foo")
assert element.property("value") == "foo"
create_dialog(dialog_type, text=dialog_type)
response = element_clear(session, element)
assert_error(response, "unexpected alert open")
assert session.alert.text == dialog_type
session.alert.dismiss()
assert element.property("value") == "foo"
return check_user_prompt_not_closed_but_exception
@pytest.mark.capabilities({"unhandledPromptBehavior": "accept"})
@pytest.mark.parametrize("dialog_type, retval", [
("alert", None),
("confirm", True),
("prompt", ""),
])
def test_accept(check_user_prompt_closed_without_exception, dialog_type, retval):
check_user_prompt_closed_without_exception(dialog_type, retval)
@pytest.mark.capabilities({"unhandledPromptBehavior": "accept and notify"})
@pytest.mark.parametrize("dialog_type, retval", [
("alert", None),
("confirm", True),
("prompt", ""),
])
def test_accept_and_notify(check_user_prompt_closed_with_exception, dialog_type, retval):
check_user_prompt_closed_with_exception(dialog_type, retval)
@pytest.mark.capabilities({"unhandledPromptBehavior": "dismiss"})
@pytest.mark.parametrize("dialog_type, retval", [
("alert", None),
("confirm", False),
("prompt", None),
])
def test_dismiss(check_user_prompt_closed_without_exception, dialog_type, retval):
check_user_prompt_closed_without_exception(dialog_type, retval)
@pytest.mark.capabilities({"unhandledPromptBehavior": "dismiss and notify"})
@pytest.mark.parametrize("dialog_type, retval", [
("alert", None),
("confirm", False),
("prompt", None),
])
def test_dismiss_and_notify(check_user_prompt_closed_with_exception, dialog_type, retval):
check_user_prompt_closed_with_exception(dialog_type, retval)
@pytest.mark.capabilities({"unhandledPromptBehavior": "ignore"})
@pytest.mark.parametrize("dialog_type", ["alert", "confirm", "prompt"])
def test_ignore(check_user_prompt_not_closed_but_exception, dialog_type):
check_user_prompt_not_closed_but_exception(dialog_type)
@pytest.mark.parametrize("dialog_type, retval", [
("alert", None),
("confirm", False),
("prompt", None),
])
def test_default(check_user_prompt_closed_with_exception, dialog_type, retval):
check_user_prompt_closed_with_exception(dialog_type, retval)
|
mpl-2.0
|
arnif/CouchPotatoServer
|
libs/requests/packages/chardet2/mbcssm.py
|
63
|
18215
|
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .constants import eStart, eError, eItsMe
# BIG5
BIG5_cls = ( \
1,1,1,1,1,1,1,1, # 00 - 07 #allow 0x00 as legal value
1,1,1,1,1,1,0,0, # 08 - 0f
1,1,1,1,1,1,1,1, # 10 - 17
1,1,1,0,1,1,1,1, # 18 - 1f
1,1,1,1,1,1,1,1, # 20 - 27
1,1,1,1,1,1,1,1, # 28 - 2f
1,1,1,1,1,1,1,1, # 30 - 37
1,1,1,1,1,1,1,1, # 38 - 3f
2,2,2,2,2,2,2,2, # 40 - 47
2,2,2,2,2,2,2,2, # 48 - 4f
2,2,2,2,2,2,2,2, # 50 - 57
2,2,2,2,2,2,2,2, # 58 - 5f
2,2,2,2,2,2,2,2, # 60 - 67
2,2,2,2,2,2,2,2, # 68 - 6f
2,2,2,2,2,2,2,2, # 70 - 77
2,2,2,2,2,2,2,1, # 78 - 7f
4,4,4,4,4,4,4,4, # 80 - 87
4,4,4,4,4,4,4,4, # 88 - 8f
4,4,4,4,4,4,4,4, # 90 - 97
4,4,4,4,4,4,4,4, # 98 - 9f
4,3,3,3,3,3,3,3, # a0 - a7
3,3,3,3,3,3,3,3, # a8 - af
3,3,3,3,3,3,3,3, # b0 - b7
3,3,3,3,3,3,3,3, # b8 - bf
3,3,3,3,3,3,3,3, # c0 - c7
3,3,3,3,3,3,3,3, # c8 - cf
3,3,3,3,3,3,3,3, # d0 - d7
3,3,3,3,3,3,3,3, # d8 - df
3,3,3,3,3,3,3,3, # e0 - e7
3,3,3,3,3,3,3,3, # e8 - ef
3,3,3,3,3,3,3,3, # f0 - f7
3,3,3,3,3,3,3,0) # f8 - ff
BIG5_st = ( \
eError,eStart,eStart, 3,eError,eError,eError,eError,#00-07
eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eError,#08-0f
eError,eStart,eStart,eStart,eStart,eStart,eStart,eStart)#10-17
Big5CharLenTable = (0, 1, 1, 2, 0)
Big5SMModel = {'classTable': BIG5_cls,
'classFactor': 5,
'stateTable': BIG5_st,
'charLenTable': Big5CharLenTable,
'name': 'Big5'}
# EUC-JP
EUCJP_cls = ( \
4,4,4,4,4,4,4,4, # 00 - 07
4,4,4,4,4,4,5,5, # 08 - 0f
4,4,4,4,4,4,4,4, # 10 - 17
4,4,4,5,4,4,4,4, # 18 - 1f
4,4,4,4,4,4,4,4, # 20 - 27
4,4,4,4,4,4,4,4, # 28 - 2f
4,4,4,4,4,4,4,4, # 30 - 37
4,4,4,4,4,4,4,4, # 38 - 3f
4,4,4,4,4,4,4,4, # 40 - 47
4,4,4,4,4,4,4,4, # 48 - 4f
4,4,4,4,4,4,4,4, # 50 - 57
4,4,4,4,4,4,4,4, # 58 - 5f
4,4,4,4,4,4,4,4, # 60 - 67
4,4,4,4,4,4,4,4, # 68 - 6f
4,4,4,4,4,4,4,4, # 70 - 77
4,4,4,4,4,4,4,4, # 78 - 7f
5,5,5,5,5,5,5,5, # 80 - 87
5,5,5,5,5,5,1,3, # 88 - 8f
5,5,5,5,5,5,5,5, # 90 - 97
5,5,5,5,5,5,5,5, # 98 - 9f
5,2,2,2,2,2,2,2, # a0 - a7
2,2,2,2,2,2,2,2, # a8 - af
2,2,2,2,2,2,2,2, # b0 - b7
2,2,2,2,2,2,2,2, # b8 - bf
2,2,2,2,2,2,2,2, # c0 - c7
2,2,2,2,2,2,2,2, # c8 - cf
2,2,2,2,2,2,2,2, # d0 - d7
2,2,2,2,2,2,2,2, # d8 - df
0,0,0,0,0,0,0,0, # e0 - e7
0,0,0,0,0,0,0,0, # e8 - ef
0,0,0,0,0,0,0,0, # f0 - f7
0,0,0,0,0,0,0,5) # f8 - ff
EUCJP_st = ( \
3, 4, 3, 5,eStart,eError,eError,eError,#00-07
eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,#08-0f
eItsMe,eItsMe,eStart,eError,eStart,eError,eError,eError,#10-17
eError,eError,eStart,eError,eError,eError, 3,eError,#18-1f
3,eError,eError,eError,eStart,eStart,eStart,eStart)#20-27
EUCJPCharLenTable = (2, 2, 2, 3, 1, 0)
EUCJPSMModel = {'classTable': EUCJP_cls,
'classFactor': 6,
'stateTable': EUCJP_st,
'charLenTable': EUCJPCharLenTable,
'name': 'EUC-JP'}
# EUC-KR
EUCKR_cls = ( \
1,1,1,1,1,1,1,1, # 00 - 07
1,1,1,1,1,1,0,0, # 08 - 0f
1,1,1,1,1,1,1,1, # 10 - 17
1,1,1,0,1,1,1,1, # 18 - 1f
1,1,1,1,1,1,1,1, # 20 - 27
1,1,1,1,1,1,1,1, # 28 - 2f
1,1,1,1,1,1,1,1, # 30 - 37
1,1,1,1,1,1,1,1, # 38 - 3f
1,1,1,1,1,1,1,1, # 40 - 47
1,1,1,1,1,1,1,1, # 48 - 4f
1,1,1,1,1,1,1,1, # 50 - 57
1,1,1,1,1,1,1,1, # 58 - 5f
1,1,1,1,1,1,1,1, # 60 - 67
1,1,1,1,1,1,1,1, # 68 - 6f
1,1,1,1,1,1,1,1, # 70 - 77
1,1,1,1,1,1,1,1, # 78 - 7f
0,0,0,0,0,0,0,0, # 80 - 87
0,0,0,0,0,0,0,0, # 88 - 8f
0,0,0,0,0,0,0,0, # 90 - 97
0,0,0,0,0,0,0,0, # 98 - 9f
0,2,2,2,2,2,2,2, # a0 - a7
2,2,2,2,2,3,3,3, # a8 - af
2,2,2,2,2,2,2,2, # b0 - b7
2,2,2,2,2,2,2,2, # b8 - bf
2,2,2,2,2,2,2,2, # c0 - c7
2,3,2,2,2,2,2,2, # c8 - cf
2,2,2,2,2,2,2,2, # d0 - d7
2,2,2,2,2,2,2,2, # d8 - df
2,2,2,2,2,2,2,2, # e0 - e7
2,2,2,2,2,2,2,2, # e8 - ef
2,2,2,2,2,2,2,2, # f0 - f7
2,2,2,2,2,2,2,0) # f8 - ff
EUCKR_st = (
eError,eStart, 3,eError,eError,eError,eError,eError,#00-07
eItsMe,eItsMe,eItsMe,eItsMe,eError,eError,eStart,eStart)#08-0f
EUCKRCharLenTable = (0, 1, 2, 0)
EUCKRSMModel = {'classTable': EUCKR_cls,
'classFactor': 4,
'stateTable': EUCKR_st,
'charLenTable': EUCKRCharLenTable,
'name': 'EUC-KR'}
# EUC-TW
EUCTW_cls = ( \
2,2,2,2,2,2,2,2, # 00 - 07
2,2,2,2,2,2,0,0, # 08 - 0f
2,2,2,2,2,2,2,2, # 10 - 17
2,2,2,0,2,2,2,2, # 18 - 1f
2,2,2,2,2,2,2,2, # 20 - 27
2,2,2,2,2,2,2,2, # 28 - 2f
2,2,2,2,2,2,2,2, # 30 - 37
2,2,2,2,2,2,2,2, # 38 - 3f
2,2,2,2,2,2,2,2, # 40 - 47
2,2,2,2,2,2,2,2, # 48 - 4f
2,2,2,2,2,2,2,2, # 50 - 57
2,2,2,2,2,2,2,2, # 58 - 5f
2,2,2,2,2,2,2,2, # 60 - 67
2,2,2,2,2,2,2,2, # 68 - 6f
2,2,2,2,2,2,2,2, # 70 - 77
2,2,2,2,2,2,2,2, # 78 - 7f
0,0,0,0,0,0,0,0, # 80 - 87
0,0,0,0,0,0,6,0, # 88 - 8f
0,0,0,0,0,0,0,0, # 90 - 97
0,0,0,0,0,0,0,0, # 98 - 9f
0,3,4,4,4,4,4,4, # a0 - a7
5,5,1,1,1,1,1,1, # a8 - af
1,1,1,1,1,1,1,1, # b0 - b7
1,1,1,1,1,1,1,1, # b8 - bf
1,1,3,1,3,3,3,3, # c0 - c7
3,3,3,3,3,3,3,3, # c8 - cf
3,3,3,3,3,3,3,3, # d0 - d7
3,3,3,3,3,3,3,3, # d8 - df
3,3,3,3,3,3,3,3, # e0 - e7
3,3,3,3,3,3,3,3, # e8 - ef
3,3,3,3,3,3,3,3, # f0 - f7
3,3,3,3,3,3,3,0) # f8 - ff
EUCTW_st = ( \
eError,eError,eStart, 3, 3, 3, 4,eError,#00-07
eError,eError,eError,eError,eError,eError,eItsMe,eItsMe,#08-0f
eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eError,eStart,eError,#10-17
eStart,eStart,eStart,eError,eError,eError,eError,eError,#18-1f
5,eError,eError,eError,eStart,eError,eStart,eStart,#20-27
eStart,eError,eStart,eStart,eStart,eStart,eStart,eStart)#28-2f
EUCTWCharLenTable = (0, 0, 1, 2, 2, 2, 3)
EUCTWSMModel = {'classTable': EUCTW_cls,
'classFactor': 7,
'stateTable': EUCTW_st,
'charLenTable': EUCTWCharLenTable,
'name': 'x-euc-tw'}
# GB2312
GB2312_cls = ( \
1,1,1,1,1,1,1,1, # 00 - 07
1,1,1,1,1,1,0,0, # 08 - 0f
1,1,1,1,1,1,1,1, # 10 - 17
1,1,1,0,1,1,1,1, # 18 - 1f
1,1,1,1,1,1,1,1, # 20 - 27
1,1,1,1,1,1,1,1, # 28 - 2f
3,3,3,3,3,3,3,3, # 30 - 37
3,3,1,1,1,1,1,1, # 38 - 3f
2,2,2,2,2,2,2,2, # 40 - 47
2,2,2,2,2,2,2,2, # 48 - 4f
2,2,2,2,2,2,2,2, # 50 - 57
2,2,2,2,2,2,2,2, # 58 - 5f
2,2,2,2,2,2,2,2, # 60 - 67
2,2,2,2,2,2,2,2, # 68 - 6f
2,2,2,2,2,2,2,2, # 70 - 77
2,2,2,2,2,2,2,4, # 78 - 7f
5,6,6,6,6,6,6,6, # 80 - 87
6,6,6,6,6,6,6,6, # 88 - 8f
6,6,6,6,6,6,6,6, # 90 - 97
6,6,6,6,6,6,6,6, # 98 - 9f
6,6,6,6,6,6,6,6, # a0 - a7
6,6,6,6,6,6,6,6, # a8 - af
6,6,6,6,6,6,6,6, # b0 - b7
6,6,6,6,6,6,6,6, # b8 - bf
6,6,6,6,6,6,6,6, # c0 - c7
6,6,6,6,6,6,6,6, # c8 - cf
6,6,6,6,6,6,6,6, # d0 - d7
6,6,6,6,6,6,6,6, # d8 - df
6,6,6,6,6,6,6,6, # e0 - e7
6,6,6,6,6,6,6,6, # e8 - ef
6,6,6,6,6,6,6,6, # f0 - f7
6,6,6,6,6,6,6,0) # f8 - ff
GB2312_st = ( \
eError,eStart,eStart,eStart,eStart,eStart, 3,eError,#00-07
eError,eError,eError,eError,eError,eError,eItsMe,eItsMe,#08-0f
eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eError,eError,eStart,#10-17
4,eError,eStart,eStart,eError,eError,eError,eError,#18-1f
eError,eError, 5,eError,eError,eError,eItsMe,eError,#20-27
eError,eError,eStart,eStart,eStart,eStart,eStart,eStart)#28-2f
# To be accurate, the length of class 6 can be either 2 or 4.
# But it is not necessary to discriminate between the two since
# it is used for frequency analysis only, and we are validating
# each code range there as well. So it is safe to set it to be
# 2 here.
GB2312CharLenTable = (0, 1, 1, 1, 1, 1, 2)
GB2312SMModel = {'classTable': GB2312_cls,
'classFactor': 7,
'stateTable': GB2312_st,
'charLenTable': GB2312CharLenTable,
'name': 'GB2312'}
# Shift_JIS
SJIS_cls = ( \
1,1,1,1,1,1,1,1, # 00 - 07
1,1,1,1,1,1,0,0, # 08 - 0f
1,1,1,1,1,1,1,1, # 10 - 17
1,1,1,0,1,1,1,1, # 18 - 1f
1,1,1,1,1,1,1,1, # 20 - 27
1,1,1,1,1,1,1,1, # 28 - 2f
1,1,1,1,1,1,1,1, # 30 - 37
1,1,1,1,1,1,1,1, # 38 - 3f
2,2,2,2,2,2,2,2, # 40 - 47
2,2,2,2,2,2,2,2, # 48 - 4f
2,2,2,2,2,2,2,2, # 50 - 57
2,2,2,2,2,2,2,2, # 58 - 5f
2,2,2,2,2,2,2,2, # 60 - 67
2,2,2,2,2,2,2,2, # 68 - 6f
2,2,2,2,2,2,2,2, # 70 - 77
2,2,2,2,2,2,2,1, # 78 - 7f
3,3,3,3,3,3,3,3, # 80 - 87
3,3,3,3,3,3,3,3, # 88 - 8f
3,3,3,3,3,3,3,3, # 90 - 97
3,3,3,3,3,3,3,3, # 98 - 9f
    # 0xa0 is illegal in Shift_JIS encoding, but some pages do
    # contain such bytes, so we need to be more forgiving of errors.
2,2,2,2,2,2,2,2, # a0 - a7
2,2,2,2,2,2,2,2, # a8 - af
2,2,2,2,2,2,2,2, # b0 - b7
2,2,2,2,2,2,2,2, # b8 - bf
2,2,2,2,2,2,2,2, # c0 - c7
2,2,2,2,2,2,2,2, # c8 - cf
2,2,2,2,2,2,2,2, # d0 - d7
2,2,2,2,2,2,2,2, # d8 - df
3,3,3,3,3,3,3,3, # e0 - e7
3,3,3,3,3,4,4,4, # e8 - ef
4,4,4,4,4,4,4,4, # f0 - f7
4,4,4,4,4,0,0,0) # f8 - ff
SJIS_st = ( \
eError,eStart,eStart, 3,eError,eError,eError,eError,#00-07
eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,#08-0f
eItsMe,eItsMe,eError,eError,eStart,eStart,eStart,eStart)#10-17
SJISCharLenTable = (0, 1, 1, 2, 0, 0)
SJISSMModel = {'classTable': SJIS_cls,
'classFactor': 6,
'stateTable': SJIS_st,
'charLenTable': SJISCharLenTable,
'name': 'Shift_JIS'}
# UCS2-BE
UCS2BE_cls = ( \
0,0,0,0,0,0,0,0, # 00 - 07
0,0,1,0,0,2,0,0, # 08 - 0f
0,0,0,0,0,0,0,0, # 10 - 17
0,0,0,3,0,0,0,0, # 18 - 1f
0,0,0,0,0,0,0,0, # 20 - 27
0,3,3,3,3,3,0,0, # 28 - 2f
0,0,0,0,0,0,0,0, # 30 - 37
0,0,0,0,0,0,0,0, # 38 - 3f
0,0,0,0,0,0,0,0, # 40 - 47
0,0,0,0,0,0,0,0, # 48 - 4f
0,0,0,0,0,0,0,0, # 50 - 57
0,0,0,0,0,0,0,0, # 58 - 5f
0,0,0,0,0,0,0,0, # 60 - 67
0,0,0,0,0,0,0,0, # 68 - 6f
0,0,0,0,0,0,0,0, # 70 - 77
0,0,0,0,0,0,0,0, # 78 - 7f
0,0,0,0,0,0,0,0, # 80 - 87
0,0,0,0,0,0,0,0, # 88 - 8f
0,0,0,0,0,0,0,0, # 90 - 97
0,0,0,0,0,0,0,0, # 98 - 9f
0,0,0,0,0,0,0,0, # a0 - a7
0,0,0,0,0,0,0,0, # a8 - af
0,0,0,0,0,0,0,0, # b0 - b7
0,0,0,0,0,0,0,0, # b8 - bf
0,0,0,0,0,0,0,0, # c0 - c7
0,0,0,0,0,0,0,0, # c8 - cf
0,0,0,0,0,0,0,0, # d0 - d7
0,0,0,0,0,0,0,0, # d8 - df
0,0,0,0,0,0,0,0, # e0 - e7
0,0,0,0,0,0,0,0, # e8 - ef
0,0,0,0,0,0,0,0, # f0 - f7
0,0,0,0,0,0,4,5) # f8 - ff
UCS2BE_st = ( \
5, 7, 7,eError, 4, 3,eError,eError,#00-07
eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,#08-0f
eItsMe,eItsMe, 6, 6, 6, 6,eError,eError,#10-17
6, 6, 6, 6, 6,eItsMe, 6, 6,#18-1f
6, 6, 6, 6, 5, 7, 7,eError,#20-27
5, 8, 6, 6,eError, 6, 6, 6,#28-2f
6, 6, 6, 6,eError,eError,eStart,eStart)#30-37
UCS2BECharLenTable = (2, 2, 2, 0, 2, 2)
UCS2BESMModel = {'classTable': UCS2BE_cls,
'classFactor': 6,
'stateTable': UCS2BE_st,
'charLenTable': UCS2BECharLenTable,
'name': 'UTF-16BE'}
# UCS2-LE
UCS2LE_cls = ( \
0,0,0,0,0,0,0,0, # 00 - 07
0,0,1,0,0,2,0,0, # 08 - 0f
0,0,0,0,0,0,0,0, # 10 - 17
0,0,0,3,0,0,0,0, # 18 - 1f
0,0,0,0,0,0,0,0, # 20 - 27
0,3,3,3,3,3,0,0, # 28 - 2f
0,0,0,0,0,0,0,0, # 30 - 37
0,0,0,0,0,0,0,0, # 38 - 3f
0,0,0,0,0,0,0,0, # 40 - 47
0,0,0,0,0,0,0,0, # 48 - 4f
0,0,0,0,0,0,0,0, # 50 - 57
0,0,0,0,0,0,0,0, # 58 - 5f
0,0,0,0,0,0,0,0, # 60 - 67
0,0,0,0,0,0,0,0, # 68 - 6f
0,0,0,0,0,0,0,0, # 70 - 77
0,0,0,0,0,0,0,0, # 78 - 7f
0,0,0,0,0,0,0,0, # 80 - 87
0,0,0,0,0,0,0,0, # 88 - 8f
0,0,0,0,0,0,0,0, # 90 - 97
0,0,0,0,0,0,0,0, # 98 - 9f
0,0,0,0,0,0,0,0, # a0 - a7
0,0,0,0,0,0,0,0, # a8 - af
0,0,0,0,0,0,0,0, # b0 - b7
0,0,0,0,0,0,0,0, # b8 - bf
0,0,0,0,0,0,0,0, # c0 - c7
0,0,0,0,0,0,0,0, # c8 - cf
0,0,0,0,0,0,0,0, # d0 - d7
0,0,0,0,0,0,0,0, # d8 - df
0,0,0,0,0,0,0,0, # e0 - e7
0,0,0,0,0,0,0,0, # e8 - ef
0,0,0,0,0,0,0,0, # f0 - f7
0,0,0,0,0,0,4,5) # f8 - ff
UCS2LE_st = ( \
6, 6, 7, 6, 4, 3,eError,eError,#00-07
eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,#08-0f
eItsMe,eItsMe, 5, 5, 5,eError,eItsMe,eError,#10-17
5, 5, 5,eError, 5,eError, 6, 6,#18-1f
7, 6, 8, 8, 5, 5, 5,eError,#20-27
5, 5, 5,eError,eError,eError, 5, 5,#28-2f
5, 5, 5,eError, 5,eError,eStart,eStart)#30-37
UCS2LECharLenTable = (2, 2, 2, 2, 2, 2)
UCS2LESMModel = {'classTable': UCS2LE_cls,
'classFactor': 6,
'stateTable': UCS2LE_st,
'charLenTable': UCS2LECharLenTable,
'name': 'UTF-16LE'}
# UTF-8
UTF8_cls = ( \
1,1,1,1,1,1,1,1, # 00 - 07 #allow 0x00 as a legal value
1,1,1,1,1,1,0,0, # 08 - 0f
1,1,1,1,1,1,1,1, # 10 - 17
1,1,1,0,1,1,1,1, # 18 - 1f
1,1,1,1,1,1,1,1, # 20 - 27
1,1,1,1,1,1,1,1, # 28 - 2f
1,1,1,1,1,1,1,1, # 30 - 37
1,1,1,1,1,1,1,1, # 38 - 3f
1,1,1,1,1,1,1,1, # 40 - 47
1,1,1,1,1,1,1,1, # 48 - 4f
1,1,1,1,1,1,1,1, # 50 - 57
1,1,1,1,1,1,1,1, # 58 - 5f
1,1,1,1,1,1,1,1, # 60 - 67
1,1,1,1,1,1,1,1, # 68 - 6f
1,1,1,1,1,1,1,1, # 70 - 77
1,1,1,1,1,1,1,1, # 78 - 7f
2,2,2,2,3,3,3,3, # 80 - 87
4,4,4,4,4,4,4,4, # 88 - 8f
4,4,4,4,4,4,4,4, # 90 - 97
4,4,4,4,4,4,4,4, # 98 - 9f
5,5,5,5,5,5,5,5, # a0 - a7
5,5,5,5,5,5,5,5, # a8 - af
5,5,5,5,5,5,5,5, # b0 - b7
5,5,5,5,5,5,5,5, # b8 - bf
0,0,6,6,6,6,6,6, # c0 - c7
6,6,6,6,6,6,6,6, # c8 - cf
6,6,6,6,6,6,6,6, # d0 - d7
6,6,6,6,6,6,6,6, # d8 - df
7,8,8,8,8,8,8,8, # e0 - e7
8,8,8,8,8,9,8,8, # e8 - ef
10,11,11,11,11,11,11,11, # f0 - f7
12,13,13,13,14,15,0,0) # f8 - ff
UTF8_st = ( \
eError,eStart,eError,eError,eError,eError, 12, 10,#00-07
9, 11, 8, 7, 6, 5, 4, 3,#08-0f
eError,eError,eError,eError,eError,eError,eError,eError,#10-17
eError,eError,eError,eError,eError,eError,eError,eError,#18-1f
eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,#20-27
eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,#28-2f
eError,eError, 5, 5, 5, 5,eError,eError,#30-37
eError,eError,eError,eError,eError,eError,eError,eError,#38-3f
eError,eError,eError, 5, 5, 5,eError,eError,#40-47
eError,eError,eError,eError,eError,eError,eError,eError,#48-4f
eError,eError, 7, 7, 7, 7,eError,eError,#50-57
eError,eError,eError,eError,eError,eError,eError,eError,#58-5f
eError,eError,eError,eError, 7, 7,eError,eError,#60-67
eError,eError,eError,eError,eError,eError,eError,eError,#68-6f
eError,eError, 9, 9, 9, 9,eError,eError,#70-77
eError,eError,eError,eError,eError,eError,eError,eError,#78-7f
eError,eError,eError,eError,eError, 9,eError,eError,#80-87
eError,eError,eError,eError,eError,eError,eError,eError,#88-8f
eError,eError, 12, 12, 12, 12,eError,eError,#90-97
eError,eError,eError,eError,eError,eError,eError,eError,#98-9f
eError,eError,eError,eError,eError, 12,eError,eError,#a0-a7
eError,eError,eError,eError,eError,eError,eError,eError,#a8-af
eError,eError, 12, 12, 12,eError,eError,eError,#b0-b7
eError,eError,eError,eError,eError,eError,eError,eError,#b8-bf
eError,eError,eStart,eStart,eStart,eStart,eError,eError,#c0-c7
eError,eError,eError,eError,eError,eError,eError,eError)#c8-cf
UTF8CharLenTable = (0, 1, 0, 0, 0, 0, 2, 3, 3, 3, 4, 4, 5, 5, 6, 6)
UTF8SMModel = {'classTable': UTF8_cls,
'classFactor': 16,
'stateTable': UTF8_st,
'charLenTable': UTF8CharLenTable,
'name': 'UTF-8'}
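# --- A minimal decoding sketch (not part of the original module) ---
# It mirrors the way these model dicts are consumed (as in chardet's
# CodingStateMachine): 'classTable' maps a byte to a character class, and
# 'stateTable' is a flattened (state x class) matrix with 'classFactor'
# as its row stride.
def _feed(model, data):
    state = eStart
    for byte in bytearray(data):
        byte_class = model['classTable'][byte]
        state = model['stateTable'][state * model['classFactor'] + byte_class]
        if state == eError:
            return 'error'
        if state == eItsMe:
            return 'its me'
    return 'valid so far'
# e.g. _feed(UTF8SMModel, b'\xe2\x82\xac') consumes the 3-byte UTF-8
# sequence for the euro sign and ends back at eStart ('valid so far').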
|
gpl-3.0
|
creativcoder/servo
|
tests/wpt/harness/wptrunner/update/metadata.py
|
118
|
2732
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import os
from .. import metadata, products
from base import Step, StepRunner
class GetUpdatePropertyList(Step):
provides = ["property_order", "boolean_properties"]
def create(self, state):
property_order, boolean_properties = products.load_product_update(
state.config, state.product)
state.property_order = property_order
state.boolean_properties = boolean_properties
class UpdateExpected(Step):
"""Do the metadata update on the local checkout"""
provides = ["needs_human"]
def create(self, state):
if state.sync_tree is not None:
sync_root = state.sync_tree.root
else:
sync_root = None
state.needs_human = metadata.update_expected(state.paths,
state.serve_root,
state.run_log,
rev_old=None,
ignore_existing=state.ignore_existing,
sync_root=sync_root,
property_order=state.property_order,
boolean_properties=state.boolean_properties)
class CreateMetadataPatch(Step):
"""Create a patch/commit for the metadata checkout"""
def create(self, state):
if state.no_patch:
return
local_tree = state.local_tree
sync_tree = state.sync_tree
if sync_tree is not None:
name = "web-platform-tests_update_%s_metadata" % sync_tree.rev
message = "Update %s expected data to revision %s" % (state.suite_name, sync_tree.rev)
else:
name = "web-platform-tests_update_metadata"
message = "Update %s expected data" % state.suite_name
local_tree.create_patch(name, message)
if not local_tree.is_clean:
metadata_paths = [manifest_path["metadata_path"]
for manifest_path in state.paths.itervalues()]
for path in metadata_paths:
local_tree.add_new(os.path.relpath(path, local_tree.root))
local_tree.update_patch(include=metadata_paths)
local_tree.commit_patch()
class MetadataUpdateRunner(StepRunner):
"""(Sub)Runner for updating metadata"""
steps = [GetUpdatePropertyList,
UpdateExpected,
CreateMetadataPatch]
|
mpl-2.0
|
webOS-ports/qtwebkit
|
Tools/Scripts/webkitpy/common/lru_cache.py
|
134
|
4428
|
# Copyright (C) 2011 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
class Node():
def __init__(self, key, value):
self.key = key
self.value = value
self.prev = None
self.next = None
class LRUCache():
"""An implementation of Least Recently Used (LRU) Cache."""
def __init__(self, capacity):
"""Initializes a lru cache with the given capacity.
Args:
capacity: The capacity of the cache.
"""
assert capacity > 0, "capacity (%s) must be greater than zero." % capacity
self._first = None
self._last = None
self._dict = {}
self._capacity = capacity
def __setitem__(self, key, value):
if key in self._dict:
self.__delitem__(key)
if not self._first:
self._one_node(key, value)
return
if len(self._dict) >= self._capacity:
del self._dict[self._last.key]
if self._capacity == 1:
self._one_node(key, value)
return
self._last = self._last.next
self._last.prev = None
node = Node(key, value)
node.prev = self._first
self._first.next = node
self._first = node
self._dict[key] = node
def _one_node(self, key, value):
node = Node(key, value)
self._dict[key] = node
self._first = node
self._last = node
def __getitem__(self, key):
if not self._first:
raise KeyError(str(key))
if self._first.key == key:
return self._first.value
if self._last.key == key:
next_last = self._last.next
next_last.prev = None
next_first = self._last
next_first.prev = self._first
next_first.next = None
self._first.next = next_first
self._first = next_first
self._last = next_last
return self._first.value
node = self._dict[key]
node.next.prev = node.prev
node.prev.next = node.next
node.prev = self._first
node.next = None
self._first.next = node
self._first = node
return self._first.value
def __delitem__(self, key):
node = self._dict[key]
del self._dict[key]
if self._first is self._last:
self._last = None
self._first = None
return
if self._first is node:
self._first = node.prev
self._first.next = None
return
if self._last is node:
self._last = node.next
self._last.prev = None
return
node.next.prev = node.prev
node.prev.next = node.next
def __len__(self):
return len(self._dict)
def __contains__(self, key):
return key in self._dict
def __iter__(self):
return iter(self._dict)
def items(self):
return [(key, node.value) for key, node in self._dict.items()]
def values(self):
return [node.value for node in self._dict.values()]
def keys(self):
return self._dict.keys()
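# --- A brief usage sketch (not part of the original module) ---
# The cache evicts the least recently used entry once capacity is exceeded.
def _lru_example():
    cache = LRUCache(2)
    cache['a'] = 1
    cache['b'] = 2
    cache['a']        # touching 'a' makes it the most recently used
    cache['c'] = 3    # evicts 'b', now the least recently used entry
    assert 'b' not in cache
    assert sorted(cache.keys()) == ['a', 'c']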
|
gpl-2.0
|
tomdean1/linux
|
tools/perf/scripts/python/Perf-Trace-Util/lib/Perf/Trace/Util.py
|
12527
|
1935
|
# Util.py - Python extension for perf script, miscellaneous utility code
#
# Copyright (C) 2010 by Tom Zanussi <[email protected]>
#
# This software may be distributed under the terms of the GNU General
# Public License ("GPL") version 2 as published by the Free Software
# Foundation.
import errno, os
FUTEX_WAIT = 0
FUTEX_WAKE = 1
FUTEX_PRIVATE_FLAG = 128
FUTEX_CLOCK_REALTIME = 256
FUTEX_CMD_MASK = ~(FUTEX_PRIVATE_FLAG | FUTEX_CLOCK_REALTIME)
NSECS_PER_SEC = 1000000000
def avg(total, n):
return total / n
def nsecs(secs, nsecs):
return secs * NSECS_PER_SEC + nsecs
def nsecs_secs(nsecs):
return nsecs / NSECS_PER_SEC
def nsecs_nsecs(nsecs):
return nsecs % NSECS_PER_SEC
def nsecs_str(nsecs):
str = "%5u.%09u" % (nsecs_secs(nsecs), nsecs_nsecs(nsecs)),
return str
def add_stats(dict, key, value):
    if key not in dict:
dict[key] = (value, value, value, 1)
else:
min, max, avg, count = dict[key]
if value < min:
min = value
if value > max:
max = value
        # Keep a true running mean using the stored sample count.
        avg = (avg * count + value) / (count + 1)
dict[key] = (min, max, avg, count + 1)
def clear_term():
print("\x1b[H\x1b[2J")
audit_package_warned = False
try:
import audit
machine_to_id = {
'x86_64': audit.MACH_86_64,
'alpha' : audit.MACH_ALPHA,
'ia64' : audit.MACH_IA64,
'ppc' : audit.MACH_PPC,
'ppc64' : audit.MACH_PPC64,
's390' : audit.MACH_S390,
's390x' : audit.MACH_S390X,
'i386' : audit.MACH_X86,
'i586' : audit.MACH_X86,
'i686' : audit.MACH_X86,
}
try:
machine_to_id['armeb'] = audit.MACH_ARMEB
except:
pass
machine_id = machine_to_id[os.uname()[4]]
except:
if not audit_package_warned:
audit_package_warned = True
print "Install the audit-libs-python package to get syscall names"
def syscall_name(id):
try:
return audit.audit_syscall_to_name(id, machine_id)
except:
return str(id)
def strerror(nr):
try:
return errno.errorcode[abs(nr)]
except:
return "Unknown %d errno" % nr
|
gpl-2.0
|
ForestNymph/Arduino_sources
|
arduino-core/src/processing/app/i18n/python/requests/packages/charade/charsetprober.py
|
3127
|
1902
|
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
# Shy Shalom - original C code
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from . import constants
import re
class CharSetProber:
def __init__(self):
pass
def reset(self):
self._mState = constants.eDetecting
def get_charset_name(self):
return None
def feed(self, aBuf):
pass
def get_state(self):
return self._mState
def get_confidence(self):
return 0.0
def filter_high_bit_only(self, aBuf):
aBuf = re.sub(b'([\x00-\x7F])+', b' ', aBuf)
return aBuf
def filter_without_english_letters(self, aBuf):
aBuf = re.sub(b'([A-Za-z])+', b' ', aBuf)
return aBuf
def filter_with_english_letters(self, aBuf):
# TODO
return aBuf
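# --- A short illustrative sketch (not part of the original module) ---
# The filters collapse runs of uninteresting bytes into single spaces
# before statistical analysis.
def _filter_example():
    prober = CharSetProber()
    # Runs of ASCII bytes become one space; high-bit bytes are kept.
    assert prober.filter_high_bit_only(b'abc\xe4\xb8\xadxyz') == b' \xe4\xb8\xad '
    # Runs of ASCII letters become one space; other bytes are kept.
    assert prober.filter_without_english_letters(b'abc123xyz') == b' 123 '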
|
lgpl-2.1
|
toirl/pynunzen
|
pynunzen/ledger/transaction.py
|
1
|
10225
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import logging
import datetime
from decimal import Decimal, getcontext
from pynunzen.helpers import double_sha256
__transaction_version__ = "1.0"
log = logging.getLogger(__name__)
getcontext().prec = 8
def generate_transaction_hash(transaction):
"""Will generate a hash based on the content of input and outputs of
the given transaction. The hash is used as a address of the
transaction within the blockchain.
:transaction: :class:Transaction instance
:returns: hash
"""
value = str(transaction.time)
value += str(transaction.version)
for tx_in in transaction.inputs:
value += str(tx_in.data.value)
value += str(tx_in.script._script)
value += str(tx_in.tx_hash)
value += str(tx_in.utxo_idx)
for tx_out in transaction.outputs:
value += str(tx_out.data.value)
value += str(tx_out.script._script)
return double_sha256(value)
def validate_transaction(transaction, blockchain):
"""Will validate the given transaction. The transaction is validated against the following points:
* The transaction’s syntax and data structure must be correct.
* Neither lists of inputs or outputs are empty.
    * The transaction hash must validate.
* For each input, the referenced output must exist.
TODO:
* For each input, the referenced output cannot already be spent
(is in UTXO).
* The transaction size in bytes is less than MAX_BLOCK_SIZE.
* Each output value, as well as the total, must be within the
allowed range of values (less than 21m coins, more than 0).
    * None of the inputs have hash=0, N=-1 (coinbase transactions
should not be relayed).
* nLockTime is less than or equal to INT_MAX.
* The transaction size in bytes is greater than or equal to 100.
* The number of signature operations contained in the transaction
is less than the signature operation limit.
* The unlocking script (scriptSig) can only push numbers on the
stack, and the locking script (scriptPubkey) must match
isStandard forms (this rejects "nonstandard" transactions).
* A matching transaction in the pool, or in a block in the main
branch, must exist.
* For each input, if the referenced output exists in any other
transaction in the pool, the transaction must be rejected.
* For each input, look in the main branch and the transaction
pool to find the referenced output transaction. If the output
transaction is missing for any input, this will be an orphan
transaction. Add to the orphan transactions pool, if a matching
transaction is not already in the pool.
* For each input, if the referenced output transaction is a
coinbase output, it must have at least COINBASE_MATURITY (100)
confirmations.
* Using the referenced output transactions to get input values,
check that each input value, as well as the sum, are in the
allowed range of values (less than 21m coins, more than 0).
* Reject if the sum of input values is less than sum of output values.
* Reject if transaction fee would be too low to get into an empty block.
* The unlocking scripts for each input must validate against the
corresponding output locking scripts.
:transaction: :class:Transaction object
:returns: True or False
"""
trans_checks = [_check_syntax, _check_io, _check_hash]
for check in trans_checks:
    if not check(transaction):
        log.error("Validation {} of transaction failed".format(check.__name__))
        return False
trans_block_checks = [_check_referenced_hash]
for check in trans_block_checks:
    if not check(transaction, blockchain):
        log.error("Validation {} of transaction in context of the blockchain failed".format(check.__name__))
        return False
log.debug("Validation of transaction successful")
return True
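# Usage sketch (assumes `tx` is a :class:Transaction instance and
# `blockchain` exposes get_transaction(); the names are illustrative):
#
#     if not validate_transaction(tx, blockchain):
#         log.warning("Rejecting invalid transaction %s", tx.hash)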
def _check_referenced_hash(transaction, blockchain):
"""Will check if the referenced outout in the transaction inputs are
present in the blockchain and not already spent.
:transaction: TODO
:blockchain: TODO
:returns: TODO
"""
for tx_in in transaction.inputs:
tx = blockchain.get_transaction(tx_in.tx_hash)
if tx:
# TODO: Implement check if outout is already spent. (ti) <2017-05-22 13:32>
pass
else:
# Transaction not found.
return False
return True
def _check_hash(transaction):
"""The stored hash value of the transaction must be the same when rebuild from scratch
:transaction: :class:Transaction
:returns: True or False
"""
tx_hash = transaction.hash
return tx_hash == generate_transaction_hash(transaction)
def _check_syntax(transaction):
"""The transaction’s syntax and data structure must be correct.
:transaction: :class:Transaction
:returns: True or False
"""
try:
assert hasattr(transaction, "time")
assert hasattr(transaction, "version")
assert hasattr(transaction, "inputs")
assert hasattr(transaction, "outputs")
assert hasattr(transaction, "hash")
assert len(transaction.__dict__) == 5
return True
except AssertionError:
return False
def _check_io(transaction):
"""Neither lists of inputs or outputs are empty.
:transaction: :class:Transaction
:returns: True or False
"""
return len(transaction.inputs) > 0 and len(transaction.outputs) > 0
class Data(object):
"""Container for the transfered data/value within a transaction."""
def __init__(self, value):
self.value = value
def check(self, value):
"""Will return True if the current container includes the given
value.
:value: Value to be checked
:returns: True or False
"""
raise NotImplementedError()
class Coin(Data):
def __init__(self, value):
value = Decimal(value)
super(Coin, self).__init__(value)
def check(self, value):
try:
value = Decimal(value)
except Exception:
    raise ValueError("'{}' cannot be cast to Decimal".format(value))
return self.value > 0
class LockScript(object):
"""A locking script is an encumbrance placed on an output, and it
specifies the conditions that must be met to spend the output in the
future. Most of the time this is the public address of the receiver
of the transaction."""
def __init__(self, script):
self._script = script
def unlock(self, script):
return self._script == script
class UnlockScript(object):
"""An unlocking script is a script that "solves," or satisfies, the
conditions placed on an output by a locking script and allows the
output to be spent. Unlocking scripts are part of every transaction
input, and most of the time they contain a digital signature
produced by the user’s wallet from his or her private key"""
def __init__(self, script):
self._script = script
class Output(object):
"""Output for a transaction. Transaction outputs consist of two
parts: An amount of data which is about to be transferred, and a
locking script, also known as an "encumbrance" that "locks" this
data by specifying the conditions that must be met to spend the
output"""
def __init__(self, data, script):
"""
:data: :class:Data instance.
:script: :class:LockScript instance.
"""
self.data = data
self.script = script
class Input(object):
"""In simple terms, transaction inputs are pointers to UTXO. They
point to a specific UTXO by reference to the transaction hash and
sequence number where the UTXO is recorded in the blockchain. To
spend UTXO, a transaction input also includes unlocking scripts that
satisfy the spending conditions set by the UTXO. The unlocking
script is usually a signature proving ownership of the bitcoin
address that is in the locking script."""
def __init__(self, data, script, tx_hash, utxo_idx):
"""
:data: :class:Data instance.
:script: :class:UnlockScript instance.
:txhash: Reference to the hash of the transaction with unspent
outputs.
:utxo_idx: Index to the unspent output in the referenced
transaction (txhash). 0 is the first.
"""
self.data = data
self.script = script
self.tx_hash = tx_hash
self.utxo_idx = utxo_idx
class CoinbaseInput(Input):
"""The coinbase input is a special input. It is the input of the
first transaction within a new block. It is the origin of the reward
for the miner who generated the new block. As this input has no
reference to a origin output it has some special logic to unlock
the input."""
def __init__(self, data, script, coinbase_script):
tx_hash = "0" * 32
utxo_idx = 0
Input.__init__(self, data, script, tx_hash, utxo_idx)
self.coinbase_script = coinbase_script
class Transaction(object):
"""A transaction is a data structure that encodes a transfer of
value from a source of data/value, called an input, to a destination,
called an output."""
def __init__(self, inputs, outputs):
"""TODO: to be defined1. """
self.version = __transaction_version__
"""Version of this transaction"""
self.time = str(datetime.datetime.utcnow())
"""Time when the the transaction was created"""
self.inputs = inputs
"""One or more transaction inputs"""
self.outputs = outputs
"""One or more transaction outputs"""
self.hash = generate_transaction_hash(self)
"""Hash of this transaction, A hash is some kind of a address of
the transaction within the blockchain. It is used to link
outouts from inputs in other transactions."""
|
mit
|
CFDEMproject/LAMMPS
|
tools/python/dump2pdb.py
|
51
|
1217
|
#!/usr/bin/env python
# Script: dump2pdb.py
# Purpose: convert a LAMMPS dump file to PDB format
# Syntax: dump2pdb.py dumpfile Nid Ntype Nx Ny Nz pdbfile template
# dumpfile = LAMMPS dump file in native LAMMPS format
# Nid,Ntype,Nx,Ny,Nz = columns #s for ID,type,x,y,z
# (usually 1,2,3,4,5)
# pdbfile = new PDB file
# template = PDB file to use as template for creating new PDB file
# this arg is optional, if not used a generic PDB file is created
# Author: Steve Plimpton (Sandia), sjplimp at sandia.gov
import sys,os
path = os.environ["LAMMPS_PYTHON_TOOLS"]
sys.path.append(path)
from dump import dump
from pdbfile import pdbfile
if len(sys.argv) != 8 and len(sys.argv) != 9:
    raise RuntimeError("Syntax: dump2pdb.py dumpfile Nid Ntype Nx Ny Nz pdbfile template")
dumpfile = sys.argv[1]
nid = int(sys.argv[2])
ntype = int(sys.argv[3])
nx = int(sys.argv[4])
ny = int(sys.argv[5])
nz = int(sys.argv[6])
pfile = sys.argv[7]
if len(sys.argv) == 9: template = sys.argv[8]
else: template = ""
d = dump(dumpfile)
d.map(nid,"id",ntype,"type",nx,"x",ny,"y",nz,"z")
if template: p = pdbfile(template,d)
else: p = pdbfile(d)
p.one(pfile)
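# Example invocation (a sketch; file names are hypothetical, column numbers
# follow the usual 1,2,3,4,5 layout described above):
#   dump2pdb.py dump.melt 1 2 3 4 5 new.pdb template.pdb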
|
gpl-2.0
|
andrewyoung1991/dj-stripe
|
tests/test_subscriptions.py
|
9
|
13785
|
import calendar
import copy
import datetime
import decimal
from django.contrib.auth import get_user_model
from django.test import TestCase
from django.utils import timezone
from mock import patch, PropertyMock
from stripe import InvalidRequestError
from djstripe.exceptions import SubscriptionCancellationFailure, SubscriptionUpdateFailure
from djstripe.models import convert_tstamp, Customer, CurrentSubscription
from djstripe.settings import PAYMENTS_PLANS
from tests import convert_to_fake_stripe_object
def timestamp(year, month, day, hour, minute=0, second=0):
dt = datetime.datetime(year, month, day, hour, minute, second, tzinfo=timezone.utc)
return calendar.timegm(dt.timetuple())
CREATE_TIME = timestamp(2014, 4, 1, 11)
START_TIME = timestamp(2014, 4, 1, 12)
END_TIME = timestamp(2014, 4, 11, 12)
CANCELED_TIME = timestamp(2014, 4, 6, 12)
BASIC_PLAN = {
"stripe_plan_id": "basic_id",
"name": "Basic Plan",
"description": "Basic Plan (monthly)",
"price": 10000,
"currency": "usd",
"interval": "month"
}
GOLD_PLAN = {
"stripe_plan_id": "gold_id",
"name": "Gold Plan",
"description": "Gold Plan (annual)",
"price": 100000,
"currency": "usd",
"interval": "year"
}
DUMMY_CUSTOMER_WITHOUT_SUB = {
"object": "customer",
"created": CREATE_TIME,
"id": "cus_xxxxxxxxxxxxxx",
"subscription": None,
"cards": {
"object": "list",
"count": 0,
"data": []
},
"default_card": None
}
DUMMY_SUB_BASIC = {
"plan": "basic_id",
"object": "subscription",
"start": START_TIME,
"status": "trialing",
"customer": "cus_xxxxxxxxxxxxxx",
"cancel_at_period_end": False,
"current_period_start": START_TIME,
"current_period_end": END_TIME,
"ended_at": None,
"trial_start": START_TIME,
"trial_end": END_TIME,
"canceled_at": None,
"quantity": 1
}
DUMMY_SUB_BASIC_CANCELED = {
"plan": "basic_id",
"object": "subscription",
"start": START_TIME,
"status": "canceled",
"customer": "cus_xxxxxxxxxxxxxx",
"cancel_at_period_end": False,
"current_period_start": START_TIME,
"current_period_end": END_TIME,
"ended_at": CANCELED_TIME,
"trial_start": START_TIME,
"trial_end": END_TIME,
"canceled_at": CANCELED_TIME,
"quantity": 1
}
DUMMY_SUB_GOLD = {
"plan": "gold_id",
"object": "subscription",
"start": START_TIME,
"status": "trialing",
"customer": "cus_xxxxxxxxxxxxxx",
"cancel_at_period_end": False,
"current_period_start": START_TIME,
"current_period_end": END_TIME,
"ended_at": None,
"trial_start": START_TIME,
"trial_end": END_TIME,
"canceled_at": None,
"quantity": 1
}
DUMMY_SUB_BASIC_WITH_PLAN = copy.deepcopy(DUMMY_SUB_BASIC)
DUMMY_SUB_BASIC_WITH_PLAN["plan"] = {"id": "basic_id", "object": "plan", "amount": 10000}
DUMMY_CUSTOMER_WITH_SUB_BASIC = copy.deepcopy(DUMMY_CUSTOMER_WITHOUT_SUB)
DUMMY_CUSTOMER_WITH_SUB_BASIC["subscription"] = DUMMY_SUB_BASIC_WITH_PLAN
DUMMY_SUB_GOLD_WITH_PLAN = copy.deepcopy(DUMMY_SUB_GOLD)
DUMMY_SUB_GOLD_WITH_PLAN["plan"] = {"id": "gold_id", "object": "plan", "amount": 100000}
DUMMY_CUSTOMER_WITH_SUB_GOLD = copy.deepcopy(DUMMY_CUSTOMER_WITHOUT_SUB)
DUMMY_CUSTOMER_WITH_SUB_GOLD["subscription"] = DUMMY_SUB_GOLD_WITH_PLAN
def create_subscription(customer, plan="basic"):
return CurrentSubscription.objects.create(
customer=customer,
plan=plan,
quantity=1,
start=convert_tstamp(1395527780),
amount=decimal.Decimal("100.00" if plan == "basic" else "1000.00"),
status="trialing"
)
def version_tuple(v):
return tuple(map(int, (v.split("."))))
class TestSingleSubscription(TestCase):
@classmethod
def setUpClass(cls):
PAYMENTS_PLANS["basic"] = BASIC_PLAN
PAYMENTS_PLANS["gold"] = GOLD_PLAN
@classmethod
def tearDownClass(cls):
del PAYMENTS_PLANS["basic"]
del PAYMENTS_PLANS["gold"]
def setUp(self):
self.user = get_user_model().objects.create_user(username="chris")
self.customer = Customer.objects.create(
subscriber=self.user,
stripe_id="cus_xxxxxxxxxxxxxxx",
card_fingerprint="YYYYYYYY",
card_last_4="2342",
card_kind="Visa"
)
def test_current_subscription_does_not_exist(self):
with self.assertRaises(CurrentSubscription.DoesNotExist):
self.customer.current_subscription
@patch("stripe.resource.Customer.update_subscription")
@patch("djstripe.models.Customer.stripe_customer", new_callable=PropertyMock)
def test_subscribe(self, StripeCustomerMock, UpdateSubscriptionMock):
StripeCustomerMock.side_effect = [convert_to_fake_stripe_object(DUMMY_CUSTOMER_WITHOUT_SUB),
convert_to_fake_stripe_object(DUMMY_CUSTOMER_WITH_SUB_BASIC)]
self.assertEqual(self.customer.has_active_subscription(), False)
self.customer.subscribe("basic", charge_immediately=False)
self.assertEqual(self.customer.has_active_subscription(), True)
sub = self.customer.current_subscription
self.assertEqual(sub.quantity, 1)
self.assertEqual(sub.amount, decimal.Decimal("100.00"))
@patch("stripe.resource.Customer.update_subscription")
@patch("djstripe.models.Customer.stripe_customer", new_callable=PropertyMock)
def test_upgrade(self, StripeCustomerMock, UpdateSubscriptionMock):
StripeCustomerMock.side_effect = [convert_to_fake_stripe_object(DUMMY_CUSTOMER_WITH_SUB_BASIC),
convert_to_fake_stripe_object(DUMMY_CUSTOMER_WITH_SUB_GOLD)]
create_subscription(self.customer)
self.assertEqual(self.customer.has_active_subscription(), True)
self.assertEqual(self.customer.current_subscription.plan, "basic")
self.customer.subscribe("gold", charge_immediately=False)
self.assertEqual(self.customer.has_active_subscription(), True)
sub = self.customer.current_subscription
self.assertEqual(sub.amount, decimal.Decimal("1000.00"))
self.assertEqual(sub.plan, "gold")
def test_cancel_without_sub(self):
with self.assertRaises(SubscriptionCancellationFailure):
self.customer.cancel_subscription()
@patch("stripe.resource.Customer.cancel_subscription", new_callable=PropertyMock)
@patch("djstripe.models.Customer.stripe_customer", new_callable=PropertyMock)
def test_cancel_without_stripe_sub(self, StripeCustomerMock, CancelSubscriptionMock):
StripeCustomerMock.return_value = convert_to_fake_stripe_object(DUMMY_CUSTOMER_WITHOUT_SUB)
CancelSubscriptionMock.side_effect = InvalidRequestError("No active subscriptions for customer: cus_xxxxxxxxxxxxxx", None)
create_subscription(self.customer)
self.assertEqual(self.customer.has_active_subscription(), True)
self.assertEqual(self.customer.current_subscription.status, "trialing")
with self.assertRaises(SubscriptionCancellationFailure):
self.customer.cancel_subscription()
@patch("stripe.resource.Customer.cancel_subscription")
@patch("djstripe.models.Customer.stripe_customer", new_callable=PropertyMock)
def test_cancel_with_stripe_sub(self, StripeCustomerMock, CancelSubscriptionMock):
StripeCustomerMock.return_value = convert_to_fake_stripe_object(DUMMY_CUSTOMER_WITH_SUB_BASIC)
CancelSubscriptionMock.return_value = convert_to_fake_stripe_object(DUMMY_SUB_BASIC_CANCELED)
create_subscription(self.customer)
self.assertEqual(self.customer.current_subscription.status, "trialing")
self.customer.cancel_subscription(at_period_end=False)
self.assertEqual(self.customer.has_active_subscription(), False)
self.assertEqual(self.customer.current_subscription.status, "canceled")
self.assertEqual(self.customer.current_subscription.ended_at, None)
self.assertEqual(self.customer.current_subscription.canceled_at, convert_tstamp(CANCELED_TIME))
@patch("stripe.resource.Customer.cancel_subscription")
@patch("djstripe.models.Customer.stripe_customer", new_callable=PropertyMock)
def test_cancel_with_stripe_sub_future(self, stripe_customer_mock, cancel_subscription_mock):
stripe_customer_mock.return_value = convert_to_fake_stripe_object(DUMMY_CUSTOMER_WITH_SUB_BASIC)
cancel_subscription_mock.return_value = convert_to_fake_stripe_object(DUMMY_SUB_BASIC_CANCELED)
subscription_instance = create_subscription(self.customer)
subscription_instance.trial_end = timezone.now() + datetime.timedelta(days=5)
subscription_instance.save()
self.customer.cancel_subscription(at_period_end=True)
self.assertEqual(self.customer.has_active_subscription(), False)
self.assertEqual(self.customer.current_subscription.status, "canceled")
@patch("stripe.resource.Customer.update_subscription")
@patch("djstripe.models.Customer.stripe_customer", new_callable=PropertyMock)
def test_update_quantity(self, StripeCustomerMock, UpdateSubscriptionMock):
dummy_customer = copy.deepcopy(DUMMY_CUSTOMER_WITH_SUB_BASIC)
dummy_customer["subscription"]["quantity"] = 2
StripeCustomerMock.side_effect = [convert_to_fake_stripe_object(DUMMY_CUSTOMER_WITH_SUB_BASIC),
convert_to_fake_stripe_object(DUMMY_CUSTOMER_WITH_SUB_BASIC),
convert_to_fake_stripe_object(dummy_customer)]
create_subscription(self.customer)
self.customer.update_plan_quantity(2, charge_immediately=False)
self.assertEqual(self.customer.current_subscription.quantity, 2)
@patch("djstripe.models.Customer.stripe_customer", new_callable=PropertyMock)
def test_update_no_stripe_sub(self, StripeCustomerMock):
StripeCustomerMock.return_value = convert_to_fake_stripe_object(DUMMY_CUSTOMER_WITHOUT_SUB)
create_subscription(self.customer)
with self.assertRaises(SubscriptionUpdateFailure):
self.customer.update_plan_quantity(2)
@patch("stripe.resource.Customer.update_subscription")
@patch("djstripe.models.Customer.stripe_customer", new_callable=PropertyMock)
def test_extend(self, StripeCustomerMock, UpdateSubscriptionMock):
StripeCustomerMock.return_value = convert_to_fake_stripe_object(DUMMY_CUSTOMER_WITH_SUB_BASIC)
subscription_instance = create_subscription(self.customer)
subscription_instance.current_period_end = timezone.datetime.fromtimestamp(END_TIME, tz=timezone.utc)
delta = timezone.timedelta(days=30)
self.customer.current_subscription.extend(delta)
UpdateSubscriptionMock.assert_called_once_with(prorate=False, trial_end=subscription_instance.current_period_end + delta)
def test_extend_negative_delta(self):
delta = timezone.timedelta(days=-30)
create_subscription(self.customer)
with self.assertRaises(ValueError):
self.customer.current_subscription.extend(delta)
@patch("stripe.resource.Customer.update_subscription")
@patch("djstripe.models.Customer.stripe_customer", new_callable=PropertyMock)
def test_extend_with_trial(self, StripeCustomerMock, UpdateSubscriptionMock):
StripeCustomerMock.return_value = convert_to_fake_stripe_object(DUMMY_CUSTOMER_WITH_SUB_BASIC)
subscription_instance = create_subscription(self.customer)
subscription_instance.trial_end = timezone.now() + timezone.timedelta(days=5)
delta = timezone.timedelta(days=30)
new_trial_end = subscription_instance.trial_end + delta
self.customer.current_subscription.extend(delta)
UpdateSubscriptionMock.assert_called_once_with(prorate=False, trial_end=new_trial_end)
class CurrentSubscriptionTest(TestCase):
def setUp(self):
self.plan_id = "test"
self.current_subscription = CurrentSubscription.objects.create(plan=self.plan_id,
quantity=1,
start=timezone.now(),
amount=decimal.Decimal("25.00"),
status=CurrentSubscription.STATUS_PAST_DUE)
def test_plan_display(self):
self.assertEqual(PAYMENTS_PLANS[self.plan_id]["name"], self.current_subscription.plan_display())
def test_status_display(self):
self.assertEqual("Past Due", self.current_subscription.status_display())
def test_is_period_current_no_current_period_end(self):
self.assertFalse(self.current_subscription.is_period_current())
def test_is_status_temporarily_current_true(self):
current_subscription = CurrentSubscription.objects.create(plan=self.plan_id,
quantity=1,
start=timezone.now(),
amount=decimal.Decimal("25.00"),
status=CurrentSubscription.STATUS_PAST_DUE,
canceled_at=timezone.now() + datetime.timedelta(days=5),
cancel_at_period_end=True)
self.assertTrue(current_subscription.is_status_temporarily_current())
def test_is_status_temporarily_current_false(self):
self.assertFalse(self.current_subscription.is_status_temporarily_current())
|
bsd-3-clause
|
nightjean/Deep-Learning
|
tensorflow/python/training/optimizer.py
|
21
|
28134
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Base class for optimizers."""
# pylint: disable=g-bad-name
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import gradients
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.ops import state_ops
from tensorflow.python.ops import variables
from tensorflow.python.training import slot_creator
from tensorflow.python.util import nest
def _get_variable_for(v):
"""Returns the ResourceVariable responsible for v, or v if not necessary."""
if v.op.type == "VarHandleOp":
for var in variables.trainable_variables():
if (isinstance(var, resource_variable_ops.ResourceVariable)
and var.handle.op is v.op):
return var
raise ValueError("Got %s but could not locate source variable." % (str(v)))
return v
def _deduplicate_indexed_slices(values, indices):
"""Sums `values` associated with any non-unique `indices`.
Args:
values: A `Tensor` with rank >= 1.
indices: A one-dimensional integer `Tensor`, indexing into the first
dimension of `values` (as in an IndexedSlices object).
Returns:
A tuple of (`summed_values`, `unique_indices`) where `unique_indices` is a
de-duplicated version of `indices` and `summed_values` contains the sum of
`values` slices associated with each unique index.
"""
unique_indices, new_index_positions = array_ops.unique(indices)
summed_values = math_ops.unsorted_segment_sum(
values, new_index_positions,
array_ops.shape(unique_indices)[0])
return (summed_values, unique_indices)
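# Worked example (a sketch): for values=[[1.], [2.], [3.]] and
# indices=[0, 0, 2], `array_ops.unique` yields unique_indices=[0, 2] with
# positions [0, 0, 1], so the result is summed_values=[[3.], [3.]] paired
# with unique_indices=[0, 2].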
def _var_key(var):
return (var.op.graph, var.op.name)
class _OptimizableVariable(object):
"""Interface for abstracting over variables in the optimizers."""
@abc.abstractmethod
def target(self):
"""Returns the optimization target for this variable."""
raise NotImplementedError("Calling an abstract method.")
@abc.abstractmethod
def update_op(self, optimizer, g):
"""Returns the update ops for updating the variable."""
raise NotImplementedError("Calling an abstract method.")
class _RefVariableProcessor(_OptimizableVariable):
"""Processor for Variable."""
def __init__(self, v):
self._v = v
def target(self):
return self._v._ref() # pylint: disable=protected-access
def update_op(self, optimizer, g):
if isinstance(g, ops.Tensor):
return optimizer._apply_dense(g, self._v) # pylint: disable=protected-access
else:
assert isinstance(g, ops.IndexedSlices), ("Gradient ", g, " is neither a "
"tensor nor IndexedSlices.")
# pylint: disable=protected-access
return optimizer._apply_sparse_duplicate_indices(g, self._v)
class _DenseReadResourceVariableProcessor(_OptimizableVariable):
"""Processor for dense ResourceVariables."""
def __init__(self, v):
self._v = v
def target(self):
return self._v
def update_op(self, optimizer, g):
# pylint: disable=protected-access
return optimizer._resource_apply_dense(g, self._v.op.inputs[0])
class _DenseResourceVariableProcessor(_OptimizableVariable):
"""Processor for dense ResourceVariables."""
def __init__(self, v):
self._v = v
def target(self):
return self._v
def update_op(self, optimizer, g):
# pylint: disable=protected-access
if isinstance(g, ops.IndexedSlices):
return optimizer._resource_apply_sparse_duplicate_indices(
g.values, self._v, g.indices)
return optimizer._resource_apply_dense(g, self._v)
class _StreamingModelPortProcessor(_OptimizableVariable):
"""Processor for streaming ModelPorts."""
def __init__(self, v):
self._v = v
def target(self):
return self._v
def update_op(self, optimizer, g):
return g
def _get_processor(v):
"""The processor of v."""
if v.op.type == "VarHandleOp":
return _DenseResourceVariableProcessor(v)
if isinstance(v, variables.Variable):
return _RefVariableProcessor(v)
if v.op.type == "SubmodelPort":
return _StreamingModelPortProcessor(v)
raise NotImplementedError("Trying to optimize unsupported type ", v)
class Optimizer(object):
"""Base class for optimizers.
This class defines the API to add Ops to train a model. You never use this
class directly, but instead instantiate one of its subclasses such as
`GradientDescentOptimizer`, `AdagradOptimizer`, or `MomentumOptimizer`.
### Usage
```python
# Create an optimizer with the desired parameters.
opt = GradientDescentOptimizer(learning_rate=0.1)
# Add Ops to the graph to minimize a cost by updating a list of variables.
# "cost" is a Tensor, and the list of variables contains tf.Variable
# objects.
opt_op = opt.minimize(cost, var_list=<list of variables>)
```
In the training program you will just have to run the returned Op.
```python
# Execute opt_op to do one step of training:
opt_op.run()
```
### Processing gradients before applying them.
Calling `minimize()` takes care of both computing the gradients and
applying them to the variables. If you want to process the gradients
before applying them you can instead use the optimizer in three steps:
1. Compute the gradients with `compute_gradients()`.
2. Process the gradients as you wish.
3. Apply the processed gradients with `apply_gradients()`.
Example:
```python
# Create an optimizer.
opt = GradientDescentOptimizer(learning_rate=0.1)
# Compute the gradients for a list of variables.
grads_and_vars = opt.compute_gradients(loss, <list of variables>)
# grads_and_vars is a list of tuples (gradient, variable). Do whatever you
# need to the 'gradient' part, for example cap them, etc.
capped_grads_and_vars = [(MyCapper(gv[0]), gv[1]) for gv in grads_and_vars]
# Ask the optimizer to apply the capped gradients.
opt.apply_gradients(capped_grads_and_vars)
```
### Gating Gradients
Both `minimize()` and `compute_gradients()` accept a `gate_gradients`
argument that controls the degree of parallelism during the application of
the gradients.
The possible values are: `GATE_NONE`, `GATE_OP`, and `GATE_GRAPH`.
<b>`GATE_NONE`</b>: Compute and apply gradients in parallel. This provides
the maximum parallelism in execution, at the cost of some non-reproducibility
in the results. For example the two gradients of `matmul` depend on the input
values: With `GATE_NONE` one of the gradients could be applied to one of the
inputs _before_ the other gradient is computed resulting in non-reproducible
results.
<b>`GATE_OP`</b>: For each Op, make sure all gradients are computed before
they are used. This prevents race conditions for Ops that generate gradients
for multiple inputs where the gradients depend on the inputs.
<b>`GATE_GRAPH`</b>: Make sure all gradients for all variables are computed
before any one of them is used. This provides the least parallelism but can
be useful if you want to process all gradients before applying any of them.
### Slots
Some optimizer subclasses, such as `MomentumOptimizer` and `AdagradOptimizer`
allocate and manage additional variables associated with the variables to
train. These are called <i>Slots</i>. Slots have names and you can ask the
optimizer for the names of the slots that it uses. Once you have a slot name
you can ask the optimizer for the variable it created to hold the slot value.
This can be useful if you want to log and debug a training algorithm, report stats
about the slots, etc.
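For example (a sketch; the "momentum" slot name is specific to
`MomentumOptimizer`):
```python
opt = MomentumOptimizer(learning_rate=0.1, momentum=0.9)
opt_op = opt.minimize(cost, var_list=<list of variables>)
# Ask the optimizer for the variable that holds the momentum accumulator:
momentum_slot = opt.get_slot(var, "momentum")
```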
"""
# Values for gate_gradients.
GATE_NONE = 0
GATE_OP = 1
GATE_GRAPH = 2
def __init__(self, use_locking, name):
"""Create a new Optimizer.
This must be called by the constructors of subclasses.
Args:
use_locking: Bool. If True, use locks to prevent concurrent updates
to variables.
name: A non-empty string. The name to use for accumulators created
for the optimizer.
Raises:
ValueError: If name is malformed.
"""
if not name:
raise ValueError("Must specify the optimizer name")
self._use_locking = use_locking
self._name = name
# Dictionary of slots.
# {slot_name : { variable_to_train: slot_for_the_variable, ...}, ... }
self._slots = {}
def get_name(self):
return self._name
def minimize(self, loss, global_step=None, var_list=None,
gate_gradients=GATE_OP, aggregation_method=None,
colocate_gradients_with_ops=False, name=None,
grad_loss=None):
"""Add operations to minimize `loss` by updating `var_list`.
This method simply combines calls to `compute_gradients()` and
`apply_gradients()`. If you want to process the gradients before applying
them, call `compute_gradients()` and `apply_gradients()` explicitly instead
of using this function.
Args:
loss: A `Tensor` containing the value to minimize.
global_step: Optional `Variable` to increment by one after the
variables have been updated.
var_list: Optional list or tuple of `Variable` objects to update to
minimize `loss`. Defaults to the list of variables collected in
the graph under the key `GraphKeys.TRAINABLE_VARIABLES`.
gate_gradients: How to gate the computation of gradients. Can be
`GATE_NONE`, `GATE_OP`, or `GATE_GRAPH`.
aggregation_method: Specifies the method used to combine gradient terms.
Valid values are defined in the class `AggregationMethod`.
colocate_gradients_with_ops: If True, try colocating gradients with
the corresponding op.
name: Optional name for the returned operation.
grad_loss: Optional. A `Tensor` holding the gradient computed for `loss`.
Returns:
An Operation that updates the variables in `var_list`. If `global_step`
was not `None`, that operation also increments `global_step`.
Raises:
ValueError: If some of the variables are not `Variable` objects.
"""
grads_and_vars = self.compute_gradients(
loss, var_list=var_list, gate_gradients=gate_gradients,
aggregation_method=aggregation_method,
colocate_gradients_with_ops=colocate_gradients_with_ops,
grad_loss=grad_loss)
vars_with_grad = [v for g, v in grads_and_vars if g is not None]
if not vars_with_grad:
raise ValueError(
"No gradients provided for any variable, check your graph for ops"
" that do not support gradients, between variables %s and loss %s." %
([str(v) for _, v in grads_and_vars], loss))
return self.apply_gradients(grads_and_vars, global_step=global_step,
name=name)
def compute_gradients(self, loss, var_list=None,
gate_gradients=GATE_OP,
aggregation_method=None,
colocate_gradients_with_ops=False,
grad_loss=None):
"""Compute gradients of `loss` for the variables in `var_list`.
This is the first part of `minimize()`. It returns a list
of (gradient, variable) pairs where "gradient" is the gradient
for "variable". Note that "gradient" can be a `Tensor`, an
`IndexedSlices`, or `None` if there is no gradient for the
given variable.
Args:
loss: A Tensor containing the value to minimize.
var_list: Optional list or tuple of `tf.Variable` to update to minimize
`loss`. Defaults to the list of variables collected in the graph
under the key `GraphKey.TRAINABLE_VARIABLES`.
gate_gradients: How to gate the computation of gradients. Can be
`GATE_NONE`, `GATE_OP`, or `GATE_GRAPH`.
aggregation_method: Specifies the method used to combine gradient terms.
Valid values are defined in the class `AggregationMethod`.
colocate_gradients_with_ops: If True, try colocating gradients with
the corresponding op.
grad_loss: Optional. A `Tensor` holding the gradient computed for `loss`.
Returns:
A list of (gradient, variable) pairs. Variable is always present, but
gradient can be `None`.
Raises:
TypeError: If `var_list` contains anything other than `Variable` objects.
ValueError: If some arguments are invalid.
"""
if gate_gradients not in [Optimizer.GATE_NONE, Optimizer.GATE_OP,
Optimizer.GATE_GRAPH]:
raise ValueError("gate_gradients must be one of: Optimizer.GATE_NONE, "
"Optimizer.GATE_OP, Optimizer.GATE_GRAPH. Not %s" %
gate_gradients)
self._assert_valid_dtypes([loss])
if grad_loss is not None:
self._assert_valid_dtypes([grad_loss])
if var_list is None:
var_list = (
variables.trainable_variables() +
ops.get_collection(ops.GraphKeys.TRAINABLE_RESOURCE_VARIABLES))
else:
var_list = nest.flatten(var_list)
# pylint: disable=protected-access
var_list += ops.get_collection(ops.GraphKeys._STREAMING_MODEL_PORTS)
# pylint: enable=protected-access
processors = [_get_processor(v) for v in var_list]
if not var_list:
raise ValueError("No variables to optimize.")
var_refs = [p.target() for p in processors]
grads = gradients.gradients(
loss, var_refs, grad_ys=grad_loss,
gate_gradients=(gate_gradients == Optimizer.GATE_OP),
aggregation_method=aggregation_method,
colocate_gradients_with_ops=colocate_gradients_with_ops)
if gate_gradients == Optimizer.GATE_GRAPH:
grads = control_flow_ops.tuple(grads)
grads_and_vars = list(zip(grads, var_list))
self._assert_valid_dtypes(
[v for g, v in grads_and_vars
if g is not None and v.dtype != dtypes.resource])
return grads_and_vars
def apply_gradients(self, grads_and_vars, global_step=None, name=None):
"""Apply gradients to variables.
This is the second part of `minimize()`. It returns an `Operation` that
applies gradients.
Args:
grads_and_vars: List of (gradient, variable) pairs as returned by
`compute_gradients()`.
global_step: Optional `Variable` to increment by one after the
variables have been updated.
name: Optional name for the returned operation. Default to the
name passed to the `Optimizer` constructor.
Returns:
An `Operation` that applies the specified gradients. If `global_step`
was not None, that operation also increments `global_step`.
Raises:
TypeError: If `grads_and_vars` is malformed.
ValueError: If none of the variables have gradients.
"""
# This is a default implementation of apply_gradients() that can be shared
# by most optimizers. It relies on the subclass implementing the following
# methods: _create_slots(), _prepare(), _apply_dense(), and _apply_sparse().
grads_and_vars = tuple(grads_and_vars) # Make sure repeat iteration works.
if not grads_and_vars:
raise ValueError("No variables provided.")
converted_grads_and_vars = []
for g, v in grads_and_vars:
if g is not None:
try:
# Convert the grad to Tensor or IndexedSlices if necessary.
g = ops.convert_to_tensor_or_indexed_slices(g)
except TypeError:
raise TypeError(
"Gradient must be convertible to a Tensor"
" or IndexedSlices, or None: %s" % g)
if not isinstance(g, (ops.Tensor, ops.IndexedSlices)):
raise TypeError(
"Gradient must be a Tensor, IndexedSlices, or None: %s" % g)
p = _get_processor(v)
converted_grads_and_vars.append((g, v, p))
converted_grads_and_vars = tuple(converted_grads_and_vars)
var_list = [v for g, v, _ in converted_grads_and_vars if g is not None]
if not var_list:
raise ValueError("No gradients provided for any variable: %s." %
([str(v) for _, _, v in converted_grads_and_vars],))
with ops.control_dependencies(None):
self._create_slots([_get_variable_for(v) for v in var_list])
update_ops = []
with ops.name_scope(name, self._name) as name:
self._prepare()
for grad, var, processor in converted_grads_and_vars:
if grad is None:
continue
# We colocate all ops created in _apply_dense or _apply_sparse
# on the same device as the variable.
with ops.name_scope("update_" + var.op.name), ops.colocate_with(var):
update_ops.append(processor.update_op(self, grad))
if global_step is None:
apply_updates = self._finish(update_ops, name)
else:
with ops.control_dependencies([self._finish(update_ops, "update")]):
with ops.colocate_with(global_step):
apply_updates = state_ops.assign_add(global_step, 1, name=name).op
train_op = ops.get_collection_ref(ops.GraphKeys.TRAIN_OP)
if apply_updates not in train_op:
train_op.append(apply_updates)
return apply_updates
def get_slot(self, var, name):
"""Return a slot named `name` created for `var` by the Optimizer.
Some `Optimizer` subclasses use additional variables. For example
`Momentum` and `Adagrad` use variables to accumulate updates. This method
gives access to these `Variable` objects if for some reason you need them.
Use `get_slot_names()` to get the list of slot names created by the
`Optimizer`.
Args:
var: A variable passed to `minimize()` or `apply_gradients()`.
name: A string.
Returns:
The `Variable` for the slot if it was created, `None` otherwise.
"""
named_slots = self._slots.get(name, None)
if not named_slots:
return None
return named_slots.get(_var_key(var), None)
def get_slot_names(self):
"""Return a list of the names of slots created by the `Optimizer`.
See `get_slot()`.
Returns:
A list of strings.
"""
return sorted(self._slots.keys())
def _assert_valid_dtypes(self, tensors):
"""Asserts tensors are all valid types (see `_valid_dtypes`).
Args:
tensors: Tensors to check.
Raises:
ValueError: If any tensor is not a valid type.
"""
valid_dtypes = self._valid_dtypes()
for t in tensors:
dtype = t.dtype.base_dtype
if dtype not in valid_dtypes:
raise ValueError(
"Invalid type %r for %s, expected: %s." % (
dtype, t.name, [v for v in valid_dtypes]))
# --------------
# Methods to be implemented by subclasses if they want to use the
# inherited implementation of apply_gradients() or compute_gradients().
# --------------
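# A minimal subclass sketch (illustrative only, not part of this module):
# overriding _apply_dense() is enough for a dense-only optimizer.
#
#   class MySGD(Optimizer):
#     def __init__(self, learning_rate, use_locking=False, name="MySGD"):
#       super(MySGD, self).__init__(use_locking, name)
#       self._lr = learning_rate
#     def _apply_dense(self, grad, var):
#       return state_ops.assign_sub(var, self._lr * grad,
#                                   use_locking=self._use_locking).op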
def _valid_dtypes(self):
"""Valid types for loss, variables and gradients.
Subclasses should override to allow other float types.
Returns:
Valid types for loss, variables and gradients.
"""
return set([dtypes.float16, dtypes.float32, dtypes.float64])
def _create_slots(self, var_list):
"""Create all slots needed by the variables.
Args:
var_list: A list of `Variable` objects.
"""
# No slots needed by default
pass
def _prepare(self):
"""Create all needed tensors before applying gradients.
This is called with the name_scope using the "name" that
users have chosen for the application of gradients.
"""
pass
def _apply_dense(self, grad, var):
"""Add ops to apply dense gradients to `var`.
Args:
grad: A `Tensor`.
var: A `Variable` object.
Returns:
An `Operation`.
"""
raise NotImplementedError()
def _resource_apply_dense(self, grad, handle):
"""Add ops to apply dense gradients to the variable `handle`.
Args:
grad: a `Tensor` representing the gradient.
handle: a `Tensor` of dtype `resource` which points to the variable
to be updated.
Returns:
An `Operation` which updates the value of the variable.
"""
raise NotImplementedError()
def _resource_apply_sparse_duplicate_indices(self, grad, handle, indices):
"""Add ops to apply sparse gradients to `handle`, with repeated indices.
Optimizers which override this method must deal with repeated indices. See
the docstring of `_apply_sparse_duplicate_indices` for details. By default
the correct behavior, to sum non-unique indices and their associated
gradients, is enforced by first pre-processing `grad` and `indices` and
passing them on to `_resource_apply_sparse`. Optimizers which deal correctly
with duplicate indices may instead override this method to avoid the
overhead of summing.
Args:
grad: a `Tensor` representing the gradient for the affected indices.
handle: a `Tensor` of dtype `resource` which points to the variable
to be updated.
indices: a `Tensor` of integral type representing the indices for
which the gradient is nonzero. Indices may be repeated.
Returns:
An `Operation` which updates the value of the variable.
"""
summed_grad, unique_indices = _deduplicate_indexed_slices(
values=grad, indices=indices)
return self._resource_apply_sparse(summed_grad, handle, unique_indices)
def _resource_apply_sparse(self, grad, handle, indices):
"""Add ops to apply sparse gradients to the variable `handle`.
Similar to `_apply_sparse`, the `indices` argument to this method has been
de-duplicated. Optimizers which deal correctly with non-unique indices may
instead override `_resource_apply_sparse_duplicate_indices` to avoid this
overhead.
Args:
grad: a `Tensor` representing the gradient for the affected indices.
handle: a `Tensor` of dtype `resource` which points to the variable
to be updated.
indices: a `Tensor` of integral type representing the indices for
which the gradient is nonzero. Indices are unique.
Returns:
An `Operation` which updates the value of the variable.
"""
raise NotImplementedError()
def _apply_sparse_duplicate_indices(self, grad, var):
"""Add ops to apply sparse gradients to `var`, with repeated sparse indices.
Optimizers which override this method must deal with IndexedSlices objects
such as the following:
IndexedSlicesValue(values=[1, 1], indices=[0, 0], dense_shape=[1])
The correct interpretation is:
IndexedSlicesValue(values=[2], indices=[0], dense_shape=[1])
Many optimizers deal incorrectly with repeated indices when updating based
on sparse gradients (e.g. summing squares rather than squaring the sum, or
applying momentum terms multiple times). Adding first is always the correct
behavior, so this is enforced here by reconstructing the IndexedSlices to
have only unique indices, then calling _apply_sparse.
Optimizers which deal correctly with repeated indices may instead override
this method to avoid the overhead of summing indices.
Args:
grad: `IndexedSlices`.
var: A `Variable` object.
Returns:
An `Operation`.
"""
summed_values, unique_indices = _deduplicate_indexed_slices(
values=grad.values, indices=grad.indices)
gradient_no_duplicate_indices = ops.IndexedSlices(
indices=unique_indices,
values=summed_values,
dense_shape=grad.dense_shape)
return self._apply_sparse(gradient_no_duplicate_indices, var)
def _apply_sparse(self, grad, var):
"""Add ops to apply sparse gradients to `var`.
The IndexedSlices object passed to `grad` in this function is by default
pre-processed in `_apply_sparse_duplicate_indices` to remove duplicate
indices (see its docstring for details). Optimizers which can tolerate or
have correct special cases for duplicate sparse indices may override
`_apply_sparse_duplicate_indices` instead of this function, avoiding that
overhead.
Args:
grad: `IndexedSlices`, with no repeated indices.
var: A `Variable` object.
Returns:
An `Operation`.
"""
raise NotImplementedError()
def _finish(self, update_ops, name_scope):
"""Do what is needed to finish the update.
This is called with the `name_scope` using the "name" that
users have chosen for the application of gradients.
Args:
update_ops: List of `Operation` objects to update variables. This list
contains the values returned by the `_apply_dense()` and
`_apply_sparse()` calls.
name_scope: String. Name to use for the returned operation.
Returns:
The operation to apply updates.
"""
return control_flow_ops.group(*update_ops, name=name_scope)
# --------------
# Utility methods for subclasses.
# --------------
def _slot_dict(self, slot_name):
"""Returns a dict for caching slots created under the given name.
Args:
slot_name: Name for the slot.
Returns:
A dict that maps primary `Variable` objects to the slot created
for that variable, under the given slot name.
"""
named_slots = self._slots.get(slot_name, None)
if named_slots is None:
named_slots = {}
self._slots[slot_name] = named_slots
return named_slots
def _get_or_make_slot(self, var, val, slot_name, op_name):
"""Find or create a slot for a variable.
Args:
var: A `Variable` object.
val: A `Tensor`. The initial value of the slot.
slot_name: Name for the slot.
op_name: Name to use when scoping the Variable that
needs to be created for the slot.
Returns:
A `Variable` object.
"""
named_slots = self._slot_dict(slot_name)
if _var_key(var) not in named_slots:
named_slots[_var_key(var)] = slot_creator.create_slot(var, val, op_name)
return named_slots[_var_key(var)]
def _get_or_make_slot_with_initializer(self, var, initializer, shape, dtype,
slot_name, op_name):
"""Find or create a slot for a variable, using an Initializer.
Args:
var: A `Variable` object.
initializer: An `Initializer`. The initial value of the slot.
shape: Shape of the initial value of the slot.
dtype: Type of the value of the slot.
slot_name: Name for the slot.
op_name: Name to use when scoping the Variable that
needs to be created for the slot.
Returns:
A `Variable` object.
"""
named_slots = self._slot_dict(slot_name)
if _var_key(var) not in named_slots:
named_slots[_var_key(var)] = slot_creator.create_slot_with_initializer(
var, initializer, shape, dtype, op_name)
return named_slots[_var_key(var)]
def _zeros_slot(self, var, slot_name, op_name):
"""Find or create a slot initialized with 0.0.
Args:
var: A `Variable` object.
slot_name: Name for the slot.
op_name: Name to use when scoping the Variable that
needs to be created for the slot.
Returns:
A `Variable` object.
"""
named_slots = self._slot_dict(slot_name)
if _var_key(var) not in named_slots:
named_slots[_var_key(var)] = slot_creator.create_zeros_slot(var, op_name)
return named_slots[_var_key(var)]
|
apache-2.0
|
tuanthng/apitrace
|
specs/dwrite.py
|
4
|
74738
|
##########################################################################
#
# Copyright 2011 Jose Fonseca
# All Rights Reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
##########################################################################
from winapi import *
from dcommon import *
ID2D1SimplifiedGeometrySink = Interface("ID2D1SimplifiedGeometrySink", IUnknown)
IDWriteGeometrySink = Alias("IDWriteGeometrySink", ID2D1SimplifiedGeometrySink)
IDWriteFontFileLoader = Interface("IDWriteFontFileLoader", IUnknown)
IDWriteLocalFontFileLoader = Interface("IDWriteLocalFontFileLoader", IDWriteFontFileLoader)
IDWriteFontFileStream = Interface("IDWriteFontFileStream", IUnknown)
IDWriteFontFile = Interface("IDWriteFontFile", IUnknown)
IDWriteRenderingParams = Interface("IDWriteRenderingParams", IUnknown)
IDWriteFontFace = Interface("IDWriteFontFace", IUnknown)
IDWriteFontCollectionLoader = Interface("IDWriteFontCollectionLoader", IUnknown)
IDWriteFontFileEnumerator = Interface("IDWriteFontFileEnumerator", IUnknown)
IDWriteLocalizedStrings = Interface("IDWriteLocalizedStrings", IUnknown)
IDWriteFontCollection = Interface("IDWriteFontCollection", IUnknown)
IDWriteFontList = Interface("IDWriteFontList", IUnknown)
IDWriteFontFamily = Interface("IDWriteFontFamily", IDWriteFontList)
IDWriteFont = Interface("IDWriteFont", IUnknown)
IDWriteTextFormat = Interface("IDWriteTextFormat", IUnknown)
IDWriteTypography = Interface("IDWriteTypography", IUnknown)
IDWriteNumberSubstitution = Interface("IDWriteNumberSubstitution", IUnknown)
IDWriteTextAnalysisSource = Interface("IDWriteTextAnalysisSource", IUnknown)
IDWriteTextAnalysisSink = Interface("IDWriteTextAnalysisSink", IUnknown)
IDWriteTextAnalyzer = Interface("IDWriteTextAnalyzer", IUnknown)
IDWriteInlineObject = Interface("IDWriteInlineObject", IUnknown)
IDWritePixelSnapping = Interface("IDWritePixelSnapping", IUnknown)
IDWriteTextRenderer = Interface("IDWriteTextRenderer", IDWritePixelSnapping)
IDWriteTextLayout = Interface("IDWriteTextLayout", IDWriteTextFormat)
IDWriteBitmapRenderTarget = Interface("IDWriteBitmapRenderTarget", IUnknown)
IDWriteGdiInterop = Interface("IDWriteGdiInterop", IUnknown)
IDWriteGlyphRunAnalysis = Interface("IDWriteGlyphRunAnalysis", IUnknown)
IDWriteFactory = Interface("IDWriteFactory", IUnknown)
def EnumFlag(expr, values):
return Flags(Alias(expr, DWORD), values)
DWRITE_FONT_FILE_TYPE = Enum("DWRITE_FONT_FILE_TYPE", [
"DWRITE_FONT_FILE_TYPE_UNKNOWN",
"DWRITE_FONT_FILE_TYPE_CFF",
"DWRITE_FONT_FILE_TYPE_TRUETYPE",
"DWRITE_FONT_FILE_TYPE_TRUETYPE_COLLECTION",
"DWRITE_FONT_FILE_TYPE_TYPE1_PFM",
"DWRITE_FONT_FILE_TYPE_TYPE1_PFB",
"DWRITE_FONT_FILE_TYPE_VECTOR",
"DWRITE_FONT_FILE_TYPE_BITMAP",
])
DWRITE_FONT_FACE_TYPE = Enum("DWRITE_FONT_FACE_TYPE", [
"DWRITE_FONT_FACE_TYPE_CFF",
"DWRITE_FONT_FACE_TYPE_TRUETYPE",
"DWRITE_FONT_FACE_TYPE_TRUETYPE_COLLECTION",
"DWRITE_FONT_FACE_TYPE_TYPE1",
"DWRITE_FONT_FACE_TYPE_VECTOR",
"DWRITE_FONT_FACE_TYPE_BITMAP",
"DWRITE_FONT_FACE_TYPE_UNKNOWN",
])
DWRITE_FONT_SIMULATIONS = EnumFlag("DWRITE_FONT_SIMULATIONS", [
"DWRITE_FONT_SIMULATIONS_NONE",
"DWRITE_FONT_SIMULATIONS_BOLD",
"DWRITE_FONT_SIMULATIONS_OBLIQUE",
])
DWRITE_FONT_WEIGHT = Enum("DWRITE_FONT_WEIGHT", [
"DWRITE_FONT_WEIGHT_THIN",
"DWRITE_FONT_WEIGHT_EXTRA_LIGHT",
#"DWRITE_FONT_WEIGHT_ULTRA_LIGHT",
"DWRITE_FONT_WEIGHT_LIGHT",
"DWRITE_FONT_WEIGHT_NORMAL",
#"DWRITE_FONT_WEIGHT_REGULAR",
"DWRITE_FONT_WEIGHT_MEDIUM",
"DWRITE_FONT_WEIGHT_DEMI_BOLD",
#"DWRITE_FONT_WEIGHT_SEMI_BOLD",
"DWRITE_FONT_WEIGHT_BOLD",
"DWRITE_FONT_WEIGHT_EXTRA_BOLD",
#"DWRITE_FONT_WEIGHT_ULTRA_BOLD",
"DWRITE_FONT_WEIGHT_BLACK",
#"DWRITE_FONT_WEIGHT_HEAVY",
"DWRITE_FONT_WEIGHT_EXTRA_BLACK",
#"DWRITE_FONT_WEIGHT_ULTRA_BLACK",
])
DWRITE_FONT_STRETCH = Enum("DWRITE_FONT_STRETCH", [
"DWRITE_FONT_STRETCH_UNDEFINED",
"DWRITE_FONT_STRETCH_ULTRA_CONDENSED",
"DWRITE_FONT_STRETCH_EXTRA_CONDENSED",
"DWRITE_FONT_STRETCH_CONDENSED",
"DWRITE_FONT_STRETCH_SEMI_CONDENSED",
"DWRITE_FONT_STRETCH_NORMAL",
#"DWRITE_FONT_STRETCH_MEDIUM",
"DWRITE_FONT_STRETCH_SEMI_EXPANDED",
"DWRITE_FONT_STRETCH_EXPANDED",
"DWRITE_FONT_STRETCH_EXTRA_EXPANDED",
"DWRITE_FONT_STRETCH_ULTRA_EXPANDED",
])
DWRITE_FONT_STYLE = Enum("DWRITE_FONT_STYLE", [
"DWRITE_FONT_STYLE_NORMAL",
"DWRITE_FONT_STYLE_OBLIQUE",
"DWRITE_FONT_STYLE_ITALIC",
])
DWRITE_INFORMATIONAL_STRING_ID = Enum("DWRITE_INFORMATIONAL_STRING_ID", [
"DWRITE_INFORMATIONAL_STRING_NONE",
"DWRITE_INFORMATIONAL_STRING_COPYRIGHT_NOTICE",
"DWRITE_INFORMATIONAL_STRING_VERSION_STRINGS",
"DWRITE_INFORMATIONAL_STRING_TRADEMARK",
"DWRITE_INFORMATIONAL_STRING_MANUFACTURER",
"DWRITE_INFORMATIONAL_STRING_DESIGNER",
"DWRITE_INFORMATIONAL_STRING_DESIGNER_URL",
"DWRITE_INFORMATIONAL_STRING_DESCRIPTION",
"DWRITE_INFORMATIONAL_STRING_FONT_VENDOR_URL",
"DWRITE_INFORMATIONAL_STRING_LICENSE_DESCRIPTION",
"DWRITE_INFORMATIONAL_STRING_LICENSE_INFO_URL",
"DWRITE_INFORMATIONAL_STRING_WIN32_FAMILY_NAMES",
"DWRITE_INFORMATIONAL_STRING_WIN32_SUBFAMILY_NAMES",
"DWRITE_INFORMATIONAL_STRING_PREFERRED_FAMILY_NAMES",
"DWRITE_INFORMATIONAL_STRING_PREFERRED_SUBFAMILY_NAMES",
"DWRITE_INFORMATIONAL_STRING_SAMPLE_TEXT",
])
DWRITE_FONT_METRICS = Struct("DWRITE_FONT_METRICS", [
(UINT16, "designUnitsPerEm"),
(UINT16, "ascent"),
(UINT16, "descent"),
(INT16, "lineGap"),
(UINT16, "capHeight"),
(UINT16, "xHeight"),
(INT16, "underlinePosition"),
(UINT16, "underlineThickness"),
(INT16, "strikethroughPosition"),
(UINT16, "strikethroughThickness"),
])
DWRITE_GLYPH_METRICS = Struct("DWRITE_GLYPH_METRICS", [
(INT32, "leftSideBearing"),
(UINT32, "advanceWidth"),
(INT32, "rightSideBearing"),
(INT32, "topSideBearing"),
(UINT32, "advanceHeight"),
(INT32, "bottomSideBearing"),
(INT32, "verticalOriginY"),
])
DWRITE_GLYPH_OFFSET = Struct("DWRITE_GLYPH_OFFSET", [
(FLOAT, "advanceOffset"),
(FLOAT, "ascenderOffset"),
])
DWRITE_FACTORY_TYPE = Enum("DWRITE_FACTORY_TYPE", [
"DWRITE_FACTORY_TYPE_SHARED",
"DWRITE_FACTORY_TYPE_ISOLATED",
])
IDWriteFontFileLoader.methods += [
StdMethod(HRESULT, "CreateStreamFromKey", [(OpaquePointer(Const(Void)), "fontFileReferenceKey"), (UINT32, "fontFileReferenceKeySize"), Out(Pointer(ObjPointer(IDWriteFontFileStream)), "fontFileStream")]),
]
IDWriteLocalFontFileLoader.methods += [
StdMethod(HRESULT, "GetFilePathLengthFromKey", [(OpaquePointer(Const(Void)), "fontFileReferenceKey"), (UINT32, "fontFileReferenceKeySize"), Out(Pointer(UINT32), "filePathLength")]),
StdMethod(HRESULT, "GetFilePathFromKey", [(OpaquePointer(Const(Void)), "fontFileReferenceKey"), (UINT32, "fontFileReferenceKeySize"), Out(Pointer(WCHAR), "filePath"), (UINT32, "filePathSize")]),
StdMethod(HRESULT, "GetLastWriteTimeFromKey", [(OpaquePointer(Const(Void)), "fontFileReferenceKey"), (UINT32, "fontFileReferenceKeySize"), Out(Pointer(FILETIME), "lastWriteTime")]),
]
IDWriteFontFileStream.methods += [
StdMethod(HRESULT, "ReadFileFragment", [Out(Pointer(OpaquePointer(Const(Void))), "fragmentStart"), (UINT64, "fileOffset"), (UINT64, "fragmentSize"), Out(Pointer(OpaquePointer(Void)), "fragmentContext")]),
StdMethod(Void, "ReleaseFileFragment", [(OpaquePointer(Void), "fragmentContext")]),
StdMethod(HRESULT, "GetFileSize", [Out(Pointer(UINT64), "fileSize")]),
StdMethod(HRESULT, "GetLastWriteTime", [Out(Pointer(UINT64), "lastWriteTime")]),
]
IDWriteFontFile.methods += [
StdMethod(HRESULT, "GetReferenceKey", [Out(Pointer(OpaquePointer(Const(Void))), "fontFileReferenceKey"), Out(Pointer(UINT32), "fontFileReferenceKeySize")]),
StdMethod(HRESULT, "GetLoader", [Out(Pointer(ObjPointer(IDWriteFontFileLoader)), "fontFileLoader")]),
StdMethod(HRESULT, "Analyze", [Out(Pointer(BOOL), "isSupportedFontType"), Out(Pointer(DWRITE_FONT_FILE_TYPE), "fontFileType"), Out(Pointer(DWRITE_FONT_FACE_TYPE), "fontFaceType"), Out(Pointer(UINT32), "numberOfFaces")]),
]
DWRITE_PIXEL_GEOMETRY = Enum("DWRITE_PIXEL_GEOMETRY", [
"DWRITE_PIXEL_GEOMETRY_FLAT",
"DWRITE_PIXEL_GEOMETRY_RGB",
"DWRITE_PIXEL_GEOMETRY_BGR",
])
DWRITE_RENDERING_MODE = Enum("DWRITE_RENDERING_MODE", [
"DWRITE_RENDERING_MODE_DEFAULT",
"DWRITE_RENDERING_MODE_ALIASED",
"DWRITE_RENDERING_MODE_CLEARTYPE_GDI_CLASSIC",
"DWRITE_RENDERING_MODE_CLEARTYPE_GDI_NATURAL",
"DWRITE_RENDERING_MODE_CLEARTYPE_NATURAL",
"DWRITE_RENDERING_MODE_CLEARTYPE_NATURAL_SYMMETRIC",
"DWRITE_RENDERING_MODE_OUTLINE",
])
DWRITE_MATRIX = Struct("DWRITE_MATRIX", [
(FLOAT, "m11"),
(FLOAT, "m12"),
(FLOAT, "m21"),
(FLOAT, "m22"),
(FLOAT, "dx"),
(FLOAT, "dy"),
])
IDWriteRenderingParams.methods += [
StdMethod(FLOAT, "GetGamma", []),
StdMethod(FLOAT, "GetEnhancedContrast", []),
StdMethod(FLOAT, "GetClearTypeLevel", []),
StdMethod(DWRITE_PIXEL_GEOMETRY, "GetPixelGeometry", []),
StdMethod(DWRITE_RENDERING_MODE, "GetRenderingMode", []),
]
IDWriteFontFace.methods += [
StdMethod(DWRITE_FONT_FACE_TYPE, "GetType", []),
StdMethod(HRESULT, "GetFiles", [(OpaquePointer(UINT32), "numberOfFiles"), Out(Pointer(ObjPointer(IDWriteFontFile)), "fontFiles")]),
StdMethod(UINT32, "GetIndex", []),
StdMethod(DWRITE_FONT_SIMULATIONS, "GetSimulations", []),
StdMethod(BOOL, "IsSymbolFont", []),
StdMethod(Void, "GetMetrics", [Out(Pointer(DWRITE_FONT_METRICS), "fontFaceMetrics")]),
StdMethod(UINT16, "GetGlyphCount", []),
StdMethod(HRESULT, "GetDesignGlyphMetrics", [(Pointer(Const(UINT16)), "glyphIndices"), (UINT32, "glyphCount"), Out(Pointer(DWRITE_GLYPH_METRICS), "glyphMetrics"), (BOOL, "isSideways")]),
StdMethod(HRESULT, "GetGlyphIndices", [(Pointer(Const(UINT32)), "codePoints"), (UINT32, "codePointCount"), Out(Pointer(UINT16), "glyphIndices")]),
StdMethod(HRESULT, "TryGetFontTable", [(UINT32, "openTypeTableTag"), Out(Pointer(OpaquePointer(Const(Void))), "tableData"), Out(Pointer(UINT32), "tableSize"), Out(Pointer(OpaquePointer(Void)), "tableContext"), Out(Pointer(BOOL), "exists")]),
StdMethod(Void, "ReleaseFontTable", [(OpaquePointer(Void), "tableContext")]),
StdMethod(HRESULT, "GetGlyphRunOutline", [(FLOAT, "emSize"), (Pointer(Const(UINT16)), "glyphIndices"), (Pointer(Const(FLOAT)), "glyphAdvances"), (Pointer(Const(DWRITE_GLYPH_OFFSET)), "glyphOffsets"), (UINT32, "glyphCount"), (BOOL, "isSideways"), (BOOL, "isRightToLeft"), (ObjPointer(IDWriteGeometrySink), "geometrySink")]),
StdMethod(HRESULT, "GetRecommendedRenderingMode", [(FLOAT, "emSize"), (FLOAT, "pixelsPerDip"), (DWRITE_MEASURING_MODE, "measuringMode"), (ObjPointer(IDWriteRenderingParams), "renderingParams"), Out(Pointer(DWRITE_RENDERING_MODE), "renderingMode")]),
StdMethod(HRESULT, "GetGdiCompatibleMetrics", [(FLOAT, "emSize"), (FLOAT, "pixelsPerDip"), (Pointer(Const(DWRITE_MATRIX)), "transform"), Out(Pointer(DWRITE_FONT_METRICS), "fontFaceMetrics")]),
StdMethod(HRESULT, "GetGdiCompatibleGlyphMetrics", [(FLOAT, "emSize"), (FLOAT, "pixelsPerDip"), (Pointer(Const(DWRITE_MATRIX)), "transform"), (BOOL, "useGdiNatural"), (Array(Const(UINT16), "glyphCount"), "glyphIndices"), (UINT32, "glyphCount"), Out(Array(DWRITE_GLYPH_METRICS, "glyphCount"), "glyphMetrics"), (BOOL, "isSideways")]),
]
IDWriteFontCollectionLoader.methods += [
StdMethod(HRESULT, "CreateEnumeratorFromKey", [(ObjPointer(IDWriteFactory), "factory"), (OpaquePointer(Const(Void)), "collectionKey"), (UINT32, "collectionKeySize"), Out(Pointer(ObjPointer(IDWriteFontFileEnumerator)), "fontFileEnumerator")]),
]
IDWriteFontFileEnumerator.methods += [
StdMethod(HRESULT, "MoveNext", [Out(Pointer(BOOL), "hasCurrentFile")]),
StdMethod(HRESULT, "GetCurrentFontFile", [Out(Pointer(ObjPointer(IDWriteFontFile)), "fontFile")]),
]
IDWriteLocalizedStrings.methods += [
StdMethod(UINT32, "GetCount", []),
StdMethod(HRESULT, "FindLocaleName", [(PCWSTR, "localeName"), Out(Pointer(UINT32), "index"), Out(Pointer(BOOL), "exists")]),
StdMethod(HRESULT, "GetLocaleNameLength", [(UINT32, "index"), Out(Pointer(UINT32), "length")]),
StdMethod(HRESULT, "GetLocaleName", [(UINT32, "index"), Out(Pointer(WCHAR), "localeName"), (UINT32, "size")]),
StdMethod(HRESULT, "GetStringLength", [(UINT32, "index"), Out(Pointer(UINT32), "length")]),
StdMethod(HRESULT, "GetString", [(UINT32, "index"), Out(Pointer(WCHAR), "stringBuffer"), (UINT32, "size")]),
]
IDWriteFontCollection.methods += [
StdMethod(UINT32, "GetFontFamilyCount", []),
StdMethod(HRESULT, "GetFontFamily", [(UINT32, "index"), Out(Pointer(ObjPointer(IDWriteFontFamily)), "fontFamily")]),
StdMethod(HRESULT, "FindFamilyName", [(PCWSTR, "familyName"), Out(Pointer(UINT32), "index"), Out(Pointer(BOOL), "exists")]),
StdMethod(HRESULT, "GetFontFromFontFace", [(ObjPointer(IDWriteFontFace), "fontFace"), Out(Pointer(ObjPointer(IDWriteFont)), "font")]),
]
IDWriteFontList.methods += [
StdMethod(HRESULT, "GetFontCollection", [Out(Pointer(ObjPointer(IDWriteFontCollection)), "fontCollection")]),
StdMethod(UINT32, "GetFontCount", []),
StdMethod(HRESULT, "GetFont", [(UINT32, "index"), Out(Pointer(ObjPointer(IDWriteFont)), "font")]),
]
IDWriteFontFamily.methods += [
StdMethod(HRESULT, "GetFamilyNames", [Out(Pointer(ObjPointer(IDWriteLocalizedStrings)), "names")]),
StdMethod(HRESULT, "GetFirstMatchingFont", [(DWRITE_FONT_WEIGHT, "weight"), (DWRITE_FONT_STRETCH, "stretch"), (DWRITE_FONT_STYLE, "style"), Out(Pointer(ObjPointer(IDWriteFont)), "matchingFont")]),
StdMethod(HRESULT, "GetMatchingFonts", [(DWRITE_FONT_WEIGHT, "weight"), (DWRITE_FONT_STRETCH, "stretch"), (DWRITE_FONT_STYLE, "style"), Out(Pointer(ObjPointer(IDWriteFontList)), "matchingFonts")]),
]
IDWriteFont.methods += [
StdMethod(HRESULT, "GetFontFamily", [Out(Pointer(ObjPointer(IDWriteFontFamily)), "fontFamily")]),
StdMethod(DWRITE_FONT_WEIGHT, "GetWeight", []),
StdMethod(DWRITE_FONT_STRETCH, "GetStretch", []),
StdMethod(DWRITE_FONT_STYLE, "GetStyle", []),
StdMethod(BOOL, "IsSymbolFont", []),
StdMethod(HRESULT, "GetFaceNames", [Out(Pointer(ObjPointer(IDWriteLocalizedStrings)), "names")]),
StdMethod(HRESULT, "GetInformationalStrings", [(DWRITE_INFORMATIONAL_STRING_ID, "informationalStringID"), Out(Pointer(ObjPointer(IDWriteLocalizedStrings)), "informationalStrings"), Out(Pointer(BOOL), "exists")]),
StdMethod(DWRITE_FONT_SIMULATIONS, "GetSimulations", []),
StdMethod(Void, "GetMetrics", [Out(Pointer(DWRITE_FONT_METRICS), "fontMetrics")]),
StdMethod(HRESULT, "HasCharacter", [(UINT32, "unicodeValue"), Out(Pointer(BOOL), "exists")]),
StdMethod(HRESULT, "CreateFontFace", [Out(Pointer(ObjPointer(IDWriteFontFace)), "fontFace")]),
]
DWRITE_READING_DIRECTION = Enum("DWRITE_READING_DIRECTION", [
"DWRITE_READING_DIRECTION_LEFT_TO_RIGHT",
"DWRITE_READING_DIRECTION_RIGHT_TO_LEFT",
])
DWRITE_FLOW_DIRECTION = Enum("DWRITE_FLOW_DIRECTION", [
"DWRITE_FLOW_DIRECTION_TOP_TO_BOTTOM",
])
DWRITE_TEXT_ALIGNMENT = Enum("DWRITE_TEXT_ALIGNMENT", [
"DWRITE_TEXT_ALIGNMENT_LEADING",
"DWRITE_TEXT_ALIGNMENT_TRAILING",
"DWRITE_TEXT_ALIGNMENT_CENTER",
])
DWRITE_PARAGRAPH_ALIGNMENT = Enum("DWRITE_PARAGRAPH_ALIGNMENT", [
"DWRITE_PARAGRAPH_ALIGNMENT_NEAR",
"DWRITE_PARAGRAPH_ALIGNMENT_FAR",
"DWRITE_PARAGRAPH_ALIGNMENT_CENTER",
])
DWRITE_WORD_WRAPPING = Enum("DWRITE_WORD_WRAPPING", [
"DWRITE_WORD_WRAPPING_WRAP",
"DWRITE_WORD_WRAPPING_NO_WRAP",
])
DWRITE_LINE_SPACING_METHOD = Enum("DWRITE_LINE_SPACING_METHOD", [
"DWRITE_LINE_SPACING_METHOD_DEFAULT",
"DWRITE_LINE_SPACING_METHOD_UNIFORM",
])
DWRITE_TRIMMING_GRANULARITY = Enum("DWRITE_TRIMMING_GRANULARITY", [
"DWRITE_TRIMMING_GRANULARITY_NONE",
"DWRITE_TRIMMING_GRANULARITY_CHARACTER",
"DWRITE_TRIMMING_GRANULARITY_WORD",
])
DWRITE_FONT_FEATURE_TAG = Enum("DWRITE_FONT_FEATURE_TAG", [
"DWRITE_FONT_FEATURE_TAG_ALTERNATIVE_FRACTIONS",
"DWRITE_FONT_FEATURE_TAG_PETITE_CAPITALS_FROM_CAPITALS",
"DWRITE_FONT_FEATURE_TAG_SMALL_CAPITALS_FROM_CAPITALS",
"DWRITE_FONT_FEATURE_TAG_CONTEXTUAL_ALTERNATES",
"DWRITE_FONT_FEATURE_TAG_CASE_SENSITIVE_FORMS",
"DWRITE_FONT_FEATURE_TAG_GLYPH_COMPOSITION_DECOMPOSITION",
"DWRITE_FONT_FEATURE_TAG_CONTEXTUAL_LIGATURES",
"DWRITE_FONT_FEATURE_TAG_CAPITAL_SPACING",
"DWRITE_FONT_FEATURE_TAG_CONTEXTUAL_SWASH",
"DWRITE_FONT_FEATURE_TAG_CURSIVE_POSITIONING",
"DWRITE_FONT_FEATURE_TAG_DEFAULT",
"DWRITE_FONT_FEATURE_TAG_DISCRETIONARY_LIGATURES",
"DWRITE_FONT_FEATURE_TAG_EXPERT_FORMS",
"DWRITE_FONT_FEATURE_TAG_FRACTIONS",
"DWRITE_FONT_FEATURE_TAG_FULL_WIDTH",
"DWRITE_FONT_FEATURE_TAG_HALF_FORMS",
"DWRITE_FONT_FEATURE_TAG_HALANT_FORMS",
"DWRITE_FONT_FEATURE_TAG_ALTERNATE_HALF_WIDTH",
"DWRITE_FONT_FEATURE_TAG_HISTORICAL_FORMS",
"DWRITE_FONT_FEATURE_TAG_HORIZONTAL_KANA_ALTERNATES",
"DWRITE_FONT_FEATURE_TAG_HISTORICAL_LIGATURES",
"DWRITE_FONT_FEATURE_TAG_HALF_WIDTH",
"DWRITE_FONT_FEATURE_TAG_HOJO_KANJI_FORMS",
"DWRITE_FONT_FEATURE_TAG_JIS04_FORMS",
"DWRITE_FONT_FEATURE_TAG_JIS78_FORMS",
"DWRITE_FONT_FEATURE_TAG_JIS83_FORMS",
"DWRITE_FONT_FEATURE_TAG_JIS90_FORMS",
"DWRITE_FONT_FEATURE_TAG_KERNING",
"DWRITE_FONT_FEATURE_TAG_STANDARD_LIGATURES",
"DWRITE_FONT_FEATURE_TAG_LINING_FIGURES",
"DWRITE_FONT_FEATURE_TAG_LOCALIZED_FORMS",
"DWRITE_FONT_FEATURE_TAG_MARK_POSITIONING",
"DWRITE_FONT_FEATURE_TAG_MATHEMATICAL_GREEK",
"DWRITE_FONT_FEATURE_TAG_MARK_TO_MARK_POSITIONING",
"DWRITE_FONT_FEATURE_TAG_ALTERNATE_ANNOTATION_FORMS",
"DWRITE_FONT_FEATURE_TAG_NLC_KANJI_FORMS",
"DWRITE_FONT_FEATURE_TAG_OLD_STYLE_FIGURES",
"DWRITE_FONT_FEATURE_TAG_ORDINALS",
"DWRITE_FONT_FEATURE_TAG_PROPORTIONAL_ALTERNATE_WIDTH",
"DWRITE_FONT_FEATURE_TAG_PETITE_CAPITALS",
"DWRITE_FONT_FEATURE_TAG_PROPORTIONAL_FIGURES",
"DWRITE_FONT_FEATURE_TAG_PROPORTIONAL_WIDTHS",
"DWRITE_FONT_FEATURE_TAG_QUARTER_WIDTHS",
"DWRITE_FONT_FEATURE_TAG_REQUIRED_LIGATURES",
"DWRITE_FONT_FEATURE_TAG_RUBY_NOTATION_FORMS",
"DWRITE_FONT_FEATURE_TAG_STYLISTIC_ALTERNATES",
"DWRITE_FONT_FEATURE_TAG_SCIENTIFIC_INFERIORS",
"DWRITE_FONT_FEATURE_TAG_SMALL_CAPITALS",
"DWRITE_FONT_FEATURE_TAG_SIMPLIFIED_FORMS",
"DWRITE_FONT_FEATURE_TAG_STYLISTIC_SET_1",
"DWRITE_FONT_FEATURE_TAG_STYLISTIC_SET_2",
"DWRITE_FONT_FEATURE_TAG_STYLISTIC_SET_3",
"DWRITE_FONT_FEATURE_TAG_STYLISTIC_SET_4",
"DWRITE_FONT_FEATURE_TAG_STYLISTIC_SET_5",
"DWRITE_FONT_FEATURE_TAG_STYLISTIC_SET_6",
"DWRITE_FONT_FEATURE_TAG_STYLISTIC_SET_7",
"DWRITE_FONT_FEATURE_TAG_STYLISTIC_SET_8",
"DWRITE_FONT_FEATURE_TAG_STYLISTIC_SET_9",
"DWRITE_FONT_FEATURE_TAG_STYLISTIC_SET_10",
"DWRITE_FONT_FEATURE_TAG_STYLISTIC_SET_11",
"DWRITE_FONT_FEATURE_TAG_STYLISTIC_SET_12",
"DWRITE_FONT_FEATURE_TAG_STYLISTIC_SET_13",
"DWRITE_FONT_FEATURE_TAG_STYLISTIC_SET_14",
"DWRITE_FONT_FEATURE_TAG_STYLISTIC_SET_15",
"DWRITE_FONT_FEATURE_TAG_STYLISTIC_SET_16",
"DWRITE_FONT_FEATURE_TAG_STYLISTIC_SET_17",
"DWRITE_FONT_FEATURE_TAG_STYLISTIC_SET_18",
"DWRITE_FONT_FEATURE_TAG_STYLISTIC_SET_19",
"DWRITE_FONT_FEATURE_TAG_STYLISTIC_SET_20",
"DWRITE_FONT_FEATURE_TAG_SUBSCRIPT",
"DWRITE_FONT_FEATURE_TAG_SUPERSCRIPT",
"DWRITE_FONT_FEATURE_TAG_SWASH",
"DWRITE_FONT_FEATURE_TAG_TITLING",
"DWRITE_FONT_FEATURE_TAG_TRADITIONAL_NAME_FORMS",
"DWRITE_FONT_FEATURE_TAG_TABULAR_FIGURES",
"DWRITE_FONT_FEATURE_TAG_TRADITIONAL_FORMS",
"DWRITE_FONT_FEATURE_TAG_THIRD_WIDTHS",
"DWRITE_FONT_FEATURE_TAG_UNICASE",
"DWRITE_FONT_FEATURE_TAG_SLASHED_ZERO",
])
DWRITE_TEXT_RANGE = Struct("DWRITE_TEXT_RANGE", [
(UINT32, "startPosition"),
(UINT32, "length"),
])
DWRITE_FONT_FEATURE = Struct("DWRITE_FONT_FEATURE", [
(DWRITE_FONT_FEATURE_TAG, "nameTag"),
(UINT32, "parameter"),
])
DWRITE_TYPOGRAPHIC_FEATURES = Struct("DWRITE_TYPOGRAPHIC_FEATURES", [
(OpaquePointer(DWRITE_FONT_FEATURE), "features"),
(UINT32, "featureCount"),
])
DWRITE_TRIMMING = Struct("DWRITE_TRIMMING", [
(DWRITE_TRIMMING_GRANULARITY, "granularity"),
(UINT32, "delimiter"),
(UINT32, "delimiterCount"),
])
IDWriteTextFormat.methods += [
StdMethod(HRESULT, "SetTextAlignment", [(DWRITE_TEXT_ALIGNMENT, "textAlignment")]),
StdMethod(HRESULT, "SetParagraphAlignment", [(DWRITE_PARAGRAPH_ALIGNMENT, "paragraphAlignment")]),
StdMethod(HRESULT, "SetWordWrapping", [(DWRITE_WORD_WRAPPING, "wordWrapping")]),
StdMethod(HRESULT, "SetReadingDirection", [(DWRITE_READING_DIRECTION, "readingDirection")]),
StdMethod(HRESULT, "SetFlowDirection", [(DWRITE_FLOW_DIRECTION, "flowDirection")]),
StdMethod(HRESULT, "SetIncrementalTabStop", [(FLOAT, "incrementalTabStop")]),
StdMethod(HRESULT, "SetTrimming", [(Pointer(Const(DWRITE_TRIMMING)), "trimmingOptions"), (OpaquePointer(IDWriteInlineObject), "trimmingSign")]),
StdMethod(HRESULT, "SetLineSpacing", [(DWRITE_LINE_SPACING_METHOD, "lineSpacingMethod"), (FLOAT, "lineSpacing"), (FLOAT, "baseline")]),
StdMethod(DWRITE_TEXT_ALIGNMENT, "GetTextAlignment", []),
StdMethod(DWRITE_PARAGRAPH_ALIGNMENT, "GetParagraphAlignment", []),
StdMethod(DWRITE_WORD_WRAPPING, "GetWordWrapping", []),
StdMethod(DWRITE_READING_DIRECTION, "GetReadingDirection", []),
StdMethod(DWRITE_FLOW_DIRECTION, "GetFlowDirection", []),
StdMethod(FLOAT, "GetIncrementalTabStop", []),
StdMethod(HRESULT, "GetTrimming", [Out(Pointer(DWRITE_TRIMMING), "trimmingOptions"), Out(Pointer(OpaquePointer(IDWriteInlineObject)), "trimmingSign")]),
StdMethod(HRESULT, "GetLineSpacing", [Out(Pointer(DWRITE_LINE_SPACING_METHOD), "lineSpacingMethod"), Out(Pointer(FLOAT), "lineSpacing"), Out(Pointer(FLOAT), "baseline")]),
StdMethod(HRESULT, "GetFontCollection", [Out(Pointer(ObjPointer(IDWriteFontCollection)), "fontCollection")]),
StdMethod(UINT32, "GetFontFamilyNameLength", []),
StdMethod(HRESULT, "GetFontFamilyName", [Out(Pointer(WCHAR), "fontFamilyName"), (UINT32, "nameSize")]),
StdMethod(DWRITE_FONT_WEIGHT, "GetFontWeight", []),
StdMethod(DWRITE_FONT_STYLE, "GetFontStyle", []),
StdMethod(DWRITE_FONT_STRETCH, "GetFontStretch", []),
StdMethod(FLOAT, "GetFontSize", []),
StdMethod(UINT32, "GetLocaleNameLength", []),
StdMethod(HRESULT, "GetLocaleName", [Out(Pointer(WCHAR), "localeName"), (UINT32, "nameSize")]),
]
IDWriteTypography.methods += [
StdMethod(HRESULT, "AddFontFeature", [(DWRITE_FONT_FEATURE, "fontFeature")]),
StdMethod(UINT32, "GetFontFeatureCount", []),
StdMethod(HRESULT, "GetFontFeature", [(UINT32, "fontFeatureIndex"), Out(Pointer(DWRITE_FONT_FEATURE), "fontFeature")]),
]
DWRITE_SCRIPT_SHAPES = EnumFlag("DWRITE_SCRIPT_SHAPES", [
"DWRITE_SCRIPT_SHAPES_DEFAULT",
"DWRITE_SCRIPT_SHAPES_NO_VISUAL",
])
DWRITE_SCRIPT_ANALYSIS = Struct("DWRITE_SCRIPT_ANALYSIS", [
(UINT16, "script"),
(DWRITE_SCRIPT_SHAPES, "shapes"),
])
DWRITE_BREAK_CONDITION = Enum("DWRITE_BREAK_CONDITION", [
"DWRITE_BREAK_CONDITION_NEUTRAL",
"DWRITE_BREAK_CONDITION_CAN_BREAK",
"DWRITE_BREAK_CONDITION_MAY_NOT_BREAK",
"DWRITE_BREAK_CONDITION_MUST_BREAK",
])
DWRITE_LINE_BREAKPOINT = Struct("DWRITE_LINE_BREAKPOINT", [
(UINT8, "breakConditionBefore"),
(UINT8, "breakConditionAfter"),
(UINT8, "isWhitespace"),
(UINT8, "isSoftHyphen"),
(UINT8, "padding"),
])
DWRITE_NUMBER_SUBSTITUTION_METHOD = Enum("DWRITE_NUMBER_SUBSTITUTION_METHOD", [
"DWRITE_NUMBER_SUBSTITUTION_METHOD_FROM_CULTURE",
"DWRITE_NUMBER_SUBSTITUTION_METHOD_CONTEXTUAL",
"DWRITE_NUMBER_SUBSTITUTION_METHOD_NONE",
"DWRITE_NUMBER_SUBSTITUTION_METHOD_NATIONAL",
"DWRITE_NUMBER_SUBSTITUTION_METHOD_TRADITIONAL",
])
IDWriteNumberSubstitution.methods += [
    # IDWriteNumberSubstitution exposes no methods beyond IUnknown.
]
DWRITE_SHAPING_TEXT_PROPERTIES = Struct("DWRITE_SHAPING_TEXT_PROPERTIES", [
(UINT16, "isShapedAlone"),
(UINT16, "reserved"),
])
DWRITE_SHAPING_GLYPH_PROPERTIES = Struct("DWRITE_SHAPING_GLYPH_PROPERTIES", [
(UINT16, "justification"),
(UINT16, "isClusterStart"),
(UINT16, "isDiacritic"),
(UINT16, "isZeroWidthSpace"),
(UINT16, "reserved"),
])
IDWriteTextAnalysisSource.methods += [
StdMethod(HRESULT, "GetTextAtPosition", [(UINT32, "textPosition"), Out(Pointer(Pointer(Const(WCHAR))), "textString"), Out(Pointer(UINT32), "textLength")]),
StdMethod(HRESULT, "GetTextBeforePosition", [(UINT32, "textPosition"), Out(Pointer(Pointer(Const(WCHAR))), "textString"), Out(Pointer(UINT32), "textLength")]),
StdMethod(DWRITE_READING_DIRECTION, "GetParagraphReadingDirection", []),
StdMethod(HRESULT, "GetLocaleName", [(UINT32, "textPosition"), Out(Pointer(UINT32), "textLength"), Out(Pointer(Pointer(Const(WCHAR))), "localeName")]),
StdMethod(HRESULT, "GetNumberSubstitution", [(UINT32, "textPosition"), Out(Pointer(UINT32), "textLength"), Out(Pointer(ObjPointer(IDWriteNumberSubstitution)), "numberSubstitution")]),
]
IDWriteTextAnalysisSink.methods += [
StdMethod(HRESULT, "SetScriptAnalysis", [(UINT32, "textPosition"), (UINT32, "textLength"), (Pointer(Const(DWRITE_SCRIPT_ANALYSIS)), "scriptAnalysis")]),
StdMethod(HRESULT, "SetLineBreakpoints", [(UINT32, "textPosition"), (UINT32, "textLength"), (Pointer(Const(DWRITE_LINE_BREAKPOINT)), "lineBreakpoints")]),
StdMethod(HRESULT, "SetBidiLevel", [(UINT32, "textPosition"), (UINT32, "textLength"), (UINT8, "explicitLevel"), (UINT8, "resolvedLevel")]),
StdMethod(HRESULT, "SetNumberSubstitution", [(UINT32, "textPosition"), (UINT32, "textLength"), (ObjPointer(IDWriteNumberSubstitution), "numberSubstitution")]),
]
IDWriteTextAnalyzer.methods += [
StdMethod(HRESULT, "AnalyzeScript", [(ObjPointer(IDWriteTextAnalysisSource), "analysisSource"), (UINT32, "textPosition"), (UINT32, "textLength"), (ObjPointer(IDWriteTextAnalysisSink), "analysisSink")]),
StdMethod(HRESULT, "AnalyzeBidi", [(ObjPointer(IDWriteTextAnalysisSource), "analysisSource"), (UINT32, "textPosition"), (UINT32, "textLength"), (ObjPointer(IDWriteTextAnalysisSink), "analysisSink")]),
StdMethod(HRESULT, "AnalyzeNumberSubstitution", [(ObjPointer(IDWriteTextAnalysisSource), "analysisSource"), (UINT32, "textPosition"), (UINT32, "textLength"), (ObjPointer(IDWriteTextAnalysisSink), "analysisSink")]),
StdMethod(HRESULT, "AnalyzeLineBreakpoints", [(ObjPointer(IDWriteTextAnalysisSource), "analysisSource"), (UINT32, "textPosition"), (UINT32, "textLength"), (ObjPointer(IDWriteTextAnalysisSink), "analysisSink")]),
StdMethod(HRESULT, "GetGlyphs", [(String(Const(WCHAR), "textLength", wide=True), "textString"), (UINT32, "textLength"), (ObjPointer(IDWriteFontFace), "fontFace"), (BOOL, "isSideways"), (BOOL, "isRightToLeft"), (Pointer(Const(DWRITE_SCRIPT_ANALYSIS)), "scriptAnalysis"), (PCWSTR, "localeName"), (ObjPointer(IDWriteNumberSubstitution), "numberSubstitution"), (OpaquePointer(Pointer(Const(DWRITE_TYPOGRAPHIC_FEATURES))), "features"), (Pointer(Const(UINT32)), "featureRangeLengths"), (UINT32, "featureRanges"), (UINT32, "maxGlyphCount"), Out(Pointer(UINT16), "clusterMap"), Out(Pointer(DWRITE_SHAPING_TEXT_PROPERTIES), "textProps"), Out(Pointer(UINT16), "glyphIndices"), Out(Pointer(DWRITE_SHAPING_GLYPH_PROPERTIES), "glyphProps"), Out(Pointer(UINT32), "actualGlyphCount")]),
StdMethod(HRESULT, "GetGlyphPlacements", [(String(Const(WCHAR), "textLength", wide=True), "textString"), (Array(Const(UINT16), "textLength"), "clusterMap"), (Array(DWRITE_SHAPING_TEXT_PROPERTIES, "textLength"), "textProps"), (UINT32, "textLength"), (Array(Const(UINT16), "glyphCount"), "glyphIndices"), (Array(Const(DWRITE_SHAPING_GLYPH_PROPERTIES), "glyphCount"), "glyphProps"), (UINT32, "glyphCount"), (ObjPointer(IDWriteFontFace), "fontFace"), (FLOAT, "fontEmSize"), (BOOL, "isSideways"), (BOOL, "isRightToLeft"), (Pointer(Const(DWRITE_SCRIPT_ANALYSIS)), "scriptAnalysis"), (LPCWSTR, "localeName"), (Array(Pointer(Const(DWRITE_TYPOGRAPHIC_FEATURES)), "featureRanges"), "features"), (Array(Const(UINT32), "featureRanges"), "featureRangeLengths"), (UINT32, "featureRanges"), Out(Array(FLOAT, "glyphCount"), "glyphAdvances"), Out(Array(DWRITE_GLYPH_OFFSET, "glyphCount"), "glyphOffsets")]),
StdMethod(HRESULT, "GetGdiCompatibleGlyphPlacements", [(String(Const(WCHAR), "textLength", wide=True), "textString"), (Array(Const(UINT16), "textLength"), "clusterMap"), (Array(DWRITE_SHAPING_TEXT_PROPERTIES, "textLength"), "textProps"), (UINT32, "textLength"), (Array(Const(UINT16), "glyphCount"), "glyphIndices"), (Array(Const(DWRITE_SHAPING_GLYPH_PROPERTIES), "glyphCount"), "glyphProps"), (UINT32, "glyphCount"), (ObjPointer(IDWriteFontFace), "fontFace"), (FLOAT, "fontEmSize"), (FLOAT, "pixelsPerDip"), (Pointer(Const(DWRITE_MATRIX)), "transform"), (BOOL, "useGdiNatural"), (BOOL, "isSideways"), (BOOL, "isRightToLeft"), (Pointer(Const(DWRITE_SCRIPT_ANALYSIS)), "scriptAnalysis"), (LPCWSTR, "localeName"), (Array(Pointer(Const(DWRITE_TYPOGRAPHIC_FEATURES)), "featureRanges"), "features"), (Array(Const(UINT32), "featureRanges"), "featureRangeLengths"), (UINT32, "featureRanges"), Out(Array(FLOAT, "glyphCount"), "glyphAdvances"), Out(Array(DWRITE_GLYPH_OFFSET, "glyphCount"), "glyphOffsets")]),
]
DWRITE_GLYPH_RUN = Struct("DWRITE_GLYPH_RUN", [
(ObjPointer(IDWriteFontFace), "fontFace"),
(FLOAT, "fontEmSize"),
(UINT32, "glyphCount"),
(Pointer(Const(UINT16)), "glyphIndices"),
(Pointer(Const(FLOAT)), "glyphAdvances"),
(Pointer(Const(DWRITE_GLYPH_OFFSET)), "glyphOffsets"),
(BOOL, "isSideways"),
(UINT32, "bidiLevel"),
])
DWRITE_GLYPH_RUN_DESCRIPTION = Struct("DWRITE_GLYPH_RUN_DESCRIPTION", [
(PCWSTR, "localeName"),
(String(Const(WCHAR), "{self}.stringLength", wide=True), "string"),
(UINT32, "stringLength"),
(Pointer(Const(UINT16)), "clusterMap"),
(UINT32, "textPosition"),
])
DWRITE_UNDERLINE = Struct("DWRITE_UNDERLINE", [
(FLOAT, "width"),
(FLOAT, "thickness"),
(FLOAT, "offset"),
(FLOAT, "runHeight"),
(DWRITE_READING_DIRECTION, "readingDirection"),
(DWRITE_FLOW_DIRECTION, "flowDirection"),
(PCWSTR, "localeName"),
(DWRITE_MEASURING_MODE, "measuringMode"),
])
DWRITE_STRIKETHROUGH = Struct("DWRITE_STRIKETHROUGH", [
(FLOAT, "width"),
(FLOAT, "thickness"),
(FLOAT, "offset"),
(DWRITE_READING_DIRECTION, "readingDirection"),
(DWRITE_FLOW_DIRECTION, "flowDirection"),
(PCWSTR, "localeName"),
(DWRITE_MEASURING_MODE, "measuringMode"),
])
DWRITE_LINE_METRICS = Struct("DWRITE_LINE_METRICS", [
(UINT32, "length"),
(UINT32, "trailingWhitespaceLength"),
(UINT32, "newlineLength"),
(FLOAT, "height"),
(FLOAT, "baseline"),
(BOOL, "isTrimmed"),
])
DWRITE_CLUSTER_METRICS = Struct("DWRITE_CLUSTER_METRICS", [
(FLOAT, "width"),
(UINT16, "length"),
(UINT16, "canWrapLineAfter"),
(UINT16, "isWhitespace"),
(UINT16, "isNewline"),
(UINT16, "isSoftHyphen"),
(UINT16, "isRightToLeft"),
(UINT16, "padding"),
])
DWRITE_TEXT_METRICS = Struct("DWRITE_TEXT_METRICS", [
(FLOAT, "left"),
(FLOAT, "top"),
(FLOAT, "width"),
(FLOAT, "widthIncludingTrailingWhitespace"),
(FLOAT, "height"),
(FLOAT, "layoutWidth"),
(FLOAT, "layoutHeight"),
(UINT32, "maxBidiReorderingDepth"),
(UINT32, "lineCount"),
])
DWRITE_INLINE_OBJECT_METRICS = Struct("DWRITE_INLINE_OBJECT_METRICS", [
(FLOAT, "width"),
(FLOAT, "height"),
(FLOAT, "baseline"),
(BOOL, "supportsSideways"),
])
DWRITE_OVERHANG_METRICS = Struct("DWRITE_OVERHANG_METRICS", [
(FLOAT, "left"),
(FLOAT, "top"),
(FLOAT, "right"),
(FLOAT, "bottom"),
])
DWRITE_HIT_TEST_METRICS = Struct("DWRITE_HIT_TEST_METRICS", [
(UINT32, "textPosition"),
(UINT32, "length"),
(FLOAT, "left"),
(FLOAT, "top"),
(FLOAT, "width"),
(FLOAT, "height"),
(UINT32, "bidiLevel"),
(BOOL, "isText"),
(BOOL, "isTrimmed"),
])
IDWriteInlineObject.methods += [
StdMethod(HRESULT, "Draw", [(OpaquePointer(Void), "clientDrawingContext"), (OpaquePointer(IDWriteTextRenderer), "renderer"), (FLOAT, "originX"), (FLOAT, "originY"), (BOOL, "isSideways"), (BOOL, "isRightToLeft"), (ObjPointer(IUnknown), "clientDrawingEffect")]),
StdMethod(HRESULT, "GetMetrics", [Out(Pointer(DWRITE_INLINE_OBJECT_METRICS), "metrics")]),
StdMethod(HRESULT, "GetOverhangMetrics", [Out(Pointer(DWRITE_OVERHANG_METRICS), "overhangs")]),
StdMethod(HRESULT, "GetBreakConditions", [Out(Pointer(DWRITE_BREAK_CONDITION), "breakConditionBefore"), Out(Pointer(DWRITE_BREAK_CONDITION), "breakConditionAfter")]),
]
IDWritePixelSnapping.methods += [
StdMethod(HRESULT, "IsPixelSnappingDisabled", [(OpaquePointer(Void), "clientDrawingContext"), Out(Pointer(BOOL), "isDisabled")]),
StdMethod(HRESULT, "GetCurrentTransform", [(OpaquePointer(Void), "clientDrawingContext"), Out(Pointer(DWRITE_MATRIX), "transform")]),
StdMethod(HRESULT, "GetPixelsPerDip", [(OpaquePointer(Void), "clientDrawingContext"), Out(Pointer(FLOAT), "pixelsPerDip")]),
]
IDWriteTextRenderer.methods += [
StdMethod(HRESULT, "DrawGlyphRun", [(OpaquePointer(Void), "clientDrawingContext"), (FLOAT, "baselineOriginX"), (FLOAT, "baselineOriginY"), (DWRITE_MEASURING_MODE, "measuringMode"), (Pointer(Const(DWRITE_GLYPH_RUN)), "glyphRun"), (Pointer(Const(DWRITE_GLYPH_RUN_DESCRIPTION)), "glyphRunDescription"), (ObjPointer(IUnknown), "clientDrawingEffect")]),
StdMethod(HRESULT, "DrawUnderline", [(OpaquePointer(Void), "clientDrawingContext"), (FLOAT, "baselineOriginX"), (FLOAT, "baselineOriginY"), (Pointer(Const(DWRITE_UNDERLINE)), "underline"), (ObjPointer(IUnknown), "clientDrawingEffect")]),
StdMethod(HRESULT, "DrawStrikethrough", [(OpaquePointer(Void), "clientDrawingContext"), (FLOAT, "baselineOriginX"), (FLOAT, "baselineOriginY"), (Pointer(Const(DWRITE_STRIKETHROUGH)), "strikethrough"), (ObjPointer(IUnknown), "clientDrawingEffect")]),
StdMethod(HRESULT, "DrawInlineObject", [(OpaquePointer(Void), "clientDrawingContext"), (FLOAT, "originX"), (FLOAT, "originY"), (OpaquePointer(IDWriteInlineObject), "inlineObject"), (BOOL, "isSideways"), (BOOL, "isRightToLeft"), (ObjPointer(IUnknown), "clientDrawingEffect")]),
]
IDWriteTextLayout.methods += [
StdMethod(HRESULT, "SetMaxWidth", [(FLOAT, "maxWidth")]),
StdMethod(HRESULT, "SetMaxHeight", [(FLOAT, "maxHeight")]),
StdMethod(HRESULT, "SetFontCollection", [(ObjPointer(IDWriteFontCollection), "fontCollection"), (DWRITE_TEXT_RANGE, "textRange")]),
StdMethod(HRESULT, "SetFontFamilyName", [(PCWSTR, "fontFamilyName"), (DWRITE_TEXT_RANGE, "textRange")]),
StdMethod(HRESULT, "SetFontWeight", [(DWRITE_FONT_WEIGHT, "fontWeight"), (DWRITE_TEXT_RANGE, "textRange")]),
StdMethod(HRESULT, "SetFontStyle", [(DWRITE_FONT_STYLE, "fontStyle"), (DWRITE_TEXT_RANGE, "textRange")]),
StdMethod(HRESULT, "SetFontStretch", [(DWRITE_FONT_STRETCH, "fontStretch"), (DWRITE_TEXT_RANGE, "textRange")]),
StdMethod(HRESULT, "SetFontSize", [(FLOAT, "fontSize"), (DWRITE_TEXT_RANGE, "textRange")]),
StdMethod(HRESULT, "SetUnderline", [(BOOL, "hasUnderline"), (DWRITE_TEXT_RANGE, "textRange")]),
StdMethod(HRESULT, "SetStrikethrough", [(BOOL, "hasStrikethrough"), (DWRITE_TEXT_RANGE, "textRange")]),
StdMethod(HRESULT, "SetDrawingEffect", [(ObjPointer(IUnknown), "drawingEffect"), (DWRITE_TEXT_RANGE, "textRange")]),
StdMethod(HRESULT, "SetInlineObject", [(OpaquePointer(IDWriteInlineObject), "inlineObject"), (DWRITE_TEXT_RANGE, "textRange")]),
StdMethod(HRESULT, "SetTypography", [(ObjPointer(IDWriteTypography), "typography"), (DWRITE_TEXT_RANGE, "textRange")]),
StdMethod(HRESULT, "SetLocaleName", [(PCWSTR, "localeName"), (DWRITE_TEXT_RANGE, "textRange")]),
StdMethod(FLOAT, "GetMaxWidth", []),
StdMethod(FLOAT, "GetMaxHeight", []),
StdMethod(HRESULT, "GetFontCollection", [(UINT32, "currentPosition"), Out(Pointer(ObjPointer(IDWriteFontCollection)), "fontCollection"), Out(Pointer(DWRITE_TEXT_RANGE), "textRange")]),
StdMethod(HRESULT, "GetFontFamilyNameLength", [(UINT32, "currentPosition"), Out(Pointer(UINT32), "nameLength"), Out(Pointer(DWRITE_TEXT_RANGE), "textRange")]),
StdMethod(HRESULT, "GetFontFamilyName", [(UINT32, "currentPosition"), Out(Pointer(WCHAR), "fontFamilyName"), (UINT32, "nameSize"), Out(Pointer(DWRITE_TEXT_RANGE), "textRange")]),
StdMethod(HRESULT, "GetFontWeight", [(UINT32, "currentPosition"), Out(Pointer(DWRITE_FONT_WEIGHT), "fontWeight"), Out(Pointer(DWRITE_TEXT_RANGE), "textRange")]),
StdMethod(HRESULT, "GetFontStyle", [(UINT32, "currentPosition"), Out(Pointer(DWRITE_FONT_STYLE), "fontStyle"), Out(Pointer(DWRITE_TEXT_RANGE), "textRange")]),
StdMethod(HRESULT, "GetFontStretch", [(UINT32, "currentPosition"), Out(Pointer(DWRITE_FONT_STRETCH), "fontStretch"), Out(Pointer(DWRITE_TEXT_RANGE), "textRange")]),
StdMethod(HRESULT, "GetFontSize", [(UINT32, "currentPosition"), Out(Pointer(FLOAT), "fontSize"), Out(Pointer(DWRITE_TEXT_RANGE), "textRange")]),
StdMethod(HRESULT, "GetUnderline", [(UINT32, "currentPosition"), Out(Pointer(BOOL), "hasUnderline"), Out(Pointer(DWRITE_TEXT_RANGE), "textRange")]),
StdMethod(HRESULT, "GetStrikethrough", [(UINT32, "currentPosition"), Out(Pointer(BOOL), "hasStrikethrough"), Out(Pointer(DWRITE_TEXT_RANGE), "textRange")]),
StdMethod(HRESULT, "GetDrawingEffect", [(UINT32, "currentPosition"), Out(Pointer(ObjPointer(IUnknown)), "drawingEffect"), Out(Pointer(DWRITE_TEXT_RANGE), "textRange")]),
StdMethod(HRESULT, "GetInlineObject", [(UINT32, "currentPosition"), Out(Pointer(OpaquePointer(IDWriteInlineObject)), "inlineObject"), Out(Pointer(DWRITE_TEXT_RANGE), "textRange")]),
StdMethod(HRESULT, "GetTypography", [(UINT32, "currentPosition"), Out(Pointer(ObjPointer(IDWriteTypography)), "typography"), Out(Pointer(DWRITE_TEXT_RANGE), "textRange")]),
StdMethod(HRESULT, "GetLocaleNameLength", [(UINT32, "currentPosition"), Out(Pointer(UINT32), "nameLength"), Out(Pointer(DWRITE_TEXT_RANGE), "textRange")]),
StdMethod(HRESULT, "GetLocaleName", [(UINT32, "currentPosition"), Out(Pointer(WCHAR), "localeName"), (UINT32, "nameSize"), Out(Pointer(DWRITE_TEXT_RANGE), "textRange")]),
StdMethod(HRESULT, "Draw", [(OpaquePointer(Void), "clientDrawingContext"), (OpaquePointer(IDWriteTextRenderer), "renderer"), (FLOAT, "originX"), (FLOAT, "originY")]),
StdMethod(HRESULT, "GetLineMetrics", [Out(Pointer(DWRITE_LINE_METRICS), "lineMetrics"), (UINT32, "maxLineCount"), Out(Pointer(UINT32), "actualLineCount")]),
StdMethod(HRESULT, "GetMetrics", [Out(Pointer(DWRITE_TEXT_METRICS), "textMetrics")]),
StdMethod(HRESULT, "GetOverhangMetrics", [Out(Pointer(DWRITE_OVERHANG_METRICS), "overhangs")]),
StdMethod(HRESULT, "GetClusterMetrics", [Out(Pointer(DWRITE_CLUSTER_METRICS), "clusterMetrics"), (UINT32, "maxClusterCount"), Out(Pointer(UINT32), "actualClusterCount")]),
StdMethod(HRESULT, "DetermineMinWidth", [Out(Pointer(FLOAT), "minWidth")]),
StdMethod(HRESULT, "HitTestPoint", [(FLOAT, "pointX"), (FLOAT, "pointY"), Out(Pointer(BOOL), "isTrailingHit"), Out(Pointer(BOOL), "isInside"), Out(Pointer(DWRITE_HIT_TEST_METRICS), "hitTestMetrics")]),
StdMethod(HRESULT, "HitTestTextPosition", [(UINT32, "textPosition"), (BOOL, "isTrailingHit"), Out(Pointer(FLOAT), "pointX"), Out(Pointer(FLOAT), "pointY"), Out(Pointer(DWRITE_HIT_TEST_METRICS), "hitTestMetrics")]),
StdMethod(HRESULT, "HitTestTextRange", [(UINT32, "textPosition"), (UINT32, "textLength"), (FLOAT, "originX"), (FLOAT, "originY"), Out(Pointer(DWRITE_HIT_TEST_METRICS), "hitTestMetrics"), (UINT32, "maxHitTestMetricsCount"), Out(Pointer(UINT32), "actualHitTestMetricsCount")]),
]
IDWriteBitmapRenderTarget.methods += [
StdMethod(HRESULT, "DrawGlyphRun", [(FLOAT, "baselineOriginX"), (FLOAT, "baselineOriginY"), (DWRITE_MEASURING_MODE, "measuringMode"), (Pointer(Const(DWRITE_GLYPH_RUN)), "glyphRun"), (ObjPointer(IDWriteRenderingParams), "renderingParams"), (COLORREF, "textColor"), Out(Pointer(RECT), "blackBoxRect")]),
StdMethod(HDC, "GetMemoryDC", []),
StdMethod(FLOAT, "GetPixelsPerDip", []),
StdMethod(HRESULT, "SetPixelsPerDip", [(FLOAT, "pixelsPerDip")]),
StdMethod(HRESULT, "GetCurrentTransform", [Out(Pointer(DWRITE_MATRIX), "transform")]),
StdMethod(HRESULT, "SetCurrentTransform", [(Pointer(Const(DWRITE_MATRIX)), "transform")]),
StdMethod(HRESULT, "GetSize", [Out(Pointer(SIZE), "size")]),
StdMethod(HRESULT, "Resize", [(UINT32, "width"), (UINT32, "height")]),
]
IDWriteGdiInterop.methods += [
StdMethod(HRESULT, "CreateFontFromLOGFONT", [(Pointer(Const(LOGFONTW)), "logFont"), Out(Pointer(ObjPointer(IDWriteFont)), "font")]),
StdMethod(HRESULT, "ConvertFontToLOGFONT", [(ObjPointer(IDWriteFont), "font"), Out(Pointer(LOGFONTW), "logFont"), Out(Pointer(BOOL), "isSystemFont")]),
StdMethod(HRESULT, "ConvertFontFaceToLOGFONT", [(ObjPointer(IDWriteFontFace), "font"), Out(Pointer(LOGFONTW), "logFont")]),
StdMethod(HRESULT, "CreateFontFaceFromHdc", [(HDC, "hdc"), Out(Pointer(ObjPointer(IDWriteFontFace)), "fontFace")]),
StdMethod(HRESULT, "CreateBitmapRenderTarget", [(HDC, "hdc"), (UINT32, "width"), (UINT32, "height"), Out(Pointer(ObjPointer(IDWriteBitmapRenderTarget)), "renderTarget")]),
]
DWRITE_TEXTURE_TYPE = Enum("DWRITE_TEXTURE_TYPE", [
"DWRITE_TEXTURE_ALIASED_1x1",
"DWRITE_TEXTURE_CLEARTYPE_3x1",
])
IDWriteGlyphRunAnalysis.methods += [
StdMethod(HRESULT, "GetAlphaTextureBounds", [(DWRITE_TEXTURE_TYPE, "textureType"), Out(Pointer(RECT), "textureBounds")]),
StdMethod(HRESULT, "CreateAlphaTexture", [(DWRITE_TEXTURE_TYPE, "textureType"), (Pointer(Const(RECT)), "textureBounds"), Out(Pointer(BYTE), "alphaValues"), (UINT32, "bufferSize")]),
StdMethod(HRESULT, "GetAlphaBlendParams", [(ObjPointer(IDWriteRenderingParams), "renderingParams"), Out(Pointer(FLOAT), "blendGamma"), Out(Pointer(FLOAT), "blendEnhancedContrast"), Out(Pointer(FLOAT), "blendClearTypeLevel")]),
]
IDWriteFactory.methods += [
StdMethod(HRESULT, "GetSystemFontCollection", [Out(Pointer(ObjPointer(IDWriteFontCollection)), "fontCollection"), (BOOL, "checkForUpdates")]),
StdMethod(HRESULT, "CreateCustomFontCollection", [(ObjPointer(IDWriteFontCollectionLoader), "collectionLoader"), (OpaquePointer(Const(Void)), "collectionKey"), (UINT32, "collectionKeySize"), Out(Pointer(ObjPointer(IDWriteFontCollection)), "fontCollection")]),
StdMethod(HRESULT, "RegisterFontCollectionLoader", [(ObjPointer(IDWriteFontCollectionLoader), "fontCollectionLoader")]),
StdMethod(HRESULT, "UnregisterFontCollectionLoader", [(ObjPointer(IDWriteFontCollectionLoader), "fontCollectionLoader")]),
StdMethod(HRESULT, "CreateFontFileReference", [(PCWSTR, "filePath"), (Pointer(Const(FILETIME)), "lastWriteTime"), Out(Pointer(ObjPointer(IDWriteFontFile)), "fontFile")]),
StdMethod(HRESULT, "CreateCustomFontFileReference", [(OpaquePointer(Const(Void)), "fontFileReferenceKey"), (UINT32, "fontFileReferenceKeySize"), (ObjPointer(IDWriteFontFileLoader), "fontFileLoader"), Out(Pointer(ObjPointer(IDWriteFontFile)), "fontFile")]),
StdMethod(HRESULT, "CreateFontFace", [(DWRITE_FONT_FACE_TYPE, "fontFaceType"), (UINT32, "numberOfFiles"), (Array(Const(ObjPointer(IDWriteFontFile)), "numberOfFiles"), "fontFiles"), (UINT32, "faceIndex"), (DWRITE_FONT_SIMULATIONS, "fontFaceSimulationFlags"), Out(Pointer(ObjPointer(IDWriteFontFace)), "fontFace")]),
StdMethod(HRESULT, "CreateRenderingParams", [Out(Pointer(ObjPointer(IDWriteRenderingParams)), "renderingParams")]),
StdMethod(HRESULT, "CreateMonitorRenderingParams", [(HMONITOR, "monitor"), Out(Pointer(ObjPointer(IDWriteRenderingParams)), "renderingParams")]),
StdMethod(HRESULT, "CreateCustomRenderingParams", [(FLOAT, "gamma"), (FLOAT, "enhancedContrast"), (FLOAT, "clearTypeLevel"), (DWRITE_PIXEL_GEOMETRY, "pixelGeometry"), (DWRITE_RENDERING_MODE, "renderingMode"), Out(Pointer(ObjPointer(IDWriteRenderingParams)), "renderingParams")]),
StdMethod(HRESULT, "RegisterFontFileLoader", [(ObjPointer(IDWriteFontFileLoader), "fontFileLoader")]),
StdMethod(HRESULT, "UnregisterFontFileLoader", [(ObjPointer(IDWriteFontFileLoader), "fontFileLoader")]),
StdMethod(HRESULT, "CreateTextFormat", [(PCWSTR, "fontFamilyName"), (ObjPointer(IDWriteFontCollection), "fontCollection"), (DWRITE_FONT_WEIGHT, "fontWeight"), (DWRITE_FONT_STYLE, "fontStyle"), (DWRITE_FONT_STRETCH, "fontStretch"), (FLOAT, "fontSize"), (PCWSTR, "localeName"), Out(Pointer(ObjPointer(IDWriteTextFormat)), "textFormat")]),
StdMethod(HRESULT, "CreateTypography", [Out(Pointer(ObjPointer(IDWriteTypography)), "typography")]),
StdMethod(HRESULT, "GetGdiInterop", [Out(Pointer(ObjPointer(IDWriteGdiInterop)), "gdiInterop")]),
StdMethod(HRESULT, "CreateTextLayout", [(String(Const(WCHAR), "stringLength", wide=True), "string"), (UINT32, "stringLength"), (ObjPointer(IDWriteTextFormat), "textFormat"), (FLOAT, "maxWidth"), (FLOAT, "maxHeight"), Out(Pointer(ObjPointer(IDWriteTextLayout)), "textLayout")]),
StdMethod(HRESULT, "CreateGdiCompatibleTextLayout", [(String(Const(WCHAR), "stringLength", wide=True), "string"), (UINT32, "stringLength"), (ObjPointer(IDWriteTextFormat), "textFormat"), (FLOAT, "layoutWidth"), (FLOAT, "layoutHeight"), (FLOAT, "pixelsPerDip"), (Pointer(Const(DWRITE_MATRIX)), "transform"), (BOOL, "useGdiNatural"), Out(Pointer(ObjPointer(IDWriteTextLayout)), "textLayout")]),
StdMethod(HRESULT, "CreateEllipsisTrimmingSign", [(ObjPointer(IDWriteTextFormat), "textFormat"), Out(Pointer(OpaquePointer(IDWriteInlineObject)), "trimmingSign")]),
StdMethod(HRESULT, "CreateTextAnalyzer", [Out(Pointer(ObjPointer(IDWriteTextAnalyzer)), "textAnalyzer")]),
StdMethod(HRESULT, "CreateNumberSubstitution", [(DWRITE_NUMBER_SUBSTITUTION_METHOD, "substitutionMethod"), (PCWSTR, "localeName"), (BOOL, "ignoreUserOverride"), Out(Pointer(ObjPointer(IDWriteNumberSubstitution)), "numberSubstitution")]),
StdMethod(HRESULT, "CreateGlyphRunAnalysis", [(Pointer(Const(DWRITE_GLYPH_RUN)), "glyphRun"), (FLOAT, "pixelsPerDip"), (Pointer(Const(DWRITE_MATRIX)), "transform"), (DWRITE_RENDERING_MODE, "renderingMode"), (DWRITE_MEASURING_MODE, "measuringMode"), (FLOAT, "baselineOriginX"), (FLOAT, "baselineOriginY"), Out(Pointer(ObjPointer(IDWriteGlyphRunAnalysis)), "glyphRunAnalysis")]),
]
dwrite = Module("dwrite")
dwrite.addInterfaces([
IDWriteFactory
])
dwrite.addFunctions([
StdFunction(HRESULT, "DWriteCreateFactory", [(DWRITE_FACTORY_TYPE, "factoryType"), (REFIID, "iid"), Out(Pointer(ObjPointer(IUnknown)), "factory")]),
])
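# Pattern note: every binding above follows the same declarative shape --
# C-level types are described with Enum/Struct, vtable entries are appended to
# an Interface via StdMethod (with Out() marking out-parameters), and the
# result is registered on the Module. A minimal sketch of adding one more
# interface, assuming the same DSL helpers are in scope; IDWriteExample is
# hypothetical and not a real DirectWrite type:
#
#     IDWriteExample = Interface("IDWriteExample", IUnknown)
#     IDWriteExample.methods += [
#         StdMethod(HRESULT, "GetValue", [Out(Pointer(UINT32), "value")]),
#     ]
#     dwrite.addInterfaces([IDWriteExample])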
#
# dwrite_1
#
DWRITE_PANOSE_FAMILY = Enum("DWRITE_PANOSE_FAMILY", [
"DWRITE_PANOSE_FAMILY_ANY",
"DWRITE_PANOSE_FAMILY_NO_FIT",
"DWRITE_PANOSE_FAMILY_TEXT_DISPLAY",
"DWRITE_PANOSE_FAMILY_SCRIPT",
"DWRITE_PANOSE_FAMILY_DECORATIVE",
"DWRITE_PANOSE_FAMILY_SYMBOL",
"DWRITE_PANOSE_FAMILY_PICTORIAL",
])
DWRITE_PANOSE_SERIF_STYLE = Enum("DWRITE_PANOSE_SERIF_STYLE", [
"DWRITE_PANOSE_SERIF_STYLE_ANY",
"DWRITE_PANOSE_SERIF_STYLE_NO_FIT",
"DWRITE_PANOSE_SERIF_STYLE_COVE",
"DWRITE_PANOSE_SERIF_STYLE_OBTUSE_COVE",
"DWRITE_PANOSE_SERIF_STYLE_SQUARE_COVE",
"DWRITE_PANOSE_SERIF_STYLE_OBTUSE_SQUARE_COVE",
"DWRITE_PANOSE_SERIF_STYLE_SQUARE",
"DWRITE_PANOSE_SERIF_STYLE_THIN",
"DWRITE_PANOSE_SERIF_STYLE_OVAL",
"DWRITE_PANOSE_SERIF_STYLE_EXAGGERATED",
"DWRITE_PANOSE_SERIF_STYLE_TRIANGLE",
"DWRITE_PANOSE_SERIF_STYLE_NORMAL_SANS",
"DWRITE_PANOSE_SERIF_STYLE_OBTUSE_SANS",
"DWRITE_PANOSE_SERIF_STYLE_PERPENDICULAR_SANS",
"DWRITE_PANOSE_SERIF_STYLE_FLARED",
"DWRITE_PANOSE_SERIF_STYLE_ROUNDED",
"DWRITE_PANOSE_SERIF_STYLE_SCRIPT",
"DWRITE_PANOSE_SERIF_STYLE_PERP_SANS",
"DWRITE_PANOSE_SERIF_STYLE_BONE",
])
DWRITE_PANOSE_WEIGHT = Enum("DWRITE_PANOSE_WEIGHT", [
"DWRITE_PANOSE_WEIGHT_ANY",
"DWRITE_PANOSE_WEIGHT_NO_FIT",
"DWRITE_PANOSE_WEIGHT_VERY_LIGHT",
"DWRITE_PANOSE_WEIGHT_LIGHT",
"DWRITE_PANOSE_WEIGHT_THIN",
"DWRITE_PANOSE_WEIGHT_BOOK",
"DWRITE_PANOSE_WEIGHT_MEDIUM",
"DWRITE_PANOSE_WEIGHT_DEMI",
"DWRITE_PANOSE_WEIGHT_BOLD",
"DWRITE_PANOSE_WEIGHT_HEAVY",
"DWRITE_PANOSE_WEIGHT_BLACK",
"DWRITE_PANOSE_WEIGHT_EXTRA_BLACK",
"DWRITE_PANOSE_WEIGHT_NORD",
])
DWRITE_PANOSE_PROPORTION = Enum("DWRITE_PANOSE_PROPORTION", [
"DWRITE_PANOSE_PROPORTION_ANY",
"DWRITE_PANOSE_PROPORTION_NO_FIT",
"DWRITE_PANOSE_PROPORTION_OLD_STYLE",
"DWRITE_PANOSE_PROPORTION_MODERN",
"DWRITE_PANOSE_PROPORTION_EVEN_WIDTH",
"DWRITE_PANOSE_PROPORTION_EXPANDED",
"DWRITE_PANOSE_PROPORTION_CONDENSED",
"DWRITE_PANOSE_PROPORTION_VERY_EXPANDED",
"DWRITE_PANOSE_PROPORTION_VERY_CONDENSED",
"DWRITE_PANOSE_PROPORTION_MONOSPACED",
])
DWRITE_PANOSE_CONTRAST = Enum("DWRITE_PANOSE_CONTRAST", [
"DWRITE_PANOSE_CONTRAST_ANY",
"DWRITE_PANOSE_CONTRAST_NO_FIT",
"DWRITE_PANOSE_CONTRAST_NONE",
"DWRITE_PANOSE_CONTRAST_VERY_LOW",
"DWRITE_PANOSE_CONTRAST_LOW",
"DWRITE_PANOSE_CONTRAST_MEDIUM_LOW",
"DWRITE_PANOSE_CONTRAST_MEDIUM",
"DWRITE_PANOSE_CONTRAST_MEDIUM_HIGH",
"DWRITE_PANOSE_CONTRAST_HIGH",
"DWRITE_PANOSE_CONTRAST_VERY_HIGH",
"DWRITE_PANOSE_CONTRAST_HORIZONTAL_LOW",
"DWRITE_PANOSE_CONTRAST_HORIZONTAL_MEDIUM",
"DWRITE_PANOSE_CONTRAST_HORIZONTAL_HIGH",
"DWRITE_PANOSE_CONTRAST_BROKEN",
])
DWRITE_PANOSE_STROKE_VARIATION = Enum("DWRITE_PANOSE_STROKE_VARIATION", [
"DWRITE_PANOSE_STROKE_VARIATION_ANY",
"DWRITE_PANOSE_STROKE_VARIATION_NO_FIT",
"DWRITE_PANOSE_STROKE_VARIATION_NO_VARIATION",
"DWRITE_PANOSE_STROKE_VARIATION_GRADUAL_DIAGONAL",
"DWRITE_PANOSE_STROKE_VARIATION_GRADUAL_TRANSITIONAL",
"DWRITE_PANOSE_STROKE_VARIATION_GRADUAL_VERTICAL",
"DWRITE_PANOSE_STROKE_VARIATION_GRADUAL_HORIZONTAL",
"DWRITE_PANOSE_STROKE_VARIATION_RAPID_VERTICAL",
"DWRITE_PANOSE_STROKE_VARIATION_RAPID_HORIZONTAL",
"DWRITE_PANOSE_STROKE_VARIATION_INSTANT_VERTICAL",
"DWRITE_PANOSE_STROKE_VARIATION_INSTANT_HORIZONTAL",
])
DWRITE_PANOSE_ARM_STYLE = Enum("DWRITE_PANOSE_ARM_STYLE", [
"DWRITE_PANOSE_ARM_STYLE_ANY",
"DWRITE_PANOSE_ARM_STYLE_NO_FIT",
"DWRITE_PANOSE_ARM_STYLE_STRAIGHT_ARMS_HORIZONTAL",
"DWRITE_PANOSE_ARM_STYLE_STRAIGHT_ARMS_WEDGE",
"DWRITE_PANOSE_ARM_STYLE_STRAIGHT_ARMS_VERTICAL",
"DWRITE_PANOSE_ARM_STYLE_STRAIGHT_ARMS_SINGLE_SERIF",
"DWRITE_PANOSE_ARM_STYLE_STRAIGHT_ARMS_DOUBLE_SERIF",
"DWRITE_PANOSE_ARM_STYLE_NONSTRAIGHT_ARMS_HORIZONTAL",
"DWRITE_PANOSE_ARM_STYLE_NONSTRAIGHT_ARMS_WEDGE",
"DWRITE_PANOSE_ARM_STYLE_NONSTRAIGHT_ARMS_VERTICAL",
"DWRITE_PANOSE_ARM_STYLE_NONSTRAIGHT_ARMS_SINGLE_SERIF",
"DWRITE_PANOSE_ARM_STYLE_NONSTRAIGHT_ARMS_DOUBLE_SERIF",
"DWRITE_PANOSE_ARM_STYLE_STRAIGHT_ARMS_HORZ",
"DWRITE_PANOSE_ARM_STYLE_STRAIGHT_ARMS_VERT",
"DWRITE_PANOSE_ARM_STYLE_BENT_ARMS_HORZ",
"DWRITE_PANOSE_ARM_STYLE_BENT_ARMS_WEDGE",
"DWRITE_PANOSE_ARM_STYLE_BENT_ARMS_VERT",
"DWRITE_PANOSE_ARM_STYLE_BENT_ARMS_SINGLE_SERIF",
"DWRITE_PANOSE_ARM_STYLE_BENT_ARMS_DOUBLE_SERIF",
])
DWRITE_PANOSE_LETTERFORM = Enum("DWRITE_PANOSE_LETTERFORM", [
"DWRITE_PANOSE_LETTERFORM_ANY",
"DWRITE_PANOSE_LETTERFORM_NO_FIT",
"DWRITE_PANOSE_LETTERFORM_NORMAL_CONTACT",
"DWRITE_PANOSE_LETTERFORM_NORMAL_WEIGHTED",
"DWRITE_PANOSE_LETTERFORM_NORMAL_BOXED",
"DWRITE_PANOSE_LETTERFORM_NORMAL_FLATTENED",
"DWRITE_PANOSE_LETTERFORM_NORMAL_ROUNDED",
"DWRITE_PANOSE_LETTERFORM_NORMAL_OFF_CENTER",
"DWRITE_PANOSE_LETTERFORM_NORMAL_SQUARE",
"DWRITE_PANOSE_LETTERFORM_OBLIQUE_CONTACT",
"DWRITE_PANOSE_LETTERFORM_OBLIQUE_WEIGHTED",
"DWRITE_PANOSE_LETTERFORM_OBLIQUE_BOXED",
"DWRITE_PANOSE_LETTERFORM_OBLIQUE_FLATTENED",
"DWRITE_PANOSE_LETTERFORM_OBLIQUE_ROUNDED",
"DWRITE_PANOSE_LETTERFORM_OBLIQUE_OFF_CENTER",
"DWRITE_PANOSE_LETTERFORM_OBLIQUE_SQUARE",
])
DWRITE_PANOSE_MIDLINE = Enum("DWRITE_PANOSE_MIDLINE", [
"DWRITE_PANOSE_MIDLINE_ANY",
"DWRITE_PANOSE_MIDLINE_NO_FIT",
"DWRITE_PANOSE_MIDLINE_STANDARD_TRIMMED",
"DWRITE_PANOSE_MIDLINE_STANDARD_POINTED",
"DWRITE_PANOSE_MIDLINE_STANDARD_SERIFED",
"DWRITE_PANOSE_MIDLINE_HIGH_TRIMMED",
"DWRITE_PANOSE_MIDLINE_HIGH_POINTED",
"DWRITE_PANOSE_MIDLINE_HIGH_SERIFED",
"DWRITE_PANOSE_MIDLINE_CONSTANT_TRIMMED",
"DWRITE_PANOSE_MIDLINE_CONSTANT_POINTED",
"DWRITE_PANOSE_MIDLINE_CONSTANT_SERIFED",
"DWRITE_PANOSE_MIDLINE_LOW_TRIMMED",
"DWRITE_PANOSE_MIDLINE_LOW_POINTED",
"DWRITE_PANOSE_MIDLINE_LOW_SERIFED",
])
DWRITE_PANOSE_XHEIGHT = Enum("DWRITE_PANOSE_XHEIGHT", [
"DWRITE_PANOSE_XHEIGHT_ANY",
"DWRITE_PANOSE_XHEIGHT_NO_FIT",
"DWRITE_PANOSE_XHEIGHT_CONSTANT_SMALL",
"DWRITE_PANOSE_XHEIGHT_CONSTANT_STANDARD",
"DWRITE_PANOSE_XHEIGHT_CONSTANT_LARGE",
"DWRITE_PANOSE_XHEIGHT_DUCKING_SMALL",
"DWRITE_PANOSE_XHEIGHT_DUCKING_STANDARD",
"DWRITE_PANOSE_XHEIGHT_DUCKING_LARGE",
"DWRITE_PANOSE_XHEIGHT_CONSTANT_STD",
"DWRITE_PANOSE_XHEIGHT_DUCKING_STD",
])
DWRITE_PANOSE_TOOL_KIND = Enum("DWRITE_PANOSE_TOOL_KIND", [
"DWRITE_PANOSE_TOOL_KIND_ANY",
"DWRITE_PANOSE_TOOL_KIND_NO_FIT",
"DWRITE_PANOSE_TOOL_KIND_FLAT_NIB",
"DWRITE_PANOSE_TOOL_KIND_PRESSURE_POINT",
"DWRITE_PANOSE_TOOL_KIND_ENGRAVED",
"DWRITE_PANOSE_TOOL_KIND_BALL",
"DWRITE_PANOSE_TOOL_KIND_BRUSH",
"DWRITE_PANOSE_TOOL_KIND_ROUGH",
"DWRITE_PANOSE_TOOL_KIND_FELT_PEN_BRUSH_TIP",
"DWRITE_PANOSE_TOOL_KIND_WILD_BRUSH",
])
DWRITE_PANOSE_SPACING = Enum("DWRITE_PANOSE_SPACING", [
"DWRITE_PANOSE_SPACING_ANY",
"DWRITE_PANOSE_SPACING_NO_FIT",
"DWRITE_PANOSE_SPACING_PROPORTIONAL_SPACED",
"DWRITE_PANOSE_SPACING_MONOSPACED",
])
DWRITE_PANOSE_ASPECT_RATIO = Enum("DWRITE_PANOSE_ASPECT_RATIO", [
"DWRITE_PANOSE_ASPECT_RATIO_ANY",
"DWRITE_PANOSE_ASPECT_RATIO_NO_FIT",
"DWRITE_PANOSE_ASPECT_RATIO_VERY_CONDENSED",
"DWRITE_PANOSE_ASPECT_RATIO_CONDENSED",
"DWRITE_PANOSE_ASPECT_RATIO_NORMAL",
"DWRITE_PANOSE_ASPECT_RATIO_EXPANDED",
"DWRITE_PANOSE_ASPECT_RATIO_VERY_EXPANDED",
])
DWRITE_PANOSE_SCRIPT_TOPOLOGY = Enum("DWRITE_PANOSE_SCRIPT_TOPOLOGY", [
"DWRITE_PANOSE_SCRIPT_TOPOLOGY_ANY",
"DWRITE_PANOSE_SCRIPT_TOPOLOGY_NO_FIT",
"DWRITE_PANOSE_SCRIPT_TOPOLOGY_ROMAN_DISCONNECTED",
"DWRITE_PANOSE_SCRIPT_TOPOLOGY_ROMAN_TRAILING",
"DWRITE_PANOSE_SCRIPT_TOPOLOGY_ROMAN_CONNECTED",
"DWRITE_PANOSE_SCRIPT_TOPOLOGY_CURSIVE_DISCONNECTED",
"DWRITE_PANOSE_SCRIPT_TOPOLOGY_CURSIVE_TRAILING",
"DWRITE_PANOSE_SCRIPT_TOPOLOGY_CURSIVE_CONNECTED",
"DWRITE_PANOSE_SCRIPT_TOPOLOGY_BLACKLETTER_DISCONNECTED",
"DWRITE_PANOSE_SCRIPT_TOPOLOGY_BLACKLETTER_TRAILING",
"DWRITE_PANOSE_SCRIPT_TOPOLOGY_BLACKLETTER_CONNECTED",
])
DWRITE_PANOSE_SCRIPT_FORM = Enum("DWRITE_PANOSE_SCRIPT_FORM", [
"DWRITE_PANOSE_SCRIPT_FORM_ANY",
"DWRITE_PANOSE_SCRIPT_FORM_NO_FIT",
"DWRITE_PANOSE_SCRIPT_FORM_UPRIGHT_NO_WRAPPING",
"DWRITE_PANOSE_SCRIPT_FORM_UPRIGHT_SOME_WRAPPING",
"DWRITE_PANOSE_SCRIPT_FORM_UPRIGHT_MORE_WRAPPING",
"DWRITE_PANOSE_SCRIPT_FORM_UPRIGHT_EXTREME_WRAPPING",
"DWRITE_PANOSE_SCRIPT_FORM_OBLIQUE_NO_WRAPPING",
"DWRITE_PANOSE_SCRIPT_FORM_OBLIQUE_SOME_WRAPPING",
"DWRITE_PANOSE_SCRIPT_FORM_OBLIQUE_MORE_WRAPPING",
"DWRITE_PANOSE_SCRIPT_FORM_OBLIQUE_EXTREME_WRAPPING",
"DWRITE_PANOSE_SCRIPT_FORM_EXAGGERATED_NO_WRAPPING",
"DWRITE_PANOSE_SCRIPT_FORM_EXAGGERATED_SOME_WRAPPING",
"DWRITE_PANOSE_SCRIPT_FORM_EXAGGERATED_MORE_WRAPPING",
"DWRITE_PANOSE_SCRIPT_FORM_EXAGGERATED_EXTREME_WRAPPING",
])
DWRITE_PANOSE_FINIALS = Enum("DWRITE_PANOSE_FINIALS", [
"DWRITE_PANOSE_FINIALS_ANY",
"DWRITE_PANOSE_FINIALS_NO_FIT",
"DWRITE_PANOSE_FINIALS_NONE_NO_LOOPS",
"DWRITE_PANOSE_FINIALS_NONE_CLOSED_LOOPS",
"DWRITE_PANOSE_FINIALS_NONE_OPEN_LOOPS",
"DWRITE_PANOSE_FINIALS_SHARP_NO_LOOPS",
"DWRITE_PANOSE_FINIALS_SHARP_CLOSED_LOOPS",
"DWRITE_PANOSE_FINIALS_SHARP_OPEN_LOOPS",
"DWRITE_PANOSE_FINIALS_TAPERED_NO_LOOPS",
"DWRITE_PANOSE_FINIALS_TAPERED_CLOSED_LOOPS",
"DWRITE_PANOSE_FINIALS_TAPERED_OPEN_LOOPS",
"DWRITE_PANOSE_FINIALS_ROUND_NO_LOOPS",
"DWRITE_PANOSE_FINIALS_ROUND_CLOSED_LOOPS",
"DWRITE_PANOSE_FINIALS_ROUND_OPEN_LOOPS",
])
DWRITE_PANOSE_XASCENT = Enum("DWRITE_PANOSE_XASCENT", [
"DWRITE_PANOSE_XASCENT_ANY",
"DWRITE_PANOSE_XASCENT_NO_FIT",
"DWRITE_PANOSE_XASCENT_VERY_LOW",
"DWRITE_PANOSE_XASCENT_LOW",
"DWRITE_PANOSE_XASCENT_MEDIUM",
"DWRITE_PANOSE_XASCENT_HIGH",
"DWRITE_PANOSE_XASCENT_VERY_HIGH",
])
DWRITE_PANOSE_DECORATIVE_CLASS = Enum("DWRITE_PANOSE_DECORATIVE_CLASS", [
"DWRITE_PANOSE_DECORATIVE_CLASS_ANY",
"DWRITE_PANOSE_DECORATIVE_CLASS_NO_FIT",
"DWRITE_PANOSE_DECORATIVE_CLASS_DERIVATIVE",
"DWRITE_PANOSE_DECORATIVE_CLASS_NONSTANDARD_TOPOLOGY",
"DWRITE_PANOSE_DECORATIVE_CLASS_NONSTANDARD_ELEMENTS",
"DWRITE_PANOSE_DECORATIVE_CLASS_NONSTANDARD_ASPECT",
"DWRITE_PANOSE_DECORATIVE_CLASS_INITIALS",
"DWRITE_PANOSE_DECORATIVE_CLASS_CARTOON",
"DWRITE_PANOSE_DECORATIVE_CLASS_PICTURE_STEMS",
"DWRITE_PANOSE_DECORATIVE_CLASS_ORNAMENTED",
"DWRITE_PANOSE_DECORATIVE_CLASS_TEXT_AND_BACKGROUND",
"DWRITE_PANOSE_DECORATIVE_CLASS_COLLAGE",
"DWRITE_PANOSE_DECORATIVE_CLASS_MONTAGE",
])
DWRITE_PANOSE_ASPECT = Enum("DWRITE_PANOSE_ASPECT", [
"DWRITE_PANOSE_ASPECT_ANY",
"DWRITE_PANOSE_ASPECT_NO_FIT",
"DWRITE_PANOSE_ASPECT_SUPER_CONDENSED",
"DWRITE_PANOSE_ASPECT_VERY_CONDENSED",
"DWRITE_PANOSE_ASPECT_CONDENSED",
"DWRITE_PANOSE_ASPECT_NORMAL",
"DWRITE_PANOSE_ASPECT_EXTENDED",
"DWRITE_PANOSE_ASPECT_VERY_EXTENDED",
"DWRITE_PANOSE_ASPECT_SUPER_EXTENDED",
"DWRITE_PANOSE_ASPECT_MONOSPACED",
])
DWRITE_PANOSE_FILL = Enum("DWRITE_PANOSE_FILL", [
"DWRITE_PANOSE_FILL_ANY",
"DWRITE_PANOSE_FILL_NO_FIT",
"DWRITE_PANOSE_FILL_STANDARD_SOLID_FILL",
"DWRITE_PANOSE_FILL_NO_FILL",
"DWRITE_PANOSE_FILL_PATTERNED_FILL",
"DWRITE_PANOSE_FILL_COMPLEX_FILL",
"DWRITE_PANOSE_FILL_SHAPED_FILL",
"DWRITE_PANOSE_FILL_DRAWN_DISTRESSED",
])
DWRITE_PANOSE_LINING = Enum("DWRITE_PANOSE_LINING", [
"DWRITE_PANOSE_LINING_ANY",
"DWRITE_PANOSE_LINING_NO_FIT",
"DWRITE_PANOSE_LINING_NONE",
"DWRITE_PANOSE_LINING_INLINE",
"DWRITE_PANOSE_LINING_OUTLINE",
"DWRITE_PANOSE_LINING_ENGRAVED",
"DWRITE_PANOSE_LINING_SHADOW",
"DWRITE_PANOSE_LINING_RELIEF",
"DWRITE_PANOSE_LINING_BACKDROP",
])
DWRITE_PANOSE_DECORATIVE_TOPOLOGY = Enum("DWRITE_PANOSE_DECORATIVE_TOPOLOGY", [
"DWRITE_PANOSE_DECORATIVE_TOPOLOGY_ANY",
"DWRITE_PANOSE_DECORATIVE_TOPOLOGY_NO_FIT",
"DWRITE_PANOSE_DECORATIVE_TOPOLOGY_STANDARD",
"DWRITE_PANOSE_DECORATIVE_TOPOLOGY_SQUARE",
"DWRITE_PANOSE_DECORATIVE_TOPOLOGY_MULTIPLE_SEGMENT",
"DWRITE_PANOSE_DECORATIVE_TOPOLOGY_ART_DECO",
"DWRITE_PANOSE_DECORATIVE_TOPOLOGY_UNEVEN_WEIGHTING",
"DWRITE_PANOSE_DECORATIVE_TOPOLOGY_DIVERSE_ARMS",
"DWRITE_PANOSE_DECORATIVE_TOPOLOGY_DIVERSE_FORMS",
"DWRITE_PANOSE_DECORATIVE_TOPOLOGY_LOMBARDIC_FORMS",
"DWRITE_PANOSE_DECORATIVE_TOPOLOGY_UPPER_CASE_IN_LOWER_CASE",
"DWRITE_PANOSE_DECORATIVE_TOPOLOGY_IMPLIED_TOPOLOGY",
"DWRITE_PANOSE_DECORATIVE_TOPOLOGY_HORSESHOE_E_AND_A",
"DWRITE_PANOSE_DECORATIVE_TOPOLOGY_CURSIVE",
"DWRITE_PANOSE_DECORATIVE_TOPOLOGY_BLACKLETTER",
"DWRITE_PANOSE_DECORATIVE_TOPOLOGY_SWASH_VARIANCE",
])
DWRITE_PANOSE_CHARACTER_RANGES = Enum("DWRITE_PANOSE_CHARACTER_RANGES", [
"DWRITE_PANOSE_CHARACTER_RANGES_ANY",
"DWRITE_PANOSE_CHARACTER_RANGES_NO_FIT",
"DWRITE_PANOSE_CHARACTER_RANGES_EXTENDED_COLLECTION",
"DWRITE_PANOSE_CHARACTER_RANGES_LITERALS",
"DWRITE_PANOSE_CHARACTER_RANGES_NO_LOWER_CASE",
"DWRITE_PANOSE_CHARACTER_RANGES_SMALL_CAPS",
])
DWRITE_PANOSE_SYMBOL_KIND = Enum("DWRITE_PANOSE_SYMBOL_KIND", [
"DWRITE_PANOSE_SYMBOL_KIND_ANY",
"DWRITE_PANOSE_SYMBOL_KIND_NO_FIT",
"DWRITE_PANOSE_SYMBOL_KIND_MONTAGES",
"DWRITE_PANOSE_SYMBOL_KIND_PICTURES",
"DWRITE_PANOSE_SYMBOL_KIND_SHAPES",
"DWRITE_PANOSE_SYMBOL_KIND_SCIENTIFIC",
"DWRITE_PANOSE_SYMBOL_KIND_MUSIC",
"DWRITE_PANOSE_SYMBOL_KIND_EXPERT",
"DWRITE_PANOSE_SYMBOL_KIND_PATTERNS",
"DWRITE_PANOSE_SYMBOL_KIND_BOARDERS",
"DWRITE_PANOSE_SYMBOL_KIND_ICONS",
"DWRITE_PANOSE_SYMBOL_KIND_LOGOS",
"DWRITE_PANOSE_SYMBOL_KIND_INDUSTRY_SPECIFIC",
])
DWRITE_PANOSE_SYMBOL_ASPECT_RATIO = Enum("DWRITE_PANOSE_SYMBOL_ASPECT_RATIO", [
"DWRITE_PANOSE_SYMBOL_ASPECT_RATIO_ANY",
"DWRITE_PANOSE_SYMBOL_ASPECT_RATIO_NO_FIT",
"DWRITE_PANOSE_SYMBOL_ASPECT_RATIO_NO_WIDTH",
"DWRITE_PANOSE_SYMBOL_ASPECT_RATIO_EXCEPTIONALLY_WIDE",
"DWRITE_PANOSE_SYMBOL_ASPECT_RATIO_SUPER_WIDE",
"DWRITE_PANOSE_SYMBOL_ASPECT_RATIO_VERY_WIDE",
"DWRITE_PANOSE_SYMBOL_ASPECT_RATIO_WIDE",
"DWRITE_PANOSE_SYMBOL_ASPECT_RATIO_NORMAL",
"DWRITE_PANOSE_SYMBOL_ASPECT_RATIO_NARROW",
"DWRITE_PANOSE_SYMBOL_ASPECT_RATIO_VERY_NARROW",
])
DWRITE_OUTLINE_THRESHOLD = Enum("DWRITE_OUTLINE_THRESHOLD", [
"DWRITE_OUTLINE_THRESHOLD_ANTIALIASED",
"DWRITE_OUTLINE_THRESHOLD_ALIASED",
])
DWRITE_BASELINE = Enum("DWRITE_BASELINE", [
"DWRITE_BASELINE_DEFAULT",
"DWRITE_BASELINE_ROMAN",
"DWRITE_BASELINE_CENTRAL",
"DWRITE_BASELINE_MATH",
"DWRITE_BASELINE_HANGING",
"DWRITE_BASELINE_IDEOGRAPHIC_BOTTOM",
"DWRITE_BASELINE_IDEOGRAPHIC_TOP",
"DWRITE_BASELINE_MINIMUM",
"DWRITE_BASELINE_MAXIMUM",
])
DWRITE_VERTICAL_GLYPH_ORIENTATION = Enum("DWRITE_VERTICAL_GLYPH_ORIENTATION", [
"DWRITE_VERTICAL_GLYPH_ORIENTATION_DEFAULT",
"DWRITE_VERTICAL_GLYPH_ORIENTATION_STACKED",
])
DWRITE_GLYPH_ORIENTATION_ANGLE = Enum("DWRITE_GLYPH_ORIENTATION_ANGLE", [
"DWRITE_GLYPH_ORIENTATION_ANGLE_0_DEGREES",
"DWRITE_GLYPH_ORIENTATION_ANGLE_90_DEGREES",
"DWRITE_GLYPH_ORIENTATION_ANGLE_180_DEGREES",
"DWRITE_GLYPH_ORIENTATION_ANGLE_270_DEGREES",
])
DWRITE_FONT_METRICS1 = Struct("DWRITE_FONT_METRICS1", [
(INT16, "glyphBoxLeft"),
(INT16, "glyphBoxTop"),
(INT16, "glyphBoxRight"),
(INT16, "glyphBoxBottom"),
(INT16, "subscriptPositionX"),
(INT16, "subscriptPositionY"),
(INT16, "subscriptSizeX"),
(INT16, "subscriptSizeY"),
(INT16, "superscriptPositionX"),
(INT16, "superscriptPositionY"),
(INT16, "superscriptSizeX"),
(INT16, "superscriptSizeY"),
(BOOL, "hasTypographicMetrics"),
])
DWRITE_CARET_METRICS = Struct("DWRITE_CARET_METRICS", [
(INT16, "slopeRise"),
(INT16, "slopeRun"),
(INT16, "offset"),
])
DWRITE_UNICODE_RANGE = Struct("DWRITE_UNICODE_RANGE", [
(UINT32, "first"),
(UINT32, "last"),
])
DWRITE_SCRIPT_PROPERTIES = Struct("DWRITE_SCRIPT_PROPERTIES", [
(UINT32, "isoScriptCode"),
(UINT32, "isoScriptNumber"),
(UINT32, "clusterLookahead"),
(UINT32, "justificationCharacter"),
(UINT32, "restrictCaretToClusters"),
(UINT32, "usesWordDividers"),
(UINT32, "isDiscreteWriting"),
(UINT32, "isBlockWriting"),
(UINT32, "isDistributedWithinCluster"),
(UINT32, "isConnectedWriting"),
(UINT32, "isCursiveWriting"),
(UINT32, "reserved"),
])
DWRITE_JUSTIFICATION_OPPORTUNITY = Struct("DWRITE_JUSTIFICATION_OPPORTUNITY", [
(FLOAT, "expansionMinimum"),
(FLOAT, "expansionMaximum"),
(FLOAT, "compressionMaximum"),
(UINT32, "expansionPriority"),
(UINT32, "compressionPriority"),
(UINT32, "allowResidualExpansion"),
(UINT32, "allowResidualCompression"),
(UINT32, "applyToLeadingEdge"),
(UINT32, "applyToTrailingEdge"),
(UINT32, "reserved"),
])
IDWriteFactory1 = Interface("IDWriteFactory1", IDWriteFactory)
IDWriteFontFace1 = Interface("IDWriteFontFace1", IDWriteFontFace)
IDWriteFont1 = Interface("IDWriteFont1", IDWriteFont)
IDWriteRenderingParams1 = Interface("IDWriteRenderingParams1", IDWriteRenderingParams)
IDWriteTextAnalyzer1 = Interface("IDWriteTextAnalyzer1", IDWriteTextAnalyzer)
IDWriteTextAnalysisSource1 = Interface("IDWriteTextAnalysisSource1", IDWriteTextAnalysisSource)
IDWriteTextAnalysisSink1 = Interface("IDWriteTextAnalysisSink1", IDWriteTextAnalysisSink)
IDWriteTextLayout1 = Interface("IDWriteTextLayout1", IDWriteTextLayout)
IDWriteBitmapRenderTarget1 = Interface("IDWriteBitmapRenderTarget1", IDWriteBitmapRenderTarget)
IDWriteFactory1.methods += [
StdMethod(HRESULT, "GetEudcFontCollection", [(Pointer(ObjPointer(IDWriteFontCollection)), "fontCollection"), (BOOL, "checkForUpdates")]),
StdMethod(HRESULT, "CreateCustomRenderingParams", [(FLOAT, "gamma"), (FLOAT, "enhancedContrast"), (FLOAT, "enhancedContrastGrayscale"), (FLOAT, "clearTypeLevel"), (DWRITE_PIXEL_GEOMETRY, "pixelGeometry"), (DWRITE_RENDERING_MODE, "renderingMode"), Out(Pointer(ObjPointer(IDWriteRenderingParams1)), "renderingParams")]),
]
IDWriteFontFace1.methods += [
StdMethod(Void, "GetMetrics", [Out(Pointer(DWRITE_FONT_METRICS1), "fontMetrics")]),
StdMethod(HRESULT, "GetGdiCompatibleMetrics", [(FLOAT, "emSize"), (FLOAT, "pixelsPerDip"), (Pointer(Const(DWRITE_MATRIX)), "transform"), Out(Pointer(DWRITE_FONT_METRICS1), "fontMetrics")]),
StdMethod(Void, "GetCaretMetrics", [Out(Pointer(DWRITE_CARET_METRICS), "caretMetrics")]),
StdMethod(HRESULT, "GetUnicodeRanges", [(UINT32, "maxRangeCount"), Out(Pointer(DWRITE_UNICODE_RANGE), "unicodeRanges"), Out(Pointer(UINT32), "actualRangeCount")]),
StdMethod(BOOL, "IsMonospacedFont", []),
StdMethod(HRESULT, "GetDesignGlyphAdvances", [(UINT32, "glyphCount"), (Pointer(Const(UINT16)), "glyphIndices"), Out(ObjPointer(INT32), "glyphAdvances"), (BOOL, "isSideways")]),
StdMethod(HRESULT, "GetGdiCompatibleGlyphAdvances", [(FLOAT, "emSize"), (FLOAT, "pixelsPerDip"), (Pointer(Const(DWRITE_MATRIX)), "transform"), (BOOL, "useGdiNatural"), (BOOL, "isSideways"), (UINT32, "glyphCount"), (Pointer(Const(UINT16)), "glyphIndices"), Out(ObjPointer(INT32), "glyphAdvances")]),
StdMethod(HRESULT, "GetKerningPairAdjustments", [(UINT32, "glyphCount"), (Pointer(Const(UINT16)), "glyphIndices"), Out(ObjPointer(INT32), "glyphAdvanceAdjustments")]),
StdMethod(BOOL, "HasKerningPairs", []),
StdMethod(HRESULT, "GetRecommendedRenderingMode", [(FLOAT, "fontEmSize"), (FLOAT, "dpiX"), (FLOAT, "dpiY"), (Pointer(Const(DWRITE_MATRIX)), "transform"), (BOOL, "isSideways"), (DWRITE_OUTLINE_THRESHOLD, "outlineThreshold"), (DWRITE_MEASURING_MODE, "measuringMode"), Out(Pointer(DWRITE_RENDERING_MODE), "renderingMode")]),
StdMethod(HRESULT, "GetVerticalGlyphVariants", [(UINT32, "glyphCount"), (Pointer(Const(UINT16)), "nominalGlyphIndices"), Out(Pointer(UINT16), "verticalGlyphIndices")]),
StdMethod(BOOL, "HasVerticalGlyphVariants", []),
]
DWRITE_PANOSE_TEXT = Struct("DWRITE_PANOSE_TEXT", [
(UINT8, "familyKind"),
(UINT8, "serifStyle"),
(UINT8, "weight"),
(UINT8, "proportion"),
(UINT8, "contrast"),
(UINT8, "strokeVariation"),
(UINT8, "armStyle"),
(UINT8, "letterform"),
(UINT8, "midline"),
(UINT8, "xHeight"),
])
DWRITE_PANOSE_SCRIPT = Struct("DWRITE_PANOSE_SCRIPT", [
(UINT8, "familyKind"),
(UINT8, "toolKind"),
(UINT8, "weight"),
(UINT8, "spacing"),
(UINT8, "aspectRatio"),
(UINT8, "contrast"),
(UINT8, "scriptTopology"),
(UINT8, "scriptForm"),
(UINT8, "finials"),
(UINT8, "xAscent"),
])
DWRITE_PANOSE_DECORATIVE = Struct("DWRITE_PANOSE_DECORATIVE", [
(UINT8, "familyKind"),
(UINT8, "decorativeClass"),
(UINT8, "weight"),
(UINT8, "aspect"),
(UINT8, "contrast"),
(UINT8, "serifVariant"),
(UINT8, "fill"),
(UINT8, "lining"),
(UINT8, "decorativeTopology"),
(UINT8, "characterRange"),
])
DWRITE_PANOSE_SYMBOL = Struct("DWRITE_PANOSE_SYMBOL", [
(UINT8, "familyKind"),
(UINT8, "symbolKind"),
(UINT8, "weight"),
(UINT8, "spacing"),
(UINT8, "aspectRatioAndContrast"),
(UINT8, "aspectRatio94"),
(UINT8, "aspectRatio119"),
(UINT8, "aspectRatio157"),
(UINT8, "aspectRatio163"),
(UINT8, "aspectRatio211"),
])
DWRITE_PANOSE = Struct("DWRITE_PANOSE", [
(UINT8, "familyKind"),
    # FIXME: DWRITE_PANOSE is really a union of a raw 10-byte values array and
    # the four variant structs above (text/script/decorative/symbol); only the
    # shared leading familyKind byte is modelled here.
])
IDWriteFont1.methods += [
StdMethod(Void, "GetMetrics", [Out(Pointer(DWRITE_FONT_METRICS1), "fontMetrics")]),
StdMethod(Void, "GetPanose", [Out(OpaquePointer(DWRITE_PANOSE), "panose")]),
StdMethod(HRESULT, "GetUnicodeRanges", [(UINT32, "maxRangeCount"), Out(Pointer(DWRITE_UNICODE_RANGE), "unicodeRanges"), Out(Pointer(UINT32), "actualRangeCount")]),
StdMethod(BOOL, "IsMonospacedFont", []),
]
IDWriteRenderingParams1.methods += [
StdMethod(FLOAT, "GetGrayscaleEnhancedContrast", []),
]
IDWriteTextAnalyzer1.methods += [
StdMethod(HRESULT, "ApplyCharacterSpacing", [(FLOAT, "leadingSpacing"), (FLOAT, "trailingSpacing"), (FLOAT, "minimumAdvanceWidth"), (UINT32, "textLength"), (UINT32, "glyphCount"), (Pointer(Const(UINT16)), "clusterMap"), (Pointer(Const(FLOAT)), "glyphAdvances"), (Pointer(Const(DWRITE_GLYPH_OFFSET)), "glyphOffsets"), (Pointer(Const(DWRITE_SHAPING_GLYPH_PROPERTIES)), "glyphProperties"), Out(Pointer(FLOAT), "modifiedGlyphAdvances"), Out(Pointer(DWRITE_GLYPH_OFFSET), "modifiedGlyphOffsets")]),
StdMethod(HRESULT, "GetBaseline", [(ObjPointer(IDWriteFontFace), "fontFace"), (DWRITE_BASELINE, "baseline"), (BOOL, "isVertical"), (BOOL, "isSimulationAllowed"), (DWRITE_SCRIPT_ANALYSIS, "scriptAnalysis"), (PCWSTR, "localeName"), Out(ObjPointer(INT32), "baselineCoordinate"), Out(Pointer(BOOL), "exists")]),
StdMethod(HRESULT, "AnalyzeVerticalGlyphOrientation", [(ObjPointer(IDWriteTextAnalysisSource1), "analysisSource"), (UINT32, "textPosition"), (UINT32, "textLength"), (ObjPointer(IDWriteTextAnalysisSink1), "analysisSink")]),
StdMethod(HRESULT, "GetGlyphOrientationTransform", [(DWRITE_GLYPH_ORIENTATION_ANGLE, "glyphOrientationAngle"), (BOOL, "isSideways"), Out(Pointer(DWRITE_MATRIX), "transform")]),
StdMethod(HRESULT, "GetScriptProperties", [(DWRITE_SCRIPT_ANALYSIS, "scriptAnalysis"), Out(Pointer(DWRITE_SCRIPT_PROPERTIES), "scriptProperties")]),
StdMethod(HRESULT, "GetTextComplexity", [(String(Const(WCHAR), "textLength", wide=True), "textString"), (UINT32, "textLength"), (ObjPointer(IDWriteFontFace), "fontFace"), Out(Pointer(BOOL), "isTextSimple"), Out(Pointer(UINT32), "textLengthRead"), Out(Pointer(UINT16), "glyphIndices")]),
StdMethod(HRESULT, "GetJustificationOpportunities", [(ObjPointer(IDWriteFontFace), "fontFace"), (FLOAT, "fontEmSize"), (DWRITE_SCRIPT_ANALYSIS, "scriptAnalysis"), (UINT32, "textLength"), (UINT32, "glyphCount"), (String(Const(WCHAR), "textLength", wide=True), "textString"), (Pointer(Const(UINT16)), "clusterMap"), (Pointer(Const(DWRITE_SHAPING_GLYPH_PROPERTIES)), "glyphProperties"), Out(Pointer(DWRITE_JUSTIFICATION_OPPORTUNITY), "justificationOpportunities")]),
StdMethod(HRESULT, "JustifyGlyphAdvances", [(FLOAT, "lineWidth"), (UINT32, "glyphCount"), (Pointer(Const(DWRITE_JUSTIFICATION_OPPORTUNITY)), "justificationOpportunities"), (Pointer(Const(FLOAT)), "glyphAdvances"), (Pointer(Const(DWRITE_GLYPH_OFFSET)), "glyphOffsets"), Out(Pointer(FLOAT), "justifiedGlyphAdvances"), Out(Pointer(DWRITE_GLYPH_OFFSET), "justifiedGlyphOffsets")]),
StdMethod(HRESULT, "GetJustifiedGlyphs", [(ObjPointer(IDWriteFontFace), "fontFace"), (FLOAT, "fontEmSize"), (DWRITE_SCRIPT_ANALYSIS, "scriptAnalysis"), (UINT32, "textLength"), (UINT32, "glyphCount"), (UINT32, "maxGlyphCount"), (Pointer(Const(UINT16)), "clusterMap"), (Pointer(Const(UINT16)), "glyphIndices"), (Pointer(Const(FLOAT)), "glyphAdvances"), (Pointer(Const(FLOAT)), "justifiedGlyphAdvances"), (Pointer(Const(DWRITE_GLYPH_OFFSET)), "justifiedGlyphOffsets"), (Pointer(Const(DWRITE_SHAPING_GLYPH_PROPERTIES)), "glyphProperties"), Out(Pointer(UINT32), "actualGlyphCount"), Out(Pointer(UINT16), "modifiedClusterMap"), Out(Pointer(UINT16), "modifiedGlyphIndices"), Out(Pointer(FLOAT), "modifiedGlyphAdvances"), Out(Pointer(DWRITE_GLYPH_OFFSET), "modifiedGlyphOffsets")]),
]
IDWriteTextAnalysisSource1.methods += [
StdMethod(HRESULT, "GetVerticalGlyphOrientation", [(UINT32, "textPosition"), Out(Pointer(UINT32), "textLength"), Out(Pointer(DWRITE_VERTICAL_GLYPH_ORIENTATION), "glyphOrientation"), Out(Pointer(UINT8), "bidiLevel")]),
]
IDWriteTextAnalysisSink1.methods += [
StdMethod(HRESULT, "SetGlyphOrientation", [(UINT32, "textPosition"), (UINT32, "textLength"), (DWRITE_GLYPH_ORIENTATION_ANGLE, "glyphOrientationAngle"), (UINT8, "adjustedBidiLevel"), (BOOL, "isSideways"), (BOOL, "isRightToLeft")]),
]
IDWriteTextLayout1.methods += [
StdMethod(HRESULT, "SetPairKerning", [(BOOL, "isPairKerningEnabled"), (DWRITE_TEXT_RANGE, "textRange")]),
StdMethod(HRESULT, "GetPairKerning", [(UINT32, "currentPosition"), Out(Pointer(BOOL), "isPairKerningEnabled"), Out(Pointer(DWRITE_TEXT_RANGE), "textRange")]),
StdMethod(HRESULT, "SetCharacterSpacing", [(FLOAT, "leadingSpacing"), (FLOAT, "trailingSpacing"), (FLOAT, "minimumAdvanceWidth"), (DWRITE_TEXT_RANGE, "textRange")]),
StdMethod(HRESULT, "GetCharacterSpacing", [(UINT32, "currentPosition"), Out(Pointer(FLOAT), "leadingSpacing"), Out(Pointer(FLOAT), "trailingSpacing"), Out(Pointer(FLOAT), "minimumAdvanceWidth"), Out(Pointer(DWRITE_TEXT_RANGE), "textRange")]),
]
DWRITE_TEXT_ANTIALIAS_MODE = Enum("DWRITE_TEXT_ANTIALIAS_MODE", [
"DWRITE_TEXT_ANTIALIAS_MODE_CLEARTYPE",
"DWRITE_TEXT_ANTIALIAS_MODE_GRAYSCALE",
])
IDWriteBitmapRenderTarget1.methods += [
StdMethod(DWRITE_TEXT_ANTIALIAS_MODE, "GetTextAntialiasMode", []),
StdMethod(HRESULT, "SetTextAntialiasMode", [(DWRITE_TEXT_ANTIALIAS_MODE, "antialiasMode")]),
]
dwrite.addInterfaces([
IDWriteFactory1
])
|
mit
|
oberlin/django
|
django/views/decorators/csrf.py
|
586
|
2202
|
from functools import wraps
from django.middleware.csrf import CsrfViewMiddleware, get_token
from django.utils.decorators import available_attrs, decorator_from_middleware
csrf_protect = decorator_from_middleware(CsrfViewMiddleware)
csrf_protect.__name__ = "csrf_protect"
csrf_protect.__doc__ = """
This decorator adds CSRF protection in exactly the same way as
CsrfViewMiddleware, but it can be used on a per view basis. Using both, or
using the decorator multiple times, is harmless and efficient.
"""
class _EnsureCsrfToken(CsrfViewMiddleware):
# We need this to behave just like the CsrfViewMiddleware, but not reject
# requests or log warnings.
def _reject(self, request, reason):
return None
requires_csrf_token = decorator_from_middleware(_EnsureCsrfToken)
requires_csrf_token.__name__ = 'requires_csrf_token'
requires_csrf_token.__doc__ = """
Use this decorator on views that need a correct csrf_token available to
RequestContext, but without the CSRF protection that csrf_protect
enforces.
"""
class _EnsureCsrfCookie(CsrfViewMiddleware):
def _reject(self, request, reason):
return None
def process_view(self, request, callback, callback_args, callback_kwargs):
retval = super(_EnsureCsrfCookie, self).process_view(request, callback, callback_args, callback_kwargs)
# Forces process_response to send the cookie
get_token(request)
return retval
ensure_csrf_cookie = decorator_from_middleware(_EnsureCsrfCookie)
ensure_csrf_cookie.__name__ = 'ensure_csrf_cookie'
ensure_csrf_cookie.__doc__ = """
Use this decorator to ensure that a view sets a CSRF cookie, whether or not it
uses the csrf_token template tag, or the CsrfViewMiddleware is used.
"""
def csrf_exempt(view_func):
"""
Marks a view function as being exempt from the CSRF view protection.
"""
# We could just do view_func.csrf_exempt = True, but decorators
# are nicer if they don't have side-effects, so we return a new
# function.
def wrapped_view(*args, **kwargs):
return view_func(*args, **kwargs)
wrapped_view.csrf_exempt = True
return wraps(view_func, assigned=available_attrs(view_func))(wrapped_view)
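# Usage sketch (editor's addition): typical application of these decorators;
# the view names and bodies below are hypothetical, not part of this module.
#
#     @csrf_protect
#     def contact(request):
#         ...   # POST requests to this view are CSRF-checked
#
#     @csrf_exempt
#     def webhook(request):
#         ...   # CsrfViewMiddleware skips this view (csrf_exempt is True)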
|
bsd-3-clause
|
MattsFleaMarket/python-for-android
|
python3-alpha/extra_modules/pyxmpp2/presence.py
|
46
|
10333
|
#
# (C) Copyright 2003-2011 Jacek Konieczny <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License Version
# 2.1 as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
#
"""Presence XMPP stanza handling
Normative reference:
- `RFC 3920 <http://www.ietf.org/rfc/rfc3920.txt>`__
"""
__docformat__ = "restructuredtext en"
from .etree import ElementTree, ElementClass
from .exceptions import BadRequestProtocolError
from .stanza import Stanza
PRESENCE_TYPES = ("available", "unavailable", "probe",
"subscribe", "unsubscribe", "subscribed", "unsubscribed",
"invisible", "error")
ACCEPT_RESPONSES = {
"subscribe": "subscribed",
"subscribed": "subscribe",
"unsubscribe": "unsubscribed",
"unsubscribed": "unsubscribe",
}
DENY_RESPONSES = {
"subscribe": "unsubscribed",
"subscribed": "unsubscribe",
"unsubscribe": "subscribed",
"unsubscribed": "subscribe",
}
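# For example, a received "subscribe" request is accepted with "subscribed"
# and denied with "unsubscribed"; make_accept_response() and
# make_deny_response() below use these two tables.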
class Presence(Stanza):
"""<presence /> stanza.
"""
# pylint: disable-msg=R0902,R0904
element_name = "presence"
def __init__(self, element = None, from_jid = None, to_jid = None,
stanza_type = None, stanza_id = None,
error = None, error_cond = None, return_path = None,
language = None,
show = None, status = None, priority = None):
"""Initialize a `Presence` object.
:Parameters:
- `element`: XML element
- `from_jid`: sender JID.
- `to_jid`: recipient JID.
            - `stanza_type`: stanza type: one of: None, "available",
              "unavailable", "subscribe", "subscribed", "unsubscribe",
              "unsubscribed" or "error". "available" is automatically
              changed to None.
- `stanza_id`: stanza id -- value of stanza's "id" attribute
- `language`: default language for the stanza content
- `show`: "show" field of presence stanza. One of: None, "away",
"xa", "dnd", "chat".
- `status`: descriptive text for the presence stanza.
- `priority`: presence priority.
- `error_cond`: error condition name. Ignored if `stanza_type` is
not "error"
:Types:
- `element`: :etree:`ElementTree.Element`
- `from_jid`: `JID`
- `to_jid`: `JID`
- `stanza_type`: `str`
- `stanza_id`: `str`
- `language`: `str`
- `show`: `str`
- `status`: `str`
- `priority`: `int`
- `error_cond`: `str`
"""
# pylint: disable-msg=R0913
self._show = None
self._status = None
self._priority = 0
if element is None:
element = "presence"
elif not isinstance(element, ElementClass):
raise TypeError("Couldn't make Presence from " + repr(element))
if stanza_type is not None and stanza_type not in PRESENCE_TYPES:
raise ValueError("Bad presence type")
elif stanza_type == 'available':
stanza_type = None
Stanza.__init__(self, element, from_jid = from_jid, to_jid = to_jid,
stanza_type = stanza_type, stanza_id = stanza_id,
error = error, error_cond = error_cond,
return_path = return_path, language = language)
if self.element_name != "presence":
raise ValueError("The element is not <presence />")
self._show_tag = self._ns_prefix + "show"
self._status_tag = self._ns_prefix + "status"
self._priority_tag = self._ns_prefix + "priority"
if self._element is not None:
self._decode_subelements()
if show is not None:
self.show = show
if status is not None:
self.status = status
if priority is not None:
self.priority = priority
def _decode_subelements(self):
"""Decode the stanza subelements."""
for child in self._element:
if child.tag == self._show_tag:
self._show = child.text
elif child.tag == self._status_tag:
self._status = child.text
elif child.tag == self._priority_tag:
try:
self._priority = int(child.text.strip())
if self._priority < -128 or self._priority > 127:
raise ValueError
except ValueError:
raise BadRequestProtocolError(
"Presence priority not an integer")
def as_xml(self):
"""Return the XML stanza representation.
Always return an independent copy of the stanza XML representation,
which can be freely modified without affecting the stanza.
:returntype: :etree:`ElementTree.Element`"""
result = Stanza.as_xml(self)
if self._show:
child = ElementTree.SubElement(result, self._show_tag)
child.text = self._show
if self._status:
child = ElementTree.SubElement(result, self._status_tag)
child.text = self._status
if self._priority:
child = ElementTree.SubElement(result, self._priority_tag)
child.text = str(self._priority)
return result
def copy(self):
"""Create a deep copy of the stanza.
:returntype: `Presence`"""
        # keyword arguments keep the trailing values from shifting into the
        # wrong constructor slots (error_cond and language sit between them)
        result = Presence(None, self.from_jid, self.to_jid,
                        self.stanza_type, self.stanza_id, self.error,
                        return_path = self._return_path(),
                        show = self._show, status = self._status,
                        priority = self._priority)
if self._payload is None:
self.decode_payload()
for payload in self._payload:
result.add_payload(payload.copy())
return result
@property
def show(self): # pylint: disable-msg=E0202
"""Presence status type.
:returntype: `str`
"""
return self._show
@show.setter # pylint: disable-msg=E1101
def show(self, show): # pylint: disable-msg=E0202,E0102,C0111
self._show = str(show)
self._dirty = True
@property
def status(self): # pylint: disable-msg=E0202
"""Presence status message.
:returntype: `str`
"""
return self._status
@status.setter # pylint: disable-msg=E1101
def status(self, status): # pylint: disable-msg=E0202,E0102,C0111
self._status = str(status)
self._dirty = True
@property
def priority(self): # pylint: disable-msg=E0202
"""Presence priority.
        :returntype: `int`
"""
return self._priority
@priority.setter # pylint: disable-msg=E1101
def priority(self, priority): # pylint: disable-msg=E0202,E0102,C0111
priority = int(priority)
if priority < -128 or priority > 127:
raise ValueError("Priority must be in the (-128, 128) range")
self._priority = priority
self._dirty = True
def make_accept_response(self):
"""Create "accept" response for the "subscribe" / "subscribed" /
"unsubscribe" / "unsubscribed" presence stanza.
:return: new stanza.
:returntype: `Presence`
"""
if self.stanza_type not in ("subscribe", "subscribed",
"unsubscribe", "unsubscribed"):
raise ValueError("Results may only be generated for 'subscribe',"
"'subscribed','unsubscribe' or 'unsubscribed' presence")
stanza = Presence(stanza_type = ACCEPT_RESPONSES[self.stanza_type],
from_jid = self.to_jid, to_jid = self.from_jid,
stanza_id = self.stanza_id)
return stanza
def make_deny_response(self):
"""Create "deny" response for the "subscribe" / "subscribed" /
"unsubscribe" / "unsubscribed" presence stanza.
:return: new presence stanza.
:returntype: `Presence`
"""
if self.stanza_type not in ("subscribe", "subscribed",
"unsubscribe", "unsubscribed"):
raise ValueError("Results may only be generated for 'subscribe',"
"'subscribed','unsubscribe' or 'unsubscribed' presence")
stanza = Presence(stanza_type = DENY_RESPONSES[self.stanza_type],
from_jid = self.to_jid, to_jid = self.from_jid,
stanza_id = self.stanza_id)
return stanza
def make_error_response(self, cond):
"""Create error response for the any non-error presence stanza.
:Parameters:
- `cond`: error condition name, as defined in XMPP specification.
:Types:
- `cond`: `str`
:return: new presence stanza.
:returntype: `Presence`
"""
if self.stanza_type == "error":
raise ValueError("Errors may not be generated in response"
" to errors")
stanza = Presence(stanza_type = "error", from_jid = self.from_jid,
to_jid = self.to_jid, stanza_id = self.stanza_id,
status = self._status, show = self._show,
priority = self._priority, error_cond = cond)
if self._payload is None:
self.decode_payload()
for payload in self._payload:
stanza.add_payload(payload)
return stanza
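# Usage sketch (editor's addition; `JID` comes from pyxmpp2.jid and the
# address below is a placeholder):
#
#     presence = Presence(to_jid = JID("[email protected]"), show = "away",
#                         status = "out to lunch", priority = 10)
#     element = presence.as_xml()
#
#     # answering a subscription request received as `stanza`:
#     if stanza.stanza_type == "subscribe":
#         reply = stanza.make_accept_response()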
# vi: sts=4 et sw=4
|
apache-2.0
|
nikhilpanicker/SecureVault
|
googleplay-api/googleplay.py
|
2
|
11590
|
#!/usr/bin/python
import base64
import gzip
import pprint
import StringIO
import requests
from google.protobuf import descriptor
from google.protobuf.internal.containers import RepeatedCompositeFieldContainer
from google.protobuf import text_format
from google.protobuf.message import Message, DecodeError
import googleplay_pb2
import config
class LoginError(Exception):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
class RequestError(Exception):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
class GooglePlayAPI(object):
"""Google Play Unofficial API Class
    Usual API methods are login(), search(), details(), bulkDetails(),
download(), browse(), reviews() and list().
toStr() can be used to pretty print the result (protobuf object) of the
previous methods.
toDict() converts the result into a dict, for easier introspection."""
SERVICE = "androidmarket"
URL_LOGIN = "https://android.clients.google.com/auth" # "https://www.google.com/accounts/ClientLogin"
ACCOUNT_TYPE_GOOGLE = "GOOGLE"
ACCOUNT_TYPE_HOSTED = "HOSTED"
ACCOUNT_TYPE_HOSTED_OR_GOOGLE = "HOSTED_OR_GOOGLE"
authSubToken = None
def __init__(self, androidId=None, lang=None, debug=False): # you must use a device-associated androidId value
self.preFetch = {}
        if androidId is None:
            androidId = config.ANDROID_ID
        if lang is None:
            lang = config.LANG
self.androidId = androidId
self.lang = lang
self.debug = debug
def toDict(self, protoObj):
"""Converts the (protobuf) result from an API call into a dict, for
easier introspection."""
iterable = False
if isinstance(protoObj, RepeatedCompositeFieldContainer):
iterable = True
else:
protoObj = [protoObj]
retlist = []
for po in protoObj:
msg = dict()
for fielddesc, value in po.ListFields():
#print value, type(value), getattr(value, "__iter__", False)
if fielddesc.type == descriptor.FieldDescriptor.TYPE_GROUP or isinstance(value, RepeatedCompositeFieldContainer) or isinstance(value, Message):
msg[fielddesc.name] = self.toDict(value)
else:
msg[fielddesc.name] = value
retlist.append(msg)
if not iterable:
if len(retlist) > 0:
return retlist[0]
else:
return None
return retlist
def toStr(self, protoObj):
"""Used for pretty printing a result from the API."""
return text_format.MessageToString(protoObj)
def _try_register_preFetch(self, protoObj):
fields = [i.name for (i,_) in protoObj.ListFields()]
if ("preFetch" in fields):
for p in protoObj.preFetch:
self.preFetch[p.url] = p.response
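    # Editor's note: API responses may embed "preFetch" entries (url/response
    # pairs). Caching them here lets executeRequestApi2() answer a later GET
    # for one of those urls straight from self.preFetch, without another
    # network round-trip.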
def setAuthSubToken(self, authSubToken):
self.authSubToken = authSubToken
# put your auth token in config.py to avoid multiple login requests
if self.debug:
print "authSubToken: " + authSubToken
def login(self, email=None, password=None, authSubToken=None):
"""Login to your Google Account. You must provide either:
- an email and password
- a valid Google authSubToken"""
if (authSubToken is not None):
self.setAuthSubToken(authSubToken)
else:
if (email is None or password is None):
raise Exception("You should provide at least authSubToken or (email and password)")
params = {"Email": email,
"Passwd": password,
"service": self.SERVICE,
"accountType": self.ACCOUNT_TYPE_HOSTED_OR_GOOGLE,
"has_permission": "1",
"source": "android",
"androidId": self.androidId,
"app": "com.android.vending",
#"client_sig": self.client_sig,
"device_country": "fr",
"operatorCountry": "fr",
"lang": "fr",
"sdk_version": "16"}
headers = {
"Accept-Encoding": "",
}
response = requests.post(self.URL_LOGIN, data=params, headers=headers, verify=False)
data = response.text.split()
params = {}
for d in data:
if not "=" in d: continue
k, v = d.split("=")
params[k.strip().lower()] = v.strip()
if "auth" in params:
self.setAuthSubToken(params["auth"])
elif "error" in params:
raise LoginError("server says: " + params["error"])
else:
raise LoginError("Auth token not found.")
def executeRequestApi2(self, path, datapost=None, post_content_type="application/x-www-form-urlencoded; charset=UTF-8"):
if (datapost is None and path in self.preFetch):
data = self.preFetch[path]
else:
headers = { "Accept-Language": self.lang,
"Authorization": "GoogleLogin auth=%s" % self.authSubToken,
"X-DFE-Enabled-Experiments": "cl:billing.select_add_instrument_by_default",
"X-DFE-Unsupported-Experiments": "nocache:billing.use_charging_poller,market_emails,buyer_currency,prod_baseline,checkin.set_asset_paid_app_field,shekel_test,content_ratings,buyer_currency_in_app,nocache:encrypted_apk,recent_changes",
"X-DFE-Device-Id": self.androidId,
"X-DFE-Client-Id": "am-android-google",
#"X-DFE-Logging-Id": self.loggingId2, # Deprecated?
"User-Agent": "Android-Finsky/3.7.13 (api=3,versionCode=8013013,sdk=16,device=crespo,hardware=herring,product=soju)",
"X-DFE-SmallestScreenWidthDp": "320",
"X-DFE-Filter-Level": "3",
"Accept-Encoding": "",
"Host": "android.clients.google.com"}
if datapost is not None:
headers["Content-Type"] = post_content_type
url = "https://android.clients.google.com/fdfe/%s" % path
if datapost is not None:
response = requests.post(url, data=datapost, headers=headers, verify=False)
else:
response = requests.get(url, headers=headers, verify=False)
data = response.content
'''
data = StringIO.StringIO(data)
gzipper = gzip.GzipFile(fileobj=data)
data = gzipper.read()
'''
message = googleplay_pb2.ResponseWrapper.FromString(data)
self._try_register_preFetch(message)
# Debug
#print text_format.MessageToString(message)
return message
#####################################
# Google Play API Methods
#####################################
def search(self, query, nb_results=None, offset=None):
"""Search for apps."""
path = "search?c=3&q=%s" % requests.utils.quote(query) # TODO handle categories
if (nb_results is not None):
path += "&n=%d" % int(nb_results)
if (offset is not None):
path += "&o=%d" % int(offset)
message = self.executeRequestApi2(path)
return message.payload.searchResponse
def details(self, packageName):
"""Get app details from a package name.
packageName is the app unique ID (usually starting with 'com.')."""
path = "details?doc=%s" % requests.utils.quote(packageName)
message = self.executeRequestApi2(path)
return message.payload.detailsResponse
def bulkDetails(self, packageNames):
"""Get several apps details from a list of package names.
This is much more efficient than calling N times details() since it
requires only one request.
        packageNames is a list of app IDs (usually starting with 'com.')."""
path = "bulkDetails"
req = googleplay_pb2.BulkDetailsRequest()
req.docid.extend(packageNames)
data = req.SerializeToString()
message = self.executeRequestApi2(path, data, "application/x-protobuf")
return message.payload.bulkDetailsResponse
def browse(self, cat=None, ctr=None):
"""Browse categories.
cat (category ID) and ctr (subcategory ID) are used as filters."""
path = "browse?c=3"
        if cat is not None:
            path += "&cat=%s" % requests.utils.quote(cat)
        if ctr is not None:
            path += "&ctr=%s" % requests.utils.quote(ctr)
message = self.executeRequestApi2(path)
return message.payload.browseResponse
def list(self, cat, ctr=None, nb_results=None, offset=None):
"""List apps.
If ctr (subcategory ID) is None, returns a list of valid subcategories.
If ctr is provided, list apps within this subcategory."""
path = "list?c=3&cat=%s" % requests.utils.quote(cat)
        if ctr is not None:
            path += "&ctr=%s" % requests.utils.quote(ctr)
        # format numeric parameters as integers, matching search(); passing
        # an int to requests.utils.quote() would raise a TypeError
        if nb_results is not None:
            path += "&n=%d" % int(nb_results)
        if offset is not None:
            path += "&o=%d" % int(offset)
message = self.executeRequestApi2(path)
return message.payload.listResponse
def reviews(self, packageName, filterByDevice=False, sort=2, nb_results=None, offset=None):
"""Browse reviews.
packageName is the app unique ID.
If filterByDevice is True, return only reviews for your device."""
path = "rev?doc=%s&sort=%d" % (requests.utils.quote(packageName), sort)
if (nb_results is not None):
path += "&n=%d" % int(nb_results)
if (offset is not None):
path += "&o=%d" % int(offset)
if(filterByDevice):
path += "&dfil=1"
message = self.executeRequestApi2(path)
return message.payload.reviewResponse
def download(self, packageName, versionCode, offerType=1):
"""Download an app and return its raw data (APK file).
packageName is the app unique ID (usually starting with 'com.').
versionCode can be grabbed by using the details() method on the given
app."""
path = "purchase"
data = "ot=%d&doc=%s&vc=%d" % (offerType, packageName, versionCode)
message = self.executeRequestApi2(path, data)
url = message.payload.buyResponse.purchaseStatusResponse.appDeliveryData.downloadUrl
cookie = message.payload.buyResponse.purchaseStatusResponse.appDeliveryData.downloadAuthCookie[0]
cookies = {
str(cookie.name): str(cookie.value) # python-requests #459 fixes this
}
headers = {
"User-Agent" : "AndroidDownloadManager/4.1.1 (Linux; U; Android 4.1.1; Nexus S Build/JRO03E)",
"Accept-Encoding": "",
}
response = requests.get(url, headers=headers, cookies=cookies, verify=False)
return response.content
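# Usage sketch (editor's addition; the androidId, credentials and package
# name below are placeholders you must supply yourself):
#
#     api = GooglePlayAPI(androidId="0123456789abcdef")
#     api.login("[email protected]", "password")
#     response = api.search("firefox", nb_results=5)
#     print api.toStr(response)  # pretty-print the protobuf result
#     # versionCode is obtained from details() before downloading:
#     apk = api.download("org.mozilla.firefox", version_code)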
|
gpl-3.0
|
nthiep/global-ssh-server
|
lib/python2.7/site-packages/oauthlib/oauth2/rfc6749/endpoints/base.py
|
87
|
1729
|
# -*- coding: utf-8 -*-
"""
oauthlib.oauth2.rfc6749
~~~~~~~~~~~~~~~~~~~~~~~
This module is an implementation of various logic needed
for consuming and providing OAuth 2.0 RFC6749.
"""
from __future__ import absolute_import, unicode_literals
import functools
import logging
from ..errors import TemporarilyUnavailableError, ServerError
from ..errors import FatalClientError, OAuth2Error
log = logging.getLogger(__name__)
class BaseEndpoint(object):
def __init__(self):
self._available = True
self._catch_errors = False
@property
def available(self):
return self._available
@available.setter
def available(self, available):
self._available = available
@property
def catch_errors(self):
return self._catch_errors
@catch_errors.setter
def catch_errors(self, catch_errors):
self._catch_errors = catch_errors
def catch_errors_and_unavailability(f):
@functools.wraps(f)
def wrapper(endpoint, uri, *args, **kwargs):
if not endpoint.available:
e = TemporarilyUnavailableError()
log.info('Endpoint unavailable, ignoring request %s.' % uri)
return {}, e.json, 503
if endpoint.catch_errors:
try:
return f(endpoint, uri, *args, **kwargs)
except OAuth2Error:
raise
except FatalClientError:
raise
except Exception as e:
error = ServerError()
log.warning(
'Exception caught while processing request, %s.' % e)
return {}, error.json, 500
else:
return f(endpoint, uri, *args, **kwargs)
return wrapper
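# Usage sketch (editor's addition): the decorator is meant to wrap endpoint
# methods; `TokenEndpoint` below is hypothetical.
#
#     class TokenEndpoint(BaseEndpoint):
#
#         @catch_errors_and_unavailability
#         def create_token_response(self, uri, *args, **kwargs):
#             ...
#
# Setting endpoint.available = False then short-circuits calls with a 503,
# and endpoint.catch_errors = True turns unexpected exceptions into a 500
# ServerError payload instead of propagating them.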
|
agpl-3.0
|
gs0510/coala-bears
|
bears/coffee_script/CoffeeLintBear.py
|
5
|
15297
|
import json
from coalib.bearlib import deprecate_settings
from coalib.bearlib.abstractions.Linter import linter
from dependency_management.requirements.NpmRequirement import NpmRequirement
from coalib.results.RESULT_SEVERITY import RESULT_SEVERITY
from coalib.results.Result import Result
from coala_utils.param_conversion import negate
@linter(executable='coffeelint',
use_stdin=True)
class CoffeeLintBear:
"""
Check CoffeeScript code for a clean and consistent style.
For more information about coffeelint, visit <http://www.coffeelint.org/>.
"""
LANGUAGES = {'CoffeeScript'}
REQUIREMENTS = {NpmRequirement('coffeelint', '1')}
AUTHORS = {'The coala developers'}
AUTHORS_EMAILS = {'[email protected]'}
LICENSE = 'AGPL-3.0'
CAN_DETECT = {'Syntax', 'Formatting', 'Smell', 'Complexity', 'Duplication'}
severity_map = {'warn': RESULT_SEVERITY.NORMAL,
'error': RESULT_SEVERITY.MAJOR,
'ignore': RESULT_SEVERITY.INFO}
@staticmethod
def create_arguments(filename, file, config_file):
return '--reporter=raw', '--stdin', '-f', config_file
@staticmethod
@deprecate_settings(indent_size='tab_width',
allow_increment=(
'no_decr_or_incrementation_operators', negate),
allow_no_parameters=(
'no_empty_parameter_list', negate),
allow_empty_functions=('no_empty_functions', negate),
allow_this_statements=('no_this', negate),
allow_implicit_parentheses=(
'no_implicit_parentheses', negate),
allow_interpolation_in_single_quotes=(
'no_interpolation_in_single_quotes', negate),
allow_stand_alone_at_sign=(
'no_stand_alone_at_sign', negate),
allow_throwing_strings=(
'disable_throwing_strings', negate),
allow_unnecessary_double_quotes=(
'no_unnecessary_double_quotes', negate),
allow_bitwise_operators=(
'use_english_operator', negate),
force_braces='no_implicit_braces')
def generate_config(filename, file,
max_line_length: int=79,
max_line_length_affect_comments: bool=True,
space_before_and_after_arrow: bool=True,
check_braces_spacing: bool=False,
braces_spacing_width: int=1,
spacing_in_empty_braces: int=0,
class_naming_camelCase: bool=True,
spaces_before_and_after_colon: bool=False,
spaces_before_colon: int=0,
spaces_after_colon: int=1,
enforce_newline_at_EOF: bool=True,
use_spaces: bool=True,
indent_size: int=2,
number_of_newlines_after_classes: int=2,
prohibit_embedding_javascript_snippet: bool=True,
force_braces: bool=False,
allow_implicit_parentheses: bool=True,
allow_interpolation_in_single_quotes: bool=True,
allow_stand_alone_at_sign: bool=False,
allow_throwing_strings: bool=False,
allow_trailing_semicolons: bool=False,
allow_trailing_whitespaces: bool=False,
allow_unnecessary_double_quotes: bool=True,
allow_bitwise_operators: bool=True,
spaces_around_operators: bool=True,
space_after_comma: bool=True,
cyclomatic_complexity: int=0,
prevent_duplicate_keys: bool=True,
consistent_line_endings_style: str='',
allow_this_statements: bool=True,
allow_increment: bool=True,
allow_no_parameters: bool=True,
allow_empty_functions: bool=False,
enforce_parentheses_on_non_empty_constructors:
bool=True
):
"""
:param max_line_length:
Maximum number of characters per line.
        :param max_line_length_affect_comments:
            Determines whether ``max_line_length`` should also affect
            comments.
:param space_before_and_after_arrow:
Determines if spaces should be used before and after the arrow.
:param check_braces_spacing:
Checks if proper spacing is used inside curly braces.
:param braces_spacing_width:
Determines the number of blank spaces after the opening ``{`` and
before the closing brace ``}`` given that there is something within
the braces.
:param spacing_in_empty_braces:
Determines the number of blank spaces after the opening ``{`` and
before the closing brace ``}`` given empty content.
        :param class_naming_camelCase:
            Checks whether class names are written in camel case.
:param spaces_before_and_after_colon:
Checks the number of spaces before and after colon.
:param spaces_before_colon:
Determines the number of blank spaces before colon when
``spaces_before_and_after_colon == True``.
        :param spaces_after_colon:
            Determines the number of spaces after a colon when
            ``spaces_before_and_after_colon == True``.
:param enforce_newline_at_EOF:
Checks if the file ends with a single newline.
:param use_spaces:
Forbids tabs in indentation and applies two spaces for this
purpose.
:param indent_size:
Number of spaces per indentation level.
:param number_of_newlines_after_classes:
Determines the number of newlines that separate the class
definition and the rest of the code.
:param prohibit_embedding_javascript_snippet:
            Prevents some JavaScript elements, such as ``eval``, from
            affecting CoffeeScript.
:param force_braces:
Prohibits implicit braces when declaring object literals.
Example: If ``force_braces = True`` then
```
1:2, 3:4
```
is prohibited, whereas
```
{1:2, 3:4}
```
is accepted.
:param allow_implicit_parentheses:
Allows implicit parentheses.
:param allow_interpolation_in_single_quotes:
Allows string interpolation in a single quoted string.
Example: If ``allow_interpolation_in_single_quotes = False`` then
```
f = '#{bar}'
```
is prohibited, whereas
```
f = "#{bar}"
```
is correct.
        :param allow_stand_alone_at_sign:
            Allows the use of a stand-alone ``@``.
Example: If ``allow_stand_alone_at_sign = False``
```
@ notok
not(@).ok
@::
```
are prohibited, whereas
```
@alright
@(fn)
@ok()
@[ok]
@ok()
```
are accepted.
:param allow_throwing_strings:
Allows throwing string literals or interpolation.
Example: If ``allow_throwing_strings = False``
```
throw 'my error'
throw "#{1234}"
```
will not be permitted.
:param allow_trailing_semicolons:
Prohibits trailing semicolons when ``False`` since they are
not useful. The semicolon is meaningful only if there's another
instruction on the same line.
Example: If ``allow_trailing_semicolon = False``
```
x = '1234'; console.log(x)
```
Here the semicolon is meaningful.
```
alert('end of line');
```
This semicolon is redundant.
        :param allow_trailing_whitespaces:
            Checks whether to allow trailing whitespace in the code.
:param allow_unnecessary_double_quotes:
Allows enclosing strings in double quotes.
        :param allow_bitwise_operators:
            Allows ``&&``, ``||``, ``==`` and ``!=`` instead of requiring
            the English operators ``and``, ``or``, ``is`` and ``isnt``.
:param spaces_around_operators:
Enforces that operators have spaces around them.
:param space_after_comma:
Checks if there is a blank space after commas.
:param cyclomatic_complexity:
Maximum cyclomatic complexity of the file.
:param prevent_duplicate_keys:
Prevents defining duplicate keys in object literals and classes.
:param enforce_parentheses_on_non_empty_constructors:
Requires constructors with parameters to include parentheses.
Example:
```
class Foo
# Warn about missing parentheses here
a = new Foo
b = new bar.foo.Foo
# The parentheses make it clear no parameters are intended
c = new Foo()
d = new bar.foo.Foo()
e = new Foo 1, 2
f = new bar.foo.Foo 1, 2
```
        :param consistent_line_endings_style:
            Sets the ``line_endings`` option; its value is either ``unix``
            or ``windows``.
:param allow_this_statements:
Allows the use of ``this``. ``@`` should be used if ``False``.
:param allow_increment:
Allows the use of increment and decrement arithmetic operators.
:param allow_no_parameters:
Allows empty parameter lists in function definitions.
:param allow_empty_functions:
Allows declaring empty functions.
"""
coffee_configs = {'max_line_length':
{'value': max_line_length,
'level': 'error',
'limitComments':
max_line_length_affect_comments}}
coffee_configs['arrow_spacing'] = (
{'level': 'error' if space_before_and_after_arrow else 'ignore'})
if check_braces_spacing:
coffee_configs['braces_spacing'] = (
{'level': 'error',
'spaces': braces_spacing_width,
'empty_object_spaces': spacing_in_empty_braces})
if class_naming_camelCase:
coffee_configs['camel_case_classes'] = {'level': 'error'}
if spaces_before_and_after_colon:
coffee_configs['colon_assignment_spacing'] = (
{'level': 'error',
'spacing': {'left': spaces_before_colon,
'right': spaces_after_colon}})
coffee_configs['eol_last'] = (
{'level': 'error' if enforce_newline_at_EOF else 'ignore'})
coffee_configs['newlines_after_classes'] = (
{'value': number_of_newlines_after_classes,
'level': 'error'})
coffee_configs['no_backticks'] = (
{'level': 'error'
if prohibit_embedding_javascript_snippet else 'ignore'})
if force_braces:
coffee_configs['no_implicit_braces'] = (
{'level': 'error', 'strict': True})
if not allow_implicit_parentheses:
coffee_configs['no_implicit_parens'] = (
{'strict': True, 'level': 'error'})
coffee_configs['no_interpolation_in_single_quotes'] = (
{'level': 'error'
if not allow_interpolation_in_single_quotes else 'ignore'})
if not allow_stand_alone_at_sign:
coffee_configs['no_stand_alone_at'] = {'level': 'error'}
if use_spaces:
coffee_configs['no_tabs'] = {'level': 'error'}
coffee_configs['indentation'] = (
{'value': indent_size, 'level': 'error'})
coffee_configs['no_throwing_strings'] = (
{'level': 'error' if not allow_throwing_strings else 'ignore'})
coffee_configs['no_trailing_semicolons'] = (
{'level': 'error' if not allow_trailing_semicolons else 'ignore'})
if not allow_trailing_whitespaces:
coffee_configs['no_trailing_whitespace'] = (
{'level': 'error',
'allowed_in_comments': True,
'allowed_in_empty_lines': True})
if not allow_unnecessary_double_quotes:
coffee_configs['no_unnecessary_double_quotes'] = {'level': 'error'}
if not allow_bitwise_operators:
coffee_configs['prefer_english_operator'] = (
{'level': 'error', 'doubleNotLevel': 'ignore'})
if spaces_around_operators:
coffee_configs['space_operators'] = {'level': 'error'}
if space_after_comma:
coffee_configs['spacing_after_comma'] = {'level': 'warn'}
coffee_configs['cyclomatic_complexity'] = (
{'value': cyclomatic_complexity,
'level': ('error' if cyclomatic_complexity else 'ignore')})
coffee_configs['duplicate_key'] = (
{'level': 'error' if prevent_duplicate_keys else 'ignore'})
if enforce_parentheses_on_non_empty_constructors:
coffee_configs['non_empty_constructor_needs_parens'] = (
{'level': 'error'})
if consistent_line_endings_style:
coffee_configs['line_endings'] = (
{'level': 'error', 'value': consistent_line_endings_style})
if not allow_this_statements:
coffee_configs['no_this'] = {'level': 'error'}
if not allow_increment:
coffee_configs['no_plusplus'] = {'level': 'error'}
coffee_configs['no_empty_param_list'] = (
{'level': 'error' if not allow_no_parameters else 'ignore'})
coffee_configs['no_empty_functions'] = (
{'level': 'error' if not allow_empty_functions else 'ignore'})
return json.dumps(coffee_configs)
def process_output(self, output, filename, file):
output = json.loads(output)
assert len(output) == 1, (
'More than 1 file parsed, something went wrong')
for item in tuple(output.values())[0]:
yield Result.from_values(
origin='{} ({})'.format(self.name, item['rule']),
message=item['message'],
file=filename,
line=item.get('lineNumber', None),
end_line=item.get('lineNumberEnd', None),
severity=self.severity_map[item['level']],
additional_info=item.get('description',
item.get('context', '')))
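# Usage sketch (editor's addition): generate_config() is a staticmethod, so
# the coffeelint JSON configuration it builds can be inspected directly; the
# filename and settings below are illustrative.
#
#     config_json = CoffeeLintBear.generate_config(
#         'example.coffee', [], max_line_length=100, use_spaces=True)
#     assert json.loads(config_json)['max_line_length']['value'] == 100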
|
agpl-3.0
|
anryko/ansible
|
lib/ansible/modules/cloud/google/gcp_compute_https_health_check.py
|
13
|
15129
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2017 Google
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# ----------------------------------------------------------------------------
#
# *** AUTO GENERATED CODE *** AUTO GENERATED CODE ***
#
# ----------------------------------------------------------------------------
#
# This file is automatically generated by Magic Modules and manual
# changes will be clobbered when the file is regenerated.
#
# Please read more about how to change this file at
# https://www.github.com/GoogleCloudPlatform/magic-modules
#
# ----------------------------------------------------------------------------
from __future__ import absolute_import, division, print_function
__metaclass__ = type
################################################################################
# Documentation
################################################################################
ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ["preview"], 'supported_by': 'community'}
DOCUMENTATION = '''
---
module: gcp_compute_https_health_check
description:
- An HttpsHealthCheck resource. This resource defines a template for how individual
VMs should be checked for health, via HTTPS.
short_description: Creates a GCP HttpsHealthCheck
version_added: '2.6'
author: Google Inc. (@googlecloudplatform)
requirements:
- python >= 2.6
- requests >= 2.18.4
- google-auth >= 1.3.0
options:
state:
description:
- Whether the given object should exist in GCP
choices:
- present
- absent
default: present
type: str
check_interval_sec:
description:
- How often (in seconds) to send a health check. The default value is 5 seconds.
required: false
type: int
description:
description:
- An optional description of this resource. Provide this property when you create
the resource.
required: false
type: str
healthy_threshold:
description:
- A so-far unhealthy instance will be marked healthy after this many consecutive
successes. The default value is 2.
required: false
type: int
host:
description:
- The value of the host header in the HTTPS health check request. If left empty
(default value), the public IP on behalf of which this health check is performed
will be used.
required: false
type: str
name:
description:
- Name of the resource. Provided by the client when the resource is created. The
name must be 1-63 characters long, and comply with RFC1035. Specifically, the
name must be 1-63 characters long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?`
which means the first character must be a lowercase letter, and all following
characters must be a dash, lowercase letter, or digit, except the last character,
which cannot be a dash.
required: true
type: str
port:
description:
- The TCP port number for the HTTPS health check request.
    - The default value is 443.
required: false
type: int
request_path:
description:
- The request path of the HTTPS health check request.
- The default value is /.
required: false
type: str
timeout_sec:
description:
- How long (in seconds) to wait before claiming failure.
- The default value is 5 seconds. It is invalid for timeoutSec to have greater
value than checkIntervalSec.
required: false
type: int
aliases:
- timeout_seconds
unhealthy_threshold:
description:
- A so-far healthy instance will be marked unhealthy after this many consecutive
failures. The default value is 2.
required: false
type: int
project:
description:
- The Google Cloud Platform project to use.
type: str
auth_kind:
description:
- The type of credential used.
type: str
required: true
choices:
- application
- machineaccount
- serviceaccount
service_account_contents:
description:
- The contents of a Service Account JSON file, either in a dictionary or as a
JSON string that represents it.
type: jsonarg
service_account_file:
description:
- The path of a Service Account JSON file if serviceaccount is selected as type.
type: path
service_account_email:
description:
- An optional service account email address if machineaccount is selected and
the user does not wish to use the default email.
type: str
scopes:
description:
- Array of scopes to be used
type: list
env_type:
description:
- Specifies which Ansible environment you're running this module within.
- This should not be set unless you know what you're doing.
- This only alters the User Agent string for any API requests.
type: str
notes:
- 'API Reference: U(https://cloud.google.com/compute/docs/reference/v1/httpsHealthChecks)'
- 'Adding Health Checks: U(https://cloud.google.com/compute/docs/load-balancing/health-checks#legacy_health_checks)'
- For authentication, you can set service_account_file using the C(GCP_SERVICE_ACCOUNT_FILE)
  env variable.
- For authentication, you can set service_account_contents using the C(GCP_SERVICE_ACCOUNT_CONTENTS)
  env variable.
- For authentication, you can set service_account_email using the C(GCP_SERVICE_ACCOUNT_EMAIL)
env variable.
- For authentication, you can set auth_kind using the C(GCP_AUTH_KIND) env variable.
- For authentication, you can set scopes using the C(GCP_SCOPES) env variable.
- Environment variable values will only be used if the playbook values are not set.
- The I(service_account_email) and I(service_account_file) options are mutually exclusive.
'''
EXAMPLES = '''
- name: create a HTTPS health check
gcp_compute_https_health_check:
name: test_object
healthy_threshold: 10
port: 8080
timeout_sec: 2
unhealthy_threshold: 5
project: test_project
auth_kind: serviceaccount
service_account_file: "/tmp/auth.pem"
state: present
'''
RETURN = '''
checkIntervalSec:
description:
- How often (in seconds) to send a health check. The default value is 5 seconds.
returned: success
type: int
creationTimestamp:
description:
- Creation timestamp in RFC3339 text format.
returned: success
type: str
description:
description:
- An optional description of this resource. Provide this property when you create
the resource.
returned: success
type: str
healthyThreshold:
description:
- A so-far unhealthy instance will be marked healthy after this many consecutive
successes. The default value is 2.
returned: success
type: int
host:
description:
- The value of the host header in the HTTPS health check request. If left empty
(default value), the public IP on behalf of which this health check is performed
will be used.
returned: success
type: str
id:
description:
- The unique identifier for the resource. This identifier is defined by the server.
returned: success
type: int
name:
description:
- Name of the resource. Provided by the client when the resource is created. The
name must be 1-63 characters long, and comply with RFC1035. Specifically, the
name must be 1-63 characters long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?`
which means the first character must be a lowercase letter, and all following
characters must be a dash, lowercase letter, or digit, except the last character,
which cannot be a dash.
returned: success
type: str
port:
description:
- The TCP port number for the HTTPS health check request.
  - The default value is 443.
returned: success
type: int
requestPath:
description:
- The request path of the HTTPS health check request.
- The default value is /.
returned: success
type: str
timeoutSec:
description:
- How long (in seconds) to wait before claiming failure.
- The default value is 5 seconds. It is invalid for timeoutSec to have greater value
than checkIntervalSec.
returned: success
type: int
unhealthyThreshold:
description:
- A so-far healthy instance will be marked unhealthy after this many consecutive
failures. The default value is 2.
returned: success
type: int
'''
################################################################################
# Imports
################################################################################
from ansible.module_utils.gcp_utils import navigate_hash, GcpSession, GcpModule, GcpRequest, replace_resource_dict
import json
import time
################################################################################
# Main
################################################################################
def main():
"""Main function"""
module = GcpModule(
argument_spec=dict(
state=dict(default='present', choices=['present', 'absent'], type='str'),
check_interval_sec=dict(type='int'),
description=dict(type='str'),
healthy_threshold=dict(type='int'),
host=dict(type='str'),
name=dict(required=True, type='str'),
port=dict(type='int'),
request_path=dict(type='str'),
timeout_sec=dict(type='int', aliases=['timeout_seconds']),
unhealthy_threshold=dict(type='int'),
)
)
if not module.params['scopes']:
module.params['scopes'] = ['https://www.googleapis.com/auth/compute']
state = module.params['state']
kind = 'compute#httpsHealthCheck'
fetch = fetch_resource(module, self_link(module), kind)
changed = False
if fetch:
if state == 'present':
if is_different(module, fetch):
update(module, self_link(module), kind)
fetch = fetch_resource(module, self_link(module), kind)
changed = True
else:
delete(module, self_link(module), kind)
fetch = {}
changed = True
else:
if state == 'present':
fetch = create(module, collection(module), kind)
changed = True
else:
fetch = {}
fetch.update({'changed': changed})
module.exit_json(**fetch)
def create(module, link, kind):
auth = GcpSession(module, 'compute')
return wait_for_operation(module, auth.post(link, resource_to_request(module)))
def update(module, link, kind):
auth = GcpSession(module, 'compute')
return wait_for_operation(module, auth.put(link, resource_to_request(module)))
def delete(module, link, kind):
auth = GcpSession(module, 'compute')
return wait_for_operation(module, auth.delete(link))
def resource_to_request(module):
request = {
u'kind': 'compute#httpsHealthCheck',
u'checkIntervalSec': module.params.get('check_interval_sec'),
u'description': module.params.get('description'),
u'healthyThreshold': module.params.get('healthy_threshold'),
u'host': module.params.get('host'),
u'name': module.params.get('name'),
u'port': module.params.get('port'),
u'requestPath': module.params.get('request_path'),
u'timeoutSec': module.params.get('timeout_sec'),
u'unhealthyThreshold': module.params.get('unhealthy_threshold'),
}
return_vals = {}
for k, v in request.items():
if v or v is False:
return_vals[k] = v
return return_vals
def fetch_resource(module, link, kind, allow_not_found=True):
auth = GcpSession(module, 'compute')
return return_if_object(module, auth.get(link), kind, allow_not_found)
def self_link(module):
return "https://www.googleapis.com/compute/v1/projects/{project}/global/httpsHealthChecks/{name}".format(**module.params)
def collection(module):
return "https://www.googleapis.com/compute/v1/projects/{project}/global/httpsHealthChecks".format(**module.params)
def return_if_object(module, response, kind, allow_not_found=False):
# If not found, return nothing.
if allow_not_found and response.status_code == 404:
return None
# If no content, return nothing.
if response.status_code == 204:
return None
try:
module.raise_for_status(response)
result = response.json()
except getattr(json.decoder, 'JSONDecodeError', ValueError):
module.fail_json(msg="Invalid JSON response with error: %s" % response.text)
if navigate_hash(result, ['error', 'errors']):
module.fail_json(msg=navigate_hash(result, ['error', 'errors']))
return result
def is_different(module, response):
request = resource_to_request(module)
response = response_to_hash(module, response)
# Remove all output-only from response.
response_vals = {}
for k, v in response.items():
if k in request:
response_vals[k] = v
request_vals = {}
for k, v in request.items():
if k in response:
request_vals[k] = v
return GcpRequest(request_vals) != GcpRequest(response_vals)
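# Editor's note: is_different() compares only keys present on both sides, so
# output-only fields the server adds (creationTimestamp, id, ...) never
# trigger a spurious update; e.g. a response of {'port': 8080, 'id': 42}
# matches a request of {'port': 8080}.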
# Remove unnecessary properties from the response.
# This is for doing comparisons with Ansible's current parameters.
def response_to_hash(module, response):
return {
u'checkIntervalSec': response.get(u'checkIntervalSec'),
u'creationTimestamp': response.get(u'creationTimestamp'),
u'description': response.get(u'description'),
u'healthyThreshold': response.get(u'healthyThreshold'),
u'host': response.get(u'host'),
u'id': response.get(u'id'),
u'name': module.params.get('name'),
u'port': response.get(u'port'),
u'requestPath': response.get(u'requestPath'),
u'timeoutSec': response.get(u'timeoutSec'),
u'unhealthyThreshold': response.get(u'unhealthyThreshold'),
}
def async_op_url(module, extra_data=None):
if extra_data is None:
extra_data = {}
url = "https://www.googleapis.com/compute/v1/projects/{project}/global/operations/{op_id}"
combined = extra_data.copy()
combined.update(module.params)
return url.format(**combined)
def wait_for_operation(module, response):
op_result = return_if_object(module, response, 'compute#operation')
if op_result is None:
return {}
status = navigate_hash(op_result, ['status'])
wait_done = wait_for_completion(status, op_result, module)
return fetch_resource(module, navigate_hash(wait_done, ['targetLink']), 'compute#httpsHealthCheck')
def wait_for_completion(status, op_result, module):
op_id = navigate_hash(op_result, ['name'])
op_uri = async_op_url(module, {'op_id': op_id})
while status != 'DONE':
raise_if_errors(op_result, ['error', 'errors'], module)
time.sleep(1.0)
op_result = fetch_resource(module, op_uri, 'compute#operation', False)
status = navigate_hash(op_result, ['status'])
return op_result
def raise_if_errors(response, err_path, module):
errors = navigate_hash(response, err_path)
if errors is not None:
module.fail_json(msg=errors)
if __name__ == '__main__':
main()
|
gpl-3.0
|
brijeshkesariya/odoo
|
addons/edi/models/res_currency.py
|
437
|
2892
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Business Applications
# Copyright (c) 2011-2012 OpenERP S.A. <http://openerp.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import osv
from edi import EDIMixin
from openerp import SUPERUSER_ID
RES_CURRENCY_EDI_STRUCT = {
#custom: 'code'
'symbol': True,
'rate': True,
}
class res_currency(osv.osv, EDIMixin):
_inherit = "res.currency"
def edi_export(self, cr, uid, records, edi_struct=None, context=None):
edi_struct = dict(edi_struct or RES_CURRENCY_EDI_STRUCT)
edi_doc_list = []
for currency in records:
# Get EDI doc based on struct. The result will also contain all metadata fields and attachments.
edi_doc = super(res_currency,self).edi_export(cr, uid, [currency], edi_struct, context)[0]
edi_doc.update(code=currency.name)
edi_doc_list.append(edi_doc)
return edi_doc_list
def edi_import(self, cr, uid, edi_document, context=None):
self._edi_requires_attributes(('code','symbol'), edi_document)
external_id = edi_document['__id']
existing_currency = self._edi_get_object_by_external_id(cr, uid, external_id, 'res_currency', context=context)
if existing_currency:
return existing_currency.id
# find with unique ISO code
existing_ids = self.search(cr, uid, [('name','=',edi_document['code'])])
if existing_ids:
return existing_ids[0]
# nothing found, create a new one
currency_id = self.create(cr, SUPERUSER_ID, {'name': edi_document['code'],
'symbol': edi_document['symbol']}, context=context)
rate = edi_document.pop('rate')
if rate:
self.pool.get('res.currency.rate').create(cr, SUPERUSER_ID, {'currency_id': currency_id,
'rate': rate}, context=context)
return currency_id
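# Usage sketch (editor's addition; cr/uid come from the caller's OpenERP
# environment and currency_id is a hypothetical database id):
#
#     currency_obj = pool.get('res.currency')
#     currency = currency_obj.browse(cr, uid, currency_id)
#     edi_docs = currency_obj.edi_export(cr, uid, [currency])
#     # each dict in edi_docs carries 'code', 'symbol' and 'rate'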
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
ralstonJ/Angelhack-Rumble
|
tweepy-master/tweepy/cache.py
|
65
|
12740
|
# Tweepy
# Copyright 2009-2010 Joshua Roesslein
# See LICENSE for details.
import time
import datetime
import threading
import os
try:
import cPickle as pickle
except ImportError:
import pickle
try:
import hashlib
except ImportError:
# python 2.4
import md5 as hashlib
try:
import fcntl
except ImportError:
# Probably on a windows system
# TODO: use win32file
pass
class Cache(object):
"""Cache interface"""
def __init__(self, timeout=60):
"""Initialize the cache
timeout: number of seconds to keep a cached entry
"""
self.timeout = timeout
def store(self, key, value):
"""Add new record to cache
key: entry key
value: data of entry
"""
raise NotImplementedError
def get(self, key, timeout=None):
"""Get cached entry if exists and not expired
key: which entry to get
timeout: override timeout with this value [optional]
"""
raise NotImplementedError
def count(self):
"""Get count of entries currently stored in cache"""
raise NotImplementedError
def cleanup(self):
"""Delete any expired entries in cache."""
raise NotImplementedError
def flush(self):
"""Delete all cached entries"""
raise NotImplementedError
class MemoryCache(Cache):
"""In-memory cache"""
def __init__(self, timeout=60):
Cache.__init__(self, timeout)
self._entries = {}
self.lock = threading.Lock()
def __getstate__(self):
# pickle
return {'entries': self._entries, 'timeout': self.timeout}
def __setstate__(self, state):
# unpickle
self.lock = threading.Lock()
self._entries = state['entries']
self.timeout = state['timeout']
def _is_expired(self, entry, timeout):
return timeout > 0 and (time.time() - entry[0]) >= timeout
def store(self, key, value):
self.lock.acquire()
self._entries[key] = (time.time(), value)
self.lock.release()
def get(self, key, timeout=None):
self.lock.acquire()
try:
# check to see if we have this key
entry = self._entries.get(key)
if not entry:
# no hit, return nothing
return None
# use provided timeout in arguments if provided
# otherwise use the one provided during init.
if timeout is None:
timeout = self.timeout
# make sure entry is not expired
if self._is_expired(entry, timeout):
# entry expired, delete and return nothing
del self._entries[key]
return None
# entry found and not expired, return it
return entry[1]
finally:
self.lock.release()
def count(self):
return len(self._entries)
def cleanup(self):
self.lock.acquire()
try:
for k, v in self._entries.items():
if self._is_expired(v, self.timeout):
del self._entries[k]
finally:
self.lock.release()
def flush(self):
self.lock.acquire()
self._entries.clear()
self.lock.release()
class FileCache(Cache):
"""File-based cache"""
# locks used to make cache thread-safe
cache_locks = {}
def __init__(self, cache_dir, timeout=60):
Cache.__init__(self, timeout)
if os.path.exists(cache_dir) is False:
os.mkdir(cache_dir)
self.cache_dir = cache_dir
if cache_dir in FileCache.cache_locks:
self.lock = FileCache.cache_locks[cache_dir]
else:
self.lock = threading.Lock()
FileCache.cache_locks[cache_dir] = self.lock
if os.name == 'posix':
self._lock_file = self._lock_file_posix
self._unlock_file = self._unlock_file_posix
elif os.name == 'nt':
self._lock_file = self._lock_file_win32
self._unlock_file = self._unlock_file_win32
else:
print 'Warning! FileCache locking not supported on this system!'
self._lock_file = self._lock_file_dummy
self._unlock_file = self._unlock_file_dummy
def _get_path(self, key):
md5 = hashlib.md5()
md5.update(key)
return os.path.join(self.cache_dir, md5.hexdigest())
def _lock_file_dummy(self, path, exclusive=True):
return None
def _unlock_file_dummy(self, lock):
return
def _lock_file_posix(self, path, exclusive=True):
lock_path = path + '.lock'
if exclusive is True:
f_lock = open(lock_path, 'w')
fcntl.lockf(f_lock, fcntl.LOCK_EX)
else:
f_lock = open(lock_path, 'r')
fcntl.lockf(f_lock, fcntl.LOCK_SH)
if os.path.exists(lock_path) is False:
f_lock.close()
return None
return f_lock
def _unlock_file_posix(self, lock):
lock.close()
def _lock_file_win32(self, path, exclusive=True):
# TODO: implement
return None
def _unlock_file_win32(self, lock):
# TODO: implement
return
def _delete_file(self, path):
os.remove(path)
if os.path.exists(path + '.lock'):
os.remove(path + '.lock')
def store(self, key, value):
path = self._get_path(key)
self.lock.acquire()
try:
# acquire lock and open file
f_lock = self._lock_file(path)
datafile = open(path, 'wb')
# write data
pickle.dump((time.time(), value), datafile)
# close and unlock file
datafile.close()
self._unlock_file(f_lock)
finally:
self.lock.release()
def get(self, key, timeout=None):
return self._get(self._get_path(key), timeout)
def _get(self, path, timeout):
if os.path.exists(path) is False:
# no record
return None
self.lock.acquire()
try:
# acquire lock and open
f_lock = self._lock_file(path, False)
datafile = open(path, 'rb')
# read pickled object
created_time, value = pickle.load(datafile)
datafile.close()
# check if value is expired
if timeout is None:
timeout = self.timeout
if timeout > 0 and (time.time() - created_time) >= timeout:
# expired! delete from cache
value = None
self._delete_file(path)
# unlock and return result
self._unlock_file(f_lock)
return value
finally:
self.lock.release()
def count(self):
c = 0
for entry in os.listdir(self.cache_dir):
if entry.endswith('.lock'):
continue
c += 1
return c
def cleanup(self):
for entry in os.listdir(self.cache_dir):
if entry.endswith('.lock'):
continue
self._get(os.path.join(self.cache_dir, entry), None)
def flush(self):
for entry in os.listdir(self.cache_dir):
if entry.endswith('.lock'):
continue
self._delete_file(os.path.join(self.cache_dir, entry))
class MemCacheCache(Cache):
"""Cache interface"""
def __init__(self, client, timeout=60):
"""Initialize the cache
client: The memcache client
timeout: number of seconds to keep a cached entry
"""
self.client = client
self.timeout = timeout
def store(self, key, value):
"""Add new record to cache
key: entry key
value: data of entry
"""
self.client.set(key, value, time=self.timeout)
def get(self, key, timeout=None):
"""Get cached entry if exists and not expired
key: which entry to get
        timeout: ignored by this backend; the memcached server handles expiry
"""
return self.client.get(key)
    def count(self):
        """Get count of entries currently stored in cache (not supported
        by this backend)"""
        raise NotImplementedError
    def cleanup(self):
        """Delete any expired entries in cache (memcached expires entries
        on its own)"""
        raise NotImplementedError
    def flush(self):
        """Delete all cached entries (not supported by this backend)"""
        raise NotImplementedError
class RedisCache(Cache):
'''Cache running in a redis server'''
def __init__(self, client, timeout=60, keys_container = 'tweepy:keys', pre_identifier = 'tweepy:'):
Cache.__init__(self, timeout)
self.client = client
self.keys_container = keys_container
self.pre_identifier = pre_identifier
def _is_expired(self, entry, timeout):
# Returns true if the entry has expired
return timeout > 0 and (time.time() - entry[0]) >= timeout
def store(self, key, value):
'''Store the key, value pair in our redis server'''
# Prepend tweepy to our key, this makes it easier to identify tweepy keys in our redis server
key = self.pre_identifier + key
# Get a pipe (to execute several redis commands in one step)
pipe = self.client.pipeline()
# Set our values in a redis hash (similar to python dict)
pipe.set(key, pickle.dumps((time.time(), value)))
# Set the expiration
pipe.expire(key, self.timeout)
# Add the key to a set containing all the keys
pipe.sadd(self.keys_container, key)
# Execute the instructions in the redis server
pipe.execute()
def get(self, key, timeout=None):
'''Given a key, returns an element from the redis table'''
key = self.pre_identifier + key
# Check to see if we have this key
unpickled_entry = self.client.get(key)
if not unpickled_entry:
# No hit, return nothing
return None
entry = pickle.loads(unpickled_entry)
# Use provided timeout in arguments if provided
# otherwise use the one provided during init.
if timeout is None:
timeout = self.timeout
# Make sure entry is not expired
if self._is_expired(entry, timeout):
# entry expired, delete and return nothing
self.delete_entry(key)
return None
# entry found and not expired, return it
return entry[1]
def count(self):
        '''Note: This is not very efficient, since it retrieves all the keys
        from the redis server just to know how many keys we have'''
return len(self.client.smembers(self.keys_container))
def delete_entry(self, key):
'''Delete an object from the redis table'''
pipe = self.client.pipeline()
pipe.srem(self.keys_container, key)
pipe.delete(key)
pipe.execute()
def cleanup(self):
'''Cleanup all the expired keys'''
keys = self.client.smembers(self.keys_container)
for key in keys:
entry = self.client.get(key)
if entry:
entry = pickle.loads(entry)
if self._is_expired(entry, self.timeout):
self.delete_entry(key)
def flush(self):
'''Delete all entries from the cache'''
keys = self.client.smembers(self.keys_container)
for key in keys:
self.delete_entry(key)
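# Design note: redis expires keys on its own via EXPIRE, but the
# keys_container set is kept as explicit bookkeeping so that count(),
# cleanup() and flush() can enumerate tweepy's keys without a server-wide
# KEYS scan. One subtlety: set members whose values redis has already
# expired linger (and inflate count()) until flush() is called, because
# cleanup() only reaps entries that still resolve to a stored value.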
class MongodbCache(Cache):
"""A simple pickle-based MongoDB cache sytem."""
def __init__(self, db, timeout=3600, collection='tweepy_cache'):
"""Should receive a "database" cursor from pymongo."""
Cache.__init__(self, timeout)
self.timeout = timeout
self.col = db[collection]
self.col.create_index('created', expireAfterSeconds=timeout)
def store(self, key, value):
from bson.binary import Binary
now = datetime.datetime.utcnow()
blob = Binary(pickle.dumps(value))
self.col.insert({'created': now, '_id': key, 'value': blob})
def get(self, key, timeout=None):
if timeout:
raise NotImplementedError
obj = self.col.find_one({'_id': key})
if obj:
return pickle.loads(obj['value'])
def count(self):
return self.col.find({}).count()
def delete_entry(self, key):
return self.col.remove({'_id': key})
def cleanup(self):
"""MongoDB will automatically clear expired keys."""
pass
def flush(self):
self.col.drop()
self.col.create_index('created', expireAfterSeconds=self.timeout)
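# A minimal wiring sketch for the backends above (hypothetical clients; each
# needs its server running, and the client libraries are assumed installed):
#
#   import memcache, redis, pymongo
#   cache = MemCacheCache(memcache.Client(['127.0.0.1:11211']), timeout=120)
#   cache = RedisCache(redis.Redis('localhost', 6379), timeout=120)
#   cache = MongodbCache(pymongo.MongoClient()['tweepy_db'], timeout=3600)
#   cache.store('key', {'any': 'picklable value'})
#   cache.get('key')  # -> the stored value, or None once the timeout elapses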
|
apache-2.0
|
piasek1906/Piasek-KK
|
scripts/rt-tester/rt-tester.py
|
11005
|
5307
|
#!/usr/bin/python
#
# rt-mutex tester
#
# (C) 2006 Thomas Gleixner <tglx@linutronix.de>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
import os
import sys
import getopt
import shutil
import string
# Globals
quiet = 0
test = 0
comments = 0
sysfsprefix = "/sys/devices/system/rttest/rttest"
statusfile = "/status"
commandfile = "/command"
# Command opcodes
cmd_opcodes = {
"schedother" : "1",
"schedfifo" : "2",
"lock" : "3",
"locknowait" : "4",
"lockint" : "5",
"lockintnowait" : "6",
"lockcont" : "7",
"unlock" : "8",
"signal" : "11",
"resetevent" : "98",
"reset" : "99",
}
test_opcodes = {
"prioeq" : ["P" , "eq" , None],
"priolt" : ["P" , "lt" , None],
"priogt" : ["P" , "gt" , None],
"nprioeq" : ["N" , "eq" , None],
"npriolt" : ["N" , "lt" , None],
"npriogt" : ["N" , "gt" , None],
"unlocked" : ["M" , "eq" , 0],
"trylock" : ["M" , "eq" , 1],
"blocked" : ["M" , "eq" , 2],
"blockedwake" : ["M" , "eq" , 3],
"locked" : ["M" , "eq" , 4],
"opcodeeq" : ["O" , "eq" , None],
"opcodelt" : ["O" , "lt" , None],
"opcodegt" : ["O" , "gt" , None],
"eventeq" : ["E" , "eq" , None],
"eventlt" : ["E" , "lt" , None],
"eventgt" : ["E" , "gt" , None],
}
# Print usage information
def usage():
print "rt-tester.py <-c -h -q -t> <testfile>"
print " -c display comments after first command"
print " -h help"
print " -q quiet mode"
print " -t test mode (syntax check)"
print " testfile: read test specification from testfile"
print " otherwise from stdin"
return
# Print progress when not in quiet mode
def progress(str):
if not quiet:
print str
# Analyse a status value
def analyse(val, top, arg):
intval = int(val)
if top[0] == "M":
intval = intval / (10 ** int(arg))
intval = intval % 10
argval = top[2]
elif top[0] == "O":
argval = int(cmd_opcodes.get(arg, arg))
else:
argval = int(arg)
# progress("%d %s %d" %(intval, top[1], argval))
if top[1] == "eq" and intval == argval:
return 1
if top[1] == "lt" and intval < argval:
return 1
if top[1] == "gt" and intval > argval:
return 1
return 0
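# Example: analyse("432", ["M", "eq", 2], "0") extracts digit 0 of the
# mutex status value 432 (432 / 10**0 % 10 == 2) and compares it against
# the expected state 2 ("blocked"), so it returns 1.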
# Parse the commandline
try:
(options, arguments) = getopt.getopt(sys.argv[1:],'chqt')
except getopt.GetoptError, ex:
usage()
sys.exit(1)
# Parse commandline options
for option, value in options:
if option == "-c":
comments = 1
elif option == "-q":
quiet = 1
elif option == "-t":
test = 1
elif option == '-h':
usage()
sys.exit(0)
# Select the input source
if arguments:
try:
fd = open(arguments[0])
except Exception,ex:
sys.stderr.write("File not found %s\n" %(arguments[0]))
sys.exit(1)
else:
fd = sys.stdin
linenr = 0
# Read the test patterns
while 1:
linenr = linenr + 1
line = fd.readline()
if not len(line):
break
line = line.strip()
parts = line.split(":")
if not parts or len(parts) < 1:
continue
if len(parts[0]) == 0:
continue
if parts[0].startswith("#"):
if comments > 1:
progress(line)
continue
if comments == 1:
comments = 2
progress(line)
cmd = parts[0].strip().lower()
opc = parts[1].strip().lower()
tid = parts[2].strip()
dat = parts[3].strip()
try:
# Test or wait for a status value
if cmd == "t" or cmd == "w":
testop = test_opcodes[opc]
fname = "%s%s%s" %(sysfsprefix, tid, statusfile)
if test:
print fname
continue
while 1:
query = 1
fsta = open(fname, 'r')
status = fsta.readline().strip()
fsta.close()
stat = status.split(",")
for s in stat:
s = s.strip()
if s.startswith(testop[0]):
# Separate status value
val = s[2:].strip()
query = analyse(val, testop, dat)
break
if query or cmd == "t":
break
progress(" " + status)
if not query:
sys.stderr.write("Test failed in line %d\n" %(linenr))
sys.exit(1)
# Issue a command to the tester
elif cmd == "c":
cmdnr = cmd_opcodes[opc]
# Build command string and sys filename
cmdstr = "%s:%s" %(cmdnr, dat)
fname = "%s%s%s" %(sysfsprefix, tid, commandfile)
if test:
print fname
continue
fcmd = open(fname, 'w')
fcmd.write(cmdstr)
fcmd.close()
except Exception,ex:
sys.stderr.write(str(ex))
sys.stderr.write("\nSyntax error in line %d\n" %(linenr))
if not test:
fd.close()
sys.exit(1)
# Normal exit pass
print "Pass"
sys.exit(0)
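# An illustrative test specification in the colon-separated format parsed
# above ("command: opcode: threadid: data"); the values are hypothetical:
#
#   C: resetevent: 0: 0
#   C: schedfifo: 0: 80
#   C: locknowait: 0: 0
#   W: locked: 0: 0
#   C: unlock: 0: 0
#   T: unlocked: 0: 0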
|
gpl-2.0
|
ostree/plaso
|
plaso/parsers/skydrivelog.py
|
1
|
6955
|
# -*- coding: utf-8 -*-
"""This file contains SkyDrive log file parser in plaso."""
import logging
import pyparsing
from plaso.events import time_events
from plaso.lib import eventdata
from plaso.lib import timelib
from plaso.parsers import manager
from plaso.parsers import text_parser
__author__ = 'Francesco Picasso (francesco.picasso@gmail.com)'
class SkyDriveLogEvent(time_events.TimestampEvent):
"""Convenience class for a SkyDrive log line event."""
DATA_TYPE = u'skydrive:log:line'
def __init__(self, timestamp, offset, source_code, log_level, text):
"""Initializes the event object.
Args:
      timestamp: The timestamp which is an integer containing the number
                 of microseconds since January 1, 1970, 00:00:00 UTC.
      offset: The offset of the event within the log file.
      source_code: Details of the source code file generating the event.
log_level: The log level used for the event.
text: The log message.
"""
super(SkyDriveLogEvent, self).__init__(
timestamp, eventdata.EventTimestamp.ADDED_TIME)
self.log_level = log_level
self.offset = offset
self.source_code = source_code
self.text = text
class SkyDriveLogParser(text_parser.PyparsingSingleLineTextParser):
"""Parse SkyDrive log files."""
NAME = u'skydrive_log'
DESCRIPTION = u'Parser for OneDrive (or SkyDrive) log files.'
ENCODING = u'UTF-8-SIG'
# Common SDL (SkyDriveLog) pyparsing objects.
SDL_COLON = pyparsing.Literal(u':')
SDL_EXCLAMATION = pyparsing.Literal(u'!')
# Timestamp (08-01-2013 21:22:28.999).
SDL_TIMESTAMP = (
text_parser.PyparsingConstants.DATE_REV +
text_parser.PyparsingConstants.TIME_MSEC).setResultsName(u'timestamp')
# SkyDrive source code pyparsing structures.
SDL_SOURCE_CODE = pyparsing.Combine(
pyparsing.CharsNotIn(u':') +
SDL_COLON +
text_parser.PyparsingConstants.INTEGER +
SDL_EXCLAMATION +
pyparsing.Word(pyparsing.printables)).setResultsName(u'source_code')
# SkyDriveLogLevel pyparsing structures.
SDL_LOG_LEVEL = (
pyparsing.Literal(u'(').suppress() +
pyparsing.SkipTo(u')').setResultsName(u'log_level') +
pyparsing.Literal(u')').suppress())
# SkyDrive line pyparsing structure.
SDL_LINE = (
SDL_TIMESTAMP + SDL_SOURCE_CODE + SDL_LOG_LEVEL +
SDL_COLON + pyparsing.SkipTo(pyparsing.lineEnd).setResultsName(u'text'))
  # Sometimes the timestamped log line is followed by an empty line,
  # then by a file name plus other data and finally by another empty
  # line. It can also happen that a log line is split into two parts.
  # These lines are not discarded; an event is generated ad hoc
  # (see _ParseNoHeaderSingleLine), based on the last event if available.
SDL_NO_HEADER_SINGLE_LINE = (
pyparsing.Optional(pyparsing.Literal(u'->').suppress()) +
pyparsing.SkipTo(pyparsing.lineEnd).setResultsName(u'text'))
# Define the available log line structures.
LINE_STRUCTURES = [
(u'logline', SDL_LINE),
(u'no_header_single_line', SDL_NO_HEADER_SINGLE_LINE),
]
def __init__(self):
"""Initializes a parser object."""
super(SkyDriveLogParser, self).__init__()
self.offset = 0
self.last_event = None
def _GetTimestamp(self, timestamp_pypr):
"""Gets a timestamp from a pyparsing ParseResults timestamp.
This is a timestamp_string as returned by using
text_parser.PyparsingConstants structures:
[[8, 1, 2013], [21, 22, 28], 999]
Args:
      timestamp_pypr: The pyparsing ParseResults object holding the timestamp.
Returns:
      The timestamp which is an integer containing the number of microseconds
since January 1, 1970, 00:00:00 UTC or 0 on error.
"""
month, day, year = timestamp_pypr[0]
hour, minute, second = timestamp_pypr[1]
millisecond = timestamp_pypr[2]
try:
return timelib.Timestamp.FromTimeParts(
year, month, day, hour, minute, second,
microseconds=(millisecond * 1000))
except ValueError:
pass
return 0
def _ParseLogLine(self, parser_mediator, structure):
"""Parse a single log line and produce an event object.
Args:
parser_mediator: A parser mediator object (instance of ParserMediator).
structure: A pyparsing.ParseResults object from a line in the
log file.
"""
timestamp = self._GetTimestamp(structure.timestamp)
if not timestamp:
logging.debug(u'Invalid timestamp {0:s}'.format(structure.timestamp))
return
event_object = SkyDriveLogEvent(
timestamp, self.offset, structure.source_code, structure.log_level,
structure.text)
parser_mediator.ProduceEvent(event_object)
self.last_event = event_object
def _ParseNoHeaderSingleLine(self, parser_mediator, structure):
"""Parse an isolated line and and produce an event object.
Args:
parser_mediator: A parser mediator object (instance of ParserMediator).
structure: A pyparsing.ParseResults object from a line in the
log file.
"""
if not self.last_event:
logging.debug(u'SkyDrive, found isolated line with no previous events')
return
event_object = SkyDriveLogEvent(
self.last_event.timestamp, self.last_event.offset, None, None,
structure.text)
parser_mediator.ProduceEvent(event_object)
# TODO think to a possible refactoring for the non-header lines.
self.last_event = None
def ParseRecord(self, parser_mediator, key, structure):
"""Parses a log record structure and produces events.
Args:
parser_mediator: A parser mediator object (instance of ParserMediator).
key: An identification string indicating the name of the parsed
structure.
structure: A pyparsing.ParseResults object from a line in the
log file.
"""
if key == u'logline':
self._ParseLogLine(parser_mediator, structure)
elif key == u'no_header_single_line':
self._ParseNoHeaderSingleLine(parser_mediator, structure)
else:
logging.warning(
u'Unable to parse record, unknown structure: {0:s}'.format(key))
def VerifyStructure(self, parser_mediator, line):
"""Verify that this file is a SkyDrive log file.
Args:
parser_mediator: A parser mediator object (instance of ParserMediator).
line: A single line from the text file.
Returns:
True if this is the correct parser, False otherwise.
"""
structure = self.SDL_LINE
parsed_structure = None
timestamp = None
try:
parsed_structure = structure.parseString(line)
except pyparsing.ParseException:
logging.debug(u'Not a SkyDrive log file')
return False
else:
timestamp = self._GetTimestamp(parsed_structure.timestamp)
if not timestamp:
logging.debug(u'Not a SkyDrive log file, invalid timestamp {0:s}'.format(
parsed_structure.timestamp))
return False
return True
manager.ParsersManager.RegisterParser(SkyDriveLogParser)
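# An illustrative line that the SDL_LINE grammar above matches (values are
# hypothetical): a reversed date with millisecond time, a
# "file:line!function" source reference, a parenthesised log level, a colon
# and then the free-form message text:
#
#   08-01-2013 21:22:28.999 global.cpp:1748!logVersionInfo (DETAIL): 17.3.113.0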
|
apache-2.0
|
gqwest-erp/server
|
openerp/addons/base/tests/test_search.py
|
290
|
7662
|
import unittest2
import openerp.tests.common as common
class test_search(common.TransactionCase):
def test_00_search_order(self):
registry, cr, uid = self.registry, self.cr, self.uid
# Create 6 partners with a given name, and a given creation order to
# ensure the order of their ID. Some are set as unactive to verify they
# are by default excluded from the searches and to provide a second
# `order` argument.
partners = registry('res.partner')
c = partners.create(cr, uid, {'name': 'test_search_order_C'})
d = partners.create(cr, uid, {'name': 'test_search_order_D', 'active': False})
a = partners.create(cr, uid, {'name': 'test_search_order_A'})
b = partners.create(cr, uid, {'name': 'test_search_order_B'})
ab = partners.create(cr, uid, {'name': 'test_search_order_AB'})
e = partners.create(cr, uid, {'name': 'test_search_order_E', 'active': False})
# The tests.
# The basic searches should exclude records that have active = False.
# The order of the returned ids should be given by the `order`
# parameter of search().
name_asc = partners.search(cr, uid, [('name', 'like', 'test_search_order%')], order="name asc")
self.assertEqual([a, ab, b, c], name_asc, "Search with 'NAME ASC' order failed.")
name_desc = partners.search(cr, uid, [('name', 'like', 'test_search_order%')], order="name desc")
self.assertEqual([c, b, ab, a], name_desc, "Search with 'NAME DESC' order failed.")
id_asc = partners.search(cr, uid, [('name', 'like', 'test_search_order%')], order="id asc")
self.assertEqual([c, a, b, ab], id_asc, "Search with 'ID ASC' order failed.")
id_desc = partners.search(cr, uid, [('name', 'like', 'test_search_order%')], order="id desc")
self.assertEqual([ab, b, a, c], id_desc, "Search with 'ID DESC' order failed.")
# The inactive records shouldn't be excluded as soon as a condition on
# that field is present in the domain. The `order` parameter of
        # search() should support any legal comma-separated values.
active_asc_id_asc = partners.search(cr, uid, [('name', 'like', 'test_search_order%'), '|', ('active', '=', True), ('active', '=', False)], order="active asc, id asc")
self.assertEqual([d, e, c, a, b, ab], active_asc_id_asc, "Search with 'ACTIVE ASC, ID ASC' order failed.")
active_desc_id_asc = partners.search(cr, uid, [('name', 'like', 'test_search_order%'), '|', ('active', '=', True), ('active', '=', False)], order="active desc, id asc")
self.assertEqual([c, a, b, ab, d, e], active_desc_id_asc, "Search with 'ACTIVE DESC, ID ASC' order failed.")
active_asc_id_desc = partners.search(cr, uid, [('name', 'like', 'test_search_order%'), '|', ('active', '=', True), ('active', '=', False)], order="active asc, id desc")
self.assertEqual([e, d, ab, b, a, c], active_asc_id_desc, "Search with 'ACTIVE ASC, ID DESC' order failed.")
active_desc_id_desc = partners.search(cr, uid, [('name', 'like', 'test_search_order%'), '|', ('active', '=', True), ('active', '=', False)], order="active desc, id desc")
self.assertEqual([ab, b, a, c, e, d], active_desc_id_desc, "Search with 'ACTIVE DESC, ID DESC' order failed.")
id_asc_active_asc = partners.search(cr, uid, [('name', 'like', 'test_search_order%'), '|', ('active', '=', True), ('active', '=', False)], order="id asc, active asc")
self.assertEqual([c, d, a, b, ab, e], id_asc_active_asc, "Search with 'ID ASC, ACTIVE ASC' order failed.")
id_asc_active_desc = partners.search(cr, uid, [('name', 'like', 'test_search_order%'), '|', ('active', '=', True), ('active', '=', False)], order="id asc, active desc")
self.assertEqual([c, d, a, b, ab, e], id_asc_active_desc, "Search with 'ID ASC, ACTIVE DESC' order failed.")
id_desc_active_asc = partners.search(cr, uid, [('name', 'like', 'test_search_order%'), '|', ('active', '=', True), ('active', '=', False)], order="id desc, active asc")
self.assertEqual([e, ab, b, a, d, c], id_desc_active_asc, "Search with 'ID DESC, ACTIVE ASC' order failed.")
id_desc_active_desc = partners.search(cr, uid, [('name', 'like', 'test_search_order%'), '|', ('active', '=', True), ('active', '=', False)], order="id desc, active desc")
self.assertEqual([e, ab, b, a, d, c], id_desc_active_desc, "Search with 'ID DESC, ACTIVE DESC' order failed.")
def test_10_inherits_m2order(self):
registry, cr, uid = self.registry, self.cr, self.uid
users_obj = registry('res.users')
# Find Employee group
group_employee_ref = self.registry('ir.model.data').get_object_reference(cr, uid, 'base', 'group_user')
group_employee_id = group_employee_ref and group_employee_ref[1] or False
# Get country/state data
country_us_id = registry('res.country').search(cr, uid, [('code', 'like', 'US')])[0]
state_ids = registry('res.country.state').search(cr, uid, [('country_id', '=', country_us_id)], limit=2)
country_be_id = registry('res.country').search(cr, uid, [('code', 'like', 'BE')])[0]
# Create test users
search_user = users_obj.create(cr, uid, {'name': '__search', 'login': '__search', 'groups_id': [(6, 0, [group_employee_id])]})
a = users_obj.create(cr, uid, {'name': '__test_A', 'login': '__test_A', 'country_id': country_be_id, 'state_id': country_be_id})
b = users_obj.create(cr, uid, {'name': '__test_B', 'login': '__a_test_B', 'country_id': country_us_id, 'state_id': state_ids[1]})
c = users_obj.create(cr, uid, {'name': '__test_B', 'login': '__z_test_B', 'country_id': country_us_id, 'state_id': state_ids[0]})
# Do: search on res.users, order on a field on res.partner to try inherits'd fields, then res.users
user_ids = users_obj.search(cr, search_user, [], order='name asc, login desc')
expected_ids = [search_user, a, c, b]
test_user_ids = filter(lambda x: x in expected_ids, user_ids)
self.assertEqual(test_user_ids, expected_ids, 'search on res_users did not provide expected ids or expected order')
# Do: order on many2one and inherits'd fields
user_ids = users_obj.search(cr, search_user, [], order='state_id asc, country_id desc, name asc, login desc')
expected_ids = [c, b, a, search_user]
test_user_ids = filter(lambda x: x in expected_ids, user_ids)
self.assertEqual(test_user_ids, expected_ids, 'search on res_users did not provide expected ids or expected order')
# Do: order on many2one and inherits'd fields
user_ids = users_obj.search(cr, search_user, [], order='country_id desc, state_id desc, name asc, login desc')
expected_ids = [search_user, b, c, a]
test_user_ids = filter(lambda x: x in expected_ids, user_ids)
self.assertEqual(test_user_ids, expected_ids, 'search on res_users did not provide expected ids or expected order')
# Do: order on many2one, but not by specifying in order parameter of search, but by overriding _order of res_users
old_order = users_obj._order
users_obj._order = 'country_id desc, name asc, login desc'
user_ids = users_obj.search(cr, search_user, [])
expected_ids = [search_user, c, b, a]
test_user_ids = filter(lambda x: x in expected_ids, user_ids)
self.assertEqual(test_user_ids, expected_ids, 'search on res_users did not provide expected ids or expected order')
users_obj._order = old_order
if __name__ == '__main__':
unittest2.main()
|
agpl-3.0
|
zaffra/Inquire
|
GAE/django/contrib/localflavor/uy/forms.py
|
310
|
2083
|
# -*- coding: utf-8 -*-
"""
UY-specific form helpers.
"""
import re
from django.core.validators import EMPTY_VALUES
from django.forms.fields import Select, RegexField
from django.forms import ValidationError
from django.utils.translation import ugettext_lazy as _
from django.contrib.localflavor.uy.util import get_validation_digit
class UYDepartamentSelect(Select):
"""
    A Select widget that uses a list of Uruguayan departments as its choices.
"""
def __init__(self, attrs=None):
from uy_departaments import DEPARTAMENT_CHOICES
super(UYDepartamentSelect, self).__init__(attrs, choices=DEPARTAMENT_CHOICES)
class UYCIField(RegexField):
"""
A field that validates Uruguayan 'Cedula de identidad' (CI) numbers.
"""
default_error_messages = {
'invalid': _("Enter a valid CI number in X.XXX.XXX-X,"
"XXXXXXX-X or XXXXXXXX format."),
'invalid_validation_digit': _("Enter a valid CI number."),
}
def __init__(self, *args, **kwargs):
super(UYCIField, self).__init__(r'(?P<num>(\d{6,7}|(\d\.)?\d{3}\.\d{3}))-?(?P<val>\d)',
*args, **kwargs)
def clean(self, value):
"""
Validates format and validation digit.
        The official format is [X.]XXX.XXX-X but usually the dots and/or dash are
omitted so, when validating, those characters are ignored if found in
the correct place. The three typically used formats are supported:
[X]XXXXXXX, [X]XXXXXX-X and [X.]XXX.XXX-X.
"""
value = super(UYCIField, self).clean(value)
if value in EMPTY_VALUES:
return u''
match = self.regex.match(value)
if not match:
raise ValidationError(self.error_messages['invalid'])
number = int(match.group('num').replace('.', ''))
validation_digit = int(match.group('val'))
if not validation_digit == get_validation_digit(number):
raise ValidationError(self.error_messages['invalid_validation_digit'])
return value
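# A minimal usage sketch (hypothetical form) combining both helpers:
#
#   from django import forms
#
#   class UruguayanContactForm(forms.Form):
#       ci = UYCIField(label='Cedula de identidad')
#       departament = forms.CharField(widget=UYDepartamentSelect())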
|
bsd-3-clause
|
atomic-labs/zulip
|
zerver/management/commands/set_default_streams.py
|
8
|
1697
|
from __future__ import absolute_import
from __future__ import print_function
from django.core.management.base import BaseCommand
from zerver.models import get_realm
from zerver.lib.actions import set_default_streams
from optparse import make_option
import sys
class Command(BaseCommand):
help = """Set default streams for a realm
Users created under this realm will start out with these streams. This
command is not additive: if you re-run it on a domain with a different
set of default streams, those will be the new complete set of default
streams.
For example:
python2.7 manage.py set_default_streams --domain=foo.com --streams=foo,bar,baz
python2.7 manage.py set_default_streams --domain=foo.com --streams="foo,bar,baz with space"
python2.7 manage.py set_default_streams --domain=foo.com --streams=
"""
option_list = BaseCommand.option_list + (
make_option('-d', '--domain',
dest='domain',
type='str',
help='The name of the existing realm to which to attach default streams.'),
make_option('-s', '--streams',
dest='streams',
type='str',
help='A comma-separated list of stream names.'),
)
def handle(self, **options):
if options["domain"] is None or options["streams"] is None:
print("Please provide both a domain name and a default \
set of streams (which can be empty, with `--streams=`).", file=sys.stderr)
exit(1)
stream_names = [stream.strip() for stream in options["streams"].split(",")]
realm = get_realm(options["domain"])
set_default_streams(realm, stream_names)
|
apache-2.0
|
gw-sd-2016/Codir
|
codirSublime/SocketIO/requests/packages/urllib3/packages/ordered_dict.py
|
2040
|
8935
|
# Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy.
# Passes Python2.7's test suite and incorporates all the latest updates.
# Copyright 2009 Raymond Hettinger, released under the MIT License.
# http://code.activestate.com/recipes/576693/
try:
from thread import get_ident as _get_ident
except ImportError:
from dummy_thread import get_ident as _get_ident
try:
from _abcoll import KeysView, ValuesView, ItemsView
except ImportError:
pass
class OrderedDict(dict):
'Dictionary that remembers insertion order'
# An inherited dict maps keys to values.
# The inherited dict provides __getitem__, __len__, __contains__, and get.
# The remaining methods are order-aware.
# Big-O running times for all methods are the same as for regular dictionaries.
# The internal self.__map dictionary maps keys to links in a doubly linked list.
# The circular doubly linked list starts and ends with a sentinel element.
# The sentinel element never gets deleted (this simplifies the algorithm).
# Each link is stored as a list of length three: [PREV, NEXT, KEY].
def __init__(self, *args, **kwds):
'''Initialize an ordered dictionary. Signature is the same as for
regular dictionaries, but keyword arguments are not recommended
because their insertion order is arbitrary.
'''
if len(args) > 1:
raise TypeError('expected at most 1 arguments, got %d' % len(args))
try:
self.__root
except AttributeError:
self.__root = root = [] # sentinel node
root[:] = [root, root, None]
self.__map = {}
self.__update(*args, **kwds)
def __setitem__(self, key, value, dict_setitem=dict.__setitem__):
'od.__setitem__(i, y) <==> od[i]=y'
# Setting a new item creates a new link which goes at the end of the linked
# list, and the inherited dictionary is updated with the new key/value pair.
if key not in self:
root = self.__root
last = root[0]
last[1] = root[0] = self.__map[key] = [last, root, key]
dict_setitem(self, key, value)
def __delitem__(self, key, dict_delitem=dict.__delitem__):
'od.__delitem__(y) <==> del od[y]'
# Deleting an existing item uses self.__map to find the link which is
# then removed by updating the links in the predecessor and successor nodes.
dict_delitem(self, key)
link_prev, link_next, key = self.__map.pop(key)
link_prev[1] = link_next
link_next[0] = link_prev
def __iter__(self):
'od.__iter__() <==> iter(od)'
root = self.__root
curr = root[1]
while curr is not root:
yield curr[2]
curr = curr[1]
def __reversed__(self):
'od.__reversed__() <==> reversed(od)'
root = self.__root
curr = root[0]
while curr is not root:
yield curr[2]
curr = curr[0]
def clear(self):
'od.clear() -> None. Remove all items from od.'
try:
for node in self.__map.itervalues():
del node[:]
root = self.__root
root[:] = [root, root, None]
self.__map.clear()
except AttributeError:
pass
dict.clear(self)
def popitem(self, last=True):
'''od.popitem() -> (k, v), return and remove a (key, value) pair.
Pairs are returned in LIFO order if last is true or FIFO order if false.
'''
if not self:
raise KeyError('dictionary is empty')
root = self.__root
if last:
link = root[0]
link_prev = link[0]
link_prev[1] = root
root[0] = link_prev
else:
link = root[1]
link_next = link[1]
root[1] = link_next
link_next[0] = root
key = link[2]
del self.__map[key]
value = dict.pop(self, key)
return key, value
# -- the following methods do not depend on the internal structure --
def keys(self):
'od.keys() -> list of keys in od'
return list(self)
def values(self):
'od.values() -> list of values in od'
return [self[key] for key in self]
def items(self):
'od.items() -> list of (key, value) pairs in od'
return [(key, self[key]) for key in self]
def iterkeys(self):
'od.iterkeys() -> an iterator over the keys in od'
return iter(self)
def itervalues(self):
'od.itervalues -> an iterator over the values in od'
for k in self:
yield self[k]
def iteritems(self):
'od.iteritems -> an iterator over the (key, value) items in od'
for k in self:
yield (k, self[k])
def update(*args, **kwds):
'''od.update(E, **F) -> None. Update od from dict/iterable E and F.
If E is a dict instance, does: for k in E: od[k] = E[k]
If E has a .keys() method, does: for k in E.keys(): od[k] = E[k]
Or if E is an iterable of items, does: for k, v in E: od[k] = v
In either case, this is followed by: for k, v in F.items(): od[k] = v
'''
if len(args) > 2:
raise TypeError('update() takes at most 2 positional '
'arguments (%d given)' % (len(args),))
elif not args:
raise TypeError('update() takes at least 1 argument (0 given)')
self = args[0]
# Make progressively weaker assumptions about "other"
other = ()
if len(args) == 2:
other = args[1]
if isinstance(other, dict):
for key in other:
self[key] = other[key]
elif hasattr(other, 'keys'):
for key in other.keys():
self[key] = other[key]
else:
for key, value in other:
self[key] = value
for key, value in kwds.items():
self[key] = value
__update = update # let subclasses override update without breaking __init__
__marker = object()
def pop(self, key, default=__marker):
'''od.pop(k[,d]) -> v, remove specified key and return the corresponding value.
If key is not found, d is returned if given, otherwise KeyError is raised.
'''
if key in self:
result = self[key]
del self[key]
return result
if default is self.__marker:
raise KeyError(key)
return default
def setdefault(self, key, default=None):
'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od'
if key in self:
return self[key]
self[key] = default
return default
def __repr__(self, _repr_running={}):
'od.__repr__() <==> repr(od)'
call_key = id(self), _get_ident()
if call_key in _repr_running:
return '...'
_repr_running[call_key] = 1
try:
if not self:
return '%s()' % (self.__class__.__name__,)
return '%s(%r)' % (self.__class__.__name__, self.items())
finally:
del _repr_running[call_key]
def __reduce__(self):
'Return state information for pickling'
items = [[k, self[k]] for k in self]
inst_dict = vars(self).copy()
for k in vars(OrderedDict()):
inst_dict.pop(k, None)
if inst_dict:
return (self.__class__, (items,), inst_dict)
return self.__class__, (items,)
def copy(self):
'od.copy() -> a shallow copy of od'
return self.__class__(self)
@classmethod
def fromkeys(cls, iterable, value=None):
'''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S
and values equal to v (which defaults to None).
'''
d = cls()
for key in iterable:
d[key] = value
return d
def __eq__(self, other):
'''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive
while comparison to a regular mapping is order-insensitive.
'''
if isinstance(other, OrderedDict):
return len(self)==len(other) and self.items() == other.items()
return dict.__eq__(self, other)
def __ne__(self, other):
return not self == other
# -- the following methods are only used in Python 2.7 --
def viewkeys(self):
"od.viewkeys() -> a set-like object providing a view on od's keys"
return KeysView(self)
def viewvalues(self):
"od.viewvalues() -> an object providing a view on od's values"
return ValuesView(self)
def viewitems(self):
"od.viewitems() -> a set-like object providing a view on od's items"
return ItemsView(self)
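# A minimal usage sketch of the backported class (illustrative only; run this
# module directly to exercise it):
if __name__ == '__main__':
    od = OrderedDict([('a', 1), ('b', 2)])
    od['c'] = 3
    assert od.keys() == ['a', 'b', 'c']         # insertion order is preserved
    od.popitem(last=False)                      # FIFO pop removes ('a', 1)
    assert od.items() == [('b', 2), ('c', 3)]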
|
gpl-2.0
|
B-MOOC/edx-platform
|
lms/djangoapps/dashboard/management/commands/tests/test_git_add_course.py
|
101
|
8567
|
"""
Provide tests for git_add_course management command.
"""
import logging
import os
import shutil
import StringIO
import subprocess
import unittest
from django.conf import settings
from django.core.management import call_command
from django.core.management.base import CommandError
from django.test.utils import override_settings
from opaque_keys.edx.locations import SlashSeparatedCourseKey
import dashboard.git_import as git_import
from dashboard.git_import import GitImportError
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.django import modulestore
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.mongo_connection import MONGO_PORT_NUM, MONGO_HOST
TEST_MONGODB_LOG = {
'host': MONGO_HOST,
'port': MONGO_PORT_NUM,
'user': '',
'password': '',
'db': 'test_xlog',
}
FEATURES_WITH_SSL_AUTH = settings.FEATURES.copy()
FEATURES_WITH_SSL_AUTH['AUTH_USE_CERTIFICATES'] = True
@override_settings(MONGODB_LOG=TEST_MONGODB_LOG)
@unittest.skipUnless(settings.FEATURES.get('ENABLE_SYSADMIN_DASHBOARD'),
"ENABLE_SYSADMIN_DASHBOARD not set")
class TestGitAddCourse(ModuleStoreTestCase):
"""
Tests the git_add_course management command for proper functions.
"""
TEST_REPO = 'https://github.com/mitocw/edx4edx_lite.git'
TEST_COURSE = 'MITx/edx4edx/edx4edx'
TEST_BRANCH = 'testing_do_not_delete'
TEST_BRANCH_COURSE = SlashSeparatedCourseKey('MITx', 'edx4edx_branch', 'edx4edx')
GIT_REPO_DIR = getattr(settings, 'GIT_REPO_DIR')
def assertCommandFailureRegexp(self, regex, *args):
"""
Convenience function for testing command failures
"""
with self.assertRaises(SystemExit):
with self.assertRaisesRegexp(CommandError, regex):
call_command('git_add_course', *args,
stderr=StringIO.StringIO())
def test_command_args(self):
"""
Validate argument checking
"""
self.assertCommandFailureRegexp(
'This script requires at least one argument, the git URL')
self.assertCommandFailureRegexp(
'Expected no more than three arguments; recieved 4',
'blah', 'blah', 'blah', 'blah')
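        # ("recieved" above is intentionally left misspelled: the regexp has
        # to match the management command's own error message.)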
self.assertCommandFailureRegexp(
'Repo was not added, check log output for details',
'blah')
# Test successful import from command
if not os.path.isdir(self.GIT_REPO_DIR):
os.mkdir(self.GIT_REPO_DIR)
self.addCleanup(shutil.rmtree, self.GIT_REPO_DIR)
# Make a course dir that will be replaced with a symlink
# while we are at it.
if not os.path.isdir(self.GIT_REPO_DIR / 'edx4edx'):
os.mkdir(self.GIT_REPO_DIR / 'edx4edx')
call_command('git_add_course', self.TEST_REPO,
self.GIT_REPO_DIR / 'edx4edx_lite')
# Test with all three args (branch)
call_command('git_add_course', self.TEST_REPO,
self.GIT_REPO_DIR / 'edx4edx_lite',
self.TEST_BRANCH)
def test_add_repo(self):
"""
Various exit path tests for test_add_repo
"""
with self.assertRaisesRegexp(GitImportError, GitImportError.NO_DIR):
git_import.add_repo(self.TEST_REPO, None, None)
os.mkdir(self.GIT_REPO_DIR)
self.addCleanup(shutil.rmtree, self.GIT_REPO_DIR)
with self.assertRaisesRegexp(GitImportError, GitImportError.URL_BAD):
git_import.add_repo('foo', None, None)
with self.assertRaisesRegexp(GitImportError, GitImportError.CANNOT_PULL):
git_import.add_repo('file:///foobar.git', None, None)
# Test git repo that exists, but is "broken"
bare_repo = os.path.abspath('{0}/{1}'.format(settings.TEST_ROOT, 'bare.git'))
os.mkdir(bare_repo)
self.addCleanup(shutil.rmtree, bare_repo)
subprocess.check_output(['git', '--bare', 'init', ], stderr=subprocess.STDOUT,
cwd=bare_repo)
with self.assertRaisesRegexp(GitImportError, GitImportError.BAD_REPO):
git_import.add_repo('file://{0}'.format(bare_repo), None, None)
def test_detached_repo(self):
"""
Test repo that is in detached head state.
"""
repo_dir = self.GIT_REPO_DIR
# Test successful import from command
try:
os.mkdir(repo_dir)
except OSError:
pass
self.addCleanup(shutil.rmtree, repo_dir)
git_import.add_repo(self.TEST_REPO, repo_dir / 'edx4edx_lite', None)
subprocess.check_output(['git', 'checkout', 'HEAD~2', ],
stderr=subprocess.STDOUT,
cwd=repo_dir / 'edx4edx_lite')
with self.assertRaisesRegexp(GitImportError, GitImportError.CANNOT_PULL):
git_import.add_repo(self.TEST_REPO, repo_dir / 'edx4edx_lite', None)
def test_branching(self):
"""
Exercise branching code of import
"""
repo_dir = self.GIT_REPO_DIR
# Test successful import from command
if not os.path.isdir(repo_dir):
os.mkdir(repo_dir)
self.addCleanup(shutil.rmtree, repo_dir)
# Checkout non existent branch
with self.assertRaisesRegexp(GitImportError, GitImportError.REMOTE_BRANCH_MISSING):
git_import.add_repo(self.TEST_REPO, repo_dir / 'edx4edx_lite', 'asdfasdfasdf')
# Checkout new branch
git_import.add_repo(self.TEST_REPO,
repo_dir / 'edx4edx_lite',
self.TEST_BRANCH)
def_ms = modulestore()
# Validate that it is different than master
self.assertIsNotNone(def_ms.get_course(self.TEST_BRANCH_COURSE))
# Attempt to check out the same branch again to validate branch choosing
# works
git_import.add_repo(self.TEST_REPO,
repo_dir / 'edx4edx_lite',
self.TEST_BRANCH)
# Delete to test branching back to master
def_ms.delete_course(self.TEST_BRANCH_COURSE, ModuleStoreEnum.UserID.test)
self.assertIsNone(def_ms.get_course(self.TEST_BRANCH_COURSE))
git_import.add_repo(self.TEST_REPO,
repo_dir / 'edx4edx_lite',
'master')
self.assertIsNone(def_ms.get_course(self.TEST_BRANCH_COURSE))
self.assertIsNotNone(def_ms.get_course(SlashSeparatedCourseKey.from_deprecated_string(self.TEST_COURSE)))
def test_branch_exceptions(self):
"""
        This will create conditions to exercise bad paths in the switch_branch function.
"""
# create bare repo that we can mess with and attempt an import
bare_repo = os.path.abspath('{0}/{1}'.format(settings.TEST_ROOT, 'bare.git'))
os.mkdir(bare_repo)
self.addCleanup(shutil.rmtree, bare_repo)
subprocess.check_output(['git', '--bare', 'init', ], stderr=subprocess.STDOUT,
cwd=bare_repo)
# Build repo dir
repo_dir = self.GIT_REPO_DIR
if not os.path.isdir(repo_dir):
os.mkdir(repo_dir)
self.addCleanup(shutil.rmtree, repo_dir)
rdir = '{0}/bare'.format(repo_dir)
with self.assertRaisesRegexp(GitImportError, GitImportError.BAD_REPO):
git_import.add_repo('file://{0}'.format(bare_repo), None, None)
# Get logger for checking strings in logs
output = StringIO.StringIO()
test_log_handler = logging.StreamHandler(output)
test_log_handler.setLevel(logging.DEBUG)
glog = git_import.log
glog.addHandler(test_log_handler)
# Move remote so fetch fails
shutil.move(bare_repo, '{0}/not_bare.git'.format(settings.TEST_ROOT))
try:
git_import.switch_branch('master', rdir)
except GitImportError:
self.assertIn('Unable to fetch remote', output.getvalue())
shutil.move('{0}/not_bare.git'.format(settings.TEST_ROOT), bare_repo)
output.truncate(0)
# Replace origin with a different remote
subprocess.check_output(
['git', 'remote', 'rename', 'origin', 'blah', ],
stderr=subprocess.STDOUT, cwd=rdir
)
with self.assertRaises(GitImportError):
git_import.switch_branch('master', rdir)
self.assertIn('Getting a list of remote branches failed', output.getvalue())
|
agpl-3.0
|
dreamseekerkun/mixpanel-android
|
acceptance/test_layout.py
|
10
|
6697
|
import unittest
from selenium import webdriver
class AndroidTest(unittest.TestCase):
def _launch_app(self, decide_message):
f = open('response.txt', 'w')
f.write(decide_message)
f.close()
desired_capabilities = {'aut': 'com.mixpanel.example.hello:1.0'}
self.driver = webdriver.Remote(
desired_capabilities=desired_capabilities
)
self.driver.implicitly_wait(30)
def tearDown(self):
open('response.txt', 'w').close()
self.driver.quit()
def test_layout_change_basic(self):
decide_message = '{"notifications":[],"surveys":[],"variants":[{"tweaks":[],"actions":[{"args":[{"view_id_name":"send_to_mixpanel","verb":3,"anchor_id_name":"0"}],"name":"c155","path":[{"prefix":"shortest","index":0,"id":16908290},{"view_class":"android.widget.RelativeLayout","index":0}],"change_type":"layout"},{"args":[{"view_id_name":"send_to_mixpanel","verb":11,"anchor_id_name":"-1"}],"name":"c155","path":[{"prefix":"shortest","index":0,"id":16908290},{"view_class":"android.widget.RelativeLayout","index":0}],"change_type":"layout"},{"args":[{"view_id_name":"send_to_mixpanel","verb":3,"anchor_id_name":"edit_first_name"}],"name":"c155","path":[{"prefix":"shortest","index":0,"id":16908290},{"view_class":"android.widget.RelativeLayout","index":0}],"change_type":"layout"}],"id":8990,"experiment_id":4302}]}'
self._launch_app(decide_message)
edit_email_address_location = self.driver.find_element_by_id('edit_email_address').location
send_to_mixpanel_location = self.driver.find_element_by_id('send_to_mixpanel').location
send_revenue_location = self.driver.find_element_by_id('send_revenue').location
self.assertTrue(send_to_mixpanel_location['y'] < edit_email_address_location['y'])
self.assertTrue(send_to_mixpanel_location['x'] > edit_email_address_location['x'])
self.assertEquals(send_revenue_location['y'], edit_email_address_location['y'])
def test_layout_circular_dependency(self):
decide_message = '{"notifications":[],"surveys":[],"variants":[{"tweaks":[],"actions":[{"args":[{"view_id_name":"edit_email_address","verb":3,"anchor_id_name":"send_to_mixpanel"}],"name":"c155","path":[{"prefix":"shortest","index":0,"id":16908290},{"view_class":"android.widget.RelativeLayout","index":0}],"change_type":"layout"}],"id":8990,"experiment_id":4302}]}'
self._launch_app(decide_message)
self.assertTrue(self.driver.find_element_by_id('send_to_mixpanel').location['y'] > self.driver.find_element_by_id('edit_email_address').location['y'])
def test_layout_massive_changes(self):
decide_message = '{"notifications":[],"surveys":[],"variants":[{"tweaks":[],"actions":[{"args":[{"view_id_name":"send_to_mixpanel","verb":3,"anchor_id_name":"0"}],"name":"c155","path":[{"prefix":"shortest","index":0,"id":16908290},{"view_class":"android.widget.RelativeLayout","index":0}],"change_type":"layout"},{"args":[{"view_id_name":"send_to_mixpanel","verb":11,"anchor_id_name":"-1"}],"name":"c155","path":[{"prefix":"shortest","index":0,"id":16908290},{"view_class":"android.widget.RelativeLayout","index":0}],"change_type":"layout"},{"args":[{"view_id_name":"send_revenue","verb":3,"anchor_id_name":"edit_first_name"}],"name":"c155","path":[{"prefix":"shortest","index":0,"id":16908290},{"view_class":"android.widget.RelativeLayout","index":0}],"change_type":"layout"},{"args":[{"view_id_name":"edit_email_address","verb":3,"anchor_id_name":"send_to_mixpanel"}],"name":"c155","path":[{"prefix":"shortest","index":0,"id":16908290},{"view_class":"android.widget.RelativeLayout","index":0}],"change_type":"layout"},{"args":[{"view_id_name":"set_background_image","verb":10,"anchor_id_name":"-1"}],"name":"c155","path":[{"prefix":"shortest","index":0,"id":16908290},{"view_class":"android.widget.RelativeLayout","index":0}],"change_type":"layout"},{"args":[{"view_id_name":"set_background_image","verb":12,"anchor_id_name":"-1"}],"name":"c155","path":[{"prefix":"shortest","index":0,"id":16908290},{"view_class":"android.widget.RelativeLayout","index":0}],"change_type":"layout"}],"id":8990,"experiment_id":4302}]}'
self._launch_app(decide_message)
edit_first_name_location = self.driver.find_element_by_id('edit_first_name').location
edit_last_name_location = self.driver.find_element_by_id('edit_last_name').location
edit_email_address_location = self.driver.find_element_by_id('edit_email_address').location
send_to_mixpanel_location = self.driver.find_element_by_id('send_to_mixpanel').location
send_to_mixpanel_size = self.driver.find_element_by_id('send_to_mixpanel').size
send_revenue_location = self.driver.find_element_by_id('send_revenue').location
set_background_image_location = self.driver.find_element_by_id('set_background_image').location
set_background_image_size = self.driver.find_element_by_id('set_background_image').size
self.assertEquals(send_to_mixpanel_location['y'], edit_first_name_location['y'])
self.assertTrue(send_to_mixpanel_location['x'] > send_revenue_location['x'])
self.assertEquals(edit_email_address_location['y'], edit_last_name_location['y'])
self.assertEquals(set_background_image_location['x'], 0)
self.assertTrue(set_background_image_location['y'] < edit_last_name_location['y'])
self.assertTrue(set_background_image_size['width'] > send_to_mixpanel_size['width'])
self.assertTrue(set_background_image_size['height'] > send_to_mixpanel_size['height'])
def test_layout_absent_views(self):
decide_message = '{"notifications":[],"surveys":[],"variants":[{"tweaks":[],"actions":[{"args":[{"view_id_name":"edit_email_address","verb":3,"anchor_id_name":"0"}],"name":"c155","path":[{"prefix":"shortest","index":0,"id":16908290},{"view_class":"android.widget.RelativeLayout","index":0}],"change_type":"layout"},{"args":[{"view_id_name":"fake_view","verb":3,"anchor_id_name":"0"}],"name":"c155","path":[{"prefix":"shortest","index":0,"id":16908290},{"view_class":"android.widget.RelativeLayout","index":0}],"change_type":"layout"},{"args":[{"view_id_name":"edit_email_address","verb":3,"anchor_id_name":"fake_view"}],"name":"c155","path":[{"prefix":"shortest","index":0,"id":16908290},{"view_class":"android.widget.RelativeLayout","index":0}],"change_type":"layout"}],"id":8990,"experiment_id":4302}]}'
self._launch_app(decide_message)
self.assertEquals(self.driver.find_element_by_id('edit_email_address').location['y'], self.driver.find_element_by_id('edit_first_name').location['y'])
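    # Note on the decide payloads above: the numeric "verb" values appear to
    # map to android.widget.RelativeLayout rule constants (e.g. 3 = BELOW,
    # 10 = ALIGN_PARENT_TOP, 11 = ALIGN_PARENT_RIGHT, 12 = ALIGN_PARENT_BOTTOM),
    # which is consistent with the position and size assertions in these tests.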
if __name__ == '__main__':
unittest.main()
|
apache-2.0
|
Belgabor/django
|
django/core/context_processors.py
|
65
|
3353
|
"""
A set of request processors that return dictionaries to be merged into a
template context. Each function takes the request object as its only parameter
and returns a dictionary to add to the context.
These are referenced from the setting TEMPLATE_CONTEXT_PROCESSORS and used by
RequestContext.
"""
from django.conf import settings
from django.middleware.csrf import get_token
from django.utils.functional import lazy
def auth(request):
"""
    DEPRECATED. This is the old location of this context processor; it has
    been moved to `django.contrib.auth.context_processors`.
This function still exists for backwards-compatibility; it will be removed
in Django 1.4.
"""
import warnings
warnings.warn(
"The context processor at `django.core.context_processors.auth` is " \
"deprecated; use the path `django.contrib.auth.context_processors.auth` " \
"instead.",
PendingDeprecationWarning
)
from django.contrib.auth.context_processors import auth as auth_context_processor
return auth_context_processor(request)
def csrf(request):
"""
Context processor that provides a CSRF token, or the string 'NOTPROVIDED' if
it has not been provided by either a view decorator or the middleware
"""
def _get_val():
token = get_token(request)
if token is None:
# In order to be able to provide debugging info in the
# case of misconfiguration, we use a sentinel value
# instead of returning an empty dict.
return 'NOTPROVIDED'
else:
return token
_get_val = lazy(_get_val, str)
return {'csrf_token': _get_val() }
def debug(request):
"Returns context variables helpful for debugging."
context_extras = {}
if settings.DEBUG and request.META.get('REMOTE_ADDR') in settings.INTERNAL_IPS:
context_extras['debug'] = True
from django.db import connection
context_extras['sql_queries'] = connection.queries
return context_extras
def i18n(request):
from django.utils import translation
context_extras = {}
context_extras['LANGUAGES'] = settings.LANGUAGES
context_extras['LANGUAGE_CODE'] = translation.get_language()
context_extras['LANGUAGE_BIDI'] = translation.get_language_bidi()
return context_extras
def media(request):
"""
Adds media-related context variables to the context.
"""
return {'MEDIA_URL': settings.MEDIA_URL}
def request(request):
return {'request': request}
# PermWrapper and PermLookupDict proxy the permissions system into objects that
# the template system can understand.
class PermLookupDict(object):
def __init__(self, user, module_name):
self.user, self.module_name = user, module_name
def __repr__(self):
return str(self.user.get_all_permissions())
def __getitem__(self, perm_name):
return self.user.has_perm("%s.%s" % (self.module_name, perm_name))
def __nonzero__(self):
return self.user.has_module_perms(self.module_name)
class PermWrapper(object):
def __init__(self, user):
self.user = user
def __getitem__(self, module_name):
return PermLookupDict(self.user, module_name)
def __iter__(self):
# I am large, I contain multitudes.
raise TypeError("PermWrapper is not iterable.")
|
bsd-3-clause
|
icyflame/batman
|
scripts/login.py
|
1
|
6800
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Script to log the bot in to a wiki account.
Suggestion is to make a special account to use for bot use only. Make
sure this bot account is well known on your home wiki before using.
Parameters:
-family:FF
-lang:LL Log in to the LL language of the FF family.
Example: -family:wiktionary -lang:fr will log you in at
fr.wiktionary.org.
-all Try to log in on all sites where a username is defined in
user-config.py.
-logout Log out of the current site. Combine with -all to log out of
all sites, or with -family and -lang to log out of a specific
site.
-force Ignores if the user is already logged in, and tries to log in.
-pass Useful in combination with -all when you have accounts for
several sites and use the same password for all of them.
Asks you for the password, then logs in on all given sites.
-pass:XXXX Uses XXXX as password. Be careful if you use this
parameter because your password will be shown on your
screen, and will probably be saved in your command line
history. This is NOT RECOMMENDED for use on computers
where others have either physical or remote access.
Use -pass instead.
-sysop Log in with your sysop account.
-oauth Generate OAuth authentication information.
NOTE: Need to copy OAuth tokens to your user-config.py
manually. -logout, -pass, -force, -pass:XXXX and -sysop are not
compatible with -oauth.
If not given as parameter, the script will ask for your username and
password (password entry will be hidden), log in to your home wiki using
this combination, and store the resulting cookies (containing your password
hash, so keep it secured!) in a file in the data subdirectory.
All scripts in this library will be looking for this cookie file and will
use the login information if it is present.
To log out, throw away the *.lwp file that is created in the data
subdirectory.
"""
#
# (C) Rob W.W. Hooft, 2003
# (C) Pywikibot team, 2003-2014
#
# Distributed under the terms of the MIT license.
#
from __future__ import absolute_import, unicode_literals
__version__ = '$Id$'
#
import pywikibot
from os.path import join
from pywikibot import config
from pywikibot.login import OauthLoginManager
from pywikibot.exceptions import SiteDefinitionError
def _get_consumer_token(site):
key_msg = 'OAuth consumer key on {0}:{1}'.format(site.code, site.family)
key = pywikibot.input(key_msg)
secret_msg = 'OAuth consumer secret for consumer {0}'.format(key)
secret = pywikibot.input(secret_msg, password=True)
return key, secret
def _oauth_login(site):
consumer_key, consumer_secret = _get_consumer_token(site)
login_manager = OauthLoginManager(consumer_secret, False, site,
consumer_key)
login_manager.login()
identity = login_manager.identity
if identity is None:
pywikibot.error('Invalid OAuth info for %(site)s.' %
{'site': site})
elif site.username() != identity['username']:
pywikibot.error('Logged in on %(site)s via OAuth as %(wrong)s, '
                        'but expected %(right)s'
% {'site': site,
'wrong': identity['username'],
'right': site.username()})
else:
oauth_token = login_manager.consumer_token + login_manager.access_token
        pywikibot.output('Logged in on %(site)s as %(username)s '
'via OAuth consumer %(consumer)s'
% {'site': site,
'username': site.username(sysop=False),
'consumer': consumer_key})
pywikibot.output('NOTE: To use OAuth, you need to copy the '
'following line to your user-config.py:')
pywikibot.output('authenticate[\'%(hostname)s\'] = %(oauth_token)s' %
{'hostname': site.hostname(),
'oauth_token': oauth_token})
def main(*args):
"""
Process command line arguments and invoke bot.
If args is an empty list, sys.argv is used.
@param args: command line arguments
@type args: list of unicode
"""
password = None
sysop = False
logall = False
logout = False
oauth = False
unknown_args = []
for arg in pywikibot.handle_args(args):
if arg.startswith("-pass"):
if len(arg) == 5:
password = pywikibot.input(
'Password for all accounts (no characters will be shown):',
password=True)
else:
password = arg[6:]
elif arg == "-sysop":
sysop = True
elif arg == "-all":
logall = True
elif arg == "-force":
pywikibot.output(u"To force a re-login, please delete the revelant "
u"lines from '%s' (or the entire file) and try again." %
join(config.base_dir, 'pywikibot.lwp'))
elif arg == "-logout":
logout = True
elif arg == '-oauth':
oauth = True
else:
unknown_args += [arg]
if unknown_args:
pywikibot.bot.suggest_help(unknown_parameters=unknown_args)
return False
if logall:
if sysop and not oauth:
namedict = config.sysopnames
else:
namedict = config.usernames
else:
site = pywikibot.Site()
namedict = {site.family.name: {site.code: None}}
for familyName in namedict:
for lang in namedict[familyName]:
try:
site = pywikibot.Site(code=lang, fam=familyName)
if oauth:
_oauth_login(site)
continue
if logout:
site.logout()
else:
site.login(sysop)
user = site.user()
if user:
pywikibot.output(u"Logged in on %(site)s as %(user)s." % locals())
else:
if logout:
pywikibot.output(u"Logged out of %(site)s." % locals())
else:
pywikibot.output(u"Not logged in on %(site)s." % locals())
except SiteDefinitionError:
pywikibot.output(u'%s.%s is not a valid site, please remove it'
u' from your config' % (lang, familyName))
if __name__ == "__main__":
main()
|
mit
|
encyclomundi/hubzilla
|
vendor/sabre/dav/bin/googlecode_upload.py
|
124
|
8913
|
#!/usr/bin/env python
#
# Copyright 2006, 2007 Google Inc. All Rights Reserved.
# Author: danderson@google.com (David Anderson)
#
# Script for uploading files to a Google Code project.
#
# This is intended to be both a useful script for people who want to
# streamline project uploads and a reference implementation for
# uploading files to Google Code projects.
#
# To upload a file to Google Code, you need to provide a path to the
# file on your local machine, a small summary of what the file is, a
# project name, and a valid account that is a member or owner of that
# project. You can optionally provide a list of labels that apply to
# the file. The file will be uploaded under the same name that it has
# in your local filesystem (that is, the "basename" or last path
# component). Run the script with '--help' to get the exact syntax
# and available options.
#
# Note that the upload script requests that you enter your
# googlecode.com password. This is NOT your Gmail account password!
# This is the password you use on googlecode.com for committing to
# Subversion and uploading files. You can find your password by going
# to http://code.google.com/hosting/settings when logged in with your
# Gmail account. If you have already committed to your project's
# Subversion repository, the script will automatically retrieve your
# credentials from there (unless disabled, see the output of '--help'
# for details).
#
# If you are looking at this script as a reference for implementing
# your own Google Code file uploader, then you should take a look at
# the upload() function, which is the meat of the uploader. You
# basically need to build a multipart/form-data POST request with the
# right fields and send it to https://PROJECT.googlecode.com/files .
# Authenticate the request using HTTP Basic authentication, as is
# shown below.
#
# Licensed under the terms of the Apache Software License 2.0:
# http://www.apache.org/licenses/LICENSE-2.0
#
# Questions, comments, feature requests and patches are most welcome.
# Please direct all of these to the Google Code users group:
# http://groups.google.com/group/google-code-hosting
"""Google Code file uploader script.
"""
__author__ = 'danderson@google.com (David Anderson)'
import httplib
import os.path
import optparse
import getpass
import base64
import sys
def upload(file, project_name, user_name, password, summary, labels=None):
"""Upload a file to a Google Code project's file server.
Args:
file: The local path to the file.
project_name: The name of your project on Google Code.
user_name: Your Google account name.
password: The googlecode.com password for your account.
Note that this is NOT your global Google Account password!
summary: A small description for the file.
labels: an optional list of label strings with which to tag the file.
Returns: a tuple:
http_status: 201 if the upload succeeded, something else if an
error occurred.
http_reason: The human-readable string associated with http_status
file_url: If the upload succeeded, the URL of the file on Google
Code, None otherwise.
"""
  # The login is the user part of a user@gmail.com address. If the login
  # provided is in the full user@domain form, strip it down.
if user_name.endswith('@gmail.com'):
user_name = user_name[:user_name.index('@gmail.com')]
form_fields = [('summary', summary)]
if labels is not None:
form_fields.extend([('label', l.strip()) for l in labels])
content_type, body = encode_upload_request(form_fields, file)
upload_host = '%s.googlecode.com' % project_name
upload_uri = '/files'
auth_token = base64.b64encode('%s:%s'% (user_name, password))
headers = {
'Authorization': 'Basic %s' % auth_token,
'User-Agent': 'Googlecode.com uploader v0.9.4',
'Content-Type': content_type,
}
server = httplib.HTTPSConnection(upload_host)
server.request('POST', upload_uri, body, headers)
resp = server.getresponse()
server.close()
if resp.status == 201:
location = resp.getheader('Location', None)
else:
location = None
return resp.status, resp.reason, location
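# Illustrative call (hypothetical values):
#
#   status, reason, url = upload('dist/foo-1.0.tar.gz', 'myproject',
#                                'alice', 's3cret', 'Release 1.0',
#                                labels=['Type-Source', 'OpSys-All'])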
def encode_upload_request(fields, file_path):
"""Encode the given fields and file into a multipart form body.
fields is a sequence of (name, value) pairs. file is the path of
the file to upload. The file will be uploaded to Google Code with
the same file name.
Returns: (content_type, body) ready for httplib.HTTP instance
"""
BOUNDARY = '----------Googlecode_boundary_reindeer_flotilla'
CRLF = '\r\n'
body = []
# Add the metadata about the upload first
for key, value in fields:
body.extend(
['--' + BOUNDARY,
'Content-Disposition: form-data; name="%s"' % key,
'',
value,
])
# Now add the file itself
file_name = os.path.basename(file_path)
f = open(file_path, 'rb')
file_content = f.read()
f.close()
body.extend(
['--' + BOUNDARY,
'Content-Disposition: form-data; name="filename"; filename="%s"'
% file_name,
# The upload server determines the mime-type, no need to set it.
'Content-Type: application/octet-stream',
'',
file_content,
])
# Finalize the form body
body.extend(['--' + BOUNDARY + '--', ''])
return 'multipart/form-data; boundary=%s' % BOUNDARY, CRLF.join(body)
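# For reference, the body produced above looks roughly like this on the
# wire (CRLF line endings; one part per metadata field, then one file
# part, where <BOUNDARY> is the constant defined above):
#
#   --<BOUNDARY>
#   Content-Disposition: form-data; name="summary"
#
#   Release 1.0
#   --<BOUNDARY>
#   Content-Disposition: form-data; name="filename"; filename="mytool-1.0.zip"
#   Content-Type: application/octet-stream
#
#   <raw file bytes>
#   --<BOUNDARY>--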
def upload_find_auth(file_path, project_name, summary, labels=None,
user_name=None, password=None, tries=3):
"""Find credentials and upload a file to a Google Code project's file server.
file_path, project_name, summary, and labels are passed as-is to upload.
Args:
file_path: The local path to the file.
project_name: The name of your project on Google Code.
summary: A small description for the file.
labels: an optional list of label strings with which to tag the file.
user_name: Your Google account name.
password: The googlecode.com password for your account.
tries: How many attempts to make.
"""
while tries > 0:
if user_name is None:
# Read the username if it was not specified, or on subsequent tries.
sys.stdout.write('Please enter your googlecode.com username: ')
sys.stdout.flush()
user_name = sys.stdin.readline().rstrip()
if password is None:
# Read the password if it was not specified, or on subsequent tries.
print 'Please enter your googlecode.com password.'
print '** Note that this is NOT your Gmail account password! **'
print 'It is the password you use to access Subversion repositories,'
print 'and can be found here: http://code.google.com/hosting/settings'
password = getpass.getpass()
status, reason, url = upload(file_path, project_name, user_name, password,
summary, labels)
# Returns 403 Forbidden instead of 401 Unauthorized for bad
# credentials as of 2007-07-17.
if status in [httplib.FORBIDDEN, httplib.UNAUTHORIZED]:
# Reset for another try.
user_name = password = None
tries = tries - 1
else:
# We're done.
break
return status, reason, url
def main():
parser = optparse.OptionParser(usage='googlecode-upload.py -s SUMMARY '
'-p PROJECT [options] FILE')
parser.add_option('-s', '--summary', dest='summary',
help='Short description of the file')
parser.add_option('-p', '--project', dest='project',
help='Google Code project name')
parser.add_option('-u', '--user', dest='user',
help='Your Google Code username')
parser.add_option('-w', '--password', dest='password',
help='Your Google Code password')
parser.add_option('-l', '--labels', dest='labels',
help='An optional list of comma-separated labels to attach '
'to the file')
options, args = parser.parse_args()
if not options.summary:
parser.error('File summary is missing.')
elif not options.project:
parser.error('Project name is missing.')
elif len(args) < 1:
parser.error('File to upload not provided.')
elif len(args) > 1:
parser.error('Only one file may be specified.')
file_path = args[0]
if options.labels:
labels = options.labels.split(',')
else:
labels = None
status, reason, url = upload_find_auth(file_path, options.project,
options.summary, labels,
options.user, options.password)
if url:
print 'The file was uploaded successfully.'
print 'URL: %s' % url
return 0
else:
print 'An error occurred. Your file was not uploaded.'
print 'Google Code upload server said: %s (%s)' % (reason, status)
return 1
if __name__ == '__main__':
sys.exit(main())
|
mit
|
transceptor-technology/trender
|
trender/lines.py
|
1
|
2602
|
'''Lines Class which is responsible for reading lines.
:copyright: 2015, Jeroen van der Heijden (Transceptor Technology)
'''
import re
import os
from .constants import MAP_LINE_TYPE, LINE_PASTE, FILENAME
from .exceptions import DefineBlockError, TemplateNotExistsError
class Lines:
RE_BLOCK = re.compile(r'\s*#([a-zA-Z_]*)', re.UNICODE)
RE_INCLUDE = re.compile(r'^\s*#include\s+([{FILENAME}]+)\s*$'
.format(FILENAME=FILENAME), re.UNICODE)
def __init__(self, content_or_file, path=None):
self._path = path
self._lines = content_or_file.splitlines() \
if path is None \
else self._read_template(content_or_file)
self._gen_lines = self._reader()
@property
def next(self):
try:
line = next(self._gen_lines)
self.current_type = self._get_current_type(line)
return line
except StopIteration:
return None
def _reader(self):
for self.pos, line in enumerate(self._lines):
yield line
def _read_template(self, fn):
if self._path is None:
raise DefineBlockError('''Incorrect block definition at line {}, {}
include/extend statements only work when starting with a file and path,
not with string content'''.format(self.pos, self.current))
fn = os.path.join(self._path, fn)
if not os.path.exists(fn):
raise TemplateNotExistsError(
'Cannot find template file: {}'.format(fn))
with open(fn, 'r', encoding='utf-8') as f:
content = f.read()
return content.splitlines()
def include(self):
m = self.__class__.RE_INCLUDE.match(self.current)
if m is None:
raise DefineBlockError('''Incorrect block definition at line {}, {}
Should be something like: #include path/foo.html'''.format(
self.pos, self.current))
self._lines = \
self._read_template(m.group(1)) + self._lines[self.pos + 1:]
self._gen_lines = self._reader()
def extend(self, fn):
self._lines = self._read_template(fn) + self._lines[self.pos + 1:]
self._gen_lines = self._reader()
def __len__(self):
return len(self._lines)
@property
def current(self):
return self._lines[self.pos]
@classmethod
def _get_current_type(cls, line):
# '#keyword' lines map via the keyword; other lines map via True
# (non-empty) or None (empty). Unknown keys fall back to LINE_PASTE.
m = cls.RE_BLOCK.match(line)
if m:
key = m.group(1)
else:
key = True if line.strip() else None
return MAP_LINE_TYPE.get(key, LINE_PASTE)
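# A minimal usage sketch (string input with a hypothetical '#if' block;
# line types come from trender's MAP_LINE_TYPE constants, and
# include/extend only work when a path was given):
#
#   lines = Lines('#if x\nhello\n#end')
#   while True:
#       line = lines.next
#       if line is None:
#           break
#       print(line, lines.current_type)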
|
mit
|
LuckyGameCn/LHCocosGame
|
cocos2d/tools/bindings-generator/backup/clang-llvm-3.3-pybinding/enumerations.py
|
307
|
1077
|
#===- enumerations.py - Python Enumerations ------------------*- python -*--===#
#
# The LLVM Compiler Infrastructure
#
# This file is distributed under the University of Illinois Open Source
# License. See LICENSE.TXT for details.
#
#===------------------------------------------------------------------------===#
"""
Clang Enumerations
==================
This module provides static definitions of enumerations that exist in libclang.
Enumerations are typically defined as a list of tuples. The exported values are
typically munged into other types or classes at module load time.
All enumerations are centrally defined in this file so they are all grouped
together and easier to audit. And, maybe even one day this file will be
automatically generated by scanning the libclang headers!
"""
# Maps to CXTokenKind. Note that libclang maintains a separate set of token
# enumerations from the C++ API.
TokenKinds = [
('PUNCTUATION', 0),
('KEYWORD', 1),
('IDENTIFIER', 2),
('LITERAL', 3),
('COMMENT', 4),
]
__all__ = ['TokenKinds']
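# For reference, clang.cindex consumes this table at import time along
# the lines of (sketch):
#
#   for name, value in enumerations.TokenKinds:
#       TokenKind.register(value, name)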
|
mit
|
APerson241/APersonBot
|
article-history/test_fixer.py
|
2
|
3654
|
import unittest
from fixer import History, process, encode_wikilinks, decode_wikilinks
class TestEncodeDecode(unittest.TestCase):
def roundtrip(self, some_text):
wikitext, wikilinks = encode_wikilinks(some_text)
self.assertEqual(some_text, decode_wikilinks(wikitext, wikilinks))
def test_roundtrip_empty(self):
self.roundtrip("")
def test_roundtrip_random_text(self):
self.roundtrip("asdf")
def test_roundtrip_non_piped_link(self):
self.roundtrip("[[asdf]]")
def test_roundtrip_piped_link(self):
self.roundtrip("[[asdf|hjkl]]")
class TestAH(unittest.TestCase):
def setUp(self):
self.history_normal = History("""
{{article history
|action1=GAN
|action1date=12:52, 7 December 2005
|action1result=listed
|action1oldid=30462537
|currentstatus=GA
|topic=math
}}""")
def test_actions(self):
self.assertEqual(self.history_normal.actions[0],
("GAN", "12:52, 7 December 2005", "", "listed", "30462537"))
def test_other(self):
self.assertEqual(self.history_normal.other_parameters,
{"currentstatus":"GA", "topic":"math"})
class TestFixer(unittest.TestCase):
def test_itn(self):
self.assertEqual(process("""
{{article history
|action1=GAN
|action1date=12:52, 7 December 2005
|action1result=listed
|action1oldid=30462537
|currentstatus=GA
|topic=math
}}
{{ITN talk|date1=12 September 2009|date2=24 December 2013}}"""), """
{{article history
|action1=GAN
|action1date=12:52, 7 December 2005
|action1link=
|action1result=listed
|action1oldid=30462537
|currentstatus=GA
|itndate=12 September 2009
|itn2date=24 December 2013
|topic=math
}}""")
def test_otd(self):
self.assertEqual(process("""
{{article history
|action1=GAN
|action1date=12:52, 7 December 2005
|action1result=listed
|action1oldid=30462537
|currentstatus=GA
|topic=math
}}
{{On this day|date1=2004-05-28|oldid1=6717950|date2=2005-05-28|oldid2=16335227}}"""), """
{{article history
|action1=GAN
|action1date=12:52, 7 December 2005
|action1link=
|action1result=listed
|action1oldid=30462537
|currentstatus=GA
|otddate=2004-05-28
|otdoldid=6717950
|otd2date=2005-05-28
|otd2oldid=16335227
|topic=math
}}""")
def test_dyk(self):
self.assertEqual(process("""
{{Article history
| action1 = GAN
| action1date = 14:45, 22 March 2015 (UTC)
| action1link = Talk:Dyslexia/GA1
| action1result = Passed
| action1oldid = 653061069
}}
{{dyktalk|6 April|2015|entry= ... that '''[[dyslexia]]''' is the most common learning disability, affecting about 3% to 7% of people?}}"""), """
{{article history
|action1=GAN
|action1date=14:45, 22 March 2015 (UTC)
|action1link=Talk:Dyslexia/GA1
|action1result=Passed
|action1oldid=653061069
|dykdate=6 April 2015
|dykentry= ... that '''[[dyslexia]]''' is the most common learning disability, affecting about 3% to 7% of people?
}}""")
def test_empty(self):
self.assertEqual(process(""), "")
def test_blank_ah(self):
self.assertEqual(process("""
{{Article history}}
{{ITN talk|date1=1 June 2009}}"""), """
{{article history
|itndate=1 June 2009
}}""")
def test_already(self):
self.assertEqual(process("""
{{Article history|itndate=1 June 2009}}
{{ITN talk|date1=1 June 2010}}"""), """
{{article history
|itndate=1 June 2009
|itn2date=1 June 2010
}}""")
def test_multiple(self):
self.assertEqual(process("""
{{Article history}}
{{ITN talk|date1=1 June 2010}}
{{ITN talk|date1=1 June 2009}}"""), """
{{article history
|itndate=1 June 2009
|itn2date=1 June 2010
}}""")
if __name__ == '__main__':
unittest.main()
|
mit
|
landler/flask-oauthlib
|
flask_oauthlib/contrib/cache.py
|
14
|
2871
|
# coding: utf-8
from werkzeug.contrib.cache import NullCache, SimpleCache, FileSystemCache
from werkzeug.contrib.cache import MemcachedCache, RedisCache
class Cache(object):
def __init__(self, app, config_prefix='OAUTHLIB', **kwargs):
self.config_prefix = config_prefix
self.config = app.config
cache_type = '_%s' % self._config('type')
kwargs.update(dict(
default_timeout=self._config('DEFAULT_TIMEOUT', 100)
))
try:
self.cache = getattr(self, cache_type)(**kwargs)
except AttributeError:
raise RuntimeError(
'`%s` is not a valid cache type!' % cache_type
)
app.extensions[config_prefix.lower() + '_cache'] = self.cache
def __getattr__(self, key):
try:
return object.__getattribute__(self, key)
except AttributeError:
try:
return getattr(self.cache, key)
except AttributeError:
raise AttributeError('No such attribute: %s' % key)
def _config(self, key, default='error'):
key = key.upper()
prior = '%s_CACHE_%s' % (self.config_prefix, key)
if prior in self.config:
return self.config[prior]
fallback = 'CACHE_%s' % key
if fallback in self.config:
return self.config[fallback]
if default == 'error':
raise RuntimeError('%s is missing.' % prior)
return default
def _null(self, **kwargs):
"""Returns a :class:`NullCache` instance"""
return NullCache()
def _simple(self, **kwargs):
"""Returns a :class:`SimpleCache` instance
.. warning::
This cache system might not be thread safe. Use with caution.
"""
kwargs.update(dict(threshold=self._config('threshold', 500)))
return SimpleCache(**kwargs)
def _memcache(self, **kwargs):
"""Returns a :class:`MemcachedCache` instance"""
kwargs.update(dict(
servers=self._config('MEMCACHED_SERVERS', None),
key_prefix=self._config('key_prefix', None),
))
return MemcachedCache(**kwargs)
def _redis(self, **kwargs):
"""Returns a :class:`RedisCache` instance"""
kwargs.update(dict(
host=self._config('REDIS_HOST', 'localhost'),
port=self._config('REDIS_PORT', 6379),
password=self._config('REDIS_PASSWORD', None),
db=self._config('REDIS_DB', 0),
key_prefix=self._config('KEY_PREFIX', None),
))
return RedisCache(**kwargs)
def _filesystem(self, **kwargs):
"""Returns a :class:`FileSystemCache` instance"""
kwargs.update(dict(
threshold=self._config('threshold', 500),
))
return FileSystemCache(self._config('dir', None), **kwargs)
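# A minimal usage sketch (hypothetical Flask app; with the default
# prefix, keys are read as OAUTHLIB_CACHE_* and fall back to CACHE_*):
#
#   app.config['OAUTHLIB_CACHE_TYPE'] = 'simple'
#   cache = Cache(app)
#   cache.set('token', 'abc', timeout=60)   # proxied to SimpleCache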
|
bsd-3-clause
|
wweiradio/django
|
tests/utils_tests/test_duration.py
|
364
|
1677
|
import datetime
import unittest
from django.utils.dateparse import parse_duration
from django.utils.duration import duration_string
class TestDurationString(unittest.TestCase):
def test_simple(self):
duration = datetime.timedelta(hours=1, minutes=3, seconds=5)
self.assertEqual(duration_string(duration), '01:03:05')
def test_days(self):
duration = datetime.timedelta(days=1, hours=1, minutes=3, seconds=5)
self.assertEqual(duration_string(duration), '1 01:03:05')
def test_microseconds(self):
duration = datetime.timedelta(hours=1, minutes=3, seconds=5, microseconds=12345)
self.assertEqual(duration_string(duration), '01:03:05.012345')
def test_negative(self):
duration = datetime.timedelta(days=-1, hours=1, minutes=3, seconds=5)
self.assertEqual(duration_string(duration), '-1 01:03:05')
class TestParseDurationRoundtrip(unittest.TestCase):
def test_simple(self):
duration = datetime.timedelta(hours=1, minutes=3, seconds=5)
self.assertEqual(parse_duration(duration_string(duration)), duration)
def test_days(self):
duration = datetime.timedelta(days=1, hours=1, minutes=3, seconds=5)
self.assertEqual(parse_duration(duration_string(duration)), duration)
def test_microseconds(self):
duration = datetime.timedelta(hours=1, minutes=3, seconds=5, microseconds=12345)
self.assertEqual(parse_duration(duration_string(duration)), duration)
def test_negative(self):
duration = datetime.timedelta(days=-1, hours=1, minutes=3, seconds=5)
self.assertEqual(parse_duration(duration_string(duration)), duration)
|
bsd-3-clause
|
longmen21/edx-platform
|
lms/djangoapps/course_api/blocks/tests/test_views.py
|
17
|
10129
|
"""
Tests for Blocks Views
"""
from datetime import datetime
from django.core.urlresolvers import reverse
from string import join
from urllib import urlencode
from urlparse import urlunparse
from opaque_keys.edx.locator import CourseLocator
from student.models import CourseEnrollment
from student.tests.factories import AdminFactory, CourseEnrollmentFactory, UserFactory
from xmodule.modulestore.tests.django_utils import SharedModuleStoreTestCase
from xmodule.modulestore.tests.factories import ToyCourseFactory
from .helpers import deserialize_usage_key
class TestBlocksView(SharedModuleStoreTestCase):
"""
Test class for BlocksView
"""
requested_fields = ['graded', 'format', 'student_view_multi_device', 'children', 'not_a_field', 'due']
BLOCK_TYPES_WITH_STUDENT_VIEW_DATA = ['video', 'discussion']
@classmethod
def setUpClass(cls):
super(TestBlocksView, cls).setUpClass()
# create a toy course
cls.course = ToyCourseFactory.create(
modulestore=cls.store,
due=datetime(3013, 9, 18, 11, 30, 00),
)
cls.course_key = cls.course.id
cls.course_usage_key = cls.store.make_course_usage_key(cls.course_key)
cls.non_orphaned_block_usage_keys = set(
unicode(item.location)
for item in cls.store.get_items(cls.course_key)
# remove all orphaned items in the course, except for the root 'course' block
if cls.store.get_parent_location(item.location) or item.category == 'course'
)
def setUp(self):
super(TestBlocksView, self).setUp()
# create and enroll user in the toy course
self.user = UserFactory.create()
self.client.login(username=self.user.username, password='test')
CourseEnrollmentFactory.create(user=self.user, course_id=self.course_key)
# default values for url and query_params
self.url = reverse(
'blocks_in_block_tree',
kwargs={'usage_key_string': unicode(self.course_usage_key)}
)
self.query_params = {'depth': 'all', 'username': self.user.username}
def verify_response(self, expected_status_code=200, params=None, url=None):
"""
Ensure that sending a GET request to the specified URL returns the
expected status code.
Arguments:
expected_status_code: The status_code that is expected in the
response.
params: Parameters to add to self.query_params to include in the
request.
url: The URL to send the GET request. Default is self.url.
Returns:
response: The HttpResponse returned by the request
"""
if params:
self.query_params.update(params)
response = self.client.get(url or self.url, self.query_params)
self.assertEquals(response.status_code, expected_status_code)
return response
def verify_response_block_list(self, response):
"""
Verify that the response contains only the expected block ids.
"""
self.assertSetEqual(
{block['id'] for block in response.data},
self.non_orphaned_block_usage_keys,
)
def verify_response_block_dict(self, response):
"""
Verify that the response contains the expected blocks
"""
self.assertSetEqual(
set(response.data['blocks'].iterkeys()),
self.non_orphaned_block_usage_keys,
)
def verify_response_with_requested_fields(self, response):
"""
Verify the response has the expected structure
"""
self.verify_response_block_dict(response)
for block_key_string, block_data in response.data['blocks'].iteritems():
block_key = deserialize_usage_key(block_key_string, self.course_key)
xblock = self.store.get_item(block_key)
self.assert_in_iff('children', block_data, xblock.has_children)
self.assert_in_iff('graded', block_data, xblock.graded is not None)
self.assert_in_iff('format', block_data, xblock.format is not None)
self.assert_in_iff('due', block_data, xblock.due is not None)
self.assert_true_iff(block_data['student_view_multi_device'], block_data['type'] == 'html')
self.assertNotIn('not_a_field', block_data)
if xblock.has_children:
self.assertSetEqual(
set(unicode(child.location) for child in xblock.get_children()),
set(block_data['children']),
)
def assert_in_iff(self, member, container, predicate):
"""
Assert that member is in container if and only if predicate is true.
Arguments:
member - any object
container - any container
predicate - an expression, tested for truthiness
"""
if predicate:
self.assertIn(member, container)
else:
self.assertNotIn(member, container)
def assert_true_iff(self, expression, predicate):
"""
Assert that the expression is true if and only if the predicate is true
Arguments:
expression
predicate
"""
if predicate:
self.assertTrue(expression)
else:
self.assertFalse(expression)
def test_not_authenticated(self):
self.client.logout()
self.verify_response(401)
def test_not_enrolled(self):
CourseEnrollment.unenroll(self.user, self.course_key)
self.verify_response(403)
def test_non_existent_course(self):
usage_key = self.store.make_course_usage_key(CourseLocator('non', 'existent', 'course'))
url = reverse(
'blocks_in_block_tree',
kwargs={'usage_key_string': unicode(usage_key)}
)
self.verify_response(403, url=url)
def test_no_user_non_staff(self):
self.query_params.pop('username')
self.query_params['all_blocks'] = True
self.verify_response(403)
def test_no_user_staff_not_all_blocks(self):
self.query_params.pop('username')
self.verify_response(400)
def test_no_user_staff_all_blocks(self):
self.client.login(username=AdminFactory.create().username, password='test')
self.query_params.pop('username')
self.query_params['all_blocks'] = True
self.verify_response()
def test_basic(self):
response = self.verify_response()
self.assertEquals(response.data['root'], unicode(self.course_usage_key))
self.verify_response_block_dict(response)
for block_key_string, block_data in response.data['blocks'].iteritems():
block_key = deserialize_usage_key(block_key_string, self.course_key)
self.assertEquals(block_data['id'], block_key_string)
self.assertEquals(block_data['type'], block_key.block_type)
self.assertEquals(block_data['display_name'], self.store.get_item(block_key).display_name or '')
def test_return_type_param(self):
response = self.verify_response(params={'return_type': 'list'})
self.verify_response_block_list(response)
def test_block_counts_param(self):
response = self.verify_response(params={'block_counts': ['course', 'chapter']})
self.verify_response_block_dict(response)
for block_data in response.data['blocks'].itervalues():
self.assertEquals(
block_data['block_counts']['course'],
1 if block_data['type'] == 'course' else 0,
)
self.assertEquals(
block_data['block_counts']['chapter'],
(
1 if block_data['type'] == 'chapter' else
5 if block_data['type'] == 'course' else
0
)
)
def test_student_view_data_param(self):
response = self.verify_response(params={
'student_view_data': self.BLOCK_TYPES_WITH_STUDENT_VIEW_DATA + ['chapter']
})
self.verify_response_block_dict(response)
for block_data in response.data['blocks'].itervalues():
self.assert_in_iff(
'student_view_data',
block_data,
block_data['type'] in self.BLOCK_TYPES_WITH_STUDENT_VIEW_DATA
)
def test_navigation_param(self):
response = self.verify_response(params={'nav_depth': 10})
self.verify_response_block_dict(response)
for block_data in response.data['blocks'].itervalues():
self.assertIn('descendants', block_data)
def test_requested_fields_param(self):
response = self.verify_response(
params={'requested_fields': self.requested_fields}
)
self.verify_response_with_requested_fields(response)
def test_with_list_field_url(self):
query = urlencode(self.query_params.items() + [
('requested_fields', self.requested_fields[0]),
('requested_fields', self.requested_fields[1]),
('requested_fields', join(self.requested_fields[1:], ',')),
])
self.query_params = None
response = self.verify_response(
url=urlunparse(("", "", self.url, "", query, ""))
)
self.verify_response_with_requested_fields(response)
class TestBlocksInCourseView(TestBlocksView): # pylint: disable=test-inherits-tests
"""
Test class for BlocksInCourseView
"""
def setUp(self):
super(TestBlocksInCourseView, self).setUp()
self.url = reverse('blocks_in_course')
self.query_params['course_id'] = unicode(self.course_key)
def test_no_course_id(self):
self.query_params.pop('course_id')
self.verify_response(400)
def test_invalid_course_id(self):
self.verify_response(400, params={'course_id': 'invalid_course_id'})
def test_non_existent_course(self):
self.verify_response(403, params={'course_id': unicode(CourseLocator('non', 'existent', 'course'))})
|
agpl-3.0
|
Ashaba/rms
|
rmslocalenv/lib/python2.7/site-packages/html5lib/treewalkers/dom.py
|
505
|
1421
|
from __future__ import absolute_import, division, unicode_literals
from xml.dom import Node
from . import _base
class TreeWalker(_base.NonRecursiveTreeWalker):
def getNodeDetails(self, node):
if node.nodeType == Node.DOCUMENT_TYPE_NODE:
return _base.DOCTYPE, node.name, node.publicId, node.systemId
elif node.nodeType in (Node.TEXT_NODE, Node.CDATA_SECTION_NODE):
return _base.TEXT, node.nodeValue
elif node.nodeType == Node.ELEMENT_NODE:
attrs = {}
for attr in list(node.attributes.keys()):
attr = node.getAttributeNode(attr)
if attr.namespaceURI:
attrs[(attr.namespaceURI, attr.localName)] = attr.value
else:
attrs[(None, attr.name)] = attr.value
return (_base.ELEMENT, node.namespaceURI, node.nodeName,
attrs, node.hasChildNodes())
elif node.nodeType == Node.COMMENT_NODE:
return _base.COMMENT, node.nodeValue
elif node.nodeType in (Node.DOCUMENT_NODE, Node.DOCUMENT_FRAGMENT_NODE):
return (_base.DOCUMENT,)
else:
return _base.UNKNOWN, node.nodeType
def getFirstChild(self, node):
return node.firstChild
def getNextSibling(self, node):
return node.nextSibling
def getParentNode(self, node):
return node.parentNode
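# A minimal usage sketch (walking a minidom tree; the _base walker
# classes make the instance iterable over token tuples):
#
#   from xml.dom.minidom import parseString
#   for token in TreeWalker(parseString('<p>hi</p>')):
#       print(token)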
|
mit
|
cloudbau/nova
|
nova/cloudpipe/pipelib.py
|
10
|
7110
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
CloudPipe - Build a user-data payload zip file, and launch
an instance with it.
"""
import os
import string
import zipfile
from oslo.config import cfg
from nova import compute
from nova.compute import flavors
from nova import crypto
from nova import db
from nova import exception
from nova.openstack.common import fileutils
from nova.openstack.common.gettextutils import _
from nova.openstack.common import log as logging
from nova import paths
from nova import utils
cloudpipe_opts = [
cfg.StrOpt('vpn_image_id',
default='0',
help='image id used when starting up a cloudpipe vpn server'),
cfg.StrOpt('vpn_flavor',
# Deprecated in Havana
deprecated_name='vpn_instance_type',
default='m1.tiny',
help=_('Flavor for vpn instances')),
cfg.StrOpt('boot_script_template',
default=paths.basedir_def('nova/cloudpipe/bootscript.template'),
help=_('Template for cloudpipe instance boot script')),
cfg.StrOpt('dmz_net',
default='10.0.0.0',
help=_('Network to push into openvpn config')),
cfg.StrOpt('dmz_mask',
default='255.255.255.0',
help=_('Netmask to push into openvpn config')),
cfg.StrOpt('vpn_key_suffix',
default='-vpn',
help='Suffix to add to project name for vpn key and secgroups'),
]
CONF = cfg.CONF
CONF.register_opts(cloudpipe_opts)
LOG = logging.getLogger(__name__)
def is_vpn_image(image_id):
return image_id == CONF.vpn_image_id
def _load_boot_script():
shellfile = open(CONF.boot_script_template, "r")
try:
s = string.Template(shellfile.read())
finally:
shellfile.close()
CONF.import_opt('ec2_dmz_host', 'nova.api.ec2.cloud')
CONF.import_opt('ec2_port', 'nova.api.ec2.cloud')
CONF.import_opt('cnt_vpn_clients', 'nova.network.manager')
return s.substitute(cc_dmz=CONF.ec2_dmz_host,
cc_port=CONF.ec2_port,
dmz_net=CONF.dmz_net,
dmz_mask=CONF.dmz_mask,
num_vpn=CONF.cnt_vpn_clients)
class CloudPipe(object):
def __init__(self):
self.compute_api = compute.API()
def get_encoded_zip(self, project_id):
# Make a payload.zip
with utils.tempdir() as tmpdir:
filename = "payload.zip"
zippath = os.path.join(tmpdir, filename)
z = zipfile.ZipFile(zippath, "w", zipfile.ZIP_DEFLATED)
boot_script = _load_boot_script()
# genvpn, sign csr
crypto.generate_vpn_files(project_id)
z.writestr('autorun.sh', boot_script)
crl = os.path.join(crypto.ca_folder(project_id), 'crl.pem')
z.write(crl, 'crl.pem')
server_key = os.path.join(crypto.ca_folder(project_id),
'server.key')
z.write(server_key, 'server.key')
ca_crt = crypto.ca_path(project_id)
z.write(ca_crt, 'ca.crt')
server_crt = os.path.join(crypto.ca_folder(project_id),
'server.crt')
z.write(server_crt, 'server.crt')
z.close()
zippy = open(zippath, "r")
# NOTE(vish): run instances expects encoded userdata, it is decoded
# in the get_metadata_call. autorun.sh also decodes the zip file,
# hence the double encoding.
encoded = zippy.read().encode("base64").encode("base64")
zippy.close()
return encoded
def launch_vpn_instance(self, context):
LOG.debug(_("Launching VPN for %s") % (context.project_id))
key_name = self.setup_key_pair(context)
group_name = self.setup_security_group(context)
instance_type = flavors.get_flavor_by_name(
CONF.vpn_flavor)
instance_name = '%s%s' % (context.project_id, CONF.vpn_key_suffix)
user_data = self.get_encoded_zip(context.project_id)
return self.compute_api.create(context,
instance_type,
CONF.vpn_image_id,
display_name=instance_name,
user_data=user_data,
key_name=key_name,
security_group=[group_name])
def setup_security_group(self, context):
group_name = '%s%s' % (context.project_id, CONF.vpn_key_suffix)
group = {'user_id': context.user_id,
'project_id': context.project_id,
'name': group_name,
'description': 'Group for vpn'}
try:
group_ref = db.security_group_create(context, group)
except exception.SecurityGroupExists:
return group_name
rule = {'parent_group_id': group_ref['id'],
'cidr': '0.0.0.0/0',
'protocol': 'udp',
'from_port': 1194,
'to_port': 1194}
db.security_group_rule_create(context, rule)
rule = {'parent_group_id': group_ref['id'],
'cidr': '0.0.0.0/0',
'protocol': 'icmp',
'from_port': -1,
'to_port': -1}
db.security_group_rule_create(context, rule)
# NOTE(vish): No need to trigger the group since the instance
# has not been run yet.
return group_name
def setup_key_pair(self, context):
key_name = '%s%s' % (context.project_id, CONF.vpn_key_suffix)
try:
keypair_api = compute.api.KeypairAPI()
result, private_key = keypair_api.create_key_pair(context,
context.user_id,
key_name)
key_dir = os.path.join(CONF.keys_path, context.user_id)
fileutils.ensure_tree(key_dir)
key_path = os.path.join(key_dir, '%s.pem' % key_name)
with open(key_path, 'w') as f:
f.write(private_key)
except (exception.KeyPairExists, os.error, IOError):
pass
return key_name
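# A minimal usage sketch (requires an authenticated RequestContext with
# a project_id; see launch_vpn_instance above):
#
#   pipe = CloudPipe()
#   instance = pipe.launch_vpn_instance(context)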
|
apache-2.0
|
kleientertainment/ds_mod_tools
|
pkg/win32/Python27/Lib/symtable.py
|
13
|
7760
|
"""Interface to the compiler's internal symbol tables"""
import _symtable
from _symtable import (USE, DEF_GLOBAL, DEF_LOCAL, DEF_PARAM,
DEF_IMPORT, DEF_BOUND, OPT_IMPORT_STAR, OPT_EXEC, OPT_BARE_EXEC,
SCOPE_OFF, SCOPE_MASK, FREE, GLOBAL_IMPLICIT, GLOBAL_EXPLICIT, CELL, LOCAL)
import weakref
__all__ = ["symtable", "SymbolTable", "Class", "Function", "Symbol"]
def symtable(code, filename, compile_type):
raw = _symtable.symtable(code, filename, compile_type)
for top in raw.itervalues():
if top.name == 'top':
break
return _newSymbolTable(top, filename)
class SymbolTableFactory:
def __init__(self):
self.__memo = weakref.WeakValueDictionary()
def new(self, table, filename):
if table.type == _symtable.TYPE_FUNCTION:
return Function(table, filename)
if table.type == _symtable.TYPE_CLASS:
return Class(table, filename)
return SymbolTable(table, filename)
def __call__(self, table, filename):
key = table, filename
obj = self.__memo.get(key, None)
if obj is None:
obj = self.__memo[key] = self.new(table, filename)
return obj
_newSymbolTable = SymbolTableFactory()
class SymbolTable(object):
def __init__(self, raw_table, filename):
self._table = raw_table
self._filename = filename
self._symbols = {}
def __repr__(self):
if self.__class__ == SymbolTable:
kind = ""
else:
kind = "%s " % self.__class__.__name__
if self._table.name == "global":
return "<{0}SymbolTable for module {1}>".format(kind, self._filename)
else:
return "<{0}SymbolTable for {1} in {2}>".format(kind,
self._table.name,
self._filename)
def get_type(self):
if self._table.type == _symtable.TYPE_MODULE:
return "module"
if self._table.type == _symtable.TYPE_FUNCTION:
return "function"
if self._table.type == _symtable.TYPE_CLASS:
return "class"
assert self._table.type in (1, 2, 3), \
"unexpected type: {0}".format(self._table.type)
def get_id(self):
return self._table.id
def get_name(self):
return self._table.name
def get_lineno(self):
return self._table.lineno
def is_optimized(self):
return bool(self._table.type == _symtable.TYPE_FUNCTION
and not self._table.optimized)
def is_nested(self):
return bool(self._table.nested)
def has_children(self):
return bool(self._table.children)
def has_exec(self):
"""Return true if the scope uses exec"""
return bool(self._table.optimized & (OPT_EXEC | OPT_BARE_EXEC))
def has_import_star(self):
"""Return true if the scope uses import *"""
return bool(self._table.optimized & OPT_IMPORT_STAR)
def get_identifiers(self):
return self._table.symbols.keys()
def lookup(self, name):
sym = self._symbols.get(name)
if sym is None:
flags = self._table.symbols[name]
namespaces = self.__check_children(name)
sym = self._symbols[name] = Symbol(name, flags, namespaces)
return sym
def get_symbols(self):
return [self.lookup(ident) for ident in self.get_identifiers()]
def __check_children(self, name):
return [_newSymbolTable(st, self._filename)
for st in self._table.children
if st.name == name]
def get_children(self):
return [_newSymbolTable(st, self._filename)
for st in self._table.children]
class Function(SymbolTable):
# Default values for instance variables
__params = None
__locals = None
__frees = None
__globals = None
def __idents_matching(self, test_func):
return tuple([ident for ident in self.get_identifiers()
if test_func(self._table.symbols[ident])])
def get_parameters(self):
if self.__params is None:
self.__params = self.__idents_matching(lambda x:x & DEF_PARAM)
return self.__params
def get_locals(self):
if self.__locals is None:
locs = (LOCAL, CELL)
test = lambda x: ((x >> SCOPE_OFF) & SCOPE_MASK) in locs
self.__locals = self.__idents_matching(test)
return self.__locals
def get_globals(self):
if self.__globals is None:
glob = (GLOBAL_IMPLICIT, GLOBAL_EXPLICIT)
test = lambda x:((x >> SCOPE_OFF) & SCOPE_MASK) in glob
self.__globals = self.__idents_matching(test)
return self.__globals
def get_frees(self):
if self.__frees is None:
is_free = lambda x:((x >> SCOPE_OFF) & SCOPE_MASK) == FREE
self.__frees = self.__idents_matching(is_free)
return self.__frees
class Class(SymbolTable):
__methods = None
def get_methods(self):
if self.__methods is None:
d = {}
for st in self._table.children:
d[st.name] = 1
self.__methods = tuple(d)
return self.__methods
class Symbol(object):
def __init__(self, name, flags, namespaces=None):
self.__name = name
self.__flags = flags
self.__scope = (flags >> SCOPE_OFF) & SCOPE_MASK # like PyST_GetScope()
self.__namespaces = namespaces or ()
def __repr__(self):
return "<symbol {0!r}>".format(self.__name)
def get_name(self):
return self.__name
def is_referenced(self):
return bool(self.__flags & _symtable.USE)
def is_parameter(self):
return bool(self.__flags & DEF_PARAM)
def is_global(self):
return bool(self.__scope in (GLOBAL_IMPLICIT, GLOBAL_EXPLICIT))
def is_declared_global(self):
return bool(self.__scope == GLOBAL_EXPLICIT)
def is_local(self):
return bool(self.__flags & DEF_BOUND)
def is_free(self):
return bool(self.__scope == FREE)
def is_imported(self):
return bool(self.__flags & DEF_IMPORT)
def is_assigned(self):
return bool(self.__flags & DEF_LOCAL)
def is_namespace(self):
"""Returns true if name binding introduces new namespace.
If the name is used as the target of a function or class
statement, this will be true.
Note that a single name can be bound to multiple objects. If
is_namespace() is true, the name may also be bound to other
objects, like an int or list, that does not introduce a new
namespace.
"""
return bool(self.__namespaces)
def get_namespaces(self):
"""Return a list of namespaces bound to this name"""
return self.__namespaces
def get_namespace(self):
"""Returns the single namespace bound to this name.
Raises ValueError if the name is bound to multiple namespaces.
"""
if len(self.__namespaces) != 1:
raise ValueError, "name is bound to multiple namespaces"
return self.__namespaces[0]
if __name__ == "__main__":
import os, sys
src = open(sys.argv[0]).read()
mod = symtable(src, os.path.split(sys.argv[0])[1], "exec")
for ident in mod.get_identifiers():
info = mod.lookup(ident)
print info, info.is_local(), info.is_namespace()
|
mit
|
defance/edx-platform
|
common/djangoapps/embargo/tests/test_middleware.py
|
92
|
7409
|
"""
Tests for EmbargoMiddleware with CountryAccessRules
"""
import unittest
from mock import patch
import ddt
from django.core.urlresolvers import reverse
from django.conf import settings
from django.core.cache import cache as django_cache
from util.testing import UrlResetMixin
from student.tests.factories import UserFactory
from xmodule.modulestore.tests.factories import CourseFactory
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from config_models.models import cache as config_cache
from embargo.models import RestrictedCourse, IPFilter
from embargo.test_utils import restrict_course
@ddt.ddt
@unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', 'Test only valid in lms')
class EmbargoMiddlewareAccessTests(UrlResetMixin, ModuleStoreTestCase):
"""Tests of embargo middleware country access rules.
There are detailed unit tests for the rule logic in
`test_api.py`; here, we're mainly testing the integration
with middleware
"""
USERNAME = 'fred'
PASSWORD = 'secret'
@patch.dict(settings.FEATURES, {'EMBARGO': True})
def setUp(self):
super(EmbargoMiddlewareAccessTests, self).setUp('embargo')
self.user = UserFactory(username=self.USERNAME, password=self.PASSWORD)
self.course = CourseFactory.create()
self.client.login(username=self.USERNAME, password=self.PASSWORD)
self.courseware_url = reverse(
'course_root',
kwargs={'course_id': unicode(self.course.id)}
)
self.non_courseware_url = reverse('dashboard')
# Clear the cache to avoid interference between tests
django_cache.clear()
config_cache.clear()
@patch.dict(settings.FEATURES, {'EMBARGO': True})
@ddt.data(True, False)
def test_blocked(self, disable_access_check):
with restrict_course(self.course.id, access_point='courseware', disable_access_check=disable_access_check) as redirect_url: # pylint: disable=line-too-long
response = self.client.get(self.courseware_url)
if disable_access_check:
self.assertEqual(response.status_code, 200)
else:
self.assertRedirects(response, redirect_url)
@patch.dict(settings.FEATURES, {'EMBARGO': True})
def test_allowed(self):
# Add the course to the list of restricted courses
# but don't create any access rules
RestrictedCourse.objects.create(course_key=self.course.id)
# Expect that we can access courseware
response = self.client.get(self.courseware_url)
self.assertEqual(response.status_code, 200)
@patch.dict(settings.FEATURES, {'EMBARGO': True})
def test_non_courseware_url(self):
with restrict_course(self.course.id):
response = self.client.get(self.non_courseware_url)
self.assertEqual(response.status_code, 200)
@patch.dict(settings.FEATURES, {'EMBARGO': True})
@ddt.data(
# request_ip, blacklist, whitelist, is_enabled, allow_access
('173.194.123.35', ['173.194.123.35'], [], True, False),
('173.194.123.35', ['173.194.0.0/16'], [], True, False),
('173.194.123.35', ['127.0.0.0/32', '173.194.0.0/16'], [], True, False),
('173.195.10.20', ['173.194.0.0/16'], [], True, True),
('173.194.123.35', ['173.194.0.0/16'], ['173.194.0.0/16'], True, False),
('173.194.123.35', [], ['173.194.0.0/16'], True, True),
('192.178.2.3', [], ['173.194.0.0/16'], True, True),
('173.194.123.35', ['173.194.123.35'], [], False, True),
)
@ddt.unpack
def test_ip_access_rules(self, request_ip, blacklist, whitelist, is_enabled, allow_access):
# Ensure that IP blocking works for anonymous users
self.client.logout()
# Set up the IP rules
IPFilter.objects.create(
blacklist=", ".join(blacklist),
whitelist=", ".join(whitelist),
enabled=is_enabled
)
# Check that access is enforced
response = self.client.get(
"/",
HTTP_X_FORWARDED_FOR=request_ip,
REMOTE_ADDR=request_ip
)
if allow_access:
self.assertEqual(response.status_code, 200)
else:
redirect_url = reverse(
'embargo_blocked_message',
kwargs={
'access_point': 'courseware',
'message_key': 'embargo'
}
)
self.assertRedirects(response, redirect_url)
@patch.dict(settings.FEATURES, {'EMBARGO': True})
@ddt.data(
('courseware', 'default'),
('courseware', 'embargo'),
('enrollment', 'default'),
('enrollment', 'embargo')
)
@ddt.unpack
def test_always_allow_access_to_embargo_messages(self, access_point, msg_key):
# Blacklist an IP address
IPFilter.objects.create(
blacklist="192.168.10.20",
enabled=True
)
url = reverse(
'embargo_blocked_message',
kwargs={
'access_point': access_point,
'message_key': msg_key
}
)
response = self.client.get(
url,
HTTP_X_FORWARDED_FOR="192.168.10.20",
REMOTE_ADDR="192.168.10.20"
)
self.assertEqual(response.status_code, 200)
@patch.dict(settings.FEATURES, {'EMBARGO': True})
def test_whitelist_ip_skips_country_access_checks(self):
# Whitelist an IP address
IPFilter.objects.create(
whitelist="192.168.10.20",
enabled=True
)
# Set up country access rules so the user would
# be restricted from the course.
with restrict_course(self.course.id):
# Make a request from the whitelisted IP address
response = self.client.get(
self.courseware_url,
HTTP_X_FORWARDED_FOR="192.168.10.20",
REMOTE_ADDR="192.168.10.20"
)
# Expect that we were still able to access the page,
# even though we would have been blocked by country
# access rules.
self.assertEqual(response.status_code, 200)
@patch.dict(settings.FEATURES, {'EMBARGO': True})
def test_always_allow_course_detail_access(self):
""" Access to the Course Structure API's course detail endpoint should always be granted. """
# Make the user staff so that it has permissions to access the views.
self.user.is_staff = True
self.user.save() # pylint: disable=no-member
# Blacklist an IP address
ip_address = "192.168.10.20"
IPFilter.objects.create(
blacklist=ip_address,
enabled=True
)
url = reverse('course_structure_api:v0:detail', kwargs={'course_id': unicode(self.course.id)})
response = self.client.get(
url,
HTTP_X_FORWARDED_FOR=ip_address,
REMOTE_ADDR=ip_address
)
self.assertEqual(response.status_code, 200)
# Test with a fully-restricted course
with restrict_course(self.course.id):
response = self.client.get(
url,
HTTP_X_FORWARDED_FOR=ip_address,
REMOTE_ADDR=ip_address
)
self.assertEqual(response.status_code, 200)
|
agpl-3.0
|
orion-42/numerics-physics-stuff
|
ode.py
|
1
|
4470
|
import numpy as np
import matplotlib.pyplot as plt
################################ methods ###################################
def forward_euler(f, y0, t):
ys = np.zeros((t.size, y0.size))
ys[0, :] = y0
h = t[1] - t[0]
for i in range(t.size - 1):
ys[i + 1, :] = ys[i,:] + h*f(t[i], ys[i,:])
return ys
def heun(f, y0, t): # also called rk2
ys = np.zeros((t.size, y0.size))
ys[0, :] = y0
h = t[1] - t[0]
for i in range(t.size - 1):
k1 = f(t[i], ys[i,:])
k2 = f(t[i] + h, ys[i,:] + h*k1)  # full Euler predictor step (Heun)
ys[i + 1, :] = ys[i,:] + h*(k1 + k2)/2
return ys
def ab2(f, y0, t):
ys = np.zeros((t.size, y0.size))
ys[0, :] = y0
h = t[1] - t[0]
ys[1, :] = forward_euler(f, y0, t[0:2])[1, :]
for i in range(1, len(t) - 1):
ys[i + 1, :] = ys[i, :] + 1.5 * f(t[i], ys[i, :]) * h - 0.5 * f(t[i - 1], ys[i - 1, :]) * h
return ys
def ab3(f, y0, t):
ys = np.zeros((t.size, y0.size))
ys[0, :] = y0
ys[1, :] = forward_euler(f, y0, t[0:2])[1, :]
ys[2, :] = ab2(f, ys[1, :], t[1:3])[1, :]
h = t[1] - t[0]
for i in range(2, len(t) - 1):
ys[i + 1, :] = ys[i, :] + h / 12.0 * (23 * f(t[i], ys[i, :]) - 16 * f(t[i - 1], ys[i - 1, :]) + 5*f(t[i - 2], ys[i - 2, :]))
return ys
def rk4(f, y0, t):
ys = np.zeros((t.size, y0.size))
ys[0,:] = y0
h = t[1] - t[0]
for i in range(t.size - 1):
k1 = f(t[i], ys[i,:])
k2 = f(t[i] + h/2, ys[i,:] + h/2*k1)
k3 = f(t[i] + h/2, ys[i,:] + h/2*k2)
k4 = f(t[i] + h, ys[i,:] + h*k3)
ys[i+1,:] = ys[i,:] + h*(k1 + 2*k2 + 2*k3 + k4)/6
return ys
def leap_frog(f, y0, t):
ys = np.zeros((t.size, y0.size))
ys[0, :] = y0
h = t[1] - t[0]
ys[1, :] = forward_euler(f, y0, t[0:2])[1,:]
for i in range(1, len(t) - 1):
ys[i + 1, :] = ys[i - 1, :] + 2.0 * f(t[i], ys[i, :])*h
return ys
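# Nominal orders of accuracy (global error) for the schemes above:
# forward Euler O(h); Heun/RK2 O(h^2); AB2 O(h^2); AB3 O(h^3);
# RK4 O(h^4); leap-frog O(h^2) (two-step, bootstrapped with one Euler
# step; prone to oscillation on strongly damped problems).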
################### test cases #####################
## harmonic oscillator
def test_harmonic_oscillator():
def exact_harm_osc(t,x0,v0,m,k):
c = k/m
B = np.sqrt(c)
C = np.arctan(-v0/(x0*B))
A = x0/np.cos(C)
return A*np.cos(B*t + C)
T = 20
steps = 1000
t = np.linspace(0, T, steps)
k = 2.3
m = 1.2
c = k/m
x0 = 100.0
v0 = 1.2
y0 = np.array([x0, v0])
harm_osc_rhs = lambda t, y: np.array([y[1], -c*y[0]])
exact_xs = exact_harm_osc(t, x0, v0, m, k)
def test_harm(name, integrator):
ys = integrator(harm_osc_rhs, y0, t)
xs = ys[:, 0]
vs = ys[:, 1]
err = np.abs(xs - exact_xs)
plt.subplot(2, 1, 1)
plt.plot(t, xs, label=name)
plt.subplot(2, 1, 2)
plt.semilogy(t, err, label=name)
test_harm("forward_euler", forward_euler)
test_harm("heun (rk2)", heun)
test_harm("rk4", rk4)
test_harm("leap_frog", leap_frog)
test_harm("AB 2", ab2)
test_harm("AB 3", ab3)
plt.subplot(2, 1, 1)
plt.plot(t, exact_xs, "--", label="analytic")
plt.legend()
plt.grid()
plt.title(r"$x'' = -\frac{k}{m}x$")
plt.xlabel("t")
plt.ylabel("x")
plt.subplot(2, 1, 2)
plt.legend()
plt.grid()
plt.xlabel("t")
plt.ylabel("absolute Error")
## radioactive decay y' = - alpha * y
def test_radioactive_decay():
alpha = 0.7
x0 = 100.0
y0 = np.array([x0])
t = np.linspace(0, 10.0, 200)
def radioactive_decay_rhs(t, y):
return np.array([- alpha * y[0]])
exact_xs = x0 * np.exp(- alpha * t)
def test_radio(name, integrator):
ys = integrator(radioactive_decay_rhs, y0, t)
xs = ys[:, 0]
err = np.abs(xs - exact_xs)
plt.subplot(2, 1, 1)
plt.plot(t, xs, label=name)
plt.subplot(2, 1, 2)
plt.semilogy(t, err, label=name)
test_radio("forward_euler", forward_euler)
test_radio("heun (rk2)", heun)
test_radio("rk4", rk4)
test_radio("leap_frog", leap_frog)
test_radio("AB 2", ab2)
test_radio("AB 3", ab3)
plt.subplot(2, 1, 1)
plt.plot(t, exact_xs, "--", label="analytic solution")
plt.xlabel("t")
plt.ylabel("x")
plt.title(r"$x' = - \alpha x$")
plt.legend()
plt.grid()
plt.subplot(2, 1, 2)
plt.xlabel("t")
plt.ylabel("absolute error")
plt.grid()
plt.legend()
if __name__ == "__main__":
plt.figure(1)
test_harmonic_oscillator()
plt.figure(2)
test_radioactive_decay()
plt.show()
|
mit
|
patriciolobos/desa8
|
openerp/addons/product_email_template/models/invoice.py
|
321
|
1969
|
# -*- coding: utf-8 -*-
from openerp.osv import osv
class account_invoice(osv.Model):
_inherit = 'account.invoice'
def invoice_validate_send_email(self, cr, uid, ids, context=None):
Composer = self.pool['mail.compose.message']
for invoice in self.browse(cr, uid, ids, context=context):
# send template only on customer invoice
if invoice.type != 'out_invoice':
continue
# subscribe the partner to the invoice
if invoice.partner_id not in invoice.message_follower_ids:
self.message_subscribe(cr, uid, [invoice.id], [invoice.partner_id.id], context=context)
for line in invoice.invoice_line:
if line.product_id.email_template_id:
# CLEANME: should define and use a clean API: message_post with a template
composer_id = Composer.create(cr, uid, {
'model': 'account.invoice',
'res_id': invoice.id,
'template_id': line.product_id.email_template_id.id,
'composition_mode': 'comment',
}, context=context)
template_values = Composer.onchange_template_id(
cr, uid, composer_id, line.product_id.email_template_id.id, 'comment', 'account.invoice', invoice.id
)['value']
template_values['attachment_ids'] = [(4, id) for id in template_values.get('attachment_ids', [])]
Composer.write(cr, uid, [composer_id], template_values, context=context)
Composer.send_mail(cr, uid, [composer_id], context=context)
return True
def invoice_validate(self, cr, uid, ids, context=None):
res = super(account_invoice, self).invoice_validate(cr, uid, ids, context=context)
self.invoice_validate_send_email(cr, uid, ids, context=context)
return res
|
agpl-3.0
|
omnirom/android_external_chromium-org
|
media/tools/layout_tests/test_expectations_unittest.py
|
165
|
1697
|
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import unittest
from test_expectations import TestExpectations
class TestTestExpectations(unittest.TestCase):
def testParseLine(self):
line = ('crbug.com/86714 [ Mac Gpu ] media/video-zoom.html [ Crash '
'ImageOnlyFailure ]')
expected_map = {'CRASH': True, 'IMAGE': True, 'Bugs': ['BUGCR86714'],
'Comments': '', 'MAC': True, 'Gpu': True,
'Platforms': ['MAC', 'Gpu']}
self.assertEquals(TestExpectations.ParseLine(line),
('media/video-zoom.html', expected_map))
def testParseLineWithLineComments(self):
line = ('crbug.com/86714 [ Mac Gpu ] media/video-zoom.html [ Crash '
'ImageOnlyFailure ] # foo')
expected_map = {'CRASH': True, 'IMAGE': True, 'Bugs': ['BUGCR86714'],
'Comments': ' foo', 'MAC': True, 'Gpu': True,
'Platforms': ['MAC', 'Gpu']}
self.assertEquals(TestExpectations.ParseLine(line),
('media/video-zoom.html', expected_map))
def testParseLineWithLineGPUComments(self):
line = ('crbug.com/86714 [ Mac ] media/video-zoom.html [ Crash '
'ImageOnlyFailure ] # Gpu')
expected_map = {'CRASH': True, 'IMAGE': True, 'Bugs': ['BUGCR86714'],
'Comments': ' Gpu', 'MAC': True,
'Platforms': ['MAC']}
self.assertEquals(TestExpectations.ParseLine(line),
('media/video-zoom.html', expected_map))
if __name__ == '__main__':
unittest.main()
|
bsd-3-clause
|
navcoindev/navcoin-core
|
contrib/GenesisH0/genesis.py
|
5
|
7978
|
import hashlib, binascii, struct, array, os, time, sys, optparse
import scrypt
from construct import *
def main():
options = get_args()
algorithm = get_algorithm(options)
input_script = create_input_script(options.timestamp)
output_script = create_output_script(options.pubkey)
# hash merkle root is the double sha256 hash of the transaction(s)
tx = create_transaction(input_script, output_script,options)
hash_merkle_root = hashlib.sha256(hashlib.sha256(tx).digest()).digest()
print_block_info(options, hash_merkle_root)
block_header = create_block_header(hash_merkle_root, options.time, options.bits, options.nonce)
genesis_hash, nonce = generate_hash(block_header, algorithm, options.nonce, options.bits)
announce_found_genesis(genesis_hash, nonce)
def get_args():
parser = optparse.OptionParser()
parser.add_option("-t", "--time", dest="time", default=int(time.time()),
type="int", help="the (unix) time when the genesisblock is created")
parser.add_option("-z", "--timestamp", dest="timestamp", default="The Times 03/Jan/2009 Chancellor on brink of second bailout for banks",
type="string", help="the pszTimestamp found in the coinbase of the genesisblock")
parser.add_option("-n", "--nonce", dest="nonce", default=0,
type="int", help="the first value of the nonce that will be incremented when searching the genesis hash")
parser.add_option("-a", "--algorithm", dest="algorithm", default="SHA256",
help="the PoW algorithm: [SHA256|scrypt|X11|X13|X15]")
parser.add_option("-p", "--pubkey", dest="pubkey", default="04678afdb0fe5548271967f1a67130b7105cd6a828e03909a67962e0ea1f61deb649f6bc3f4cef38c4f35504e51ec112de5c384df7ba0b8d578a4c702b6bf11d5f",
type="string", help="the pubkey found in the output script")
parser.add_option("-v", "--value", dest="value", default=5000000000,
type="int", help="the value in coins for the output, full value (exp. in bitcoin 5000000000 - To get other coins value: Block Value * 100000000)")
parser.add_option("-b", "--bits", dest="bits",
type="int", help="the target in compact representation, associated to a difficulty of 1")
(options, args) = parser.parse_args()
if not options.bits:
if options.algorithm == "scrypt" or options.algorithm == "X11" or options.algorithm == "X13" or options.algorithm == "X15":
options.bits = 0x1e0ffff0
else:
options.bits = 0x1d00ffff
return options
def get_algorithm(options):
supported_algorithms = ["SHA256", "scrypt", "X11", "X13", "X15"]
if options.algorithm in supported_algorithms:
return options.algorithm
else:
sys.exit("Error: Given algorithm must be one of: " + str(supported_algorithms))
def create_input_script(psz_timestamp):
psz_prefix = ""
#use OP_PUSHDATA1 if required
if len(psz_timestamp) > 76: psz_prefix = '4c'
script_prefix = '04ffff001d0104' + psz_prefix + chr(len(psz_timestamp)).encode('hex')
print (script_prefix + psz_timestamp.encode('hex'))
return (script_prefix + psz_timestamp.encode('hex')).decode('hex')
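# For reference, the fixed prefix decodes as Bitcoin script: PUSH(4)
# 0xffff001d (the genesis nBits, little-endian), PUSH(1) 0x04, then the
# timestamp bytes (preceded by OP_PUSHDATA1, 0x4c, when > 76 bytes).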
def create_output_script(pubkey):
script_len = '41'
OP_CHECKSIG = 'ac'
return (script_len + pubkey + OP_CHECKSIG).decode('hex')
def create_transaction(input_script, output_script,options):
transaction = Struct("transaction",
Bytes("version", 4),
Byte("num_inputs"),
StaticField("prev_output", 32),
UBInt32('prev_out_idx'),
Byte('input_script_len'),
Bytes('input_script', len(input_script)),
UBInt32('sequence'),
Byte('num_outputs'),
Bytes('out_value', 8),
Byte('output_script_len'),
Bytes('output_script', 0x43),
UBInt32('locktime'))
tx = transaction.parse('\x00'*(127 + len(input_script)))
tx.version = struct.pack('<I', 1)
tx.num_inputs = 1
tx.prev_output = struct.pack('<qqqq', 0,0,0,0)
tx.prev_out_idx = 0xFFFFFFFF
tx.input_script_len = len(input_script)
tx.input_script = input_script
tx.sequence = 0xFFFFFFFF
tx.num_outputs = 1
tx.out_value = struct.pack('<q' ,options.value)#0x000005f5e100)#012a05f200) #50 coins
#tx.out_value = struct.pack('<q' ,0x000000012a05f200) #50 coins
tx.output_script_len = 0x43
tx.output_script = output_script
tx.locktime = 0
return transaction.build(tx)
def create_block_header(hash_merkle_root, time, bits, nonce):
block_header = Struct("block_header",
Bytes("version",4),
Bytes("hash_prev_block", 32),
Bytes("hash_merkle_root", 32),
Bytes("time", 4),
Bytes("bits", 4),
Bytes("nonce", 4))
genesisblock = block_header.parse('\x00'*80)
genesisblock.version = struct.pack('<I', 1)
genesisblock.hash_prev_block = struct.pack('<qqqq', 0,0,0,0)
genesisblock.hash_merkle_root = hash_merkle_root
genesisblock.time = struct.pack('<I', time)
genesisblock.bits = struct.pack('<I', bits)
genesisblock.nonce = struct.pack('<I', nonce)
return block_header.build(genesisblock)
# https://en.bitcoin.it/wiki/Block_hashing_algorithm
def generate_hash(data_block, algorithm, start_nonce, bits):
print 'Searching for genesis hash..'
nonce = start_nonce
last_updated = time.time()
# https://en.bitcoin.it/wiki/Difficulty
target = (bits & 0xffffff) * 2**(8*((bits >> 24) - 3))
while True:
sha256_hash, header_hash = generate_hashes_from_block(data_block, algorithm)
last_updated = calculate_hashrate(nonce, last_updated)
if is_genesis_hash(header_hash, target):
if algorithm == "X11" or algorithm == "X13" or algorithm == "X15":
return (header_hash, nonce)
return (sha256_hash, nonce)
else:
nonce = nonce + 1
data_block = data_block[0:len(data_block) - 4] + struct.pack('<I', nonce)
def generate_hashes_from_block(data_block, algorithm):
sha256_hash = hashlib.sha256(hashlib.sha256(data_block).digest()).digest()[::-1]
header_hash = ""
if algorithm == 'scrypt':
header_hash = scrypt.hash(data_block,data_block,1024,1,1,32)[::-1]
elif algorithm == 'SHA256':
header_hash = sha256_hash
elif algorithm == 'X11':
try:
exec('import %s' % "xcoin_hash")
except ImportError:
sys.exit("Cannot run X11 algorithm: module xcoin_hash not found")
header_hash = xcoin_hash.getPoWHash(data_block)[::-1]
elif algorithm == 'X13':
try:
exec('import %s' % "x13_hash")
except ImportError:
sys.exit("Cannot run X13 algorithm: module x13_hash not found")
header_hash = x13_hash.getPoWHash(data_block)[::-1]
elif algorithm == 'X15':
try:
exec('import %s' % "x15_hash")
except ImportError:
sys.exit("Cannot run X15 algorithm: module x15_hash not found")
header_hash = x15_hash.getPoWHash(data_block)[::-1]
return sha256_hash, header_hash
def is_genesis_hash(header_hash, target):
return int(header_hash.encode('hex_codec'), 16) < target
def calculate_hashrate(nonce, last_updated):
if nonce % 1000000 == 999999:
now = time.time()
hashrate = round(1000000/(now - last_updated))
generation_time = round(pow(2, 32) / hashrate / 3600, 1)
sys.stdout.write("\r%s hash/s, estimate: %s h"%(str(hashrate), str(generation_time)))
sys.stdout.flush()
return now
else:
return last_updated
def print_block_info(options, hash_merkle_root):
print "algorithm: " + (options.algorithm)
print "merkle hash: " + hash_merkle_root[::-1].encode('hex_codec')
print "pszTimestamp: " + options.timestamp
print "pubkey: " + options.pubkey
print "time: " + str(options.time)
print "bits: " + str(hex(options.bits))
def announce_found_genesis(genesis_hash, nonce):
print "genesis hash found!"
print "nonce: " + str(nonce)
print "genesis hash: " + genesis_hash.encode('hex_codec')
# GOGOGO!
main()
|
mit
|
ringling123/jupiter1wiz
|
plugin.program.advancedsettings/default.py
|
13
|
86871
|
'''
kinkin
'''
import urllib, urllib2, re, xbmc, xbmcplugin, xbmcgui, os
import settings
import time,datetime
import glob
import shutil
from xml.etree import ElementTree
from xml.etree.ElementTree import Element
from xml.etree.ElementTree import SubElement
from xml.dom import minidom
ADDON = settings.addon()
VIEW = settings.viewtype()
FILE_DIR = settings.xml_files()
addon_path = os.path.join(xbmc.translatePath('special://home/addons'), '')
fanart = xbmc.translatePath(os.path.join('special://home/addons/plugin.program.advancedsettings', 'fanart.jpg'))
iconart = xbmc.translatePath(os.path.join('special://home/addons/plugin.program.advancedsettings', 'icon.png'))
SETTINGS_PATH = os.path.join(xbmc.translatePath('special://profile/addon_data/plugin.program.advancedsettings'), 'settings.xml')
USERDATA_PATH = os.path.join(xbmc.translatePath('special://profile'), '')
ACTIVESETTINGSFILE = os.path.join(xbmc.translatePath('special://profile'), 'advancedsettings.xml')
editfile = settings.xml_file()
def MENU(name):
addDir("Edit Settings", 'url',499, iconart,'blank','Build your advanced settings file')
addDirPlayable("Write XML File", 'url',500, iconart,'Feeling brave? Write your advanced settings directly to your userdata directory. You may need to reboot for settings to take effect',ACTIVESETTINGSFILE,'')
if os.path.exists(ACTIVESETTINGSFILE):
addDirPlayable("View active advancedsettings.xml", 'url',495, '','View your active advanced settings file','','')
addDirPlayable("Remove advancedsettings.xml", 'url',490, '','Delete all advanced settings (settings are saved in this addon to be written later). Reboot may be required to take effect','','')
addDirPlayable("Write XML File to temporary location", 'url',500, '','Play it safe. Write your advanced settings to userdata/plugin.program.advancedsettings/XML_FILES/ directory',editfile,'')
addDirPlayable("Reset all settings", 'url',489, '','Resets all settings saved in this addon only. You will still need to run "Remove advancedsettings.xml" and reboot to remove completely','','')
addDir("List enabled settings", 'url',498, '','blank','Check which settings are enabled before writing your xml file')
setView('movies', 'movies-view')
xbmc.executebuiltin("Container.SetViewMode(%s)" % VIEW)
def buildmenu(name):
addDir("Troubleshooting settings ", 'url',10, '','blank','Define your video settings')
addDir("Audio/video playback settings", 'url',11, '','blank','Define your audio settings')
addDir("Video library settings", 'url',12, '','blank','Define your video library settings')
addDir("Library artwork", 'url',13, '','blank','Define your library artwork settings')
addDir("Video and music library settings", 'url',14, '','blank','Define your video and music library settings')
addDir("Music settings", 'url',15, '','blank','Define your music settings')
addDir("Photos settings", 'url',16, '','blank','Define your photo settings')
addDir("Network settings", 'url',17, '','blank','Define your network settings')
addDir("File system settings", 'url',18, '','blank','Define your file system settings')
addDir("Remote control settings", 'url',19, '','blank','Define your remote control settings')
addDir("Other interface settings", 'url',20, '','blank','Define other interface settings')#Unsorted
addDir("Unsorted", 'url',21, '','blank','Unsorted network settings')
setView('movies', 'movies-view')
xbmc.executebuiltin("Container.SetViewMode(%s)" % VIEW)
def removexmlfile(name):
if os.path.exists(ACTIVESETTINGSFILE):
os.remove(ACTIVESETTINGSFILE)
notification('Easy Advanced Settings', 'advancedsettings.xml removed', '4000', iconart)
xbmc.executebuiltin("Container.Refresh")
def resetsettings(name):
allsettings = read_from_file(SETTINGS_PATH)
all_ids = regex_get_all(allsettings, 'setting id="', '"')
for id in all_ids:
if id != 'viewtype':
ADDON.setSetting(id, value="DISABLED")
notification('Easy Advanced Settings', 'All settings reset', '4000', iconart)
def checksettings(name):
allsettings = read_from_file(SETTINGS_PATH)
match = re.compile('setting id="(.+?)" value="(.+?)"').findall(allsettings)
for id, value in match:
if value != 'DISABLED' and id != 'viewtype':
            value = value.replace('&#10;', '\n').replace('&lt;', '<').replace('&gt;', '>')
text = "[COLOR lime]%s[/COLOR]" % (value)
addDirPlayable(text, 'url','url', '','','','')
def troubleshooting(name):
addDir("jsonrpc", 'url',101, '','blank','To make it easier for developers using the JSON RPC API in their (third party) applications to debug during development the json output of XBMC can be prettified by setting compactoutput to false. Default json output mode is a compact print which does not contain any unnecessary whitespaces and therefore is difficult to read for humans. Furthermore using the tcpport setting it is possible to change the default TCP port used by XBMC for JSON RPC (which is 9090) to any other valid port')
xbmc.executebuiltin("Container.SetViewMode(%s)" % VIEW)
def audiovideo(name):
addDir("Video", 'url',102, '','blank','Define your video settings')
addDir("Audio", 'url',103, '','blank','Define your audio settings')
addDir("EDL", 'url',104, '','blank','Commercial break detection not as good you think it could be? Are some commercial breaks in a series of adverts not being skipped? Are some parts being skipped that are clearly not commercials? Does the end of the previous recording still show? The following advanced settings can be used to better identify full commercial break sequences, remove incorrectly flagged commercial breaks, and have playback start at the actual beginning of the recording.')
addDir("PVR", 'url',105, '','blank','Define your PVR settings')
addDir("EPG", 'url',106, '','blank','Define your EPG settings')
name = name.lower()
dirlist = ['skiploopfilter<>dialog<>The amount of the loop filter to skip on h264 decoding. This can help the performance of slower machines when decoding h264 content. Values, in decreasing CPU usage (and decreasing quality)<>["-16","0","8","16","32","48","DISABLED"]<>root',
'measurerefreshrate<>bool<>When using "Sync playback to display" on Windows, the detected refreshrate might be wrong. When enabling measurerefreshrate, the refreshrate is measured instead, which makes it more accurate<><>root',
'forcedswaptime<>num<>Use to force a backbuffer->frontbuffer swap while vsync is enabled. Set to the time (in ms) to allow for the swap (e.g. <forcedswaptime>1</forcedswaptime> is typical)<><>root']
for d in dirlist:
splitd=d.split('<>')
description = splitd[2]
options = splitd[3]
        try:
            name = splitd[4]
        except IndexError:
            pass
currentsetting = ADDON.getSetting(str(splitd[0]))
if '<>' in currentsetting:
currentsetting = currentsetting.split('<>')[2]
if currentsetting == 'DISABLED':
d = "[COLOR red]%s[/COLOR] (%s)" % (splitd[0], currentsetting)
else:
d = "[COLOR lime]%s[/COLOR] (%s)" % (splitd[0], currentsetting)
addDirPlayable(d,name,200,currentsetting,description,splitd[1],options)
setView('movies', 'movies-view')
xbmc.executebuiltin("Container.SetViewMode(%s)" % VIEW)
def videolibrary(name):
name = name.lower()
addDir("Video Library", 'url',115, '','blank','Options specific to the Video Library')
dirlist = ['videoextensions$add<>text<>Allow (add) file extensions in the My Video windows (.m4v .3gp). Separate with a space<>',
'videoextensions$remove<>text<>Exclude (remove) file extensions in the My Video windows (.m4v .3gp). Separate with a space<>',
'discstubextensions$add<>text<>Additional file-extensions that will be treated as disc stubs (.dvd .blu). Separate with a space<>',
'discstubextensions$remove<>text<>Additional file-extensions that will NOT be treated as disc stubs (.dvd .blu). Separate with a space<>',
'sorttokens$token<>text<>Allows you to specify additional tokens that will be ignored at the start of lines during sorting (ie. the)<>',
'moviestacking$regexp<>moviestcking<>This is used by the File Stacking algorithm to combine multi-part files and contains a list of "Regular Expressions". As of XBMC v9.11, video stacking regular expressions must contain exactly four (4) capture expressions<>',
'video$cleandatetime<>text<>Matches a year number in a string using a Regular Expression. The string found before will be used as basis string getting cleaned by the cleanstrings expressions.By default date formats like MM:YY are ignored.<>',
               'video$cleanstrings$regexp<>text<>Clean unwanted characters from filenames or folders by using a list of Regular Expressions. Please note that everything right of the match (at the end of the file name) is removed, so if you would have a file named Super movie.mp4 and would add <regexp> </regexp> (only a space), the only thing that would be left is Super, which is probably not what you want.<>',
'tvshowmatching$regexp<>text<>Matches the season and episode numbers in file paths by using a list of Regular Expressions. Arguments action="append" or action="prepend" will insert user specified expressions after, or before, the defaults. For multi-episode matching to work, there needs to be a third set of parentheses at the end, this part is fed back into the regexp engine. <>',
'tvmultipartmatching<>text<>Matches a multipart episode number based on a previously identified episode file, using a list of Regular Expressions<>',
'video$excludetvshowsfromscan$regexp<>text<>Matches filenames or folders which should be excluded from a tvshow library scan using a list of Regular Expressions<>',
'trailermatching$regexp<>text<>Contains "Regular Expression" syntax (commonly referred to as "RegEx" or "RegExp") to match the locally stored trailers to movies in the library<>',
               'videoscanner$ignoreerrors<>bool<>Set to true to silently ignore errors while scanning videos. This prevents the error dialogue box, so you do not have to keep hitting "yes" to keep scanning<>',
'myth$movielength<>num<>Not seeing all the recordings you expected in the Movies folder? If so, it is very likely that the electronic program guide (EPG) used by MythTV does not accurately distinguish between TV Shows and Movies all the time. The following setting allows the length of the recording to also be used to determine if a recording is a Movie<>']
for d in dirlist:
splitd=d.split('<>')
        description = splitd[2]
        options = splitd[3]
        currentsetting = ADDON.getSetting(str(splitd[0]))
if '<>' in currentsetting:
currentsetting = currentsetting.split('<>')[2]
if currentsetting == 'DISABLED':
d = "[COLOR red]%s[/COLOR] (%s)" % (splitd[0], currentsetting)
else:
d = "[COLOR lime]%s[/COLOR] (%s)" % (splitd[0], currentsetting)
addDirPlayable(d,name,200,currentsetting,description,splitd[1],options)
setView('movies', 'movies-view')
xbmc.executebuiltin("Container.SetViewMode(%s)" % VIEW)
def video_library(name):
name = name.lower().replace(' ', '')
dirlist = ['allitemsonbottom<>bool<>Sorts the "*All" items at the bottom of the list when in Ascending order<>',
'backgroundupdate<>bool<>Set to hide the video scanner dialog from the gui. NOTE: To get this working properly, you have to do a "Clean Library" in settings the first time after you enable the setting<>',
               'cleanonupdate<>bool<>Default set to false to prevent xbmc from removing items from the database while updating<>',
'hideallitems<>bool<>Removes the "*All" items from the video library<>',
'hideemptyseries<>bool<>Hide empty series in the video library<>',
'hiderecentlyaddeditems<>bool<>Removes the "Recently added ..." items from the video library<>',
'recentlyaddeditems<>num<>Number of recently added items. Defaults to 25<>',
               'itemseparator<>text<>Separator used for multiple artists/genres in tags.<>',
'exportautothumbs<>bool<>Export auto-generated thumbs. Defaults to false <>',
'importwatchedstate<>bool<>Import previously exported playdate and playcount from .nfo files. Defaults to false<>',
'importresumepoint<>bool<>Import previously exported resume point from .nfo files. Defaults to false<>',
'mymovies$categoriestogenres<>bool<>Add MyMovies Custom Categories to XBMC Genres (boolean, default is false)<>',
'dateadded<>dialog<>0 results in using the current datetime when adding a video. 1 (default) results in prefering to use the files mtime (if it is valid) and only using the files ctime if the mtime is not valid. 2 results in using the newer datetime of the files mtime and ctime<>["0","1","2","DISABLED"]']
for d in dirlist:
splitd=d.split('<>')
description = splitd[2]
currentsetting = ADDON.getSetting(str(splitd[0]))
options = splitd[3]
if '<>' in currentsetting:
currentsetting = currentsetting.split('<>')[2]
if currentsetting == 'DISABLED':
d = "[COLOR red]%s[/COLOR] (%s)" % (splitd[0], currentsetting)
else:
d = "[COLOR lime]%s[/COLOR] (%s)" % (splitd[0], currentsetting)
addDirPlayable(d,name,200,currentsetting,description,splitd[1],options)
setView('movies', 'movies-view')
xbmc.executebuiltin("Container.SetViewMode(%s)" % VIEW)
def libraryartwork(name):
name = 'root'
dirlist = ['imageres<>num<>Specify the maximal resolution that art should be resized to in pixels. The width is automatically calculated as being 16/9*height. The image will be resized to fit within this size. e.g. an image that is 2000x500 will be cached at size 1280x320. An image that is 500x800 will be cached at size 450x720 using the default value of 720<>',
'fanartres<>num<>Specify the resolution that cached fanart should be resized to in pixels. The width is automatically calculated as being 16/9*height. Only images that are exactly 16x9 and equal to or greater than this resolution will be cached at this size - all other images will be cached using <imageres>. The default value is 1080<>',
'fanart$add<>text<>A list of additional files to try when searching for fanart images. (The defaults are fanart.jpg and fanart.png which can be removed.)<>',
'fanart$remove<>text<>A list of additional files to try when searching for fanart images. (The defaults are fanart.jpg and fanart.png which can be removed.)<>',
'musicthumbs$add<>text<>A list of additional files to try when searching for music thumbnail images. (The default is folder.jpg which can be removed.) <>',
'musicthumbs$remove<>text<>A list of additional files to try when searching for music thumbnail images. (The default is folder.jpg which can be removed.) <>',
'useddsfanart<>bool<>This settings allows XBMC to use your GPU rendering fanart and some other images. This will make loading images considerably faster, especially on systems with slower processors (e.g. atom based systems). Do not use this option on ARM based systems (Apple TV2/iOS/RPi/many Android systems) as it is likely to degrade performance because DDS images are not supported<>']
for d in dirlist:
splitd=d.split('<>')
description = splitd[2]
currentsetting = ADDON.getSetting(str(splitd[0]))
if '<>' in currentsetting:
currentsetting = currentsetting.split('<>')[2]
if currentsetting == 'DISABLED':
d = "[COLOR red]%s[/COLOR] (%s)" % (splitd[0], currentsetting)
else:
d = "[COLOR lime]%s[/COLOR] (%s)" % (splitd[0], currentsetting)
addDirPlayable(d,name,200,currentsetting,description,splitd[1],'')
setView('movies', 'movies-view')
xbmc.executebuiltin("Container.SetViewMode(%s)" % VIEW)
def videomusiclibrary(name):
name = 'root'
addDir("Video Database", 'url',470, '','blank','Allows advanced customization of the default database settings for video\nNote: It is HIGHLY recommended that you not attempt to place an sqlite3 database outside of XBMCs path. sqlite3 contains no filesystem abstraction, so this will plain break on any non-local (as far as XBMC is concerned) paths. Use this for mysql only.')
addDir("Music Database", 'url',471, '','blank','Allows advanced customization of the default database settings for music\nNote: It is HIGHLY recommended that you not attempt to place an sqlite3 database outside of XBMCs path. sqlite3 contains no filesystem abstraction, so this will plain break on any non-local (as far as XBMC is concerned) paths. Use this for mysql only.\nNote: If you use MySQL for the music database, but are finding that it slows down your music library significantly, execute the following query to create an index on the song table. This will significantly speed up queries using the songview views looking up by artist')
dirlist = ['playlistretries<>num<>The number of retries attempted if a source is offline. With this control you can alter the number of consecutive failed items before a playlist fails<>',
'playlisttimeout<>num<>The timeout, in seconds, before item failure.<>']
for d in dirlist:
splitd=d.split('<>')
description = splitd[2]
currentsetting = ADDON.getSetting(str(splitd[0]))
if '<>' in currentsetting:
currentsetting = currentsetting.split('<>')[2]
if currentsetting == 'DISABLED':
d = "[COLOR red]%s[/COLOR] (%s)" % (splitd[0], currentsetting)
else:
d = "[COLOR lime]%s[/COLOR] (%s)" % (splitd[0], currentsetting)
addDirPlayable(d,name,200,currentsetting,description,splitd[1],'')
setView('movies', 'movies-view')
xbmc.executebuiltin("Container.SetViewMode(%s)" % VIEW)
def videodatabase(name):
name = name.lower().replace(' ', '')
dirlist = ['type<>dialog<>Can be either "sqlite3" or "mysql" (default: sqlite3)<>["sqlite3","mysql","DISABLED"]',
'host<>text<>sqlite3: defines the relative path to the database file (eg. /usr/local/xbmc/databases)\nmysql: defines the host of the mysql socket (eg. localhost, 192.168.0.1, etc)<>',
'port<>num<>sqlite3: silently ignored\nmysql: defines the port of the mysql socket (default: 3306)<>',
               'name<>text<>Not needed by default, and some users report issues when defining this tag. When not used "MyVideos"+DB number will be used\nsqlite3: defines the name of the database file to read from, excluding the ".db" extension\nmysql: defines the name of the database to use<>',
'user<>text<>sqlite3: silently ignored\nmysql: defines the user with privileged access to the database <>',
'pass<>text<>sqlite3: silently ignored\nmysql: defines the password for the user with privileged access to the database<>']
for d in dirlist:
splitd=d.split('<>')
description = splitd[2]
options = splitd[3]
currentsetting = ADDON.getSetting(str(splitd[0]))
if '<>' in currentsetting:
currentsetting = currentsetting.split('<>')[2]
if currentsetting == 'DISABLED':
d = "[COLOR red]%s[/COLOR] (%s)" % (splitd[0], currentsetting)
else:
d = "[COLOR lime]%s[/COLOR] (%s)" % (splitd[0], currentsetting)
addDirPlayable(d,name,200,currentsetting,description,splitd[1],options)
setView('movies', 'movies-view')
xbmc.executebuiltin("Container.SetViewMode(%s)" % VIEW)
def musicdatabase(name):
name = name.lower().replace(' ', '')
dirlist = ['_type<>dialog<>Can be either "sqlite3" or "mysql" (default: sqlite3)<>["sqlite3","mysql","DISABLED"]',
'_host<>text<>sqlite3: defines the relative path to the database file (eg. /usr/local/xbmc/databases)\nmysql: defines the host of the mysql socket (eg. localhost, 192.168.0.1, etc)<>',
'_port<>num<>sqlite3: silently ignored\nmysql: defines the port of the mysql socket (default: 3306)<>',
               '_name<>text<>Not needed by default, and some users report issues when defining this tag. When not used "MyMusic"+DB number will be used\nsqlite3: defines the name of the database file to read from, excluding the ".db" extension\nmysql: defines the name of the database to use<>',
'_user<>text<>sqlite3: silently ignored\nmysql: defines the user with privileged access to the database <>',
'_pass<>text<>sqlite3: silently ignored\nmysql: defines the password for the user with privileged access to the database<>']
for d in dirlist:
splitd=d.split('<>')
description = splitd[2]
options = splitd[3]
currentsetting = ADDON.getSetting(str(splitd[0]))
if '<>' in currentsetting:
currentsetting = currentsetting.split('<>')[2]
if currentsetting == 'DISABLED':
d = "[COLOR red]%s[/COLOR] (%s)" % (splitd[0], currentsetting)
else:
d = "[COLOR lime]%s[/COLOR] (%s)" % (splitd[0], currentsetting)
addDirPlayable(d,name,200,currentsetting,description,splitd[1],options)
setView('movies', 'movies-view')
xbmc.executebuiltin("Container.SetViewMode(%s)" % VIEW)
def music(name):
name = 'root'
addDir("Music Library", 'url',472, '','blank','Allows advanced customization of the default database settings for video\nNote: It is HIGHLY recommended that you not attempt to place an sqlite3 database outside of XBMCs path. sqlite3 contains no filesystem abstraction, so this will plain break on any non-local (as far as XBMC is concerned) paths. Use this for mysql only.')
addDir("Karaoke", 'url',473, '','blank','Allows advanced customization of the default database setting')
dirlist = ['musicextensions$add<>text<>A list of additional file-extensions to allow (add) in the My Music window. Separate with a space<>',
'musicextensions$remove<>text<>A list of additional file-extensions to remove from the My Music window. Separate with a space<>',
'cddbaddress<>text<>The address of the online CDDb database. You may set this to another freedb mirror if there is a more suitable one<>',
'songinfoduration<>num<>This controls how long the song information will remain onscreen when the song changes during visualisations. The valid range is "1" to "Indefinite (0)", in seconds. This does not include the duration of any transition effects<>',
'musicfilenamefilters<>text<>Contains filters to match music information (artist, title etc.) from a tag-less music filename. The first <filter> to match completely is used. Matched items include\n%A - Artist\n%T - Title\n%B - Album\n%N - Track number\n%S - Part of set (disk number)\n%D - Duration\n%G - Genre\n%Y - Year\n%R - Rating\n\nExample: %A - %T<>']
for d in dirlist:
splitd=d.split('<>')
description = splitd[2]
currentsetting = ADDON.getSetting(str(splitd[0]))
if '<>' in currentsetting:
currentsetting = currentsetting.split('<>')[2]
if currentsetting == 'DISABLED':
d = "[COLOR red]%s[/COLOR] (%s)" % (splitd[0], currentsetting)
else:
d = "[COLOR lime]%s[/COLOR] (%s)" % (splitd[0], currentsetting)
addDirPlayable(d,name,200,currentsetting,description,splitd[1],'')
setView('movies', 'movies-view')
xbmc.executebuiltin("Container.SetViewMode(%s)" % VIEW)
def musiclibrary(name):
name = name.lower().replace(' ', '')
dirlist = ['_hideallitems<>bool<>Removes the "*All" items from the music library<>',
'_allitemsonbottom<>bool<>Sorts the "*All" items at the bottom of the list when in Ascending order<>',
'_backgroundupdate<>bool<>Set to hide the music scanner dialog from the gui<>',
'_recentlyaddeditems<>num<>Number of recently added items. Defaults to 25<>',
'albumssortbyartistthenyear<>bool<>At an albums listing, when you sort by artist, secondary sort will be year<>',
               'albumformat<>text<>Album label template, default is "%B"<>',
               '_prioritiseapetags<>bool<>Prioritise APEv2 tags over ID3v1/2 tags, default is false<>',
'_itemseparator<>text<>Separator used for multiple artists/genres in tags<>']
for d in dirlist:
splitd=d.split('<>')
description = splitd[2]
options = splitd[3]
currentsetting = ADDON.getSetting(str(splitd[0]))
if '<>' in currentsetting:
currentsetting = currentsetting.split('<>')[2]
if currentsetting == 'DISABLED':
d = "[COLOR red]%s[/COLOR] (%s)" % (splitd[0], currentsetting)
else:
d = "[COLOR lime]%s[/COLOR] (%s)" % (splitd[0], currentsetting)
addDirPlayable(d,name,200,currentsetting,description,splitd[1],options)
setView('movies', 'movies-view')
xbmc.executebuiltin("Container.SetViewMode(%s)" % VIEW)
def karaoke(name):
name = name.lower().replace(' ', '')
dirlist = ['syncdelaycdg<>text<>Music-lyrics delay for CDG format lyrics in SECONDS. Floating number, may be negative<>',
'syncdelaylrc<>text<>Music-lyrics delay for LRC format lyrics in 1/10 seconds. Floating number, may be negative<>',
'alwaysreplacegenre<>bool<>If set to true, when the songs are added to the library, XBMC will automatically replace the song genre by "Karaoke" if the song has associated lyrics. Default is false.<>',
'storedelay<>bool<>If set to true, when the music-lyrics delay was modified while playing using subtitle delay buttons, the delay value for this song will be stored, and restored when the song is played next time. Default is true.<>',
'autoassignstartfrom<>num<>When karaoke songs are added to the library during scans, an autoincrement number is automatically assigned to each song, starting from the value specified below. Default starts from 1<>',
               'nocdgbackground<>bool<>If set to true (default), the background for CDG songs is always empty (plain color) no matter what setting is set in defaultbackground below. When setting this to false, then one can see through the background and see the video or visualization<>',
'defaultbackground:none<>text<>Sets default background mode. For image/video types the path should specify the image or video file to play<>',
'defaultbackground:vis<>text<>Sets default background mode. For image/video types the path should specify the image or video file to play<>',
'defaultbackground:image<>text<>Sets default background mode. For image/video types the path should specify the image or video file to play<>',
'defaultbackground:video<>text<>Sets default background mode. For image/video types the path should specify the image or video file to play<>',
'nextsongpopuptime<>text<>If non-zero, specifies the time in seconds left before the end of the current song when a window will pop up informing you about the next played song. The window does not pop up if there is no next song, or it is not a karaoke song<>']
for d in dirlist:
splitd=d.split('<>')
description = splitd[2]
options = splitd[3]
currentsetting = ADDON.getSetting(str(splitd[0]))
if '<>' in currentsetting:
currentsetting = currentsetting.split('<>')[2]
if currentsetting == 'DISABLED':
d = "[COLOR red]%s[/COLOR] (%s)" % (splitd[0], currentsetting)
else:
d = "[COLOR lime]%s[/COLOR] (%s)" % (splitd[0], currentsetting)
addDirPlayable(d,name,200,currentsetting,description,splitd[1],options)
setView('movies', 'movies-view')
xbmc.executebuiltin("Container.SetViewMode(%s)" % VIEW)
def photos(name):
name = 'root'
dirlist = ['pictureextensions$add<>text<>A list of additional file-extensions to allow (add) in the My Pictures window. Separate with a space<>',
'pictureextensions$remove<>text<>A list of additional file-extensions to remove from the My Pictures window. Separate with a space<>',
'pictureexcludes$regexp<>text<>Regular expressions that if evaluated to true will not be displayed in My Pictures<>',
'slideshow$panamount<>text<>Amount to pan images as a percentage of the screen<>',
               'slideshow$zoomamount<>text<>Amount to zoom images as a percentage of the screen<>',
'slideshow$blackbarcompensation<>num<>Amount to compensate (zoom) images to attempt to reduce black bars.\nResults in cropping of the longer length of the image in order to reduce the black bars on the shorter length of the image. Defaults to 20<>']
for d in dirlist:
splitd=d.split('<>')
description = splitd[2]
currentsetting = ADDON.getSetting(str(splitd[0]))
if '<>' in currentsetting:
currentsetting = currentsetting.split('<>')[2]
if currentsetting == 'DISABLED':
d = "[COLOR red]%s[/COLOR] (%s)" % (splitd[0], currentsetting)
else:
d = "[COLOR lime]%s[/COLOR] (%s)" % (splitd[0], currentsetting)
addDirPlayable(d,name,200,currentsetting,description,splitd[1],'')
setView('movies', 'movies-view')
xbmc.executebuiltin("Container.SetViewMode(%s)" % VIEW)
def networkmenu(name):
addDir("Samba", 'url',301, '','blank','Samba settings')
addDir("Network", 'url',302, '','blank','Network client settings')
addDir("Tuxbox", 'url',303, '','blank','Tuxboc settings')
name = "root"
dirlist = ['ftpshowcache<>bool<>default is false, if set to true, shows cache (X Y Z) partitions in the root directory listing <>',
               'enableairtunesdebuglog<>bool<>This enables the debug output of libshairport which is used for the AirTunes feature. Defaults to off - because it is spamming badly.<>',
               'airtunesport<>num<>This overwrites the default listening port of the AirTunes server (announced via zeroconf)<>',
'airplayport<>num<>This overwrites the default listening port of the AirPlay server (announced via zeroconf)<>']
for d in dirlist:
splitd=d.split('<>')
description = splitd[2]
currentsetting = ADDON.getSetting(str(splitd[0]))
if '<>' in currentsetting:
currentsetting = currentsetting.split('<>')[2]
if currentsetting == 'DISABLED':
d = "[COLOR red]%s[/COLOR] (%s)" % (splitd[0], currentsetting)
else:
d = "[COLOR lime]%s[/COLOR] (%s)" % (splitd[0], currentsetting)
addDirPlayable(d,name,200,currentsetting,description,splitd[1],'')
xbmc.executebuiltin("Container.SetViewMode(%s)" % VIEW)
setView('movies', 'movies-view')
xbmc.executebuiltin("Container.SetViewMode(%s)" % VIEW)
def filesystem(name):
addDir("Path Substitution", 'url',430, '','blank','Path substitutions are for use for redirecting file paths. These are processed in order, and are useful for substituting an absolute path on a PC with a path suitable for XBMC to handle.')
name = "root"
dirlist = ['packagefoldersize<>num<>The amount (in megabytes) of add-on zip packages saved from previous add-on installs. These packages are mainly used for the add-on rollback feature. Increasing the size should increase the amount of past versions saved.<>',
'detectasudf<>bool<>Set to true if you wish to detect joint ISO9660/UDF disks as UDF. Default is False<>',
'virtualshares<>bool<>Set to false to disable virtual shares like plugin, last.fm or shoutcast sources. Default is True<>']
for d in dirlist:
splitd=d.split('<>')
description = splitd[2]
currentsetting = ADDON.getSetting(str(splitd[0]))
if '<>' in currentsetting:
currentsetting = currentsetting.split('<>')[2]
if currentsetting == 'DISABLED':
d = "[COLOR red]%s[/COLOR] (%s)" % (splitd[0], currentsetting)
else:
d = "[COLOR lime]%s[/COLOR] (%s)" % (splitd[0], currentsetting)
addDirPlayable(d,name,200,currentsetting,description,splitd[1],'')
xbmc.executebuiltin("Container.SetViewMode(%s)" % VIEW)
setView('movies', 'movies-view')
xbmc.executebuiltin("Container.SetViewMode(%s)" % VIEW)
def pathsubstitution(name):
name = name.lower().replace(' ', '')
dirlist = ['substitute$from<>text<>Substitute from this path. you must set the "To" path below<>',
'substitute$to<>text<>Substitute to this path.<>']
for d in dirlist:
splitd=d.split('<>')
description = splitd[2]
currentsetting = ADDON.getSetting(str(splitd[0]))
if '<>' in currentsetting:
currentsetting = currentsetting.split('<>')[2]
if currentsetting == 'DISABLED':
d = "[COLOR red]%s[/COLOR] (%s)" % (splitd[0], currentsetting)
else:
d = "[COLOR lime]%s[/COLOR] (%s)" % (splitd[0], currentsetting)
addDirPlayable(d,name,200,currentsetting,description,splitd[1],'')
xbmc.executebuiltin("Container.SetViewMode(%s)" % VIEW)
setView('movies', 'movies-view')
xbmc.executebuiltin("Container.SetViewMode(%s)" % VIEW)
def remotecontrol(name):
name = "root"
dirlist = ['remotedelay<>num<>The repeat delay for a LIRC remote control. A delay value between 1 and 20 before a remote button starts repeating on a long keypress (i.e. continuously sending button pushes while it is held down) Default: 3<>',
'remoterepeat<>num<>This used to adjust the amount of time in milliseconds between repeated keystrokes. Used to prevent key-bounce, in other words prevents xbmc (lirc?) seeing one key press as multiple key presses with certain remotes<>',
'controllerdeadzone<>text<>The controller deadzone is the region of movement around the center which is not recognized by the device. Because joysticks can have noise (report motion when still) and bias (report an offset when centered), spurious events can be reported even though the controller is not being touched. If you notice these kinds of events, you most likely need to increase your controllers deadzone (both axes recommended). The values range from 0.0 (no deadzone, XBMC will see all input your controller is capable of registering) to 1.0 (XBMC will ignore all input inside of the devices physical limits)<>',
'enablemultimediakeys<>bool<>This setting only has any effect on Windows versions of XBMC, and only applies to builds from 28th May 2011 onwards. In Windows the multimedia keys generate a WM_APPCOMMAND message in addition the keypress. XBMC processes both keypresses and the WM_APPCOMMAND messages, and the end result would be that the command is executed twice. To avoid this, by default multimedia keypresses are disabled. Although it should rarely be necessary, the enablemultimediakeys setting allows you to enable the multimedia keys<>']
for d in dirlist:
splitd=d.split('<>')
description = splitd[2]
currentsetting = ADDON.getSetting(str(splitd[0]))
if '<>' in currentsetting:
currentsetting = currentsetting.split('<>')[2]
if currentsetting == 'DISABLED':
d = "[COLOR red]%s[/COLOR] (%s)" % (splitd[0], currentsetting)
else:
d = "[COLOR lime]%s[/COLOR] (%s)" % (splitd[0], currentsetting)
addDirPlayable(d,name,200,currentsetting,description,splitd[1],'')
xbmc.executebuiltin("Container.SetViewMode(%s)" % VIEW)
setView('movies', 'movies-view')
xbmc.executebuiltin("Container.SetViewMode(%s)" % VIEW)
def interface(name):
name = "root"
dirlist = ['allowd3d9ex<>bool<>Applies only to Windows Vista and up. Values: true/false. Allows xbmc to use Direct3D 9Ex, which is a bit more stable and robust than Direct3D 9<>',
'restrictcapsmask<>dialog<>Windows only. A bitmask to prevent xbmc from using detected texture capabilities of the GPU. This helps work around buggy hardware/drivers\n1: prevent the use of compressed textures (DXT1, DXT3, DXT5) 2: prevent the use of non-power-of-two dimensions for textures 4: prevent the use of compressed textures with non-power-of-two dimensions.<>["1","2","4","DISABLED"]',
'forced3d9ex<>bool<>Windows only: XBMC attempts to detect drivers released for a version earlier than Vista, to avoid using advanced features which are often not emulated correctly. The detection may be incorrect and this setting allows forcing the using of D3D9Ex<>',
'gui$algorithmdirtyregions<>dialog<>Enable dirty-region processing. Dirty regions are any parts of the screen that have changed since the last frame. By not re-rendering what has not changed, big speed gains can be seen. Because all GPUs work differently, only Mode 3, combined with nofliptimeout=0, is guaranteed to be safe for everyone, but current timing issues with nofliptimeout keep this from being the default\n0: Off-The entire viewport is always rendered\n1: Union-All dirty regions are grouped into the smallest possible rectangle. This is typically the fastest mode for slower GPUs due to only making one pass.\n2: Cost reduction-Each dirty region is presented separately, in as many passes as there are regions\n3: Whole Screen-The entire screen is rendered if there are any dirty regions. This, combined with nofliptimeout is a safe default for drivers that clear buffer contents (manifests as blinking or vibrating images).<>["0","1","2","3","DISABLED"]',
'gui$visualizedirtyregions<>bool<>Enable dirty-region visualization. Paints a rectangle over marked controls<>',
'gui$nofliptimeout<>text<>Specifies the timeout in milliseconds after which XBMC will not flip the graphics buffers anymore when nothing has been rendered, this lowers both CPU and GPU usage\n-1: disabled\n0 or higher: timeout in milliseconds (0 is default)<>',
'showexitbutton<>bool<>Setting to hide the exit button, useful for people running appliance based setups where exit would only confuse/complicate the user. Modifiable via the advancedsettings.xml by setting showexitbutton to false, default is true (show)<>',
'screensaver$dimlevel<>num<>To avoid potential worries of plasma screen burn-in, you can set the Dim screensaver fade level to 0% here or in the Settings\n0 will remove the Fade Level control from the settings screen altogether<>',
'fullscreen<>bool<>Starts XBMC in full screen (check resolutions!)<>',
'cputempcommand<>text<>Provide a shell command XBMC will use to get CPU temperature. It should print out only "[temp as integer] [scale as one of "CcFf"]"<>',
'gputempcommand<>text<>Provide a shell command XBMC will use to get GPU temperature. It should print out only "[temp as integer] [scale as one of "CcFf"]"<>',
'glrectanglehack<>dialog<>Problems with ghosting or videos which are only played back in the left upper quarter? The following ATI hack may solve it<>["yes","DISABLED"]',
'alwaysontop<>dialog<>Added in XBMC v9.11 (Windows OS only). Keeps XBMC always on top when windowed<>["yes","DISABLED"]']
for d in dirlist:
splitd=d.split('<>')
description = splitd[2]
options = splitd[3]
currentsetting = ADDON.getSetting(str(splitd[0]))
if '<>' in currentsetting:
currentsetting = currentsetting.split('<>')[2]
if currentsetting == 'DISABLED':
d = "[COLOR red]%s[/COLOR] (%s)" % (splitd[0], currentsetting)
else:
d = "[COLOR lime]%s[/COLOR] (%s)" % (splitd[0], currentsetting)
addDirPlayable(d,name,200,currentsetting,description,splitd[1],options)
xbmc.executebuiltin("Container.SetViewMode(%s)" % VIEW)
setView('movies', 'movies-view')
xbmc.executebuiltin("Container.SetViewMode(%s)" % VIEW)
def unsorted(name):
name = "masterlock"
dirlist = ['startuplock<>bool<>true prompts user for code upon startup\nIf you enable, setting will be removed from UI<>',
'automastermode<>bool<>automatically enters master mode if the master code is given\nIf you enable, setting will be removed from UI<>',
'loginlock<>bool<>whether to use locks on login screen or not\nIf you enable, setting will be removed from UI<>',
'maxretries<>num<>enter the max number of retries to input code, 3 is default.<>']
for d in dirlist:
splitd=d.split('<>')
description = splitd[2]
options = splitd[3]
currentsetting = ADDON.getSetting(str(splitd[0]))
if '<>' in currentsetting:
currentsetting = currentsetting.split('<>')[2]
if currentsetting == 'DISABLED':
d = "[COLOR red]%s[/COLOR] (%s)" % (splitd[0], currentsetting)
else:
d = "[COLOR lime]%s[/COLOR] (%s)" % (splitd[0], currentsetting)
addDirPlayable(d,name,200,currentsetting,description,splitd[1],options)
xbmc.executebuiltin("Container.SetViewMode(%s)" % VIEW)
setView('movies', 'movies-view')
xbmc.executebuiltin("Container.SetViewMode(%s)" % VIEW)
def edl(name):
name = "edl"
dirlist = ['mergeshortcommbreaks<>bool<>If true, commercial breaks will be merged according to the remaining options.<>',
'maxcommbreaklength<>num<>Commercial breaks will not be merged if the total length of the commercial break would be greater than this (seconds)<>',
'mincommbreaklength<>num<>After merging, commercial breaks shorter than this will be removed (seconds)<>',
'maxcommbreakgap<>num<>Commercial breaks that are further apart than this will not be merged (seconds)<>',
               'commbreakautowait<>num<>How long to wait before automatically skipping when the start of a commercial break is reached (seconds)<>',
'commbreakautowind<>num<>How long to rewind after automatically skipping to the end of the commercial break (seconds)<>']
for d in dirlist:
splitd=d.split('<>')
description = splitd[2]
currentsetting = ADDON.getSetting(str(splitd[0]))
if '<>' in currentsetting:
currentsetting = currentsetting.split('<>')[2]
if currentsetting == 'DISABLED':
d = "[COLOR red]%s[/COLOR] (%s)" % (splitd[0], currentsetting)
else:
d = "[COLOR lime]%s[/COLOR] (%s)" % (splitd[0], currentsetting)
addDirPlayable(d,name,200,currentsetting,description,splitd[1],'')
xbmc.executebuiltin("Container.SetViewMode(%s)" % VIEW)
def pvr(name):
name = "pvr"
dirlist = ['timecorrection<>num<>Correct all times (epg tags, timer tags, recording tags) by this amount of minutes<>',
'infotoggleinterval<>num<>If there is more than one pvr gui info item available (e.g. multiple recordings active at the same time), use this toggle delay in milliseconds<>',
               'minvideocachelevel<>num<>Cache up to this level in the video buffer before resuming playback if the buffers run dry<>',
'maxvideocachelevel<>num<>Cache up to this level in the audio buffer before resuming playback if the buffers run dry<>',
'cacheindvdplayer<>bool<>Cache PVR stream in DVDPlayer<>',
'channeliconsautoscan<>bool<>Automatically scan user defined folder for channel icons when loading internal channel groups<>',
'autoscaniconsuserset<>bool<>Mark channel icons populated by auto scan as "user set"<>',
'numericchannelswitchtimeout<>num<>Time in ms before the numeric dialog auto closes when confirmchannelswitch is disabled<>']
for d in dirlist:
splitd=d.split('<>')
description = splitd[2]
currentsetting = ADDON.getSetting(str(splitd[0]))
if '<>' in currentsetting:
currentsetting = currentsetting.split('<>')[2]
if currentsetting == 'DISABLED':
d = "[COLOR red]%s[/COLOR] (%s)" % (splitd[0], currentsetting)
else:
d = "[COLOR lime]%s[/COLOR] (%s)" % (splitd[0], currentsetting)
addDirPlayable(d,name,200,currentsetting,description,splitd[1],'')
setView('movies', 'movies-view')
xbmc.executebuiltin("Container.SetViewMode(%s)" % VIEW)
def epg(name):
name = "epg"
dirlist = ['lingertime<>num<>keep X minutes of past epg data (default: 24h)<>',
'updatecheckinterval<>num<>Check if tables need to be updated every X minutes<>',
'lingercleanupintervaltime<>num<>Remove old entries from the EPG every X minutes<>',
'activetagcheckinterval<>num<>Check for updated active tags every X minute<>',
'retryinterruptedupdateinterval<>num<>Retry an interrupted epg update after X seconds<>',
'updateemptytagsinterval<>num<>Override user selectable EPG update interval (minutes) for empty EPG tags<>',
'displayupdatepopup<>bool<>Display a progress popup while updating EPG data from clients<>',
'displayincrementalupdatepopup<>bool<>also display a progress popup while doing incremental EPG updates<>']
for d in dirlist:
splitd=d.split('<>')
description = splitd[2]
options = splitd[3]
currentsetting = ADDON.getSetting(str(splitd[0]))
if '<>' in currentsetting:
currentsetting = currentsetting.split('<>')[2]
if currentsetting == 'DISABLED':
d = "[COLOR red]%s[/COLOR] (%s)" % (splitd[0], currentsetting)
else:
d = "[COLOR lime]%s[/COLOR] (%s)" % (splitd[0], currentsetting)
addDirPlayable(d,name,200,currentsetting,description,splitd[1],options)
setView('movies', 'movies-view')
xbmc.executebuiltin("Container.SetViewMode(%s)" % VIEW)
def samba(name):
name = name.lower()
dirlist = ['doscodepage<>text<>code page to use for filenames<>',
'clienttimeout<>num<>timeout (in seconds)<>',
'statfiles<>bool<>Set to false to disable smb stat() on files to speed up listings of large directories (over slow links)<>']
for d in dirlist:
splitd=d.split('<>')
description = splitd[2]
options = splitd[3]
currentsetting = ADDON.getSetting(str(splitd[0]))
if '<>' in currentsetting:
currentsetting = currentsetting.split('<>')[2]
if currentsetting == 'DISABLED':
d = "[COLOR red]%s[/COLOR] (%s)" % (splitd[0], currentsetting)
else:
d = "[COLOR lime]%s[/COLOR] (%s)" % (splitd[0], currentsetting)
addDirPlayable(d,name,200,currentsetting,description,splitd[1],options)
setView('movies', 'movies-view')
xbmc.executebuiltin("Container.SetViewMode(%s)" % VIEW)
def tuxbox(name):
name = name.lower()
dirlist = ['audiochannelselection<>bool<>"audiochannelselection"; default is "false", "true" will popup the audio channel selection if there is more then one audio stream<>',
'submenuselection<>bool<>"submenuselection"; default is "false", "true" will popup the Submenu selection<>',
'defaultrootmenu<>dialog<>"defaultrootmenu"; MODE: 0 = TV (Default), 1 = Radio, 2 = Data, 3 = Movies, 4 = Root<>["0","1","2","3","4","DISABLED"]',
'defaultsubmenu<>dialog<>"defaultsubmenu"; 1=Services 2=Satellites 3=Providers 4=Bouquets (default)<>["1","2","3","4","DISABLED"]',
'pictureicon<>bool<>"pictureicon"; default is "true", will use the Picture Icons from folder /UserData/PictureIcon/<>',
'epgrequesttime<>num<>"epgrequesttime"; default is "10", 0-3600, defines the time in seconds between epg queries, some tuxbox devices need longer to response (Minimum: 1, Maximum: 3600)<>',
'zapwaittime<>num<>"zapwaittime"; default is "0" (0 = OFF), defines the time in seconds to wait for a valid PMT after the zaping was send (Minimum: 0, Maximum: 120)<>']
for d in dirlist:
splitd=d.split('<>')
description = splitd[2]
options = splitd[3]
currentsetting = ADDON.getSetting(str(splitd[0]))
if '<>' in currentsetting:
currentsetting = currentsetting.split('<>')[2]
if currentsetting == 'DISABLED':
d = "[COLOR red]%s[/COLOR] (%s)" % (splitd[0], currentsetting)
else:
d = "[COLOR lime]%s[/COLOR] (%s)" % (splitd[0], currentsetting)
addDirPlayable(d,name,200,currentsetting,description,splitd[1],options)
setView('movies', 'movies-view')
xbmc.executebuiltin("Container.SetViewMode(%s)" % VIEW)
def network(name):
name = name.lower()
dirlist = ['curlclienttimeout<>num<>Timeout in seconds for libcurl (http/ftp) connections<>',
'curllowspeedtime<>num<>Time in seconds for libcurl to consider a connection lowspeed<>',
'httpproxyusername<>text<>username for Basic Proxy Authentication<>',
'httpproxypassword<>text<>password for Basic Proxy Authentication<>',
'cachemembuffersize<>text<>Number of bytes used for buffering streams ahead in memory XBMC will not buffer ahead more than this. WARNING: for the bytes set here, XBMC will consume 3x the amount of RAM. When set to 0 the cache will be written to disk instead of RAM, as of v12 Frodo \nRemember, 1MB = 1,048,576 bytes<>',
'buffermode<>dialog<>Choose what to buffer: 0) Buffer all internet filesystems (like "2" but additionally also ftp, webdav, etc.) (default) 1) Buffer all filesystems (including local) 2) Only buffer true internet filesystems (streams) (http, etc.) 3) No buffer<>["0","1","2","3","DISABLED"]',
'readbufferfactor<>text<>This factor determines the max readrate in terms of readbufferfactor * avg bitrate of a video file.This can help on bad connections to keep the cache filled. It will also greatly speed up buffering. Default value 1.0<>']
for d in dirlist:
splitd=d.split('<>')
description = splitd[2]
options = splitd[3]
currentsetting = ADDON.getSetting(str(splitd[0]))
if '<>' in currentsetting:
currentsetting = currentsetting.split('<>')[2]
if currentsetting == 'DISABLED':
d = "[COLOR red]%s[/COLOR] (%s)" % (splitd[0], currentsetting)
else:
d = "[COLOR lime]%s[/COLOR] (%s)" % (splitd[0], currentsetting)
addDirPlayable(d,name,200,currentsetting,description,splitd[1],options)
setView('movies', 'movies-view')
xbmc.executebuiltin("Container.SetViewMode(%s)" % VIEW)
def jsonrpc(name):
dirlist = ['compactoutput<>bool<>Prettify json output<>',
'tcpport<>num<>Change the default TCP port used by XBMC for JSON RPC (which is 9090) to any other valid port<>']
for d in dirlist:
splitd=d.split('<>')
description = splitd[2]
options = splitd[3]
currentsetting = ADDON.getSetting(str(splitd[0]))
if '<>' in currentsetting:
currentsetting = currentsetting.split('<>')[2]
if currentsetting == 'DISABLED':
d = "[COLOR red]%s[/COLOR] (%s)" % (splitd[0], currentsetting)
else:
d = "[COLOR lime]%s[/COLOR] (%s)" % (splitd[0], currentsetting)
addDirPlayable(d,name,200,currentsetting,description,splitd[1],options)
setView('movies', 'movies-view')
xbmc.executebuiltin("Container.SetViewMode(%s)" % VIEW)
def video(name):
name = name.lower()
addDir("Adjust Refresh Rate", 'url',202, '','blank','Settings for when "Adjust refreshrate to match video fps" is enabled. "Adjust refreshrate to match video fps" will try to select the best refreshrate for the video fps but it does not always get it right, for example it might switch to an unsupported refreshrate. You can add overrides here to switch to a certain refreshrate based on video fps. It is possible to add as many overrides as you need. Overrides are processed in order, if the first one does not match the fps or no refreshrates match that override, it will try the next one until no overrides are left')
addDir("Latency", 'url',205, '','blank','Compensate display latency (video lag). Latency is given in msecs.. Requires XBMC 11.0 (Eden) or later.')
addDir("Stagefright", 'url',206, '','blank','Enable and disable codecs')
dirlist = ['subsdelayrange<>num<>Delay range for subtitles, in seconds.<>',
'audiodelayrange<>num<>Delay range for audio/video sync, in seconds.<>',
'smallstepbackseconds<>num<>Length of the small skip back when playing a video<>',
'usetimeseeking<>bool<>Whether to use time based or percentage based seeking.<>',
'timeseekforward<>num<>Time to seek forward in seconds when doing a short seek. Defaults to 30.<>',
'timeseekbackward<>num_minus<>Time to seek backward in seconds when doing a short seek. Defaults to -30.<>',
'timeseekforwardbig<>num<>Time to seek forward in seconds when doing a long seek. Defaults to 600 (10 minutes).<>',
'timeseekbackwardbig<>num_minus<>Time to seek forward in seconds when doing a long seek. Defaults to -600 (10 minutes).<>',
'percentseekforward<>num<>Amount to seek forward as a percentage, when doing a short seek. Defaults to 2.<>',
'percentseekbackward<>num_minus<>Amount to seek backward as a percentage, when doing a short seek. Defaults to -2.<>',
'percentseekforwardbig<>num<>Amount to seek forward as a percentage, when doing a long seek. Defaults to 10.<>',
'percentseekbackwardbig<>num_minus<>Amount to seek forward as a percentage, when doing a long seek. Defaults to -10.<>',
'blackbarcolour<>num<>colour of the black bars (0->255), (black->white) on videos.<>',
'fullscreenonmoviestart<>bool<>Whether to go to fullscreen or not when starting a movie. Defaults to true.<>',
'defaultplayer<>text<>Set the default video player: dvdplayer or extplayer.<>',
'excludefromscan<>text<>Regular expressions that if evaluated to true will not be added to library. Separate each word with a space (" "), string will be converted to correct format<>text',
'excludefromlisting<>text<>Regular expressions that if evaluated to true will not be displayed in Files View. Separate each word with a space (" "), string will be converted to correct format<>text',
'playcountminimumpercent<>num<>Minimum percentage that has to be played before it is marked as watched. Set to 101 to never auto-mark items as watched<>',
'ignoresecondsatstart<>num<>Number of seconds to ignore at video start after which a resume point is created<>',
'ignorepercentatend<>num<>percentage of video to ignore at the end. If you stop watching the video here no resume point is created. Set to 101 to never save a resume point. The video is already marked as watched at 90%, see above "ignoresecondsatstart" setting<>',
'vdpauscaling<>bool<>scales with vdpau instead of opengl and turns on its HQ scaler when available, enabling this might slow down rendering and cause framedrops especially on ION systems. This setting requires a vdpau feature set C gpu<>',
'enablehighqualityhwscalers<>bool<>allow turning on the spline36 and lanczos3 shader (for GL builds)<>',
'ppffmpegdeinterlacing<>text<>override the deinterlacing options passed to libpostproc (i.e. linblenddeint)<>',
               'ppffmpegpostprocessing<>text<>override the post processing options passed to libpostproc when "Video post-processing" is activated in GUI Videos-Settings-Playback (i.e. ha:128:7,va,dr)<>',
'allowmpeg4vdpau<>bool<>allows mpeg4 decoding with vdpau, currently broken<>',
'allowmpeg4vaapi<>bool<>allows mpeg4 decoding with vaapi, currently broken on Nvidia cards, not implemented on Intel<>',
'autoscalemaxfps<>num<>when scaling method is set to auto, bilinear is chosen when the fps is higher than this limit, the default is 30<>',
'checkdxvacompatibility<>bool<>Advanced setting not present: let xbmc autodetect cards that support H264 profile > L4.1. Set value to false to enable DXVA no matter what. Set value to true if xbmc does not autodetect that the graphics card does not support > L4.1<>',
'useocclusionquery<>dialog<>Use an occlusion query when capturing videoframes, -1 means auto detect, 0 means disabled, 1 means enabled, the default is -1.<>["-1","0","1","DISABLED"]',
'fpsdetect<>dialog<>fps detection for video playback, 0 = trust codec fps, 1 = recalculate from video timestamps with uniform spacing, 2 = recalculate from video timestamps always<>["0","1","2","DISABLED"]',
'stereoscopicregex3d<>textreg<>Filename triggers for 3D (stereoscopic) mode.<>',
'stereoscopicregexsbs<>textreg<>Filename triggers for 3D (stereoscopic) mode.<>',
'stereoscopicregextab<>textreg<>Filename triggers for 3D (stereoscopic) mode.<>',
'disablehi10pmultithreading<>bool<>If you want hi10p decoded only on one CPU, set this to true. It will be renamed to disableswmultithreading in v14 Helix<>']
for d in dirlist:
splitd=d.split('<>')
description = splitd[2]
options = splitd[3]
currentsetting = ADDON.getSetting(str(splitd[0]))
if '<>' in currentsetting:
currentsetting = currentsetting.split('<>')[2]
if currentsetting == 'DISABLED':
d = "[COLOR red]%s[/COLOR] (%s)" % (splitd[0], currentsetting)
else:
d = "[COLOR lime]%s[/COLOR] (%s)" % (splitd[0], currentsetting)
addDirPlayable(d,name,200,currentsetting,description,splitd[1], options)
setView('movies', 'movies-view')
xbmc.executebuiltin("Container.SetViewMode(%s)" % VIEW)
def adjustrefreshrates(name):
name = name.lower().replace(' ', '')
dirlist = ['override-av<>override<>Overrides are processed in order, if the first one does not match the fps or no refreshrates match that override, it will try the next one until no overrides are left<>',
'override-bv<>override<>Overrides are processed in order, if the first one does not match the fps or no refreshrates match that override, it will try the next one until no overrides are left<>',
'override-range-av<>override_range<>You can also specify the fps range yourself<>',
'override-range-bv<>override_range<>You can also specify the fps range yourself<>',
               'fallback-av<>fallback<>Switch to the first found refreshrate<>',
               'fallback-bv<>fallback<>Switch to the first found refreshrate<>',
               'fallback-range-av<>fallback_range<>You can also specify the range for the fallback yourself<>',
'fallback-range-bv<>fallback_range<>You can also specify the range for the fallback yourself<>']
for d in dirlist:
splitd=d.split('<>')
description = splitd[2]
options = splitd[3]
currentsetting = ADDON.getSetting(str(splitd[0]))
if '<>' in currentsetting:
            currentsetting = currentsetting.split('<>')[2].replace('\n', '').replace('&lt;', '<').replace('&gt;', '>').replace(' ', '')
if currentsetting == 'DISABLED':
d = "[COLOR red]%s[/COLOR] (%s)" % (splitd[0], currentsetting)
else:
d = "[COLOR lime]%s[/COLOR] (%s)" % (splitd[0], currentsetting)
addDirPlayable(d,name,200,currentsetting,description,splitd[1], options)
setView('movies', 'movies-view')
xbmc.executebuiltin("Container.SetViewMode(%s)" % VIEW)
def stagefright(name):
name = name.lower().replace(' ', '')
dirlist = ['useavccodec<>dialog<>-1 is default, 0 is never used this codec, 1 is always use this codec, bypassing blacklist<>["-1","0","1","DISABLED"]',
'usevc1codec<>dialog<>-1 is default, 0 is never used this codec, 1 is always use this codec, bypassing blacklist<>["-1","0","1","DISABLED"]',
'usevpxcodec<>dialog<>-1 is default, 0 is never used this codec, 1 is always use this codec, bypassing blacklist<>["-1","0","1","DISABLED"]',
'usemp4codec<>dialog<>-1 is default, 0 is never used this codec, 1 is always use this codec, bypassing blacklist<>["-1","0","1","DISABLED"]',
'usempeg2codec<>dialog<>-1 is default, 0 is never used this codec, 1 is always use this codec, bypassing blacklist<>["-1","0","1","DISABLED"]',
'useswrenderer<>bool<>True or False<>']
for d in dirlist:
splitd=d.split('<>')
description = splitd[2]
options = splitd[3]
currentsetting = ADDON.getSetting(str(splitd[0]))
if '<>' in currentsetting:
            currentsetting = currentsetting.split('<>')[2].replace('\n', '').replace('&lt;', '<').replace('&gt;', '>').replace(' ', '')
if currentsetting == 'DISABLED':
d = "[COLOR red]%s[/COLOR] (%s)" % (splitd[0], currentsetting)
else:
d = "[COLOR lime]%s[/COLOR] (%s)" % (splitd[0], currentsetting)
addDirPlayable(d,name,200,currentsetting,description,splitd[1], options)
setView('movies', 'movies-view')
xbmc.executebuiltin("Container.SetViewMode(%s)" % VIEW)
def latency(name):
name = name.lower()
dirlist = ['delay-av<>num1<>Global default display latency<>',
'refresh-rate-av<>num2<>Override latency for given display (not video) refresh rates. When XBMC is in windowed mode, override is ignored. Multiple overrides are allowed<>',
'refresh-range-bv<>num3<>Override latency for given range of display (not video) refresh rates. When XBMC is in windowed mode, override is ignored. Multiple overrides are allowed<>']
for d in dirlist:
splitd=d.split('<>')
description = splitd[2]
options = splitd[3]
currentsetting = ADDON.getSetting(str(splitd[0]))
if '<>' in currentsetting:
            currentsetting = currentsetting.split('<>')[2].replace('\n', '').replace('&lt;', '<').replace('&gt;', '>').replace(' ', '')
if currentsetting == 'DISABLED':
d = "[COLOR red]%s[/COLOR] (%s)" % (splitd[0], currentsetting)
else:
d = "[COLOR lime]%s[/COLOR] (%s)" % (splitd[0], currentsetting)
addDirPlayable(d,name,200,currentsetting,description,splitd[1], options)
setView('movies', 'movies-view')
xbmc.executebuiltin("Container.SetViewMode(%s)" % VIEW)
def audio(name):
name = name.lower()
dirlist = ['headroom<>dialog<>Amount of headroom XBMC should use above the maximum volume level, in decibels. Defaults to 0, valid values 0, 6, 12.<>["0","6","12","DISABLED"]',
'_defaultplayer<>dialog<>Default audio player: paplayer or dvdplayer<>["paplayer","dvdplayer","DISABLED"]',
'ac3downmixgain<>dialog<>Amount of gain (dB) to be applied to AC3 streams that have been mixed-down to 2 channels. Default is 12.0. Valid values are: -96.0 to 96.0<>["-96","-84","-72","-60","-48","-36","-24","-12","0","12","24","36","48","60","72","84","96","DISABLED"]',
'_playcountminimumpercent<>num<>Minimum percentage that has to be played before it is considered for incrementing in the Top 100 database view, or for last.fm submittal<>',
'resample<>num<>Force a specific samplerate to be produced by paplayer to send to the audio hardware, i.e HDMI Audio is usually only capable of 48000<>',
'applydrc<>bool<>Whether to use DRC on AC3 and DTS streams<>',
'dvdplayerignoredtsinwav<>bool<>set to true to skip decoding of DTS in wav files when using dvdplayer (10.10 only)<>',
'limiterhold<>num<>default values for limiter/compressor<>',
'limiterrelease<>num<>default values for limiter/compressor<>',
'_excludefromscan<>text<>Regular expressions that if evaluated to true will not be added to library. Separate each word with a space (" "), string will be converted to correct format<>',
'_excludefromlisting<>text<>Regular expressions that if evaluated to true will not be displayed in Files View. Separate each word with a space (" "), string will be converted to correct format<>',
               'forceDirectSound<>dialog<>Windows-specific - will not use WASAPI. 0 = false, 1 = true<>["0","1","DISABLED"]',
'audiophile<>dialog<>forces playback of original format, will not down/upmix next song to match current, not compatible with cross-fading 0 = false, 1 = true<>["0","1","DISABLED"]',
'audiosinkbufferdurationmsec<>num<>Windows-specific, buffer time in msec, hard minimum of 50msec<>',
               'allowtranscode44100<>dialog<>allows 44100Hz when transcoding for SPDIF devices 0=false, 1=true<>["0","1","DISABLED"]',
'streamsilence<>dialog<>Forces original AE behaviour where an audio signal is constantly streamed to the audio device, even if silent. If 1 or true, this prevents some receivers from losing the signal/source, and prevents sinks re-opening and possible receiver switching with each new stream after any silence. If 0 or false, enables Eden-style releasing of the audio device so external players, programs and the system can access the audio device, i.e. prevents XBMC from hogging the audio device.<>["0","1","DISABLED"]']
for d in dirlist:
splitd=d.split('<>')
description = splitd[2]
options = splitd[3]
currentsetting = ADDON.getSetting(str(splitd[0]))
if '<>' in currentsetting:
currentsetting = currentsetting.split('<>')[2]
if currentsetting == 'DISABLED':
d = "[COLOR red]%s[/COLOR] (%s)" % (splitd[0], currentsetting)
else:
d = "[COLOR lime]%s[/COLOR] (%s)" % (splitd[0], currentsetting)
addDirPlayable(d,name,200,currentsetting,description,splitd[1],options)
setView('movies', 'movies-view')
xbmc.executebuiltin("Container.SetViewMode(%s)" % VIEW)
def edit_setting(name,url,iconimage,list,options):
if 'COLOR' in name:
name = regex_from_to(name, 'COLOR', '/COLOR').replace(' red','').replace(' lime','').replace('[','').replace(']','')
    data = None
    if list == 'num':
data = keypad(name,iconimage)
elif list == 'num1':
data = keypad_root(name,iconimage)
elif list == 'num2':
data = keypad_root2(name,iconimage)
elif list == 'num3':
data = keypad_root3(name,iconimage)
elif list == 'num_minus':
data = keypad_minus(name,iconimage)
elif list == 'text':
data = keyboard(name,iconimage)
elif list == 'bool':
data = bool(name)
elif list == 'dialog':
data = dialog(name,options)
elif list == 'override':
data = override(name,options)
elif list == 'override_range':
data = override_range(name,options)
elif list == 'fallback':
data = fallback(name,options)
elif list == 'fallback_range':
data = fallback_range(name,options)
    if data:  # dialogs may return None or '' when cancelled
if data != 'DISABLED':
data = "%s<>%s<>%s" % (url, name, str(data))
ADDON.setSetting(name, value=str(data))
xbmc.executebuiltin("Container.Refresh")
def write_xml(name,dir_path):
count = 0
readsettings = read_from_file(SETTINGS_PATH)
settings = regex_get_all(readsettings, '<setting', '/>')
for s in settings:
try:
value = regex_from_to(s, 'value="', '"')
if value != 'DISABLED' and 'id="viewtype"' not in s:
count = count + 1
except:
pass
if count > 0:
write_to_file(dir_path, '<!-- Created using Easy Advanced Settings addon -->\n<advancedsettings>\n', False)
settinglist = ['video<>','audio<>','network<>','edl<>','pvr<>','epg<>','samba<>','videolibrary<>','videodatabase<>','musicdatabase<>','musiclibrary<>','karaoke<>','tuxbox<>','pathsubstitution<>','masterlock<>']
for s in settinglist:
buildsection(s, dir_path)
for s in settings:
try:
value = regex_from_to(s, 'value="', '"')
            except:
                value = 'DISABLED'  # unparsable entry; treat as disabled so it is skipped
if value != 'DISABLED' and 'id="viewtype"' not in s:
rootid = value.split('<>')[0]
id = value.split('<>')[1]
id1 = id.replace('_','')
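                # Stored values have the form 'rootid<>id<>setting'; a '$' inside the id
                # encodes one or two extra levels of XML nesting (the 'two'/'three' cases below).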
if '$' in id:
multi = "two"
splitid = id.split('$')
id1 = splitid[0]
id2 = splitid[1]
try:
id3 = splitid[2]
if len(id3)>0:
multi = 'three'
except:
pass
else:
multi = "one"
setting = value.split('<>')[2]
if multi == 'two':
settingstring = " <%s>\n <%s>%s</%s>\n </%s>\n" % (id1, id2, setting.replace(' ', '|'),id2, id1)
elif multi == 'three':
settingstring = " <%s>\n <%s>\n <%s>%s</%s>\n </%s>\n </%s>\n" % (id1, id2, id3,setting.replace(' ', '|'), id3, id2, id1)
elif rootid == 'root':
settingstring = " <%s>%s</%s>\n" % (id1, setting, id1)
elif '-av' in id or '-bv' in id or rootid == 'stagefright':
id = id.replace('-av','').replace('-bv','').replace('-range','').replace('-rate','')
                    setting = setting.replace('&#10;', '\n').replace('&lt;', '<').replace('&gt;', '>')
settingstring = " <%s>\n <%s>\n <%s>%s\n </%s>\n </%s>\n </%s>\n" % ('video', rootid,id, setting,id, rootid,'video')
if id == 'delay':
settingstring = " <%s>\n <%s>%s\n </%s>\n </%s>\n" % ('video', rootid, setting, rootid,'video')
if rootid == 'stagefright':
settingstring = " <%s>\n <%s>\n <%s>%s</%s>\n </%s>\n </%s>\n" % ('video', rootid, id,setting,id, rootid,'video')
else:
settingstring = " <%s>\n <%s>%s</%s>\n </%s>\n" % (rootid, id1, setting, id1, rootid)
if rootid not in str(settinglist):#!= 'video' and rootid != 'audio' and rootid != 'network' and rootid != 'edl' and rootid != 'pvr' and rootid != 'samba' and rootid != 'epg'
write_to_file(dir_path, settingstring, True)
write_to_file(dir_path, '</advancedsettings>', True)
else:
write_to_file(dir_path, '', False)
notification('Easy Advanced Settings', 'File created', '4000', iconart)
xbmc.executebuiltin("Container.Refresh")
def buildsection(settingid, dir_path):
countsettingid = 0
setid = settingid.replace('<>','')
readsettings = read_from_file(SETTINGS_PATH)
settings = regex_get_all(readsettings, '<setting', '/>')
for s in settings:
try:
value = regex_from_to(s, 'value="', '"')
if str(settingid) in value:
countsettingid = countsettingid + 1
except:
pass
if countsettingid > 0:
write_to_file(dir_path, ' <%s>\n' % setid, True)
for s in settings:
try:
value = regex_from_to(s, 'value="', '"')
            except:
                value = 'DISABLED'  # unparsable entry; treat as disabled so it is skipped
if value != 'DISABLED' and 'id="viewtype"' not in s:
rootid = value.split('<>')[0]
id = value.split('<>')[1]
id = id.replace('_','')
if '$' in id:
multi = "two"
splitid = id.split('$')
id1 = splitid[0]
id2 = splitid[1]
try:
id3 = splitid[2]
if len(id3)>0:
multi = 'three'
except:
pass
else:
multi = "one"
setting = value.split('<>')[2]
if rootid == setid:
if (id == 'excludefromscan' or id == 'excludefromlisting'):
settingstring = " <%s>\n <regexp>[-\._ ](%s)[-\._ ]</regexp>\n </%s>\n" % (id, setting.replace(' ', '|'), id)
elif multi == 'two':
settingstring = " <%s>\n <%s>%s</%s>\n </%s>\n" % (id1, id2, setting.replace(' ', '|'),id2, id1)
elif multi == 'three':
settingstring = " <%s>\n <%s>\n <%s>%s</%s>\n </%s>\n </%s>\n" % (id1, id2, id3,setting.replace(' ', '|'), id3, id2, id1)
else:
if ":" in id:
idsplit = id.split(':')
                            settingstring = ' <%s type="%s" path="%s"/>\n' % (idsplit[0], idsplit[1], setting)
else:
settingstring = " <%s>%s</%s>\n" % (id, setting, id)
write_to_file(dir_path, settingstring, True)
write_to_file(dir_path, ' </%s>\n' % setid, True)
def keypad(name,csetting):
data = xbmcgui.Dialog().numeric(0,'Enter ' + name + ' value')
    data = int(data) if data else 0  # numeric dialog returns '' on cancel
if data>0:
return str(data)
else:
return 'DISABLED'
def keypad_root(name,csetting):
data = xbmcgui.Dialog().numeric(0,'Enter latency value')
    data = int(data) if data else 0  # numeric dialog returns '' on cancel
if data>0:
data = "\n <delay>%s</delay>" % data
return str(data)
else:
return 'DISABLED'
def keypad_root3(name,csetting):
    data = xbmcgui.Dialog().numeric(0,'Enter MIN rate (Hz) value')
    minrate = int(data) if data else 0  # numeric dialog returns '' on cancel
    if minrate > 0:
        data = xbmcgui.Dialog().numeric(0,'Enter MAX rate (Hz) value')
        maxrate = int(data) if data else 0
        if maxrate > 0:
            data = xbmcgui.Dialog().numeric(0,'Enter delay value')
            delay = int(data) if data else 0
            if delay > 0:
data = "\n <min>%s</min>\n <max>%s</max>\n <delay>%s</delay>" % (minrate,maxrate,delay)
return str(data)
else:
return 'DISABLED'
else:
return 'DISABLED'
else:
return 'DISABLED'
def keypad_root2(name,csetting):
    data = xbmcgui.Dialog().numeric(0,'Enter rate (Hz) value')
    rate = int(data) if data else 0  # numeric dialog returns '' on cancel
    if rate > 0:
        data = xbmcgui.Dialog().numeric(0,'Enter delay value')
        delay = int(data) if data else 0
        if delay > 0:
data = "\n <rate>%s</rate>\n <delay>%s</delay>" % (rate,delay)
return str(data)
else:
return 'DISABLED'
else:
return 'DISABLED'
def keypad_minus(name,csetting):
data = xbmcgui.Dialog().numeric(0,'Enter ' + name + ' value')
    data = int(data) if data else 0  # numeric dialog returns '' on cancel
if data>0:
return "%s%s" % ('-', str(data))
else:
return 'DISABLED'
def keyboard(name,csetting):
keyboard = xbmc.Keyboard(csetting, 'Enter ' + name + ' value', False)
keyboard.doModal()
if keyboard.isConfirmed():
data = keyboard.getText()
if len(data) > 0:
return data
else:
            return 'DISABLED'
    return 'DISABLED'  # keyboard was cancelled
def bool(name):
dialog = xbmcgui.Dialog()
action_list = ["True","False", "Disable"]
action_list_return = ["true","false", "DISABLED"]
    action_id = dialog.select('Select ' + name + ' option', action_list)
    if action_id < 0:  # dialog was cancelled
        return "DISABLED"
    return action_list_return[action_id]
def override(name,csetting):
keyboard = xbmc.Keyboard(csetting, 'Enter fps value', False)
keyboard.doModal()
if keyboard.isConfirmed():
fps = keyboard.getText()
if len(fps) > 0:
keyboard = xbmc.Keyboard(csetting, 'Enter refresh value', False)
keyboard.doModal()
if keyboard.isConfirmed():
refresh = keyboard.getText()
if len(refresh) > 0:
data = "\n <fps>%s</fps>\n <refresh>%s</refresh>" % (fps,refresh)
return data
else:
return 'DISABLED'
else:
return 'DISABLED'
def override_range(name,csetting):
keyboard = xbmc.Keyboard(csetting, 'Enter MIN fps value', False)
keyboard.doModal()
if keyboard.isConfirmed():
fpsmin = keyboard.getText()
if len(fpsmin) > 0:
keyboard = xbmc.Keyboard(csetting, 'Enter MAX fps value', False)
keyboard.doModal()
if keyboard.isConfirmed():
fpsmax = keyboard.getText()
if len(fpsmax) > 0:
keyboard = xbmc.Keyboard(csetting, 'Enter MIN refresh value', False)
keyboard.doModal()
if keyboard.isConfirmed():
refreshmin = keyboard.getText()
if len(refreshmin) > 0:
keyboard = xbmc.Keyboard(csetting, 'Enter MAX refresh value', False)
keyboard.doModal()
if keyboard.isConfirmed():
refreshmax = keyboard.getText()
if len(refreshmax) > 0:
data = "\n <fpsmin>%s</fpsmin>\n <fpsmax>%s</fpsmax>\n <refreshmin>%s</refreshmin>\n <refreshmax>%s</refreshmax>" % (fpsmin,fpsmax,refreshmin,refreshmax)
return data
else:
return 'DISABLED'
else:
return 'DISABLED'
else:
return 'DISABLED'
else:
return 'DISABLED'
def fallback(name,csetting):
keyboard = xbmc.Keyboard(csetting, 'Enter fallback refresh value', False)
keyboard.doModal()
if keyboard.isConfirmed():
refresh = keyboard.getText()
if len(refresh) > 0:
data = "\n <refresh>%s</refresh>" % (refresh)
return data
else:
return 'DISABLED'
else:
return 'DISABLED'
def fallback_range(name,csetting):
keyboard = xbmc.Keyboard(csetting, 'Enter MIN fallback refresh value', False)
keyboard.doModal()
if keyboard.isConfirmed():
refreshmin = keyboard.getText()
if len(refreshmin) > 0:
keyboard = xbmc.Keyboard(csetting, 'Enter MAX fallback refresh value', False)
keyboard.doModal()
if keyboard.isConfirmed():
refreshmax = keyboard.getText()
if len(refreshmax) > 0:
data = "\n <refreshmin>%s</refreshmin>\n <refreshmax>%s</refreshmax>" % (refreshmin,refreshmax)
return data
else:
return 'DISABLED'
else:
return 'DISABLED'
def dialog(name, options):
dialog = xbmcgui.Dialog()
list = regex_get_all(options, '"', '"')
action_list = list
    action_id = dialog.select('Select ' + name + ' option', action_list)
    if action_id < 0:  # dialog was cancelled
        return "DISABLED"
    return action_list[action_id]
def viewxml(name):
msg = ACTIVESETTINGSFILE
TextBoxes("[B][COLOR lime]Your advancedsettings.xml file[/B][/COLOR]",msg)
def TextBoxes(heading,anounce):
class TextBox():
"""Thanks to BSTRDMKR for this code:)"""
# constants
WINDOW = 10147
CONTROL_LABEL = 1
CONTROL_TEXTBOX = 5
def __init__( self, *args, **kwargs):
# activate the text viewer window
xbmc.executebuiltin( "ActivateWindow(%d)" % ( self.WINDOW, ) )
# get window
self.win = xbmcgui.Window( self.WINDOW )
# give window time to initialize
xbmc.sleep( 500 )
self.setControls()
def setControls( self ):
# set heading
self.win.getControl( self.CONTROL_LABEL ).setLabel(heading)
try:
f = open(anounce)
text = f.read()
except:
text=anounce
self.win.getControl( self.CONTROL_TEXTBOX ).setText(text)
return
TextBox()
def create_directory(dir_path, dir_name=None):
if dir_name:
dir_path = os.path.join(dir_path, dir_name)
dir_path = dir_path.strip()
if not os.path.exists(dir_path):
os.makedirs(dir_path)
return dir_path
def create_file(dir_path, file_name=None):
if file_name:
file_path = os.path.join(dir_path, file_name)
file_path = file_path.strip()
if not os.path.exists(file_path):
f = open(file_path, 'w')
f.write('')
f.close()
return file_path
def regex_from_to(text, from_string, to_string, excluding=True):
if excluding:
r = re.search("(?i)" + from_string + "([\S\s]+?)" + to_string, text).group(1)
else:
r = re.search("(?i)(" + from_string + "[\S\s]+?" + to_string + ")", text).group(1)
return r
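# e.g. regex_from_to('<setting value="x" />', 'value="', '"') -> 'x'
# (case-insensitive, non-greedy match between the two delimiters)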
def regex_get_all(text, start_with, end_with):
r = re.findall("(?i)" + start_with + "([\S\s]+?)" + end_with, text)
return r
def strip_text(r, f, t, excluding=True):
r = re.search("(?i)" + f + "([\S\s]+?)" + t, r).group(1)
return r
def find_list(query, search_file):
try:
content = read_from_file(search_file)
lines = content.split('\n')
index = lines.index(query)
return index
except:
return -1
def add_to_list(list, file):
if find_list(list, file) >= 0:
return
if os.path.isfile(file):
content = read_from_file(file)
else:
content = ""
lines = content.split('\n')
s = '%s\n' % list
for line in lines:
if len(line) > 0:
s = s + line + '\n'
write_to_file(file, s)
xbmc.executebuiltin("Container.Refresh")
def remove_from_list(list, file):
index = find_list(list, file)
if index >= 0:
content = read_from_file(file)
lines = content.split('\n')
lines.pop(index)
s = ''
for line in lines:
if len(line) > 0:
s = s + line + '\n'
write_to_file(file, s)
xbmc.executebuiltin("Container.Refresh")
def write_to_file(path, content, append=False, silent=False):  # default lets the two-argument calls above overwrite the file
try:
if append:
f = open(path, 'a')
else:
f = open(path, 'w')
f.write(content)
f.close()
return True
except:
if not silent:
print("Could not write to " + path)
return False
def read_from_file(path, silent=False):
try:
f = open(path, 'r')
r = f.read()
f.close()
return str(r)
except:
if not silent:
print("Could not read from " + path)
return None
def notification(title, message, ms, nart):
xbmc.executebuiltin("XBMC.notification(" + title + "," + message + "," + ms + "," + nart + ")")
def setView(content, viewType):
if content:
xbmcplugin.setContent(int(sys.argv[1]), content)
xbmc.executebuiltin("Container.SetViewMode(504)")
def get_params():
    param={}
paramstring=sys.argv[2]
if len(paramstring)>=2:
params=sys.argv[2]
cleanedparams=params.replace('?','')
if (params[len(params)-1]=='/'):
params=params[0:len(params)-2]
pairsofparams=cleanedparams.split('&')
param={}
for i in range(len(pairsofparams)):
splitparams={}
splitparams=pairsofparams[i].split('=')
if (len(splitparams))==2:
param[splitparams[0]]=splitparams[1]
return param
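# e.g. sys.argv[2] == '?url=video&mode=102&name=Video' yields
# {'url': 'video', 'mode': '102', 'name': 'Video'} (all values stay strings)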
def addDir(name,url,mode,iconimage,list,description):
suffix = ""
suffix2 = ""
u=sys.argv[0]+"?url="+urllib.quote_plus(url)+"&mode="+str(mode)+"&name="+urllib.quote_plus(name)+"&iconimage="+str(iconimage)+"&list="+str(list)+"&description="+str(description)
ok=True
liz=xbmcgui.ListItem(name + suffix + suffix2, iconImage=iconart, thumbnailImage=iconart)
liz.setProperty('fanart_image', fanart )
    liz.setInfo( type="Video", infoLabels={ "Title": name, 'plot': description, 'year': '2014', 'genre': 'Advanced Settings' } )
setView('movies', 'movies-view')
#xbmc.executebuiltin("Container.SetViewMode(52)")
ok=xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]),url=u,listitem=liz,isFolder=True)
return ok
def addDirPlayable(name,url,mode,iconimage, description,list,options):
u=sys.argv[0]+"?url="+urllib.quote_plus(url)+"&mode="+str(mode)+"&name="+urllib.quote_plus(name)+"&iconimage="+urllib.quote_plus(iconimage)+"&description="+str(description)+"&list="+str(list)+"&options="+str(options)
ok=True
liz=xbmcgui.ListItem(name, iconart, thumbnailImage=iconart)
liz.setProperty('fanart_image', fanart )
    liz.setInfo( type="Video", infoLabels={ "Title": name, 'plot': description, 'year': '2014', 'genre': 'Advanced Settings' } )
setView('movies', 'movies-view')
#xbmc.executebuiltin("Container.SetViewMode(502)")
ok=xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]),url=u,listitem=liz,isFolder=False)
return ok
params=get_params()
url=None
name=None
mode=None
iconimage=None
options=None
try:
url=urllib.unquote_plus(params["url"])
except:
pass
try:
name=urllib.unquote_plus(params["name"])
except:
pass
try:
mode=int(params["mode"])
except:
pass
try:
iconimage=urllib.unquote_plus(params["iconimage"])
except:
pass
try:
start=urllib.unquote_plus(params["start"])
except:
pass
try:
list=urllib.unquote_plus(params["list"])
except:
pass
try:
options=str(params["options"])
except:
pass
try:
description=str(params["description"])
except:
pass
if mode is None or url is None or len(url) < 1:
#ADDON.setSetting('element', value='advancedsettings')
MENU(name)
elif mode == 499:
buildmenu(name)
elif mode == 498:
checksettings(name)
elif mode == 490:
removexmlfile(name)
elif mode == 489:
resetsettings(name)
elif mode == 10:
troubleshooting(name)
elif mode == 11:
audiovideo(name)
elif mode == 12:
videolibrary(name)
elif mode == 13:
libraryartwork(name)
elif mode == 14:
videomusiclibrary(name)
elif mode == 15:
music(name)
elif mode == 16:
photos(name)
elif mode == 17:
networkmenu(name)
elif mode == 18:
filesystem(name)
elif mode == 19:
remotecontrol(name)
elif mode == 20:
interface(name)
elif mode == 21:
unsorted(name)
elif mode==101:
jsonrpc(name)
elif mode==102:
video(name)
elif mode==103:
audio(name)
elif mode==104:
edl(name)
elif mode==105:
pvr(name)
elif mode==106:
epg(name)
elif mode==115:
video_library(name)
elif mode == 202:
adjustrefreshrates(name)
elif mode == 205:
latency(name)
elif mode == 206:
stagefright(name)
elif mode==300:
networksettings(name)
elif mode==301:
samba(name)
elif mode==302:
network(name)
elif mode==303:
tuxbox(name)
elif mode == 200:
edit_setting(name,url,iconimage,list,options)
elif mode==470:
videodatabase(name)
elif mode==471:
musicdatabase(name)
elif mode==472:
musiclibrary(name)
elif mode==473:
karaoke(name)
elif mode == 495:
viewxml(name)
elif mode == 430:
pathsubstitution(name)
elif mode == 500:
write_xml(name,list)
xbmcplugin.endOfDirectory(int(sys.argv[1]))
|
gpl-2.0
|
cheery/essence
|
seed/argon/renderer.py
|
1
|
6437
|
from util import mix, in_module, rgba
from OpenGL.GL import *
from vertexformat import VertexFormat
from vertexstream import VertexStream
from texturecache import TextureCache
from patch9 import Patch9
from image import Image
from program import Program
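# 1x1 opaque white texture: the default image, so flat-colored rectangles
# share the same textured code path as everything else.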
image_empty = Image(1, 1, "\xff\xff\xff\xff")
white = rgba(255, 255, 255, 255)
fmt = VertexFormat([
('position', 2, GL_FALSE, GL_FLOAT),
('texcoord', 2, GL_FALSE, GL_FLOAT),
('color', 4, GL_TRUE, GL_UNSIGNED_BYTE),
])
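# Each quad below is emitted as two triangles (6 vertices, no index buffer);
# the diagonal pair of vertices is simply duplicated.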
def quad_gradient(stream, (left, top, width, height), (s0,t0,s1,t1), (tl,tr,bl,br)):
stream.vertex((left, top), (s0, t0), tuple(tl))
stream.vertex((left, top+height), (s0, t1), tuple(bl))
stream.vertex((left+width, top), (s1, t0), tuple(tr))
stream.vertex((left+width, top), (s1, t0), tuple(tr))
stream.vertex((left, top+height), (s0, t1), tuple(bl))
stream.vertex((left+width, top+height), (s1, t1), tuple(br))
def quad_flat(stream, (left, top, width, height), (s0,t0,s1,t1), color):
color = tuple(color)
stream.vertex((left, top), (s0, t0), color)
stream.vertex((left, top+height), (s0, t1), color)
stream.vertex((left+width, top), (s1, t0), color)
stream.vertex((left+width, top), (s1, t0), color)
stream.vertex((left, top+height), (s0, t1), color)
stream.vertex((left+width, top+height), (s1, t1), color)
def characters_flat(stream, font, (x,y), text, color):
offset = 0
sK = 1.0 / font.image.width
tK = 1.0 / font.image.height
for character in text:
metrics = font.metadata.get(character)
if metrics is None:
continue
if metrics['display']:
width = metrics["width"]
height = metrics["height"]
hbearing = metrics["hbearing"]
vbearing = -metrics["vbearing"]
s0 = metrics["uv"]["s"]
t0 = metrics["uv"]["t"]
s1 = s0 + width*sK
t1 = t0 + height*tK
left = x + offset + hbearing
top = y + vbearing
quad_flat(stream, (left, top, width, height), (s0,t0,s1,t1), color)
offset += metrics["advance"]
def patch9_cell_flat(stream, patch9, x, y, (left, top, width, height), color):
x0,s0 = patch9.gridpoint(0, x+0, width)
x1,s1 = patch9.gridpoint(0, x+1, width)
y0,t0 = patch9.gridpoint(1, y+0, height)
y1,t1 = patch9.gridpoint(1, y+1, height)
quad_flat(stream, (left+x0, top+y0, x1-x0, y1-y0), (s0, t0, s1, t1), color)
def patch9_cell_gradient(stream, patch9, x, y, (left, top, width, height), (c0,c1,c2,c3)):
x0,s0 = patch9.gridpoint(0, x+0, width)
x1,s1 = patch9.gridpoint(0, x+1, width)
y0,t0 = patch9.gridpoint(1, y+0, height)
y1,t1 = patch9.gridpoint(1, y+1, height)
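    # bilinearly interpolate the four corner colors (c0..c3) to this cell's corners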
k0 = mix(c0, c1, float(x0)/width)
k1 = mix(c0, c1, float(x1)/width)
k2 = mix(c2, c3, float(x0)/width)
k3 = mix(c2, c3, float(x1)/width)
c0 = mix(k0, k2, float(y0)/height)
c1 = mix(k1, k3, float(y0)/height)
c2 = mix(k0, k2, float(y1)/height)
c3 = mix(k1, k3, float(y1)/height)
quad_gradient(stream, (left+x0, top+y0, x1-x0, y1-y0), (s0, t0, s1, t1), (c0,c1,c2,c3))
class Renderer(object):
def __init__(self, output, default_font, program=None):
self.output = output
self.default_font = default_font
self.stream = VertexStream(fmt)
self.textures = TextureCache()
if program is None:
self.program = Program.load(in_module('shaders/flat.glsl'))
else:
self.program = program
def bind(self):
self.output.bind()
self.program.use()
self.program.uniform2f('resolution', (self.output.width, self.output.height))
glViewport(0, 0, self.output.width, self.output.height)
self.stream.vbo.bind()
fmt.use(self.program)
def unbind(self):
fmt.enduse(self.program)
self.stream.vbo.unbind()
self.program.enduse()
self.output.unbind()
def rectangle(self, rect, image=image_empty, color=white, gradient=None):
if isinstance(image, Patch9) and gradient is None:
patch9 = image
image = image.image
texture = self.textures.get(image)
texture.bind()
patch9_cell_flat(self.stream, patch9, 0, 0, rect, color)
patch9_cell_flat(self.stream, patch9, 1, 0, rect, color)
patch9_cell_flat(self.stream, patch9, 2, 0, rect, color)
patch9_cell_flat(self.stream, patch9, 0, 1, rect, color)
patch9_cell_flat(self.stream, patch9, 1, 1, rect, color)
patch9_cell_flat(self.stream, patch9, 2, 1, rect, color)
patch9_cell_flat(self.stream, patch9, 0, 2, rect, color)
patch9_cell_flat(self.stream, patch9, 1, 2, rect, color)
patch9_cell_flat(self.stream, patch9, 2, 2, rect, color)
self.stream.flush()
texture.unbind()
elif isinstance(image, Patch9):
patch9 = image
image = image.image
texture = self.textures.get(image)
texture.bind()
            patch9_cell_gradient(self.stream, patch9, 0, 0, rect, gradient)
            patch9_cell_gradient(self.stream, patch9, 1, 0, rect, gradient)
            patch9_cell_gradient(self.stream, patch9, 2, 0, rect, gradient)
            patch9_cell_gradient(self.stream, patch9, 0, 1, rect, gradient)
            patch9_cell_gradient(self.stream, patch9, 1, 1, rect, gradient)
            patch9_cell_gradient(self.stream, patch9, 2, 1, rect, gradient)
            patch9_cell_gradient(self.stream, patch9, 0, 2, rect, gradient)
            patch9_cell_gradient(self.stream, patch9, 1, 2, rect, gradient)
            patch9_cell_gradient(self.stream, patch9, 2, 2, rect, gradient)
self.stream.flush()
texture.unbind()
else:
if gradient is None:
gradient = color, color, color, color
texture = self.textures.get(image)
texture.bind()
quad_gradient(self.stream, rect, (0,1,1,0), gradient)
self.stream.flush()
texture.unbind()
def text(self, pos, text, font=None, color=white):
font = self.default_font if font is None else font
texture = self.textures.get(font.image)
texture.bind()
characters_flat(self.stream, font, pos, text, color)
self.stream.flush()
texture.unbind()
|
gpl-3.0
|
savoirfairelinux/OpenUpgrade
|
addons/base_report_designer/plugin/openerp_report_designer/bin/script/Repeatln.py
|
90
|
13231
|
#########################################################################
#
# Copyright (c) 2003-2004 Danny Brewer [email protected]
# Copyright (C) 2004-2010 OpenERP SA (<http://openerp.com>).
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# See: http://www.gnu.org/licenses/lgpl.html
#
#############################################################################
import uno
import string
import unohelper
import xmlrpclib
from com.sun.star.task import XJobExecutor
if __name__<>"package":
from lib.gui import *
from lib.error import ErrorDialog
from lib.functions import *
from ServerParameter import *
from lib.logreport import *
from lib.rpc import *
from LoginTest import *
database="test_db1"
uid = 3
#class RepeatIn:
class RepeatIn( unohelper.Base, XJobExecutor ):
def __init__(self, sObject="", sVariable="", sFields="", sDisplayName="", bFromModify=False):
# Interface Design
LoginTest()
self.logobj=Logger()
if not loginstatus and __name__=="package":
exit(1)
self.win = DBModalDialog(60, 50, 180, 250, "RepeatIn Builder")
self.win.addFixedText("lblVariable", 2, 12, 60, 15, "Objects to loop on :")
self.win.addComboBox("cmbVariable", 180-120-2, 10, 120, 15,True, itemListenerProc=self.cmbVariable_selected)
self.insVariable = self.win.getControl( "cmbVariable" )
self.win.addFixedText("lblFields", 10, 32, 60, 15, "Field to loop on :")
self.win.addComboListBox("lstFields", 180-120-2, 30, 120, 150, False,itemListenerProc=self.lstbox_selected)
self.insField = self.win.getControl( "lstFields" )
self.win.addFixedText("lblName", 12, 187, 60, 15, "Variable name :")
self.win.addEdit("txtName", 180-120-2, 185, 120, 15,)
self.win.addFixedText("lblUName", 8, 207, 60, 15, "Displayed name :")
self.win.addEdit("txtUName", 180-120-2, 205, 120, 15,)
self.win.addButton('btnOK',-2 ,-10,45,15,'Ok', actionListenerProc = self.btnOk_clicked )
self.win.addButton('btnCancel',-2 - 45 - 5 ,-10,45,15,'Cancel', actionListenerProc = self.btnCancel_clicked )
global passwd
self.password = passwd
global url
self.sock=RPCSession(url)
# Variable Declaration
self.sValue=None
self.sObj=None
self.aSectionList=[]
self.sGVariable=sVariable
self.sGDisplayName=sDisplayName
self.aItemList=[]
self.aComponentAdd=[]
self.aObjectList=[]
self.aListRepeatIn=[]
self.aVariableList=[]
        # Call method to perform Enumeration on the Report Document
EnumDocument(self.aItemList,self.aComponentAdd)
        # Check whether Field-1 and Field-4 are available; if so, also get the
        # combobox filled
desktop = getDesktop()
doc = desktop.getCurrentComponent()
docinfo = doc.getDocumentInfo()
        # Check whether Field-1 is available; if not, exit the application
self.sMyHost= ""
if not docinfo.getUserFieldValue(3) == "" and not docinfo.getUserFieldValue(0)=="":
self.sMyHost= docinfo.getUserFieldValue(0)
self.count=0
oParEnum = doc.getTextFields().createEnumeration()
while oParEnum.hasMoreElements():
oPar = oParEnum.nextElement()
if oPar.supportsService("com.sun.star.text.TextField.DropDown"):
self.count += 1
getList(self.aObjectList, self.sMyHost,self.count)
cursor = doc.getCurrentController().getViewCursor()
text = cursor.getText()
tcur = text.createTextCursorByRange(cursor)
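        # keep only the top-level 'List of <model>' entries (label's first word is "List")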
self.aVariableList.extend( filter( lambda obj: obj[:obj.find(" ")] == "List", self.aObjectList ) )
for i in range(len(self.aItemList)):
try:
anItem = self.aItemList[i][1]
component = self.aComponentAdd[i]
if component == "Document":
sLVal = anItem[anItem.find(",'") + 2:anItem.find("')")]
self.aVariableList.extend( filter( lambda obj: obj[:obj.find("(")] == sLVal, self.aObjectList ) )
if tcur.TextSection:
getRecersiveSection(tcur.TextSection,self.aSectionList)
if component in self.aSectionList:
sLVal = anItem[anItem.find(",'") + 2:anItem.find("')")]
self.aVariableList.extend( filter( lambda obj: obj[:obj.find("(")] == sLVal, self.aObjectList ) )
if tcur.TextTable:
if not component == "Document" and component[component.rfind(".") + 1:] == tcur.TextTable.Name:
VariableScope( tcur, self.aVariableList, self.aObjectList, self.aComponentAdd, self.aItemList, component )
except :
import traceback,sys
info = reduce(lambda x, y: x+y, traceback.format_exception(sys.exc_type, sys.exc_value, sys.exc_traceback))
self.logobj.log_write('RepeatIn', LOG_ERROR, info)
self.bModify=bFromModify
if self.bModify==True:
if sObject=="":
self.insVariable.setText("List of "+docinfo.getUserFieldValue(3))
self.insField.addItem("objects",self.win.getListBoxItemCount("lstFields"))
self.win.setEditText("txtName", sVariable)
self.win.setEditText("txtUName",sDisplayName)
self.sValue= "objects"
else:
sItem=""
for anObject in self.aObjectList:
if anObject[:anObject.find("(")] == sObject:
sItem = anObject
self.insVariable.setText( sItem )
genTree(
sItem[sItem.find("(")+1:sItem.find(")")],
self.aListRepeatIn,
self.insField,
self.sMyHost,
2,
ending=['one2many','many2many'],
recur=['one2many','many2many']
)
self.sValue= self.win.getListBoxItem("lstFields",self.aListRepeatIn.index(sFields))
for var in self.aVariableList:
if var[:8] <> 'List of ':
self.model_ids = self.sock.execute(database, uid, self.password, 'ir.model' , 'search', [('model','=',var[var.find("(")+1:var.find(")")])])
else:
self.model_ids = self.sock.execute(database, uid, self.password, 'ir.model' , 'search', [('model','=',var[8:])])
fields=['name','model']
self.model_res = self.sock.execute(database, uid, self.password, 'ir.model', 'read', self.model_ids,fields)
if self.model_res <> []:
if var[:8]<>'List of ':
self.insVariable.addItem(var[:var.find("(")+1] + self.model_res[0]['name'] + ")" ,self.insVariable.getItemCount())
else:
self.insVariable.addItem('List of ' + self.model_res[0]['name'] ,self.insVariable.getItemCount())
else:
self.insVariable.addItem(var ,self.insVariable.getItemCount())
self.win.doModalDialog("lstFields",self.sValue)
else:
ErrorDialog("Please Select Appropriate module" ,"Create new report from: \nOpenERP -> Open a New Report")
self.win.endExecute()
def lstbox_selected(self, oItemEvent):
sItem=self.win.getListBoxSelectedItem("lstFields")
sMain=self.aListRepeatIn[self.win.getListBoxSelectedItemPos("lstFields")]
if self.bModify==True:
self.win.setEditText("txtName", self.sGVariable)
self.win.setEditText("txtUName",self.sGDisplayName)
else:
self.win.setEditText("txtName",sMain[sMain.rfind("/")+1:])
self.win.setEditText("txtUName","|-."+sItem[sItem.rfind("/")+1:]+".-|")
def cmbVariable_selected(self, oItemEvent):
if self.count > 0 :
desktop=getDesktop()
doc =desktop.getCurrentComponent()
docinfo=doc.getDocumentInfo()
self.win.removeListBoxItems("lstFields", 0, self.win.getListBoxItemCount("lstFields"))
sItem=self.win.getComboBoxText("cmbVariable")
for var in self.aVariableList:
if var[:8]=='List of ':
if var[:8]==sItem[:8]:
sItem = var
elif var[:var.find("(")+1] == sItem[:sItem.find("(")+1]:
sItem = var
self.aListRepeatIn=[]
data = ( sItem[sItem.rfind(" ") + 1:] == docinfo.getUserFieldValue(3) ) and docinfo.getUserFieldValue(3) or sItem[sItem.find("(")+1:sItem.find(")")]
genTree( data, self.aListRepeatIn, self.insField, self.sMyHost, 2, ending=['one2many','many2many'], recur=['one2many','many2many'] )
self.win.selectListBoxItemPos("lstFields", 0, True )
else:
sItem=self.win.getComboBoxText("cmbVariable")
for var in self.aVariableList:
if var[:8]=='List of ' and var[:8] == sItem[:8]:
sItem = var
if sItem.find(".")==-1:
temp=sItem[sItem.rfind("x_"):]
else:
temp=sItem[sItem.rfind(".")+1:]
self.win.setEditText("txtName",temp)
self.win.setEditText("txtUName","|-."+temp+".-|")
self.insField.addItem("objects",self.win.getListBoxItemCount("lstFields"))
self.win.selectListBoxItemPos("lstFields", 0, True )
def btnOk_clicked(self, oActionEvent):
desktop=getDesktop()
doc = desktop.getCurrentComponent()
cursor = doc.getCurrentController().getViewCursor()
selectedItem = self.win.getListBoxSelectedItem( "lstFields" )
selectedItemPos = self.win.getListBoxSelectedItemPos( "lstFields" )
txtName = self.win.getEditText( "txtName" )
txtUName = self.win.getEditText( "txtUName" )
if selectedItem != "" and txtName != "" and txtUName != "":
sKey=u""+ txtUName
if selectedItem == "objects":
sValue=u"[[ repeatIn(" + selectedItem + ",'" + txtName + "') ]]"
else:
sObjName=self.win.getComboBoxText("cmbVariable")
sObjName=sObjName[:sObjName.find("(")]
sValue=u"[[ repeatIn(" + sObjName + self.aListRepeatIn[selectedItemPos].replace("/",".") + ",'" + txtName +"') ]]"
if self.bModify == True:
oCurObj = cursor.TextField
oCurObj.Items = (sKey,sValue)
oCurObj.update()
else:
oInputList = doc.createInstance("com.sun.star.text.TextField.DropDown")
if self.win.getListBoxSelectedItem("lstFields") == "objects":
oInputList.Items = (sKey,sValue)
doc.Text.insertTextContent(cursor,oInputList,False)
else:
sValue=u"[[ repeatIn(" + sObjName + self.aListRepeatIn[selectedItemPos].replace("/",".") + ",'" + txtName +"') ]]"
if cursor.TextTable==None:
oInputList.Items = (sKey,sValue)
doc.Text.insertTextContent(cursor,oInputList,False)
else:
oInputList.Items = (sKey,sValue)
widget = ( cursor.TextTable or selectedItem <> 'objects' ) and cursor.TextTable.getCellByName( cursor.Cell.CellName ) or doc.Text
widget.insertTextContent(cursor,oInputList,False)
self.win.endExecute()
else:
ErrorDialog("Please fill appropriate data in Object Field or Name field \nor select particular value from the list of fields.")
def btnCancel_clicked(self, oActionEvent):
self.win.endExecute()
if __name__<>"package" and __name__=="__main__":
RepeatIn()
elif __name__=="package":
g_ImplementationHelper = unohelper.ImplementationHelper()
g_ImplementationHelper.addImplementation( RepeatIn, "org.openoffice.openerp.report.repeatln", ("com.sun.star.task.Job",),)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
eoyilmaz/anima
|
anima/env/fusion/__init__.py
|
1
|
49729
|
# -*- coding: utf-8 -*-
import sys
exceptions = None
if sys.version_info[0] >= 3:
exceptions = (ImportError, ModuleNotFoundError)
else:
exceptions = ImportError
try:
# for Fusion 6 and 7
import PeyeonScript as bmf
except exceptions:
# for Fusion 8+
try:
# for Fusion inside Resolve
import BlackmagicFusion as bmf
except exceptions:
from anima.env import blackmagic as bmd
bmf = bmd.get_bmd()
from anima import logger
from anima.env import empty_reference_resolution
from anima.env.base import EnvironmentBase
from anima.recent import RecentFileManager
class Fusion(EnvironmentBase):
"""the fusion environment class
"""
name = "Fusion"
extensions = ['.comp']
fusion_formats = {
"Multimedia": {
"id": 0,
"Width": 320,
"Height": 240,
"AspectX": 1.0,
"AspectY": 1.0,
"Rate": 15.0
},
"NTSC (D1)": {
"id": 1,
"Width": 720,
"Height": 486,
"AspectX": 0.9,
"AspectY": 1.0,
"Rate": 29.97
},
"NTSC (DV)": {
"id": 2,
"Width": 720,
"Height": 480,
"AspectX": 0.9,
"AspectY": 1.0,
"Rate": 29.97
},
"NTSC (Perception)": {
"id": 3,
"Width": 720,
"Height": 480,
"AspectX": 0.9,
"AspectY": 1.0,
"Rate": 29.97
},
"NTSC (Square Pixel)": {
"id": 4,
"Width": 640,
"Height": 480,
"AspectX": 1.0,
"AspectY": 1.0,
"Rate": 29.97
},
"NTSC 16:9": {
"id": 5,
"Width": 720,
"Height": 486,
"AspectX": 1.2,
"AspectY": 1.0,
"Rate": 29.97
},
"PAL / SECAM (D1)": {
"id": 6,
"Width": 720,
"Height": 576,
"AspectX": 1.0,
"AspectY": 0.9375,
"Rate": 25
},
"PAL / SECAM (Square Pixel)": {
"id": 7,
"Width": 768,
"Height": 576,
"AspectX": 1.0,
"AspectY": 1.0,
"Rate": 25
},
"PALplus 16:9": {
"id": 8,
"Width": 720,
"Height": 576,
"AspectX": 1.0,
"AspectY": 0.703125,
"Rate": 25
},
"HDTV 720": {
"id": 9,
"Width": 1280,
"Height": 720,
"AspectX": 1.0,
"AspectY": 1.0,
"Rate": 30
},
"HDTV 1080": {
"id": 10,
"Width": 1920,
"Height": 1080,
"AspectX": 1.0,
"AspectY": 1.0,
"Rate": 30
},
"D16": {
"id": 11,
"Width": 2880,
"Height": 2304,
"AspectX": 1.0,
"AspectY": 1.0,
"Rate": 24
},
"2K Full Aperture (Super 35)": {
"id": 12,
"Width": 2048,
"Height": 1556,
"AspectX": 1.0,
"AspectY": 1.0,
"Rate": 24
},
"4K Full Aperture (Super 35)": {
"id": 13,
"Width": 4096,
"Height": 3112,
"AspectX": 1.0,
"AspectY": 1.0,
"Rate": 24
},
"2K Academy (Regular 35)": {
"id": 14,
"Width": 1828,
"Height": 1332,
"AspectX": 1.0,
"AspectY": 1.0,
"Rate": 24
},
"4K Academy (Regular 35)": {
"id": 15,
"Width": 3656,
"Height": 2664,
"AspectX": 1.0,
"AspectY": 1.0,
"Rate": 24
},
"2K Academy in Full Aperture": {
"id": 16,
"Width": 2048,
"Height": 1556,
"AspectX": 1.0,
"AspectY": 1.0,
"Rate": 24
},
"4K Academy in Full Aperture": {
"id": 17,
"Width": 4096,
"Height": 3112,
"AspectX": 1.0,
"AspectY": 1.0,
"Rate": 24
},
"2K Anamorphic (CinemaScope)": {
"id": 18,
"Width": 1828,
"Height": 1556,
"AspectX": 2.0,
"AspectY": 1.0,
"Rate": 24
},
"4K Anamorphic (CinemaScope)": {
"id": 19,
"Width": 3656,
"Height": 3112,
"AspectX": 2.0,
"AspectY": 1.0,
"Rate": 24
},
"2K 1.85": {
"id": 20,
"Width": 1828,
"Height": 988,
"AspectX": 1.0,
"AspectY": 1.0,
"Rate": 24
},
"4K 1.85": {
"id": 21,
"Width": 3656,
"Height": 1976,
"AspectX": 1.0,
"AspectY": 1.0,
"Rate": 24
},
"3K VistaVision": {
"id": 22,
"Width": 3072,
"Height": 2048,
"AspectX": 1.0,
"AspectY": 1.0,
"Rate": 24
},
"6K VistaVision": {
"id": 23,
"Width": 6144,
"Height": 4096,
"AspectX": 1.0,
"AspectY": 1.0,
"Rate": 24
},
"5K IMAX 70mm": {
"id": 24,
"Width": 5464,
"Height": 4096,
"AspectX": 1.0,
"AspectY": 1.0,
"Rate": 24
},
}
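    # a lookup like fusion_formats["HDTV 1080"] returns the preset frame format,
    # e.g. {"id": 10, "Width": 1920, "Height": 1080, "Rate": 30, ...}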
def __init__(self, name='', version=None):
"""fusion specific init
"""
super(Fusion, self).__init__(name=name, version=version)
# and add you own modifications to __init__
# self.fusion = bmd.scriptapp("Fusion")
# self.fusion = bmd.get_fusion()
self.fusion = bmf.scriptapp("Fusion")
self.fusion_prefs = self.fusion.GetPrefs()['Global']
# update name with version
self.name = 'Fusion%s' % self.fusion.GetAttrs("FUSIONS_Version").split('.')[0]
self.comp = self.fusion.GetCurrentComp()
self.comp_prefs = self.comp.GetPrefs()['Comp']
self._main_output_node_name = "Main_Output"
def save_as(self, version, run_pre_publishers=True):
""""the save action for fusion environment
uses Fusions own python binding
"""
# set the extension to '.comp'
# refresh the current comp
self.comp = self.fusion.GetCurrentComp()
from stalker import Version
assert isinstance(version, Version)
# its a new version please update the paths
version.update_paths()
version.extension = self.extensions[0]
version.created_with = self.name
# set project_directory
import os
self.project_directory = os.path.dirname(version.absolute_path)
# set range from the shot
self.set_range_from_shot(version)
# create the main write node
self.create_main_saver_node(version)
# replace read and write node paths
# self.replace_external_paths()
# create the path before saving
try:
os.makedirs(version.absolute_path)
except OSError:
# path already exists OSError
pass
version_full_path = os.path.normpath(version.absolute_full_path)
self.comp.Lock()
self.comp.Save(version_full_path if sys.version_info[0] >= 3 else version_full_path.encode())
self.comp.Unlock()
# create a local copy
self.create_local_copy(version)
rfm = RecentFileManager()
rfm.add(self.name, version.absolute_full_path)
return True
def set_range_from_shot(self, version):
"""sets the frame range from the Shot entity if this version is related
to one.
:param version:
:return:
"""
# check if this is a shot related task
shot = self.get_shot(version)
if shot:
# use the shot image_format
fps = shot.fps
imf = shot.image_format
# set frame ranges
self.set_frame_range(
start_frame=shot.cut_in,
end_frame=shot.cut_out,
)
else:
# use the Project image_format
fps = version.task.project.fps
imf = version.task.project.image_format
# set comp resolution and fps
if imf:
self.comp.SetPrefs({
# Image Format
"Comp.FrameFormat.Width": imf.width,
"Comp.FrameFormat.Height": imf.height,
"Comp.FrameFormat.AspectY": imf.pixel_aspect,
"Comp.FrameFormat.AspectX": imf.pixel_aspect,
# FPS
"Comp.FrameFormat.Rate": fps,
# set project frame format to 16bit
"Comp.FrameFormat.DepthFull": 2.0,
"Comp.FrameFormat.DepthLock": True,
})
def set_shot_from_range(self, version):
"""sets the Shot.cut_in and Shot.cut_out attributes from the current frame range if the current task is related
to a Stalker Shot instance.
:param Stalker.Version version: A Stalker Version instance.
:return:
"""
# check if this is a shot related task
is_shot_related_task = False
shot = None
from stalker import Shot
for task in version.task.parents:
if isinstance(task, Shot):
is_shot_related_task = True
shot = task
break
if is_shot_related_task and shot:
# set frame ranges
cut_in, cut_out = self.get_frame_range()
shot.cut_in = int(cut_in)
shot.cut_out = int(cut_out)
from stalker.db.session import DBSession
DBSession.add(shot)
DBSession.commit()
def export_as(self, version):
"""the export action for nuke environment
"""
# its a new version please update the paths
version.update_paths()
# set the extension to '.comp'
version.extension = self.extensions[0]
version.created_with = self.name
raise NotImplementedError(
'export_as() is not implemented yet for Fusion'
)
# # create a local copy
# self.create_local_copy(version)
def open(self, version, force=False, representation=None,
reference_depth=0, skip_update_check=False):
"""the open action for nuke environment
"""
import os
version_full_path = os.path.normpath(version.absolute_full_path)
# # delete all the comps and open new one
# comps = self.fusion.GetCompList().values()
# for comp_ in comps:
# comp_.Close()
self.fusion.LoadComp(version_full_path if sys.version_info[0] >= 3 else version_full_path.encode())
self.comp.Lock()
# set the project_directory
# get the current comp fist
self.comp = self.fusion.GetCurrentComp()
self.project_directory = os.path.dirname(version.absolute_path)
# update the savers
self.create_main_saver_node(version)
        # file paths coming from a different OS should be replaced with paths suitable for the current one
# update loaders
self.fix_loader_paths()
self.comp.Unlock()
rfm = RecentFileManager()
rfm.add(self.name, version.absolute_full_path)
# return True to specify everything was ok and an empty list
# for the versions those needs to be updated
return empty_reference_resolution()
def import_(self, version):
"""the import action for nuke environment
"""
# nuke.nodePaste(version.absolute_full_path)
return True
def get_current_version(self):
"""Finds the Version instance from the current open file.
If it can't find any then returns None.
:return: :class:`~oyProjectManager.models.version.Version`
"""
# full_path = self._root.knob('name').value()
import os
full_path = os.path.normpath(
self.comp.GetAttrs()['COMPS_FileName']
).replace('\\', '/')
return self.get_version_from_full_path(full_path)
def get_version_from_recent_files(self):
"""It will try to create a
:class:`~oyProjectManager.models.version.Version` instance by looking
at the recent files list.
It will return None if it can not find one.
:return: :class:`~oyProjectManager.models.version.Version`
"""
# full_path = self.fusion_prefs["LastCompFile"]
# return self.get_version_from_full_path(full_path)
version = None
rfm = RecentFileManager()
try:
recent_files = rfm[self.name]
except KeyError:
logger.debug('no recent files')
recent_files = None
if recent_files is not None:
for i in range(len(recent_files)):
version = self.get_version_from_full_path(recent_files[i])
if version is not None:
break
logger.debug("version from recent files is: %s" % version)
return version
def get_version_from_project_dir(self):
"""Tries to find a Version from the current project directory
:return: :class:`~oyProjectManager.models.version.Version`
"""
versions = self.get_versions_from_path(self.project_directory)
version = None
if versions and len(versions):
version = versions[0]
return version
def get_last_version(self):
"""gets the file name from nuke
"""
version = self.get_current_version()
# read the recent file list
if version is None:
version = self.get_version_from_recent_files()
# get the latest possible Version instance by using the workspace path
if version is None:
version = self.get_version_from_project_dir()
return version
def get_frame_range(self):
"""returns the current frame range
"""
start_frame = self.comp.GetAttrs()['COMPN_RenderStart']
end_frame = self.comp.GetAttrs()['COMPN_RenderEnd']
return start_frame, end_frame
def set_frame_range(self, start_frame=1, end_frame=100,
adjust_frame_range=False):
"""sets the start and end frame range
"""
self.comp.SetAttrs(
{
"COMPN_GlobalStart": start_frame,
"COMPN_RenderStart": start_frame,
"COMPN_GlobalEnd": end_frame,
"COMPN_RenderEnd": end_frame,
}
)
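        # e.g. set_frame_range(1001, 1100) moves both the global and the
        # render in/out points of the current comp in one SetAttrs call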
def set_fps(self, fps=25):
"""sets the current fps
"""
pass
def get_fps(self):
"""returns the current fps
"""
return None
def fix_loader_paths(self):
"""fixes loader paths mainly from one OS to another
"""
import os
# get all loaders
for loader in self.comp.GetToolList(False, 'Loader').values():
path = self.get_node_input_entry_value_by_name(loader, 'Clip')
if os.path.sep not in path:
# replace '\\' with os.path.sep
path = path.replace('/', '\\').replace('\\', os.path.sep)
self.set_node_input_entry_by_name(loader, 'Clip', path)
def get_node_input_entry_by_name(self, node, key):
"""returns the Input List entry by input list entry name
:param node: The node
:param string key: The entry name
:return:
"""
node_input_list = node.GetInputList()
for input_entry_key in node_input_list.keys():
input_entry = node_input_list[input_entry_key]
input_id = input_entry.GetAttrs()['INPS_ID']
if input_id == key:
return input_entry
def get_node_input_entry_value_by_name(self, node, key):
"""returns the Input List entry by input list entry name
:param node: The node
:param string key: The entry name
:return:
"""
input_entry = self.get_node_input_entry_by_name(node, key)
return input_entry[0]
def set_node_input_entry_by_name(self, node, key, value):
"""sets the Input List entry value by Input ID
:param node: The node
        :param string key: The INPS_ID of the key
:param value: The value
:return:
"""
input_entry = self.get_node_input_entry_by_name(node, key)
input_entry[0] = value
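        # e.g. fix_loader_paths() above uses this as
        # set_node_input_entry_by_name(loader, 'Clip', path)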
def get_main_saver_node(self):
"""Returns the main saver nodes in the scene or an empty list.
:return: list
"""
# list all the saver nodes in the current file
all_saver_nodes = self.comp.GetToolList(False, 'Saver').values()
saver_nodes = []
for saver_node in all_saver_nodes:
if saver_node.GetAttrs('TOOLS_Name').startswith(self._main_output_node_name):
saver_nodes.append(saver_node)
return saver_nodes
def create_node_tree(self, node_tree):
"""Creates a node tree from the given node tree.
The node_tree is a Python dictionary showing node types and attribute
values. Also it can be a list of dictionaries to create more complex
trees.
        Each node_tree creates only one node network. The format of the
dictionary should be as follows.
node_tree: {
            'type': <- The fusion node type of the topmost node
'attr': {
<- A dictionary that contains attribute names and values.
'Input': {
'type': --- type name of the connected node
'attr': {
<- attribute values ->
}
}
},
}
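
        A minimal, illustrative example (the values here are placeholders):

        create_node_tree({
            'type': 'Saver',
            'input_list': {'Clip': 'Project:Outputs/out.exr', 'CreateDir': 1},
            'connected_to': {'Input': {'type': 'OCIOColorSpace'}}
        })

        creates a Saver tool fed by a newly created OCIOColorSpace node.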
:param [dict, list] node_tree: A dictionary showing the node tree
attributes.
:return:
"""
# allow it to accept both a list or dict
if isinstance(node_tree, list):
created_root_nodes = []
for item in node_tree:
created_root_nodes.append(
self.create_node_tree(item)
)
return created_root_nodes
node_type = node_tree['type']
self.comp.Lock()
node = self.comp.AddTool(node_type)
self.comp.Unlock()
# attributes
if 'attr' in node_tree:
attributes = node_tree['attr']
for key in attributes:
value = attributes[key]
if isinstance(value, dict):
new_node = self.create_node_tree(value)
node.Input = new_node
else:
node.SetAttrs({key: value})
# input lists
if 'input_list' in node_tree:
input_list = node_tree['input_list']
for key in input_list:
node_input_list = node.GetInputList()
for input_entry_key in node_input_list.keys():
input_entry = node_input_list[input_entry_key]
input_id = input_entry.GetAttrs()['INPS_ID']
if input_id == key:
value = input_list[key]
input_entry[0] = value
break
# ref_id
if 'ref_id' in node_tree:
node.SetData('ref_id', node_tree['ref_id'])
# connected to
if 'connected_to' in node_tree:
connected_to = node_tree['connected_to']
if 'Input' in connected_to:
input_node = self.create_node_tree(connected_to['Input'])
node.Input = input_node
elif 'ref_id' in node_tree['connected_to']:
ref_id = node_tree['connected_to']['ref_id']
print('ref_id: %s' % ref_id)
# find a node with ref_id equals to ref_id that is given in the
# node tree
all_nodes = self.comp.GetToolList().values()
for r_node in all_nodes:
node_ref_id = r_node.GetData('ref_id')
print('node_ref_id: %s' % node_ref_id)
if node_ref_id == ref_id:
node.Input = r_node
break
return node
def output_path_generator(self, version, file_format):
"""helper function to generate the output path
:param version: Stalker Version instance
:param str file_format: A string showing the file format. Ex: tga, exr
etc.
:return:
"""
# generate the data needed
# the output path
file_name_buffer = []
template_kwargs = {}
# if this is a shot related task set it to shots resolution
version_sig_name = self.get_significant_name(version, include_project_code=False)
file_name_buffer.append('%(version_sig_name)s.001.%(format)s')
template_kwargs.update({
'version_sig_name': version_sig_name,
'format': file_format
})
output_file_name = ''.join(file_name_buffer) % template_kwargs
# check if it is a stereo comp
# if it is enable separate view rendering
import os
output_file_path = os.path.join(
version.absolute_path,
'Outputs',
version.take_name,
'v%03d' % version.version_number,
file_format
)
# create the dir
try:
os.makedirs(output_file_path)
except OSError:
# path exists
pass
output_file_full_path = os.path.join(
output_file_path,
output_file_name
).replace('\\', '/')
# make the path Project: relative
output_file_full_path = 'Project:%s' % os.path.relpath(
output_file_full_path,
os.path.dirname(version.absolute_path)
)
# set the output path
if sys.version_info[0] >= 3:
return '%s' % os.path.normpath(output_file_full_path)
else:
return '%s' % os.path.normpath(output_file_full_path).encode()
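        # the returned path is 'Project:'-relative, roughly of the form (illustrative):
        #   Project:<task_dir>/Outputs/<take_name>/v###/<ext>/<version_sig_name>.001.<ext>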
def output_node_name_generator(self, file_format):
return '%s_%s' % (self._main_output_node_name, file_format)
def create_slate_node(self, version):
"""Creates the slate node
:param version: A Stalker Version instance
:return:
"""
# if the channels are animated, set new keyframes
# first try to find the slate tool
slate_node = self.comp.FindTool("MainSlate")
if not slate_node:
# create one
self.comp.Lock()
self.comp.DoAction("AddSetting", {"filename": "Macros:/AnimaSlate.setting"})
slate_node = self.comp.FindTool("AnimaSlate1")
self.comp.Unlock()
slate_node.SetAttrs({"TOOLS_Name": "MainSlate", "TOOLB_Locked": False})
# set slate attributes
from anima.env.fusion import utils
# Thumbnail
shot = self.get_shot(version)
imf = None
if shot:
if shot.thumbnail:
import os
thumbnail_full_path = os.path.expandvars(shot.thumbnail.full_path)
slate_node.Input1 = thumbnail_full_path
if shot:
imf = shot.image_format
else:
imf = version.task.project.image_format
# Shot Types
# TODO: For now use Netflix format, extend it later on
from anima.utils.report import NetflixReporter
slate_node.Input8 = ", ".join(NetflixReporter.generate_shot_methodologies(shot))
# Shot Description
from anima.utils import text_splitter
split_description = text_splitter(shot.description, 40)
slate_node.Input9 = "\n".join(split_description[0:3])
slate_node.Input10 = "\n".join(split_description[0:3])
# Submission Note
slate_node.Input11 = ""
# Shot Name
slate_node.Input12 = shot.name
# Episode and Sequence
seq = None
if shot.sequences:
seq = shot.sequences[0]
slate_node.Input14 = seq.name
slate_node.Input15 = seq.name
# Scene Name
# Use shot name for now
parts = shot.name.split("_")
try:
scene_name = parts[2]
except IndexError:
scene_name = ''
slate_node.Input16 = scene_name
# Frames
slate_node.Input17 = shot.cut_out - shot.cut_in + 1
else:
# Frames
slate_node.Input17 = ""
# Show Name
slate_node.Input4 = version.task.project.name
# Version Name
slate_node.Input5 = "%s_v%03d" % (version.nice_name, version.version_number)
# Submitting For
slate_node.Input6 = "WIP"
# Date
import datetime
today = datetime.datetime.today()
date_time_format = "%Y-%m-%d"
slate_node.Input7 = today.strftime(date_time_format)
# Vendor
from stalker import Studio
studio = Studio.query.first()
if studio:
slate_node.Input13 = studio.name
# Media Color
slate_node.Input18 = ""
# Resolution
# create a resize node or use the immediate resize node if any
# resize_node = slate_node.FindMainOutput(1)
# resize_node = self.comp.FindTool("SlateResize")
# if resize_node:
# # check if this is a Resize node
# if not resize_node.GetAttrs("TOOLS_RegID") == "BetterResize":
# resize_node = None
#
# if not resize_node:
# # create a new one
# resize_node = self.comp.BetterResize()
# resize_node.SetAttrs({"TOOLS_Name": "SlateResize", "TOOLB_Locked": False})
#
# resize_node.Input = slate_node
# if imf:
# resize_node.Width = int(3840 * float(imf.height) / 2160)
# resize_node.Height = imf.height
# resize_node.KeepAspect = True
# # create the SlateMerge tool
# slate_merge_node = self.comp.FindTool("SlateMerge")
# if slate_merge_node:
# # check if this is a Merge node
# if not slate_merge_node.GetAttrs("TOOLS_RegID") == "Merge":
# slate_merge_node = None
#
# if not slate_merge_node:
# # create a new one
# slate_merge_node = self.comp.Merge()
# slate_merge_node.SetAttrs({"TOOLS_Name": "SlateMerge", "TOOLB_Locked": False})
#
# slate_merge_node.Foreground = resize_node
# Animate the slate_merge_node
# slate_merge_node.Blend = self.comp.BezierSpline({})
# if shot:
# slate_merge_node.Blend[shot.cut_in - 1] = 1
# slate_merge_node.Blend[shot.cut_in] = 0
# print("Using resolve setup!")
# connect the output to MediaOut
media_out_node = None
i = 0
import time
while not media_out_node and i < 10:
media_out_node = self.comp.FindTool("MediaOut1")
if not media_out_node:
print("no MediaOut1 node, waiting for 1 sec!")
time.sleep(1)
else:
print("found MediaOut1 node!")
media_out_node.Input = slate_node
i += 1
return slate_node
def create_main_saver_node(self, version):
"""Creates the default saver node if there is no created before.
Creates the default saver nodes if there isn't any existing outputs,
and updates the ones that is already created
"""
fps = 25
if version:
project = version.task.project
fps = project.fps
import uuid
random_ref_id = uuid.uuid4().hex
output_format_data = [
{
'name': 'jpg',
'node_tree': {
'type': 'Saver',
'attr': {
'TOOLS_Name': self.output_node_name_generator('jpg'),
},
'input_list': {
'Clip': self.output_path_generator(version, 'jpg'),
'CreateDir': 1,
'ProcessRed': 1,
'ProcessGreen': 1,
'ProcessBlue': 1,
'ProcessAlpha': 0,
'OutputFormat': 'JPEGFormat',
'JpegFormat.Quality': 85,
},
'connected_to': {
'Input': {
"type": "OCIOColorSpace",
"ref_id": random_ref_id,
"input_list": {
"OCIOConfig": "LUTs:/OpenColorIO-Configs/aces_1.2/config.ocio",
"SourceSpace": "ACES - ACES2065-1",
"OutputSpace": "Output - Rec.709",
},
'connected_to': {
'Input': {
"type": "OCIOColorSpace",
"input_list": {
"OCIOConfig": "LUTs:/OpenColorIO-Configs/aces_1.2/config.ocio",
"SourceSpace": "Utility - Linear - sRGB",
"OutputSpace": "ACES - ACES2065-1",
},
}
}
}
}
},
},
{
'name': 'tga',
'node_tree': {
'type': 'Saver',
'attr': {
'TOOLS_Name': self.output_node_name_generator('tga'),
},
'input_list': {
'Clip': self.output_path_generator(version, 'tga'),
'CreateDir': 1,
'ProcessRed': 1,
'ProcessGreen': 1,
'ProcessBlue': 1,
'ProcessAlpha': 0,
'OutputFormat': 'TGAFormat',
},
'connected_to': {
'ref_id': random_ref_id
}
},
},
{
'name': 'exr',
'node_tree': {
'type': 'Saver',
'attr': {
'TOOLS_Name': self.output_node_name_generator('exr'),
},
'input_list': {
'Clip': self.output_path_generator(version, 'exr'),
'CreateDir': 1,
'ProcessRed': 1,
'ProcessGreen': 1,
'ProcessBlue': 1,
'ProcessAlpha': 0,
'OutputFormat': 'OpenEXRFormat',
'OpenEXRFormat.Depth': 1, # 16-bit float
'OpenEXRFormat.RedEnable': 1,
'OpenEXRFormat.GreenEnable': 1,
'OpenEXRFormat.BlueEnable': 1,
'OpenEXRFormat.AlphaEnable': 0,
'OpenEXRFormat.ZEnable': 0,
'OpenEXRFormat.CovEnable': 0,
'OpenEXRFormat.ObjIDEnable': 0,
'OpenEXRFormat.MatIDEnable': 0,
'OpenEXRFormat.UEnable': 0,
'OpenEXRFormat.VEnable': 0,
'OpenEXRFormat.XNormEnable': 0,
'OpenEXRFormat.YNormEnable': 0,
'OpenEXRFormat.ZNormEnable': 0,
'OpenEXRFormat.XVelEnable': 0,
'OpenEXRFormat.YVelEnable': 0,
'OpenEXRFormat.XRevVelEnable': 0,
'OpenEXRFormat.YRevVelEnable': 0,
'OpenEXRFormat.XPosEnable': 0,
'OpenEXRFormat.YPosEnable': 0,
'OpenEXRFormat.ZPosEnable': 0,
'OpenEXRFormat.XDispEnable': 0,
'OpenEXRFormat.YDispEnable': 0,
},
'connected_to': {
'ref_id': random_ref_id
}
}
},
{
'name': 'mp4',
'node_tree': {
'type': 'Saver',
'attr': {
'TOOLS_Name': self.output_node_name_generator('mp4'),
},
'input_list': {
'Clip': self.output_path_generator(version, 'mp4'),
'CreateDir': 1,
'ProcessRed': 1,
'ProcessGreen': 1,
'ProcessBlue': 1,
'ProcessAlpha': 0,
'OutputFormat': 'QuickTimeMovies',
'ProcessMode': 'Auto',
'SaveFrames': 'Full',
'QuickTimeMovies.Compression': 'H.264_avc1',
'QuickTimeMovies.Quality': 95.0,
'QuickTimeMovies.FrameRateFps': fps,
'QuickTimeMovies.KeyFrames': 5,
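                        # the render scripts below force Fusion to render one
                        # frame at a time while the QuickTime movie is being
                        # written, then restore the previous FramesAtOnce pref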
'StartRenderScript': 'frames_at_once = comp:GetPrefs("Comp.Memory.FramesAtOnce")\ncomp:SetPrefs("Comp.Memory.FramesAtOnce", 1)',
'EndRenderScript': 'comp:SetPrefs("Comp.Memory.FramesAtOnce", frames_at_once)',
},
'connected_to': {
'ref_id': random_ref_id
}
}
},
{
'name': 'mov',
'node_tree': {
'type': 'Saver',
'attr': {
'TOOLS_Name': self.output_node_name_generator('mov'),
},
'input_list': {
'Clip': self.output_path_generator(version, 'mov'),
'CreateDir': 1,
'ProcessRed': 1,
'ProcessGreen': 1,
'ProcessBlue': 1,
'ProcessAlpha': 0,
'OutputFormat': 'QuickTimeMovies',
'ProcessMode': 'Auto',
'SaveFrames': 'Full',
'QuickTimeMovies.Compression': 'Apple ProRes 422 HQ_apch',
'QuickTimeMovies.Quality': 95.0,
'QuickTimeMovies.FrameRateFps': fps,
'QuickTimeMovies.KeyFrames': 5,
'QuickTimeMovies.LimitDataRate': 0.0,
'QuickTimeMovies.DataRateK': 1000.0,
'QuickTimeMovies.Advanced': 1.0,
'QuickTimeMovies.Primaries': 0.0,
'QuickTimeMovies.Transfer': 0.0,
'QuickTimeMovies.Matrix': 0.0,
'QuickTimeMovies.PixelAspectRatio': 0.0,
'QuickTimeMovies.ErrorDiffusion': 1.0,
'QuickTimeMovies.SaveAlphaChannel': 1.0,
'StartRenderScript': 'frames_at_once = comp:GetPrefs("Comp.Memory.FramesAtOnce")\ncomp:SetPrefs("Comp.Memory.FramesAtOnce", 1)',
'EndRenderScript': 'comp:SetPrefs("Comp.Memory.FramesAtOnce", frames_at_once)',
},
'connected_to': {
'ref_id': random_ref_id
}
}
},
]
        if version and version.task.type and version.task.type.name == 'Plate':
# create a different type of outputs
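            # everything stays in ACES2065-1 here; only the review outputs
            # (jpg and mov) are converted to a display color space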
output_format_data = [
{
'name': 'jpg',
'node_tree': {
'type': 'Saver',
'attr': {
'TOOLS_Name': self.output_node_name_generator('jpg'),
},
'input_list': {
'Clip': self.output_path_generator(version, 'jpg'),
'CreateDir': 1,
'ProcessRed': 1,
'ProcessGreen': 1,
'ProcessBlue': 1,
'ProcessAlpha': 0,
'OutputFormat': 'JPEGFormat',
'JpegFormat.Quality': 85,
},
'connected_to': {
'Input': {
"type": "OCIOColorSpace",
"input_list": {
"OCIOConfig": "LUTs:/OpenColorIO-Configs/aces_1.2/config.ocio",
"SourceSpace": "ACES - ACES2065-1",
"OutputSpace": "Output - sRGB",
},
'connected_to': {
'Input': {
"type": "OCIOColorSpace",
"ref_id": random_ref_id,
"input_list": {
"OCIOConfig": "LUTs:/OpenColorIO-Configs/aces_1.2/config.ocio",
"SourceSpace": "ACES - ACES2065-1",
"OutputSpace": "ACES - ACES2065-1",
},
}
}
}
}
},
},
{
'name': 'exr',
'node_tree': {
'type': 'Saver',
'attr': {
'TOOLS_Name': self.output_node_name_generator('exr'),
},
'input_list': {
'Clip': self.output_path_generator(version, 'exr'),
'CreateDir': 1,
'ProcessRed': 1,
'ProcessGreen': 1,
'ProcessBlue': 1,
'ProcessAlpha': 0,
'OutputFormat': 'OpenEXRFormat',
'OpenEXRFormat.Depth': 1, # 16-bit float
'OpenEXRFormat.RedEnable': 1,
'OpenEXRFormat.GreenEnable': 1,
'OpenEXRFormat.BlueEnable': 1,
'OpenEXRFormat.AlphaEnable': 0,
'OpenEXRFormat.ZEnable': 0,
'OpenEXRFormat.CovEnable': 0,
'OpenEXRFormat.ObjIDEnable': 0,
'OpenEXRFormat.MatIDEnable': 0,
'OpenEXRFormat.UEnable': 0,
'OpenEXRFormat.VEnable': 0,
'OpenEXRFormat.XNormEnable': 0,
'OpenEXRFormat.YNormEnable': 0,
'OpenEXRFormat.ZNormEnable': 0,
'OpenEXRFormat.XVelEnable': 0,
'OpenEXRFormat.YVelEnable': 0,
'OpenEXRFormat.XRevVelEnable': 0,
'OpenEXRFormat.YRevVelEnable': 0,
'OpenEXRFormat.XPosEnable': 0,
'OpenEXRFormat.YPosEnable': 0,
'OpenEXRFormat.ZPosEnable': 0,
'OpenEXRFormat.XDispEnable': 0,
'OpenEXRFormat.YDispEnable': 0,
},
'connected_to': {
'Input': {
"type": "OCIOColorSpace",
"input_list": {
"OCIOConfig": "LUTs:/OpenColorIO-Configs/aces_1.2/config.ocio",
"SourceSpace": "ACES - ACES2065-1",
"OutputSpace": "ACES - ACES2065-1",
},
'connected_to': {
"ref_id": random_ref_id,
}
}
}
},
},
{
'name': 'mov',
'node_tree': {
'type': 'Saver',
'attr': {
'TOOLS_Name': self.output_node_name_generator('mov'),
},
'input_list': {
'Clip': self.output_path_generator(version, 'mov'),
'CreateDir': 1,
'ProcessRed': 1,
'ProcessGreen': 1,
'ProcessBlue': 1,
'ProcessAlpha': 0,
'OutputFormat': 'QuickTimeMovies',
'ProcessMode': 'Auto',
'SaveFrames': 'Full',
'QuickTimeMovies.Compression': 'Apple ProRes 422 HQ_apch',
'QuickTimeMovies.Quality': 95.0,
'QuickTimeMovies.FrameRateFps': fps,
'QuickTimeMovies.KeyFrames': 5,
'QuickTimeMovies.LimitDataRate': 0.0,
'QuickTimeMovies.DataRateK': 1000.0,
'QuickTimeMovies.Advanced': 1.0,
'QuickTimeMovies.Primaries': 0.0,
'QuickTimeMovies.Transfer': 0.0,
'QuickTimeMovies.Matrix': 0.0,
'QuickTimeMovies.PixelAspectRatio': 0.0,
'QuickTimeMovies.ErrorDiffusion': 1.0,
'QuickTimeMovies.SaveAlphaChannel': 1.0,
'StartRenderScript': 'frames_at_once = comp:GetPrefs("Comp.Memory.FramesAtOnce")\ncomp:SetPrefs("Comp.Memory.FramesAtOnce", 1)',
'EndRenderScript': 'comp:SetPrefs("Comp.Memory.FramesAtOnce", frames_at_once)',
},
'connected_to': {
'Input': {
"type": "OCIOColorSpace",
"input_list": {
"OCIOConfig": "LUTs:/OpenColorIO-Configs/aces_1.2/config.ocio",
"SourceSpace": "ACES - ACES2065-1",
"OutputSpace": "Output - Rec.709",
},
'connected_to': {
"ref_id": random_ref_id,
}
}
}
},
},
]
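        # STMap outputs are a special case: a single 32-bit float EXR saver
        # with no color space conversion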
        if version and version.take_name == "STMap":
output_format_data = [
{
'name': 'exr',
'node_tree': {
'type': 'Saver',
'attr': {
'TOOLS_Name': self.output_node_name_generator('exr'),
},
'input_list': {
'Clip': self.output_path_generator(version, 'exr'),
'CreateDir': 1,
'ProcessRed': 1,
'ProcessGreen': 1,
'ProcessBlue': 1,
'ProcessAlpha': 0,
'OutputFormat': 'OpenEXRFormat',
'OpenEXRFormat.Depth': 2, # 32-bit float
'OpenEXRFormat.RedEnable': 1,
'OpenEXRFormat.GreenEnable': 1,
'OpenEXRFormat.BlueEnable': 1,
'OpenEXRFormat.AlphaEnable': 0,
'OpenEXRFormat.ZEnable': 0,
'OpenEXRFormat.CovEnable': 0,
'OpenEXRFormat.ObjIDEnable': 0,
'OpenEXRFormat.MatIDEnable': 0,
'OpenEXRFormat.UEnable': 0,
'OpenEXRFormat.VEnable': 0,
'OpenEXRFormat.XNormEnable': 0,
'OpenEXRFormat.YNormEnable': 0,
'OpenEXRFormat.ZNormEnable': 0,
'OpenEXRFormat.XVelEnable': 0,
'OpenEXRFormat.YVelEnable': 0,
'OpenEXRFormat.XRevVelEnable': 0,
'OpenEXRFormat.YRevVelEnable': 0,
'OpenEXRFormat.XPosEnable': 0,
'OpenEXRFormat.YPosEnable': 0,
'OpenEXRFormat.ZPosEnable': 0,
'OpenEXRFormat.XDispEnable': 0,
'OpenEXRFormat.YDispEnable': 0,
},
'connected_to': {
'ref_id': random_ref_id
}
}
},
]
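            # STMaps store absolute pixel coordinates in the red/green
            # channels, so both the EXR depth above (2 = 32-bit float) and
            # the comp frame format below are kept at full float precision
            # to avoid quantizing the coordinates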
self.comp.SetPrefs({
# set project frame format to 32bit
"Comp.FrameFormat.DepthFull": 3.0,
"Comp.FrameFormat.DepthLock": True,
})
# selectively generate output format
saver_nodes = self.get_main_saver_node()
for data in output_format_data:
format_name = data['name']
node_tree = data['node_tree']
# now check if a node with the same name exists
format_node = None
format_node_name = self.output_node_name_generator(format_name)
for node in saver_nodes:
node_name = node.GetAttrs('TOOLS_Name')
if node_name.startswith(format_node_name):
format_node = node
break
# create the saver node for this format if missing
if not format_node:
self.create_node_tree(node_tree)
            else:
                # the node already exists, so just update its input values
                if 'input_list' in node_tree:
                    input_list = node_tree['input_list']
                    # query the input list once instead of once per key
                    node_input_list = format_node.GetInputList()
                    for key in input_list:
                        for input_entry_key in node_input_list.keys():
                            input_entry = node_input_list[input_entry_key]
                            input_id = input_entry.GetAttrs()['INPS_ID']
                            if input_id == key:
                                input_entry[0] = input_list[key]
                                break
try:
import os
os.makedirs(
os.path.dirname(
self.output_path_generator(version, format_name)
)
)
except OSError:
# path already exists
pass
@property
def project_directory(self):
"""The project directory.
Set it to the project root, and set all your paths relative to this
directory.
"""
# try to figure it out from the maps
# search for Project path
project_dir = None
maps = self.comp_prefs['Paths'].get('Map', None)
if maps:
project_dir = maps.get('Project:', None)
# if not project_dir:
# # set the map for the project dir
# if self.version:
# project_dir = os.path.dirname(self.version.absolute_path)
# self.project_directory = project_dir
return project_dir
@project_directory.setter
def project_directory(self, project_directory_in):
"""Sets project directory
:param str project_directory_in: the project directory
:return:
"""
import os
project_directory_in = os.path.normpath(project_directory_in)
print('setting project directory to: %s' % project_directory_in)
# set a path map
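        # (str is fine on Python 3; Python 2 needs the path encoded to bytes)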
self.comp.SetPrefs(
{
'Comp.Paths.Map': {
'Project:': project_directory_in if sys.version_info[0] >= 3 else project_directory_in.encode()
}
}
)
|
mit
|
pforret/python-for-android
|
python-build/python-libs/gdata/tests/atom_tests/auth_test.py
|
128
|
1342
|
#!/usr/bin/env python
#
# Copyright (C) 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This module is used for version 2 of the Google Data APIs.
__author__ = '[email protected] (Jeff Scudder)'
import unittest
import atom.auth
import atom.http_core
class BasicAuthTest(unittest.TestCase):
def test_modify_request(self):
http_request = atom.http_core.HttpRequest()
credentials = atom.auth.BasicAuth('Aladdin', 'open sesame')
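    # 'QWxhZGRpbjpvcGVuIHNlc2FtZQ==' is base64('Aladdin:open sesame'),
    # the canonical example from RFC 2617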
self.assert_(credentials.basic_cookie == 'QWxhZGRpbjpvcGVuIHNlc2FtZQ==')
credentials.modify_request(http_request)
self.assert_(http_request.headers[
'Authorization'] == 'Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ==')
def suite():
return unittest.TestSuite((unittest.makeSuite(BasicAuthTest,'test'),))
if __name__ == '__main__':
unittest.main()
|
apache-2.0
|
travisdoor/ArgosRts
|
engine/thirdparty/yaml-cpp/test/gmock-1.7.0/gtest/test/gtest_output_test.py
|
1733
|
12005
|
#!/usr/bin/env python
#
# Copyright 2008, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Tests the text output of Google C++ Testing Framework.
SYNOPSIS
gtest_output_test.py --build_dir=BUILD/DIR --gengolden
# where BUILD/DIR contains the built gtest_output_test_ file.
gtest_output_test.py --gengolden
gtest_output_test.py
"""
__author__ = '[email protected] (Zhanyong Wan)'
import os
import re
import sys
import gtest_test_utils
# The flag for generating the golden file
GENGOLDEN_FLAG = '--gengolden'
CATCH_EXCEPTIONS_ENV_VAR_NAME = 'GTEST_CATCH_EXCEPTIONS'
IS_WINDOWS = os.name == 'nt'
# TODO([email protected]): remove the _lin suffix.
GOLDEN_NAME = 'gtest_output_test_golden_lin.txt'
PROGRAM_PATH = gtest_test_utils.GetTestExecutablePath('gtest_output_test_')
# At least one command we exercise must not have the
# --gtest_internal_skip_environment_and_ad_hoc_tests flag.
COMMAND_LIST_TESTS = ({}, [PROGRAM_PATH, '--gtest_list_tests'])
COMMAND_WITH_COLOR = ({}, [PROGRAM_PATH, '--gtest_color=yes'])
COMMAND_WITH_TIME = ({}, [PROGRAM_PATH,
'--gtest_print_time',
'--gtest_internal_skip_environment_and_ad_hoc_tests',
'--gtest_filter=FatalFailureTest.*:LoggingTest.*'])
COMMAND_WITH_DISABLED = (
{}, [PROGRAM_PATH,
'--gtest_also_run_disabled_tests',
'--gtest_internal_skip_environment_and_ad_hoc_tests',
'--gtest_filter=*DISABLED_*'])
COMMAND_WITH_SHARDING = (
{'GTEST_SHARD_INDEX': '1', 'GTEST_TOTAL_SHARDS': '2'},
[PROGRAM_PATH,
'--gtest_internal_skip_environment_and_ad_hoc_tests',
'--gtest_filter=PassingTest.*'])
GOLDEN_PATH = os.path.join(gtest_test_utils.GetSourceDir(), GOLDEN_NAME)
def ToUnixLineEnding(s):
"""Changes all Windows/Mac line endings in s to UNIX line endings."""
return s.replace('\r\n', '\n').replace('\r', '\n')
def RemoveLocations(test_output):
"""Removes all file location info from a Google Test program's output.
Args:
test_output: the output of a Google Test program.
Returns:
output with all file location info (in the form of
'DIRECTORY/FILE_NAME:LINE_NUMBER: 'or
'DIRECTORY\\FILE_NAME(LINE_NUMBER): ') replaced by
'FILE_NAME:#: '.
"""
return re.sub(r'.*[/\\](.+)(\:\d+|\(\d+\))\: ', r'\1:#: ', test_output)
def RemoveStackTraceDetails(output):
"""Removes all stack traces from a Google Test program's output."""
# *? means "find the shortest string that matches".
return re.sub(r'Stack trace:(.|\n)*?\n\n',
'Stack trace: (omitted)\n\n', output)
def RemoveStackTraces(output):
"""Removes all traces of stack traces from a Google Test program's output."""
# *? means "find the shortest string that matches".
return re.sub(r'Stack trace:(.|\n)*?\n\n', '', output)
def RemoveTime(output):
"""Removes all time information from a Google Test program's output."""
return re.sub(r'\(\d+ ms', '(? ms', output)
def RemoveTypeInfoDetails(test_output):
"""Removes compiler-specific type info from Google Test program's output.
Args:
test_output: the output of a Google Test program.
Returns:
output with type information normalized to canonical form.
"""
# some compilers output the name of type 'unsigned int' as 'unsigned'
return re.sub(r'unsigned int', 'unsigned', test_output)
def NormalizeToCurrentPlatform(test_output):
"""Normalizes platform specific output details for easier comparison."""
if IS_WINDOWS:
# Removes the color information that is not present on Windows.
test_output = re.sub('\x1b\\[(0;3\d)?m', '', test_output)
# Changes failure message headers into the Windows format.
test_output = re.sub(r': Failure\n', r': error: ', test_output)
# Changes file(line_number) to file:line_number.
test_output = re.sub(r'((\w|\.)+)\((\d+)\):', r'\1:\3:', test_output)
return test_output
def RemoveTestCounts(output):
"""Removes test counts from a Google Test program's output."""
output = re.sub(r'\d+ tests?, listed below',
'? tests, listed below', output)
output = re.sub(r'\d+ FAILED TESTS',
'? FAILED TESTS', output)
output = re.sub(r'\d+ tests? from \d+ test cases?',
'? tests from ? test cases', output)
output = re.sub(r'\d+ tests? from ([a-zA-Z_])',
r'? tests from \1', output)
return re.sub(r'\d+ tests?\.', '? tests.', output)
def RemoveMatchingTests(test_output, pattern):
"""Removes output of specified tests from a Google Test program's output.
This function strips not only the beginning and the end of a test but also
all output in between.
Args:
test_output: A string containing the test output.
pattern: A regex string that matches names of test cases or
tests to remove.
Returns:
Contents of test_output with tests whose names match pattern removed.
"""
test_output = re.sub(
r'.*\[ RUN \] .*%s(.|\n)*?\[( FAILED | OK )\] .*%s.*\n' % (
pattern, pattern),
'',
test_output)
return re.sub(r'.*%s.*\n' % pattern, '', test_output)
def NormalizeOutput(output):
"""Normalizes output (the output of gtest_output_test_.exe)."""
output = ToUnixLineEnding(output)
output = RemoveLocations(output)
output = RemoveStackTraceDetails(output)
output = RemoveTime(output)
return output
def GetShellCommandOutput(env_cmd):
"""Runs a command in a sub-process, and returns its output in a string.
Args:
env_cmd: The shell command. A 2-tuple where element 0 is a dict of extra
environment variables to set, and element 1 is a string with
the command and any flags.
Returns:
A string with the command's combined standard and diagnostic output.
"""
# Spawns cmd in a sub-process, and gets its standard I/O file objects.
# Set and save the environment properly.
environ = os.environ.copy()
environ.update(env_cmd[0])
p = gtest_test_utils.Subprocess(env_cmd[1], env=environ)
return p.output
def GetCommandOutput(env_cmd):
"""Runs a command and returns its output with all file location
info stripped off.
Args:
env_cmd: The shell command. A 2-tuple where element 0 is a dict of extra
environment variables to set, and element 1 is a string with
the command and any flags.
"""
# Disables exception pop-ups on Windows.
environ, cmdline = env_cmd
environ = dict(environ) # Ensures we are modifying a copy.
environ[CATCH_EXCEPTIONS_ENV_VAR_NAME] = '1'
return NormalizeOutput(GetShellCommandOutput((environ, cmdline)))
def GetOutputOfAllCommands():
"""Returns concatenated output from several representative commands."""
return (GetCommandOutput(COMMAND_WITH_COLOR) +
GetCommandOutput(COMMAND_WITH_TIME) +
GetCommandOutput(COMMAND_WITH_DISABLED) +
GetCommandOutput(COMMAND_WITH_SHARDING))
test_list = GetShellCommandOutput(COMMAND_LIST_TESTS)
SUPPORTS_DEATH_TESTS = 'DeathTest' in test_list
SUPPORTS_TYPED_TESTS = 'TypedTest' in test_list
SUPPORTS_THREADS = 'ExpectFailureWithThreadsTest' in test_list
SUPPORTS_STACK_TRACES = False
CAN_GENERATE_GOLDEN_FILE = (SUPPORTS_DEATH_TESTS and
SUPPORTS_TYPED_TESTS and
SUPPORTS_THREADS)
class GTestOutputTest(gtest_test_utils.TestCase):
def RemoveUnsupportedTests(self, test_output):
if not SUPPORTS_DEATH_TESTS:
test_output = RemoveMatchingTests(test_output, 'DeathTest')
if not SUPPORTS_TYPED_TESTS:
test_output = RemoveMatchingTests(test_output, 'TypedTest')
test_output = RemoveMatchingTests(test_output, 'TypedDeathTest')
test_output = RemoveMatchingTests(test_output, 'TypeParamDeathTest')
if not SUPPORTS_THREADS:
test_output = RemoveMatchingTests(test_output,
'ExpectFailureWithThreadsTest')
test_output = RemoveMatchingTests(test_output,
'ScopedFakeTestPartResultReporterTest')
test_output = RemoveMatchingTests(test_output,
'WorksConcurrently')
if not SUPPORTS_STACK_TRACES:
test_output = RemoveStackTraces(test_output)
return test_output
def testOutput(self):
output = GetOutputOfAllCommands()
golden_file = open(GOLDEN_PATH, 'rb')
# A mis-configured source control system can cause \r appear in EOL
# sequences when we read the golden file irrespective of an operating
# system used. Therefore, we need to strip those \r's from newlines
# unconditionally.
golden = ToUnixLineEnding(golden_file.read())
golden_file.close()
# We want the test to pass regardless of certain features being
# supported or not.
# We still have to remove type name specifics in all cases.
normalized_actual = RemoveTypeInfoDetails(output)
normalized_golden = RemoveTypeInfoDetails(golden)
if CAN_GENERATE_GOLDEN_FILE:
self.assertEqual(normalized_golden, normalized_actual)
else:
normalized_actual = NormalizeToCurrentPlatform(
RemoveTestCounts(normalized_actual))
normalized_golden = NormalizeToCurrentPlatform(
RemoveTestCounts(self.RemoveUnsupportedTests(normalized_golden)))
# This code is very handy when debugging golden file differences:
if os.getenv('DEBUG_GTEST_OUTPUT_TEST'):
open(os.path.join(
gtest_test_utils.GetSourceDir(),
'_gtest_output_test_normalized_actual.txt'), 'wb').write(
normalized_actual)
open(os.path.join(
gtest_test_utils.GetSourceDir(),
'_gtest_output_test_normalized_golden.txt'), 'wb').write(
normalized_golden)
self.assertEqual(normalized_golden, normalized_actual)
if __name__ == '__main__':
if sys.argv[1:] == [GENGOLDEN_FLAG]:
if CAN_GENERATE_GOLDEN_FILE:
output = GetOutputOfAllCommands()
golden_file = open(GOLDEN_PATH, 'wb')
golden_file.write(output)
golden_file.close()
else:
message = (
"""Unable to write a golden file when compiled in an environment
that does not support all the required features (death tests, typed tests,
and multiple threads). Please generate the golden file using a binary built
with those features enabled.""")
sys.stderr.write(message)
sys.exit(1)
else:
gtest_test_utils.Main()
|
mit
|