text stringlengths 4-1.02M | meta dict |
---|---|
import subprocess
from collections import namedtuple
from .tools import is_exe, NotExecutableError
# Factory namedtuple for Bowtie2_Build return values:
# command - the bowtie2-build command line that was (or would be) executed
# index - the output stem passed to bowtie2-build (self._outfname)
# stdout/stderr - captured process output (None on a dry run)
Results = namedtuple("Results", "command index stdout stderr")
class Bowtie2_BuildError(Exception):
"""Exception raised when bowtie2-build fails"""
def __init__(self, message):
self.message = message
class Bowtie2_Build(object):
"""Class for working with bowtie2-build"""
def __init__(self, exe_path):
"""Instantiate with location of executable"""
if not is_exe(exe_path):
msg = "{0} is not an executable".format(exe_path)
raise NotExecutableError(msg)
self._exe_path = exe_path
def run(self, infname, outstem, dry_run=False):
"""Construct and execute a bowtie2-build command-line"""
self.__build_cmd(infname, outstem)
if dry_run:
results = Results(self._cmd, self._outfname, None, None)
else:
pipe = subprocess.run(self._cmd, shell=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
check=True)
results = Results(self._cmd, self._outfname, pipe.stdout,
pipe.stderr)
return results
def __build_cmd(self, infname, outstem):
"""Build a command-line for bowtie2-build"""
self._outfname = outstem
cmd = ["bowtie2-build",
"--quiet",
"-f",
infname,
self._outfname]
self._cmd = ' '.join(cmd)
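# Hedged usage sketch (not part of the original module); the executable path and
# FASTA file below are placeholders. With dry_run=True only the command line is
# assembled and nothing is executed:
#     bt2_build = Bowtie2_Build("/usr/bin/bowtie2-build")
#     result = bt2_build.run("assembled_reads.fasta", "assembled_reads_idx", dry_run=True)
#     result.command  # "bowtie2-build --quiet -f assembled_reads.fasta assembled_reads_idx"
#     result.index    # "assembled_reads_idx"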
| {
"content_hash": "70432ddd99462c89a9223e55a64a6636",
"timestamp": "",
"source": "github",
"line_count": 52,
"max_line_length": 75,
"avg_line_length": 34.86538461538461,
"alnum_prop": 0.5626034197462769,
"repo_name": "widdowquinn/THAPBI-pycits",
"id": "224c77c26fd547c465e4813cdeb1c8feadda550b",
"size": "2017",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pycits/bowtie2_build.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "469746"
},
{
"name": "Python",
"bytes": "199487"
},
{
"name": "UnrealScript",
"bytes": "31869"
}
],
"symlink_target": ""
} |
__author__ = 'Kamo Petrosyan'
class Offer:
"""
    url
        URL of the product page. The maximum URL length is 512 characters.
        Optional element for offline (showroom) stores.
    price
        The price at which this product can be purchased. The offer price is rounded; the format in which it is displayed depends on the user's settings.
        Required element.
    oldprice
        The old price of the product, which must be higher than the new price (<price>). The <oldprice> parameter is needed for the automatic calculation of the product discount.
        Optional element.
    currencyId
        Identifier of the product currency (RUR, USD, UAH, KZT). For the price to be displayed correctly in the national currency, the identifier (for example, UAH) must be used with the corresponding price value.
        Required element.
    categoryId
        Identifier of the product category assigned by the store (an integer of at most 18 digits). An offer may belong to only one category.
        Required element. An <offer> element may contain only one <categoryId> element.
    market_category
        The product category in which the product should be placed on Yandex.Market. Only category names from the Yandex.Market category tree may be used.
        Optional element.
        Note: the Yandex.Market category tree can be downloaded in XLS format.
    picture
        Link to a picture for the corresponding offer. Linking to a placeholder image, i.e. a page that says "no picture available", or to the store logo is not allowed. The maximum URL length is 512 characters.
        Optional element.
    store
        The element indicates whether the product can be bought in a retail store.
        Possible values:
        1) false: purchase in a retail store is not available;
        2) true: the product can be bought in a retail store.
        Optional element.
    pickup
        The element indicates whether the selected product can be reserved and picked up by the customer.
        Possible values:
        1) false: pickup is not available;
        2) true: the product can be picked up by the customer.
        Optional element.
    delivery
        The element indicates whether delivery of the product is available.
        Possible values:
        1) false: the product cannot be delivered;
        2) true: the product is delivered under the conditions described in the partner interface on the Placement Settings page.
        Optional element.
    local_delivery_cost
        The cost of delivering this product within its own region.
        Optional element.
    name
        Name of the offer. For a simplified offer it is recommended to include the product name and the manufacturer code.
        Required element.
    vendor
        Manufacturer. Not shown in the offer name.
        Optional element.
    vendorCode
        Product code (the manufacturer's code is specified). Not shown in the offer name.
        Optional element.
    description
        Description of the offer. The text may be at most 175 characters long (not counting punctuation); HTML tags are not allowed (information inside tags will not be published).
        Optional element.
    sales_notes
        The element is used to convey information about the minimum order amount, the minimum product batch or a prepayment requirement, and to describe promotions, discounts and sales. The text in the element may be at most 50 characters long.
        Optional element.
    manufacturer_warranty
        The element marks products that carry an official manufacturer warranty.
        Optional element.
        Possible values:
        1) false: the product has no official warranty;
        2) true: the product has an official warranty.
    country_of_origin
        The element specifies the country in which the product was manufactured. The list of countries that may be used in this element is available at: http://partner.market.yandex.ru/pages/help/Countries.pdf.
        Note: if you want to take part in the "Order on Market" program, it is advisable to include this element in the YML file.
        Optional element.
    adult
        The element is mandatory for marking products related to the satisfaction of sexual needs or otherwise exploiting an interest in sex.
        Optional element.
    age
        Age category of the product. Years are set with the unit attribute whose value is year, months with the unit attribute whose value is month.
        Allowed values with unit="year": 0, 6, 12, 16, 18. Allowed values with unit="month": 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12.
        Optional element.
    barcode
        Product barcode as specified by the manufacturer.
        Optional element. An <offer> element may contain several <barcode> elements.
    cpa
        The element manages the participation of offers in the "Order on Market" program.
        Optional element.
    param
        The element specifies product characteristics. A separate <param> element is used for each parameter.
        Optional element. An <offer> element may contain several <param> elements.
"""
def __init__(self):
self.id = 0
self.url = None
self.price = None
self.oldprice = None
self.currencyId = None
self.categoryId = None
self.market_category = None
self.pictures = []
        '''
        Images are added via pictures_append(url)
        '''
self.store = False
self.pickup = False
self.delivery = False
self.local_delivery_cost = None
self.name = None
self.vendor = None
self.vendorCode = None
self.description = None
self.sales_notes = None
self.manufacturer_warranty = None
self.country_of_origin = None
self.adult = None
self.age = None
self.age_unit = None
self.barcode = None
self.cpa = None
self.params = []
        '''
        Parameters are added via params_append(param_name, param_value)
        '''
def pictures_append(self, url):
self.pictures.append(url)
def params_append(self, param_name, param_value):
self.params.append({'name': param_name, 'value': param_value})
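    # Hedged usage sketch (not part of the original class); all values are placeholders:
    #     offer = Offer()
    #     offer.price = 1500
    #     offer.currencyId = 'RUR'
    #     offer.categoryId = 101
    #     offer.name = u'Example product'
    #     offer.pictures_append('http://example.com/picture.jpg')
    #     offer.params_append(u'Color', u'Red')
    #     offer.validate()  # raises if price, currencyId, categoryId or name is missing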
def validate(self):
if self.price is None or self.currencyId is None or self.categoryId is None or self.name is None:
# Not valid offer
raise Exception(u"Поля price, currencyId, categoryId, name обязательны") | {
"content_hash": "ea7168b0ac0c6bb7769e776f63ab31b0",
"timestamp": "",
"source": "github",
"line_count": 156,
"max_line_length": 232,
"avg_line_length": 42.18589743589744,
"alnum_prop": 0.7310439142987388,
"repo_name": "Haikson/virtenviro",
"id": "9463fd06374f46ead147b876fed3cc369e906702",
"size": "10257",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "virtenviro/shop/yml_import/offer.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "48"
},
{
"name": "CSS",
"bytes": "237430"
},
{
"name": "HTML",
"bytes": "190190"
},
{
"name": "JavaScript",
"bytes": "204448"
},
{
"name": "PHP",
"bytes": "2199"
},
{
"name": "Python",
"bytes": "193113"
},
{
"name": "Ruby",
"bytes": "322"
}
],
"symlink_target": ""
} |
from flask import jsonify, json
"""
Helper functions used in the app.
"""
def json_data(req):
"""(flask.request) -> dict
Returns a dict representing the json request
submitted to /email.
"""
return {k: v.strip() for k, v in req.get_json().iteritems()}
def get_services(email, app):
"""(Email) -> list
Return a tuple containing email service names in order
"""
services = app.config['SERVICES'][:]
if email.service == 'mandrill':
services.reverse()
return services
def abort(message, code=400):
"""(str, int) -> flask.Response
Produces a response object with the proper error code and message.
"""
resp = jsonify({'status': 'error', 'message': message})
resp.status_code = code
return resp
def success(email):
"""(Email) -> flask.Response
Produces a response object with a success message.
"""
return jsonify({
'status': 'success',
'message': 'Email queued to be sent by {}.'.format(email.service.capitalize())
})
def log_error(logger, email, resp):
"""(logger, Email, requests.Request) -> None
Logs an error: mention the mail service
name, the response message and code.
"""
logger.error('Error {0} - {1} responded with: {2}'.format(resp.status_code, email.service, resp.text))
def send_email(email):
"""(Email) -> (str, int)
    Processes the email body. Fires the correct email
    service and sends the given email.
Returns a response message and status code.
"""
service = service_obj(email.service)
return service.send(email)
def service_obj(service):
"""(str) -> Mailgun() or Mandrill()
    Returns a wrapper object for the given email service.
"""
module = __import__(service)
Service = getattr(module, service.capitalize())
return Service()
def save_email(db, email):
"""(SQLAlchemy, Email) -> None
Saves the email object in the database.
"""
db.session.add(email)
db.session.commit()
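# Hedged usage sketch (not part of the original module): get_services() returns the
# configured provider list, reversed when the email last went through Mandrill. The
# Email and app stand-ins below are hypothetical.
if __name__ == "__main__":
    class _FakeEmail(object):
        service = 'mandrill'
    class _FakeApp(object):
        config = {'SERVICES': ['mailgun', 'mandrill']}
    print(get_services(_FakeEmail(), _FakeApp()))  # ['mandrill', 'mailgun']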
| {
"content_hash": "bffdf504a34638e1f3cbcad802a3e5c4",
"timestamp": "",
"source": "github",
"line_count": 76,
"max_line_length": 104,
"avg_line_length": 25.144736842105264,
"alnum_prop": 0.6708529565672423,
"repo_name": "wa3l/mailr",
"id": "caf8f556420d1c325f75a3fc3079748c1ce207cf",
"size": "1911",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "helpers.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "16207"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import, division, print_function
from cryptography import utils
from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
from cryptography.hazmat.backends.interfaces import (
CMACBackend, CipherBackend, DERSerializationBackend, DSABackend,
EllipticCurveBackend, HMACBackend, HashBackend, PBKDF2HMACBackend,
PEMSerializationBackend, RSABackend, X509Backend
)
@utils.register_interface(CMACBackend)
@utils.register_interface(CipherBackend)
@utils.register_interface(DERSerializationBackend)
@utils.register_interface(HashBackend)
@utils.register_interface(HMACBackend)
@utils.register_interface(PBKDF2HMACBackend)
@utils.register_interface(RSABackend)
@utils.register_interface(DSABackend)
@utils.register_interface(EllipticCurveBackend)
@utils.register_interface(PEMSerializationBackend)
@utils.register_interface(X509Backend)
class MultiBackend(object):
name = "multibackend"
def __init__(self, backends):
self._backends = backends
def _filtered_backends(self, interface):
for b in self._backends:
if isinstance(b, interface):
yield b
def cipher_supported(self, cipher, mode):
return any(
b.cipher_supported(cipher, mode)
for b in self._filtered_backends(CipherBackend)
)
def create_symmetric_encryption_ctx(self, cipher, mode):
for b in self._filtered_backends(CipherBackend):
try:
return b.create_symmetric_encryption_ctx(cipher, mode)
except UnsupportedAlgorithm:
pass
raise UnsupportedAlgorithm(
"cipher {0} in {1} mode is not supported by this backend.".format(
cipher.name, mode.name if mode else mode),
_Reasons.UNSUPPORTED_CIPHER
)
def create_symmetric_decryption_ctx(self, cipher, mode):
for b in self._filtered_backends(CipherBackend):
try:
return b.create_symmetric_decryption_ctx(cipher, mode)
except UnsupportedAlgorithm:
pass
raise UnsupportedAlgorithm(
"cipher {0} in {1} mode is not supported by this backend.".format(
cipher.name, mode.name if mode else mode),
_Reasons.UNSUPPORTED_CIPHER
)
def hash_supported(self, algorithm):
return any(
b.hash_supported(algorithm)
for b in self._filtered_backends(HashBackend)
)
def create_hash_ctx(self, algorithm):
for b in self._filtered_backends(HashBackend):
try:
return b.create_hash_ctx(algorithm)
except UnsupportedAlgorithm:
pass
raise UnsupportedAlgorithm(
"{0} is not a supported hash on this backend.".format(
algorithm.name),
_Reasons.UNSUPPORTED_HASH
)
def hmac_supported(self, algorithm):
return any(
b.hmac_supported(algorithm)
for b in self._filtered_backends(HMACBackend)
)
def create_hmac_ctx(self, key, algorithm):
for b in self._filtered_backends(HMACBackend):
try:
return b.create_hmac_ctx(key, algorithm)
except UnsupportedAlgorithm:
pass
raise UnsupportedAlgorithm(
"{0} is not a supported hash on this backend.".format(
algorithm.name),
_Reasons.UNSUPPORTED_HASH
)
def pbkdf2_hmac_supported(self, algorithm):
return any(
b.pbkdf2_hmac_supported(algorithm)
for b in self._filtered_backends(PBKDF2HMACBackend)
)
def derive_pbkdf2_hmac(self, algorithm, length, salt, iterations,
key_material):
for b in self._filtered_backends(PBKDF2HMACBackend):
try:
return b.derive_pbkdf2_hmac(
algorithm, length, salt, iterations, key_material
)
except UnsupportedAlgorithm:
pass
raise UnsupportedAlgorithm(
"{0} is not a supported hash on this backend.".format(
algorithm.name),
_Reasons.UNSUPPORTED_HASH
)
def generate_rsa_private_key(self, public_exponent, key_size):
for b in self._filtered_backends(RSABackend):
return b.generate_rsa_private_key(public_exponent, key_size)
raise UnsupportedAlgorithm("RSA is not supported by the backend.",
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
def generate_rsa_parameters_supported(self, public_exponent, key_size):
for b in self._filtered_backends(RSABackend):
return b.generate_rsa_parameters_supported(
public_exponent, key_size
)
raise UnsupportedAlgorithm("RSA is not supported by the backend.",
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
def rsa_padding_supported(self, padding):
for b in self._filtered_backends(RSABackend):
return b.rsa_padding_supported(padding)
raise UnsupportedAlgorithm("RSA is not supported by the backend.",
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
def load_rsa_private_numbers(self, numbers):
for b in self._filtered_backends(RSABackend):
return b.load_rsa_private_numbers(numbers)
raise UnsupportedAlgorithm("RSA is not supported by the backend",
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
def load_rsa_public_numbers(self, numbers):
for b in self._filtered_backends(RSABackend):
return b.load_rsa_public_numbers(numbers)
raise UnsupportedAlgorithm("RSA is not supported by the backend",
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
def generate_dsa_parameters(self, key_size):
for b in self._filtered_backends(DSABackend):
return b.generate_dsa_parameters(key_size)
raise UnsupportedAlgorithm("DSA is not supported by the backend.",
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
def generate_dsa_private_key(self, parameters):
for b in self._filtered_backends(DSABackend):
return b.generate_dsa_private_key(parameters)
raise UnsupportedAlgorithm("DSA is not supported by the backend.",
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
def generate_dsa_private_key_and_parameters(self, key_size):
for b in self._filtered_backends(DSABackend):
return b.generate_dsa_private_key_and_parameters(key_size)
raise UnsupportedAlgorithm("DSA is not supported by the backend.",
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
def dsa_hash_supported(self, algorithm):
for b in self._filtered_backends(DSABackend):
return b.dsa_hash_supported(algorithm)
raise UnsupportedAlgorithm("DSA is not supported by the backend.",
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
def dsa_parameters_supported(self, p, q, g):
for b in self._filtered_backends(DSABackend):
return b.dsa_parameters_supported(p, q, g)
raise UnsupportedAlgorithm("DSA is not supported by the backend.",
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
def load_dsa_public_numbers(self, numbers):
for b in self._filtered_backends(DSABackend):
return b.load_dsa_public_numbers(numbers)
raise UnsupportedAlgorithm("DSA is not supported by the backend.",
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
def load_dsa_private_numbers(self, numbers):
for b in self._filtered_backends(DSABackend):
return b.load_dsa_private_numbers(numbers)
raise UnsupportedAlgorithm("DSA is not supported by the backend.",
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
def load_dsa_parameter_numbers(self, numbers):
for b in self._filtered_backends(DSABackend):
return b.load_dsa_parameter_numbers(numbers)
raise UnsupportedAlgorithm("DSA is not supported by the backend.",
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
def cmac_algorithm_supported(self, algorithm):
return any(
b.cmac_algorithm_supported(algorithm)
for b in self._filtered_backends(CMACBackend)
)
def create_cmac_ctx(self, algorithm):
for b in self._filtered_backends(CMACBackend):
try:
return b.create_cmac_ctx(algorithm)
except UnsupportedAlgorithm:
pass
raise UnsupportedAlgorithm("This backend does not support CMAC.",
_Reasons.UNSUPPORTED_CIPHER)
def elliptic_curve_supported(self, curve):
return any(
b.elliptic_curve_supported(curve)
for b in self._filtered_backends(EllipticCurveBackend)
)
def elliptic_curve_signature_algorithm_supported(
self, signature_algorithm, curve
):
return any(
b.elliptic_curve_signature_algorithm_supported(
signature_algorithm, curve
)
for b in self._filtered_backends(EllipticCurveBackend)
)
def generate_elliptic_curve_private_key(self, curve):
for b in self._filtered_backends(EllipticCurveBackend):
try:
return b.generate_elliptic_curve_private_key(curve)
except UnsupportedAlgorithm:
continue
raise UnsupportedAlgorithm(
"This backend does not support this elliptic curve.",
_Reasons.UNSUPPORTED_ELLIPTIC_CURVE
)
def load_elliptic_curve_private_numbers(self, numbers):
for b in self._filtered_backends(EllipticCurveBackend):
try:
return b.load_elliptic_curve_private_numbers(numbers)
except UnsupportedAlgorithm:
continue
raise UnsupportedAlgorithm(
"This backend does not support this elliptic curve.",
_Reasons.UNSUPPORTED_ELLIPTIC_CURVE
)
def load_elliptic_curve_public_numbers(self, numbers):
for b in self._filtered_backends(EllipticCurveBackend):
try:
return b.load_elliptic_curve_public_numbers(numbers)
except UnsupportedAlgorithm:
continue
raise UnsupportedAlgorithm(
"This backend does not support this elliptic curve.",
_Reasons.UNSUPPORTED_ELLIPTIC_CURVE
)
def load_pem_private_key(self, data, password):
for b in self._filtered_backends(PEMSerializationBackend):
return b.load_pem_private_key(data, password)
raise UnsupportedAlgorithm(
"This backend does not support this key serialization.",
_Reasons.UNSUPPORTED_SERIALIZATION
)
def load_pem_public_key(self, data):
for b in self._filtered_backends(PEMSerializationBackend):
return b.load_pem_public_key(data)
raise UnsupportedAlgorithm(
"This backend does not support this key serialization.",
_Reasons.UNSUPPORTED_SERIALIZATION
)
def load_der_private_key(self, data, password):
for b in self._filtered_backends(DERSerializationBackend):
return b.load_der_private_key(data, password)
raise UnsupportedAlgorithm(
"This backend does not support this key serialization.",
_Reasons.UNSUPPORTED_SERIALIZATION
)
def load_der_public_key(self, data):
for b in self._filtered_backends(DERSerializationBackend):
return b.load_der_public_key(data)
raise UnsupportedAlgorithm(
"This backend does not support this key serialization.",
_Reasons.UNSUPPORTED_SERIALIZATION
)
def load_pem_x509_certificate(self, data):
for b in self._filtered_backends(X509Backend):
return b.load_pem_x509_certificate(data)
raise UnsupportedAlgorithm(
"This backend does not support X.509.",
_Reasons.UNSUPPORTED_X509
)
def load_der_x509_certificate(self, data):
for b in self._filtered_backends(X509Backend):
return b.load_der_x509_certificate(data)
raise UnsupportedAlgorithm(
"This backend does not support X.509.",
_Reasons.UNSUPPORTED_X509
)
def load_der_x509_csr(self, data):
for b in self._filtered_backends(X509Backend):
return b.load_der_x509_csr(data)
raise UnsupportedAlgorithm(
"This backend does not support X.509.",
_Reasons.UNSUPPORTED_X509
)
def load_pem_x509_csr(self, data):
for b in self._filtered_backends(X509Backend):
return b.load_pem_x509_csr(data)
raise UnsupportedAlgorithm(
"This backend does not support X.509.",
_Reasons.UNSUPPORTED_X509
)
def create_x509_csr(self, builder, private_key, algorithm):
for b in self._filtered_backends(X509Backend):
return b.create_x509_csr(builder, private_key, algorithm)
raise UnsupportedAlgorithm(
"This backend does not support X.509.",
_Reasons.UNSUPPORTED_X509
)
def create_x509_certificate(self, builder, private_key, algorithm):
for b in self._filtered_backends(X509Backend):
return b.create_x509_certificate(builder, private_key, algorithm)
raise UnsupportedAlgorithm(
"This backend does not support X.509.",
_Reasons.UNSUPPORTED_X509
)
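# Hedged usage sketch (not part of the original module): wrap the OpenSSL backend in
# a MultiBackend and hash through it; unsupported algorithms fall through to the next
# backend in the list. Import paths match cryptography releases contemporary with
# this module.
if __name__ == "__main__":
    import binascii
    from cryptography.hazmat.backends.openssl import backend as openssl_backend
    from cryptography.hazmat.primitives import hashes
    multi = MultiBackend([openssl_backend])
    digest = hashes.Hash(hashes.SHA256(), backend=multi)
    digest.update(b"example data")
    print(binascii.hexlify(digest.finalize()))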
| {
"content_hash": "b1d3b1e56c3eb7042e8ccb3b4dc17160",
"timestamp": "",
"source": "github",
"line_count": 358,
"max_line_length": 78,
"avg_line_length": 38.99441340782123,
"alnum_prop": 0.6228510028653295,
"repo_name": "ARMmbed/yotta_osx_installer",
"id": "9db32aa56530727dd6f66c62083038216b1c5e1a",
"size": "14141",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "workspace/lib/python2.7/site-packages/cryptography/hazmat/backends/multibackend.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "ActionScript",
"bytes": "46"
},
{
"name": "Assembly",
"bytes": "29493"
},
{
"name": "Batchfile",
"bytes": "1321"
},
{
"name": "C",
"bytes": "3589917"
},
{
"name": "C++",
"bytes": "10603800"
},
{
"name": "CMake",
"bytes": "2408460"
},
{
"name": "CSS",
"bytes": "17863"
},
{
"name": "Emacs Lisp",
"bytes": "14305"
},
{
"name": "FORTRAN",
"bytes": "2105"
},
{
"name": "Groff",
"bytes": "3889491"
},
{
"name": "HTML",
"bytes": "31505361"
},
{
"name": "JavaScript",
"bytes": "90647"
},
{
"name": "Logos",
"bytes": "8877"
},
{
"name": "Makefile",
"bytes": "2798"
},
{
"name": "Objective-C",
"bytes": "254392"
},
{
"name": "Python",
"bytes": "7903768"
},
{
"name": "Shell",
"bytes": "36795"
},
{
"name": "VimL",
"bytes": "8478"
},
{
"name": "XC",
"bytes": "8384"
},
{
"name": "XS",
"bytes": "8334"
}
],
"symlink_target": ""
} |
import sys
from fs.opener import opener
from fs.commands.runner import Command
from fs.utils import print_fs
class FSTree(Command):
usage = """fstree [OPTION]... [PATH]
Recursively display the contents of PATH in an ascii tree"""
def get_optparse(self):
optparse = super(FSTree, self).get_optparse()
optparse.add_option('-l', '--level', dest='depth', type="int", default=5,
help="Descend only LEVEL directories deep (-1 for infinite)", metavar="LEVEL")
optparse.add_option('-g', '--gui', dest='gui', action='store_true', default=False,
help="browse the tree with a gui")
optparse.add_option('-a', '--all', dest='all', action='store_true', default=False,
help="do not hide dot files")
optparse.add_option('--dirsfirst', dest='dirsfirst', action='store_true', default=False,
help="List directories before files")
optparse.add_option('-P', dest="pattern", default=None,
help="Only list files that match the given pattern")
optparse.add_option('-d', dest="dirsonly", default=False, action='store_true',
help="List directories only")
return optparse
def do_run(self, options, args):
if not args:
args = ['.']
for fs, path, is_dir in self.get_resources(args, single=True):
if not is_dir:
self.error(u"'%s' is not a dir\n" % path)
return 1
fs.cache_hint(True)
if options.gui:
from fs.browsewin import browse
if path:
fs = fs.opendir(path)
browse(fs, hide_dotfiles=not options.all)
else:
if options.depth < 0:
max_levels = None
else:
max_levels = options.depth
self.output(self.wrap_dirname(args[0] + '\n'))
dircount, filecount = print_fs(fs, path or '',
file_out=self.output_file,
max_levels=max_levels,
terminal_colors=self.terminal_colors,
hide_dotfiles=not options.all,
dirs_first=options.dirsfirst,
files_wildcard=options.pattern,
dirs_only=options.dirsonly)
self.output('\n')
def pluralize(one, many, count):
if count == 1:
return '%i %s' % (count, one)
else:
return '%i %s' % (count, many)
self.output("%s, %s\n" % (pluralize('directory', 'directories', dircount),
pluralize('file', 'files', filecount)))
def run():
return FSTree().run()
if __name__ == "__main__":
sys.exit(run())
| {
"content_hash": "7be0e0238be1807dc6402feb44de5993",
"timestamp": "",
"source": "github",
"line_count": 73,
"max_line_length": 106,
"avg_line_length": 42.78082191780822,
"alnum_prop": 0.4748639129042587,
"repo_name": "PyFilesystem/pyfilesystem",
"id": "bbb88b554fe5c96f414a090b3ad8f51f25dcd7e1",
"size": "3145",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "fs/commands/fstree.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "1052910"
}
],
"symlink_target": ""
} |
def change_base(arr, base):
r = reversed(arr)
total = 0
for i, val in enumerate(r):
total += val * pow(base,i)
return total
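# Example: change_base([1, 0, 1], 2) reads the digit list as a base-2 number -> 5.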
def next_permutation(length):
initial = None
for p in permutation(length-2):
yield [1] + p + [1]
def permutation(length):
for prefix in xrange(2):
if length == 1:
yield [prefix]
else:
for p in permutation(length - 1):
yield [prefix] + p
def primes_up_to(top):
primes = [2]
for i in xrange(3, top, 2):
isPrime = True
for prime in primes:
if i % prime == 0:
isPrime = False
break
if prime * prime >= i:
                break
if isPrime:
primes.append(i)
return primes
t = int(raw_input())
for case in xrange(1, t+1):
number, jamcoins = map(int, raw_input().split())
# constants to play
top = 100000
first_primes = primes_up_to(top)
js = []
for p in next_permutation(number):
divisors = []
for base in xrange(2,11):
currNum = change_base(p, base)
for divisorCandidate in first_primes:
if currNum % divisorCandidate == 0:
divisors.append(divisorCandidate)
break
if divisorCandidate * divisorCandidate > currNum:
break
if len(divisors) == 9:
js.append((''.join([str(i) for i in p]), divisors))
if len(js) == jamcoins: break
print "Case #{}:".format(case)
for cad, divisors in js:
print cad , ' '.join(map(str,divisors))
| {
"content_hash": "6494b283602832eb542442b12582e6b3",
"timestamp": "",
"source": "github",
"line_count": 59,
"max_line_length": 54,
"avg_line_length": 23.677966101694917,
"alnum_prop": 0.60773085182534,
"repo_name": "clemus90/competitive-programming",
"id": "d6d5fab0ed32fb8e28016212da42eff605639e44",
"size": "1397",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "GoogleCodeJam/2016/Qualification/c_coin_jam.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "1067"
},
{
"name": "Java",
"bytes": "30862"
},
{
"name": "JavaScript",
"bytes": "4311"
},
{
"name": "Python",
"bytes": "49167"
},
{
"name": "Scala",
"bytes": "38749"
}
],
"symlink_target": ""
} |
"""Volume snapshot interface (1.1 extension)."""
import six
try:
from urllib import urlencode
except ImportError:
from urllib.parse import urlencode
from cinderclient import base
class Snapshot(base.Resource):
"""A Snapshot is a point-in-time snapshot of an openstack volume."""
def __repr__(self):
return "<Snapshot: %s>" % self.id
def delete(self):
"""Delete this snapshot."""
self.manager.delete(self)
def update(self, **kwargs):
"""Update the name or description for this snapshot."""
self.manager.update(self, **kwargs)
@property
def progress(self):
return self._info.get('os-extended-snapshot-attributes:progress')
@property
def project_id(self):
return self._info.get('os-extended-snapshot-attributes:project_id')
def reset_state(self, state):
"""Update the snapshot with the provided state."""
self.manager.reset_state(self, state)
class SnapshotManager(base.ManagerWithFind):
"""Manage :class:`Snapshot` resources."""
resource_class = Snapshot
def create(self, volume_id, force=False,
name=None, description=None):
"""Create a snapshot of the given volume.
:param volume_id: The ID of the volume to snapshot.
:param force: If force is True, create a snapshot even if the volume is
attached to an instance. Default is False.
:param name: Name of the snapshot
:param description: Description of the snapshot
:rtype: :class:`Snapshot`
"""
body = {'snapshot': {'volume_id': volume_id,
'force': force,
'name': name,
'description': description}}
return self._create('/snapshots', body, 'snapshot')
def get(self, snapshot_id):
"""Get a snapshot.
:param snapshot_id: The ID of the snapshot to get.
:rtype: :class:`Snapshot`
"""
return self._get("/snapshots/%s" % snapshot_id, "snapshot")
def list(self, detailed=True, search_opts=None):
"""Get a list of all snapshots.
:rtype: list of :class:`Snapshot`
"""
if search_opts is None:
search_opts = {}
qparams = {}
for opt, val in six.iteritems(search_opts):
if val:
qparams[opt] = val
query_string = "?%s" % urlencode(qparams) if qparams else ""
detail = ""
if detailed:
detail = "/detail"
return self._list("/snapshots%s%s" % (detail, query_string),
"snapshots")
def delete(self, snapshot):
"""Delete a snapshot.
:param snapshot: The :class:`Snapshot` to delete.
"""
self._delete("/snapshots/%s" % base.getid(snapshot))
def update(self, snapshot, **kwargs):
"""Update the name or description for a snapshot.
        :param snapshot: The :class:`Snapshot` to update.
"""
if not kwargs:
return
body = {"snapshot": kwargs}
self._update("/snapshots/%s" % base.getid(snapshot), body)
def reset_state(self, snapshot, state):
"""Update the specified snapshot with the provided state."""
return self._action('os-reset_status', snapshot, {'status': state})
def _action(self, action, snapshot, info=None, **kwargs):
"""Perform a snapshot action."""
body = {action: info}
self.run_hooks('modify_body_for_action', body, **kwargs)
url = '/snapshots/%s/action' % base.getid(snapshot)
return self.api.client.post(url, body=body)
def update_snapshot_status(self, snapshot, update_dict):
return self._action('os-update_snapshot_status',
base.getid(snapshot), update_dict)
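# Hedged usage sketch (not part of the original module). The manager is normally
# reached through an authenticated v2 client; the credentials and IDs below are
# placeholders:
#     from cinderclient.v2 import client
#     cinder = client.Client('user', 'password', 'project', 'http://keystone:5000/v2.0')
#     snap = cinder.volume_snapshots.create('volume-uuid', force=True, name='nightly')
#     cinder.volume_snapshots.list(search_opts={'status': 'available'})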
| {
"content_hash": "0ee9916ecf6189dbd21117a76546d0fa",
"timestamp": "",
"source": "github",
"line_count": 124,
"max_line_length": 79,
"avg_line_length": 31.04032258064516,
"alnum_prop": 0.58482722785139,
"repo_name": "ntt-sic/python-cinderclient",
"id": "4e16ba81d97742e91de0d315e57bc5d79f121995",
"size": "4489",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "cinderclient/v2/volume_snapshots.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "525750"
},
{
"name": "Shell",
"bytes": "6954"
}
],
"symlink_target": ""
} |
from ..wildchain import *
| {
"content_hash": "6cf6fac213f3d2abe7737f068a25a1fc",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 25,
"avg_line_length": 26,
"alnum_prop": 0.7307692307692307,
"repo_name": "rtfd/sphinx-autoapi",
"id": "a26276a0ec1eaa078ea734e347cb462c2c34ee9b",
"size": "26",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/python/pypackagecomplex/complex/wildwildchain/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Go",
"bytes": "5349"
},
{
"name": "JavaScript",
"bytes": "1212"
},
{
"name": "Makefile",
"bytes": "7509"
},
{
"name": "Python",
"bytes": "159954"
},
{
"name": "Shell",
"bytes": "45"
}
],
"symlink_target": ""
} |
import re
import sys
import time
import socket
import inspect
import datetime
import traceback
from flask import render_template, request, json
from flask.ext import login
from pyspider.libs import utils, sample_handler, dataurl
from pyspider.libs.response import rebuild_response
from pyspider.processor.project_module import ProjectManager, ProjectFinder, ProjectLoader
from .app import app
default_task = {
'taskid': 'data:,on_start',
'project': '',
'url': 'data:,on_start',
'process': {
'callback': 'on_start',
},
}
default_script = inspect.getsource(sample_handler)
def verify_project_name(project):
if re.search(r"[^\w]", project):
return False
return True
@app.route('/debug/<project>')
def debug(project):
if not verify_project_name(project):
return 'project name is not allowed!', 400
projectdb = app.config['projectdb']
info = projectdb.get(project)
if info:
script = info['script']
else:
script = (default_script
.replace('__DATE__', datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"))
.replace('__PROJECT_NAME__', project))
taskid = request.args.get('taskid')
if taskid:
taskdb = app.config['taskdb']
task = taskdb.get_task(
project, taskid, ['taskid', 'project', 'url', 'fetch', 'process'])
else:
task = default_task
default_task['project'] = project
return render_template("debug.html", task=task, script=script, project_name=project)
@app.before_first_request
def enable_projects_import():
class DebuggerProjectFinder(ProjectFinder):
def get_loader(self, name):
info = app.config['projectdb'].get(name)
if info:
return ProjectLoader(info)
sys.meta_path.append(DebuggerProjectFinder())
@app.route('/debug/<project>/run', methods=['POST', ])
def run(project):
start_time = time.time()
try:
task = utils.decode_unicode_obj(json.loads(request.form['task']))
except Exception:
result = {
'fetch_result': "",
'logs': u'task json error',
'follows': [],
'messages': [],
'result': None,
'time': time.time() - start_time,
}
return json.dumps(utils.unicode_obj(result)), 200, {'Content-Type': 'application/json'}
project_info = {
'name': project,
'status': 'DEBUG',
'script': request.form['script'],
}
fetch_result = {}
try:
fetch_result = app.config['fetch'](task)
response = rebuild_response(fetch_result)
module = ProjectManager.build_module(project_info, {
'debugger': True
})
ret = module['instance'].run(module['module'], task, response)
except Exception:
type, value, tb = sys.exc_info()
tb = utils.hide_me(tb, globals())
logs = ''.join(traceback.format_exception(type, value, tb))
result = {
'fetch_result': fetch_result,
'logs': logs,
'follows': [],
'messages': [],
'result': None,
'time': time.time() - start_time,
}
else:
result = {
'fetch_result': fetch_result,
'logs': ret.logstr(),
'follows': ret.follows,
'messages': ret.messages,
'result': ret.result,
'time': time.time() - start_time,
}
result['fetch_result']['content'] = response.text
if (response.headers.get('content-type', '').startswith('image')):
result['fetch_result']['dataurl'] = dataurl.encode(
response.content, response.headers['content-type'])
try:
# binary data can't encode to JSON, encode result as unicode obj
# before send it to frontend
return json.dumps(utils.unicode_obj(result)), 200, {'Content-Type': 'application/json'}
except Exception:
type, value, tb = sys.exc_info()
tb = utils.hide_me(tb, globals())
logs = ''.join(traceback.format_exception(type, value, tb))
result = {
'fetch_result': "",
'logs': logs,
'follows': [],
'messages': [],
'result': None,
'time': time.time() - start_time,
}
return json.dumps(utils.unicode_obj(result)), 200, {'Content-Type': 'application/json'}
@app.route('/debug/<project>/save', methods=['POST', ])
def save(project):
if not verify_project_name(project):
return 'project name is not allowed!', 400
projectdb = app.config['projectdb']
script = request.form['script']
project_info = projectdb.get(project, fields=['name', 'status', 'group'])
if project_info and 'lock' in projectdb.split_group(project_info.get('group')) \
and not login.current_user.is_active():
return app.login_response
if project_info:
info = {
'script': script,
}
if project_info.get('status') in ('DEBUG', 'RUNNING', ):
info['status'] = 'CHECKING'
projectdb.update(project, info)
else:
info = {
'name': project,
'script': script,
'status': 'TODO',
'rate': app.config.get('max_rate', 1),
'burst': app.config.get('max_burst', 3),
}
projectdb.insert(project, info)
rpc = app.config['scheduler_rpc']
if rpc is not None:
try:
rpc.update_project()
except socket.error as e:
app.logger.warning('connect to scheduler rpc error: %r', e)
return 'rpc error', 200
return 'ok', 200
@app.route('/helper.js')
def resizer_js():
host = request.headers['Host']
return render_template("helper.js", host=host), 200, {'Content-Type': 'application/javascript'}
@app.route('/helper.html')
def resizer_html():
height = request.args.get('height')
script = request.args.get('script', '')
return render_template("helper.html", height=height, script=script)
| {
"content_hash": "39601b13fea2478ccf175e4edbe0d38b",
"timestamp": "",
"source": "github",
"line_count": 193,
"max_line_length": 99,
"avg_line_length": 31.55440414507772,
"alnum_prop": 0.5742200328407225,
"repo_name": "giantoak/pyspider",
"id": "281a0ca1635744d94be9f28f6209af4da7c137ee",
"size": "6276",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "pyspider/webui/debug.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "21362"
},
{
"name": "JavaScript",
"bytes": "36632"
},
{
"name": "Python",
"bytes": "314391"
}
],
"symlink_target": ""
} |
from gotham import Gotham
from toaster import Toaster
from nashville import Nashville
from lomo import Lomo
from kelvin import Kelvin
| {
"content_hash": "121d32e19e3e38cd830650b3f3f7faee",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 31,
"avg_line_length": 26.6,
"alnum_prop": 0.8571428571428571,
"repo_name": "kevinmel2000/instagram-filters",
"id": "052a6797b3cfa2cdf864382666d65c8dbd137edf",
"size": "133",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "instagram_filters/filters/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "5331"
}
],
"symlink_target": ""
} |
from django.contrib import admin
from forex.models import Currency, CurrencyPrice
class CurrencyAdmin(admin.ModelAdmin):
search_fields=["name", ]
list_display = ('name', 'symbol', 'digits', 'num_code', 'ascii_symbol')
admin.site.register(Currency, CurrencyAdmin)
class CurrencyPriceAdmin(admin.ModelAdmin):
list_filter=['currency']
list_display = ('currency', 'date', 'ask_price', 'bid_price')
admin.site.register(CurrencyPrice, CurrencyPriceAdmin)
| {
"content_hash": "ebf54ca469a9cd1a09aa3d14703ab9f1",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 75,
"avg_line_length": 33.57142857142857,
"alnum_prop": 0.7340425531914894,
"repo_name": "Valuehorizon/valuehorizon-forex",
"id": "f27e567ea795f10adfc10e083d7e0c5fef1c2c54",
"size": "470",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "forex/admin.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "303"
},
{
"name": "Python",
"bytes": "36862"
}
],
"symlink_target": ""
} |
"""Tools for working with MongoDB `ObjectIds
<http://dochub.mongodb.org/core/objectids>`_.
"""
import binascii
import calendar
import datetime
try:
import hashlib
_md5func = hashlib.md5
except ImportError: # for Python < 2.5
import md5
_md5func = md5.new
import os
import random
import socket
import struct
import threading
import time
from bson.errors import InvalidId
from bson.py3compat import (PY3, b, binary_type, text_type,
bytes_from_hex, string_types)
from bson.tz_util import utc
EMPTY = b("")
ZERO = b("\x00")
def _machine_bytes():
"""Get the machine portion of an ObjectId.
"""
machine_hash = _md5func()
if PY3:
# gethostname() returns a unicode string in python 3.x
# while update() requires a byte string.
machine_hash.update(socket.gethostname().encode())
else:
# Calling encode() here will fail with non-ascii hostnames
machine_hash.update(socket.gethostname())
return machine_hash.digest()[0:3]
class ObjectId(object):
"""A MongoDB ObjectId.
"""
_inc = random.randint(0, 0xFFFFFF)
_inc_lock = threading.Lock()
_machine_bytes = _machine_bytes()
__slots__ = ('__id')
_type_marker = 7
def __init__(self, oid=None):
"""Initialize a new ObjectId.
If `oid` is ``None``, create a new (unique) ObjectId. If `oid`
is an instance of (:class:`basestring` (:class:`str` or :class:`bytes`
in python 3), :class:`ObjectId`) validate it and use that. Otherwise,
a :class:`TypeError` is raised. If `oid` is invalid,
:class:`~bson.errors.InvalidId` is raised.
:Parameters:
- `oid` (optional): a valid ObjectId (12 byte binary or 24 character
hex string)
.. versionadded:: 1.2.1
The `oid` parameter can be a ``unicode`` instance (that contains
only hexadecimal digits).
.. mongodoc:: objectids
"""
if oid is None:
self.__generate()
else:
self.__validate(oid)
@classmethod
def from_datetime(cls, generation_time):
"""Create a dummy ObjectId instance with a specific generation time.
This method is useful for doing range queries on a field
containing :class:`ObjectId` instances.
.. warning::
It is not safe to insert a document containing an ObjectId
generated using this method. This method deliberately
eliminates the uniqueness guarantee that ObjectIds
generally provide. ObjectIds generated with this method
should be used exclusively in queries.
`generation_time` will be converted to UTC. Naive datetime
instances will be treated as though they already contain UTC.
An example using this helper to get documents where ``"_id"``
was generated before January 1, 2010 would be:
>>> gen_time = datetime.datetime(2010, 1, 1)
>>> dummy_id = ObjectId.from_datetime(gen_time)
>>> result = collection.find({"_id": {"$lt": dummy_id}})
:Parameters:
- `generation_time`: :class:`~datetime.datetime` to be used
as the generation time for the resulting ObjectId.
.. versionchanged:: 1.8
Properly handle timezone aware values for
`generation_time`.
.. versionadded:: 1.6
"""
if generation_time.utcoffset() is not None:
generation_time = generation_time - generation_time.utcoffset()
ts = calendar.timegm(generation_time.timetuple())
oid = struct.pack(">i", int(ts)) + ZERO * 8
return cls(oid)
@classmethod
def is_valid(cls, oid):
"""Checks if a `oid` string is valid or not.
:Parameters:
- `oid`: the object id to validate
.. versionadded:: 2.3
"""
try:
ObjectId(oid)
return True
except (InvalidId, TypeError):
return False
def __generate(self):
"""Generate a new value for this ObjectId.
"""
oid = EMPTY
# 4 bytes current time
oid += struct.pack(">i", int(time.time()))
# 3 bytes machine
oid += ObjectId._machine_bytes
# 2 bytes pid
oid += struct.pack(">H", os.getpid() % 0xFFFF)
# 3 bytes inc
ObjectId._inc_lock.acquire()
oid += struct.pack(">i", ObjectId._inc)[1:4]
ObjectId._inc = (ObjectId._inc + 1) % 0xFFFFFF
ObjectId._inc_lock.release()
self.__id = oid
def __validate(self, oid):
"""Validate and use the given id for this ObjectId.
Raises TypeError if id is not an instance of
(:class:`basestring` (:class:`str` or :class:`bytes`
in python 3), ObjectId) and InvalidId if it is not a
valid ObjectId.
:Parameters:
- `oid`: a valid ObjectId
"""
if isinstance(oid, ObjectId):
self.__id = oid.__id
elif isinstance(oid, string_types):
if len(oid) == 12:
if isinstance(oid, binary_type):
self.__id = oid
else:
raise InvalidId("%s is not a valid ObjectId" % oid)
elif len(oid) == 24:
try:
self.__id = bytes_from_hex(oid)
except (TypeError, ValueError):
raise InvalidId("%s is not a valid ObjectId" % oid)
else:
raise InvalidId("%s is not a valid ObjectId" % oid)
else:
raise TypeError("id must be an instance of (%s, %s, ObjectId), "
"not %s" % (binary_type.__name__,
text_type.__name__, type(oid)))
@property
def binary(self):
"""12-byte binary representation of this ObjectId.
"""
return self.__id
@property
def generation_time(self):
"""A :class:`datetime.datetime` instance representing the time of
generation for this :class:`ObjectId`.
The :class:`datetime.datetime` is timezone aware, and
represents the generation time in UTC. It is precise to the
second.
.. versionchanged:: 1.8
Now return an aware datetime instead of a naive one.
.. versionadded:: 1.2
"""
t = struct.unpack(">i", self.__id[0:4])[0]
return datetime.datetime.fromtimestamp(t, utc)
def __getstate__(self):
"""return value of object for pickling.
needed explicitly because __slots__() defined.
"""
return self.__id
def __setstate__(self, value):
"""explicit state set from pickling
"""
        # Provide backwards compatibility with OIDs
# pickled with pymongo-1.9 or older.
if isinstance(value, dict):
oid = value["_ObjectId__id"]
else:
oid = value
# ObjectIds pickled in python 2.x used `str` for __id.
# In python 3.x this has to be converted to `bytes`
# by encoding latin-1.
if PY3 and isinstance(oid, text_type):
self.__id = oid.encode('latin-1')
else:
self.__id = oid
def __str__(self):
if PY3:
return binascii.hexlify(self.__id).decode()
return binascii.hexlify(self.__id)
def __repr__(self):
return "ObjectId('%s')" % (str(self),)
def __eq__(self, other):
if isinstance(other, ObjectId):
return self.__id == other.__id
return NotImplemented
def __ne__(self, other):
if isinstance(other, ObjectId):
return self.__id != other.__id
return NotImplemented
def __lt__(self, other):
if isinstance(other, ObjectId):
return self.__id < other.__id
return NotImplemented
def __le__(self, other):
if isinstance(other, ObjectId):
return self.__id <= other.__id
return NotImplemented
def __gt__(self, other):
if isinstance(other, ObjectId):
return self.__id > other.__id
return NotImplemented
def __ge__(self, other):
if isinstance(other, ObjectId):
return self.__id >= other.__id
return NotImplemented
def __hash__(self):
"""Get a hash value for this :class:`ObjectId`.
.. versionadded:: 1.1
"""
return hash(self.__id)
| {
"content_hash": "aaf84c08e520cdcd8b84fe9519edf2a4",
"timestamp": "",
"source": "github",
"line_count": 277,
"max_line_length": 78,
"avg_line_length": 30.63898916967509,
"alnum_prop": 0.5638034641215978,
"repo_name": "Mitali-Sodhi/CodeLingo",
"id": "bd8c7e77aedc8d0cb4f3ca445f64d2dee6d4832b",
"size": "9064",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "Dataset/python/objectid.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "9681846"
},
{
"name": "C#",
"bytes": "1741915"
},
{
"name": "C++",
"bytes": "5686017"
},
{
"name": "HTML",
"bytes": "11812193"
},
{
"name": "Java",
"bytes": "11198971"
},
{
"name": "JavaScript",
"bytes": "21693468"
},
{
"name": "M",
"bytes": "61627"
},
{
"name": "Objective-C",
"bytes": "4085820"
},
{
"name": "Perl",
"bytes": "193472"
},
{
"name": "Perl6",
"bytes": "176248"
},
{
"name": "Python",
"bytes": "10296284"
},
{
"name": "Ruby",
"bytes": "1050136"
}
],
"symlink_target": ""
} |
import json
import urlparse
from django.http import HttpResponse
from django.http import HttpResponseRedirect, QueryDict
from django.utils.translation import ugettext as _
from django.views.generic.base import TemplateView
from django.core.exceptions import ObjectDoesNotExist
from oauth2.models import Client
from . import constants, scope
class OAuthError(Exception):
"""
Exception to throw inside any views defined in :attr:`provider.views`.
Any :attr:`OAuthError` thrown will be signalled to the API consumer.
:attr:`OAuthError` expects a dictionary as its first argument outlining the
type of error that occured.
:example:
::
raise OAuthError({'error': 'invalid_request'})
The different types of errors are outlined in :rfc:`4.2.2.1` and
:rfc:`5.2`.
"""
class OAuthView(TemplateView):
"""
Base class for any view dealing with the OAuth flow. This class overrides
the dispatch method of :attr:`TemplateView` to add no-caching headers to
every response as outlined in :rfc:`5.1`.
"""
def dispatch(self, request, *args, **kwargs):
response = super(OAuthView, self).dispatch(request, *args, **kwargs)
response['Cache-Control'] = 'no-store'
response['Pragma'] = 'no-cache'
return response
class Mixin(object):
"""
Mixin providing common methods required in the OAuth view defined in
:attr:`provider.views`.
"""
def get_data(self, request, key='params'):
"""
Return stored data from the session store.
:param key: `str` The key under which the data was stored.
"""
return request.session.get('%s:%s' % (constants.SESSION_KEY, key))
def cache_data(self, request, data, key='params'):
"""
Cache data in the session store.
:param request: :attr:`django.http.HttpRequest`
:param data: Arbitrary data to store.
:param key: `str` The key under which to store the data.
"""
request.session['%s:%s' % (constants.SESSION_KEY, key)] = data
def clear_data(self, request):
"""
Clear all OAuth related data from the session store.
"""
for key in request.session.keys():
if key.startswith(constants.SESSION_KEY):
del request.session[key]
def authenticate(self, request):
"""
Authenticate a client against all the backends configured in
:attr:`authentication`.
"""
for backend in self.authentication:
client = backend().authenticate(request)
if client is not None:
return client
return None
class Capture(OAuthView, Mixin):
"""
As stated in section :rfc:`3.1.2.5` this view captures all the request
parameters and redirects to another URL to avoid any leakage of request
parameters to potentially harmful JavaScripts.
This application assumes that whatever web-server is used as front-end will
handle SSL transport.
If you want strict enforcement of secure communication at application
level, set :attr:`settings.OAUTH_ENFORCE_SECURE` to ``True``.
The actual implementation is required to override :meth:`get_redirect_url`.
"""
template_name = 'provider/authorize.html'
def get_redirect_url(self, request):
"""
Return a redirect to a URL where the resource owner (see :rfc:`1`)
authorizes the client (also :rfc:`1`).
:return: :class:`django.http.HttpResponseRedirect`
"""
raise NotImplementedError
def handle(self, request, data):
self.cache_data(request, data)
if constants.ENFORCE_SECURE and not request.is_secure():
return self.render_to_response({'error': 'access_denied',
'error_description': _("A secure connection is required."),
'next': None},
status=400)
return HttpResponseRedirect(self.get_redirect_url(request))
def get(self, request):
return self.handle(request, request.GET)
def post(self, request):
return self.handle(request, request.POST)
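# Hedged sketch (not part of the original module): a concrete Capture subclass only
# needs to say where the resource owner is redirected to confirm authorization; the
# URL used here is hypothetical.
class ExampleCapture(Capture):
    def get_redirect_url(self, request):
        return '/oauth2/authorize/confirm'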
class Authorize(OAuthView, Mixin):
"""
View to handle the client authorization as outlined in :rfc:`4`.
Implementation must override a set of methods:
* :attr:`get_redirect_url`
* :attr:`get_request_form`
* :attr:`get_authorization_form`
* :attr:`get_client`
* :attr:`save_authorization`
:attr:`Authorize` renders the ``provider/authorize.html`` template to
display the authorization form.
On successful authorization, it redirects the user back to the defined
client callback as defined in :rfc:`4.1.2`.
On authorization fail :attr:`Authorize` displays an error message to the
user with a modified redirect URL to the callback including the error
and possibly description of the error as defined in :rfc:`4.1.2.1`.
"""
template_name = 'provider/authorize.html'
def get_redirect_url(self, request):
"""
:return: ``str`` - The client URL to display in the template after
authorization succeeded or failed.
"""
raise NotImplementedError
def get_request_form(self, client, data):
"""
Return a form that is capable of validating the request data captured
by the :class:`Capture` view.
The form must accept a keyword argument ``client``.
"""
raise NotImplementedError
def get_authorization_form(self, request, client, data, client_data):
"""
Return a form that is capable of authorizing the client to the resource
owner.
:return: :attr:`django.forms.Form`
"""
raise NotImplementedError
def get_client(self, client_id):
"""
Return a client object from a given client identifier. Return ``None``
if no client is found. An error will be displayed to the resource owner
and presented to the client upon the final redirect.
"""
raise NotImplementedError
def save_authorization(self, request, client, form, client_data):
"""
Save the authorization that the user granted to the client, involving
the creation of a time limited authorization code as outlined in
:rfc:`4.1.2`.
Should return ``None`` in case authorization is not granted.
Should return a string representing the authorization code grant.
:return: ``None``, ``str``
"""
raise NotImplementedError
def _validate_client(self, request, data):
"""
:return: ``tuple`` - ``(client or False, data or error)``
"""
client = self.get_client(data.get('client_id'))
if client is None:
raise OAuthError({
'error': 'unauthorized_client',
'error_description': _("An unauthorized client tried to access"
" your resources.")
})
form = self.get_request_form(client, data)
if not form.is_valid():
raise OAuthError(form.errors)
return client, form.cleaned_data
def error_response(self, request, error, **kwargs):
"""
Return an error to be displayed to the resource owner if anything goes
awry. Errors can include invalid clients, authorization denials and
other edge cases such as a wrong ``redirect_uri`` in the authorization
request.
:param request: :attr:`django.http.HttpRequest`
:param error: ``dict``
The different types of errors are outlined in :rfc:`4.2.2.1`
"""
ctx = {}
ctx.update(error)
# If we got a malicious redirect_uri or client_id, remove all the
# cached data and tell the resource owner. We will *not* redirect back
# to the URL.
if error['error'] in ['redirect_uri', 'unauthorized_client']:
ctx.update(next='/')
return self.render_to_response(ctx, **kwargs)
ctx.update(next=self.get_redirect_url(request))
return self.render_to_response(ctx, **kwargs)
def handle(self, request, post_data=None):
data = self.get_data(request)
if data is None:
return self.error_response(request, {
'error': 'expired_authorization',
'error_description': _('Authorization session has expired.')})
try:
client, data = self._validate_client(request, data)
except OAuthError, e:
return self.error_response(request, e.args[0], status=400)
authorization_form = self.get_authorization_form(request, client,
post_data, data)
if not authorization_form.is_bound or not authorization_form.is_valid():
return self.render_to_response({
'client': client,
'form': authorization_form,
'oauth_data': data, })
code = self.save_authorization(request, client,
authorization_form, data)
# be sure to serialize any objects that aren't natively json
# serializable because these values are stored as session data
self.cache_data(request, data)
self.cache_data(request, code, "code")
self.cache_data(request, client.serialize(), "client")
return HttpResponseRedirect(self.get_redirect_url(request))
def get(self, request):
return self.handle(request, None)
def post(self, request):
return self.handle(request, request.POST)
class Redirect(OAuthView, Mixin):
"""
Redirect the user back to the client with the right query parameters set.
This can be either parameters indicating success or parameters indicating
an error.
"""
def error_response(self, error, mimetype='application/json', status=400,
**kwargs):
"""
Return an error response to the client with default status code of
*400* stating the error as outlined in :rfc:`5.2`.
"""
return HttpResponse(json.dumps(error), content_type=mimetype,
status=status, **kwargs)
def get(self, request):
data = self.get_data(request)
code = self.get_data(request, "code")
error = self.get_data(request, "error")
client = self.get_data(request, "client")
# client must be properly deserialized to become a valid instance
client = Client.deserialize(client)
# this is an edge case that is caused by making a request with no data
# it should only happen if this view is called manually, out of the
# normal capture-authorize-redirect flow.
if data is None or client is None:
return self.error_response({
'error': 'invalid_data',
'error_description': _('Data has not been captured')})
redirect_uri = data.get('redirect_uri', None) or client.redirect_uri
parsed = urlparse.urlparse(redirect_uri)
query = QueryDict('', mutable=True)
if 'state' in data:
query['state'] = data['state']
if error is not None:
query.update(error)
elif code is None:
query['error'] = 'access_denied'
else:
query['code'] = code
parsed = parsed[:4] + (query.urlencode(), '')
redirect_uri = urlparse.ParseResult(*parsed).geturl()
self.clear_data(request)
return HttpResponseRedirect(redirect_uri)
class AccessToken(OAuthView, Mixin):
"""
:attr:`AccessToken` handles creation and refreshing of access tokens.
Implementations must implement a number of methods:
* :attr:`get_authorization_code_grant`
* :attr:`get_refresh_token_grant`
* :attr:`get_password_grant`
* :attr:`get_access_token`
* :attr:`create_access_token`
* :attr:`create_refresh_token`
* :attr:`invalidate_grant`
* :attr:`invalidate_access_token`
* :attr:`invalidate_refresh_token`
The default implementation supports the grant types defined in
:attr:`grant_types`.
According to :rfc:`4.4.2` this endpoint too must support secure
communication. For strict enforcement of secure communication at
application level set :attr:`settings.OAUTH_ENFORCE_SECURE` to ``True``.
According to :rfc:`3.2` we can only accept POST requests.
Returns with a status code of *400* in case of errors. *200* in case of
success.
"""
authentication = ()
"""
Authentication backends used to authenticate a particular client.
"""
grant_types = ['authorization_code', 'refresh_token', 'password',
'client_credentials']
"""
The default grant types supported by this view.
"""
def get_authorization_code_grant(self, request, data, client):
"""
Return the grant associated with this request or an error dict.
:return: ``tuple`` - ``(True or False, grant or error_dict)``
"""
raise NotImplementedError
def get_refresh_token_grant(self, request, data, client):
"""
Return the refresh token associated with this request or an error dict.
:return: ``tuple`` - ``(True or False, token or error_dict)``
"""
raise NotImplementedError
def get_password_grant(self, request, data, client):
"""
Return a user associated with this request or an error dict.
:return: ``tuple`` - ``(True or False, user or error_dict)``
"""
raise NotImplementedError
def get_client_credentials_grant(self, request, data, client):
"""
Return the optional parameters (scope) associated with this request.
:return: ``tuple`` - ``(True or False, options)``
"""
raise NotImplementedError
def get_access_token(self, request, user, scope, client):
"""
Override to handle fetching of an existing access token.
:return: ``object`` - Access token
"""
raise NotImplementedError
def create_access_token(self, request, user, scope, client):
"""
Override to handle access token creation.
:return: ``object`` - Access token
"""
raise NotImplementedError
def create_refresh_token(self, request, user, scope, access_token, client):
"""
Override to handle refresh token creation.
:return: ``object`` - Refresh token
"""
raise NotImplementedError
def invalidate_grant(self, grant):
"""
Override to handle grant invalidation. A grant is invalidated right
after creating an access token from it.
:return None:
"""
raise NotImplementedError
def invalidate_refresh_token(self, refresh_token):
"""
Override to handle refresh token invalidation. When requesting a new
access token from a refresh token, the old one is *always* invalidated.
:return None:
"""
raise NotImplementedError
def invalidate_access_token(self, access_token):
"""
Override to handle access token invalidation. When a new access token
is created from a refresh token, the old one is *always* invalidated.
:return None:
"""
raise NotImplementedError
def error_response(self, error, mimetype='application/json', status=400,
**kwargs):
"""
Return an error response to the client with default status code of
*400* stating the error as outlined in :rfc:`5.2`.
"""
return HttpResponse(json.dumps(error), content_type=mimetype,
status=status, **kwargs)
def access_token_response(self, access_token):
"""
Returns a successful response after creating the access token
as defined in :rfc:`5.1`.
"""
response_data = {
'access_token': access_token.token,
'token_type': constants.TOKEN_TYPE,
'expires_in': access_token.get_expire_delta(),
'scope': ' '.join(scope.names(access_token.scope)),
}
# Not all access_tokens are given a refresh_token
# (for example, public clients doing password auth)
try:
rt = access_token.refresh_token
response_data['refresh_token'] = rt.token
except ObjectDoesNotExist:
pass
return HttpResponse(
json.dumps(response_data), content_type='application/json'
)
def authorization_code(self, request, data, client):
"""
Handle ``grant_type=authorization_code`` requests as defined in
:rfc:`4.1.3`.
"""
grant = self.get_authorization_code_grant(request, request.POST,
client)
if constants.SINGLE_ACCESS_TOKEN:
at = self.get_access_token(request, grant.user, grant.scope, client)
else:
at = self.create_access_token(request, grant.user, grant.scope, client)
rt = self.create_refresh_token(request, grant.user, grant.scope, at,
client)
self.invalidate_grant(grant)
return self.access_token_response(at)
def refresh_token(self, request, data, client):
"""
Handle ``grant_type=refresh_token`` requests as defined in :rfc:`6`.
"""
rt = self.get_refresh_token_grant(request, data, client)
# this must be called first in case we need to purge expired tokens
self.invalidate_refresh_token(rt)
self.invalidate_access_token(rt.access_token)
at = self.create_access_token(request, rt.user, rt.access_token.scope,
client)
rt = self.create_refresh_token(request, at.user, at.scope, at, client)
return self.access_token_response(at)
def password(self, request, data, client):
"""
Handle ``grant_type=password`` requests as defined in :rfc:`4.3`.
"""
data = self.get_password_grant(request, data, client)
user = data.get('user')
scope = data.get('scope')
if constants.SINGLE_ACCESS_TOKEN:
at = self.get_access_token(request, user, scope, client)
else:
at = self.create_access_token(request, user, scope, client)
# Public clients don't get refresh tokens
if client.client_type != 1:
rt = self.create_refresh_token(request, user, scope, at, client)
return self.access_token_response(at)
def client_credentials(self, request, data, client):
"""
Handle ``grant_type=client_credentials`` requests as defined in
:rfc:`4.4`.
"""
data = self.get_client_credentials_grant(request, data, client)
scope = data.get('scope')
if constants.SINGLE_ACCESS_TOKEN:
at = self.get_access_token(request, client.user, scope, client)
else:
at = self.create_access_token(request, client.user, scope, client)
rt = self.create_refresh_token(request, client.user, scope, at,
client)
return self.access_token_response(at)
def get_handler(self, grant_type):
"""
        Return a function or method that is capable of handling the ``grant_type``
        requested by the client, or return ``None`` to indicate that this
        grant type is not supported, resulting in an error response.
"""
if grant_type == 'authorization_code':
return self.authorization_code
elif grant_type == 'refresh_token':
return self.refresh_token
elif grant_type == 'password':
return self.password
elif grant_type == 'client_credentials':
return self.client_credentials
return None
def get(self, request):
"""
As per :rfc:`3.2` the token endpoint *only* supports POST requests.
Returns an error response.
"""
return self.error_response({
'error': 'invalid_request',
'error_description': _("Only POST requests allowed.")})
def post(self, request):
"""
As per :rfc:`3.2` the token endpoint *only* supports POST requests.
"""
if constants.ENFORCE_SECURE and not request.is_secure():
return self.error_response({
'error': 'invalid_request',
'error_description': _("A secure connection is required.")})
        if 'grant_type' not in request.POST:
return self.error_response({
'error': 'invalid_request',
'error_description': _("No 'grant_type' included in the "
"request.")})
grant_type = request.POST['grant_type']
if grant_type not in self.grant_types:
return self.error_response({'error': 'unsupported_grant_type'})
client = self.authenticate(request)
if client is None:
return self.error_response({'error': 'invalid_client'})
handler = self.get_handler(grant_type)
try:
return handler(request, request.POST, client)
except OAuthError, e:
return self.error_response(e.args[0])
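# --- Illustrative sketch, not part of the original module ---
# The AccessToken view above dispatches on the ``grant_type`` POST field and
# answers with the JSON structure built in ``access_token_response``. The
# payload below is an example of what a client would POST for the
# ``authorization_code`` grant; every value is a hypothetical placeholder.
EXAMPLE_TOKEN_REQUEST = {
    'grant_type': 'authorization_code',
    'code': 'example-authorization-code',
    'client_id': 'example-client-id',
    'client_secret': 'example-client-secret',
    'redirect_uri': 'https://example.com/oauth/callback',
}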
| {
"content_hash": "6cddf5249e7550a53a0013fb75f1c144",
"timestamp": "",
"source": "github",
"line_count": 628,
"max_line_length": 83,
"avg_line_length": 33.92993630573248,
"alnum_prop": 0.6146987047118453,
"repo_name": "Kalyzee/django-oauth2-provider",
"id": "fc177bcf97be8a963a6d9c02ead2394e4a291bfa",
"size": "21308",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "provider/views.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1407"
},
{
"name": "Python",
"bytes": "118747"
},
{
"name": "Shell",
"bytes": "408"
}
],
"symlink_target": ""
} |
import os
import time
import warnings
from asgiref.local import Local
from django.apps import apps
from django.core.exceptions import ImproperlyConfigured
from django.core.signals import setting_changed
from django.db import connections, router
from django.db.utils import ConnectionRouter
from django.dispatch import Signal, receiver
from django.utils import timezone
from django.utils.formats import FORMAT_SETTINGS, reset_format_cache
from django.utils.functional import empty
template_rendered = Signal()
# Most setting_changed receivers are supposed to be added below,
# except for cases where the receiver is related to a contrib app.
# Settings that may not work well when using 'override_settings' (#19031)
COMPLEX_OVERRIDE_SETTINGS = {'DATABASES'}
@receiver(setting_changed)
def clear_cache_handlers(**kwargs):
if kwargs['setting'] == 'CACHES':
from django.core.cache import caches, close_caches
close_caches()
caches._caches = Local()
@receiver(setting_changed)
def update_installed_apps(**kwargs):
if kwargs['setting'] == 'INSTALLED_APPS':
# Rebuild any AppDirectoriesFinder instance.
from django.contrib.staticfiles.finders import get_finder
get_finder.cache_clear()
# Rebuild management commands cache
from django.core.management import get_commands
get_commands.cache_clear()
# Rebuild get_app_template_dirs cache.
from django.template.utils import get_app_template_dirs
get_app_template_dirs.cache_clear()
# Rebuild translations cache.
from django.utils.translation import trans_real
trans_real._translations = {}
@receiver(setting_changed)
def update_connections_time_zone(**kwargs):
if kwargs['setting'] == 'TIME_ZONE':
# Reset process time zone
if hasattr(time, 'tzset'):
if kwargs['value']:
os.environ['TZ'] = kwargs['value']
else:
os.environ.pop('TZ', None)
time.tzset()
# Reset local time zone cache
timezone.get_default_timezone.cache_clear()
# Reset the database connections' time zone
if kwargs['setting'] in {'TIME_ZONE', 'USE_TZ'}:
for conn in connections.all():
try:
del conn.timezone
except AttributeError:
pass
try:
del conn.timezone_name
except AttributeError:
pass
conn.ensure_timezone()
@receiver(setting_changed)
def clear_routers_cache(**kwargs):
if kwargs['setting'] == 'DATABASE_ROUTERS':
router.routers = ConnectionRouter().routers
@receiver(setting_changed)
def reset_template_engines(**kwargs):
if kwargs['setting'] in {
'TEMPLATES',
'DEBUG',
'INSTALLED_APPS',
}:
from django.template import engines
try:
del engines.templates
except AttributeError:
pass
engines._templates = None
engines._engines = {}
from django.template.engine import Engine
Engine.get_default.cache_clear()
from django.forms.renderers import get_default_renderer
get_default_renderer.cache_clear()
@receiver(setting_changed)
def clear_serializers_cache(**kwargs):
if kwargs['setting'] == 'SERIALIZATION_MODULES':
from django.core import serializers
serializers._serializers = {}
@receiver(setting_changed)
def language_changed(**kwargs):
if kwargs['setting'] in {'LANGUAGES', 'LANGUAGE_CODE', 'LOCALE_PATHS'}:
from django.utils.translation import trans_real
trans_real._default = None
trans_real._active = Local()
if kwargs['setting'] in {'LANGUAGES', 'LOCALE_PATHS'}:
from django.utils.translation import trans_real
trans_real._translations = {}
trans_real.check_for_language.cache_clear()
@receiver(setting_changed)
def localize_settings_changed(**kwargs):
if kwargs['setting'] in FORMAT_SETTINGS or kwargs['setting'] == 'USE_THOUSAND_SEPARATOR':
reset_format_cache()
@receiver(setting_changed)
def file_storage_changed(**kwargs):
if kwargs['setting'] == 'DEFAULT_FILE_STORAGE':
from django.core.files.storage import default_storage
default_storage._wrapped = empty
@receiver(setting_changed)
def complex_setting_changed(**kwargs):
if kwargs['enter'] and kwargs['setting'] in COMPLEX_OVERRIDE_SETTINGS:
# Considering the current implementation of the signals framework,
# this stacklevel shows the line containing the override_settings call.
warnings.warn("Overriding setting %s can lead to unexpected behavior."
% kwargs['setting'], stacklevel=6)
@receiver(setting_changed)
def root_urlconf_changed(**kwargs):
if kwargs['setting'] == 'ROOT_URLCONF':
from django.urls import clear_url_caches, set_urlconf
clear_url_caches()
set_urlconf(None)
@receiver(setting_changed)
def static_storage_changed(**kwargs):
if kwargs['setting'] in {
'STATICFILES_STORAGE',
'STATIC_ROOT',
'STATIC_URL',
}:
from django.contrib.staticfiles.storage import staticfiles_storage
staticfiles_storage._wrapped = empty
@receiver(setting_changed)
def static_finders_changed(**kwargs):
if kwargs['setting'] in {
'STATICFILES_DIRS',
'STATIC_ROOT',
}:
from django.contrib.staticfiles.finders import get_finder
get_finder.cache_clear()
@receiver(setting_changed)
def auth_password_validators_changed(**kwargs):
if kwargs['setting'] == 'AUTH_PASSWORD_VALIDATORS':
from django.contrib.auth.password_validation import get_default_password_validators
get_default_password_validators.cache_clear()
@receiver(setting_changed)
def user_model_swapped(**kwargs):
if kwargs['setting'] == 'AUTH_USER_MODEL':
apps.clear_cache()
try:
from django.contrib.auth import get_user_model
UserModel = get_user_model()
except ImproperlyConfigured:
# Some tests set an invalid AUTH_USER_MODEL.
pass
else:
from django.contrib.auth import backends
backends.UserModel = UserModel
from django.contrib.auth import forms
forms.UserModel = UserModel
from django.contrib.auth.handlers import modwsgi
modwsgi.UserModel = UserModel
from django.contrib.auth.management.commands import changepassword
changepassword.UserModel = UserModel
from django.contrib.auth import views
views.UserModel = UserModel
| {
"content_hash": "4037b1d8933715606a4a12a77a553f21",
"timestamp": "",
"source": "github",
"line_count": 206,
"max_line_length": 93,
"avg_line_length": 32.52427184466019,
"alnum_prop": 0.6607462686567164,
"repo_name": "theo-l/django",
"id": "2ddb425f4c7bca44188a5a73624060e6bd568b59",
"size": "6700",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "django/test/signals.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "54515"
},
{
"name": "HTML",
"bytes": "172728"
},
{
"name": "JavaScript",
"bytes": "247742"
},
{
"name": "Makefile",
"bytes": "125"
},
{
"name": "Python",
"bytes": "11279991"
},
{
"name": "Shell",
"bytes": "809"
},
{
"name": "Smarty",
"bytes": "130"
}
],
"symlink_target": ""
} |
"""JSON implementations of cataloging searches."""
# pylint: disable=no-init
# Numerous classes don't require __init__.
# pylint: disable=too-many-public-methods,too-few-public-methods
# Number of methods are defined in specification
# pylint: disable=protected-access
# Access to protected methods allowed in package json package scope
# pylint: disable=too-many-ancestors
# Inheritance defined in specification
from . import objects
from . import queries
from .. import utilities
from ..osid import searches as osid_searches
from ..primitives import Id
from ..utilities import get_registry
from dlkit.abstract_osid.cataloging import searches as abc_cataloging_searches
from dlkit.abstract_osid.osid import errors
class CatalogSearch(abc_cataloging_searches.CatalogSearch, osid_searches.OsidSearch):
"""The search interface for governing the search query for ``Catalogs``."""
def __init__(self, runtime):
self._namespace = 'cataloging.Catalog'
self._runtime = runtime
record_type_data_sets = get_registry('RESOURCE_RECORD_TYPES', runtime)
self._record_type_data_sets = record_type_data_sets
self._all_supported_record_type_data_sets = record_type_data_sets
self._all_supported_record_type_ids = []
self._id_list = None
for data_set in record_type_data_sets:
self._all_supported_record_type_ids.append(str(Id(**record_type_data_sets[data_set])))
osid_searches.OsidSearch.__init__(self, runtime)
@utilities.arguments_not_none
def search_among_catalogs(self, catalog_ids):
"""Execute this search among the given list of catalogs.
arg: catalog_ids (osid.id.IdList): list of catalogs
raise: NullArgument - ``catalog_ids`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
self._id_list = catalog_ids
@utilities.arguments_not_none
def order_catalog_results(self, catalog_search_order):
"""Specify an ordering to the search results.
arg: catalog_search_order
(osid.cataloging.CatalogSearchOrder): catalog search
order
raise: NullArgument - ``catalog_search_order`` is ``null``
raise: Unsupported - ``catalog_search_order`` is not of this
service
*compliance: mandatory -- This method must be implemented.*
"""
raise errors.Unimplemented()
@utilities.arguments_not_none
def get_catalog_search_record(self, catalog_search_record_type):
"""Gets the catalog search record corresponding to the given catalog search record ``Type``.
This method is used to retrieve an object implementing the
requested record.
arg: catalog_search_record_type (osid.type.Type): a catalog
search record type
return: (osid.cataloging.records.CatalogSearchRecord) - the
catalog search record
raise: NullArgument - ``catalog_search_record_type`` is
``null``
raise: OperationFailed - unable to complete request
raise: Unsupported -
``has_record_type(catalog_search_record_type)`` is
``false``
*compliance: mandatory -- This method must be implemented.*
"""
raise errors.Unimplemented()
class CatalogSearchResults(abc_cataloging_searches.CatalogSearchResults, osid_searches.OsidSearchResults):
"""This interface provides a means to capture results of a search."""
def __init__(self, results, query_terms, runtime):
# if you don't iterate, then .count() on the cursor is an inaccurate representation of limit / skip
# self._results = [r for r in results]
self._namespace = 'cataloging.Catalog'
self._results = results
self._query_terms = query_terms
self._runtime = runtime
self.retrieved = False
def get_catalogs(self):
"""Gets the catalog list resulting from the search.
return: (osid.cataloging.CatalogList) - the catalogs list
raise: IllegalState - list has already been retrieved
*compliance: mandatory -- This method must be implemented.*
"""
if self.retrieved:
raise errors.IllegalState('List has already been retrieved.')
self.retrieved = True
return objects.CatalogList(self._results, runtime=self._runtime)
catalogs = property(fget=get_catalogs)
def get_catalog_query_inspector(self):
"""Gets the inspector for the query to examine the terms used in the search.
return: (osid.cataloging.CatalogQueryInspector) - the catalog
query inspector
*compliance: mandatory -- This method must be implemented.*
"""
return queries.CatalogQueryInspector(self._query_terms, runtime=self._runtime)
catalog_query_inspector = property(fget=get_catalog_query_inspector)
@utilities.arguments_not_none
def get_catalog_search_results_record(self, catalog_search_record_type):
"""Gets the catalog search results record corresponding to the given catalog search record ``Type``.
This method is used to retrieve an object implementing the
requested record.
arg: catalog_search_record_type (osid.type.Type): a catalog
search record type
return: (osid.cataloging.records.CatalogSearchResultsRecord) -
the catalog search results record
raise: NullArgument - ``catalog_search_record_type`` is
``null``
raise: OperationFailed - unable to complete request
raise: Unsupported -
``has_record_type(catalog_search_record_type)`` is
``false``
*compliance: mandatory -- This method must be implemented.*
"""
raise errors.Unimplemented()
| {
"content_hash": "f42ae0a148b983b38db23f9a29a3fc5b",
"timestamp": "",
"source": "github",
"line_count": 144,
"max_line_length": 108,
"avg_line_length": 40.90972222222222,
"alnum_prop": 0.6610083177728738,
"repo_name": "mitsei/dlkit",
"id": "3901de850b16c54978427740f41363489e47c4f0",
"size": "5891",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dlkit/json_/cataloging/searches.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "25170465"
},
{
"name": "TeX",
"bytes": "1088"
}
],
"symlink_target": ""
} |
""" This is the Bokeh charts testing interface.
"""
#-----------------------------------------------------------------------------
# Copyright (c) 2012 - 2014, Continuum Analytics, Inc. All rights reserved.
#
# Powered by the Bokeh Development Team.
#
# The full license is in the file LICENSE.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from __future__ import absolute_import
from bokeh.charts.builders.histogram_builder import HistogramBuilder
#-----------------------------------------------------------------------------
# Classes and functions
#-----------------------------------------------------------------------------
def test_series_input(test_data):
hist_builder = HistogramBuilder(test_data.auto_data.mpg)
hist_builder.create()
assert len(hist_builder.comp_glyphs) > 0
| {
"content_hash": "91e05a96161b08a6e0f879514a957d32",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 78,
"avg_line_length": 37.17857142857143,
"alnum_prop": 0.40634005763688763,
"repo_name": "quasiben/bokeh",
"id": "81719bdbbdc647e9f16b84bcb73c4a73a0aba4f9",
"size": "1041",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "bokeh/charts/builders/tests/test_histogram_builder.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "448001"
},
{
"name": "CoffeeScript",
"bytes": "2130601"
},
{
"name": "JavaScript",
"bytes": "2530410"
},
{
"name": "Python",
"bytes": "1056239"
},
{
"name": "Scala",
"bytes": "28977"
},
{
"name": "Shell",
"bytes": "13082"
}
],
"symlink_target": ""
} |
"""ialm_test.py
"""
import os
import sys
import unittest
import numpy as np
sys.path.insert(0, os.path.join(os.path.dirname(os.path.realpath(__file__)),'..'))
import core.ialm as ialm
class IALMTesting(unittest.TestCase):
"""Testcase for the IALM method.
"""
def setUp(self):
"""Load data.
"""
        self._data = np.genfromtxt("im_outliers.dat")
def test_recover(self):
"""Test recovery from outliers.
"""
# run recovery
lr, sp, _ = ialm.recover(self._data, None)
# load baseline (no outliers)
baseline = np.genfromtxt("im_baseline.dat")
# Frobenius norm between recovered mat. and baseline
d = np.linalg.norm(np.round(lr)-baseline, ord='fro')
self.assertTrue(np.allclose(d,0.0))
if __name__ == '__main__':
unittest.main()
| {
"content_hash": "d51d9efe112b70ea9c850face140a5aa",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 82,
"avg_line_length": 22.236842105263158,
"alnum_prop": 0.5952662721893491,
"repo_name": "matthieuheitz/TubeTK",
"id": "faf46e907c53fc4b546ab526c344ea5873f9dd25",
"size": "845",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "Base/Python/pyrpca/tests/ialm_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "114312"
},
{
"name": "C++",
"bytes": "3293954"
},
{
"name": "CSS",
"bytes": "17428"
},
{
"name": "Python",
"bytes": "283223"
},
{
"name": "Shell",
"bytes": "34482"
},
{
"name": "XSLT",
"bytes": "8636"
}
],
"symlink_target": ""
} |
"""
Implements the plugin API for MkDocs.
"""
from __future__ import annotations
import logging
import sys
from typing import (
TYPE_CHECKING,
Any,
Callable,
Dict,
Generic,
List,
MutableMapping,
Optional,
Tuple,
Type,
TypeVar,
overload,
)
if sys.version_info >= (3, 10):
from importlib.metadata import EntryPoint, entry_points
else:
from importlib_metadata import EntryPoint, entry_points
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
import jinja2.environment
from mkdocs import utils
from mkdocs.config.base import Config, ConfigErrors, ConfigWarnings, LegacyConfig, PlainConfigSchema
from mkdocs.livereload import LiveReloadServer
from mkdocs.structure.files import Files
from mkdocs.structure.nav import Navigation
from mkdocs.structure.pages import Page
if TYPE_CHECKING:
from mkdocs.config.defaults import MkDocsConfig
log = logging.getLogger('mkdocs.plugins')
def get_plugins() -> Dict[str, EntryPoint]:
"""Return a dict of all installed Plugins as {name: EntryPoint}."""
plugins = entry_points(group='mkdocs.plugins')
# Allow third-party plugins to override core plugins
pluginmap = {}
for plugin in plugins:
if plugin.name in pluginmap and plugin.value.startswith("mkdocs.contrib."):
continue
pluginmap[plugin.name] = plugin
return pluginmap
SomeConfig = TypeVar('SomeConfig', bound=Config)
class BasePlugin(Generic[SomeConfig]):
"""
Plugin base class.
All plugins should subclass this class.
"""
config_class: Type[SomeConfig] = LegacyConfig # type: ignore[assignment]
config_scheme: PlainConfigSchema = ()
config: SomeConfig = {} # type: ignore[assignment]
supports_multiple_instances: bool = False
"""Set to true in subclasses to declare support for adding the same plugin multiple times."""
def __class_getitem__(cls, config_class: Type[Config]):
"""Eliminates the need to write `config_class = FooConfig` when subclassing BasePlugin[FooConfig]"""
name = f'{cls.__name__}[{config_class.__name__}]'
return type(name, (cls,), dict(config_class=config_class))
def __init_subclass__(cls):
if not issubclass(cls.config_class, Config):
raise TypeError(
f"config_class {cls.config_class} must be a subclass of `mkdocs.config.base.Config`"
)
if cls.config_class is not LegacyConfig:
cls.config_scheme = cls.config_class._schema # For compatibility.
def load_config(
self, options: Dict[str, Any], config_file_path: Optional[str] = None
) -> Tuple[ConfigErrors, ConfigWarnings]:
"""Load config from a dict of options. Returns a tuple of (errors, warnings)."""
if self.config_class is LegacyConfig:
self.config = LegacyConfig(self.config_scheme, config_file_path=config_file_path) # type: ignore
else:
self.config = self.config_class(config_file_path=config_file_path)
self.config.load_dict(options)
return self.config.validate()
# One-time events
def on_startup(self, *, command: Literal['build', 'gh-deploy', 'serve'], dirty: bool) -> None:
"""
The `startup` event runs once at the very beginning of an `mkdocs` invocation.
New in MkDocs 1.4.
The presence of an `on_startup` method (even if empty) migrates the plugin to the new
system where the plugin object is kept across builds within one `mkdocs serve`.
Note that for initializing variables, the `__init__` method is still preferred.
For initializing per-build variables (and whenever in doubt), use the `on_config` event.
Parameters:
command: the command that MkDocs was invoked with, e.g. "serve" for `mkdocs serve`.
dirty: whether `--dirtyreload` or `--dirty` flags were passed.
"""
def on_shutdown(self) -> None:
"""
The `shutdown` event runs once at the very end of an `mkdocs` invocation, before exiting.
This event is relevant only for support of `mkdocs serve`, otherwise within a
        single build it's indistinguishable from `on_post_build`.
New in MkDocs 1.4.
The presence of an `on_shutdown` method (even if empty) migrates the plugin to the new
system where the plugin object is kept across builds within one `mkdocs serve`.
Note the `on_post_build` method is still preferred for cleanups, when possible, as it has
a much higher chance of actually triggering. `on_shutdown` is "best effort" because it
relies on detecting a graceful shutdown of MkDocs.
"""
def on_serve(
self, server: LiveReloadServer, *, config: MkDocsConfig, builder: Callable
) -> Optional[LiveReloadServer]:
"""
The `serve` event is only called when the `serve` command is used during
development. It runs only once, after the first build finishes.
It is passed the `Server` instance which can be modified before
it is activated. For example, additional files or directories could be added
to the list of "watched" files for auto-reloading.
Parameters:
server: `livereload.Server` instance
config: global configuration object
builder: a callable which gets passed to each call to `server.watch`
Returns:
`livereload.Server` instance
"""
return server
# Global events
def on_config(self, config: MkDocsConfig) -> Optional[Config]:
"""
The `config` event is the first event called on build and is run immediately
after the user configuration is loaded and validated. Any alterations to the
config should be made here.
Parameters:
config: global configuration object
Returns:
global configuration object
"""
return config
def on_pre_build(self, *, config: MkDocsConfig) -> None:
"""
The `pre_build` event does not alter any variables. Use this event to call
pre-build scripts.
Parameters:
config: global configuration object
"""
def on_files(self, files: Files, *, config: MkDocsConfig) -> Optional[Files]:
"""
The `files` event is called after the files collection is populated from the
`docs_dir`. Use this event to add, remove, or alter files in the
collection. Note that Page objects have not yet been associated with the
file objects in the collection. Use [Page Events](plugins.md#page-events) to manipulate page
specific data.
Parameters:
files: global files collection
config: global configuration object
Returns:
global files collection
"""
return files
def on_nav(
self, nav: Navigation, *, config: MkDocsConfig, files: Files
) -> Optional[Navigation]:
"""
The `nav` event is called after the site navigation is created and can
be used to alter the site navigation.
Parameters:
nav: global navigation object
config: global configuration object
files: global files collection
Returns:
global navigation object
"""
return nav
def on_env(
self, env: jinja2.Environment, *, config: MkDocsConfig, files: Files
) -> Optional[jinja2.Environment]:
"""
The `env` event is called after the Jinja template environment is created
and can be used to alter the
[Jinja environment](https://jinja.palletsprojects.com/en/latest/api/#jinja2.Environment).
Parameters:
env: global Jinja environment
config: global configuration object
files: global files collection
Returns:
global Jinja Environment
"""
return env
def on_post_build(self, *, config: MkDocsConfig) -> None:
"""
The `post_build` event does not alter any variables. Use this event to call
post-build scripts.
Parameters:
config: global configuration object
"""
def on_build_error(self, error: Exception) -> None:
"""
The `build_error` event is called after an exception of any kind
is caught by MkDocs during the build process.
Use this event to clean things up before MkDocs terminates. Note that any other
events which were scheduled to run after the error will have been skipped. See
[Handling Errors](plugins.md#handling-errors) for more details.
Parameters:
error: exception raised
"""
# Template events
def on_pre_template(
self, template: jinja2.Template, *, template_name: str, config: MkDocsConfig
) -> Optional[jinja2.Template]:
"""
The `pre_template` event is called immediately after the subject template is
loaded and can be used to alter the template.
Parameters:
template: a Jinja2 [Template](https://jinja.palletsprojects.com/en/latest/api/#jinja2.Template) object
template_name: string filename of template
config: global configuration object
Returns:
a Jinja2 [Template](https://jinja.palletsprojects.com/en/latest/api/#jinja2.Template) object
"""
return template
def on_template_context(
self, context: Dict[str, Any], *, template_name: str, config: MkDocsConfig
) -> Optional[Dict[str, Any]]:
"""
The `template_context` event is called immediately after the context is created
for the subject template and can be used to alter the context for that specific
template only.
Parameters:
context: dict of template context variables
template_name: string filename of template
config: global configuration object
Returns:
dict of template context variables
"""
return context
def on_post_template(
self, output_content: str, *, template_name: str, config: MkDocsConfig
) -> Optional[str]:
"""
The `post_template` event is called after the template is rendered, but before
it is written to disc and can be used to alter the output of the template.
        If an empty string is returned, the template is skipped and nothing is
written to disc.
Parameters:
output_content: output of rendered template as string
template_name: string filename of template
config: global configuration object
Returns:
output of rendered template as string
"""
return output_content
# Page events
def on_pre_page(self, page: Page, *, config: MkDocsConfig, files: Files) -> Optional[Page]:
"""
The `pre_page` event is called before any actions are taken on the subject
page and can be used to alter the `Page` instance.
Parameters:
page: `mkdocs.nav.Page` instance
config: global configuration object
files: global files collection
Returns:
`mkdocs.nav.Page` instance
"""
return page
def on_page_read_source(self, *, page: Page, config: MkDocsConfig) -> Optional[str]:
"""
The `on_page_read_source` event can replace the default mechanism to read
the contents of a page's source from the filesystem.
Parameters:
page: `mkdocs.nav.Page` instance
config: global configuration object
Returns:
The raw source for a page as unicode string. If `None` is returned, the
default loading from a file will be performed.
"""
return None
def on_page_markdown(
self, markdown: str, *, page: Page, config: MkDocsConfig, files: Files
) -> Optional[str]:
"""
The `page_markdown` event is called after the page's markdown is loaded
from file and can be used to alter the Markdown source text. The meta-
data has been stripped off and is available as `page.meta` at this point.
Parameters:
markdown: Markdown source text of page as string
page: `mkdocs.nav.Page` instance
config: global configuration object
files: global files collection
Returns:
Markdown source text of page as string
"""
return markdown
def on_page_content(
self, html: str, *, page: Page, config: MkDocsConfig, files: Files
) -> Optional[str]:
"""
The `page_content` event is called after the Markdown text is rendered to
HTML (but before being passed to a template) and can be used to alter the
HTML body of the page.
Parameters:
html: HTML rendered from Markdown source as string
page: `mkdocs.nav.Page` instance
config: global configuration object
files: global files collection
Returns:
HTML rendered from Markdown source as string
"""
return html
def on_page_context(
self, context: Dict[str, Any], *, page: Page, config: MkDocsConfig, nav: Navigation
) -> Optional[Dict[str, Any]]:
"""
The `page_context` event is called after the context for a page is created
and can be used to alter the context for that specific page only.
Parameters:
context: dict of template context variables
page: `mkdocs.nav.Page` instance
config: global configuration object
nav: global navigation object
Returns:
dict of template context variables
"""
return context
def on_post_page(self, output: str, *, page: Page, config: MkDocsConfig) -> Optional[str]:
"""
The `post_page` event is called after the template is rendered, but
before it is written to disc and can be used to alter the output of the
page. If an empty string is returned, the page is skipped and nothing is
written to disc.
Parameters:
output: output of rendered template as string
page: `mkdocs.nav.Page` instance
config: global configuration object
Returns:
output of rendered template as string
"""
return output
EVENTS = tuple(k[3:] for k in BasePlugin.__dict__ if k.startswith("on_"))
# The above definitions were just for docs and type checking, we don't actually want them.
for k in EVENTS:
delattr(BasePlugin, 'on_' + k)
T = TypeVar('T')
def event_priority(priority: float) -> Callable[[T], T]:
"""A decorator to set an event priority for an event handler method.
Recommended priority values:
`100` "first", `50` "early", `0` "default", `-50` "late", `-100` "last".
As different plugins discover more precise relations to each other, the values should be further tweaked.
```python
@plugins.event_priority(-100) # Wishing to run this after all other plugins' `on_files` events.
def on_files(self, files, config, **kwargs):
...
```
New in MkDocs 1.4.
Recommended shim for backwards compatibility:
```python
try:
from mkdocs.plugins import event_priority
except ImportError:
event_priority = lambda priority: lambda f: f # No-op fallback
```
"""
def decorator(event_method):
event_method.mkdocs_priority = priority
return event_method
return decorator
class PluginCollection(dict, MutableMapping[str, BasePlugin]):
"""
A collection of plugins.
In addition to being a dict of Plugin instances, each event method is registered
upon being added. All registered methods for a given event can then be run in order
by calling `run_event`.
"""
def __init__(self, *args, **kwargs) -> None:
super().__init__(*args, **kwargs)
self.events: Dict[str, List[Callable]] = {k: [] for k in EVENTS}
def _register_event(self, event_name: str, method: Callable) -> None:
"""Register a method for an event."""
utils.insort(
self.events[event_name], method, key=lambda m: -getattr(m, 'mkdocs_priority', 0)
)
def __getitem__(self, key: str) -> BasePlugin:
return super().__getitem__(key)
def __setitem__(self, key: str, value: BasePlugin) -> None:
super().__setitem__(key, value)
# Register all of the event methods defined for this Plugin.
for event_name in (x for x in dir(value) if x.startswith('on_')):
method = getattr(value, event_name, None)
if callable(method):
self._register_event(event_name[3:], method)
@overload
def run_event(self, name: str, item: None = None, **kwargs) -> Any:
...
@overload
def run_event(self, name: str, item: T, **kwargs) -> T:
...
def run_event(self, name: str, item=None, **kwargs):
"""
Run all registered methods of an event.
`item` is the object to be modified or replaced and returned by the event method.
        If it isn't given, the event method creates a new object to be returned.
All other keywords are variables for context, but would not generally
be modified by the event method.
"""
pass_item = item is not None
events = self.events[name]
if events:
log.debug(f'Running {len(events)} `{name}` events')
for method in events:
if pass_item:
result = method(item, **kwargs)
else:
result = method(**kwargs)
# keep item if method returned `None`
if result is not None:
item = result
return item
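# --- Illustrative sketch, not part of the original module ---
# A minimal plugin built on the event API documented above. The class name and
# the marker it injects are hypothetical; a real plugin would additionally be
# exposed through the ``mkdocs.plugins`` entry point group so MkDocs can load
# it by name.
class _ExampleMarkerPlugin(BasePlugin):
    @event_priority(-50)  # run late, after most other plugins' on_page_markdown
    def on_page_markdown(self, markdown, *, page, config, files):
        # Prepend an HTML comment to every page's Markdown source.
        return '<!-- rendered with _ExampleMarkerPlugin -->\n' + markdown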
| {
"content_hash": "6916d03e7bee51a0a98296b88ef3631d",
"timestamp": "",
"source": "github",
"line_count": 526,
"max_line_length": 114,
"avg_line_length": 34.346007604562736,
"alnum_prop": 0.6291929591497841,
"repo_name": "mkdocs/mkdocs",
"id": "63fdcb81cd9a67b048f87349f45b1c227888ba43",
"size": "18066",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mkdocs/plugins.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "11343"
},
{
"name": "HTML",
"bytes": "31899"
},
{
"name": "JavaScript",
"bytes": "115956"
},
{
"name": "Python",
"bytes": "599473"
}
],
"symlink_target": ""
} |
import frappe
from frappe.deferred_insert import deferred_insert, save_to_db
from frappe.tests.utils import FrappeTestCase
class TestDeferredInsert(FrappeTestCase):
def test_deferred_insert(self):
route_history = {"route": frappe.generate_hash(), "user": "Administrator"}
deferred_insert("Route History", [route_history])
save_to_db()
self.assertTrue(frappe.db.exists("Route History", route_history))
| {
"content_hash": "ba612782538967440af55941b18fec8c",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 76,
"avg_line_length": 34.416666666666664,
"alnum_prop": 0.7651331719128329,
"repo_name": "frappe/frappe",
"id": "4f27bef4f0b1449f97b9521aacbefa162b3ab9d8",
"size": "413",
"binary": false,
"copies": "3",
"ref": "refs/heads/develop",
"path": "frappe/tests/test_deferred_insert.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "65093"
},
{
"name": "HTML",
"bytes": "250850"
},
{
"name": "JavaScript",
"bytes": "2523337"
},
{
"name": "Less",
"bytes": "10921"
},
{
"name": "Python",
"bytes": "3618097"
},
{
"name": "SCSS",
"bytes": "261690"
},
{
"name": "Vue",
"bytes": "98456"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
import calendar
from datetime import datetime, timedelta
import pytz
import requests
import six
from django.conf import settings
from sentry.utils.compat.mock import patch
from sentry.utils import json
from sentry.models import GroupHash, GroupRelease, Release
from sentry.tsdb.base import TSDBModel
from sentry.tsdb.snuba import SnubaTSDB
from sentry.testutils import TestCase, SnubaTestCase
from sentry.utils.dates import to_timestamp
def timestamp(d):
t = int(to_timestamp(d))
return t - (t % 3600)
def has_shape(data, shape, allow_empty=False):
"""
Determine if a data object has the provided shape
At any level, the object in `data` and in `shape` must have the same type.
A dict is the same shape if all its keys and values have the same shape as the
key/value in `shape`. The number of keys/values is not relevant.
A list is the same shape if all its items have the same shape as the value
in `shape`
A tuple is the same shape if it has the same length as `shape` and all the
values have the same shape as the corresponding value in `shape`
Any other object simply has to have the same type.
If `allow_empty` is set, lists and dicts in `data` will pass even if they are empty.
"""
if not isinstance(data, type(shape)):
return False
if isinstance(data, dict):
return (
(allow_empty or len(data) > 0)
and all(has_shape(k, list(shape.keys())[0]) for k in data.keys())
and all(has_shape(v, list(shape.values())[0]) for v in data.values())
)
elif isinstance(data, list):
return (allow_empty or len(data) > 0) and all(has_shape(v, shape[0]) for v in data)
elif isinstance(data, tuple):
return len(data) == len(shape) and all(
has_shape(data[i], shape[i]) for i in range(len(data))
)
else:
return True
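# Illustrative examples of the shape check above (not part of the original
# tests): a dict matches a one-entry template regardless of key count, while
# tuples must match the template's length exactly.
assert has_shape({"a": [1, 2], "b": [3]}, {"x": [0]})
assert not has_shape((1, 2, 3), (0, 0))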
class SnubaTSDBTest(TestCase, SnubaTestCase):
def setUp(self):
super(SnubaTSDBTest, self).setUp()
self.db = SnubaTSDB()
self.now = datetime.utcnow().replace(
hour=0, minute=0, second=0, microsecond=0, tzinfo=pytz.UTC
)
self.proj1 = self.create_project()
self.proj1env1 = self.create_environment(project=self.proj1, name="test")
self.proj1env2 = self.create_environment(project=self.proj1, name="dev")
self.proj1env3 = self.create_environment(project=self.proj1, name="staging")
self.proj1defaultenv = self.create_environment(project=self.proj1, name="")
self.proj1group1 = self.create_group(self.proj1)
self.proj1group2 = self.create_group(self.proj1)
hash1 = "1" * 32
hash2 = "2" * 32
GroupHash.objects.create(project=self.proj1, group=self.proj1group1, hash=hash1)
GroupHash.objects.create(project=self.proj1, group=self.proj1group2, hash=hash2)
self.release1 = Release.objects.create(
organization_id=self.organization.id, version="1" * 10, date_added=self.now
)
self.release1.add_project(self.proj1)
self.release2 = Release.objects.create(
organization_id=self.organization.id, version="2" * 10, date_added=self.now
)
self.release2.add_project(self.proj1)
self.group1release1 = GroupRelease.objects.create(
project_id=self.proj1.id, group_id=self.proj1group1.id, release_id=self.release1.id
)
self.group1release2 = GroupRelease.objects.create(
project_id=self.proj1.id, group_id=self.proj1group1.id, release_id=self.release2.id
)
self.group2release1 = GroupRelease.objects.create(
project_id=self.proj1.id, group_id=self.proj1group2.id, release_id=self.release1.id
)
data = json.dumps(
[
(
2,
"insert",
{
"event_id": (six.text_type(r) * 32)[:32],
"primary_hash": [hash1, hash2][(r // 600) % 2], # Switch every 10 mins
"group_id": [self.proj1group1.id, self.proj1group2.id][(r // 600) % 2],
"project_id": self.proj1.id,
"message": "message 1",
"platform": "python",
"datetime": (self.now + timedelta(seconds=r)).strftime(
"%Y-%m-%dT%H:%M:%S.%fZ"
),
"data": {
"type": "transaction" if r % 1200 == 0 else "error",
"received": calendar.timegm(self.now.timetuple()) + r,
"tags": {
"foo": "bar",
"baz": "quux",
# Switch every 2 hours
"environment": [self.proj1env1.name, None][(r // 7200) % 3],
"sentry:user": u"id:user{}".format(r // 3300),
"sentry:release": six.text_type(r // 3600) * 10, # 1 per hour
},
"user": {
# change every 55 min so some hours have 1 user, some have 2
"id": u"user{}".format(r // 3300),
"email": u"user{}@sentry.io".format(r),
},
},
},
)
for r in range(0, 14400, 600)
]
) # Every 10 min for 4 hours
assert (
requests.post(settings.SENTRY_SNUBA + "/tests/events/insert", data=data).status_code
== 200
)
# snuba trims query windows based on first_seen/last_seen, so these need to be correct-ish
self.proj1group1.first_seen = self.now
self.proj1group1.last_seen = self.now + timedelta(seconds=14400)
self.proj1group1.save()
self.proj1group2.first_seen = self.now
self.proj1group2.last_seen = self.now + timedelta(seconds=14400)
self.proj1group2.save()
def test_range_groups(self):
dts = [self.now + timedelta(hours=i) for i in range(4)]
assert self.db.get_range(
TSDBModel.group, [self.proj1group1.id], dts[0], dts[-1], rollup=3600
) == {
self.proj1group1.id: [
(timestamp(dts[0]), 3),
(timestamp(dts[1]), 3),
(timestamp(dts[2]), 3),
(timestamp(dts[3]), 3),
]
}
# Multiple groups
assert self.db.get_range(
TSDBModel.group,
[self.proj1group1.id, self.proj1group2.id],
dts[0],
dts[-1],
rollup=3600,
) == {
self.proj1group1.id: [
(timestamp(dts[0]), 3),
(timestamp(dts[1]), 3),
(timestamp(dts[2]), 3),
(timestamp(dts[3]), 3),
],
self.proj1group2.id: [
(timestamp(dts[0]), 3),
(timestamp(dts[1]), 3),
(timestamp(dts[2]), 3),
(timestamp(dts[3]), 3),
],
}
assert self.db.get_range(TSDBModel.group, [], dts[0], dts[-1], rollup=3600) == {}
def test_range_releases(self):
dts = [self.now + timedelta(hours=i) for i in range(4)]
assert self.db.get_range(
TSDBModel.release, [self.release1.id], dts[0], dts[-1], rollup=3600
) == {
self.release1.id: [
(timestamp(dts[0]), 0),
(timestamp(dts[1]), 6),
(timestamp(dts[2]), 0),
(timestamp(dts[3]), 0),
]
}
def test_range_project(self):
dts = [self.now + timedelta(hours=i) for i in range(4)]
assert self.db.get_range(
TSDBModel.project, [self.proj1.id], dts[0], dts[-1], rollup=3600
) == {
self.proj1.id: [
(timestamp(dts[0]), 3),
(timestamp(dts[1]), 3),
(timestamp(dts[2]), 3),
(timestamp(dts[3]), 3),
]
}
def test_range_environment_filter(self):
dts = [self.now + timedelta(hours=i) for i in range(4)]
assert self.db.get_range(
TSDBModel.project,
[self.proj1.id],
dts[0],
dts[-1],
rollup=3600,
environment_ids=[self.proj1env1.id],
) == {
self.proj1.id: [
(timestamp(dts[0]), 3),
(timestamp(dts[1]), 3),
(timestamp(dts[2]), 0),
(timestamp(dts[3]), 0),
]
}
# No events submitted for env2
assert self.db.get_range(
TSDBModel.project,
[self.proj1.id],
dts[0],
dts[-1],
rollup=3600,
environment_ids=[self.proj1env2.id],
) == {
self.proj1.id: [
(timestamp(dts[0]), 0),
(timestamp(dts[1]), 0),
(timestamp(dts[2]), 0),
(timestamp(dts[3]), 0),
]
}
# Events submitted with no environment should match default environment
assert self.db.get_range(
TSDBModel.project,
[self.proj1.id],
dts[0],
dts[-1],
rollup=3600,
environment_ids=[self.proj1defaultenv.id],
) == {
self.proj1.id: [
(timestamp(dts[0]), 0),
(timestamp(dts[1]), 0),
(timestamp(dts[2]), 3),
(timestamp(dts[3]), 3),
]
}
def test_range_rollups(self):
# Daily
daystart = self.now.replace(hour=0) # day buckets start on day boundaries
dts = [daystart + timedelta(days=i) for i in range(2)]
assert self.db.get_range(
TSDBModel.project, [self.proj1.id], dts[0], dts[-1], rollup=86400
) == {self.proj1.id: [(timestamp(dts[0]), 12), (timestamp(dts[1]), 0)]}
# Minutely
dts = [self.now + timedelta(minutes=i) for i in range(120)]
        # Events land every 10 minutes, but every other one is a transaction, so
        # expect a 1 at minutes 10, 30, 50, ... and 0 elsewhere
expected = [
(to_timestamp(d), 1 if i % 10 == 0 and i % 20 != 0 else 0) for i, d in enumerate(dts)
]
expected[0] = (expected[0][0], 0)
assert self.db.get_range(
TSDBModel.project, [self.proj1.id], dts[0], dts[-1], rollup=60
) == {self.proj1.id: expected}
def test_distinct_counts_series_users(self):
dts = [self.now + timedelta(hours=i) for i in range(4)]
assert self.db.get_distinct_counts_series(
TSDBModel.users_affected_by_group, [self.proj1group1.id], dts[0], dts[-1], rollup=3600
) == {
self.proj1group1.id: [
(timestamp(dts[0]), 1),
(timestamp(dts[1]), 1),
(timestamp(dts[2]), 1),
(timestamp(dts[3]), 2),
]
}
dts = [self.now + timedelta(hours=i) for i in range(4)]
assert self.db.get_distinct_counts_series(
TSDBModel.users_affected_by_project, [self.proj1.id], dts[0], dts[-1], rollup=3600
) == {
self.proj1.id: [
(timestamp(dts[0]), 1),
(timestamp(dts[1]), 2),
(timestamp(dts[2]), 2),
(timestamp(dts[3]), 2),
]
}
assert (
self.db.get_distinct_counts_series(
TSDBModel.users_affected_by_group, [], dts[0], dts[-1], rollup=3600
)
== {}
)
def get_distinct_counts_totals_users(self):
assert self.db.get_distinct_counts_totals(
TSDBModel.users_affected_by_group,
[self.proj1group1.id],
self.now,
self.now + timedelta(hours=4),
rollup=3600,
) == {
self.proj1group1.id: 2 # 2 unique users overall
}
assert self.db.get_distinct_counts_totals(
TSDBModel.users_affected_by_group,
[self.proj1group1.id],
self.now,
self.now,
rollup=3600,
) == {
self.proj1group1.id: 1 # Only 1 unique user in the first hour
}
assert self.db.get_distinct_counts_totals(
TSDBModel.users_affected_by_project,
[self.proj1.id],
self.now,
self.now + timedelta(hours=4),
rollup=3600,
) == {self.proj1.id: 2}
assert (
self.db.get_distinct_counts_totals(
TSDBModel.users_affected_by_group,
[],
self.now,
self.now + timedelta(hours=4),
rollup=3600,
)
== {}
)
def test_most_frequent(self):
assert self.db.get_most_frequent(
TSDBModel.frequent_issues_by_project,
[self.proj1.id],
self.now,
self.now + timedelta(hours=4),
rollup=3600,
) == {self.proj1.id: [(self.proj1group1.id, 2.0), (self.proj1group2.id, 1.0)]}
assert (
self.db.get_most_frequent(
TSDBModel.frequent_issues_by_project,
[],
self.now,
self.now + timedelta(hours=4),
rollup=3600,
)
== {}
)
def test_frequency_series(self):
dts = [self.now + timedelta(hours=i) for i in range(4)]
assert self.db.get_frequency_series(
TSDBModel.frequent_releases_by_group,
{
self.proj1group1.id: (self.group1release1.id, self.group1release2.id),
self.proj1group2.id: (self.group2release1.id,),
},
dts[0],
dts[-1],
rollup=3600,
) == {
self.proj1group1.id: [
(timestamp(dts[0]), {self.group1release1.id: 0, self.group1release2.id: 0}),
(timestamp(dts[1]), {self.group1release1.id: 3, self.group1release2.id: 0}),
(timestamp(dts[2]), {self.group1release1.id: 0, self.group1release2.id: 3}),
(timestamp(dts[3]), {self.group1release1.id: 0, self.group1release2.id: 0}),
],
self.proj1group2.id: [
(timestamp(dts[0]), {self.group2release1.id: 0}),
(timestamp(dts[1]), {self.group2release1.id: 3}),
(timestamp(dts[2]), {self.group2release1.id: 0}),
(timestamp(dts[3]), {self.group2release1.id: 0}),
],
}
assert (
self.db.get_frequency_series(
TSDBModel.frequent_releases_by_group, {}, dts[0], dts[-1], rollup=3600
)
== {}
)
def test_result_shape(self):
"""
Tests that the results from the different TSDB methods have the
expected format.
"""
project_id = self.proj1.id
dts = [self.now + timedelta(hours=i) for i in range(4)]
results = self.db.get_most_frequent(
TSDBModel.frequent_issues_by_project, [project_id], dts[0], dts[0]
)
assert has_shape(results, {1: [(1, 1.0)]})
results = self.db.get_most_frequent_series(
TSDBModel.frequent_issues_by_project, [project_id], dts[0], dts[0]
)
assert has_shape(results, {1: [(1, {1: 1.0})]})
items = {
# {project_id: (issue_id, issue_id, ...)}
project_id: (self.proj1group1.id, self.proj1group2.id)
}
results = self.db.get_frequency_series(
TSDBModel.frequent_issues_by_project, items, dts[0], dts[-1]
)
assert has_shape(results, {1: [(1, {1: 1})]})
results = self.db.get_frequency_totals(
TSDBModel.frequent_issues_by_project, items, dts[0], dts[-1]
)
assert has_shape(results, {1: {1: 1}})
results = self.db.get_range(TSDBModel.project, [project_id], dts[0], dts[-1])
assert has_shape(results, {1: [(1, 1)]})
results = self.db.get_distinct_counts_series(
TSDBModel.users_affected_by_project, [project_id], dts[0], dts[-1]
)
assert has_shape(results, {1: [(1, 1)]})
results = self.db.get_distinct_counts_totals(
TSDBModel.users_affected_by_project, [project_id], dts[0], dts[-1]
)
assert has_shape(results, {1: 1})
results = self.db.get_distinct_counts_union(
TSDBModel.users_affected_by_project, [project_id], dts[0], dts[-1]
)
assert has_shape(results, 1)
def test_calculated_limit(self):
with patch("sentry.tsdb.snuba.snuba") as snuba:
# 24h test
rollup = 3600
end = self.now
start = end + timedelta(days=-1, seconds=rollup)
self.db.get_data(TSDBModel.group, [1, 2, 3, 4, 5], start, end, rollup=rollup)
assert snuba.query.call_args[1]["limit"] == 120
# 14 day test
rollup = 86400
start = end + timedelta(days=-14, seconds=rollup)
self.db.get_data(TSDBModel.group, [1, 2, 3, 4, 5], start, end, rollup=rollup)
assert snuba.query.call_args[1]["limit"] == 70
# 1h test
rollup = 3600
end = self.now
start = end + timedelta(hours=-1, seconds=rollup)
self.db.get_data(TSDBModel.group, [1, 2, 3, 4, 5], start, end, rollup=rollup)
assert snuba.query.call_args[1]["limit"] == 5
| {
"content_hash": "13fb7473edf86cfd822393967ea56135",
"timestamp": "",
"source": "github",
"line_count": 480,
"max_line_length": 98,
"avg_line_length": 37.03541666666667,
"alnum_prop": 0.5104910839849244,
"repo_name": "beeftornado/sentry",
"id": "111fa2272d0ab1e91ec9ae6c66a28ef06f1c6069",
"size": "17777",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/snuba/tsdb/test_tsdb_backend.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "157195"
},
{
"name": "HTML",
"bytes": "197026"
},
{
"name": "JavaScript",
"bytes": "380379"
},
{
"name": "Makefile",
"bytes": "2832"
},
{
"name": "Python",
"bytes": "6473603"
}
],
"symlink_target": ""
} |
import json
import re
import logging
from bs4 import BeautifulSoup
from datafeeds.parser_base import ParserBase
class UsfirstMatchesParser2003(ParserBase):
@classmethod
def parse(self, html):
"""
Parse the table that contains qualification match results.
Note that 2002 match tables aren't consistently formatted, and this
parser takes that into account.
"""
soup = BeautifulSoup(html)
match_table = soup.findAll('table')[0]
matches = self.parseMatchResultList(match_table)
return matches, False
@classmethod
def parseMatchResultList(self, table):
matches = []
mid_match = False # Matches are split across rows. This keeps track of whether or not we are parsing the same match.
ignore_match = False # Parsing failed. Ignore this match
mid_match_comp_level = None
mid_match_number = None
mid_match_set_number = None
mid_match_teams = [] # Teams for the current match, if mid_match. If not mid match, this should be empty.
mid_match_scores = [] # Scores for the current match, if mid_match. If not mid match, this should be empty.
for tr in table.findAll('tr')[2:]: # skip table headers
tds = tr.findAll('td')
match_name = self._recurseUntilString(tds[0])
# Start of a new match. Combine info and reset mid_match info.
if mid_match and (match_name is not None or len(tds) == 1):
if not ignore_match:
if len(mid_match_teams) == len(mid_match_scores):
blue_teams = mid_match_teams[:len(mid_match_teams) / 2]
red_teams = mid_match_teams[len(mid_match_teams) / 2:]
blue_score = mid_match_scores[0]
red_score = mid_match_scores[len(mid_match_scores) / 2]
alliances = {"red": {
"teams": red_teams,
"score": red_score
},
"blue": {
"teams": blue_teams,
"score": blue_score
}
}
matches.append({"alliances_json": json.dumps(alliances),
"comp_level": mid_match_comp_level,
"match_number": mid_match_number,
"set_number": mid_match_set_number,
"team_key_names": red_teams + blue_teams,
})
else:
logging.warning("Lengths of mid_match_teams ({}) and mid_match_scores ({}) aren't the same!".format(mid_match_teams, mid_match_scores))
mid_match = False
ignore_match = False
mid_match_comp_level = None
mid_match_number = None
mid_match_set_number = None
mid_match_teams = []
mid_match_scores = []
continue
if not mid_match:
mid_match = True
match_name_lower = match_name.lower()
if 'elim' in match_name_lower:
# looks like: "Elim Finals.1" or or "Elim QF1.2"
if 'finals' in match_name_lower:
mid_match_comp_level = 'f'
mid_match_set_number = 1
try:
mid_match_number = int(re.findall(r'\d+', match_name)[0])
except:
logging.warning("Finals match number parse for '%s' failed!" % match_name)
ignore_match = True
continue
else:
if 'qf' in match_name_lower:
mid_match_comp_level = 'qf'
elif 'sf' in match_name_lower:
mid_match_comp_level = 'sf'
else:
logging.warning("Could not extract comp level from: {}".format(match_name))
ignore_match = True
continue
try:
prefix, suffix = match_name_lower.split('.')
mid_match_set_number = int(prefix[-1])
mid_match_number = int(suffix[0])
except:
logging.warning("Could not extract match set and number from: {}".format(match_name))
ignore_match = True
continue
else:
mid_match_comp_level = 'qm'
mid_match_set_number = 1
try:
mid_match_number = int(re.findall(r'\d+', match_name)[0])
except:
logging.warning("Qual match number parse for '%s' failed!" % match_name)
ignore_match = True
continue
else:
team_col = self._recurseUntilString(tds[2])
try:
team_key = 'frc{}'.format(int(re.findall(r'\d+', team_col)[0]))
except:
logging.warning("Team number parse for '%s' failed!" % team_col)
ignore_match = True
continue
score_col = self._recurseUntilString(tds[3])
try:
match_score = int(re.findall(r'\d+', score_col)[0])
if match_score is None:
match_score = -1
except:
logging.warning("Score parse for '%s' failed!" % score_col)
ignore_match = True
continue
mid_match_teams.append(team_key)
mid_match_scores.append(match_score)
return matches
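# Hypothetical usage sketch (not part of the original module): the raw HTML
# would normally come from the datafeed layer that downloads the usfirst.org
# results page, so `html` below is an assumed variable.
#   matches, partial = UsfirstMatchesParser2003.parse(html)
#   for match in matches:
#       print match['comp_level'], match['match_number'], match['alliances_json']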
| {
"content_hash": "e1d0dfd2a2020efd918e0cdc22f6fc29",
"timestamp": "",
"source": "github",
"line_count": 137,
"max_line_length": 159,
"avg_line_length": 45.35036496350365,
"alnum_prop": 0.45163367133429905,
"repo_name": "bdaroz/the-blue-alliance",
"id": "8c095015fab6e6d0e01f4c56c03292fc7048ce1f",
"size": "6213",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "datafeeds/usfirst_matches_parser_2003.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "340944"
},
{
"name": "Dockerfile",
"bytes": "1510"
},
{
"name": "HTML",
"bytes": "910114"
},
{
"name": "JavaScript",
"bytes": "512382"
},
{
"name": "PHP",
"bytes": "10727"
},
{
"name": "Python",
"bytes": "2744849"
},
{
"name": "Ruby",
"bytes": "3494"
},
{
"name": "Shell",
"bytes": "13901"
}
],
"symlink_target": ""
} |
"""Tests for the extractoutput preprocessor"""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
from .base import PreprocessorTestsBase
from ..extractoutput import ExtractOutputPreprocessor
class TestExtractOutput(PreprocessorTestsBase):
"""Contains test functions for extractoutput.py"""
def build_preprocessor(self):
"""Make an instance of a preprocessor"""
preprocessor = ExtractOutputPreprocessor()
preprocessor.extract_output_types = {
'text/plain', 'image/png', 'application/pdf'}
preprocessor.enabled = True
return preprocessor
def test_constructor(self):
"""Can a ExtractOutputPreprocessor be constructed?"""
self.build_preprocessor()
def test_output(self):
"""Test the output of the ExtractOutputPreprocessor"""
nb = self.build_notebook()
res = self.build_resources()
preprocessor = self.build_preprocessor()
nb, res = preprocessor(nb, res)
# Check if text was extracted.
output = nb.cells[0].outputs[1]
self.assertIn('filenames', output.metadata)
self.assertIn('text/plain', output.metadata.filenames)
text_filename = output.metadata.filenames['text/plain']
# Check if png was extracted.
output = nb.cells[0].outputs[6]
self.assertIn('filenames', output.metadata)
self.assertIn('image/png', output.metadata.filenames)
png_filename = output.metadata.filenames['image/png']
# Check that pdf was extracted
output = nb.cells[0].outputs[7]
self.assertIn('filenames', output.metadata)
self.assertIn('application/pdf', output.metadata.filenames)
pdf_filename = output.metadata.filenames['application/pdf']
# Verify text output
self.assertIn(text_filename, res['outputs'])
self.assertEqual(res['outputs'][text_filename], b'b')
# Verify png output
self.assertIn(png_filename, res['outputs'])
self.assertEqual(res['outputs'][png_filename], b'g')
# Verify pdf output
self.assertIn(pdf_filename, res['outputs'])
self.assertEqual(res['outputs'][pdf_filename], b'h')
| {
"content_hash": "af361a2bcc37aec88b725372d3c0e5b5",
"timestamp": "",
"source": "github",
"line_count": 60,
"max_line_length": 67,
"avg_line_length": 37.4,
"alnum_prop": 0.660873440285205,
"repo_name": "mattvonrocketstein/smash",
"id": "b54dbb53f69f44ec999b8590402e70cf0e968d2a",
"size": "2244",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "smashlib/ipy3x/nbconvert/preprocessors/tests/test_extractoutput.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "162188"
},
{
"name": "HTML",
"bytes": "32106"
},
{
"name": "JavaScript",
"bytes": "1615935"
},
{
"name": "Makefile",
"bytes": "550"
},
{
"name": "Python",
"bytes": "4934398"
},
{
"name": "Shell",
"bytes": "2990"
}
],
"symlink_target": ""
} |
import logging
import traceback
import time
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
from airflow.hooks import RedisHook
class List2DictTransformer(BaseOperator):
"""
    This operator evaluates list data and converts each input record to a dict using the provided skeleton dict <br />
    <b> Requirement </b> :- <br />
    Connection: The Telrad connection should have: <br />
    1) redis_conn : the Redis Hook Object <br />
    2) identifier_input <br />
    3) identifier_output <br />
    4) output_identifier_index <br />
    5) start_timestamp <br />
    6) end_timestamp <br />
    7) payload <br />
    8) index_key <br />
    9) skeleton_dict <br />
    10) indexed = False <br />
"""
    ui_color = '#e1ffed'
arguments= """
1) redis_conn : the Redis Hook Object <br />
2) identifier_input <br />
3) identifier_output <br />
4) output_identifier_index <br />
5) start_timestamp <br />
6) end_timestamp <br />
7) payload <br />
8) index_key <br />
9) skeleton_dict <br />
10) indexed = False <br />
"""
@apply_defaults
def __init__(
self,redis_conn,identifier_input,identifier_output,output_identifier_index,start_timestamp,end_timestamp,payload,index_key,skeleton_dict,indexed=False, *args, **kwargs):
super(List2DictTransformer, self).__init__(*args, **kwargs)
        logging.debug("List2DictTransformer initialized with kwargs: %s", kwargs)
self.redis_conn=redis_conn
self.identifier_input = identifier_input
self.start_timestamp = start_timestamp
self.end_timestamp = end_timestamp
self.identifier_output = identifier_output
self.output_identifier_index = output_identifier_index
self.payload = payload
self.index_key = index_key
        self.skeleton_dict = skeleton_dict
        self.indexed = indexed
def execute(self, context,**kwargs):
logging.info("Executing Evaluator Operator")
transformed_data = []
converted_dict = {}
        if self.indexed:
            data = self.redis_conn.get_event_by_key(self.identifier_input, self.payload, self.index_key, self.start_timestamp, self.end_timestamp)
        else:
            data = self.redis_conn.get_event(self.identifier_input, self.start_timestamp, self.end_timestamp)
        for data_values in data:
            converted_dict = {}
            for key in self.skeleton_dict:
                converted_dict[key] = data_values[int(self.skeleton_dict.get(key))]
            transformed_data.append(converted_dict.copy())
        if self.indexed:
            self.redis_conn.add_event_by_key(self.identifier_output, transformed_data, {self.output_identifier_index: self.output_identifier_index})
        else:
            self.redis_conn.add_event(self.identifier_output, self.start_timestamp, transformed_data)
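# Hypothetical DAG usage sketch (not part of the original plugin): every value
# below is an illustrative assumption, including `redis_hook` (an already
# constructed RedisHook) and the skeleton mapping of output keys to list indices.
#   transform = List2DictTransformer(
#       task_id='list_to_dict',
#       redis_conn=redis_hook,
#       identifier_input='raw_events',
#       identifier_output='structured_events',
#       output_identifier_index='device_id',
#       start_timestamp=1490000000,
#       end_timestamp=1490003600,
#       payload=None,
#       index_key='device_id',
#       skeleton_dict={'hostname': 0, 'severity': 1, 'message': 2},
#       indexed=False,
#       dag=dag)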
| {
"content_hash": "1f56f6498db0467d384b2dc7629af1c6",
"timestamp": "",
"source": "github",
"line_count": 79,
"max_line_length": 172,
"avg_line_length": 31.544303797468356,
"alnum_prop": 0.7158908507223114,
"repo_name": "vipul-tm/DAG",
"id": "4f7018dbeb6e6eb5ef6fc60f9fce1b412fd85469",
"size": "2492",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "plugins-ttpl/operators/list2dict_transformer_operator.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "36360"
},
{
"name": "Makefile",
"bytes": "677624"
},
{
"name": "Python",
"bytes": "527876"
}
],
"symlink_target": ""
} |
import ShareYourSystem as SYS
#Represent a simple object
@SYS.RepresenterClass()
class MakerClass(SYS.InitiatorClass):
RepresentingKeyStrsList=['MadeMyInt']
def default_init(self,
_MakingMyFloat=0.,
_MadeMyInt=0
):
pass
#Define a simple instance
SimpleMaker=MakerClass(_MakingMyFloat=2.)
#Represent a structured instance
ParentMaker=MakerClass()
ParentMaker.FirstChildMaker=MakerClass()
ParentMaker.CircularChildMaker=MakerClass()
ParentMaker.CircularChildMaker.ParentMaker=ParentMaker
ParentMaker.CircularChildMaker.SelfMaker=ParentMaker.CircularChildMaker
#Define a derived class from the MakerClass
@SYS.RepresenterClass()
class BuilderClass(MakerClass):
RepresentingKeyStrsList=['BuiltMyStr']
def default_init(self,
_BuildingMyStr='hello',
_BuiltMyStr='None'
):
pass
#Define a simple instance
SimpleBuilder=BuilderClass(_MakingMyFloat=2.)
#Define the AttestedStr
SYS._attest(
[
'SimpleMaker is '+SYS._str(SimpleMaker),
'ParentMaker is '+SYS._str(ParentMaker),
'SimpleBuilder is '+SYS._str(SimpleBuilder)
]
)
#Print
| {
"content_hash": "6bef8a1bc206370e3c0b13ac187d9f38",
"timestamp": "",
"source": "github",
"line_count": 50,
"max_line_length": 71,
"avg_line_length": 22.06,
"alnum_prop": 0.7688123300090662,
"repo_name": "Ledoux/ShareYourSystem",
"id": "1c373fcd7f1fd2a3fcc37af154dcfa67cf59996a",
"size": "1118",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Pythonlogy/draft/Representer/05_ExampleDoc.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "86"
},
{
"name": "C++",
"bytes": "4244220"
},
{
"name": "CSS",
"bytes": "142769"
},
{
"name": "CoffeeScript",
"bytes": "37331"
},
{
"name": "HTML",
"bytes": "36211676"
},
{
"name": "JavaScript",
"bytes": "2147968"
},
{
"name": "Jupyter Notebook",
"bytes": "7930602"
},
{
"name": "Makefile",
"bytes": "6362"
},
{
"name": "PHP",
"bytes": "11096341"
},
{
"name": "Python",
"bytes": "5700092"
},
{
"name": "Ruby",
"bytes": "60"
},
{
"name": "Scala",
"bytes": "2412"
},
{
"name": "Shell",
"bytes": "2525"
},
{
"name": "Swift",
"bytes": "154"
},
{
"name": "TeX",
"bytes": "2556"
},
{
"name": "XSLT",
"bytes": "20993"
}
],
"symlink_target": ""
} |
DOCUMENTATION = '''
---
inventory: openshift
short_description: Openshift gears external inventory script
description:
- Generates inventory of Openshift gears using the REST interface
- this permit to reuse playbook to setup an Openshift gear
version_added: None
author: Michael Scherer
'''
try:
import json
except ImportError:
import simplejson as json
import os
import os.path
import sys
import ConfigParser
import StringIO
from ansible.module_utils.urls import open_url
configparser = None
def get_from_rhc_config(variable):
global configparser
CONF_FILE = os.path.expanduser('~/.openshift/express.conf')
if os.path.exists(CONF_FILE):
if not configparser:
ini_str = '[root]\n' + open(CONF_FILE, 'r').read()
configparser = ConfigParser.SafeConfigParser()
configparser.readfp(StringIO.StringIO(ini_str))
try:
return configparser.get('root', variable)
except ConfigParser.NoOptionError:
return None
def get_config(env_var, config_var):
result = os.getenv(env_var)
if not result:
result = get_from_rhc_config(config_var)
if not result:
sys.exit("failed=True msg='missing %s'" % env_var)
return result
def get_json_from_api(url, username, password):
headers = {'Accept': 'application/json; version=1.5'}
response = open_url(url, headers=headers, url_username=username, url_password=password)
return json.loads(response.read())['data']
username = get_config('ANSIBLE_OPENSHIFT_USERNAME', 'default_rhlogin')
password = get_config('ANSIBLE_OPENSHIFT_PASSWORD', 'password')
broker_url = 'https://%s/broker/rest/' % get_config('ANSIBLE_OPENSHIFT_BROKER', 'libra_server')
response = get_json_from_api(broker_url + '/domains', username, password)
response = get_json_from_api("%s/domains/%s/applications" %
(broker_url, response[0]['id']), username, password)
result = {}
for app in response:
    # app['ssh_url'] looks like: ssh://<user>@<appname>-<domain>
(user, host) = app['ssh_url'][6:].split('@')
app_name = host.split('-')[0]
result[app_name] = {}
result[app_name]['hosts'] = []
result[app_name]['hosts'].append(host)
result[app_name]['vars'] = {}
result[app_name]['vars']['ansible_ssh_user'] = user
if len(sys.argv) == 2 and sys.argv[1] == '--list':
print(json.dumps(result))
elif len(sys.argv) == 3 and sys.argv[1] == '--host':
print(json.dumps({}))
else:
print("Need an argument, either --list or --host <host>")
| {
"content_hash": "be98c0f6e15f8adc879dfe87c3e44256",
"timestamp": "",
"source": "github",
"line_count": 84,
"max_line_length": 95,
"avg_line_length": 30.36904761904762,
"alnum_prop": 0.6640533124264995,
"repo_name": "abtreece/ansible",
"id": "479b8085d61c7a269b018d0bc213ced0cb7d5660",
"size": "3274",
"binary": false,
"copies": "193",
"ref": "refs/heads/stable-2.2",
"path": "contrib/inventory/openshift.py",
"mode": "33261",
"license": "mit",
"language": [],
"symlink_target": ""
} |
from __future__ import print_function
try:
from StringIO import StringIO
except ImportError: # pragma: nocover
from io import StringIO
from mock import patch
from ploy.common import InstanceHooks, BaseInstance, StartupScriptMixin
from ploy.config import Config, StartupScriptMassager
import os
import pytest
class MockController(object):
plugins = {}
class MockMaster(object):
def __init__(self, main_config):
self.ctrl = MockController()
self.main_config = main_config
class MockInstance(BaseInstance, StartupScriptMixin):
sectiongroupname = "instance"
def __init__(self):
self.config = {}
self.id = "foo"
self.hooks = InstanceHooks(self)
class TestStartupScript:
@pytest.fixture(autouse=True)
def setup_tempdir(self, tempdir):
self.tempdir = tempdir
self.directory = tempdir.directory
def _create_config(self, contents, path=None):
contents = StringIO(contents)
config = Config(contents, path=path)
config.add_massager(
StartupScriptMassager('instance', 'startup_script'))
return config.parse()
def testNoStartupScript(self):
instance = MockInstance()
config = self._create_config("[instance:foo]")
instance.master = MockMaster(config)
result = instance.startup_script()
assert result == ""
def testMissingStartupScript(self):
instance = MockInstance()
config = self._create_config(
"\n".join([
"[instance:foo]",
"startup_script = foo"]),
path=self.directory)
instance.master = MockMaster(config)
with patch('ploy.common.log') as CommonLogMock:
with pytest.raises(SystemExit):
instance.startup_script()
CommonLogMock.error.assert_called_with(
"Startup script '%s' not found.",
os.path.join(self.directory, 'foo'))
def testEmptyStartupScript(self):
self.tempdir['foo'].fill("")
instance = MockInstance()
config = self._create_config(
"\n".join([
"[instance:foo]",
"startup_script = foo"]),
path=self.directory)
instance.master = MockMaster(config)
result = instance.startup_script()
assert result == ""
def testGzip(self):
self.tempdir['foo'].fill("")
instance = MockInstance()
config = self._create_config(
"\n".join([
"[instance:foo]",
"startup_script = gzip:foo"]),
path=self.directory)
instance.master = MockMaster(config)
result = instance.startup_script()
expected = b"\n".join([
b"#!/bin/sh",
b"tail -n+4 $0 | gunzip -c | /bin/sh",
b"exit $?",
b""])
assert result[:len(expected)] == expected
payload = result[len(expected):]
header = payload[:10]
body = payload[10:]
assert header[:4] == b"\x1f\x8b\x08\x00" # magic + compression + flags
assert header[8:] == b"\x02\xff" # extra flags + os
assert body == b"\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00"
def testGzipCustomShebang(self):
self.tempdir['foo'].fill("#!/usr/bin/env python")
instance = MockInstance()
config = self._create_config(
"\n".join([
"[instance:foo]",
"startup_script = gzip:foo"]),
path=self.directory)
instance.master = MockMaster(config)
result = instance.startup_script()
expected = b"\n".join([
b"#!/bin/sh",
b"tail -n+4 $0 | gunzip -c | /usr/bin/env python",
b"exit $?",
b""])
assert result[:len(expected)] == expected
def test_strip_hashcomments(self):
self.tempdir['foo'].fill([
"#!/bin/bash",
"some command",
"#some comment",
" # an indented comment",
"and another command"])
instance = MockInstance()
config = self._create_config(
"\n".join([
"[instance:foo]",
"startup_script = foo"]),
path=self.directory)
instance.master = MockMaster(config)
result = instance.startup_script()
assert result == "\n".join([
"#!/bin/bash",
"some command",
"and another command"])
def testMaxSizeOk(self):
self.tempdir['foo'].fill("")
instance = MockInstance()
config = self._create_config(
"\n".join([
"[instance:foo]",
"startup_script = foo"]),
path=self.directory)
instance.master = MockMaster(config)
instance.max_startup_script_size = 10
result = instance.startup_script()
assert result == ""
def testMaxSizeExceeded(self):
self.tempdir['foo'].fill("aaaaabbbbbccccc")
instance = MockInstance()
config = self._create_config(
"\n".join([
"[instance:foo]",
"startup_script = foo"]),
path=self.directory)
instance.master = MockMaster(config)
instance.max_startup_script_size = 10
with patch('ploy.common.log') as LogMock:
with pytest.raises(SystemExit):
instance.startup_script()
LogMock.error.assert_called_with('Startup script too big (%s > %s).', 15, 10)
def testMaxSizeExceededDebug(self):
self.tempdir['foo'].fill("aaaaabbbbbccccc")
instance = MockInstance()
config = self._create_config(
"\n".join([
"[instance:foo]",
"startup_script = foo"]),
path=self.directory)
instance.master = MockMaster(config)
instance.max_startup_script_size = 10
with patch('ploy.common.log') as LogMock:
instance.startup_script(debug=True)
LogMock.error.assert_called_with('Startup script too big (%s > %s).', 15, 10)
@pytest.mark.parametrize("default, all, question, answer, expected", [
(None, False, 'Foo [yes/no] ', ['y'], True),
(None, False, 'Foo [yes/no] ', ['yes'], True),
(None, False, 'Foo [yes/no] ', ['Yes'], True),
(None, False, 'Foo [yes/no] ', ['YES'], True),
(None, False, 'Foo [yes/no] ', ['n'], False),
(None, False, 'Foo [yes/no] ', ['no'], False),
(None, False, 'Foo [yes/no] ', ['No'], False),
(None, False, 'Foo [yes/no] ', ['NO'], False),
(None, True, 'Foo [yes/no/all] ', ['a'], 'all'),
(None, True, 'Foo [yes/no/all] ', ['all'], 'all'),
(None, True, 'Foo [yes/no/all] ', ['All'], 'all'),
(None, True, 'Foo [yes/no/all] ', ['ALL'], 'all'),
(None, False, 'Foo [yes/no] ', ['YEbUS'], IndexError),
(None, False, 'Foo [yes/no] ', ['NarNJa'], IndexError),
(None, True, 'Foo [yes/no/all] ', ['ALfred'], IndexError),
(True, False, 'Foo [Yes/no] ', [''], True),
(False, False, 'Foo [yes/No] ', [''], False),
('all', True, 'Foo [yes/no/All] ', [''], 'all')])
def test_yesno(default, all, question, answer, expected):
from ploy.common import yesno
raw_input_values = answer
def get_input_result(q):
assert q == question
a = raw_input_values.pop()
print(q, repr(a))
return a
with patch('ploy.common.get_input') as RawInput:
RawInput.side_effect = get_input_result
try:
assert yesno('Foo', default, all) == expected
except Exception as e:
assert type(e) == expected
| {
"content_hash": "d41d9a88aa15418c6e50c4da63264f1f",
"timestamp": "",
"source": "github",
"line_count": 216,
"max_line_length": 89,
"avg_line_length": 35.38425925925926,
"alnum_prop": 0.5500457935365695,
"repo_name": "fschulze/ploy",
"id": "d0d4cd01c8b95b7cdc834623e8c1af6fdebabb52",
"size": "7643",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ploy/tests/test_common.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "145099"
}
],
"symlink_target": ""
} |
from textcom.alien import ALIEN_DEFAULT_ITEMS, \
ALIEN_RANKS, \
ALIEN_SPECIES, \
Alien, \
create_alien, \
sectoidfName, \
sectoidlName, \
SPECIES_AIM_RANGE, \
SPECIES_FIRSTNAME, \
SPECIES_HP_BASE, \
SPECIES_LASTNAME, \
SPECIES_MOBILITY_RANGE, \
SPECIES_WEAPON_CLASS
def test_death():
a = Alien(0, 'Ayy', 10, 10, 10, 0, 'Ayy', 'Lmao', '', None, [], [])
a.hp = 0
assert a.check_death()
assert not a.alive
def test_str():
rank = 0
a = Alien(0, 'Ayy', 10, 10, 10, rank, 'Ayy', 'Lmao', '', None, [], [])
assert str(a) == "(Ayy) " + ALIEN_RANKS[rank] +" Ayy Lmao"
def test_create_alien():
    # this function could be regarded as trivial, but the **kwargs stuff is not
# that intuitive, so test coverage is good
# test all accepted species
alien_species = ['Sectoid', 'Thinman', 'Floater', 'Muton']
for species in alien_species:
a = create_alien(0, 0, species)
assert a.species == species
# test invalid species detection
exception = None
try:
a = create_alien(0, 0, 'Ayy')
except Exception as e:
exception = e
assert exception is not None
assert str(exception) == 'Unknown alien species'
# test correct rank handling
# the value 10 is used because it's not a real rank, that could be
# generated by the function
species = 'Sectoid'
rank = 10
a = create_alien(0, 0, species, nrank=rank)
assert a.nrank == rank
assert a.hp == rank + ALIEN_SPECIES[species][SPECIES_HP_BASE]
assert rank + ALIEN_SPECIES[species][SPECIES_AIM_RANGE][0] <= a.aim
assert a.aim < rank + ALIEN_SPECIES[species][SPECIES_AIM_RANGE][1]
assert rank + ALIEN_SPECIES[species][SPECIES_MOBILITY_RANGE][0] \
<= a.mobility
assert a.mobility \
< rank + ALIEN_SPECIES[species][SPECIES_MOBILITY_RANGE][1]
assert a.firstname in sectoidfName
assert a.lastname in sectoidlName
assert a.armour == 'BDY'
assert type(a.weapon) == ALIEN_SPECIES[species][SPECIES_WEAPON_CLASS]
assert len(a.items) == 1
assert a.items[0] in ALIEN_DEFAULT_ITEMS
assert len(a.mods) == 0
# test correct overrides
hp_value = 2
aim_value = 2
mobility_value = 2
firstname_value = 'Ayy'
lastname_value = 'Lmao'
armour_value = 'LOLarmour'
weapon_value = 'LOLweapon'
items_value = [1,2]
mods_value = [1, 2]
a = create_alien(0, 0, species, nrank=rank, hp=hp_value, aim=aim_value,
mobility=mobility_value, firstname=firstname_value,
lastname=lastname_value, armour=armour_value,
weapon=weapon_value, items=items_value, mods=mods_value)
assert a.aim == aim_value
assert a.hp == hp_value
assert a.mobility == mobility_value
assert a.firstname == firstname_value
assert a.lastname == lastname_value
assert a.armour == armour_value
assert a.weapon == weapon_value
assert a.items == items_value
assert a.mods == mods_value
| {
"content_hash": "f135a2678335539a506ae0f8c9249e5b",
"timestamp": "",
"source": "github",
"line_count": 91,
"max_line_length": 79,
"avg_line_length": 41.64835164835165,
"alnum_prop": 0.49709762532981533,
"repo_name": "flimberger/textcom",
"id": "b80edcbe951d84ad062fbc91327cc0042fdb4861",
"size": "4921",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_alien.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "79875"
}
],
"symlink_target": ""
} |
"""
PyGeoj
**Version: 0.1.0**
**Date: July 25, 2014**
**Author: [Karim Bahgat](https://uit.academia.edu/KarimBahgat)**
**Contact: [email protected]**
**Homepage: https://github.com/karimbahgat/pygeoj**
## Table of Contents
- [About](#about)
- [System Compatibility](#system-compatibility)
- [Dependencies](#dependencies)
- [License](#license)
- [Basic Usage](#basic-usage)
- [Setup](#setup)
- [Reading](#reading)
- [Editing](#editing)
- [Constructing](#constructing)
## About
A simple Python Geojson file reader and writer.
### System Compatibility
Should work on Python version 2.x and Windows. Has not yet been tested on
Python 3.x or other OS systems.
### Dependencies
None.
### License
This code is free to share, use, reuse,
and modify according to the MIT license, see license.txt
## Basic Usage
### Setup
PyGeoj is installed by simply placing the "pygeoj" package folder in an importable location like
"C:/PythonXX/Lib/site-packages". It can then be imported with:
```
import pygeoj
```
### Reading
Reading geojson formatted GIS files is a simple one-liner (requires the geojson file to be a
"FeatureCollection", the most sensible format for most GIS files):
```
testfile = pygeoj.load(filepath="testfile.geojson")
```
Basic information about the geojson file can then be extracted, such as:
```
len(testfile) # the number of features
testfile.bbox # the bounding box region of the entire file
testfile.crs # the coordinate reference system
testfile.common_attributes # retrieves which field attributes are common to all features
```
Individual features can be accessed either by their index in the features list:
```
testfile.getfeature(3)
```
Or by iterating through all of them:
```
for feature in testfile:
# do something
```
A feature can be inspected in various ways:
```
feature.properties
feature.geometry.coordinates
feature.geometry.bbox
```
### Editing
The standard Python list operations can be used to edit and swap around the features in a geojson
instance, and then saving to a new geojson file:
```
_third = testfile.getfeature(3)
testfile.insertfeature(8, _third)
testfile.replacefeature(1, _third)
testfile.removefeature(3)
testfile.save("test_edit.geojson")
```
An existing feature can also be tweaked by using simple attribute-setting:
```
# set your own properties
_third.properties = {"newfield1":"newvalue1", "newfield2":"newvalue2"}
# borrow the geometry of the 16th feature
_third.geometry = testfile.getfeature(16).geometry
```
### Constructing
Creating a new geojson file from scratch is also easy:
```
newfile = pygeoj.new()
# The data coordinate system defaults to long/lat WGS84 or can be manually defined:
newfile.define_crs(type="link", link="http://spatialreference.org/ref/epsg/26912/esriwkt/", link_type="esriwkt")
```
The new file can then be populated with custom-made features created with the Feature and Geometry
classes:
```
_Norwayfeat_ = pygeoj.Feature(properties={"country":"Norway"},
geometry=pygeoj.Geometry(type="Polygon", coordinates=[[(21,3),(33,11),(44,22)]]))
_USAfeat_ = pygeoj.Feature(properties={"country":"USA"},
geometry=pygeoj.Geometry(type="Polygon", coordinates=[[(11,23),(14,5),(66,31)]]))
newfile.addfeature(_Norwayfeat_)
newfile.addfeature(_USAfeat_)
```
Finally, some useful additional information can be added to top off the geojson file before saving it to
file:
```
newfile.add_all_bboxes()
newfile.add_unique_id()
newfile.save("test_construct.geojson")
```
"""
from .main import Geometry, Feature, GeojsonFile
from .main import load, new
| {
"content_hash": "0b1f38fbc4fbbe748f13063a5ad1502f",
"timestamp": "",
"source": "github",
"line_count": 161,
"max_line_length": 112,
"avg_line_length": 22.714285714285715,
"alnum_prop": 0.7202625102543068,
"repo_name": "karimbahgat/PythonGis",
"id": "43e0098fd935cf31bfafc1b9935cbec1a4ab931a",
"size": "3657",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "(sandbox,tobemerged)/pythongis/vector/fileformats/pygeoj/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "125"
},
{
"name": "HTML",
"bytes": "1979518"
},
{
"name": "Python",
"bytes": "1762972"
},
{
"name": "Tcl",
"bytes": "345753"
}
],
"symlink_target": ""
} |
import torch.nn as nn
import torch as T
import torch.nn.functional as F
import numpy as np
import torch
from torch.autograd import Variable
import re
import string
def recursiveTrace(obj):
print(type(obj))
if hasattr(obj, 'grad_fn'):
print(obj.grad_fn)
recursiveTrace(obj.grad_fn)
elif hasattr(obj, 'saved_variables'):
print(obj.requires_grad, len(obj.saved_tensors), len(obj.saved_variables))
[print(v) for v in obj.saved_variables]
[recursiveTrace(v.grad_fn) for v in obj.saved_variables]
def cuda(x, grad=False, gpu_id=-1):
x = x.float() if T.is_tensor(x) else x
if gpu_id == -1:
t = T.FloatTensor(x)
t.requires_grad=grad
return t
else:
t = T.FloatTensor(x.pin_memory()).cuda(gpu_id)
t.requires_grad=grad
return t
def cudavec(x, grad=False, gpu_id=-1):
if gpu_id == -1:
t = T.Tensor(T.from_numpy(x))
t.requires_grad = grad
return t
else:
t = T.Tensor(T.from_numpy(x).pin_memory()).cuda(gpu_id)
t.requires_grad = grad
return t
def cudalong(x, grad=False, gpu_id=-1):
if gpu_id == -1:
t = T.LongTensor(T.from_numpy(x.astype(np.long)))
t.requires_grad = grad
return t
else:
t = T.LongTensor(T.from_numpy(x.astype(np.long)).pin_memory()).cuda(gpu_id)
t.requires_grad = grad
return t
def θ(a, b, normBy=2):
"""Batchwise Cosine similarity
Cosine similarity
Arguments:
a {Tensor} -- A 3D Tensor (b * m * w)
b {Tensor} -- A 3D Tensor (b * r * w)
Returns:
Tensor -- Batchwise cosine similarity (b * r * m)
"""
dot = T.bmm(a, b.transpose(1,2))
a_norm = T.norm(a, normBy, dim=2).unsqueeze(2)
b_norm = T.norm(b, normBy, dim=2).unsqueeze(1)
cos = dot / (a_norm * b_norm + δ)
return cos.transpose(1,2).contiguous()
def σ(input, axis=1):
"""Softmax on an axis
Softmax on an axis
Arguments:
input {Tensor} -- input Tensor
Keyword Arguments:
axis {number} -- axis on which to take softmax on (default: {1})
Returns:
Tensor -- Softmax output Tensor
"""
input_size = input.size()
trans_input = input.transpose(axis, len(input_size) - 1)
trans_size = trans_input.size()
input_2d = trans_input.contiguous().view(-1, trans_size[-1])
soft_max_2d = F.softmax(input_2d, -1)
soft_max_nd = soft_max_2d.view(*trans_size)
return soft_max_nd.transpose(axis, len(input_size) - 1)
δ = 1e-6
def register_nan_checks(model):
def check_grad(module, grad_input, grad_output):
# print(module) you can add this to see that the hook is called
# print('hook called for ' + str(type(module)))
if any(np.all(np.isnan(gi.data.cpu().numpy())) for gi in grad_input if gi is not None):
print('NaN gradient in grad_input ' + type(module).__name__)
model.apply(lambda module: module.register_backward_hook(check_grad))
def apply_dict(dic):
for k, v in dic.items():
apply_var(v, k)
if isinstance(v, nn.Module):
key_list = [a for a in dir(v) if not a.startswith('__')]
for key in key_list:
apply_var(getattr(v, key), key)
for pk, pv in v._parameters.items():
apply_var(pv, pk)
def apply_var(v, k):
if isinstance(v, Variable) and v.requires_grad:
v.register_hook(check_nan_gradient(k))
def check_nan_gradient(name=''):
def f(tensor):
if np.isnan(T.mean(tensor).data.cpu().numpy()):
print('\nnan gradient of {} :'.format(name))
# print(tensor)
# assert 0, 'nan gradient'
return tensor
return f
def ptr(tensor):
if T.is_tensor(tensor):
return tensor.storage().data_ptr()
elif hasattr(tensor, 'data'):
return tensor.clone().data.storage().data_ptr()
else:
return tensor
# TODO: EWW change this shit
def ensure_gpu(tensor, gpu_id):
if "cuda" in str(type(tensor)) and gpu_id != -1:
return tensor.cuda(gpu_id)
elif "cuda" in str(type(tensor)):
return tensor.cpu()
elif "Tensor" in str(type(tensor)) and gpu_id != -1:
return tensor.cuda(gpu_id)
elif "Tensor" in str(type(tensor)):
return tensor
elif type(tensor) is np.ndarray:
return cudavec(tensor, gpu_id=gpu_id).data
else:
return tensor
def print_gradient(x, name):
s = "Gradient of " + name + " ----------------------------------"
x.register_hook(lambda y: print(s, y.squeeze()))
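# Minimal usage sketch (not part of the original module): exercises the
# batchwise cosine similarity θ and the axis-wise softmax σ defined above.
# Shapes are illustrative; runs on CPU with no GPU assumptions.
if __name__ == '__main__':
    _a = T.randn(2, 4, 8)       # (batch, m, w)
    _b = T.randn(2, 3, 8)       # (batch, r, w)
    _sim = θ(_a, _b)            # batchwise cosine similarity -> (batch, r, m)
    _weights = σ(_sim, axis=2)  # softmax over the last axis
    print(_sim.shape, _weights.shape)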
| {
"content_hash": "ff726bfa392a1d53fb3d3b2b2e43afa5",
"timestamp": "",
"source": "github",
"line_count": 163,
"max_line_length": 91,
"avg_line_length": 26.196319018404907,
"alnum_prop": 0.6320843091334895,
"repo_name": "ixaxaar/pytorch-dnc",
"id": "fc246d7fe7a2a31f217d0c31feec3326d6b41be9",
"size": "4322",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dnc/util.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "142692"
},
{
"name": "Shell",
"bytes": "106"
}
],
"symlink_target": ""
} |
import random
from keystoneclient import exceptions as k_exceptions
from keystoneclient.v2_0 import client as k_client
from oslo_config import cfg
from oslo_utils import importutils
from oslo_utils import timeutils
from sqlalchemy.orm import exc
from sqlalchemy.orm import joinedload
from neutron.common import exceptions as n_exc
from neutron.common import utils
from neutron import context as neutron_context
from neutron.db import agents_db
from neutron.i18n import _LE, _LI, _LW
from neutron import manager
from neutron.openstack.common import log as logging
from neutron.openstack.common import uuidutils
from neutron.plugins.cisco.common import cisco_constants as c_constants
from neutron.plugins.cisco.db.l3 import l3_models
from neutron.plugins.cisco.l3 import service_vm_lib
from neutron.plugins.common import constants as svc_constants
LOG = logging.getLogger(__name__)
DEVICE_HANDLING_OPTS = [
cfg.StrOpt('l3_admin_tenant', default='L3AdminTenant',
help=_('Name of the L3 admin tenant.')),
cfg.StrOpt('management_network', default='osn_mgmt_nw',
help=_('Name of management network for device configuration. '
'Default value is osn_mgmt_nw')),
cfg.StrOpt('default_security_group', default='mgmt_sec_grp',
help=_('Default security group applied on management port. '
'Default value is mgmt_sec_grp.')),
cfg.IntOpt('cfg_agent_down_time', default=60,
help=_('Seconds of no status update until a cfg agent '
'is considered down.')),
cfg.BoolOpt('ensure_nova_running', default=True,
help=_('Ensure that Nova is running before attempting to '
'create any VM.'))
]
CSR1KV_OPTS = [
cfg.StrOpt('csr1kv_image', default='csr1kv_openstack_img',
help=_('Name of Glance image for CSR1kv.')),
cfg.StrOpt('csr1kv_flavor', default=621,
help=_('UUID of Nova flavor for CSR1kv.')),
cfg.StrOpt('csr1kv_plugging_driver',
default=('neutron.plugins.cisco.l3.plugging_drivers.'
'n1kv_trunking_driver.N1kvTrunkingPlugDriver'),
help=_('Plugging driver for CSR1kv.')),
cfg.StrOpt('csr1kv_device_driver',
default=('neutron.plugins.cisco.l3.hosting_device_drivers.'
'csr1kv_hd_driver.CSR1kvHostingDeviceDriver'),
help=_('Hosting device driver for CSR1kv.')),
cfg.StrOpt('csr1kv_cfgagent_router_driver',
default=('neutron.plugins.cisco.cfg_agent.device_drivers.'
'csr1kv.csr1kv_routing_driver.CSR1kvRoutingDriver'),
help=_('Config agent driver for CSR1kv.')),
cfg.IntOpt('csr1kv_booting_time', default=420,
help=_('Booting time in seconds before a CSR1kv '
'becomes operational.')),
cfg.StrOpt('csr1kv_username', default='stack',
help=_('Username to use for CSR1kv configurations.')),
cfg.StrOpt('csr1kv_password', default='cisco', secret=True,
help=_('Password to use for CSR1kv configurations.'))
]
cfg.CONF.register_opts(DEVICE_HANDLING_OPTS, "general")
cfg.CONF.register_opts(CSR1KV_OPTS, "hosting_devices")
class DeviceHandlingMixin(object):
"""A class implementing some functionality to handle devices."""
# The all-mighty tenant owning all hosting devices
_l3_tenant_uuid = None
# The management network for hosting devices
_mgmt_nw_uuid = None
_mgmt_sec_grp_id = None
# Loaded driver modules for CSR1kv
_hosting_device_driver = None
_plugging_driver = None
# Service VM manager object that interacts with Nova
_svc_vm_mgr = None
# Flag indicating is needed Nova services are reported as up.
_nova_running = False
@classmethod
def l3_tenant_id(cls):
"""Returns id of tenant owning hosting device resources."""
if cls._l3_tenant_uuid is None:
auth_url = cfg.CONF.keystone_authtoken.auth_uri
user = cfg.CONF.keystone_authtoken.admin_user
pw = cfg.CONF.keystone_authtoken.admin_password
tenant = cfg.CONF.keystone_authtoken.admin_tenant_name
keystone = k_client.Client(username=user, password=pw,
tenant_name=tenant,
auth_url=auth_url)
try:
tenant = keystone.tenants.find(
name=cfg.CONF.general.l3_admin_tenant)
cls._l3_tenant_uuid = tenant.id
except k_exceptions.NotFound:
LOG.error(_LE('No tenant with a name or ID of %s exists.'),
cfg.CONF.general.l3_admin_tenant)
except k_exceptions.NoUniqueMatch:
LOG.error(_LE('Multiple tenants matches found for %s'),
cfg.CONF.general.l3_admin_tenant)
return cls._l3_tenant_uuid
@classmethod
def mgmt_nw_id(cls):
"""Returns id of the management network."""
if cls._mgmt_nw_uuid is None:
tenant_id = cls.l3_tenant_id()
if not tenant_id:
return
net = manager.NeutronManager.get_plugin().get_networks(
neutron_context.get_admin_context(),
{'tenant_id': [tenant_id],
'name': [cfg.CONF.general.management_network]},
['id', 'subnets'])
if len(net) == 1:
num_subnets = len(net[0]['subnets'])
if num_subnets == 0:
LOG.error(_LE('The virtual management network has no '
'subnet. Please assign one.'))
return
elif num_subnets > 1:
LOG.info(_LI('The virtual management network has %d '
'subnets. The first one will be used.'),
num_subnets)
cls._mgmt_nw_uuid = net[0].get('id')
elif len(net) > 1:
# Management network must have a unique name.
LOG.error(_LE('The virtual management network does not have '
'unique name. Please ensure that it is.'))
else:
# Management network has not been created.
LOG.error(_LE('There is no virtual management network. Please '
'create one.'))
return cls._mgmt_nw_uuid
@classmethod
def mgmt_sec_grp_id(cls):
"""Returns id of security group used by the management network."""
if not utils.is_extension_supported(
manager.NeutronManager.get_plugin(), "security-group"):
return
if cls._mgmt_sec_grp_id is None:
# Get the id for the _mgmt_security_group_id
tenant_id = cls.l3_tenant_id()
res = manager.NeutronManager.get_plugin().get_security_groups(
neutron_context.get_admin_context(),
{'tenant_id': [tenant_id],
'name': [cfg.CONF.general.default_security_group]},
['id'])
if len(res) == 1:
cls._mgmt_sec_grp_id = res[0].get('id')
elif len(res) > 1:
# the mgmt sec group must be unique.
LOG.error(_LE('The security group for the virtual management '
'network does not have unique name. Please ensure '
'that it is.'))
else:
# CSR Mgmt security group is not present.
LOG.error(_LE('There is no security group for the virtual '
'management network. Please create one.'))
return cls._mgmt_sec_grp_id
@classmethod
def get_hosting_device_driver(cls):
"""Returns device driver."""
if cls._hosting_device_driver:
return cls._hosting_device_driver
else:
try:
cls._hosting_device_driver = importutils.import_object(
cfg.CONF.hosting_devices.csr1kv_device_driver)
except (ImportError, TypeError, n_exc.NeutronException):
LOG.exception(_LE('Error loading hosting device driver'))
return cls._hosting_device_driver
@classmethod
def get_hosting_device_plugging_driver(cls):
"""Returns plugging driver."""
if cls._plugging_driver:
return cls._plugging_driver
else:
try:
cls._plugging_driver = importutils.import_object(
cfg.CONF.hosting_devices.csr1kv_plugging_driver)
except (ImportError, TypeError, n_exc.NeutronException):
LOG.exception(_LE('Error loading plugging driver'))
return cls._plugging_driver
def get_hosting_devices_qry(self, context, hosting_device_ids,
load_agent=True):
"""Returns hosting devices with <hosting_device_ids>."""
query = context.session.query(l3_models.HostingDevice)
if load_agent:
query = query.options(joinedload('cfg_agent'))
if len(hosting_device_ids) > 1:
query = query.filter(l3_models.HostingDevice.id.in_(
hosting_device_ids))
else:
query = query.filter(l3_models.HostingDevice.id ==
hosting_device_ids[0])
return query
def handle_non_responding_hosting_devices(self, context, host,
hosting_device_ids):
with context.session.begin(subtransactions=True):
e_context = context.elevated()
hosting_devices = self.get_hosting_devices_qry(
e_context, hosting_device_ids).all()
# 'hosting_info' is dictionary with ids of removed hosting
# devices and the affected logical resources for each
# removed hosting device:
# {'hd_id1': {'routers': [id1, id2, ...],
# 'fw': [id1, ...],
# ...},
# 'hd_id2': {'routers': [id3, id4, ...]},
# 'fw': [id1, ...],
# ...},
# ...}
hosting_info = dict((id, {}) for id in hosting_device_ids)
try:
#TODO(bobmel): Modify so service plugins register themselves
self._handle_non_responding_hosting_devices(
context, hosting_devices, hosting_info)
except AttributeError:
pass
for hd in hosting_devices:
if not self._process_non_responsive_hosting_device(e_context,
hd):
# exclude this device since we did not remove it
del hosting_info[hd['id']]
self.l3_cfg_rpc_notifier.hosting_devices_removed(
context, hosting_info, False, host)
def get_device_info_for_agent(self, hosting_device):
"""Returns information about <hosting_device> needed by config agent.
Convenience function that service plugins can use to populate
their resources with information about the device hosting their
logical resource.
"""
credentials = {'username': cfg.CONF.hosting_devices.csr1kv_username,
'password': cfg.CONF.hosting_devices.csr1kv_password}
mgmt_ip = (hosting_device.management_port['fixed_ips'][0]['ip_address']
if hosting_device.management_port else None)
return {'id': hosting_device.id,
'credentials': credentials,
'management_ip_address': mgmt_ip,
'protocol_port': hosting_device.protocol_port,
'created_at': str(hosting_device.created_at),
'booting_time': cfg.CONF.hosting_devices.csr1kv_booting_time,
'cfg_agent_id': hosting_device.cfg_agent_id}
@classmethod
def is_agent_down(cls, heart_beat_time,
timeout=cfg.CONF.general.cfg_agent_down_time):
return timeutils.is_older_than(heart_beat_time, timeout)
def get_cfg_agents_for_hosting_devices(self, context, hosting_device_ids,
admin_state_up=None, active=None,
schedule=False):
if not hosting_device_ids:
return []
query = self.get_hosting_devices_qry(context, hosting_device_ids)
if admin_state_up is not None:
query = query.filter(
agents_db.Agent.admin_state_up == admin_state_up)
if schedule:
agents = []
for hosting_device in query:
if hosting_device.cfg_agent is None:
agent = self._select_cfgagent(context, hosting_device)
if agent is not None:
agents.append(agent)
else:
agents.append(hosting_device.cfg_agent)
else:
agents = [hosting_device.cfg_agent for hosting_device in query
if hosting_device.cfg_agent is not None]
if active is not None:
agents = [a for a in agents if not
self.is_agent_down(a['heartbeat_timestamp'])]
return agents
def auto_schedule_hosting_devices(self, context, agent_host):
"""Schedules unassociated hosting devices to Cisco cfg agent.
Schedules hosting devices to agent running on <agent_host>.
"""
with context.session.begin(subtransactions=True):
# Check if there is a valid Cisco cfg agent on the host
query = context.session.query(agents_db.Agent)
query = query.filter_by(agent_type=c_constants.AGENT_TYPE_CFG,
host=agent_host, admin_state_up=True)
try:
cfg_agent = query.one()
except (exc.MultipleResultsFound, exc.NoResultFound):
LOG.debug('No enabled Cisco cfg agent on host %s',
agent_host)
return False
if self.is_agent_down(
cfg_agent.heartbeat_timestamp):
LOG.warning(_LW('Cisco cfg agent %s is not alive'),
cfg_agent.id)
query = context.session.query(l3_models.HostingDevice)
query = query.filter_by(cfg_agent_id=None)
for hd in query:
hd.cfg_agent = cfg_agent
context.session.add(hd)
return True
def _setup_device_handling(self):
auth_url = cfg.CONF.keystone_authtoken.auth_uri
u_name = cfg.CONF.keystone_authtoken.admin_user
pw = cfg.CONF.keystone_authtoken.admin_password
tenant = cfg.CONF.general.l3_admin_tenant
self._svc_vm_mgr = service_vm_lib.ServiceVMManager(
user=u_name, passwd=pw, l3_admin_tenant=tenant, auth_url=auth_url)
def _process_non_responsive_hosting_device(self, context, hosting_device):
"""Host type specific processing of non responsive hosting devices.
:param hosting_device: db object for hosting device
:return: True if hosting_device has been deleted, otherwise False
"""
self._delete_service_vm_hosting_device(context, hosting_device)
return True
def _create_csr1kv_vm_hosting_device(self, context):
"""Creates a CSR1kv VM instance."""
# Note(bobmel): Nova does not handle VM dispatching well before all
# its services have started. This creates problems for the Neutron
# devstack script that creates a Neutron router, which in turn
# triggers service VM dispatching.
# Only perform pool maintenance if needed Nova services have started
if (cfg.CONF.general.ensure_nova_running and not self._nova_running):
if self._svc_vm_mgr.nova_services_up():
self.__class__._nova_running = True
else:
LOG.info(_LI('Not all Nova services are up and running. '
'Skipping this CSR1kv vm create request.'))
return
plugging_drv = self.get_hosting_device_plugging_driver()
hosting_device_drv = self.get_hosting_device_driver()
if plugging_drv is None or hosting_device_drv is None:
return
# These resources are owned by the L3AdminTenant
complementary_id = uuidutils.generate_uuid()
dev_data = {'complementary_id': complementary_id,
'device_id': 'CSR1kv',
'admin_state_up': True,
'protocol_port': 22,
'created_at': timeutils.utcnow()}
res = plugging_drv.create_hosting_device_resources(
context, complementary_id, self.l3_tenant_id(),
self.mgmt_nw_id(), self.mgmt_sec_grp_id(), 1)
if res.get('mgmt_port') is None:
# Required ports could not be created
return
vm_instance = self._svc_vm_mgr.dispatch_service_vm(
context, 'CSR1kv_nrouter', cfg.CONF.hosting_devices.csr1kv_image,
cfg.CONF.hosting_devices.csr1kv_flavor, hosting_device_drv,
res['mgmt_port'], res.get('ports'))
with context.session.begin(subtransactions=True):
if vm_instance is not None:
dev_data.update(
{'id': vm_instance['id'],
'management_port_id': res['mgmt_port']['id']})
hosting_device = self._create_hosting_device(
context, {'hosting_device': dev_data})
else:
# Fundamental error like could not contact Nova
# Cleanup anything we created
plugging_drv.delete_hosting_device_resources(
context, self.l3_tenant_id(), **res)
return
LOG.info(_LI('Created a CSR1kv hosting device VM'))
return hosting_device
def _delete_service_vm_hosting_device(self, context, hosting_device):
"""Deletes a <hosting_device> service VM.
This will indirectly make all of its hosted resources unscheduled.
"""
if hosting_device is None:
return
plugging_drv = self.get_hosting_device_plugging_driver()
if plugging_drv is None:
return
res = plugging_drv.get_hosting_device_resources(
context, hosting_device['id'], hosting_device['complementary_id'],
self.l3_tenant_id(), self.mgmt_nw_id())
if not self._svc_vm_mgr.delete_service_vm(context,
hosting_device['id']):
LOG.error(_LE('Failed to delete hosting device %s service VM. '
'Will un-register it anyway.'),
hosting_device['id'])
plugging_drv.delete_hosting_device_resources(
context, self.l3_tenant_id(), **res)
with context.session.begin(subtransactions=True):
context.session.delete(hosting_device)
def _create_hosting_device(self, context, hosting_device):
LOG.debug('create_hosting_device() called')
hd = hosting_device['hosting_device']
tenant_id = self._get_tenant_id_for_create(context, hd)
with context.session.begin(subtransactions=True):
hd_db = l3_models.HostingDevice(
id=hd.get('id') or uuidutils.generate_uuid(),
complementary_id=hd.get('complementary_id'),
tenant_id=tenant_id,
device_id=hd.get('device_id'),
admin_state_up=hd.get('admin_state_up', True),
management_port_id=hd['management_port_id'],
protocol_port=hd.get('protocol_port'),
cfg_agent_id=hd.get('cfg_agent_id'),
created_at=hd.get('created_at', timeutils.utcnow()),
status=hd.get('status', svc_constants.ACTIVE))
context.session.add(hd_db)
return hd_db
def _select_cfgagent(self, context, hosting_device):
"""Selects Cisco cfg agent that will configure <hosting_device>."""
if not hosting_device:
LOG.debug('Hosting device to schedule not specified')
return
elif hosting_device.cfg_agent:
LOG.debug('Hosting device %(hd_id)s has already been '
'assigned to Cisco cfg agent %(agent_id)s',
{'hd_id': id,
'agent_id': hosting_device.cfg_agent.id})
return
with context.session.begin(subtransactions=True):
active_cfg_agents = self._get_cfg_agents(context, active=True)
if not active_cfg_agents:
LOG.warning(_LW('There are no active Cisco cfg agents'))
# No worries, once a Cisco cfg agent is started and
# announces itself any "dangling" hosting devices
# will be scheduled to it.
return
chosen_agent = random.choice(active_cfg_agents)
hosting_device.cfg_agent = chosen_agent
context.session.add(hosting_device)
return chosen_agent
def _get_cfg_agents(self, context, active=None, filters=None):
query = context.session.query(agents_db.Agent)
query = query.filter(
agents_db.Agent.agent_type == c_constants.AGENT_TYPE_CFG)
if active is not None:
query = (query.filter(agents_db.Agent.admin_state_up == active))
if filters:
for key, value in filters.iteritems():
column = getattr(agents_db.Agent, key, None)
if column:
query = query.filter(column.in_(value))
cfg_agents = query.all()
if active is not None:
cfg_agents = [cfg_agent for cfg_agent in cfg_agents
if not self.is_agent_down(
cfg_agent['heartbeat_timestamp'])]
return cfg_agents
| {
"content_hash": "0bee58ffc910eb3836880818ed89f379",
"timestamp": "",
"source": "github",
"line_count": 475,
"max_line_length": 79,
"avg_line_length": 46.84421052631579,
"alnum_prop": 0.5692328434677093,
"repo_name": "rdo-management/neutron",
"id": "e8c3a8187f286082736268f75e0aa258ad945dcf",
"size": "22885",
"binary": false,
"copies": "2",
"ref": "refs/heads/mgt-master",
"path": "neutron/plugins/cisco/db/l3/device_handling_db.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ApacheConf",
"bytes": "24047"
},
{
"name": "Gettext Catalog",
"bytes": "575107"
},
{
"name": "Mako",
"bytes": "1043"
},
{
"name": "Python",
"bytes": "6918375"
},
{
"name": "Shell",
"bytes": "12287"
}
],
"symlink_target": ""
} |
import logging
import optparse
import time
from pytries import Trie
def fill(trie, data_file):
'''
Initialize the Trie with the content of a file.
The file should have a word per line
'''
data = open(data_file,'r')
for line in data:
trie[line.strip()]=True
data.close()
def main():
p = optparse.OptionParser()
p.add_option('--data', '-d', default="data.txt")
options, arguments = p.parse_args()
trie_dict = Trie()
start = time.clock()
logging.info('Processing data fill. This can take a few minutes')
fill(trie_dict, options.data)
end = time.clock()
logging.info('Data processed in %g seconds', (end-start))
print("")
print("Quit with CTRL+C")
print("")
while(True):
prefix = raw_input("Please type the prefix of a word: ")
start = time.clock()
suggestions = trie_dict.prefix(prefix)
end = time.clock()
idx = 0
for suggestion in suggestions:
idx += 1
print(' ' + str(suggestion[0]))
print(" %d suggestions found in %g seconds :" % (
idx,
end-start
)
)
if __name__ == '__main__':
main() | {
"content_hash": "1c0b9e043d8116c5340fd57323b2540a",
"timestamp": "",
"source": "github",
"line_count": 52,
"max_line_length": 69,
"avg_line_length": 23.48076923076923,
"alnum_prop": 0.5577395577395577,
"repo_name": "laucia/pyTries",
"id": "fc54778592066f3225f60f474729b91125ea0a90",
"size": "1221",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "autocomplete.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "5877"
}
],
"symlink_target": ""
} |
from unittest import TestCase
import pandas as pd
from alphaware.base import (Factor,
FactorContainer)
from alphaware.enums import (FactorType,
OutputDataFormat,
FreqType,
FactorNormType)
from alphaware.analyzer import TopMinusBottom
from pandas.util.testing import assert_frame_equal
class TestTopMinusBottom(TestCase):
def test_top_minus_bottom(self):
index = pd.MultiIndex.from_product([['2014-01-30', '2014-02-28', '2014-03-31'], ['001', '002']],
names=['trade_date', 'ticker'])
data1 = pd.DataFrame(index=index, data=[1.0, 2.0, 3.0, 4.0, 5.0, 6.0])
factor_test1 = Factor(data=data1, name='alpha1')
factor_test3 = Factor(data=data1, name='alpha2')
test2_property = {'type': FactorType.FWD_RETURN,
'data_format': OutputDataFormat.MULTI_INDEX_DF,
'norm_type': FactorNormType.Null,
'freq': FreqType.EOM}
data2 = pd.DataFrame(index=index, data=[3.0, 2.0, 3.0, 7.0, 8.0, 9.0])
factor_test2 = Factor(data=data2, name='fwd_return1', property_dict=test2_property)
factor_test4 = Factor(data=data2, name='fwd_return2', property_dict=test2_property)
fc = FactorContainer('2014-01-30', '2014-02-28', [factor_test1, factor_test2, factor_test3, factor_test4])
t = TopMinusBottom(quantiles=2)
calculate = t.predict(fc)
expected = pd.DataFrame(
data=[[-1.0, -1.0, -1.0, -1.0], [4.0, 4.0, 4.0, 4.0]],
index=pd.DatetimeIndex(['2014-01-30', '2014-02-28'], freq=None),
columns=['alpha1_fwd_return1', 'alpha2_fwd_return1',
'alpha1_fwd_return2',
'alpha2_fwd_return2'])
assert_frame_equal(calculate, expected)
| {
"content_hash": "75dffc9f930cb7274709d773ed05634d",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 114,
"avg_line_length": 50.473684210526315,
"alnum_prop": 0.5656934306569343,
"repo_name": "iLampard/alphaware",
"id": "0d14d6a40c50ddd85cae61d59e93b8d3eb67704a",
"size": "1943",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "alphaware/tests/analyzer/test_top_minus_bottom.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "155109"
}
],
"symlink_target": ""
} |
from eden.converter.fasta import fasta_to_sequence
from eden.converter.fasta import sequence_to_eden
def test_fasta_to_sequence_graph():
fa_fn = "test/test_fasta_to_sequence.fa"
seq = fasta_to_sequence(fa_fn)
sequence_to_eden(seq)
| {
"content_hash": "07dbaafbfb0ed93c7ab192701e7e92a3",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 50,
"avg_line_length": 30.625,
"alnum_prop": 0.7346938775510204,
"repo_name": "antworteffekt/EDeN",
"id": "1f02b6ff43ae3833a551a35962417744c3c4b013",
"size": "245",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/test_graphprot.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "599071"
}
],
"symlink_target": ""
} |
import logging
import optparse
import os
import sys
import traceback
from webkitpy.common.host import Host
from webkitpy.layout_tests.controllers.manager import Manager
from webkitpy.layout_tests.models import test_run_results
from webkitpy.layout_tests.port import configuration_options, platform_options
from webkitpy.layout_tests.views import buildbot_results
from webkitpy.layout_tests.views import printing
from webkitpy.layout_tests.generate_results_dashboard import GenerateDashBoard
_log = logging.getLogger(__name__)
def main(argv, stdout, stderr):
options, args = parse_args(argv)
if options.platform and 'test' in options.platform and not 'browser_test' in options.platform:
# It's a bit lame to import mocks into real code, but this allows the user
# to run tests against the test platform interactively, which is useful for
# debugging test failures.
from webkitpy.common.host_mock import MockHost
host = MockHost()
else:
host = Host()
if options.lint_test_files:
from webkitpy.layout_tests.lint_test_expectations import run_checks
return run_checks(host, options, stderr)
try:
port = host.port_factory.get(options.platform, options)
except NotImplementedError, e:
# FIXME: is this the best way to handle unsupported port names?
print >> stderr, str(e)
return test_run_results.UNEXPECTED_ERROR_EXIT_STATUS
try:
run_details = run(port, options, args, stderr)
if ((run_details.exit_code not in test_run_results.ERROR_CODES or
run_details.exit_code == test_run_results.EARLY_EXIT_STATUS) and
not run_details.initial_results.keyboard_interrupted):
bot_printer = buildbot_results.BuildBotPrinter(stdout, options.debug_rwt_logging)
bot_printer.print_results(run_details)
if options.enable_versioned_results:
gen_dash_board = GenerateDashBoard(port)
gen_dash_board.generate()
return run_details.exit_code
    # We still need to handle KeyboardInterrupt, at least for webkitpy unittest cases.
except KeyboardInterrupt:
return test_run_results.INTERRUPTED_EXIT_STATUS
except test_run_results.TestRunException as e:
print >> stderr, e.msg
return e.code
except BaseException as e:
if isinstance(e, Exception):
print >> stderr, '\n%s raised: %s' % (e.__class__.__name__, str(e))
traceback.print_exc(file=stderr)
return test_run_results.UNEXPECTED_ERROR_EXIT_STATUS
def parse_args(args):
option_group_definitions = []
option_group_definitions.append(("Platform options", platform_options()))
option_group_definitions.append(("Configuration options", configuration_options()))
option_group_definitions.append(("Printing Options", printing.print_options()))
option_group_definitions.append(("Android-specific Options", [
optparse.make_option("--adb-device",
action="append", default=[],
help="Run Android layout tests on these devices."),
# FIXME: Flip this to be off by default once we can log the device setup more cleanly.
optparse.make_option("--no-android-logging",
action="store_false", dest='android_logging', default=True,
help="Do not log android-specific debug messages (default is to log as part of --debug-rwt-logging"),
]))
option_group_definitions.append(("Results Options", [
optparse.make_option("--add-platform-exceptions", action="store_true", default=False,
help="Save generated results into the *most-specific-platform* directory rather than the *generic-platform* directory"),
optparse.make_option("--additional-drt-flag", action="append",
default=[], help="Additional command line flag to pass to the driver "
"Specify multiple times to add multiple flags."),
optparse.make_option("--additional-expectations", action="append", default=[],
help="Path to a test_expectations file that will override previous expectations. "
"Specify multiple times for multiple sets of overrides."),
optparse.make_option("--additional-platform-directory", action="append",
default=[], help="Additional directory where to look for test "
"baselines (will take precendence over platform baselines). "
"Specify multiple times to add multiple search path entries."),
optparse.make_option("--build-directory",
help="Path to the directory under which build files are kept (should not include configuration)"),
optparse.make_option("--clobber-old-results", action="store_true",
default=False, help="Clobbers test results from previous runs."),
optparse.make_option("--compare-port", action="store", default=None,
help="Use the specified port's baselines first"),
optparse.make_option("--driver-name", type="string",
help="Alternative driver binary to use"),
optparse.make_option("--enable-versioned-results", action="store_true",
default=False, help="Archive the test results for later access."),
optparse.make_option("--full-results-html", action="store_true",
default=False,
help="Show all failures in results.html, rather than only regressions"),
optparse.make_option("--new-baseline", action="store_true",
default=False, help="Save generated results as new baselines "
"into the *most-specific-platform* directory, overwriting whatever's "
"already there. Equivalent to --reset-results --add-platform-exceptions"),
optparse.make_option("--no-new-test-results", action="store_false",
dest="new_test_results", default=True,
help="Don't create new baselines when no expected results exist"),
optparse.make_option("--no-show-results", action="store_false",
default=True, dest="show_results",
help="Don't launch a browser with results after the tests "
"are done"),
optparse.make_option("-p", "--pixel", "--pixel-tests", action="store_true",
dest="pixel_tests", help="Enable pixel-to-pixel PNG comparisons"),
optparse.make_option("--no-pixel", "--no-pixel-tests", action="store_false",
dest="pixel_tests", help="Disable pixel-to-pixel PNG comparisons"),
# FIXME: we should support a comma separated list with --pixel-test-directory as well.
optparse.make_option("--pixel-test-directory", action="append", default=[], dest="pixel_test_directories",
help="A directory where it is allowed to execute tests as pixel tests. "
"Specify multiple times to add multiple directories. "
"This option implies --pixel-tests. If specified, only those tests "
"will be executed as pixel tests that are located in one of the "
"directories enumerated with the option. Some ports may ignore this "
"option while others can have a default value that can be overridden here."),
optparse.make_option("--reset-results", action="store_true",
default=False, help="Reset expectations to the "
"generated results in their existing location."),
optparse.make_option("--results-directory", help="Location of test results"),
optparse.make_option("--skip-failing-tests", action="store_true",
default=False, help="Skip tests that are expected to fail. "
"Note: When using this option, you might miss new crashes "
"in these tests."),
optparse.make_option("--smoke", action="store_true",
help="Run just the SmokeTests"),
optparse.make_option("--no-smoke", dest="smoke", action="store_false",
help="Do not run just the SmokeTests"),
]))
option_group_definitions.append(("Testing Options", [
optparse.make_option("--additional-env-var", type="string", action="append", default=[],
help="Passes that environment variable to the tests (--additional-env-var=NAME=VALUE)"),
optparse.make_option("--batch-size",
help=("Run a the tests in batches (n), after every n tests, "
"the driver is relaunched."), type="int", default=None),
optparse.make_option("--build", dest="build",
action="store_true", default=True,
help="Check to ensure the build is up-to-date (default)."),
optparse.make_option("--no-build", dest="build",
action="store_false", help="Don't check to see if the build is up-to-date."),
optparse.make_option("--child-processes",
help="Number of drivers to run in parallel."),
optparse.make_option("--disable-breakpad", action="store_true",
help="Don't use breakpad to symbolize unexpected crashes."),
optparse.make_option("--driver-logging", action="store_true",
help="Print detailed logging of the driver/content_shell"),
optparse.make_option("--enable-leak-detection", action="store_true",
help="Enable the leak detection of DOM objects."),
optparse.make_option("--enable-sanitizer", action="store_true",
help="Only alert on sanitizer-related errors and crashes"),
optparse.make_option("--exit-after-n-crashes-or-timeouts", type="int",
default=None, help="Exit after the first N crashes instead of "
"running all tests"),
optparse.make_option("--exit-after-n-failures", type="int", default=None,
help="Exit after the first N failures instead of running all "
"tests"),
optparse.make_option("--ignore-builder-category", action="store",
help=("The category of builders to use with the --ignore-flaky-tests "
"option ('layout' or 'deps').")),
optparse.make_option("--ignore-flaky-tests", action="store",
help=("Control whether tests that are flaky on the bots get ignored."
"'very-flaky' == Ignore any tests that flaked more than once on the bot."
"'maybe-flaky' == Ignore any tests that flaked once on the bot."
"'unexpected' == Ignore any tests that had unexpected results on the bot.")),
optparse.make_option("--iterations", type="int", default=1, help="Number of times to run the set of tests (e.g. ABCABCABC)"),
optparse.make_option("--max-locked-shards", type="int", default=0,
help="Set the maximum number of locked shards"),
optparse.make_option("--no-retry-failures", action="store_false",
dest="retry_failures",
help="Don't re-try any tests that produce unexpected results."),
optparse.make_option("--nocheck-sys-deps", action="store_true",
default=False,
help="Don't check the system dependencies (themes)"),
optparse.make_option("--order", action="store", default="natural",
help=("determine the order in which the test cases will be run. "
"'none' == use the order in which the tests were listed either in arguments or test list, "
"'natural' == use the natural order (default), "
"'random-seeded' == randomize the test order using a fixed seed, "
"'random' == randomize the test order.")),
optparse.make_option("--profile", action="store_true",
help="Output per-test profile information."),
optparse.make_option("--profiler", action="store",
help="Output per-test profile information, using the specified profiler."),
optparse.make_option("--repeat-each", type="int", default=1, help="Number of times to run each test (e.g. AAABBBCCC)"),
optparse.make_option("--retry-failures", action="store_true",
help="Re-try any tests that produce unexpected results. Default is to not retry if an explicit list of tests is passed to run-webkit-tests."),
optparse.make_option("--run-chunk",
help=("Run a specified chunk (n:l), the nth of len l, "
"of the layout tests")),
optparse.make_option("--run-part", help=("Run a specified part (n:m), "
"the nth of m parts, of the layout tests")),
optparse.make_option("--run-singly", action="store_true",
default=False, help="DEPRECATED, same as --batch-size=1 --verbose"),
optparse.make_option("--skipped", action="store", default=None,
help=("control how tests marked SKIP are run. "
"'default' == Skip tests unless explicitly listed on the command line, "
"'ignore' == Run them anyway, "
"'only' == only run the SKIP tests, "
"'always' == always skip, even if listed on the command line.")),
optparse.make_option("--test-list", action="append",
help="read list of tests to run from file", metavar="FILE"),
optparse.make_option("--time-out-ms",
help="Set the timeout for each test"),
optparse.make_option("--wrapper",
help="wrapper command to insert before invocations of "
"the driver; option is split on whitespace before "
"running. (Example: --wrapper='valgrind --smc-check=all')"),
# FIXME: Display default number of child processes that will run.
optparse.make_option("-f", "--fully-parallel", action="store_true",
help="run all tests in parallel"),
optparse.make_option("-i", "--ignore-tests", action="append", default=[],
help="directories or test to ignore (may specify multiple times)"),
optparse.make_option("-n", "--dry-run", action="store_true",
default=False,
help="Do everything but actually run the tests or upload results."),
]))
option_group_definitions.append(("Miscellaneous Options", [
optparse.make_option("--lint-test-files", action="store_true",
default=False, help=("Makes sure the test files parse for all "
"configurations. Does not run any tests.")),
]))
# FIXME: Move these into json_results_generator.py
option_group_definitions.append(("Result JSON Options", [
optparse.make_option("--build-name", default="DUMMY_BUILD_NAME",
help=("The name of the builder used in its path, e.g. "
"webkit-rel.")),
optparse.make_option("--build-number", default="DUMMY_BUILD_NUMBER",
help=("The build number of the builder running this script.")),
optparse.make_option("--builder-name", default="",
help=("The name of the builder shown on the waterfall running "
"this script e.g. WebKit.")),
optparse.make_option("--master-name", help="The name of the buildbot master."),
optparse.make_option("--test-results-server", default="",
help=("If specified, upload results json files to this appengine "
"server.")),
optparse.make_option("--write-full-results-to",
help=("If specified, copy full_results.json from the results dir "
"to the specified path.")),
]))
option_parser = optparse.OptionParser()
for group_name, group_options in option_group_definitions:
option_group = optparse.OptionGroup(option_parser, group_name)
option_group.add_options(group_options)
option_parser.add_option_group(option_group)
return option_parser.parse_args(args)
def _set_up_derived_options(port, options, args):
"""Sets the options values that depend on other options values."""
if not options.child_processes:
options.child_processes = os.environ.get("WEBKIT_TEST_CHILD_PROCESSES",
str(port.default_child_processes()))
if not options.max_locked_shards:
options.max_locked_shards = int(os.environ.get("WEBKIT_TEST_MAX_LOCKED_SHARDS",
str(port.default_max_locked_shards())))
if not options.configuration:
options.configuration = port.default_configuration()
if options.pixel_tests is None:
options.pixel_tests = port.default_pixel_tests()
if not options.time_out_ms:
options.time_out_ms = str(port.default_timeout_ms())
options.slow_time_out_ms = str(5 * int(options.time_out_ms))
if options.additional_platform_directory:
additional_platform_directories = []
for path in options.additional_platform_directory:
additional_platform_directories.append(port.host.filesystem.abspath(path))
options.additional_platform_directory = additional_platform_directories
if options.new_baseline:
options.reset_results = True
options.add_platform_exceptions = True
if options.pixel_test_directories:
options.pixel_tests = True
verified_dirs = set()
pixel_test_directories = options.pixel_test_directories
for directory in pixel_test_directories:
# FIXME: we should support specifying the directories all the ways we support it for additional
# arguments specifying which tests and directories to run. We should also move the logic for that
# to Port.
filesystem = port.host.filesystem
if not filesystem.isdir(filesystem.join(port.layout_tests_dir(), directory)):
_log.warning("'%s' was passed to --pixel-test-directories, which doesn't seem to be a directory" % str(directory))
else:
verified_dirs.add(directory)
options.pixel_test_directories = list(verified_dirs)
if options.run_singly:
options.batch_size = 1
options.verbose = True
if not args and not options.test_list and options.smoke is None:
options.smoke = port.default_smoke_test_only()
if options.smoke:
if not args and not options.test_list and options.retry_failures is None:
# Retry failures by default if we're doing just a smoke test (no additional tests).
options.retry_failures = True
if not options.test_list:
options.test_list = []
options.test_list.append(port.host.filesystem.join(port.layout_tests_dir(), 'SmokeTests'))
if not options.skipped:
options.skipped = 'always'
if not options.skipped:
options.skipped = 'default'
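# A minimal sketch of how the derived options fall out for a smoke-only run
# (the port object and the layout-tests path are hypothetical here and are supplied
# by webkitpy elsewhere; this is illustration, not part of the module):
#
# options, args = parse_args(['--smoke'])
# _set_up_derived_options(port, options, args)
# # options.retry_failures -> True (smoke-only runs retry failures by default)
# # options.test_list -> ['<layout_tests_dir>/SmokeTests']
# # options.skipped -> 'always'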
def run(port, options, args, logging_stream):
logger = logging.getLogger()
logger.setLevel(logging.DEBUG if options.debug_rwt_logging else logging.INFO)
try:
printer = printing.Printer(port, options, logging_stream, logger=logger)
_set_up_derived_options(port, options, args)
manager = Manager(port, options, printer)
printer.print_config(port.results_directory())
run_details = manager.run(args)
_log.debug("Testing completed, Exit status: %d" % run_details.exit_code)
return run_details
finally:
printer.cleanup()
if __name__ == '__main__':
sys.exit(main(sys.argv[1:], sys.stdout, sys.stderr))
| {
"content_hash": "4773988ece0f0f3fc86cad3ed13ec578",
"timestamp": "",
"source": "github",
"line_count": 359,
"max_line_length": 154,
"avg_line_length": 54.03621169916435,
"alnum_prop": 0.6373524408474663,
"repo_name": "hgl888/crosswalk-android-extensions",
"id": "83a81417478af1be045890904fc0fc4d2a1f209b",
"size": "21066",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "build/idl-generator/third_party/WebKit/Tools/Scripts/webkitpy/layout_tests/run_webkit_tests.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Java",
"bytes": "75866"
},
{
"name": "JavaScript",
"bytes": "15654"
},
{
"name": "Python",
"bytes": "95429"
}
],
"symlink_target": ""
} |
import os
import datetime
import json
from tempfile import NamedTemporaryFile
from django.db import models
from django.core.files import File
from django.core.urlresolvers import reverse
from django.test import TestCase
from django.utils.text import capfirst
import file_picker
class Image(models.Model):
"""
Image Model for tests.
"""
name = models.CharField(max_length=255)
description_1 = models.TextField(blank=True)
description_2 = models.TextField(blank=True)
file = models.ImageField(upload_to='images/')
class MockRequest(object):
"""
Incomplete Mock Request object.
"""
GET = {}
POST = {}
FILES = {}
class MockImagePicker(file_picker.ImagePickerBase):
def __init__(self, name, model, columns, extra_headers, extra={}):
if columns:
self.columns = columns
if extra_headers:
self.extra_headers = extra_headers
for key, value in extra.items():
setattr(self, key, value)
super(MockImagePicker, self).__init__(name, model)
class BasePickerTest(TestCase):
"""
Base class that sets up the shared image and mock request fixtures for the picker tests.
"""
def setUp(self):
self.path = os.path.abspath('%s' % os.path.dirname(__file__))
self.image_file = File(open(os.path.join(self.path, 'static/img/attach.png'), 'rb'), "test_file.png")
self.image = Image(
name='Test Image',
description_1='test desc 1',
description_2='test desc 2',
file=self.image_file,
)
self.image.save()
self.request = MockRequest()
class TestListPage(BasePickerTest):
"""
Test listing page.
"""
def setUp(self):
super(TestListPage, self).setUp()
self.field_names = [f.name for f in Image._meta.get_fields()]
self.field_names.remove('file')
def test_all_fields(self):
"""
Test neither columns nor extra_headers defined.
"""
image_picker = MockImagePicker('image_test', Image, None, None)
response = image_picker.list(self.request)
list_resp = json.loads(response.content.decode('utf-8'))
self.assertEqual(response.status_code, 200)
self.assertEqual(self.field_names, list_resp['columns'])
self.assertEqual([capfirst(Image._meta.get_field(i).verbose_name)
for i in self.field_names], list_resp['extra_headers'])
def test_columns(self):
"""
Test only columns defined.
"""
columns = ['description_2', 'name']
image_picker = MockImagePicker('image_test', Image, columns, None)
response = image_picker.list(self.request)
list_resp = json.loads(response.content.decode('utf-8'))
self.assertEqual(response.status_code, 200)
self.assertEqual(columns, list_resp['columns'])
extra_headers = [capfirst(Image._meta.get_field(i).verbose_name)
for i in columns]
self.assertEqual(extra_headers, list_resp['extra_headers'])
def test_extra_headers(self):
"""
Test only extra headers defined. Should ignore it completely.
"""
image_picker = MockImagePicker('image_test', Image, None, ['Header'])
response = image_picker.list(self.request)
list_resp = json.loads(response.content.decode('utf-8'))
self.assertEqual(response.status_code, 200)
self.assertEqual(self.field_names, list_resp['columns'])
self.assertEqual([capfirst(Image._meta.get_field(i).verbose_name)
for i in self.field_names], list_resp['extra_headers'])
def test_columns_and_headers(self):
"""
Test custom columns and extra headers.
"""
columns = ['description_2', 'name', 'description_1']
extra_headers = ['Top Description', 'Image Name', 'Bottom Description']
image_picker = MockImagePicker('image_test', Image, columns, extra_headers)
response = image_picker.list(self.request)
list_resp = json.loads(response.content.decode('utf-8'))
self.assertEqual(response.status_code, 200)
self.assertEqual(columns, list_resp['columns'])
self.assertEqual(extra_headers, list_resp['extra_headers'])
def test_file_list(self):
"""
Make sure that the file list gives the correct url.
"""
image_picker = MockImagePicker('image_test', Image, None, None)
response = image_picker.list(self.request)
list_resp = json.loads(response.content.decode('utf-8'))
results = list_resp['result']
self.assertEqual(len(results), 1)
result = results[0]
self.assertEqual(result['url'], self.image.file.url)
def test_extra_links(self):
"""
Test having multiple links works.
"""
extra = {
'link_headers': ['URL', 'URL Caps'],
}
link_content = ['Click to insert', 'Click to insert Cap']
class CustomPicker(MockImagePicker):
def append(self, obj):
extra = {}
for name in self.columns:
value = getattr(obj, name)
if isinstance(value, (datetime.datetime, datetime.date)):
value = value.strftime('%b %d, %Y')
else:
value = str(value)
extra[name] = value
return {
'name': str(obj),
'url': getattr(obj, self.field).url,
'extra': extra,
'insert': [getattr(obj, self.field).url,
getattr(obj, self.field).url.upper()],
'link_content': link_content,
}
image_picker = CustomPicker('image_test', Image, None, None, extra=extra)
response = image_picker.list(self.request)
self.assertEqual(response.status_code, 200)
resp = json.loads(response.content.decode('utf-8'))
self.assertEqual(resp['link_headers'], extra['link_headers'])
self.assertEqual(resp['link_headers'], extra['link_headers'])
result = resp['result']
self.assertEqual(len(result), 1)
self.assertEqual(result[0]['insert'][0].upper(), result[0]['insert'][1])
self.assertEqual(result[0]['link_content'], link_content)
def test_search_page(self):
"""
Make sure that the search is checking text fields and finding the
correct results.
"""
for i in range(0, 3):
image = Image(
name='no find %s' % i,
description_1='desc 1 %s' % i,
description_2='desc 2 %s' % i,
file=self.image_file,
)
image.save()
image_picker = MockImagePicker('image_test', Image, None, None)
qs = image_picker.get_queryset('Test')
images = qs.all()
self.assertEqual(images.count(), 1)
self.assertTrue(self.image in images)
self.assertFalse(image in images)
class TestUploadPage(TestCase):
"""
Test the upload
"""
def setUp(self):
self.request = MockRequest()
cwd = os.path.dirname(__file__)
self.image_picker = MockImagePicker('image_test', Image, None, None)
self.image_file = File(open(os.path.join(cwd, 'static/img/attach.png'), 'rb'), "test_file.png")
def test_upload_form_page(self):
"""
Test form generation.
"""
response = self.image_picker.upload_file(self.request)
resp = json.loads(response.content.decode('utf-8'))
self.assertEqual(response.status_code, 200)
self.assertTrue('form' in resp)
def test_upload(self):
"""
Test the file upload and post.
"""
request = self.request
request.FILES = {'userfile': self.image_file}
response = self.image_picker.upload_file(request)
self.assertEqual(response.status_code, 200)
resp = json.loads(response.content.decode('utf-8'))
self.assertTrue('name' in resp)
tmp_file = resp['name']
request.FILES = {}
request.POST = {
'name': 'Test Image',
'description_1': 'description',
'file': tmp_file,
}
response = self.image_picker.upload_file(request)
resp = json.loads(response.content.decode('utf-8'))
url = resp['url']
images = Image.objects.all()
self.assertEqual(images.count(), 1)
image = images[0]
self.assertEqual(url, image.file.url)
class TestPickerSites(TestCase):
"""
Test the site/registration aspect of file picker.
"""
def setUp(self):
self.picker_name = 'test-images'
file_picker.site.register(Image, file_picker.ImagePickerBase, name=self.picker_name,)
self.url = reverse('filepicker:index')
def test_site_index(self):
response = self.client.get(self.url, {'pickers': [self.picker_name]})
resp = json.loads(response.content.decode('utf-8'))
for key, value in resp['pickers'].items():
self.assertEqual(key, self.picker_name)
self.assertEqual(value, '/file-picker/%s/' % self.picker_name)
def test_images_urls(self):
url = reverse('filepicker:%s:init' % self.picker_name)
response = self.client.get(url)
data = json.loads(response.content.decode('utf-8'))
urls = [list(u.values())[0] for u in list(data['urls'].values())]
for url in urls:
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
class FilePickerUploadFormTests(TestCase):
def setUp(self):
self.upload_file = NamedTemporaryFile()
filename = self.upload_file.name
self.basename = os.path.basename(filename)
self.data = {
'name': 'Pretty Name for this File',
'file': filename,
}
def test_image_form(self):
form = file_picker.uploads.file_pickers.ImageForm(data=self.data)
self.assertTrue(form.is_valid(), form.errors)
instance = form.save()
# Assert that the file gets placed into the upload_to for this model
upload_to = 'uploads/images'
self.assertEqual('{}/{}'.format(upload_to, self.basename), instance.file.name)
def test_file_form(self):
form = file_picker.uploads.file_pickers.FileForm(data=self.data)
self.assertTrue(form.is_valid(), form.errors)
instance = form.save()
# Assert that the file gets placed into the upload_to for this model
upload_to = 'uploads/files'
self.assertEqual('{}/{}'.format(upload_to, self.basename), instance.file.name)
| {
"content_hash": "2b3c95c23e7a235f44ea051f42e29a4d",
"timestamp": "",
"source": "github",
"line_count": 288,
"max_line_length": 109,
"avg_line_length": 37.37847222222222,
"alnum_prop": 0.5902461681374825,
"repo_name": "caktus/django-file-picker",
"id": "9da10a30c6986a6897327afd2ea1e938f1c5e402",
"size": "10765",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "file_picker/tests.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "45812"
},
{
"name": "HTML",
"bytes": "867"
},
{
"name": "JavaScript",
"bytes": "242024"
},
{
"name": "PHP",
"bytes": "1052"
},
{
"name": "Python",
"bytes": "37894"
}
],
"symlink_target": ""
} |
from utils.exec_command import call_subprocess
###############################################################################
#
def do(action_context):
# Persistent configuration of hostname is simply content of
# `/etc/hostname` file.
# Use `w` to overwrite content of the file.
with open('/etc/hostname', 'w') as config_file:
config_file.write(action_context.conf_m.set_hostname['hostname'])
# Set currently used hostname.
call_subprocess(
command_args = [
'/bin/hostname',
action_context.conf_m.set_hostname['hostname'],
],
raise_on_error = True,
capture_stdout = False,
capture_stderr = False,
)
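# A minimal sketch of the configuration this step expects (the exact config layout
# shown here is an assumption; conf_m is populated elsewhere in the bootstrap modules):
#
# action_context.conf_m.set_hostname = {
# 'hostname': 'node1.example.com',
# }
#
# With that in place, do(action_context) rewrites /etc/hostname for persistence and
# then calls /bin/hostname so the running system picks the new name up immediately.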
###############################################################################
# EOF
###############################################################################
| {
"content_hash": "ac6769da2da82a3501e380efcb3df29c",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 79,
"avg_line_length": 30.137931034482758,
"alnum_prop": 0.4462242562929062,
"repo_name": "uvsmtid/common-salt-states",
"id": "68503d0752732e63b6436011701b9661aa4f9ff0",
"size": "874",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "states/bootstrap/bootstrap.dir/modules/steps/deploy/set_hostname/rhel7.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "7830"
},
{
"name": "Erlang",
"bytes": "21224"
},
{
"name": "Java",
"bytes": "987"
},
{
"name": "Python",
"bytes": "307941"
},
{
"name": "Ruby",
"bytes": "20793"
},
{
"name": "SaltStack",
"bytes": "1039440"
},
{
"name": "Scheme",
"bytes": "5298"
},
{
"name": "Shell",
"bytes": "50876"
},
{
"name": "VimL",
"bytes": "3502"
}
],
"symlink_target": ""
} |
import IPython
import time
import sys
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from matplotlib.backends.backend_qt4agg import FigureCanvasQTAgg as FigureCanvas
from matplotlib.backends.backend_qt4agg import NavigationToolbar2QTAgg as NavigationToolbar
from matplotlib.figure import Figure
import matplotlib
import numpy as np
from adc_plot_ui import Ui_AdcPlotDialog
import kid_readout.roach.baseband
class AdcPlotDialog(QDialog,Ui_AdcPlotDialog):
def __init__(self, qApp, parent=None):
super(AdcPlotDialog, self).__init__(parent)
self.__app = qApp
self.setupUi(self)
self.dpi = 72
self.fig = Figure((9.1, 5.2), dpi=self.dpi)
# self.fig = Figure(dpi=self.dpi)
self.canvas = FigureCanvas(self.fig)
self.canvas.setParent(self)
self.verticalLayout.insertWidget(0,self.canvas)
self.canvas.setSizePolicy(QSizePolicy.Expanding,QSizePolicy.Expanding)
self.axes = self.fig.add_subplot(221)
self.axes2 = self.fig.add_subplot(222)
self.axes3 = self.fig.add_subplot(223)
self.axes4 = self.fig.add_subplot(224)
# Use matplotlib event handler
# self.canvas.mpl_connect('button_press_event', self.onclick)
self.mpl_toolbar = NavigationToolbar(self.canvas,self.navbar) #self.adc_plot_box)
self.line = None
self.line2 = None
self.line3 = None
self.line4 = None
self.pause_update = False
self.ri = kid_readout.roach.baseband.RoachBasebandWide()
self.ri.set_tone_freqs(np.array([77.915039]),nsamp=2**22)
# self.adc_atten_spin.editingFinished.connect(self.on_adc_atten)
# self.dac_atten_spin.editingFinished.connect(self.on_dac_atten)
self.push_apply_atten.clicked.connect(self.apply_atten)
self.push_tone.clicked.connect(self.onpush_set_tone)
QTimer.singleShot(1000, self.update_all)
@pyqtSlot()
def onpush_set_tone(self):
frq = float(self.line_tone_freq.text())
self.pause_update = True
self.ri.set_tone_freqs(np.array([frq]),nsamp=2**20)
self.pause_update = False
@pyqtSlot()
def apply_atten(self):
self.on_adc_atten()
self.on_dac_atten()
self.ri.set_fft_gain(int(self.spin_fft_gain.value()))
@pyqtSlot()
def on_adc_atten(self):
val = self.adc_atten_spin.value()
self.ri.set_adc_attenuator(val)
@pyqtSlot()
def on_dac_atten(self):
val = self.dac_atten_spin.value()
self.ri.set_dac_attenuator(val)
def update_all(self):
tic = time.time()
if not self.pause_update:
self.plot_adc()
self.status_label.setText("%.3f" % (time.time()-tic))
QTimer.singleShot(1000, self.update_all)
def plot_adc(self):
x,y = self.ri.get_raw_adc()
pxx,fr = matplotlib.mlab.psd(x,NFFT=1024,Fs=self.ri.fs*1e6,scale_by_freq = True)
fr = fr/1e6
pxx = 10*np.log10(pxx)
t = np.arange(len(x))/self.ri.fs
demod = self.check_demod.isChecked()
d,addr = self.ri.get_data(2,demod=demod)
print d.shape
nfft = np.min((1024*32,d.shape[0]/16))
dpxx,dfr = matplotlib.mlab.psd(d[:,0],NFFT=nfft,Fs=self.ri.fs*1e6/(2.0*self.ri.nfft),scale_by_freq=True)
dpxx = 10*np.log10(dpxx)
if self.line:
# xlim = self.axes.get_xlim()
# ylim = self.axes.get_ylim()
self.line.set_xdata(fr)
self.line.set_ydata(pxx)
self.line2.set_ydata(x)
self.line3.set_data(d[:,0].real,d[:,0].imag)
self.line4.set_ydata(dpxx)
else:
self.line, = self.axes.plot(fr,pxx)
self.line2, = self.axes2.plot(t,x)
self.line3, = self.axes3.plot(d[:,0].real,d[:,0].imag,'.')
self.line4, = self.axes4.plot(dfr,dpxx)
self.axes4.set_xscale('symlog')
self.axes.set_xlabel('MHz')
self.axes.set_ylabel('dB/Hz')
self.axes.grid(True)
self.axes2.set_xlabel('$\mu$s')
self.axes2.set_xlim(0,1.0)
self.axes4.set_xlabel('Hz')
self.axes4.set_ylabel('dB/Hz')
self.axes3.set_xlim(-2.**15,2**15)
self.axes3.set_ylim(-2.**15,2**15)
self.axes3.hlines([-2**15,2**15],-2**15,2**15)
self.axes3.vlines([-2**15,2**15],-2**15,2**15)
self.canvas.draw()
def main():
app = QApplication(sys.argv)
app.quitOnLastWindowClosed = True
form = AdcPlotDialog(app)
form.show()
# form.raise_()
# app.connect(form, SIGNAL('closeApplication'), app.exit)
IPython.embed()
# form.exec_()
app.exit()
# sys.exit(app.exec_())
if __name__ == "__main__":
main()
| {
"content_hash": "eece509e822da0104126e342037418ec",
"timestamp": "",
"source": "github",
"line_count": 136,
"max_line_length": 112,
"avg_line_length": 35.80882352941177,
"alnum_prop": 0.5907597535934291,
"repo_name": "ColumbiaCMB/kid_readout",
"id": "b7be96fac9318883ec65728ae15ae642e16f9774",
"size": "4870",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "apps/adcmon.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "C",
"bytes": "13672"
},
{
"name": "Python",
"bytes": "2033932"
}
],
"symlink_target": ""
} |
"""
Copyright (C) 2004-2015 Pivotal Software, Inc. All rights reserved.
This program and the accompanying materials are made available under
the terms of the under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import shutil
import subprocess
import sys
import os
def pline(msg, cmd=None):
sys.stdout.write('\n' + '_')
sys.stdin.readline()
sys.stdout.write('===>> ' + msg + '\n')
if cmd:
sys.stdout.write(cmd + '\n')
sys.stdin.readline()
if __name__ == '__main__':
subprocess.call(['psql', '-c', 'DROP TABLE IF EXISTS DEMO1,DEMO2'])
subprocess.call(['rm', '-rf', '/gpdata/agrawa2/gpdata/gpdata_WAL-REPLICATION/master_standby'])
sys.stdout.write('''
=================================================
| |
| Demo for WAL based Streaming Replication |
| |
=================================================
''')
pline('Check Standby Master configured...', 'gpstate -f')
subprocess.call(['gpstate', '-f'])
# pline('Added flexibility, new options to gpinitstandby')
# subprocess.call(['gpinitstandby', '--help'])
cmd = ('gpinitstandby -s support002.esc.dh.greenplum.com -P 5433 -F pg_system:/gpdata/agrawa2/gpdata/gpdata_WAL-REPLICATION/master_standby')
pline('Create Standby Master...', cmd);
subprocess.call(['gpinitstandby', '-s', 'support002.esc.dh.greenplum.com', '-P', '5433', '-F', 'pg_system:/gpdata/agrawa2/gpdata/gpdata_WAL-REPLICATION/master_standby'])
pline('Check Standby Master configured...', 'gpstate -f')
subprocess.call(['gpstate', '-f'])
cmd = ('SELECT * FROM pg_stat_replication;')
pline('Different way to fetch the same...', cmd)
subprocess.call(['psql', '-c', cmd, '-x'])
sql = ('CREATE TABLE demo1 AS SELECT i a FROM generate_series(1, 1000)i;')
pline('Create a Table with Data...', sql)
subprocess.call(['psql', '-c', sql])
sql = ('CREATE TABLE demo2 AS SELECT i b FROM generate_series(1, 500)i;')
pline('One more Table with Data...', sql)
subprocess.call(['psql', '-c', sql])
pline('Lets again check Standby Master state...', 'gpstate -f')
subprocess.call(['gpstate', '-f'])
cmd = ('kill -9 `ps -ef | grep 2200 | grep -v grep | awk \'{print $2}\'`')
pline('Lets create *** DISASTER *** hammer down Master', cmd)
subprocess.call(['/gpdata/agrawa2/code/TINC/private/mpp.gpdb.tests.storage.walrepl/kill_master.sh'])
os.environ["PGPORT"] = "5433"
os.environ["MASTER_DATA_DIRECTORY"] = "/gpdata/agrawa2/gpdata/gpdata_WAL-REPLICATION/master_standby"
pline('Just Activate Standby Master...', 'gpactivatestandby')
subprocess.call(['gpactivatestandby', '-f'])
pline('Access data from NEW Master...IMP point connect to new master PORT', 'SELECT count(*) FROM demo1')
subprocess.call(['psql', '-p', '5433', '-c', 'SELECT count(*) FROM demo1'])
pline('SELECT count(*) FROM demo2')
subprocess.call(['psql', '-p', '5433', '-c', 'SELECT count(*) FROM demo2'])
sys.stdout.write('''
==========================================================
| |
| DEMO COMPLETE for WAL based Streaming Replication |
| |
==========================================================
''')
| {
"content_hash": "64fa8eefa90429f03f16719042ad595a",
"timestamp": "",
"source": "github",
"line_count": 97,
"max_line_length": 173,
"avg_line_length": 39.74226804123711,
"alnum_prop": 0.5875486381322957,
"repo_name": "Quikling/gpdb",
"id": "6157af3d0edf2754eee689d7baabd29133674799",
"size": "3878",
"binary": false,
"copies": "9",
"ref": "refs/heads/master",
"path": "src/test/tinc/tincrepo/mpp/gpdb/tests/storage/walrepl/demo0822.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "5665"
},
{
"name": "Batchfile",
"bytes": "11492"
},
{
"name": "C",
"bytes": "35104900"
},
{
"name": "C++",
"bytes": "3826418"
},
{
"name": "CMake",
"bytes": "17118"
},
{
"name": "CSS",
"bytes": "7407"
},
{
"name": "Csound Score",
"bytes": "179"
},
{
"name": "DTrace",
"bytes": "1160"
},
{
"name": "Fortran",
"bytes": "14777"
},
{
"name": "GDB",
"bytes": "576"
},
{
"name": "Gherkin",
"bytes": "731336"
},
{
"name": "HTML",
"bytes": "191406"
},
{
"name": "Java",
"bytes": "268348"
},
{
"name": "JavaScript",
"bytes": "23969"
},
{
"name": "Lex",
"bytes": "196275"
},
{
"name": "M4",
"bytes": "105042"
},
{
"name": "Makefile",
"bytes": "428681"
},
{
"name": "PLSQL",
"bytes": "261269"
},
{
"name": "PLpgSQL",
"bytes": "5487194"
},
{
"name": "Perl",
"bytes": "3894496"
},
{
"name": "Perl 6",
"bytes": "14219"
},
{
"name": "Python",
"bytes": "8656525"
},
{
"name": "Roff",
"bytes": "51338"
},
{
"name": "Ruby",
"bytes": "26724"
},
{
"name": "SQLPL",
"bytes": "3824391"
},
{
"name": "Shell",
"bytes": "541518"
},
{
"name": "XS",
"bytes": "8405"
},
{
"name": "XSLT",
"bytes": "5779"
},
{
"name": "Yacc",
"bytes": "488297"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import, division, unicode_literals
from . import _base
try:
from collections import OrderedDict
except ImportError:
from ordereddict import OrderedDict
class Filter(_base.Filter):
def __iter__(self):
for token in _base.Filter.__iter__(self):
if token['type'] in ('StartTag', 'EmptyTag'):
attrs = OrderedDict()
for name, value in sorted(token['data'].items(),
key=lambda x: x[0]):
attrs[name] = value
token['data'] = attrs
yield token
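# A minimal usage sketch (assumes the html5lib parse/treewalker API of this vintage;
# the snippet is illustrative and not part of this module):
#
# import html5lib
# from html5lib.filters.alphabeticalattributes import Filter
#
# dom = html5lib.parse('<p b="2" a="1">hi</p>')
# walker = html5lib.getTreeWalker('etree')
# for token in Filter(walker(dom)):
# ... # StartTag/EmptyTag tokens now carry their attributes in sorted order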
| {
"content_hash": "8c18fd0ce24f315155e9a1a087b709e9",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 66,
"avg_line_length": 31.2,
"alnum_prop": 0.5448717948717948,
"repo_name": "mozilla/fjord",
"id": "70e7f7a2856b28055069421a71f5adb514841c17",
"size": "624",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "vendor/src/html5lib-python/html5lib/filters/alphabeticalattributes.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "161912"
},
{
"name": "HTML",
"bytes": "142781"
},
{
"name": "JavaScript",
"bytes": "305185"
},
{
"name": "Python",
"bytes": "903100"
},
{
"name": "Shell",
"bytes": "11313"
},
{
"name": "Smarty",
"bytes": "691"
}
],
"symlink_target": ""
} |
from django import forms
from django_select2 import *
from .models import Employee, Dept, ClassRoom, Lab, Word, School, Tag, Question, WordList
from .fields import GetSearchTestField, GetModelSearchTestField
from django.core.exceptions import ValidationError
def validate_fail_always(value):
raise ValidationError(u'%s not valid. In fact nothing is valid!' % value)
############# Choice fields ###################
class EmployeeChoices(AutoModelSelect2Field):
queryset = Employee.objects
search_fields = ['name__icontains', ]
class ClassRoomChoices(AutoModelSelect2MultipleField):
queryset = ClassRoom.objects
search_fields = ['number__icontains', ]
class ClassRoomSingleChoices(AutoModelSelect2Field):
queryset = ClassRoom.objects
search_fields = ['number__icontains', ]
class WordChoices(AutoModelSelect2Field):
queryset = Word.objects
search_fields = ['word__icontains', ]
class MultiWordChoices(AutoModelSelect2MultipleField):
queryset = Word.objects
search_fields = ['word__icontains', ]
class TagField(AutoModelSelect2TagField):
queryset = Tag.objects
search_fields = ['tag__icontains', ]
def get_model_field_values(self, value):
return {'tag': value}
class SelfChoices(AutoSelect2Field):
def get_val_txt(self, value):
if not hasattr(self, 'res_map'):
self.res_map = {}
return self.res_map.get(value, None)
def get_results(self, request, term, page, context):
if not hasattr(self, 'res_map'):
self.res_map = {}
mlen = len(self.res_map)
res = []
for i in range(1, 6):
idx = i + mlen
res.append((idx, term * i,))
self.res_map[idx] = term * i
self.choices = res
return (NO_ERR_RESP, False, res)
class SelfMultiChoices(AutoSelect2MultipleField):
big_data = {
1: u"First", 2: u"Second", 3: u"Third",
}
def validate_value(self, value):
if value in [v for v in self.big_data]:
return True
else:
return False
def coerce_value(self, value):
return int(value)
def get_val_txt(self, value):
if not hasattr(self, '_big_data'):
self._big_data = dict(self.big_data)
return self._big_data.get(value, None)
def get_results(self, request, term, page, context):
if not hasattr(self, '_big_data'):
self._big_data = dict(self.big_data)
res = [(v, self._big_data[v]) for v in self._big_data]
blen = len(res)
for i in range(1, 6):
idx = i + blen
res.append((idx, term * i,))
self._big_data[idx] = term * i
self.choices = res
return (NO_ERR_RESP, False, res)
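# Worked illustration of the heavy-field protocol implemented above (comment only;
# the request object and the exact ordering of the seed entries are incidental):
#
# status, has_more, results = field.get_results(request, 'ab', 1, None)
# # status == NO_ERR_RESP
# # has_more == False
# # results contains the three seed pairs from big_data plus
# # (4, u'ab'), (5, u'abab'), ..., (8, u'ababababab') built from the search term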
########### Forms ##############
class SchoolForm(forms.ModelForm):
classes = ClassRoomChoices()
class Meta:
model = School
class EmployeeForm(forms.ModelForm):
manager = EmployeeChoices(required=False)
dept = ModelSelect2Field(queryset=Dept.objects)
class Meta:
model = Employee
class DeptForm(forms.ModelForm):
allotted_rooms = ClassRoomChoices()
allotted_labs = ModelSelect2MultipleField(queryset=Lab.objects, required=False)
class Meta:
model = Dept
class MixedForm(forms.Form):
emp1 = EmployeeChoices()
rooms1 = ClassRoomChoices()
emp2 = EmployeeChoices()
rooms2 = ClassRoomChoices()
rooms3 = ClassRoomSingleChoices()
any_word = WordChoices()
self_choices = SelfChoices(label='Self copy choices')
self_multi_choices = SelfMultiChoices(label='Self copy multi-choices')
issue11_test = EmployeeChoices(
label='Issue 11 Test (Employee)',
widget=AutoHeavySelect2Widget(
select2_options={
'width': '32em',
'placeholder': u"Search foo"
}
)
)
always_fail_rooms = ClassRoomSingleChoices(validators=[validate_fail_always])
always_fail_rooms_multi = ClassRoomChoices(validators=[validate_fail_always])
always_fail_self_choice = SelfChoices(validators=[validate_fail_always], auto_id='always_fail_self_choice')
always_fail_self_choice_multi = SelfMultiChoices(validators=[validate_fail_always], auto_id='always_fail_self_choice_multi')
model_with_both_required_and_empty_label_false = ModelSelect2Field(
queryset=Employee.objects, empty_label=None, required=False) #issue#26
# These are just for testing Auto registration of fields
EmployeeChoices() # Should already be registered
EmployeeChoices(auto_id="EmployeeChoices_CustomAutoId") # Should get registered
class InitialValueForm(forms.Form):
select2Choice = Select2ChoiceField(initial=2,
choices=((1, "First"), (2, "Second"), (3, "Third"), ))
select2MultipleChoice = Select2MultipleChoiceField(initial=[2,3],
choices=((1, "First"), (2, "Second"), (3, "Third"), ))
heavySelect2Choice = AutoSelect2Field(initial=2,
choices=((1, "First"), (2, "Second"), (3, "Third"), ))
heavySelect2MultipleChoice = AutoSelect2MultipleField(initial=[1,3],
choices=((1, "First"), (2, "Second"), (3, "Third"), ))
self_choices = SelfChoices(label='Self copy choices', initial=2,
choices=((1, "First"), (2, "Second"), (3, "Third"), ))
self_multi_choices = SelfMultiChoices(label='Self copy multi-choices', initial=[2,3])
select2ChoiceWithQuotes = Select2ChoiceField(initial=2,
choices=((1, "'Single-Quote'"), (2, "\"Double-Quotes\""), (3, "\"Mixed-Quotes'"), ))
heavySelect2ChoiceWithQuotes = AutoSelect2Field(initial=2,
choices=((1, "'Single-Quote'"), (2, "\"Double-Quotes\""), (3, "\"Mixed-Quotes'"), ))
class QuestionForm(forms.ModelForm):
question = forms.CharField()
description = forms.CharField(widget=forms.Textarea)
tags = TagField()
class Meta:
model = Question
class WordsForm(forms.ModelForm):
word = WordChoices()
words = MultiWordChoices()
class Meta:
model = WordList
exclude = ['kind']
class GetSearchTestForm(forms.Form):
name = GetSearchTestField(required=False, label='Name')
dept = GetModelSearchTestField(required=False, label='Department')
class AnotherWordForm(forms.ModelForm):
word = WordChoices(widget=AutoHeavySelect2Widget())
class Meta:
model = WordList
exclude = ['kind', 'words']
| {
"content_hash": "692526b3c869a035c4214ed4e25a551f",
"timestamp": "",
"source": "github",
"line_count": 186,
"max_line_length": 128,
"avg_line_length": 34.365591397849464,
"alnum_prop": 0.6473717146433041,
"repo_name": "jrief/django-select2",
"id": "f7c4a8d8f20ea111d496ad9b0cf9e05fd63000c9",
"size": "6392",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "testapp/testapp/testmain/forms.py",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('face', '0006_auto_20170403_1707'),
]
operations = [
migrations.AlterField(
model_name='face',
name='adivasi',
field=models.CharField(max_length=100, null=True, blank=True),
),
migrations.AlterField(
model_name='face',
name='occupation',
field=models.CharField(help_text='Enter the occupation of the person', max_length=100, null=True, blank=True),
),
]
| {
"content_hash": "172021aaa644deaff073fc06f3924175",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 122,
"avg_line_length": 27.17391304347826,
"alnum_prop": 0.592,
"repo_name": "PARINetwork/pari",
"id": "47da464b17dd5ba5005416c4b3e13ae34388a77c",
"size": "649",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "face/migrations/0007_increase_occupation_and_adivasi_max_length_to_100.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "94103"
},
{
"name": "HTML",
"bytes": "452629"
},
{
"name": "JavaScript",
"bytes": "124537"
},
{
"name": "Less",
"bytes": "229040"
},
{
"name": "Python",
"bytes": "479247"
},
{
"name": "Shell",
"bytes": "3919"
}
],
"symlink_target": ""
} |
{% if created %}from django.db import models{% endif %}
class {{model_name}}(models.Model):
def __unicode__(self):
return self.title
user = models.CharField(max_length=200, db_index=True)
title = models.CharField(max_length=100)
#description = models.TextField(blank=False, null=False)
#date = models.DateTimeField('Start Date', blank=False, null=False, db_index=True)
#example_image_field = models.ImageField(blank=True, null=True, upload_to='expenses')
#example_choices_field = models.CharField(max_length=100, choices=STATUS_CHOICES, default='Submitted')
created = models.DateTimeField(auto_now_add=True, db_index=True)
updated = models.DateTimeField(auto_now=True, db_index=True)
@staticmethod
def quick_create(title=None, user=1):
"""
`quick_create` is a utility method which allows you to quickly create model instances.
This is particularly useful in your testing.
"""
if title is None:
title = "Some title"
data = {
"title": title,
"user": user,
}
return {{model_name}}.objects.create(**data)
| {
"content_hash": "f96a7828e1361c1953a63ff34aac49c3",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 106,
"avg_line_length": 31.7027027027027,
"alnum_prop": 0.6376811594202898,
"repo_name": "TangentMicroServices/PyMicroUtils",
"id": "aef04376db505c04e3bc47268e36fe46232d1760",
"size": "1173",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "fabness/templates/models.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "21631"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import, division, print_function
import operator
from toolz import first
import numpy as np
from datashape import dshape, var, DataShape
from dateutil.parser import parse as dt_parse
from datashape.predicates import isscalar, isboolean, isnumeric
from datashape import coretypes as ct, discover, unsigned, promote, optionify
from .core import parenthesize, eval_str
from .expressions import Expr, shape, ElemWise
from ..dispatch import dispatch
from ..compatibility import _strtypes
__all__ = '''BinOp UnaryOp Arithmetic Add Mult Sub Div FloorDiv Pow Mod USub
Relational Eq Ne Ge Lt Le Gt And Or Not'''.split()
def name(o):
if hasattr(o, '_name'):
return o._name
else:
return None
class BinOp(ElemWise):
__slots__ = '_hash', 'lhs', 'rhs'
__inputs__ = 'lhs', 'rhs'
def __init__(self, lhs, rhs):
self.lhs = lhs
self.rhs = rhs
def __str__(self):
lhs = parenthesize(eval_str(self.lhs))
rhs = parenthesize(eval_str(self.rhs))
return '%s %s %s' % (lhs, self.symbol, rhs)
@property
def _name(self):
if not isscalar(self.dshape.measure):
return None
l, r = name(self.lhs), name(self.rhs)
if l and not r:
return l
if r and not l:
return r
if l == r:
return l
@property
def _inputs(self):
result = []
if isinstance(self.lhs, Expr):
result.append(self.lhs)
if isinstance(self.rhs, Expr):
result.append(self.rhs)
return tuple(result)
def maxvar(L):
"""
>>> maxvar([1, 2, var])
Var()
>>> maxvar([1, 2, 3])
3
"""
if var in L:
return var
else:
return max(L)
def maxshape(shapes):
"""
>>> maxshape([(10, 1), (1, 10), ()])
(10, 10)
>>> maxshape([(4, 5), (5,)])
(4, 5)
"""
shapes = [shape for shape in shapes if shape]
if not shapes:
return ()
ndim = max(map(len, shapes))
shapes = [(1,) * (ndim - len(shape)) + shape for shape in shapes]
for dims in zip(*shapes):
if len(set(dims) - set([1])) >= 2:
raise ValueError("Shapes don't align, %s" % str(dims))
return tuple(map(maxvar, zip(*shapes)))
class UnaryOp(ElemWise):
__slots__ = '_hash', '_child',
def __init__(self, child):
self._child = child
def __str__(self):
return '%s(%s)' % (self.symbol, eval_str(self._child))
@property
def symbol(self):
return type(self).__name__
@property
def dshape(self):
return DataShape(*(shape(self._child) + (self._dtype,)))
@property
def _name(self):
return self._child._name
class Arithmetic(BinOp):
""" Super class for arithmetic operators like add or mul """
@property
def _dtype(self):
# we can't simply use .schema or .datashape because we may have a bare
# integer, for example
lhs, rhs = discover(self.lhs).measure, discover(self.rhs).measure
return promote(lhs, rhs)
@property
def dshape(self):
# TODO: better inference. e.g. int + int -> int
return DataShape(*(maxshape([shape(self.lhs), shape(self.rhs)]) +
(self._dtype,)))
class Add(Arithmetic):
symbol = '+'
op = operator.add
class Mult(Arithmetic):
symbol = '*'
op = operator.mul
class Sub(Arithmetic):
symbol = '-'
op = operator.sub
class Div(Arithmetic):
symbol = '/'
op = operator.truediv
@property
def _dtype(self):
lhs, rhs = discover(self.lhs).measure, discover(self.rhs).measure
return optionify(lhs, rhs, ct.float64)
class FloorDiv(Arithmetic):
symbol = '//'
op = operator.floordiv
@property
def _dtype(self):
lhs, rhs = discover(self.lhs).measure, discover(self.rhs).measure
is_unsigned = lhs in unsigned and rhs in unsigned
max_width = max(lhs.itemsize, rhs.itemsize)
prefix = 'u' if is_unsigned else ''
measure = getattr(ct, '%sint%d' % (prefix, max_width * 8))
return optionify(lhs, rhs, measure)
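# Worked examples of the promotion rules above (comment only; derived directly from
# Div._dtype and FloorDiv._dtype with plain, non-Option operands):
#
# int32 / int32 -> float64 (true division always promotes to float64)
# int32 // int32 -> int32 (max itemsize 4 bytes, signed)
# uint8 // uint16 -> uint16 (both operands unsigned, max itemsize 2 bytes)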
class Pow(Arithmetic):
symbol = '**'
op = operator.pow
class Mod(Arithmetic):
symbol = '%'
op = operator.mod
class USub(UnaryOp):
op = operator.neg
symbol = '-'
def __str__(self):
return '-%s' % parenthesize(eval_str(self._child))
@property
def _dtype(self):
# TODO: better inference. -uint -> int
return self._child.schema
@dispatch(ct.Option, object)
def scalar_coerce(ds, val):
if val or val == 0:
return scalar_coerce(ds.ty, val)
else:
return None
@dispatch((ct.Record, ct.Mono, ct.Option, DataShape), Expr)
def scalar_coerce(ds, val):
return val
@dispatch(ct.Date, _strtypes)
def scalar_coerce(_, val):
dt = dt_parse(val)
if dt.time():
raise ValueError("Can not coerce %s to type Date, "
"contains time information")
return dt.date()
@dispatch(ct.DateTime, _strtypes)
def scalar_coerce(_, val):
return dt_parse(val)
@dispatch(ct.CType, _strtypes)
def scalar_coerce(dt, val):
return np.asscalar(np.asarray(val, dtype=dt.to_numpy_dtype()))
@dispatch(ct.Record, object)
def scalar_coerce(rec, val):
if len(rec.fields) == 1:
return scalar_coerce(first(rec.types), val)
else:
raise TypeError("Trying to coerce complex datashape\n"
"got dshape: %s\n"
"scalar_coerce only intended for scalar values" % rec)
@dispatch(ct.DataShape, object)
def scalar_coerce(ds, val):
return scalar_coerce(ds.measure, val)
@dispatch(object, object)
def scalar_coerce(dtype, val):
return val
@dispatch(_strtypes, object)
def scalar_coerce(ds, val):
return scalar_coerce(dshape(ds), val)
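# Worked examples of the coercion chain above (comment only; each value follows from
# the dispatchers in this module):
#
# scalar_coerce('date', '2015-03-01') -> datetime.date(2015, 3, 1)
# scalar_coerce('datetime', '2015-03-01') -> datetime.datetime(2015, 3, 1, 0, 0)
# scalar_coerce('int32', '5') -> 5
# scalar_coerce('?int32', None) -> None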
def _neg(self):
return USub(self)
def _add(self, other):
result = Add(self, scalar_coerce(self.dshape, other))
result.dshape # Check that shapes and dtypes match up
return result
def _radd(self, other):
result = Add(scalar_coerce(self.dshape, other), self)
result.dshape # Check that shapes and dtypes match up
return result
def _mul(self, other):
result = Mult(self, scalar_coerce(self.dshape, other))
result.dshape # Check that shapes and dtypes match up
return result
def _rmul(self, other):
result = Mult(scalar_coerce(self.dshape, other), self)
result.dshape # Check that shapes and dtypes match up
return result
def _div(self, other):
result = Div(self, scalar_coerce(self.dshape, other))
result.dshape # Check that shapes and dtypes match up
return result
def _rdiv(self, other):
result = Div(scalar_coerce(self.dshape, other), self)
result.dshape # Check that shapes and dtypes match up
return result
def _floordiv(self, other):
result = FloorDiv(self, scalar_coerce(self.dshape, other))
result.dshape # Check that shapes and dtypes match up
return result
def _rfloordiv(self, other):
result = FloorDiv(scalar_coerce(self.dshape, other), self)
result.dshape # Check that shapes and dtypes match up
return result
def _sub(self, other):
result = Sub(self, scalar_coerce(self.dshape, other))
result.dshape # Check that shapes and dtypes match up
return result
def _rsub(self, other):
result = Sub(scalar_coerce(self.dshape, other), self)
result.dshape # Check that shapes and dtypes match up
return result
def _pow(self, other):
result = Pow(self, scalar_coerce(self.dshape, other))
result.dshape # Check that shapes and dtypes match up
return result
def _rpow(self, other):
result = Pow(scalar_coerce(self.dshape, other), self)
result.dshape # Check that shapes and dtypes match up
return result
def _mod(self, other):
result = Mod(self, scalar_coerce(self.dshape, other))
result.dshape # Check that shapes and dtypes match up
return result
def _rmod(self, other):
result = Mod(scalar_coerce(self.dshape, other), self)
result.dshape # Check that shapes and dtypes match up
return result
class Relational(Arithmetic):
_dtype = ct.bool_
class Eq(Relational):
symbol = '=='
op = operator.eq
class Ne(Relational):
symbol = '!='
op = operator.ne
class Ge(Relational):
symbol = '>='
op = operator.ge
class Le(Relational):
symbol = '<='
op = operator.le
class Gt(Relational):
symbol = '>'
op = operator.gt
class Lt(Relational):
symbol = '<'
op = operator.lt
class And(Arithmetic):
symbol = '&'
op = operator.and_
_dtype = ct.bool_
class Or(Arithmetic):
symbol = '|'
op = operator.or_
_dtype = ct.bool_
class Not(UnaryOp):
symbol = '~'
op = operator.invert
_dtype = ct.bool_
def __str__(self):
return '~%s' % parenthesize(eval_str(self._child))
def _eq(self, other):
result = Eq(self, scalar_coerce(self.dshape, other))
result.dshape # Check that shapes and dtypes match up
return result
def _ne(self, other):
result = Ne(self, scalar_coerce(self.dshape, other))
result.dshape # Check that shapes and dtypes match up
return result
def _lt(self, other):
result = Lt(self, scalar_coerce(self.dshape, other))
result.dshape # Check that shapes and dtypes match up
return result
def _le(self, other):
result = Le(self, scalar_coerce(self.dshape, other))
result.dshape # Check that shapes and dtypes match up
return result
def _gt(self, other):
result = Gt(self, scalar_coerce(self.dshape, other))
result.dshape # Check that shapes and dtypes match up
return result
def _ge(self, other):
result = Ge(self, scalar_coerce(self.dshape, other))
result.dshape # Check that shapes and dtypes match up
return result
def _invert(self):
result = Invert(self)
result.dshape # Check that shapes and dtypes match up
return result
def _and(self, other):
result = And(self, other)
result.dshape # Check that shapes and dtypes match up
return result
def _rand(self, other):
result = And(other, self)
result.dshape # Check that shapes and dtypes match up
return result
def _or(self, other):
result = Or(self, other)
result.dshape # Check that shapes and dtypes match up
return result
def _ror(self, other):
result = Or(other, self)
result.dshape # Check that shapes and dtypes match up
return result
def _invert(self):
result = Not(self)
result.dshape # Check that shapes and dtypes match up
return result
Invert = Not
BitAnd = And
BitOr = Or
from .expressions import schema_method_list
schema_method_list.extend([
(isnumeric,
set([_add, _radd, _mul,
_rmul, _div, _rdiv, _floordiv, _rfloordiv, _sub, _rsub, _pow,
_rpow, _mod, _rmod, _neg])),
(isscalar, set([_eq, _ne, _lt, _le, _gt, _ge])),
(isboolean, set([_or, _ror, _and, _rand, _invert])),
])
| {
"content_hash": "b98711ffc683fe38f1a88f8dda7a67ec",
"timestamp": "",
"source": "github",
"line_count": 458,
"max_line_length": 78,
"avg_line_length": 24.048034934497817,
"alnum_prop": 0.6247503177773742,
"repo_name": "mrocklin/blaze",
"id": "66fca34e08c21066288cab619d2fa6e2c40fd9ae",
"size": "11014",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "blaze/expr/arithmetic.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "581781"
},
{
"name": "Shell",
"bytes": "6532"
}
],
"symlink_target": ""
} |
from memory_profiler import memory_usage
def some_func(*args, **kwargs):
return args, kwargs
def test_memory_usage():
# Check that memory_usage works with functions with star args.
mem, ret = memory_usage((some_func, (1, 2), dict(a=1)), retval=True)
assert ret[0] == (1, 2)
assert ret[1] == dict(a=1)
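# Two other common invocation forms, sketched from memory_profiler's public API
# (the interval/timeout values here are arbitrary):
#
# mem_over_time = memory_usage(-1, interval=0.1, timeout=1) # sample this process
# peak = max(memory_usage((some_func, (1, 2), dict(a=1)))) # peak usage of one call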
if __name__ == "__main__":
test_memory_usage()
| {
"content_hash": "e1d56c92c4e43abe27c454de3fc6b3aa",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 72,
"avg_line_length": 25.133333333333333,
"alnum_prop": 0.6259946949602122,
"repo_name": "olivetree123/memory_profiler",
"id": "2b98a414a8fb7f234b7f958607122a1fdd9357b5",
"size": "377",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "test/test_memory_usage.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "405"
},
{
"name": "Python",
"bytes": "54671"
}
],
"symlink_target": ""
} |
"""
Generic date range views.
Django's generic date views only deal with a single date per
model. The date range views replicate the API but deal with
a start and an end date.
"""
import datetime
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.http import Http404
from django.utils import timezone
from django.utils.encoding import force_str, force_text
from django.utils.translation import ugettext as _
from django.views.generic.base import View
from django.views.generic.detail import (
BaseDetailView, SingleObjectTemplateResponseMixin,
)
from django.views.generic.list import (
MultipleObjectMixin, MultipleObjectTemplateResponseMixin,
)
from django.views.generic.dates import (
YearMixin, MonthMixin, DayMixin, WeekMixin,
DateMixin,
)
from .. import conf, preview
if conf.USE_PREVIEW_DATETIME:
effective_datetime = preview.datetime
else:
effective_datetime = timezone.datetime
class DateRangeMixin(DateMixin):
"""
Mixin class for views manipulating date-based data.
"""
date_field = None
end_date_field = None
allow_future = False
def get_end_date_field(self):
"""
Get the name of the end date field to be used to filter by.
"""
if self.end_date_field is None:
raise ImproperlyConfigured("%s.end_date_field is required." % self.__class__.__name__)
return self.end_date_field
# Note: the following methods only work in subclasses that also
# inherit SingleObjectMixin or MultipleObjectMixin.
def _make_date_lookup_arg(self, value):
"""
Convert a date into a datetime when the date field is a DateTimeField.
When time zone support is enabled, `date` is assumed to be in the
current time zone, so that displayed items are consistent with the URL.
"""
if self.uses_datetime_field:
value = datetime.datetime.combine(value, datetime.time.min)
if settings.USE_TZ:
value = timezone.make_aware(value, timezone.get_current_timezone())
return value
def _make_single_date_lookup(self, date):
"""
Get the lookup kwargs for filtering on a single date.
If the date field is a DateTimeField, we can't just filter on
date_field=date because that doesn't take the time into account.
"""
date_field = self.get_date_field()
if self.uses_datetime_field:
since = self._make_date_lookup_arg(date)
until = self._make_date_lookup_arg(date + datetime.timedelta(days=1))
return {
'%s__gte' % date_field: since,
'%s__lt' % date_field: until,
}
else:
# Skip self._make_date_lookup_arg, it's a no-op in this branch.
return {date_field: date}
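# Illustration of what _make_single_date_lookup produces (hypothetical 'start' date
# field, a DateTimeField, and USE_TZ disabled):
#
# self._make_single_date_lookup(datetime.date(2015, 1, 1))
# # -> {'start__gte': datetime.datetime(2015, 1, 1, 0, 0),
# # 'start__lt': datetime.datetime(2015, 1, 2, 0, 0)}
#
# With a plain DateField the same call collapses to {'start': datetime.date(2015, 1, 1)}.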
class BaseDateListView(MultipleObjectMixin, DateMixin, View):
"""
Abstract base class for date-based views displaying a list of objects.
"""
allow_empty = False
date_list_period = 'year'
def get(self, request, *args, **kwargs):
self.date_list, self.object_list, extra_context = self.get_dated_items()
context = self.get_context_data(object_list=self.object_list,
date_list=self.date_list)
context.update(extra_context)
return self.render_to_response(context)
def get_dated_items(self):
"""
Obtain the list of dates and items.
"""
raise NotImplementedError('A DateView must provide an implementation of get_dated_items()')
def get_ordering(self):
"""
Returns the field or fields to use for ordering the queryset; uses the
date field by default.
"""
return self.get_date_field() if self.ordering is None else self.ordering
def get_dated_queryset(self, **lookup):
"""
Get a queryset properly filtered according to `allow_future` and any
extra lookup kwargs.
"""
qs = self.get_queryset().filter(**lookup)
date_field = self.get_date_field()
allow_future = self.get_allow_future()
allow_empty = self.get_allow_empty()
paginate_by = self.get_paginate_by(qs)
if not allow_future:
now = effective_datetime.now() if self.uses_datetime_field else effective_datetime.today()
qs = qs.filter(**{'%s__lte' % date_field: now})
if not allow_empty:
# When pagination is enabled, it's better to do a cheap query
# than to load the unpaginated queryset in memory.
is_empty = len(qs) == 0 if paginate_by is None else not qs.exists()
if is_empty:
raise Http404(_("No %(verbose_name_plural)s available") % {
'verbose_name_plural': force_text(qs.model._meta.verbose_name_plural)
})
return qs
def get_date_list_period(self):
"""
Get the aggregation period for the list of dates: 'year', 'month', or 'day'.
"""
return self.date_list_period
def get_date_list(self, queryset, date_type=None, ordering='ASC'):
"""
Get a date list by calling `queryset.dates/datetimes()`, checking
along the way for empty lists that aren't allowed.
"""
date_field = self.get_date_field()
allow_empty = self.get_allow_empty()
if date_type is None:
date_type = self.get_date_list_period()
if self.uses_datetime_field:
date_list = queryset.datetimes(date_field, date_type, ordering)
else:
date_list = queryset.dates(date_field, date_type, ordering)
if date_list is not None and not date_list and not allow_empty:
name = force_text(queryset.model._meta.verbose_name_plural)
raise Http404(_("No %(verbose_name_plural)s available") %
{'verbose_name_plural': name})
return date_list
class BaseArchiveIndexView(BaseDateListView):
"""
Base class for archives of date-based items.
Requires a response mixin.
"""
context_object_name = 'latest'
def get_dated_items(self):
"""
Return (date_list, items, extra_context) for this request.
"""
qs = self.get_dated_queryset()
date_list = self.get_date_list(qs, ordering='DESC')
if not date_list:
qs = qs.none()
return (date_list, qs, {})
class ArchiveIndexView(MultipleObjectTemplateResponseMixin, BaseArchiveIndexView):
"""
Top-level archive of date-based items.
"""
template_name_suffix = '_archive'
class BaseYearArchiveView(YearMixin, BaseDateListView):
"""
List of objects published in a given year.
"""
date_list_period = 'month'
make_object_list = False
def get_dated_items(self):
"""
Return (date_list, items, extra_context) for this request.
"""
year = self.get_year()
date_field = self.get_date_field()
date = _date_from_string(year, self.get_year_format())
since = self._make_date_lookup_arg(date)
until = self._make_date_lookup_arg(self._get_next_year(date))
lookup_kwargs = {
'%s__gte' % date_field: since,
'%s__lt' % date_field: until,
}
qs = self.get_dated_queryset(**lookup_kwargs)
date_list = self.get_date_list(qs)
if not self.get_make_object_list():
# We need this to be a queryset since parent classes introspect it
# to find information about the model.
qs = qs.none()
return (date_list, qs, {
'year': date,
'next_year': self.get_next_year(date),
'previous_year': self.get_previous_year(date),
})
def get_make_object_list(self):
"""
Return `True` if this view should contain the full list of objects in
the given year.
"""
return self.make_object_list
class YearArchiveView(MultipleObjectTemplateResponseMixin, BaseYearArchiveView):
"""
List of objects published in a given year.
"""
template_name_suffix = '_archive_year'
class BaseMonthArchiveView(YearMixin, MonthMixin, BaseDateListView):
"""
List of objects published in a given month.
"""
date_list_period = 'day'
def get_dated_items(self):
"""
Return (date_list, items, extra_context) for this request.
"""
year = self.get_year()
month = self.get_month()
date_field = self.get_date_field()
date = _date_from_string(year, self.get_year_format(),
month, self.get_month_format())
since = self._make_date_lookup_arg(date)
until = self._make_date_lookup_arg(self._get_next_month(date))
lookup_kwargs = {
'%s__gte' % date_field: since,
'%s__lt' % date_field: until,
}
qs = self.get_dated_queryset(**lookup_kwargs)
date_list = self.get_date_list(qs)
return (date_list, qs, {
'month': date,
'next_month': self.get_next_month(date),
'previous_month': self.get_previous_month(date),
})
class MonthArchiveView(MultipleObjectTemplateResponseMixin, BaseMonthArchiveView):
"""
List of objects published in a given month.
"""
template_name_suffix = '_archive_month'
class BaseWeekArchiveView(YearMixin, WeekMixin, BaseDateListView):
"""
List of objects published in a given week.
"""
def get_dated_items(self):
"""
Return (date_list, items, extra_context) for this request.
"""
year = self.get_year()
week = self.get_week()
date_field = self.get_date_field()
week_format = self.get_week_format()
week_start = {
'%W': '1',
'%U': '0',
}[week_format]
date = _date_from_string(year, self.get_year_format(),
week_start, '%w',
week, week_format)
since = self._make_date_lookup_arg(date)
until = self._make_date_lookup_arg(self._get_next_week(date))
lookup_kwargs = {
'%s__gte' % date_field: since,
'%s__lt' % date_field: until,
}
qs = self.get_dated_queryset(**lookup_kwargs)
return (None, qs, {
'week': date,
'next_week': self.get_next_week(date),
'previous_week': self.get_previous_week(date),
})
class WeekArchiveView(MultipleObjectTemplateResponseMixin, BaseWeekArchiveView):
"""
List of objects published in a given week.
"""
template_name_suffix = '_archive_week'
class BaseDayArchiveView(YearMixin, MonthMixin, DayMixin, BaseDateListView):
"""
List of objects published on a given day.
"""
def get_dated_items(self):
"""
Return (date_list, items, extra_context) for this request.
"""
year = self.get_year()
month = self.get_month()
day = self.get_day()
date = _date_from_string(year, self.get_year_format(),
month, self.get_month_format(),
day, self.get_day_format())
return self._get_dated_items(date)
def _get_dated_items(self, date):
"""
Do the actual heavy lifting of getting the dated items; this accepts a
date object so that TodayArchiveView can be trivial.
"""
lookup_kwargs = self._make_single_date_lookup(date)
qs = self.get_dated_queryset(**lookup_kwargs)
return (None, qs, {
'day': date,
'previous_day': self.get_previous_day(date),
'next_day': self.get_next_day(date),
'previous_month': self.get_previous_month(date),
'next_month': self.get_next_month(date)
})
class DayArchiveView(MultipleObjectTemplateResponseMixin, BaseDayArchiveView):
"""
List of objects published on a given day.
"""
template_name_suffix = "_archive_day"
class BaseTodayArchiveView(BaseDayArchiveView):
"""
List of objects published today.
"""
def get_dated_items(self):
"""
Return (date_list, items, extra_context) for this request.
"""
return self._get_dated_items(datetime.date.today())
class TodayArchiveView(MultipleObjectTemplateResponseMixin, BaseTodayArchiveView):
"""
List of objects published today.
"""
template_name_suffix = "_archive_day"
class BaseDateDetailView(YearMixin, MonthMixin, DayMixin, DateMixin, BaseDetailView):
"""
Detail view of a single object on a single date; this differs from the
standard DetailView by accepting a year/month/day in the URL.
"""
def get_object(self, queryset=None):
"""
Get the object this request displays.
"""
year = self.get_year()
month = self.get_month()
day = self.get_day()
date = _date_from_string(year, self.get_year_format(),
month, self.get_month_format(),
day, self.get_day_format())
# Use a custom queryset if provided
qs = self.get_queryset() if queryset is None else queryset
if not self.get_allow_future() and date > datetime.date.today():
raise Http404(_(
"Future %(verbose_name_plural)s not available because "
"%(class_name)s.allow_future is False."
) % {
'verbose_name_plural': qs.model._meta.verbose_name_plural,
'class_name': self.__class__.__name__,
})
# Filter down a queryset from self.queryset using the date from the
# URL. This'll get passed as the queryset to DetailView.get_object,
# which'll handle the 404
lookup_kwargs = self._make_single_date_lookup(date)
qs = qs.filter(**lookup_kwargs)
return super(BaseDetailView, self).get_object(queryset=qs)
class DateDetailView(SingleObjectTemplateResponseMixin, BaseDateDetailView):
"""
Detail view of a single object on a single date; this differs from the
standard DetailView by accepting a year/month/day in the URL.
"""
template_name_suffix = '_detail'
def _date_from_string(year, year_format, month='', month_format='', day='', day_format='', delim='__'):
"""
Helper: get a datetime.date object given a format string and a year,
month, and day (only year is mandatory). Raise a 404 for an invalid date.
"""
format = delim.join((year_format, month_format, day_format))
datestr = delim.join((year, month, day))
try:
return datetime.datetime.strptime(force_str(datestr), format).date()
except ValueError:
raise Http404(_("Invalid date string '%(datestr)s' given format '%(format)s'") % {
'datestr': datestr,
'format': format,
})
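# Worked example (illustrative values): _date_from_string('2021', '%Y', '03', '%m', '14', '%d')
# joins the pieces into format '%Y__%m__%d' and datestr '2021__03__14', and
# returns datetime.date(2021, 3, 14).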
def _get_next_prev(generic_view, date, is_previous, period):
"""
Helper: Get the next or the previous valid date. The idea is to allow
links on month/day views to never be 404s by never providing a date
that'll be invalid for the given view.
This is a bit complicated since it handles different intervals of time,
hence the coupling to generic_view.
However in essence the logic comes down to:
* If allow_empty and allow_future are both true, this is easy: just
return the naive result (just the next/previous day/week/month,
regardless of object existence.)
* If allow_empty is true, allow_future is false, and the naive result
isn't in the future, then return it; otherwise return None.
* If allow_empty is false and allow_future is true, return the next
date *that contains a valid object*, even if it's in the future. If
there are no next objects, return None.
* If allow_empty is false and allow_future is false, return the next
date that contains a valid object. If that date is in the future, or
if there are no next objects, return None.
"""
date_field = generic_view.get_date_field()
allow_empty = generic_view.get_allow_empty()
allow_future = generic_view.get_allow_future()
get_current = getattr(generic_view, '_get_current_%s' % period)
get_next = getattr(generic_view, '_get_next_%s' % period)
# Bounds of the current interval
start, end = get_current(date), get_next(date)
# If allow_empty is True, the naive result will be valid
if allow_empty:
if is_previous:
result = get_current(start - datetime.timedelta(days=1))
else:
result = end
if allow_future or result <= effective_datetime.today():
return result
else:
return None
# Otherwise, we'll need to go to the database to look for an object
# whose date_field is at least (greater than/less than) the given
# naive result
else:
# Construct a lookup and an ordering depending on whether we're doing
# a previous date or a next date lookup.
if is_previous:
lookup = {'%s__lt' % date_field: generic_view._make_date_lookup_arg(start)}
ordering = '-%s' % date_field
else:
lookup = {'%s__gte' % date_field: generic_view._make_date_lookup_arg(end)}
ordering = date_field
# Filter out objects in the future if appropriate.
if not allow_future:
# Fortunately, to match the implementation of allow_future,
# we need __lte, which doesn't conflict with __lt above.
if generic_view.uses_datetime_field:
now = effective_datetime.now()
else:
now = effective_datetime.today()
lookup['%s__lte' % date_field] = now
qs = generic_view.get_queryset().filter(**lookup).order_by(ordering)
# Snag the first object from the queryset; if it doesn't exist that
# means there's no next/previous link available.
try:
result = getattr(qs[0], date_field)
except IndexError:
return None
# Convert datetimes to dates in the current time zone.
if generic_view.uses_datetime_field:
if settings.USE_TZ:
result = effective_datetime.localtime(result)
result = result.date()
# Return the first day of the period.
return get_current(result)
| {
"content_hash": "d0dfa70746658ad1746647f870d954bb",
"timestamp": "",
"source": "github",
"line_count": 550,
"max_line_length": 103,
"avg_line_length": 33.95636363636363,
"alnum_prop": 0.6059113300492611,
"repo_name": "sha-red/django-shared-utils",
"id": "8d1e66272600bd92977678df044be14eaf20f1cb",
"size": "18709",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "shared/utils/views/daterange.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "3436"
},
{
"name": "Python",
"bytes": "83510"
}
],
"symlink_target": ""
} |
"""
WSGI config for atlas_site project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "atlas.settings")
application = get_wsgi_application()
| {
"content_hash": "d18473d90d1466765214d6505e5a8faf",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 78,
"avg_line_length": 24.5625,
"alnum_prop": 0.7684478371501272,
"repo_name": "flaviodrt/atlas_site",
"id": "cd15ab1d8a8030aaa2681f9593a1b2faabdbc1fc",
"size": "393",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "atlas/atlas/wsgi.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2871"
},
{
"name": "HTML",
"bytes": "6719"
},
{
"name": "JavaScript",
"bytes": "3875"
},
{
"name": "Python",
"bytes": "39097"
},
{
"name": "Shell",
"bytes": "942"
}
],
"symlink_target": ""
} |
import re
import socket
import string
import errno
class SqueakNetException(Exception):
"""
Exception class for SqueakNet errors
"""
def __init__(self, errormessage,errnum):
self.errmsg = errormessage
self.errnum = errnum
def __str__(self):
return "SqueakNetException[%d]: %s\n" % (self.errnum,self.errmsg)
class SqueakNet():
"""
A class to handle communication with the SqueakFS Squeak TCP Server.
TODO: We need to magically support all of squeak's CR/CRLF/\t etc etc.
"""
def __init__(self,port):
self.host='localhost'
self.port=int(port)
self.timeouts = 0
        self.MAX_TIMEOUT = 5  # 5 timeouts at 1 second each before giving up.
self.__connectSocket()
self.replacevars = {'\\': "__BACKSLASH__",
'/': "__SLASH__",
'*': "__STAR__"}
self.backwards_replacevars = {"__BACKSLASH__": '\\',
"__SLASH__": '/' ,
"__STAR__": '*' }
def __connectSocket(self):
if self.timeouts > self.MAX_TIMEOUT:
#Okay, server is dead, let's give up.
self.sock = None
raise SqueakNetException("Socket not connected",-2)
self.sock = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
# self.sock.settimeout(1)
self.sock.connect((self.host,self.port))
# self.sFile = self.sock.makefile("rw",2048)
def sendConvertSpecial(self,str):
self.send(re.sub(r'(__STAR__)|(__BACKSLASH__)|(__SLASH__)', lambda m: self.backwards_replacevars[m.group(0)],str))
def send(self,str):
if(not self.sock):
raise SqueakNetException("Socket not connected",-2)
try:
self.__send(str)
except socket.error, e:
#Most probably a Broken Pipe, let's confirm
if e[0] == errno.EPIPE:
self.timeouts = self.timeouts + 1
self.__connectSocket()
self.send(str) #try again.
return #raise SqueakNetException("Broken pipe",e[0])
else:
raise
def __send(self,str):
self.sock.send(str + "\n")
def recv(self):
return self.__recv()
def __recv(self):
if(not self.sock):
raise SqueakNetException("Socket not connected",-2)
try:
#Receive data length
# recvlen = self.sFile.readline() #breaks on LF
# recvlen = int(recvlen)
# results = self.sFile.read(recvlen)
data = self.sock.recv(1024)
res = data.split("\n",1)
results = res[1]
while(int(res[0]) != len(results)):
results = results + self.sock.recv(int(res[0])-len(results))
if(results.startswith("Error:")):
raise SqueakNetException(results,-1)
except socket.timeout,e:
#Socket probably dead. Let's try and reconnect it.
self.timeouts = self.timeouts + 1
self.__connectSocket()
raise SqueakNetException("Error: Timeout",-1)
return results
def readResponse(self):
return self.__recv()
def readResponseConvertNL(self):
data = self.recv().replace("\r","\n") + "\n"
return data
def readResponseAsArray(self):
data = self.__recv().rstrip("\r").split("\r")
if(data[0] == ''): data = []
return data
def readResponseAsArrayConvertSpecial(self):
data = self.__recv().rstrip("\r").split("\r")
if(data[0] == ''): data = []
return map(lambda x: re.sub('[\\*\/]', lambda m: self.replacevars[m.group(0)],x),data)
def readResponseAsBool(self):
try:
data = self.__recv()
except SqueakNetException,e:
return False
return True
def getSuperClass(self,inClass):
"""
Receives the name of the superclass.
"""
self.sendConvertSpecial("getSuperClass:\t%s"%(inClass))
return self.readResponse()
def getSubClasses(self,inClass):
"""
Receives the names of all subclasses in an array.
"""
self.sendConvertSpecial("getSubClasses:\t%s"%(inClass))
return self.readResponseAsArrayConvertSpecial()
def getDirectSubClasses(self,inClass):
"""
Receives the names of all direct subclasses in an array.
"""
self.sendConvertSpecial("getDirectSubClasses:\t%s"%(inClass))
return self.readResponseAsArrayConvertSpecial()
def getAllClasses(self):
"""
Receives the name of all classes as an array.
"""
self.send("getAllClasses")
return self.readResponseAsArrayConvertSpecial()
def getInstanceMethod(self,inClass,method):
"""
Receives the sourcecode of an instancemethod.
XXX: How to output this in a good way? They use \r for newlines.
"""
self.sendConvertSpecial("getInstanceMethod:InClass:\t%s\t%s"%(method,inClass))
return self.readResponseConvertNL()
def getClassMethod(self,inClass,method):
"""
Receives the sourcecode of a classmethod.
"""
self.sendConvertSpecial("getClassMethod:InClass:\t%s\t%s"%(method,inClass))
return self.readResponseConvertNL()
def getCategories(self):
"""
Receives a list with all top-level categories.
"""
self.send("getCategories")
return self.readResponseAsArrayConvertSpecial()
def getClassMembers(self,inClass):
"""
Receives a list of all class member variables.
"""
self.sendConvertSpecial("getClassMembers:\t%s"%(inClass))
return self.readResponseAsArrayConvertSpecial()
def getInstanceMembers(self,inClass):
"""
Receives a list of all instance member variables.
"""
self.sendConvertSpecial("getInstanceMembers:\t%s"%(inClass))
return self.readResponseAsArrayConvertSpecial()
def getInstanceProtocols(self,inClass):
"""
Receives a list of all protocols for instance methods.
Note, this does not contain -- all --, as that one is just faked by the standard squeak browser.
"""
self.sendConvertSpecial("getInstanceProtocols:\t%s"%(inClass))
return self.readResponseAsArrayConvertSpecial()
def getClassProtocols(self,inClass):
"""
Receives a list of all protocols for class methods.
Note, this does not contain -- all --, as that one is just faked by the standard squeak browser.
"""
self.sendConvertSpecial("getClassProtocols:\t%s"%(inClass))
return self.readResponseAsArrayConvertSpecial()
def getMethodsInInstanceProtocol(self,inClass,inProtocol):
"""
Receives all methods in an instanceprotocol.
You can't use -- all -- here.
"""
self.sendConvertSpecial("getMethodsInInstanceProtocol:InClass:\t%s\t%s"%(inProtocol,inClass))
return self.readResponseAsArrayConvertSpecial()
def getMethodsInClassProtocol(self,inClass,inProtocol):
"""
Receives all methods in a classprotocol.
You can't use -- all -- here.
"""
self.sendConvertSpecial("getMethodsInClassProtocol:InClass:\t%s\t%s"%(inProtocol,inClass))
return self.readResponseAsArrayConvertSpecial()
def getClassComment(self,inClass):
"""
Receives the comment of a class.
"""
self.sendConvertSpecial("getClassComment:\t%s"%(inClass))
return self.readResponseConvertNL()
def getClassesInCategory(self,category):
"""
Receives the classes available under a category.
"""
self.sendConvertSpecial("getClassesInCategory:\t%s"%(category))
return self.readResponseAsArrayConvertSpecial()
def getInstanceMethodsInClass(self,inClass):
"""
Returns an array with all instancemethods in a class.
"""
self.sendConvertSpecial("getInstanceMethodsInClass:\t%s"%(inClass))
return self.readResponseAsArrayConvertSpecial()
def getClassMethodsInClass(self,inClass):
"""
Returns an array with all classmethods in a class
"""
self.sendConvertSpecial("getClassMethodsInClass:\t%s"%(inClass))
return self.readResponseAsArrayConvertSpecial()
def getTraits(self,inClass):
self.sendConvertSpecial("getTraits:\t%s"%(inClass))
return self.readResponseAsArrayConvertSpecial()
def getAllTraits(self):
self.send("getAllTraits")
return self.readResponseAsArrayConvertSpecial()
def getTraitUsers(self,inTrait):
self.sendConvertSpecial("getTraitUsers:\t%s"%(inTrait))
return self.readResponseAsArrayConvertSpecial()
def isTrait(self,inClass):
self.sendConvertSpecial("isTrait:\t%s"%(inClass))
return self.readResponseAsBool()
def isClassAvailable(self,inClass):
"""
Checks if a class is available in the squeak image.
returns True if it's available.
"""
self.sendConvertSpecial("isClassAvailable:\t%s"%(inClass))
return self.readResponseAsBool()
def isInstanceMethodAvailable(self,inClass,method):
"""
Checks if a instance method is available for the selected class.
"""
self.sendConvertSpecial("isInstanceMethodAvailable:inClass:\t%s\t%s"%(method,inClass))
return self.readResponseAsBool()
def isClassMethodAvailable(self,inClass,method):
"""
Checks if an class method is available for the selected class
FIXME: Make this faster.
"""
try:
res = self.getClassMethod(inClass,method)
except SqueakNetException,e:
return False
return True
def isInstanceProtocolAvailable(self,protocol,inClass):
"""
"""
self.sendConvertSpecial("isInstanceProtocolAvailable:inClass:\t%s\t%s"%(protocol,inClass))
return self.readResponseAsBool()
def isClassProtocolAvailable(self,protocol,inClass):
self.sendConvertSpecial("isClassProtocolAvailable:inClass:\t%s\t%s"%(protocol,inClass))
return self.readResponseAsBool()
def isCategoryAvailable(self,category):
        self.sendConvertSpecial("isCategoryAvailable:\t%s"%(category))
return self.readResponseAsBool()
def isClassMethodInProtocol(self,method,protocol,inClass):
self.sendConvertSpecial("isClassMethod:InProtocol:inClass:\t%s\t%s\t%s"%(method,protocol,inClass))
return self.readResponseAsBool()
def isInstanceMethodInProtocol(self,method,protocol,inClass):
self.sendConvertSpecial("isInstanceMethod:InProtocol:inClass:\t%s\t%s\t%s"%(method,protocol,inClass))
return self.readResponseAsBool()
def isClassInCategory(self,category,inClass):
self.sendConvertSpecial("isClass:InCategory:\t%s\t%s"%(inClass,category))
return self.readResponseAsBool()
def isCategoryAvailable(self,category):
self.sendConvertSpecial("isCategoryAvailable:\t%s"%(category))
return self.readResponseAsBool()
def getNumberOfClasses(self):
self.send("getNumberOfClasses")
return int(self.readResponse())
if __name__ == "__main__":
print "Please run unittests (py.test) or squeakfs.py to start the filesystem"
k = SqueakNet(40000)
k.isInstanceMethodAvailable("Project","children")
k.isClassMethodAvailable("Project","sdfs")
| {
"content_hash": "9a399cc7c357a0b95330ee4be99e0780",
"timestamp": "",
"source": "github",
"line_count": 327,
"max_line_length": 122,
"avg_line_length": 36.37003058103976,
"alnum_prop": 0.6077524594299167,
"repo_name": "phb/squeakfs",
"id": "d3640c2710d7aaaa39f44a56ab9e05ebc85c81bb",
"size": "11893",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "squeakNet.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "57741"
}
],
"symlink_target": ""
} |
"""
Provides support to auto-tuning networks using AutoTVM.
"""
import os.path
import logging
import time
from copy import deepcopy
from typing import Any, Optional, Dict, List, Union
from urllib.parse import urlparse
import tvm
from tvm import autotvm, auto_scheduler
from tvm.auto_scheduler.search_task import HardwareParams
from tvm.autotvm.tuner import GATuner
from tvm.autotvm.tuner import GridSearchTuner
from tvm.autotvm.tuner import RandomTuner
from tvm.autotvm.tuner import XGBTuner
from tvm.target import Target
from . import TVMCException, composite_target, frontends
from .main import register_parser
from .model import TVMCModel
from .target import target_from_cli, generate_target_args, reconstruct_target_args
from .shape_parser import parse_shape_string
from .transform import convert_graph_layout
# pylint: disable=invalid-name
logger = logging.getLogger("TVMC")
@register_parser
def add_tune_parser(subparsers, _):
"""Include parser for 'tune' subcommand"""
parser = subparsers.add_parser("tune", help="auto-tune a model")
parser.set_defaults(func=drive_tune)
parser.add_argument(
"--early-stopping",
type=int,
help="minimum number of trials before early stopping",
)
# There is some extra processing required to define the actual default value
# for --min-repeat-ms. This is done in `tune_model`.
parser.add_argument(
"--min-repeat-ms",
default=None,
type=int,
help="minimum time to run each trial, in milliseconds. "
"Defaults to 0 on x86 and 1000 on all other targets",
)
parser.add_argument(
"--model-format",
choices=frontends.get_frontend_names(),
help="specify input model format",
)
parser.add_argument(
"--number",
default=10,
type=int,
help="number of runs a single repeat is made of. "
"The final number of tuning executions is: "
"(1 + number * repeat)",
)
parser.add_argument(
"-o",
"--output",
required=True,
help="output file to store the tuning records for the tuning process",
)
parser.add_argument(
"--parallel",
default=4,
type=int,
help="the maximum number of parallel devices to use when tuning",
)
parser.add_argument(
"--repeat",
type=int,
default=1,
help="how many times to repeat each measurement",
)
parser.add_argument(
"--rpc-key",
help="the RPC tracker key of the target device. "
"Required when --rpc-tracker is provided.",
)
parser.add_argument(
"--rpc-tracker",
help="hostname (required) and port (optional, defaults to 9090) of the RPC tracker, "
"e.g. '192.168.0.100:9999'",
)
generate_target_args(parser)
parser.add_argument(
"--target-host",
help="the host compilation target, defaults to 'llvm'",
default="llvm",
)
parser.add_argument("--timeout", type=int, default=10, help="compilation timeout, in seconds")
parser.add_argument(
"--trials",
type=int,
default=1000,
help="the maximum number of tuning trials to perform",
)
parser.add_argument(
"--tuning-records",
metavar="PATH",
help="path to an auto-tuning log file by AutoTVM.",
)
parser.add_argument(
"--desired-layout",
choices=["NCHW", "NHWC"],
default=None,
help="change the data layout of the whole graph",
)
parser.add_argument(
"--enable-autoscheduler",
help="enable tuning the graph through the autoscheduler",
action="store_true",
)
auto_scheduler_group = parser.add_argument_group(
"Autoscheduler options",
"Autoscheduler options, used when --enable-autoscheduler is provided",
)
auto_scheduler_group.add_argument(
"--cache-line-bytes",
type=int,
help="the size of cache line in bytes. "
"If not specified, it will be autoset for the current machine.",
)
auto_scheduler_group.add_argument(
"--num-cores",
type=int,
help="the number of device cores. "
"If not specified, it will be autoset for the current machine.",
)
auto_scheduler_group.add_argument(
"--vector-unit-bytes",
type=int,
help="the width of vector units in bytes. "
"If not specified, it will be autoset for the current machine.",
)
auto_scheduler_group.add_argument(
"--max-shared-memory-per-block",
type=int,
help="the max shared memory per block in bytes. "
"If not specified, it will be autoset for the current machine.",
)
auto_scheduler_group.add_argument(
"--max-local-memory-per-block",
type=int,
help="the max local memory per block in bytes. "
"If not specified, it will be autoset for the current machine.",
)
auto_scheduler_group.add_argument(
"--max-threads-per-block",
type=int,
help="the max number of threads per block. "
"If not specified, it will be autoset for the current machine.",
)
auto_scheduler_group.add_argument(
"--max-vthread-extent",
type=int,
help="the max vthread extent. "
"If not specified, it will be autoset for the current machine.",
)
auto_scheduler_group.add_argument(
"--warp-size",
type=int,
help="the thread numbers of a warp. "
"If not specified, it will be autoset for the current machine.",
)
auto_scheduler_group.add_argument(
"--include-simple-tasks",
help="whether to extract simple tasks that do not include complicated ops",
action="store_true",
)
auto_scheduler_group.add_argument(
"--log-estimated-latency",
help="whether to log the estimated latency to the file after tuning a task",
action="store_true",
)
autotvm_group = parser.add_argument_group(
"autotvm options",
"autotvm options, used when the autoscheduler is not enabled",
)
autotvm_group.add_argument(
"--tuner",
choices=["ga", "gridsearch", "random", "xgb", "xgb_knob", "xgb-rank"],
default="xgb",
help="type of tuner to use when tuning with autotvm.",
)
# TODO (@leandron) This is a path to a physical file, but
# can be improved in future to add integration with a modelzoo
# or URL, for example.
parser.add_argument("FILE", help="path to the input model file")
parser.add_argument(
"--input-shapes",
help="specify non-generic shapes for model to run, format is "
'"input_name:[dim1,dim2,...,dimn] input_name2:[dim1,dim2]"',
type=parse_shape_string,
)
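# Example invocation of the subcommand registered above (the model file name and
# target string are assumptions):
#
#     tvmc tune --target "llvm" --trials 1000 --output tuning_records.json my_model.onnx
#
# Every flag shown maps directly onto an argument added in add_tune_parser().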
def drive_tune(args):
"""Invoke auto-tuning with command line arguments
Parameters
----------
args: argparse.Namespace
Arguments from command line parser.
"""
tvmc_model = frontends.load_model(args.FILE, args.model_format, shape_dict=args.input_shapes)
# Specify hardware parameters, although they'll only be used if autoscheduling.
hardware_params = auto_scheduler.HardwareParams(
num_cores=args.num_cores,
vector_unit_bytes=args.vector_unit_bytes,
cache_line_bytes=args.cache_line_bytes,
max_shared_memory_per_block=args.max_shared_memory_per_block,
max_local_memory_per_block=args.max_local_memory_per_block,
max_threads_per_block=args.max_threads_per_block,
max_vthread_extent=args.max_vthread_extent,
warp_size=args.warp_size,
target=args.target,
target_host=args.target_host,
)
if args.rpc_tracker:
parsed_url = urlparse("//%s" % args.rpc_tracker)
rpc_hostname = parsed_url.hostname
rpc_port = parsed_url.port or 9090
logger.info("RPC tracker hostname: %s", rpc_hostname)
logger.info("RPC tracker port: %s", rpc_port)
if not args.rpc_key:
raise TVMCException("need to provide an RPC tracker key (--rpc-key) for remote tuning")
else:
rpc_hostname = None
rpc_port = None
tune_model(
tvmc_model,
args.target,
tuning_records=args.output,
prior_records=args.tuning_records,
enable_autoscheduler=args.enable_autoscheduler,
rpc_key=args.rpc_key,
hostname=rpc_hostname,
port=rpc_port,
trials=args.trials,
target_host=args.target_host,
tuner=args.tuner,
min_repeat_ms=args.min_repeat_ms,
early_stopping=args.early_stopping,
desired_layout=args.desired_layout,
timeout=args.timeout,
repeat=args.repeat,
number=args.number,
parallel=args.parallel,
hardware_params=hardware_params,
include_simple_tasks=args.include_simple_tasks,
log_estimated_latency=args.log_estimated_latency,
additional_target_options=reconstruct_target_args(args),
)
def tune_model(
tvmc_model: TVMCModel,
target: str,
tuning_records: Optional[str] = None,
prior_records: Optional[str] = None,
enable_autoscheduler: bool = False,
rpc_key: Optional[str] = None,
hostname: Optional[str] = None,
port: Optional[Union[int, str]] = 9090,
trials: int = 10000,
target_host: Optional[str] = None,
tuner: str = "xgb",
min_repeat_ms: Optional[int] = None,
early_stopping: Optional[int] = None,
desired_layout: Optional[str] = None,
timeout: int = 10,
repeat: int = 1,
number: int = 10,
parallel: int = 4,
hardware_params: Optional[HardwareParams] = None,
include_simple_tasks: bool = False,
log_estimated_latency: bool = False,
additional_target_options: Optional[Dict[str, Dict[str, Any]]] = None,
):
"""Use tuning to automatically optimize the functions in a model.
Parameters
----------
tvmc_model : TVMCModel
The model to be optimized.
target : str
Compilation target as plain string, inline JSON or path to a JSON file.
tuning_records: str, optional
The path to a file that tuning results will be saved to. If not specified,
a temporary file will be used.
prior_records: str, optional
A path to previous tuning results that will be used to hot-start the tuning
cost model if provided.
enable_autoscheduler : bool, optional
When true, use autoscheduling rather than autotvm. This should produce
faster kernels for compatible model-target pairs.
rpc_key : str, optional
The RPC tracker key of the target device. Required when rpc_tracker is provided.
hostname : str, optional
The IP address of an RPC tracker, used when benchmarking remotely.
port : int or str, optional
The port of the RPC tracker to connect to. Defaults to 9090.
trials : int, optional
The number of schedules to try out for the entire model. Note that the default
value is chosen as a decent average for most models, but larger models may need
more trials to reach a good result while smaller models will converge with fewer
trials.
tuner : str, optional
The type of tuner to use when tuning with autotvm. Can be one of
"ga", "gridsearch", "random", "xgb", "xgb_knob", and "xgb-rank".
min_repeat_ms : int, optional
Minimum time to run each trial. Defaults to 0 on x86 and 1000 on other targets.
early_stopping : int, optional
When specified, stop tuning after this number of trials if results aren't improving.
desired_layout : str, optional
Can be one of "NCHW" or "NHWC". When specified, compatible operations in the graph
will have their layout set to this format. Tasks will then be tuned using this
specified layout.
    timeout : int, optional
If a kernel trial lasts longer than this duration in seconds, it will be
considered a failure.
repeat : int, optional
How many times each measurement should be repeated.
number : int, optional
The number of runs a single repeat is made of.
parallel : int, optional
The maximum number of parallel devices to use when tuning.
hardware_params : auto_scheduler.HardwareParams, optional
When using the autoscheduler, this object defines the configuration of the target hardware.
include_simple_tasks : bool, optional
Whether to extract simple operations or only computationally intensive ones when using
the autoscheduler.
log_estimated_latency : bool, optional
If using the autoscheduler, write the estimated latency at each step of tuning to file.
additional_target_options: Optional[Dict[str, Dict[str, Any]]]
Additional target options in a dictionary to combine with initial Target arguments
Returns
-------
tuning_records : str
The path to the produced tuning log file.
"""
target, extra_targets = target_from_cli(target, additional_target_options)
target, target_host = Target.check_and_update_host_consist(target, target_host)
# TODO(jwfromm) Remove this deepcopy once AlterOpLayout bug that mutates source
# model is fixed. For now, creating a clone avoids the issue.
mod = deepcopy(tvmc_model.mod)
params = tvmc_model.params
if tuning_records is None:
tuning_records = tvmc_model.default_tuning_records_path()
for codegen_from_cli in extra_targets:
codegen = composite_target.get_codegen_by_target(codegen_from_cli["name"])
partition_function = codegen["pass_pipeline"]
mod = partition_function(mod, params, **codegen_from_cli["opts"])
# min_repeat_ms should be:
# a. the value provided by the user, if any, or
# b. 0ms in case target is "cpu"; otherwise 1000ms
if min_repeat_ms is None:
min_repeat_ms = 0 if target.keys[0] == "cpu" else 1000
logger.info("Default --min-repeat-ms for this target is %s", min_repeat_ms)
if rpc_key:
if hostname is None or port is None:
raise TVMCException(
"You must provide a hostname and port to connect to a remote RPC device."
)
if isinstance(port, str):
port = int(port)
logger.info("Tuning will be performed on device %s at %s:%d.", rpc_key, hostname, port)
runner_ctor = auto_scheduler.RPCRunner if enable_autoscheduler else autotvm.RPCRunner
runner = runner_ctor(
key=rpc_key,
host=hostname,
port=port,
number=number,
repeat=repeat,
n_parallel=parallel,
timeout=timeout,
min_repeat_ms=min_repeat_ms,
)
else:
logger.info("Starting localhost tuning.")
runner_ctor = (
auto_scheduler.LocalRPCMeasureContext if enable_autoscheduler else autotvm.LocalRunner
)
local_server = runner_ctor(
number=number,
repeat=repeat,
timeout=timeout,
min_repeat_ms=min_repeat_ms,
)
# For autoscheduling on some devices, we need to maintain a LocalRPCMeasureContext object.
if enable_autoscheduler:
runner = local_server.runner
else:
runner = local_server
if enable_autoscheduler:
tasks, weights = autoscheduler_get_tuning_tasks(
mod=mod,
params=params,
target=target,
alter_layout=desired_layout,
hardware_params=hardware_params,
include_simple_tasks=include_simple_tasks,
)
# Create the autoscheduler tuning options
tuning_options = auto_scheduler.TuningOptions(
num_measure_trials=trials,
measure_callbacks=[auto_scheduler.RecordToFile(tuning_records)],
runner=runner,
early_stopping=early_stopping,
)
logger.info("Autoscheduling with configuration: %s", tuning_options)
# Schedule the tasks (i.e., produce a schedule for each task)
schedule_tasks(tasks, weights, tuning_options, prior_records, log_estimated_latency)
else:
tasks = autotvm_get_tuning_tasks(
mod=mod,
params=params,
target=target,
alter_layout=desired_layout,
)
# In autotvm, trials is specified per task. We can convert the per-model input
# provided to per-task trials by dividing by the number of tasks.
trials = int(trials / len(tasks))
logger.info("Autotuning with %d trials per task.", trials)
tuning_options = {
"tuner": tuner,
"trials": trials,
"early_stopping": early_stopping,
"measure_option": autotvm.measure_option(
builder=autotvm.LocalBuilder(build_func="default"), runner=runner
),
"tuning_records": prior_records,
}
logger.info("Autotuning with configuration: %s", tuning_options)
tune_tasks(tasks, tuning_records, **tuning_options)
return tuning_records
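# Python-level usage sketch (model path and target string are assumptions):
#
#     from tvm.driver.tvmc import frontends
#     tvmc_model = frontends.load_model("my_model.onnx")
#     records = tune_model(tvmc_model, "llvm", tuning_records="tuning_records.json")
#
# The returned path can later be fed back to the compiler as prior tuning records.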
def autotvm_get_tuning_tasks(
mod: tvm.IRModule,
params: Dict[str, tvm.nd.NDArray],
target: str,
target_host: Optional[str] = None,
alter_layout: Optional[str] = None,
):
"""Get the autotvm tuning tasks for a given relay module.
Parameters
----------
mod : tvm.IRModule
The relay module from which to extract tuning tasks.
params : dict
The params for the relay module.
target : tvm.target.Target
The compilation target.
target_host : str, optional
The compilation target for the host.
alter_layout : str, optional
The layout to convert the graph to. Note, the convert layout
pass doesn't currently guarantee the whole of the graph will
be converted to the chosen layout.
Returns
-------
tasks : list of autotvm.Tasks
list of tasks to be tuned
"""
target, target_host = Target.check_and_update_host_consist(target, target_host)
if alter_layout:
mod = convert_graph_layout(mod, alter_layout)
tasks = autotvm.task.extract_from_program(
mod["main"],
target=target,
params=params,
)
return tasks
def autoscheduler_get_tuning_tasks(
mod: tvm.IRModule,
params: Dict[str, tvm.nd.NDArray],
target: str,
target_host: Optional[str] = None,
alter_layout: Optional[str] = None,
hardware_params: Optional[HardwareParams] = None,
include_simple_tasks: bool = False,
):
"""Get the autoscheduler tuning tasks for a given relay module.
Parameters
----------
mod : tvm.IRModule
The relay module from which to extract tuning tasks.
params : dict
The params for the relay module.
target : tvm.target.Target
The compilation target.
target_host : str, optional
The compilation target for the host.
alter_layout : str, optional
The layout to convert the graph to. Note, the convert layout
pass doesn't currently guarantee the whole of the graph will
be converted to the chosen layout.
hardware_params : Optional[HardwareParams]
Hardware parameters used for the search tasks
Returns
-------
tasks : list of autotvm.Tasks
list of tasks to be tuned
weights : List[int]
the weight (i.e. the number of appearance) of extracted tasks
"""
target, target_host = Target.check_and_update_host_consist(target, target_host)
if alter_layout:
mod = convert_graph_layout(mod, alter_layout)
# Extract the tasks
tasks, task_weights = auto_scheduler.extract_tasks(
mod["main"],
params,
target=target,
hardware_params=hardware_params,
include_simple_tasks=include_simple_tasks,
)
return tasks, task_weights
def schedule_tasks(
tasks: List[auto_scheduler.SearchTask],
task_weights: List[float],
tuning_options: auto_scheduler.TuningOptions,
prior_records: Optional[str] = None,
log_estimated_latency: bool = False,
):
"""Generate the schedules for the different tasks (i.e., subgraphs) contained in the module.
Store the schedules in a json file that will be used later by the compiler.
Parameters
----------
tasks : list
A list of auto_scheduler.SearchTask to tune.
task_weights : list
The weight (i.e. the number of appearance) of extracted tasks
tuning_options: auto_scheduler.TuningOptions
The options of tuning
prior_records : str, optional
The json file used to preload the autoscheduler
log_estimated_latency : bool, optional
If true, writes the estimated runtime of the model during each step of tuning to file.
"""
if not log_estimated_latency:
callbacks = [auto_scheduler.task_scheduler.PrintTableInfo()]
else:
callbacks = [
auto_scheduler.task_scheduler.PrintTableInfo(),
auto_scheduler.task_scheduler.LogEstimatedLatency(("total_latency.tsv")),
]
# Create the scheduler
tuner = auto_scheduler.TaskScheduler(
tasks, task_weights, load_log_file=prior_records, callbacks=callbacks
)
# Tune the tasks
tuner.tune(tuning_options)
def tune_tasks(
tasks: List[autotvm.task.Task],
log_file: str,
measure_option: autotvm.measure_option,
tuner: str,
trials: int,
early_stopping: Optional[int] = None,
tuning_records: Optional[str] = None,
):
"""Tune a list of tasks and output the history to a log file.
Parameters
----------
tasks : list
A list of autotvm.Tasks to tune.
log_file : str
A file to output the tuning history, in JSON.
measure_option : autotvm.measure_option
Options to build and run a tuning task.
tuner : str
Which tuner to use.
trials : int
The maximum number of tuning trials to perform.
early_stopping : int, optional
The minimum number of tuning trials to perform.
This will be equal to 'trials' if not specified.
tuning_records: str, optional
Path to the file produced by the tuning, to be used during
tuning.
"""
if not tasks:
logger.warning("there were no tasks found to be tuned")
return
if not early_stopping:
early_stopping = trials
for i, tsk in enumerate(tasks):
prefix = "[Task %2d/%2d] " % (i + 1, len(tasks))
# Create a tuner
if tuner in ("xgb", "xgb-rank"):
tuner_obj = XGBTuner(tsk, loss_type="rank")
elif tuner == "xgb_knob":
tuner_obj = XGBTuner(tsk, loss_type="rank", feature_type="knob")
elif tuner == "ga":
tuner_obj = GATuner(tsk, pop_size=50)
elif tuner == "random":
tuner_obj = RandomTuner(tsk)
elif tuner == "gridsearch":
tuner_obj = GridSearchTuner(tsk)
else:
raise TVMCException("invalid tuner: %s " % tuner)
# If transfer learning is being used, load the existing results
if tuning_records and os.path.exists(tuning_records):
logger.info("loading tuning records from %s", tuning_records)
start_time = time.time()
tuner_obj.load_history(autotvm.record.load_from_file(tuning_records))
logging.info("loaded history in %.2f sec(s)", time.time() - start_time)
tuner_obj.tune(
n_trial=min(trials, len(tsk.config_space)),
early_stopping=early_stopping,
measure_option=measure_option,
callbacks=[
autotvm.callback.progress_bar(trials, prefix=prefix),
autotvm.callback.log_to_file(log_file),
],
)
| {
"content_hash": "67398f853d72f50544a7d65d743d9d8e",
"timestamp": "",
"source": "github",
"line_count": 678,
"max_line_length": 99,
"avg_line_length": 35.32005899705015,
"alnum_prop": 0.6351526287217606,
"repo_name": "Laurawly/tvm-1",
"id": "8f14c80b9695aabf73b1f4ddc79563376b619f7b",
"size": "24732",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/tvm/driver/tvmc/autotuner.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "4093"
},
{
"name": "C",
"bytes": "351611"
},
{
"name": "C++",
"bytes": "11660999"
},
{
"name": "CMake",
"bytes": "228510"
},
{
"name": "Cuda",
"bytes": "16902"
},
{
"name": "Cython",
"bytes": "28979"
},
{
"name": "Go",
"bytes": "111527"
},
{
"name": "HTML",
"bytes": "2664"
},
{
"name": "Java",
"bytes": "199950"
},
{
"name": "JavaScript",
"bytes": "15305"
},
{
"name": "Makefile",
"bytes": "67149"
},
{
"name": "Objective-C",
"bytes": "24259"
},
{
"name": "Objective-C++",
"bytes": "87655"
},
{
"name": "Python",
"bytes": "16256580"
},
{
"name": "RenderScript",
"bytes": "1895"
},
{
"name": "Rust",
"bytes": "391076"
},
{
"name": "Shell",
"bytes": "228674"
},
{
"name": "TypeScript",
"bytes": "94385"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('build', '0017_auto_20180904_1457'),
]
operations = [
migrations.AddField(
model_name='rebuild',
name='qa_comment',
field=models.TextField(blank=True, null=True),
),
]
| {
"content_hash": "23818c9d6621a46fc51c2c88c037a207",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 58,
"avg_line_length": 21.5,
"alnum_prop": 0.5917312661498708,
"repo_name": "SalesforceFoundation/mrbelvedereci",
"id": "dd45594c22d684ec7413c97e8bc5c93037364458",
"size": "461",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "metaci/build/migrations/0018_rebuild_qa_comment.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "2069"
},
{
"name": "HTML",
"bytes": "123214"
},
{
"name": "JavaScript",
"bytes": "3993"
},
{
"name": "Python",
"bytes": "245560"
},
{
"name": "Shell",
"bytes": "4590"
}
],
"symlink_target": ""
} |
"""
This sub-module provides a collection of filters for linq-style programming
(inspired by RxPy).
Each function appears as a method on the OutputThing base class, allowing for
easy chaining of calls. For example::
sensor.where(lambda x: x > 100).select(lambda x: x*2)
If the @filtermethod decorator is used, then a standalone function is also
defined that takes all the arguments except the publisher and returns a
function which, when called, takes a publisher and subscribes to the publisher.
We call this returned function a "thunk". Thunks can be used with combinators
(like compose(), parallel(), and passthrough(), all defined in combinators.py)
as well as directly with the scheduler. For example::
    scheduler.schedule_sensor(sensor, where(lambda x: x > 100),
select(lambda x: x*2))
The implementation code for a linq-style filter typically looks like the
following::
@filtermethod(OutputThing)
def example(this, ...):
def _filter(self, x):
....
return FunctionFilter(this, _filter, name="example")
Note that, by convention, we use `this` as the first argument of the function,
rather than self. The `this` parameter corresponds to the previous element in
the chain, while the `self` parameter used in the _filter() function represents
the current element in the chain. If you get these mixed up, you can get an
infinite loop!
In general, a linq-style filter takes the previous OutputThing/filter in a
chain as its first input, parameters to the filter as subsequent inputs, and
returns a OutputThing/filter that should be used as the input to the next step
in the filter chain.
"""
from . import buffer
from . import first
from . import never
from . import output
from . import scan
from . import select
from . import skip
from . import some
from . import take
from . import transducer
from . import timeout
from . import where
from . import combinators
| {
"content_hash": "33452c1ded3c37cb713cc93a5309a207",
"timestamp": "",
"source": "github",
"line_count": 56,
"max_line_length": 79,
"avg_line_length": 35.125,
"alnum_prop": 0.7381799694966955,
"repo_name": "mpi-sws-rse/thingflow-python",
"id": "03602b5a9e42c6a836e595fb86212124b2db7f82",
"size": "2064",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "thingflow/filters/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "382"
},
{
"name": "Python",
"bytes": "290455"
},
{
"name": "Shell",
"bytes": "6604"
}
],
"symlink_target": ""
} |
import re
class SandhiRule:
def __init__(self, rule, tags=None):
"""
        rule is a string of the form A|B>C<D|E
        tags is a set of strings annotating the rule for later filtering, etc.
"""
self.tags = tags or set()
self.a, bcd, self.e = rule.split("|")
self.b, cd = bcd.split(">")
self.c, self.d = cd.split("<")
self.theme = self.a
self.stem = self.a + self.b
self.suffix = self.d + self.e
self.distinguisher = self.c + self.e
self.surface = self.a + self.c + self.e
def __repr__(self):
if self.tags:
return "SandhiRule('{0.a}|{0.b}>{0.c}<{0.d}|{0.e}', " \
"tags={1})".format(self, self.tags)
else:
return "SandhiRule('{0.a}|{0.b}>{0.c}<{0.d}|{0.e}')".format(self)
def match_theme(self, stem):
"""
If the given stem matches this rule's stem part, return the theme
(which may be more than this rule's theme part if this rule's stem part
is only the rightmost part of the given stem) or return None if stems
don't match.
"""
if re.match(".*" + self.stem + "$", stem):
if self.b:
return stem[:-len(self.b)]
else:
return stem
else:
return None
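# Minimal usage sketch. The rule string below is invented purely for
# illustration and is not taken from any real sandhi rule set.
if __name__ == "__main__":
    rule = SandhiRule("a|o>o<|men", tags={"example"})
    print(rule.stem, rule.suffix, rule.surface)  # -> ao men aomen
    print(rule.match_theme("kalao"))             # -> kala (the trailing "o" is the B part)
    print(rule.match_theme("xyz"))               # -> None (stem part does not match)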
| {
"content_hash": "e96394f1e98e0fab9a3cbd81ba97cb08",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 79,
"avg_line_length": 32.5609756097561,
"alnum_prop": 0.5063670411985018,
"repo_name": "jtauber/inflexion",
"id": "8f661d61c5615c3a736d624d3707f1834d5d3d7b",
"size": "1335",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "inflexion/sandhi.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "14438"
}
],
"symlink_target": ""
} |
from django.db import migrations
from django.db.backends.postgresql.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
from django.db.models import Count
def fix_duplicate_attachments(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
"""Migration 0041 had a bug, where if multiple messages referenced the
same attachment, rather than creating a single attachment object
for all of them, we would incorrectly create one for each message.
This results in exceptions looking up the Attachment object
corresponding to a file that was used in multiple messages that
predate migration 0041.
This migration fixes this by removing the duplicates, moving their
messages onto a single canonical Attachment object (per path_id).
"""
Attachment = apps.get_model('zerver', 'Attachment')
# Loop through all groups of Attachment objects with the same `path_id`
for group in Attachment.objects.values('path_id').annotate(Count('id')).order_by().filter(id__count__gt=1):
# Sort by the minimum message ID, to find the first attachment
attachments = sorted(list(Attachment.objects.filter(path_id=group['path_id']).order_by("id")),
key = lambda x: min(x.messages.all().values_list('id')[0]))
surviving = attachments[0]
to_cleanup = attachments[1:]
for a in to_cleanup:
# For each duplicate attachment, we transfer its messages
# to the canonical attachment object for that path, and
# then delete the original attachment.
for msg in a.messages.all():
surviving.messages.add(msg)
surviving.is_realm_public = surviving.is_realm_public or a.is_realm_public
surviving.save()
a.delete()
class Migration(migrations.Migration):
dependencies = [
('zerver', '0073_custom_profile_fields'),
]
operations = [
migrations.RunPython(fix_duplicate_attachments, elidable=True),
]
| {
"content_hash": "01f4aebc49a6a787b365dff9ebd3ae67",
"timestamp": "",
"source": "github",
"line_count": 44,
"max_line_length": 111,
"avg_line_length": 46.45454545454545,
"alnum_prop": 0.6854207436399217,
"repo_name": "shubhamdhama/zulip",
"id": "c82684d52d025e52bd57a8ce5d27c8b78e952db7",
"size": "2093",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "zerver/migrations/0074_fix_duplicate_attachments.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "400387"
},
{
"name": "Dockerfile",
"bytes": "2939"
},
{
"name": "Emacs Lisp",
"bytes": "157"
},
{
"name": "HTML",
"bytes": "721395"
},
{
"name": "JavaScript",
"bytes": "3095896"
},
{
"name": "Perl",
"bytes": "398763"
},
{
"name": "Puppet",
"bytes": "71124"
},
{
"name": "Python",
"bytes": "6896725"
},
{
"name": "Ruby",
"bytes": "6110"
},
{
"name": "Shell",
"bytes": "119898"
},
{
"name": "TypeScript",
"bytes": "14645"
}
],
"symlink_target": ""
} |
from thrift.Thrift import TType, TMessageType, TException, TApplicationException
from thrift.protocol.TBase import TBase, TExceptionBase
class NotFoundError(TExceptionBase):
"""
Attributes:
- key
"""
__slots__ = [
'key',
]
thrift_spec = (
None, # 0
(1, TType.STRING, 'key', None, None, ), # 1
)
def __init__(self, key=None,):
self.key = key
def __str__(self):
return repr(self)
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.key)
return value
| {
"content_hash": "e965dfbc2b4150e559d844622238c2bd",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 80,
"avg_line_length": 16.46875,
"alnum_prop": 0.5958254269449715,
"repo_name": "Willyham/tchannel-python",
"id": "9c2219cf28f241b5104e3cc2193a9193635e092e",
"size": "1807",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "examples/guide/keyvalue/keyvalue/service/ttypes.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "1902"
},
{
"name": "Python",
"bytes": "719974"
},
{
"name": "Shell",
"bytes": "1473"
},
{
"name": "Thrift",
"bytes": "13859"
}
],
"symlink_target": ""
} |
import collections
import pandas as pd
def get_sec(timestamp):
"""
This function receives a timestamp in HH:MM:SS format
and returns the number of seconds
:param timestamp: the timestamp (i.e. call duration)
"""
call_length = timestamp.split(':') # Split timestamp on colon
return int(call_length[0]) * 3600 + int(call_length[1]) * 60 + int(call_length[2])
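# Worked example: get_sec('01:02:03') == 1*3600 + 2*60 + 3 == 3723 seconds.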
# Read CSV file into Pandas DataFrame object
df = pd.read_csv('fire_clean.csv')
# Isolate relevant columns and sort by type
df = df[['duration', 'type']]
df = df.sort_values(by='type')
# Create empty dict for data accumulation
durations_by_type = {}
"""
Loop through DataFrame to reset the duration for each row from
HH:MM:SS timestamp to seconds, and populate durations_by_type dict
"""
for i in df.index:
# Reset call duration for current row to seconds
df.loc[i, 'duration'] = get_sec(df['duration'][i])
# Create variables for readability
call_type = df['type'][i]
call_duration = df['duration'][i]
if call_type not in durations_by_type:
durations_by_type[call_type] = {'total_duration': 0,
'count': 0,
'avg_duration': 0}
"""
1. Increment number of calls for this call_type
2. Add current duration to total_duration
3. Update average duration, rounding to two decimal places
"""
durations_by_type[call_type]['count'] += 1
durations_by_type[call_type]['total_duration'] += call_duration
durations_by_type[call_type]['avg_duration'] = round(
durations_by_type[call_type]['total_duration'] / durations_by_type[call_type]['count']
)
# Create OrderedDict for sorting of keys
durations_by_type = collections.OrderedDict(sorted(durations_by_type.items()))
# Print average duration for each call type
for key, value in durations_by_type.items():
print('Average duration for call of type {0}: {1} seconds.'.format(key,
value['avg_duration']))
| {
"content_hash": "7084d30ec0b31530548f66f88ea497da",
"timestamp": "",
"source": "github",
"line_count": 58,
"max_line_length": 94,
"avg_line_length": 35.51724137931034,
"alnum_prop": 0.6364077669902912,
"repo_name": "brandonwolfgang/CEN4350-open-source-web-technologies",
"id": "535ebed77813e4683c1c3554510cb7de26435c5e",
"size": "2086",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Data Munging/call_length_by_type.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "341788"
},
{
"name": "Python",
"bytes": "5131"
}
],
"symlink_target": ""
} |
"""
This bot goes over multiple pages of a wiki, and edits them without changes.
This is for example used to get category links in templates working.
Command-line arguments:
-purge Purge the page instead of touching it
Touch mode (default):
-botflag Force botflag in case of edits with changes.
Purge mode:
-converttitles Convert titles to other variants if necessary
-forcelinkupdate Update the links tables
-forcerecursivelinkupdate Update the links table, and update the links tables
for any page that uses this page as a template
-redirects Automatically resolve redirects
¶ms;
"""
#
# (C) Pywikibot team, 2009-2021
#
# Distributed under the terms of the MIT license.
#
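# Illustrative invocations (assumed, not from the original script; page
# selection options such as -cat are provided by pagegenerators):
#
#   python pwb.py touch -cat:Example -botflag
#   python pwb.py touch -purge -forcelinkupdate -cat:Example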
import pywikibot
from pywikibot import pagegenerators
from pywikibot.bot import MultipleSitesBot
from pywikibot.exceptions import (
LockedPageError,
NoCreateError,
NoPageError,
PageSaveRelatedError,
)
docuReplacements = {'&params;': pagegenerators.parameterHelp}  # noqa: N816
class TouchBot(MultipleSitesBot):
"""Page touch bot."""
update_options = {
'botflag': False,
}
def treat(self, page) -> None:
"""Touch the given page."""
try:
page.touch(botflag=self.opt.botflag)
except (NoCreateError, NoPageError):
pywikibot.error('Page {} does not exist.'
.format(page.title(as_link=True)))
except LockedPageError:
pywikibot.error('Page {} is locked.'
.format(page.title(as_link=True)))
except PageSaveRelatedError as e:
pywikibot.error('Page {} not saved:\n{}'.format(page, e.args))
class PurgeBot(MultipleSitesBot):
"""Purge each page on the generator."""
available_options = {
'converttitles': None,
'forcelinkupdate': None,
'forcerecursivelinkupdate': None,
'redirects': None
}
def treat(self, page) -> None:
"""Purge the given page."""
pywikibot.output('Page {}{} purged'
.format(page,
'' if page.purge(**self.opt) else ' not'))
def main(*args: str) -> None:
"""
Process command line arguments and invoke bot.
If args is an empty list, sys.argv is used.
:param args: command line arguments
"""
options = {}
# Process global and pagegenerators args
local_args = pywikibot.handle_args(args)
gen_factory = pagegenerators.GeneratorFactory()
local_args = gen_factory.handle_args(local_args)
bot_class = TouchBot
for arg in local_args:
if arg == '-purge':
bot_class = PurgeBot
elif arg.startswith('-'):
options[arg[1:].lower()] = True
if gen_factory.gens:
gen = gen_factory.getCombinedGenerator(preload=True)
pywikibot.Site().login()
bot_class(generator=gen, **options).run()
else:
pywikibot.bot.suggest_help(missing_generator=True)
if __name__ == '__main__':
main()
| {
"content_hash": "b54cf726cc2df5be559b5526270f8d5a",
"timestamp": "",
"source": "github",
"line_count": 114,
"max_line_length": 77,
"avg_line_length": 27.228070175438596,
"alnum_prop": 0.6156572164948454,
"repo_name": "wikimedia/pywikibot-core",
"id": "292f8ad11841c231d5ff8d36d791a25513b2019e",
"size": "3123",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scripts/touch.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "97"
},
{
"name": "HTML",
"bytes": "1365"
},
{
"name": "Python",
"bytes": "4504123"
}
],
"symlink_target": ""
} |
"""Support for the Netatmo Weather Service."""
import logging
import threading
from datetime import timedelta
from time import time
import pyatmo
import requests
import urllib3
import voluptuous as vol
import homeassistant.helpers.config_validation as cv
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
CONF_NAME,
CONF_MODE,
TEMP_CELSIUS,
DEVICE_CLASS_HUMIDITY,
DEVICE_CLASS_TEMPERATURE,
DEVICE_CLASS_BATTERY,
)
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.event import call_later
from homeassistant.util import Throttle
from .const import DATA_NETATMO_AUTH, DOMAIN
_LOGGER = logging.getLogger(__name__)
CONF_MODULES = "modules"
CONF_STATION = "station"
CONF_AREAS = "areas"
CONF_LAT_NE = "lat_ne"
CONF_LON_NE = "lon_ne"
CONF_LAT_SW = "lat_sw"
CONF_LON_SW = "lon_sw"
DEFAULT_MODE = "avg"
MODE_TYPES = {"max", "avg"}
DEFAULT_NAME_PUBLIC = "Netatmo Public Data"
# This is the Netatmo data upload interval in seconds
NETATMO_UPDATE_INTERVAL = 600
# NetAtmo Public Data is uploaded to server every 10 minutes
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=600)
SUPPORTED_PUBLIC_SENSOR_TYPES = [
"temperature",
"pressure",
"humidity",
"rain",
"windstrength",
"guststrength",
"sum_rain_1",
"sum_rain_24",
]
SENSOR_TYPES = {
"temperature": [
"Temperature",
TEMP_CELSIUS,
"mdi:thermometer",
DEVICE_CLASS_TEMPERATURE,
],
"co2": ["CO2", "ppm", "mdi:periodic-table-co2", None],
"pressure": ["Pressure", "mbar", "mdi:gauge", None],
"noise": ["Noise", "dB", "mdi:volume-high", None],
"humidity": ["Humidity", "%", "mdi:water-percent", DEVICE_CLASS_HUMIDITY],
"rain": ["Rain", "mm", "mdi:weather-rainy", None],
"sum_rain_1": ["sum_rain_1", "mm", "mdi:weather-rainy", None],
"sum_rain_24": ["sum_rain_24", "mm", "mdi:weather-rainy", None],
"battery_vp": ["Battery", "", "mdi:battery", None],
"battery_lvl": ["Battery_lvl", "", "mdi:battery", None],
"battery_percent": ["battery_percent", "%", None, DEVICE_CLASS_BATTERY],
"min_temp": ["Min Temp.", TEMP_CELSIUS, "mdi:thermometer", None],
"max_temp": ["Max Temp.", TEMP_CELSIUS, "mdi:thermometer", None],
"windangle": ["Angle", "", "mdi:compass", None],
"windangle_value": ["Angle Value", "º", "mdi:compass", None],
"windstrength": ["Wind Strength", "km/h", "mdi:weather-windy", None],
"gustangle": ["Gust Angle", "", "mdi:compass", None],
"gustangle_value": ["Gust Angle Value", "º", "mdi:compass", None],
"guststrength": ["Gust Strength", "km/h", "mdi:weather-windy", None],
"reachable": ["Reachability", "", "mdi:signal", None],
"rf_status": ["Radio", "", "mdi:signal", None],
"rf_status_lvl": ["Radio_lvl", "", "mdi:signal", None],
"wifi_status": ["Wifi", "", "mdi:wifi", None],
"wifi_status_lvl": ["Wifi_lvl", "dBm", "mdi:wifi", None],
"health_idx": ["Health", "", "mdi:cloud", None],
}
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_STATION): cv.string,
vol.Optional(CONF_MODULES): vol.All(cv.ensure_list, [cv.string]),
vol.Optional(CONF_AREAS): vol.All(
cv.ensure_list,
[
{
vol.Required(CONF_LAT_NE): cv.latitude,
vol.Required(CONF_LAT_SW): cv.latitude,
vol.Required(CONF_LON_NE): cv.longitude,
vol.Required(CONF_LON_SW): cv.longitude,
vol.Optional(CONF_MODE, default=DEFAULT_MODE): vol.In(MODE_TYPES),
vol.Optional(CONF_NAME, default=DEFAULT_NAME_PUBLIC): cv.string,
}
],
),
}
)
MODULE_TYPE_OUTDOOR = "NAModule1"
MODULE_TYPE_WIND = "NAModule2"
MODULE_TYPE_RAIN = "NAModule3"
MODULE_TYPE_INDOOR = "NAModule4"
NETATMO_DEVICE_TYPES = {
"WeatherStationData": "weather station",
"HomeCoachData": "home coach",
}
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the available Netatmo weather sensors."""
dev = []
auth = hass.data[DATA_NETATMO_AUTH]
if config.get(CONF_AREAS) is not None:
for area in config[CONF_AREAS]:
data = NetatmoPublicData(
auth,
lat_ne=area[CONF_LAT_NE],
lon_ne=area[CONF_LON_NE],
lat_sw=area[CONF_LAT_SW],
lon_sw=area[CONF_LON_SW],
)
for sensor_type in SUPPORTED_PUBLIC_SENSOR_TYPES:
dev.append(
NetatmoPublicSensor(
area[CONF_NAME], data, sensor_type, area[CONF_MODE]
)
)
else:
def find_devices(data):
"""Find all devices."""
all_module_infos = data.get_module_infos()
all_module_names = [e["module_name"] for e in all_module_infos.values()]
module_names = config.get(CONF_MODULES, all_module_names)
entities = []
for module_name in module_names:
if module_name not in all_module_names:
_LOGGER.info("Module %s not found", module_name)
for module in all_module_infos.values():
if module["module_name"] not in module_names:
continue
_LOGGER.debug(
"Adding module %s %s", module["module_name"], module["id"]
)
for condition in data.station_data.monitoredConditions(
moduleId=module["id"]
):
entities.append(NetatmoSensor(data, module, condition.lower()))
return entities
def _retry(_data):
try:
entities = find_devices(_data)
except requests.exceptions.Timeout:
return call_later(
hass, NETATMO_UPDATE_INTERVAL, lambda _: _retry(_data)
)
if entities:
add_entities(entities, True)
for data_class in [pyatmo.WeatherStationData, pyatmo.HomeCoachData]:
try:
data = NetatmoData(auth, data_class, config.get(CONF_STATION))
except pyatmo.NoDevice:
_LOGGER.info(
"No %s devices found", NETATMO_DEVICE_TYPES[data_class.__name__]
)
continue
try:
dev.extend(find_devices(data))
except requests.exceptions.Timeout:
call_later(hass, NETATMO_UPDATE_INTERVAL, lambda _: _retry(data))
if dev:
add_entities(dev, True)
class NetatmoSensor(Entity):
"""Implementation of a Netatmo sensor."""
def __init__(self, netatmo_data, module_info, sensor_type):
"""Initialize the sensor."""
self.netatmo_data = netatmo_data
device = self.netatmo_data.station_data.moduleById(mid=module_info["id"])
if not device:
# Assume it's a station if module can't be found
device = self.netatmo_data.station_data.stationById(sid=module_info["id"])
if device["type"] == "NHC":
self.module_name = module_info["station_name"]
else:
self.module_name = (
f"{module_info['station_name']} {module_info['module_name']}"
)
self._name = f"{DOMAIN} {self.module_name} {SENSOR_TYPES[sensor_type][0]}"
self.type = sensor_type
self._state = None
self._device_class = SENSOR_TYPES[self.type][3]
self._icon = SENSOR_TYPES[self.type][2]
self._unit_of_measurement = SENSOR_TYPES[self.type][1]
self._module_type = device["type"]
self._module_id = module_info["id"]
self._unique_id = f"{self._module_id}-{self.type}"
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def icon(self):
"""Icon to use in the frontend, if any."""
return self._icon
@property
def device_class(self):
"""Return the device class of the sensor."""
return self._device_class
@property
def state(self):
"""Return the state of the device."""
return self._state
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this entity, if any."""
return self._unit_of_measurement
@property
def unique_id(self):
"""Return the unique ID for this sensor."""
return self._unique_id
def update(self):
"""Get the latest data from Netatmo API and updates the states."""
self.netatmo_data.update()
if self.netatmo_data.data is None:
if self._state is None:
return
_LOGGER.warning("No data found for %s", self.module_name)
self._state = None
return
data = self.netatmo_data.data.get(self._module_id)
if data is None:
_LOGGER.warning("No data found for %s", self.module_name)
self._state = None
return
try:
if self.type == "temperature":
self._state = round(data["Temperature"], 1)
elif self.type == "humidity":
self._state = data["Humidity"]
elif self.type == "rain":
self._state = data["Rain"]
elif self.type == "sum_rain_1":
self._state = round(data["sum_rain_1"], 1)
elif self.type == "sum_rain_24":
self._state = data["sum_rain_24"]
elif self.type == "noise":
self._state = data["Noise"]
elif self.type == "co2":
self._state = data["CO2"]
elif self.type == "pressure":
self._state = round(data["Pressure"], 1)
elif self.type == "battery_percent":
self._state = data["battery_percent"]
elif self.type == "battery_lvl":
self._state = data["battery_vp"]
elif self.type == "battery_vp" and self._module_type == MODULE_TYPE_WIND:
if data["battery_vp"] >= 5590:
self._state = "Full"
elif data["battery_vp"] >= 5180:
self._state = "High"
elif data["battery_vp"] >= 4770:
self._state = "Medium"
elif data["battery_vp"] >= 4360:
self._state = "Low"
elif data["battery_vp"] < 4360:
self._state = "Very Low"
elif self.type == "battery_vp" and self._module_type == MODULE_TYPE_RAIN:
if data["battery_vp"] >= 5500:
self._state = "Full"
elif data["battery_vp"] >= 5000:
self._state = "High"
elif data["battery_vp"] >= 4500:
self._state = "Medium"
elif data["battery_vp"] >= 4000:
self._state = "Low"
elif data["battery_vp"] < 4000:
self._state = "Very Low"
elif self.type == "battery_vp" and self._module_type == MODULE_TYPE_INDOOR:
if data["battery_vp"] >= 5640:
self._state = "Full"
elif data["battery_vp"] >= 5280:
self._state = "High"
elif data["battery_vp"] >= 4920:
self._state = "Medium"
elif data["battery_vp"] >= 4560:
self._state = "Low"
elif data["battery_vp"] < 4560:
self._state = "Very Low"
elif self.type == "battery_vp" and self._module_type == MODULE_TYPE_OUTDOOR:
if data["battery_vp"] >= 5500:
self._state = "Full"
elif data["battery_vp"] >= 5000:
self._state = "High"
elif data["battery_vp"] >= 4500:
self._state = "Medium"
elif data["battery_vp"] >= 4000:
self._state = "Low"
elif data["battery_vp"] < 4000:
self._state = "Very Low"
elif self.type == "min_temp":
self._state = data["min_temp"]
elif self.type == "max_temp":
self._state = data["max_temp"]
elif self.type == "windangle_value":
self._state = data["WindAngle"]
elif self.type == "windangle":
if data["WindAngle"] >= 330:
self._state = "N (%d\xb0)" % data["WindAngle"]
elif data["WindAngle"] >= 300:
self._state = "NW (%d\xb0)" % data["WindAngle"]
elif data["WindAngle"] >= 240:
self._state = "W (%d\xb0)" % data["WindAngle"]
elif data["WindAngle"] >= 210:
self._state = "SW (%d\xb0)" % data["WindAngle"]
elif data["WindAngle"] >= 150:
self._state = "S (%d\xb0)" % data["WindAngle"]
elif data["WindAngle"] >= 120:
self._state = "SE (%d\xb0)" % data["WindAngle"]
elif data["WindAngle"] >= 60:
self._state = "E (%d\xb0)" % data["WindAngle"]
elif data["WindAngle"] >= 30:
self._state = "NE (%d\xb0)" % data["WindAngle"]
elif data["WindAngle"] >= 0:
self._state = "N (%d\xb0)" % data["WindAngle"]
elif self.type == "windstrength":
self._state = data["WindStrength"]
elif self.type == "gustangle_value":
self._state = data["GustAngle"]
elif self.type == "gustangle":
if data["GustAngle"] >= 330:
self._state = "N (%d\xb0)" % data["GustAngle"]
elif data["GustAngle"] >= 300:
self._state = "NW (%d\xb0)" % data["GustAngle"]
elif data["GustAngle"] >= 240:
self._state = "W (%d\xb0)" % data["GustAngle"]
elif data["GustAngle"] >= 210:
self._state = "SW (%d\xb0)" % data["GustAngle"]
elif data["GustAngle"] >= 150:
self._state = "S (%d\xb0)" % data["GustAngle"]
elif data["GustAngle"] >= 120:
self._state = "SE (%d\xb0)" % data["GustAngle"]
elif data["GustAngle"] >= 60:
self._state = "E (%d\xb0)" % data["GustAngle"]
elif data["GustAngle"] >= 30:
self._state = "NE (%d\xb0)" % data["GustAngle"]
elif data["GustAngle"] >= 0:
self._state = "N (%d\xb0)" % data["GustAngle"]
elif self.type == "guststrength":
self._state = data["GustStrength"]
elif self.type == "reachable":
self._state = data["reachable"]
elif self.type == "rf_status_lvl":
self._state = data["rf_status"]
elif self.type == "rf_status":
if data["rf_status"] >= 90:
self._state = "Low"
elif data["rf_status"] >= 76:
self._state = "Medium"
elif data["rf_status"] >= 60:
self._state = "High"
elif data["rf_status"] <= 59:
self._state = "Full"
elif self.type == "wifi_status_lvl":
self._state = data["wifi_status"]
elif self.type == "wifi_status":
if data["wifi_status"] >= 86:
self._state = "Low"
elif data["wifi_status"] >= 71:
self._state = "Medium"
elif data["wifi_status"] >= 56:
self._state = "High"
elif data["wifi_status"] <= 55:
self._state = "Full"
elif self.type == "health_idx":
if data["health_idx"] == 0:
self._state = "Healthy"
elif data["health_idx"] == 1:
self._state = "Fine"
elif data["health_idx"] == 2:
self._state = "Fair"
elif data["health_idx"] == 3:
self._state = "Poor"
elif data["health_idx"] == 4:
self._state = "Unhealthy"
except KeyError:
_LOGGER.error("No %s data found for %s", self.type, self.module_name)
self._state = None
return
class NetatmoPublicSensor(Entity):
"""Represent a single sensor in a Netatmo."""
def __init__(self, area_name, data, sensor_type, mode):
"""Initialize the sensor."""
self.netatmo_data = data
self.type = sensor_type
self._mode = mode
self._name = "{} {}".format(area_name, SENSOR_TYPES[self.type][0])
self._area_name = area_name
self._state = None
self._device_class = SENSOR_TYPES[self.type][3]
self._icon = SENSOR_TYPES[self.type][2]
self._unit_of_measurement = SENSOR_TYPES[self.type][1]
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def icon(self):
"""Icon to use in the frontend."""
return self._icon
@property
def device_class(self):
"""Return the device class of the sensor."""
return self._device_class
@property
def state(self):
"""Return the state of the device."""
return self._state
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this entity."""
return self._unit_of_measurement
def update(self):
"""Get the latest data from Netatmo API and updates the states."""
self.netatmo_data.update()
if self.netatmo_data.data is None:
_LOGGER.warning("No data found for %s", self._name)
self._state = None
return
data = None
if self.type == "temperature":
data = self.netatmo_data.data.getLatestTemperatures()
elif self.type == "pressure":
data = self.netatmo_data.data.getLatestPressures()
elif self.type == "humidity":
data = self.netatmo_data.data.getLatestHumidities()
elif self.type == "rain":
data = self.netatmo_data.data.getLatestRain()
elif self.type == "sum_rain_1":
data = self.netatmo_data.data.get60minRain()
elif self.type == "sum_rain_24":
data = self.netatmo_data.data.get24hRain()
elif self.type == "windstrength":
data = self.netatmo_data.data.getLatestWindStrengths()
elif self.type == "guststrength":
data = self.netatmo_data.data.getLatestGustStrengths()
if not data:
_LOGGER.warning(
"No station provides %s data in the area %s", self.type, self._area_name
)
self._state = None
return
values = [x for x in data.values() if x is not None]
if self._mode == "avg":
self._state = round(sum(values) / len(values), 1)
elif self._mode == "max":
self._state = max(values)
class NetatmoPublicData:
"""Get the latest data from Netatmo."""
def __init__(self, auth, lat_ne, lon_ne, lat_sw, lon_sw):
"""Initialize the data object."""
self.auth = auth
self.data = None
self.lat_ne = lat_ne
self.lon_ne = lon_ne
self.lat_sw = lat_sw
self.lon_sw = lon_sw
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def update(self):
"""Request an update from the Netatmo API."""
data = pyatmo.PublicData(
self.auth,
LAT_NE=self.lat_ne,
LON_NE=self.lon_ne,
LAT_SW=self.lat_sw,
LON_SW=self.lon_sw,
filtering=True,
)
if data.CountStationInArea() == 0:
_LOGGER.warning("No Stations available in this area.")
return
self.data = data
class NetatmoData:
"""Get the latest data from Netatmo."""
def __init__(self, auth, data_class, station):
"""Initialize the data object."""
self.auth = auth
self.data_class = data_class
self.data = {}
self.station_data = self.data_class(self.auth)
self.station = station
self.station_id = None
if station:
station_data = self.station_data.stationByName(self.station)
if station_data:
self.station_id = station_data.get("_id")
self._next_update = time()
self._update_in_progress = threading.Lock()
def get_module_infos(self):
"""Return all modules available on the API as a dict."""
if self.station_id is not None:
return self.station_data.getModules(station_id=self.station_id)
return self.station_data.getModules()
def update(self):
"""Call the Netatmo API to update the data.
This method is not throttled by the builtin Throttle decorator
but with a custom logic, which takes into account the time
of the last update from the cloud.
"""
if time() < self._next_update or not self._update_in_progress.acquire(False):
return
try:
try:
self.station_data = self.data_class(self.auth)
_LOGGER.debug("%s detected!", str(self.data_class.__name__))
except pyatmo.NoDevice:
_LOGGER.warning(
"No Weather or HomeCoach devices found for %s", str(self.station)
)
return
except (requests.exceptions.Timeout, urllib3.exceptions.ReadTimeoutError):
_LOGGER.warning("Timed out when connecting to Netatmo server.")
return
data = self.station_data.lastData(
station=self.station_id, exclude=3600, byId=True
)
if not data:
self._next_update = time() + NETATMO_UPDATE_INTERVAL
return
self.data = data
newinterval = 0
try:
for module in self.data:
if "When" in self.data[module]:
newinterval = self.data[module]["When"]
break
except TypeError:
_LOGGER.debug("No %s modules found", self.data_class.__name__)
if newinterval:
# Try and estimate when fresh data will be available
newinterval += NETATMO_UPDATE_INTERVAL - time()
if newinterval > NETATMO_UPDATE_INTERVAL - 30:
newinterval = NETATMO_UPDATE_INTERVAL
else:
if newinterval < NETATMO_UPDATE_INTERVAL / 2:
# Never hammer the Netatmo API more than
# twice per update interval
newinterval = NETATMO_UPDATE_INTERVAL / 2
_LOGGER.info(
"Netatmo refresh interval reset to %d seconds", newinterval
)
else:
# Last update time not found, fall back to default value
newinterval = NETATMO_UPDATE_INTERVAL
self._next_update = time() + newinterval
finally:
self._update_in_progress.release()
| {
"content_hash": "c1cb3b86c3a61ec01c4cb27a08d1fb8e",
"timestamp": "",
"source": "github",
"line_count": 622,
"max_line_length": 88,
"avg_line_length": 37.9903536977492,
"alnum_prop": 0.518874312314854,
"repo_name": "joopert/home-assistant",
"id": "1ae076c6560775bf6285ac9e8e2d95b19af112c0",
"size": "23632",
"binary": false,
"copies": "2",
"ref": "refs/heads/dev",
"path": "homeassistant/components/netatmo/sensor.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "18670593"
},
{
"name": "Shell",
"bytes": "6846"
}
],
"symlink_target": ""
} |
"""Contains ModelAdmin classes for the habits app.
Classes: [UserHabitInline, HabitAdmin, ScheduleAdmin]
"""
from django.contrib import admin
from .models import Habit, UserHabit, Schedule
class UserHabitInline(admin.TabularInline):
model = UserHabit
extra = 1
class HabitAdmin(admin.ModelAdmin):
"""Describes how a Habit instance appears in the admin site."""
inlines = (UserHabitInline,)
list_display = ('name', 'description', 'generated_by', 'make_default',)
list_filter = ('generated_by', 'make_default',)
search_fields = ('name', 'description',)
ordering = ('-created',)
class ScheduleAdmin(admin.ModelAdmin):
"""Describes how a Schedule instance appears in the admin site."""
list_display = ('event_time', 'repeat', 'user_habit',)
list_filter = ('repeat',)
search_fields = (
'user_habit__habit__name',
'user_habit__habit__description',
)
ordering = ('-created',)
admin.site.register(Habit, HabitAdmin)
admin.site.register(Schedule, ScheduleAdmin)
| {
"content_hash": "975424cabd8f4cb44a039399c8ad19c0",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 75,
"avg_line_length": 28,
"alnum_prop": 0.6795366795366795,
"repo_name": "prathapsridharan/health_project",
"id": "a6314f8e8a91b06eb416d705ee728820e9792105",
"size": "1036",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "health_project/habits/admin.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "1212"
},
{
"name": "HTML",
"bytes": "28585"
},
{
"name": "JavaScript",
"bytes": "2387"
},
{
"name": "Python",
"bytes": "157425"
}
],
"symlink_target": ""
} |
from django.db import models
from django.contrib.sites.models import Site as Site_django
class Organization(models.Model):
"""
Describing an Organization that uses geonition services
"""
name = models.CharField(max_length=50,
primary_key=True)
class Site(models.Model):
"""
An Organization's site and definitions of the site
specific settings.
"""
organization = models.ForeignKey(Organization)
site = models.OneToOneField(Site_django)
#site specific settings
# Use string references: Application and Database are defined further down.
apps_in_use = models.ManyToManyField('Application')
database = models.ForeignKey('Database')
history = models.BooleanField()
mongodb = models.BooleanField()
class Meta:
unique_together = (("organization", "site"),)
class Application(models.Model):
"""
An application service provided by geonition
"""
name = models.CharField(max_length=50,
primary_key=True)
class Database(models.Model):
"""
Should support modification of the following fields:
'ENGINE': 'django.contrib.gis.db.backends.postgis', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
'NAME': '', # Or path to database file if using sqlite3.
'USER': '', # Not used with sqlite3.
'PASSWORD': '', # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '',
"""
name = models.CharField(max_length=30)
host = models.IPAddressField()
port = models.PositiveIntegerField() | {
"content_hash": "1325c2fd3c81112ce260ed8b42f82e8f",
"timestamp": "",
"source": "github",
"line_count": 51,
"max_line_length": 130,
"avg_line_length": 32.745098039215684,
"alnum_prop": 0.611377245508982,
"repo_name": "geonition/geonition_organization_management",
"id": "d9b9596541219c0411edafb4c90094d5987667e8",
"size": "1670",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "organization_management/models.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "2079"
}
],
"symlink_target": ""
} |
from __future__ import print_function
from bcc import BPF
from sys import argv
import sys
import socket
import os
#args
def usage():
print("USAGE: %s [-i <if_name>]" % argv[0])
print("")
print("Try '%s -h' for more options." % argv[0])
exit()
#help
def help():
print("USAGE: %s [-i <if_name>]" % argv[0])
print("")
print("optional arguments:")
print(" -h print this help")
print(" -i if_name select interface if_name. Default is eth0")
print("")
print("examples:")
print(" http-parse # bind socket to eth0")
print(" http-parse -i wlan0 # bind socket to wlan0")
exit()
#arguments
interface="eth0"
if len(argv) == 2:
if str(argv[1]) == '-h':
help()
else:
usage()
if len(argv) == 3:
if str(argv[1]) == '-i':
interface = argv[2]
else:
usage()
if len(argv) > 3:
usage()
print ("binding socket to '%s'" % interface)
# initialize BPF - load source code from http-parse-simple.c
bpf = BPF(src_file = "http-parse-simple.c",debug = 0)
#load eBPF program http_filter of type SOCKET_FILTER into the kernel eBPF vm
#more info about eBPF program types
#http://man7.org/linux/man-pages/man2/bpf.2.html
function_http_filter = bpf.load_func("http_filter", BPF.SOCKET_FILTER)
#create raw socket, bind it to interface
#attach bpf program to socket created
BPF.attach_raw_socket(function_http_filter, interface)
#get file descriptor of the socket previously created inside BPF.attach_raw_socket
socket_fd = function_http_filter.sock
#create python socket object, from the file descriptor
sock = socket.fromfd(socket_fd,socket.PF_PACKET,socket.SOCK_RAW,socket.IPPROTO_IP)
#set it as blocking socket
sock.setblocking(True)
while 1:
#retrieve raw packet from socket
packet_str = os.read(socket_fd,2048)
#DEBUG - print raw packet in hex format
#packet_hex = toHex(packet_str)
#print ("%s" % packet_hex)
#convert packet into bytearray
packet_bytearray = bytearray(packet_str)
#ethernet header length
ETH_HLEN = 14
#IP HEADER
#https://tools.ietf.org/html/rfc791
# 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
# +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
# |Version| IHL |Type of Service| Total Length |
# +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
#
#IHL : Internet Header Length is the length of the internet header
#value to multiply * 4 byte
#e.g. IHL = 5 ; IP Header Length = 5 * 4 byte = 20 byte
#
#Total length: This 16-bit field defines the entire packet size,
#including header and data, in bytes.
#calculate packet total length
total_length = packet_bytearray[ETH_HLEN + 2] #load MSB
total_length = total_length << 8 #shift MSB
total_length = total_length + packet_bytearray[ETH_HLEN+3] #add LSB
#calculate ip header length
ip_header_length = packet_bytearray[ETH_HLEN] #load Byte
ip_header_length = ip_header_length & 0x0F #mask bits 0..3
ip_header_length = ip_header_length << 2 #shift to obtain length
#TCP HEADER
#https://www.rfc-editor.org/rfc/rfc793.txt
# 12 13 14 15
# 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
# +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
# | Data | |U|A|P|R|S|F| |
# | Offset| Reserved |R|C|S|S|Y|I| Window |
# | | |G|K|H|T|N|N| |
# +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
#
#Data Offset: This indicates where the data begins.
#The TCP header is an integral number of 32 bits long.
#value to multiply * 4 byte
#e.g. DataOffset = 5 ; TCP Header Length = 5 * 4 byte = 20 byte
#calculate tcp header length
tcp_header_length = packet_bytearray[ETH_HLEN + ip_header_length + 12] #load Byte
tcp_header_length = tcp_header_length & 0xF0 #mask bit 4..7
tcp_header_length = tcp_header_length >> 2 #SHR 4 ; SHL 2 -> SHR 2
#calculate payload offset
payload_offset = ETH_HLEN + ip_header_length + tcp_header_length
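#e.g. with the typical values above (14 byte Ethernet header, IHL = 5,
#Data Offset = 5): payload_offset = 14 + 20 + 20 = 54 (illustrative)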
#print first line of the HTTP GET/POST request
#line ends with 0x0D 0x0A (\r\n)
#(if we want to print all the header print until \r\n\r\n)
for i in range (payload_offset,len(packet_bytearray)-1):
if (packet_bytearray[i]== 0x0A):
if (packet_bytearray[i-1] == 0x0D):
break
print ("%c" % chr(packet_bytearray[i]), end = "")
print("")
| {
"content_hash": "e76adede44dedd00ebac74df3123c193",
"timestamp": "",
"source": "github",
"line_count": 139,
"max_line_length": 97,
"avg_line_length": 33.81294964028777,
"alnum_prop": 0.5717021276595745,
"repo_name": "iovisor/bcc",
"id": "9d1e9aab01cfb436205ae54d3a392af4c5b90eb1",
"size": "5260",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "examples/networking/http_filter/http-parse-simple.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "11636356"
},
{
"name": "C++",
"bytes": "916663"
},
{
"name": "CMake",
"bytes": "58262"
},
{
"name": "HTML",
"bytes": "2997"
},
{
"name": "Lua",
"bytes": "299473"
},
{
"name": "Makefile",
"bytes": "5763"
},
{
"name": "Python",
"bytes": "1449659"
},
{
"name": "Shell",
"bytes": "21840"
}
],
"symlink_target": ""
} |
import rdtest
import renderdoc as rd
class D3D11_Mesh_Zoo(rdtest.TestCase):
demos_test_name = 'D3D11_Mesh_Zoo'
def __init__(self):
rdtest.TestCase.__init__(self)
self.zoo_helper = rdtest.Mesh_Zoo()
def check_capture(self):
self.zoo_helper.check_capture(self.capture_filename, self.controller)
| {
"content_hash": "02202e1086524c3c9cb11cd9680bdd78",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 77,
"avg_line_length": 25.615384615384617,
"alnum_prop": 0.6696696696696697,
"repo_name": "baldurk/renderdoc",
"id": "c694240b32036e44b5d63198e493be43b28cd3b2",
"size": "333",
"binary": false,
"copies": "2",
"ref": "refs/heads/v1.x",
"path": "util/test/tests/D3D11/D3D11_Mesh_Zoo.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "7661350"
},
{
"name": "C++",
"bytes": "33814371"
},
{
"name": "CMake",
"bytes": "101335"
},
{
"name": "CSS",
"bytes": "1642"
},
{
"name": "Dockerfile",
"bytes": "119"
},
{
"name": "GLSL",
"bytes": "58063"
},
{
"name": "HLSL",
"bytes": "80557"
},
{
"name": "Java",
"bytes": "2241"
},
{
"name": "JavaScript",
"bytes": "10593"
},
{
"name": "Objective-C",
"bytes": "53867"
},
{
"name": "Objective-C++",
"bytes": "156322"
},
{
"name": "Python",
"bytes": "933133"
},
{
"name": "QMake",
"bytes": "15225"
},
{
"name": "SWIG",
"bytes": "54789"
},
{
"name": "Shell",
"bytes": "51606"
}
],
"symlink_target": ""
} |
import os
from twisted.cred import portal
from epsilon.scripts import certcreate
from axiom import errors as userbase
from axiom.scripts import axiomatic
from axiom.dependency import installOn
from xmantissa import website
from sine import confession
class Install(axiomatic.AxiomaticSubCommand):
longdesc = """
Install the confession application components into the site store
"""
optParameters = [
('domain', 'd', 'localhost',
"Domain this registrar is authoritative for;\
i.e., the domain local users belong to."),
('port', 'p', '5060',
'Port to listen on for SIP.')
]
def postOptions(self):
s = self.parent.getStore()
s.findOrCreate(userbase.LoginSystem, lambda i: installOn(i, s))
for ws in s.query(website.WebSite):
break
else:
ws = website.WebSite(
store=s,
portNumber=8080,
securePortNumber=8443,
certificateFile='server.pem')
installOn(ws, s)
if not os.path.exists('server.pem'):
certcreate.main([])
#Is there a real way to do this?
u = portal.IRealm(s).addAccount(u'confession', self['domain'], u'no password :(')
us = u.avatars.open()
installOn(confession.AnonConfessionUser(store=us), us)
installOn(confession.ConfessionDispatcher(store=us, localHost=self['domain']), us)
| {
"content_hash": "dd94187a7b7844fd696428ba69f2da06",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 90,
"avg_line_length": 29.6875,
"alnum_prop": 0.6112280701754386,
"repo_name": "habnabit/divmod-sine",
"id": "c554099fbac2d664a0be849629e0ff5364a2a703",
"size": "1425",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "axiom/plugins/confessioncmd.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "379734"
}
],
"symlink_target": ""
} |
from urllib.request import urlopen
from bs4 import BeautifulSoup
html = urlopen("http://www.pythonscraping.com/pages/page1.html");
bsObj = BeautifulSoup(html.read(), "html.parser")
print(bsObj.h1)
| {
"content_hash": "841d6058b6d83f78cc362533e81525ac",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 65,
"avg_line_length": 39.4,
"alnum_prop": 0.7766497461928934,
"repo_name": "fairesy/web-scraping-with-python",
"id": "8332222fea628c205d08f2620b9e7f04c4399c99",
"size": "197",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ch01/first-scraper.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "1188"
}
],
"symlink_target": ""
} |
import pytest
import ray
import ray.cluster_utils
@pytest.mark.parametrize(
"ray_start_regular", [{"local_mode": True}, {"local_mode": False}], indirect=True
)
def test_args_force_positional(ray_start_regular):
def force_positional(*, a="hello", b="helxo", **kwargs):
return a, b, kwargs
class TestActor:
def force_positional(self, a="hello", b="heo", *args, **kwargs):
return a, b, args, kwargs
def test_function(fn, remote_fn):
assert fn(a=1, b=3, c=5) == ray.get(remote_fn.remote(a=1, b=3, c=5))
assert fn(a=1) == ray.get(remote_fn.remote(a=1))
assert fn(a=1) == ray.get(remote_fn.remote(a=1))
remote_test_function = ray.remote(test_function)
remote_force_positional = ray.remote(force_positional)
test_function(force_positional, remote_force_positional)
ray.get(remote_test_function.remote(force_positional, remote_force_positional))
remote_actor_class = ray.remote(TestActor)
remote_actor = remote_actor_class.remote()
actor_method = remote_actor.force_positional
local_actor = TestActor()
local_method = local_actor.force_positional
test_function(local_method, actor_method)
ray.get(remote_test_function.remote(local_method, actor_method))
@pytest.mark.parametrize(
"ray_start_regular", [{"local_mode": False}, {"local_mode": True}], indirect=True
)
def test_args_intertwined(ray_start_regular):
def args_intertwined(a, *args, x="hello", **kwargs):
return a, args, x, kwargs
class TestActor:
def args_intertwined(self, a, *args, x="hello", **kwargs):
return a, args, x, kwargs
@classmethod
def cls_args_intertwined(cls, a, *args, x="hello", **kwargs):
return a, args, x, kwargs
def test_function(fn, remote_fn):
assert fn(1, 2, 3, x="hi", y="hello") == ray.get(
remote_fn.remote(1, 2, 3, x="hi", y="hello")
)
assert fn(1, 2, 3, y="1hello") == ray.get(remote_fn.remote(1, 2, 3, y="1hello"))
assert fn(1, y="1hello") == ray.get(remote_fn.remote(1, y="1hello"))
remote_test_function = ray.remote(test_function)
remote_args_intertwined = ray.remote(args_intertwined)
test_function(args_intertwined, remote_args_intertwined)
ray.get(remote_test_function.remote(args_intertwined, remote_args_intertwined))
remote_actor_class = ray.remote(TestActor)
remote_actor = remote_actor_class.remote()
actor_method = remote_actor.args_intertwined
local_actor = TestActor()
local_method = local_actor.args_intertwined
test_function(local_method, actor_method)
ray.get(remote_test_function.remote(local_method, actor_method))
actor_method = remote_actor.cls_args_intertwined
local_actor = TestActor()
local_method = local_actor.cls_args_intertwined
test_function(local_method, actor_method)
ray.get(remote_test_function.remote(local_method, actor_method))
if __name__ == "__main__":
import pytest
import os
import sys
if os.environ.get("PARALLEL_CI"):
sys.exit(pytest.main(["-n", "auto", "--boxed", "-vs", __file__]))
else:
sys.exit(pytest.main(["-sv", __file__]))
| {
"content_hash": "aa0242cf460ca63c5e54307feef0821d",
"timestamp": "",
"source": "github",
"line_count": 89,
"max_line_length": 88,
"avg_line_length": 35.96629213483146,
"alnum_prop": 0.6494845360824743,
"repo_name": "ray-project/ray",
"id": "4ebe4ce2f5496f230eb066e5e2f4e42bdf607d0e",
"size": "3217",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/ray/tests/test_args.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "37490"
},
{
"name": "C++",
"bytes": "5972422"
},
{
"name": "CSS",
"bytes": "10912"
},
{
"name": "Cython",
"bytes": "227477"
},
{
"name": "Dockerfile",
"bytes": "20210"
},
{
"name": "HTML",
"bytes": "30382"
},
{
"name": "Java",
"bytes": "1160849"
},
{
"name": "JavaScript",
"bytes": "1128"
},
{
"name": "Jinja",
"bytes": "6371"
},
{
"name": "Jupyter Notebook",
"bytes": "1615"
},
{
"name": "Makefile",
"bytes": "234"
},
{
"name": "PowerShell",
"bytes": "1114"
},
{
"name": "Python",
"bytes": "19539109"
},
{
"name": "Shell",
"bytes": "134583"
},
{
"name": "Starlark",
"bytes": "334862"
},
{
"name": "TypeScript",
"bytes": "190599"
}
],
"symlink_target": ""
} |
"""The module contains functionality for working with json templates"""
import copy
import json
import os
from lib.constants import url
from lib.constants import objects
class TemplateProvider(object):
"""Processes json templates"""
RELATIVE_PATH_TEMPLATE = "template/{0}.json"
parsed_data = dict()
@staticmethod
def get_template_as_dict(obj_type, **kwargs):
"""Return object representation based on json template"""
try:
obj = copy.deepcopy(TemplateProvider.parsed_data[obj_type])
except KeyError:
path = os.path.join(
os.path.dirname(__file__),
TemplateProvider.RELATIVE_PATH_TEMPLATE.format(obj_type))
with open(path) as json_file:
json_data = json_file.read()
data = json.loads(json_data)
TemplateProvider.parsed_data[obj_type] = data
obj = copy.deepcopy(data)
obj.update(kwargs)
contact = {"contact": TemplateProvider.generate_object(1, objects.PEOPLE)}
obj.update(contact)
return {obj_type: obj}
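# Illustrative call (hypothetical object type and kwargs):
# get_template_as_dict("control", title="Test control") loads
# template/control.json once, caches the parsed dict, applies the "title"
# override, injects a default contact and returns {"control": {...}}.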
@staticmethod
def generate_object(obj_id, obj_type):
"""Return minimal object representation by id and type"""
result = {}
result["id"] = obj_id
result["href"] = "/".join([url.API, obj_type, str(obj_id)])
result["type"] = objects.get_singular(obj_type)
return result
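# Illustrative result (hypothetical id; assumes objects.PEOPLE is the plural
# "people" collection): generate_object(5, objects.PEOPLE) ->
# {"id": 5, "href": url.API + "/people/5", "type": "person"}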
| {
"content_hash": "9bc49b5f850d1177c5624865e093f4ca",
"timestamp": "",
"source": "github",
"line_count": 42,
"max_line_length": 78,
"avg_line_length": 31.071428571428573,
"alnum_prop": 0.6743295019157088,
"repo_name": "selahssea/ggrc-core",
"id": "a908d622644d0666f4a318b96982725296b3bf8d",
"size": "1460",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "test/selenium/src/lib/service/rest/template_provider.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "211857"
},
{
"name": "HTML",
"bytes": "1056523"
},
{
"name": "JavaScript",
"bytes": "1852333"
},
{
"name": "Makefile",
"bytes": "7044"
},
{
"name": "Mako",
"bytes": "4320"
},
{
"name": "Python",
"bytes": "2613417"
},
{
"name": "Shell",
"bytes": "31273"
}
],
"symlink_target": ""
} |
from .companies_house import ch_client
from .ops import ops_client
from .twitter import twitter_handler
from .html_scraper import html_scraper
| {
"content_hash": "1953a3f4012978cf5bd98b14d386d781",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 38,
"avg_line_length": 35.75,
"alnum_prop": 0.8181818181818182,
"repo_name": "nestauk/inet",
"id": "985ff7393aa2fc71f6bd48abc160a4ae814caaa5",
"size": "143",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "inet/sources/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "23113"
}
],
"symlink_target": ""
} |
from swgpy.object import *
def create(kernel):
result = Creature()
result.template = "object/mobile/shared_space_greeter_dantooine_old_pilot.iff"
result.attribute_template_id = 9
result.stfName("npc_name","human_base_male")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result | {
"content_hash": "04907d3ba5a3e8be3453e432fca7c4ce",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 79,
"avg_line_length": 24.076923076923077,
"alnum_prop": 0.6996805111821086,
"repo_name": "obi-two/Rebelion",
"id": "a54a4a36a8ba9c66722a737a94da9405b6150f4b",
"size": "458",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "data/scripts/templates/object/mobile/shared_space_greeter_dantooine_old_pilot.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "11818"
},
{
"name": "C",
"bytes": "7699"
},
{
"name": "C++",
"bytes": "2293610"
},
{
"name": "CMake",
"bytes": "39727"
},
{
"name": "PLSQL",
"bytes": "42065"
},
{
"name": "Python",
"bytes": "7499185"
},
{
"name": "SQLPL",
"bytes": "41864"
}
],
"symlink_target": ""
} |
import types
from SmartMeshSDK.ApiException import CommandError
import logging
class NullHandler(logging.Handler):
def emit(self, record):
pass
log = logging.getLogger('ApiDefinition')
log.setLevel(logging.ERROR)
log.addHandler(NullHandler())
class FieldFormats(object):
'''
\brief Enumeration of possible field formats.
'''
STRING = 'string'
BOOL = 'bool'
INT = 'int'
INTS = 'ints'
HEXDATA = 'hex'
HEXDATA_VL = 'hex_vl' # variable length
FLOAT = 'float'
ARRAY = 'array'
class FieldOptions(object):
'''
\brief Possible options for a command field
'''
optionName = None
validOptions = None
optionDescs = None
def __init__(self,optionsDef,fieldOptions,fieldName):
self.optionDescs = []
if not optionsDef:
self.validOptions = None
else:
if optionsDef==True:
self.optionName = fieldName
else:
self.optionName = optionsDef
self.validOptions = []
for i in fieldOptions[self.optionName]:
self.validOptions.append(i[0])
self.optionDescs.append(i[1])
def isValidValue(self,val):
if (
(not self.validOptions) or
(val in self.validOptions)
):
return True
return False
def valueToDesc(self,val):
if self.validOptions:
for i in range(len(self.validOptions)):
if self.validOptions[i]==val:
return self.optionDescs[i]
raise CommandError(CommandError.VALUE_NOT_IN_OPTIONS,
'option='+str(self.optionName)+' val='+str(val))
class Field(object):
'''
\brief Object representing one field of a command.
'''
def __init__(self,fieldDef,fieldOptions):
self.name = fieldDef[0]
self.format = fieldDef[1]
self.length = fieldDef[2]
self.options = FieldOptions(fieldDef[3],fieldOptions,self.name)
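# Illustrative fieldDef (hypothetical): ['macAddress', 'hex', 8, True] yields
# name='macAddress', format=FieldFormats.HEXDATA, length=8, with valid values
# looked up under the field's own name in fieldOptions; a string in the last
# position (e.g. 'RC') would point at a shared option table instead.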
def isValidValue(self,val):
# check format and length
if self.format==FieldFormats.STRING:
if ( (type(val) not in [types.StringType,types.UnicodeType]) or
len(val)>self.length
):
return False
elif self.format==FieldFormats.BOOL:
if type(val)!=types.BooleanType:
return False
elif self.format==FieldFormats.INT:
if ( (type(val)!=types.IntType and type(val)!=types.LongType) or
val>pow(2,8*self.length)
):
return False
elif self.format==FieldFormats.INTS:
if ( (type(val)!=types.IntType and type(val)!=types.LongType) or
val>(pow(2,8*self.length)/2) or
val<(-pow(2,8*self.length)/2)
):
return False
elif self.format==FieldFormats.HEXDATA:
if type(val)!=types.ListType and type(val)!=types.TupleType:
return False
if self.length and len(val)>self.length:
return False
for i in val:
if type(i)!=types.IntType:
return False
if i>=pow(2,8):
return False
elif self.format==FieldFormats.FLOAT:
if ( (type(val)!=types.IntType and type(val)!=types.FloatType) ):
return False
else:
raise SystemError('unknown field format='+self.format)
# check options
if self.options.isValidValue(val)==False:
return False
return True
class ApiDefinition(object):
'''
\ingroup ApiDefinition
\brief Base class for all API definitions objects.
'''
RC = 'RC'
SUBID1 = '_subId1'
SUBID2 = '_subId2'
RESERVED = [SUBID1,SUBID2,'magic']
OPTIONAL = []
COMMAND = 'command'
NOTIFICATION = 'notification'
RC_OK = 0
#======================== id and name =====================================
def __init__(self, array2scalar = True) :
if array2scalar :
self._array2scalar(self.commands)
self._array2scalar(self.notifications)
def _array2scalar(self, defs) :
'''
\brief Convert ARRAY to list of scalars
'''
for fields in defs:
if 'subCommands' in fields :
self._array2scalar(fields['subCommands'])
if 'response' in fields and 'FIELDS' in fields['response'] :
arrays = [f for f in fields['response']['FIELDS'] if f[1] == FieldFormats.ARRAY]
for array in arrays :
scalars = []
for n in range(array[2]) :
for item in array[3] :
name = '{}_{}'.format(item[0], n+1)
scalars.append([name] + item[1:])
fields['response']['FIELDS'].remove(array)
fields['response']['FIELDS'] += scalars
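# Illustrative expansion (hypothetical response field): an ARRAY entry
# ['readings', 'array', 2, [['temp', 'int', 2, False]]] is replaced in the
# response FIELDS by ['temp_1', 'int', 2, False] and
# ['temp_2', 'int', 2, False].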
def idToName(self,type,id):
'''
\brief Translate a command or notification ID into a command name.
\exception CommandError.INVALID_COMMAND Command does
not exist
\returns The command name.
'''
list = self._getList(type)
for item in list:
if item['id']==id:
return item['name']
raise CommandError(CommandError.INVALID_COMMAND,
'id=%s' % str(id))
def nameToId(self,type,nameArray):
'''
\brief Translate a command or notification name into a command ID.
\exception CommandError.INVALID_COMMAND Command does
not exist
\returns The command ID.
'''
list = self._getList(type)
for item in list:
if item['name']==nameArray[0]:
return item['id']
raise CommandError(CommandError.INVALID_COMMAND,
nameArray[0])
def rcToLabel(self,rc):
'''
\brief Translate a return code (RC) into its label, i.e. 'RC_OK' for 0x00.
\param rc A return code, an int.
\exception CommandError If RC does not exist.
\returns The label for this RC, a string.
'''
# get the RC description
rcLabel = None
for r in self.fieldOptions[self.RC]:
if r[0]==rc:
rcLabel = r[1]
break
if not rcLabel:
raise CommandError(CommandError.VALUE_NOT_IN_OPTIONS,
'rc={0} does not exist'.format(rc))
return rcLabel
def rcToDescription(self,rc,nameArray):
'''
\brief Translate a return code (RC) into a description.
If this RC is described in the API definition for this nameArray, then
that description is returned. If not, the generic description of that
RC is returned, preceded by the string "[generic]".
\param rc A return code, an int.
\exception CommandError.VALUE_NOT_IN_OPTIONS if rc not in generic RC's.
\returns The description for this RC, a string.
'''
returnVal = ''
# get the RC description
rcLabel = None
rcGenericDesc = None
for r in self.fieldOptions[self.RC]:
if r[0]==rc:
rcLabel = r[1]
rcGenericDesc = r[2]
break
if not rcLabel:
raise CommandError(CommandError.VALUE_NOT_IN_OPTIONS,
'rc={0} does not exist'.format(rc))
# retrieve rcDescription
definition = self.getDefinition(self.COMMAND,nameArray)
try:
returnVal = definition['responseCodes'][rcLabel]
except KeyError:
returnVal = '[generic] {0}'.format(rcGenericDesc)
return returnVal
def getIds(self,type):
'''
\brief Get the list of command IDs this API defines
\returns An array of numbers, each number representing a command ID
'''
list = self._getList(type)
return [item['id'] for item in list]
def getNames(self,type,nameArray=None):
'''
\brief Get the list of (sub)command names this API defines
\param type Type of definition to be looked up
Supported values are: COMMAND or NOTIFICATION
\param nameArray Optional, used only when accessing names of
subcommands. Specifies the name of the command we
want the subcommand names for.
\returns An array of strings, each string representing the name of a
command
'''
list = self._getList(type)
definition = None
if nameArray:
definition,list = self._commandIterator(nameArray,list)
if not list:
raise CommandError(CommandError.INVALID_COMMAND,
'.'.join(nameArray))
return [command['name'] for command in list]
def getDefinition(self,type,nameArray):
'''
\brief Get the complete definition of a (sub)command, from its name.
\param type Type of definition to be looked up
Supported values are: COMMAND or NOTIFICATION
\param nameArray An array of the form [commandName, subCommandname]
The array can be of any length, and is of length 1
if no subcommands are used.
\exception CommandError(INVALID_COMMAND) The (sub)command
does not exist.
\returns The definition of a (sub)command, represented as a dictionary.
'''
list = self._getList(type)
definition = None
definition,list = self._commandIterator(nameArray,list)
return definition
def getDescription(self,type,nameArray):
'''
\brief Get the description of a command.
\param type Type of definition to be looked up
Supported values are: COMMAND or NOTIFICATION
\param nameArray An array of the form [commandName, subCommandname]
The array can be of any length, and is of length 1
if no subcommands are used.
\exception CommandError(INVALID_COMMAND) The (sub)command
does not exist.
\returns The description of a (sub)command, represented as a dictionary.
'' if no description
'''
definition = self.getDefinition(type,nameArray)
return definition['description']
def hasSubcommands(self,type,nameArray):
return 'subCommands' in self.getDefinition(type,nameArray)
def subcommandIdToName(self,type,nameArray,id):
subcommands = self.getSubcommands(type,nameArray)
for subcommand in subcommands:
if subcommand['id']==id:
return subcommand['name']
raise CommandError(CommandError.UNKNOWN_SUBCOMMAND,
str(id))
def subcommandNameToId(self,type,nameArray,name):
subcommands = self.getSubcommands(type,nameArray)
for subcommand in subcommands:
if subcommand['name']==name:
return subcommand['id']
raise CommandError(CommandError.UNKNOWN_SUBCOMMAND,
str(name))
def getSubcommands(self,type,nameArray):
definition = self.getDefinition(type,nameArray)
if not 'subCommands' in definition:
raise CommandError(CommandError.NO_SUBCOMMANDS,
'.'.join(nameArray))
return definition['subCommands']
def _getList(self,type):
if type==self.COMMAND:
list = self.commands
elif type==self.NOTIFICATION:
list = self.notifications
else:
raise ValueError("type="+str(type)+" unsupported")
return list
def _commandIterator(self,nameArray,list):
for commandName in nameArray:
if not list:
raise CommandError(CommandError.INVALID_COMMAND,
'.'.join(nameArray))
found = False
for elem in list:
if elem['name']==commandName:
found = True
definition = elem
if 'subCommands' in elem:
list = definition['subCommands']
else:
list = None
break
if found==False:
raise CommandError(CommandError.INVALID_COMMAND,
'.'.join(nameArray))
return definition,list
def getRequestFieldNames(self,commandArray):
'''
\brief Get the request fields of a (sub)command, from its name.
\param commandArray An array of the form [commandName, subCommandname]
The array can be of any length, and is of length 1
if no subcommands are used.
\exception CommandError(NO_REQUEST) The (sub)command has
no request fields.
\returns The list of request fields.
'''
fields = self.getRequestFields(commandArray)
return [field.name for field in fields]
def getRequestFieldFormat(self,commandArray,fieldName):
return self.getRequestField(commandArray,fieldName).format
def getRequestFieldLength(self,commandArray,fieldName):
return self.getRequestField(commandArray,fieldName).length
def getRequestFieldOptions(self,commandArray,fieldName):
return self.getRequestField(commandArray,fieldName).options
def getRequestField(self,commandArray,fieldName):
fields = self.getRequestFields(commandArray)
for field in fields:
if field.name==fieldName:
return field
raise CommandError(CommandError.UNKNOWN_FIELD,
'%s in %s' % (fieldName, '.'.join(commandArray)))
def getRequestFields(self,commandArray):
commandDef = self.getDefinition(self.COMMAND,commandArray)
if 'request' not in commandDef:
raise CommandError(CommandError.NO_REQUEST,
'.'.join(commandArray))
fields = [Field(fieldRaw,self.fieldOptions)
for fieldRaw in commandDef['request']]
return fields
def getResponseFieldNames(self,type,nameArray):
'''
\brief Get the response fields of a (sub)command, from its name.
\param type Command or notification?
\param nameArray An array of the form [commandName, subCommandname]
The array can be of any length, and is of length 1
if no subcommands are used.
\exception CommandError(NO_RESPONSE) The (sub)command has
no request fields.
\returns The list of request fields.
'''
fields = self.getResponseFields(type,nameArray)
return [field.name for field in fields]
def getResponseFieldFormat(self,type,nameArray,fieldName):
return self.getResponseField(type,nameArray,fieldName).format
def getResponseFieldLength(self,type,nameArray,fieldName):
return self.getResponseField(type,nameArray,fieldName).length
def getResponseFieldOptions(self,type,nameArray,fieldName):
return self.getResponseField(type,nameArray,fieldName).options
def getResponseField(self,type,nameArray,fieldName):
for i in range(len(nameArray)):
fields = self.getResponseFields(type,nameArray[:i+1])
for field in fields:
if field.name==fieldName:
return field
raise CommandError(CommandError.UNKNOWN_FIELD,
'%s in %s' % (fieldName, '.'.join(nameArray)))
def getResponseFields(self,type,nameArray):
commandDef = self.getDefinition(type,nameArray)
if 'response' not in commandDef:
raise CommandError(CommandError.NO_RESPONSE,
'.'.join(nameArray))
keys = commandDef['response'].keys()
responseName = keys[0]
fields = [Field(fieldRaw,self.fieldOptions)
for fieldRaw in commandDef['response'][responseName]]
return fields
def responseFieldValueToDesc(self,nameArray,fieldName,fieldValue):
return self.fieldValueToDesc(
self.COMMAND,
nameArray,
fieldName,
fieldValue)
def notifFieldValueToDesc(self,nameArray,fieldName,fieldValue):
return self.fieldValueToDesc(
self.NOTIFICATION,
nameArray,
fieldName,
fieldValue)
def fieldValueToDesc(self,type,nameArray,fieldName,fieldValue):
responseField = self.getResponseField(type,nameArray,fieldName)
return responseField.options.valueToDesc(fieldValue)
@classmethod
def fieldOptionToShortDesc(self,name,value):
for option in self.fieldOptions[name]:
if option[0]==value:
return option[1]
@classmethod
def fieldFormatToString(self,fieldLength,fieldFormat):
'''
\brief Turns the field format into a human-readable string.
\param fieldLength The number of bytes (an int) of the field
\param fieldFormat The format of a field, expressed as a string
\return A human-readable string.
'''
returnVal = ''
if fieldFormat==FieldFormats.INT and fieldLength==1:
returnVal += 'INT8U'
elif fieldFormat==FieldFormats.INT and fieldLength==2:
returnVal += 'INT16U'
elif fieldFormat==FieldFormats.INT and fieldLength==4:
returnVal += 'INT32U'
elif fieldFormat==FieldFormats.INTS and fieldLength==1:
returnVal += 'INT8S'
elif fieldFormat==FieldFormats.INTS and fieldLength==2:
returnVal += 'INT16'
elif fieldFormat==FieldFormats.INTS and fieldLength==4:
returnVal += 'INT32'
elif fieldFormat==FieldFormats.BOOL:
returnVal += 'BOOL'
else:
if fieldLength:
returnVal += "{0}B ({1})".format(fieldLength,fieldFormat)
else:
returnVal += "{0}".format(fieldFormat)
return returnVal
#======================== validation ======================================
def areSameFieldNames(self,fieldsCommand,fieldsPassed):
'''
\brief Determine whether the fields passed contain the same field names
as defined in the commands.
\exception CommandError.TOO_FEW_FIELDS Too few fields are
present in the fields passed.
\exception CommandError.TOO_MANY_FIELDS Too many fields are
present in the fields passed.
\exception CommandError.UNKNOWN_FIELD At least one unknown
fields in the fields passed.
'''
# list of field names expected in the command
namesCommand = [field[0] for field in fieldsCommand]
namesCommand.sort()
# list of field names in the passed fields
namesPassed = fieldsPassed.keys()
namesPassed.sort()
if len(namesPassed)<len(namesCommand):
raise CommandError(CommandError.TOO_FEW_FIELDS)
elif len(namesPassed)>len(namesCommand):
raise CommandError(CommandError.TOO_MANY_FIELDS)
else:
for i in range(len(namesPassed)):
if namesPassed[i]!=namesCommand[i]:
raise CommandError(CommandError.UNKNOWN_FIELD,namesPassed[i])
def isValidFieldFormatting(self,commandArray,
fieldName,
fieldValue):
'''
\brief Determine whether the field passed contains a correct format
according to the command definition passed.
\exception CommandError.UNKNOWN_FIELD The field is not in
the definition.
\exception CommandError.MALFORMED_FIELD The field is malformed.
'''
thisField = self.getRequestField(commandArray,fieldName)
# check whether this value is valid
if thisField.isValidValue(fieldValue)==False:
raise CommandError(
CommandError.MALFORMED_FIELD,
'commandArray={0} fieldName={1} fieldValue={2}'.format(commandArray,fieldName,fieldValue)
)
def validateRequest(self,commandArray,fields):
'''
\brief Validate that the fields passed form a valid request for the
specified command or subcommand. Raises a CommandError exception
if the request fields are invalid.
\param commandArray An array of the form [commandName, subCommandname]
The array can be of any length, and is of length 1
if no subcommands are used.
\param fields A dictionary indicating the value of every field
in that (sub)command, of the form
<tt>
{
\<fieldName1\>: \<fieldValue1\>,
\<fieldName2\>: \<fieldValue2\>,
...,
}
</tt>
\exception CommandError Describes the validation error
'''
definition = self.getDefinition(self.COMMAND,commandArray)
if 'request' not in definition:
raise CommandError(CommandError.NO_REQUEST,
'.'.join(commandArray))
# step 1. make sure the command definition and the fields passed have
# the same fields (raises errors if not)
self.areSameFieldNames(definition['request'],fields)
# step 2. for each field, make sure formatting is correct
# (raises errors if not)
for fieldName,fieldValue in fields.items():
self.isValidFieldFormatting(commandArray,
fieldName,
fieldValue)
#======================== serialization ===================================
def _getSerializer(self,commandArray):
'''
\brief Get the serializer associated with a command or subcommand.
\param commandArray An array of the form [commandName, subCommandname]
The array can be of any length, and is of length 1
if no subcommands are used.
\returns The definition of a subcommand, represented as a dictionary.
'''
return self.getDefinition(self.COMMAND,commandArray)['serializer']
def serialize(self,commandArray,fields):
'''
\brief Serialize a command.
This function applies the serializer function specified in this
(sub)command\'s entry in the API definition.
\param commandArray An array of the form [commandName, subCommandname]
The array can be of any length, and is of length 1
if no subcommands are used.
\param fields A dictionary indicating the value of every field
in that (sub)command, of the form
<tt>
{
\<fieldName1\>: \<fieldValue1\>,
\<fieldName2\>: \<fieldValue2\>,
...,
}
</tt>
\returns The serialized command, in the format specified by this field\'s
serializer.
'''
# verify that this is a valid request (raises exception if not)
self.validateRequest(commandArray,fields)
# get the serializer associated with this (sub)command
try:
serialize_func = getattr(self, self._getSerializer(commandArray))
except KeyError:
serialize_func = self.default_serializer
# apply the serializer and return the result
return serialize_func(commandArray,fields)
#======================== abstract methods ================================
def default_serializer(self,commandArray,fields):
'''
\brief Default serializer for the API if no 'serializer' is specified
in the command description
The API-specific child class is expected to implement this method
'''
raise NotImplementedError('No default serializer')
#======================== abstract attributes =============================
##
# \brief Enumeration of valid options for certain fields.
#
# This structure is a Python dictionary. The key of each entry is an
    # arbitrary but unique string. The value of each entry is an array of
# options. Each option is an array of three elements:
# - a valid value for the field
# - a short description of that value
# - a long description of that value
#
# An example redefinition is:
# <tt>
# fieldOptions = {
# 'packetPriority' : [
# [0, 'low', 'low'],
# [1, 'high', 'high'],
# ],
# }
# </tt>
#
    # \note While the fieldOptions variable is defined in this parent ApiDefinition
    # class, it is meant to be redefined by inheriting classes.
#
fieldOptions = {
}
##
# \brief Commands in this API.
#
# Each command is a dictionary with the following mandatory keys:
# - 'id' : The ID of this command (a number)
# - 'name' : The name of the command (a string)
# - 'description' : A plain-English description of the command
#
# The following keys are optional:
# - 'serializer' : a function to call to serialize this command. If this
# field is absent, the default serializer will be
# called.
#
# The remaining fields define the format of the request and response.
#
# In the general case, the following fields are present:
# - 'request' : array of fields in the request
# - 'response' : array of fields in the response
#
# A 'request' (resp. response) contains the fields contained in the request
# (resp. response).
#
# \note Commands can define only a request (resp. response), in which case
# the response (resp. request) field is not present.
#
    # In some cases, a command does not define request/response directly, but
# rather subcommands. In that case, the following fields are present:
    # - 'subcommands'   : The name of the dictionary of subcommands for this
# function
#
# \note While the commands variable is defined in the parent API class,
# it is meant to be redefined by classes inheriting from this one.
#
commands = [
]
##
# \brief Notifications in this API.
#
# \note While the notifications variable is defined in the parent API class,
# it is meant to be redefined by classes inheriting from this one.
#
notifications = [
]
| {
"content_hash": "bf77a7034e65e5b9e774a9134453f28b",
"timestamp": "",
"source": "github",
"line_count": 746,
"max_line_length": 105,
"avg_line_length": 38.76005361930295,
"alnum_prop": 0.5423828462735605,
"repo_name": "realms-team/solmanager",
"id": "9ccec45e2d24e03ff3ab902f619e5612e695af8b",
"size": "28934",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "libs/smartmeshsdk-REL-1.3.0.1/libs/SmartMeshSDK/ApiDefinition/ApiDefinition.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "3408"
},
{
"name": "CSS",
"bytes": "1148"
},
{
"name": "HTML",
"bytes": "1568"
},
{
"name": "JavaScript",
"bytes": "1430296"
},
{
"name": "Makefile",
"bytes": "8195"
},
{
"name": "Python",
"bytes": "3428922"
},
{
"name": "Smarty",
"bytes": "5800"
}
],
"symlink_target": ""
} |
from pyvows import Vows, expect
@Vows.batch
class AssertionInclude(Vows.Context):
class WhenItIsAString(Vows.Context):
def topic(self):
return "some big string"
def we_can_find_some(self, topic):
expect(topic).to_include('some')
def we_can_find_big(self, topic):
expect(topic).to_include('big')
def we_can_find_string(self, topic):
expect(topic).to_include('string')
def we_cant_find_else(self, topic):
expect(topic).Not.to_include('else')
class WhenItIsAList(Vows.Context):
def topic(self):
return ["some", "big", "string"]
def we_can_find_some(self, topic):
expect(topic).to_include('some')
def we_can_find_big(self, topic):
expect(topic).to_include('big')
def we_can_find_string(self, topic):
expect(topic).to_include('string')
def we_cant_find_else(self, topic):
expect(topic).Not.to_include('else')
class WhenItIsATuple(Vows.Context):
def topic(self):
return tuple(["some", "big", "string"])
def we_can_find_some(self, topic):
expect(topic).to_include('some')
def we_can_find_big(self, topic):
expect(topic).to_include('big')
def we_can_find_string(self, topic):
expect(topic).to_include('string')
def we_cant_find_else(self, topic):
expect(topic).Not.to_include('else')
class WhenItIsADict(Vows.Context):
def topic(self):
return {"some": 1, "big": 2, "string": 3}
def we_can_find_some(self, topic):
expect(topic).to_include('some')
def we_can_find_big(self, topic):
expect(topic).to_include('big')
def we_can_find_string(self, topic):
expect(topic).to_include('string')
def we_cant_find_else(self, topic):
expect(topic).Not.to_include('else')
class WhenWeGetAnError(Vows.Context):
@Vows.capture_error
def topic(self, last):
expect('a').to_include('b')
def we_get_an_understandable_message(self, topic):
expect(topic).to_have_an_error_message_of("Expected topic('a') to include 'b'")
class WhenWeGetAnErrorOnNot(Vows.Context):
@Vows.capture_error
def topic(self, last):
expect('a').not_to_include('a')
def we_get_an_understandable_message(self, topic):
expect(topic).to_have_an_error_message_of("Expected topic('a') not to include 'a'")
| {
"content_hash": "5dfc5f03e9d1508f070dc5a605e85687",
"timestamp": "",
"source": "github",
"line_count": 85,
"max_line_length": 95,
"avg_line_length": 30.44705882352941,
"alnum_prop": 0.5768933539412674,
"repo_name": "marcelometal/pyvows",
"id": "1150804d746eb1daa5260f3647b789d1dfe7de6b",
"size": "2842",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/assertions/inclusion_vows.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "412"
},
{
"name": "Python",
"bytes": "193088"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
from cubes.browser import *
from cubes.errors import *
from cubes.model import *
from .store import DEFAULT_TIME_HIERARCHY
from .utils import *
from collections import defaultdict
from datetime import datetime
import pytz
class _MixpanelResponseAggregator(object):
def __init__(self, browser, responses, aggregate_names, drilldown, split,
actual_time_level):
"""Aggregator for multiple mixpanel responses (multiple dimensions)
with drill-down post-aggregation.
Arguments:
* `browser` – owning browser
        * `responses` – mixpanel responses keyed by `measure_names`
* `aggregate_names` – list of collected measures
* `drilldown` – a `Drilldown` object from the browser aggregation
query
* `split` - a split Cell object from the browser aggregation query
Object attributes:
* `aggregate_names` – list of measure names from the response
* `aggregate_data` – a dictionary where keys are measure names and
values are actual data points.
* `time_cells` – an ordered dictionary of collected cells from the
response. Key is time path, value is cell contents without the time
dimension.
"""
self.browser = browser
self.logger = browser.logger
self.drilldown = drilldown
self.aggregate_names = aggregate_names
self.actual_time_level = actual_time_level
# Extract the data
self.aggregate_data = {}
for aggregate in aggregate_names:
            self.aggregate_data[aggregate] = responses[aggregate]["data"]["values"]
# Get time drilldown levels, if we are drilling through time
time_drilldowns = drilldown.drilldown_for_dimension("time")
if time_drilldowns:
time_drilldown = time_drilldowns[0]
self.last_time_level = str(time_drilldown.levels[-1])
self.time_levels = ["time."+str(l) for l in time_drilldown.levels]
self.time_hierarchy = str(time_drilldown.hierarchy)
else:
time_drilldown = None
self.last_time_level = None
self.time_levels = []
self.time_hierarchy = DEFAULT_TIME_HIERARCHY
self.drilldown_on = None
for obj in drilldown:
if obj.dimension.name != "time":
# this is a DrilldownItem object. represent it as 'dim.level' or just 'dim' if flat
self.drilldown_on = ( "%s.%s" % (obj.dimension.name, obj.levels[-1].name) ) if ( not obj.dimension.is_flat ) else obj.dimension.name
self.drilldown_on_value_func = lambda x: x
if self.drilldown_on is None and split:
self.drilldown_on = SPLIT_DIMENSION_NAME
self.drilldown_on_value_func = lambda x: True if x == "true" else False
# Time-keyed cells:
# (time_path, group) -> dictionary
self.time_cells = {}
self.cells = []
# Do it:
#
# Collect, Map&Reduce, Order
# ==========================
#
# Process the response. The methods are operating on the instance
# variable `time_cells`
self._collect_cells()
# TODO: handle week
if actual_time_level != self.last_time_level:
self._reduce_cells()
self._finalize_cells()
# Result is stored in the `cells` instance variable.
def _collect_cells(self):
for aggregate in self.aggregate_names:
self._collect_aggregate_cells(aggregate)
def _collect_aggregate_cells(self, aggregate):
"""Collects the cells from the response in a time series dictionary
`time_cells` where keys are tuples: `(time_path, group)`. `group` is
drill-down key value for the cell, such as `New York` for `city`."""
# Note: For no-drilldown this would be only one pass and group will be
# a cube name
# TODO: To add multiple drill-down dimensions in the future, add them
# to the `group` part of the key tuple
        for group_key, group_series in self.aggregate_data[aggregate].items():
for time_key, value in group_series.items():
time_path = time_to_path(time_key, self.last_time_level,
self.time_hierarchy)
key = (time_path, group_key)
# self.logger.debug("adding cell %s" % (key, ))
cell = self.time_cells.setdefault(key, {})
cell[aggregate] = value
# FIXME: do this only on drilldown
if self.drilldown_on:
cell[self.drilldown_on] = group_key
def _reduce_cells(self):
"""Reduce the cells according to the time dimensions."""
def reduce_cell(result, cell):
            # We assume only _sum aggregation
            # All measures should be prepared so we can do this
for aggregate in self.aggregate_names:
result[aggregate] = result.get(aggregate, 0) + \
cell.get(aggregate, 0)
return result
# 1. Map cells to reduced time path
#
reduced_map = defaultdict(list)
reduced_len = len(self.time_levels)
for key, cell in self.time_cells.items():
time_path = key[0]
reduced_path = time_path[0:reduced_len]
reduced_key = (reduced_path, key[1])
# self.logger.debug("reducing %s -> %s" % (key, reduced_key))
reduced_map[reduced_key].append(cell)
self.browser.logger.debug("response cell count: %s reduced to: %s" %
(len(self.time_cells), len(reduced_map)))
# 2. Reduce the cells
#
# See the function reduce_cell() above for aggregation:
#
reduced_cells = {}
for key, cells in reduced_map.items():
# self.browser.logger.debug("Reducing: %s -> %s" % (key, cells))
cell = reduce(reduce_cell, cells, {})
reduced_cells[key] = cell
self.time_cells = reduced_cells
def _finalize_cells(self):
"""Orders the `time_cells` according to the time and "the other"
dimension and puts the result into the `cells` instance variable.
This method also adds the time dimension keys."""
# Order by time (as path) and then drilldown dimension value (group)
# The key[0] is a list of paths: time, another_drilldown
order = lambda left, right: cmp(left[0], right[0])
cells = self.time_cells.items()
cells.sort(order)
# compute the current datetime, convert to path
current_time_path = time_to_path(
pytz.timezone('UTC').localize(datetime.utcnow()).astimezone(self.browser.timezone).strftime("%Y-%m-%d %H:00:00"),
self.last_time_level,
self.time_hierarchy)
self.cells = []
for key, cell in cells:
# If we are aggregating at finer granularity than "all":
time_key = key[0]
if time_key:
# if time_key ahead of current time path, discard
if time_key > current_time_path:
continue
cell.update(zip(self.time_levels, time_key))
# append the drilldown_on attribute ref
if self.drilldown_on:
cell[self.drilldown_on] = self.drilldown_on_value_func(key[1])
self.cells.append(cell)
| {
"content_hash": "f0d8a14c7c78b8f88f8b8a83b474ffaf",
"timestamp": "",
"source": "github",
"line_count": 201,
"max_line_length": 148,
"avg_line_length": 37.691542288557216,
"alnum_prop": 0.5818373812038015,
"repo_name": "DataBrewery/cubes-mixpanel",
"id": "1a5734dea82122b77c95351190ebbe095b1b7d51",
"size": "7612",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cubes-mixpanel/aggregator.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "41013"
}
],
"symlink_target": ""
} |
import os
import cv2
import numpy as np
import sys
import pickle
from optparse import OptionParser
import time
from keras_frcnn import config
import keras_frcnn.resnet as nn
from keras import backend as K
from keras.layers import Input
from keras.models import Model
from keras_frcnn import roi_helpers
from keras_frcnn import data_generators
from sklearn.metrics import average_precision_score
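# get_map accumulates, per class name, the list T of 0/1 ground-truth match
# flags and the list P of the corresponding detection probabilities; mAP is
# computed later from these with sklearn's average_precision_score. f is the
# (fx, fy) pair of resize factors used to map ground-truth boxes into the
# resized image.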
def get_map(pred, gt, f):
T = {}
P = {}
fx, fy = f
for bbox in gt:
bbox['bbox_matched'] = False
pred_probs = np.array([s['prob'] for s in pred])
box_idx_sorted_by_prob = np.argsort(pred_probs)[::-1]
for box_idx in box_idx_sorted_by_prob:
pred_box = pred[box_idx]
pred_class = pred_box['class']
pred_x1 = pred_box['x1']
pred_x2 = pred_box['x2']
pred_y1 = pred_box['y1']
pred_y2 = pred_box['y2']
pred_prob = pred_box['prob']
if pred_class not in P:
P[pred_class] = []
T[pred_class] = []
P[pred_class].append(pred_prob)
found_match = False
for gt_box in gt:
gt_class = gt_box['class']
gt_x1 = gt_box['x1']/fx
gt_x2 = gt_box['x2']/fx
gt_y1 = gt_box['y1']/fy
gt_y2 = gt_box['y2']/fy
gt_seen = gt_box['bbox_matched']
if gt_class != pred_class:
continue
if gt_seen:
continue
iou = data_generators.iou((pred_x1, pred_y1, pred_x2, pred_y2), (gt_x1, gt_y1, gt_x2, gt_y2))
if iou >= 0.5:
found_match = True
gt_box['bbox_matched'] = True
break
else:
continue
T[pred_class].append(int(found_match))
for gt_box in gt:
if not gt_box['bbox_matched'] and not gt_box['difficult']:
if gt_box['class'] not in P:
P[gt_box['class']] = []
T[gt_box['class']] = []
T[gt_box['class']].append(1)
P[gt_box['class']].append(0)
#import pdb
#pdb.set_trace()
return T, P
sys.setrecursionlimit(40000)
parser = OptionParser()
parser.add_option("-p", "--path", dest="test_path", help="Path to test data.")
parser.add_option("-n", "--num_rois", dest="num_rois",
help="Number of ROIs per iteration. Higher means more memory use.", default=32)
parser.add_option("--config_filename", dest="config_filename", help=
"Location to read the metadata related to the training (generated when training).",
default="config.pickle")
parser.add_option("-o", "--parser", dest="parser", help="Parser to use. One of simple or pascal_voc",
default="pascal_voc"),
(options, args) = parser.parse_args()
if not options.test_path: # if filename is not given
parser.error('Error: path to test data must be specified. Pass --path to command line')
if options.parser == 'pascal_voc':
from keras_frcnn.pascal_voc_parser import get_data
elif options.parser == 'simple':
from keras_frcnn.simple_parser import get_data
else:
raise ValueError("Command line option parser must be one of 'pascal_voc' or 'simple'")
config_output_filename = options.config_filename
with open(config_output_filename, 'r') as f_in:
C = pickle.load(f_in)
# turn off any data augmentation at test time
C.use_horizontal_flips = False
C.use_vertical_flips = False
C.rot_90 = False
img_path = options.test_path
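# format_img resizes the image so that its shorter side equals C.im_size,
# converts BGR to RGB, subtracts the per-channel means, rescales, moves the
# channel axis first and adds a batch dimension; it also returns the fx/fy
# scale factors between the original and the resized image.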
def format_img(img, C):
img_min_side = float(C.im_size)
(height,width,_) = img.shape
if width <= height:
f = img_min_side/width
new_height = int(f * height)
new_width = int(img_min_side)
else:
f = img_min_side/height
new_width = int(f * width)
new_height = int(img_min_side)
fx = width/float(new_width)
fy = height/float(new_height)
img = cv2.resize(img, (new_width, new_height), interpolation=cv2.INTER_CUBIC)
img = img[:, :, (2, 1, 0)]
img = img.astype(np.float32)
img[:, :, 0] -= C.img_channel_mean[0]
img[:, :, 1] -= C.img_channel_mean[1]
img[:, :, 2] -= C.img_channel_mean[2]
img /= C.img_scaling_factor
img = np.transpose(img, (2, 0, 1))
img = np.expand_dims(img, axis=0)
return img, fx, fy
class_mapping = C.class_mapping
if 'bg' not in class_mapping:
class_mapping['bg'] = len(class_mapping)
class_mapping = {v: k for k, v in class_mapping.iteritems()}
print(class_mapping)
class_to_color = {class_mapping[v]: np.random.randint(0, 255, 3) for v in class_mapping}
C.num_rois = int(options.num_rois)
if K.image_dim_ordering() == 'th':
input_shape_img = (3, None, None)
input_shape_features = (1024, None, None)
else:
input_shape_img = (None, None, 3)
input_shape_features = (None, None, 1024)
img_input = Input(shape=input_shape_img)
roi_input = Input(shape=(C.num_rois, 4))
feature_map_input = Input(shape=input_shape_features)
# define the base network (resnet here, can be VGG, Inception, etc)
shared_layers = nn.nn_base(img_input, trainable=True)
# define the RPN, built on the base layers
num_anchors = len(C.anchor_box_scales) * len(C.anchor_box_ratios)
rpn_layers = nn.rpn(shared_layers, num_anchors)
classifier = nn.classifier(feature_map_input, roi_input, C.num_rois, nb_classes=len(class_mapping), trainable=True)
model_rpn = Model(img_input, rpn_layers)
model_classifier_only = Model([feature_map_input, roi_input], classifier)
model_classifier = Model([feature_map_input, roi_input], classifier)
model_rpn.load_weights(C.model_path, by_name=True)
model_classifier.load_weights(C.model_path, by_name=True)
model_rpn.compile(optimizer='sgd', loss='mse')
model_classifier.compile(optimizer='sgd', loss='mse')
all_imgs, _, _ = get_data(options.test_path)
test_imgs = [s for s in all_imgs if s['imageset'] == 'test']
T = {}
P = {}
for idx, img_data in enumerate(test_imgs):
print('{}/{}'.format(idx,len(test_imgs)))
st = time.time()
filepath = img_data['filepath']
img = cv2.imread(filepath)
X, fx, fy = format_img(img, C)
if K.image_dim_ordering() == 'tf':
X = np.transpose(X, (0, 2, 3, 1))
# get the feature maps and output from the RPN
[Y1, Y2, F] = model_rpn.predict(X)
R = roi_helpers.rpn_to_roi(Y1, Y2, C, K.image_dim_ordering(), overlap_thresh=0.7)
# convert from (x1,y1,x2,y2) to (x,y,w,h)
R[:, 2] -= R[:, 0]
R[:, 3] -= R[:, 1]
# apply the spatial pyramid pooling to the proposed regions
bboxes = {}
probs = {}
for jk in range(R.shape[0] // C.num_rois + 1):
ROIs = np.expand_dims(R[C.num_rois * jk:C.num_rois * (jk + 1), :], axis=0)
if ROIs.shape[1] == 0:
break
if jk == R.shape[0] // C.num_rois:
# pad R
curr_shape = ROIs.shape
target_shape = (curr_shape[0], C.num_rois, curr_shape[2])
ROIs_padded = np.zeros(target_shape).astype(ROIs.dtype)
ROIs_padded[:, :curr_shape[1], :] = ROIs
ROIs_padded[0, curr_shape[1]:, :] = ROIs[0, 0, :]
ROIs = ROIs_padded
[P_cls, P_regr] = model_classifier_only.predict([F, ROIs])
for ii in range(P_cls.shape[1]):
if np.argmax(P_cls[0, ii, :]) == (P_cls.shape[2] - 1):
continue
cls_name = class_mapping[np.argmax(P_cls[0, ii, :])]
if cls_name not in bboxes:
bboxes[cls_name] = []
probs[cls_name] = []
(x, y, w, h) = ROIs[0, ii, :]
cls_num = np.argmax(P_cls[0, ii, :])
try:
(tx, ty, tw, th) = P_regr[0, ii, 4 * cls_num:4 * (cls_num + 1)]
tx /= C.classifier_regr_std[0]
ty /= C.classifier_regr_std[1]
tw /= C.classifier_regr_std[2]
th /= C.classifier_regr_std[3]
x, y, w, h = roi_helpers.apply_regr(x, y, w, h, tx, ty, tw, th)
except:
raise
bboxes[cls_name].append([16 * x, 16 * y, 16 * (x + w), 16 * (y + h)])
probs[cls_name].append(np.max(P_cls[0, ii, :]))
all_dets = []
for key in bboxes:
bbox = np.array(bboxes[key])
new_boxes, new_probs = roi_helpers.non_max_suppression_fast(bbox, np.array(probs[key]), overlap_thresh=0.5)
for jk in range(new_boxes.shape[0]):
(x1, y1, x2, y2) = new_boxes[jk, :]
det = {'x1': x1, 'x2': x2, 'y1': y1, 'y2': y2, 'class': key, 'prob': new_probs[jk]}
all_dets.append(det)
print('Elapsed time = {}'.format(time.time() - st))
t, p = get_map(all_dets, img_data['bboxes'], (fx, fy))
for key in t.keys():
if key not in T:
T[key] = []
P[key] = []
T[key].extend(t[key])
P[key].extend(p[key])
all_aps = []
for key in T.keys():
ap = average_precision_score(T[key], P[key])
print('{} AP: {}'.format(key, ap))
all_aps.append(ap)
print('mAP = {}'.format(np.mean(np.array(all_aps))))
#print(T)
#print(P)
| {
"content_hash": "1c29a0a3629ad53dcfadcaa2697eb3da",
"timestamp": "",
"source": "github",
"line_count": 285,
"max_line_length": 115,
"avg_line_length": 31.99298245614035,
"alnum_prop": 0.5701908313226585,
"repo_name": "npetrenko/recurrent_frcnn",
"id": "34534bce4a8d1b77842ae486c84e9b4cb1fcb877",
"size": "9118",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "measure_map.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "1285359"
},
{
"name": "Python",
"bytes": "110331"
},
{
"name": "Shell",
"bytes": "156"
}
],
"symlink_target": ""
} |
import nibabel as nib
import numpy as np
import logging
from time import time
import os.path as op
from AFQ.definitions.utils import Definition, find_file
from dipy.align import syn_registration, affine_registration
import AFQ.registration as reg
import AFQ.data.s3bids as afs
from AFQ.tasks.utils import get_fname
from dipy.align.imaffine import AffineMap
try:
from fsl.data.image import Image
from fsl.transform.fnirt import readFnirt
from fsl.transform.nonlinear import applyDeformation
has_fslpy = True
except ModuleNotFoundError:
has_fslpy = False
try:
import h5py
has_h5py = True
except ModuleNotFoundError:
has_h5py = False
__all__ = ["FnirtMap", "SynMap", "SlrMap", "AffMap", "ItkMap"]
logger = logging.getLogger('AFQ')
# For map defintions, get_for_subses should return only the mapping
# Where the mapping has transform and transform_inverse functions
# which each accept data, **kwargs
class FnirtMap(Definition):
"""
Use an existing FNIRT map. Expects a warp file
and an image file for each subject / session; image file
is used as src space for warp.
Parameters
----------
warp_path : str, optional
path to file to get warp from. Use this or warp_suffix.
Default: None
space_path : str, optional
        path to the image file defining the source space of the warp.
        Use this or space_suffix.
Default: None
warp_suffix : str, optional
suffix to pass to bids_layout.get() to identify the warp file.
Default: None
space_suffix : str, optional
suffix to pass to bids_layout.get() to identify the space file.
Default: None
    warp_filters : dict, optional
Additional filters to pass to bids_layout.get() to identify
the warp file.
Default: {}
    space_filters : dict, optional
Additional filters to pass to bids_layout.get() to identify
the space file.
Default: {}
Examples
--------
fnirt_map = FnirtMap(
warp_suffix="warp",
space_suffix="MNI",
warp_filters={"scope": "TBSS"},
space_filters={"scope": "TBSS"})
api.GroupAFQ(mapping=fnirt_map)
"""
def __init__(self, warp_path=None, space_path=None,
warp_suffix=None, space_suffix=None,
warp_filters={}, space_filters={}):
if not has_fslpy:
raise ImportError(
"Please install fslpy if you want to use FnirtMap")
if warp_path is None and warp_suffix is None:
raise ValueError((
"One of `warp_path` or `warp_suffix` should be set "
"to a value other than None."))
if space_path is None and space_suffix is None:
raise ValueError(
"One of space_path or space_suffix must not be None.")
if warp_path is not None and space_path is None\
or space_path is not None and warp_path is None:
raise ValueError((
"If passing a value for `warp_path`, "
"you must also pass a value for `space_path`"))
if warp_path is not None:
self._from_path = True
self.fnames = (warp_path, space_path)
else:
self._from_path = False
self.warp_suffix = warp_suffix
self.warp_filters = warp_filters
self.space_suffix = space_suffix
self.space_filters = space_filters
self.fnames = {}
def find_path(self, bids_layout, from_path, subject, session):
if self._from_path:
return
if session not in self.fnames:
self.fnames[session] = {}
nearest_warp = find_file(
bids_layout, from_path, self.warp_filters, self.warp_suffix,
session, subject)
nearest_space = find_file(
bids_layout, from_path, self.space_filters, self.space_suffix,
session, subject)
self.fnames[session][subject] = (nearest_warp, nearest_space)
def get_for_subses(self, base_fname, dwi, bids_info, reg_subject,
reg_template):
if self._from_path:
nearest_warp, nearest_space = self.fnames
else:
nearest_warp, nearest_space = self.fnames[
bids_info['session']][bids_info['subject']]
our_templ = reg_template
subj = Image(dwi)
their_templ = Image(nearest_space)
warp = readFnirt(nearest_warp, their_templ, subj)
return ConformedFnirtMapping(warp, our_templ.affine)
class ConformedFnirtMapping():
"""
ConformedFnirtMapping which matches the generic mapping API.
"""
def __init__(self, warp, ref_affine):
self.ref_affine = ref_affine
self.warp = warp
def transform_inverse(self, data, **kwargs):
data_img = Image(nib.Nifti1Image(
data.astype(np.float32), self.ref_affine))
return np.asarray(applyDeformation(data_img, self.warp).data)
def transform(self, data, **kwargs):
raise NotImplementedError(
"Fnirt based mappings can currently"
+ " only transform from template to subject space")
class IdentityMap(Definition):
"""
Does not perform any transformations from MNI to subject where
pyAFQ normally would.
Examples
--------
my_example_mapping = IdentityMap()
api.GroupAFQ(mapping=my_example_mapping)
"""
def __init__(self):
pass
def find_path(self, bids_layout, from_path, subject, session):
pass
def get_for_subses(self, base_fname, dwi, bids_info, reg_subject,
reg_template):
return ConformedAffineMapping(
np.identity(4),
domain_grid_shape=reg.reduce_shape(
reg_subject.shape),
domain_grid2world=reg_subject.affine,
codomain_grid_shape=reg.reduce_shape(
reg_template.shape),
codomain_grid2world=reg_template.affine)
class ItkMap(Definition):
"""
Use an existing Itk map (e.g., from ANTS). Expects the warp file
from MNI to T1.
Parameters
----------
warp_path : str, optional
path to file to get warp from. Use this or warp_suffix.
Default: None
warp_suffix : str, optional
suffix to pass to bids_layout.get() to identify the warp file.
    warp_filters : dict, optional
Additional filters to pass to bids_layout.get() to identify
the warp file.
Default: {}
Examples
--------
itk_map = ItkMap(
warp_suffix="xfm",
warp_filters={
"scope": "qsiprep",
"from": "MNI152NLin2009cAsym",
"to": "T1w"})
api.GroupAFQ(mapping=itk_map)
"""
def __init__(self, warp_path=None, warp_suffix=None, warp_filters={}):
if not has_h5py:
raise ImportError(
"Please install h5py if you want to use ItkMap")
if warp_path is None and warp_suffix is None:
raise ValueError((
"One of `warp_path` or `warp_suffix` should be set "
"to a value other than None."))
if warp_path is not None:
self._from_path = True
self.fname = warp_path
else:
self._from_path = False
self.warp_suffix = warp_suffix
self.warp_filters = warp_filters
self.fnames = {}
def find_path(self, bids_layout, from_path, subject, session):
if self._from_path:
return
if session not in self.fnames:
self.fnames[session] = {}
self.fnames[session][subject] = find_file(
bids_layout, from_path, self.warp_filters, self.warp_suffix,
session, subject, extension="h5")
def get_for_subses(self, base_fname, dwi, bids_info, reg_subject,
reg_template):
if self._from_path:
nearest_warp = self.fname
else:
nearest_warp = self.fnames[
bids_info['session']][bids_info['subject']]
warp_f5 = h5py.File(nearest_warp)
their_shape = np.asarray(warp_f5["TransformGroup"]['1'][
'TransformFixedParameters'], dtype=int)[:3]
our_shape = reg_template.get_fdata().shape
if (our_shape != their_shape).any():
raise ValueError((
f"The shape of your ITK mapping ({their_shape})"
f" is not the same as your template for registration"
f" ({our_shape})"))
their_forward = np.asarray(warp_f5["TransformGroup"]['1'][
'TransformParameters']).reshape([*their_shape, 3])
their_disp = np.zeros((*their_shape, 3, 2))
their_disp[..., 0] = their_forward
their_disp = nib.Nifti1Image(
their_disp, reg_template.affine)
their_prealign = np.zeros((4, 4))
their_prealign[:3, :3] = np.asarray(warp_f5["TransformGroup"]["2"][
"TransformParameters"])[:9].reshape((3, 3))
their_prealign[:3, 3] = np.asarray(warp_f5["TransformGroup"]["2"][
"TransformParameters"])[9:]
their_prealign[3, 3] = 1.0
warp_f5.close()
mapping = reg.read_mapping(
their_disp, dwi,
reg_template, prealign=their_prealign)
def transform(self, data, **kwargs):
raise NotImplementedError(
"ITK based mappings can currently"
+ " only transform from template to subject space")
mapping.transform = transform
return mapping
class GeneratedMapMixin(object):
"""
Helper Class
Useful for maps that are generated by pyAFQ
"""
def get_fnames(self, extension, base_fname):
mapping_file = get_fname(
base_fname,
'_desc-mapping_from-DWI_to-MNI_xform')
meta_fname = f'{mapping_file}.json'
mapping_file = mapping_file + extension
return mapping_file, meta_fname
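    # prealign computes a linear pre-registration between reg_subject and
    # reg_template using dipy's affine_registration, caching the affine and
    # its JSON metadata next to base_fname; with save=False the affine is
    # returned directly instead of a file path.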
def prealign(self, base_fname, reg_subject, reg_template, save=True):
prealign_file_desc = "_desc-prealign_from-DWI_to-MNI_xform"
prealign_file = get_fname(
base_fname, f'{prealign_file_desc}.npy')
if not op.exists(prealign_file):
start_time = time()
_, aff = affine_registration(
reg_subject,
reg_template,
**self.affine_kwargs)
meta = dict(
type="rigid",
dependent="dwi",
timing=time() - start_time)
if not save:
return aff
logger.info(f"Saving {prealign_file}")
np.save(prealign_file, aff)
meta_fname = get_fname(
base_fname, f'{prealign_file_desc}.json')
afs.write_json(meta_fname, meta)
return prealign_file if save else np.load(prealign_file)
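    # get_for_subses reuses a mapping cached on disk when one exists,
    # otherwise it generates one with gen_mapping (optionally after a
    # pre-alignment), writes it plus JSON metadata, and finally reads it back
    # conformed to the DWI and template spaces.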
def get_for_subses(self, base_fname, dwi, bids_info, reg_subject,
reg_template, subject_sls=None, template_sls=None):
mapping_file, meta_fname = self.get_fnames(
self.extension, base_fname)
if self.use_prealign:
reg_prealign = np.load(self.prealign(
base_fname, reg_subject, reg_template))
else:
reg_prealign = None
if not op.exists(mapping_file):
start_time = time()
mapping = self.gen_mapping(
base_fname, reg_subject, reg_template,
subject_sls, template_sls,
reg_prealign)
total_time = time() - start_time
logger.info(f"Saving {mapping_file}")
reg.write_mapping(mapping, mapping_file)
meta = dict(
type="displacementfield",
timing=total_time)
if subject_sls is None:
meta["dependent"] = "dwi"
else:
meta["dependent"] = "trk"
afs.write_json(meta_fname, meta)
reg_prealign_inv = np.linalg.inv(reg_prealign) if self.use_prealign\
else None
mapping = reg.read_mapping(
mapping_file,
dwi,
reg_template,
prealign=reg_prealign_inv)
return mapping
class SynMap(GeneratedMapMixin, Definition):
"""
Calculate a Syn registration for each subject/session
using reg_subject and reg_template.
Parameters
----------
use_prealign : bool
Whether to perform a linear pre-registration.
Default: True
affine_kwargs : dictionary, optional
Parameters to pass to affine_registration
in dipy.align, which does the linear pre-alignment.
Only used if use_prealign is True.
Default: {}
syn_kwargs : dictionary, optional
Parameters to pass to syn_registration
in dipy.align, which does the SyN alignment.
Default: {}
Examples
--------
api.GroupAFQ(mapping=SynMap())
"""
def __init__(self, use_prealign=True, affine_kwargs={}, syn_kwargs={}):
self.use_prealign = use_prealign
self.affine_kwargs = affine_kwargs
self.syn_kwargs = syn_kwargs
self.extension = ".nii.gz"
def find_path(self, bids_layout, from_path, subject, session):
pass
def gen_mapping(self, base_fname, reg_subject, reg_template,
subject_sls, template_sls,
reg_prealign):
_, mapping = syn_registration(
reg_subject.get_fdata(),
reg_template.get_fdata(),
moving_affine=reg_subject.affine,
static_affine=reg_template.affine,
prealign=reg_prealign,
**self.syn_kwargs)
if self.use_prealign:
mapping.codomain_world2grid = np.linalg.inv(reg_prealign)
return mapping
class SlrMap(GeneratedMapMixin, Definition):
"""
Calculate a SLR registration for each subject/session
using reg_subject and reg_template.
slr_kwargs : dictionary, optional
Parameters to pass to whole_brain_slr
in dipy, which does the SLR alignment.
Default: {}
Examples
--------
api.GroupAFQ(mapping=SlrMap())
"""
def __init__(self, slr_kwargs={}):
self.slr_kwargs = {}
self.use_prealign = False
self.extension = ".npy"
def find_path(self, bids_layout, from_path, subject, session):
pass
def gen_mapping(self, base_fname, reg_template, reg_subject,
subject_sls, template_sls, reg_prealign):
return reg.slr_registration(
subject_sls, template_sls,
moving_affine=reg_subject.affine,
moving_shape=reg_subject.shape,
static_affine=reg_template.affine,
static_shape=reg_template.shape,
**self.slr_kwargs)
class AffMap(GeneratedMapMixin, Definition):
"""
Calculate an affine registration for each subject/session
using reg_subject and reg_template.
affine_kwargs : dictionary, optional
Parameters to pass to affine_registration
in dipy.align, which does the linear pre-alignment.
Default: {}
Examples
--------
api.GroupAFQ(mapping=AffMap())
"""
def __init__(self, affine_kwargs={}):
self.use_prealign = False
self.affine_kwargs = affine_kwargs
self.extension = ".npy"
def find_path(self, bids_layout, from_path, subject, session):
pass
def gen_mapping(self, base_fname, reg_subject, reg_template,
subject_sls, template_sls,
reg_prealign):
return ConformedAffineMapping(
np.linalg.inv(self.prealign(
base_fname, reg_subject, reg_template, save=False)),
domain_grid_shape=reg.reduce_shape(
reg_subject.shape),
domain_grid2world=reg_subject.affine,
codomain_grid_shape=reg.reduce_shape(
reg_template.shape),
codomain_grid2world=reg_template.affine)
class ConformedAffineMapping(AffineMap):
"""
Modifies AffineMap API to match DiffeomorphicMap API.
Important for SLR maps API to be indistinguishable from SYN maps API.
"""
def transform(self, *args, interpolation='linear', **kwargs):
kwargs['interp'] = interpolation
return super().transform_inverse(*args, **kwargs)
def transform_inverse(self, *args, interpolation='linear', **kwargs):
kwargs['interp'] = interpolation
return super().transform(*args, **kwargs)
| {
"content_hash": "8f9e25b0987e90199a72b81595cb491b",
"timestamp": "",
"source": "github",
"line_count": 495,
"max_line_length": 76,
"avg_line_length": 33.51717171717172,
"alnum_prop": 0.5867036345006329,
"repo_name": "yeatmanlab/pyAFQ",
"id": "2d2a15a1c0f03e1195f12469dc7d62002390fccd",
"size": "16591",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "AFQ/definitions/mapping.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Dockerfile",
"bytes": "409"
},
{
"name": "Makefile",
"bytes": "374"
},
{
"name": "Python",
"bytes": "640881"
},
{
"name": "Shell",
"bytes": "1831"
}
],
"symlink_target": ""
} |
import requests
from biicode.common.settings.fixed_string import FixedStringWithValue
from biicode.client.exception import BiiException, ConnectionErrorException
from requests.auth import AuthBase
from requests.exceptions import ConnectionError, Timeout
import urllib
from biicode.common.utils.bii_logging import logger
class RestApiException(BiiException):
"""Base class exception of this module"""
pass
class MethodNotFoundInApiException(RestApiException):
"""API method not found"""
def __init__(self, expr):
RestApiException.__init__(self)
self.expr = expr
def __str__(self):
return repr("Method: " + self.expr)
class HttpMethodNotImplementedException(RestApiException):
"""Http method not found"""
pass
class InvalidURLException(RestApiException):
def __init__(self, expr):
RestApiException.__init__(self)
self.expr = expr
def __str__(self):
return repr("URL: " + self.expr)
class HttpRequestsLibMethod(FixedStringWithValue):
"""Available methods"""
map_values = {'GET': requests.get, 'POST': requests.post,
'PUT': requests.put, 'HEAD': requests.head,
'OPTIONS': requests.options, 'DELETE': requests.delete}
class JWTAuth(AuthBase):
"""Attaches JWT Authentication to the given Request object."""
def __init__(self, token):
self.token = token
def __call__(self, request):
request.headers['Authorization'] = "Bearer %s" % self.token
return request
class RestApiClient(object):
DEFAULT_TIMEOUT = 15
def __init__(self, base_url, authorized_methods,
http_lib_methods=HttpRequestsLibMethod, timeout=None, proxies=None, verify=False):
self.base_url = base_url
self.authorized_methods = authorized_methods
self.timeout = timeout or self.DEFAULT_TIMEOUT
self.proxies = proxies or urllib.getproxies()
self.verify = verify
self.http_lib_methods = http_lib_methods
assert(isinstance(self.http_lib_methods, FixedStringWithValue.__class__))
def call(self, function_name, url_params=None, params=None, data=None, auth=None,
headers=None, timeout=None):
url_params = url_params or {}
method = self._get_method(function_name)
pattern = self._get_pattern(function_name)
url = self._get_url(pattern, url_params)
try:
return method(url, params=params, data=data, auth=auth, headers=headers,
verify=self.verify, timeout=timeout or self.timeout,
proxies=self.proxies)
except (ConnectionError, Timeout) as e:
logger.debug(str(e))
raise ConnectionErrorException("Can't connect to biicode, check internet connection!")
def _get_method(self, function_name):
try:
return self.http_lib_methods(self.authorized_methods[function_name]['method']).value
except KeyError:
raise MethodNotFoundInApiException(function_name) # From dict method
except ValueError:
# From FixedStringWithValue
raise HttpMethodNotImplementedException("Http method specified for %s" % function_name)
def _get_pattern(self, function_name):
try:
return self.authorized_methods[function_name]['pattern']
except KeyError:
raise MethodNotFoundInApiException(function_name) # From dict method
def _get_url(self, pattern, url_params):
url = (self.base_url + self._build_path(pattern, url_params))
if not self.valid_url(url):
raise InvalidURLException(url)
return url
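    # _build_path substitutes every ":name" placeholder in the URL pattern
    # with the matching value from url_params.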
def _build_path(self, pattern, url_params):
for var_name in url_params.keys():
varValue = url_params[var_name]
pattern = pattern.replace(":" + var_name, str(varValue))
return pattern
def valid_url(self, url):
return url.find("/:") == -1 # There is some parameter not filled
| {
"content_hash": "336402157ca57102d56f7453e21d9d05",
"timestamp": "",
"source": "github",
"line_count": 114,
"max_line_length": 99,
"avg_line_length": 35.24561403508772,
"alnum_prop": 0.6508213041314087,
"repo_name": "biicode/client",
"id": "bb7f240a2d445943513da36b376b0cfd7f490e73",
"size": "4018",
"binary": false,
"copies": "5",
"ref": "refs/heads/develop",
"path": "rest/rest_api.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "622"
},
{
"name": "CMake",
"bytes": "90594"
},
{
"name": "Python",
"bytes": "367469"
},
{
"name": "Shell",
"bytes": "738"
}
],
"symlink_target": ""
} |
import zipfile as zipfile
import os
import re
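# IsPathValid returns False when the path has an ignored extension or when any
# directory component of the path matches an entry in ignoreDir.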
def IsPathValid(path, ignoreDir, ignoreExt):
splited = None
if os.path.isfile(path):
if ignoreExt:
_, ext = os.path.splitext(path)
if ext in ignoreExt:
return False
        splited = re.split(r'[\\/]', os.path.dirname(path))
else:
if not ignoreDir:
return True
        splited = re.split(r'[\\/]', path)
for s in splited:
if s in ignoreDir: # You can also use set.intersection or [x for],
return False
return True
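# zipDirHelper recursively walks `path` and writes every file that passes
# IsPathValid into the open zipfile handle `zf`, storing it under a path
# relative to rootDir.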
def zipDirHelper(path, rootDir, zf, ignoreDir = [], ignoreExt = []):
# zf is zipfile handle
if os.path.isfile(path):
if IsPathValid(path, ignoreDir, ignoreExt):
relative = os.path.relpath(path, rootDir)
zf.write(path, relative)
return
ls = os.listdir(path)
for subFileOrDir in ls:
if not IsPathValid(subFileOrDir, ignoreDir, ignoreExt):
continue
joinedPath = os.path.join(path, subFileOrDir)
zipDirHelper(joinedPath, rootDir, zf, ignoreDir, ignoreExt)
def ZipDir(path, zf, ignoreDir = [], ignoreExt = []):
rootDir = path if os.path.isdir(path) else os.path.dirname(path)
zipDirHelper(path, rootDir, zf, ignoreDir, ignoreExt)
pass
to_exclude = ["./Asset/ArtDev", ]
#ignores excluded directories and .exe files
def get_ignored(path, filenames):
ret = []
for filename in filenames:
theFilePath = os.path.join(path, filename)
filePathAbs = os.path.abspath(theFilePath)
for excludeDir in to_exclude:
excludeDirAbs = os.path.abspath(excludeDir)
if excludeDirAbs in filePathAbs:
print("exclude")
ret.append(filename)
return ret
from wand.image import Image
import shutil
shutil.copytree("./Asset/", "./Asset_cook/", ignore = get_ignored)
#convert to dds
g = os.walk("./Asset_cook/")
for path,directories,filelist in g:
directories[:] = [d for d in directories if d not in ['UITexture']]#ignore UI Texture
for filename in filelist:
theFilePath = os.path.join(path, filename)
if os.path.splitext(theFilePath)[1] in (".png",".PNG", ".tga", ".TGA", ".jpg", ".JPG", ".bmp", ".BMP"):
print("cook texture",theFilePath)
os.system("texconv.exe -f DXT5 -vflip -keepcoverage 0.5 -nologo -o "+os.path.dirname(theFilePath) + " \""+ theFilePath + "\"")
os.remove(theFilePath)
# with Image(filename= theFilePath) as img:
# img.flip()
# img.save(filename=os.path.splitext(theFilePath)[0]+'.dds')
# #remove Old one
# os.remove(theFilePath)
print("packing..")
theZipFile = zipfile.ZipFile("./asset.pkg", 'w', compression=zipfile.ZIP_DEFLATED)
ZipDir("./Asset_cook", theZipFile, ignoreDir=["ArtDev"], ignoreExt=[".zip"])
#remove cooked file
print("clean up")
shutil.rmtree("./Asset_cook/", ignore_errors=False, onerror=None)
print ("done.") | {
"content_hash": "7050debfaafddbdcda879594005ee4b5",
"timestamp": "",
"source": "github",
"line_count": 88,
"max_line_length": 138,
"avg_line_length": 34.26136363636363,
"alnum_prop": 0.6159203980099502,
"repo_name": "tangziwen/Cube-Engine",
"id": "4fc3326b96990b95f16c1ea3b4a9d41a59c92316",
"size": "3015",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "DoShipping.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "AngelScript",
"bytes": "122"
},
{
"name": "Batchfile",
"bytes": "19"
},
{
"name": "C",
"bytes": "5777952"
},
{
"name": "C++",
"bytes": "11121724"
},
{
"name": "CMake",
"bytes": "42135"
},
{
"name": "GLSL",
"bytes": "81340"
},
{
"name": "Lua",
"bytes": "25073"
},
{
"name": "Objective-C",
"bytes": "19871"
},
{
"name": "Python",
"bytes": "13310"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
import requests
import json
from typing import Optional, Text
from django.conf import settings
from django.http import HttpRequest, HttpResponse
from django.utils.translation import ugettext as _
from zerver.decorator import human_users_only
from zerver.lib.push_notifications import add_push_device_token, \
b64_to_hex, remove_push_device_token
from zerver.lib.request import has_request_variables, REQ, JsonableError
from zerver.lib.response import json_success, json_error
from zerver.lib.validator import check_string, check_list, check_bool
from zerver.models import PushDeviceToken, UserProfile
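# validate_token rejects empty or over-long token strings and, for APNs
# tokens, additionally checks that the value can be decoded by b64_to_hex.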
def validate_token(token_str, kind):
# type: (bytes, int) -> None
if token_str == '' or len(token_str) > 4096:
raise JsonableError(_('Empty or invalid length token'))
if kind == PushDeviceToken.APNS:
# Validate that we can actually decode the token.
try:
b64_to_hex(token_str)
except Exception:
raise JsonableError(_('Invalid APNS token'))
@human_users_only
@has_request_variables
def add_apns_device_token(request, user_profile, token=REQ(),
appid=REQ(default=settings.ZULIP_IOS_APP_ID)):
# type: (HttpRequest, UserProfile, bytes, str) -> HttpResponse
validate_token(token, PushDeviceToken.APNS)
add_push_device_token(user_profile, token, PushDeviceToken.APNS, ios_app_id=appid)
return json_success()
@human_users_only
@has_request_variables
def add_android_reg_id(request, user_profile, token=REQ()):
# type: (HttpRequest, UserProfile, bytes) -> HttpResponse
validate_token(token, PushDeviceToken.GCM)
add_push_device_token(user_profile, token, PushDeviceToken.GCM)
return json_success()
@human_users_only
@has_request_variables
def remove_apns_device_token(request, user_profile, token=REQ()):
# type: (HttpRequest, UserProfile, bytes) -> HttpResponse
validate_token(token, PushDeviceToken.APNS)
remove_push_device_token(user_profile, token, PushDeviceToken.APNS)
return json_success()
@human_users_only
@has_request_variables
def remove_android_reg_id(request, user_profile, token=REQ()):
# type: (HttpRequest, UserProfile, bytes) -> HttpResponse
validate_token(token, PushDeviceToken.GCM)
remove_push_device_token(user_profile, token, PushDeviceToken.GCM)
return json_success()
| {
"content_hash": "a9b11be0ab126ef8c9c5794b81c7bda8",
"timestamp": "",
"source": "github",
"line_count": 62,
"max_line_length": 86,
"avg_line_length": 38.61290322580645,
"alnum_prop": 0.7309941520467836,
"repo_name": "vaidap/zulip",
"id": "2959a2819870f186cbca5349e82a329d085840f1",
"size": "2394",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "zerver/views/push_notifications.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "416449"
},
{
"name": "Emacs Lisp",
"bytes": "158"
},
{
"name": "HTML",
"bytes": "472724"
},
{
"name": "JavaScript",
"bytes": "2123247"
},
{
"name": "Nginx",
"bytes": "1280"
},
{
"name": "Pascal",
"bytes": "1113"
},
{
"name": "Perl",
"bytes": "401825"
},
{
"name": "Puppet",
"bytes": "84574"
},
{
"name": "Python",
"bytes": "3669105"
},
{
"name": "Ruby",
"bytes": "249744"
},
{
"name": "Shell",
"bytes": "44486"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import models
class Tag(models.Model):
name = models.CharField(max_length = 100)
def __str__(self):
return self.name
class Category(models.Model):
name = models.CharField(max_length = 100)
def __str__(self):
return self.name
class Author(models.Model):
name = models.CharField(max_length = 200, unique=True)
class Book(models.Model):
created_time = models.DateTimeField('Created on', auto_now_add=True)
modified_time = models.DateTimeField('Modified on', auto_now=True)
name = models.CharField(max_length = 200)
author = models.ForeignKey(Author)
category = models.ForeignKey(Category)
    tags = models.ManyToManyField(Tag, related_name='book')
def __str__(self):
return self.name
class Scan(models.Model):
created_time = models.DateTimeField('Created on', auto_now_add=True)
page = models.IntegerField()
loc = models.CharField(max_length = 256)
book = models.ForeignKey(Book, on_delete=models.CASCADE)
| {
"content_hash": "6517c38c874e8a586e9fa2f7284ae309",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 69,
"avg_line_length": 30.272727272727273,
"alnum_prop": 0.7327327327327328,
"repo_name": "narsi84/digilib",
"id": "a526779d2b822d6e000fecdfce1fb4d2d044b1ac",
"size": "999",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "gui/models.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "843"
},
{
"name": "HTML",
"bytes": "4358"
},
{
"name": "JavaScript",
"bytes": "4551"
},
{
"name": "Python",
"bytes": "22810"
}
],
"symlink_target": ""
} |
import os
import ast
import importlib
import types
import sys
import json
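# activatefile parses a Python source file, loads it as a module and registers
# each matching function (the configured handler, or every function when no
# lambda/openwhisk convention applies) into self.functions; it is defined at
# module level but expects the owning Snafu instance to be passed as `self`.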
def activatefile(self, source, convention, SnafuFunctionSource):
sourceinfos = None
try:
sourceinfos = SnafuFunctionSource(source)
sourcecode = sourceinfos.content
except:
print("Warning: {} is not parseable, skipping.".format(source), file=sys.stderr)
return
if not self.quiet:
print("» module:", source)
handler = None
config = None
configname = source.split(".")[0] + ".config"
if os.path.isfile(configname):
if not self.quiet:
print(" config:", configname)
config = json.load(open(configname))
if config:
if "Handler" in config:
handler = config["Handler"]
connectorconfig = None
connectorconfigname = source.split(".")[0] + ".ini"
if os.path.isfile(connectorconfigname):
if not self.quiet:
print(" connectors:", connectorconfigname)
connectorconfig = connectorconfigname
sourcetree = ast.parse(sourcecode)
loader = importlib.machinery.SourceFileLoader(os.path.basename(source), source)
mod = types.ModuleType(loader.name)
sourceinfos.module = mod
if not os.path.dirname(source) in sys.path:
sys.path.append(os.path.dirname(source))
try:
loader.exec_module(mod)
except Exception as e:
if not self.quiet:
print(" Warning: skipping due to import error: {}".format(e))
return
sourcename = os.path.basename(source).split(".")[0]
for node in ast.walk(sourcetree):
if type(node) == ast.FunctionDef:
if not handler:
handlername = "lambda_handler"
handlerbase = sourcename
else:
handlerbase, handlername = handler.split(".")
if convention not in ("lambda", "openwhisk") or (node.name == handlername and sourcename == handlerbase):
funcname = sourcename + "." + node.name
if config and "FunctionName" in config and convention in ("lambda", "openwhisk"):
funcname = config["FunctionName"]
try:
func = getattr(mod, node.name)
except:
print(" skip method {}.{}".format(sourcename, node.name))
else:
if not self.quiet:
print(" function: {}".format(funcname))
#if not node.name in self.functions:
# self.functions[node.name] = {}
#self.functions[node.name][sourcename] = (func, config, sourceinfos)
self.functions[funcname] = (func, config, sourceinfos)
if connectorconfig:
self.functionconnectors[funcname] = connectorconfig
else:
if not self.quiet:
print(" skip function {}.{}".format(sourcename, node.name))
| {
"content_hash": "d8a98a1529698e28b75f3b948a5aab15",
"timestamp": "",
"source": "github",
"line_count": 77,
"max_line_length": 108,
"avg_line_length": 31.974025974025974,
"alnum_prop": 0.6941510966693745,
"repo_name": "serviceprototypinglab/snafu",
"id": "3f165edbd914229a5a7d2a486bf03af21fcdd6e6",
"size": "2505",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "snafulib/parsers/python.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "809"
},
{
"name": "Java",
"bytes": "4682"
},
{
"name": "Makefile",
"bytes": "658"
},
{
"name": "Python",
"bytes": "95874"
},
{
"name": "Shell",
"bytes": "2587"
}
],
"symlink_target": ""
} |
from defaults import *
from i18n import *
from static import *
from generic.date_based import * | {
"content_hash": "b3d6d3ae96976ed4ec14c5f1a9b63022",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 32,
"avg_line_length": 23.75,
"alnum_prop": 0.7789473684210526,
"repo_name": "paulsmith/geodjango",
"id": "2c8c5b4a92645d9147afbfa931ba05df9769b55f",
"size": "95",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "tests/regressiontests/views/tests/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "JavaScript",
"bytes": "71605"
},
{
"name": "Python",
"bytes": "3433375"
},
{
"name": "Shell",
"bytes": "804"
}
],
"symlink_target": ""
} |
import argparse
import pysam
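# get_reads counts reads mapped to reference_name with mapping quality >= mapq,
# then prints, for every other reference in the BAM header, the ratio of that
# reference's read count to the baseline count as a contamination estimate.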
def get_reads(bam_file, reference_name,mapq=20):
bamfile = pysam.AlignmentFile(bam_file)
reads = bamfile.fetch(reference_name)
count_total_ref = 0
for read in reads:
if (read.mapping_quality >= mapq):
count_total_ref += 1
head = bamfile.header
for item in head['SQ']:
if(item['SN'] != reference_name):
print item['SN'],
count_contam = 0
for read in bamfile.fetch(item['SN']):
if read.mapping_quality >= mapq:
count_contam +=1
print(count_contam/float(count_total_ref))
def main():
parser=argparse.ArgumentParser(description="Compares against other reference Genomes for evidence of Contamination")
parser.add_argument("bam_file",help="Bam file input, contains the multiple references")
parser.add_argument("-r","--reference",dest="reference_name",help="Reference_name")
    parser.add_argument('-m','--mapping-quality',dest='mapq',help="Mapping quality",default=20,type=int)
args = parser.parse_args()
assert args.bam_file is not None, \
"Need a bam file as input"
assert args.reference_name is not None, \
"Make sure you input a reference, which is the actual referenc rest are contamination spots"
get_reads(args.bam_file, args.reference_name, mapq=args.mapq)
if __name__=="__main__":
main()
| {
"content_hash": "3313476b5cb777bb74c3a0e2cb37891f",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 120,
"avg_line_length": 41.44117647058823,
"alnum_prop": 0.6394606103619588,
"repo_name": "theboocock/ancient_dna_pipeline",
"id": "ebfa7fb9e00cdb568ec3c22831489fe6f4bc036d",
"size": "1557",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "python_scripts/contamination_percentage.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "494754"
},
{
"name": "C++",
"bytes": "276798"
},
{
"name": "Groff",
"bytes": "25164"
},
{
"name": "Java",
"bytes": "36594"
},
{
"name": "JavaScript",
"bytes": "22549"
},
{
"name": "Makefile",
"bytes": "9447"
},
{
"name": "Perl",
"bytes": "33073"
},
{
"name": "Python",
"bytes": "342459"
},
{
"name": "R",
"bytes": "12143"
},
{
"name": "Shell",
"bytes": "46220"
}
],
"symlink_target": ""
} |
from __future__ import print_function, division, unicode_literals
import argparse
import os
from astropy.io import votable
from astropy.coordinates import SkyCoord
from astropy import units
import casda
def parseargs():
"""
Parse the command line arguments
:return: An args map with the parsed arguments
"""
parser = argparse.ArgumentParser(description="Download cutouts of specific locations from the specified image")
parser.add_argument("opal_username",
help="Your user name on the ATNF's online proposal system (normally an email address)")
parser.add_argument("-p", "--opal_password", help="Your password on the ATNF's online proposal system")
parser.add_argument("--password_file", help="The file holding your password for the ATNF's online proposal system")
parser.add_argument("proj", help="The text in project name, e.g. EMU, or Rapid ")
parser.add_argument("source_list_file",
help="The file holding the list of positions, with one RA and Dec pair per line.")
parser.add_argument("destination_directory", help="The directory where the resulting files will be stored")
parser.add_argument("radius", help="Radius, in degrees, of the cutouts")
args = parser.parse_args()
return args
def parse_sources_file(filename):
"""
    Read in a file of sources, with one source per line. Each source is specified as a
right ascension and declination pair separated by space.
e.g.
1:34:56 -45:12:30
320.20 -43.5
    :param filename: The name of the file containing the list of sources
:return: A list of SkyCoord objects representing the parsed sources.
"""
sourcelist = []
with open(filename, 'r') as f:
for line in f:
if line and line[0] != '#':
parts = line.split()
if len(parts) > 1:
if parts[0].find(':') > -1 or parts[0].find('h') > -1:
sky_loc = SkyCoord(parts[0], parts[1], frame='icrs',
unit=(units.hourangle, units.deg))
else:
sky_loc = SkyCoord(parts[0], parts[1], frame='icrs',
unit=(units.deg, units.deg))
sourcelist.append(sky_loc)
return sourcelist
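# produce_cutouts runs a TAP query against CASDA for restored continuum (t0,
# Stokes I) images covering each source position, requests an authenticated
# datalink for every matching image cube, then submits an async SODA cutout
# job with a CIRCLE filter of the requested radius and downloads the results.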
def produce_cutouts(source_list, proj, username, password, destination_dir, cutout_radius_degrees):
# Use CASDA VO (secure) to query for the images associated with the given scheduling_block_id
print ("\n\n** Retreiving image details for %s ... \n\n" % proj)
filename = destination_dir + str(proj) + ".xml"
#Do initial filter of images, allow for 3 deg cone around position (get ASKAP image which is ~30 sq deg).
src_num = 0
for sky_loc in source_list:
src_num = src_num + 1
ra = sky_loc.ra.degree
dec = sky_loc.dec.degree
data_product_id_query = "select * from ivoa.obscore where obs_collection LIKE '%" + proj + \
"%' and dataproduct_subtype = 'cont.restored.t0' and pol_states = '/I/' and 1 = CONTAINS(POINT('ICRS',"+ str(ra) + ","+ str(dec) + "),s_region)"
casda.sync_tap_query(data_product_id_query, filename, username=username, password=password)
image_cube_votable = votable.parse(filename, pedantic=False)
results_array = image_cube_votable.get_table_by_id('results').array
# For each of the image cubes, query datalink to get the secure datalink details
print ("\n\n** Retrieving datalink for each image containing source number " + str(src_num) + " ...\n\n")
authenticated_id_tokens = []
for image_cube_result in results_array:
image_cube_id = image_cube_result['obs_publisher_did'].decode('utf-8')
async_url, authenticated_id_token = casda.get_service_link_and_id(image_cube_id, username,
password,
service='cutout_service',
destination_dir=destination_dir)
if authenticated_id_token is not None:
authenticated_id_tokens.append(authenticated_id_token)
if len(authenticated_id_tokens) == 0:
print ("No image cubes found")
return 1
# Create the async job
job_location = casda.create_async_soda_job(authenticated_id_tokens)
# For each entry in the results of the catalogue query, add the position filter as a parameter to the async job
cutout_filters = []
circle = "CIRCLE " + str(ra) + " " + str(dec) + " " + str(cutout_radius_degrees)
cutout_filters.append(circle)
casda.add_params_to_async_job(job_location, 'pos', cutout_filters)
# Run the job
status = casda.run_async_job(job_location)
# Download all of the files, or alert if it didn't complete
if status == 'COMPLETED':
print ("\n\n** Downloading results...\n\n")
casda.download_all(job_location, destination_dir)
returnflag = 0
else:
print ("Job did not complete: Status was %s." % status)
returnflag = 1
if returnflag == 0:
return 0
else:
return 1
def main():
args = parseargs()
password = casda.get_opal_password(args.opal_password, args.password_file)
# Change this to choose which environment to use, prod is the default
# casda.use_dev()
destination_dir = args.destination_directory + "/" + str(args.proj) + "/" # directory where files will be saved
# 1) Read in the list of sources
print ("\n\n** Parsing the source list ...\n")
source_list = parse_sources_file(args.source_list_file)
print ("\n** Read %d sources...\n\n" % (len(source_list)))
# 2) Create the destination directory
if not os.path.exists(destination_dir):
os.makedirs(destination_dir)
# Do the work
return produce_cutouts(source_list, args.proj, args.opal_username, password, destination_dir, args.radius)
if __name__ == '__main__':
exit(main())
| {
"content_hash": "c4fcfa1375e570eb0d87670a2755f168",
"timestamp": "",
"source": "github",
"line_count": 140,
"max_line_length": 172,
"avg_line_length": 44.67142857142857,
"alnum_prop": 0.6039334825711544,
"repo_name": "csiro-rds/casda-samples",
"id": "fed7e894c4ea46a62410f98982ce151f7fcb3094",
"size": "7634",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cutouts_by_proj.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "59772"
}
],
"symlink_target": ""
} |
from mock import Mock
from tests.testcase import TestCase
from jobrunner.logbook import get_atoms_for_flow
class TestGetAtomsForFlow(TestCase):
def setUp(self):
self.flow = Mock()
self.persistence_backend = Mock()
def test_get_atoms_for_flow_gets_atoms_for_flow(self):
get_atoms_for_flow(self.flow, self.persistence_backend)
self.persistence_backend.get_atoms_for_flow.assert_called_once_with(
self.flow.uuid
)
    def test_get_atoms_for_flow_returns_atoms(self):
ret = get_atoms_for_flow(self.flow, self.persistence_backend)
self.assertEqual(
ret, self.persistence_backend.get_atoms_for_flow.return_value
)
| {
"content_hash": "8cd8a0c84bffa7efa63294d8f9d381fe",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 76,
"avg_line_length": 28.64,
"alnum_prop": 0.6759776536312849,
"repo_name": "vdloo/jobrunner",
"id": "40e97996b3775f0bc56e39065185f8084b21ddad",
"size": "716",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/unit/jobrunner/logbook/test_get_atoms_for_flow.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "102107"
},
{
"name": "Shell",
"bytes": "2630"
}
],
"symlink_target": ""
} |
from model.new_user_data import N_u_d
import random
def test_delete_contact(app, db, check_ui):
if len(db.get_contact_list()) == 0:
app.contact.Create()
app.contact.Fill_in(N_u_d(namef="Rus", namem="Si", namel="An"))
old_contacts = db.get_contact_list()
contact = random.choice(old_contacts)
app.contact.delete_by_id(contact.id)
new_contacts = db.get_contact_list()
assert len(old_contacts) - 1 == len(new_contacts)
old_contacts.remove(contact)
assert sorted(old_contacts, key=N_u_d.id_or_max) == sorted(new_contacts, key=N_u_d.id_or_max)
if check_ui:
assert sorted(new_contacts, key=N_u_d.id_or_max) == sorted(app.contact.get_contact_list(), key=N_u_d.id_or_max)
| {
"content_hash": "9d5fabe1930cfc62b797ae5041d74e20",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 119,
"avg_line_length": 42.529411764705884,
"alnum_prop": 0.656984785615491,
"repo_name": "SCLT1975/python_training",
"id": "7103a3d22e1f798562fd17356d9c681b7eaa2c8d",
"size": "747",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/test_del_contact.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "1233"
},
{
"name": "C",
"bytes": "409362"
},
{
"name": "C++",
"bytes": "129981"
},
{
"name": "PowerShell",
"bytes": "8175"
},
{
"name": "Python",
"bytes": "29810"
},
{
"name": "Tcl",
"bytes": "1285363"
}
],
"symlink_target": ""
} |
"""
Display DNS resolution success on a configured domain.
This module launches a simple query against each nameserver for the specified domain.
Nameservers are retrieved dynamically. The FQDN is the only mandatory parameter.
It's also possible to add additional nameservers by listing them in the nameservers parameter.
The default resolvers can be overridden with the resolvers parameter.
Configuration parameters:
- domain : domain name to check
- lifetime : resolver lifetime
- nameservers : comma separated list of reference DNS nameservers
- resolvers : comma separated list of DNS resolvers to use
@author nawadanp
"""
import dns.resolver
import socket
class Py3status:
"""
"""
# available configuration parameters
domain = ''
lifetime = 0.3
nameservers = ''
resolvers = ''
def ns_checker(self, i3s_output_list, i3s_config):
response = {'full_text': ''}
counter = 0
error = False
nameservers = []
# parse some configuration parameters
if not isinstance(self.nameservers, list):
self.nameservers = self.nameservers.split(',')
if not isinstance(self.resolvers, list):
self.resolvers = self.resolvers.split(',')
my_resolver = dns.resolver.Resolver()
my_resolver.lifetime = self.lifetime
if self.resolvers:
my_resolver.nameservers = self.resolvers
my_ns = my_resolver.query(self.domain, 'NS')
# Insert each NS ip address in nameservers
for ns in my_ns:
nameservers.append(str(socket.gethostbyname(str(ns))))
for ns in self.nameservers:
nameservers.append(str(ns))
# Perform a simple DNS query, for each NS servers
for ns in nameservers:
my_resolver.nameservers = [ns]
counter += 1
try:
my_resolver.query(self.domain, 'A')
except:
error = True
if error:
response['full_text'] = str(counter) + ' NS NOK'
response['color'] = i3s_config['color_bad']
else:
response['full_text'] = str(counter) + ' NS OK'
response['color'] = i3s_config['color_good']
return response
if __name__ == "__main__":
"""
Test this module by calling it directly.
"""
from time import sleep
x = Py3status()
config = {
'color_good': '#00FF00',
'color_bad': '#FF0000',
}
while True:
print(x.ns_checker([], config))
sleep(1)
| {
"content_hash": "a7770ef4717d9119a9a87d76ca5a2a1d",
"timestamp": "",
"source": "github",
"line_count": 87,
"max_line_length": 87,
"avg_line_length": 29.45977011494253,
"alnum_prop": 0.6082715567694108,
"repo_name": "hburg1234/py3status",
"id": "f91843e6a0f03d3f2ff31cf9852d5e35cbdcb0c6",
"size": "2587",
"binary": false,
"copies": "10",
"ref": "refs/heads/master",
"path": "py3status/modules/ns_checker.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "215125"
}
],
"symlink_target": ""
} |
"""Person detection using Sighthound cloud service."""
import io
import logging
from pathlib import Path
from PIL import Image, ImageDraw, UnidentifiedImageError
import simplehound.core as hound
import voluptuous as vol
from homeassistant.components.image_processing import (
CONF_ENTITY_ID,
CONF_NAME,
CONF_SOURCE,
PLATFORM_SCHEMA,
ImageProcessingEntity,
)
from homeassistant.const import ATTR_ENTITY_ID, CONF_API_KEY
from homeassistant.core import split_entity_id
import homeassistant.helpers.config_validation as cv
import homeassistant.util.dt as dt_util
from homeassistant.util.pil import draw_box
_LOGGER = logging.getLogger(__name__)
EVENT_PERSON_DETECTED = "sighthound.person_detected"
ATTR_BOUNDING_BOX = "bounding_box"
ATTR_PEOPLE = "people"
CONF_ACCOUNT_TYPE = "account_type"
CONF_SAVE_FILE_FOLDER = "save_file_folder"
CONF_SAVE_TIMESTAMPTED_FILE = "save_timestamped_file"
DATETIME_FORMAT = "%Y-%m-%d_%H:%M:%S"
DEV = "dev"
PROD = "prod"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_API_KEY): cv.string,
vol.Optional(CONF_ACCOUNT_TYPE, default=DEV): vol.In([DEV, PROD]),
vol.Optional(CONF_SAVE_FILE_FOLDER): cv.isdir,
vol.Optional(CONF_SAVE_TIMESTAMPTED_FILE, default=False): cv.boolean,
}
)
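# Example Home Assistant configuration.yaml entry for this platform (a sketch;
# the API key and camera entity id are placeholders):
#
#     image_processing:
#       - platform: sighthound
#         api_key: YOUR_SIGHTHOUND_API_KEY
#         source:
#           - entity_id: camera.front_door
#         save_file_folder: /config/snapshots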
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the platform."""
# Validate credentials by processing image.
api_key = config[CONF_API_KEY]
account_type = config[CONF_ACCOUNT_TYPE]
api = hound.cloud(api_key, account_type)
try:
api.detect(b"Test")
except hound.SimplehoundException as exc:
_LOGGER.error("Sighthound error %s setup aborted", exc)
return
save_file_folder = config.get(CONF_SAVE_FILE_FOLDER)
if save_file_folder:
save_file_folder = Path(save_file_folder)
entities = []
for camera in config[CONF_SOURCE]:
sighthound = SighthoundEntity(
api,
camera[CONF_ENTITY_ID],
camera.get(CONF_NAME),
save_file_folder,
config[CONF_SAVE_TIMESTAMPTED_FILE],
)
entities.append(sighthound)
add_entities(entities)
class SighthoundEntity(ImageProcessingEntity):
"""Create a sighthound entity."""
def __init__(
self, api, camera_entity, name, save_file_folder, save_timestamped_file
):
"""Init."""
self._api = api
self._camera = camera_entity
if name:
self._name = name
else:
camera_name = split_entity_id(camera_entity)[1]
self._name = f"sighthound_{camera_name}"
self._state = None
self._last_detection = None
self._image_width = None
self._image_height = None
self._save_file_folder = save_file_folder
self._save_timestamped_file = save_timestamped_file
def process_image(self, image):
"""Process an image."""
detections = self._api.detect(image)
people = hound.get_people(detections)
self._state = len(people)
if self._state > 0:
self._last_detection = dt_util.now().strftime(DATETIME_FORMAT)
metadata = hound.get_metadata(detections)
self._image_width = metadata["image_width"]
self._image_height = metadata["image_height"]
for person in people:
self.fire_person_detected_event(person)
if self._save_file_folder and self._state > 0:
self.save_image(image, people, self._save_file_folder)
def fire_person_detected_event(self, person):
"""Send event with detected total_persons."""
self.hass.bus.fire(
EVENT_PERSON_DETECTED,
{
ATTR_ENTITY_ID: self.entity_id,
ATTR_BOUNDING_BOX: hound.bbox_to_tf_style(
person["boundingBox"], self._image_width, self._image_height
),
},
)
def save_image(self, image, people, directory):
"""Save a timestamped image with bounding boxes around targets."""
try:
img = Image.open(io.BytesIO(bytearray(image))).convert("RGB")
except UnidentifiedImageError:
_LOGGER.warning("Sighthound unable to process image, bad data")
return
draw = ImageDraw.Draw(img)
for person in people:
box = hound.bbox_to_tf_style(
person["boundingBox"], self._image_width, self._image_height
)
draw_box(draw, box, self._image_width, self._image_height)
latest_save_path = directory / f"{self._name}_latest.jpg"
img.save(latest_save_path)
if self._save_timestamped_file:
timestamp_save_path = directory / f"{self._name}_{self._last_detection}.jpg"
img.save(timestamp_save_path)
_LOGGER.info("Sighthound saved file %s", timestamp_save_path)
@property
def camera_entity(self):
"""Return camera entity id from process pictures."""
return self._camera
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def should_poll(self):
"""Return the polling state."""
return False
@property
def state(self):
"""Return the state of the entity."""
return self._state
@property
def unit_of_measurement(self):
"""Return the unit of measurement."""
return ATTR_PEOPLE
@property
def device_state_attributes(self):
"""Return the attributes."""
if not self._last_detection:
return {}
return {"last_person": self._last_detection}
| {
"content_hash": "ebf71c071d0b2579df433397244c503b",
"timestamp": "",
"source": "github",
"line_count": 177,
"max_line_length": 88,
"avg_line_length": 32.18079096045198,
"alnum_prop": 0.6218398876404494,
"repo_name": "sdague/home-assistant",
"id": "e15fab1aaa3139f1c134249e497ecfff76efd0d5",
"size": "5696",
"binary": false,
"copies": "8",
"ref": "refs/heads/dev",
"path": "homeassistant/components/sighthound/image_processing.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1488"
},
{
"name": "Python",
"bytes": "27869189"
},
{
"name": "Shell",
"bytes": "4528"
}
],
"symlink_target": ""
} |
import argparse
import sys
import iptracker
def main(args=None):
if args is None:
args = sys.argv[1:]
parser = argparse.ArgumentParser(description='Track your IP in realtime')
# parser.add_argument("--", help="increase output verbosity")
iptracker.main(parser.parse_args())
if __name__ == "__main__":
main()
| {
"content_hash": "e8f1426ef6ca126bef2d327c6a41fd02",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 77,
"avg_line_length": 20.11764705882353,
"alnum_prop": 0.652046783625731,
"repo_name": "davidomil/IPTracker",
"id": "7a85b42676b50030744a6ec166be74cca5c7168a",
"size": "365",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "iptracker/__main__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "4398"
}
],
"symlink_target": ""
} |
import threading
from django.contrib.sites.models import Site
from django.conf import settings
from django.http.request import split_domain_port
_thread_local = threading.local()
def current_request():
"""
Provides access to the current request at deeper project levels.
"""
return getattr(_thread_local, "request", None)
class SiteMiddleware(object):
"""
Determines the current Site based on the domain in use.
"""
def __init__(self, get_response):
self.get_response = get_response
def __call__(self, request):
domain, port = split_domain_port(request.get_host())
try:
current_site = Site.objects.get(domain=domain)
except Site.DoesNotExist:
current_site = Site.objects.get(id=settings.SITE_ID)
request.site = current_site
_thread_local.request = request
return self.get_response(request)
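# To activate this middleware it would be listed in MIDDLEWARE in settings.py;
# the dotted path below is inferred from this file living at
# conf/middleware/site.py and is an assumption, not taken from project docs:
#
#     MIDDLEWARE = [
#         # ...
#         'conf.middleware.site.SiteMiddleware',
#     ]
#
# Lower layers (models, signals) can then call current_request() to reach the
# active request without it being passed explicitly.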
| {
"content_hash": "3be4b9e8d51c1d0eb1423de4bbff3539",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 68,
"avg_line_length": 24.81081081081081,
"alnum_prop": 0.6612200435729847,
"repo_name": "cdubz/timestrap",
"id": "0ed04a0ae2cbf44ed4a786304d9ce7bcd8e1e509",
"size": "918",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "conf/middleware/site.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "5599"
},
{
"name": "HTML",
"bytes": "9986"
},
{
"name": "JavaScript",
"bytes": "16565"
},
{
"name": "Python",
"bytes": "119046"
},
{
"name": "Vue",
"bytes": "67855"
}
],
"symlink_target": ""
} |
from __future__ import print_function
import time, sys, atexit
from upm import pyupm_isd1820 as upmIsd1820
def main():
# Instantiate a ISD1820 on digital pins 2 (play) and 3 (record)
# This example was tested on the Grove Recorder.
myRecorder = upmIsd1820.ISD1820(2, 3)
doRecord = False
if len(sys.argv) > 1:
doRecord = True
# This lets you run code on exit,
# including functions from myRecorder
def exitHandler():
# turn off whatever we were doing.
if (doRecord):
myRecorder.record(False)
else:
myRecorder.play(False)
print("Exiting")
sys.exit(0)
# Register exit handlers
atexit.register(exitHandler)
# if an argument was specified (any argument), go into record mode,
# else playback a previously recorded sample
print("Supply any argument to the command line to record.")
print("Running this example without arguments will play back any ")
print("previously recorded sound.")
print("There is approximately 10 seconds of recording time.\n")
# depending on what was selected, do it, and sleep for 15 seconds
if (doRecord):
myRecorder.record(True)
else:
myRecorder.play(True)
# There are about 10 seconds of recording/playback time, so we will
# sleep for a little extra time.
print("Sleeping for 15 seconds...")
time.sleep(15)
# exitHandler runs automatically
if __name__ == '__main__':
main()
| {
"content_hash": "12bd3cd160c5ef9df1b92c902af7bbb8",
"timestamp": "",
"source": "github",
"line_count": 51,
"max_line_length": 71,
"avg_line_length": 29.313725490196077,
"alnum_prop": 0.65685618729097,
"repo_name": "spitfire88/upm",
"id": "726ba89b9b6e60aa61611ca32fec68f50b6223a2",
"size": "2651",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "examples/python/isd1820.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "2666269"
},
{
"name": "C++",
"bytes": "3221175"
},
{
"name": "CMake",
"bytes": "126440"
},
{
"name": "CSS",
"bytes": "18714"
},
{
"name": "HTML",
"bytes": "33016"
},
{
"name": "JavaScript",
"bytes": "47971"
},
{
"name": "Objective-C",
"bytes": "5854"
},
{
"name": "Python",
"bytes": "39398"
}
],
"symlink_target": ""
} |
from amaranth import Cat, Signal, signed
from amaranth_cfu import all_words, SimpleElaboratable, tree_sum
from .delay import Delayer
from .post_process import PostProcessor
from .registerfile import Xetter
class Madd4Pipeline(SimpleElaboratable):
"""A 4-wide Multiply Add pipeline.
Pipeline takes 2 additional cycles.
f_data and i_data each contain 4 signed 8 bit values. The
calculation performed is:
result = sum((i_data[n] + offset) * f_data[n] for n in range(4))
Public Interface
----------------
    offset: Signal(signed(9)) input
Offset to be added to all inputs.
f_data: Signal(32) input
4 bytes of filter data to use next
i_data: Signal(32) input
      4 bytes of input data to use next
result: Signal(signed(32)) output
Result of the multiply and add
"""
PIPELINE_CYCLES = 2
def __init__(self):
super().__init__()
self.offset = Signal(signed(9))
self.f_data = Signal(32)
self.i_data = Signal(32)
self.result = Signal(signed(32))
def elab(self, m):
# Product is 17 bits: 8 bits * 9 bits = 17 bits
products = [Signal(signed(17), name=f"product_{n}") for n in range(4)]
for i_val, f_val, product in zip(
all_words(self.i_data, 8), all_words(self.f_data, 8), products):
f_tmp = Signal(signed(9))
m.d.sync += f_tmp.eq(f_val.as_signed())
i_tmp = Signal(signed(9))
m.d.sync += i_tmp.eq(i_val.as_signed() + self.offset)
m.d.comb += product.eq(i_tmp * f_tmp)
m.d.sync += self.result.eq(tree_sum(products))
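# Worked example of the multiply-accumulate above (numbers are illustrative):
# with offset = 128, a lane whose i_data byte is -100 and whose f_data byte is 3
# contributes (-100 + 128) * 3 = 84; result is the sum of the four lane products
# and is valid PIPELINE_CYCLES (2) clock cycles after the inputs were presented.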
class Accumulator(SimpleElaboratable):
"""An accumulator for a Madd4Pipline
Public Interface
----------------
add_en: Signal() input
When to add the input
in_value: Signal(signed(32)) input
The input data to add
clear: Signal() input
Zero accumulator.
result: Signal(signed(32)) output
Result of the multiply and add
"""
def __init__(self):
super().__init__()
self.add_en = Signal()
self.in_value = Signal(signed(32))
self.clear = Signal()
self.result = Signal(signed(32))
def elab(self, m):
accumulator = Signal(signed(32))
m.d.comb += self.result.eq(accumulator)
with m.If(self.add_en):
m.d.sync += accumulator.eq(accumulator + self.in_value)
m.d.comb += self.result.eq(accumulator + self.in_value)
# clear always resets accumulator next cycle, even if add_en is high
with m.If(self.clear):
m.d.sync += accumulator.eq(0)
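# Note on the semantics above (restating the code, not adding behaviour): when
# add_en and clear are asserted in the same cycle, result still reflects
# accumulator + in_value for that cycle, while the stored accumulator is reset
# to zero on the next clock edge.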
class ByteToWordShifter(SimpleElaboratable):
"""Shifts bytes into a word.
Bytes are shifted from high to low, so that result is little-endian,
with the "first" byte occupying the LSBs
Public Interface
----------------
shift_en: Signal() input
When to shift the input
in_value: Signal(8) input
The input data to shift
result: Signal(32) output
Result of the shift
"""
def __init__(self):
super().__init__()
self.shift_en = Signal()
self.in_value = Signal(8)
self.clear = Signal()
self.result = Signal(32)
def elab(self, m):
register = Signal(32)
m.d.comb += self.result.eq(register)
with m.If(self.shift_en):
calc = Signal(32)
m.d.comb += [
calc.eq(Cat(register[8:], self.in_value)),
self.result.eq(calc),
]
m.d.sync += register.eq(calc)
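# Example of the shift order above (values are illustrative): shifting in the
# bytes 0x11, 0x22, 0x33, 0x44 over four enabled cycles leaves result equal to
# 0x44332211, i.e. the first byte shifted in ends up in the least significant
# byte, matching the little-endian note in the class docstring.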
| {
"content_hash": "1e04f5c4089321ae0005f2be896e28e6",
"timestamp": "",
"source": "github",
"line_count": 120,
"max_line_length": 80,
"avg_line_length": 30.208333333333332,
"alnum_prop": 0.5793103448275863,
"repo_name": "google/CFU-Playground",
"id": "a4116d10a3b980cfbe41845d7484b514b68837a9",
"size": "4219",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "proj/mnv2_first/gateware/macc.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "3800"
},
{
"name": "C",
"bytes": "449862"
},
{
"name": "C++",
"bytes": "4931362"
},
{
"name": "CMake",
"bytes": "976"
},
{
"name": "Dockerfile",
"bytes": "1026"
},
{
"name": "Jupyter Notebook",
"bytes": "35820"
},
{
"name": "Makefile",
"bytes": "40046"
},
{
"name": "Python",
"bytes": "1764584"
},
{
"name": "RobotFramework",
"bytes": "6125"
},
{
"name": "Scala",
"bytes": "18649"
},
{
"name": "Shell",
"bytes": "25687"
},
{
"name": "SystemVerilog",
"bytes": "6923"
},
{
"name": "Verilog",
"bytes": "6884686"
}
],
"symlink_target": ""
} |
from ray.rllib.evaluation.episode import MultiAgentEpisode
from ray.rllib.evaluation.rollout_worker import RolloutWorker
from ray.rllib.evaluation.policy_evaluator import PolicyEvaluator
from ray.rllib.evaluation.interface import EvaluatorInterface
from ray.rllib.evaluation.policy_graph import PolicyGraph
from ray.rllib.evaluation.tf_policy_graph import TFPolicyGraph
from ray.rllib.evaluation.torch_policy_graph import TorchPolicyGraph
from ray.rllib.evaluation.sample_batch import SampleBatch, MultiAgentBatch
from ray.rllib.evaluation.sample_batch_builder import (
SampleBatchBuilder, MultiAgentSampleBatchBuilder)
from ray.rllib.evaluation.sampler import SyncSampler, AsyncSampler
from ray.rllib.evaluation.postprocessing import compute_advantages
from ray.rllib.evaluation.metrics import collect_metrics
__all__ = [
"EvaluatorInterface",
"RolloutWorker",
"PolicyGraph",
"TFPolicyGraph",
"TorchPolicyGraph",
"SampleBatch",
"MultiAgentBatch",
"SampleBatchBuilder",
"MultiAgentSampleBatchBuilder",
"SyncSampler",
"AsyncSampler",
"compute_advantages",
"collect_metrics",
"MultiAgentEpisode",
"PolicyEvaluator",
]
| {
"content_hash": "44115c613c16bd9a6adb4b846d32de7e",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 74,
"avg_line_length": 38.096774193548384,
"alnum_prop": 0.79424216765453,
"repo_name": "stephanie-wang/ray",
"id": "f743cca647725ba7d2ad7c698990476c583a8ec3",
"size": "1181",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "rllib/evaluation/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "29882"
},
{
"name": "C++",
"bytes": "2149909"
},
{
"name": "CSS",
"bytes": "8025"
},
{
"name": "Dockerfile",
"bytes": "5499"
},
{
"name": "Go",
"bytes": "28481"
},
{
"name": "HTML",
"bytes": "30435"
},
{
"name": "Java",
"bytes": "738348"
},
{
"name": "JavaScript",
"bytes": "444"
},
{
"name": "Jupyter Notebook",
"bytes": "1615"
},
{
"name": "Makefile",
"bytes": "1965"
},
{
"name": "Python",
"bytes": "4058862"
},
{
"name": "Shell",
"bytes": "88736"
},
{
"name": "Starlark",
"bytes": "121207"
},
{
"name": "TypeScript",
"bytes": "64161"
}
],
"symlink_target": ""
} |
import unittest
from unittest import mock
from . import model, parts, signing, test_config
def _get_identity_hash(i):
if i == '[IDENTITY]':
return 'identity'
raise
class TestGetParts(unittest.TestCase):
def test_get_parts_no_base(self):
config = test_config.TestConfig()
all_parts = parts.get_parts(config)
self.assertEqual('test.signing.bundle_id', all_parts['app'].identifier)
self.assertEqual('test.signing.bundle_id.framework',
all_parts['framework'].identifier)
self.assertEqual(
'test.signing.bundle_id.framework.AlertNotificationService',
all_parts['helper-alerts'].identifier)
self.assertEqual('test.signing.bundle_id.helper',
all_parts['helper-app'].identifier)
def test_get_parts_no_customize(self):
config = model.Distribution(channel='dev').to_config(
test_config.TestConfig())
all_parts = parts.get_parts(config)
self.assertEqual('test.signing.bundle_id', all_parts['app'].identifier)
self.assertEqual('test.signing.bundle_id.framework',
all_parts['framework'].identifier)
self.assertEqual(
'test.signing.bundle_id.framework.AlertNotificationService',
all_parts['helper-alerts'].identifier)
self.assertEqual('test.signing.bundle_id.helper',
all_parts['helper-app'].identifier)
def test_get_parts_customize(self):
config = model.Distribution(
channel='canary',
app_name_fragment='Canary',
product_dirname='canary',
creator_code='cana',
channel_customize=True).to_config(test_config.TestConfig())
all_parts = parts.get_parts(config)
self.assertEqual('test.signing.bundle_id.canary',
all_parts['app'].identifier)
self.assertEqual('test.signing.bundle_id.framework',
all_parts['framework'].identifier)
self.assertEqual(
'test.signing.bundle_id.canary.framework.AlertNotificationService',
all_parts['helper-alerts'].identifier)
self.assertEqual('test.signing.bundle_id.helper',
all_parts['helper-app'].identifier)
def test_part_options(self):
all_parts = parts.get_parts(test_config.TestConfig())
self.assertEqual(
model.CodeSignOptions.RESTRICT
| model.CodeSignOptions.LIBRARY_VALIDATION
| model.CodeSignOptions.KILL
| model.CodeSignOptions.HARDENED_RUNTIME, all_parts['app'].options)
self.assertEqual(
model.CodeSignOptions.RESTRICT
| model.CodeSignOptions.LIBRARY_VALIDATION
| model.CodeSignOptions.KILL
| model.CodeSignOptions.HARDENED_RUNTIME,
all_parts['helper-app'].options)
self.assertEqual(
model.CodeSignOptions.RESTRICT | model.CodeSignOptions.KILL
| model.CodeSignOptions.HARDENED_RUNTIME,
all_parts['helper-renderer-app'].options)
self.assertEqual(
model.CodeSignOptions.RESTRICT | model.CodeSignOptions.KILL
| model.CodeSignOptions.HARDENED_RUNTIME,
all_parts['helper-gpu-app'].options)
self.assertEqual(
model.CodeSignOptions.RESTRICT | model.CodeSignOptions.KILL
| model.CodeSignOptions.HARDENED_RUNTIME,
all_parts['helper-plugin-app'].options)
self.assertEqual(
model.CodeSignOptions.RESTRICT
| model.CodeSignOptions.LIBRARY_VALIDATION
| model.CodeSignOptions.KILL
| model.CodeSignOptions.HARDENED_RUNTIME,
all_parts['crashpad'].options)
self.assertEqual(
model.CodeSignOptions.RESTRICT
| model.CodeSignOptions.LIBRARY_VALIDATION
| model.CodeSignOptions.KILL
| model.CodeSignOptions.HARDENED_RUNTIME,
all_parts['helper-alerts'].options)
self.assertEqual(
model.CodeSignOptions.RESTRICT
| model.CodeSignOptions.LIBRARY_VALIDATION
| model.CodeSignOptions.KILL
| model.CodeSignOptions.HARDENED_RUNTIME,
all_parts['app-mode-app'].options)
def _get_plist_read(other_version):
def _plist_read(*args):
path = args[0]
first_slash = path.find('/')
path = path[first_slash + 1:]
plists = {
'$W/App Product.app/Contents/Info.plist': {
'KSVersion': '99.0.9999.99'
},
'$W/App Product.app/Contents/Frameworks/Product Framework.framework/Resources/Info.plist':
{
'CFBundleShortVersionString': other_version
}
}
return plists[path]
return _plist_read
@mock.patch.multiple('signing.signing',
**{m: mock.DEFAULT for m in ('sign_part', 'verify_part')})
@mock.patch.multiple('signing.commands', **{
m: mock.DEFAULT
for m in ('copy_files', 'move_file', 'make_dir', 'run_command')
})
@mock.patch('signing.model._get_identity_hash', _get_identity_hash)
class TestSignChrome(unittest.TestCase):
def setUp(self):
self.paths = model.Paths('/$I', '/$O', '/$W')
@mock.patch('signing.parts._sanity_check_version_keys')
def test_sign_chrome(self, *args, **kwargs):
manager = mock.Mock()
for kwarg in kwargs:
manager.attach_mock(kwargs[kwarg], kwarg)
dist = model.Distribution()
config = dist.to_config(test_config.TestConfig())
parts.sign_chrome(self.paths, config, sign_framework=True)
# No files should be moved.
self.assertEqual(0, kwargs['move_file'].call_count)
# Test that the provisioning profile is copied.
self.assertEqual(kwargs['copy_files'].mock_calls, [
mock.call.copy_files(
'/$I/Product Packaging/provisiontest.identity.provisionprofile',
'/$W/App Product.app/Contents/embedded.provisionprofile')
])
# Ensure that all the parts are signed.
signed_paths = [
call[1][2].path for call in kwargs['sign_part'].mock_calls
]
self.assertEqual(
set([p.path for p in parts.get_parts(config).values()]),
set(signed_paths))
# Make sure that the framework and the app are the last two parts that
# are signed.
self.assertEqual(signed_paths[-2:], [
'App Product.app/Contents/Frameworks/Product Framework.framework',
'App Product.app'
])
self.assertEqual(kwargs['run_command'].mock_calls, [
mock.call.run_command([
'codesign', '--display', '--requirements', '-', '--verbose=5',
'/$W/App Product.app'
]),
mock.call.run_command(
['spctl', '--assess', '-vv', '/$W/App Product.app']),
])
@mock.patch('signing.parts._sanity_check_version_keys')
def test_sign_chrome_no_assess(self, *args, **kwargs):
dist = model.Distribution()
class Config(test_config.TestConfig):
@property
def run_spctl_assess(self):
return False
config = dist.to_config(Config())
parts.sign_chrome(self.paths, config, sign_framework=True)
self.assertEqual(kwargs['run_command'].mock_calls, [
mock.call.run_command([
'codesign', '--display', '--requirements', '-', '--verbose=5',
'/$W/App Product.app'
]),
])
@mock.patch('signing.parts._sanity_check_version_keys')
def test_sign_chrome_no_provisioning(self, *args, **kwargs):
dist = model.Distribution()
class Config(test_config.TestConfig):
@property
def provisioning_profile_basename(self):
return None
config = dist.to_config(Config())
parts.sign_chrome(self.paths, config, sign_framework=True)
self.assertEqual(0, kwargs['copy_files'].call_count)
@mock.patch('signing.parts._sanity_check_version_keys')
def test_sign_chrome_no_framework(self, *args, **kwargs):
manager = mock.Mock()
for kwarg in kwargs:
manager.attach_mock(kwargs[kwarg], kwarg)
dist = model.Distribution()
config = dist.to_config(test_config.TestConfig())
parts.sign_chrome(self.paths, config, sign_framework=False)
# No files should be moved.
self.assertEqual(0, kwargs['move_file'].call_count)
# Test that the provisioning profile is copied.
self.assertEqual(kwargs['copy_files'].mock_calls, [
mock.call.copy_files(
'/$I/Product Packaging/provisiontest.identity.provisionprofile',
'/$W/App Product.app/Contents/embedded.provisionprofile')
])
# Ensure that only the app is signed.
signed_paths = [
call[1][2].path for call in kwargs['sign_part'].mock_calls
]
self.assertEqual(signed_paths, ['App Product.app'])
self.assertEqual(kwargs['run_command'].mock_calls, [
mock.call.run_command([
'codesign', '--display', '--requirements', '-', '--verbose=5',
'/$W/App Product.app'
]),
mock.call.run_command(
['spctl', '--assess', '-vv', '/$W/App Product.app']),
])
@mock.patch(
'signing.commands.read_plist',
side_effect=_get_plist_read('99.0.9999.99'))
def test_sanity_check_ok(self, read_plist, **kwargs):
config = model.Distribution().to_config(test_config.TestConfig())
parts.sign_chrome(self.paths, config, sign_framework=True)
@mock.patch(
'signing.commands.read_plist',
side_effect=_get_plist_read('55.0.5555.55'))
def test_sanity_check_bad(self, read_plist, **kwargs):
config = model.Distribution().to_config(test_config.TestConfig())
self.assertRaises(
ValueError, lambda: parts.sign_chrome(
self.paths, config, sign_framework=True))
| {
"content_hash": "414e176198c7d03e8cc93e6d654888c5",
"timestamp": "",
"source": "github",
"line_count": 268,
"max_line_length": 102,
"avg_line_length": 38.17910447761194,
"alnum_prop": 0.5947028928850665,
"repo_name": "nwjs/chromium.src",
"id": "1735f4e4dbfdb4c88eded932a97ac0559bf899b7",
"size": "10373",
"binary": false,
"copies": "1",
"ref": "refs/heads/nw70",
"path": "chrome/installer/mac/signing/parts_test.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
import os
import subprocess as sp
import sys
from textwrap import dedent
import pytest
@pytest.fixture
def cython_testpackage(tmpdir, request):
"""
Creates a trivial Cython package for use with tests.
"""
test_pkg = tmpdir.mkdir('test_pkg')
test_pkg.mkdir('_eva_').ensure('__init__.py')
test_pkg.join('_eva_').join('unit02.pyx').write(dedent("""\
def pilot():
\"\"\"Returns the pilot of Eva Unit-02.\"\"\"
return True
"""))
import astropy_helpers
test_pkg.join('setup.py').write(dedent("""\
import sys
sys.path.insert(0, {0!r})
from os.path import join
from setuptools import setup, Extension
from astropy_helpers.setup_helpers import register_commands
NAME = '_eva_'
VERSION = 0.1
RELEASE = True
cmdclassd = register_commands(NAME, VERSION, RELEASE)
setup(
name=NAME,
version=VERSION,
cmdclass=cmdclassd,
ext_modules=[Extension('_eva_.unit02',
[join('_eva_', 'unit02.pyx')])]
)
""".format(os.path.dirname(astropy_helpers.__path__[0]))))
test_pkg.chdir()
# Build the Cython module in a subprocess; otherwise strange things can
# happen with Cython's global module state
sp.call([sys.executable, 'setup.py', 'build_ext', '--inplace'])
sys.path.insert(0, str(test_pkg))
import _eva_.unit02
def cleanup(test_pkg=test_pkg):
for modname in ['_eva_', '_eva_.unit02']:
try:
del sys.modules[modname]
except KeyError:
pass
sys.path.remove(str(test_pkg))
request.addfinalizer(cleanup)
return test_pkg
| {
"content_hash": "269913b124729910957cc7ce40022b93",
"timestamp": "",
"source": "github",
"line_count": 70,
"max_line_length": 75,
"avg_line_length": 25.15714285714286,
"alnum_prop": 0.5718341851220897,
"repo_name": "embray/astropy_helpers",
"id": "64764929bec0fec873e66557b53043d0111fd638",
"size": "1761",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "astropy_helpers/sphinx/ext/tests/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "1842"
},
{
"name": "C",
"bytes": "3033"
},
{
"name": "HTML",
"bytes": "3812"
},
{
"name": "PowerShell",
"bytes": "2352"
},
{
"name": "Python",
"bytes": "394400"
},
{
"name": "Shell",
"bytes": "535"
}
],
"symlink_target": ""
} |
from past.builtins import basestring
from builtins import * # noqa: F403, F401
from xml.etree import ElementTree
class XmlDictObject(dict):
"""
Adds object like functionality to the standard dictionary.
"""
def __init__(self, initdict=None):
if initdict is None:
initdict = {}
dict.__init__(self, initdict)
def __getattr__(self, item):
return self.__getitem__(item)
def __setattr__(self, item, value):
self.__setitem__(item, value)
def __str__(self):
if '_text' in self:
return self.__getitem__('_text')
else:
return ''
@staticmethod
def Wrap(x):
"""
Static method to wrap a dictionary recursively as an XmlDictObject
"""
if isinstance(x, dict):
return XmlDictObject((k, XmlDictObject.Wrap(v)) for (k, v) in x.items())
elif isinstance(x, list):
return [XmlDictObject.Wrap(v) for v in x]
else:
return x
@staticmethod
def _UnWrap(x):
if isinstance(x, dict):
return dict((k, XmlDictObject._UnWrap(v)) for (k, v) in x.items())
elif isinstance(x, list):
return [XmlDictObject._UnWrap(v) for v in x]
else:
return x
def UnWrap(self):
"""
Recursively converts an XmlDictObject to a standard dictionary and returns the result.
"""
return XmlDictObject._UnWrap(self)
def _ConvertDictToXmlRecurse(parent, dictitem):
assert not isinstance(dictitem, type([]))
if isinstance(dictitem, dict):
for (tag, child) in dictitem.items():
if str(tag) == '_text':
parent.text = str(child)
elif isinstance(child, type([])):
# iterate through the array and convert
for listchild in child:
elem = ElementTree.Element(tag)
parent.append(elem)
_ConvertDictToXmlRecurse(elem, listchild)
else:
elem = ElementTree.Element(tag)
parent.append(elem)
_ConvertDictToXmlRecurse(elem, child)
else:
parent.text = str(dictitem)
def ConvertDictToXml(xmldict):
"""
Converts a dictionary to an XML ElementTree Element
"""
roottag = list(xmldict)[0]
root = ElementTree.Element(roottag)
_ConvertDictToXmlRecurse(root, xmldict[roottag])
return root
def _ConvertXmlToDictRecurse(node, dictclass):
nodedict = dictclass()
if len(node.items()) > 0:
# if we have attributes, set them
nodedict.update(dict(node.items()))
for child in node:
# recursively add the element's children
newitem = _ConvertXmlToDictRecurse(child, dictclass)
if child.tag in nodedict:
# found duplicate tag, force a list
if isinstance(nodedict[child.tag], type([])):
# append to existing list
nodedict[child.tag].append(newitem)
else:
# convert to list
nodedict[child.tag] = [nodedict[child.tag], newitem]
else:
# only one, directly set the dictionary
nodedict[child.tag] = newitem
if node.text is None:
text = ''
else:
text = node.text.strip()
if len(nodedict) > 0:
# if we have a dictionary add the text as a dictionary value (if there is any)
if len(text) > 0:
nodedict['_text'] = text
else:
# if we don't have child nodes or attributes, just set the text
nodedict = text
return nodedict
def ConvertXmlToDict(root, dictclass=XmlDictObject):
"""
Converts an XML file or ElementTree Element to a dictionary
"""
# If a string is passed in, try to open it as a file
if isinstance(root, basestring):
root = ElementTree.parse(root).getroot()
elif not isinstance(root, ElementTree.Element):
raise TypeError('Expected ElementTree.Element or file path string')
return dictclass({root.tag: _ConvertXmlToDictRecurse(root, dictclass)})
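# Illustrative round trip (a sketch; 'config.xml' is a placeholder path):
#
#     cfg = ConvertXmlToDict('config.xml')   # a file path or an Element both work
#     root_tag = list(cfg)[0]                # keys/attributes mirror the XML tags
#     elem = ConvertDictToXml(cfg)           # back to an ElementTree Element
#     xml_bytes = ElementTree.tostring(elem)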
| {
"content_hash": "4cb13ad6e6647ff8415bf0cbae002716",
"timestamp": "",
"source": "github",
"line_count": 137,
"max_line_length": 94,
"avg_line_length": 30.21897810218978,
"alnum_prop": 0.585024154589372,
"repo_name": "CI-WATER/tethys_dataset_services",
"id": "4bbafe488d16bef693f835db669c88a16b6e4582",
"size": "4234",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tethys_dataset_services/utilities.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "183132"
},
{
"name": "Scheme",
"bytes": "1388"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import python_2_unicode_compatible
from djangobmf.conf import settings
from djangobmf.models import BMFModel
from djangobmf.fields import CurrencyField
from djangobmf.fields import MoneyField
from djangobmf.fields import ObjectFileField
import datetime
from decimal import Decimal
from .serializers import InvoiceSerializer
from .serializers import InvoiceProductSerializer
from .workflows import InvoiceWorkflow
from .utils import number_range
@python_2_unicode_compatible
class BaseInvoice(BMFModel):
shipping_address = models.ForeignKey(
settings.CONTRIB_ADDRESS, related_name="shipping_invoice",
blank=False, null=True, on_delete=models.SET_NULL,
)
invoice_address = models.ForeignKey(
settings.CONTRIB_ADDRESS, related_name="quotation_invoice", blank=False,
null=True, on_delete=models.SET_NULL,
)
invoice_number = models.CharField(_('Invoice number'), max_length=255, null=True, blank=False)
invoice = ObjectFileField(verbose_name=_('Invoice'), null=True)
products = models.ManyToManyField(settings.CONTRIB_PRODUCT, through='InvoiceProduct', editable=False)
net = models.FloatField(editable=False, blank=True, null=True)
date = models.DateField(_("Date"), null=True, blank=False)
transaction = models.ForeignKey(
settings.CONTRIB_TRANSACTION, null=True, blank=True, related_name="transation_invoice",
editable=False, on_delete=models.PROTECT,
)
class Meta:
verbose_name = _('Invoice')
verbose_name_plural = _('Invoices')
ordering = ['invoice_number']
abstract = True
swappable = "BMF_CONTRIB_INVOICE"
class BMFMeta:
workflow = InvoiceWorkflow
serializer = InvoiceSerializer
@staticmethod
def post_save(sender, instance, created, raw, *args, **kwargs):
if not instance.invoice_number:
name = number_range.name(instance)
instance._meta.model.objects.filter(pk=instance.pk).update(invoice_number=name)
@staticmethod
def post_delete(sender, instance, *args, **kwargs):
number_range.delete(instance)
def __str__(self):
return '%s' % self.invoice_number
class AbstractInvoice(BaseInvoice):
"""
"""
customer = models.ForeignKey( # TODO: make optional
settings.CONTRIB_CUSTOMER,
null=True,
blank=False,
on_delete=models.SET_NULL,
)
project = models.ForeignKey( # TODO: make optional
settings.CONTRIB_PROJECT, null=True, blank=False, on_delete=models.SET_NULL,
)
employee = models.ForeignKey( # TODO: make optional
settings.CONTRIB_EMPLOYEE, null=True, blank=False, on_delete=models.SET_NULL,
)
due = models.DateField(_("Due"), null=True, blank=True)
notes = models.TextField(_("Notes"), null=True, blank=True)
term_of_payment = models.TextField(_("Term of payment"), blank=True, null=True)
class Meta(BaseInvoice.Meta):
abstract = True
class BMFMeta(BaseInvoice.BMFMeta):
has_files = True
has_comments = True
def bmfget_customer(self):
if hasattr(self, 'customer'):
return self.customer
return None
def bmfget_project(self):
if hasattr(self, 'project'):
return self.project
return None
def get_project_queryset(self, qs):
if self.customer:
return qs.filter(customer=self.customer)
return qs
def get_customer_queryset(self, qs):
if self.project:
return qs.filter(pk=self.project.customer_id)
return qs
def get_products(self):
if not hasattr(self, '_cache_products'):
self._cache_products = self.invoice_products.all().select_related('product')
return self._cache_products
def calc_net(self):
val = Decimal(0)
for item in self.get_products():
val += item.calc_net()
return val
def calc_gross(self):
val = Decimal(0)
for item in self.get_products():
val += item.calc_gross()
return val
def calc_taxes(self):
t = {}
for item in self.get_products():
for tax, value in item.calc_taxes():
if tax in t:
t[tax] += value
else:
t[tax] = value
return t.items()
def clean(self):
# if self.project and not self.customer_id:
# self.customer = self.project.customer
# if self.project and not self.employee_id:
# self.employee_id = self.project.employee_id
# if self.customer and not self.project_id:
# self.project = self.customer.project
if self.customer and not self.invoice_address_id:
self.invoice_address = \
self.customer.customer_address.filter(is_billing=True, default_billing=True).first()
if self.customer and not self.shipping_address_id:
self.shipping_address = \
self.customer.customer_address.filter(is_shipping=True, default_shipping=True).first()
if not self.date:
self.date = datetime.datetime.now().date()
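# Sketch of how the totals above compose (values are illustrative, names follow
# this module): for two line items with net amounts 100.00 and 50.00 sharing a
# 10% tax, calc_net() returns 150.00, calc_taxes() yields a single (tax, 15.00)
# pair and calc_gross() returns 165.00, each aggregated over the InvoiceProduct
# rows returned by get_products().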
class Invoice(AbstractInvoice):
pass
class InvoiceProduct(BMFModel):
invoice = models.ForeignKey(
settings.CONTRIB_INVOICE, null=True, blank=True,
related_name="invoice_products", on_delete=models.CASCADE,
)
product = models.ForeignKey(
settings.CONTRIB_PRODUCT, null=True, blank=True,
related_name="invoice_products", on_delete=models.PROTECT,
)
name = models.CharField(_("Name"), max_length=255, null=True, blank=False)
price_currency = CurrencyField()
price_precision = models.PositiveSmallIntegerField(
default=0, blank=True, null=True, editable=False,
)
price = MoneyField(_("Price"), blank=False)
amount = models.FloatField(_("Amount"), null=True, blank=False, default=1.0)
# unit = models.CharField() # TODO add units
description = models.TextField(_("Description"), null=True, blank=True)
class BMFMeta:
only_related = True
serializer = InvoiceProductSerializer
def calc_all(self):
if hasattr(self, '_calcs'):
return self._calcs
self._calcs = self.product.calc_tax(self.amount, self.price)
return self._calcs
def calc_net_unit(self):
return self.calc_all()[0]
def calc_net(self):
return self.calc_all()[1]
def calc_gross(self):
return self.calc_all()[2]
def calc_taxes(self):
return self.calc_all()[3]
def clean(self):
if self.product and not self.name:
self.name = self.product.name
if self.product and not self.price:
self.price = self.product.price
| {
"content_hash": "674a3666c843573f044435be53e72a3f",
"timestamp": "",
"source": "github",
"line_count": 210,
"max_line_length": 105,
"avg_line_length": 33.142857142857146,
"alnum_prop": 0.6426724137931035,
"repo_name": "django-bmf/django-bmf",
"id": "16ac2e2a4f15bd5d9415b39ea2c8b79e63b55294",
"size": "7008",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "djangobmf/contrib/invoice/models.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "11420"
},
{
"name": "CoffeeScript",
"bytes": "3197"
},
{
"name": "HTML",
"bytes": "117091"
},
{
"name": "JavaScript",
"bytes": "80435"
},
{
"name": "Python",
"bytes": "774167"
},
{
"name": "Shell",
"bytes": "736"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
import six
from six import reraise
from six.moves import http_client
from six.moves import StringIO
from six.moves import zip
from six import u
from six import iteritems
import sys
import os
import socket
import subprocess
import time
from six.moves.urllib.parse import urlparse
from datetime import datetime
from xml.parsers.expat import ParserCreate
# We have our own escape functionality.
# from xml.sax.saxutils import escape
from pyVmomi.VmomiSupport import *
from pyVmomi.StubAdapterAccessorImpl import StubAdapterAccessorMixin
import pyVmomi.Iso8601
import base64
from xml.parsers.expat import ExpatError
import copy
import contextlib
try:
USERWORLD = os.uname()[0] == 'VMkernel'
except:
USERWORLD = False
# Timeout value used for idle connections in client connection pool.
# Default value is 900 seconds (15 minutes).
CONNECTION_POOL_IDLE_TIMEOUT_SEC = 900
NS_SEP = " "
XML_ENCODING = 'UTF-8'
XML_HEADER = '<?xml version="1.0" encoding="{0}"?>'.format(XML_ENCODING)
XMLNS_SOAPENC = "http://schemas.xmlsoap.org/soap/encoding/"
XMLNS_SOAPENV = "http://schemas.xmlsoap.org/soap/envelope/"
XSI_TYPE = XMLNS_XSI + NS_SEP + u('type')
# Note: Must make a copy to use the SOAP_NSMAP
# TODO: Change to frozendict, if available
SOAP_NSMAP = { XMLNS_SOAPENC: 'soapenc', XMLNS_SOAPENV: 'soapenv',
XMLNS_XSI: 'xsi', XMLNS_XSD: 'xsd' }
SOAP_ENVELOPE_TAG = "{0}:Envelope".format(SOAP_NSMAP[XMLNS_SOAPENV])
SOAP_HEADER_TAG = "{0}:Header".format(SOAP_NSMAP[XMLNS_SOAPENV])
SOAP_FAULT_TAG = "{0}:Fault".format(SOAP_NSMAP[XMLNS_SOAPENV])
SOAP_BODY_TAG = "{0}:Body".format(SOAP_NSMAP[XMLNS_SOAPENV])
SOAP_ENVELOPE_START = '<{0} '.format(SOAP_ENVELOPE_TAG) + \
' '.join(['xmlns:' + prefix + '="' + urn + '"' \
for urn, prefix in iteritems(SOAP_NSMAP)]) + \
'>\n'
SOAP_ENVELOPE_END = "\n</{0}>".format(SOAP_ENVELOPE_TAG)
SOAP_HEADER_START = "<{0}>".format(SOAP_HEADER_TAG)
SOAP_HEADER_END = "</{0}>".format(SOAP_HEADER_TAG)
SOAP_BODY_START = "<{0}>".format(SOAP_BODY_TAG)
SOAP_BODY_END = "</{0}>".format(SOAP_BODY_TAG)
SOAP_START = SOAP_ENVELOPE_START + SOAP_BODY_START + '\n'
SOAP_END = '\n' + SOAP_BODY_END + SOAP_ENVELOPE_END
WSSE_PREFIX = "wsse"
WSSE_HEADER_TAG = "{0}:Security".format(WSSE_PREFIX)
WSSE_NS_URL = "http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-secext-1.0.xsd"
WSSE_NS = 'xmlns:{0}="{1}"'.format(WSSE_PREFIX, WSSE_NS_URL)
WSSE_HEADER_START = "<{0} {1}>".format(WSSE_HEADER_TAG, WSSE_NS)
WSSE_HEADER_END = "</{0}>".format(WSSE_HEADER_TAG)
## MethodFault type
MethodFault = GetVmodlType("vmodl.MethodFault")
## Localized MethodFault type
LocalizedMethodFault = GetVmodlType("vmodl.LocalizedMethodFault")
## Thumbprint mismatch exception
#
class ThumbprintMismatchException(Exception):
def __init__(self, expected, actual):
Exception.__init__(self, "Server has wrong SHA1 thumbprint: %s "
"(required) != %s (server)" % (
expected, actual))
self.expected = expected
self.actual = actual
## Escape <, >, &
def XmlEscape(xmlStr):
   escaped = xmlStr.replace("&", "&amp;").replace(">", "&gt;").replace("<", "&lt;")
return escaped
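# For example, XmlEscape('a < b & c') returns 'a &lt; b &amp; c'. Quote characters
# are not escaped, which matches how this module uses the helper (escaping
# element text rather than attribute values).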
## Get the start tag, end tag, and text handlers of a class
def GetHandlers(obj):
return (obj.StartElementHandler,
obj.EndElementHandler,
obj.CharacterDataHandler,
obj.StartNamespaceDeclHandler,
obj.EndNamespaceDeclHandler)
## Set the start tag, end tag, and text handlers of a parser
def SetHandlers(obj, handlers):
(obj.StartElementHandler,
obj.EndElementHandler,
obj.CharacterDataHandler,
obj.StartNamespaceDeclHandler,
obj.EndNamespaceDeclHandler) = handlers
## Serialize an object to bytes
#
# This function assumes CheckField(info, val) was already called
# @param val the value to serialize
# @param info the field
# @param version the version
# @param nsMap a dict of xml ns -> prefix
# @return the serialized object as bytes
# @param encoding Deprecated: not used during serialization, since we always
#        use utf-8 to encode a request message. The parameter is kept so that
#        clients which still pass it remain compatible.
def Serialize(val, info=None, version=None, nsMap=None, encoding=None):
return _SerializeToUnicode(val, info=info, version=version, nsMap=nsMap).encode(XML_ENCODING)
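# Illustrative call (a sketch; 'val' stands for any vmodl data value already
# validated with CheckField):
#
#     xml_bytes = Serialize(val)           # UTF-8 encoded bytes
#     xml_text = SerializeToUnicode(val)   # same markup as a unicode string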
## Serialize an object to unicode
#
# This function assumes CheckField(info, val) was already called
# @param val the value to serialize
# @param info the field
# @param version the version
# @param nsMap a dict of xml ns -> prefix
# @return the serialized object as unicode
def SerializeToUnicode(val, info=None, version=None, nsMap=None):
return _SerializeToUnicode(val, info=info, version=version, nsMap=nsMap)
## Serialize an object to unicode
#
# This function assumes CheckField(info, val) was already called
# @param val the value to serialize
# @param info the field
# @param version the version
# @param nsMap a dict of xml ns -> prefix
# @return the serialized object as unicode
def _SerializeToUnicode(val, info=None, version=None, nsMap=None):
if version is None:
try:
if isinstance(val, list):
itemType = val.Item
version = itemType._version
else:
if val is None:
# neither val nor version is given
return ''
# Pick up the version from val
version = val._version
except AttributeError:
version = BASE_VERSION
if info is None:
info = Object(name="object", type=object, version=version, flags=0)
writer = StringIO()
SoapSerializer(writer, version, nsMap).Serialize(val, info)
return writer.getvalue()
## Serialize fault detail
#
# Serializes a fault as the content of the detail element in a
# soapenv:Fault (i.e. without a LocalizedMethodFault wrapper).
#
# This function assumes CheckField(info, val) was already called
# @param val the value to serialize
# @param info the field
# @param version the version
# @param nsMap a dict of xml ns -> prefix
# @return the serialized object as a unicode string
def SerializeFaultDetail(val, info=None, version=None, nsMap=None, encoding=None):
if version is None:
try:
if not isinstance(val, MethodFault):
raise TypeError('{0} is not a MethodFault'.format(str(val)))
version = val._version
except AttributeError:
version = BASE_VERSION
if info is None:
info = Object(name="object", type=object, version=version, flags=0)
writer = StringIO()
SoapSerializer(writer, version, nsMap, encoding).SerializeFaultDetail(val, info)
return writer.getvalue()
## SOAP serializer
#
class SoapSerializer:
""" SoapSerializer """
## Serializer constructor
#
# @param writer File writer
# @param version the version
# @param nsMap a dict of xml ns -> prefix
   # @param encoding Deprecated: not used during serialization, since we always
   #        use utf-8 to encode a request message. The parameter is kept so that
   #        clients which still pass it remain compatible.
def __init__(self, writer, version, nsMap, encoding=None):
""" Constructor """
self.writer = writer
self.version = version
self.nsMap = nsMap and nsMap or {}
for ns, prefix in iteritems(self.nsMap):
if prefix == '':
self.defaultNS = ns
break
else:
self.defaultNS = ''
# Additional attr for outermost tag
self.outermostAttrs = ''
# Fill in required xmlns, if not defined
for nsPrefix, ns, attrName in [('xsi', XMLNS_XSI, 'xsiPrefix'),
('xsd', XMLNS_XSD, 'xsdPrefix')]:
prefix = self.nsMap.get(ns)
if not prefix:
prefix = nsPrefix
self.outermostAttrs += ' xmlns:{0}="{1}"'.format(prefix, ns)
self.nsMap = self.nsMap.copy()
self.nsMap[ns] = prefix
setattr(self, attrName, prefix + ":")
## Serialize an object
#
# This function assumes CheckField(info, val) was already called
# @param val the value to serialize
# @param info the field
def Serialize(self, val, info):
""" Serialize an object """
self._Serialize(val, info, self.defaultNS)
## Serialize fault detail
#
# Serializes a fault as the content of the detail element in a
# soapenv:Fault (i.e. without a LocalizedMethodFault wrapper).
#
# This function assumes CheckField(info, val) was already called
# @param val the value to serialize
# @param info the field
def SerializeFaultDetail(self, val, info):
""" Serialize an object """
      self._SerializeDataObject(val, info, ' xsi:type="{0}"'.format(val._wsdlName), self.defaultNS)
def _NSPrefix(self, ns):
""" Get xml ns prefix. self.nsMap must be set """
if ns == self.defaultNS:
return ''
prefix = self.nsMap[ns]
return prefix and prefix + ':' or ''
def _QName(self, typ, defNS):
""" Get fully qualified wsdl name (prefix:name) """
attr = ''
ns, name = GetQualifiedWsdlName(typ)
if ns == defNS:
prefix = ''
else:
try:
prefix = self.nsMap[ns]
except KeyError:
# We have not seen this ns before
prefix = ns.split(':', 1)[-1]
attr = ' xmlns:{0}="{1}"'.format(prefix, ns)
return attr, prefix and prefix + ':' + name or name
## Serialize an object to unicode (internal)
#
# @param val the value to serialize
# @param info the field
# @param defNS the default namespace
def _Serialize(self, val, info, defNS):
""" Serialize an object """
if not IsChildVersion(self.version, info.version):
return
if val is None:
if info.flags & F_OPTIONAL:
return
else:
raise TypeError('Field "{0}" is not optional'.format(info.name))
elif isinstance(val, list) and len(val) == 0:
if info.type is object:
# Make sure an empty array assigned to Any is typed
if not isinstance(val, Array):
raise TypeError('Field "{0}": Cannot assign empty native python array to an Any'.format(info.name))
elif info.flags & F_OPTIONAL:
# Skip optional non-Any
return
else:
raise TypeError('Field "{0}" not optional'.format(info.name))
if self.outermostAttrs:
attr = self.outermostAttrs
self.outermostAttrs = None
else:
attr = ''
currDefNS = defNS
# Emit default ns if tag ns is not the same
currTagNS = GetWsdlNamespace(info.version)
if currTagNS != defNS:
attr += ' xmlns="{0}"'.format(currTagNS)
currDefNS = currTagNS
if isinstance(val, DataObject):
if isinstance(val, MethodFault):
newVal = LocalizedMethodFault(fault=val, localizedMessage=val.msg)
if info.type is object:
faultType = object
else:
faultType = LocalizedMethodFault
newInfo = Object(name=info.name, type=faultType,
version=info.version, flags=info.flags)
self._SerializeDataObject(newVal, newInfo, attr, currDefNS)
else:
self._SerializeDataObject(val, info, attr, currDefNS)
elif isinstance(val, ManagedObject):
if info.type is object:
nsattr, qName = self._QName(ManagedObject, currDefNS)
attr += '{0} {1}type="{2}"'.format(nsattr, self.xsiPrefix, qName)
if val._serverGuid is not None:
attr += ' serverGuid="{0}"'.format(val._serverGuid)
# val in vim type attr is not namespace qualified
# TODO: Add a new "typens" attr?
ns, name = GetQualifiedWsdlName(Type(val))
attr += ' type="{0}"'.format(name)
self.writer.write('<{0}{1}>{2}</{3}>'.format(info.name, attr,
val._moId,
info.name))
elif isinstance(val, list):
if info.type is object:
itemType = val.Item
if (itemType is ManagedMethod or itemType is PropertyPath
or itemType is type):
tag = 'string'
typ = GetVmodlType("string[]")
elif issubclass(itemType, ManagedObject):
tag = 'ManagedObjectReference'
typ = ManagedObject.Array
else:
tag = GetWsdlName(itemType)
typ = Type(val)
nsattr, qName = self._QName(typ, currDefNS)
# For WSDL, since we set tag of ManagedObjects to ManagedObjectReferences,
# the name of its array should be ArrayOfManagedObjectReference
if qName.endswith("ArrayOfManagedObject"):
qName += "Reference"
attr += '{0} {1}type="{2}"'.format(nsattr, self.xsiPrefix, qName)
self.writer.write('<{0}{1}>'.format(info.name, attr))
itemInfo = Object(name=tag, type=itemType,
version=info.version, flags=info.flags)
for it in val:
self._Serialize(it, itemInfo, currDefNS)
self.writer.write('</{0}>'.format(info.name))
else:
itemType = info.type.Item
itemInfo = Object(name=info.name, type=itemType,
version=info.version, flags=info.flags)
for it in val:
self._Serialize(it, itemInfo, defNS)
elif isinstance(val, type) or isinstance(val, type(Exception)):
if info.type is object:
attr += ' {0}type="{1}string"'.format(self.xsiPrefix, self.xsdPrefix)
self.writer.write('<{0}{1}>{2}</{0}>'.format(
info.name, attr, GetWsdlName(val)))
elif isinstance(val, ManagedMethod):
if info.type is object:
attr += ' {0}type="{1}string"'.format(self.xsiPrefix, self.xsdPrefix)
self.writer.write('<{0}{1}>{2}</{0}>'.format(
info.name, attr, val.info.wsdlName))
elif isinstance(val, datetime):
if info.type is object:
nsattr, qName = self._QName(Type(val), currDefNS)
attr += '{0} {1}type="{2}"'.format(nsattr, self.xsiPrefix, qName)
result = Iso8601.ISO8601Format(val)
self.writer.write('<{0}{1}>{2}</{0}>'.format(info.name, attr, result))
elif isinstance(val, binary):
if info.type is object:
nsattr, qName = self._QName(Type(val), currDefNS)
attr += '{0} {1}type="{2}"'.format(nsattr, self.xsiPrefix, qName)
result = base64.b64encode(val)
if PY3:
# In python3 the bytes result after the base64 encoding has a
# leading 'b' which causes error when we use it to construct the
# soap message. Workaround the issue by converting the result to
# string. Since the result of base64 encoding contains only subset
# of ASCII chars, converting to string will not change the value.
result = str(result, XML_ENCODING)
self.writer.write('<{0}{1}>{2}</{0}>'.format(info.name, attr, result))
elif isinstance(val, bool):
if info.type is object:
nsattr, qName = self._QName(Type(val), currDefNS)
attr += '{0} {1}type="{2}"'.format(nsattr, self.xsiPrefix, qName)
result = val and "true" or "false"
self.writer.write('<{0}{1}>{2}</{0}>'.format(info.name, attr, result))
elif isinstance(val, six.integer_types) or isinstance(val, float):
if info.type is object:
nsattr, qName = self._QName(Type(val), currDefNS)
attr += '{0} {1}type="{2}"'.format(nsattr, self.xsiPrefix, qName)
result = six.text_type(val)
self.writer.write('<{0}{1}>{2}</{0}>'.format(info.name, attr, result))
elif isinstance(val, Enum):
if info.type is object:
nsattr, qName = self._QName(Type(val), currDefNS)
attr += '{0} {1}type="{2}"'.format(nsattr, self.xsiPrefix, qName)
self.writer.write('<{0}{1}>{2}</{0}>'.format(info.name, attr, val))
else:
if info.type is object:
if isinstance(val, PropertyPath):
attr += ' {0}type="{1}string"'.format(self.xsiPrefix, self.xsdPrefix)
else:
nsattr, qName = self._QName(Type(val), currDefNS)
attr += '{0} {1}type="{2}"'.format(nsattr, self.xsiPrefix, qName)
if isinstance(val, six.binary_type):
# Use UTF-8 rather than self.encoding. self.encoding is for
# output of serializer, while 'val' is our input. And regardless
# of what our output is, our input should be always UTF-8. Yes,
            # it means that if you emit output in an encoding other than UTF-8,
            # you cannot serialize it again. That's a feature, not a bug.
val = val.decode(XML_ENCODING)
result = XmlEscape(val)
self.writer.write('<{0}{1}>{2}</{0}>'.format(info.name, attr, result))
   ## Serialize a data object (internal)
#
# @param val the value to serialize
# @param info the field
   # @param attr attributes to be serialized in the outermost element
# @param currDefNS the current default namespace
def _SerializeDataObject(self, val, info, attr, currDefNS):
if info.flags & F_LINK:
# Attribute is a link and Object is present instead of its key.
# We need to serialize just the key and not the entire object
self._Serialize(val.key, info, currDefNS)
return
dynType = GetCompatibleType(Type(val), self.version)
if dynType != info.type:
nsattr, qName = self._QName(dynType, currDefNS)
attr += '{0} {1}type="{2}"'.format(nsattr, self.xsiPrefix, qName)
self.writer.write('<{0}{1}>'.format(info.name, attr))
if dynType is LocalizedMethodFault:
# Serialize a MethodFault as LocalizedMethodFault on wire
# See PR 670229
for prop in val._GetPropertyList():
propVal = getattr(val, prop.name)
if prop.name == 'fault':
propVal = copy.copy(propVal)
propVal.msg = None
self._SerializeDataObject(propVal, prop, '', currDefNS)
else:
self._Serialize(propVal, prop, currDefNS)
else:
for prop in val._GetPropertyList():
self._Serialize(getattr(val, prop.name), prop, currDefNS)
self.writer.write('</{0}>'.format(info.name))
class ParserError(KeyError):
# NOTE (hartsock): extends KeyError since parser logic is written to
    # catch KeyError types. Normally, I would want ParserError to be a root
# type for all parser faults.
pass
def ParseData(parser, data):
# NOTE (hartsock): maintaining library internal consistency here, this is
# a refactoring that rolls up some repeated code blocks into a method so
# that we can refactor XML parsing behavior in a single place.
try:
if isinstance(data, six.binary_type) or isinstance(data, six.text_type):
parser.Parse(data)
else:
parser.ParseFile(data)
except Exception:
# wrap all parser faults with additional information for later
# bug reporting on the XML parser code itself.
(ec, ev, tb) = sys.exc_info()
line = parser.CurrentLineNumber
col = parser.CurrentColumnNumber
pe = ParserError("xml document: "
"{0} parse error at: "
"line:{1}, col:{2}".format(data, line, col))
        # use six.reraise for python 2.x and 3.x compatibility
reraise(ParserError, pe, tb)
## Deserialize an object from a file or string
#
# This function will deserialize one top-level XML node.
# @param data the data to deserialize (a file object or string)
# @param resultType expected result type
# @param stub stub for moRef deserialization
# @return the deserialized object
def Deserialize(data, resultType=object, stub=None):
parser = ParserCreate(namespace_separator=NS_SEP)
ds = SoapDeserializer(stub)
ds.Deserialize(parser, resultType)
ParseData(parser, data)
return ds.GetResult()
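## Example usage (editor's sketch, not part of the original module). The XML
## fragment and the helper name below are illustrative assumptions only; any
## single top-level element whose text maps onto resultType behaves the same.
def _ExampleDeserializeString():
   xml = '<returnval xmlns="urn:vim25">some text</returnval>'
   # Returns the element's character data coerced to the requested result
   # type, i.e. the string 'some text'.
   return Deserialize(xml, resultType=str)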
## Expat deserializer namespace handler
class ExpatDeserializerNSHandlers:
def __init__(self, nsMap=None):
# nsMap is a dict of ns prefix to a stack (list) of namespaces
# The last element of the stack is current namespace
if not nsMap:
nsMap = {}
self.nsMap = nsMap
## Get current default ns
def GetCurrDefNS(self):
return self._GetNamespaceFromPrefix()
## Get namespace and wsdl name from tag
def GetNSAndWsdlname(self, tag):
""" Map prefix:name tag into ns, name """
idx = tag.find(":")
if idx >= 0:
prefix, name = tag[:idx], tag[idx + 1:]
else:
prefix, name = None, tag
# Map prefix to ns
ns = self._GetNamespaceFromPrefix(prefix)
return ns, name
def _GetNamespaceFromPrefix(self, prefix = None):
namespaces = self.nsMap.get(prefix)
if namespaces:
ns = namespaces[-1]
else:
ns = ""
return ns
## Handle namespace begin
def StartNamespaceDeclHandler(self, prefix, uri):
namespaces = self.nsMap.get(prefix)
if namespaces:
namespaces.append(uri)
else:
self.nsMap[prefix] = [uri]
## Handle namespace end
def EndNamespaceDeclHandler(self, prefix):
self.nsMap[prefix].pop()
## SOAP -> Python Deserializer
class SoapDeserializer(ExpatDeserializerNSHandlers):
## Constructor
#
# @param self self
# @param stub Stub adapter to use for deserializing moRefs
def __init__(self, stub=None, version=None):
ExpatDeserializerNSHandlers.__init__(self)
self.stub = stub
if version:
self.version = version
elif self.stub:
self.version = self.stub.version
else:
self.version = None
self.result = None
## Deserialize a SOAP object
#
# @param self self
# @param parser an expat parser
# @param resultType the static type of the result
# @param isFault true if the response is a fault response
# @param nsMap a dict of prefix -> [xml ns stack]
# @return the deserialized object
def Deserialize(self, parser, resultType=object, isFault=False, nsMap=None):
self.isFault = isFault
self.parser = parser
self.origHandlers = GetHandlers(parser)
SetHandlers(parser, GetHandlers(self))
self.resultType = resultType
self.stack = []
self.data = ""
self.serverGuid = None
if issubclass(resultType, list):
self.result = resultType()
else:
self.result = None
if not nsMap:
nsMap = {}
self.nsMap = nsMap
## Get the result of deserialization
# The links will not be resolved. User needs to explicitly resolve them
# using LinkResolver.
def GetResult(self):
return self.result
def SplitTag(self, tag):
""" Split tag into ns, name """
idx = tag.find(NS_SEP)
if idx >= 0:
return tag[:idx], tag[idx + 1:]
else:
return "", tag
def LookupWsdlType(self, ns, name, allowManagedObjectReference=False):
""" Lookup wsdl type. Handle special case for some vmodl version """
try:
return GetWsdlType(ns, name)
except KeyError:
if allowManagedObjectReference:
if name.endswith('ManagedObjectReference') and ns == XMLNS_VMODL_BASE:
return GetWsdlType(ns, name[:-len('Reference')])
# WARNING!!! This is a temporary hack to get around server not
         # honoring @service tag (see bug 521744). Once it is fixed, I am
# going to back out this change
if name.endswith('ManagedObjectReference') and allowManagedObjectReference:
return GetWsdlType(XMLNS_VMODL_BASE, name[:-len('Reference')])
return GuessWsdlType(name)
## Handle an opening XML tag
def StartElementHandler(self, tag, attr):
self.data = ""
self.serverGuid = None
deserializeAsLocalizedMethodFault = True
if not self.stack:
if self.isFault:
ns, name = self.SplitTag(tag)
objType = self.LookupWsdlType(ns, name[:-5])
# Only top level soap fault should be deserialized as method fault
deserializeAsLocalizedMethodFault = False
else:
objType = self.resultType
elif isinstance(self.stack[-1], list):
objType = self.stack[-1].Item
elif isinstance(self.stack[-1], DataObject):
# TODO: Check ns matches DataObject's namespace
ns, name = self.SplitTag(tag)
objType = self.stack[-1]._GetPropertyInfo(name).type
# LocalizedMethodFault <fault> tag should be deserialized as method fault
if name == "fault" and isinstance(self.stack[-1], LocalizedMethodFault):
deserializeAsLocalizedMethodFault = False
else:
raise TypeError("Invalid type for tag {0}".format(tag))
xsiType = attr.get(XSI_TYPE)
if xsiType:
# Ignore dynamic type for TypeName, MethodName, PropertyPath
# @bug 150459
if not (objType is type or objType is ManagedMethod or \
objType is PropertyPath):
ns, name = self.GetNSAndWsdlname(xsiType)
dynType = self.LookupWsdlType(ns, name, allowManagedObjectReference=True)
# TODO: Should be something like...
# dynType must be narrower than objType, except for
# ManagedObjectReference
if not (issubclass(dynType, list) and issubclass(objType, list)):
objType = dynType
else:
if issubclass(objType, list):
objType = objType.Item
if self.version:
objType = GetCompatibleType(objType, self.version)
if issubclass(objType, ManagedObject):
typeAttr = attr[u('type')]
# val in vim type attr is not namespace qualified
# However, this doesn't hurt to strip out namespace
# TODO: Get the ns from "typens" attr?
ns, name = self.GetNSAndWsdlname(typeAttr)
if u('serverGuid') in attr:
self.serverGuid = attr[u('serverGuid')]
self.stack.append(GuessWsdlType(name))
elif issubclass(objType, DataObject) or issubclass(objType, list):
if deserializeAsLocalizedMethodFault and issubclass(objType, Exception):
objType = LocalizedMethodFault
self.stack.append(objType())
else:
self.stack.append(objType)
## Handle a closing XML tag
def EndElementHandler(self, tag):
try:
obj = self.stack.pop()
except IndexError:
SetHandlers(self.parser, self.origHandlers)
handler = self.parser.EndElementHandler
del self.parser, self.origHandlers, self.stack, self.resultType
if handler:
return handler(tag)
return
data = self.data
if isinstance(obj, type) or isinstance(obj, type(Exception)):
if obj is type:
if data is None or data == '':
obj = None
else:
try:
# val in type val is not namespace qualified
# However, this doesn't hurt to strip out namespace
ns, name = self.GetNSAndWsdlname(data)
obj = GuessWsdlType(name)
except KeyError:
raise TypeError(data)
elif obj is ManagedMethod:
# val in Method val is not namespace qualified
# However, this doesn't hurt to strip out namespace
ns, name = self.GetNSAndWsdlname(data)
obj = GuessWsdlMethod(name)
elif obj is bool:
if data == "0" or data.lower() == "false":
obj = bool(False)
elif data == "1" or data.lower() == "true":
obj = bool(True)
else:
raise TypeError(data)
elif obj is binary:
# Raise type error if decode failed
obj = obj(base64.b64decode(data))
elif obj is str:
try:
obj = str(data)
except ValueError:
obj = data
elif obj is datetime:
obj = pyVmomi.Iso8601.ParseISO8601(data)
if not obj:
raise TypeError(data)
# issubclass is very expensive. Test last
elif issubclass(obj, ManagedObject):
obj = obj(data, self.stub, self.serverGuid)
elif issubclass(obj, Enum):
obj = getattr(obj, data)
else:
obj = obj(data)
elif isinstance(obj, LocalizedMethodFault):
obj.fault.msg = obj.localizedMessage
obj = obj.fault
if self.stack:
top = self.stack[-1]
if isinstance(top, list):
top.append(obj)
elif isinstance(top, DataObject):
ns, name = self.SplitTag(tag)
info = top._GetPropertyInfo(name)
if not isinstance(obj, list) and issubclass(info.type, list):
getattr(top, info.name).append(obj)
else:
setattr(top, info.name, obj)
else:
ns, name = self.SplitTag(tag)
setattr(top, name, obj)
else:
if not isinstance(obj, list) and issubclass(self.resultType, list):
self.result.append(obj)
else:
self.result = obj
SetHandlers(self.parser, self.origHandlers)
del self.parser, self.origHandlers, self.stack, self.resultType
## Handle text data
def CharacterDataHandler(self, data):
self.data += data
## SOAP Response Deserializer class
class SoapResponseDeserializer(ExpatDeserializerNSHandlers):
## Constructor
#
# @param self self
# @param stub Stub adapter to use for deserializing moRefs
def __init__(self, stub):
ExpatDeserializerNSHandlers.__init__(self)
self.stub = stub
self.deser = SoapDeserializer(stub)
self.soapFaultTag = XMLNS_SOAPENV + NS_SEP + "Fault"
## Deserialize a SOAP response
#
# @param self self
# @param response the response (a file object or a string)
# @param resultType expected result type
# @param nsMap a dict of prefix -> [xml ns stack]
# @return the deserialized object
def Deserialize(self, response, resultType, nsMap=None):
self.resultType = resultType
self.stack = []
self.msg = ""
self.deser.result = None
self.isFault = False
self.parser = ParserCreate(namespace_separator=NS_SEP)
try: # buffer_text only in python >= 2.3
self.parser.buffer_text = True
except AttributeError:
pass
if not nsMap:
nsMap = {}
self.nsMap = nsMap
SetHandlers(self.parser, GetHandlers(self))
ParseData(self.parser, response)
result = self.deser.GetResult()
if self.isFault:
if result is None:
result = GetVmodlType("vmodl.RuntimeFault")()
result.msg = self.msg
del self.resultType, self.stack, self.parser, self.msg, self.data, self.nsMap
return result
## Handle an opening XML tag
def StartElementHandler(self, tag, attr):
self.data = ""
if tag == self.soapFaultTag:
self.isFault = True
elif self.isFault and tag == "detail":
self.deser.Deserialize(self.parser, object, True, self.nsMap)
elif tag.endswith("Response"):
self.deser.Deserialize(self.parser, self.resultType, False, self.nsMap)
## Handle text data
def CharacterDataHandler(self, data):
self.data += data
## Handle a closing XML tag
def EndElementHandler(self, tag):
if self.isFault and tag == "faultstring":
try:
self.msg = str(self.data)
except ValueError:
self.msg = self.data
## Base class that implements common functionality for stub adapters.
## Method that must be provided by the implementation class:
## -- InvokeMethod(ManagedObject mo, Object methodInfo, Object[] args)
class StubAdapterBase(StubAdapterAccessorMixin):
def __init__(self, version):
StubAdapterAccessorMixin.__init__(self)
self.ComputeVersionInfo(version)
## Compute the version information for the specified namespace
#
# @param ns the namespace
def ComputeVersionInfo(self, version):
versionNS = GetVersionNamespace(version)
if versionNS.find("/") >= 0:
self.versionId = '"urn:{0}"'.format(versionNS)
else:
self.versionId = ''
self.version = version
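## Example (editor's sketch, not part of the original module): a minimal
## illustration of the contract described above. A concrete adapter only has
## to supply InvokeMethod on top of StubAdapterBase; this toy implementation
## simply echoes the arguments instead of talking to a server.
class _EchoStubAdapter(StubAdapterBase):
   def InvokeMethod(self, mo, info, args):
      # A real adapter would serialize the call, send it to the server and
      # deserialize the response; see SoapStubAdapter below.
      return args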
## Base class that implements common functionality for SOAP-based stub adapters.
## Method that must be provided by the implementation class:
## -- InvokeMethod(ManagedObject mo, Object methodInfo, Object[] args)
class SoapStubAdapterBase(StubAdapterBase):
## Serialize a VMOMI request to SOAP
#
# @param version API version
# @param mo the 'this'
# @param info method info
# @param args method arguments
# @return the serialized request
def SerializeRequest(self, mo, info, args):
if not IsChildVersion(self.version, info.version):
raise GetVmodlType("vmodl.fault.MethodNotFound")(receiver=mo,
method=info.name)
nsMap = SOAP_NSMAP.copy()
defaultNS = GetWsdlNamespace(self.version)
nsMap[defaultNS] = ''
# Add xml header and soap envelope
result = [XML_HEADER, '\n', SOAP_ENVELOPE_START]
# Add request context and samlToken to soap header, if exists
reqContexts = GetRequestContext()
if self.requestContext:
reqContexts.update(self.requestContext)
samlToken = getattr(self, 'samlToken', None)
if reqContexts or samlToken:
result.append(SOAP_HEADER_START)
for key, val in iteritems(reqContexts):
# Note: Support req context of string type only
if not isinstance(val, six.string_types):
raise TypeError("Request context key ({0}) has non-string value ({1}) of {2}".format(key, val, type(val)))
ret = _SerializeToUnicode(val,
Object(name=key, type=str, version=self.version),
self.version,
nsMap)
result.append(ret)
if samlToken:
result.append('{0} {1} {2}'.format(WSSE_HEADER_START,
samlToken,
WSSE_HEADER_END))
result.append(SOAP_HEADER_END)
result.append('\n')
# Serialize soap body
result.extend([SOAP_BODY_START,
'<{0} xmlns="{1}">'.format(info.wsdlName, defaultNS),
_SerializeToUnicode(mo, Object(name="_this", type=ManagedObject,
version=self.version),
self.version, nsMap)])
# Serialize soap request parameters
for (param, arg) in zip(info.params, args):
result.append(_SerializeToUnicode(arg, param, self.version, nsMap))
result.extend(['</{0}>'.format(info.wsdlName), SOAP_BODY_END, SOAP_ENVELOPE_END])
return ''.join(result).encode(XML_ENCODING)
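   ## For reference (editor's sketch; header contents and namespaces depend on
   ## constants defined elsewhere in this module and on the negotiated
   ## version), the request serialized above has roughly this shape:
   #
   #   <?xml version="1.0" encoding="UTF-8"?>
   #   <soapenv:Envelope ...>
   #     <soapenv:Header> <!-- optional request contexts / SAML token --> </soapenv:Header>
   #     <soapenv:Body>
   #       <RetrieveServiceContent xmlns="urn:vim25">
   #         <_this type="ServiceInstance">ServiceInstance</_this>
   #       </RetrieveServiceContent>
   #     </soapenv:Body>
   #   </soapenv:Envelope>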
## Subclass of HTTPConnection that connects over a Unix domain socket
## instead of a TCP port. The path of the socket is passed in place of
## the hostname. Fairly gross but does the job.
# NOTE (hartsock): rewrite this class as a wrapper, see HTTPSConnectionWrapper
# below for a guide.
class UnixSocketConnection(http_client.HTTPConnection):
# The HTTPConnection ctor expects a single argument, which it interprets
# as the host to connect to; for UnixSocketConnection, we instead interpret
# the parameter as the filesystem path of the Unix domain socket.
def __init__(self, path):
# Pass '' as the host to HTTPConnection; it doesn't really matter
# what we pass (since we've overridden the connect method) as long
# as it's a valid string.
http_client.HTTPConnection.__init__(self, '')
self.path = path
def connect(self):
# Hijack the connect method of HTTPConnection to connect to the
# specified Unix domain socket instead. Obey the same contract
# as HTTPConnection.connect, which puts the socket in self.sock.
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
sock.connect(self.path)
self.sock = sock
try:
# The ssl module is not available in python versions less than 2.6
SSL_THUMBPRINTS_SUPPORTED = True
import ssl
import hashlib
def _VerifyThumbprint(thumbprint, connection):
'''If there is a thumbprint, connect to the server and verify that the
SSL certificate matches the given thumbprint. An exception is thrown
if there is a mismatch.'''
if thumbprint and isinstance(connection, http_client.HTTPSConnection):
if not connection.sock:
connection.connect()
derCert = connection.sock.getpeercert(True)
sha1 = hashlib.sha1()
sha1.update(derCert)
sha1Digest = sha1.hexdigest().lower()
if sha1Digest != thumbprint:
raise ThumbprintMismatchException(thumbprint, sha1Digest)
# Function used to wrap sockets with SSL
_SocketWrapper = ssl.wrap_socket
except ImportError:
SSL_THUMBPRINTS_SUPPORTED = False
def _VerifyThumbprint(thumbprint, connection):
if thumbprint and isinstance(connection, http_client.HTTPSConnection):
raise Exception(
"Thumbprint verification not supported on python < 2.6")
def _SocketWrapper(rawSocket, keyfile, certfile, *args, **kwargs):
wrappedSocket = socket.ssl(rawSocket, keyfile, certfile)
return http_client.FakeSocket(rawSocket, wrappedSocket)
## https connection wrapper
#
# NOTE (hartsock): do not override core library types or implementations
# directly because this makes brittle code that is too easy to break and
# closely tied to implementation details we do not control. Instead, wrap
# the core object to introduce additional behaviors.
#
# Purpose:
# Support ssl.wrap_socket params which are missing from httplib
# HTTPSConnection (e.g. ca_certs)
# Note: Only works iff the ssl params are passed in as kwargs
class HTTPSConnectionWrapper(object):
def __init__(self, *args, **kwargs):
wrapped = http_client.HTTPSConnection(*args, **kwargs)
# Extract ssl.wrap_socket param unknown to httplib.HTTPSConnection,
# and push back the params in connect()
self._sslArgs = {}
tmpKwargs = kwargs.copy()
for key in ["server_side", "cert_reqs", "ssl_version", "ca_certs",
"do_handshake_on_connect", "suppress_ragged_eofs",
"ciphers"]:
if key in tmpKwargs:
self._sslArgs[key] = tmpKwargs.pop(key)
self._wrapped = wrapped
   ## Override connect to allow us to pass in additional ssl parameters to
# ssl.wrap_socket (e.g. cert_reqs, ca_certs for ca cert verification)
def connect(self, wrapped):
if len(self._sslArgs) == 0 or hasattr(self, '_baseclass'):
# No override
return wrapped.connect
# Big hack. We have to copy and paste the httplib connect fn for
      # each python version in order to handle extra ssl parameters. Yuk!
if hasattr(self, "source_address"):
# Python 2.7
sock = socket.create_connection((self.host, self.port),
self.timeout, self.source_address)
if wrapped._tunnel_host:
wrapped.sock = sock
wrapped._tunnel()
wrapped.sock = ssl.wrap_socket(sock, self.key_file, self.cert_file, **self._sslArgs)
elif hasattr(self, "timeout"):
# Python 2.6
sock = socket.create_connection((self.host, self.port), self.timeout)
wrapped.sock = ssl.wrap_socket(sock, self.key_file, self.cert_file, **self._sslArgs)
return wrapped.connect
# TODO: Additional verification of peer cert if needed
#cert_reqs = self._sslArgs.get("cert_reqs", ssl.CERT_NONE)
#ca_certs = self._sslArgs.get("ca_certs", None)
#if cert_reqs != ssl.CERT_NONE and ca_certs:
# if hasattr(self.sock, "getpeercert"):
# # TODO: verify peer cert
# dercert = self.sock.getpeercert(False)
# # pemcert = ssl.DER_cert_to_PEM_cert(dercert)
def __getattr__(self, item):
if item == 'connect':
return self.connect(self._wrapped)
return getattr(self._wrapped, item)
## Stand-in for the HTTPSConnection class that will connect to a proxy and
## issue a CONNECT command to start an SSL tunnel.
class SSLTunnelConnection(object):
# @param proxyPath The path to pass to the CONNECT command.
def __init__(self, proxyPath):
self.proxyPath = proxyPath
# Connects to a proxy server and initiates a tunnel to the destination
# specified by proxyPath. If successful, a new HTTPSConnection is returned.
#
# @param path The destination URL path.
# @param key_file The SSL key file to use when wrapping the socket.
# @param cert_file The SSL certificate file to use when wrapping the socket.
# @param kwargs In case caller passed in extra parameters not handled by
# SSLTunnelConnection
def __call__(self, path, key_file=None, cert_file=None, **kwargs):
# Only pass in the named arguments that HTTPConnection constructor
# understands
tmpKwargs = {}
for key in http_client.HTTPConnection.__init__.__code__.co_varnames:
if key in kwargs and key != 'self':
tmpKwargs[key] = kwargs[key]
tunnel = http_client.HTTPConnection(path, **tmpKwargs)
tunnel.request('CONNECT', self.proxyPath)
resp = tunnel.getresponse()
if resp.status != 200:
raise http_client.HTTPException("{0} {1}".format(resp.status, resp.reason))
retval = http_client.HTTPSConnection(path)
retval.sock = _SocketWrapper(tunnel.sock,
keyfile=key_file, certfile=cert_file)
return retval
class GzipReader:
GZIP = 1
DEFLATE = 2
def __init__(self, rfile, encoding=GZIP, readChunkSize=512):
self.rfile = rfile
self.chunks = []
self.bufSize = 0 # Remaining buffer
assert(encoding in (GzipReader.GZIP, GzipReader.DEFLATE))
self.encoding = encoding
self.unzip = None
self.readChunkSize = readChunkSize
def _CreateUnzip(self, firstChunk):
import zlib
if self.encoding == GzipReader.GZIP:
wbits = zlib.MAX_WBITS + 16
elif self.encoding == GzipReader.DEFLATE:
# Sniff out real deflate format
chunkLen = len(firstChunk)
# Assume raw deflate
wbits = -zlib.MAX_WBITS
         # gzip magic header bytes: \x1f \x8b \x08
         if firstChunk[:3] == b'\x1f\x8b\x08':
# gzip: Apache mod_deflate will send gzip. Yurk!
wbits = zlib.MAX_WBITS + 16
elif chunkLen >= 2:
b0 = ord(firstChunk[0])
b1 = ord(firstChunk[1])
if (b0 & 0xf) == 8 and (((b0 * 256 + b1)) % 31) == 0:
# zlib deflate
wbits = min(((b0 & 0xf0) >> 4) + 8, zlib.MAX_WBITS)
else:
assert(False)
self.unzip = zlib.decompressobj(wbits)
return self.unzip
def read(self, bytes=-1):
chunks = self.chunks
bufSize = self.bufSize
while bufSize < bytes or bytes == -1:
# Read and decompress
chunk = self.rfile.read(self.readChunkSize)
         if self.unzip is None:
self._CreateUnzip(chunk)
if chunk:
inflatedChunk = self.unzip.decompress(chunk)
bufSize += len(inflatedChunk)
chunks.append(inflatedChunk)
else:
# Returns whatever we have
break
if bufSize <= bytes or bytes == -1:
leftoverBytes = 0
leftoverChunks = []
else:
leftoverBytes = bufSize - bytes
# Adjust last chunk to hold only the left over bytes
lastChunk = chunks.pop()
chunks.append(lastChunk[:-leftoverBytes])
leftoverChunks = [lastChunk[-leftoverBytes:]]
self.chunks = leftoverChunks
self.bufSize = leftoverBytes
buf = b"".join(chunks)
return buf
## SOAP stub adapter object
class SoapStubAdapter(SoapStubAdapterBase):
## Constructor
#
# The endpoint can be specified individually as either a host/port
# combination, or with a URL (using a url= keyword).
#
# @param self self
# @param host host
# @param port port (pass negative port number for no SSL)
# @param **** Deprecated. Please use version instead **** ns API namespace
# @param path location of SOAP VMOMI service
# @param url URL (overrides host, port, path if set)
# @param sock unix domain socket path (overrides host, port, url if set)
# @param poolSize size of HTTP connection pool
# @param certKeyFile The path to the PEM-encoded SSL private key file.
# @param certFile The path to the PEM-encoded SSL certificate file.
# @param httpProxyHost The host name of the proxy server.
# @param httpProxyPort The proxy server port.
# @param sslProxyPath Path to use when tunneling through VC's reverse proxy.
# @param thumbprint The SHA1 thumbprint of the server's SSL certificate.
# Some use a thumbprint of the form xx:xx:xx..:xx. We ignore the ":"
# characters. If set to None, any thumbprint is accepted.
# @param cacertsFile CA certificates file in PEM format
# @param version API version
# @param connectionPoolTimeout Timeout in secs for idle connections in client pool. Use -1 to disable any timeout.
# @param samlToken SAML Token that should be used in SOAP security header for login
# @param sslContext SSL Context describing the various SSL options. It is only
# supported in Python 2.7.9 or higher.
def __init__(self, host='localhost', port=443, ns=None, path='/sdk',
url=None, sock=None, poolSize=5,
certFile=None, certKeyFile=None,
httpProxyHost=None, httpProxyPort=80, sslProxyPath=None,
thumbprint=None, cacertsFile=None, version=None,
acceptCompressedResponses=True,
connectionPoolTimeout=CONNECTION_POOL_IDLE_TIMEOUT_SEC,
samlToken=None, sslContext=None, requestContext=None):
if ns:
assert(version is None)
version = versionMap[ns]
elif not version:
version = 'vim.version.version1'
SoapStubAdapterBase.__init__(self, version=version)
self.cookie = ""
if sock:
self.scheme = UnixSocketConnection
# Store sock in the host member variable because that's where
# the UnixSocketConnection ctor expects to find it -- see above
self.host = sock
elif url:
scheme, self.host, urlpath = urlparse(url)[:3]
# Only use the URL path if it's sensible, otherwise use the path
# keyword argument as passed in.
if urlpath not in ('', '/'):
path = urlpath
self.scheme = scheme == "http" and http_client.HTTPConnection \
or scheme == "https" and HTTPSConnectionWrapper
else:
port, self.scheme = port < 0 and (-port, http_client.HTTPConnection) \
or (port, HTTPSConnectionWrapper)
if host.find(':') != -1: # is IPv6?
host = '[' + host + ']'
self.host = '{0}:{1}'.format(host, port)
self.path = path
if thumbprint:
self.thumbprint = thumbprint.replace(":", "").lower()
if len(self.thumbprint) != 40:
raise Exception("Invalid SHA1 thumbprint -- {0}".format(thumbprint))
else:
self.thumbprint = None
self.is_ssl_tunnel = False
if sslProxyPath:
self.scheme = SSLTunnelConnection(sslProxyPath)
self.is_ssl_tunnel = True
elif httpProxyHost:
if self.scheme == HTTPSConnectionWrapper:
self.scheme = SSLTunnelConnection(self.host)
self.is_ssl_tunnel = True
else:
if url:
self.path = url
else:
self.path = "http://{0}/{1}".format(self.host, path)
# Swap the actual host with the proxy.
self.host = "{0}:{1}".format(httpProxyHost, httpProxyPort)
self.poolSize = poolSize
self.pool = []
self.connectionPoolTimeout = connectionPoolTimeout
self.lock = threading.Lock()
self.schemeArgs = {}
if certKeyFile:
self.schemeArgs['key_file'] = certKeyFile
if certFile:
self.schemeArgs['cert_file'] = certFile
if cacertsFile:
self.schemeArgs['ca_certs'] = cacertsFile
self.schemeArgs['cert_reqs'] = ssl.CERT_REQUIRED
if sslContext:
self.schemeArgs['context'] = sslContext
self.samlToken = samlToken
self.requestContext = requestContext
self.requestModifierList = []
self._acceptCompressedResponses = acceptCompressedResponses
# Force a socket shutdown. Before python 2.7, ssl will fail to close
# the socket (http://bugs.python.org/issue10127).
# Not making this a part of the actual _HTTPSConnection since the internals
# of the httplib.HTTP*Connection seem to pass around the descriptors and
# depend on the behavior that close() still leaves the socket semi-functional.
if sys.version_info[:2] < (2,7):
def _CloseConnection(self, conn):
if self.scheme == HTTPSConnectionWrapper and conn.sock:
conn.sock.shutdown(socket.SHUT_RDWR)
conn.close()
else:
def _CloseConnection(self, conn):
conn.close()
# Context modifier used to modify the SOAP request.
   # @param func The func that takes in the serialized message and modifies
# the request. The func is appended to the requestModifierList and then
# popped after the request is modified.
@contextlib.contextmanager
def requestModifier(self, func):
self.requestModifierList.append(func)
try:
yield
finally:
self.requestModifierList.pop()
## Invoke a managed method
#
# @param self self
# @param mo the 'this'
# @param info method info
# @param args arguments
# @param outerStub If not-None, this should be a reference to the wrapping
# stub adapter. Any ManagedObject references returned from this method
# will have outerStub in their _stub field. Note that this also changes
# the return type to a tuple containing the HTTP status and the
# deserialized object so that it's easier to distinguish an API error from
# a connection error.
def InvokeMethod(self, mo, info, args, outerStub=None):
if outerStub is None:
outerStub = self
headers = {'Cookie' : self.cookie,
'SOAPAction' : self.versionId,
'Content-Type': 'text/xml; charset={0}'.format(XML_ENCODING)}
if self._acceptCompressedResponses:
headers['Accept-Encoding'] = 'gzip, deflate'
req = self.SerializeRequest(mo, info, args)
for modifier in self.requestModifierList:
req = modifier(req)
conn = self.GetConnection()
try:
conn.request('POST', self.path, req, headers)
resp = conn.getresponse()
except (socket.error, http_client.HTTPException):
# The server is probably sick, drop all of the cached connections.
self.DropConnections()
raise
# NOTE (hartsocks): this cookie handling code should go away in a future
      # release. The strings 'set-cookie' and 'Set-Cookie' are both
# acceptable, but the supporting library may have a bug making it
# case sensitive when it shouldn't be. The term 'set-cookie' will occur
# more frequently than 'Set-Cookie' based on practical testing.
cookie = resp.getheader('set-cookie')
if cookie is None:
# try case-sensitive header for compatibility
cookie = resp.getheader('Set-Cookie')
status = resp.status
if cookie:
self.cookie = cookie
if status == 200 or status == 500:
try:
fd = resp
encoding = resp.getheader('Content-Encoding', 'identity').lower()
if encoding == 'gzip':
fd = GzipReader(resp, encoding=GzipReader.GZIP)
elif encoding == 'deflate':
fd = GzipReader(resp, encoding=GzipReader.DEFLATE)
deserializer = SoapResponseDeserializer(outerStub)
obj = deserializer.Deserialize(fd, info.result)
except Exception as exc:
self._CloseConnection(conn)
# NOTE (hartsock): This feels out of place. As a rule the lexical
# context that opens a connection should also close it. However,
# in this code the connection is passed around and closed in other
# contexts (ie: methods) that we are blind to here. Refactor this.
# The server might be sick, drop all of the cached connections.
self.DropConnections()
raise exc
else:
resp.read()
self.ReturnConnection(conn)
if outerStub != self:
return (status, obj)
if status == 200:
return obj
else:
raise obj # pylint: disable-msg=E0702
else:
self._CloseConnection(conn)
raise http_client.HTTPException("{0} {1}".format(resp.status, resp.reason))
## Clean up connection pool to throw away idle timed-out connections
# SoapStubAdapter lock must be acquired before this method is called.
def _CloseIdleConnections(self):
if self.connectionPoolTimeout >= 0:
currentTime = time.time()
idleConnections = []
for conn, lastAccessTime in self.pool:
idleTime = currentTime - lastAccessTime
if idleTime >= self.connectionPoolTimeout:
i = self.pool.index((conn, lastAccessTime))
idleConnections = self.pool[i:]
self.pool = self.pool[:i]
break
for conn, _ in idleConnections:
self._CloseConnection(conn)
## Get a HTTP connection from the pool
def GetConnection(self):
self.lock.acquire()
self._CloseIdleConnections()
if self.pool:
result, _ = self.pool.pop(0)
self.lock.release()
else:
self.lock.release()
result = self.scheme(self.host, **self.schemeArgs)
# Always disable NAGLE algorithm
#
# Python httplib (2.6 and below) is splitting a http request into 2
# packets (header and body). It first send the header, but will not
# send the body until it receives the ack (for header) from server
# [NAGLE at work]. The delayed ack time on ESX is around 40 - 100 ms
         # (depends on version) and can go up to 200 ms. This effectively slows
         # down each pyVmomi call by the same amount of time.
         #
         # Disabling NAGLE on the client forces both header and body packets to
         # go out immediately, eliminating the delay.
#
# This bug is fixed in python 2.7, however, only if the request
# body is a string (which is true for now)
if sys.version_info[:2] < (2,7):
self.DisableNagle(result)
_VerifyThumbprint(self.thumbprint, result)
return result
## Drop all cached connections to the server.
def DropConnections(self):
self.lock.acquire()
oldConnections = self.pool
self.pool = []
self.lock.release()
for conn, _ in oldConnections:
self._CloseConnection(conn)
## Return a HTTP connection to the pool
def ReturnConnection(self, conn):
self.lock.acquire()
self._CloseIdleConnections()
# In case of ssl tunneling, only add the conn if the conn has not been closed
if len(self.pool) < self.poolSize and (not self.is_ssl_tunnel or conn.sock):
self.pool.insert(0, (conn, time.time()))
self.lock.release()
else:
self.lock.release()
# NOTE (hartsock): this seems to violate good coding practice in that
# the lexical context that opens a connection should also be the
# same context responsible for closing it.
self._CloseConnection(conn)
   ## Disable Nagle on an HTTP connection
def DisableNagle(self, conn):
# Override connections' connect function to force disable NAGLE
if self.scheme != UnixSocketConnection and getattr(conn, "connect"):
orgConnect = conn.connect
def ConnectDisableNagle(*args, **kwargs):
orgConnect(*args, **kwargs)
sock = getattr(conn, "sock")
if sock:
try:
sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
except Exception:
pass
conn.connect = ConnectDisableNagle
   ## Need to override the deepcopy method. Since the stub is not deep-copyable
   # due to the thread lock and connection pool, a deep copy of a managed object
   # fails. Further, different instances of a managed object still share the
   # same soap stub. Hence, returning self here is fine.
def __deepcopy__(self, memo):
return self
HEADER_SECTION_END = '\r\n\r\n'
## Parse an HTTP response into its headers and body
def ParseHttpResponse(httpResponse):
headerEnd = httpResponse.find(HEADER_SECTION_END)
if headerEnd == -1:
return ('', '')
   headerEnd += len(HEADER_SECTION_END)
headerText = httpResponse[:headerEnd]
bodyText = httpResponse[headerEnd:]
return (headerText, bodyText)
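# Example (editor's sketch): the header text keeps the terminating blank line,
# the body is everything after it.
#
#   headers, body = ParseHttpResponse("Content-Type: text/xml\r\n\r\n<Envelope/>")
#   # headers == "Content-Type: text/xml\r\n\r\n"; body == "<Envelope/>"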
## SOAP-over-stdio stub adapter object
class SoapCmdStubAdapter(SoapStubAdapterBase):
## Constructor
#
# @param self self
# @param cmd command to execute
# @param ns API namespace
def __init__(self, cmd, version='vim.version.version1'):
SoapStubAdapterBase.__init__(self, version=version)
self.cmd = cmd
self.systemError = GetVmodlType('vmodl.fault.SystemError')
## Invoke a managed method
#
# @param self self
# @param mo the 'this'
# @param info method info
# @param args arguments
def InvokeMethod(self, mo, info, args):
argv = self.cmd.split()
req = self.SerializeRequest(mo, info, args)
env = dict(os.environ)
env['REQUEST_METHOD'] = 'POST'
env['CONTENT_LENGTH'] = str(len(req))
env['HTTP_SOAPACTION'] = self.versionId[1:-1]
p = subprocess.Popen(argv,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
env=env)
(outText, errText) = p.communicate(req)
if p.returncode < 0:
# Process died with a signal
errText = "Process terminated with signal {0}\n{1}".format(-p.returncode, errText)
raise self.systemError(msg=errText, reason=errText)
try:
(responseHeaders, responseBody) = ParseHttpResponse(outText)
obj = SoapResponseDeserializer(self).Deserialize(responseBody, info.result)
except:
         errText = "Failure parsing SOAP response ({0})\n{1}".format(outText, errText)
raise self.systemError(msg=errText, reason=errText)
if p.returncode == 0:
return obj
elif obj is None:
raise self.systemError(msg=errText, reason=errText)
else:
raise obj # pylint: disable-msg=E0702
class SessionOrientedStub(StubAdapterBase):
'''A session-oriented stub adapter that will relogin to the destination if a
session-oriented exception is thrown.
   Here's an example. First, we set up the communication substrate:
>>> soapStub = SoapStubAdapter(host="192.168.1.2", ns="vim25/5.0")
Create a SessionOrientedStub that uses the stub we just created for talking
to the server:
>>> from pyVim.connect import VimSessionOrientedStub
>>> sessionStub = VimSessionOrientedStub(
... soapStub,
... VimSessionOrientedStub.makeUserLoginMethod("root", "vmware"))
Perform some privileged operations without needing to explicitly login:
>>> si = Vim.ServiceInstance("ServiceInstance", sessionStub)
>>> si.content.sessionManager.sessionList
>>> si.content.sessionManager.Logout()
>>> si.content.sessionManager.sessionList
'''
STATE_UNAUTHENTICATED = 0
STATE_AUTHENTICATED = 1
SESSION_EXCEPTIONS = tuple()
def __init__(self, soapStub, loginMethod, retryDelay=0.1, retryCount=4):
'''Construct a SessionOrientedStub.
The stub starts off in the "unauthenticated" state, so it will call the
loginMethod on the first invocation of a method. If a communication error
is encountered, the stub will wait for retryDelay seconds and then try to
call the method again. If the server throws an exception that is in the
SESSION_EXCEPTIONS tuple, it will be caught and the stub will transition
back into the "unauthenticated" state so that another login will be
performed.
@param soapStub The communication substrate.
@param loginMethod A function that takes a single parameter, soapStub, and
performs the necessary operations to authenticate with the server.
@param retryDelay The amount of time to sleep before retrying after a
communication error.
@param retryCount The number of times to retry connecting to the server.
'''
assert callable(loginMethod)
assert retryCount >= 0
StubAdapterBase.__init__(self, version=soapStub.version)
self.lock = threading.Lock()
self.soapStub = soapStub
self.state = self.STATE_UNAUTHENTICATED
self.loginMethod = loginMethod
self.retryDelay = retryDelay
self.retryCount = retryCount
def InvokeMethod(self, mo, info, args):
# This retry logic is replicated in InvokeAccessor and the two copies need
# to be in sync
retriesLeft = self.retryCount
while retriesLeft > 0:
try:
if self.state == self.STATE_UNAUTHENTICATED:
self._CallLoginMethod()
# Invoke the method
status, obj = self.soapStub.InvokeMethod(mo, info, args, self)
except (socket.error, http_client.HTTPException, ExpatError):
if self.retryDelay and retriesLeft:
time.sleep(self.retryDelay)
retriesLeft -= 1
continue
if status == 200:
# Normal return from the server, return the object to the caller.
return obj
# An exceptional return from the server
if isinstance(obj, self.SESSION_EXCEPTIONS):
# Our session might've timed out, change our state and retry.
self._SetStateUnauthenticated()
else:
# It's an exception from the method that was called, send it up.
raise obj
# Raise any socket/httplib errors caught above.
raise SystemError()
## Retrieve a managed property
#
# @param self self
# @param mo managed object
# @param info property info
def InvokeAccessor(self, mo, info):
# This retry logic is replicated in InvokeMethod and the two copies need
# to be in sync
retriesLeft = self.retryCount
while retriesLeft > 0:
try:
if self.state == self.STATE_UNAUTHENTICATED:
self._CallLoginMethod()
# Invoke the method
obj = StubAdapterBase.InvokeAccessor(self, mo, info)
except (socket.error, http_client.HTTPException, ExpatError):
if self.retryDelay and retriesLeft:
time.sleep(self.retryDelay)
retriesLeft -= 1
continue
except Exception as e:
if isinstance(e, self.SESSION_EXCEPTIONS):
# Our session might've timed out, change our state and retry.
self._SetStateUnauthenticated()
else:
raise e
return obj
# Raise any socket/httplib errors caught above.
raise SystemError()
## Handle the login method call
#
# This method calls the login method on the soap stub and changes the state
# to authenticated
def _CallLoginMethod(self):
try:
self.lock.acquire()
if self.state == self.STATE_UNAUTHENTICATED:
self.loginMethod(self.soapStub)
self.state = self.STATE_AUTHENTICATED
finally:
self.lock.release()
## Change the state to unauthenticated
def _SetStateUnauthenticated(self):
self.lock.acquire()
if self.state == self.STATE_AUTHENTICATED:
self.state = self.STATE_UNAUTHENTICATED
self.lock.release()
| {
"content_hash": "7d98ee15b44ab370e11fc5d1343bd222",
"timestamp": "",
"source": "github",
"line_count": 1659,
"max_line_length": 121,
"avg_line_length": 39.41169379144063,
"alnum_prop": 0.6249847057384069,
"repo_name": "Infinidat/pyvmomi",
"id": "8f880714283f6c5de5bf401de46433ed1f2d6e7c",
"size": "66014",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pyVmomi/SoapAdapter.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "1278296"
}
],
"symlink_target": ""
} |
from .client import RakutenClient
| {
"content_hash": "2f13faaddd3dd38fe65979de8763f358",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 33,
"avg_line_length": 34,
"alnum_prop": 0.8529411764705882,
"repo_name": "claudetech/python_rakuten",
"id": "ab21d61c55fa92a8fa573be7464d5a498cf00bfb",
"size": "34",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "rakuten/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "6019"
}
],
"symlink_target": ""
} |
import mock
import pytest
try:
import ipywidgets
except ImportError:
ipywidgets = None
@pytest.mark.skipif(ipywidgets is None, reason='ipywidgets is not installed')
class TestExperimentWidget(object):
@pytest.fixture
def exp(self):
from dallinger.experiment import Experiment
return Experiment()
def test_experiment_initializes_widget(self, exp):
assert exp.widget is not None
def test_experiment_updates_widget_status(self, exp):
exp.update_status(u'Testing')
assert exp.widget.status == u'Testing'
assert 'Testing' in exp.widget.children[0].value
def test_experiment_displays_widget(self, exp):
with mock.patch('IPython.display.display') as display:
exp._ipython_display_()
assert display.called_once_with(exp.widget)
def test_widget_children_no_config(self, exp):
assert exp.widget.children[1].children[0].value == 'Not loaded.'
def test_widget_children_with_config(self, active_config, exp):
assert exp.widget.children[1].children[0].value != 'Not loaded.'
| {
"content_hash": "61acbdc3e2daa86f70d2ba362558fbe6",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 77,
"avg_line_length": 32.38235294117647,
"alnum_prop": 0.6857402361489555,
"repo_name": "jcpeterson/Dallinger",
"id": "18d75ca174c42ca1bb950c586224e1f25fefcb43",
"size": "1101",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_jupyter.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "366"
},
{
"name": "HTML",
"bytes": "32554"
},
{
"name": "JavaScript",
"bytes": "36269"
},
{
"name": "Python",
"bytes": "715512"
},
{
"name": "Ruby",
"bytes": "1769"
},
{
"name": "Shell",
"bytes": "136"
}
],
"symlink_target": ""
} |
from unittest import mock
from oslotest import base as test_base
from oslo_service import fixture
from oslo_service import loopingcall
class FixtureTestCase(test_base.BaseTestCase):
def setUp(self):
super(FixtureTestCase, self).setUp()
self.sleepfx = self.useFixture(fixture.SleepFixture())
def test_sleep_fixture(self):
@loopingcall.RetryDecorator(max_retry_count=3, inc_sleep_time=2,
exceptions=(ValueError,))
def retried_method():
raise ValueError("!")
self.assertRaises(ValueError, retried_method)
self.assertEqual(3, self.sleepfx.mock_wait.call_count)
# TODO(efried): This is cheating, and shouldn't be done by real callers
# yet - see todo in SleepFixture.
self.sleepfx.mock_wait.assert_has_calls(
[mock.call(x) for x in (2, 4, 6)])
| {
"content_hash": "85b15e7b909892c94a379ebf584d8ee8",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 79,
"avg_line_length": 35.4,
"alnum_prop": 0.6519774011299435,
"repo_name": "openstack/oslo.service",
"id": "6b2670e77bdb5d9e0f9b55814b5a206d711c9aa8",
"size": "1458",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "oslo_service/tests/test_fixture.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "225398"
}
],
"symlink_target": ""
} |
"""Jacobian ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import check_ops
from tensorflow.python.ops import gradients as gradient_ops
from tensorflow.python.ops.parallel_for import control_flow_ops
from tensorflow.python.util import nest
def jacobian(output, inputs, use_pfor=True, parallel_iterations=None):
"""Computes jacobian of `output` w.r.t. `inputs`.
Args:
output: A tensor.
inputs: A tensor or a nested structure of tensor objects.
use_pfor: If true, uses pfor for computing the jacobian. Else uses
tf.while_loop.
    parallel_iterations: A knob to control how many iterations are dispatched
      in parallel. This knob can be used to control the total memory usage.
Returns:
A tensor or a nested structure of tensors with the same structure as
    `inputs`. Each entry is the jacobian of `output` w.r.t. the corresponding
value in `inputs`. If output has shape [y_1, ..., y_n] and inputs_i has
shape [x_1, ..., x_m], the corresponding jacobian has shape
[y_1, ..., y_n, x_1, ..., x_m]. Note that in cases where the gradient is
sparse (IndexedSlices), jacobian function currently makes it dense and
returns a Tensor instead. This may change in the future.
"""
flat_inputs = nest.flatten(inputs)
output_tensor_shape = output.shape
output_shape = array_ops.shape(output)
output = array_ops.reshape(output, [-1])
def loop_fn(i):
y = array_ops.gather(output, i)
return gradient_ops.gradients(y, flat_inputs)
try:
output_size = int(output.shape[0])
except TypeError:
output_size = array_ops.shape(output)[0]
if use_pfor:
pfor_outputs = control_flow_ops.pfor(
loop_fn, output_size, parallel_iterations=parallel_iterations)
else:
pfor_outputs = control_flow_ops.for_loop(
loop_fn,
[output.dtype] * len(flat_inputs),
output_size,
parallel_iterations=parallel_iterations)
for i, out in enumerate(pfor_outputs):
if isinstance(out, ops.Tensor):
new_shape = array_ops.concat(
[output_shape, array_ops.shape(out)[1:]], axis=0)
out = array_ops.reshape(out, new_shape)
out.set_shape(output_tensor_shape.concatenate(flat_inputs[i].shape))
pfor_outputs[i] = out
return nest.pack_sequence_as(inputs, pfor_outputs)
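# Example (editor's sketch, mirroring the batch_jacobian example below): for an
# elementwise square the jacobian is diagonal.
#
#   x = tf.constant([1.0, 2.0])
#   y = x * x
#   j = jacobian(y, x)
#   # => [[2., 0.], [0., 4.]]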
def batch_jacobian(output, inp, use_pfor=True, parallel_iterations=None):
"""Computes and stacks jacobians of `output[i,...]` w.r.t. `input[i,...]`.
e.g.
x = tf.constant([[1, 2], [3, 4]], dtype=tf.float32)
y = x * x
jacobian = batch_jacobian(y, x)
# => [[[2, 0], [0, 4]], [[6, 0], [0, 8]]]
Args:
output: A tensor with shape [b, y1, ..., y_n]. `output[i,...]` should
only depend on `inp[i,...]`.
inp: A tensor with shape [b, x1, ..., x_m]
use_pfor: If true, uses pfor for computing the Jacobian. Else uses a
tf.while_loop.
parallel_iterations: A knob to control how many iterations are vectorized
and dispatched in parallel. The default value of None, when use_pfor is
true, corresponds to vectorizing all the iterations. When use_pfor is
false, the default value of None corresponds to parallel_iterations=10.
This knob can be used to control the total memory usage.
Returns:
A tensor `t` with shape [b, y_1, ..., y_n, x1, ..., x_m] where `t[i, ...]`
is the jacobian of `output[i, ...]` w.r.t. `inp[i, ...]`, i.e. stacked
per-example jacobians.
Raises:
ValueError: if first dimension of `output` and `inp` do not match.
"""
output_shape = output.shape
if not output_shape[0].is_compatible_with(inp.shape[0]):
raise ValueError("Need first dimension of output shape (%s) and inp shape "
"(%s) to match." % (output.shape, inp.shape))
if output_shape.is_fully_defined():
batch_size = int(output_shape[0])
output_row_size = output_shape.num_elements() // batch_size
else:
output_shape = array_ops.shape(output)
batch_size = output_shape[0]
output_row_size = array_ops.size(output) // batch_size
inp_shape = array_ops.shape(inp)
# Flatten output to 2-D.
with ops.control_dependencies(
[check_ops.assert_equal(batch_size, inp_shape[0])]):
output = array_ops.reshape(output, [batch_size, output_row_size])
def loop_fn(i):
y = array_ops.gather(output, i, axis=1)
return gradient_ops.gradients(y, inp)[0]
if use_pfor:
pfor_output = control_flow_ops.pfor(loop_fn, output_row_size,
parallel_iterations=parallel_iterations)
else:
pfor_output = control_flow_ops.for_loop(
loop_fn, output.dtype,
output_row_size,
parallel_iterations=parallel_iterations)
if pfor_output is None:
return None
pfor_output = array_ops.reshape(pfor_output,
[output_row_size, batch_size, -1])
output = array_ops.transpose(pfor_output, [1, 0, 2])
new_shape = array_ops.concat([output_shape, inp_shape[1:]], axis=0)
return array_ops.reshape(output, new_shape)
| {
"content_hash": "8117580ca24f46efd1cc9cdcad1c6647",
"timestamp": "",
"source": "github",
"line_count": 133,
"max_line_length": 80,
"avg_line_length": 39.1203007518797,
"alnum_prop": 0.6609648279838555,
"repo_name": "renyi533/tensorflow",
"id": "94ab49951aa001bc23e40f4b50af5e44de826c62",
"size": "5892",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "tensorflow/python/ops/parallel_for/gradients.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "31572"
},
{
"name": "Batchfile",
"bytes": "55269"
},
{
"name": "C",
"bytes": "903309"
},
{
"name": "C#",
"bytes": "8562"
},
{
"name": "C++",
"bytes": "82507951"
},
{
"name": "CMake",
"bytes": "6967"
},
{
"name": "Dockerfile",
"bytes": "113964"
},
{
"name": "Go",
"bytes": "1871425"
},
{
"name": "HTML",
"bytes": "4686483"
},
{
"name": "Java",
"bytes": "988219"
},
{
"name": "Jupyter Notebook",
"bytes": "550861"
},
{
"name": "LLVM",
"bytes": "6536"
},
{
"name": "MLIR",
"bytes": "2073744"
},
{
"name": "Makefile",
"bytes": "66796"
},
{
"name": "Objective-C",
"bytes": "116558"
},
{
"name": "Objective-C++",
"bytes": "319021"
},
{
"name": "PHP",
"bytes": "4236"
},
{
"name": "Pascal",
"bytes": "318"
},
{
"name": "Pawn",
"bytes": "20422"
},
{
"name": "Perl",
"bytes": "7536"
},
{
"name": "Python",
"bytes": "37811412"
},
{
"name": "RobotFramework",
"bytes": "1779"
},
{
"name": "Roff",
"bytes": "2705"
},
{
"name": "Ruby",
"bytes": "7464"
},
{
"name": "SWIG",
"bytes": "6846"
},
{
"name": "Shell",
"bytes": "696058"
},
{
"name": "Smarty",
"bytes": "35725"
},
{
"name": "Starlark",
"bytes": "3655758"
},
{
"name": "Swift",
"bytes": "62814"
},
{
"name": "Vim Snippet",
"bytes": "58"
}
],
"symlink_target": ""
} |
import sys
from optparse import OptionParser
import urllib2
import json
import getpass
VERSION = '0.1'
SUPPORTED_PROTO = ['tcp']
def getUrl(opts, path):
return URL % (opts.host, opts.port, path)
def buildRequest(data, url, cmd):
j = { "id" : "ovxctl", "method" : cmd , "jsonrpc" : "2.0" }
h = {"Content-Type" : "application/json-rpc"}
if data is not None:
j['params'] = data
return urllib2.Request(url, json.dumps(j), h)
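# Example (editor's sketch): buildRequest({"tenantId": 1}, url, "createSwitch")
# posts a JSON-RPC 2.0 body shaped like
#   {"id": "ovxctl", "method": "createSwitch", "jsonrpc": "2.0",
#    "params": {"tenantId": 1}}
# with a Content-Type of application/json-rpc.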
def pa_none(args, cmd):
(sdesc, ldesc) = DESCS[cmd]
parser = OptionParser(usage=USAGE.format(cmd), description=ldesc)
(options, args) = parser.parse_args(args)
return (options, args)
#Create calls
def pa_addControllers(args, cmd):
usage = "%s <tenant_id> <vdpid> <ctrlUrls>" % USAGE.format(cmd)
(sdesc, ldesc) = DESCS[cmd]
parser = OptionParser(usage=usage, description=ldesc)
return parser.parse_args(args)
def do_addControllers(gopts, opts, args):
if len(args) != 3:
print "addControllers: Must specify tenant id, virtual dpid, controller list"
sys.exit()
req = { "controllerUrls" : buildControllerList(args[2]), \
"tenantId" : int(args[0]), "vdpid" : int(args[1].replace(":",""), 16) }
resp = connect(gopts, "tenant", "addControllers", data=req, passwd=getPasswd(gopts))
if resp:
print "Added controllers %s to switch %s" % (args[2], args[1])
print resp
def pa_createNetwork(args, cmd):
    usage = "%s <controller_urls> <ip_network> <ip_mask>" % USAGE.format(cmd)
(sdesc, ldesc) = DESCS[cmd]
parser = OptionParser(usage=usage, description=ldesc)
return parser.parse_args(args)
def buildControllerList(ctrls):
if ctrls.lower() == "none":
return []
l = ctrls.split(',')
controllerUrls = []
for ctrl in l:
parts = ctrl.split(":")
if len(parts) < 3:
print "%s is not a valid controller url" % ctrl
sys.exit()
if parts[0] not in SUPPORTED_PROTO:
print "%s in %s is not a supported protocol" % (parts[0], ctrl)
sys.exit()
try:
int(parts[2])
except:
print "%s in %s is not a valid port number" % (parts[2], ctrl)
sys.exit()
controllerUrls.append(ctrl)
return controllerUrls
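# Example (editor's sketch): controller URLs are comma-separated
# <protocol>:<host>:<port> strings, so
#   buildControllerList("tcp:10.0.0.1:6633,tcp:10.0.0.2:6633")
#     => ["tcp:10.0.0.1:6633", "tcp:10.0.0.2:6633"]
#   buildControllerList("none")
#     => []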
def do_createNetwork(gopts, opts, args):
if len(args) != 3:
print "createNetwork : Must specify controllerUrls, network_ip, network_mask"
sys.exit()
req = { "controllerUrls" : buildControllerList(args[0]), \
"networkAddress" : args[1], "mask" : int(args[2]) }
network_id = connect(gopts, "tenant", "createNetwork", data=req, passwd=getPasswd(gopts))
if network_id:
print "Virtual network has been created (network_id %s)." % str(network_id)
def pa_createSwitch(args, cmd):
usage = "%s [options] <tenant_id> <physical_dpids>" % USAGE.format(cmd)
(sdesc, ldesc) = DESCS[cmd]
parser = OptionParser(usage=usage, description=ldesc)
parser.add_option("-d", "--dpid", dest="dpid", type="str", default="0",
help="Specify the DPID for this switch")
return parser.parse_args(args)
def do_createSwitch(gopts, opts, args):
if len(args) != 2:
print ("createSwitch : must specify: " +
"virtual tenant_id and a comma separated list of physical dpids " +
"(e.g. 00:00:00:00:00:00:00:01) which will be associated to the virtual switch")
sys.exit()
dpids = [int(dpid.replace(":", ""), 16) for dpid in args[1].split(',')]
req = { "tenantId" : int(args[0]), "dpids" : dpids, "dpid" : int(opts.dpid.replace(":", ""), 16) }
reply = connect(gopts, "tenant", "createSwitch", data=req, passwd=getPasswd(gopts))
switchId = reply.get('vdpid')
if switchId:
switch_name = '00:' + ':'.join([("%x" % switchId)[i:i+2] for i in range(0, len(("%x" % switchId)), 2)])
print "Virtual switch has been created (tenant_id %s, switch_id %s)" % (args[0], switch_name)
def pa_createPort(args, cmd):
usage = "%s <tenant_id> <physical_dpid> <physical_port>" % USAGE.format(cmd)
(sdesc, ldesc) = DESCS[cmd]
parser = OptionParser(usage=usage, description=ldesc)
return parser.parse_args(args)
def do_createPort(gopts, opts, args):
if len(args) != 3:
print ("createPort : must specify: " +
"virtual tenant_id, physical dpid " +
"(e.g. 00:00:00:00:00:00:00:01) and physical port")
sys.exit()
req = { "tenantId" : int(args[0]), "dpid" : int(args[1].replace(":", ""), 16), "port" : int(args[2]) }
reply = connect(gopts, "tenant", "createPort", data=req, passwd=getPasswd(gopts))
switchId = reply.get('vdpid')
portId = reply.get('vport')
if switchId and portId:
switch_name = '00:' + ':'.join([("%x" %int(switchId))[i:i+2] for i in range(0, len(("%x" %int(switchId))), 2)])
print "Virtual port has been created (tenant_id %s, switch_id %s, port_id %s)" % (args[0], switch_name, portId)
def pa_setInternalRouting(args, cmd):
usage = "%s <tenant_id> <virtual_dpid> <routing_algorithm> <backup_routes_num>" % USAGE.format(cmd)
(sdesc, ldesc) = DESCS[cmd]
parser = OptionParser(usage=usage, description=ldesc)
return parser.parse_args(args)
def do_setInternalRouting(gopts, opts, args):
if len(args) != 4:
print ("setInternalRouting : Must specify virtual tenant_id, virtual switch_id, " +
"algorithm (spf, manual) and number of backup routes")
sys.exit()
req = { "tenantId" : int(args[0]), "vdpid" : int(args[1].replace(":", ""), 16),
"algorithm" : args[2], "backup_num" : int(args[3]) }
reply = connect(gopts, "tenant", "setInternalRouting", data=req, passwd=getPasswd(gopts))
tenantId = reply.get('tenantId')
switchId = reply.get('vdpid')
if tenantId and switchId:
print "Routing has be set for big switch (tenant_id %s, switch_id %s)" % (switchId, tenantId)
def pa_connectHost(args, cmd):
usage = "%s <tenant_id> <vitual_dpid> <virtual_port> <host_mac>" % USAGE.format(cmd)
(sdesc, ldesc) = DESCS[cmd]
parser = OptionParser(usage=usage, description=ldesc)
return parser.parse_args(args)
def do_connectHost(gopts, opts, args):
if len(args) != 4:
print "connectHost : Must specify virtual tenant_id, virtual switch_id, virtual port_id and host MAC address"
sys.exit()
req = { "tenantId" : int(args[0]), "vdpid" : int(args[1].replace(":", ""), 16),
"vport" : int(args[2]), "mac" : args[3] }
reply = connect(gopts, "tenant", "connectHost", data=req, passwd=getPasswd(gopts))
hostId = reply.get('hostId')
if hostId:
print "Host (host_id %s) has been connected to virtual port" % (hostId)
def pa_connectLink(args, cmd):
usage = "%s <tenant_id> <src_virtual_dpid> <src_virtual_port> <dst_virtual_dpid> <dst_virtual_port>" % USAGE.format(cmd)
(sdesc, ldesc) = DESCS[cmd]
parser = OptionParser(usage=usage, description=ldesc)
return parser.parse_args(args)
def do_connectLink(gopts, opts, args):
if len(args) != 7:
print ("connectLink : Must specify tenant_id, src_virtual_dpid, src_virtual_port, dst_virtual_dpid, dst_virtual_port, "
+ "algorithm (spf, manual), number of backup routes")
sys.exit()
req = { "tenantId" : int(args[0]), "srcDpid" : int(args[1].replace(":", ""), 16),
"srcPort" : int(args[2]), "dstDpid" : int(args[3].replace(":", ""), 16),
"dstPort" : int(args[4]), "algorithm" : args[5], "backup_num" : int(args[6]) }
reply = connect(gopts, "tenant", "connectLink", data=req, passwd=getPasswd(gopts))
linkId = reply.get('linkId')
if linkId:
print "Virtual link (link_id %s) has been created" % (linkId)
def pa_setLinkPath(args, cmd):
usage = "%s <tenant_id> <link_id> <physical_path> <priority>" % USAGE.format(cmd)
(sdesc, ldesc) = DESCS[cmd]
parser = OptionParser(usage=usage, description=ldesc)
return parser.parse_args(args)
def do_setLinkPath(gopts, opts, args):
if len(args) != 4:
print "setLinkPath : Must specify tenant_id, link_id, the physical path that connect the end-points and the priority [0-255]"
sys.exit()
req = { "tenantId" : int(args[0]), "linkId" : int(args[1]), "path" : translate_path(args[2]), "priority" : int(args[3]) }
reply = connect(gopts, "tenant", "setLinkPath", data=req, passwd=getPasswd(gopts))
linkId = reply.get('linkId')
if linkId:
print "Virtual link (link_id %s) path has been set" % (linkId)
def pa_connectRoute(args, cmd):
usage = "%s <tenant_id> <virtual_dpid> <src_virtual_port> <dst_virtual_port> <physical_path> <priority>" % USAGE.format(cmd)
(sdesc, ldesc) = DESCS[cmd]
parser = OptionParser(usage=usage, description=ldesc)
return parser.parse_args(args)
def do_connectRoute(gopts, opts, args):
if len(args) != 6:
print ("connectRoute : Must specify tenant_id, virtual_dpid, src_virtual_port, dst_virtual_port, " +
"the physical path that connect the end-points and the priority [0-255]")
sys.exit()
req = { "tenantId" : int(args[0]), "vdpid" : int(args[1].replace(":", ""), 16),
"srcPort" : int(args[2]), "dstPort" : int(args[3]),
"path" : translate_path(args[4]), "priority" : int(args[5]) }
reply = connect(gopts, "tenant", "connectRoute", data=req, passwd=getPasswd(gopts))
routeId = reply.get('routeId')
if routeId:
print "Big-switch internal route (route_id %s) has been created" % (routeId)
#Remove calls
def pa_removeNetwork(args, cmd):
usage = "%s <tenant_id>" % USAGE.format(cmd)
(sdesc, ldesc) = DESCS[cmd]
parser = OptionParser(usage=usage, description=ldesc)
return parser.parse_args(args)
def do_removeNetwork(gopts, opts, args):
if len(args) != 1:
print "removeNetwork : Must specify a virtual tenant_id"
sys.exit()
req = { "tenantId" : int(args[0]) }
result = connect(gopts, "tenant", "removeNetwork", data=req, passwd=getPasswd(gopts))
print "Network (tenant_id %s) has been removed" % (args[0])
def pa_removeSwitch(args, cmd):
usage = "%s <tenant_id> <virtual_dpid>" % USAGE.format(cmd)
(sdesc, ldesc) = DESCS[cmd]
parser = OptionParser(usage=usage, description=ldesc)
return parser.parse_args(args)
def do_removeSwitch(gopts, opts, args):
if len(args) != 2:
print "removeSwitch : Must specify a virtual tenant_id and a virtual switch_id"
sys.exit()
req = { "tenantId" : int(args[0]), "vdpid" : int(args[1].replace(":", ""), 16) }
result = connect(gopts, "tenant", "removeSwitch", data=req, passwd=getPasswd(gopts))
print "Switch (switch_id %s) has been removed" % (args[1])
def pa_removePort(args, cmd):
usage = "%s <tenant_id> <virtual_dpid> <virtual_port>" % USAGE.format(cmd)
(sdesc, ldesc) = DESCS[cmd]
parser = OptionParser(usage=usage, description=ldesc)
return parser.parse_args(args)
def do_removePort(gopts, opts, args):
if len(args) != 3:
print "removePort : Must specify a virtual tenant_id, a virtual switch_id and a virtual port_id"
sys.exit()
req = { "tenantId" : int(args[0]), "vdpid" : int(args[1].replace(":", ""), 16), "vport" : int(args[2])}
result = connect(gopts, "tenant", "removePort", data=req, passwd=getPasswd(gopts))
print "Port (port_id %s) has been removed from virtual switch (switch_id %s)" % (args[2], args[1])
def pa_disconnectHost(args, cmd):
usage = "%s <tenant_id> <host_id>" % USAGE.format(cmd)
(sdesc, ldesc) = DESCS[cmd]
parser = OptionParser(usage=usage, description=ldesc)
return parser.parse_args(args)
def do_disconnectHost(gopts, opts, args):
if len(args) != 2:
print "disconnectHost : Must specify a a virtual tenant_id and a host_id"
sys.exit()
req = { "tenantId" : int(args[0]), "hostId" : int(args[1]) }
result = connect(gopts, "tenant", "disconnectHost", data=req, passwd=getPasswd(gopts))
print "Host (host_id %s) has been disconnected from the virtual network (tenant_id %s)" % (args[1], args[0])
def pa_disconnectLink(args, cmd):
usage = "%s <tenant_id> <link_id>" % USAGE.format(cmd)
(sdesc, ldesc) = DESCS[cmd]
parser = OptionParser(usage=usage, description=ldesc)
return parser.parse_args(args)
def do_disconnectLink(gopts, opts, args):
if len(args) != 2:
print "disconnectLink : Must specify a a virtual tenant_id and a link_id"
sys.exit()
req = { "tenantId" : int(args[0]), "linkId" : int(args[1]) }
result = connect(gopts, "tenant", "disconnectLink", data=req, passwd=getPasswd(gopts))
print "Link (link_id %s) has been disconnected from the virtual network (tenant_id %s)" % (args[1], args[0])
def pa_disconnectRoute(args, cmd):
usage = "%s <tenant_id> <route_id>" % USAGE.format(cmd)
(sdesc, ldesc) = DESCS[cmd]
parser = OptionParser(usage=usage, description=ldesc)
return parser.parse_args(args)
def do_disconnectRoute(gopts, opts, args):
if len(args) != 3:
print "disconnectRoute : Must specify a virtual tenant_id, switch_id and a route_id"
sys.exit()
req = { "tenantId" : int(args[0]), "vdpid" : int(args[1].replace(":", ""), 16) , "routeId" : int(args[2]) }
result = connect(gopts, "tenant", "disconnectRoute", data=req, passwd=getPasswd(gopts))
print "Route (route_id %s) in virtual big-switch (switch_id %s) has been disconnected from the virtual network (tenant_id %s)" % (args[2], args[1], args[0])
#Runtime operations
def pa_startNetwork(args, cmd):
usage = "%s <tenant_id>" % USAGE.format(cmd)
(sdesc, ldesc) = DESCS[cmd]
parser = OptionParser(usage=usage, description=ldesc)
return parser.parse_args(args)
def do_startNetwork(gopts, opts, args):
if len(args) != 1:
print "startNetwork : Must specify a tenant_id"
sys.exit()
req = { "tenantId" : int(args[0]) }
result = connect(gopts, "tenant", "startNetwork", data=req, passwd=getPasswd(gopts))
if result:
print "Network (tenant_id %s) has been booted" % (args[0])
def pa_startSwitch(args, cmd):
usage = "%s <tenant_id> <virtual_dpid>" % USAGE.format(cmd)
(sdesc, ldesc) = DESCS[cmd]
parser = OptionParser(usage=usage, description=ldesc)
return parser.parse_args(args)
def do_startSwitch(gopts, opts, args):
if len(args) != 2:
print "startSwitch : Must specify a tenant_id and a virtual switch_id"
sys.exit()
req = { "tenantId" : int(args[0]), "vdpid" : int(args[1].replace(":", ""), 16)}
result = connect(gopts, "tenant", "startSwitch", data=req, passwd=getPasswd(gopts))
if result:
print "Switch (switch_id %s) has been booted in virtual network (tenant_id %s)" % (args[1], args[0])
def pa_startPort(args, cmd):
usage = "%s <tenant_id> <virtual_dpid>" % USAGE.format(cmd)
(sdesc, ldesc) = DESCS[cmd]
parser = OptionParser(usage=usage, description=ldesc)
return parser.parse_args(args)
def do_startPort(gopts, opts, args):
if len(args) != 3:
print "startPort : Must specify a tenant_id, a virtual switch_id and a virtual port_id"
sys.exit()
req = { "tenantId" : int(args[0]), "vdpid" : int(args[1].replace(":", ""), 16), "vport" : int(args[2])}
reply = connect(gopts, "tenant", "startPort", data=req, passwd=getPasswd(gopts))
tenantId = reply.get('tenantId')
switchId = reply.get('vdpid')
portId = reply.get('vport')
    if tenantId and switchId and portId:
print "Port (port_id %s) has been started in virtual switch (tenant_id %s, switch_id %s)" % (portId, tenantId, switchId)
def pa_stopNetwork(args, cmd):
usage = "%s <tenant_id>" % USAGE.format(cmd)
(sdesc, ldesc) = DESCS[cmd]
parser = OptionParser(usage=usage, description=ldesc)
return parser.parse_args(args)
def do_stopNetwork(gopts, opts, args):
if len(args) != 1:
print "stopNetwork : Must specify a tenant_id"
sys.exit()
req = { "tenantId" : int(args[0]) }
result = connect(gopts, "tenant", "stopNetwork", data=req, passwd=getPasswd(gopts))
if result:
print "Network (tenant_id %s) has been shutdown" % (args[0])
def pa_stopSwitch(args, cmd):
usage = "%s <tenant_id> <virtual_dpid>" % USAGE.format(cmd)
(sdesc, ldesc) = DESCS[cmd]
parser = OptionParser(usage=usage, description=ldesc)
return parser.parse_args(args)
def do_stopSwitch(gopts, opts, args):
if len(args) != 2:
print "stopSwitch : Must specify a tenant_id and a virtual switch_id"
sys.exit()
req = { "tenantId" : int(args[0]), "vdpid" : int(args[1].replace(":", ""), 16)}
result = connect(gopts, "tenant", "stopSwitch", data=req, passwd=getPasswd(gopts))
if result:
print "Switch (switch_id %s) has been shutdown in virtual network (tenant_id %s)" % (args[1], args[0])
def pa_stopPort(args, cmd):
usage = "%s <tenant_id> <virtual_dpid>" % USAGE.format(cmd)
(sdesc, ldesc) = DESCS[cmd]
parser = OptionParser(usage=usage, description=ldesc)
return parser.parse_args(args)
def do_stopPort(gopts, opts, args):
if len(args) != 3:
print "stopPort : Must specify a tenant_id, a virtual switch_id and a virtual port_id"
sys.exit()
req = { "tenantId" : int(args[0]), "vdpid" : int(args[1].replace(":", ""), 16), "vport" : int(args[2])}
result = connect(gopts, "tenant", "stopPort", data=req, passwd=getPasswd(gopts))
if result:
print "Port (port_id %s) has been shutdown in virtual switch (tenant_id %s, switch_id %s)" % (args[2], args[0], args[1])
def pa_getPhysicalFlowtable(args, cmd):
usage = "%s [<physical_dpid>]" % USAGE.format(cmd)
(sdesc, ldesc) = DESCS[cmd]
parser = OptionParser(usage=usage, description=ldesc)
return parser.parse_args(args)
def do_getPhysicalFlowtable(gopts, opts, args):
if len(args) > 1:
print "getPhysicalFlowtable : May specify optional physical dpid"
sys.exit()
req = {}
if len(args) == 1:
req["dpid"] = int(args[0].replace(":", ""), 16)
result = connect(gopts, "status", "getPhysicalFlowtable", data=req, passwd=getPasswd(gopts))
print json.dumps(result)
def pa_getPhysicalHosts(args, cmd):
usage = "%s" % USAGE.format(cmd)
(sdesc, ldesc) = DESCS[cmd]
parser = OptionParser(usage=usage, description=ldesc)
return parser.parse_args(args)
def do_getPhysicalHosts(gopts, opts, args):
if len(args) > 0:
print "getPhysicalHosts : No arguments"
sys.exit()
req = {}
result = connect(gopts, "status", "getPhysicalHosts", data=req, passwd=getPasswd(gopts))
print json.dumps(result)
def pa_getPhysicalTopology(args, cmd):
usage = "%s" % USAGE.format(cmd)
(sdesc, ldesc) = DESCS[cmd]
parser = OptionParser(usage=usage, description=ldesc)
return parser.parse_args(args)
def do_getPhysicalTopology(gopts, opts, args):
if len(args) > 0:
print "getPhysicalTopology : No arguments"
sys.exit()
req = {}
result = connect(gopts, "status", "getPhysicalTopology", data=req, passwd=getPasswd(gopts))
print json.dumps(result)
def pa_listVirtualNetworks(args, cmd):
usage = "%s" % USAGE.format(cmd)
(sdesc, ldesc) = DESCS[cmd]
parser = OptionParser(usage=usage, description=ldesc)
return parser.parse_args(args)
def do_listVirtualNetworks(gopts, opts, args):
if len(args) > 0:
print "listVirtualNetworks : No arguments"
sys.exit()
req = {}
result = connect(gopts, "status", "listVirtualNetworks", data=req, passwd=getPasswd(gopts))
print json.dumps(result)
def pa_getVirtualAddressMapping(args, cmd):
usage = "%s <tenant_id> <virtual_dpid>" % USAGE.format(cmd)
(sdesc, ldesc) = DESCS[cmd]
parser = OptionParser(usage=usage, description=ldesc)
return parser.parse_args(args)
def do_getVirtualAddressMapping(gopts, opts, args):
if len(args) != 1:
print "getVirtualAddressMapping : Must specify a tenant_id"
sys.exit()
req = { "tenantId" : int(args[0]) }
result = connect(gopts, "status", "getVirtualAddressMapping", data=req, passwd=getPasswd(gopts))
print json.dumps(result)
def pa_getVirtualFlowtable(args, cmd):
usage = "%s <tenant_id> [<virtual_dpid>]" % USAGE.format(cmd)
(sdesc, ldesc) = DESCS[cmd]
parser = OptionParser(usage=usage, description=ldesc)
return parser.parse_args(args)
def do_getVirtualFlowtable(gopts, opts, args):
if (len(args) == 0) or (len(args) > 2):
print "getVirtualFlowtable : Must specify a tenant_id, and optional virtual switch_id"
sys.exit()
req = { "tenantId" : int(args[0]) }
if len(args) == 2:
req["vdpid"] = int(args[1].replace(":", ""), 16)
result = connect(gopts, "status", "getVirtualFlowtable", data=req, passwd=getPasswd(gopts))
print json.dumps(result)
def pa_getVirtualHosts(args, cmd):
usage = "%s <tenant_id>" % USAGE.format(cmd)
(sdesc, ldesc) = DESCS[cmd]
parser = OptionParser(usage=usage, description=ldesc)
return parser.parse_args(args)
def do_getVirtualHosts(gopts, opts, args):
if len(args) != 1:
print "getVirtualHosts : Must specify a tenant_id"
sys.exit()
req = { "tenantId": int(args[0]) }
result = connect(gopts, "status", "getVirtualHosts", data=req, passwd=getPasswd(gopts))
print json.dumps(result)
def pa_getVirtualLinkMapping(args, cmd):
usage = "%s <tenant_id>" % USAGE.format(cmd)
(sdesc, ldesc) = DESCS[cmd]
parser = OptionParser(usage=usage, description=ldesc)
return parser.parse_args(args)
def do_getVirtualLinkMapping(gopts, opts, args):
if len(args) != 1:
print "getVirtualHosts : Must specify a tenant_id"
sys.exit()
req = { "tenantId": int(args[0]) }
result = connect(gopts, "status", "getVirtualLinkMapping", data=req, passwd=getPasswd(gopts))
print json.dumps(result)
def pa_getVirtualSwitchMapping(args, cmd):
usage = "%s <tenant_id>" % USAGE.format(cmd)
(sdesc, ldesc) = DESCS[cmd]
parser = OptionParser(usage=usage, description=ldesc)
return parser.parse_args(args)
def do_getVirtualSwitchMapping(gopts, opts, args):
if len(args) != 1:
print "getVirtualSwitchMapping : Must specify a tenant_id"
sys.exit()
req = { "tenantId": int(args[0]) }
result = connect(gopts, "status", "getVirtualSwitchMapping", data=req, passwd=getPasswd(gopts))
print json.dumps(result)
def pa_getVirtualTopology(args, cmd):
usage = "%s <tenant_id>" % USAGE.format(cmd)
(sdesc, ldesc) = DESCS[cmd]
parser = OptionParser(usage=usage, description=ldesc)
return parser.parse_args(args)
def do_getVirtualTopology(gopts, opts, args):
if len(args) != 1:
print "getVirtualTopology : Must specify a tenant_id"
sys.exit()
req = { "tenantId": int(args[0]) }
result = connect(gopts, "status", "getVirtualTopology", data=req, passwd=getPasswd(gopts))
print json.dumps(result)
# Other methods
def translate_path(path_string):
hop_list = path_string.split(",")
path = ""
for hop in hop_list:
src, dst = hop.split("-")
src_dpid, src_port = src.split("/")
dst_dpid, dst_port = dst.split("/")
src_long_dpid = int(src_dpid.replace(":", ""), 16)
dst_long_dpid = int(dst_dpid.replace(":", ""), 16)
path = path + str(src_long_dpid) + "/" + str(src_port) + "-" + str(dst_long_dpid) + "/" + str(dst_port) + ","
    if len(path) > 0:
        path = path.rstrip(",")
return path
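# Illustrative translation performed by translate_path (path values assumed):
#   translate_path("00:00:00:00:00:00:00:01/1-00:00:00:00:00:00:00:02/1") -> "1/1-2/1"
#   (each colon-separated hex dpid is converted to its decimal long value)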
def pa_help(args, cmd):
usage = "%s <cmd>" % USAGE.format(cmd)
parser = OptionParser(usage=usage)
return parser.parse_args(args)
def do_help(gopts, opts, args):
if len(args) != 1:
raise IndexError
try:
(pa, func) = CMDS[args[0]]
pa(['--help'], args[0])
except KeyError, e:
print "Invalid command : %s is an unknown command." % args[0]
sys.exit()
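# connect() issues an authenticated HTTP request to the OVX API handler named by
# 'path' (e.g. "tenant" or "status") and returns the parsed 'result' field of the
# JSON reply.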
def connect(opts, path, cmd, data=None, passwd=None):
try:
url = getUrl(opts, path)
passman = urllib2.HTTPPasswordMgrWithDefaultRealm()
passman.add_password(None, url, opts.ovx_user, passwd)
authhandler = urllib2.HTTPBasicAuthHandler(passman)
opener = urllib2.build_opener(authhandler)
req = buildRequest(data, url, cmd)
#ph = urllib2.urlopen(req)
ph = opener.open(req)
return parseResponse(ph.read())
    except urllib2.HTTPError as e:
        # HTTPError must be caught before URLError, since it is a subclass of it
        if e.code == 401:
            print "Authentication failed: invalid password"
            sys.exit(1)
        elif e.code == 504:
            print "HTTP Error 504: Gateway timeout"
            sys.exit(1)
        else:
            print e
    except urllib2.URLError as e:
        print e
        sys.exit(1)
except RuntimeError as e:
print e
def parseResponse(data):
j = json.loads(data)
if 'error' in j:
print j
sys.exit(1)
return j['result']
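# Illustrative (assumed) reply handled by parseResponse:
#   parseResponse('{"result": {"vdpid": 1}}') -> {"vdpid": 1}
# A reply containing an "error" key is printed and the program exits.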
def printVersion(option, opt, value, parser):
"""Print ovxctl version and exit"""
print "ovxctl-%s" % VERSION
sys.exit()
def printHelp (option, opt, value, parser):
"""Print ovxctl help and exit"""
cmds = [x for x in CMDS.iterkeys()]
cmds.remove('help')
cmds.sort()
print parser.format_help().strip()
print "\n Available commands are: "
for x in cmds:
(sdesc, ldesc) = DESCS[x]
print " {0:25} {1:10}".format(x, sdesc)
print "\n See '%s help <command>' for more info on a specific command." % sys.argv[0]
sys.exit()
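# Command table: maps each CLI command name to its (argument parser, handler) pair.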
CMDS = {
'addControllers': (pa_addControllers, do_addControllers),
'createNetwork': (pa_createNetwork, do_createNetwork),
'createSwitch': (pa_createSwitch, do_createSwitch),
'createPort': (pa_createPort, do_createPort),
'setInternalRouting': (pa_setInternalRouting, do_setInternalRouting),
'connectHost': (pa_connectHost, do_connectHost),
'connectLink': (pa_connectLink, do_connectLink),
'setLinkPath': (pa_setLinkPath, do_setLinkPath),
'connectRoute': (pa_connectRoute, do_connectRoute),
'removeNetwork': (pa_removeNetwork, do_removeNetwork),
'removeSwitch': (pa_removeSwitch, do_removeSwitch),
'removePort': (pa_removePort, do_removePort),
'disconnectHost': (pa_disconnectHost, do_disconnectHost),
'disconnectLink': (pa_disconnectLink, do_disconnectLink),
'disconnectRoute': (pa_disconnectRoute, do_disconnectRoute),
'startNetwork': (pa_startNetwork, do_startNetwork),
'startSwitch': (pa_startSwitch, do_startSwitch),
'startPort': (pa_startPort, do_startPort),
'stopNetwork': (pa_stopNetwork, do_stopNetwork),
'stopSwitch': (pa_stopSwitch, do_stopSwitch),
'stopPort': (pa_stopPort, do_stopPort),
'getPhysicalFlowtable': (pa_getPhysicalFlowtable, do_getPhysicalFlowtable),
'getPhysicalHosts': (pa_getPhysicalHosts, do_getPhysicalHosts),
'getPhysicalTopology': (pa_getPhysicalTopology, do_getPhysicalTopology),
'listVirtualNetworks': (pa_listVirtualNetworks, do_listVirtualNetworks),
'getVirtualAddressMapping': (pa_getVirtualAddressMapping, do_getVirtualAddressMapping),
'getVirtualFlowtable': (pa_getVirtualFlowtable, do_getVirtualFlowtable),
'getVirtualHosts': (pa_getVirtualHosts, do_getVirtualHosts),
'getVirtualLinkMapping': (pa_getVirtualLinkMapping, do_getVirtualLinkMapping),
'getVirtualSwitchMapping': (pa_getVirtualSwitchMapping, do_getVirtualSwitchMapping),
'getVirtualTopology': (pa_getVirtualTopology, do_getVirtualTopology),
'help' : (pa_help, do_help)
}
DESCS = {
'addControllers' : ("Adds controllers to a virtual switch",
("Adds the specified list of controllers to a given virtual switch.\n"
"ExampleL addController <tenantId> <vdpid> <ctrlUrls>")),
'createNetwork' : ("Creates a virtual network",
("Creates a virtual network. Input: protocol, controllerIP, controller port, ip address, mask. "
"\nExample: createNetwork tcp 1.1.1.1 6634 192.168.1.0 24")),
'createSwitch' : ("Create virtual switch",
("Create a virtual switch. Must specify a tenant_id, and a list of the physical_dpids that will be part of the virtual switch."
"\nExample: createSwitch 1 00:00:00:00:00:00:00:01,00:00:00:00:00:00:00:02")),
'createPort' : ("Create virtual port",
("Create a virtual port. Must specify a tenant_id, a physical_dpid and a physical_port."
"\nExample: createPort 1 00:00:00:00:00:00:00:01 1")),
'setInternalRouting' : ("Set big-switch internal routing mechanism",
("Set big-switch internal routing mechanism. Must specify a tenant_id, a virtual switch_id, the routing type (spf, manual) "
"and the number (0-255) of the backup paths that have to be computed."
"\nExample: setInternalRouting 1 00:00:00:00:00:00:00:01 spf 128")),
'connectHost' : ("Connect host to a virtual port",
("Connect host to a virtual port. Must specify a tenant_id, a virtual switch_id, a virtual port_id and the host MAC address."
"\nExample: connectHost 1 00:a4:23:05:00:00:00:01 1 00:00:00:00:00:01")),
'connectLink' : ("Connect two virtual ports through a virtual link",
("Connect two virtual ports through a virtual link. Must specify a tenant_id, a virtual src_switch_id, a virtual src_port_id, "
"a virtual dst_switch_id, a virtual dst_port_id, the routing type (spf, manual) and the number (0-255) of the backup paths that have to be computed."
"\nExample: connectLink 1 00:a4:23:05:00:00:00:01 1 00:a4:23:05:00:00:00:02 1 spf 1")),
'setLinkPath' : ("Set the physical path of a virtual link",
("Set the physical path of a virtual link. Must specify a tenant_id, a virtual link_id, a physical path and a priority (0-255)."
"\nExample: connectLink 1 1 00:00:00:00:00:00:00:01/1-00:00:00:00:00:00:00:02/1,"
"00:00:00:00:00:00:00:2/2-00:00:00:00:00:00:00:3/1 128")),
'connectRoute' : ("Connect two virtual ports inside a virtual big-switch",
("Connect two virtual ports inside a virtual big-switch. Must specify a tenant_id, a virtual switch_id, a virtual src_port_id, "
"a virtual dst_port_id, a physical path and a priority (0-255)."
"\nExample: connectRoute 1 00:a4:23:05:00:00:00:01 1 2 00:00:00:00:00:00:00:01/1-00:00:00:00:00:00:00:02/1,"
"00:00:00:00:00:00:00:2/2-00:00:00:00:00:00:00:3/1 128")),
'removeNetwork' : ("Remove a virtual network",
("Remove a virtual network. Must specify a tenant_id."
"\nExample: removeNetwork 1")),
'removeSwitch' : ("Remove virtual switch",
("Remove a virtual switch. Must specify a tenant_id and a virtual switch_id."
"\nExample: removeSwitch 1 00:a4:23:05:00:00:00:01")),
'removePort' : ("Remove virtual port",
("Remove a virtual port. Must specify a tenant_id, a virtual switch_id and a virtual port_id."
"\nExample: removePort 1 00:a4:23:05:00:00:00:01 1")),
'disconnectHost' : ("Disconnect host from a virtual port",
("Disconnect host from a virtual port. Must specify a tenant_id and the host_id."
"\nExample: disconnectHost 1 1")),
'disconnectLink' : ("Disconnect link between two virtual ports",
("Disconnect link between two virtual ports. Must specify a tenant_id and the link_id."
"\nExample: disconnectLink 1 1")),
'disconnectRoute' : ("Disconnect big-switch internal route between two virtual ports",
("Disconnect big-switch internal route between two virtual ports. Must specify a tenant_id and the route_id."
"\nExample: disconnectRoute 1 00:a4:23:05:00:00:00:01 1")),
'startNetwork' : ("Start a virtual network",
("Start a virtual network. Must specify a tenant_id."
"\nExample: startNetwork 1")),
'startSwitch' : ("Start a virtual switch",
("Start a virtual switch. Must specify a tenant_id and a virtual switch_id."
"\nExample: startSwitch 1 00:a4:23:05:00:00:00:01")),
'startPort' : ("Start a virtual port",
("Start a virtual port. Must specify a tenant_id, a virtual switch_id and a virtual port_id."
"\nExample: startPort 1 00:a4:23:05:00:00:00:01 1")),
'stopNetwork' : ("Stop a virtual network",
("Stop a virtual network. Must specify a tenant_id."
"\nExample: stopNetwork 1")),
'stopSwitch' : ("Shutdown a virtual switch",
("Shutdown a virtual switch. Must specify a tenant_id and a virtual switch_id."
"\nExample: stopSwitch 1 00:a4:23:05:00:00:00:01")),
'stopPort' : ("Shutdown a virtual port",
("Shutdown a virtual port. Must specify a tenant_id, a virtual switch_id and a virtual port_id."
"\nExample: stopPort 1 00:a4:23:05:00:00:00:01 1")),
# Monitoring API - admin only
'getPhysicalFlowtable' : ("Get the physical flowtable of a specified switch or all switches",
("Get the physical flowtable of a specified switch or all switches. Specify optional physical switch_id."
"\nExample: getPhysicalFlowtable 00:00:00:00:00:00:00:01")),
'getPhysicalHosts' : ("Get a list of physical hosts",
("Get a list of physical hosts."
"\nExample: getPhysicalHosts")),
'getPhysicalTopology': ("Get the physical topology",
("Get the physical topology."
"\nExample: getPhysicalTopology")),
'listVirtualNetworks': ("Get a list of all virtual network tenant ID's",
("Get a list of all virtual network tenant ID's."
"\nExample: listVirtualNetworks")),
# Monitoring API - tenant restricted
'getVirtualAddressMapping' : ("Get the virtual to physical address mapping for a specified virtual network",
("Get the virtual to physical address mapping. Must specify a virtual network tenant_id."
"\nExample: getVirtualAddressMapping 1")),
'getVirtualFlowtable' : ("Get the flowtable in the specified virtual network",
("Get the flowtable in the specified virtual network. Must specify a virtual switch_id, optional virtual switch_id."
"\nExample: getVirtualFlowtable 00:a4:23:05:00:00:00:01")),
'getVirtualHosts' : ("Get list of hosts in virtual network",
("Get list of hosts in virtual network. Must specify a tenant_id",
"\nExample: getVirtualHosts 1")),
'getVirtualLinkMapping' : ("Get the virtual to physical link mapping",
("Get the virtual to physical link mapping. Must specify a tenant_id.",
"\nExample: getVirtualLinkMapping 1")),
'getVirtualSwitchMapping' : ("Get the virtual to physical switch mapping",
("Get the virtual to physical switch mapping. Must specify a tenant_id.",
"\nExample: getVirtualSwitchMapping 1")),
'getVirtualTopology' : ("Get the virtual topology",
("Get the virtual topology. Must specify a tenant_id.",
"\nExample: getVirtualTopology 1"))
}
USAGE="%prog {}"
URL = "http://%s:%s/%s"
def getPasswd(opts):
if opts.no_passwd:
return ""
else:
return getpass.getpass("Password: ")
def addCommonOpts (parser):
parser.add_option("-h", "--hostname", dest="host", default="localhost",
help="Specify the OpenVirteX host; default='localhost'")
parser.add_option("-p", "--port", dest="port", default="8080",
help="Specify the OpenVirteX web port; default=8080")
parser.add_option("-u", "--user", dest="ovx_user", default="admin",
help="OpenVirtex admin user; default='admin'")
parser.add_option("-n", "--no-passwd", action="store_true", dest="no_passwd", default=False,
help="Run ovxctl with no password; default false")
parser.add_option("-v", "--version", action="callback", callback=printVersion)
parser.add_option("--help", action="callback", callback=printHelp)
def parse_global_args (arglist):
usage = "%s [options] command [command_args]" % sys.argv[0]
args = []
while (len(arglist) != 0 and arglist[0] not in CMDS):
args.append(arglist[0])
arglist.pop(0)
parser = OptionParser(add_help_option=False, usage=usage)
addCommonOpts(parser)
(opts, pargs) = parser.parse_args(args)
return (opts, arglist, parser)
if __name__ == '__main__':
try:
(gopts, rargs, parser) = parse_global_args(sys.argv[1:])
if len(rargs) < 1:
raise IndexError
(parse_args, do_func) = CMDS[rargs[0]]
(opts, args) = parse_args(rargs[1:], rargs[0])
do_func(gopts, opts, args)
sys.exit(0)
except ValueError, e:
print "The argument types being sent to the function %s are incorrect. Please double check them." % sys.argv[1]
except IndexError, e:
print "%s is an unknown command" % sys.argv[-1]
except Exception, e:
print "uknown error"
printHelp(None,None,None,parser)
| {
"content_hash": "6dec896decf2ca415fcf027d2a2fad76",
"timestamp": "",
"source": "github",
"line_count": 825,
"max_line_length": 172,
"avg_line_length": 46.305454545454545,
"alnum_prop": 0.6189990052876813,
"repo_name": "fnkhan/second",
"id": "52b77d559f44dc12a0d4f568bced3aac727746b3",
"size": "38336",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "utils/ovxctl.py",
"mode": "33261",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
import btnlib as btn
import ledlib as led
import time
#the led.startup() function cycles through the leds
led.startup()
time.sleep(1)
print("All on and off")
#to turn on all leds, use the led.turn_on_all() function:
led.turn_on_all()
time.sleep(2)
#to turn off all:
led.turn_off_all()
time.sleep(1)
print("Red on and off")
#to turn on a single led, use a command like this:
led.turn_on(led.red)
#your choices for leds are led.red, led.orange, led.yellow,
# led.green, led.blue, led.purple, and led.white
time.sleep(2)
#to turn it off:
led.turn_off(led.red)
time.sleep(1)
print("Yellow with isOn test")
#the led.isOn(led) function tells you if a particular led is currently on
# same led choices
if led.isOn(led.yellow):
print("Yellow is on")
else :
print("Yellow is off")
time.sleep(2)
led.turn_on(led.yellow)
if led.isOn(led.yellow):
print("Yellow is on")
else :
print("Yellow is off")
time.sleep(2)
led.turn_off(led.yellow)
time.sleep(1)
print("Green and blue switch")
#the led.switch(led) function knows whether an led is on or off and switches its value
# same led choices
led.turn_on(led.green)
time.sleep(2)
led.switch(led.green)
led.switch(led.blue)
time.sleep(2)
led.switch(led.blue)
time.sleep(1)
print("Blink")
#the led.blink(led, number_of_times, seconds_between_blinks) blinks an led on and off
# default is (led.white, 5, 0.25)
led.blink() #blinks the white led 5 times for 0.25 seconds
led.blink(led.purple, 50, 0.01) #blinks the purple led 50 times in 1 second (1/100 of a second pulse length)
print("If switch is on, press yellow for yellow and red for red")
#the btn.isOn(btn) function tells you if a particular button is being pressed or if a switch is on
#your choices for buttons are currently btn.red, btn.yellow, btn.switch
while btn.isOn(btn.switch) :
if btn.isOn(btn.red):
led.switch(led.red)
if btn.isOn(btn.yellow) :
led.switch(led.yellow)
time.sleep(0.25) #this line keeps it from checking too fast and mistaking a long press for multiple presses
print("Goodbye")
btn.GPIO.cleanup()
| {
"content_hash": "68314909adf5abf1a5b4e005e3c23c69",
"timestamp": "",
"source": "github",
"line_count": 72,
"max_line_length": 109,
"avg_line_length": 28.38888888888889,
"alnum_prop": 0.7270058708414873,
"repo_name": "majikpig/ubtech",
"id": "26fff381d6d235fa574afacde72bd215fcff452e",
"size": "2122",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "template.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "27071"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('product', '0014_auto_20170324_1630'),
]
operations = [
migrations.AlterField(
model_name='product',
name='product_price',
field=models.DecimalField(decimal_places=2, default=11.0, max_digits=5),
preserve_default=False,
),
migrations.AlterField(
model_name='product',
name='product_qtd',
field=models.IntegerField(default=1),
preserve_default=False,
),
]
| {
"content_hash": "aa78558822a6baffebf124f99eefa5a4",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 84,
"avg_line_length": 26,
"alnum_prop": 0.58,
"repo_name": "rodrigocnascimento/django-teste",
"id": "4c7dc41700dcb53b49bb71a59a5c5930cb41ea81",
"size": "723",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "product/migrations/0015_auto_20170324_1655.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "11148"
},
{
"name": "HTML",
"bytes": "16505"
},
{
"name": "Python",
"bytes": "37910"
}
],
"symlink_target": ""
} |